1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2023 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
7 ;; This file is part of GCC.
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
26 ;;---------------------------------------------------------------------------
29 ;; Register numbers -- All machine registers should be defined here
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (R4_REGNUM 4) ; Fifth CORE register
34 (FDPIC_REGNUM 9) ; FDPIC register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (LAST_ARM_REGNUM 15) ;
40 (CC_REGNUM 100) ; Condition code pseudo register
41 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
42 (APSRQ_REGNUM 104) ; Q bit pseudo register
43 (APSRGE_REGNUM 105) ; GE bits pseudo register
44 (VPR_REGNUM 106) ; Vector Predication Register - MVE register.
47 ;; 3rd operand to select_dominance_cc_mode
54 ;; conditional compare combination
65 ;;---------------------------------------------------------------------------
68 ;; Processor type. This is created automatically from arm-cores.def.
69 (include "arm-tune.md")
71 ;; Instruction classification types
74 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
75 ; generating ARM code. This is used to control the length of some insn
76 ; patterns that share the same RTL in both ARM and Thumb code.
77 (define_attr "is_thumb" "yes,no"
78 (const (if_then_else (symbol_ref "TARGET_THUMB")
79 (const_string "yes") (const_string "no"))))
81 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
82 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
84 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
85 (define_attr "is_thumb1" "yes,no"
86 (const (if_then_else (symbol_ref "TARGET_THUMB1")
87 (const_string "yes") (const_string "no"))))
89 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
90 ; The arm_restrict_it flag enables the "short IT" feature which
91 ; restricts IT blocks to a single 16-bit instruction.
92 ; This attribute should only be used on 16-bit Thumb-2 instructions
93 ; which may be predicated (the "predicable" attribute must be set).
94 (define_attr "predicable_short_it" "no,yes" (const_string "no"))
96 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
97 ; This attribute should only be used on instructions which may emit
98 ; an IT block in their expansion which is not a short IT.
99 (define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
101 ; Mark an instruction sequence as the required way of loading a
102 ; constant when -mpure-code is enabled (which implies
103 ; arm_disable_literal_pool)
104 (define_attr "required_for_purecode" "no,yes" (const_string "no"))
106 ;; Operand number of an input operand that is shifted. Zero if the
107 ;; given instruction does not shift one of its input operands.
108 (define_attr "shift" "" (const_int 0))
110 ;; [For compatibility with AArch64 in pipeline models]
111 ;; Attribute that specifies whether or not the instruction touches fp
113 (define_attr "fp" "no,yes" (const_string "no"))
115 ; Floating Point Unit. If we only have floating point emulation, then there
116 ; is no point in scheduling the floating point insns. (Well, for best
117 ; performance we should try and group them together).
118 (define_attr "fpu" "none,vfp"
119 (const (symbol_ref "arm_fpu_attr")))
121 ; Predicated means that the insn form is conditionally executed based on a
122 ; predicate. We default to 'no' because no Thumb patterns match this rule
123 ; and not all ARM insns do.
124 (define_attr "predicated" "yes,no" (const_string "no"))
126 ; LENGTH of an instruction (in bytes)
127 (define_attr "length" ""
130 ; The architecture which supports the instruction (or alternative).
131 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
132 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
133 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
134 ; arm_arch6. "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
135 ; Baseline. "fix_vlldm" is for fixing the v8-m/v8.1-m VLLDM erratum.
136 ; This attribute is used to compute attribute "enabled",
137 ; use type "any" to enable an alternative in all cases.
138 (define_attr "arch" "any, a, t, 32, t1, t2, v6,nov6, v6t2, \
139 v8mb, fix_vlldm, iwmmxt, iwmmxt2, armv6_or_vfpv3, \
141 (const_string "any"))
143 (define_attr "arch_enabled" "no,yes"
144 (cond [(eq_attr "arch" "any")
147 (and (eq_attr "arch" "a")
148 (match_test "TARGET_ARM"))
151 (and (eq_attr "arch" "t")
152 (match_test "TARGET_THUMB"))
155 (and (eq_attr "arch" "t1")
156 (match_test "TARGET_THUMB1"))
159 (and (eq_attr "arch" "t2")
160 (match_test "TARGET_THUMB2"))
163 (and (eq_attr "arch" "32")
164 (match_test "TARGET_32BIT"))
167 (and (eq_attr "arch" "v6")
168 (match_test "TARGET_32BIT && arm_arch6"))
171 (and (eq_attr "arch" "nov6")
172 (match_test "TARGET_32BIT && !arm_arch6"))
175 (and (eq_attr "arch" "v6t2")
176 (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
179 (and (eq_attr "arch" "v8mb")
180 (match_test "TARGET_THUMB1 && arm_arch8"))
183 (and (eq_attr "arch" "fix_vlldm")
184 (match_test "fix_vlldm"))
187 (and (eq_attr "arch" "iwmmxt2")
188 (match_test "TARGET_REALLY_IWMMXT2"))
191 (and (eq_attr "arch" "armv6_or_vfpv3")
192 (match_test "arm_arch6 || TARGET_VFP3"))
195 (and (eq_attr "arch" "neon")
196 (match_test "TARGET_NEON"))
199 (and (eq_attr "arch" "mve")
200 (match_test "TARGET_HAVE_MVE"))
204 (const_string "no")))
; OPT marks an insn alternative as intended only when optimizing for
; speed or only when optimizing for size; "any" (the default) allows the
; alternative regardless.  It is combined with the current function's
; optimization setting to compute "opt_enabled".
206 (define_attr "opt" "any,speed,size"
207 (const_string "any"))
209 (define_attr "opt_enabled" "no,yes"
210 (cond [(eq_attr "opt" "any")
213 (and (eq_attr "opt" "speed")
214 (match_test "optimize_function_for_speed_p (cfun)"))
217 (and (eq_attr "opt" "size")
218 (match_test "optimize_function_for_size_p (cfun)"))
219 (const_string "yes")]
220 (const_string "no")))
; USE_LITERAL_POOL is "yes" for FP load insns (types f_loads/f_loadd)
; whose source operand is a constant, i.e. loads that must be satisfied
; from the literal pool.
222 (define_attr "use_literal_pool" "no,yes"
223 (cond [(and (eq_attr "type" "f_loads,f_loadd")
224 (match_test "CONSTANT_P (operands[1])"))
225 (const_string "yes")]
226 (const_string "no")))
228 ; Enable all alternatives that are both arch_enabled and insn_enabled.
229 ; FIXME:: opt_enabled has been temporarily removed till the time we have
230 ; an attribute that allows the use of such alternatives.
231 ; This depends on caching of speed_p, size_p on a per
232 ; alternative basis. The problem is that the enabled attribute
233 ; cannot depend on any state that is not cached or is not constant
234 ; for a compilation unit. We probably need a generic "hot/cold"
235 ; alternative which if implemented can help with this. We disable this
236 ; until such a time as this is implemented and / or the improvements or
237 ; regressions with removing this attribute are double checked.
238 ; See ashldi3_neon and <shift>di3_neon in neon.md.
240 (define_attr "enabled" "no,yes"
241 (cond [(and (eq_attr "predicable_short_it" "no")
242 (and (eq_attr "predicated" "yes")
243 (match_test "arm_restrict_it")))
246 (and (eq_attr "enabled_for_short_it" "no")
247 (match_test "arm_restrict_it"))
250 (and (eq_attr "required_for_purecode" "yes")
251 (not (match_test "arm_disable_literal_pool")))
254 (eq_attr "arch_enabled" "no")
256 (const_string "yes")))
258 ; POOL_RANGE is how far away from a constant pool entry that this insn
259 ; can be placed. If the distance is zero, then this insn will never
260 ; reference the pool.
261 ; Note that for Thumb constant pools the PC value is rounded down to the
262 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
263 ; Thumb insns) should be set to <max_range> - 2.
264 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
265 ; before its address. It is set to <max_range> - (8 + <data_size>).
266 (define_attr "arm_pool_range" "" (const_int 0))
267 (define_attr "thumb2_pool_range" "" (const_int 0))
268 (define_attr "arm_neg_pool_range" "" (const_int 0))
269 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
; POOL_RANGE selects between the per-mode range attributes defined
; above: the Thumb-2 limit when generating Thumb code, the ARM limit
; otherwise.
271 (define_attr "pool_range" ""
272 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
273 (attr "arm_pool_range")))
; NEG_POOL_RANGE likewise picks the Thumb-2 or ARM backwards-reference
; limit depending on the target mode.
274 (define_attr "neg_pool_range" ""
275 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
276 (attr "arm_neg_pool_range")))
278 ; An assembler sequence may clobber the condition codes without us knowing.
279 ; If such an insn references the pool, then we have no way of knowing how,
280 ; so use the most conservative value for pool_range.
281 (define_asm_attributes
282 [(set_attr "conds" "clob")
283 (set_attr "length" "4")
284 (set_attr "pool_range" "250")])
286 ; Load scheduling, set from the arm_ld_sched variable
287 ; initialized by arm_option_override()
288 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
290 ; condition codes: this one is used by final_prescan_insn to speed up
291 ; conditionalizing instructions. It saves having to scan the rtl to see if
292 ; it uses or alters the condition codes.
294 ; USE means that the condition codes are used by the insn in the process of
295 ; outputting code, this means (at present) that we can't use the insn in
298 ; SET means that the purpose of the insn is to set the condition codes in a
299 ; well defined manner.
301 ; CLOB means that the condition codes are altered in an undefined manner, if
302 ; they are altered at all
304 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
305 ; that the instruction does not use or alter the condition codes.
307 ; NOCOND means that the instruction does not use or alter the condition
308 ; codes but can be converted into a conditionally executed instruction.
310 (define_attr "conds" "use,set,clob,unconditional,nocond"
312 (ior (eq_attr "is_thumb1" "yes")
313 (eq_attr "type" "call"))
314 (const_string "clob")
316 (ior (eq_attr "is_neon_type" "yes")
317 (eq_attr "is_mve_type" "yes"))
318 (const_string "unconditional")
319 (const_string "nocond"))))
321 ; Predicable means that the insn can be conditionally executed based on
322 ; an automatically added predicate (additional patterns are generated by
323 ; gen...). We default to 'no' because no Thumb patterns match this rule
324 ; and not all ARM patterns do.
325 (define_attr "predicable" "no,yes" (const_string "no"))
327 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
328 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
329 ; suffer blockages enough to warrant modelling this (and it can adversely
330 ; affect the schedule).
331 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
333 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
334 ; to stall the processor. Used with model_wbuf above.
335 (define_attr "write_conflict" "no,yes"
336 (if_then_else (eq_attr "type"
339 (const_string "no")))
341 ; Classify the insns into those that take one cycle and those that take more
342 ; than one on the main cpu execution unit.
; Insn types listed here are treated as taking a single cycle on the
; main execution unit; any type not in the list falls through to
; "multi".
343 (define_attr "core_cycles" "single,multi"
344 (if_then_else (eq_attr "type"
345 "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
346 alu_shift_imm_lsl_1to4, alu_shift_imm_other, alu_shift_reg, alu_dsp_reg,\
347 alus_ext, alus_imm, alus_sreg,\
348 alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
349 logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
350 logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
351 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
352 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
353 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
354 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
355 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
356 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
357 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
358 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
359 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
360 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
361 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
362 (const_string "single")
363 (const_string "multi")))
365 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
366 ;; distant label. Only applicable to Thumb code.
367 (define_attr "far_jump" "yes,no" (const_string "no"))
370 ;; The number of machine instructions this pattern expands to.
371 ;; Used for Thumb-2 conditional execution.
372 (define_attr "ce_count" "" (const_int 1))
374 ;;---------------------------------------------------------------------------
377 (include "unspecs.md")
379 ;;---------------------------------------------------------------------------
382 (include "iterators.md")
384 ;;---------------------------------------------------------------------------
387 (include "predicates.md")
388 (include "constraints.md")
390 ;;---------------------------------------------------------------------------
391 ;; Pipeline descriptions
393 (define_attr "tune_cortexr4" "yes,no"
395 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
397 (const_string "no"))))
399 ;; True if the generic scheduling description should be used.
401 (define_attr "generic_sched" "yes,no"
403 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
404 arm926ejs,arm10e,arm1026ejs,arm1136js,\
405 arm1136jfs,cortexa5,cortexa7,cortexa8,\
406 cortexa9,cortexa12,cortexa15,cortexa17,\
407 cortexa53,cortexa57,cortexm4,cortexm7,\
408 exynosm1,marvell_pj4,xgene1")
409 (eq_attr "tune_cortexr4" "yes"))
411 (const_string "yes"))))
413 (define_attr "generic_vfp" "yes,no"
415 (and (eq_attr "fpu" "vfp")
416 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
417 cortexa8,cortexa9,cortexa53,cortexm4,\
418 cortexm7,marvell_pj4,xgene1")
419 (eq_attr "tune_cortexr4" "no"))
421 (const_string "no"))))
423 (include "marvell-f-iwmmxt.md")
424 (include "arm-generic.md")
425 (include "arm926ejs.md")
426 (include "arm1020e.md")
427 (include "arm1026ejs.md")
428 (include "arm1136jfs.md")
430 (include "fa606te.md")
431 (include "fa626te.md")
432 (include "fmp626.md")
433 (include "fa726te.md")
434 (include "cortex-a5.md")
435 (include "cortex-a7.md")
436 (include "cortex-a8.md")
437 (include "cortex-a9.md")
438 (include "cortex-a15.md")
439 (include "cortex-a17.md")
440 (include "cortex-a53.md")
441 (include "cortex-a57.md")
442 (include "cortex-r4.md")
443 (include "cortex-r4f.md")
444 (include "cortex-m7.md")
445 (include "cortex-m4.md")
446 (include "cortex-m4-fpu.md")
447 (include "exynos-m1.md")
449 (include "marvell-pj4.md")
450 (include "xgene1.md")
452 ;; define_subst and associated attributes
454 (define_subst "add_setq"
455 [(set (match_operand:SI 0 "" "")
456 (match_operand:SI 1 "" ""))]
460 (set (reg:CC APSRQ_REGNUM)
461 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))])
463 (define_subst_attr "add_clobber_q_name" "add_setq" "" "_setq")
464 (define_subst_attr "add_clobber_q_pred" "add_setq" "!ARM_Q_BIT_READ"
467 ;;---------------------------------------------------------------------------
472 ;; Note: For DImode insns, there is normally no reason why operands should
473 ;; not be in the same register, what we don't want is for something being
474 ;; written to partially overlap something that is an input.
476 (define_expand "adddi3"
478 [(set (match_operand:DI 0 "s_register_operand")
479 (plus:DI (match_operand:DI 1 "s_register_operand")
480 (match_operand:DI 2 "reg_or_int_operand")))
481 (clobber (reg:CC CC_REGNUM))])]
486 if (!REG_P (operands[2]))
487 operands[2] = force_reg (DImode, operands[2]);
491 rtx lo_result, hi_result, lo_dest, hi_dest;
492 rtx lo_op1, hi_op1, lo_op2, hi_op2;
493 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
495 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
496 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
498 if (lo_op2 == const0_rtx)
501 if (!arm_add_operand (hi_op2, SImode))
502 hi_op2 = force_reg (SImode, hi_op2);
503 /* Assume hi_op2 won't also be zero. */
504 emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
508 if (!arm_add_operand (lo_op2, SImode))
509 lo_op2 = force_reg (SImode, lo_op2);
510 if (!arm_not_operand (hi_op2, SImode))
511 hi_op2 = force_reg (SImode, hi_op2);
513 emit_insn (gen_addsi3_compare_op1 (lo_dest, lo_op1, lo_op2));
514 rtx carry = gen_rtx_LTU (SImode, gen_rtx_REG (CC_Cmode, CC_REGNUM),
516 if (hi_op2 == const0_rtx)
517 emit_insn (gen_add0si3_carryin (hi_dest, hi_op1, carry));
519 emit_insn (gen_addsi3_carryin (hi_dest, hi_op1, hi_op2, carry));
522 if (lo_result != lo_dest)
523 emit_move_insn (lo_result, lo_dest);
524 if (hi_result != hi_dest)
525 emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
531 (define_expand "addvsi4"
532 [(match_operand:SI 0 "s_register_operand")
533 (match_operand:SI 1 "s_register_operand")
534 (match_operand:SI 2 "arm_add_operand")
535 (match_operand 3 "")]
538 if (CONST_INT_P (operands[2]))
539 emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1], operands[2]));
541 emit_insn (gen_addsi3_compareV_reg (operands[0], operands[1], operands[2]));
542 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
547 (define_expand "addvdi4"
548 [(match_operand:DI 0 "s_register_operand")
549 (match_operand:DI 1 "s_register_operand")
550 (match_operand:DI 2 "reg_or_int_operand")
551 (match_operand 3 "")]
554 rtx lo_result, hi_result;
555 rtx lo_op1, hi_op1, lo_op2, hi_op2;
556 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
558 lo_result = gen_lowpart (SImode, operands[0]);
559 hi_result = gen_highpart (SImode, operands[0]);
561 if (lo_op2 == const0_rtx)
563 emit_move_insn (lo_result, lo_op1);
564 if (!arm_add_operand (hi_op2, SImode))
565 hi_op2 = force_reg (SImode, hi_op2);
567 emit_insn (gen_addvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
571 if (!arm_add_operand (lo_op2, SImode))
572 lo_op2 = force_reg (SImode, lo_op2);
573 if (!arm_not_operand (hi_op2, SImode))
574 hi_op2 = force_reg (SImode, hi_op2);
576 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
578 if (hi_op2 == const0_rtx)
579 emit_insn (gen_addsi3_cin_vout_0 (hi_result, hi_op1));
580 else if (CONST_INT_P (hi_op2))
581 emit_insn (gen_addsi3_cin_vout_imm (hi_result, hi_op1, hi_op2));
583 emit_insn (gen_addsi3_cin_vout_reg (hi_result, hi_op1, hi_op2));
585 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
591 (define_expand "addsi3_cin_vout_reg"
596 (plus:DI (match_dup 4)
597 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
598 (sign_extend:DI (match_operand:SI 2 "s_register_operand")))
599 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
601 (set (match_operand:SI 0 "s_register_operand")
602 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
606 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
607 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
608 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
609 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
613 (define_insn "*addsi3_cin_vout_reg_insn"
614 [(set (reg:CC_V CC_REGNUM)
618 (match_operand:DI 3 "arm_carry_operation" "")
619 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
620 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
622 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
625 (set (match_operand:SI 0 "s_register_operand" "=l,r")
626 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
632 [(set_attr "type" "alus_sreg")
633 (set_attr "arch" "t2,*")
634 (set_attr "length" "2,4")]
637 (define_expand "addsi3_cin_vout_imm"
642 (plus:DI (match_dup 4)
643 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
645 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
647 (set (match_operand:SI 0 "s_register_operand")
648 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
649 (match_operand 2 "arm_adcimm_operand")))])]
652 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
653 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
654 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
655 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
659 (define_insn "*addsi3_cin_vout_imm_insn"
660 [(set (reg:CC_V CC_REGNUM)
664 (match_operand:DI 3 "arm_carry_operation" "")
665 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
666 (match_operand 2 "arm_adcimm_operand" "I,K"))
668 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
671 (set (match_operand:SI 0 "s_register_operand" "=r,r")
672 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
677 sbcs%?\\t%0, %1, #%B2"
678 [(set_attr "type" "alus_imm")]
681 (define_expand "addsi3_cin_vout_0"
685 (plus:DI (match_dup 3)
686 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
687 (sign_extend:DI (plus:SI (match_dup 4) (match_dup 1)))))
688 (set (match_operand:SI 0 "s_register_operand")
689 (plus:SI (match_dup 4) (match_dup 1)))])]
692 operands[2] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
693 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
694 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
695 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
699 (define_insn "*addsi3_cin_vout_0_insn"
700 [(set (reg:CC_V CC_REGNUM)
703 (match_operand:DI 2 "arm_carry_operation" "")
704 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
705 (sign_extend:DI (plus:SI
706 (match_operand:SI 3 "arm_carry_operation" "")
708 (set (match_operand:SI 0 "s_register_operand" "=r")
709 (plus:SI (match_dup 3) (match_dup 1)))]
711 "adcs%?\\t%0, %1, #0"
712 [(set_attr "type" "alus_imm")]
715 (define_expand "uaddvsi4"
716 [(match_operand:SI 0 "s_register_operand")
717 (match_operand:SI 1 "s_register_operand")
718 (match_operand:SI 2 "arm_add_operand")
719 (match_operand 3 "")]
722 emit_insn (gen_addsi3_compare_op1 (operands[0], operands[1], operands[2]));
723 arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
728 (define_expand "uaddvdi4"
729 [(match_operand:DI 0 "s_register_operand")
730 (match_operand:DI 1 "s_register_operand")
731 (match_operand:DI 2 "reg_or_int_operand")
732 (match_operand 3 "")]
735 rtx lo_result, hi_result;
736 rtx lo_op1, hi_op1, lo_op2, hi_op2;
737 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
739 lo_result = gen_lowpart (SImode, operands[0]);
740 hi_result = gen_highpart (SImode, operands[0]);
742 if (lo_op2 == const0_rtx)
744 emit_move_insn (lo_result, lo_op1);
745 if (!arm_add_operand (hi_op2, SImode))
746 hi_op2 = force_reg (SImode, hi_op2);
748 emit_insn (gen_uaddvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
752 if (!arm_add_operand (lo_op2, SImode))
753 lo_op2 = force_reg (SImode, lo_op2);
754 if (!arm_not_operand (hi_op2, SImode))
755 hi_op2 = force_reg (SImode, hi_op2);
757 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
759 if (hi_op2 == const0_rtx)
760 emit_insn (gen_addsi3_cin_cout_0 (hi_result, hi_op1));
761 else if (CONST_INT_P (hi_op2))
762 emit_insn (gen_addsi3_cin_cout_imm (hi_result, hi_op1, hi_op2));
764 emit_insn (gen_addsi3_cin_cout_reg (hi_result, hi_op1, hi_op2));
766 arm_gen_unlikely_cbranch (GEU, CC_ADCmode, operands[3]);
772 (define_expand "addsi3_cin_cout_reg"
777 (plus:DI (match_dup 4)
778 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
779 (zero_extend:DI (match_operand:SI 2 "s_register_operand")))
780 (const_int 4294967296)))
781 (set (match_operand:SI 0 "s_register_operand")
782 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
786 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
787 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
788 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
789 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
793 (define_insn "*addsi3_cin_cout_reg_insn"
794 [(set (reg:CC_ADC CC_REGNUM)
798 (match_operand:DI 3 "arm_carry_operation" "")
799 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
800 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
801 (const_int 4294967296)))
802 (set (match_operand:SI 0 "s_register_operand" "=l,r")
803 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
810 [(set_attr "type" "alus_sreg")
811 (set_attr "arch" "t2,*")
812 (set_attr "length" "2,4")]
815 (define_expand "addsi3_cin_cout_imm"
820 (plus:DI (match_dup 4)
821 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
823 (const_int 4294967296)))
824 (set (match_operand:SI 0 "s_register_operand")
825 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
826 (match_operand:SI 2 "arm_adcimm_operand")))])]
829 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
830 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
831 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
832 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
833 operands[6] = GEN_INT (UINTVAL (operands[2]) & 0xffffffff);
837 (define_insn "*addsi3_cin_cout_imm_insn"
838 [(set (reg:CC_ADC CC_REGNUM)
842 (match_operand:DI 3 "arm_carry_operation" "")
843 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
844 (match_operand:DI 5 "const_int_operand" "n,n"))
845 (const_int 4294967296)))
846 (set (match_operand:SI 0 "s_register_operand" "=r,r")
847 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
849 (match_operand:SI 2 "arm_adcimm_operand" "I,K")))]
851 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[5])"
854 sbcs%?\\t%0, %1, #%B2"
855 [(set_attr "type" "alus_imm")]
858 (define_expand "addsi3_cin_cout_0"
862 (plus:DI (match_dup 3)
863 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
864 (const_int 4294967296)))
865 (set (match_operand:SI 0 "s_register_operand")
866 (plus:SI (match_dup 4) (match_dup 1)))])]
869 operands[2] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
870 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
871 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
872 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
876 (define_insn "*addsi3_cin_cout_0_insn"
877 [(set (reg:CC_ADC CC_REGNUM)
880 (match_operand:DI 2 "arm_carry_operation" "")
881 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
882 (const_int 4294967296)))
883 (set (match_operand:SI 0 "s_register_operand" "=r")
884 (plus:SI (match_operand:SI 3 "arm_carry_operation" "") (match_dup 1)))]
886 "adcs%?\\t%0, %1, #0"
887 [(set_attr "type" "alus_imm")]
890 (define_expand "addsi3"
891 [(set (match_operand:SI 0 "s_register_operand")
892 (plus:SI (match_operand:SI 1 "s_register_operand")
893 (match_operand:SI 2 "reg_or_int_operand")))]
896 if (TARGET_32BIT && CONST_INT_P (operands[2]))
898 arm_split_constant (PLUS, SImode, NULL_RTX,
899 INTVAL (operands[2]), operands[0], operands[1],
900 optimize && can_create_pseudo_p ());
906 ; If there is a scratch available, this will be faster than synthesizing the
909 [(match_scratch:SI 3 "r")
910 (set (match_operand:SI 0 "arm_general_register_operand" "")
911 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
912 (match_operand:SI 2 "const_int_operand" "")))]
914 !(const_ok_for_arm (INTVAL (operands[2]))
915 || const_ok_for_arm (-INTVAL (operands[2])))
916 && const_ok_for_arm (~INTVAL (operands[2]))"
917 [(set (match_dup 3) (match_dup 2))
918 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
922 ;; The r/r/k alternative is required when reloading the address
923 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
924 ;; put the duplicated register first, and not try the commutative version.
925 (define_insn_and_split "*arm_addsi3"
926 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
927 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
928 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
944 subw%?\\t%0, %1, #%n2
945 subw%?\\t%0, %1, #%n2
948 && CONST_INT_P (operands[2])
949 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
950 && (reload_completed || !arm_eliminable_register (operands[1]))"
951 [(clobber (const_int 0))]
953 arm_split_constant (PLUS, SImode, curr_insn,
954 INTVAL (operands[2]), operands[0],
958 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
959 (set_attr "predicable" "yes")
960 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
961 (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
962 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
963 (const_string "alu_imm")
964 (const_string "alu_sreg")))
968 (define_insn "addsi3_compareV_reg"
969 [(set (reg:CC_V CC_REGNUM)
972 (sign_extend:DI (match_operand:SI 1 "register_operand" "%l,0,r"))
973 (sign_extend:DI (match_operand:SI 2 "register_operand" "l,r,r")))
974 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
975 (set (match_operand:SI 0 "register_operand" "=l,r,r")
976 (plus:SI (match_dup 1) (match_dup 2)))]
978 "adds%?\\t%0, %1, %2"
979 [(set_attr "conds" "set")
980 (set_attr "arch" "t2,t2,*")
981 (set_attr "length" "2,2,4")
982 (set_attr "type" "alus_sreg")]
985 (define_insn "*addsi3_compareV_reg_nosum"
986 [(set (reg:CC_V CC_REGNUM)
989 (sign_extend:DI (match_operand:SI 0 "register_operand" "%l,r"))
990 (sign_extend:DI (match_operand:SI 1 "register_operand" "l,r")))
991 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
994 [(set_attr "conds" "set")
995 (set_attr "arch" "t2,*")
996 (set_attr "length" "2,4")
997 (set_attr "type" "alus_sreg")]
1000 (define_insn "subvsi3_intmin"
1001 [(set (reg:CC_V CC_REGNUM)
1005 (match_operand:SI 1 "register_operand" "r"))
1006 (const_int 2147483648))
1007 (sign_extend:DI (plus:SI (match_dup 1) (const_int -2147483648)))))
1008 (set (match_operand:SI 0 "register_operand" "=r")
1009 (plus:SI (match_dup 1) (const_int -2147483648)))]
1011 "subs%?\\t%0, %1, #-2147483648"
1012 [(set_attr "conds" "set")
1013 (set_attr "type" "alus_imm")]
1016 (define_insn "addsi3_compareV_imm"
1017 [(set (reg:CC_V CC_REGNUM)
1021 (match_operand:SI 1 "register_operand" "l,0,l,0,r,r"))
1022 (match_operand 2 "arm_addimm_operand" "Pd,Py,Px,Pw,I,L"))
1023 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
1024 (set (match_operand:SI 0 "register_operand" "=l,l,l,l,r,r")
1025 (plus:SI (match_dup 1) (match_dup 2)))]
1027 && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
1031 subs%?\\t%0, %1, #%n2
1032 subs%?\\t%0, %0, #%n2
1034 subs%?\\t%0, %1, #%n2"
1035 [(set_attr "conds" "set")
1036 (set_attr "arch" "t2,t2,t2,t2,*,*")
1037 (set_attr "length" "2,2,2,2,4,4")
1038 (set_attr "type" "alus_imm")]
1041 (define_insn "addsi3_compareV_imm_nosum"
1042 [(set (reg:CC_V CC_REGNUM)
1046 (match_operand:SI 0 "register_operand" "l,r,r"))
1047 (match_operand 1 "arm_addimm_operand" "Pw,I,L"))
1048 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1050 && INTVAL (operands[1]) == ARM_SIGN_EXTEND (INTVAL (operands[1]))"
1055 [(set_attr "conds" "set")
1056 (set_attr "arch" "t2,*,*")
1057 (set_attr "length" "2,4,4")
1058 (set_attr "type" "alus_imm")]
1061 ;; We can handle more constants efficiently if we can clobber either a scratch
1062 ;; or the other source operand. We deliberately leave this late as in
1063 ;; high register pressure situations it's not worth forcing any reloads.
1065 [(match_scratch:SI 2 "l")
1066 (set (reg:CC_V CC_REGNUM)
1070 (match_operand:SI 0 "low_register_operand"))
1071 (match_operand 1 "const_int_operand"))
1072 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1074 && satisfies_constraint_Pd (operands[1])"
1076 (set (reg:CC_V CC_REGNUM)
1078 (plus:DI (sign_extend:DI (match_dup 0))
1079 (sign_extend:DI (match_dup 1)))
1080 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1081 (set (match_dup 2) (plus:SI (match_dup 0) (match_dup 1)))])]
1085 [(set (reg:CC_V CC_REGNUM)
1089 (match_operand:SI 0 "low_register_operand"))
1090 (match_operand 1 "const_int_operand"))
1091 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1093 && dead_or_set_p (peep2_next_insn (0), operands[0])
1094 && satisfies_constraint_Py (operands[1])"
1096 (set (reg:CC_V CC_REGNUM)
1098 (plus:DI (sign_extend:DI (match_dup 0))
1099 (sign_extend:DI (match_dup 1)))
1100 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1101 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 1)))])]
1104 (define_insn "addsi3_compare0"
1105 [(set (reg:CC_NZ CC_REGNUM)
1107 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
1108 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
1110 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1111 (plus:SI (match_dup 1) (match_dup 2)))]
1115 subs%?\\t%0, %1, #%n2
1116 adds%?\\t%0, %1, %2"
1117 [(set_attr "conds" "set")
1118 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
1121 (define_insn "*addsi3_compare0_scratch"
1122 [(set (reg:CC_NZ CC_REGNUM)
1124 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
1125 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
1132 [(set_attr "conds" "set")
1133 (set_attr "predicable" "yes")
1134 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
1137 (define_insn "*compare_negsi_si"
1138 [(set (reg:CC_Z CC_REGNUM)
1140 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
1141 (match_operand:SI 1 "s_register_operand" "l,r")))]
1144 [(set_attr "conds" "set")
1145 (set_attr "predicable" "yes")
1146 (set_attr "arch" "t2,*")
1147 (set_attr "length" "2,4")
1148 (set_attr "predicable_short_it" "yes,no")
1149 (set_attr "type" "alus_sreg")]
1152 ;; This is the canonicalization of subsi3_compare when the
1153 ;; addend is a constant.
1154 (define_insn "cmpsi2_addneg"
1155 [(set (reg:CC CC_REGNUM)
1157 (match_operand:SI 1 "s_register_operand" "r,r")
1158 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
1159 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1160 (plus:SI (match_dup 1)
1161 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
1163 && (INTVAL (operands[2])
1164 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
1166 /* For 0 and INT_MIN it is essential that we use subs, as adds will result
1167 in different condition codes (like cmn rather than like cmp), so that
1168 alternative comes first. Both alternatives can match for any 0x??000000
1169 where except for 0 and INT_MIN it doesn't matter what we choose, and also
1170 for -1 and 1 with TARGET_THUMB2, in that case prefer instruction with #1
1171 as it is shorter. */
1172 if (which_alternative == 0 && operands[3] != const1_rtx)
1173 return "subs%?\\t%0, %1, #%n3";
1175 return "adds%?\\t%0, %1, %3";
1177 [(set_attr "conds" "set")
1178 (set_attr "type" "alus_sreg")]
1181 ;; Convert the sequence
1183 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
1187 ;; bcs dest ((unsigned)rn >= 1)
1188 ;; similarly for the beq variant using bcc.
1189 ;; This is a common looping idiom (while (n--))
1191 [(set (match_operand:SI 0 "arm_general_register_operand" "")
1192 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
1194 (set (match_operand 2 "cc_register" "")
1195 (compare (match_dup 0) (const_int -1)))
1197 (if_then_else (match_operator 3 "equality_operator"
1198 [(match_dup 2) (const_int 0)])
1199 (match_operand 4 "" "")
1200 (match_operand 5 "" "")))]
1201 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
1205 (match_dup 1) (const_int 1)))
1206 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
1208 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
1211 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
1212 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
1215 operands[2], const0_rtx);"
1218 ;; The next four insns work because they compare the result with one of
1219 ;; the operands, and we know that the use of the condition code is
1220 ;; either GEU or LTU, so we can use the carry flag from the addition
1221 ;; instead of doing the compare a second time.
1222 (define_insn "addsi3_compare_op1"
1223 [(set (reg:CC_C CC_REGNUM)
1225 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,rk,rk")
1226 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rkI,L"))
1228 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,rk,rk")
1229 (plus:SI (match_dup 1) (match_dup 2)))]
1234 subs%?\\t%0, %1, #%n2
1235 subs%?\\t%0, %0, #%n2
1237 subs%?\\t%0, %1, #%n2"
1238 [(set_attr "conds" "set")
1239 (set_attr "arch" "t2,t2,t2,t2,*,*")
1240 (set_attr "length" "2,2,2,2,4,4")
1242 (if_then_else (match_operand 2 "const_int_operand")
1243 (const_string "alu_imm")
1244 (const_string "alu_sreg")))]
1247 (define_insn "*addsi3_compare_op2"
1248 [(set (reg:CC_C CC_REGNUM)
1250 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r")
1251 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rI,L"))
1253 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r")
1254 (plus:SI (match_dup 1) (match_dup 2)))]
1259 subs%?\\t%0, %1, #%n2
1260 subs%?\\t%0, %0, #%n2
1262 subs%?\\t%0, %1, #%n2"
1263 [(set_attr "conds" "set")
1264 (set_attr "arch" "t2,t2,t2,t2,*,*")
1265 (set_attr "length" "2,2,2,2,4,4")
1267 (if_then_else (match_operand 2 "const_int_operand")
1268 (const_string "alu_imm")
1269 (const_string "alu_sreg")))]
1272 (define_insn "*compare_addsi2_op0"
1273 [(set (reg:CC_C CC_REGNUM)
1275 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1276 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1284 [(set_attr "conds" "set")
1285 (set_attr "predicable" "yes")
1286 (set_attr "arch" "t2,t2,*,*")
1287 (set_attr "predicable_short_it" "yes,yes,no,no")
1288 (set_attr "length" "2,2,4,4")
1290 (if_then_else (match_operand 1 "const_int_operand")
1291 (const_string "alu_imm")
1292 (const_string "alu_sreg")))]
1295 (define_insn "*compare_addsi2_op1"
1296 [(set (reg:CC_C CC_REGNUM)
1298 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1299 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1307 [(set_attr "conds" "set")
1308 (set_attr "predicable" "yes")
1309 (set_attr "arch" "t2,t2,*,*")
1310 (set_attr "predicable_short_it" "yes,yes,no,no")
1311 (set_attr "length" "2,2,4,4")
1313 (if_then_else (match_operand 1 "const_int_operand")
1314 (const_string "alu_imm")
1315 (const_string "alu_sreg")))]
1318 (define_insn "addsi3_carryin"
1319 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1320 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
1321 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
1322 (match_operand:SI 3 "arm_carry_operation" "")))]
1327 sbc%?\\t%0, %1, #%B2"
1328 [(set_attr "conds" "use")
1329 (set_attr "predicable" "yes")
1330 (set_attr "arch" "t2,*,*")
1331 (set_attr "length" "4")
1332 (set_attr "predicable_short_it" "yes,no,no")
1333 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1336 ;; Canonicalization of the above when the immediate is zero.
1337 (define_insn "add0si3_carryin"
1338 [(set (match_operand:SI 0 "s_register_operand" "=r")
1339 (plus:SI (match_operand:SI 2 "arm_carry_operation" "")
1340 (match_operand:SI 1 "arm_not_operand" "r")))]
1342 "adc%?\\t%0, %1, #0"
1343 [(set_attr "conds" "use")
1344 (set_attr "predicable" "yes")
1345 (set_attr "length" "4")
1346 (set_attr "type" "adc_imm")]
1349 (define_insn "*addsi3_carryin_alt2"
1350 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1351 (plus:SI (plus:SI (match_operand:SI 3 "arm_carry_operation" "")
1352 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
1353 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
1358 sbc%?\\t%0, %1, #%B2"
1359 [(set_attr "conds" "use")
1360 (set_attr "predicable" "yes")
1361 (set_attr "arch" "t2,*,*")
1362 (set_attr "length" "4")
1363 (set_attr "predicable_short_it" "yes,no,no")
1364 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1367 (define_insn "*addsi3_carryin_shift"
1368 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1370 (match_operator:SI 2 "shift_operator"
1371 [(match_operand:SI 3 "s_register_operand" "r,r")
1372 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1373 (match_operand:SI 5 "arm_carry_operation" ""))
1374 (match_operand:SI 1 "s_register_operand" "r,r")))]
1376 "adc%?\\t%0, %1, %3%S2"
1377 [(set_attr "conds" "use")
1378 (set_attr "arch" "32,a")
1379 (set_attr "shift" "3")
1380 (set_attr "predicable" "yes")
1381 (set_attr "autodetect_type" "alu_shift_operator2")]
1384 (define_insn "*addsi3_carryin_clobercc"
1385 [(set (match_operand:SI 0 "s_register_operand" "=r")
1386 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1387 (match_operand:SI 2 "arm_rhs_operand" "rI"))
1388 (match_operand:SI 3 "arm_carry_operation" "")))
1389 (clobber (reg:CC CC_REGNUM))]
1391 "adcs%?\\t%0, %1, %2"
1392 [(set_attr "conds" "set")
1393 (set_attr "type" "adcs_reg")]
1396 (define_expand "subvsi4"
1397 [(match_operand:SI 0 "s_register_operand")
1398 (match_operand:SI 1 "arm_rhs_operand")
1399 (match_operand:SI 2 "arm_add_operand")
1400 (match_operand 3 "")]
1403 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1405 /* If both operands are constants we can decide the result statically. */
1406 wi::overflow_type overflow;
1407 wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1408 rtx_mode_t (operands[2], SImode),
1410 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1411 if (overflow != wi::OVF_NONE)
1412 emit_jump_insn (gen_jump (operands[3]));
1415 else if (CONST_INT_P (operands[2]))
1417 operands[2] = GEN_INT (-INTVAL (operands[2]));
1418 /* Special case for INT_MIN. */
1419 if (INTVAL (operands[2]) == 0x80000000)
1420 emit_insn (gen_subvsi3_intmin (operands[0], operands[1]));
1422 emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1],
1425 else if (CONST_INT_P (operands[1]))
1426 emit_insn (gen_subvsi3_imm1 (operands[0], operands[1], operands[2]));
1428 emit_insn (gen_subvsi3 (operands[0], operands[1], operands[2]));
1430 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
1434 (define_expand "subvdi4"
1435 [(match_operand:DI 0 "s_register_operand")
1436 (match_operand:DI 1 "reg_or_int_operand")
1437 (match_operand:DI 2 "reg_or_int_operand")
1438 (match_operand 3 "")]
1441 rtx lo_result, hi_result;
1442 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1443 lo_result = gen_lowpart (SImode, operands[0]);
1444 hi_result = gen_highpart (SImode, operands[0]);
1445 machine_mode mode = CCmode;
1447 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1449 /* If both operands are constants we can decide the result statically. */
1450 wi::overflow_type overflow;
1451 wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1452 rtx_mode_t (operands[2], DImode),
1454 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1455 if (overflow != wi::OVF_NONE)
1456 emit_jump_insn (gen_jump (operands[3]));
1459 else if (CONST_INT_P (operands[1]))
1461 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1463 if (const_ok_for_arm (INTVAL (lo_op1)))
1465 emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1466 GEN_INT (~UINTVAL (lo_op1))));
1467 /* We could potentially use RSC here in Arm state, but not
1468 in Thumb, so it's probably not worth the effort of handling
1470 hi_op1 = force_reg (SImode, hi_op1);
1474 operands[1] = force_reg (DImode, operands[1]);
1477 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1479 if (lo_op2 == const0_rtx)
1481 emit_move_insn (lo_result, lo_op1);
1482 if (!arm_add_operand (hi_op2, SImode))
1483 hi_op2 = force_reg (SImode, hi_op2);
1484 emit_insn (gen_subvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1488 if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1489 lo_op2 = force_reg (SImode, lo_op2);
1490 if (CONST_INT_P (lo_op2))
1491 emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1492 gen_int_mode (-INTVAL (lo_op2), SImode)));
1494 emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1497 if (!arm_not_operand (hi_op2, SImode))
1498 hi_op2 = force_reg (SImode, hi_op2);
1499 rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1500 if (CONST_INT_P (hi_op2))
1501 emit_insn (gen_subvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1502 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1503 gen_rtx_LTU (DImode, ccreg,
1506 emit_insn (gen_subvsi3_borrow (hi_result, hi_op1, hi_op2,
1507 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1508 gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1509 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
1514 (define_expand "usubvsi4"
1515 [(match_operand:SI 0 "s_register_operand")
1516 (match_operand:SI 1 "arm_rhs_operand")
1517 (match_operand:SI 2 "arm_add_operand")
1518 (match_operand 3 "")]
1521 machine_mode mode = CCmode;
1522 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1524 /* If both operands are constants we can decide the result statically. */
1525 wi::overflow_type overflow;
1526 wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1527 rtx_mode_t (operands[2], SImode),
1528 UNSIGNED, &overflow);
1529 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1530 if (overflow != wi::OVF_NONE)
1531 emit_jump_insn (gen_jump (operands[3]));
1534 else if (CONST_INT_P (operands[2]))
1535 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
1536 gen_int_mode (-INTVAL (operands[2]),
1538 else if (CONST_INT_P (operands[1]))
1541 emit_insn (gen_rsb_imm_compare (operands[0], operands[1], operands[2],
1542 GEN_INT (~UINTVAL (operands[1]))));
1545 emit_insn (gen_subsi3_compare1 (operands[0], operands[1], operands[2]));
1546 arm_gen_unlikely_cbranch (LTU, mode, operands[3]);
1551 (define_expand "usubvdi4"
1552 [(match_operand:DI 0 "s_register_operand")
1553 (match_operand:DI 1 "reg_or_int_operand")
1554 (match_operand:DI 2 "reg_or_int_operand")
1555 (match_operand 3 "")]
1558 rtx lo_result, hi_result;
1559 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1560 lo_result = gen_lowpart (SImode, operands[0]);
1561 hi_result = gen_highpart (SImode, operands[0]);
1562 machine_mode mode = CCmode;
1564 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1566 /* If both operands are constants we can decide the result statically. */
1567 wi::overflow_type overflow;
1568 wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1569 rtx_mode_t (operands[2], DImode),
1570 UNSIGNED, &overflow);
1571 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1572 if (overflow != wi::OVF_NONE)
1573 emit_jump_insn (gen_jump (operands[3]));
1576 else if (CONST_INT_P (operands[1]))
1578 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1580 if (const_ok_for_arm (INTVAL (lo_op1)))
1582 emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1583 GEN_INT (~UINTVAL (lo_op1))));
1584 /* We could potentially use RSC here in Arm state, but not
1585 in Thumb, so it's probably not worth the effort of handling
1587 hi_op1 = force_reg (SImode, hi_op1);
1591 operands[1] = force_reg (DImode, operands[1]);
1594 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1596 if (lo_op2 == const0_rtx)
1598 emit_move_insn (lo_result, lo_op1);
1599 if (!arm_add_operand (hi_op2, SImode))
1600 hi_op2 = force_reg (SImode, hi_op2);
1601 emit_insn (gen_usubvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1605 if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1606 lo_op2 = force_reg (SImode, lo_op2);
1607 if (CONST_INT_P (lo_op2))
1608 emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1609 gen_int_mode (-INTVAL (lo_op2), SImode)));
1611 emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1614 if (!arm_not_operand (hi_op2, SImode))
1615 hi_op2 = force_reg (SImode, hi_op2);
1616 rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1617 if (CONST_INT_P (hi_op2))
1618 emit_insn (gen_usubvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1619 GEN_INT (UINTVAL (hi_op2) & 0xffffffff),
1620 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1621 gen_rtx_LTU (DImode, ccreg,
1624 emit_insn (gen_usubvsi3_borrow (hi_result, hi_op1, hi_op2,
1625 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1626 gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1627 arm_gen_unlikely_cbranch (LTU, CC_Bmode, operands[3]);
1632 (define_insn "subsi3_compare1"
1633 [(set (reg:CC CC_REGNUM)
1635 (match_operand:SI 1 "register_operand" "r")
1636 (match_operand:SI 2 "register_operand" "r")))
1637 (set (match_operand:SI 0 "register_operand" "=r")
1638 (minus:SI (match_dup 1) (match_dup 2)))]
1640 "subs%?\\t%0, %1, %2"
1641 [(set_attr "conds" "set")
1642 (set_attr "type" "alus_sreg")]
1645 (define_insn "subvsi3"
1646 [(set (reg:CC_V CC_REGNUM)
1649 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "l,r"))
1650 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
1651 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1652 (set (match_operand:SI 0 "s_register_operand" "=l,r")
1653 (minus:SI (match_dup 1) (match_dup 2)))]
1655 "subs%?\\t%0, %1, %2"
1656 [(set_attr "conds" "set")
1657 (set_attr "arch" "t2,*")
1658 (set_attr "length" "2,4")
1659 (set_attr "type" "alus_sreg")]
1662 (define_insn "subvsi3_imm1"
1663 [(set (reg:CC_V CC_REGNUM)
1666 (match_operand 1 "arm_immediate_operand" "I")
1667 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1668 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1669 (set (match_operand:SI 0 "s_register_operand" "=r")
1670 (minus:SI (match_dup 1) (match_dup 2)))]
1672 "rsbs%?\\t%0, %2, %1"
1673 [(set_attr "conds" "set")
1674 (set_attr "type" "alus_imm")]
1677 (define_insn "subsi3_carryin"
1678 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1679 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
1680 (match_operand:SI 2 "s_register_operand" "r,r,r"))
1681 (match_operand:SI 3 "arm_borrow_operation" "")))]
1686 sbc%?\\t%0, %2, %2, lsl #1"
1687 [(set_attr "conds" "use")
1688 (set_attr "arch" "*,a,t2")
1689 (set_attr "predicable" "yes")
1690 (set_attr "type" "adc_reg,adc_imm,alu_shift_imm_lsl_1to4")]
1693 ;; Special canonicalization of the above when operand1 == (const_int 1):
1694 ;; in this case the 'borrow' needs to treated like subtracting from the carry.
1695 (define_insn "rsbsi_carryin_reg"
1696 [(set (match_operand:SI 0 "s_register_operand" "=r")
1697 (minus:SI (match_operand:SI 1 "arm_carry_operation" "")
1698 (match_operand:SI 2 "s_register_operand" "r")))]
1700 "rsc%?\\t%0, %2, #1"
1701 [(set_attr "conds" "use")
1702 (set_attr "predicable" "yes")
1703 (set_attr "type" "adc_imm")]
1706 ;; SBC performs Rn - Rm - ~C, but -Rm = ~Rm + 1 => Rn + ~Rm + 1 - ~C
1707 ;; => Rn + ~Rm + C, which is essentially ADC Rd, Rn, ~Rm
1708 (define_insn "*add_not_cin"
1709 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1711 (plus:SI (not:SI (match_operand:SI 1 "s_register_operand" "r,r"))
1712 (match_operand:SI 3 "arm_carry_operation" ""))
1713 (match_operand:SI 2 "arm_rhs_operand" "r,I")))]
1714 "TARGET_ARM || (TARGET_THUMB2 && !CONST_INT_P (operands[2]))"
1718 [(set_attr "conds" "use")
1719 (set_attr "predicable" "yes")
1720 (set_attr "arch" "*,a")
1721 (set_attr "type" "adc_reg,adc_imm")]
1724 ;; On Arm we can also use the same trick when the non-inverted operand is
1725 ;; shifted, using RSC.
1726 (define_insn "add_not_shift_cin"
1727 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1729 (plus:SI (match_operator:SI 3 "shift_operator"
1730 [(match_operand:SI 1 "s_register_operand" "r,r")
1731 (match_operand:SI 2 "shift_amount_operand" "M,r")])
1732 (not:SI (match_operand:SI 4 "s_register_operand" "r,r")))
1733 (match_operand:SI 5 "arm_carry_operation" "")))]
1735 "rsc%?\\t%0, %4, %1%S3"
1736 [(set_attr "conds" "use")
1737 (set_attr "predicable" "yes")
1738 (set_attr "autodetect_type" "alu_shift_operator3")]
1741 (define_insn "cmpsi3_carryin_<CC_EXTEND>out"
1742 [(set (reg:<CC_EXTEND> CC_REGNUM)
1743 (compare:<CC_EXTEND>
1744 (SE:DI (match_operand:SI 1 "s_register_operand" "0,r"))
1745 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1746 (SE:DI (match_operand:SI 2 "s_register_operand" "l,r")))))
1747 (clobber (match_scratch:SI 0 "=l,r"))]
1750 [(set_attr "conds" "set")
1751 (set_attr "arch" "t2,*")
1752 (set_attr "length" "2,4")
1753 (set_attr "type" "adc_reg")]
1756 ;; Similar to the above, but handling a constant which has a different
1757 ;; canonicalization.
1758 (define_insn "cmpsi3_imm_carryin_<CC_EXTEND>out"
1759 [(set (reg:<CC_EXTEND> CC_REGNUM)
1760 (compare:<CC_EXTEND>
1761 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1762 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1763 (match_operand:DI 2 "arm_adcimm_operand" "I,K"))))
1764 (clobber (match_scratch:SI 0 "=l,r"))]
1768 adcs\\t%0, %1, #%B2"
1769 [(set_attr "conds" "set")
1770 (set_attr "type" "adc_imm")]
1773 ;; Further canonicalization when the constant is zero.
1774 (define_insn "cmpsi3_0_carryin_<CC_EXTEND>out"
1775 [(set (reg:<CC_EXTEND> CC_REGNUM)
1776 (compare:<CC_EXTEND>
1777 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1778 (match_operand:DI 2 "arm_borrow_operation" "")))
1779 (clobber (match_scratch:SI 0 "=l,r"))]
1782 [(set_attr "conds" "set")
1783 (set_attr "type" "adc_imm")]
1786 (define_insn "*subsi3_carryin_const"
1787 [(set (match_operand:SI 0 "s_register_operand" "=r")
1789 (match_operand:SI 1 "s_register_operand" "r")
1790 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1791 (match_operand:SI 3 "arm_borrow_operation" "")))]
1793 "sbc\\t%0, %1, #%n2"
1794 [(set_attr "conds" "use")
1795 (set_attr "type" "adc_imm")]
1798 (define_insn "*subsi3_carryin_const0"
1799 [(set (match_operand:SI 0 "s_register_operand" "=r")
1800 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1801 (match_operand:SI 2 "arm_borrow_operation" "")))]
1804 [(set_attr "conds" "use")
1805 (set_attr "type" "adc_imm")]
1808 (define_insn "*subsi3_carryin_shift"
1809 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1811 (match_operand:SI 1 "s_register_operand" "r,r")
1812 (match_operator:SI 2 "shift_operator"
1813 [(match_operand:SI 3 "s_register_operand" "r,r")
1814 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
1815 (match_operand:SI 5 "arm_borrow_operation" "")))]
1817 "sbc%?\\t%0, %1, %3%S2"
1818 [(set_attr "conds" "use")
1819 (set_attr "arch" "32,a")
1820 (set_attr "shift" "3")
1821 (set_attr "predicable" "yes")
1822 (set_attr "autodetect_type" "alu_shift_operator2")]
1825 (define_insn "*subsi3_carryin_shift_alt"
1826 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1828 (match_operand:SI 1 "s_register_operand" "r,r")
1829 (match_operand:SI 5 "arm_borrow_operation" ""))
1830 (match_operator:SI 2 "shift_operator"
1831 [(match_operand:SI 3 "s_register_operand" "r,r")
1832 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
1834 "sbc%?\\t%0, %1, %3%S2"
1835 [(set_attr "conds" "use")
1836 (set_attr "arch" "32,a")
1837 (set_attr "shift" "3")
1838 (set_attr "predicable" "yes")
1839 (set_attr "autodetect_type" "alu_shift_operator2")]
1843 (define_insn "*rsbsi3_carryin_shift"
1844 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1846 (match_operator:SI 2 "shift_operator"
1847 [(match_operand:SI 3 "s_register_operand" "r,r")
1848 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1849 (match_operand:SI 1 "s_register_operand" "r,r"))
1850 (match_operand:SI 5 "arm_borrow_operation" "")))]
1852 "rsc%?\\t%0, %1, %3%S2"
1853 [(set_attr "conds" "use")
1854 (set_attr "predicable" "yes")
1855 (set_attr "autodetect_type" "alu_shift_operator2")]
1858 (define_insn "*rsbsi3_carryin_shift_alt"
1859 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1861 (match_operator:SI 2 "shift_operator"
1862 [(match_operand:SI 3 "s_register_operand" "r,r")
1863 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1864 (match_operand:SI 5 "arm_borrow_operation" ""))
1865 (match_operand:SI 1 "s_register_operand" "r,r")))]
1867 "rsc%?\\t%0, %1, %3%S2"
1868 [(set_attr "conds" "use")
1869 (set_attr "predicable" "yes")
1870 (set_attr "autodetect_type" "alu_shift_operator2")]
1873 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
1875 [(set (match_operand:SI 0 "s_register_operand" "")
1876 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1877 (match_operand:SI 2 "s_register_operand" ""))
1879 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1881 [(set (match_dup 3) (match_dup 1))
1882 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1884 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1887 (define_expand "addsf3"
1888 [(set (match_operand:SF 0 "s_register_operand")
1889 (plus:SF (match_operand:SF 1 "s_register_operand")
1890 (match_operand:SF 2 "s_register_operand")))]
1891 "TARGET_32BIT && TARGET_HARD_FLOAT"
1895 (define_expand "adddf3"
1896 [(set (match_operand:DF 0 "s_register_operand")
1897 (plus:DF (match_operand:DF 1 "s_register_operand")
1898 (match_operand:DF 2 "s_register_operand")))]
1899 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1903 (define_expand "subdi3"
1905 [(set (match_operand:DI 0 "s_register_operand")
1906 (minus:DI (match_operand:DI 1 "reg_or_int_operand")
1907 (match_operand:DI 2 "s_register_operand")))
1908 (clobber (reg:CC CC_REGNUM))])]
1913 if (!REG_P (operands[1]))
1914 operands[1] = force_reg (DImode, operands[1]);
1918 rtx lo_result, hi_result, lo_dest, hi_dest;
1919 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1922 /* Since operands[1] may be an integer, pass it second, so that
1923 any necessary simplifications will be done on the decomposed
1925 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1927 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
1928 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
1930 if (!arm_rhs_operand (lo_op1, SImode))
1931 lo_op1 = force_reg (SImode, lo_op1);
1933 if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
1934 || !arm_rhs_operand (hi_op1, SImode))
1935 hi_op1 = force_reg (SImode, hi_op1);
1938 if (lo_op1 == const0_rtx)
1940 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1941 emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
1943 else if (CONST_INT_P (lo_op1))
1945 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1946 emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
1947 GEN_INT (~UINTVAL (lo_op1))));
1951 cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
1952 emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
1955 condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);
1957 if (hi_op1 == const0_rtx)
1958 emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
1960 emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));
1962 if (lo_result != lo_dest)
1963 emit_move_insn (lo_result, lo_dest);
1965 if (hi_result != hi_dest)
1966 emit_move_insn (hi_result, hi_dest);
1973 (define_expand "subsi3"
1974 [(set (match_operand:SI 0 "s_register_operand")
1975 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1976 (match_operand:SI 2 "s_register_operand")))]
1979 if (CONST_INT_P (operands[1]))
1983 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1984 operands[1] = force_reg (SImode, operands[1]);
1987 arm_split_constant (MINUS, SImode, NULL_RTX,
1988 INTVAL (operands[1]), operands[0],
1990 optimize && can_create_pseudo_p ());
1994 else /* TARGET_THUMB1 */
1995 operands[1] = force_reg (SImode, operands[1]);
2000 ; ??? Check Thumb-2 split length
2001 (define_insn_and_split "*arm_subsi3_insn"
2002 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
2003 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
2004 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
2016 "&& (CONST_INT_P (operands[1])
2017 && !const_ok_for_arm (INTVAL (operands[1])))"
2018 [(clobber (const_int 0))]
2020 arm_split_constant (MINUS, SImode, curr_insn,
2021 INTVAL (operands[1]), operands[0], operands[2], 0);
2024 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
2025 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
2026 (set_attr "predicable" "yes")
2027 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
2028 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
2032 [(match_scratch:SI 3 "r")
2033 (set (match_operand:SI 0 "arm_general_register_operand" "")
2034 (minus:SI (match_operand:SI 1 "const_int_operand" "")
2035 (match_operand:SI 2 "arm_general_register_operand" "")))]
2037 && !const_ok_for_arm (INTVAL (operands[1]))
2038 && const_ok_for_arm (~INTVAL (operands[1]))"
2039 [(set (match_dup 3) (match_dup 1))
2040 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
2044 (define_insn "subsi3_compare0"
2045 [(set (reg:CC_NZ CC_REGNUM)
2047 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
2048 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
2050 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2051 (minus:SI (match_dup 1) (match_dup 2)))]
2056 rsbs%?\\t%0, %2, %1"
2057 [(set_attr "conds" "set")
2058 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
2061 (define_insn "subsi3_compare"
2062 [(set (reg:CC CC_REGNUM)
2063 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
2064 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
2065 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2066 (minus:SI (match_dup 1) (match_dup 2)))]
2071 rsbs%?\\t%0, %2, %1"
2072 [(set_attr "conds" "set")
2073 (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
2076 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
2077 ;; rather than (0 cmp reg). This gives the same results for unsigned
2078 ;; and equality compares which is what we mostly need here.
2079 (define_insn "rsb_imm_compare"
2080 [(set (reg:CC_RSB CC_REGNUM)
2081 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2082 (match_operand 3 "const_int_operand" "")))
2083 (set (match_operand:SI 0 "s_register_operand" "=r")
2084 (minus:SI (match_operand 1 "arm_immediate_operand" "I")
2086 "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
2088 [(set_attr "conds" "set")
2089 (set_attr "type" "alus_imm")]
2092 ;; Similarly, but the result is unused.
2093 (define_insn "rsb_imm_compare_scratch"
2094 [(set (reg:CC_RSB CC_REGNUM)
2095 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2096 (match_operand 1 "arm_not_immediate_operand" "K")))
2097 (clobber (match_scratch:SI 0 "=r"))]
2099 "rsbs\\t%0, %2, #%B1"
2100 [(set_attr "conds" "set")
2101 (set_attr "type" "alus_imm")]
2104 ;; Compare the sum of a value plus a carry against a constant. Uses
2105 ;; RSC, so the result is swapped. Only available on Arm
2106 (define_insn "rscsi3_<CC_EXTEND>out_scratch"
2107 [(set (reg:CC_SWP CC_REGNUM)
2109 (plus:DI (SE:DI (match_operand:SI 2 "s_register_operand" "r"))
2110 (match_operand:DI 3 "arm_borrow_operation" ""))
2111 (match_operand 1 "arm_immediate_operand" "I")))
2112 (clobber (match_scratch:SI 0 "=r"))]
2115 [(set_attr "conds" "set")
2116 (set_attr "type" "alus_imm")]
2119 (define_insn "usubvsi3_borrow"
2120 [(set (reg:CC_B CC_REGNUM)
2122 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
2123 (plus:DI (match_operand:DI 4 "arm_borrow_operation" "")
2125 (match_operand:SI 2 "s_register_operand" "l,r")))))
2126 (set (match_operand:SI 0 "s_register_operand" "=l,r")
2127 (minus:SI (match_dup 1)
2128 (plus:SI (match_operand:SI 3 "arm_borrow_operation" "")
2131 "sbcs%?\\t%0, %1, %2"
2132 [(set_attr "conds" "set")
2133 (set_attr "arch" "t2,*")
2134 (set_attr "length" "2,4")]
2137 (define_insn "usubvsi3_borrow_imm"
2138 [(set (reg:CC_B CC_REGNUM)
2140 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
2141 (plus:DI (match_operand:DI 5 "arm_borrow_operation" "")
2142 (match_operand:DI 3 "const_int_operand" "n,n"))))
2143 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2144 (minus:SI (match_dup 1)
2145 (plus:SI (match_operand:SI 4 "arm_borrow_operation" "")
2146 (match_operand:SI 2 "arm_adcimm_operand" "I,K"))))]
2148 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[3])"
2151 adcs%?\\t%0, %1, #%B2"
2152 [(set_attr "conds" "set")
2153 (set_attr "type" "alus_imm")]
2156 (define_insn "subvsi3_borrow"
2157 [(set (reg:CC_V CC_REGNUM)
2161 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
2162 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
2163 (match_operand:DI 4 "arm_borrow_operation" ""))
2165 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2166 (match_operand:SI 3 "arm_borrow_operation" "")))))
2167 (set (match_operand:SI 0 "s_register_operand" "=l,r")
2168 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2171 "sbcs%?\\t%0, %1, %2"
2172 [(set_attr "conds" "set")
2173 (set_attr "arch" "t2,*")
2174 (set_attr "length" "2,4")]
2177 (define_insn "subvsi3_borrow_imm"
2178 [(set (reg:CC_V CC_REGNUM)
2182 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
2183 (match_operand 2 "arm_adcimm_operand" "I,K"))
2184 (match_operand:DI 4 "arm_borrow_operation" ""))
2186 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2187 (match_operand:SI 3 "arm_borrow_operation" "")))))
2188 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2189 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2192 && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
2195 adcs%?\\t%0, %1, #%B2"
2196 [(set_attr "conds" "set")
2197 (set_attr "type" "alus_imm")]
2200 (define_expand "subsf3"
2201 [(set (match_operand:SF 0 "s_register_operand")
2202 (minus:SF (match_operand:SF 1 "s_register_operand")
2203 (match_operand:SF 2 "s_register_operand")))]
2204 "TARGET_32BIT && TARGET_HARD_FLOAT"
2208 (define_expand "subdf3"
2209 [(set (match_operand:DF 0 "s_register_operand")
2210 (minus:DF (match_operand:DF 1 "s_register_operand")
2211 (match_operand:DF 2 "s_register_operand")))]
2212 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2217 ;; Multiplication insns
2219 (define_expand "mulhi3"
2220 [(set (match_operand:HI 0 "s_register_operand")
2221 (mult:HI (match_operand:HI 1 "s_register_operand")
2222 (match_operand:HI 2 "s_register_operand")))]
2223 "TARGET_DSP_MULTIPLY"
2226 rtx result = gen_reg_rtx (SImode);
2227 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
2228 emit_move_insn (operands[0], gen_lowpart (HImode, result));
2233 (define_expand "mulsi3"
2234 [(set (match_operand:SI 0 "s_register_operand")
2235 (mult:SI (match_operand:SI 2 "s_register_operand")
2236 (match_operand:SI 1 "s_register_operand")))]
2241 ;; Use `&' and then `0' to prevent operands 0 and 2 being the same
2243 [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
2244 (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
2245 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
2247 "mul%?\\t%0, %2, %1"
2248 [(set_attr "type" "mul")
2249 (set_attr "predicable" "yes")
2250 (set_attr "arch" "t2,v6,nov6,nov6")
2251 (set_attr "length" "4")
2252 (set_attr "predicable_short_it" "yes,no,*,*")]
2255 ;; MLA and MLS instruction. Use operand 1 for the accumulator to prefer
2256 ;; reusing the same register.
2259 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
2261 (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
2262 (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
2263 (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
2265 "mla%?\\t%0, %3, %2, %1"
2266 [(set_attr "type" "mla")
2267 (set_attr "predicable" "yes")
2268 (set_attr "arch" "v6,nov6,nov6,nov6")]
2272 [(set (match_operand:SI 0 "s_register_operand" "=r")
2274 (match_operand:SI 1 "s_register_operand" "r")
2275 (mult:SI (match_operand:SI 3 "s_register_operand" "r")
2276 (match_operand:SI 2 "s_register_operand" "r"))))]
2277 "TARGET_32BIT && arm_arch_thumb2"
2278 "mls%?\\t%0, %3, %2, %1"
2279 [(set_attr "type" "mla")
2280 (set_attr "predicable" "yes")]
2283 (define_insn "*mulsi3_compare0"
2284 [(set (reg:CC_NZ CC_REGNUM)
2285 (compare:CC_NZ (mult:SI
2286 (match_operand:SI 2 "s_register_operand" "r,r")
2287 (match_operand:SI 1 "s_register_operand" "%0,r"))
2289 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
2290 (mult:SI (match_dup 2) (match_dup 1)))]
2291 "TARGET_ARM && !arm_arch6"
2292 "muls%?\\t%0, %2, %1"
2293 [(set_attr "conds" "set")
2294 (set_attr "type" "muls")]
2297 (define_insn "*mulsi3_compare0_v6"
2298 [(set (reg:CC_NZ CC_REGNUM)
2299 (compare:CC_NZ (mult:SI
2300 (match_operand:SI 2 "s_register_operand" "r")
2301 (match_operand:SI 1 "s_register_operand" "r"))
2303 (set (match_operand:SI 0 "s_register_operand" "=r")
2304 (mult:SI (match_dup 2) (match_dup 1)))]
2305 "TARGET_ARM && arm_arch6 && optimize_size"
2306 "muls%?\\t%0, %2, %1"
2307 [(set_attr "conds" "set")
2308 (set_attr "type" "muls")]
2311 (define_insn "*mulsi_compare0_scratch"
2312 [(set (reg:CC_NZ CC_REGNUM)
2313 (compare:CC_NZ (mult:SI
2314 (match_operand:SI 2 "s_register_operand" "r,r")
2315 (match_operand:SI 1 "s_register_operand" "%0,r"))
2317 (clobber (match_scratch:SI 0 "=&r,&r"))]
2318 "TARGET_ARM && !arm_arch6"
2319 "muls%?\\t%0, %2, %1"
2320 [(set_attr "conds" "set")
2321 (set_attr "type" "muls")]
2324 (define_insn "*mulsi_compare0_scratch_v6"
2325 [(set (reg:CC_NZ CC_REGNUM)
2326 (compare:CC_NZ (mult:SI
2327 (match_operand:SI 2 "s_register_operand" "r")
2328 (match_operand:SI 1 "s_register_operand" "r"))
2330 (clobber (match_scratch:SI 0 "=r"))]
2331 "TARGET_ARM && arm_arch6 && optimize_size"
2332 "muls%?\\t%0, %2, %1"
2333 [(set_attr "conds" "set")
2334 (set_attr "type" "muls")]
2337 (define_insn "*mulsi3addsi_compare0"
2338 [(set (reg:CC_NZ CC_REGNUM)
2341 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2342 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2343 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
2345 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
2346 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2348 "TARGET_ARM && arm_arch6"
2349 "mlas%?\\t%0, %2, %1, %3"
2350 [(set_attr "conds" "set")
2351 (set_attr "type" "mlas")]
2354 (define_insn "*mulsi3addsi_compare0_v6"
2355 [(set (reg:CC_NZ CC_REGNUM)
2358 (match_operand:SI 2 "s_register_operand" "r")
2359 (match_operand:SI 1 "s_register_operand" "r"))
2360 (match_operand:SI 3 "s_register_operand" "r"))
2362 (set (match_operand:SI 0 "s_register_operand" "=r")
2363 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2365 "TARGET_ARM && arm_arch6 && optimize_size"
2366 "mlas%?\\t%0, %2, %1, %3"
2367 [(set_attr "conds" "set")
2368 (set_attr "type" "mlas")]
2371 (define_insn "*mulsi3addsi_compare0_scratch"
2372 [(set (reg:CC_NZ CC_REGNUM)
2375 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2376 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2377 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
2379 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
2380 "TARGET_ARM && !arm_arch6"
2381 "mlas%?\\t%0, %2, %1, %3"
2382 [(set_attr "conds" "set")
2383 (set_attr "type" "mlas")]
2386 (define_insn "*mulsi3addsi_compare0_scratch_v6"
2387 [(set (reg:CC_NZ CC_REGNUM)
2390 (match_operand:SI 2 "s_register_operand" "r")
2391 (match_operand:SI 1 "s_register_operand" "r"))
2392 (match_operand:SI 3 "s_register_operand" "r"))
2394 (clobber (match_scratch:SI 0 "=r"))]
2395 "TARGET_ARM && arm_arch6 && optimize_size"
2396 "mlas%?\\t%0, %2, %1, %3"
2397 [(set_attr "conds" "set")
2398 (set_attr "type" "mlas")]
2401 ;; 32x32->64 widening multiply.
2402 ;; The only difference between the v3-5 and v6+ versions is the requirement
2403 ;; that the output does not overlap with either input.
2405 (define_expand "<Us>mulsidi3"
2406 [(set (match_operand:DI 0 "s_register_operand")
2408 (SE:DI (match_operand:SI 1 "s_register_operand"))
2409 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
2412 emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
2413 gen_highpart (SImode, operands[0]),
2414 operands[1], operands[2]));
2419 (define_insn "<US>mull"
2420 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2422 (match_operand:SI 2 "s_register_operand" "%r,r")
2423 (match_operand:SI 3 "s_register_operand" "r,r")))
2424 (set (match_operand:SI 1 "s_register_operand" "=r,&r")
2427 (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
2430 "<US>mull%?\\t%0, %1, %2, %3"
2431 [(set_attr "type" "umull")
2432 (set_attr "predicable" "yes")
2433 (set_attr "arch" "v6,nov6")]
2436 (define_expand "<Us>maddsidi4"
2437 [(set (match_operand:DI 0 "s_register_operand")
2440 (SE:DI (match_operand:SI 1 "s_register_operand"))
2441 (SE:DI (match_operand:SI 2 "s_register_operand")))
2442 (match_operand:DI 3 "s_register_operand")))]
2445 emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
2446 gen_lowpart (SImode, operands[3]),
2447 gen_highpart (SImode, operands[0]),
2448 gen_highpart (SImode, operands[3]),
2449 operands[1], operands[2]));
2454 (define_insn "<US>mlal"
2455 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2458 (match_operand:SI 4 "s_register_operand" "%r,r")
2459 (match_operand:SI 5 "s_register_operand" "r,r"))
2460 (match_operand:SI 1 "s_register_operand" "0,0")))
2461 (set (match_operand:SI 2 "s_register_operand" "=r,&r")
2466 (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
2467 (zero_extend:DI (match_dup 1)))
2469 (match_operand:SI 3 "s_register_operand" "2,2")))]
2471 "<US>mlal%?\\t%0, %2, %4, %5"
2472 [(set_attr "type" "umlal")
2473 (set_attr "predicable" "yes")
2474 (set_attr "arch" "v6,nov6")]
2477 (define_expand "<US>mulsi3_highpart"
2479 [(set (match_operand:SI 0 "s_register_operand")
2483 (SE:DI (match_operand:SI 1 "s_register_operand"))
2484 (SE:DI (match_operand:SI 2 "s_register_operand")))
2486 (clobber (match_scratch:SI 3 ""))])]
2491 (define_insn "*<US>mull_high"
2492 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
2496 (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
2497 (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
2499 (clobber (match_scratch:SI 3 "=r,&r,&r"))]
2501 "<US>mull%?\\t%3, %0, %2, %1"
2502 [(set_attr "type" "umull")
2503 (set_attr "predicable" "yes")
2504 (set_attr "arch" "v6,nov6,nov6")]
2507 (define_insn "mulhisi3"
2508 [(set (match_operand:SI 0 "s_register_operand" "=r")
2509 (mult:SI (sign_extend:SI
2510 (match_operand:HI 1 "s_register_operand" "%r"))
2512 (match_operand:HI 2 "s_register_operand" "r"))))]
2513 "TARGET_DSP_MULTIPLY"
2514 "smulbb%?\\t%0, %1, %2"
2515 [(set_attr "type" "smulxy")
2516 (set_attr "predicable" "yes")]
2519 (define_insn "*mulhisi3tb"
2520 [(set (match_operand:SI 0 "s_register_operand" "=r")
2521 (mult:SI (ashiftrt:SI
2522 (match_operand:SI 1 "s_register_operand" "r")
2525 (match_operand:HI 2 "s_register_operand" "r"))))]
2526 "TARGET_DSP_MULTIPLY"
2527 "smultb%?\\t%0, %1, %2"
2528 [(set_attr "type" "smulxy")
2529 (set_attr "predicable" "yes")]
2532 (define_insn "*mulhisi3bt"
2533 [(set (match_operand:SI 0 "s_register_operand" "=r")
2534 (mult:SI (sign_extend:SI
2535 (match_operand:HI 1 "s_register_operand" "r"))
2537 (match_operand:SI 2 "s_register_operand" "r")
2539 "TARGET_DSP_MULTIPLY"
2540 "smulbt%?\\t%0, %1, %2"
2541 [(set_attr "type" "smulxy")
2542 (set_attr "predicable" "yes")]
2545 (define_insn "*mulhisi3tt"
2546 [(set (match_operand:SI 0 "s_register_operand" "=r")
2547 (mult:SI (ashiftrt:SI
2548 (match_operand:SI 1 "s_register_operand" "r")
2551 (match_operand:SI 2 "s_register_operand" "r")
2553 "TARGET_DSP_MULTIPLY"
2554 "smultt%?\\t%0, %1, %2"
2555 [(set_attr "type" "smulxy")
2556 (set_attr "predicable" "yes")]
2559 (define_expand "maddhisi4"
2560 [(set (match_operand:SI 0 "s_register_operand")
2561 (plus:SI (mult:SI (sign_extend:SI
2562 (match_operand:HI 1 "s_register_operand"))
2564 (match_operand:HI 2 "s_register_operand")))
2565 (match_operand:SI 3 "s_register_operand")))]
2566 "TARGET_DSP_MULTIPLY"
2568 /* If this function reads the Q bit from ACLE intrinsics break up the
2569 multiplication and accumulation as an overflow during accumulation will
2570 clobber the Q flag. */
2573 rtx tmp = gen_reg_rtx (SImode);
2574 emit_insn (gen_mulhisi3 (tmp, operands[1], operands[2]));
2575 emit_insn (gen_addsi3 (operands[0], tmp, operands[3]));
2581 (define_insn "*arm_maddhisi4"
2582 [(set (match_operand:SI 0 "s_register_operand" "=r")
2583 (plus:SI (mult:SI (sign_extend:SI
2584 (match_operand:HI 1 "s_register_operand" "r"))
2586 (match_operand:HI 2 "s_register_operand" "r")))
2587 (match_operand:SI 3 "s_register_operand" "r")))]
2588 "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2589 "smlabb%?\\t%0, %1, %2, %3"
2590 [(set_attr "type" "smlaxy")
2591 (set_attr "predicable" "yes")]
2594 (define_insn "arm_smlabb_setq"
2595 [(set (match_operand:SI 0 "s_register_operand" "=r")
2596 (plus:SI (mult:SI (sign_extend:SI
2597 (match_operand:HI 1 "s_register_operand" "r"))
2599 (match_operand:HI 2 "s_register_operand" "r")))
2600 (match_operand:SI 3 "s_register_operand" "r")))
2601 (set (reg:CC APSRQ_REGNUM)
2602 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2603 "TARGET_DSP_MULTIPLY"
2604 "smlabb%?\\t%0, %1, %2, %3"
2605 [(set_attr "type" "smlaxy")
2606 (set_attr "predicable" "yes")]
2609 (define_expand "arm_smlabb"
2610 [(match_operand:SI 0 "s_register_operand")
2611 (match_operand:SI 1 "s_register_operand")
2612 (match_operand:SI 2 "s_register_operand")
2613 (match_operand:SI 3 "s_register_operand")]
2614 "TARGET_DSP_MULTIPLY"
2616 rtx mult1 = gen_lowpart (HImode, operands[1]);
2617 rtx mult2 = gen_lowpart (HImode, operands[2]);
2619 emit_insn (gen_arm_smlabb_setq (operands[0], mult1, mult2, operands[3]));
2621 emit_insn (gen_maddhisi4 (operands[0], mult1, mult2, operands[3]));
2626 ;; Note: there is no maddhisi4ibt because this one is canonical form
2627 (define_insn "maddhisi4tb"
2628 [(set (match_operand:SI 0 "s_register_operand" "=r")
2629 (plus:SI (mult:SI (ashiftrt:SI
2630 (match_operand:SI 1 "s_register_operand" "r")
2633 (match_operand:HI 2 "s_register_operand" "r")))
2634 (match_operand:SI 3 "s_register_operand" "r")))]
2635 "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2636 "smlatb%?\\t%0, %1, %2, %3"
2637 [(set_attr "type" "smlaxy")
2638 (set_attr "predicable" "yes")]
2641 (define_insn "arm_smlatb_setq"
2642 [(set (match_operand:SI 0 "s_register_operand" "=r")
2643 (plus:SI (mult:SI (ashiftrt:SI
2644 (match_operand:SI 1 "s_register_operand" "r")
2647 (match_operand:HI 2 "s_register_operand" "r")))
2648 (match_operand:SI 3 "s_register_operand" "r")))
2649 (set (reg:CC APSRQ_REGNUM)
2650 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2651 "TARGET_DSP_MULTIPLY"
2652 "smlatb%?\\t%0, %1, %2, %3"
2653 [(set_attr "type" "smlaxy")
2654 (set_attr "predicable" "yes")]
2657 (define_expand "arm_smlatb"
2658 [(match_operand:SI 0 "s_register_operand")
2659 (match_operand:SI 1 "s_register_operand")
2660 (match_operand:SI 2 "s_register_operand")
2661 (match_operand:SI 3 "s_register_operand")]
2662 "TARGET_DSP_MULTIPLY"
2664 rtx mult2 = gen_lowpart (HImode, operands[2]);
2666 emit_insn (gen_arm_smlatb_setq (operands[0], operands[1],
2667 mult2, operands[3]));
2669 emit_insn (gen_maddhisi4tb (operands[0], operands[1],
2670 mult2, operands[3]));
2675 (define_insn "maddhisi4tt"
2676 [(set (match_operand:SI 0 "s_register_operand" "=r")
2677 (plus:SI (mult:SI (ashiftrt:SI
2678 (match_operand:SI 1 "s_register_operand" "r")
2681 (match_operand:SI 2 "s_register_operand" "r")
2683 (match_operand:SI 3 "s_register_operand" "r")))]
2684 "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2685 "smlatt%?\\t%0, %1, %2, %3"
2686 [(set_attr "type" "smlaxy")
2687 (set_attr "predicable" "yes")]
2690 (define_insn "arm_smlatt_setq"
2691 [(set (match_operand:SI 0 "s_register_operand" "=r")
2692 (plus:SI (mult:SI (ashiftrt:SI
2693 (match_operand:SI 1 "s_register_operand" "r")
2696 (match_operand:SI 2 "s_register_operand" "r")
2698 (match_operand:SI 3 "s_register_operand" "r")))
2699 (set (reg:CC APSRQ_REGNUM)
2700 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2701 "TARGET_DSP_MULTIPLY"
2702 "smlatt%?\\t%0, %1, %2, %3"
2703 [(set_attr "type" "smlaxy")
2704 (set_attr "predicable" "yes")]
2707 (define_expand "arm_smlatt"
2708 [(match_operand:SI 0 "s_register_operand")
2709 (match_operand:SI 1 "s_register_operand")
2710 (match_operand:SI 2 "s_register_operand")
2711 (match_operand:SI 3 "s_register_operand")]
2712 "TARGET_DSP_MULTIPLY"
2715 emit_insn (gen_arm_smlatt_setq (operands[0], operands[1],
2716 operands[2], operands[3]));
2718 emit_insn (gen_maddhisi4tt (operands[0], operands[1],
2719 operands[2], operands[3]));
2724 (define_insn "maddhidi4"
2725 [(set (match_operand:DI 0 "s_register_operand" "=r")
2727 (mult:DI (sign_extend:DI
2728 (match_operand:HI 1 "s_register_operand" "r"))
2730 (match_operand:HI 2 "s_register_operand" "r")))
2731 (match_operand:DI 3 "s_register_operand" "0")))]
2732 "TARGET_DSP_MULTIPLY"
2733 "smlalbb%?\\t%Q0, %R0, %1, %2"
2734 [(set_attr "type" "smlalxy")
2735 (set_attr "predicable" "yes")])
2737 ;; Note: there is no maddhidi4ibt because this one is canonical form
2738 (define_insn "*maddhidi4tb"
2739 [(set (match_operand:DI 0 "s_register_operand" "=r")
2741 (mult:DI (sign_extend:DI
2743 (match_operand:SI 1 "s_register_operand" "r")
2746 (match_operand:HI 2 "s_register_operand" "r")))
2747 (match_operand:DI 3 "s_register_operand" "0")))]
2748 "TARGET_DSP_MULTIPLY"
2749 "smlaltb%?\\t%Q0, %R0, %1, %2"
2750 [(set_attr "type" "smlalxy")
2751 (set_attr "predicable" "yes")])
2753 (define_insn "*maddhidi4tt"
2754 [(set (match_operand:DI 0 "s_register_operand" "=r")
2756 (mult:DI (sign_extend:DI
2758 (match_operand:SI 1 "s_register_operand" "r")
2762 (match_operand:SI 2 "s_register_operand" "r")
2764 (match_operand:DI 3 "s_register_operand" "0")))]
2765 "TARGET_DSP_MULTIPLY"
2766 "smlaltt%?\\t%Q0, %R0, %1, %2"
2767 [(set_attr "type" "smlalxy")
2768 (set_attr "predicable" "yes")])
2770 (define_insn "arm_<smlaw_op><add_clobber_q_name>_insn"
2771 [(set (match_operand:SI 0 "s_register_operand" "=r")
2773 [(match_operand:SI 1 "s_register_operand" "r")
2774 (match_operand:SI 2 "s_register_operand" "r")
2775 (match_operand:SI 3 "s_register_operand" "r")]
2777 "TARGET_DSP_MULTIPLY && <add_clobber_q_pred>"
2778 "<smlaw_op>%?\\t%0, %1, %2, %3"
2779 [(set_attr "type" "smlaxy")
2780 (set_attr "predicable" "yes")]
2783 (define_expand "arm_<smlaw_op>"
2784 [(set (match_operand:SI 0 "s_register_operand")
2786 [(match_operand:SI 1 "s_register_operand")
2787 (match_operand:SI 2 "s_register_operand")
2788 (match_operand:SI 3 "s_register_operand")]
2790 "TARGET_DSP_MULTIPLY"
2793 emit_insn (gen_arm_<smlaw_op>_setq_insn (operands[0], operands[1],
2794 operands[2], operands[3]));
2796 emit_insn (gen_arm_<smlaw_op>_insn (operands[0], operands[1],
2797 operands[2], operands[3]));
2802 (define_expand "mulsf3"
2803 [(set (match_operand:SF 0 "s_register_operand")
2804 (mult:SF (match_operand:SF 1 "s_register_operand")
2805 (match_operand:SF 2 "s_register_operand")))]
2806 "TARGET_32BIT && TARGET_HARD_FLOAT"
2810 (define_expand "muldf3"
2811 [(set (match_operand:DF 0 "s_register_operand")
2812 (mult:DF (match_operand:DF 1 "s_register_operand")
2813 (match_operand:DF 2 "s_register_operand")))]
2814 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2820 (define_expand "divsf3"
2821 [(set (match_operand:SF 0 "s_register_operand")
2822 (div:SF (match_operand:SF 1 "s_register_operand")
2823 (match_operand:SF 2 "s_register_operand")))]
2824 "TARGET_32BIT && TARGET_HARD_FLOAT"
2827 (define_expand "divdf3"
2828 [(set (match_operand:DF 0 "s_register_operand")
2829 (div:DF (match_operand:DF 1 "s_register_operand")
2830 (match_operand:DF 2 "s_register_operand")))]
2831 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
2835 ; Expand logical operations. The mid-end expander does not split off memory
2836 ; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
2837 ; So an explicit expander is needed to generate better code.
2839 (define_expand "<LOGICAL:optab>di3"
2840 [(set (match_operand:DI 0 "s_register_operand")
2841 (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
2842 (match_operand:DI 2 "arm_<optab>di_operand")))]
2845 rtx low = simplify_gen_binary (<CODE>, SImode,
2846 gen_lowpart (SImode, operands[1]),
2847 gen_lowpart (SImode, operands[2]));
2848 rtx high = simplify_gen_binary (<CODE>, SImode,
2849 gen_highpart (SImode, operands[1]),
2850 gen_highpart_mode (SImode, DImode,
2853 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2854 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2859 (define_expand "one_cmpldi2"
2860 [(set (match_operand:DI 0 "s_register_operand")
2861 (not:DI (match_operand:DI 1 "s_register_operand")))]
2864 rtx low = simplify_gen_unary (NOT, SImode,
2865 gen_lowpart (SImode, operands[1]),
2867 rtx high = simplify_gen_unary (NOT, SImode,
2868 gen_highpart_mode (SImode, DImode,
2872 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2873 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2878 ;; Split DImode and, ior, xor operations. Simply perform the logical
2879 ;; operation on the upper and lower halves of the registers.
2880 ;; This is needed for atomic operations in arm_split_atomic_op.
2881 ;; Avoid splitting IWMMXT instructions.
2883 [(set (match_operand:DI 0 "s_register_operand" "")
2884 (match_operator:DI 6 "logical_binary_operator"
2885 [(match_operand:DI 1 "s_register_operand" "")
2886 (match_operand:DI 2 "s_register_operand" "")]))]
2887 "TARGET_32BIT && reload_completed
2888 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2889 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2890 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
2893 operands[3] = gen_highpart (SImode, operands[0]);
2894 operands[0] = gen_lowpart (SImode, operands[0]);
2895 operands[4] = gen_highpart (SImode, operands[1]);
2896 operands[1] = gen_lowpart (SImode, operands[1]);
2897 operands[5] = gen_highpart (SImode, operands[2]);
2898 operands[2] = gen_lowpart (SImode, operands[2]);
2902 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
2903 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
2905 [(set (match_operand:DI 0 "s_register_operand")
2906 (not:DI (match_operand:DI 1 "s_register_operand")))]
2908 [(set (match_dup 0) (not:SI (match_dup 1)))
2909 (set (match_dup 2) (not:SI (match_dup 3)))]
2912 operands[2] = gen_highpart (SImode, operands[0]);
2913 operands[0] = gen_lowpart (SImode, operands[0]);
2914 operands[3] = gen_highpart (SImode, operands[1]);
2915 operands[1] = gen_lowpart (SImode, operands[1]);
2919 (define_expand "andsi3"
2920 [(set (match_operand:SI 0 "s_register_operand")
2921 (and:SI (match_operand:SI 1 "s_register_operand")
2922 (match_operand:SI 2 "reg_or_int_operand")))]
2927 if (CONST_INT_P (operands[2]))
2929 if (INTVAL (operands[2]) == 255 && arm_arch6)
2931 operands[1] = convert_to_mode (QImode, operands[1], 1);
2932 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2936 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
2937 operands[2] = force_reg (SImode, operands[2]);
2940 arm_split_constant (AND, SImode, NULL_RTX,
2941 INTVAL (operands[2]), operands[0],
2943 optimize && can_create_pseudo_p ());
2949 else /* TARGET_THUMB1 */
2951 if (!CONST_INT_P (operands[2]))
2953 rtx tmp = force_reg (SImode, operands[2]);
2954 if (rtx_equal_p (operands[0], operands[1]))
2958 operands[2] = operands[1];
2966 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2968 operands[2] = force_reg (SImode,
2969 GEN_INT (~INTVAL (operands[2])));
2971 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2976 for (i = 9; i <= 31; i++)
2978 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
2980 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2984 else if ((HOST_WIDE_INT_1 << i) - 1
2985 == ~INTVAL (operands[2]))
2987 rtx shift = GEN_INT (i);
2988 rtx reg = gen_reg_rtx (SImode);
2990 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2991 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2997 operands[2] = force_reg (SImode, operands[2]);
3003 ; ??? Check split length for Thumb-2
3004 (define_insn_and_split "*arm_andsi3_insn"
3005 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r,r")
3006 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,0,r")
3007 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,Dj,?n")))]
3012 bic%?\\t%0, %1, #%B2
3017 && CONST_INT_P (operands[2])
3018 && !(const_ok_for_arm (INTVAL (operands[2]))
3019 || const_ok_for_arm (~INTVAL (operands[2]))
3021 && satisfies_constraint_Dj (operands[2])
3022 && (rtx_equal_p (operands[0], operands[1])
3023 || !reload_completed)))"
3024 [(clobber (const_int 0))]
3026 arm_split_constant (AND, SImode, curr_insn,
3027 INTVAL (operands[2]), operands[0], operands[1], 0);
3030 [(set_attr "length" "4,4,4,4,4,16")
3031 (set_attr "predicable" "yes")
3032 (set_attr "predicable_short_it" "no,yes,no,no,no,no")
3033 (set_attr "arch" "*,*,*,*,v6t2,*")
3034 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,bfm,logic_imm")]
3037 (define_insn "*andsi3_compare0"
3038 [(set (reg:CC_NZ CC_REGNUM)
3040 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
3041 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
3043 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3044 (and:SI (match_dup 1) (match_dup 2)))]
3048 bics%?\\t%0, %1, #%B2
3049 ands%?\\t%0, %1, %2"
3050 [(set_attr "conds" "set")
3051 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
3054 (define_insn "*andsi3_compare0_scratch"
3055 [(set (reg:CC_NZ CC_REGNUM)
3057 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
3058 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
3060 (clobber (match_scratch:SI 2 "=X,r,X"))]
3064 bics%?\\t%2, %0, #%B1
3066 [(set_attr "conds" "set")
3067 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
3070 (define_insn "*zeroextractsi_compare0_scratch"
3071 [(set (reg:CC_NZ CC_REGNUM)
3072 (compare:CC_NZ (zero_extract:SI
3073 (match_operand:SI 0 "s_register_operand" "r")
3074 (match_operand 1 "const_int_operand" "n")
3075 (match_operand 2 "const_int_operand" "n"))
3078 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
3079 && INTVAL (operands[1]) > 0
3080 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
3081 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
3083 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
3084 << INTVAL (operands[2]));
3085 output_asm_insn (\"tst%?\\t%0, %1\", operands);
3088 [(set_attr "conds" "set")
3089 (set_attr "predicable" "yes")
3090 (set_attr "type" "logics_imm")]
3093 (define_insn_and_split "*ne_zeroextractsi"
3094 [(set (match_operand:SI 0 "s_register_operand" "=r")
3095 (ne:SI (zero_extract:SI
3096 (match_operand:SI 1 "s_register_operand" "r")
3097 (match_operand:SI 2 "const_int_operand" "n")
3098 (match_operand:SI 3 "const_int_operand" "n"))
3100 (clobber (reg:CC CC_REGNUM))]
3102 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3103 && INTVAL (operands[2]) > 0
3104 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3105 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
3108 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3109 && INTVAL (operands[2]) > 0
3110 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3111 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
3112 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3113 (compare:CC_NZ (and:SI (match_dup 1) (match_dup 2))
3115 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
3117 (if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3118 (match_dup 0) (const_int 1)))]
3120 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
3121 << INTVAL (operands[3]));
3123 [(set_attr "conds" "clob")
3124 (set (attr "length")
3125 (if_then_else (eq_attr "is_thumb" "yes")
3128 (set_attr "type" "multiple")]
3131 (define_insn_and_split "*ne_zeroextractsi_shifted"
3132 [(set (match_operand:SI 0 "s_register_operand" "=r")
3133 (ne:SI (zero_extract:SI
3134 (match_operand:SI 1 "s_register_operand" "r")
3135 (match_operand:SI 2 "const_int_operand" "n")
3138 (clobber (reg:CC CC_REGNUM))]
3142 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3143 (compare:CC_NZ (ashift:SI (match_dup 1) (match_dup 2))
3145 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
3147 (if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3148 (match_dup 0) (const_int 1)))]
3150 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
3152 [(set_attr "conds" "clob")
3153 (set_attr "length" "8")
3154 (set_attr "type" "multiple")]
3157 (define_insn_and_split "*ite_ne_zeroextractsi"
3158 [(set (match_operand:SI 0 "s_register_operand" "=r")
3159 (if_then_else:SI (ne (zero_extract:SI
3160 (match_operand:SI 1 "s_register_operand" "r")
3161 (match_operand:SI 2 "const_int_operand" "n")
3162 (match_operand:SI 3 "const_int_operand" "n"))
3164 (match_operand:SI 4 "arm_not_operand" "rIK")
3166 (clobber (reg:CC CC_REGNUM))]
3168 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3169 && INTVAL (operands[2]) > 0
3170 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3171 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
3172 && !reg_overlap_mentioned_p (operands[0], operands[4])"
3175 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3176 && INTVAL (operands[2]) > 0
3177 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3178 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
3179 && !reg_overlap_mentioned_p (operands[0], operands[4])"
3180 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3181 (compare:CC_NZ (and:SI (match_dup 1) (match_dup 2))
3183 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
3185 (if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3186 (match_dup 0) (match_dup 4)))]
3188 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
3189 << INTVAL (operands[3]));
3191 [(set_attr "conds" "clob")
3192 (set_attr "length" "8")
3193 (set_attr "type" "multiple")]
3196 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
3197 [(set (match_operand:SI 0 "s_register_operand" "=r")
3198 (if_then_else:SI (ne (zero_extract:SI
3199 (match_operand:SI 1 "s_register_operand" "r")
3200 (match_operand:SI 2 "const_int_operand" "n")
3203 (match_operand:SI 3 "arm_not_operand" "rIK")
3205 (clobber (reg:CC CC_REGNUM))]
3206 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
3208 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
3209 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3210 (compare:CC_NZ (ashift:SI (match_dup 1) (match_dup 2))
3212 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
3214 (if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3215 (match_dup 0) (match_dup 3)))]
3217 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
3219 [(set_attr "conds" "clob")
3220 (set_attr "length" "8")
3221 (set_attr "type" "multiple")]
3224 ;; ??? Use Thumb-2 bitfield insert/extract instructions.
3226 [(set (match_operand:SI 0 "s_register_operand" "")
3227 (match_operator:SI 1 "shiftable_operator"
3228 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3229 (match_operand:SI 3 "const_int_operand" "")
3230 (match_operand:SI 4 "const_int_operand" ""))
3231 (match_operand:SI 5 "s_register_operand" "")]))
3232 (clobber (match_operand:SI 6 "s_register_operand" ""))]
3234 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
3237 [(lshiftrt:SI (match_dup 6) (match_dup 4))
3240 HOST_WIDE_INT temp = INTVAL (operands[3]);
3242 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
3243 operands[4] = GEN_INT (32 - temp);
3248 [(set (match_operand:SI 0 "s_register_operand" "")
3249 (match_operator:SI 1 "shiftable_operator"
3250 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3251 (match_operand:SI 3 "const_int_operand" "")
3252 (match_operand:SI 4 "const_int_operand" ""))
3253 (match_operand:SI 5 "s_register_operand" "")]))
3254 (clobber (match_operand:SI 6 "s_register_operand" ""))]
3256 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
3259 [(ashiftrt:SI (match_dup 6) (match_dup 4))
3262 HOST_WIDE_INT temp = INTVAL (operands[3]);
3264 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
3265 operands[4] = GEN_INT (32 - temp);
3269 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
3270 ;;; represented by the bitfield, then this will produce incorrect results.
3271 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
3272 ;;; which have a real bit-field insert instruction, the truncation happens
3273 ;;; in the bit-field insert instruction itself. Since arm does not have a
3274 ;;; bit-field insert instruction, we would have to emit code here to truncate
3275 ;;; the value before we insert. This loses some of the advantage of having
3276 ;;; this insv pattern, so this pattern needs to be reevaluated.
;; Bit-field insert expander.  Strategy, as visible below: on Thumb-2,
;; prefer unaligned str/strh for byte-aligned 16/32-bit fields into memory,
;; BFC (via insv_zero) / single ORR / BFI (via insv_t2) for registers;
;; otherwise fall back to AND/OR (or shift/rotate trick) sequences.
;; NOTE(review): many interior lines (FAIL paths, closing braces, some
;; emit calls) are elided in this copy; code bytes kept exactly as found.
3278 (define_expand "insv"
3279 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
3280 (match_operand 1 "general_operand")
3281 (match_operand 2 "general_operand"))
3282 (match_operand 3 "reg_or_int_operand"))]
3283 "TARGET_ARM || arm_arch_thumb2"
3286 int start_bit = INTVAL (operands[2]);
3287 int width = INTVAL (operands[1]);
3288 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
3289 rtx target, subtarget;
3291 if (arm_arch_thumb2)
;; Thumb-2 with unaligned access: byte-aligned 16/32-bit field in memory
;; can be done with a single unaligned store.
3293 if (unaligned_access && MEM_P (operands[0])
3294 && s_register_operand (operands[3], GET_MODE (operands[3]))
3295 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
3299 if (BYTES_BIG_ENDIAN)
3300 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
3305 base_addr = adjust_address (operands[0], SImode,
3306 start_bit / BITS_PER_UNIT);
3307 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
3311 rtx tmp = gen_reg_rtx (HImode);
3313 base_addr = adjust_address (operands[0], HImode,
3314 start_bit / BITS_PER_UNIT);
3315 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
3316 emit_insn (gen_unaligned_storehi (base_addr, tmp));
;; Thumb-2 register destination: BFC for inserting zero, a single ORR
;; when inserting all-ones with an encodable immediate, else BFI.
3320 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
3322 bool use_bfi = TRUE;
3324 if (CONST_INT_P (operands[3]))
3326 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
3330 emit_insn (gen_insv_zero (operands[0], operands[1],
3335 /* See if the set can be done with a single orr instruction.  */
3336 if (val == mask && const_ok_for_arm (val << start_bit))
3342 if (!REG_P (operands[3]))
3343 operands[3] = force_reg (SImode, operands[3]);
3345 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
;; Generic ARM fallback from here on.
3354 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
3357 target = copy_rtx (operands[0]);
3358 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
3359 subreg as the final target.  */
3360 if (GET_CODE (target) == SUBREG)
3362 subtarget = gen_reg_rtx (SImode);
3363 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
3364 < GET_MODE_SIZE (SImode))
3365 target = SUBREG_REG (target);
3370 if (CONST_INT_P (operands[3]))
3372 /* Since we are inserting a known constant, we may be able to
3373 reduce the number of bits that we have to clear so that
3374 the mask becomes simple.  */
3375 /* ??? This code does not check to see if the new mask is actually
3376 simpler.  It may not be.  */
3377 rtx op1 = gen_reg_rtx (SImode);
3378 /* ??? Truncate operand3 to fit in the bitfield.  See comment before
3379 start of this pattern.  */
3380 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
3381 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
3383 emit_insn (gen_andsi3 (op1, operands[0],
3384 gen_int_mode (~mask2, SImode)));
3385 emit_insn (gen_iorsi3 (subtarget, op1,
3386 gen_int_mode (op3_value << start_bit, SImode)));
3388 else if (start_bit == 0
3389 && !(const_ok_for_arm (mask)
3390 || const_ok_for_arm (~mask)))
3392 /* A trick: since we are setting the bottom bits in the word,
3393 we can shift operand[3] up, operand[0] down, OR them together
3394 and rotate the result back again.  This takes 3 insns, and
3395 the third might be mergeable into another op.  */
3396 /* The shift up copes with the possibility that operand[3] is
3397 wider than the bitfield.  */
3398 rtx op0 = gen_reg_rtx (SImode);
3399 rtx op1 = gen_reg_rtx (SImode);
3401 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3402 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
3403 emit_insn (gen_iorsi3 (op1, op1, op0));
3404 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
3406 else if ((width + start_bit == 32)
3407 && !(const_ok_for_arm (mask)
3408 || const_ok_for_arm (~mask)))
3410 /* Similar trick, but slightly less efficient.  */
3412 rtx op0 = gen_reg_rtx (SImode);
3413 rtx op1 = gen_reg_rtx (SImode);
3415 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3416 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
3417 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
3418 emit_insn (gen_iorsi3 (subtarget, op1, op0));
;; General case: AND out the field in op3, clear the field in op0
;; (with BIC or an AND of the inverted mask), OR the pieces together.
3422 rtx op0 = gen_int_mode (mask, SImode);
3423 rtx op1 = gen_reg_rtx (SImode);
3424 rtx op2 = gen_reg_rtx (SImode);
3426 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
3428 rtx tmp = gen_reg_rtx (SImode);
3430 emit_insn (gen_movsi (tmp, op0));
3434 /* Mask out any bits in operand[3] that are not needed.  */
3435 emit_insn (gen_andsi3 (op1, operands[3], op0));
3437 if (CONST_INT_P (op0)
3438 && (const_ok_for_arm (mask << start_bit)
3439 || const_ok_for_arm (~(mask << start_bit))))
3441 op0 = gen_int_mode (~(mask << start_bit), SImode);
3442 emit_insn (gen_andsi3 (op2, operands[0], op0));
3446 if (CONST_INT_P (op0))
3448 rtx tmp = gen_reg_rtx (SImode);
3450 emit_insn (gen_movsi (tmp, op0));
3455 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
3457 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
3461 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
3463 emit_insn (gen_iorsi3 (subtarget, op1, op2));
;; Copy the result back to the real destination when a subtarget was used.
3466 if (subtarget != target)
3468 /* If TARGET is still a SUBREG, then it must be wider than a word,
3469 so we must be careful only to set the subword we were asked to.  */
3470 if (GET_CODE (target) == SUBREG)
3471 emit_move_insn (target, subtarget)
3473 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
;; Insert zero into a bit-field: splits back into a plain AND with the
;; inverted field mask so later passes can pick BIC/AND/BFC as appropriate
;; (rationale stated in the preparation code below).
3480 (define_insn_and_split "insv_zero"
3481 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3482 (match_operand:SI 1 "const_int_M_operand" "M")
3483 (match_operand:SI 2 "const_int_M_operand" "M"))
3488 [(set (match_dup 0) (and:SI (match_dup 0) (match_dup 1)))]
3490 /* Convert back to a normal AND operation, so that we can take advantage
3491 of BIC and AND when appropriate; we'll still emit BFC if that's the
3492 right thing to do.  */
3493 unsigned HOST_WIDE_INT width = UINTVAL (operands[1]);
3494 unsigned HOST_WIDE_INT lsb = UINTVAL (operands[2]);
3495 unsigned HOST_WIDE_INT mask = (HOST_WIDE_INT_1U << width) - 1;
;; Operand 1 becomes ~(field mask) for the AND.
3497 operands[1] = gen_int_mode (~(mask << lsb), SImode);
3499 [(set_attr "length" "4")
3500 (set_attr "predicable" "yes")
3501 (set_attr "type" "bfm")]
;; Bit-field insert from a register: single BFI instruction
;; (width = op1, lsb = op2, source = op3).
3504 (define_insn "insv_t2"
3505 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3506 (match_operand:SI 1 "const_int_M_operand" "M")
3507 (match_operand:SI 2 "const_int_M_operand" "M"))
3508 (match_operand:SI 3 "s_register_operand" "r"))]
3510 "bfi%?\t%0, %3, %2, %1"
3511 [(set_attr "length" "4")
3512 (set_attr "predicable" "yes")
3513 (set_attr "type" "bfm")]
;; Combine-generated BFI patterns: (IOR (AND dst keep-mask)
;; (AND (ASHIFT src lsb) field-mask)) and its commuted/unshifted variants.
;; The conditions require the two masks to be exact complements and the
;; field mask to be a contiguous run of bits (checked with exact_log2).
;; NOTE(review): the (define_insn "*bfi" header line and parts of the
;; conditions are elided in this copy; code bytes kept exactly as found.
3517 [(set (match_operand:SI 0 "s_register_operand" "=r")
3518 (ior:SI (and:SI (match_operand:SI 1 "s_register_operand" "0")
3519 (match_operand 2 "const_int_operand" "Dj"))
3521 (match_operand:SI 3 "s_register_operand" "r")
3522 (match_operand 4 "const_int_operand" "i"))
3523 (match_operand 5 "const_int_operand" "i"))))]
3525 && UINTVAL (operands[4]) < 32
3526 && UINTVAL (operands[2]) == ~UINTVAL (operands[5])
3527 && (exact_log2 (UINTVAL (operands[5])
3528 + (HOST_WIDE_INT_1U << UINTVAL (operands[4])))
3530 "bfi%?\t%0, %3, %V2"
3531 [(set_attr "length" "4")
3532 (set_attr "predicable" "yes")
3533 (set_attr "type" "bfm")]
;; Same as above with the IOR operands swapped.
3536 (define_insn "*bfi_alt1"
3537 [(set (match_operand:SI 0 "s_register_operand" "=r")
3538 (ior:SI (and:SI (ashift:SI
3539 (match_operand:SI 3 "s_register_operand" "r")
3540 (match_operand 4 "const_int_operand" "i"))
3541 (match_operand 5 "const_int_operand" "i"))
3542 (and:SI (match_operand:SI 1 "s_register_operand" "0")
3543 (match_operand 2 "const_int_operand" "Dj"))))]
3545 && UINTVAL (operands[4]) < 32
3546 && UINTVAL (operands[2]) == ~UINTVAL (operands[5])
3547 && (exact_log2 (UINTVAL (operands[5])
3548 + (HOST_WIDE_INT_1U << UINTVAL (operands[4])))
3550 "bfi%?\t%0, %3, %V2"
3551 [(set_attr "length" "4")
3552 (set_attr "predicable" "yes")
3553 (set_attr "type" "bfm")]
;; Variant with no shift: field inserted at bit 0 (mask must be 2^n - 1).
3556 (define_insn "*bfi_alt2"
3557 [(set (match_operand:SI 0 "s_register_operand" "=r")
3558 (ior:SI (and:SI (match_operand:SI 1 "s_register_operand" "0")
3559 (match_operand 2 "const_int_operand" "i"))
3560 (and:SI (match_operand:SI 3 "s_register_operand" "r")
3561 (match_operand 4 "const_int_operand" "i"))))]
3563 && UINTVAL (operands[2]) == ~UINTVAL (operands[4])
3564 && exact_log2 (UINTVAL (operands[4]) + 1) >= 0"
3565 "bfi%?\t%0, %3, %V2"
3566 [(set_attr "length" "4")
3567 (set_attr "predicable" "yes")
3568 (set_attr "type" "bfm")]
;; As *bfi_alt2 with the IOR operands swapped.
3571 (define_insn "*bfi_alt3"
3572 [(set (match_operand:SI 0 "s_register_operand" "=r")
3573 (ior:SI (and:SI (match_operand:SI 3 "s_register_operand" "r")
3574 (match_operand 4 "const_int_operand" "i"))
3575 (and:SI (match_operand:SI 1 "s_register_operand" "0")
3576 (match_operand 2 "const_int_operand" "i"))))]
3578 && UINTVAL (operands[2]) == ~UINTVAL (operands[4])
3579 && exact_log2 (UINTVAL (operands[4]) + 1) >= 0"
3580 "bfi%?\t%0, %3, %V2"
3581 [(set_attr "length" "4")
3582 (set_attr "predicable" "yes")
3583 (set_attr "type" "bfm")]
;; AND-NOT family, all mapping to BIC/BICS.  Note the operand numbering:
;; operand 2 is the complemented input, operand 1 the plain one.
3586 (define_insn "andsi_notsi_si"
3587 [(set (match_operand:SI 0 "s_register_operand" "=r")
3588 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3589 (match_operand:SI 1 "s_register_operand" "r")))]
3591 "bic%?\\t%0, %1, %2"
3592 [(set_attr "predicable" "yes")
3593 (set_attr "type" "logic_reg")]
;; BIC with a shifted second operand; the register-shift alternative is
;; ARM-only (arch "a"), immediate shift is any 32-bit mode.
3596 (define_insn "andsi_not_shiftsi_si"
3597 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3598 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
3599 [(match_operand:SI 2 "s_register_operand" "r,r")
3600 (match_operand:SI 3 "shift_amount_operand" "M,r")]))
3601 (match_operand:SI 1 "s_register_operand" "r,r")))]
3603 "bic%?\\t%0, %1, %2%S4"
3604 [(set_attr "predicable" "yes")
3605 (set_attr "shift" "2")
3606 (set_attr "arch" "32,a")
3607 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3610 ;; Shifted bics pattern used to set up CC status register and not reusing
3611 ;; bics output.  Pattern restricts Thumb2 shift operand as bics for Thumb2
3612 ;; does not support shift by register.
3613 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
3614 [(set (reg:CC_NZ CC_REGNUM)
3616 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3617 [(match_operand:SI 1 "s_register_operand" "r,r")
3618 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
3619 (match_operand:SI 3 "s_register_operand" "r,r"))
;; The BICS result itself is discarded into a scratch here.
3621 (clobber (match_scratch:SI 4 "=r,r"))]
3623 "bics%?\\t%4, %3, %1%S0"
3624 [(set_attr "predicable" "yes")
3625 (set_attr "arch" "32,a")
3626 (set_attr "conds" "set")
3627 (set_attr "shift" "1")
3628 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3631 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the bics result is also
3632 ;; getting reused later.
3633 (define_insn "andsi_not_shiftsi_si_scc"
3634 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3636 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3637 [(match_operand:SI 1 "s_register_operand" "r,r")
3638 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
3639 (match_operand:SI 3 "s_register_operand" "r,r"))
3641 (set (match_operand:SI 4 "s_register_operand" "=r,r")
3642 (and:SI (not:SI (match_op_dup 0
3647 "bics%?\\t%4, %3, %1%S0"
3648 [(set_attr "predicable" "yes")
3649 (set_attr "arch" "32,a")
3650 (set_attr "conds" "set")
3651 (set_attr "shift" "1")
3652 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
;; Unshifted BICS setting flags and keeping the result.
3655 (define_insn "*andsi_notsi_si_compare0"
3656 [(set (reg:CC_NZ CC_REGNUM)
3658 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3659 (match_operand:SI 1 "s_register_operand" "r"))
3661 (set (match_operand:SI 0 "s_register_operand" "=r")
3662 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
3665 [(set_attr "conds" "set")
3666 (set_attr "type" "logics_shift_reg")]
;; Unshifted BICS for the flags only; result goes to a scratch.
3669 (define_insn "*andsi_notsi_si_compare0_scratch"
3670 [(set (reg:CC_NZ CC_REGNUM)
3672 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3673 (match_operand:SI 1 "s_register_operand" "r"))
3675 (clobber (match_scratch:SI 0 "=r"))]
3678 [(set_attr "conds" "set")
3679 (set_attr "type" "logics_shift_reg")]
;; 32-bit inclusive-OR.  The expander forces or splits constant operands
;; via arm_split_constant/DONT_EARLY_SPLIT_CONSTANT; the Thumb-1 path
;; copies through a temporary when op0 == op1.
3682 (define_expand "iorsi3"
3683 [(set (match_operand:SI 0 "s_register_operand")
3684 (ior:SI (match_operand:SI 1 "s_register_operand")
3685 (match_operand:SI 2 "reg_or_int_operand")))]
3688 if (CONST_INT_P (operands[2]))
3692 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
3693 operands[2] = force_reg (SImode, operands[2]);
3696 arm_split_constant (IOR, SImode, NULL_RTX,
3697 INTVAL (operands[2]), operands[0],
3699 optimize && can_create_pseudo_p ());
3703 else /* TARGET_THUMB1 */
3705 rtx tmp = force_reg (SImode, operands[2]);
3706 if (rtx_equal_p (operands[0], operands[1]))
3710 operands[2] = operands[1];
;; ORR insn; alternatives include Thumb-2 short form and ORN of the
;; bitwise-NOT immediate (%B2).  Non-encodable constants are split late.
3718 (define_insn_and_split "*iorsi3_insn"
3719 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
3720 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
3721 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
3726 orn%?\\t%0, %1, #%B2
3730 && CONST_INT_P (operands[2])
3731 && !(const_ok_for_arm (INTVAL (operands[2]))
3732 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
3733 [(clobber (const_int 0))]
3735 arm_split_constant (IOR, SImode, curr_insn,
3736 INTVAL (operands[2]), operands[0], operands[1], 0);
3739 [(set_attr "length" "4,4,4,4,16")
3740 (set_attr "arch" "32,t2,t2,32,32")
3741 (set_attr "predicable" "yes")
3742 (set_attr "predicable_short_it" "no,yes,no,no,no")
3743 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
;; Peephole: materialise a constant whose complement IS encodable into a
;; scratch and OR from the register instead.
;; NOTE(review): the define_peephole2 header line is elided in this copy.
3747 [(match_scratch:SI 3 "r")
3748 (set (match_operand:SI 0 "arm_general_register_operand" "")
3749 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
3750 (match_operand:SI 2 "const_int_operand" "")))]
3752 && !const_ok_for_arm (INTVAL (operands[2]))
3753 && const_ok_for_arm (~INTVAL (operands[2]))"
3754 [(set (match_dup 3) (match_dup 2))
3755 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
;; ORRS: OR that also sets the N/Z flags, result kept ...
3759 (define_insn "*iorsi3_compare0"
3760 [(set (reg:CC_NZ CC_REGNUM)
3762 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3763 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3765 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
3766 (ior:SI (match_dup 1) (match_dup 2)))]
3768 "orrs%?\\t%0, %1, %2"
3769 [(set_attr "conds" "set")
3770 (set_attr "arch" "*,t2,*")
3771 (set_attr "length" "4,2,4")
3772 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
;; ... and the flags-only variant with a scratch result.
3775 (define_insn "*iorsi3_compare0_scratch"
3776 [(set (reg:CC_NZ CC_REGNUM)
3778 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3779 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3781 (clobber (match_scratch:SI 0 "=r,l,r"))]
3783 "orrs%?\\t%0, %1, %2"
3784 [(set_attr "conds" "set")
3785 (set_attr "arch" "*,t2,*")
3786 (set_attr "length" "4,2,4")
3787 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
;; 32-bit exclusive-OR; structured like iorsi3 above (constant splitting
;; in the expander, late split in the insn, EORS flag-setting variants).
3790 (define_expand "xorsi3"
3791 [(set (match_operand:SI 0 "s_register_operand")
3792 (xor:SI (match_operand:SI 1 "s_register_operand")
3793 (match_operand:SI 2 "reg_or_int_operand")))]
3795 "if (CONST_INT_P (operands[2]))
3799 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
3800 operands[2] = force_reg (SImode, operands[2]);
3803 arm_split_constant (XOR, SImode, NULL_RTX,
3804 INTVAL (operands[2]), operands[0],
3806 optimize && can_create_pseudo_p ());
3810 else /* TARGET_THUMB1 */
3812 rtx tmp = force_reg (SImode, operands[2]);
3813 if (rtx_equal_p (operands[0], operands[1]))
3817 operands[2] = operands[1];
;; EOR insn; non-encodable constant operands are split after the insn is
;; matched (last alternative, length 16).
3824 (define_insn_and_split "*arm_xorsi3"
3825 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
3826 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
3827 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
3835 && CONST_INT_P (operands[2])
3836 && !const_ok_for_arm (INTVAL (operands[2]))"
3837 [(clobber (const_int 0))]
3839 arm_split_constant (XOR, SImode, curr_insn,
3840 INTVAL (operands[2]), operands[0], operands[1], 0);
3843 [(set_attr "length" "4,4,4,16")
3844 (set_attr "predicable" "yes")
3845 (set_attr "predicable_short_it" "no,yes,no,no")
3846 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
;; EORS keeping the result ...
3849 (define_insn "*xorsi3_compare0"
3850 [(set (reg:CC_NZ CC_REGNUM)
3851 (compare:CC_NZ (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
3852 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3854 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3855 (xor:SI (match_dup 1) (match_dup 2)))]
3857 "eors%?\\t%0, %1, %2"
3858 [(set_attr "conds" "set")
3859 (set_attr "type" "logics_imm,logics_reg")]
;; ... and the flags-only comparison (TEQ-style, no result register).
3862 (define_insn "*xorsi3_compare0_scratch"
3863 [(set (reg:CC_NZ CC_REGNUM)
3864 (compare:CC_NZ (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
3865 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
3869 [(set_attr "conds" "set")
3870 (set_attr "type" "logics_imm,logics_reg")]
3873 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
3874 ; (NOT D) we can sometimes merge the final NOT into one of the following
;; De Morgan rewrite split: compute AND (IOR A B) (NOT C) into scratch
;; op4, then NOT it into op0.
;; NOTE(review): the define_split header line is elided in this copy.
3878 [(set (match_operand:SI 0 "s_register_operand" "")
3879 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3880 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3881 (match_operand:SI 3 "arm_rhs_operand" "")))
3882 (clobber (match_operand:SI 4 "s_register_operand" ""))]
3884 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3885 (not:SI (match_dup 3))))
3886 (set (match_dup 0) (not:SI (match_dup 4)))]
;; (AND (IOR A B) (NOT C)): two insns, ORR then BIC, split after reload.
3890 (define_insn_and_split "*andsi_iorsi3_notsi"
3891 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3892 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3893 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3894 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3896 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3897 "&& reload_completed"
3898 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
3899 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
3901 /* If operands[3] is a constant make sure to fold the NOT into it
3902 to avoid creating a NOT of a CONST_INT.  */
3903 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
3904 if (CONST_INT_P (not_rtx))
3906 operands[4] = operands[0];
3907 operands[5] = not_rtx;
3911 operands[5] = operands[0];
3912 operands[4] = not_rtx;
3915 [(set_attr "length" "8")
3916 (set_attr "ce_count" "2")
3917 (set_attr "predicable" "yes")
3918 (set_attr "type" "multiple")]
3921 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3922 ; insns are available?
;; Four splitters combining a zero_/sign_extract with a same-code logical
;; op whose other arm is a shift of matching width (op3 == 32 - op6): the
;; extract is rewritten as shift-left into scratch op8 then shift-right.
;; NOTE(review): each (define_split header line is elided in this copy.
3924 [(set (match_operand:SI 0 "s_register_operand" "")
3925 (match_operator:SI 1 "logical_binary_operator"
3926 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3927 (match_operand:SI 3 "const_int_operand" "")
3928 (match_operand:SI 4 "const_int_operand" ""))
3929 (match_operator:SI 9 "logical_binary_operator"
3930 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3931 (match_operand:SI 6 "const_int_operand" ""))
3932 (match_operand:SI 7 "s_register_operand" "")])]))
3933 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3935 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3936 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3939 [(ashift:SI (match_dup 2) (match_dup 4))
3943 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3946 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Same, extract on the right-hand side of the outer operator.
3950 [(set (match_operand:SI 0 "s_register_operand" "")
3951 (match_operator:SI 1 "logical_binary_operator"
3952 [(match_operator:SI 9 "logical_binary_operator"
3953 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3954 (match_operand:SI 6 "const_int_operand" ""))
3955 (match_operand:SI 7 "s_register_operand" "")])
3956 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3957 (match_operand:SI 3 "const_int_operand" "")
3958 (match_operand:SI 4 "const_int_operand" ""))]))
3959 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3961 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3962 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3965 [(ashift:SI (match_dup 2) (match_dup 4))
3969 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3972 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; sign_extract variants: the recombining shift is arithmetic (ashiftrt).
3976 [(set (match_operand:SI 0 "s_register_operand" "")
3977 (match_operator:SI 1 "logical_binary_operator"
3978 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3979 (match_operand:SI 3 "const_int_operand" "")
3980 (match_operand:SI 4 "const_int_operand" ""))
3981 (match_operator:SI 9 "logical_binary_operator"
3982 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3983 (match_operand:SI 6 "const_int_operand" ""))
3984 (match_operand:SI 7 "s_register_operand" "")])]))
3985 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3987 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3988 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3991 [(ashift:SI (match_dup 2) (match_dup 4))
3995 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3998 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; And the commuted sign_extract form.
4002 [(set (match_operand:SI 0 "s_register_operand" "")
4003 (match_operator:SI 1 "logical_binary_operator"
4004 [(match_operator:SI 9 "logical_binary_operator"
4005 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
4006 (match_operand:SI 6 "const_int_operand" ""))
4007 (match_operand:SI 7 "s_register_operand" "")])
4008 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
4009 (match_operand:SI 3 "const_int_operand" "")
4010 (match_operand:SI 4 "const_int_operand" ""))]))
4011 (clobber (match_operand:SI 8 "s_register_operand" ""))]
4013 && GET_CODE (operands[1]) == GET_CODE (operands[9])
4014 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
4017 [(ashift:SI (match_dup 2) (match_dup 4))
4021 [(ashiftrt:SI (match_dup 8) (match_dup 6))
4024 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
4028 ;; Minimum and maximum insns
;; Signed max: clamp-to-zero and clamp-to-minus-one cases are emitted
;; flag-free (they map to single shift-combined logic ops below); the
;; general case clobbers CC.
4030 (define_expand "smaxsi3"
4032 (set (match_operand:SI 0 "s_register_operand")
4033 (smax:SI (match_operand:SI 1 "s_register_operand")
4034 (match_operand:SI 2 "arm_rhs_operand")))
4035 (clobber (reg:CC CC_REGNUM))])]
4038 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
4040 /* No need for a clobber of the condition code register here.  */
4041 emit_insn (gen_rtx_SET (operands[0],
4042 gen_rtx_SMAX (SImode, operands[1],
;; max(x, 0): clear all bits when x is negative using its own sign bits.
4048 (define_insn "*smax_0"
4049 [(set (match_operand:SI 0 "s_register_operand" "=r")
4050 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
4053 "bic%?\\t%0, %1, %1, asr #31"
4054 [(set_attr "predicable" "yes")
4055 (set_attr "type" "logic_shift_reg")]
;; max(x, -1): set all bits when x is negative.
4058 (define_insn "*smax_m1"
4059 [(set (match_operand:SI 0 "s_register_operand" "=r")
4060 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
4063 "orr%?\\t%0, %1, %1, asr #31"
4064 [(set_attr "predicable" "yes")
4065 (set_attr "type" "logic_shift_reg")]
;; General signed max: cmp followed by conditional move(s); split after
;; matching into an explicit compare + if_then_else on GE.
4068 (define_insn_and_split "*arm_smax_insn"
4069 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4070 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
4071 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
4072 (clobber (reg:CC CC_REGNUM))]
4075 ; cmp\\t%1, %2\;movlt\\t%0, %2
4076 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
4078 [(set (reg:CC CC_REGNUM)
4079 (compare:CC (match_dup 1) (match_dup 2)))
4081 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
4085 [(set_attr "conds" "clob")
4086 (set_attr "length" "8,12")
4087 (set_attr "type" "multiple")]
;; Signed min: mirror of smaxsi3, special-casing only min(x, 0).
4090 (define_expand "sminsi3"
4092 (set (match_operand:SI 0 "s_register_operand")
4093 (smin:SI (match_operand:SI 1 "s_register_operand")
4094 (match_operand:SI 2 "arm_rhs_operand")))
4095 (clobber (reg:CC CC_REGNUM))])]
4098 if (operands[2] == const0_rtx)
4100 /* No need for a clobber of the condition code register here.  */
4101 emit_insn (gen_rtx_SET (operands[0],
4102 gen_rtx_SMIN (SImode, operands[1],
;; min(x, 0): keep x only when negative, via AND with its sign bits.
4108 (define_insn "*smin_0"
4109 [(set (match_operand:SI 0 "s_register_operand" "=r")
4110 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
4113 "and%?\\t%0, %1, %1, asr #31"
4114 [(set_attr "predicable" "yes")
4115 (set_attr "type" "logic_shift_reg")]
;; General signed min: cmp + conditional moves, split on LT.
4118 (define_insn_and_split "*arm_smin_insn"
4119 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4120 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
4121 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
4122 (clobber (reg:CC CC_REGNUM))]
4125 ; cmp\\t%1, %2\;movge\\t%0, %2
4126 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
4128 [(set (reg:CC CC_REGNUM)
4129 (compare:CC (match_dup 1) (match_dup 2)))
4131 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
4135 [(set_attr "conds" "clob")
4136 (set_attr "length" "8,12")
4137 (set_attr "type" "multiple,multiple")]
;; Unsigned max/min: no flag-free special cases; always cmp + conditional
;; moves on the carry condition (CS/CC), split after matching.
4140 (define_expand "umaxsi3"
4142 (set (match_operand:SI 0 "s_register_operand")
4143 (umax:SI (match_operand:SI 1 "s_register_operand")
4144 (match_operand:SI 2 "arm_rhs_operand")))
4145 (clobber (reg:CC CC_REGNUM))])]
4150 (define_insn_and_split "*arm_umaxsi3"
4151 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
4152 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
4153 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
4154 (clobber (reg:CC CC_REGNUM))]
4157 ; cmp\\t%1, %2\;movcc\\t%0, %2
4158 ; cmp\\t%1, %2\;movcs\\t%0, %1
4159 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
4161 [(set (reg:CC CC_REGNUM)
4162 (compare:CC (match_dup 1) (match_dup 2)))
4164 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
4168 [(set_attr "conds" "clob")
4169 (set_attr "length" "8,8,12")
;; NOTE(review): type "store_4" looks out of place for a cmp/mov sequence
;; ("multiple" would be expected) -- confirm against upstream arm.md.
4170 (set_attr "type" "store_4")]
4173 (define_expand "uminsi3"
4175 (set (match_operand:SI 0 "s_register_operand")
4176 (umin:SI (match_operand:SI 1 "s_register_operand")
4177 (match_operand:SI 2 "arm_rhs_operand")))
4178 (clobber (reg:CC CC_REGNUM))])]
;; Unsigned min: as umax with the condition sense inverted (LTU).
4183 (define_insn_and_split "*arm_uminsi3"
4184 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
4185 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
4186 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
4187 (clobber (reg:CC CC_REGNUM))]
4190 ; cmp\\t%1, %2\;movcs\\t%0, %2
4191 ; cmp\\t%1, %2\;movcc\\t%0, %1
4192 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
4194 [(set (reg:CC CC_REGNUM)
4195 (compare:CC (match_dup 1) (match_dup 2)))
4197 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
4201 [(set_attr "conds" "clob")
4202 (set_attr "length" "8,8,12")
4203 (set_attr "type" "store_4")]
;; Store min/max(reg, reg) straight to memory: cmp then two conditional
;; stores (with an IT block on Thumb-2).  Size optimisation only, and
;; disabled under -mrestrict-it.
4206 (define_insn "*store_minmaxsi"
4207 [(set (match_operand:SI 0 "memory_operand" "=m")
4208 (match_operator:SI 3 "minmax_operator"
4209 [(match_operand:SI 1 "s_register_operand" "r")
4210 (match_operand:SI 2 "s_register_operand" "r")]))
4211 (clobber (reg:CC CC_REGNUM))]
4212 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
4214 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
4215 operands[1], operands[2]);
4216 output_asm_insn (\"cmp\\t%1, %2\", operands);
4218 output_asm_insn (\"ite\t%d3\", operands);
4219 output_asm_insn (\"str%d3\\t%1, %0\", operands);
4220 output_asm_insn (\"str%D3\\t%2, %0\", operands);
4223 [(set_attr "conds" "clob")
4224 (set (attr "length")
4225 (if_then_else (eq_attr "is_thumb" "yes")
4228 (set_attr "type" "store_4")]
4231 ; Reject the frame pointer in operand[1], since reloading this after
4232 ; it has been eliminated can cause carnage.
;; shiftable_op(minmax(op2, op3), op1): cmp then two conditional ALU ops.
;; First alternative with op3 == 0 and a commutative op only needs the
;; inverse-condition instruction (handled in the C template).
4233 (define_insn "*minmax_arithsi"
4234 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4235 (match_operator:SI 4 "shiftable_operator"
4236 [(match_operator:SI 5 "minmax_operator"
4237 [(match_operand:SI 2 "s_register_operand" "r,r")
4238 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
4239 (match_operand:SI 1 "s_register_operand" "0,?r")]))
4240 (clobber (reg:CC CC_REGNUM))]
4241 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
4244 enum rtx_code code = GET_CODE (operands[4]);
4247 if (which_alternative != 0 || operands[3] != const0_rtx
4248 || (code != PLUS && code != IOR && code != XOR))
4253 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
4254 operands[2], operands[3]);
4255 output_asm_insn (\"cmp\\t%2, %3\", operands);
4259 output_asm_insn (\"ite\\t%d5\", operands);
4261 output_asm_insn (\"it\\t%d5\", operands);
4263 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
4265 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
4268 [(set_attr "conds" "clob")
4269 (set (attr "length")
4270 (if_then_else (eq_attr "is_thumb" "yes")
4273 (set_attr "type" "multiple")]
4276 ; Reject the frame pointer in operand[1], since reloading this after
4277 ; it has been eliminated can cause carnage.
;; op1 - minmax(op2, op3): split after reload into cmp plus two
;; cond_exec'd operations (a SUB under one condition, the reverse under
;; the other); the prep code builds the reversed condition in op5 and the
;; subtraction RTX in op6 (plus_constant when op3 is a constant).
4278 (define_insn_and_split "*minmax_arithsi_non_canon"
4279 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
4281 (match_operand:SI 1 "s_register_operand" "0,?Ts")
4282 (match_operator:SI 4 "minmax_operator"
4283 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
4284 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
4285 (clobber (reg:CC CC_REGNUM))]
4286 "TARGET_32BIT && !arm_eliminable_register (operands[1])
4287 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
4289 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
4290 [(set (reg:CC CC_REGNUM)
4291 (compare:CC (match_dup 2) (match_dup 3)))
4293 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
4295 (minus:SI (match_dup 1)
4297 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
4301 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
4302 operands[2], operands[3]);
4303 enum rtx_code rc = minmax_code (operands[4]);
4304 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
4305 operands[2], operands[3]);
4307 if (mode == CCFPmode || mode == CCFPEmode)
4308 rc = reverse_condition_maybe_unordered (rc);
4310 rc = reverse_condition (rc);
4311 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
4312 if (CONST_INT_P (operands[3]))
4313 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
4315 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
4317 [(set_attr "conds" "clob")
4318 (set (attr "length")
4319 (if_then_else (eq_attr "is_thumb" "yes")
4322 (set_attr "type" "multiple")]
4326 (define_expand "arm_<ss_op>"
4327 [(set (match_operand:SI 0 "s_register_operand")
4328 (SSPLUSMINUS:SI (match_operand:SI 1 "s_register_operand")
4329 (match_operand:SI 2 "s_register_operand")))]
4330 "TARGET_DSP_MULTIPLY"
4333 emit_insn (gen_arm_<ss_op>_setq_insn (operands[0],
4334 operands[1], operands[2]));
4336 emit_insn (gen_arm_<ss_op>_insn (operands[0], operands[1], operands[2]));
4341 (define_insn "arm_<ss_op><add_clobber_q_name>_insn"
4342 [(set (match_operand:SI 0 "s_register_operand" "=r")
4343 (SSPLUSMINUS:SI (match_operand:SI 1 "s_register_operand" "r")
4344 (match_operand:SI 2 "s_register_operand" "r")))]
4345 "TARGET_DSP_MULTIPLY && <add_clobber_q_pred>"
4346 "<ss_op>%?\t%0, %1, %2"
4347 [(set_attr "predicable" "yes")
4348 (set_attr "type" "alu_dsp_reg")]
;; Code iterator and attributes used by the saturation (ssat/usat) patterns
;; below.  SATrev maps each min/max code to its dual; SATlo/SAThi select
;; which operand number holds the lower/upper bound for a given nesting
;; order of smin/smax.
4351 (define_code_iterator SAT [smin smax])
4352 (define_code_attr SATrev [(smin "smax") (smax "smin")])
4353 (define_code_attr SATlo [(smin "1") (smax "2")])
4354 (define_code_attr SAThi [(smin "2") (smax "1")])
;; Expand the __ssat builtin: signed saturation of operand 1 to the
;; bit-width given by operand 2 (1..32).  The bounds are computed here and
;; the work is delegated to the satsi_smin pattern (setq variant when the
;; Q flag must be updated).
;; NOTE(review): branch condition lines are elided in this excerpt.
4356 (define_expand "arm_ssat"
4357 [(match_operand:SI 0 "s_register_operand")
4358 (match_operand:SI 1 "s_register_operand")
4359 (match_operand:SI 2 "const_int_operand")]
4360 "TARGET_32BIT && arm_arch6"
4362 HOST_WIDE_INT val = INTVAL (operands[2]);
4363 /* The builtin checking code should have ensured the right
4364 range for the immediate. */
4365 gcc_assert (IN_RANGE (val, 1, 32));
;; upper bound = 2^(val-1) - 1, lower bound = -2^(val-1): the signed range
;; representable in `val' bits.
4366 HOST_WIDE_INT upper_bound = (HOST_WIDE_INT_1 << (val - 1)) - 1;
4367 HOST_WIDE_INT lower_bound = -upper_bound - 1;
4368 rtx up_rtx = gen_int_mode (upper_bound, SImode);
4369 rtx lo_rtx = gen_int_mode (lower_bound, SImode);
4371 emit_insn (gen_satsi_smin_setq (operands[0], lo_rtx,
4372 up_rtx, operands[1]));
4374 emit_insn (gen_satsi_smin (operands[0], lo_rtx, up_rtx, operands[1]));
;; Expand the __usat builtin: unsigned saturation of operand 1 to the
;; bit-width given by operand 2 (0..31).  Lower bound is always zero.
4379 (define_expand "arm_usat"
4380 [(match_operand:SI 0 "s_register_operand")
4381 (match_operand:SI 1 "s_register_operand")
4382 (match_operand:SI 2 "const_int_operand")]
4383 "TARGET_32BIT && arm_arch6"
4385 HOST_WIDE_INT val = INTVAL (operands[2]);
4386 /* The builtin checking code should have ensured the right
4387 range for the immediate. */
4388 gcc_assert (IN_RANGE (val, 0, 31));
4389 HOST_WIDE_INT upper_bound = (HOST_WIDE_INT_1 << val) - 1;
4390 rtx up_rtx = gen_int_mode (upper_bound, SImode);
4391 rtx lo_rtx = CONST0_RTX (SImode);
4393 emit_insn (gen_satsi_smin_setq (operands[0], lo_rtx, up_rtx,
4396 emit_insn (gen_satsi_smin (operands[0], lo_rtx, up_rtx, operands[1]));
;; Read the APSR into a core register (mrs).  Modelled as an unspec read
;; of the Q-bit pseudo register so that dataflow on the Q flag is tracked.
;; NOTE(review): the insn condition and mrs template line are elided in
;; this excerpt.
4401 (define_insn "arm_get_apsr"
4402 [(set (match_operand:SI 0 "s_register_operand" "=r")
4403 (unspec:SI [(reg:CC APSRQ_REGNUM)] UNSPEC_APSR_READ))]
4406 [(set_attr "predicable" "yes")
4407 (set_attr "conds" "use")]
;; Write a core register back to the APSR flags (msr APSR_nzcvq).
4410 (define_insn "arm_set_apsr"
4411 [(set (reg:CC APSRQ_REGNUM)
4413 [(match_operand:SI 0 "s_register_operand" "r")] VUNSPEC_APSR_WRITE))]
4415 "msr%?\tAPSR_nzcvq, %0"
4416 [(set_attr "predicable" "yes")
4417 (set_attr "conds" "set")]
4420 ;; Read the APSR and extract the Q bit (bit 27)
4421 (define_expand "arm_saturation_occurred"
4422 [(match_operand:SI 0 "s_register_operand")]
4425 rtx apsr = gen_reg_rtx (SImode);
4426 emit_insn (gen_arm_get_apsr (apsr));
;; Zero-extract one bit at position 27 of the APSR value.
4427 emit_insn (gen_extzv (operands[0], apsr, CONST1_RTX (SImode),
4428 gen_int_mode (27, SImode)));
4433 ;; Read the APSR and set the Q bit (bit position 27) according to operand 0
4434 (define_expand "arm_set_saturation"
4435 [(match_operand:SI 0 "reg_or_int_operand")]
4438 rtx apsr = gen_reg_rtx (SImode);
4439 emit_insn (gen_arm_get_apsr (apsr));
4440 rtx to_insert = gen_reg_rtx (SImode);
;; A constant operand is normalised to 0 or 1 directly; otherwise emit a
;; compare-against-zero so that any non-zero value sets the bit.
4441 if (CONST_INT_P (operands[0]))
4442 emit_move_insn (to_insert, operands[0] == CONST0_RTX (SImode)
4443 ? CONST0_RTX (SImode) : CONST1_RTX (SImode));
4446 rtx cmp = gen_rtx_NE (SImode, operands[0], CONST0_RTX (SImode));
4447 emit_insn (gen_cstoresi4 (to_insert, cmp, operands[0],
4448 CONST0_RTX (SImode)));
;; Insert the computed bit at position 27 and write the APSR back.
4450 emit_insn (gen_insv (apsr, CONST1_RTX (SImode),
4451 gen_int_mode (27, SImode), to_insert));
4452 emit_insn (gen_arm_set_apsr (apsr));
;; Recognise smin/smax clamp pairs as a single ssat/usat instruction.
;; arm_sat_operator_match validates that the two bounds describe a
;; saturation range and (on the second call) returns the bit-width and
;; signedness used to pick the mnemonic.
;; NOTE(review): the output-selection lines between the calls are elided
;; in this excerpt.
4457 (define_insn "satsi_<SAT:code><add_clobber_q_name>"
4458 [(set (match_operand:SI 0 "s_register_operand" "=r")
4459 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
4460 (match_operand:SI 1 "const_int_operand" "i"))
4461 (match_operand:SI 2 "const_int_operand" "i")))]
4462 "TARGET_32BIT && arm_arch6 && <add_clobber_q_pred>
4463 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4467 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4468 &mask, &signed_sat))
4471 operands[1] = GEN_INT (mask);
4473 return "ssat%?\t%0, %1, %3";
4475 return "usat%?\t%0, %1, %3";
4477 [(set_attr "predicable" "yes")
4478 (set_attr "type" "alus_imm")]
;; Same clamp recognition, but with the saturated value coming from a
;; shifted operand (sat_shift_operator), emitting ssat/usat with a shift.
;; Only valid when the Q bit is not observed (!ARM_Q_BIT_READ).
4481 (define_insn "*satsi_<SAT:code>_shift"
4482 [(set (match_operand:SI 0 "s_register_operand" "=r")
4483 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
4484 [(match_operand:SI 4 "s_register_operand" "r")
4485 (match_operand:SI 5 "const_int_operand" "i")])
4486 (match_operand:SI 1 "const_int_operand" "i"))
4487 (match_operand:SI 2 "const_int_operand" "i")))]
4488 "TARGET_32BIT && arm_arch6 && !ARM_Q_BIT_READ
4489 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4493 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4494 &mask, &signed_sat))
4497 operands[1] = GEN_INT (mask);
4499 return "ssat%?\t%0, %1, %4%S3";
4501 return "usat%?\t%0, %1, %4%S3";
4503 [(set_attr "predicable" "yes")
4504 (set_attr "shift" "3")
4505 (set_attr "type" "logic_shift_reg")])
4507 ;; Custom Datapath Extension insns.
;; cx1: CDE instruction with no register source; operand 1 is the
;; coprocessor number, the last operand the immediate.  SIDI iterates
;; over SImode/DImode destinations (<cde_suffix>/<cde_dest>).
;; NOTE(review): the unspec names and insn conditions are elided in this
;; excerpt for all six cx patterns.
4508 (define_insn "arm_cx1<mode>"
4509 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4510 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4511 (match_operand:SI 2 "const_int_ccde1_operand" "i")]
4514 "cx1<cde_suffix>\\tp%c1, <cde_dest>, %2"
4515 [(set_attr "type" "coproc")]
;; cx1a: accumulator form — the destination is tied to operand 2 ("0").
4518 (define_insn "arm_cx1a<mode>"
4519 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4520 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4521 (match_operand:SIDI 2 "s_register_operand" "0")
4522 (match_operand:SI 3 "const_int_ccde1_operand" "i")]
4525 "cx1<cde_suffix>a\\tp%c1, <cde_dest>, %3"
4526 [(set_attr "type" "coproc")]
;; cx2: one register source operand plus immediate.
4529 (define_insn "arm_cx2<mode>"
4530 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4531 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4532 (match_operand:SI 2 "s_register_operand" "r")
4533 (match_operand:SI 3 "const_int_ccde2_operand" "i")]
4536 "cx2<cde_suffix>\\tp%c1, <cde_dest>, %2, %3"
4537 [(set_attr "type" "coproc")]
;; cx2a: accumulator form of cx2.
4540 (define_insn "arm_cx2a<mode>"
4541 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4542 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4543 (match_operand:SIDI 2 "s_register_operand" "0")
4544 (match_operand:SI 3 "s_register_operand" "r")
4545 (match_operand:SI 4 "const_int_ccde2_operand" "i")]
4548 "cx2<cde_suffix>a\\tp%c1, <cde_dest>, %3, %4"
4549 [(set_attr "type" "coproc")]
;; cx3: two register source operands plus immediate.
4552 (define_insn "arm_cx3<mode>"
4553 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4554 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4555 (match_operand:SI 2 "s_register_operand" "r")
4556 (match_operand:SI 3 "s_register_operand" "r")
4557 (match_operand:SI 4 "const_int_ccde3_operand" "i")]
4560 "cx3<cde_suffix>\\tp%c1, <cde_dest>, %2, %3, %4"
4561 [(set_attr "type" "coproc")]
;; cx3a: accumulator form of cx3.
4564 (define_insn "arm_cx3a<mode>"
4565 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4566 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4567 (match_operand:SIDI 2 "s_register_operand" "0")
4568 (match_operand:SI 3 "s_register_operand" "r")
4569 (match_operand:SI 4 "s_register_operand" "r")
4570 (match_operand:SI 5 "const_int_ccde3_operand" "i")]
4573 "cx3<cde_suffix>a\\tp%c1, <cde_dest>, %3, %4, %5"
4574 [(set_attr "type" "coproc")]
4577 ;; Shift and rotation insns
;; 64-bit left shift.  On MVE (little-endian) a single lsll can be used
;; unless the amount is the constant 32; otherwise fall back to the
;; generic core-register 64-bit shift expansion.
4579 (define_expand "ashldi3"
4580 [(set (match_operand:DI 0 "s_register_operand")
4581 (ashift:DI (match_operand:DI 1 "s_register_operand")
4582 (match_operand:SI 2 "reg_or_int_operand")))]
4585 if (TARGET_HAVE_MVE && !BYTES_BIG_ENDIAN)
4587 if (!reg_or_int_operand (operands[2], SImode))
4588 operands[2] = force_reg (SImode, operands[2]);
4590 /* Armv8.1-M Mainline double shifts are not expanded. */
4591 if (arm_reg_or_long_shift_imm (operands[2], GET_MODE (operands[2]))
4592 && (REG_P (operands[2]) || INTVAL(operands[2]) != 32))
;; lsll shifts its destination in place, so copy the source first when
;; the registers differ.
4594 if (!reg_overlap_mentioned_p(operands[0], operands[1]))
4595 emit_insn (gen_movdi (operands[0], operands[1]));
4597 emit_insn (gen_thumb2_lsll (operands[0], operands[2]));
4602 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
4603 operands[2], gen_reg_rtx (SImode),
4604 gen_reg_rtx (SImode));
;; 32-bit left shift: a constant amount > 31 yields zero.
4608 (define_expand "ashlsi3"
4609 [(set (match_operand:SI 0 "s_register_operand")
4610 (ashift:SI (match_operand:SI 1 "s_register_operand")
4611 (match_operand:SI 2 "arm_rhs_operand")))]
4614 if (CONST_INT_P (operands[2])
4615 && (UINTVAL (operands[2])) > 31)
4617 emit_insn (gen_movsi (operands[0], const0_rtx));
;; 64-bit arithmetic right shift; same MVE asrl special case as ashldi3.
4623 (define_expand "ashrdi3"
4624 [(set (match_operand:DI 0 "s_register_operand")
4625 (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
4626 (match_operand:SI 2 "reg_or_int_operand")))]
4629 /* Armv8.1-M Mainline double shifts are not expanded. */
4630 if (TARGET_HAVE_MVE && !BYTES_BIG_ENDIAN
4631 && arm_reg_or_long_shift_imm (operands[2], GET_MODE (operands[2])))
4633 if (!reg_overlap_mentioned_p(operands[0], operands[1]))
4634 emit_insn (gen_movdi (operands[0], operands[1]))
4636 emit_insn (gen_thumb2_asrl (operands[0], operands[2]));
4640 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
4641 operands[2], gen_reg_rtx (SImode),
4642 gen_reg_rtx (SImode));
;; 32-bit arithmetic right shift: clamp constant amounts above 31 to 31
;; (sign-fill is idempotent beyond that).
4646 (define_expand "ashrsi3"
4647 [(set (match_operand:SI 0 "s_register_operand")
4648 (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
4649 (match_operand:SI 2 "arm_rhs_operand")))]
4652 if (CONST_INT_P (operands[2])
4653 && UINTVAL (operands[2]) > 31)
4654 operands[2] = GEN_INT (31);
;; 64-bit logical right shift; MVE lsrl only takes immediate long-shift
;; amounts here (long_shift_imm, unlike the reg-or-imm asrl case above).
4658 (define_expand "lshrdi3"
4659 [(set (match_operand:DI 0 "s_register_operand")
4660 (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
4661 (match_operand:SI 2 "reg_or_int_operand")))]
4664 /* Armv8.1-M Mainline double shifts are not expanded. */
4665 if (TARGET_HAVE_MVE && !BYTES_BIG_ENDIAN
4666 && long_shift_imm (operands[2], GET_MODE (operands[2])))
4668 if (!reg_overlap_mentioned_p(operands[0], operands[1]))
4669 emit_insn (gen_movdi (operands[0], operands[1]));
4671 emit_insn (gen_thumb2_lsrl (operands[0], operands[2]));
4675 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
4676 operands[2], gen_reg_rtx (SImode),
4677 gen_reg_rtx (SImode));
;; 32-bit logical right shift: a constant amount > 31 yields zero.
4681 (define_expand "lshrsi3"
4682 [(set (match_operand:SI 0 "s_register_operand")
4683 (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
4684 (match_operand:SI 2 "arm_rhs_operand")))]
4687 if (CONST_INT_P (operands[2])
4688 && (UINTVAL (operands[2])) > 31)
4690 emit_insn (gen_movsi (operands[0], const0_rtx));
;; Rotate left is implemented as rotate right by (32 - n) mod 32; a
;; register amount needs a runtime 32-minus computation.
4696 (define_expand "rotlsi3"
4697 [(set (match_operand:SI 0 "s_register_operand")
4698 (rotatert:SI (match_operand:SI 1 "s_register_operand")
4699 (match_operand:SI 2 "reg_or_int_operand")))]
4702 if (CONST_INT_P (operands[2]))
4703 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
4706 rtx reg = gen_reg_rtx (SImode);
4707 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
;; Rotate right: reduce constant amounts mod 32; Thumb-1 has no
;; immediate rotate, so force constants into a register there.
4713 (define_expand "rotrsi3"
4714 [(set (match_operand:SI 0 "s_register_operand")
4715 (rotatert:SI (match_operand:SI 1 "s_register_operand")
4716 (match_operand:SI 2 "arm_rhs_operand")))]
4721 if (CONST_INT_P (operands[2])
4722 && UINTVAL (operands[2]) > 31)
4723 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
4725 else /* TARGET_THUMB1 */
4727 if (CONST_INT_P (operands [2]))
4728 operands [2] = force_reg (SImode, operands[2]);
;; Generic 32-bit shift insn; the shift kind comes from the
;; shift_operator match_operator and the asm is produced by
;; arm_output_shift.  Alternatives cover Thumb-2 short forms (l/M) and
;; full ARM register shifts.
4733 (define_insn "*arm_shiftsi3"
4734 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
4735 (match_operator:SI 3 "shift_operator"
4736 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
4737 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
4739 "* return arm_output_shift(operands, 0);"
4740 [(set_attr "predicable" "yes")
4741 (set_attr "arch" "t2,t2,*,*")
4742 (set_attr "predicable_short_it" "yes,yes,no,no")
4743 (set_attr "length" "4")
4744 (set_attr "shift" "1")
4745 (set_attr "autodetect_type" "alu_shift_operator3")]
;; Shift that also sets the N/Z condition flags (flag-setting form,
;; second arg 1 to arm_output_shift).
4748 (define_insn "*shiftsi3_compare0"
4749 [(set (reg:CC_NZ CC_REGNUM)
4750 (compare:CC_NZ (match_operator:SI 3 "shift_operator"
4751 [(match_operand:SI 1 "s_register_operand" "r,r")
4752 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4754 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4755 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
4757 "* return arm_output_shift(operands, 1);"
4758 [(set_attr "conds" "set")
4759 (set_attr "shift" "1")
4760 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
;; As above but the shifted result itself is discarded (scratch dest);
;; only the flags are wanted.
4763 (define_insn "*shiftsi3_compare0_scratch"
4764 [(set (reg:CC_NZ CC_REGNUM)
4765 (compare:CC_NZ (match_operator:SI 3 "shift_operator"
4766 [(match_operand:SI 1 "s_register_operand" "r,r")
4767 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4769 (clobber (match_scratch:SI 0 "=r,r"))]
4771 "* return arm_output_shift(operands, 1);"
4772 [(set_attr "conds" "set")
4773 (set_attr "shift" "1")
4774 (set_attr "type" "shift_imm,shift_reg")]
;; mvn with shifted operand (bitwise NOT of a shifted value).
;; NOTE(review): the insn condition and output template lines are elided
;; in this excerpt.
4777 (define_insn "*not_shiftsi"
4778 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4779 (not:SI (match_operator:SI 3 "shift_operator"
4780 [(match_operand:SI 1 "s_register_operand" "r,r")
4781 (match_operand:SI 2 "shift_amount_operand" "M,r")])))]
4784 [(set_attr "predicable" "yes")
4785 (set_attr "shift" "1")
4786 (set_attr "arch" "32,a")
4787 (set_attr "type" "mvn_shift,mvn_shift_reg")])
;; Flag-setting mvns with shifted operand.
4789 (define_insn "*not_shiftsi_compare0"
4790 [(set (reg:CC_NZ CC_REGNUM)
4792 (not:SI (match_operator:SI 3 "shift_operator"
4793 [(match_operand:SI 1 "s_register_operand" "r,r")
4794 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
4796 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4797 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
4799 "mvns%?\\t%0, %1%S3"
4800 [(set_attr "conds" "set")
4801 (set_attr "shift" "1")
4802 (set_attr "arch" "32,a")
4803 (set_attr "type" "mvn_shift,mvn_shift_reg")])
;; Flag-setting mvns with shifted operand, result discarded.
4805 (define_insn "*not_shiftsi_compare0_scratch"
4806 [(set (reg:CC_NZ CC_REGNUM)
4808 (not:SI (match_operator:SI 3 "shift_operator"
4809 [(match_operand:SI 1 "s_register_operand" "r,r")
4810 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
4812 (clobber (match_scratch:SI 0 "=r,r"))]
4814 "mvns%?\\t%0, %1%S3"
4815 [(set_attr "conds" "set")
4816 (set_attr "shift" "1")
4817 (set_attr "arch" "32,a")
4818 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4820 ;; We don't really have extzv, but defining this using shifts helps
4821 ;; to reduce register pressure later on.
;; Zero-extract expander: operand 2 is the field width, operand 3 the bit
;; position.  Thumb-2 can use ubfx (extzv_t2) or, for byte-aligned 16/32
;; bit fields in memory, an unaligned load; Thumb-1 synthesises the
;; extract with a left shift then logical right shift (extzv_t1).
4823 (define_expand "extzv"
4824 [(set (match_operand 0 "s_register_operand")
4825 (zero_extract (match_operand 1 "nonimmediate_operand")
4826 (match_operand 2 "const_int_operand")
4827 (match_operand 3 "const_int_operand")))]
4828 "TARGET_THUMB1 || arm_arch_thumb2"
4831 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
4832 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
4834 if (arm_arch_thumb2)
4836 HOST_WIDE_INT width = INTVAL (operands[2]);
4837 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4839 if (unaligned_access && MEM_P (operands[1])
4840 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
;; Big-endian flips the bit position within the containing word.
4844 if (BYTES_BIG_ENDIAN)
4845 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
4850 base_addr = adjust_address (operands[1], SImode,
4851 bitpos / BITS_PER_UNIT);
4852 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4856 rtx dest = operands[0];
4857 rtx tmp = gen_reg_rtx (SImode);
4859 /* We may get a paradoxical subreg here. Strip it off. */
4860 if (GET_CODE (dest) == SUBREG
4861 && GET_MODE (dest) == SImode
4862 && GET_MODE (SUBREG_REG (dest)) == HImode)
4863 dest = SUBREG_REG (dest);
4865 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4868 base_addr = adjust_address (operands[1], HImode,
4869 bitpos / BITS_PER_UNIT);
4870 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
4871 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4875 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
4877 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
;; Thumb-1 fallback: a field at bit 0 needs only the right shift.
4885 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4888 operands[3] = GEN_INT (rshift);
4892 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
4896 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
4897 operands[3], gen_reg_rtx (SImode)));
4902 ;; Helper for extzv, for the Thumb-1 register-shifts case.
;; Shift left by operand 2 into scratch operand 4, then logical shift
;; right by operand 3 into the destination.
4904 (define_expand "extzv_t1"
4905 [(set (match_operand:SI 4 "s_register_operand")
4906 (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
4907 (match_operand:SI 2 "const_int_operand")))
4908 (set (match_operand:SI 0 "s_register_operand")
4909 (lshiftrt:SI (match_dup 4)
4910 (match_operand:SI 3 "const_int_operand")))]
;; Sign-extract expander: mirror of extzv using signed loads/sbfx.
4914 (define_expand "extv"
4915 [(set (match_operand 0 "s_register_operand")
4916 (sign_extract (match_operand 1 "nonimmediate_operand")
4917 (match_operand 2 "const_int_operand")
4918 (match_operand 3 "const_int_operand")))]
4921 HOST_WIDE_INT width = INTVAL (operands[2]);
4922 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4924 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
4925 && (bitpos % BITS_PER_UNIT) == 0)
4929 if (BYTES_BIG_ENDIAN)
4930 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
4934 base_addr = adjust_address (operands[1], SImode,
4935 bitpos / BITS_PER_UNIT);
4936 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4940 rtx dest = operands[0];
4941 rtx tmp = gen_reg_rtx (SImode);
4943 /* We may get a paradoxical subreg here. Strip it off. */
4944 if (GET_CODE (dest) == SUBREG
4945 && GET_MODE (dest) == SImode
4946 && GET_MODE (SUBREG_REG (dest)) == HImode)
4947 dest = SUBREG_REG (dest);
4949 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4952 base_addr = adjust_address (operands[1], HImode,
4953 bitpos / BITS_PER_UNIT);
4954 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
4955 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4960 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4962 else if (GET_MODE (operands[0]) == SImode
4963 && GET_MODE (operands[1]) == SImode)
4965 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
4973 ; Helper to expand register forms of extv with the proper modes.
4975 (define_expand "extv_regsi"
4976 [(set (match_operand:SI 0 "s_register_operand")
4977 (sign_extract:SI (match_operand:SI 1 "s_register_operand")
4978 (match_operand 2 "const_int_operand")
4979 (match_operand 3 "const_int_operand")))]
4984 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
;; 64-bit unaligned load via ldrd-capable move (output_move_double).
4986 (define_insn "unaligned_loaddi"
4987 [(set (match_operand:DI 0 "s_register_operand" "=r")
4988 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
4989 UNSPEC_UNALIGNED_LOAD))]
4990 "TARGET_32BIT && TARGET_LDRD"
4992 return output_move_double (operands, true, NULL);
4994 [(set_attr "length" "8")
4995 (set_attr "type" "load_8")])
;; 32-bit unaligned load; wrapped in an unspec so the target-independent
;; code cannot fold it into an aligned access.  Alternatives: Thumb-1,
;; Thumb-2 short form (Uw), full 32-bit.
4997 (define_insn "unaligned_loadsi"
4998 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4999 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
5000 UNSPEC_UNALIGNED_LOAD))]
5003 ldr\t%0, %1\t@ unaligned
5004 ldr%?\t%0, %1\t@ unaligned
5005 ldr%?\t%0, %1\t@ unaligned"
5006 [(set_attr "arch" "t1,t2,32")
5007 (set_attr "length" "2,2,4")
5008 (set_attr "predicable" "no,yes,yes")
5009 (set_attr "predicable_short_it" "no,yes,no")
5010 (set_attr "type" "load_4")])
5012 ;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
5013 ;; address (there's no immediate format). That's tricky to support
5014 ;; here and we don't really need this pattern for that case, so only
5015 ;; enable for 32-bit ISAs.
5016 (define_insn "unaligned_loadhis"
5017 [(set (match_operand:SI 0 "s_register_operand" "=r")
5019 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
5020 UNSPEC_UNALIGNED_LOAD)))]
5021 "unaligned_access && TARGET_32BIT"
5022 "ldrsh%?\t%0, %1\t@ unaligned"
5023 [(set_attr "predicable" "yes")
5024 (set_attr "type" "load_byte")])
;; Zero-extending 16-bit unaligned load (ldrh).
5026 (define_insn "unaligned_loadhiu"
5027 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
5029 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
5030 UNSPEC_UNALIGNED_LOAD)))]
5033 ldrh\t%0, %1\t@ unaligned
5034 ldrh%?\t%0, %1\t@ unaligned
5035 ldrh%?\t%0, %1\t@ unaligned"
5036 [(set_attr "arch" "t1,t2,32")
5037 (set_attr "length" "2,2,4")
5038 (set_attr "predicable" "no,yes,yes")
5039 (set_attr "predicable_short_it" "no,yes,no")
5040 (set_attr "type" "load_byte")])
;; 64-bit unaligned store, mirror of unaligned_loaddi.
5042 (define_insn "unaligned_storedi"
5043 [(set (match_operand:DI 0 "memory_operand" "=m")
5044 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
5045 UNSPEC_UNALIGNED_STORE))]
5046 "TARGET_32BIT && TARGET_LDRD"
5048 return output_move_double (operands, true, NULL);
5050 [(set_attr "length" "8")
5051 (set_attr "type" "store_8")])
;; 32-bit unaligned store (str).
5053 (define_insn "unaligned_storesi"
5054 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
5055 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
5056 UNSPEC_UNALIGNED_STORE))]
5059 str\t%1, %0\t@ unaligned
5060 str%?\t%1, %0\t@ unaligned
5061 str%?\t%1, %0\t@ unaligned"
5062 [(set_attr "arch" "t1,t2,32")
5063 (set_attr "length" "2,2,4")
5064 (set_attr "predicable" "no,yes,yes")
5065 (set_attr "predicable_short_it" "no,yes,no")
5066 (set_attr "type" "store_4")])
;; 16-bit unaligned store (strh).
5068 (define_insn "unaligned_storehi"
5069 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
5070 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
5071 UNSPEC_UNALIGNED_STORE))]
5074 strh\t%1, %0\t@ unaligned
5075 strh%?\t%1, %0\t@ unaligned
5076 strh%?\t%1, %0\t@ unaligned"
5077 [(set_attr "arch" "t1,t2,32")
5078 (set_attr "length" "2,2,4")
5079 (set_attr "predicable" "no,yes,yes")
5080 (set_attr "predicable_short_it" "no,yes,no")
5081 (set_attr "type" "store_4")])
;; Signed bitfield extract from a register: sbfx dest, src, lsb, width.
;; Operand 3 (lsb) must be 0..31 and operand 2 (width) 1..(32-lsb).
;; NOTE(review): the leading insn condition line is elided in this excerpt.
5084 (define_insn "*extv_reg"
5085 [(set (match_operand:SI 0 "s_register_operand" "=r")
5086 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
5087 (match_operand:SI 2 "const_int_operand" "n")
5088 (match_operand:SI 3 "const_int_operand" "n")))]
5090 && IN_RANGE (INTVAL (operands[3]), 0, 31)
5091 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
5092 "sbfx%?\t%0, %1, %3, %2"
5093 [(set_attr "length" "4")
5094 (set_attr "predicable" "yes")
5095 (set_attr "type" "bfm")]
;; Unsigned bitfield extract: ubfx, same range constraints.
5098 (define_insn "extzv_t2"
5099 [(set (match_operand:SI 0 "s_register_operand" "=r")
5100 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
5101 (match_operand:SI 2 "const_int_operand" "n")
5102 (match_operand:SI 3 "const_int_operand" "n")))]
5104 && IN_RANGE (INTVAL (operands[3]), 0, 31)
5105 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
5106 "ubfx%?\t%0, %1, %3, %2"
5107 [(set_attr "length" "4")
5108 (set_attr "predicable" "yes")
5109 (set_attr "type" "bfm")]
5113 ;; Division instructions
;; Hardware signed divide (sdiv); alternatives for 32-bit ISAs and
;; Armv8-M Baseline.  NOTE(review): condition and template lines are
;; elided in this excerpt.
5114 (define_insn "divsi3"
5115 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5116 (div:SI (match_operand:SI 1 "s_register_operand" "r,r")
5117 (match_operand:SI 2 "s_register_operand" "r,r")))]
5122 [(set_attr "arch" "32,v8mb")
5123 (set_attr "predicable" "yes")
5124 (set_attr "type" "sdiv")]
;; Hardware unsigned divide (udiv).
5127 (define_insn "udivsi3"
5128 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5129 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
5130 (match_operand:SI 2 "s_register_operand" "r,r")))]
5135 [(set_attr "arch" "32,v8mb")
5136 (set_attr "predicable" "yes")
5137 (set_attr "type" "udiv")]
5141 ;; Unary arithmetic insns
;; Overflow-checking negation: expand as (0 - x) through the subv
;; pattern, which traps/branches via operand 2 on signed overflow.
5143 (define_expand "negv<SIDI:mode>3"
5144 [(match_operand:SIDI 0 "s_register_operand")
5145 (match_operand:SIDI 1 "s_register_operand")
5146 (match_operand 2 "")]
5149 emit_insn (gen_subv<mode>4 (operands[0], const0_rtx, operands[1],
;; Plain SImode negation expander.
5154 (define_expand "negsi2"
5155 [(set (match_operand:SI 0 "s_register_operand")
5156 (neg:SI (match_operand:SI 1 "s_register_operand")))]
;; neg implemented as reverse-subtract from zero (rsb).
5161 (define_insn "*arm_negsi2"
5162 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
5163 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
5165 "rsb%?\\t%0, %1, #0"
5166 [(set_attr "predicable" "yes")
5167 (set_attr "predicable_short_it" "yes,no")
5168 (set_attr "arch" "t2,*")
5169 (set_attr "length" "4")
5170 (set_attr "type" "alu_imm")]
5173 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
5174 ;; rather than (0 cmp reg). This gives the same results for unsigned
5175 ;; and equality compares which is what we mostly need here.
5176 (define_insn "negsi2_0compare"
5177 [(set (reg:CC_RSB CC_REGNUM)
5178 (compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r"))
5180 (set (match_operand:SI 0 "s_register_operand" "=l,r")
5181 (neg:SI (match_dup 1)))]
5186 [(set_attr "conds" "set")
5187 (set_attr "arch" "t2,*")
5188 (set_attr "length" "2,*")
5189 (set_attr "type" "alus_imm")]
;; Negate with borrow-in: -(x) - borrow, used in multi-word negation.
5192 (define_insn "negsi2_carryin"
5193 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5194 (minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r"))
5195 (match_operand:SI 2 "arm_borrow_operation" "")))]
5199 sbc\\t%0, %1, %1, lsl #1"
5200 [(set_attr "conds" "use")
5201 (set_attr "arch" "a,t2")
5202 (set_attr "type" "adc_imm,adc_reg")]
;; FP negation expanders; the matching insns live in vfp.md.
5205 (define_expand "negsf2"
5206 [(set (match_operand:SF 0 "s_register_operand")
5207 (neg:SF (match_operand:SF 1 "s_register_operand")))]
5208 "TARGET_32BIT && TARGET_HARD_FLOAT"
5212 (define_expand "negdf2"
5213 [(set (match_operand:DF 0 "s_register_operand")
5214 (neg:DF (match_operand:DF 1 "s_register_operand")))]
5215 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
5218 ;; abssi2 doesn't really clobber the condition codes if a different register
5219 ;; is being set. To keep things simple, assume during rtl manipulations that
5220 ;; it does, but tell the final scan operator the truth. Similarly for
;; abssi2 expander: attach either a scratch or the real CC register as
;; the clobber, depending on conditions elided in this excerpt.
5223 (define_expand "abssi2"
5225 [(set (match_operand:SI 0 "s_register_operand")
5226 (abs:SI (match_operand:SI 1 "s_register_operand")))
5227 (clobber (match_dup 2))])]
5231 operands[2] = gen_rtx_SCRATCH (SImode);
5233 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
;; abs split: same-register alternative uses cmp + conditional rsb;
;; different-register alternative uses the branchless eor/sub-with-asr
;; sequence (x ^ (x>>31)) - (x>>31).
5236 (define_insn_and_split "*arm_abssi2"
5237 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
5238 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
5239 (clobber (reg:CC CC_REGNUM))]
5242 "&& reload_completed"
5245 /* if (which_alternative == 0) */
5246 if (REGNO(operands[0]) == REGNO(operands[1]))
5248 /* Emit the pattern:
5249 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
5250 [(set (reg:CC CC_REGNUM)
5251 (compare:CC (match_dup 0) (const_int 0)))
5252 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
5253 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
5255 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
5256 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
5257 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5258 (gen_rtx_LT (SImode,
5259 gen_rtx_REG (CCmode, CC_REGNUM),
5261 (gen_rtx_SET (operands[0],
5262 (gen_rtx_MINUS (SImode,
5269 /* Emit the pattern:
5270 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
5272 (xor:SI (match_dup 1)
5273 (ashiftrt:SI (match_dup 1) (const_int 31))))
5275 (minus:SI (match_dup 0)
5276 (ashiftrt:SI (match_dup 1) (const_int 31))))]
5278 emit_insn (gen_rtx_SET (operands[0],
5279 gen_rtx_XOR (SImode,
5280 gen_rtx_ASHIFTRT (SImode,
5284 emit_insn (gen_rtx_SET (operands[0],
5285 gen_rtx_MINUS (SImode,
5287 gen_rtx_ASHIFTRT (SImode,
5293 [(set_attr "conds" "clob,*")
5294 (set_attr "shift" "1")
5295 (set_attr "predicable" "no, yes")
5296 (set_attr "length" "8")
5297 (set_attr "type" "multiple")]
;; Negated abs (-|x|): same two strategies as *arm_abssi2 with the
;; condition/operands adjusted (rsbgt, and rsb in the branchless form).
5300 (define_insn_and_split "*arm_neg_abssi2"
5301 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
5302 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
5303 (clobber (reg:CC CC_REGNUM))]
5306 "&& reload_completed"
5309 /* if (which_alternative == 0) */
5310 if (REGNO (operands[0]) == REGNO (operands[1]))
5312 /* Emit the pattern:
5313 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
5315 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
5316 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
5317 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5319 gen_rtx_REG (CCmode, CC_REGNUM),
5321 gen_rtx_SET (operands[0],
5322 (gen_rtx_MINUS (SImode,
5328 /* Emit the pattern:
5329 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
5331 emit_insn (gen_rtx_SET (operands[0],
5332 gen_rtx_XOR (SImode,
5333 gen_rtx_ASHIFTRT (SImode,
5337 emit_insn (gen_rtx_SET (operands[0],
5338 gen_rtx_MINUS (SImode,
5339 gen_rtx_ASHIFTRT (SImode,
5346 [(set_attr "conds" "clob,*")
5347 (set_attr "shift" "1")
5348 (set_attr "predicable" "no, yes")
5349 (set_attr "length" "8")
5350 (set_attr "type" "multiple")]
;; FP abs/sqrt expanders — the matching insns are provided by vfp.md.
5353 (define_expand "abssf2"
5354 [(set (match_operand:SF 0 "s_register_operand")
5355 (abs:SF (match_operand:SF 1 "s_register_operand")))]
5356 "TARGET_32BIT && TARGET_HARD_FLOAT"
5359 (define_expand "absdf2"
5360 [(set (match_operand:DF 0 "s_register_operand")
5361 (abs:DF (match_operand:DF 1 "s_register_operand")))]
5362 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5365 (define_expand "sqrtsf2"
5366 [(set (match_operand:SF 0 "s_register_operand")
5367 (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
5368 "TARGET_32BIT && TARGET_HARD_FLOAT"
5371 (define_expand "sqrtdf2"
5372 [(set (match_operand:DF 0 "s_register_operand")
5373 (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
5374 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
;; Bitwise NOT (one's complement) expander and mvn insn.
5377 (define_expand "one_cmplsi2"
5378 [(set (match_operand:SI 0 "s_register_operand")
5379 (not:SI (match_operand:SI 1 "s_register_operand")))]
5384 (define_insn "*arm_one_cmplsi2"
5385 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
5386 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
5389 [(set_attr "predicable" "yes")
5390 (set_attr "predicable_short_it" "yes,no")
5391 (set_attr "arch" "t2,*")
5392 (set_attr "length" "4")
5393 (set_attr "type" "mvn_reg")]
;; Flag-setting mvn (mvns); NOTE(review): template lines elided here.
5396 (define_insn "*notsi_compare0"
5397 [(set (reg:CC_NZ CC_REGNUM)
5398 (compare:CC_NZ (not:SI (match_operand:SI 1 "s_register_operand" "r"))
5400 (set (match_operand:SI 0 "s_register_operand" "=r")
5401 (not:SI (match_dup 1)))]
5404 [(set_attr "conds" "set")
5405 (set_attr "type" "mvn_reg")]
;; As above but the result is discarded; only the flags matter.
5408 (define_insn "*notsi_compare0_scratch"
5409 [(set (reg:CC_NZ CC_REGNUM)
5410 (compare:CC_NZ (not:SI (match_operand:SI 1 "s_register_operand" "r"))
5412 (clobber (match_scratch:SI 0 "=r"))]
5415 [(set_attr "conds" "set")
5416 (set_attr "type" "mvn_reg")]
5419 ;; Fixed <--> Floating conversion insns
;; int -> HFmode conversions go via SFmode (no direct instruction).
5421 (define_expand "floatsihf2"
5422 [(set (match_operand:HF 0 "general_operand")
5423 (float:HF (match_operand:SI 1 "general_operand")))]
5427 rtx op1 = gen_reg_rtx (SFmode);
5428 expand_float (op1, operands[1], 0);
5429 op1 = convert_to_mode (HFmode, op1, 0);
5430 emit_move_insn (operands[0], op1);
5435 (define_expand "floatdihf2"
5436 [(set (match_operand:HF 0 "general_operand")
5437 (float:HF (match_operand:DI 1 "general_operand")))]
5441 rtx op1 = gen_reg_rtx (SFmode);
5442 expand_float (op1, operands[1], 0);
5443 op1 = convert_to_mode (HFmode, op1, 0);
5444 emit_move_insn (operands[0], op1);
;; SI -> SF/DF conversions; insns provided elsewhere (vfp.md).
5449 (define_expand "floatsisf2"
5450 [(set (match_operand:SF 0 "s_register_operand")
5451 (float:SF (match_operand:SI 1 "s_register_operand")))]
5452 "TARGET_32BIT && TARGET_HARD_FLOAT"
5456 (define_expand "floatsidf2"
5457 [(set (match_operand:DF 0 "s_register_operand")
5458 (float:DF (match_operand:SI 1 "s_register_operand")))]
5459 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; HF -> int truncating conversions, again via SFmode.
5463 (define_expand "fix_trunchfsi2"
5464 [(set (match_operand:SI 0 "general_operand")
5465 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
5469 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5470 expand_fix (operands[0], op1, 0);
5475 (define_expand "fix_trunchfdi2"
5476 [(set (match_operand:DI 0 "general_operand")
5477 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
5481 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5482 expand_fix (operands[0], op1, 0);
5487 (define_expand "fix_truncsfsi2"
5488 [(set (match_operand:SI 0 "s_register_operand")
5489 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
5490 "TARGET_32BIT && TARGET_HARD_FLOAT"
5494 (define_expand "fix_truncdfsi2"
5495 [(set (match_operand:SI 0 "s_register_operand")
5496 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
5497 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5503 (define_expand "truncdfsf2"
5504 [(set (match_operand:SF 0 "s_register_operand")
5506 (match_operand:DF 1 "s_register_operand")))]
5507 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5511 ;; DFmode to HFmode conversions on targets without a single-step hardware
5512 ;; instruction for it would have to go through SFmode. This is dangerous
5513 ;; as it introduces double rounding.
5515 ;; Disable this pattern unless we are in an unsafe math mode, or we have
5516 ;; a single-step instruction.
5518 (define_expand "truncdfhf2"
5519 [(set (match_operand:HF 0 "s_register_operand")
5521 (match_operand:DF 1 "s_register_operand")))]
5522 "(TARGET_EITHER && flag_unsafe_math_optimizations)
5523 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
5525 /* We don't have a direct instruction for this, so we must be in
5526 an unsafe math mode, and going via SFmode. */
5528 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
5531 op1 = convert_to_mode (SFmode, operands[1], 0);
5532 op1 = convert_to_mode (HFmode, op1, 0);
5533 emit_move_insn (operands[0], op1);
5536 /* Otherwise, we will pick this up as a single instruction with
5537 no intermediary rounding. */
5541 ;; Zero and sign extension instructions.
5543 (define_expand "zero_extend<mode>di2"
5544 [(set (match_operand:DI 0 "s_register_operand" "")
5545 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
5546 "TARGET_32BIT <qhs_zextenddi_cond>"
5548 rtx res_lo, res_hi, op0_lo, op0_hi;
5549 res_lo = gen_lowpart (SImode, operands[0]);
5550 res_hi = gen_highpart (SImode, operands[0]);
5551 if (can_create_pseudo_p ())
5553 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
5554 op0_hi = gen_reg_rtx (SImode);
5558 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
5561 if (<MODE>mode != SImode)
5562 emit_insn (gen_rtx_SET (op0_lo,
5563 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5564 emit_insn (gen_movsi (op0_hi, const0_rtx));
5565 if (res_lo != op0_lo)
5566 emit_move_insn (res_lo, op0_lo);
5567 if (res_hi != op0_hi)
5568 emit_move_insn (res_hi, op0_hi);
5573 (define_expand "extend<mode>di2"
5574 [(set (match_operand:DI 0 "s_register_operand" "")
5575 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
5576 "TARGET_32BIT <qhs_sextenddi_cond>"
5578 rtx res_lo, res_hi, op0_lo, op0_hi;
5579 res_lo = gen_lowpart (SImode, operands[0]);
5580 res_hi = gen_highpart (SImode, operands[0]);
5581 if (can_create_pseudo_p ())
5583 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
5584 op0_hi = gen_reg_rtx (SImode);
5588 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
5591 if (<MODE>mode != SImode)
5592 emit_insn (gen_rtx_SET (op0_lo,
5593 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5594 emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
5595 if (res_lo != op0_lo)
5596 emit_move_insn (res_lo, op0_lo);
5597 if (res_hi != op0_hi)
5598 emit_move_insn (res_hi, op0_hi);
5603 ;; Splits for all extensions to DImode
5605 [(set (match_operand:DI 0 "s_register_operand" "")
5606 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5608 [(set (match_dup 0) (match_dup 1))]
5610 rtx lo_part = gen_lowpart (SImode, operands[0]);
5611 machine_mode src_mode = GET_MODE (operands[1]);
5613 if (src_mode == SImode)
5614 emit_move_insn (lo_part, operands[1]);
5616 emit_insn (gen_rtx_SET (lo_part,
5617 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5618 operands[0] = gen_highpart (SImode, operands[0]);
5619 operands[1] = const0_rtx;
5623 [(set (match_operand:DI 0 "s_register_operand" "")
5624 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5626 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
5628 rtx lo_part = gen_lowpart (SImode, operands[0]);
5629 machine_mode src_mode = GET_MODE (operands[1]);
5631 if (src_mode == SImode)
5632 emit_move_insn (lo_part, operands[1]);
5634 emit_insn (gen_rtx_SET (lo_part,
5635 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5636 operands[1] = lo_part;
5637 operands[0] = gen_highpart (SImode, operands[0]);
5640 (define_expand "zero_extendhisi2"
5641 [(set (match_operand:SI 0 "s_register_operand")
5642 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5645 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
5647 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
5650 if (!arm_arch6 && !MEM_P (operands[1]))
5652 rtx t = gen_lowpart (SImode, operands[1]);
5653 rtx tmp = gen_reg_rtx (SImode);
5654 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5655 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
5661 [(set (match_operand:SI 0 "s_register_operand" "")
5662 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
5663 "!TARGET_THUMB2 && !arm_arch6"
5664 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5665 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
5667 operands[2] = gen_lowpart (SImode, operands[1]);
5670 (define_insn "*arm_zero_extendhisi2"
5671 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5672 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5673 "TARGET_ARM && arm_arch4 && !arm_arch6"
5677 [(set_attr "type" "alu_shift_reg,load_byte")
5678 (set_attr "predicable" "yes")]
5681 (define_insn "*arm_zero_extendhisi2_v6"
5682 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5683 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5684 "TARGET_ARM && arm_arch6"
5688 [(set_attr "predicable" "yes")
5689 (set_attr "type" "extend,load_byte")]
5692 (define_insn "*arm_zero_extendhisi2addsi"
5693 [(set (match_operand:SI 0 "s_register_operand" "=r")
5694 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5695 (match_operand:SI 2 "s_register_operand" "r")))]
5697 "uxtah%?\\t%0, %2, %1"
5698 [(set_attr "type" "alu_shift_reg")
5699 (set_attr "predicable" "yes")]
5702 (define_expand "zero_extendqisi2"
5703 [(set (match_operand:SI 0 "s_register_operand")
5704 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
5707 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
5709 emit_insn (gen_andsi3 (operands[0],
5710 gen_lowpart (SImode, operands[1]),
5714 if (!arm_arch6 && !MEM_P (operands[1]))
5716 rtx t = gen_lowpart (SImode, operands[1]);
5717 rtx tmp = gen_reg_rtx (SImode);
5718 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5719 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
5725 [(set (match_operand:SI 0 "s_register_operand" "")
5726 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
5728 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5729 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
5731 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5734 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
5739 (define_insn "*arm_zero_extendqisi2"
5740 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5741 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
5742 "TARGET_ARM && !arm_arch6"
5745 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5746 [(set_attr "length" "8,4")
5747 (set_attr "type" "alu_shift_reg,load_byte")
5748 (set_attr "predicable" "yes")]
5751 (define_insn "*arm_zero_extendqisi2_v6"
5752 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5753 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
5754 "TARGET_ARM && arm_arch6"
5757 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5758 [(set_attr "type" "extend,load_byte")
5759 (set_attr "predicable" "yes")]
5762 (define_insn "*arm_zero_extendqisi2addsi"
5763 [(set (match_operand:SI 0 "s_register_operand" "=r")
5764 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5765 (match_operand:SI 2 "s_register_operand" "r")))]
5767 "uxtab%?\\t%0, %2, %1"
5768 [(set_attr "predicable" "yes")
5769 (set_attr "type" "alu_shift_reg")]
5773 [(set (match_operand:SI 0 "s_register_operand" "")
5774 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
5775 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5776 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
5777 [(set (match_dup 2) (match_dup 1))
5778 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5783 [(set (match_operand:SI 0 "s_register_operand" "")
5784 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
5785 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5786 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
5787 [(set (match_dup 2) (match_dup 1))
5788 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5794 [(set (match_operand:SI 0 "s_register_operand" "")
5795 (IOR_XOR:SI (and:SI (ashift:SI
5796 (match_operand:SI 1 "s_register_operand" "")
5797 (match_operand:SI 2 "const_int_operand" ""))
5798 (match_operand:SI 3 "const_int_operand" ""))
5800 (match_operator 5 "subreg_lowpart_operator"
5801 [(match_operand:SI 4 "s_register_operand" "")]))))]
5803 && (UINTVAL (operands[3])
5804 == (GET_MODE_MASK (GET_MODE (operands[5]))
5805 & (GET_MODE_MASK (GET_MODE (operands[5]))
5806 << (INTVAL (operands[2])))))"
5807 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
5809 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
5810 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
5813 (define_insn "*compareqi_eq0"
5814 [(set (reg:CC_Z CC_REGNUM)
5815 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
5819 [(set_attr "conds" "set")
5820 (set_attr "predicable" "yes")
5821 (set_attr "type" "logic_imm")]
5824 (define_expand "extendhisi2"
5825 [(set (match_operand:SI 0 "s_register_operand")
5826 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5831 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
5834 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
5836 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
5840 if (!arm_arch6 && !MEM_P (operands[1]))
5842 rtx t = gen_lowpart (SImode, operands[1]);
5843 rtx tmp = gen_reg_rtx (SImode);
5844 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5845 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
5852 [(set (match_operand:SI 0 "register_operand" "")
5853 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
5854 (clobber (match_scratch:SI 2 ""))])]
5856 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5857 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5859 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5862 ;; This pattern will only be used when ldsh is not available
5863 (define_expand "extendhisi2_mem"
5864 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5866 (zero_extend:SI (match_dup 7)))
5867 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
5868 (set (match_operand:SI 0 "" "")
5869 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
5874 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5876 mem1 = change_address (operands[1], QImode, addr);
5877 mem2 = change_address (operands[1], QImode,
5878 plus_constant (Pmode, addr, 1));
5879 operands[0] = gen_lowpart (SImode, operands[0]);
5881 operands[2] = gen_reg_rtx (SImode);
5882 operands[3] = gen_reg_rtx (SImode);
5883 operands[6] = gen_reg_rtx (SImode);
5886 if (BYTES_BIG_ENDIAN)
5888 operands[4] = operands[2];
5889 operands[5] = operands[3];
5893 operands[4] = operands[3];
5894 operands[5] = operands[2];
5900 [(set (match_operand:SI 0 "register_operand" "")
5901 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
5903 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5904 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5906 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5909 (define_insn "*arm_extendhisi2"
5910 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5911 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5912 "TARGET_ARM && arm_arch4 && !arm_arch6"
5916 [(set_attr "length" "8,4")
5917 (set_attr "type" "alu_shift_reg,load_byte")
5918 (set_attr "predicable" "yes")]
5921 ;; ??? Check Thumb-2 pool range
5922 (define_insn "*arm_extendhisi2_v6"
5923 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5924 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5925 "TARGET_32BIT && arm_arch6"
5929 [(set_attr "type" "extend,load_byte")
5930 (set_attr "predicable" "yes")]
5933 (define_insn "*arm_extendhisi2addsi"
5934 [(set (match_operand:SI 0 "s_register_operand" "=r")
5935 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5936 (match_operand:SI 2 "s_register_operand" "r")))]
5938 "sxtah%?\\t%0, %2, %1"
5939 [(set_attr "type" "alu_shift_reg")]
5942 (define_expand "extendqihi2"
5944 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
5946 (set (match_operand:HI 0 "s_register_operand")
5947 (ashiftrt:SI (match_dup 2)
5952 if (arm_arch4 && MEM_P (operands[1]))
5954 emit_insn (gen_rtx_SET (operands[0],
5955 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
5958 if (!s_register_operand (operands[1], QImode))
5959 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5960 operands[0] = gen_lowpart (SImode, operands[0]);
5961 operands[1] = gen_lowpart (SImode, operands[1]);
5962 operands[2] = gen_reg_rtx (SImode);
5966 (define_insn "*arm_extendqihi_insn"
5967 [(set (match_operand:HI 0 "s_register_operand" "=r")
5968 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
5969 "TARGET_ARM && arm_arch4"
5971 [(set_attr "type" "load_byte")
5972 (set_attr "predicable" "yes")]
5975 (define_expand "extendqisi2"
5976 [(set (match_operand:SI 0 "s_register_operand")
5977 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
5980 if (!arm_arch4 && MEM_P (operands[1]))
5981 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5983 if (!arm_arch6 && !MEM_P (operands[1]))
5985 rtx t = gen_lowpart (SImode, operands[1]);
5986 rtx tmp = gen_reg_rtx (SImode);
5987 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5988 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
5994 [(set (match_operand:SI 0 "register_operand" "")
5995 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
5997 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5998 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
6000 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
6003 (define_insn "*arm_extendqisi"
6004 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
6005 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
6006 "TARGET_ARM && arm_arch4 && !arm_arch6"
6010 [(set_attr "length" "8,4")
6011 (set_attr "type" "alu_shift_reg,load_byte")
6012 (set_attr "predicable" "yes")]
6015 (define_insn "*arm_extendqisi_v6"
6016 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
6018 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
6019 "TARGET_ARM && arm_arch6"
6023 [(set_attr "type" "extend,load_byte")
6024 (set_attr "predicable" "yes")]
6027 (define_insn "*arm_extendqisi2addsi"
6028 [(set (match_operand:SI 0 "s_register_operand" "=r")
6029 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
6030 (match_operand:SI 2 "s_register_operand" "r")))]
6032 "sxtab%?\\t%0, %2, %1"
6033 [(set_attr "type" "alu_shift_reg")
6034 (set_attr "predicable" "yes")]
6037 (define_insn "arm_<sup>xtb16"
6038 [(set (match_operand:SI 0 "s_register_operand" "=r")
6040 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
6042 "<sup>xtb16%?\\t%0, %1"
6043 [(set_attr "predicable" "yes")
6044 (set_attr "type" "alu_dsp_reg")])
6046 (define_insn "arm_<simd32_op>"
6047 [(set (match_operand:SI 0 "s_register_operand" "=r")
6049 [(match_operand:SI 1 "s_register_operand" "r")
6050 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
6052 "<simd32_op>%?\\t%0, %1, %2"
6053 [(set_attr "predicable" "yes")
6054 (set_attr "type" "alu_dsp_reg")])
6056 (define_insn "arm_usada8"
6057 [(set (match_operand:SI 0 "s_register_operand" "=r")
6059 [(match_operand:SI 1 "s_register_operand" "r")
6060 (match_operand:SI 2 "s_register_operand" "r")
6061 (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
6063 "usada8%?\\t%0, %1, %2, %3"
6064 [(set_attr "predicable" "yes")
6065 (set_attr "type" "alu_dsp_reg")])
6067 (define_insn "arm_<simd32_op>"
6068 [(set (match_operand:DI 0 "s_register_operand" "=r")
6070 [(match_operand:SI 1 "s_register_operand" "r")
6071 (match_operand:SI 2 "s_register_operand" "r")
6072 (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
6074 "<simd32_op>%?\\t%Q0, %R0, %1, %2"
6075 [(set_attr "predicable" "yes")
6076 (set_attr "type" "smlald")])
6078 (define_insn "arm_<simd32_op>"
6079 [(set (match_operand:SI 0 "s_register_operand" "=r")
6081 [(match_operand:SI 1 "s_register_operand" "r")
6082 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_GE))
6083 (set (reg:CC APSRGE_REGNUM)
6084 (unspec:CC [(reg:CC APSRGE_REGNUM)] UNSPEC_GE_SET))]
6086 "<simd32_op>%?\\t%0, %1, %2"
6087 [(set_attr "predicable" "yes")
6088 (set_attr "type" "alu_sreg")])
6090 (define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
6091 [(set (match_operand:SI 0 "s_register_operand" "=r")
6093 [(match_operand:SI 1 "s_register_operand" "r")
6094 (match_operand:SI 2 "s_register_operand" "r")
6095 (match_operand:SI 3 "s_register_operand" "r")] SIMD32_TERNOP_Q))]
6096 "TARGET_INT_SIMD && <add_clobber_q_pred>"
6097 "<simd32_op>%?\\t%0, %1, %2, %3"
6098 [(set_attr "predicable" "yes")
6099 (set_attr "type" "alu_sreg")])
6101 (define_expand "arm_<simd32_op>"
6102 [(set (match_operand:SI 0 "s_register_operand")
6104 [(match_operand:SI 1 "s_register_operand")
6105 (match_operand:SI 2 "s_register_operand")
6106 (match_operand:SI 3 "s_register_operand")] SIMD32_TERNOP_Q))]
6110 emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
6111 operands[2], operands[3]));
6113 emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
6114 operands[2], operands[3]));
6119 (define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
6120 [(set (match_operand:SI 0 "s_register_operand" "=r")
6122 [(match_operand:SI 1 "s_register_operand" "r")
6123 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_BINOP_Q))]
6124 "TARGET_INT_SIMD && <add_clobber_q_pred>"
6125 "<simd32_op>%?\\t%0, %1, %2"
6126 [(set_attr "predicable" "yes")
6127 (set_attr "type" "alu_sreg")])
6129 (define_expand "arm_<simd32_op>"
6130 [(set (match_operand:SI 0 "s_register_operand")
6132 [(match_operand:SI 1 "s_register_operand")
6133 (match_operand:SI 2 "s_register_operand")] SIMD32_BINOP_Q))]
6137 emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
6140 emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
6146 (define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
6147 [(set (match_operand:SI 0 "s_register_operand" "=r")
6149 [(match_operand:SI 1 "s_register_operand" "r")
6150 (match_operand:SI 2 "<sup>sat16_imm" "i")] USSAT16))]
6151 "TARGET_INT_SIMD && <add_clobber_q_pred>"
6152 "<simd32_op>%?\\t%0, %2, %1"
6153 [(set_attr "predicable" "yes")
6154 (set_attr "type" "alu_sreg")])
6156 (define_expand "arm_<simd32_op>"
6157 [(set (match_operand:SI 0 "s_register_operand")
6159 [(match_operand:SI 1 "s_register_operand")
6160 (match_operand:SI 2 "<sup>sat16_imm")] USSAT16))]
6164 emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
6167 emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
6173 (define_insn "arm_sel"
6174 [(set (match_operand:SI 0 "s_register_operand" "=r")
6176 [(match_operand:SI 1 "s_register_operand" "r")
6177 (match_operand:SI 2 "s_register_operand" "r")
6178 (reg:CC APSRGE_REGNUM)] UNSPEC_SEL))]
6180 "sel%?\\t%0, %1, %2"
6181 [(set_attr "predicable" "yes")
6182 (set_attr "type" "alu_sreg")])
6184 (define_expand "extendsfdf2"
6185 [(set (match_operand:DF 0 "s_register_operand")
6186 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
6187 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6191 ;; HFmode -> DFmode conversions where we don't have an instruction for it
6192 ;; must go through SFmode.
6194 ;; This is always safe for an extend.
6196 (define_expand "extendhfdf2"
6197 [(set (match_operand:DF 0 "s_register_operand")
6198 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
6201 /* We don't have a direct instruction for this, so go via SFmode. */
6202 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
6205 op1 = convert_to_mode (SFmode, operands[1], 0);
6206 op1 = convert_to_mode (DFmode, op1, 0);
6207 emit_insn (gen_movdf (operands[0], op1));
6210 /* Otherwise, we're done producing RTL and will pick up the correct
6211 pattern to do this with one rounding-step in a single instruction. */
6215 ;; Move insns (including loads and stores)
6217 ;; XXX Just some ideas about movti.
6218 ;; I don't think these are a good idea on the arm, there just aren't enough
6220 ;;(define_expand "loadti"
6221 ;; [(set (match_operand:TI 0 "s_register_operand")
6222 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
6225 ;;(define_expand "storeti"
6226 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
6227 ;; (match_operand:TI 1 "s_register_operand"))]
6230 ;;(define_expand "movti"
6231 ;; [(set (match_operand:TI 0 "general_operand")
6232 ;; (match_operand:TI 1 "general_operand"))]
6238 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
6239 ;; operands[1] = copy_to_reg (operands[1]);
6240 ;; if (MEM_P (operands[0]))
6241 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
6242 ;; else if (MEM_P (operands[1]))
6243 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
6247 ;; emit_insn (insn);
6251 ;; Recognize garbage generated above.
6254 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
6255 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
6259 ;; register mem = (which_alternative < 3);
6260 ;; register const char *template;
6262 ;; operands[mem] = XEXP (operands[mem], 0);
6263 ;; switch (which_alternative)
6265 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
6266 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
6267 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
6268 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
6269 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
6270 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
6272 ;; output_asm_insn (template, operands);
6276 (define_expand "movdi"
6277 [(set (match_operand:DI 0 "general_operand")
6278 (match_operand:DI 1 "general_operand"))]
6281 gcc_checking_assert (aligned_operand (operands[0], DImode));
6282 gcc_checking_assert (aligned_operand (operands[1], DImode));
6283 if (can_create_pseudo_p ())
6285 if (!REG_P (operands[0]))
6286 operands[1] = force_reg (DImode, operands[1]);
6288 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
6289 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
6291 /* Avoid LDRD's into an odd-numbered register pair in ARM state
6292 when expanding function calls. */
6293 gcc_assert (can_create_pseudo_p ());
6294 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
6296 /* Perform load into legal reg pair first, then move. */
6297 rtx reg = gen_reg_rtx (DImode);
6298 emit_insn (gen_movdi (reg, operands[1]));
6301 emit_move_insn (gen_lowpart (SImode, operands[0]),
6302 gen_lowpart (SImode, operands[1]));
6303 emit_move_insn (gen_highpart (SImode, operands[0]),
6304 gen_highpart (SImode, operands[1]));
6307 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
6308 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
6310 /* Avoid STRD's from an odd-numbered register pair in ARM state
6311 when expanding function prologue. */
6312 gcc_assert (can_create_pseudo_p ());
6313 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
6314 ? gen_reg_rtx (DImode)
6316 emit_move_insn (gen_lowpart (SImode, split_dest),
6317 gen_lowpart (SImode, operands[1]));
6318 emit_move_insn (gen_highpart (SImode, split_dest),
6319 gen_highpart (SImode, operands[1]));
6320 if (split_dest != operands[0])
6321 emit_insn (gen_movdi (operands[0], split_dest));
6327 (define_insn "*arm_movdi"
6328 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
6329 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
6331 && !(TARGET_HARD_FLOAT)
6332 && !(TARGET_HAVE_MVE || TARGET_HAVE_MVE_FLOAT)
6334 && ( register_operand (operands[0], DImode)
6335 || register_operand (operands[1], DImode))"
6337 switch (which_alternative)
6344 /* Cannot load it directly, split to load it via MOV / MOVT. */
6345 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6349 return output_move_double (operands, true, NULL);
6352 [(set_attr "length" "8,12,16,8,8")
6353 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6354 (set_attr "arm_pool_range" "*,*,*,1020,*")
6355 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6356 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
6357 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6361 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6362 (match_operand:ANY64 1 "immediate_operand" ""))]
6365 && (arm_disable_literal_pool
6366 || (arm_const_double_inline_cost (operands[1])
6367 <= arm_max_const_double_inline_cost ()))"
6370 arm_split_constant (SET, SImode, curr_insn,
6371 INTVAL (gen_lowpart (SImode, operands[1])),
6372 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
6373 arm_split_constant (SET, SImode, curr_insn,
6374 INTVAL (gen_highpart_mode (SImode,
6375 GET_MODE (operands[0]),
6377 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
6382 ; If optimizing for size, or if we have load delay slots, then
6383 ; we want to split the constant into two separate operations.
6384 ; In both cases this may split a trivial part into a single data op
6385 ; leaving a single complex constant to load. We can also get longer
6386 ; offsets in a LDR which means we get better chances of sharing the pool
6387 ; entries. Finally, we can normally do a better job of scheduling
6388 ; LDR instructions than we can with LDM.
6389 ; This pattern will only match if the one above did not.
6391 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6392 (match_operand:ANY64 1 "const_double_operand" ""))]
6393 "TARGET_ARM && reload_completed
6394 && arm_const_double_by_parts (operands[1])"
6395 [(set (match_dup 0) (match_dup 1))
6396 (set (match_dup 2) (match_dup 3))]
6398 operands[2] = gen_highpart (SImode, operands[0]);
6399 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
6401 operands[0] = gen_lowpart (SImode, operands[0]);
6402 operands[1] = gen_lowpart (SImode, operands[1]);
6407 [(set (match_operand:ANY64_BF 0 "arm_general_register_operand" "")
6408 (match_operand:ANY64_BF 1 "arm_general_register_operand" ""))]
6409 "TARGET_EITHER && reload_completed"
6410 [(set (match_dup 0) (match_dup 1))
6411 (set (match_dup 2) (match_dup 3))]
6413 operands[2] = gen_highpart (SImode, operands[0]);
6414 operands[3] = gen_highpart (SImode, operands[1]);
6415 operands[0] = gen_lowpart (SImode, operands[0]);
6416 operands[1] = gen_lowpart (SImode, operands[1]);
6418 /* Handle a partial overlap. */
6419 if (rtx_equal_p (operands[0], operands[3]))
6421 rtx tmp0 = operands[0];
6422 rtx tmp1 = operands[1];
6424 operands[0] = operands[2];
6425 operands[1] = operands[3];
6432 ;; We can't actually do base+index doubleword loads if the index and
6433 ;; destination overlap. Split here so that we at least have chance to
6436 [(set (match_operand:DI 0 "s_register_operand" "")
6437 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
6438 (match_operand:SI 2 "s_register_operand" ""))))]
6440 && reg_overlap_mentioned_p (operands[0], operands[1])
6441 && reg_overlap_mentioned_p (operands[0], operands[2])"
6443 (plus:SI (match_dup 1)
6446 (mem:DI (match_dup 4)))]
6448 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
6452 (define_expand "movsi"
6453 [(set (match_operand:SI 0 "general_operand")
6454 (match_operand:SI 1 "general_operand"))]
6458 rtx base, offset, tmp;
6460 gcc_checking_assert (aligned_operand (operands[0], SImode));
6461 gcc_checking_assert (aligned_operand (operands[1], SImode));
6462 if (TARGET_32BIT || TARGET_HAVE_MOVT)
6464 /* Everything except mem = const or mem = mem can be done easily. */
6465 if (MEM_P (operands[0]))
6466 operands[1] = force_reg (SImode, operands[1]);
6467 if (arm_general_register_operand (operands[0], SImode)
6468 && CONST_INT_P (operands[1])
6469 && !(const_ok_for_arm (INTVAL (operands[1]))
6470 || const_ok_for_arm (~INTVAL (operands[1]))))
6472 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
6474 emit_insn (gen_rtx_SET (operands[0], operands[1]));
6479 arm_split_constant (SET, SImode, NULL_RTX,
6480 INTVAL (operands[1]), operands[0], NULL_RTX,
6481 optimize && can_create_pseudo_p ());
6486 else /* Target doesn't have MOVT... */
6488 if (can_create_pseudo_p ())
6490 if (!REG_P (operands[0]))
6491 operands[1] = force_reg (SImode, operands[1]);
6495 split_const (operands[1], &base, &offset);
6496 if (INTVAL (offset) != 0
6497 && targetm.cannot_force_const_mem (SImode, operands[1]))
6499 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6500 emit_move_insn (tmp, base);
6501 emit_insn (gen_addsi3 (operands[0], tmp, offset));
6505 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
6507 /* Recognize the case where operand[1] is a reference to thread-local
6508 data and load its address to a register. Offsets have been split off
6510 if (arm_tls_referenced_p (operands[1]))
6511 operands[1] = legitimize_tls_address (operands[1], tmp);
6513 && (CONSTANT_P (operands[1])
6514 || symbol_mentioned_p (operands[1])
6515 || label_mentioned_p (operands[1])))
6517 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
6522 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
6523 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
6524 ;; so this does not matter.
6525 (define_insn "*arm_movt"
6526 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
6527 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
6528 (match_operand:SI 2 "general_operand" "i,i")))]
6529 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
6531 movt%?\t%0, #:upper16:%c2
6532 movt\t%0, #:upper16:%c2"
6533 [(set_attr "arch" "32,v8mb")
6534 (set_attr "predicable" "yes")
6535 (set_attr "length" "4")
6536 (set_attr "type" "alu_sreg")]
6539 (define_insn "*arm_movsi_insn"
6540 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
6541 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
6542 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
6543 && ( register_operand (operands[0], SImode)
6544 || register_operand (operands[1], SImode))"
6552 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
6553 (set_attr "predicable" "yes")
6554 (set_attr "arch" "*,*,*,v6t2,*,*")
6555 (set_attr "pool_range" "*,*,*,*,4096,*")
6556 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
6560 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6561 (match_operand:SI 1 "const_int_operand" ""))]
6562 "(TARGET_32BIT || TARGET_HAVE_MOVT)
6563 && (!(const_ok_for_arm (INTVAL (operands[1]))
6564 || const_ok_for_arm (~INTVAL (operands[1]))))"
6565 [(clobber (const_int 0))]
6567 arm_split_constant (SET, SImode, NULL_RTX,
6568 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
6573 ;; A normal way to do (symbol + offset) requires three instructions at least
6574 ;; (depends on how big the offset is) as below:
6575 ;; movw r0, #:lower16:g
6576 ;; movw r0, #:upper16:g
6579 ;; A better way would be:
6580 ;; movw r0, #:lower16:g+4
6581 ;; movw r0, #:upper16:g+4
6583 ;; The limitation of this way is that the length of offset should be a 16-bit
6584 ;; signed value, because current assembler only supports REL type relocation for
6585 ;; such case. If the more powerful RELA type is supported in future, we should
6586 ;; update this pattern to go with better way.
6588 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6589 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
6590 (match_operand:SI 2 "const_int_operand" ""))))]
6593 && arm_disable_literal_pool
6595 && GET_CODE (operands[1]) == SYMBOL_REF"
6596 [(clobber (const_int 0))]
6598 int offset = INTVAL (operands[2]);
6600 if (offset < -0x8000 || offset > 0x7fff)
6602 arm_emit_movpair (operands[0], operands[1]);
6603 emit_insn (gen_rtx_SET (operands[0],
6604 gen_rtx_PLUS (SImode, operands[0], operands[2])));
6608 rtx op = gen_rtx_CONST (SImode,
6609 gen_rtx_PLUS (SImode, operands[1], operands[2]));
6610 arm_emit_movpair (operands[0], op);
6615 ;; Split symbol_refs at the later stage (after cprop), instead of generating
6616 ;; movt/movw pair directly at expand. Otherwise corresponding high_sum
6617 ;; and lo_sum would be merged back into memory load at cprop. However,
6618 ;; if the default is to prefer movt/movw rather than a load from the constant
6619 ;; pool, the performance is better.
6621 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6622 (match_operand:SI 1 "general_operand" ""))]
6623 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
6624 && !target_word_relocations
6625 && !arm_tls_referenced_p (operands[1])"
6626 [(clobber (const_int 0))]
6628 arm_emit_movpair (operands[0], operands[1]);
6632 ;; When generating pic, we need to load the symbol offset into a register.
6633 ;; So that the optimizer does not confuse this with a normal symbol load
6634 ;; we use an unspec. The offset will be loaded from a constant pool entry,
6635 ;; since that is the only type of relocation we can use.
6637 ;; Wrap calculation of the whole PIC address in a single pattern for the
6638 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
6639 ;; a PIC address involves two loads from memory, so we want to CSE it
6640 ;; as often as possible.
6641 ;; This pattern will be split into one of the pic_load_addr_* patterns
6642 ;; and a move after GCSE optimizations.
6644 ;; Note: Update arm.cc: legitimize_pic_address() when changing this pattern.
6645 (define_expand "calculate_pic_address"
6646 [(set (match_operand:SI 0 "register_operand")
6647 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
6648 (unspec:SI [(match_operand:SI 2 "" "")]
6653 ;; Split calculate_pic_address into pic_load_addr_* and a move.
6655 [(set (match_operand:SI 0 "register_operand" "")
6656 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
6657 (unspec:SI [(match_operand:SI 2 "" "")]
6660 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
6661 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
6662 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
6665 ;; operand1 is the memory address to go into
6666 ;; pic_load_addr_32bit.
6667 ;; operand2 is the PIC label to be emitted
6668 ;; from pic_add_dot_plus_eight.
6669 ;; We do this to allow hoisting of the entire insn.
6670 (define_insn_and_split "pic_load_addr_unified"
6671 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
6672 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
6673 (match_operand:SI 2 "" "")]
6674 UNSPEC_PIC_UNIFIED))]
6677 "&& reload_completed"
6678 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
6679 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
6680 (match_dup 2)] UNSPEC_PIC_BASE))]
6681 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
6682 [(set_attr "type" "load_4,load_4,load_4")
6683 (set_attr "pool_range" "4096,4094,1022")
6684 (set_attr "neg_pool_range" "4084,0,0")
6685 (set_attr "arch" "a,t2,t1")
6686 (set_attr "length" "8,6,4")]
6689 ;; The rather odd constraints on the following are to force reload to leave
6690 ;; the insn alone, and to force the minipool generation pass to then move
6691 ;; the GOT symbol to memory.
6693 (define_insn "pic_load_addr_32bit"
6694 [(set (match_operand:SI 0 "s_register_operand" "=r")
6695 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6696 "TARGET_32BIT && flag_pic"
6698 [(set_attr "type" "load_4")
6699 (set (attr "pool_range")
6700 (if_then_else (eq_attr "is_thumb" "no")
6703 (set (attr "neg_pool_range")
6704 (if_then_else (eq_attr "is_thumb" "no")
6709 (define_insn "pic_load_addr_thumb1"
6710 [(set (match_operand:SI 0 "s_register_operand" "=l")
6711 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6712 "TARGET_THUMB1 && flag_pic"
6714 [(set_attr "type" "load_4")
6715 (set (attr "pool_range") (const_int 1018))]
6718 (define_insn "pic_add_dot_plus_four"
6719 [(set (match_operand:SI 0 "register_operand" "=r")
6720 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
6722 (match_operand 2 "" "")]
6726 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6727 INTVAL (operands[2]));
6728 return \"add\\t%0, %|pc\";
6730 [(set_attr "length" "2")
6731 (set_attr "type" "alu_sreg")]
6734 (define_insn "pic_add_dot_plus_eight"
6735 [(set (match_operand:SI 0 "register_operand" "=r")
6736 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6738 (match_operand 2 "" "")]
6742 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6743 INTVAL (operands[2]));
6744 return \"add%?\\t%0, %|pc, %1\";
6746 [(set_attr "predicable" "yes")
6747 (set_attr "type" "alu_sreg")]
6750 (define_insn "tls_load_dot_plus_eight"
6751 [(set (match_operand:SI 0 "register_operand" "=r")
6752 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6754 (match_operand 2 "" "")]
6758 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6759 INTVAL (operands[2]));
6760 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
6762 [(set_attr "predicable" "yes")
6763 (set_attr "type" "load_4")]
6766 ;; PIC references to local variables can generate pic_add_dot_plus_eight
6767 ;; followed by a load. These sequences can be crunched down to
6768 ;; tls_load_dot_plus_eight by a peephole.
6771 [(set (match_operand:SI 0 "register_operand" "")
6772 (unspec:SI [(match_operand:SI 3 "register_operand" "")
6774 (match_operand 1 "" "")]
6776 (set (match_operand:SI 2 "arm_general_register_operand" "")
6777 (mem:SI (match_dup 0)))]
6778 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
6780 (mem:SI (unspec:SI [(match_dup 3)
6787 (define_insn "pic_offset_arm"
6788 [(set (match_operand:SI 0 "register_operand" "=r")
6789 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
6790 (unspec:SI [(match_operand:SI 2 "" "X")]
6791 UNSPEC_PIC_OFFSET))))]
6792 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
6793 "ldr%?\\t%0, [%1,%2]"
6794 [(set_attr "type" "load_4")]
6797 (define_expand "builtin_setjmp_receiver"
6798 [(label_ref (match_operand 0 "" ""))]
6802 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
6804 if (arm_pic_register != INVALID_REGNUM)
6805 arm_load_pic_register (1UL << 3, NULL_RTX);
6809 ;; If copying one reg to another we can set the condition codes according to
6810 ;; its value. Such a move is common after a return from subroutine and the
6811 ;; result is being tested against zero.
6813 (define_insn "*movsi_compare0"
6814 [(set (reg:CC CC_REGNUM)
6815 (compare:CC (match_operand:SI 1 "s_register_operand" "0,0,l,rk,rk")
6817 (set (match_operand:SI 0 "s_register_operand" "=l,rk,l,r,rk")
6825 subs%?\\t%0, %1, #0"
6826 [(set_attr "conds" "set")
6827 (set_attr "arch" "t2,*,t2,t2,a")
6828 (set_attr "type" "alus_imm")
6829 (set_attr "length" "2,4,2,4,4")]
6832 ;; Subroutine to store a half word from a register into memory.
6833 ;; Operand 0 is the source register (HImode)
6834 ;; Operand 1 is the destination address in a register (SImode)
6836 ;; In both this routine and the next, we must be careful not to spill
6837 ;; a memory address of reg+large_const into a separate PLUS insn, since this
6838 ;; can generate unrecognizable rtl.
6840 (define_expand "storehi"
6841 [;; store the low byte
6842 (set (match_operand 1 "" "") (match_dup 3))
6843 ;; extract the high byte
6845 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6846 ;; store the high byte
6847 (set (match_dup 4) (match_dup 5))]
6851 rtx op1 = operands[1];
6852 rtx addr = XEXP (op1, 0);
6853 enum rtx_code code = GET_CODE (addr);
6855 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6857 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
6859 operands[4] = adjust_address (op1, QImode, 1);
6860 operands[1] = adjust_address (operands[1], QImode, 0);
6861 operands[3] = gen_lowpart (QImode, operands[0]);
6862 operands[0] = gen_lowpart (SImode, operands[0]);
6863 operands[2] = gen_reg_rtx (SImode);
6864 operands[5] = gen_lowpart (QImode, operands[2]);
6868 (define_expand "storehi_bigend"
6869 [(set (match_dup 4) (match_dup 3))
6871 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6872 (set (match_operand 1 "" "") (match_dup 5))]
6876 rtx op1 = operands[1];
6877 rtx addr = XEXP (op1, 0);
6878 enum rtx_code code = GET_CODE (addr);
6880 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6882 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
6884 operands[4] = adjust_address (op1, QImode, 1);
6885 operands[1] = adjust_address (operands[1], QImode, 0);
6886 operands[3] = gen_lowpart (QImode, operands[0]);
6887 operands[0] = gen_lowpart (SImode, operands[0]);
6888 operands[2] = gen_reg_rtx (SImode);
6889 operands[5] = gen_lowpart (QImode, operands[2]);
6893 ;; Subroutine to store a half word integer constant into memory.
6894 (define_expand "storeinthi"
6895 [(set (match_operand 0 "" "")
6896 (match_operand 1 "" ""))
6897 (set (match_dup 3) (match_dup 2))]
6901 HOST_WIDE_INT value = INTVAL (operands[1]);
6902 rtx addr = XEXP (operands[0], 0);
6903 rtx op0 = operands[0];
6904 enum rtx_code code = GET_CODE (addr);
6906 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6908 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
6910 operands[1] = gen_reg_rtx (SImode);
6911 if (BYTES_BIG_ENDIAN)
6913 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
6914 if ((value & 255) == ((value >> 8) & 255))
6915 operands[2] = operands[1];
6918 operands[2] = gen_reg_rtx (SImode);
6919 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
6924 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
6925 if ((value & 255) == ((value >> 8) & 255))
6926 operands[2] = operands[1];
6929 operands[2] = gen_reg_rtx (SImode);
6930 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
6934 operands[3] = adjust_address (op0, QImode, 1);
6935 operands[0] = adjust_address (operands[0], QImode, 0);
6936 operands[2] = gen_lowpart (QImode, operands[2]);
6937 operands[1] = gen_lowpart (QImode, operands[1]);
6941 (define_expand "storehi_single_op"
6942 [(set (match_operand:HI 0 "memory_operand")
6943 (match_operand:HI 1 "general_operand"))]
6944 "TARGET_32BIT && arm_arch4"
6946 if (!s_register_operand (operands[1], HImode))
6947 operands[1] = copy_to_mode_reg (HImode, operands[1]);
6951 (define_expand "movhi"
6952 [(set (match_operand:HI 0 "general_operand")
6953 (match_operand:HI 1 "general_operand"))]
6956 gcc_checking_assert (aligned_operand (operands[0], HImode));
6957 gcc_checking_assert (aligned_operand (operands[1], HImode));
6960 if (can_create_pseudo_p ())
6962 if (MEM_P (operands[0]))
6966 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
6969 if (CONST_INT_P (operands[1]))
6970 emit_insn (gen_storeinthi (operands[0], operands[1]));
6973 if (MEM_P (operands[1]))
6974 operands[1] = force_reg (HImode, operands[1]);
6975 if (BYTES_BIG_ENDIAN)
6976 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
6978 emit_insn (gen_storehi (operands[1], operands[0]));
6982 /* Sign extend a constant, and keep it in an SImode reg. */
6983 else if (CONST_INT_P (operands[1]))
6985 rtx reg = gen_reg_rtx (SImode);
6986 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6988 /* If the constant is already valid, leave it alone. */
6989 if (!const_ok_for_arm (val))
6991 /* If setting all the top bits will make the constant
6992 loadable in a single instruction, then set them.
6993 Otherwise, sign extend the number. */
6995 if (const_ok_for_arm (~(val | ~0xffff)))
6997 else if (val & 0x8000)
7001 emit_insn (gen_movsi (reg, GEN_INT (val)));
7002 operands[1] = gen_lowpart (HImode, reg);
7004 else if (arm_arch4 && optimize && can_create_pseudo_p ()
7005 && MEM_P (operands[1]))
7007 rtx reg = gen_reg_rtx (SImode);
7009 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
7010 operands[1] = gen_lowpart (HImode, reg);
7012 else if (!arm_arch4)
7014 if (MEM_P (operands[1]))
7017 rtx offset = const0_rtx;
7018 rtx reg = gen_reg_rtx (SImode);
7020 if ((REG_P (base = XEXP (operands[1], 0))
7021 || (GET_CODE (base) == PLUS
7022 && (CONST_INT_P (offset = XEXP (base, 1)))
7023 && ((INTVAL(offset) & 1) != 1)
7024 && REG_P (base = XEXP (base, 0))))
7025 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
7029 new_rtx = widen_memory_access (operands[1], SImode,
7030 ((INTVAL (offset) & ~3)
7031 - INTVAL (offset)));
7032 emit_insn (gen_movsi (reg, new_rtx));
7033 if (((INTVAL (offset) & 2) != 0)
7034 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
7036 rtx reg2 = gen_reg_rtx (SImode);
7038 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
7043 emit_insn (gen_movhi_bytes (reg, operands[1]));
7045 operands[1] = gen_lowpart (HImode, reg);
7049 /* Handle loading a large integer during reload. */
7050 else if (CONST_INT_P (operands[1])
7051 && !const_ok_for_arm (INTVAL (operands[1]))
7052 && !const_ok_for_arm (~INTVAL (operands[1])))
7054 /* Writing a constant to memory needs a scratch, which should
7055 be handled with SECONDARY_RELOADs. */
7056 gcc_assert (REG_P (operands[0]));
7058 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7059 emit_insn (gen_movsi (operands[0], operands[1]));
7063 else if (TARGET_THUMB2)
7065 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
7066 if (can_create_pseudo_p ())
7068 if (!REG_P (operands[0]))
7069 operands[1] = force_reg (HImode, operands[1]);
7070 /* Zero extend a constant, and keep it in an SImode reg. */
7071 else if (CONST_INT_P (operands[1]))
7073 rtx reg = gen_reg_rtx (SImode);
7074 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
7076 emit_insn (gen_movsi (reg, GEN_INT (val)));
7077 operands[1] = gen_lowpart (HImode, reg);
7081 else /* TARGET_THUMB1 */
7083 if (can_create_pseudo_p ())
7085 if (CONST_INT_P (operands[1]))
7087 rtx reg = gen_reg_rtx (SImode);
7089 emit_insn (gen_movsi (reg, operands[1]));
7090 operands[1] = gen_lowpart (HImode, reg);
7093 /* ??? We shouldn't really get invalid addresses here, but this can
7094 happen if we are passed a SP (never OK for HImode/QImode) or
7095 virtual register (also rejected as illegitimate for HImode/QImode)
7096 relative address. */
7097 /* ??? This should perhaps be fixed elsewhere, for instance, in
7098 fixup_stack_1, by checking for other kinds of invalid addresses,
7099 e.g. a bare reference to a virtual register. This may confuse the
7100 alpha though, which must handle this case differently. */
7101 if (MEM_P (operands[0])
7102 && !memory_address_p (GET_MODE (operands[0]),
7103 XEXP (operands[0], 0)))
7105 = replace_equiv_address (operands[0],
7106 copy_to_reg (XEXP (operands[0], 0)));
7108 if (MEM_P (operands[1])
7109 && !memory_address_p (GET_MODE (operands[1]),
7110 XEXP (operands[1], 0)))
7112 = replace_equiv_address (operands[1],
7113 copy_to_reg (XEXP (operands[1], 0)));
7115 if (MEM_P (operands[1]) && optimize > 0)
7117 rtx reg = gen_reg_rtx (SImode);
7119 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
7120 operands[1] = gen_lowpart (HImode, reg);
7123 if (MEM_P (operands[0]))
7124 operands[1] = force_reg (HImode, operands[1]);
7126 else if (CONST_INT_P (operands[1])
7127 && !satisfies_constraint_I (operands[1]))
7129 /* Handle loading a large integer during reload. */
7131 /* Writing a constant to memory needs a scratch, which should
7132 be handled with SECONDARY_RELOADs. */
7133 gcc_assert (REG_P (operands[0]));
7135 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7136 emit_insn (gen_movsi (operands[0], operands[1]));
7143 (define_expand "movhi_bytes"
7144 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
7146 (zero_extend:SI (match_dup 6)))
7147 (set (match_operand:SI 0 "" "")
7148 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
7153 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
7155 mem1 = change_address (operands[1], QImode, addr);
7156 mem2 = change_address (operands[1], QImode,
7157 plus_constant (Pmode, addr, 1));
7158 operands[0] = gen_lowpart (SImode, operands[0]);
7160 operands[2] = gen_reg_rtx (SImode);
7161 operands[3] = gen_reg_rtx (SImode);
7164 if (BYTES_BIG_ENDIAN)
7166 operands[4] = operands[2];
7167 operands[5] = operands[3];
7171 operands[4] = operands[3];
7172 operands[5] = operands[2];
7177 (define_expand "movhi_bigend"
7179 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
7182 (ashiftrt:SI (match_dup 2) (const_int 16)))
7183 (set (match_operand:HI 0 "s_register_operand")
7187 operands[2] = gen_reg_rtx (SImode);
7188 operands[3] = gen_reg_rtx (SImode);
7189 operands[4] = gen_lowpart (HImode, operands[3]);
7193 ;; Pattern to recognize insn generated default case above
7194 (define_insn "*movhi_insn_arch4"
7195 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
7196 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))]
7198 && arm_arch4 && !TARGET_HARD_FLOAT
7199 && (register_operand (operands[0], HImode)
7200 || register_operand (operands[1], HImode))"
7202 mov%?\\t%0, %1\\t%@ movhi
7203 mvn%?\\t%0, #%B1\\t%@ movhi
7204 movw%?\\t%0, %L1\\t%@ movhi
7205 strh%?\\t%1, %0\\t%@ movhi
7206 ldrh%?\\t%0, %1\\t%@ movhi"
7207 [(set_attr "predicable" "yes")
7208 (set_attr "pool_range" "*,*,*,*,256")
7209 (set_attr "neg_pool_range" "*,*,*,*,244")
7210 (set_attr "arch" "*,*,v6t2,*,*")
7211 (set_attr_alternative "type"
7212 [(if_then_else (match_operand 1 "const_int_operand" "")
7213 (const_string "mov_imm" )
7214 (const_string "mov_reg"))
7215 (const_string "mvn_imm")
7216 (const_string "mov_imm")
7217 (const_string "store_4")
7218 (const_string "load_4")])]
7221 (define_insn "*movhi_bytes"
7222 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
7223 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))]
7224 "TARGET_ARM && !TARGET_HARD_FLOAT"
7226 mov%?\\t%0, %1\\t%@ movhi
7227 mov%?\\t%0, %1\\t%@ movhi
7228 mvn%?\\t%0, #%B1\\t%@ movhi"
7229 [(set_attr "predicable" "yes")
7230 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
7233 ;; We use a DImode scratch because we may occasionally need an additional
7234 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
7235 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
7236 ;; The reload_in<m> and reload_out<m> patterns require special constraints
7237 ;; to be correctly handled in default_secondary_reload function.
7238 (define_expand "reload_outhi"
7239 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
7240 (match_operand:HI 1 "s_register_operand" "r")
7241 (match_operand:DI 2 "s_register_operand" "=&l")])]
7244 arm_reload_out_hi (operands);
7246 thumb_reload_out_hi (operands);
7251 (define_expand "reload_inhi"
7252 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
7253 (match_operand:HI 1 "arm_reload_memory_operand" "o")
7254 (match_operand:DI 2 "s_register_operand" "=&r")])]
7258 arm_reload_in_hi (operands);
7260 thumb_reload_out_hi (operands);
7264 (define_expand "movqi"
7265 [(set (match_operand:QI 0 "general_operand")
7266 (match_operand:QI 1 "general_operand"))]
7269 /* Everything except mem = const or mem = mem can be done easily */
7271 if (can_create_pseudo_p ())
7273 if (CONST_INT_P (operands[1]))
7275 rtx reg = gen_reg_rtx (SImode);
7277 /* For thumb we want an unsigned immediate, then we are more likely
7278 to be able to use a movs insn. */
7280 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
7282 emit_insn (gen_movsi (reg, operands[1]));
7283 operands[1] = gen_lowpart (QImode, reg);
7288 /* ??? We shouldn't really get invalid addresses here, but this can
7289 happen if we are passed a SP (never OK for HImode/QImode) or
7290 virtual register (also rejected as illegitimate for HImode/QImode)
7291 relative address. */
7292 /* ??? This should perhaps be fixed elsewhere, for instance, in
7293 fixup_stack_1, by checking for other kinds of invalid addresses,
7294 e.g. a bare reference to a virtual register. This may confuse the
7295 alpha though, which must handle this case differently. */
7296 if (MEM_P (operands[0])
7297 && !memory_address_p (GET_MODE (operands[0]),
7298 XEXP (operands[0], 0)))
7300 = replace_equiv_address (operands[0],
7301 copy_to_reg (XEXP (operands[0], 0)));
7302 if (MEM_P (operands[1])
7303 && !memory_address_p (GET_MODE (operands[1]),
7304 XEXP (operands[1], 0)))
7306 = replace_equiv_address (operands[1],
7307 copy_to_reg (XEXP (operands[1], 0)));
7310 if (MEM_P (operands[1]) && optimize > 0)
7312 rtx reg = gen_reg_rtx (SImode);
7314 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
7315 operands[1] = gen_lowpart (QImode, reg);
7318 if (MEM_P (operands[0]))
7319 operands[1] = force_reg (QImode, operands[1]);
7321 else if (TARGET_THUMB
7322 && CONST_INT_P (operands[1])
7323 && !satisfies_constraint_I (operands[1]))
7325 /* Handle loading a large integer during reload. */
7327 /* Writing a constant to memory needs a scratch, which should
7328 be handled with SECONDARY_RELOADs. */
7329 gcc_assert (REG_P (operands[0]));
7331 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7332 emit_insn (gen_movsi (operands[0], operands[1]));
7338 (define_insn "*arm_movqi_insn"
7339 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
7340 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
7342 && ( register_operand (operands[0], QImode)
7343 || register_operand (operands[1], QImode))"
7354 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
7355 (set_attr "predicable" "yes")
7356 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
7357 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
7358 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
7361 ;; HFmode and BFmode moves.
7362 (define_expand "mov<mode>"
7363 [(set (match_operand:HFBF 0 "general_operand")
7364 (match_operand:HFBF 1 "general_operand"))]
7367 gcc_checking_assert (aligned_operand (operands[0], <MODE>mode));
7368 gcc_checking_assert (aligned_operand (operands[1], <MODE>mode));
7371 if (MEM_P (operands[0]))
7372 operands[1] = force_reg (<MODE>mode, operands[1]);
7374 else /* TARGET_THUMB1 */
7376 if (can_create_pseudo_p ())
7378 if (!REG_P (operands[0]))
7379 operands[1] = force_reg (<MODE>mode, operands[1]);
7385 (define_insn "*arm32_mov<mode>"
7386 [(set (match_operand:HFBF 0 "nonimmediate_operand" "=r,m,r,r")
7387 (match_operand:HFBF 1 "general_operand" " m,r,r,F"))]
7389 && !TARGET_HARD_FLOAT
7391 && ( s_register_operand (operands[0], <MODE>mode)
7392 || s_register_operand (operands[1], <MODE>mode))"
7394 switch (which_alternative)
7396 case 0: /* ARM register from memory */
7397 return \"ldrh%?\\t%0, %1\\t%@ __<fporbf>\";
7398 case 1: /* memory from ARM register */
7399 return \"strh%?\\t%1, %0\\t%@ __<fporbf>\";
7400 case 2: /* ARM register from ARM register */
7401 return \"mov%?\\t%0, %1\\t%@ __<fporbf>\";
7402 case 3: /* ARM register from constant */
7407 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
7409 ops[0] = operands[0];
7410 ops[1] = GEN_INT (bits);
7411 ops[2] = GEN_INT (bits & 0xff00);
7412 ops[3] = GEN_INT (bits & 0x00ff);
7414 if (arm_arch_thumb2)
7415 output_asm_insn (\"movw%?\\t%0, %1\", ops);
7417 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
7424 [(set_attr "conds" "unconditional")
7425 (set_attr "type" "load_4,store_4,mov_reg,multiple")
7426 (set_attr "length" "4,4,4,8")
7427 (set_attr "predicable" "yes")]
7430 (define_expand "movsf"
7431 [(set (match_operand:SF 0 "general_operand")
7432 (match_operand:SF 1 "general_operand"))]
7435 gcc_checking_assert (aligned_operand (operands[0], SFmode));
7436 gcc_checking_assert (aligned_operand (operands[1], SFmode));
7439 if (MEM_P (operands[0]))
7440 operands[1] = force_reg (SFmode, operands[1]);
7442 else /* TARGET_THUMB1 */
7444 if (can_create_pseudo_p ())
7446 if (!REG_P (operands[0]))
7447 operands[1] = force_reg (SFmode, operands[1]);
7451 /* Cannot load it directly, generate a load with clobber so that it can be
7452 loaded via GPR with MOV / MOVT. */
7453 if (arm_disable_literal_pool
7454 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
7455 && CONST_DOUBLE_P (operands[1])
7457 && !vfp3_const_double_rtx (operands[1]))
7459 rtx clobreg = gen_reg_rtx (SFmode);
7460 emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
7467 ;; Transform a floating-point move of a constant into a core register into
7468 ;; an SImode operation.
7470 [(set (match_operand:SF 0 "arm_general_register_operand" "")
7471 (match_operand:SF 1 "immediate_operand" ""))]
7474 && CONST_DOUBLE_P (operands[1])"
7475 [(set (match_dup 2) (match_dup 3))]
7477 operands[2] = gen_lowpart (SImode, operands[0]);
7478 operands[3] = gen_lowpart (SImode, operands[1]);
7479 if (operands[2] == 0 || operands[3] == 0)
7484 (define_insn "*arm_movsf_soft_insn"
7485 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
7486 (match_operand:SF 1 "general_operand" "r,mE,r"))]
7488 && TARGET_SOFT_FLOAT && !TARGET_HAVE_MVE
7489 && (!MEM_P (operands[0])
7490 || register_operand (operands[1], SFmode))"
7492 switch (which_alternative)
7494 case 0: return \"mov%?\\t%0, %1\";
7496 /* Cannot load it directly, split to load it via MOV / MOVT. */
7497 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
7499 return \"ldr%?\\t%0, %1\\t%@ float\";
7500 case 2: return \"str%?\\t%1, %0\\t%@ float\";
7501 default: gcc_unreachable ();
7504 [(set_attr "predicable" "yes")
7505 (set_attr "type" "mov_reg,load_4,store_4")
7506 (set_attr "arm_pool_range" "*,4096,*")
7507 (set_attr "thumb2_pool_range" "*,4094,*")
7508 (set_attr "arm_neg_pool_range" "*,4084,*")
7509 (set_attr "thumb2_neg_pool_range" "*,0,*")]
7512 ;; Splitter for the above.
7514 [(set (match_operand:SF 0 "s_register_operand")
7515 (match_operand:SF 1 "const_double_operand"))]
7516 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
7520 real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
7521 rtx cst = gen_int_mode (buf, SImode);
7522 emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
7527 (define_expand "movdf"
7528 [(set (match_operand:DF 0 "general_operand")
7529 (match_operand:DF 1 "general_operand"))]
7532 gcc_checking_assert (aligned_operand (operands[0], DFmode));
7533 gcc_checking_assert (aligned_operand (operands[1], DFmode));
7536 if (MEM_P (operands[0]))
7537 operands[1] = force_reg (DFmode, operands[1]);
7539 else /* TARGET_THUMB */
7541 if (can_create_pseudo_p ())
7543 if (!REG_P (operands[0]))
7544 operands[1] = force_reg (DFmode, operands[1]);
7548 /* Cannot load it directly, generate a load with clobber so that it can be
7549 loaded via GPR with MOV / MOVT. */
7550 if (arm_disable_literal_pool
7551 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
7552 && CONSTANT_P (operands[1])
7554 && !arm_const_double_rtx (operands[1])
7555 && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
7557 rtx clobreg = gen_reg_rtx (DFmode);
7558 emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
7565 ;; Reloading a df mode value stored in integer regs to memory can require a
7567 ;; Another reload_out<m> pattern that requires special constraints.
7568 (define_expand "reload_outdf"
7569 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
7570 (match_operand:DF 1 "s_register_operand" "r")
7571 (match_operand:SI 2 "s_register_operand" "=&r")]
7575 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
7578 operands[2] = XEXP (operands[0], 0);
7579 else if (code == POST_INC || code == PRE_DEC)
7581 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
7582 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
7583 emit_insn (gen_movdi (operands[0], operands[1]));
7586 else if (code == PRE_INC)
7588 rtx reg = XEXP (XEXP (operands[0], 0), 0);
7590 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
7593 else if (code == POST_DEC)
7594 operands[2] = XEXP (XEXP (operands[0], 0), 0);
7596 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
7597 XEXP (XEXP (operands[0], 0), 1)));
7599 emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
7602 if (code == POST_DEC)
7603 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
7609 (define_insn "*movdf_soft_insn"
7610 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
7611 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
7612 "TARGET_32BIT && TARGET_SOFT_FLOAT && !TARGET_HAVE_MVE
7613 && ( register_operand (operands[0], DFmode)
7614 || register_operand (operands[1], DFmode))"
7616 switch (which_alternative)
7623 /* Cannot load it directly, split to load it via MOV / MOVT. */
7624 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
7628 return output_move_double (operands, true, NULL);
7631 [(set_attr "length" "8,12,16,8,8")
7632 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
7633 (set_attr "arm_pool_range" "*,*,*,1020,*")
7634 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
7635 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
7636 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
7639 ;; Splitter for the above.
7641 [(set (match_operand:DF 0 "s_register_operand")
7642 (match_operand:DF 1 "const_double_operand"))]
7643 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
7647 int order = BYTES_BIG_ENDIAN ? 1 : 0;
7648 real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
7649 unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
7650 ival |= (zext_hwi (buf[1 - order], 32) << 32);
7651 rtx cst = gen_int_mode (ival, DImode);
7652 emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
7658 ;; load- and store-multiple insns
7659 ;; The arm can load/store any set of registers, provided that they are in
7660 ;; ascending order, but these expanders assume a contiguous set.
7662 (define_expand "load_multiple"
7663 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7664 (match_operand:SI 1 "" ""))
7665 (use (match_operand:SI 2 "" ""))])]
7668 HOST_WIDE_INT offset = 0;
7670 /* Support only fixed point registers. */
7671 if (!CONST_INT_P (operands[2])
7672 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
7673 || INTVAL (operands[2]) < 2
7674 || !MEM_P (operands[1])
7675 || !REG_P (operands[0])
7676 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
7677 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7681 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
7682 INTVAL (operands[2]),
7683 force_reg (SImode, XEXP (operands[1], 0)),
7684 FALSE, operands[1], &offset);
7687 (define_expand "store_multiple"
7688 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7689 (match_operand:SI 1 "" ""))
7690 (use (match_operand:SI 2 "" ""))])]
7693 HOST_WIDE_INT offset = 0;
7695 /* Support only fixed point registers. */
7696 if (!CONST_INT_P (operands[2])
7697 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
7698 || INTVAL (operands[2]) < 2
7699 || !REG_P (operands[1])
7700 || !MEM_P (operands[0])
7701 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
7702 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7706 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
7707 INTVAL (operands[2]),
7708 force_reg (SImode, XEXP (operands[0], 0)),
7709 FALSE, operands[0], &offset);
;; setmemsi: block memory set (memset).  Operand 0 is the destination
;; block, operand 1 the length, operand 2 the fill value, operand 3 the
;; alignment.  Delegates entirely to arm_gen_setmem; the DONE/FAIL
;; control flow is elided in this excerpt.
7713 (define_expand "setmemsi"
7714 [(match_operand:BLK 0 "general_operand")
7715 (match_operand:SI 1 "const_int_operand")
7716 (match_operand:SI 2 "const_int_operand")
7717 (match_operand:SI 3 "const_int_operand")]
7720 if (arm_gen_setmem (operands))
7727 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
7728 ;; We could let this apply for blocks of less than this, but it clobbers so
7729 ;; many registers that there is then probably a better way.

;; cpymemqi: block copy (memcpy).  On 32-bit targets it first tries the
;; LDRD/STRD sequence when the tuning prefers it and we are not
;; optimizing for size, then falls back to arm_gen_cpymemqi.  Thumb-1
;; only handles word-aligned copies (operand 3 == 4) of at most 48 bytes
;; via thumb_expand_cpymemqi.
7731 (define_expand "cpymemqi"
7732 [(match_operand:BLK 0 "general_operand")
7733 (match_operand:BLK 1 "general_operand")
7734 (match_operand:SI 2 "const_int_operand")
7735 (match_operand:SI 3 "const_int_operand")]
7740 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
7741 && !optimize_function_for_size_p (cfun))
7743 if (gen_cpymem_ldrd_strd (operands))
7748 if (arm_gen_cpymemqi (operands))
7752 else /* TARGET_THUMB1 */
7754 if ( INTVAL (operands[3]) != 4
7755 || INTVAL (operands[2]) > 48)
7758 thumb_expand_cpymemqi (operands);
7765 ;; Compare & branch insns
7766 ;; The range calculations are based as follows:
7767 ;; For forward branches, the address calculation returns the address of
7768 ;; the next instruction. This is 2 beyond the branch instruction.
7769 ;; For backward branches, the address calculation returns the address of
7770 ;; the first instruction in this pattern (cmp). This is 2 before the branch
7771 ;; instruction for the shortest sequence, and 4 before the branch instruction
7772 ;; if we have to jump around an unconditional branch.
7773 ;; To the basic branch range the PC offset must be added (this is +4).
7774 ;; So for forward branches we have
7775 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
7776 ;; And for backward branches we have
7777 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
7779 ;; In 16-bit Thumb these ranges are:
7780 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
7781 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
7783 ;; In 32-bit Thumb these ranges are:
7784 ;; For a 'b' +/- 16MB is not checked for.
7785 ;; For a 'b<cond>' pos_range = 1048574, neg_range = -1048576 giving
7786 ;; (-1048568 -> 1048576).

;; cbranchsi4: conditional branch on an SImode comparison.  The 32-bit
;; path validizes the comparison and emits cbranch_cc; the Thumb-1 path
;; special-cases immediates whose negation is a valid CMN operand
;; (thumb1_cmpneg_operand -> cbranchsi4_scratch) and otherwise forces
;; operand 2 into a register when it is not a valid Thumb-1 CMP operand.
7788 (define_expand "cbranchsi4"
7789 [(set (pc) (if_then_else
7790 (match_operator 0 "expandable_comparison_operator"
7791 [(match_operand:SI 1 "s_register_operand")
7792 (match_operand:SI 2 "nonmemory_operand")])
7793 (label_ref (match_operand 3 "" ""))
7799 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7801 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7805 if (thumb1_cmpneg_operand (operands[2], SImode))
7807 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
7808 operands[3], operands[0]));
7811 if (!thumb1_cmp_operand (operands[2], SImode))
7812 operands[2] = force_reg (SImode, operands[2]);

;; cbranchsf4: single-float compare and branch; requires hard float and
;; simply defers to cbranch_cc.
7815 (define_expand "cbranchsf4"
7816 [(set (pc) (if_then_else
7817 (match_operator 0 "expandable_comparison_operator"
7818 [(match_operand:SF 1 "s_register_operand")
7819 (match_operand:SF 2 "vfp_compare_operand")])
7820 (label_ref (match_operand 3 "" ""))
7822 "TARGET_32BIT && TARGET_HARD_FLOAT"
7823 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7824 operands[3])); DONE;"

;; cbranchdf4: as cbranchsf4 but for double floats, hence additionally
;; requiring double-precision VFP (!TARGET_VFP_SINGLE).
7827 (define_expand "cbranchdf4"
7828 [(set (pc) (if_then_else
7829 (match_operator 0 "expandable_comparison_operator"
7830 [(match_operand:DF 1 "s_register_operand")
7831 (match_operand:DF 2 "vfp_compare_operand")])
7832 (label_ref (match_operand 3 "" ""))
7834 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7835 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7836 operands[3])); DONE;"

;; cbranchdi4: 64-bit integer compare and branch; validizes the
;; comparison (which may rewrite the operands) before emitting
;; cbranch_cc.
7839 (define_expand "cbranchdi4"
7840 [(set (pc) (if_then_else
7841 (match_operator 0 "expandable_comparison_operator"
7842 [(match_operand:DI 1 "s_register_operand")
7843 (match_operand:DI 2 "reg_or_int_operand")])
7844 (label_ref (match_operand 3 "" ""))
7848 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7850 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7856 ;; Comparison and test insns

;; *arm_cmpsi_insn: SImode compare setting the full CC register.  The
;; first two alternatives are 16-bit Thumb-2 encodings (arch t2,
;; length 2); the rest are 4-byte forms taking a register, a positive
;; immediate (I) or a negatable immediate (L).
7858 (define_insn "*arm_cmpsi_insn"
7859 [(set (reg:CC CC_REGNUM)
7860 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
7861 (match_operand:SI 1 "arm_add_operand" "Py,r,r,I,L")))]
7869 [(set_attr "conds" "set")
7870 (set_attr "arch" "t2,t2,any,any,any")
7871 (set_attr "length" "2,2,4,4,4")
7872 (set_attr "predicable" "yes")
7873 (set_attr "predicable_short_it" "yes,yes,yes,no,no")
7874 (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]

;; *cmpsi_shiftsi: compare a register against a shifted register
;; (register shift amounts are A32 only, per the "arch" attribute).
7877 (define_insn "*cmpsi_shiftsi"
7878 [(set (reg:CC CC_REGNUM)
7879 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7880 (match_operator:SI 3 "shift_operator"
7881 [(match_operand:SI 1 "s_register_operand" "r,r")
7882 (match_operand:SI 2 "shift_amount_operand" "M,r")])))]
7885 [(set_attr "conds" "set")
7886 (set_attr "shift" "1")
7887 (set_attr "arch" "32,a")
7888 (set_attr "type" "alus_shift_imm,alus_shift_reg")])

;; *cmpsi_shiftsi_swp: as above with the operands swapped, so the
;; result is in CC_SWP mode (condition codes must be interpreted
;; with the comparison reversed).
7890 (define_insn "*cmpsi_shiftsi_swp"
7891 [(set (reg:CC_SWP CC_REGNUM)
7892 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7893 [(match_operand:SI 1 "s_register_operand" "r,r")
7894 (match_operand:SI 2 "shift_amount_operand" "M,r")])
7895 (match_operand:SI 0 "s_register_operand" "r,r")))]
7898 [(set_attr "conds" "set")
7899 (set_attr "shift" "1")
7900 (set_attr "arch" "32,a")
7901 (set_attr "type" "alus_shift_imm,alus_shift_reg")])

;; *arm_cmpsi_negshiftsi_si: compare a register with the negation of a
;; shifted register; only the Z flag is valid afterwards (CC_Z mode),
;; since this is implemented with an adding instruction (CMN-style).
7903 (define_insn "*arm_cmpsi_negshiftsi_si"
7904 [(set (reg:CC_Z CC_REGNUM)
7906 (neg:SI (match_operator:SI 1 "shift_operator"
7907 [(match_operand:SI 2 "s_register_operand" "r,r")
7908 (match_operand:SI 3 "shift_amount_operand" "M,r")]))
7909 (match_operand:SI 0 "s_register_operand" "r,r")))]
7912 [(set_attr "conds" "set")
7913 (set_attr "arch" "32,a")
7914 (set_attr "shift" "2")
7915 (set_attr "type" "alus_shift_imm,alus_shift_reg")
7916 (set_attr "predicable" "yes")]

7919 ; This insn allows redundant compares to be removed by cse, nothing should
7920 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7921 ; is deleted later on. The match_dup will match the mode here, so that
7922 ; mode changes of the condition codes aren't lost by this even though we don't
7923 ; specify what they are.

;; *deleted_compare: zero-length placeholder that emits only an
;; assembler comment; see the explanation above.
7925 (define_insn "*deleted_compare"
7926 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7928 "\\t%@ deleted compare"
7929 [(set_attr "conds" "set")
7930 (set_attr "length" "0")
7931 (set_attr "type" "no_insn")]
7935 ;; Conditional branch insns

;; cbranch_cc: emit the comparison via arm_gen_compare_reg, then branch
;; on the resulting CC register compared against zero.
7937 (define_expand "cbranch_cc"
7939 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7940 (match_operand 2 "" "")])
7941 (label_ref (match_operand 3 "" ""))
7944 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7945 operands[1], operands[2], NULL_RTX);
7946 operands[2] = const0_rtx;"

7950 ;; Patterns to match conditional branch insns.

;; arm_cond_branch: branch on a condition in the CC register.  The
;; length attribute picks between a 16-bit Thumb-2 b<cond>, a 32-bit
;; b<cond>, and (out of range) an inverted short b<cond> jumping around
;; an unconditional b, produced by arm_gen_far_branch.  The ccfsm code
;; lets the ARM conditional-execution state machine swallow the branch.
7953 (define_insn "arm_cond_branch"
7955 (if_then_else (match_operator 1 "arm_comparison_operator"
7956 [(match_operand 2 "cc_register" "") (const_int 0)])
7957 (label_ref (match_operand 0 "" ""))
7961 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7963 arm_ccfsm_state += 2;
7966 switch (get_attr_length (insn))
7968 case 2: /* Thumb2 16-bit b{cond}. */
7969 case 4: /* Thumb2 32-bit b{cond} or A32 b{cond}. */
7973 /* Thumb2 b{cond} out of range. Use 16-bit b{cond} and
7974 unconditional branch b. */
7975 default: return arm_gen_far_branch (operands, 0, "Lbcond", "b%D1\t");
7978 [(set_attr "conds" "use")
7979 (set_attr "type" "branch")
7980 (set (attr "length")
7981 (if_then_else (match_test "!TARGET_THUMB2")
7983 ;;Target is not Thumb2, therefore is A32. Generate b{cond}.
7986 ;; Check if target is within 16-bit Thumb2 b{cond} range.
7987 (if_then_else (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7988 (le (minus (match_dup 0) (pc)) (const_int 256)))
7990 ;; Target is Thumb2, within narrow range.
7991 ;; Generate b{cond}.
7994 ;; Check if target is within 32-bit Thumb2 b{cond} range.
7995 (if_then_else (and (ge (minus (match_dup 0) (pc))(const_int -1048568))
7996 (le (minus (match_dup 0) (pc)) (const_int 1048576)))
7998 ;; Target is Thumb2, within wide range.
8001 ;; Target is Thumb2, out of range.
8002 ;; Generate narrow b{cond} and unconditional branch b.

;; *arm_cond_branch_reversed: as arm_cond_branch but the branch is
;; taken when the condition is FALSE (note %d1 in the far-branch
;; template versus %D1 above); length computation is identical.
8006 (define_insn "*arm_cond_branch_reversed"
8008 (if_then_else (match_operator 1 "arm_comparison_operator"
8009 [(match_operand 2 "cc_register" "") (const_int 0)])
8011 (label_ref (match_operand 0 "" ""))))]
8014 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8016 arm_ccfsm_state += 2;
8019 switch (get_attr_length (insn))
8021 case 2: /* Thumb2 16-bit b{cond}. */
8022 case 4: /* Thumb2 32-bit b{cond} or A32 b{cond}. */
8026 /* Thumb2 b{cond} out of range. Use 16-bit b{cond} and
8027 unconditional branch b. */
8028 default: return arm_gen_far_branch (operands, 0, "Lbcond", "b%d1\t");
8031 [(set_attr "conds" "use")
8032 (set_attr "type" "branch")
8033 (set (attr "length")
8034 (if_then_else (match_test "!TARGET_THUMB2")
8036 ;;Target is not Thumb2, therefore is A32. Generate b{cond}.
8039 ;; Check if target is within 16-bit Thumb2 b{cond} range.
8040 (if_then_else (and (ge (minus (match_dup 0) (pc)) (const_int -250))
8041 (le (minus (match_dup 0) (pc)) (const_int 256)))
8043 ;; Target is Thumb2, within narrow range.
8044 ;; Generate b{cond}.
8047 ;; Check if target is within 32-bit Thumb2 b{cond} range.
8048 (if_then_else (and (ge (minus (match_dup 0) (pc))(const_int -1048568))
8049 (le (minus (match_dup 0) (pc)) (const_int 1048576)))
8051 ;; Target is Thumb2, within wide range.
8052 ;; Generate b{cond}.
8054 ;; Target is Thumb2, out of range.
8055 ;; Generate narrow b{cond} and unconditional branch b.
;; cstore_cc: store a comparison result into a register.  Emits the
;; compare via arm_gen_compare_reg and rewrites the operator so that it
;; tests the CC register against zero.
8063 (define_expand "cstore_cc"
8064 [(set (match_operand:SI 0 "s_register_operand")
8065 (match_operator:SI 1 "" [(match_operand 2 "" "")
8066 (match_operand 3 "" "")]))]
8068 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
8069 operands[2], operands[3], NULL_RTX);
8070 operands[3] = const0_rtx;"

;; *mov_scc: set a register to 0/1 from a condition.  Matched as "#"
;; and split (after reload, per the "&&" condition) into a conditional
;; select; the historical two-insn form is shown in the commented
;; template.
8073 (define_insn_and_split "*mov_scc"
8074 [(set (match_operand:SI 0 "s_register_operand" "=r")
8075 (match_operator:SI 1 "arm_comparison_operator_mode"
8076 [(match_operand 2 "cc_register" "") (const_int 0)]))]
8078 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
8081 (if_then_else:SI (match_dup 1)
8085 [(set_attr "conds" "use")
8086 (set_attr "length" "8")
8087 (set_attr "type" "multiple")]

;; *negscc_borrow: negate a borrow-flag operation; single 4-byte
;; adc-class instruction.
8090 (define_insn "*negscc_borrow"
8091 [(set (match_operand:SI 0 "s_register_operand" "=r")
8092 (neg:SI (match_operand:SI 1 "arm_borrow_operation" "")))]
8095 [(set_attr "conds" "use")
8096 (set_attr "length" "4")
8097 (set_attr "type" "adc_reg")]

;; *mov_negscc: set a register to 0/-1 from a condition.  Excludes the
;; borrow case handled by *negscc_borrow above; split after reload into
;; a conditional select of 0 vs ~0.
8100 (define_insn_and_split "*mov_negscc"
8101 [(set (match_operand:SI 0 "s_register_operand" "=r")
8102 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
8103 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8104 "TARGET_ARM && !arm_borrow_operation (operands[1], SImode)"
8105 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
8108 (if_then_else:SI (match_dup 1)
8112 operands[3] = GEN_INT (~0);
8114 [(set_attr "conds" "use")
8115 (set_attr "length" "8")
8116 (set_attr "type" "multiple")]

;; *mov_notscc: set a register to ~0/~1 (the bitwise complement of the
;; 0/1 store-flag value); split after reload into a conditional select.
8119 (define_insn_and_split "*mov_notscc"
8120 [(set (match_operand:SI 0 "s_register_operand" "=r")
8121 (not:SI (match_operator:SI 1 "arm_comparison_operator"
8122 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8124 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
8127 (if_then_else:SI (match_dup 1)
8131 operands[3] = GEN_INT (~1);
8132 operands[4] = GEN_INT (~0);
8134 [(set_attr "conds" "use")
8135 (set_attr "length" "8")
8136 (set_attr "type" "multiple")]
;; cstoresi4: store the result of an SImode comparison into a register.
;; On 32-bit targets this forces operand 3 into a valid add-operand and
;; delegates to cstore_cc.  On Thumb-1 each comparison code is
;; open-coded: EQ/NE against zero (or against a general operand via a
;; subtraction) use dedicated patterns, while the remaining visible
;; cases are built from shift/ior/sub idioms and the add-with-carry
;; helper thumb1_addsi3_addgeu.  NOTE(review): the switch case labels
;; are elided in this excerpt, so which idiom serves which rtx code
;; should be confirmed against the full file.
8139 (define_expand "cstoresi4"
8140 [(set (match_operand:SI 0 "s_register_operand")
8141 (match_operator:SI 1 "expandable_comparison_operator"
8142 [(match_operand:SI 2 "s_register_operand")
8143 (match_operand:SI 3 "reg_or_int_operand")]))]
8144 "TARGET_32BIT || TARGET_THUMB1"
8146 rtx op3, scratch, scratch2;
8150 if (!arm_add_operand (operands[3], SImode))
8151 operands[3] = force_reg (SImode, operands[3]);
8152 emit_insn (gen_cstore_cc (operands[0], operands[1],
8153 operands[2], operands[3]));
8157 if (operands[3] == const0_rtx)
8159 switch (GET_CODE (operands[1]))
8162 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
8166 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
8170 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
8171 NULL_RTX, 0, OPTAB_WIDEN);
8172 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
8173 NULL_RTX, 0, OPTAB_WIDEN);
8174 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8175 operands[0], 1, OPTAB_WIDEN);
8179 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
8181 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8182 NULL_RTX, 1, OPTAB_WIDEN);
8186 scratch = expand_binop (SImode, ashr_optab, operands[2],
8187 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
8188 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
8189 NULL_RTX, 0, OPTAB_WIDEN);
8190 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
8194 /* LT is handled by generic code. No need for unsigned with 0. */
8201 switch (GET_CODE (operands[1]))
8204 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8205 NULL_RTX, 0, OPTAB_WIDEN);
8206 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
8210 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8211 NULL_RTX, 0, OPTAB_WIDEN);
8212 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
8216 op3 = force_reg (SImode, operands[3]);
8218 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
8219 NULL_RTX, 1, OPTAB_WIDEN);
8220 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
8221 NULL_RTX, 0, OPTAB_WIDEN);
8222 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8228 if (!thumb1_cmp_operand (op3, SImode))
8229 op3 = force_reg (SImode, op3);
8230 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8231 NULL_RTX, 0, OPTAB_WIDEN);
8232 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8233 NULL_RTX, 1, OPTAB_WIDEN);
8234 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8239 op3 = force_reg (SImode, operands[3]);
8240 scratch = force_reg (SImode, const0_rtx);
8241 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8247 if (!thumb1_cmp_operand (op3, SImode))
8248 op3 = force_reg (SImode, op3);
8249 scratch = force_reg (SImode, const0_rtx);
8250 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8256 if (!thumb1_cmp_operand (op3, SImode))
8257 op3 = force_reg (SImode, op3);
8258 scratch = gen_reg_rtx (SImode);
8259 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
8263 op3 = force_reg (SImode, operands[3]);
8264 scratch = gen_reg_rtx (SImode);
8265 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
8268 /* No good sequences for GT, LT. */
;; cstorehf4: store-flag for half-float comparisons; requires the
;; FP16 scalar instructions.  Validizes the comparison, then defers to
;; cstore_cc.
8275 (define_expand "cstorehf4"
8276 [(set (match_operand:SI 0 "s_register_operand")
8277 (match_operator:SI 1 "expandable_comparison_operator"
8278 [(match_operand:HF 2 "s_register_operand")
8279 (match_operand:HF 3 "vfp_compare_operand")]))]
8280 "TARGET_VFP_FP16INST"
8282 if (!arm_validize_comparison (&operands[1],
8287 emit_insn (gen_cstore_cc (operands[0], operands[1],
8288 operands[2], operands[3]));

;; cstoresf4: store-flag for single-float comparisons; straight
;; delegation to cstore_cc.
8293 (define_expand "cstoresf4"
8294 [(set (match_operand:SI 0 "s_register_operand")
8295 (match_operator:SI 1 "expandable_comparison_operator"
8296 [(match_operand:SF 2 "s_register_operand")
8297 (match_operand:SF 3 "vfp_compare_operand")]))]
8298 "TARGET_32BIT && TARGET_HARD_FLOAT"
8299 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8300 operands[2], operands[3])); DONE;"

;; cstoredf4: as cstoresf4 for double floats; additionally requires
;; double-precision VFP.
8303 (define_expand "cstoredf4"
8304 [(set (match_operand:SI 0 "s_register_operand")
8305 (match_operator:SI 1 "expandable_comparison_operator"
8306 [(match_operand:DF 2 "s_register_operand")
8307 (match_operand:DF 3 "vfp_compare_operand")]))]
8308 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
8309 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8310 operands[2], operands[3])); DONE;"

;; cstoredi4: store-flag for 64-bit integer comparisons; validizes the
;; comparison first (which may rewrite operands), then uses cstore_cc.
8313 (define_expand "cstoredi4"
8314 [(set (match_operand:SI 0 "s_register_operand")
8315 (match_operator:SI 1 "expandable_comparison_operator"
8316 [(match_operand:DI 2 "s_register_operand")
8317 (match_operand:DI 3 "reg_or_int_operand")]))]
8320 if (!arm_validize_comparison (&operands[1],
8324 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
8331 ;; Conditional move insns

;; movsicc: SImode conditional move.  Validizes the comparison in
;; operand 1, materialises the condition into the CC register via
;; arm_gen_compare_reg, and rewrites operand 1 as <code>(ccreg, 0) so a
;; later pattern can match it.
8333 (define_expand "movsicc"
8334 [(set (match_operand:SI 0 "s_register_operand")
8335 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
8336 (match_operand:SI 2 "arm_not_operand")
8337 (match_operand:SI 3 "arm_not_operand")))]
8344 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8345 &XEXP (operands[1], 1)))
8348 code = GET_CODE (operands[1]);
8349 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8350 XEXP (operands[1], 1), NULL_RTX);
8351 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);

;; movhfcc: half-float conditional move; same CC-register rewrite as
;; movsicc, gated on the FP16 scalar instructions.
8355 (define_expand "movhfcc"
8356 [(set (match_operand:HF 0 "s_register_operand")
8357 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
8358 (match_operand:HF 2 "s_register_operand")
8359 (match_operand:HF 3 "s_register_operand")))]
8360 "TARGET_VFP_FP16INST"
8363 enum rtx_code code = GET_CODE (operands[1]);
8366 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8367 &XEXP (operands[1], 1)))
8370 code = GET_CODE (operands[1]);
8371 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8372 XEXP (operands[1], 1), NULL_RTX);
8373 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);

;; movsfcc: single-float conditional move; same rewrite, hard float
;; required.
8377 (define_expand "movsfcc"
8378 [(set (match_operand:SF 0 "s_register_operand")
8379 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
8380 (match_operand:SF 2 "s_register_operand")
8381 (match_operand:SF 3 "s_register_operand")))]
8382 "TARGET_32BIT && TARGET_HARD_FLOAT"
8385 enum rtx_code code = GET_CODE (operands[1]);
8388 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8389 &XEXP (operands[1], 1)))
8392 code = GET_CODE (operands[1]);
8393 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8394 XEXP (operands[1], 1), NULL_RTX);
8395 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);

;; movdfcc: double-float conditional move; same rewrite, additionally
;; requires double-precision VFP.
8399 (define_expand "movdfcc"
8400 [(set (match_operand:DF 0 "s_register_operand")
8401 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
8402 (match_operand:DF 2 "s_register_operand")
8403 (match_operand:DF 3 "s_register_operand")))]
8404 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
8407 enum rtx_code code = GET_CODE (operands[1]);
8410 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8411 &XEXP (operands[1], 1)))
8413 code = GET_CODE (operands[1]);
8414 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8415 XEXP (operands[1], 1), NULL_RTX);
8416 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; *cmov<mode>: VSEL-based conditional move for SF/DF (ARMv8 VFP).
;; VSEL only encodes a subset of conditions, so the output code either
;; emits vsel%d1 directly or swaps the two source operands and uses the
;; inverse condition (vsel%D1).
8420 (define_insn "*cmov<mode>"
8421 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
8422 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
8423 [(match_operand 2 "cc_register" "") (const_int 0)])
8424 (match_operand:SDF 3 "s_register_operand"
8426 (match_operand:SDF 4 "s_register_operand"
8427 "<F_constraint>")))]
8428 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
8431 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
8438 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
8443 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
8449 [(set_attr "conds" "use")
8450 (set_attr "type" "fcsel")]

;; *cmovhf: VSEL conditional move for half floats; same direct/swapped
;; emission strategy as *cmov<mode>.
8453 (define_insn "*cmovhf"
8454 [(set (match_operand:HF 0 "s_register_operand" "=t")
8455 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
8456 [(match_operand 2 "cc_register" "") (const_int 0)])
8457 (match_operand:HF 3 "s_register_operand" "t")
8458 (match_operand:HF 4 "s_register_operand" "t")))]
8459 "TARGET_VFP_FP16INST"
8462 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
8469 return \"vsel%d1.f16\\t%0, %3, %4\";
8474 return \"vsel%D1.f16\\t%0, %4, %3\";
8480 [(set_attr "conds" "use")
8481 (set_attr "type" "fcsel")]

;; *movsicc_insn: SImode conditional move on the CC register.  After
;; reload it splits into one or two conditionally-executed SETs; when
;; both arms are needed the second uses the reversed condition, where
;; FP CC modes must be reversed with
;; reverse_condition_maybe_unordered to keep NaN semantics.
8484 (define_insn_and_split "*movsicc_insn"
8485 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8487 (match_operator 3 "arm_comparison_operator"
8488 [(match_operand 4 "cc_register" "") (const_int 0)])
8489 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8490 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8501 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8502 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8503 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8504 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8505 "&& reload_completed"
8508 enum rtx_code rev_code;
8512 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
8514 gen_rtx_SET (operands[0], operands[1])));
8516 rev_code = GET_CODE (operands[3]);
8517 mode = GET_MODE (operands[4]);
8518 if (mode == CCFPmode || mode == CCFPEmode)
8519 rev_code = reverse_condition_maybe_unordered (rev_code);
8521 rev_code = reverse_condition (rev_code);
8523 rev_cond = gen_rtx_fmt_ee (rev_code,
8527 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
8529 gen_rtx_SET (operands[0], operands[2])));
8532 [(set_attr "length" "4,4,4,4,8,8,8,8")
8533 (set_attr "conds" "use")
8534 (set_attr_alternative "type"
8535 [(if_then_else (match_operand 2 "const_int_operand" "")
8536 (const_string "mov_imm")
8537 (const_string "mov_reg"))
8538 (const_string "mvn_imm")
8539 (if_then_else (match_operand 1 "const_int_operand" "")
8540 (const_string "mov_imm")
8541 (const_string "mov_reg"))
8542 (const_string "mvn_imm")
8543 (const_string "multiple")
8544 (const_string "multiple")
8545 (const_string "multiple")
8546 (const_string "multiple")])]

;; *movsfcc_soft_insn: SFmode conditional move for soft-float targets;
;; the float travels in core registers, so a plain conditional MOV
;; suffices (one arm is tied to the destination in each alternative).
8549 (define_insn "*movsfcc_soft_insn"
8550 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8551 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8552 [(match_operand 4 "cc_register" "") (const_int 0)])
8553 (match_operand:SF 1 "s_register_operand" "0,r")
8554 (match_operand:SF 2 "s_register_operand" "r,0")))]
8555 "TARGET_ARM && TARGET_SOFT_FLOAT"
8559 [(set_attr "conds" "use")
8560 (set_attr "type" "mov_reg")]
8564 ;; Jump and linkage insns

;; jump: generic unconditional jump expander.
8566 (define_expand "jump"
8568 (label_ref (match_operand 0 "" "")))]

;; *arm_jump: unconditional branch.  The ccfsm logic lets the
;; conditional-execution state machine absorb the branch; the length
;; attribute selects the short Thumb-2 encoding when the target is
;; within -2044..2048 bytes of the PC.
8573 (define_insn "*arm_jump"
8575 (label_ref (match_operand 0 "" "")))]
8579 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8581 arm_ccfsm_state += 2;
8584 return \"b%?\\t%l0\";
8587 [(set_attr "predicable" "yes")
8588 (set (attr "length")
8590 (and (match_test "TARGET_THUMB2")
8591 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8592 (le (minus (match_dup 0) (pc)) (const_int 2048))))
8595 (set_attr "type" "branch")]
;; call: expand a call with no return value.  Forces long calls and
;; FDPIC indirect calls through a register, routes CMSE non-secure
;; calls to the nonsecure variant, and restores the FDPIC register (r9)
;; afterwards when needed.
8598 (define_expand "call"
8599 [(parallel [(call (match_operand 0 "memory_operand")
8600 (match_operand 1 "general_operand"))
8601 (use (match_operand 2 "" ""))
8602 (clobber (reg:SI LR_REGNUM))])]
8607 tree addr = MEM_EXPR (operands[0]);
8609 /* In an untyped call, we can get NULL for operand 2. */
8610 if (operands[2] == NULL_RTX)
8611 operands[2] = const0_rtx;
8613 /* Decide if we should generate indirect calls by loading the
8614 32-bit address of the callee into a register before performing the
8616 callee = XEXP (operands[0], 0);
8617 if (GET_CODE (callee) == SYMBOL_REF
8618 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8620 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8622 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
8623 /* Indirect call: set r9 with FDPIC value of callee. */
8624 XEXP (operands[0], 0)
8625 = arm_load_function_descriptor (XEXP (operands[0], 0));
8627 if (detect_cmse_nonsecure_call (addr))
8629 pat = gen_nonsecure_call_internal (operands[0], operands[1],
8631 emit_call_insn (pat);
8635 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8636 arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
8639 /* Restore FDPIC register (r9) after call. */
8642 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8643 rtx initial_fdpic_reg
8644 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
8646 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
8647 initial_fdpic_reg));

;; restore_pic_register_after_call: reload the PIC/FDPIC register from
;; its saved value (register or memory alternative) after a call, tied
;; to the old value via the unspec so it cannot be deleted.
8654 (define_insn "restore_pic_register_after_call"
8655 [(set (match_operand:SI 0 "s_register_operand" "+r,r")
8656 (unspec:SI [(match_dup 0)
8657 (match_operand:SI 1 "nonimmediate_operand" "r,m")]
8658 UNSPEC_PIC_RESTORE))]

;; call_internal: plain call pattern matched by the insns below.
8665 (define_expand "call_internal"
8666 [(parallel [(call (match_operand 0 "memory_operand")
8667 (match_operand 1 "general_operand"))
8668 (use (match_operand 2 "" ""))
8669 (clobber (reg:SI LR_REGNUM))])])

;; nonsecure_call_internal: CMSE non-secure call.  Without the FPCXT
;; extension the target address must live in r4 (the libcall/bxns
;; protocol); the address is always forced into a register.
8671 (define_expand "nonsecure_call_internal"
8672 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
8673 UNSPEC_NONSECURE_MEM)
8674 (match_operand 1 "general_operand"))
8675 (use (match_operand 2 "" ""))
8676 (clobber (reg:SI LR_REGNUM))])]
8679 rtx addr = XEXP (operands[0], 0);
8680 rtx tmp = REG_P (addr) ? addr : force_reg (SImode, addr);
8682 if (!TARGET_HAVE_FPCXT_CMSE)
8684 rtx r4 = gen_rtx_REG (SImode, R4_REGNUM);
8685 emit_move_insn (r4, tmp);
8690 operands[0] = replace_equiv_address (operands[0], tmp);

;; *call_reg_armv5: indirect call through a register on ARMv5T+ (BLX
;; available).
8694 (define_insn "*call_reg_armv5"
8695 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8696 (match_operand 1 "" ""))
8697 (use (match_operand 2 "" ""))
8698 (clobber (reg:SI LR_REGNUM))]
8699 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
8701 [(set_attr "type" "call")]

;; *call_reg_arm: indirect call on pre-ARMv5T cores; output_call emits
;; an explicit mov lr,pc sequence, hence the worst-case length of 12.
8704 (define_insn "*call_reg_arm"
8705 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8706 (match_operand 1 "" ""))
8707 (use (match_operand 2 "" ""))
8708 (clobber (reg:SI LR_REGNUM))]
8709 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
8711 return output_call (operands);
8713 ;; length is worst case, normally it is only two
8714 [(set_attr "length" "12")
8715 (set_attr "type" "call")]
;; call_value: expand a call that returns a value; identical strategy
;; to "call" above (long-call/FDPIC handling, CMSE routing, FDPIC r9
;; restore) with the callee in operand 1 and the result in operand 0.
8719 (define_expand "call_value"
8720 [(parallel [(set (match_operand 0 "" "")
8721 (call (match_operand 1 "memory_operand")
8722 (match_operand 2 "general_operand")))
8723 (use (match_operand 3 "" ""))
8724 (clobber (reg:SI LR_REGNUM))])]
8729 tree addr = MEM_EXPR (operands[1]);
8731 /* In an untyped call, we can get NULL for operand 2. */
8732 if (operands[3] == 0)
8733 operands[3] = const0_rtx;
8735 /* Decide if we should generate indirect calls by loading the
8736 32-bit address of the callee into a register before performing the
8738 callee = XEXP (operands[1], 0);
8739 if (GET_CODE (callee) == SYMBOL_REF
8740 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8742 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8744 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
8745 /* Indirect call: set r9 with FDPIC value of callee. */
8746 XEXP (operands[1], 0)
8747 = arm_load_function_descriptor (XEXP (operands[1], 0));
8749 if (detect_cmse_nonsecure_call (addr))
8751 pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
8752 operands[2], operands[3]);
8753 emit_call_insn (pat);
8757 pat = gen_call_value_internal (operands[0], operands[1],
8758 operands[2], operands[3]);
8759 arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
8762 /* Restore FDPIC register (r9) after call. */
8765 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8766 rtx initial_fdpic_reg
8767 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
8769 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
8770 initial_fdpic_reg));

;; call_value_internal: plain value-returning call pattern matched by
;; the insns below.
8777 (define_expand "call_value_internal"
8778 [(parallel [(set (match_operand 0 "" "")
8779 (call (match_operand 1 "memory_operand")
8780 (match_operand 2 "general_operand")))
8781 (use (match_operand 3 "" ""))
8782 (clobber (reg:SI LR_REGNUM))])])

;; nonsecure_call_value_internal: CMSE non-secure value-returning call;
;; without FPCXT the callee address is copied into r4, as in
;; nonsecure_call_internal.
8784 (define_expand "nonsecure_call_value_internal"
8785 [(parallel [(set (match_operand 0 "" "")
8786 (call (unspec:SI [(match_operand 1 "memory_operand")]
8787 UNSPEC_NONSECURE_MEM)
8788 (match_operand 2 "general_operand")))
8789 (use (match_operand 3 "" ""))
8790 (clobber (reg:SI LR_REGNUM))])]
8794 if (!TARGET_HAVE_FPCXT_CMSE)
8797 copy_to_suggested_reg (XEXP (operands[1], 0),
8798 gen_rtx_REG (SImode, R4_REGNUM),
8801 operands[1] = replace_equiv_address (operands[1], tmp);

;; *call_value_reg_armv5: indirect value-returning call on ARMv5T+
;; (BLX available).
8805 (define_insn "*call_value_reg_armv5"
8806 [(set (match_operand 0 "" "")
8807 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8808 (match_operand 2 "" "")))
8809 (use (match_operand 3 "" ""))
8810 (clobber (reg:SI LR_REGNUM))]
8811 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
8813 [(set_attr "type" "call")]

;; *call_value_reg_arm: pre-ARMv5T indirect value-returning call;
;; output_call starts at the callee operand, hence &operands[1].
8816 (define_insn "*call_value_reg_arm"
8817 [(set (match_operand 0 "" "")
8818 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8819 (match_operand 2 "" "")))
8820 (use (match_operand 3 "" ""))
8821 (clobber (reg:SI LR_REGNUM))]
8822 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
8824 return output_call (&operands[1]);
8826 [(set_attr "length" "12")
8827 (set_attr "type" "call")]
8830 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8831 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.

;; *call_symbol: direct BL/BLX to a symbol that is not a long call.
;; For a local symbol whose target instruction set differs from the
;; caller's (arm_change_mode_p) a BLX is used so the mode switch
;; happens at the call; PLT relocation is appended when needed.
8833 (define_insn "*call_symbol"
8834 [(call (mem:SI (match_operand:SI 0 "" ""))
8835 (match_operand 1 "" ""))
8836 (use (match_operand 2 "" ""))
8837 (clobber (reg:SI LR_REGNUM))]
8839 && !SIBLING_CALL_P (insn)
8840 && (GET_CODE (operands[0]) == SYMBOL_REF)
8841 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8844 rtx op = operands[0];
8846 /* Switch mode now when possible. */
8847 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8848 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8849 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
8851 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8853 [(set_attr "type" "call")]

;; *call_value_symbol: value-returning counterpart of *call_symbol;
;; callee is operand 1, result is operand 0.
8856 (define_insn "*call_value_symbol"
8857 [(set (match_operand 0 "" "")
8858 (call (mem:SI (match_operand:SI 1 "" ""))
8859 (match_operand:SI 2 "" "")))
8860 (use (match_operand 3 "" ""))
8861 (clobber (reg:SI LR_REGNUM))]
8863 && !SIBLING_CALL_P (insn)
8864 && (GET_CODE (operands[1]) == SYMBOL_REF)
8865 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8868 rtx op = operands[1];
8870 /* Switch mode now when possible. */
8871 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8872 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8873 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
8875 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8877 [(set_attr "type" "call")]
8880 (define_expand "sibcall_internal"
8881 [(parallel [(call (match_operand 0 "memory_operand")
8882 (match_operand 1 "general_operand"))
8884 (use (match_operand 2 "" ""))])])
8886 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8887 (define_expand "sibcall"
8888 [(parallel [(call (match_operand 0 "memory_operand")
8889 (match_operand 1 "general_operand"))
8891 (use (match_operand 2 "" ""))])]
8897 if ((!REG_P (XEXP (operands[0], 0))
8898 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
8899 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
8900 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
8901 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
8903 if (operands[2] == NULL_RTX)
8904 operands[2] = const0_rtx;
8906 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
8907 arm_emit_call_insn (pat, operands[0], true);
8912 (define_expand "sibcall_value_internal"
8913 [(parallel [(set (match_operand 0 "" "")
8914 (call (match_operand 1 "memory_operand")
8915 (match_operand 2 "general_operand")))
8917 (use (match_operand 3 "" ""))])])
;; Named "sibcall_value" expander — value-returning analogue of "sibcall"
;; above.  The callee address (XEXP of operand 1) is forced into a register
;; under the same non-register / long-call conditions; a NULL cookie
;; (operand 3) defaults to const0_rtx.
;; NOTE(review): condition string and closing lines missing from extraction.
8919 (define_expand "sibcall_value"
8920 [(parallel [(set (match_operand 0 "" "")
8921 (call (match_operand 1 "memory_operand")
8922 (match_operand 2 "general_operand")))
8924 (use (match_operand 3 "" ""))])]
8930 if ((!REG_P (XEXP (operands[1], 0))
8931 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
8932 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
8933 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
8934 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
8936 if (operands[3] == NULL_RTX)
8937 operands[3] = const0_rtx;
8939 pat = gen_sibcall_value_internal (operands[0], operands[1],
8940 operands[2], operands[3]);
8941 arm_emit_call_insn (pat, operands[1], true);
;; Sibling-call insn proper.  Alternative 1 (US constraint) is a direct
;; branch: "b" with "(PLT)" appended when NEED_PLT_RELOC.  Alternative 0 is
;; an indirect register sibling call: "bx" when arm_arch5t/arm_arch4t,
;; otherwise a direct write to pc with "mov".  Only valid when the insn is
;; marked as a sibling call (SIBLING_CALL_P).
8946 (define_insn "*sibcall_insn"
8947 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
8948 (match_operand 1 "" ""))
8950 (use (match_operand 2 "" ""))]
8951 "TARGET_32BIT && SIBLING_CALL_P (insn)"
8953 if (which_alternative == 1)
8954 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8957 if (arm_arch5t || arm_arch4t)
8958 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
8960 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
8963 [(set_attr "type" "call")]
;; Value-returning sibling-call insn — same alternatives and output logic as
;; *sibcall_insn above, with the call target in operand 1 and the result set
;; into operand 0.
8966 (define_insn "*sibcall_value_insn"
8967 [(set (match_operand 0 "" "")
8968 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
8969 (match_operand 2 "" "")))
8971 (use (match_operand 3 "" ""))]
8972 "TARGET_32BIT && SIBLING_CALL_P (insn)"
8974 if (which_alternative == 1)
8975 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8978 if (arm_arch5t || arm_arch4t)
8979 return \"bx%?\\t%1\";
8981 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
8984 [(set_attr "type" "call")]
;; Expander for (simple_)return, iterated via <return_str>.  Enabled for
;; ARM state, or for Thumb-2 when the function is a normal (non-interrupt,
;; non-stack-aligning) function; Thumb-2 expansion is delegated to
;; thumb2_expand_return.
;; NOTE(review): extraction gaps — the expander body is incomplete here.
8987 (define_expand "<return_str>return"
8989 "(TARGET_ARM || (TARGET_THUMB2
8990 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
8991 && !IS_STACKALIGN (arm_current_func_type ())))
8992 <return_cond_false>"
8997 thumb2_expand_return (<return_simple_p>);
9004 ;; Often the return insn will be the same as loading from memory, so set attr
;; Unconditional ARM-state return; emission goes through
;; output_return_instruction.  The arm_ccfsm_state == 2 check interacts with
;; the conditional-execution state machine in arm.cc (the state is bumped by
;; 2 so final knows the return was handled).
9005 (define_insn "*arm_return"
9007 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
9010 if (arm_ccfsm_state == 2)
9012 arm_ccfsm_state += 2;
9015 return output_return_instruction (const_true_rtx, true, false, false);
9017 [(set_attr "type" "load_4")
9018 (set_attr "length" "12")
9019 (set_attr "predicable" "yes")]
;; Conditional return: returns when the comparison in operand 0 (against the
;; CC register) holds.
9022 (define_insn "*cond_<return_str>return"
9024 (if_then_else (match_operator 0 "arm_comparison_operator"
9025 [(match_operand 1 "cc_register" "") (const_int 0)])
9028 "TARGET_ARM <return_cond_true>"
9031 if (arm_ccfsm_state == 2)
9033 arm_ccfsm_state += 2;
9036 return output_return_instruction (operands[0], true, false,
9039 [(set_attr "conds" "use")
9040 (set_attr "length" "12")
9041 (set_attr "type" "load_4")]
;; Inverted form: returns when the condition does NOT hold (note the third
;; argument to output_return_instruction is true here).
9044 (define_insn "*cond_<return_str>return_inverted"
9046 (if_then_else (match_operator 0 "arm_comparison_operator"
9047 [(match_operand 1 "cc_register" "") (const_int 0)])
9050 "TARGET_ARM <return_cond_true>"
9053 if (arm_ccfsm_state == 2)
9055 arm_ccfsm_state += 2;
9058 return output_return_instruction (operands[0], true, true,
9061 [(set_attr "conds" "use")
9062 (set_attr "length" "12")
9063 (set_attr "type" "load_4")]
;; simple_return variant (last argument true): a plain branch-style return,
;; 4 bytes long.
9066 (define_insn "*arm_simple_return"
9071 if (arm_ccfsm_state == 2)
9073 arm_ccfsm_state += 2;
9076 return output_return_instruction (const_true_rtx, true, false, true);
9078 [(set_attr "type" "branch")
9079 (set_attr "length" "4")
9080 (set_attr "predicable" "yes")]
9083 ;; Generate a sequence of instructions to determine if the processor is
9084 ;; in 26-bit or 32-bit mode, and return the appropriate return address
;; The expander sets CC via the UNSPEC_CHECK_ARCH compare and then selects
;; a return-address mask: 0x03fffffc (26-bit mode) per the visible arm, the
;; other arm being on a line lost to extraction.
9087 (define_expand "return_addr_mask"
9089 (compare:CC_NZ (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
9091 (set (match_operand:SI 0 "s_register_operand")
9092 (if_then_else:SI (eq (match_dup 1) (const_int 0))
9094 (const_int 67108860)))] ; 0x03fffffc
9097 operands[1] = gen_rtx_REG (CC_NZmode, CC_REGNUM);
;; "teq r0, r0" sets Z; "teq pc, pc" only sets Z in 32-bit mode, so the pair
;; distinguishes 26-bit from 32-bit program-counter modes.
9100 (define_insn "*check_arch2"
9101 [(set (match_operand:CC_NZ 0 "cc_register" "")
9102 (compare:CC_NZ (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
9105 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
9106 [(set_attr "length" "8")
9107 (set_attr "conds" "set")
9108 (set_attr "type" "multiple")]
9111 ;; Call subroutine returning any type.
;; Builds a call_value whose result PARALLEL lists every register in
;; operand 2's result vector (widening r0 to TImode so up to four core
;; registers are treated as live), then stores each returned register into
;; the BLKmode result block at operand 1.  The r0 store uses a
;; store-multiple (write-back form on Thumb).  Disabled for FDPIC.
9113 (define_expand "untyped_call"
9114 [(parallel [(call (match_operand 0 "" "")
9116 (match_operand 1 "" "")
9117 (match_operand 2 "" "")])]
9118 "TARGET_EITHER && !TARGET_FDPIC"
9122 rtx par = gen_rtx_PARALLEL (VOIDmode,
9123 rtvec_alloc (XVECLEN (operands[2], 0)));
9124 rtx addr = gen_reg_rtx (Pmode);
9128 emit_move_insn (addr, XEXP (operands[1], 0));
9129 mem = change_address (operands[1], BLKmode, addr);
9131 for (i = 0; i < XVECLEN (operands[2], 0); i++)
9133 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
9135 /* Default code only uses r0 as a return value, but we could
9136 be using anything up to 4 registers.  */
9137 if (REGNO (src) == R0_REGNUM)
9138 src = gen_rtx_REG (TImode, R0_REGNUM);
9140 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
9142 size += GET_MODE_SIZE (GET_MODE (src));
9145 emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
9149 for (i = 0; i < XVECLEN (par, 0); i++)
9151 HOST_WIDE_INT offset = 0;
9152 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
9155 emit_move_insn (addr, plus_constant (Pmode, addr, size));
9157 mem = change_address (mem, GET_MODE (reg), NULL);
9158 if (REGNO (reg) == R0_REGNUM)
9160 /* On thumb we have to use a write-back instruction.  */
9161 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
9162 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
9163 size = TARGET_ARM ? 16 : 0;
9167 emit_move_insn (mem, reg);
9168 size = GET_MODE_SIZE (GET_MODE (reg));
9172 /* The optimizer does not know that the call sets the function value
9173 registers we stored in the result block.  We avoid problems by
9174 claiming that all hard registers are used and clobbered at this
9176 emit_insn (gen_blockage ());
;; Inverse of untyped_call: reload each function-value register from the
;; BLKmode result block (operand 0), emit USEs so the registers stay live
;; across the return, then emit a naked return.  r0 again uses a
;; load-multiple (write-back form on Thumb).  Disabled for FDPIC.
9182 (define_expand "untyped_return"
9183 [(match_operand:BLK 0 "memory_operand")
9184 (match_operand 1 "" "")]
9185 "TARGET_EITHER && !TARGET_FDPIC"
9189 rtx addr = gen_reg_rtx (Pmode);
9193 emit_move_insn (addr, XEXP (operands[0], 0));
9194 mem = change_address (operands[0], BLKmode, addr);
9196 for (i = 0; i < XVECLEN (operands[1], 0); i++)
9198 HOST_WIDE_INT offset = 0;
9199 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
9202 emit_move_insn (addr, plus_constant (Pmode, addr, size));
9204 mem = change_address (mem, GET_MODE (reg), NULL);
9205 if (REGNO (reg) == R0_REGNUM)
9207 /* On thumb we have to use a write-back instruction.  */
9208 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
9209 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
9210 size = TARGET_ARM ? 16 : 0;
9214 emit_move_insn (reg, mem);
9215 size = GET_MODE_SIZE (GET_MODE (reg));
9219 /* Emit USE insns before the return.  */
9220 for (i = 0; i < XVECLEN (operands[1], 0); i++)
9221 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
9223 /* Construct the return.  */
9224 expand_naked_return ();
9230 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
9231 ;; all of memory.  This blocks insns from being moved across this point.
;; Zero-length scheduling barrier (emits no machine instruction).
9233 (define_insn "blockage"
9234 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
9237 [(set_attr "length" "0")
9238 (set_attr "type" "block")]
9241 ;; Since we hard code r0 here use the 'o' constraint to prevent
9242 ;; provoking undefined behaviour in the hardware with putting out
9243 ;; auto-increment operations with potentially r0 as the base register.
;; Single stack-probe store (used by -fstack-check / stack-clash probing).
9244 (define_insn "probe_stack"
9245 [(set (match_operand:SI 0 "memory_operand" "=o")
9246 (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
9249 [(set_attr "type" "store_4")
9250 (set_attr "predicable" "yes")]
;; Probe a whole range of stack pages; assembly is produced by
;; output_probe_stack_range (a cmp/store loop — clobbers the condition
;; codes, hence conds "clob").
9253 (define_insn "probe_stack_range"
9254 [(set (match_operand:SI 0 "register_operand" "=r")
9255 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
9256 (match_operand:SI 2 "register_operand" "r")]
9257 VUNSPEC_PROBE_STACK_RANGE))]
9260 return output_probe_stack_range (operands[0], operands[2]);
9262 [(set_attr "type" "multiple")
9263 (set_attr "conds" "clob")]
9266 ;; Named patterns for stack smashing protection.
;; SSP_GLOBAL flavour: copy the global guard value into the frame's canary
;; slot.  The combined expand keeps the guard's MEM inside the unspec so LRA
;; cannot reload it; the insn_and_split below materialises the address
;; (PIC-legitimized or via the constant pool) and then emits the real set.
9267 (define_expand "stack_protect_combined_set"
9269 [(set (match_operand:SI 0 "memory_operand")
9270 (unspec:SI [(match_operand:SI 1 "guard_operand")]
9272 (clobber (match_scratch:SI 2 ""))
9273 (clobber (match_scratch:SI 3 ""))])]
9274 "arm_stack_protector_guard == SSP_GLOBAL"
9278 ;; Use a separate insn from the above expand to be able to have the mem outside
9279 ;; the operand #1 when register allocation comes.  This is needed to avoid LRA
9280 ;; try to reload the guard since we need to control how PIC access is done in
9281 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
9282 ;; legitimize_pic_address ()).
9283 (define_insn_and_split "*stack_protect_combined_set_insn"
9284 [(set (match_operand:SI 0 "memory_operand" "=m,m")
9285 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
9287 (clobber (match_scratch:SI 2 "=&l,&r"))
9288 (clobber (match_scratch:SI 3 "=&l,&r"))]
9292 [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
9294 (clobber (match_dup 2))])]
;; Split-time preparation: pick the PIC register (FDPIC_REGNUM under FDPIC,
;; else scratch 3), force GOT-base computation now, or fall back to the
;; literal pool when not PIC.  NOTE(review): the surrounding if/else lines
;; are partly missing from this extraction.
9302 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
9304 pic_reg = operands[3];
9306 /* Forces recomputing of GOT base now.  */
9307 legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
9308 true /*compute_now*/);
9312 if (address_operand (operands[1], SImode))
9313 operands[2] = operands[1];
9316 rtx mem = force_const_mem (SImode, operands[1]);
9317 if (!general_operand (mem, SImode))
9319 emit_move_insn (operands[2], XEXP (mem, 0));
9320 mem = replace_equiv_address (mem, operands[2], false);
9322 emit_move_insn (operands[2], mem);
9326 [(set_attr "arch" "t1,32")]
9329 ;; DO NOT SPLIT THIS INSN.  It's important for security reasons that the
9330 ;; canary value does not live beyond the life of this sequence.
;; Loads the guard, stores it to the canary slot, then immediately zeroes
;; the register so the canary never survives in a register.
9331 (define_insn "*stack_protect_set_insn"
9332 [(set (match_operand:SI 0 "memory_operand" "=m,m")
9333 (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
9335 (clobber (match_dup 1))]
9338 ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
9339 ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
9340 [(set_attr "length" "8,12")
9341 (set_attr "conds" "clob,nocond")
9342 (set_attr "type" "multiple")
9343 (set_attr "arch" "t1,32")]
;; SSP_GLOBAL flavour: compare the frame canary against the global guard
;; and branch to operand 2 on equality.  Same combined-expand / split
;; structure as stack_protect_combined_set above, for the same LRA/PIC
;; reasons.
9346 (define_expand "stack_protect_combined_test"
9350 (eq (match_operand:SI 0 "memory_operand")
9351 (unspec:SI [(match_operand:SI 1 "guard_operand")]
9353 (label_ref (match_operand 2))
9355 (clobber (match_scratch:SI 3 ""))
9356 (clobber (match_scratch:SI 4 ""))
9357 (clobber (reg:CC CC_REGNUM))])]
9358 "arm_stack_protector_guard == SSP_GLOBAL"
9362 ;; Use a separate insn from the above expand to be able to have the mem outside
9363 ;; the operand #1 when register allocation comes.  This is needed to avoid LRA
9364 ;; try to reload the guard since we need to control how PIC access is done in
9365 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
9366 ;; legitimize_pic_address ()).
9367 (define_insn_and_split "*stack_protect_combined_test_insn"
9370 (eq (match_operand:SI 0 "memory_operand" "m,m")
9371 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
9373 (label_ref (match_operand 2))
9375 (clobber (match_scratch:SI 3 "=&l,&r"))
9376 (clobber (match_scratch:SI 4 "=&l,&r"))
9377 (clobber (reg:CC CC_REGNUM))]
;; Split: legitimize the guard address (PIC / constant pool, as in the set
;; pattern), then emit either the 32-bit test insn plus a CC_Z conditional
;; branch, or the Thumb-1 test insn plus a cbranchsi4 on the result register.
;; NOTE(review): several control-flow lines are missing from this extraction.
9390 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
9392 pic_reg = operands[4];
9394 /* Forces recomputing of GOT base now.  */
9395 legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
9396 true /*compute_now*/);
9400 if (address_operand (operands[1], SImode))
9401 operands[3] = operands[1];
9404 rtx mem = force_const_mem (SImode, operands[1]);
9405 if (!general_operand (mem, SImode))
9407 emit_move_insn (operands[3], XEXP (mem, 0));
9408 mem = replace_equiv_address (mem, operands[3], false);
9410 emit_move_insn (operands[3], mem);
9415 emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
9417 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
9418 eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
9419 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
9423 emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
9425 eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
9426 emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
9431 [(set_attr "arch" "t1,32")]
9434 ;; DO NOT SPLIT THIS PATTERN.  It is important for security reasons that the
9435 ;; canary value does not live beyond the end of this sequence.
;; ldr/ldr/eors compares guard and canary setting Z, then mov #0 wipes the
;; canary copy from the register.
9436 (define_insn "arm_stack_protect_test_insn"
9437 [(set (reg:CC_Z CC_REGNUM)
9438 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
9439 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
9442 (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
9443 (clobber (match_dup 2))]
9445 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0\;mov\t%2, #0"
9446 [(set_attr "length" "12,16")
9447 (set_attr "conds" "set")
9448 (set_attr "type" "multiple")
9449 (set_attr "arch" "t,32")]
;; SSP_TLSREG flavour: the guard lives at an offset from the TLS register;
;; arm_stack_protect_tls_canary_mem builds the MEM for it.
9452 (define_expand "stack_protect_set"
9453 [(match_operand:SI 0 "memory_operand")
9454 (match_operand:SI 1 "memory_operand")]
9455 "arm_stack_protector_guard == SSP_TLSREG"
9458 operands[1] = arm_stack_protect_tls_canary_mem (false /* reload */);
9459 emit_insn (gen_stack_protect_set_tls (operands[0], operands[1]));
9464 ;; DO NOT SPLIT THIS PATTERN.  It is important for security reasons that the
9465 ;; canary value does not live beyond the life of this sequence.
;; ldr guard / str to canary slot / zero the scratch register.
9466 (define_insn "stack_protect_set_tls"
9467 [(set (match_operand:SI 0 "memory_operand" "=m")
9468 (unspec:SI [(match_operand:SI 1 "memory_operand" "m")]
9470 (set (match_scratch:SI 2 "=&r") (const_int 0))]
9472 "ldr\\t%2, %1\;str\\t%2, %0\;mov\t%2, #0"
9473 [(set_attr "length" "12")
9474 (set_attr "conds" "unconditional")
9475 (set_attr "type" "multiple")]
;; Test expander: compare canary (operand 0) with the TLS guard and branch
;; to operand 2 on equality via a CC_Z conditional branch.
9478 (define_expand "stack_protect_test"
9479 [(match_operand:SI 0 "memory_operand")
9480 (match_operand:SI 1 "memory_operand")
9481 (match_operand:SI 2)]
9482 "arm_stack_protector_guard == SSP_TLSREG"
9485 operands[1] = arm_stack_protect_tls_canary_mem (true /* reload */);
9486 emit_insn (gen_stack_protect_test_tls (operands[0], operands[1]));
9488 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
9489 rtx eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
9490 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
;; ldr/ldr/eors sets Z on match; trailing mov #0 wipes the guard copy.
9495 (define_insn "stack_protect_test_tls"
9496 [(set (reg:CC_Z CC_REGNUM)
9497 (compare:CC_Z (unspec:SI [(match_operand:SI 0 "memory_operand" "m")
9498 (match_operand:SI 1 "memory_operand" "m")]
9501 (clobber (match_scratch:SI 2 "=&r"))
9502 (clobber (match_scratch:SI 3 "=&r"))]
9504 "ldr\t%2, %0\;ldr\t%3, %1\;eors\t%2, %3, %2\;mov\t%3, #0"
9505 [(set_attr "length" "16")
9506 (set_attr "conds" "set")
9507 (set_attr "type" "multiple")]
;; Switch-statement dispatch.  The expander normalises a non-zero lower
;; bound by adding its negation to the index, chooses the per-target
;; internal pattern (ARM / Thumb-1 PIC / Thumb-2 PIC / Thumb-2), forces the
;; range into a register when the predicate demands it, and emits the jump.
9510 (define_expand "casesi"
9511 [(match_operand:SI 0 "s_register_operand")	; index to jump on
9512 (match_operand:SI 1 "const_int_operand")	; lower bound
9513 (match_operand:SI 2 "const_int_operand")	; total range
9514 (match_operand:SI 3 "" "")			; table label
9515 (match_operand:SI 4 "" "")]			; Out of range label
9516 "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
9519 enum insn_code code;
9520 if (operands[1] != const0_rtx)
9522 rtx reg = gen_reg_rtx (SImode);
9524 emit_insn (gen_addsi3 (reg, operands[0],
9525 gen_int_mode (-INTVAL (operands[1]),
9531 code = CODE_FOR_arm_casesi_internal;
9532 else if (TARGET_THUMB1)
9533 code = CODE_FOR_thumb1_casesi_internal_pic;
9535 code = CODE_FOR_thumb2_casesi_internal_pic;
9537 code = CODE_FOR_thumb2_casesi_internal;
9539 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
9540 operands[2] = force_reg (SImode, operands[2]);
9542 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
9543 operands[3], operands[4]));
9548 ;; The USE in this pattern is needed to tell flow analysis that this is
9549 ;; a CASESI insn.  It has no other purpose.
;; Expand-time pattern: builds the table MEM (index*4 + table label) in
;; operand 4 and marks it read-only and non-trapping.
9550 (define_expand "arm_casesi_internal"
9551 [(parallel [(set (pc)
9553 (leu (match_operand:SI 0 "s_register_operand")
9554 (match_operand:SI 1 "arm_rhs_operand"))
9556 (label_ref:SI (match_operand 3 ""))))
9557 (clobber (reg:CC CC_REGNUM))
9558 (use (label_ref:SI (match_operand 2 "")))])]
9561 operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
9562 operands[4] = gen_rtx_PLUS (SImode, operands[4],
9563 gen_rtx_LABEL_REF (SImode, operands[2]));
9564 operands[4] = gen_rtx_MEM (SImode, operands[4]);
9565 MEM_READONLY_P (operands[4]) = 1;
9566 MEM_NOTRAP_P (operands[4]) = 1;
;; The insn itself: cmp against the range, then either addls pc (relative
;; table) or ldrls pc (absolute table), falling through to b %l3 when out
;; of range.  Clobbers CC (conds "clob").
9569 (define_insn "*arm_casesi_internal"
9570 [(parallel [(set (pc)
9572 (leu (match_operand:SI 0 "s_register_operand" "r")
9573 (match_operand:SI 1 "arm_rhs_operand" "rI"))
9574 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
9575 (label_ref:SI (match_operand 2 "" ""))))
9576 (label_ref:SI (match_operand 3 "" ""))))
9577 (clobber (reg:CC CC_REGNUM))
9578 (use (label_ref:SI (match_dup 2)))])]
9582 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
9583 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
9585 [(set_attr "conds" "clob")
9586 (set_attr "length" "12")
9587 (set_attr "type" "multiple")]
;; Indirect jumps.  Thumb-2 has no "mov pc, reg", so the expander ORs in the
;; low (Thumb) bit and uses bx instead.
9590 (define_expand "indirect_jump"
9592 (match_operand:SI 0 "s_register_operand"))]
9595 /* Thumb-2 doesn't have mov pc, reg.  Explicitly set the low bit of the
9596 address and use bx.  */
9600 tmp = gen_reg_rtx (SImode);
9601 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
9607 ;; NB Never uses BX.
9608 (define_insn "*arm_indirect_jump"
9610 (match_operand:SI 0 "s_register_operand" "r"))]
9612 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
9613 [(set_attr "predicable" "yes")
9614 (set_attr "type" "branch")]
;; Jump through a memory operand by loading pc directly.
9617 (define_insn "*load_indirect_jump"
9619 (match_operand:SI 0 "memory_operand" "m"))]
9621 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
9622 [(set_attr "type" "load_4")
9623 (set_attr "pool_range" "4096")
9624 (set_attr "neg_pool_range" "4084")
9625 (set_attr "predicable" "yes")]
;; NOTE(review): the following lines are fragments — the define_insn headers
;; (presumably a nop-style insn and a trap insn, given the .inst encodings
;; 0xe7f000f0 for ARM and 0xdeff for Thumb) were lost in extraction.
9635 [(set (attr "length")
9636 (if_then_else (eq_attr "is_thumb" "yes")
9639 (set_attr "type" "mov_reg")]
9643 [(trap_if (const_int 1) (const_int 0))]
9647 return \".inst\\t0xe7f000f0\";
9649 return \".inst\\t0xdeff\";
9651 [(set (attr "length")
9652 (if_then_else (eq_attr "is_thumb" "yes")
9655 (set_attr "type" "trap")
9656 (set_attr "conds" "unconditional")]
9660 ;; Patterns to allow combination of arithmetic, cond code and shifts
;; <arith_shift_insn> iterator combines an ALU op with a multiply by a
;; power of two, emitted as the op with an LSL by log2 (%b3) of operand 3.
9662 (define_insn "*<arith_shift_insn>_multsi"
9663 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9665 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
9666 (match_operand:SI 3 "power_of_two_operand" ""))
9667 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
9669 "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
9670 [(set_attr "predicable" "yes")
9671 (set_attr "shift" "2")
9672 (set_attr "arch" "a,t2")
9673 (set_attr "autodetect_type" "alu_shift_mul_op3")])
;; Same combination with an explicit non-multiply shift operator (%S2
;; prints the shift); MULT is excluded since the pattern above handles it.
9675 (define_insn "*<arith_shift_insn>_shiftsi"
9676 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9678 (match_operator:SI 2 "shift_nomul_operator"
9679 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9680 (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
9681 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
9682 "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
9683 "<arith_shift_insn>%?\\t%0, %1, %3%S2"
9684 [(set_attr "predicable" "yes")
9685 (set_attr "shift" "3")
9686 (set_attr "arch" "a,t2,a")
9687 (set_attr "autodetect_type" "alu_shift_operator2")])
;; Splitter: a chain of two shiftable operators where the inner one feeds a
;; shifted operand is split through the scratch register (operand 8) so each
;; half matches an op-with-shift insn.  NOTE(review): the define_split
;; header line is missing from this extraction.
9690 [(set (match_operand:SI 0 "s_register_operand" "")
9691 (match_operator:SI 1 "shiftable_operator"
9692 [(match_operator:SI 2 "shiftable_operator"
9693 [(match_operator:SI 3 "shift_operator"
9694 [(match_operand:SI 4 "s_register_operand" "")
9695 (match_operand:SI 5 "reg_or_int_operand" "")])
9696 (match_operand:SI 6 "s_register_operand" "")])
9697 (match_operand:SI 7 "arm_rhs_operand" "")]))
9698 (clobber (match_operand:SI 8 "s_register_operand" ""))]
9701 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9704 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
;; Flag-setting variant of the op-with-shift insn: %i1s prints the operator
;; with the S suffix, so CC is set from the result (conds "set").
9707 (define_insn "*arith_shiftsi_compare0"
9708 [(set (reg:CC_NZ CC_REGNUM)
9710 (match_operator:SI 1 "shiftable_operator"
9711 [(match_operator:SI 3 "shift_operator"
9712 [(match_operand:SI 4 "s_register_operand" "r,r")
9713 (match_operand:SI 5 "shift_amount_operand" "M,r")])
9714 (match_operand:SI 2 "s_register_operand" "r,r")])
9716 (set (match_operand:SI 0 "s_register_operand" "=r,r")
9717 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9720 "%i1s%?\\t%0, %2, %4%S3"
9721 [(set_attr "conds" "set")
9722 (set_attr "shift" "4")
9723 (set_attr "arch" "32,a")
9724 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; As above, but the arithmetic result itself is dead — only the flags are
;; wanted, so operand 0 is just a scratch.
9726 (define_insn "*arith_shiftsi_compare0_scratch"
9727 [(set (reg:CC_NZ CC_REGNUM)
9729 (match_operator:SI 1 "shiftable_operator"
9730 [(match_operator:SI 3 "shift_operator"
9731 [(match_operand:SI 4 "s_register_operand" "r,r")
9732 (match_operand:SI 5 "shift_amount_operand" "M,r")])
9733 (match_operand:SI 2 "s_register_operand" "r,r")])
9735 (clobber (match_scratch:SI 0 "=r,r"))]
9737 "%i1s%?\\t%0, %2, %4%S3"
9738 [(set_attr "conds" "set")
9739 (set_attr "shift" "4")
9740 (set_attr "arch" "32,a")
9741 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; Subtract with a shifted second operand: sub rd, rn, rm <shift>.
9743 (define_insn "*sub_shiftsi"
9744 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9745 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9746 (match_operator:SI 2 "shift_operator"
9747 [(match_operand:SI 3 "s_register_operand" "r,r")
9748 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
9750 "sub%?\\t%0, %1, %3%S2"
9751 [(set_attr "predicable" "yes")
9752 (set_attr "predicable_short_it" "no")
9753 (set_attr "shift" "3")
9754 (set_attr "arch" "32,a")
9755 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; Flag-setting variant (subs) that also keeps the difference in operand 0.
9757 (define_insn "*sub_shiftsi_compare0"
9758 [(set (reg:CC_NZ CC_REGNUM)
9760 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9761 (match_operator:SI 2 "shift_operator"
9762 [(match_operand:SI 3 "s_register_operand" "r,r")
9763 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
9765 (set (match_operand:SI 0 "s_register_operand" "=r,r")
9766 (minus:SI (match_dup 1)
9767 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
9769 "subs%?\\t%0, %1, %3%S2"
9770 [(set_attr "conds" "set")
9771 (set_attr "shift" "3")
9772 (set_attr "arch" "32,a")
9773 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; Flags-only variant: the difference goes to a scratch register.
9775 (define_insn "*sub_shiftsi_compare0_scratch"
9776 [(set (reg:CC_NZ CC_REGNUM)
9778 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9779 (match_operator:SI 2 "shift_operator"
9780 [(match_operand:SI 3 "s_register_operand" "r,r")
9781 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
9783 (clobber (match_scratch:SI 0 "=r,r"))]
9785 "subs%?\\t%0, %1, %3%S2"
9786 [(set_attr "conds" "set")
9787 (set_attr "shift" "3")
9788 (set_attr "arch" "32,a")
9789 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; AND of a condition (0/1) with a register.  Split after reload into two
;; cond_exec insns: set 0 on the reversed condition, else AND with 1.
;; For floating-point CC modes the reversal must use
;; reverse_condition_maybe_unordered to stay correct for unordered results.
9792 (define_insn_and_split "*and_scc"
9793 [(set (match_operand:SI 0 "s_register_operand" "=r")
9794 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9795 [(match_operand 2 "cc_register" "") (const_int 0)])
9796 (match_operand:SI 3 "s_register_operand" "r")))]
9798 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
9799 "&& reload_completed"
9800 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
9801 (cond_exec (match_dup 4) (set (match_dup 0)
9802 (and:SI (match_dup 3) (const_int 1))))]
9804 machine_mode mode = GET_MODE (operands[2]);
9805 enum rtx_code rc = GET_CODE (operands[1]);
9807 /* Note that operands[4] is the same as operands[1],
9808 but with VOIDmode as the result.  */
9809 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9810 if (mode == CCFPmode || mode == CCFPEmode)
9811 rc = reverse_condition_maybe_unordered (rc);
9813 rc = reverse_condition (rc);
9814 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9816 [(set_attr "conds" "use")
9817 (set_attr "type" "multiple")
9818 (set_attr "length" "8")]
;; IOR of a condition with a register — same splitting scheme.  The split
;; only fires when operand 0 and operand 3 are different registers (the
;; tied alternative 0 needs no move).
9821 (define_insn_and_split "*ior_scc"
9822 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9823 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
9824 [(match_operand 2 "cc_register" "") (const_int 0)])
9825 (match_operand:SI 3 "s_register_operand" "0,?r")))]
9830 "&& reload_completed
9831 && REGNO (operands [0]) != REGNO (operands[3])"
9832 ;; && which_alternative == 1
9833 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
9834 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
9835 (cond_exec (match_dup 4) (set (match_dup 0)
9836 (ior:SI (match_dup 3) (const_int 1))))]
9838 machine_mode mode = GET_MODE (operands[2]);
9839 enum rtx_code rc = GET_CODE (operands[1]);
9841 /* Note that operands[4] is the same as operands[1],
9842 but with VOIDmode as the result.  */
9843 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9844 if (mode == CCFPmode || mode == CCFPEmode)
9845 rc = reverse_condition_maybe_unordered (rc);
9847 rc = reverse_condition (rc);
9848 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9850 [(set_attr "conds" "use")
9851 (set_attr "length" "4,8")
9852 (set_attr "type" "logic_imm,multiple")]
9855 ; A series of splitters for the compare_scc pattern below.  Note that
9856 ; order is important.
;; lt reg, 0 -> logical shift right by 31 (sign bit becomes 0/1).
;; NOTE(review): define_split header lines are missing throughout this
;; region; the pattern bodies are preserved as extracted.
9858 [(set (match_operand:SI 0 "s_register_operand" "")
9859 (lt:SI (match_operand:SI 1 "s_register_operand" "")
9861 (clobber (reg:CC CC_REGNUM))]
9862 "TARGET_32BIT && reload_completed"
9863 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
;; ge reg, 0 -> invert then shift the (now negated) sign bit down.
9866 [(set (match_operand:SI 0 "s_register_operand" "")
9867 (ge:SI (match_operand:SI 1 "s_register_operand" "")
9869 (clobber (reg:CC CC_REGNUM))]
9870 "TARGET_32BIT && reload_completed"
9871 [(set (match_dup 0) (not:SI (match_dup 1)))
9872 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
;; eq reg, 0 on ARMv5T+: clz gives 32 only for zero; >> 5 maps that to 1.
9875 [(set (match_operand:SI 0 "s_register_operand" "")
9876 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9878 (clobber (reg:CC CC_REGNUM))]
9879 "arm_arch5t && TARGET_32BIT"
9880 [(set (match_dup 0) (clz:SI (match_dup 1)))
9881 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
;; eq reg, 0 without clz: compute 1 - reg setting carry, then conditionally
;; zero the result when the subtraction borrowed (ltu).
9885 [(set (match_operand:SI 0 "s_register_operand" "")
9886 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9888 (clobber (reg:CC CC_REGNUM))]
9889 "TARGET_32BIT && reload_completed"
9891 [(set (reg:CC CC_REGNUM)
9892 (compare:CC (const_int 1) (match_dup 1)))
9894 (minus:SI (const_int 1) (match_dup 1)))])
9895 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
9896 (set (match_dup 0) (const_int 0)))])
;; ne reg, const: add the negated constant setting flags, then force the
;; result to 1 when non-zero.
9899 [(set (match_operand:SI 0 "s_register_operand" "")
9900 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9901 (match_operand:SI 2 "const_int_operand" "")))
9902 (clobber (reg:CC CC_REGNUM))]
9903 "TARGET_32BIT && reload_completed"
9905 [(set (reg:CC CC_REGNUM)
9906 (compare:CC (match_dup 1) (match_dup 2)))
9907 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
9908 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
9909 (set (match_dup 0) (const_int 1)))]
9911 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
;; ne reg, reg-or-imm: subtract setting flags, then set 1 when non-zero.
9915 [(set (match_operand:SI 0 "s_register_operand" "")
9916 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9917 (match_operand:SI 2 "arm_add_operand" "")))
9918 (clobber (reg:CC CC_REGNUM))]
9919 "TARGET_32BIT && reload_completed"
9921 [(set (reg:CC_NZ CC_REGNUM)
9922 (compare:CC_NZ (minus:SI (match_dup 1) (match_dup 2))
9924 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
9925 (cond_exec (ne:CC_NZ (reg:CC_NZ CC_REGNUM) (const_int 0))
9926 (set (match_dup 0) (const_int 1)))])
;; General scc (store condition as 0/1): compare, then two cond_exec sets —
;; 0 on the reversed condition, 1 on the original one.  Floating-point CC
;; modes reverse via reverse_condition_maybe_unordered.  The simpler
;; splitters above are tried first (order matters, per the comment there).
9928 (define_insn_and_split "*compare_scc"
9929 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9930 (match_operator:SI 1 "arm_comparison_operator"
9931 [(match_operand:SI 2 "s_register_operand" "r,r")
9932 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9933 (clobber (reg:CC CC_REGNUM))]
9936 "&& reload_completed"
9937 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
9938 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
9939 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
9942 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
9943 operands[2], operands[3]);
9944 enum rtx_code rc = GET_CODE (operands[1]);
9946 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
9948 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9949 if (mode == CCFPmode || mode == CCFPEmode)
9950 rc = reverse_condition_maybe_unordered (rc);
9952 rc = reverse_condition (rc);
9953 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9955 [(set_attr "type" "multiple")]
9958 ;; Attempt to improve the sequence generated by the compare_scc splitters
9959 ;; not to use conditional execution.
9961 ;; Rd = (eq (reg1) (const_int0)) // ARMv5
;; Peephole: compare-with-0 + two cond_exec sets -> clz + lsr #5, when CC is
;; dead afterwards (peep2_regno_dead_p checks at position 3).
9965 [(set (reg:CC CC_REGNUM)
9966 (compare:CC (match_operand:SI 1 "register_operand" "")
9968 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9969 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9970 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9971 (set (match_dup 0) (const_int 1)))]
9972 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9973 [(set (match_dup 0) (clz:SI (match_dup 1)))
9974 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9977 ;; Rd = (eq (reg1) (const_int0)) // !ARMv5
;; Without clz: rsbs scratch, reg, #0 then adc — Rd = carry-out of the
;; negation, which is 1 exactly when reg was 0.  Needs a scratch register.
;; NOTE(review): the define_peephole2 header lines are missing throughout
;; this region; bodies preserved as extracted.
9981 [(set (reg:CC CC_REGNUM)
9982 (compare:CC (match_operand:SI 1 "register_operand" "")
9984 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9985 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9986 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9987 (set (match_dup 0) (const_int 1)))
9988 (match_scratch:SI 2 "r")]
9989 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9991 [(set (reg:CC CC_REGNUM)
9992 (compare:CC (const_int 0) (match_dup 1)))
9993 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
9995 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
9996 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
9999 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
10000 ;;	sub Rd, Reg1, reg2
;; ARMv5T+, speed: sub/clz/lsr — no flags needed at all.
10004 [(set (reg:CC CC_REGNUM)
10005 (compare:CC (match_operand:SI 1 "register_operand" "")
10006 (match_operand:SI 2 "arm_rhs_operand" "")))
10007 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
10008 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
10009 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
10010 (set (match_dup 0) (const_int 1)))]
10011 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
10012 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
10013 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
10014 (set (match_dup 0) (clz:SI (match_dup 0)))
10015 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
10019 ;; Rd = (eq (reg1) (reg2)) // ! ARMv5 or optimising for size.
10020 ;;	sub T1, Reg1, reg2
;; Fallback: compute the difference into a scratch (operand 4 is built in
;; the preparation code as reg1 - reg2, folding a constant reg2), then the
;; rsbs/adc carry trick as above.
10024 [(set (reg:CC CC_REGNUM)
10025 (compare:CC (match_operand:SI 1 "register_operand" "")
10026 (match_operand:SI 2 "arm_rhs_operand" "")))
10027 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
10028 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
10029 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
10030 (set (match_dup 0) (const_int 1)))
10031 (match_scratch:SI 3 "r")]
10032 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
10033 [(set (match_dup 3) (match_dup 4))
10035 [(set (reg:CC CC_REGNUM)
10036 (compare:CC (const_int 0) (match_dup 3)))
10037 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
10039 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
10040 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
10042 if (CONST_INT_P (operands[2]))
10043 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
10045 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
;; Conditional move on an existing CC: emits up to two predicated movs.
;; Which mov(s) are emitted depends on which operand is tied to the
;; destination (alternatives 0/1 skip the redundant copy); the NE/EQ test on
;; operand 3 decides which source goes with the true condition (%d4) and
;; which with the inverse (%D4).
10048 (define_insn "*cond_move"
10049 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10050 (if_then_else:SI (match_operator 3 "equality_operator"
10051 [(match_operator 4 "arm_comparison_operator"
10052 [(match_operand 5 "cc_register" "") (const_int 0)])
10054 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
10055 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
10058 if (GET_CODE (operands[3]) == NE)
10060 if (which_alternative != 1)
10061 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
10062 if (which_alternative != 0)
10063 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
10066 if (which_alternative != 0)
10067 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
10068 if (which_alternative != 1)
10069 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
10072 [(set_attr "conds" "use")
10073 (set_attr_alternative "type"
10074 [(if_then_else (match_operand 2 "const_int_operand" "")
10075 (const_string "mov_imm")
10076 (const_string "mov_reg"))
10077 (if_then_else (match_operand 1 "const_int_operand" "")
10078 (const_string "mov_imm")
10079 (const_string "mov_reg"))
10080 (const_string "multiple")])
10081 (set_attr "length" "4,4,8")]
;; *cond_arith: apply a shiftable operator (operand 5) to the 0/1 result of
;; comparison operand 4 and operand 1.  Special case: LT against zero is a
;; single "<op> %0, %1, %2, lsr #31"; otherwise cmp followed by predicated
;; instructions (AND forces a mov #0, MINUS an rsb #0).  Clobbers CC
;; ("conds" "clob").
10084 (define_insn "*cond_arith"
10085 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10086 (match_operator:SI 5 "shiftable_operator"
10087 [(match_operator:SI 4 "arm_comparison_operator"
10088 [(match_operand:SI 2 "s_register_operand" "r,r")
10089 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10090 (match_operand:SI 1 "s_register_operand" "0,?r")]))
10091 (clobber (reg:CC CC_REGNUM))]
10094 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
10095 return \"%i5\\t%0, %1, %2, lsr #31\";
10097 output_asm_insn (\"cmp\\t%2, %3\", operands);
10098 if (GET_CODE (operands[5]) == AND)
10099 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
10100 else if (GET_CODE (operands[5]) == MINUS)
10101 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
10102 else if (which_alternative != 0)
10103 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
10104 return \"%i5%d4\\t%0, %1, #1\";
10106 [(set_attr "conds" "clob")
10107 (set_attr "length" "12")
10108 (set_attr "type" "multiple")]
;; *cond_sub: operand 1 minus the 0/1 result of comparison operand 4;
;; emitted as cmp + (optional mov for the ?r alternative) +
;; "sub%d4 %0, %1, #1".  Clobbers CC.
10111 (define_insn "*cond_sub"
10112 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10113 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
10114 (match_operator:SI 4 "arm_comparison_operator"
10115 [(match_operand:SI 2 "s_register_operand" "r,r")
10116 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10117 (clobber (reg:CC CC_REGNUM))]
10120 output_asm_insn (\"cmp\\t%2, %3\", operands);
10121 if (which_alternative != 0)
10122 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
10123 return \"sub%d4\\t%0, %1, #1\";
10125 [(set_attr "conds" "clob")
10126 (set_attr "length" "8,12")
10127 (set_attr "type" "multiple")]
;; *cmp_ite0: fold two comparisons into a dominant CC register via a
;; conditional-compare sequence.  cmp1/cmp2 tables select cmp vs cmn
;; (negated-constant) forms; "swap" is decided by comparison_dominates_p,
;; and Thumb-2 inserts an IT instruction between the two compares.
;; NOTE(review): interior lines are elided (embedded numbering skips, e.g.
;; 10143->10149, 10158->10160, 10171->10176); the cmp2/ite tables and the
;; length alternatives below are incomplete as shown.
10130 (define_insn "*cmp_ite0"
10131 [(set (match_operand 6 "dominant_cc_register" "")
10134 (match_operator 4 "arm_comparison_operator"
10135 [(match_operand:SI 0 "s_register_operand"
10136 "l,l,l,r,r,r,r,r,r")
10137 (match_operand:SI 1 "arm_add_operand"
10138 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10139 (match_operator:SI 5 "arm_comparison_operator"
10140 [(match_operand:SI 2 "s_register_operand"
10141 "l,r,r,l,l,r,r,r,r")
10142 (match_operand:SI 3 "arm_add_operand"
10143 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
10149 static const char * const cmp1[NUM_OF_COND_CMP][2] =
10151 {\"cmp%d5\\t%0, %1\",
10152 \"cmp%d4\\t%2, %3\"},
10153 {\"cmn%d5\\t%0, #%n1\",
10154 \"cmp%d4\\t%2, %3\"},
10155 {\"cmp%d5\\t%0, %1\",
10156 \"cmn%d4\\t%2, #%n3\"},
10157 {\"cmn%d5\\t%0, #%n1\",
10158 \"cmn%d4\\t%2, #%n3\"}
10160 static const char * const cmp2[NUM_OF_COND_CMP][2] =
10165 \"cmn\\t%0, #%n1\"},
10166 {\"cmn\\t%2, #%n3\",
10168 {\"cmn\\t%2, #%n3\",
10169 \"cmn\\t%0, #%n1\"}
10171 static const char * const ite[2] =
10176 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10177 CMP_CMP, CMN_CMP, CMP_CMP,
10178 CMN_CMP, CMP_CMN, CMN_CMN};
10180 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10182 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10183 if (TARGET_THUMB2) {
10184 output_asm_insn (ite[swap], operands);
10186 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10189 [(set_attr "conds" "set")
10190 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10191 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
10192 (set_attr "type" "multiple")
10193 (set_attr_alternative "length"
10199 (if_then_else (eq_attr "is_thumb" "no")
10202 (if_then_else (eq_attr "is_thumb" "no")
10205 (if_then_else (eq_attr "is_thumb" "no")
10208 (if_then_else (eq_attr "is_thumb" "no")
;; *cmp_ite1: like *cmp_ite0, but the dominance test reverses operand 4's
;; condition (see the reverse_condition call), and the conditional second
;; compare uses the inverted predicate (%D5) in the cmp2 table.
;; NOTE(review): interior lines are elided (embedded numbering skips, e.g.
;; 10226->10232, 10252->10254); cmp1/ite tables and the length alternatives
;; are incomplete as shown.
10213 (define_insn "*cmp_ite1"
10214 [(set (match_operand 6 "dominant_cc_register" "")
10217 (match_operator 4 "arm_comparison_operator"
10218 [(match_operand:SI 0 "s_register_operand"
10219 "l,l,l,r,r,r,r,r,r")
10220 (match_operand:SI 1 "arm_add_operand"
10221 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10222 (match_operator:SI 5 "arm_comparison_operator"
10223 [(match_operand:SI 2 "s_register_operand"
10224 "l,r,r,l,l,r,r,r,r")
10225 (match_operand:SI 3 "arm_add_operand"
10226 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
10232 static const char * const cmp1[NUM_OF_COND_CMP][2] =
10236 {\"cmn\\t%0, #%n1\",
10239 \"cmn\\t%2, #%n3\"},
10240 {\"cmn\\t%0, #%n1\",
10241 \"cmn\\t%2, #%n3\"}
10243 static const char * const cmp2[NUM_OF_COND_CMP][2] =
10245 {\"cmp%d4\\t%2, %3\",
10246 \"cmp%D5\\t%0, %1\"},
10247 {\"cmp%d4\\t%2, %3\",
10248 \"cmn%D5\\t%0, #%n1\"},
10249 {\"cmn%d4\\t%2, #%n3\",
10250 \"cmp%D5\\t%0, %1\"},
10251 {\"cmn%d4\\t%2, #%n3\",
10252 \"cmn%D5\\t%0, #%n1\"}
10254 static const char * const ite[2] =
10259 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10260 CMP_CMP, CMN_CMP, CMP_CMP,
10261 CMN_CMP, CMP_CMN, CMN_CMN};
10263 comparison_dominates_p (GET_CODE (operands[5]),
10264 reverse_condition (GET_CODE (operands[4])));
10266 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10267 if (TARGET_THUMB2) {
10268 output_asm_insn (ite[swap], operands);
10270 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10273 [(set_attr "conds" "set")
10274 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10275 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
10276 (set_attr_alternative "length"
10282 (if_then_else (eq_attr "is_thumb" "no")
10285 (if_then_else (eq_attr "is_thumb" "no")
10288 (if_then_else (eq_attr "is_thumb" "no")
10291 (if_then_else (eq_attr "is_thumb" "no")
10294 (set_attr "type" "multiple")]
;; *cmp_and: AND of two comparisons folded into a dominant CC register.
;; Ten alternatives (six Thumb-2-capable); the second compare is predicated
;; on the first (cmp1 table with %d4/%d5), with IT emitted for Thumb-2.
;; NOTE(review): interior lines are elided (embedded numbering skips, e.g.
;; 10310->10315, 10335->10337, 10344->10347); cmp2/ite tables and the
;; length alternatives are incomplete as shown.
10297 (define_insn "*cmp_and"
10298 [(set (match_operand 6 "dominant_cc_register" "")
10301 (match_operator 4 "arm_comparison_operator"
10302 [(match_operand:SI 0 "s_register_operand"
10303 "l,l,l,r,r,r,r,r,r,r")
10304 (match_operand:SI 1 "arm_add_operand"
10305 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
10306 (match_operator:SI 5 "arm_comparison_operator"
10307 [(match_operand:SI 2 "s_register_operand"
10308 "l,r,r,l,l,r,r,r,r,r")
10309 (match_operand:SI 3 "arm_add_operand"
10310 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
10315 static const char *const cmp1[NUM_OF_COND_CMP][2] =
10317 {\"cmp%d5\\t%0, %1\",
10318 \"cmp%d4\\t%2, %3\"},
10319 {\"cmn%d5\\t%0, #%n1\",
10320 \"cmp%d4\\t%2, %3\"},
10321 {\"cmp%d5\\t%0, %1\",
10322 \"cmn%d4\\t%2, #%n3\"},
10323 {\"cmn%d5\\t%0, #%n1\",
10324 \"cmn%d4\\t%2, #%n3\"}
10326 static const char *const cmp2[NUM_OF_COND_CMP][2] =
10331 \"cmn\\t%0, #%n1\"},
10332 {\"cmn\\t%2, #%n3\",
10334 {\"cmn\\t%2, #%n3\",
10335 \"cmn\\t%0, #%n1\"}
10337 static const char *const ite[2] =
10342 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
10343 CMP_CMP, CMN_CMP, CMP_CMP,
10344 CMP_CMP, CMN_CMP, CMP_CMN,
10347 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10349 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10350 if (TARGET_THUMB2) {
10351 output_asm_insn (ite[swap], operands);
10353 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10356 [(set_attr "conds" "set")
10357 (set_attr "predicable" "no")
10358 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
10359 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
10360 (set_attr_alternative "length"
10367 (if_then_else (eq_attr "is_thumb" "no")
10370 (if_then_else (eq_attr "is_thumb" "no")
10373 (if_then_else (eq_attr "is_thumb" "no")
10376 (if_then_else (eq_attr "is_thumb" "no")
10379 (set_attr "type" "multiple")]
;; *cmp_ior: IOR of two comparisons folded into a dominant CC register.
;; Unlike *cmp_and, the predicated second compare uses the inverted
;; predicate (%D4/%D5 forms in the cmp2 table): the second test only runs
;; when the first failed.
;; NOTE(review): interior lines are elided (embedded numbering skips, e.g.
;; 10395->10400, 10420->10422, 10429->10432); cmp1/ite tables and the
;; length alternatives are incomplete as shown.
10382 (define_insn "*cmp_ior"
10383 [(set (match_operand 6 "dominant_cc_register" "")
10386 (match_operator 4 "arm_comparison_operator"
10387 [(match_operand:SI 0 "s_register_operand"
10388 "l,l,l,r,r,r,r,r,r,r")
10389 (match_operand:SI 1 "arm_add_operand"
10390 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
10391 (match_operator:SI 5 "arm_comparison_operator"
10392 [(match_operand:SI 2 "s_register_operand"
10393 "l,r,r,l,l,r,r,r,r,r")
10394 (match_operand:SI 3 "arm_add_operand"
10395 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
10400 static const char *const cmp1[NUM_OF_COND_CMP][2] =
10404 {\"cmn\\t%0, #%n1\",
10407 \"cmn\\t%2, #%n3\"},
10408 {\"cmn\\t%0, #%n1\",
10409 \"cmn\\t%2, #%n3\"}
10411 static const char *const cmp2[NUM_OF_COND_CMP][2] =
10413 {\"cmp%D4\\t%2, %3\",
10414 \"cmp%D5\\t%0, %1\"},
10415 {\"cmp%D4\\t%2, %3\",
10416 \"cmn%D5\\t%0, #%n1\"},
10417 {\"cmn%D4\\t%2, #%n3\",
10418 \"cmp%D5\\t%0, %1\"},
10419 {\"cmn%D4\\t%2, #%n3\",
10420 \"cmn%D5\\t%0, #%n1\"}
10422 static const char *const ite[2] =
10427 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
10428 CMP_CMP, CMN_CMP, CMP_CMP,
10429 CMP_CMP, CMN_CMP, CMP_CMN,
10432 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10434 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10435 if (TARGET_THUMB2) {
10436 output_asm_insn (ite[swap], operands);
10438 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10442 [(set_attr "conds" "set")
10443 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
10444 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
10445 (set_attr_alternative "length"
10452 (if_then_else (eq_attr "is_thumb" "no")
10455 (if_then_else (eq_attr "is_thumb" "no")
10458 (if_then_else (eq_attr "is_thumb" "no")
10461 (if_then_else (eq_attr "is_thumb" "no")
10464 (set_attr "type" "multiple")]
;; *ior_scc_scc: OR of two store-condition (SCC) values.  After reload it is
;; split into a dominant conditional-compare set of a CC register
;; (operand 7, built in the preparation code with
;; arm_select_dominance_cc_mode / DOM_CC_X_OR_Y) followed by an NE-based
;; store of 0/1 into operand 0.  Clobbers CC before the split.
10467 (define_insn_and_split "*ior_scc_scc"
10468 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
10469 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
10470 [(match_operand:SI 1 "s_register_operand" "l,r")
10471 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10472 (match_operator:SI 6 "arm_comparison_operator"
10473 [(match_operand:SI 4 "s_register_operand" "l,r")
10474 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
10475 (clobber (reg:CC CC_REGNUM))]
10477 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
10480 "TARGET_32BIT && reload_completed"
10481 [(set (match_dup 7)
10484 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10485 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10487 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10489 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10492 [(set_attr "conds" "clob")
10493 (set_attr "enabled_for_short_it" "yes,no")
10494 (set_attr "length" "16")
10495 (set_attr "type" "multiple")]
10498 ; If the above pattern is followed by a CMP insn, then the compare is
10499 ; redundant, since we can rework the conditional instruction that follows.
;; *ior_scc_scc_cmp: same combination as *ior_scc_scc, but the ORed result
;; is itself compared into a dominant CC register (operand 0) while also
;; being stored in operand 7; the split sets the CC register directly and
;; recreates operand 7 with an NE-based store.
10500 (define_insn_and_split "*ior_scc_scc_cmp"
10501 [(set (match_operand 0 "dominant_cc_register" "")
10502 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
10503 [(match_operand:SI 1 "s_register_operand" "l,r")
10504 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10505 (match_operator:SI 6 "arm_comparison_operator"
10506 [(match_operand:SI 4 "s_register_operand" "l,r")
10507 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
10509 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
10510 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10511 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10514 "TARGET_32BIT && reload_completed"
10515 [(set (match_dup 0)
10518 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10519 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10521 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10523 [(set_attr "conds" "set")
10524 (set_attr "enabled_for_short_it" "yes,no")
10525 (set_attr "length" "16")
10526 (set_attr "type" "multiple")]
;; *and_scc_scc: AND of two store-condition values; the AND analogue of
;; *ior_scc_scc, using DOM_CC_X_AND_Y for the dominance-mode selection both
;; in the insn condition and in the split condition.
10529 (define_insn_and_split "*and_scc_scc"
10530 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
10531 (and:SI (match_operator:SI 3 "arm_comparison_operator"
10532 [(match_operand:SI 1 "s_register_operand" "l,r")
10533 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10534 (match_operator:SI 6 "arm_comparison_operator"
10535 [(match_operand:SI 4 "s_register_operand" "l,r")
10536 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
10537 (clobber (reg:CC CC_REGNUM))]
10539 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10542 "TARGET_32BIT && reload_completed
10543 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10545 [(set (match_dup 7)
10548 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10549 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10551 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10553 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10556 [(set_attr "conds" "clob")
10557 (set_attr "enabled_for_short_it" "yes,no")
10558 (set_attr "length" "16")
10559 (set_attr "type" "multiple")]
10562 ; If the above pattern is followed by a CMP insn, then the compare is
10563 ; redundant, since we can rework the conditional instruction that follows.
;; *and_scc_scc_cmp: AND analogue of *ior_scc_scc_cmp — the ANDed SCC
;; result feeds a dominant CC register (operand 0) and is also kept in
;; operand 7; split into a direct CC set plus an NE-based store.
10564 (define_insn_and_split "*and_scc_scc_cmp"
10565 [(set (match_operand 0 "dominant_cc_register" "")
10566 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
10567 [(match_operand:SI 1 "s_register_operand" "l,r")
10568 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10569 (match_operator:SI 6 "arm_comparison_operator"
10570 [(match_operand:SI 4 "s_register_operand" "l,r")
10571 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
10573 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
10574 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10575 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10578 "TARGET_32BIT && reload_completed"
10579 [(set (match_dup 0)
10582 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10583 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10585 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10587 [(set_attr "conds" "set")
10588 (set_attr "enabled_for_short_it" "yes,no")
10589 (set_attr "length" "16")
10590 (set_attr "type" "multiple")]
10593 ;; If there is no dominance in the comparison, then we can still save an
10594 ;; instruction in the AND case, since we can know that the second compare
10595 ;; need only zero the value if false (if true, then the value is already
;; *and_scc_scc_nodom: AND of two SCC values when no dominance mode exists.
;; Split after reload into: (1) an SCC store of the first comparison into
;; operand 0 (clobbering CC), (2) a plain compare for the second comparison
;; (operands 7/8 built in the preparation code via SELECT_CC_MODE and
;; gen_rtx_COMPARE), and (3) a conditional zeroing of operand 0.
10597 (define_insn_and_split "*and_scc_scc_nodom"
10598 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
10599 (and:SI (match_operator:SI 3 "arm_comparison_operator"
10600 [(match_operand:SI 1 "s_register_operand" "r,r,0")
10601 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
10602 (match_operator:SI 6 "arm_comparison_operator"
10603 [(match_operand:SI 4 "s_register_operand" "r,r,r")
10604 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
10605 (clobber (reg:CC CC_REGNUM))]
10607 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10610 "TARGET_32BIT && reload_completed"
10611 [(parallel [(set (match_dup 0)
10612 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
10613 (clobber (reg:CC CC_REGNUM))])
10614 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
10616 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
10619 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
10620 operands[4], operands[5]),
10622 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
10624 [(set_attr "conds" "clob")
10625 (set_attr "length" "20")
10626 (set_attr "type" "multiple")]
;; NOTE(review): extraction artifact — the "(define_split" opener for this
;; pattern is missing (embedded numbering starts at 10630).
;; Split a CC_NZ compare of (ior (and reg ...) (comparison ...)) into an
;; explicit IOR into scratch operand 4 followed by a compare of its low bit
;; (and reg #1) against zero.
10630 [(set (reg:CC_NZ CC_REGNUM)
10631 (compare:CC_NZ (ior:SI
10632 (and:SI (match_operand:SI 0 "s_register_operand" "")
10634 (match_operator:SI 1 "arm_comparison_operator"
10635 [(match_operand:SI 2 "s_register_operand" "")
10636 (match_operand:SI 3 "arm_add_operand" "")]))
10638 (clobber (match_operand:SI 4 "s_register_operand" ""))]
10640 [(set (match_dup 4)
10641 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10643 (set (reg:CC_NZ CC_REGNUM)
10644 (compare:CC_NZ (and:SI (match_dup 4) (const_int 1))
;; NOTE(review): extraction artifact — the "(define_split" opener is missing
;; here too (embedded numbering jumps 10644->10649).
;; Mirror of the previous split with the IOR operands in the other order:
;; (ior (comparison ...) (and reg ...)) compared in CC_NZ mode, rewritten
;; via scratch operand 4 and a low-bit compare.
10649 [(set (reg:CC_NZ CC_REGNUM)
10650 (compare:CC_NZ (ior:SI
10651 (match_operator:SI 1 "arm_comparison_operator"
10652 [(match_operand:SI 2 "s_register_operand" "")
10653 (match_operand:SI 3 "arm_add_operand" "")])
10654 (and:SI (match_operand:SI 0 "s_register_operand" "")
10657 (clobber (match_operand:SI 4 "s_register_operand" ""))]
10659 [(set (match_dup 4)
10660 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10662 (set (reg:CC_NZ CC_REGNUM)
10663 (compare:CC_NZ (and:SI (match_dup 4) (const_int 1))
10666 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
;; *negscc: negated store-condition (result is 0 or -1), clobbering CC.
;; Split after reload into one of three sequences:
;;   LT against zero -> single "mov %0, %1, asr #31";
;;   NE              -> subs/cmn-style compare-and-subtract, then a
;;                      conditional mvn #0 under NE;
;;   general case    -> cmp, then mov%D3 #0 and mvn%d3 #0 (condition and
;;                      its reverse, see the reverse_condition call).
10668 (define_insn_and_split "*negscc"
10669 [(set (match_operand:SI 0 "s_register_operand" "=r")
10670 (neg:SI (match_operator 3 "arm_comparison_operator"
10671 [(match_operand:SI 1 "s_register_operand" "r")
10672 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
10673 (clobber (reg:CC CC_REGNUM))]
10676 "&& reload_completed"
10679 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
10681 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
10683 /* Emit mov\\t%0, %1, asr #31 */
10684 emit_insn (gen_rtx_SET (operands[0],
10685 gen_rtx_ASHIFTRT (SImode,
10690 else if (GET_CODE (operands[3]) == NE)
10692 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
10693 if (CONST_INT_P (operands[2]))
10694 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
10695 gen_int_mode (-INTVAL (operands[2]),
10698 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
10700 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10701 gen_rtx_NE (SImode,
10704 gen_rtx_SET (operands[0],
10710 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
10711 emit_insn (gen_rtx_SET (cc_reg,
10712 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
10713 enum rtx_code rc = GET_CODE (operands[3]);
10715 rc = reverse_condition (rc);
10716 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10717 gen_rtx_fmt_ee (rc,
10721 gen_rtx_SET (operands[0], const0_rtx)));
10722 rc = GET_CODE (operands[3]);
10723 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10724 gen_rtx_fmt_ee (rc,
10728 gen_rtx_SET (operands[0],
10734 [(set_attr "conds" "clob")
10735 (set_attr "length" "12")
10736 (set_attr "type" "multiple")]
;; movcond_addsi: conditional move whose condition compares
;; (plus op3 op4) (against a value elided here — numbering skips
;; 10744->10746), clobbering CC.  Split after reload into a CC_NZ-setting
;; add followed by two cond_exec moves.  The preparation code reverses the
;; condition and swaps operands 1/2 when operand 2 is not already in
;; operand 0's register, so the taken move can stay unconditional.
10739 (define_insn_and_split "movcond_addsi"
10740 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
10742 (match_operator 5 "comparison_operator"
10743 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
10744 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
10746 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
10747 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
10748 (clobber (reg:CC CC_REGNUM))]
10751 "&& reload_completed"
10752 [(set (reg:CC_NZ CC_REGNUM)
10754 (plus:SI (match_dup 3)
10757 (set (match_dup 0) (match_dup 1))
10758 (cond_exec (match_dup 6)
10759 (set (match_dup 0) (match_dup 2)))]
10762 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
10763 operands[3], operands[4]);
10764 enum rtx_code rc = GET_CODE (operands[5]);
10765 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10766 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
10767 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
10768 rc = reverse_condition (rc);
10770 std::swap (operands[1], operands[2]);
10772 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10775 [(set_attr "conds" "clob")
10776 (set_attr "enabled_for_short_it" "no,yes,yes")
10777 (set_attr "type" "multiple")]
;; movcond: general conditional move under a full compare (clobbers CC).
;; Fast paths avoid the compare entirely for LT/GE against zero when an arm
;; is a register or zero, using and/bic with "asr #31" (or ands/bics with
;; "asr #32" plus a predicated fixup move).  Otherwise: cmn for constants
;; whose negation is encodable, else cmp, then up to two predicated MOVs.
10780 (define_insn "movcond"
10781 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10783 (match_operator 5 "arm_comparison_operator"
10784 [(match_operand:SI 3 "s_register_operand" "r,r,r")
10785 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
10786 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
10787 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
10788 (clobber (reg:CC CC_REGNUM))]
10791 if (GET_CODE (operands[5]) == LT
10792 && (operands[4] == const0_rtx))
10794 if (which_alternative != 1 && REG_P (operands[1]))
10796 if (operands[2] == const0_rtx)
10797 return \"and\\t%0, %1, %3, asr #31\";
10798 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
10800 else if (which_alternative != 0 && REG_P (operands[2]))
10802 if (operands[1] == const0_rtx)
10803 return \"bic\\t%0, %2, %3, asr #31\";
10804 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
10806 /* The only case that falls through to here is when both ops 1 & 2
10810 if (GET_CODE (operands[5]) == GE
10811 && (operands[4] == const0_rtx))
10813 if (which_alternative != 1 && REG_P (operands[1]))
10815 if (operands[2] == const0_rtx)
10816 return \"bic\\t%0, %1, %3, asr #31\";
10817 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
10819 else if (which_alternative != 0 && REG_P (operands[2]))
10821 if (operands[1] == const0_rtx)
10822 return \"and\\t%0, %2, %3, asr #31\";
10823 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
10825 /* The only case that falls through to here is when both ops 1 & 2
10828 if (CONST_INT_P (operands[4])
10829 && !const_ok_for_arm (INTVAL (operands[4])))
10830 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
10832 output_asm_insn (\"cmp\\t%3, %4\", operands);
10833 if (which_alternative != 0)
10834 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
10835 if (which_alternative != 1)
10836 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
10839 [(set_attr "conds" "clob")
10840 (set_attr "length" "8,8,12")
10841 (set_attr "type" "multiple")]
10844 ;; ??? The patterns below need checking for Thumb-2 usefulness.
;; *ifcompare_plus_move: if_then_else of (plus op2 op3) vs operand 1 under
;; a full comparison (operator 6), clobbering CC.
;; NOTE(review): the insn condition and output template lines are elided
;; here (embedded numbering jumps 10855->10858 straight to the attributes).
10846 (define_insn "*ifcompare_plus_move"
10847 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10848 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10849 [(match_operand:SI 4 "s_register_operand" "r,r")
10850 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10852 (match_operand:SI 2 "s_register_operand" "r,r")
10853 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
10854 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10855 (clobber (reg:CC CC_REGNUM))]
10858 [(set_attr "conds" "clob")
10859 (set_attr "length" "8,12")
10860 (set_attr "type" "multiple")]
;; *if_plus_move: predicated add/sub when the CC register is already set
;; ("conds" "use").  Alternatives: dest already holds operand 1 (single
;; add%d4 or sub%d4 for negatable constants), else the predicated add/sub
;; is followed by a mov%D4 of operand 1.
10863 (define_insn "*if_plus_move"
10864 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10866 (match_operator 4 "arm_comparison_operator"
10867 [(match_operand 5 "cc_register" "") (const_int 0)])
10869 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10870 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
10871 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
10874 add%d4\\t%0, %2, %3
10875 sub%d4\\t%0, %2, #%n3
10876 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
10877 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
10878 [(set_attr "conds" "use")
10879 (set_attr "length" "4,4,8,8")
10880 (set_attr_alternative "type"
10881 [(if_then_else (match_operand 3 "const_int_operand" "")
10882 (const_string "alu_imm" )
10883 (const_string "alu_sreg"))
10884 (const_string "alu_imm")
10885 (const_string "multiple")
10886 (const_string "multiple")])]
;; *ifcompare_move_plus: mirror of *ifcompare_plus_move with the plus in
;; the else-arm; clobbers CC.
;; NOTE(review): the insn condition and output template are elided here
;; (embedded numbering jumps 10898->10901 straight to the attributes).
10889 (define_insn "*ifcompare_move_plus"
10890 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10891 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10892 [(match_operand:SI 4 "s_register_operand" "r,r")
10893 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10894 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10896 (match_operand:SI 2 "s_register_operand" "r,r")
10897 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
10898 (clobber (reg:CC CC_REGNUM))]
10901 [(set_attr "conds" "clob")
10902 (set_attr "length" "8,12")
10903 (set_attr "type" "multiple")]
;; *if_move_plus: counterpart of *if_plus_move with the plus in the
;; else-arm, so the add/sub is predicated on the inverted condition (%D4)
;; and the fixup move on the direct one (%d4).  CC is only read
;; ("conds" "use").
10906 (define_insn "*if_move_plus"
10907 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10909 (match_operator 4 "arm_comparison_operator"
10910 [(match_operand 5 "cc_register" "") (const_int 0)])
10911 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
10913 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10914 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
10917 add%D4\\t%0, %2, %3
10918 sub%D4\\t%0, %2, #%n3
10919 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
10920 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
10921 [(set_attr "conds" "use")
10922 (set_attr "length" "4,4,8,8")
10923 (set_attr_alternative "type"
10924 [(if_then_else (match_operand 3 "const_int_operand" "")
10925 (const_string "alu_imm" )
10926 (const_string "alu_sreg"))
10927 (const_string "alu_imm")
10928 (const_string "multiple")
10929 (const_string "multiple")])]
;; *ifcompare_arith_arith: select between two shiftable-operator results
;; (operators 8 and 7) under a full comparison (operator 9); clobbers CC.
;; NOTE(review): the insn condition and output template are elided here
;; (embedded numbering jumps 10943->10946 straight to the attributes).
10932 (define_insn "*ifcompare_arith_arith"
10933 [(set (match_operand:SI 0 "s_register_operand" "=r")
10934 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
10935 [(match_operand:SI 5 "s_register_operand" "r")
10936 (match_operand:SI 6 "arm_add_operand" "rIL")])
10937 (match_operator:SI 8 "shiftable_operator"
10938 [(match_operand:SI 1 "s_register_operand" "r")
10939 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10940 (match_operator:SI 7 "shiftable_operator"
10941 [(match_operand:SI 3 "s_register_operand" "r")
10942 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
10943 (clobber (reg:CC CC_REGNUM))]
10946 [(set_attr "conds" "clob")
10947 (set_attr "length" "12")
10948 (set_attr "type" "multiple")]
;; *if_arith_arith: both arms computed with predicated shiftable-operator
;; instructions ("%I6%d5" then "%I7%D5") once the CC register is already
;; set ("conds" "use"); always two instructions (length 8).
10951 (define_insn "*if_arith_arith"
10952 [(set (match_operand:SI 0 "s_register_operand" "=r")
10953 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
10954 [(match_operand 8 "cc_register" "") (const_int 0)])
10955 (match_operator:SI 6 "shiftable_operator"
10956 [(match_operand:SI 1 "s_register_operand" "r")
10957 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10958 (match_operator:SI 7 "shiftable_operator"
10959 [(match_operand:SI 3 "s_register_operand" "r")
10960 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
10962 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
10963 [(set_attr "conds" "use")
10964 (set_attr "length" "8")
10965 (set_attr "type" "multiple")]
;; *ifcompare_arith_move: shiftable op in the then-arm, plain move in the
;; else-arm, under a full comparison; clobbers CC.  For LT/GE against zero
;; with suitable register operands the template uses and/bic with
;; "asr #31" (two instructions); otherwise cmn/cmp followed by predicated
;; op and optional fixup move.
10968 (define_insn "*ifcompare_arith_move"
10969 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10970 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10971 [(match_operand:SI 2 "s_register_operand" "r,r")
10972 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
10973 (match_operator:SI 7 "shiftable_operator"
10974 [(match_operand:SI 4 "s_register_operand" "r,r")
10975 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
10976 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10977 (clobber (reg:CC CC_REGNUM))]
10980 /* If we have an operation where (op x 0) is the identity operation and
10981 the conditional operator is LT or GE and we are comparing against zero and
10982 everything is in registers then we can do this in two instructions. */
10983 if (operands[3] == const0_rtx
10984 && GET_CODE (operands[7]) != AND
10985 && REG_P (operands[5])
10986 && REG_P (operands[1])
10987 && REGNO (operands[1]) == REGNO (operands[4])
10988 && REGNO (operands[4]) != REGNO (operands[0]))
10990 if (GET_CODE (operands[6]) == LT)
10991 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10992 else if (GET_CODE (operands[6]) == GE)
10993 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10995 if (CONST_INT_P (operands[3])
10996 && !const_ok_for_arm (INTVAL (operands[3])))
10997 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
10999 output_asm_insn (\"cmp\\t%2, %3\", operands);
11000 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
11001 if (which_alternative != 0)
11002 return \"mov%D6\\t%0, %1\";
11005 [(set_attr "conds" "clob")
11006 (set_attr "length" "8,12")
11007 (set_attr "type" "multiple")]
;; *if_arith_move: predicated shiftable op ("%I5%d4") with the CC register
;; already set; second alternative adds a mov%D4 fixup for the else-arm.
11010 (define_insn "*if_arith_move"
11011 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11012 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
11013 [(match_operand 6 "cc_register" "") (const_int 0)])
11014 (match_operator:SI 5 "shiftable_operator"
11015 [(match_operand:SI 2 "s_register_operand" "r,r")
11016 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
11017 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
11020 %I5%d4\\t%0, %2, %3
11021 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
11022 [(set_attr "conds" "use")
11023 (set_attr "length" "4,8")
11024 (set_attr_alternative "type"
11025 [(if_then_else (match_operand 3 "const_int_operand" "")
11026 (if_then_else (match_operand 5 "alu_shift_operator_lsl_1_to_4")
11027 (const_string "alu_shift_imm_lsl_1to4")
11028 (const_string "alu_shift_imm_other"))
11029 (const_string "alu_shift_reg"))
11030 (const_string "multiple")])]
;; *ifcompare_move_arith: mirror of *ifcompare_arith_move with the
;; shiftable op in the else-arm; clobbers CC.  Note the LT/GE fast paths
;; are swapped relative to that pattern (GE -> and, LT -> bic) because the
;; op executes on the inverted condition (%D6).
11033 (define_insn "*ifcompare_move_arith"
11034 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11035 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
11036 [(match_operand:SI 4 "s_register_operand" "r,r")
11037 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11038 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11039 (match_operator:SI 7 "shiftable_operator"
11040 [(match_operand:SI 2 "s_register_operand" "r,r")
11041 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
11042 (clobber (reg:CC CC_REGNUM))]
11045 /* If we have an operation where (op x 0) is the identity operation and
11046 the conditional operator is LT or GE and we are comparing against zero and
11047 everything is in registers then we can do this in two instructions */
11048 if (operands[5] == const0_rtx
11049 && GET_CODE (operands[7]) != AND
11050 && REG_P (operands[3])
11051 && REG_P (operands[1])
11052 && REGNO (operands[1]) == REGNO (operands[2])
11053 && REGNO (operands[2]) != REGNO (operands[0]))
11055 if (GET_CODE (operands[6]) == GE)
11056 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
11057 else if (GET_CODE (operands[6]) == LT)
11058 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
11061 if (CONST_INT_P (operands[5])
11062 && !const_ok_for_arm (INTVAL (operands[5])))
11063 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
11065 output_asm_insn (\"cmp\\t%4, %5\", operands);
11067 if (which_alternative != 0)
11068 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
11069 return \"%I7%D6\\t%0, %2, %3\";
11071 [(set_attr "conds" "clob")
11072 (set_attr "length" "8,12")
11073 (set_attr "type" "multiple")]
;; *if_move_arith: predicated shiftable op on the inverted condition
;; ("%I5%D4"), optionally preceded in the two-insn alternative by a mov%d4
;; of operand 1; CC already set ("conds" "use").
11076 (define_insn "*if_move_arith"
11077 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11079 (match_operator 4 "arm_comparison_operator"
11080 [(match_operand 6 "cc_register" "") (const_int 0)])
11081 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11082 (match_operator:SI 5 "shiftable_operator"
11083 [(match_operand:SI 2 "s_register_operand" "r,r")
11084 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
11087 %I5%D4\\t%0, %2, %3
11088 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
11089 [(set_attr "conds" "use")
11090 (set_attr "length" "4,8")
11091 (set_attr_alternative "type"
11092 [(if_then_else (match_operand 3 "const_int_operand" "")
11093 (if_then_else (match_operand 5 "alu_shift_operator_lsl_1_to_4")
11094 (const_string "alu_shift_imm_lsl_1to4")
11095 (const_string "alu_shift_imm_other"))
11096 (const_string "alu_shift_reg"))
11097 (const_string "multiple")])]
;; *ifcompare_move_not: select operand 1 or (not operand 2) under a full
;; comparison; clobbers CC.
;; NOTE(review): the insn condition and output template are elided here
;; (embedded numbering jumps 11109->11112 straight to the attributes).
11100 (define_insn "*ifcompare_move_not"
11101 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11103 (match_operator 5 "arm_comparison_operator"
11104 [(match_operand:SI 3 "s_register_operand" "r,r")
11105 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11106 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11108 (match_operand:SI 2 "s_register_operand" "r,r"))))
11109 (clobber (reg:CC CC_REGNUM))]
11112 [(set_attr "conds" "clob")
11113 (set_attr "length" "8,12")
11114 (set_attr "type" "multiple")]
;; *if_move_not: conditional move whose else-arm is a bitwise NOT (MVN) of
;; a register, predicated on a CC register already set ("conds" "use").
;; Alternatives: 0 - dest already holds operand 1, single mvn%D4;
;; 1 - mov%d4 then mvn%D4; 2 - operand 1 is an inverted-immediate (K),
;; so both arms use MVN.
;; Fix: the original declared the "type" attribute twice — a scalar
;; (set_attr "type" "mvn_reg") and the per-alternative
;; "mvn_reg,multiple,multiple".  Keep only the per-alternative form, which
;; matches the lengths 4,8,8 and the sibling *if_not_move pattern.
;; NOTE(review): some template lines are elided by the extraction (embedded
;; numbering jumps 11123->11127); the visible text is kept unchanged.
11117 (define_insn "*if_move_not"
11118 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11120 (match_operator 4 "arm_comparison_operator"
11121 [(match_operand 3 "cc_register" "") (const_int 0)])
11122 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
11123 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
11127 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
11128 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
11129 [(set_attr "conds" "use")
11131 (set_attr "length" "4,8,8")
11132 (set_attr "type" "mvn_reg,multiple,multiple")]
11135 (define_insn "*ifcompare_not_move"
11136 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11138 (match_operator 5 "arm_comparison_operator"
11139 [(match_operand:SI 3 "s_register_operand" "r,r")
11140 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11142 (match_operand:SI 2 "s_register_operand" "r,r"))
11143 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11144 (clobber (reg:CC CC_REGNUM))]
11147 [(set_attr "conds" "clob")
11148 (set_attr "length" "8,12")
11149 (set_attr "type" "multiple")]
;; Mirror of *if_move_not with the arms swapped: select the bitwise NOT of
;; operand 2 when the stored CC comparison (operator 4) holds, else operand 1.
;; Alternative 0 ties operand 1 to operand 0 (single predicated MVN); the
;; others prepend a conditional MOV/MVN of operand 1.
;; NOTE(review): extraction appears to be missing lines here (e.g. 11154,
;; 11159-11161: the if_then_else:SI wrapper and insn condition) -- confirm.
11152 (define_insn "*if_not_move"
11153 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11155 (match_operator 4 "arm_comparison_operator"
11156 [(match_operand 3 "cc_register" "") (const_int 0)])
11157 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
11158 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11162 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
11163 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
11164 [(set_attr "conds" "use")
11165 (set_attr "type" "mvn_reg,multiple,multiple")
11166 (set_attr "length" "4,8,8")]
11169 (define_insn "*ifcompare_shift_move"
11170 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11172 (match_operator 6 "arm_comparison_operator"
11173 [(match_operand:SI 4 "s_register_operand" "r,r")
11174 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11175 (match_operator:SI 7 "shift_operator"
11176 [(match_operand:SI 2 "s_register_operand" "r,r")
11177 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
11178 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11179 (clobber (reg:CC CC_REGNUM))]
11182 [(set_attr "conds" "clob")
11183 (set_attr "length" "8,12")
11184 (set_attr "type" "multiple")]
11187 (define_insn "*if_shift_move"
11188 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11190 (match_operator 5 "arm_comparison_operator"
11191 [(match_operand 6 "cc_register" "") (const_int 0)])
11192 (match_operator:SI 4 "shift_operator"
11193 [(match_operand:SI 2 "s_register_operand" "r,r,r")
11194 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
11195 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11199 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
11200 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
11201 [(set_attr "conds" "use")
11202 (set_attr "shift" "2")
11203 (set_attr "length" "4,8,8")
11204 (set_attr_alternative "type"
11205 [(if_then_else (match_operand 3 "const_int_operand" "")
11206 (const_string "mov_shift" )
11207 (const_string "mov_shift_reg"))
11208 (const_string "multiple")
11209 (const_string "multiple")])]
11212 (define_insn "*ifcompare_move_shift"
11213 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11215 (match_operator 6 "arm_comparison_operator"
11216 [(match_operand:SI 4 "s_register_operand" "r,r")
11217 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11218 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11219 (match_operator:SI 7 "shift_operator"
11220 [(match_operand:SI 2 "s_register_operand" "r,r")
11221 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
11222 (clobber (reg:CC CC_REGNUM))]
11225 [(set_attr "conds" "clob")
11226 (set_attr "length" "8,12")
11227 (set_attr "type" "multiple")]
11230 (define_insn "*if_move_shift"
11231 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11233 (match_operator 5 "arm_comparison_operator"
11234 [(match_operand 6 "cc_register" "") (const_int 0)])
11235 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
11236 (match_operator:SI 4 "shift_operator"
11237 [(match_operand:SI 2 "s_register_operand" "r,r,r")
11238 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
11242 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
11243 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
11244 [(set_attr "conds" "use")
11245 (set_attr "shift" "2")
11246 (set_attr "length" "4,8,8")
11247 (set_attr_alternative "type"
11248 [(if_then_else (match_operand 3 "const_int_operand" "")
11249 (const_string "mov_shift" )
11250 (const_string "mov_shift_reg"))
11251 (const_string "multiple")
11252 (const_string "multiple")])]
11255 (define_insn "*ifcompare_shift_shift"
11256 [(set (match_operand:SI 0 "s_register_operand" "=r")
11258 (match_operator 7 "arm_comparison_operator"
11259 [(match_operand:SI 5 "s_register_operand" "r")
11260 (match_operand:SI 6 "arm_add_operand" "rIL")])
11261 (match_operator:SI 8 "shift_operator"
11262 [(match_operand:SI 1 "s_register_operand" "r")
11263 (match_operand:SI 2 "arm_rhs_operand" "rM")])
11264 (match_operator:SI 9 "shift_operator"
11265 [(match_operand:SI 3 "s_register_operand" "r")
11266 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
11267 (clobber (reg:CC CC_REGNUM))]
11270 [(set_attr "conds" "clob")
11271 (set_attr "length" "12")
11272 (set_attr "type" "multiple")]
;; Conditional select between two shift results: a pair of oppositely
;; predicated MOVs with shifter operands (%S6 / %S7) driven by a stored CC
;; comparison (operator 5).  Both arms write operand 0, so only one takes
;; effect.  The "type" attribute is mov_shift only when both shift amounts
;; are immediates, otherwise mov_shift_reg.
;; NOTE(review): the insn condition line (11286) is absent from this
;; extraction -- confirm against the full file.
11275 (define_insn "*if_shift_shift"
11276 [(set (match_operand:SI 0 "s_register_operand" "=r")
11278 (match_operator 5 "arm_comparison_operator"
11279 [(match_operand 8 "cc_register" "") (const_int 0)])
11280 (match_operator:SI 6 "shift_operator"
11281 [(match_operand:SI 1 "s_register_operand" "r")
11282 (match_operand:SI 2 "arm_rhs_operand" "rM")])
11283 (match_operator:SI 7 "shift_operator"
11284 [(match_operand:SI 3 "s_register_operand" "r")
11285 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
11287 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
11288 [(set_attr "conds" "use")
11289 (set_attr "shift" "1")
11290 (set_attr "length" "8")
11291 (set (attr "type") (if_then_else
11292 (and (match_operand 2 "const_int_operand" "")
11293 (match_operand 4 "const_int_operand" ""))
11294 (const_string "mov_shift")
11295 (const_string "mov_shift_reg")))]
11298 (define_insn "*ifcompare_not_arith"
11299 [(set (match_operand:SI 0 "s_register_operand" "=r")
11301 (match_operator 6 "arm_comparison_operator"
11302 [(match_operand:SI 4 "s_register_operand" "r")
11303 (match_operand:SI 5 "arm_add_operand" "rIL")])
11304 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
11305 (match_operator:SI 7 "shiftable_operator"
11306 [(match_operand:SI 2 "s_register_operand" "r")
11307 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
11308 (clobber (reg:CC CC_REGNUM))]
11311 [(set_attr "conds" "clob")
11312 (set_attr "length" "12")
11313 (set_attr "type" "multiple")]
11316 (define_insn "*if_not_arith"
11317 [(set (match_operand:SI 0 "s_register_operand" "=r")
11319 (match_operator 5 "arm_comparison_operator"
11320 [(match_operand 4 "cc_register" "") (const_int 0)])
11321 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
11322 (match_operator:SI 6 "shiftable_operator"
11323 [(match_operand:SI 2 "s_register_operand" "r")
11324 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
11326 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
11327 [(set_attr "conds" "use")
11328 (set_attr "type" "mvn_reg")
11329 (set_attr "length" "8")]
11332 (define_insn "*ifcompare_arith_not"
11333 [(set (match_operand:SI 0 "s_register_operand" "=r")
11335 (match_operator 6 "arm_comparison_operator"
11336 [(match_operand:SI 4 "s_register_operand" "r")
11337 (match_operand:SI 5 "arm_add_operand" "rIL")])
11338 (match_operator:SI 7 "shiftable_operator"
11339 [(match_operand:SI 2 "s_register_operand" "r")
11340 (match_operand:SI 3 "arm_rhs_operand" "rI")])
11341 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
11342 (clobber (reg:CC CC_REGNUM))]
11345 [(set_attr "conds" "clob")
11346 (set_attr "length" "12")
11347 (set_attr "type" "multiple")]
11350 (define_insn "*if_arith_not"
11351 [(set (match_operand:SI 0 "s_register_operand" "=r")
11353 (match_operator 5 "arm_comparison_operator"
11354 [(match_operand 4 "cc_register" "") (const_int 0)])
11355 (match_operator:SI 6 "shiftable_operator"
11356 [(match_operand:SI 2 "s_register_operand" "r")
11357 (match_operand:SI 3 "arm_rhs_operand" "rI")])
11358 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
11360 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
11361 [(set_attr "conds" "use")
11362 (set_attr "type" "multiple")
11363 (set_attr "length" "8")]
11366 (define_insn "*ifcompare_neg_move"
11367 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11369 (match_operator 5 "arm_comparison_operator"
11370 [(match_operand:SI 3 "s_register_operand" "r,r")
11371 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11372 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
11373 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11374 (clobber (reg:CC CC_REGNUM))]
11377 [(set_attr "conds" "clob")
11378 (set_attr "length" "8,12")
11379 (set_attr "type" "multiple")]
;; Conditionally negate: if the stored CC comparison (operator 4) holds,
;; operand 0 becomes -operand 2, else it keeps its tied value (operand 1
;; matches "0,0").  After reload this splits into a single cond_exec'd RSB,
;; hence length 4.  Disabled when TARGET_COND_ARITH provides a direct
;; conditional-arithmetic form.  Alternative 0 is narrow-register Thumb-2
;; (arch t2, enabled_for_short_it yes).
11382 (define_insn_and_split "*if_neg_move"
11383 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
11385 (match_operator 4 "arm_comparison_operator"
11386 [(match_operand 3 "cc_register" "") (const_int 0)])
11387 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
11388 (match_operand:SI 1 "s_register_operand" "0,0")))]
11389 "TARGET_32BIT && !TARGET_COND_ARITH"
11391 "&& reload_completed"
11392 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
11393 (set (match_dup 0) (neg:SI (match_dup 2))))]
11395 [(set_attr "conds" "use")
11396 (set_attr "length" "4")
11397 (set_attr "arch" "t2,32")
11398 (set_attr "enabled_for_short_it" "yes,no")
11399 (set_attr "type" "logic_shift_imm")]
11402 (define_insn "*ifcompare_move_neg"
11403 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11405 (match_operator 5 "arm_comparison_operator"
11406 [(match_operand:SI 3 "s_register_operand" "r,r")
11407 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11408 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11409 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
11410 (clobber (reg:CC CC_REGNUM))]
11413 [(set_attr "conds" "clob")
11414 (set_attr "length" "8,12")
11415 (set_attr "type" "multiple")]
;; Mirror of *if_neg_move with the arms swapped: the negation is taken on
;; the "else" arm, so the split must execute the RSB under the REVERSED
;; condition.  The preparation code rebuilds the comparison with
;; reverse_condition (or reverse_condition_maybe_unordered for the FP CC
;; modes, where a plain reversal would mishandle unordered results) and
;; stores it in operands[5] for the cond_exec.
;; NOTE(review): the insn condition line (11425-11426) is absent from this
;; extraction -- confirm against the full file.
11418 (define_insn_and_split "*if_move_neg"
11419 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
11421 (match_operator 4 "arm_comparison_operator"
11422 [(match_operand 3 "cc_register" "") (const_int 0)])
11423 (match_operand:SI 1 "s_register_operand" "0,0")
11424 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
11427 "&& reload_completed"
11428 [(cond_exec (match_dup 5)
11429 (set (match_dup 0) (neg:SI (match_dup 2))))]
11431 machine_mode mode = GET_MODE (operands[3]);
11432 rtx_code rc = GET_CODE (operands[4]);
11434 if (mode == CCFPmode || mode == CCFPEmode)
11435 rc = reverse_condition_maybe_unordered (rc);
11437 rc = reverse_condition (rc);
11439 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
11441 [(set_attr "conds" "use")
11442 (set_attr "length" "4")
11443 (set_attr "arch" "t2,32")
11444 (set_attr "enabled_for_short_it" "yes,no")
11445 (set_attr "type" "logic_shift_imm")]
11448 (define_insn "*arith_adjacentmem"
11449 [(set (match_operand:SI 0 "s_register_operand" "=r")
11450 (match_operator:SI 1 "shiftable_operator"
11451 [(match_operand:SI 2 "memory_operand" "m")
11452 (match_operand:SI 3 "memory_operand" "m")]))
11453 (clobber (match_scratch:SI 4 "=r"))]
11454 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
11460 HOST_WIDE_INT val1 = 0, val2 = 0;
11462 if (REGNO (operands[0]) > REGNO (operands[4]))
11464 ldm[1] = operands[4];
11465 ldm[2] = operands[0];
11469 ldm[1] = operands[0];
11470 ldm[2] = operands[4];
11473 base_reg = XEXP (operands[2], 0);
11475 if (!REG_P (base_reg))
11477 val1 = INTVAL (XEXP (base_reg, 1));
11478 base_reg = XEXP (base_reg, 0);
11481 if (!REG_P (XEXP (operands[3], 0)))
11482 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
11484 arith[0] = operands[0];
11485 arith[3] = operands[1];
11499 if (val1 !=0 && val2 != 0)
11503 if (val1 == 4 || val2 == 4)
11504 /* Other val must be 8, since we know they are adjacent and neither
11506 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
11507 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
11509 ldm[0] = ops[0] = operands[4];
11511 ops[2] = GEN_INT (val1);
11512 output_add_immediate (ops);
11514 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11516 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11520 /* Offset is out of range for a single add, so use two ldr. */
11523 ops[2] = GEN_INT (val1);
11524 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11526 ops[2] = GEN_INT (val2);
11527 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11530 else if (val1 != 0)
11533 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11535 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11540 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11542 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11544 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
11547 [(set_attr "length" "12")
11548 (set_attr "predicable" "yes")
11549 (set_attr "type" "load_4")]
11552 ; This pattern is never tried by combine, so do it as a peephole
11555 [(set (match_operand:SI 0 "arm_general_register_operand" "")
11556 (match_operand:SI 1 "arm_general_register_operand" ""))
11557 (set (reg:CC CC_REGNUM)
11558 (compare:CC (match_dup 1) (const_int 0)))]
11560 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
11561 (set (match_dup 0) (match_dup 1))])]
11566 [(set (match_operand:SI 0 "s_register_operand" "")
11567 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
11569 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
11570 [(match_operand:SI 3 "s_register_operand" "")
11571 (match_operand:SI 4 "arm_rhs_operand" "")]))))
11572 (clobber (match_operand:SI 5 "s_register_operand" ""))]
11574 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
11575 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
11580 ;; This split can be used because CC_Z mode implies that the following
11581 ;; branch will be an equality, or an unsigned inequality, so the sign
11582 ;; extension is not needed.
11585 [(set (reg:CC_Z CC_REGNUM)
11587 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
11589 (match_operand 1 "const_int_operand" "")))
11590 (clobber (match_scratch:SI 2 ""))]
11592 && ((UINTVAL (operands[1]))
11593 == ((UINTVAL (operands[1])) >> 24) << 24)"
11594 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
11595 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
11597 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
11600 ;; ??? Check the patterns above for Thumb-2 usefulness
11602 (define_expand "prologue"
11603 [(clobber (const_int 0))]
11606 arm_expand_prologue ();
11608 thumb1_expand_prologue ();
11613 (define_expand "epilogue"
11614 [(clobber (const_int 0))]
11617 if (crtl->calls_eh_return)
11618 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
11621 thumb1_expand_epilogue ();
11622 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
11623 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
11625 else if (HAVE_return)
11627 /* HAVE_return is testing for USE_RETURN_INSN (FALSE). Hence,
11628 no need for explicit testing again. */
11629 emit_jump_insn (gen_return ());
11631 else if (TARGET_32BIT)
11633 arm_expand_epilogue (true);
11639 ;; Note - although unspec_volatile's USE all hard registers,
11640 ;; USEs are ignored after relaod has completed. Thus we need
11641 ;; to add an unspec of the link register to ensure that flow
11642 ;; does not think that it is unused by the sibcall branch that
11643 ;; will replace the standard function epilogue.
11644 (define_expand "sibcall_epilogue"
11645 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
11646 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
11649 arm_expand_epilogue (false);
11654 (define_expand "eh_epilogue"
11655 [(use (match_operand:SI 0 "register_operand"))
11656 (use (match_operand:SI 1 "register_operand"))
11657 (use (match_operand:SI 2 "register_operand"))]
11661 cfun->machine->eh_epilogue_sp_ofs = operands[1];
11662 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
11664 rtx ra = gen_rtx_REG (Pmode, 2);
11666 emit_move_insn (ra, operands[2]);
11669 /* This is a hack -- we may have crystalized the function type too
11671 cfun->machine->func_type = 0;
11675 ;; This split is only used during output to reduce the number of patterns
11676 ;; that need assembler instructions adding to them. We allowed the setting
11677 ;; of the conditions to be implicit during rtl generation so that
11678 ;; the conditional compare patterns would work. However this conflicts to
11679 ;; some extent with the conditional data operations, so we have to split them
11682 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
11683 ;; conditional execution sufficient?
11686 [(set (match_operand:SI 0 "s_register_operand" "")
11687 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11688 [(match_operand 2 "" "") (match_operand 3 "" "")])
11690 (match_operand 4 "" "")))
11691 (clobber (reg:CC CC_REGNUM))]
11692 "TARGET_ARM && reload_completed"
11693 [(set (match_dup 5) (match_dup 6))
11694 (cond_exec (match_dup 7)
11695 (set (match_dup 0) (match_dup 4)))]
11698 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11699 operands[2], operands[3]);
11700 enum rtx_code rc = GET_CODE (operands[1]);
11702 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11703 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11704 if (mode == CCFPmode || mode == CCFPEmode)
11705 rc = reverse_condition_maybe_unordered (rc);
11707 rc = reverse_condition (rc);
11709 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
11714 [(set (match_operand:SI 0 "s_register_operand" "")
11715 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11716 [(match_operand 2 "" "") (match_operand 3 "" "")])
11717 (match_operand 4 "" "")
11719 (clobber (reg:CC CC_REGNUM))]
11720 "TARGET_ARM && reload_completed"
11721 [(set (match_dup 5) (match_dup 6))
11722 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
11723 (set (match_dup 0) (match_dup 4)))]
11726 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11727 operands[2], operands[3]);
11729 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11730 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11735 [(set (match_operand:SI 0 "s_register_operand" "")
11736 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11737 [(match_operand 2 "" "") (match_operand 3 "" "")])
11738 (match_operand 4 "" "")
11739 (match_operand 5 "" "")))
11740 (clobber (reg:CC CC_REGNUM))]
11741 "TARGET_ARM && reload_completed"
11742 [(set (match_dup 6) (match_dup 7))
11743 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
11744 (set (match_dup 0) (match_dup 4)))
11745 (cond_exec (match_dup 8)
11746 (set (match_dup 0) (match_dup 5)))]
11749 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11750 operands[2], operands[3]);
11751 enum rtx_code rc = GET_CODE (operands[1]);
11753 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11754 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11755 if (mode == CCFPmode || mode == CCFPEmode)
11756 rc = reverse_condition_maybe_unordered (rc);
11758 rc = reverse_condition (rc);
11760 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
11765 [(set (match_operand:SI 0 "s_register_operand" "")
11766 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11767 [(match_operand:SI 2 "s_register_operand" "")
11768 (match_operand:SI 3 "arm_add_operand" "")])
11769 (match_operand:SI 4 "arm_rhs_operand" "")
11771 (match_operand:SI 5 "s_register_operand" ""))))
11772 (clobber (reg:CC CC_REGNUM))]
11773 "TARGET_ARM && reload_completed"
11774 [(set (match_dup 6) (match_dup 7))
11775 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
11776 (set (match_dup 0) (match_dup 4)))
11777 (cond_exec (match_dup 8)
11778 (set (match_dup 0) (not:SI (match_dup 5))))]
11781 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11782 operands[2], operands[3]);
11783 enum rtx_code rc = GET_CODE (operands[1]);
11785 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11786 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11787 if (mode == CCFPmode || mode == CCFPEmode)
11788 rc = reverse_condition_maybe_unordered (rc);
11790 rc = reverse_condition (rc);
11792 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
;; Conditional select between operand 1 and NOT operand 2 under a stored CC
;; comparison; alternative 0 ties operand 1 to operand 0 so only the
;; predicated MVN is needed, alternative 1 adds a leading conditional MOV.
;; NOTE(review): lines 11801 and 11803-11805 (if_then_else wrapper /
;; condition / first template line) are absent from this extraction.
11796 (define_insn "*cond_move_not"
11797 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11798 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
11799 [(match_operand 3 "cc_register" "") (const_int 0)])
11800 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11802 (match_operand:SI 2 "s_register_operand" "r,r"))))]
11806 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
11807 [(set_attr "conds" "use")
11808 (set_attr "type" "mvn_reg,multiple")
11809 (set_attr "length" "4,8")]
11812 ;; The next two patterns occur when an AND operation is followed by a
11813 ;; scc insn sequence
11815 (define_insn "*sign_extract_onebit"
11816 [(set (match_operand:SI 0 "s_register_operand" "=r")
11817 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11819 (match_operand:SI 2 "const_int_operand" "n")))
11820 (clobber (reg:CC CC_REGNUM))]
11823 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11824 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
11825 return \"mvnne\\t%0, #0\";
11827 [(set_attr "conds" "clob")
11828 (set_attr "length" "8")
11829 (set_attr "type" "multiple")]
11832 (define_insn "*not_signextract_onebit"
11833 [(set (match_operand:SI 0 "s_register_operand" "=r")
11835 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11837 (match_operand:SI 2 "const_int_operand" "n"))))
11838 (clobber (reg:CC CC_REGNUM))]
11841 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11842 output_asm_insn (\"tst\\t%1, %2\", operands);
11843 output_asm_insn (\"mvneq\\t%0, #0\", operands);
11844 return \"movne\\t%0, #0\";
11846 [(set_attr "conds" "clob")
11847 (set_attr "length" "12")
11848 (set_attr "type" "multiple")]
11850 ;; ??? The above patterns need auditing for Thumb-2
11852 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
11853 ;; expressions. For simplicity, the first register is also in the unspec
11855 ;; To avoid the usage of GNU extension, the length attribute is computed
11856 ;; in a C function arm_attr_length_push_multi.
11857 (define_insn "*push_multi"
11858 [(match_parallel 2 "multi_register_push"
11859 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
11860 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
11861 UNSPEC_PUSH_MULT))])]
11865 int num_saves = XVECLEN (operands[2], 0);
11867 /* For the StrongARM at least it is faster to
11868 use STR to store only a single register.
11869 In Thumb mode always use push, and the assembler will pick
11870 something appropriate. */
11871 if (num_saves == 1 && TARGET_ARM)
11872 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
11879 strcpy (pattern, \"push%?\\t{%1\");
11881 strcpy (pattern, \"push\\t{%1\");
11883 for (i = 1; i < num_saves; i++)
11885 strcat (pattern, \", %|\");
11887 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
11890 strcat (pattern, \"}\");
11891 output_asm_insn (pattern, operands);
11896 [(set_attr "type" "store_16")
11897 (set (attr "length")
11898 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
;; Zero-length pseudo-insn tying two stack-related registers to a wildcard
;; memory block -- presumably to create a dependence that stops the scheduler
;; moving stack accesses across stack-pointer adjustments (TODO: confirm
;; against the callers in arm.cc; the insn condition/template lines
;; 11905-11907 are absent from this extraction).
11901 (define_insn "stack_tie"
11902 [(set (mem:BLK (scratch))
11903 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
11904 (match_operand:SI 1 "s_register_operand" "rk")]
11908 [(set_attr "length" "0")
11909 (set_attr "type" "block")]
11912 ;; Pop (as used in epilogue RTL)
11914 (define_insn "*load_multiple_with_writeback"
11915 [(match_parallel 0 "load_multiple_operation"
11916 [(set (match_operand:SI 1 "s_register_operand" "+rk")
11917 (plus:SI (match_dup 1)
11918 (match_operand:SI 2 "const_int_I_operand" "I")))
11919 (set (match_operand:SI 3 "s_register_operand" "=rk")
11920 (mem:SI (match_dup 1)))
11922 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11925 arm_output_multireg_pop (operands, /*return_pc=*/false,
11926 /*cond=*/const_true_rtx,
11932 [(set_attr "type" "load_16")
11933 (set_attr "predicable" "yes")
11934 (set (attr "length")
11935 (symbol_ref "arm_attr_length_pop_multi (operands,
11936 /*return_pc=*/false,
11937 /*write_back_p=*/true)"))]
11940 ;; Pop with return (as used in epilogue RTL)
11942 ;; This instruction is generated when the registers are popped at the end of
11943 ;; epilogue. Here, instead of popping the value into LR and then generating
11944 ;; jump to LR, value is popped into PC directly. Hence, the pattern is combined
11946 (define_insn "*pop_multiple_with_writeback_and_return"
11947 [(match_parallel 0 "pop_multiple_return"
11949 (set (match_operand:SI 1 "s_register_operand" "+rk")
11950 (plus:SI (match_dup 1)
11951 (match_operand:SI 2 "const_int_I_operand" "I")))
11952 (set (match_operand:SI 3 "s_register_operand" "=rk")
11953 (mem:SI (match_dup 1)))
11955 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11958 arm_output_multireg_pop (operands, /*return_pc=*/true,
11959 /*cond=*/const_true_rtx,
11965 [(set_attr "type" "load_16")
11966 (set_attr "predicable" "yes")
11967 (set (attr "length")
11968 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11969 /*write_back_p=*/true)"))]
11972 (define_insn "*pop_multiple_with_return"
11973 [(match_parallel 0 "pop_multiple_return"
11975 (set (match_operand:SI 2 "s_register_operand" "=rk")
11976 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11978 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11981 arm_output_multireg_pop (operands, /*return_pc=*/true,
11982 /*cond=*/const_true_rtx,
11988 [(set_attr "type" "load_16")
11989 (set_attr "predicable" "yes")
11990 (set (attr "length")
11991 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11992 /*write_back_p=*/false)"))]
11995 ;; Load into PC and return
11996 (define_insn "*ldr_with_return"
11998 (set (reg:SI PC_REGNUM)
11999 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
12000 "TARGET_32BIT && (reload_in_progress || reload_completed)"
12001 "ldr%?\t%|pc, [%0], #4"
12002 [(set_attr "type" "load_4")
12003 (set_attr "predicable" "yes")]
12005 ;; Pop for floating point registers (as used in epilogue RTL)
12006 (define_insn "*vfp_pop_multiple_with_writeback"
12007 [(match_parallel 0 "pop_multiple_fp"
12008 [(set (match_operand:SI 1 "s_register_operand" "+rk")
12009 (plus:SI (match_dup 1)
12010 (match_operand:SI 2 "const_int_I_operand" "I")))
12011 (set (match_operand:DF 3 "vfp_hard_register_operand" "")
12012 (mem:DF (match_dup 1)))])]
12013 "TARGET_32BIT && TARGET_VFP_BASE"
12016 int num_regs = XVECLEN (operands[0], 0);
12019 strcpy (pattern, \"vldm\\t\");
12020 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
12021 strcat (pattern, \"!, {\");
12022 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
12023 strcat (pattern, \"%P0\");
12024 if ((num_regs - 1) > 1)
12026 strcat (pattern, \"-%P1\");
12027 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
12030 strcat (pattern, \"}\");
12031 output_asm_insn (pattern, op_list);
12035 [(set_attr "type" "load_16")
12036 (set_attr "conds" "unconditional")
12037 (set_attr "predicable" "no")]
12040 ;; Special patterns for dealing with the constant pool
12042 (define_insn "align_4"
12043 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
12046 assemble_align (32);
12049 [(set_attr "type" "no_insn")]
12052 (define_insn "align_8"
12053 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
12056 assemble_align (64);
12059 [(set_attr "type" "no_insn")]
12062 (define_insn "consttable_end"
12063 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
12066 making_const_table = FALSE;
12069 [(set_attr "type" "no_insn")]
12072 (define_insn "consttable_1"
12073 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
12076 making_const_table = TRUE;
12077 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
12078 assemble_zeros (3);
12081 [(set_attr "length" "4")
12082 (set_attr "type" "no_insn")]
12085 (define_insn "consttable_2"
12086 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
12090 rtx x = operands[0];
12091 making_const_table = TRUE;
12092 switch (GET_MODE_CLASS (GET_MODE (x)))
12095 arm_emit_fp16_const (x);
12098 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
12099 assemble_zeros (2);
12104 [(set_attr "length" "4")
12105 (set_attr "type" "no_insn")]
12108 (define_insn "consttable_4"
12109 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
12113 rtx x = operands[0];
12114 making_const_table = TRUE;
12115 scalar_float_mode float_mode;
12116 if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
12117 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
12120 /* XXX: Sometimes gcc does something really dumb and ends up with
12121 a HIGH in a constant pool entry, usually because it's trying to
12122 load into a VFP register. We know this will always be used in
12123 combination with a LO_SUM which ignores the high bits, so just
12124 strip off the HIGH. */
12125 if (GET_CODE (x) == HIGH)
12127 assemble_integer (x, 4, BITS_PER_WORD, 1);
12128 mark_symbol_refs_as_used (x);
12132 [(set_attr "length" "4")
12133 (set_attr "type" "no_insn")]
12136 (define_insn "consttable_8"
12137 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
12141 making_const_table = TRUE;
12142 scalar_float_mode float_mode;
12143 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
12144 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
12145 float_mode, BITS_PER_WORD);
12147 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
12150 [(set_attr "length" "8")
12151 (set_attr "type" "no_insn")]
12154 (define_insn "consttable_16"
12155 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
12159 making_const_table = TRUE;
12160 scalar_float_mode float_mode;
12161 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
12162 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
12163 float_mode, BITS_PER_WORD);
12165 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
12168 [(set_attr "length" "16")
12169 (set_attr "type" "no_insn")]
12172 ;; V5 Instructions,
;; Count leading zeros, available from ARMv5T (arm_arch5t) in 32-bit modes.
;; NOTE(review): the output-template line (12178, the CLZ mnemonic) is
;; absent from this extraction -- confirm against the full file.
12174 (define_insn "clzsi2"
12175 [(set (match_operand:SI 0 "s_register_operand" "=r")
12176 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
12177 "TARGET_32BIT && arm_arch5t"
12179 [(set_attr "predicable" "yes")
12180 (set_attr "type" "clz")])
;; Bit-reverse (RBIT), modelled as an UNSPEC; requires Thumb-2-capable
;; architectures (arm_arch_thumb2).  Used by the ctzsi2 split below.
;; NOTE(review): the output-template line (12186) is absent from this
;; extraction -- confirm against the full file.
12182 (define_insn "rbitsi2"
12183 [(set (match_operand:SI 0 "s_register_operand" "=r")
12184 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
12185 "TARGET_32BIT && arm_arch_thumb2"
12187 [(set_attr "predicable" "yes")
12188 (set_attr "type" "clz")])
12190 ;; Keep this as a CTZ expression until after reload and then split
12191 ;; into RBIT + CLZ. Since RBIT is represented as an UNSPEC it is unlikely
12192 ;; to fold with any other expression.
;; Count trailing zeros: kept as a CTZ rtx until after reload, then split
;; into RBIT followed by CLZ on the same register (ctz(x) == clz(rbit(x))).
;; Keeping the UNSPEC-based RBIT out of the pre-reload rtx stream avoids
;; blocking combine, per the comment preceding this pattern in the file.
12194 (define_insn_and_split "ctzsi2"
12195 [(set (match_operand:SI 0 "s_register_operand" "=r")
12196 (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
12197 "TARGET_32BIT && arm_arch_thumb2"
12199 "&& reload_completed"
12202 emit_insn (gen_rbitsi2 (operands[0], operands[1]));
12203 emit_insn (gen_clzsi2 (operands[0], operands[0]));
12207 ;; V5E instructions.
;; Standard-named prefetch pattern; requires ARMv5TE (arm_arch5te).
;; Operands 1 (rw) and 2 (locality) are matched but the visible attrs treat
;; it as a plain load_4.  NOTE(review): the output-template line (12214,
;; presumably a PLD) is absent from this extraction -- confirm.
12209 (define_insn "prefetch"
12210 [(prefetch (match_operand:SI 0 "address_operand" "p")
12211 (match_operand:SI 1 "" "")
12212 (match_operand:SI 2 "" ""))]
12213 "TARGET_32BIT && arm_arch5te"
12215 [(set_attr "type" "load_4")]
12218 ;; General predication pattern
12221 [(match_operator 0 "arm_comparison_operator"
12222 [(match_operand 1 "cc_register" "")
12225 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
12227 [(set_attr "predicated" "yes")]
12230 (define_insn "force_register_use"
12231 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
12234 [(set_attr "length" "0")
12235 (set_attr "type" "no_insn")]
12239 ;; Patterns for exception handling

;; eh_return expander: forwards to the ARM- or Thumb-specific helper.
;; The selecting condition between the two gen_* calls is among the lines
;; missing from this extraction — presumably a TARGET_ARM/TARGET_THUMB test.
12241 (define_expand "eh_return"
12242 [(use (match_operand 0 "general_operand"))]
12247 emit_insn (gen_arm_eh_return (operands[0]));
12249 emit_insn (gen_thumb_eh_return (operands[0]));

12254 ;; We can't expand this before we know where the link register is stored.
;; Split after reload; the split body stores the handler address via
;; arm_set_return_address, using the scratch register in operand 1.
12255 (define_insn_and_split "arm_eh_return"
12256 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
12258 (clobber (match_scratch:SI 1 "=&r"))]
12261 "&& reload_completed"
12265 arm_set_return_address (operands[0], operands[1]);
;; Hardware thread pointer read: MRC from coprocessor 15, c13/c0/3.
;; Predicable; non-volatile UNSPEC so CSE may reuse the value.
12273 (define_insn "load_tp_hard"
12274 [(set (match_operand:SI 0 "register_operand" "=r")
12275 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
12277 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
12278 [(set_attr "predicable" "yes")
12279 (set_attr "type" "mrs")]

12282 ;; Used by the TLS register based stack protector
;; Same MRC as load_tp_hard but modelled as unspec_volatile so it is
;; re-executed rather than CSE'd — required for the stack-protector reload.
12283 (define_insn "reload_tp_hard"
12284 [(set (match_operand:SI 0 "register_operand" "=r")
12285 (unspec_volatile:SI [(const_int 0)] VUNSPEC_MRC))]
12287 "mrc\\tp15, 0, %0, c13, c0, 3\\t@ reload_tp_hard"
12288 [(set_attr "type" "mrs")]

12291 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
;; Software TLS for FDPIC: call __aeabi_read_tp, which returns the thread
;; pointer in r0.  Clobbers the FDPIC base register, lr, ip and the flags.
12292 (define_insn "load_tp_soft_fdpic"
12293 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
12294 (clobber (reg:SI FDPIC_REGNUM))
12295 (clobber (reg:SI LR_REGNUM))
12296 (clobber (reg:SI IP_REGNUM))
12297 (clobber (reg:CC CC_REGNUM))]
12298 "TARGET_SOFT_TP && TARGET_FDPIC"
12299 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
12300 [(set_attr "conds" "clob")
12301 (set_attr "type" "branch")]

12304 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
;; Non-FDPIC software TLS: identical call, minus the FDPIC clobber.
12305 (define_insn "load_tp_soft"
12306 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
12307 (clobber (reg:SI LR_REGNUM))
12308 (clobber (reg:SI IP_REGNUM))
12309 (clobber (reg:CC CC_REGNUM))]
12310 "TARGET_SOFT_TP && !TARGET_FDPIC"
12311 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
12312 [(set_attr "conds" "clob")
12313 (set_attr "type" "branch")]

12316 ;; tls descriptor call
;; TLS descriptor call: emits a local "LPIC<n>" label (n = operand 1)
;; then a BL to the descriptor in operand 0.  Result arrives in r0;
;; r1, lr and the condition codes are clobbered.
12317 (define_insn "tlscall"
12318 [(set (reg:SI R0_REGNUM)
12319 (unspec:SI [(reg:SI R0_REGNUM)
12320 (match_operand:SI 0 "" "X")
12321 (match_operand 1 "" "")] UNSPEC_TLS))
12322 (clobber (reg:SI R1_REGNUM))
12323 (clobber (reg:SI LR_REGNUM))
12324 (clobber (reg:SI CC_REGNUM))]
12327 targetm.asm_out.internal_label (asm_out_file, "LPIC",
12328 INTVAL (operands[1]));
12329 return "bl\\t%c0(tlscall)";
12331 [(set_attr "conds" "clob")
12332 (set_attr "length" "4")
12333 (set_attr "type" "branch")]

12336 ;; For thread pointer builtin
;; __builtin_thread_pointer expander; arm_load_tp picks the hard or soft
;; sequence above.
12337 (define_expand "get_thread_pointersi"
12338 [(match_operand:SI 0 "s_register_operand")]
12342 arm_load_tp (operands[0]);
12348 ;; We only care about the lower 16 bits of the constant
12349 ;; being inserted into the upper 16 bits of the register.
;; MOVT-style insert: writes a constant into a zero_extract of operand 0
;; (per the comment above, the top 16 bits).  The extract's position/width
;; operands and the output template are missing from this extraction.
;; Two alternatives: generic 32-bit and ARMv8-M Baseline ("32,v8mb").
12350 (define_insn "*arm_movtas_ze"
12351 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
12354 (match_operand:SI 1 "const_int_operand" ""))]
12359 [(set_attr "arch" "32,v8mb")
12360 (set_attr "predicable" "yes")
12361 (set_attr "length" "4")
12362 (set_attr "type" "alu_sreg")]

;; 32-bit byte swap (bswap:SI), three alternatives: Thumb-1 (16-bit,
;; not predicable), Thumb-2 (16-bit) and ARM (32-bit encoding).
12365 (define_insn "*arm_rev"
12366 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12367 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
12373 [(set_attr "arch" "t1,t2,32")
12374 (set_attr "length" "2,2,4")
12375 (set_attr "predicable" "no,yes,yes")
12376 (set_attr "type" "rev")]

;; Open-coded byte reversal via rotates/shifts/masks for ARM state —
;; presumably for cores without the REV instruction (the enabling
;; condition is among the missing lines).  Uses operands 2 and 3 as
;; caller-supplied temporaries.
12379 (define_expand "arm_legacy_rev"
12380 [(set (match_operand:SI 2 "s_register_operand")
12381 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
12385 (lshiftrt:SI (match_dup 2)
12387 (set (match_operand:SI 3 "s_register_operand")
12388 (rotatert:SI (match_dup 1)
12391 (and:SI (match_dup 2)
12392 (const_int -65281)))
12393 (set (match_operand:SI 0 "s_register_operand")
12394 (xor:SI (match_dup 3)

12400 ;; Reuse temporaries to keep register pressure down.
;; Thumb variant of the open-coded byte reversal; takes four temporaries
;; (operands 2-5) and builds the result with shift/or/rotate steps.
;; Several interior RTL lines are missing from this extraction.
12401 (define_expand "thumb_legacy_rev"
12402 [(set (match_operand:SI 2 "s_register_operand")
12403 (ashift:SI (match_operand:SI 1 "s_register_operand")
12405 (set (match_operand:SI 3 "s_register_operand")
12406 (lshiftrt:SI (match_dup 1)
12409 (ior:SI (match_dup 3)
12411 (set (match_operand:SI 4 "s_register_operand")
12413 (set (match_operand:SI 5 "s_register_operand")
12414 (rotatert:SI (match_dup 1)
12417 (ashift:SI (match_dup 5)
12420 (lshiftrt:SI (match_dup 5)
12423 (ior:SI (match_dup 5)
12426 (rotatert:SI (match_dup 5)
12428 (set (match_operand:SI 0 "s_register_operand")
12429 (ior:SI (match_dup 5)
12435 ;; ARM-specific expansion of signed mod by power of 2
12436 ;; using conditional negate.
12437 ;; For r0 % n where n is a power of 2 produce:
12439 ;; and r0, r0, #(n - 1)
12440 ;; and r1, r1, #(n - 1)
12441 ;; rsbpl r0, r1, #0

;; Expander body (C code).  Bails out (FAIL — the FAIL lines are missing
;; from this extraction) unless operand 2 is a positive power of two.
;; Two strategies are visible:
;;   * x % 2 special case: compare operand 1 against 0 (LT), mask with
;;     val-1, then select NEG(masked) vs. masked via IF_THEN_ELSE.
;;   * general case: compute 0 - op1 with subsi3_compare0, reuse the CC
;;     result it sets (extracted from the insn's PARALLEL), mask both the
;;     original and negated values, and select on GE.
;; COND_EXEC is deliberately avoided at expand time (see comment below).
12443 (define_expand "modsi3"
12444 [(match_operand:SI 0 "register_operand")
12445 (match_operand:SI 1 "register_operand")
12446 (match_operand:SI 2 "const_int_operand")]
12449 HOST_WIDE_INT val = INTVAL (operands[2]);
12452 || exact_log2 (val) <= 0)
12455 rtx mask = GEN_INT (val - 1);
12457 /* In the special case of x0 % 2 we can do the even shorter:
12460 rsblt r0, r0, #0. */
12464 rtx cc_reg = arm_gen_compare_reg (LT,
12465 operands[1], const0_rtx, NULL_RTX);
12466 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
12467 rtx masked = gen_reg_rtx (SImode);
12469 emit_insn (gen_andsi3 (masked, operands[1], mask));
12470 emit_move_insn (operands[0],
12471 gen_rtx_IF_THEN_ELSE (SImode, cond,
12472 gen_rtx_NEG (SImode,
12478 rtx neg_op = gen_reg_rtx (SImode);
12479 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
12482 /* Extract the condition register and mode. */
12483 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
12484 rtx cc_reg = SET_DEST (cmp);
12485 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
12487 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
12489 rtx masked_neg = gen_reg_rtx (SImode);
12490 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
12492 /* We want a conditional negate here, but emitting COND_EXEC rtxes
12493 during expand does not always work. Do an IF_THEN_ELSE instead. */
12494 emit_move_insn (operands[0],
12495 gen_rtx_IF_THEN_ELSE (SImode, cond,
12496 gen_rtx_NEG (SImode, masked_neg),
;; 32-bit byte-swap expander.  On cores with REV this resolves to the
;; *arm_rev insn; otherwise it falls back to the legacy open-coded
;; sequences (the branch selecting between them is among the lines
;; missing here — presumably an arm_arch6 / TARGET_THUMB test).
;; The "arm_arch6 || !optimize_size" gate skips the bulky fallback at -Os.
12504 (define_expand "bswapsi2"
12505 [(set (match_operand:SI 0 "s_register_operand")
12506 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
12507 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
12511 rtx op2 = gen_reg_rtx (SImode);
12512 rtx op3 = gen_reg_rtx (SImode);
12516 rtx op4 = gen_reg_rtx (SImode);
12517 rtx op5 = gen_reg_rtx (SImode);
12519 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
12520 op2, op3, op4, op5));
12524 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],

12533 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
12534 ;; and unsigned variants, respectively. For rev16, expose
12535 ;; byte-swapping in the lower 16 bits only.

;; REVSH: byte-swap a halfword and sign-extend to SImode.
12536 (define_insn "*arm_revsh"
12537 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12538 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
12544 [(set_attr "arch" "t1,t2,32")
12545 (set_attr "length" "2,2,4")
12546 (set_attr "type" "rev")]

;; REV16 on a plain HImode bswap.
12549 (define_insn "*arm_rev16"
12550 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
12551 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
12557 [(set_attr "arch" "t1,t2,32")
12558 (set_attr "length" "2,2,4")
12559 (set_attr "type" "rev")]

12562 ;; There are no canonicalisation rules for the position of the lshiftrt, ashift
12563 ;; operations within an IOR/AND RTX, therefore we have two patterns matching
12564 ;; each valid permutation.

;; REV16 on SImode expressed as (shifted-left & mask3) | (shifted-right
;; & mask2); the aarch_rev16_* predicates validate the mask immediates.
12566 (define_insn "arm_rev16si2"
12567 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
12568 (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
12570 (match_operand:SI 3 "const_int_operand" "n,n,n"))
12571 (and:SI (lshiftrt:SI (match_dup 1)
12573 (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
12575 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
12576 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
12578 [(set_attr "arch" "t1,t2,32")
12579 (set_attr "length" "2,2,4")
12580 (set_attr "type" "rev")]

;; Same as arm_rev16si2 with the IOR arms in the opposite order (see the
;; canonicalisation note above).
12583 (define_insn "arm_rev16si2_alt"
12584 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
12585 (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
12587 (match_operand:SI 2 "const_int_operand" "n,n,n"))
12588 (and:SI (ashift:SI (match_dup 1)
12590 (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
12592 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
12593 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
12595 [(set_attr "arch" "t1,t2,32")
12596 (set_attr "length" "2,2,4")
12597 (set_attr "type" "rev")]

;; HImode byte-swap expander; its enabling condition and body (if any)
;; are among the lines missing from this extraction.
12600 (define_expand "bswaphi2"
12601 [(set (match_operand:HI 0 "s_register_operand")
12602 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
12607 ;; Patterns for LDRD/STRD in Thumb2 mode

;; LDRD with immediate offset: two SImode loads at base+ofs and
;; base+ofs+4 merged into one ldrd.  Only valid after reload;
;; operands_ok_ldrd_strd checks the register-pair/offset legality.
12609 (define_insn "*thumb2_ldrd"
12610 [(set (match_operand:SI 0 "s_register_operand" "=r")
12611 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12612 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
12613 (set (match_operand:SI 3 "s_register_operand" "=r")
12614 (mem:SI (plus:SI (match_dup 1)
12615 (match_operand:SI 4 "const_int_operand" ""))))]
12616 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12617 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
12618 && (operands_ok_ldrd_strd (operands[0], operands[3],
12619 operands[1], INTVAL (operands[2]),
12621 "ldrd%?\t%0, %3, [%1, %2]"
12622 [(set_attr "type" "load_8")
12623 (set_attr "predicable" "yes")])

;; LDRD at the bare base register (offsets 0 and, per the -4 sibling
;; below, presumably +4 — the offset line is missing from this extraction).
12625 (define_insn "*thumb2_ldrd_base"
12626 [(set (match_operand:SI 0 "s_register_operand" "=r")
12627 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12628 (set (match_operand:SI 2 "s_register_operand" "=r")
12629 (mem:SI (plus:SI (match_dup 1)
12631 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12632 && (operands_ok_ldrd_strd (operands[0], operands[2],
12633 operands[1], 0, false, true))"
12634 "ldrd%?\t%0, %2, [%1]"
12635 [(set_attr "type" "load_8")
12636 (set_attr "predicable" "yes")])

;; LDRD at base-4: loads from base-4 and base, emitted as [%1, #-4].
12638 (define_insn "*thumb2_ldrd_base_neg"
12639 [(set (match_operand:SI 0 "s_register_operand" "=r")
12640 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12642 (set (match_operand:SI 2 "s_register_operand" "=r")
12643 (mem:SI (match_dup 1)))]
12644 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12645 && (operands_ok_ldrd_strd (operands[0], operands[2],
12646 operands[1], -4, false, true))"
12647 "ldrd%?\t%0, %2, [%1, #-4]"
12648 [(set_attr "type" "load_8")
12649 (set_attr "predicable" "yes")])
;; STRD with immediate offset: mirror of *thumb2_ldrd for stores —
;; two adjacent SImode stores merged into one strd after reload.
12651 (define_insn "*thumb2_strd"
12652 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12653 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
12654 (match_operand:SI 2 "s_register_operand" "r"))
12655 (set (mem:SI (plus:SI (match_dup 0)
12656 (match_operand:SI 3 "const_int_operand" "")))
12657 (match_operand:SI 4 "s_register_operand" "r"))]
12658 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12659 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
12660 && (operands_ok_ldrd_strd (operands[2], operands[4],
12661 operands[0], INTVAL (operands[1]),
12663 "strd%?\t%2, %4, [%0, %1]"
12664 [(set_attr "type" "store_8")
12665 (set_attr "predicable" "yes")])

;; STRD at the bare base register (second store's offset line is
;; missing from this extraction; presumably +4, cf. the ldrd twin).
12667 (define_insn "*thumb2_strd_base"
12668 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
12669 (match_operand:SI 1 "s_register_operand" "r"))
12670 (set (mem:SI (plus:SI (match_dup 0)
12672 (match_operand:SI 2 "s_register_operand" "r"))]
12673 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12674 && (operands_ok_ldrd_strd (operands[1], operands[2],
12675 operands[0], 0, false, false))"
12676 "strd%?\t%1, %2, [%0]"
12677 [(set_attr "type" "store_8")
12678 (set_attr "predicable" "yes")])

;; STRD at base-4: stores to base-4 and base, emitted as [%0, #-4].
12680 (define_insn "*thumb2_strd_base_neg"
12681 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12683 (match_operand:SI 1 "s_register_operand" "r"))
12684 (set (mem:SI (match_dup 0))
12685 (match_operand:SI 2 "s_register_operand" "r"))]
12686 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12687 && (operands_ok_ldrd_strd (operands[1], operands[2],
12688 operands[0], -4, false, false))"
12689 "strd%?\t%1, %2, [%0, #-4]"
12690 [(set_attr "type" "store_8")
12691 (set_attr "predicable" "yes")])
12693 ;; ARMv8 CRC32 instructions.
;; One pattern per CRC variant via the <crc_variant>/<crc_mode> iterators
;; (the UNSPEC tag and enabling condition are among the lines missing
;; from this extraction).  Always unconditional ("conds" unconditional).
12694 (define_insn "arm_<crc_variant>"
12695 [(set (match_operand:SI 0 "s_register_operand" "=r")
12696 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
12697 (match_operand:<crc_mode> 2 "s_register_operand" "r")]
12700 "<crc_variant>\\t%0, %1, %2"
12701 [(set_attr "type" "crc")
12702 (set_attr "conds" "unconditional")]

12705 ;; Load the load/store double peephole optimizations.
12706 (include "ldrdstrd.md")

12708 ;; Load the load/store multiple patterns
12709 (include "ldmstm.md")
12711 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
12712 ;; large lists without explicit writeback generated for APCS_FRAME epilogue.
12713 ;; The operands are validated through the load_multiple_operation
12714 ;; match_parallel predicate rather than through constraints so enable it only
;; Post-reload multi-register load; assembly produced at output time by
;; arm_output_multireg_pop (non-return, unconditional form here).
12716 (define_insn "*load_multiple"
12717 [(match_parallel 0 "load_multiple_operation"
12718 [(set (match_operand:SI 2 "s_register_operand" "=rk")
12719 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12721 "TARGET_32BIT && reload_completed"
12724 arm_output_multireg_pop (operands, /*return_pc=*/false,
12725 /*cond=*/const_true_rtx,
12731 [(set_attr "predicable" "yes")]

;; Soft-float copysignf: result := op2, then overwrite its low 31 bits
;; (position 0, width 31 via insv_t2) with those of op1, so only op2's
;; sign bit survives.
12734 (define_expand "copysignsf3"
12735 [(match_operand:SF 0 "register_operand")
12736 (match_operand:SF 1 "register_operand")
12737 (match_operand:SF 2 "register_operand")]
12738 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
12740 emit_move_insn (operands[0], operands[2]);
12741 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
12742 GEN_INT (31), GEN_INT (0),
12743 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));

;; Soft-float copysign for DFmode: low word copied from op1 unchanged;
;; high word = op1's high word with bit 31 replaced by op2's sign
;; (op2_high >> 31, inserted via a 1-bit insv_t2 at position 31).
12748 (define_expand "copysigndf3"
12749 [(match_operand:DF 0 "register_operand")
12750 (match_operand:DF 1 "register_operand")
12751 (match_operand:DF 2 "register_operand")]
12752 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
12754 rtx op0_low = gen_lowpart (SImode, operands[0]);
12755 rtx op0_high = gen_highpart (SImode, operands[0]);
12756 rtx op1_low = gen_lowpart (SImode, operands[1]);
12757 rtx op1_high = gen_highpart (SImode, operands[1]);
12758 rtx op2_high = gen_highpart (SImode, operands[2]);
12760 rtx scratch1 = gen_reg_rtx (SImode);
12761 rtx scratch2 = gen_reg_rtx (SImode);
12762 emit_move_insn (scratch1, op2_high);
12763 emit_move_insn (scratch2, op1_high);
12765 emit_insn(gen_rtx_SET(scratch1,
12766 gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT(31))));
12767 emit_insn(gen_insv_t2(scratch2, GEN_INT(1), GEN_INT(31), scratch1));
12768 emit_move_insn (op0_low, op1_low);
12769 emit_move_insn (op0_high, scratch2);
12775 ;; movmisalign for DImode
;; Misaligned DImode move: split into two movmisalignsi moves of the
;; low and high SImode halves.
12776 (define_expand "movmisaligndi"
12777 [(match_operand:DI 0 "general_operand")
12778 (match_operand:DI 1 "general_operand")]
12781 rtx lo_op0 = gen_lowpart (SImode, operands[0]);
12782 rtx lo_op1 = gen_lowpart (SImode, operands[1]);
12783 rtx hi_op0 = gen_highpart_mode (SImode, DImode, operands[0]);
12784 rtx hi_op1 = gen_highpart_mode (SImode, DImode, operands[1]);
12786 emit_insn (gen_movmisalignsi (lo_op0, lo_op1));
12787 emit_insn (gen_movmisalignsi (hi_op0, hi_op1));

12791 ;; movmisalign patterns for HImode and SImode.
;; HI/SI misaligned move.  Loads from memory go through unaligned_loadhiu
;; (into an SImode temp, then narrowed) or unaligned_loadsi; everything
;; else goes through unaligned_store<mode>.  If neither side is a core
;; register, operand 1 is forced into one first so expansion cannot fail.
12792 (define_expand "movmisalign<mode>"
12793 [(match_operand:HSI 0 "general_operand")
12794 (match_operand:HSI 1 "general_operand")]
12797 /* This pattern is not permitted to fail during expansion: if both arguments
12798 are non-registers (e.g. memory := constant), force operand 1 into a
12800 rtx (* gen_unaligned_load)(rtx, rtx);
12801 rtx tmp_dest = operands[0];
12802 if (!s_register_operand (operands[0], <MODE>mode)
12803 && !s_register_operand (operands[1], <MODE>mode))
12804 operands[1] = force_reg (<MODE>mode, operands[1]);
12806 if (<MODE>mode == HImode)
12808 gen_unaligned_load = gen_unaligned_loadhiu;
12809 tmp_dest = gen_reg_rtx (SImode);
12812 gen_unaligned_load = gen_unaligned_loadsi;
12814 if (MEM_P (operands[1]))
12816 emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
12817 if (<MODE>mode == HImode)
12818 emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
12821 emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
;; Coprocessor CDP builtin: six immediate operands, each range-checked
;; at output time by arm_const_bounds before emitting the instruction.
12826 (define_insn "arm_<cdp>"
12827 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12828 (match_operand:SI 1 "immediate_operand" "n")
12829 (match_operand:SI 2 "immediate_operand" "n")
12830 (match_operand:SI 3 "immediate_operand" "n")
12831 (match_operand:SI 4 "immediate_operand" "n")
12832 (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
12833 "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
12835 arm_const_bounds (operands[0], 0, 16);
12836 arm_const_bounds (operands[1], 0, 16);
12837 arm_const_bounds (operands[2], 0, (1 << 5));
12838 arm_const_bounds (operands[3], 0, (1 << 5));
12839 arm_const_bounds (operands[4], 0, (1 << 5));
12840 arm_const_bounds (operands[5], 0, 8);
12841 return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
12843 [(set_attr "length" "4")
12844 (set_attr "type" "coproc")])

;; LDC: coprocessor load from a "Uz"-constrained memory operand.
12846 (define_insn "*ldc"
12847 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12848 (match_operand:SI 1 "immediate_operand" "n")
12849 (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
12850 "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
12852 arm_const_bounds (operands[0], 0, 16);
12853 arm_const_bounds (operands[1], 0, (1 << 5));
12854 return "<ldc>\\tp%c0, CR%c1, %2";
12856 [(set_attr "length" "4")
12857 (set_attr "type" "coproc")])

;; STC: coprocessor store, mirror of *ldc.
12859 (define_insn "*stc"
12860 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12861 (match_operand:SI 1 "immediate_operand" "n")
12862 (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
12863 "arm_coproc_builtin_available (VUNSPEC_<STC>)"
12865 arm_const_bounds (operands[0], 0, 16);
12866 arm_const_bounds (operands[1], 0, (1 << 5));
12867 return "<stc>\\tp%c0, CR%c1, %2";
12869 [(set_attr "length" "4")
12870 (set_attr "type" "coproc")])

;; Expander forms of LDC/STC taking a register address wrapped in a MEM;
;; legitimization then matches the *ldc/*stc insns above.
12872 (define_expand "arm_<ldc>"
12873 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
12874 (match_operand:SI 1 "immediate_operand")
12875 (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
12876 "arm_coproc_builtin_available (VUNSPEC_<LDC>)")

12878 (define_expand "arm_<stc>"
12879 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
12880 (match_operand:SI 1 "immediate_operand")
12881 (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
12882 "arm_coproc_builtin_available (VUNSPEC_<STC>)")
;; MCR: move core register (operand 2) to coprocessor.  The extra
;; (use (match_dup 2)) keeps the source register live.
12884 (define_insn "arm_<mcr>"
12885 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12886 (match_operand:SI 1 "immediate_operand" "n")
12887 (match_operand:SI 2 "s_register_operand" "r")
12888 (match_operand:SI 3 "immediate_operand" "n")
12889 (match_operand:SI 4 "immediate_operand" "n")
12890 (match_operand:SI 5 "immediate_operand" "n")] MCRI)
12891 (use (match_dup 2))]
12892 "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
12894 arm_const_bounds (operands[0], 0, 16);
12895 arm_const_bounds (operands[1], 0, 8);
12896 arm_const_bounds (operands[3], 0, (1 << 5));
12897 arm_const_bounds (operands[4], 0, (1 << 5));
12898 arm_const_bounds (operands[5], 0, 8);
12899 return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
12901 [(set_attr "length" "4")
12902 (set_attr "type" "coproc")])

;; MRC: move from coprocessor into core register operand 0.
12904 (define_insn "arm_<mrc>"
12905 [(set (match_operand:SI 0 "s_register_operand" "=r")
12906 (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
12907 (match_operand:SI 2 "immediate_operand" "n")
12908 (match_operand:SI 3 "immediate_operand" "n")
12909 (match_operand:SI 4 "immediate_operand" "n")
12910 (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
12911 "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
12913 arm_const_bounds (operands[1], 0, 16);
12914 arm_const_bounds (operands[2], 0, 8);
12915 arm_const_bounds (operands[3], 0, (1 << 5));
12916 arm_const_bounds (operands[4], 0, (1 << 5));
12917 arm_const_bounds (operands[5], 0, 8);
12918 return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
12920 [(set_attr "length" "4")
12921 (set_attr "type" "coproc")])

;; MCRR: move a DImode register pair (%Q2/%R2) to the coprocessor.
12923 (define_insn "arm_<mcrr>"
12924 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12925 (match_operand:SI 1 "immediate_operand" "n")
12926 (match_operand:DI 2 "s_register_operand" "r")
12927 (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
12928 (use (match_dup 2))]
12929 "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
12931 arm_const_bounds (operands[0], 0, 16);
12932 arm_const_bounds (operands[1], 0, 8);
12933 arm_const_bounds (operands[3], 0, (1 << 5));
12934 return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
12936 [(set_attr "length" "4")
12937 (set_attr "type" "coproc")])

;; MRRC: move from coprocessor into a DImode register pair (%Q0/%R0).
12939 (define_insn "arm_<mrrc>"
12940 [(set (match_operand:DI 0 "s_register_operand" "=r")
12941 (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
12942 (match_operand:SI 2 "immediate_operand" "n")
12943 (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
12944 "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
12946 arm_const_bounds (operands[1], 0, 16);
12947 arm_const_bounds (operands[2], 0, 8);
12948 arm_const_bounds (operands[3], 0, (1 << 5));
12949 return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
12951 [(set_attr "length" "4")
12952 (set_attr "type" "coproc")])
;; Speculation barrier expander.  Pre-Armv7/Armv8 targets have no usable
;; barrier instruction, so fall back to a libgcc helper; otherwise the
;; *speculation_barrier_insn below matches.
12954 (define_expand "speculation_barrier"
12955 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
12958 /* For thumb1 (except Armv8 derivatives), and for pre-Armv7 we don't
12959 have a usable barrier (and probably don't need one in practice).
12960 But to be safe if such code is run on later architectures, call a
12961 helper function in libgcc that will do the thing for the active
12963 if (!(arm_arch7 || arm_arch8))
12965 arm_emit_speculation_barrier_function ();

12971 ;; Generate a hard speculation barrier when we have not enabled speculation
;; 8-byte barrier sequence for Armv7/Armv8 (template line missing from
;; this extraction).
12973 (define_insn "*speculation_barrier_insn"
12974 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
12975 "arm_arch7 || arm_arch8"
12977 [(set_attr "type" "block")
12978 (set_attr "length" "8")]

12981 ;; Vector bits common to IWMMXT, Neon and MVE
12982 (include "vec-common.md")
12983 ;; Load the Intel Wireless Multimedia Extension patterns
12984 (include "iwmmxt.md")
12985 ;; Load the VFP co-processor patterns
;; NOTE(review): the (include "vfp.md") line this comment introduces is
;; missing from this extraction — restore it from the full file.
12987 ;; Thumb-1 patterns
12988 (include "thumb1.md")
12989 ;; Thumb-2 patterns
12990 (include "thumb2.md")
12992 (include "neon.md")
12994 (include "crypto.md")
12995 ;; Synchronization Primitives
12996 (include "sync.md")
12997 ;; Fixed-point patterns
12998 (include "arm-fixed.md")
12999 ;; M-profile Vector Extension