1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2022 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
7 ;; This file is part of GCC.
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
26 ;;---------------------------------------------------------------------------
29 ;; Register numbers -- All machine registers should be defined here
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (R4_REGNUM 4) ; Fifth CORE register
34 (FDPIC_REGNUM 9) ; FDPIC register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (LAST_ARM_REGNUM 15) ;
40 (CC_REGNUM 100) ; Condition code pseudo register
41 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
42 (APSRQ_REGNUM 104) ; Q bit pseudo register
43 (APSRGE_REGNUM 105) ; GE bits pseudo register
44 (VPR_REGNUM 106) ; Vector Predication Register - MVE register.
47 ;; 3rd operand to select_dominance_cc_mode
54 ;; conditional compare combination
65 ;;---------------------------------------------------------------------------
68 ;; Processor type. This is created automatically from arm-cores.def.
69 (include "arm-tune.md")
71 ;; Instruction classification types
74 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
75 ; generating ARM code. This is used to control the length of some insn
76 ; patterns that share the same RTL in both ARM and Thumb code.
; Wrapped in (const ...), so the value is computed once from TARGET_THUMB
; and is uniform for every insn in the current function.
77 (define_attr "is_thumb" "yes,no"
78 (const (if_then_else (symbol_ref "TARGET_THUMB")
79 (const_string "yes") (const_string "no"))))
81 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
82 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
84 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
; Like "is_thumb" above, but true only for Thumb-1 (TARGET_THUMB1);
; evaluated once per function via (const ...).
85 (define_attr "is_thumb1" "yes,no"
86 (const (if_then_else (symbol_ref "TARGET_THUMB1")
87 (const_string "yes") (const_string "no"))))
89 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
90 ; The arm_restrict_it flag enables the "short IT" feature which
91 ; restricts IT blocks to a single 16-bit instruction.
92 ; This attribute should only be used on 16-bit Thumb-2 instructions
93 ; which may be predicated (the "predicable" attribute must be set).
94 (define_attr "predicable_short_it" "no,yes" (const_string "no"))
96 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
97 ; This attribute should only be used on instructions which may emit
98 ; an IT block in their expansion which is not a short IT.
99 (define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
101 ; Mark an instruction sequence as the required way of loading a
102 ; constant when -mpure-code is enabled (which implies
103 ; arm_disable_literal_pool)
104 (define_attr "required_for_purecode" "no,yes" (const_string "no"))
106 ;; Operand number of an input operand that is shifted. Zero if the
107 ;; given instruction does not shift one of its input operands.
108 (define_attr "shift" "" (const_int 0))
110 ;; [For compatibility with AArch64 in pipeline models]
111 ;; Attribute that specifies whether or not the instruction touches fp
113 (define_attr "fp" "no,yes" (const_string "no"))
115 ; Floating Point Unit. If we only have floating point emulation, then there
116 ; is no point in scheduling the floating point insns. (Well, for best
117 ; performance we should try and group them together).
; Selected by the numeric arm_fpu_attr value, mapped onto "none,vfp"
; in listed order; constant for the whole function.
118 (define_attr "fpu" "none,vfp"
119 (const (symbol_ref "arm_fpu_attr")))
121 ; Predicated means that the insn form is conditionally executed based on a
122 ; predicate. We default to 'no' because no Thumb patterns match this rule
123 ; and not all ARM insns do.
124 (define_attr "predicated" "yes,no" (const_string "no"))
126 ; LENGTH of an instruction (in bytes)
127 (define_attr "length" ""
130 ; The architecture which supports the instruction (or alternative).
131 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
132 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
133 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
134 ; arm_arch6. "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
135 ; Baseline. "fix_vlldm" is for fixing the v8-m/v8.1-m VLLDM erratum.
136 ; This attribute is used to compute attribute "enabled",
137 ; use type "any" to enable an alternative in all cases.
138 (define_attr "arch" "any, a, t, 32, t1, t2, v6,nov6, v6t2, \
139 v8mb, fix_vlldm, iwmmxt, iwmmxt2, armv6_or_vfpv3, \
141 (const_string "any"))
143 (define_attr "arch_enabled" "no,yes"
144 (cond [(eq_attr "arch" "any")
147 (and (eq_attr "arch" "a")
148 (match_test "TARGET_ARM"))
151 (and (eq_attr "arch" "t")
152 (match_test "TARGET_THUMB"))
155 (and (eq_attr "arch" "t1")
156 (match_test "TARGET_THUMB1"))
159 (and (eq_attr "arch" "t2")
160 (match_test "TARGET_THUMB2"))
163 (and (eq_attr "arch" "32")
164 (match_test "TARGET_32BIT"))
167 (and (eq_attr "arch" "v6")
168 (match_test "TARGET_32BIT && arm_arch6"))
171 (and (eq_attr "arch" "nov6")
172 (match_test "TARGET_32BIT && !arm_arch6"))
175 (and (eq_attr "arch" "v6t2")
176 (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
179 (and (eq_attr "arch" "v8mb")
180 (match_test "TARGET_THUMB1 && arm_arch8"))
183 (and (eq_attr "arch" "fix_vlldm")
184 (match_test "fix_vlldm"))
187 (and (eq_attr "arch" "iwmmxt2")
188 (match_test "TARGET_REALLY_IWMMXT2"))
191 (and (eq_attr "arch" "armv6_or_vfpv3")
192 (match_test "arm_arch6 || TARGET_VFP3"))
195 (and (eq_attr "arch" "neon")
196 (match_test "TARGET_NEON"))
199 (and (eq_attr "arch" "mve")
200 (match_test "TARGET_HAVE_MVE"))
204 (const_string "no")))
; Restricts an alternative to speed- or size-optimized functions;
; consumed by "opt_enabled" below.  Defaults to "any".
206 (define_attr "opt" "any,speed,size"
207 (const_string "any"))
209 (define_attr "opt_enabled" "no,yes"
210 (cond [(eq_attr "opt" "any")
213 (and (eq_attr "opt" "speed")
214 (match_test "optimize_function_for_speed_p (cfun)"))
217 (and (eq_attr "opt" "size")
218 (match_test "optimize_function_for_size_p (cfun)"))
219 (const_string "yes")]
220 (const_string "no")))
; "yes" for single/double FP loads whose source operand is a constant,
; i.e. loads that must be satisfied from the literal pool.
222 (define_attr "use_literal_pool" "no,yes"
223 (cond [(and (eq_attr "type" "f_loads,f_loadd")
224 (match_test "CONSTANT_P (operands[1])"))
225 (const_string "yes")]
226 (const_string "no")))
228 ; Enable all alternatives that are both arch_enabled and insn_enabled.
229 ; FIXME:: opt_enabled has been temporarily removed till the time we have
230 ; an attribute that allows the use of such alternatives.
231 ; This depends on caching of speed_p, size_p on a per
232 ; alternative basis. The problem is that the enabled attribute
233 ; cannot depend on any state that is not cached or is not constant
234 ; for a compilation unit. We probably need a generic "hot/cold"
235 ; alternative which if implemented can help with this. We disable this
236 ; until such a time as this is implemented and / or the improvements or
237 ; regressions with removing this attribute are double checked.
238 ; See ashldi3_neon and <shift>di3_neon in neon.md.
240 (define_attr "enabled" "no,yes"
241 (cond [(and (eq_attr "predicable_short_it" "no")
242 (and (eq_attr "predicated" "yes")
243 (match_test "arm_restrict_it")))
246 (and (eq_attr "enabled_for_short_it" "no")
247 (match_test "arm_restrict_it"))
250 (and (eq_attr "required_for_purecode" "yes")
251 (not (match_test "arm_disable_literal_pool")))
254 (eq_attr "arch_enabled" "no")
256 (const_string "yes")))
258 ; POOL_RANGE is how far away from a constant pool entry that this insn
259 ; can be placed. If the distance is zero, then this insn will never
260 ; reference the pool.
261 ; Note that for Thumb constant pools the PC value is rounded down to the
262 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
263 ; Thumb insns) should be set to <max_range> - 2.
264 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
265 ; before its address. It is set to <max_range> - (8 + <data_size>).
; Per-ISA base attributes; default 0 means the insn never references the
; pool.  Individual patterns override these with their real ranges.
266 (define_attr "arm_pool_range" "" (const_int 0))
267 (define_attr "thumb2_pool_range" "" (const_int 0))
268 (define_attr "arm_neg_pool_range" "" (const_int 0))
269 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
; Dispatch on "is_thumb" so generic code can query one attribute pair
; regardless of which instruction set is being generated.
271 (define_attr "pool_range" ""
272 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
273 (attr "arm_pool_range")))
274 (define_attr "neg_pool_range" ""
275 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
276 (attr "arm_neg_pool_range")))
278 ; An assembler sequence may clobber the condition codes without us knowing.
279 ; If such an insn references the pool, then we have no way of knowing how,
280 ; so use the most conservative value for pool_range.
; Worst-case defaults for inline assembler statements, since their
; contents cannot be analyzed (see comment above).
281 (define_asm_attributes
282 [(set_attr "conds" "clob")
283 (set_attr "length" "4")
284 (set_attr "pool_range" "250")])
286 ; Load scheduling, set from the arm_ld_sched variable
287 ; initialized by arm_option_override()
288 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
290 ; condition codes: this one is used by final_prescan_insn to speed up
291 ; conditionalizing instructions. It saves having to scan the rtl to see if
292 ; it uses or alters the condition codes.
294 ; USE means that the condition codes are used by the insn in the process of
295 ; outputting code, this means (at present) that we can't use the insn in
298 ; SET means that the purpose of the insn is to set the condition codes in a
299 ; well defined manner.
301 ; CLOB means that the condition codes are altered in an undefined manner, if
302 ; they are altered at all
304 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
305 ; that the instruction does not use or alter the condition codes.
307 ; NOCOND means that the instruction does not use or alter the condition
308 ; codes but can be converted into a conditionally executed instruction.
310 (define_attr "conds" "use,set,clob,unconditional,nocond"
312 (ior (eq_attr "is_thumb1" "yes")
313 (eq_attr "type" "call"))
314 (const_string "clob")
316 (ior (eq_attr "is_neon_type" "yes")
317 (eq_attr "is_mve_type" "yes"))
318 (const_string "unconditional")
319 (const_string "nocond"))))
321 ; Predicable means that the insn can be conditionally executed based on
322 ; an automatically added predicate (additional patterns are generated by
323 ; gen...). We default to 'no' because no Thumb patterns match this rule
324 ; and not all ARM patterns do.
325 (define_attr "predicable" "no,yes" (const_string "no"))
327 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
328 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
329 ; suffer blockages enough to warrant modelling this (and it can adversely
330 ; affect the schedule).
331 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
333 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
334 ; to stall the processor. Used with model_wbuf above.
335 (define_attr "write_conflict" "no,yes"
336 (if_then_else (eq_attr "type"
339 (const_string "no")))
341 ; Classify the insns into those that take one cycle and those that take more
342 ; than one on the main cpu execution unit.
; Any "type" value not in the list below falls through to "multi".
343 (define_attr "core_cycles" "single,multi"
344 (if_then_else (eq_attr "type"
345 "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
346 alu_shift_imm_lsl_1to4, alu_shift_imm_other, alu_shift_reg, alu_dsp_reg,\
347 alus_ext, alus_imm, alus_sreg,\
348 alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
349 logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
350 logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
351 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
352 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
353 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
354 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
355 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
356 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
357 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
358 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
359 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
360 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
361 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
362 (const_string "single")
363 (const_string "multi")))
365 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
366 ;; distant label. Only applicable to Thumb code.
367 (define_attr "far_jump" "yes,no" (const_string "no"))
370 ;; The number of machine instructions this pattern expands to.
371 ;; Used for Thumb-2 conditional execution.
372 (define_attr "ce_count" "" (const_int 1))
374 ;;---------------------------------------------------------------------------
377 (include "unspecs.md")
379 ;;---------------------------------------------------------------------------
382 (include "iterators.md")
384 ;;---------------------------------------------------------------------------
387 (include "predicates.md")
388 (include "constraints.md")
390 ;;---------------------------------------------------------------------------
391 ;; Pipeline descriptions
393 (define_attr "tune_cortexr4" "yes,no"
395 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
397 (const_string "no"))))
399 ;; True if the generic scheduling description should be used.
401 (define_attr "generic_sched" "yes,no"
403 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
404 arm926ejs,arm10e,arm1026ejs,arm1136js,\
405 arm1136jfs,cortexa5,cortexa7,cortexa8,\
406 cortexa9,cortexa12,cortexa15,cortexa17,\
407 cortexa53,cortexa57,cortexm4,cortexm7,\
408 exynosm1,marvell_pj4,xgene1")
409 (eq_attr "tune_cortexr4" "yes"))
411 (const_string "yes"))))
413 (define_attr "generic_vfp" "yes,no"
415 (and (eq_attr "fpu" "vfp")
416 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
417 cortexa8,cortexa9,cortexa53,cortexm4,\
418 cortexm7,marvell_pj4,xgene1")
419 (eq_attr "tune_cortexr4" "no"))
421 (const_string "no"))))
423 (include "marvell-f-iwmmxt.md")
424 (include "arm-generic.md")
425 (include "arm926ejs.md")
426 (include "arm1020e.md")
427 (include "arm1026ejs.md")
428 (include "arm1136jfs.md")
430 (include "fa606te.md")
431 (include "fa626te.md")
432 (include "fmp626.md")
433 (include "fa726te.md")
434 (include "cortex-a5.md")
435 (include "cortex-a7.md")
436 (include "cortex-a8.md")
437 (include "cortex-a9.md")
438 (include "cortex-a15.md")
439 (include "cortex-a17.md")
440 (include "cortex-a53.md")
441 (include "cortex-a57.md")
442 (include "cortex-r4.md")
443 (include "cortex-r4f.md")
444 (include "cortex-m7.md")
445 (include "cortex-m4.md")
446 (include "cortex-m4-fpu.md")
447 (include "exynos-m1.md")
449 (include "marvell-pj4.md")
450 (include "xgene1.md")
452 ;; define_subst and associated attributes
454 (define_subst "add_setq"
455 [(set (match_operand:SI 0 "" "")
456 (match_operand:SI 1 "" ""))]
460 (set (reg:CC APSRQ_REGNUM)
461 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))])
463 (define_subst_attr "add_clobber_q_name" "add_setq" "" "_setq")
464 (define_subst_attr "add_clobber_q_pred" "add_setq" "!ARM_Q_BIT_READ"
467 ;;---------------------------------------------------------------------------
472 ;; Note: For DImode insns, there is normally no reason why operands should
473 ;; not be in the same register, what we don't want is for something being
474 ;; written to partially overlap something that is an input.
476 (define_expand "adddi3"
478 [(set (match_operand:DI 0 "s_register_operand")
479 (plus:DI (match_operand:DI 1 "s_register_operand")
480 (match_operand:DI 2 "reg_or_int_operand")))
481 (clobber (reg:CC CC_REGNUM))])]
486 if (!REG_P (operands[2]))
487 operands[2] = force_reg (DImode, operands[2]);
491 rtx lo_result, hi_result, lo_dest, hi_dest;
492 rtx lo_op1, hi_op1, lo_op2, hi_op2;
493 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
495 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
496 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
498 if (lo_op2 == const0_rtx)
501 if (!arm_add_operand (hi_op2, SImode))
502 hi_op2 = force_reg (SImode, hi_op2);
503 /* Assume hi_op2 won't also be zero. */
504 emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
508 if (!arm_add_operand (lo_op2, SImode))
509 lo_op2 = force_reg (SImode, lo_op2);
510 if (!arm_not_operand (hi_op2, SImode))
511 hi_op2 = force_reg (SImode, hi_op2);
513 emit_insn (gen_addsi3_compare_op1 (lo_dest, lo_op1, lo_op2));
514 rtx carry = gen_rtx_LTU (SImode, gen_rtx_REG (CC_Cmode, CC_REGNUM),
516 if (hi_op2 == const0_rtx)
517 emit_insn (gen_add0si3_carryin (hi_dest, hi_op1, carry));
519 emit_insn (gen_addsi3_carryin (hi_dest, hi_op1, hi_op2, carry));
522 if (lo_result != lo_dest)
523 emit_move_insn (lo_result, lo_dest);
524 if (hi_result != hi_dest)
525 emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
531 (define_expand "addvsi4"
532 [(match_operand:SI 0 "s_register_operand")
533 (match_operand:SI 1 "s_register_operand")
534 (match_operand:SI 2 "arm_add_operand")
535 (match_operand 3 "")]
538 if (CONST_INT_P (operands[2]))
539 emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1], operands[2]));
541 emit_insn (gen_addsi3_compareV_reg (operands[0], operands[1], operands[2]));
542 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
547 (define_expand "addvdi4"
548 [(match_operand:DI 0 "s_register_operand")
549 (match_operand:DI 1 "s_register_operand")
550 (match_operand:DI 2 "reg_or_int_operand")
551 (match_operand 3 "")]
554 rtx lo_result, hi_result;
555 rtx lo_op1, hi_op1, lo_op2, hi_op2;
556 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
558 lo_result = gen_lowpart (SImode, operands[0]);
559 hi_result = gen_highpart (SImode, operands[0]);
561 if (lo_op2 == const0_rtx)
563 emit_move_insn (lo_result, lo_op1);
564 if (!arm_add_operand (hi_op2, SImode))
565 hi_op2 = force_reg (SImode, hi_op2);
567 emit_insn (gen_addvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
571 if (!arm_add_operand (lo_op2, SImode))
572 lo_op2 = force_reg (SImode, lo_op2);
573 if (!arm_not_operand (hi_op2, SImode))
574 hi_op2 = force_reg (SImode, hi_op2);
576 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
578 if (hi_op2 == const0_rtx)
579 emit_insn (gen_addsi3_cin_vout_0 (hi_result, hi_op1));
580 else if (CONST_INT_P (hi_op2))
581 emit_insn (gen_addsi3_cin_vout_imm (hi_result, hi_op1, hi_op2));
583 emit_insn (gen_addsi3_cin_vout_reg (hi_result, hi_op1, hi_op2));
585 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
591 (define_expand "addsi3_cin_vout_reg"
596 (plus:DI (match_dup 4)
597 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
598 (sign_extend:DI (match_operand:SI 2 "s_register_operand")))
599 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
601 (set (match_operand:SI 0 "s_register_operand")
602 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
606 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
607 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
608 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
609 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
613 (define_insn "*addsi3_cin_vout_reg_insn"
614 [(set (reg:CC_V CC_REGNUM)
618 (match_operand:DI 3 "arm_carry_operation" "")
619 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
620 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
622 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
625 (set (match_operand:SI 0 "s_register_operand" "=l,r")
626 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
632 [(set_attr "type" "alus_sreg")
633 (set_attr "arch" "t2,*")
634 (set_attr "length" "2,4")]
637 (define_expand "addsi3_cin_vout_imm"
642 (plus:DI (match_dup 4)
643 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
645 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
647 (set (match_operand:SI 0 "s_register_operand")
648 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
649 (match_operand 2 "arm_adcimm_operand")))])]
652 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
653 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
654 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
655 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
659 (define_insn "*addsi3_cin_vout_imm_insn"
660 [(set (reg:CC_V CC_REGNUM)
664 (match_operand:DI 3 "arm_carry_operation" "")
665 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
666 (match_operand 2 "arm_adcimm_operand" "I,K"))
668 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
671 (set (match_operand:SI 0 "s_register_operand" "=r,r")
672 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
677 sbcs%?\\t%0, %1, #%B2"
678 [(set_attr "type" "alus_imm")]
681 (define_expand "addsi3_cin_vout_0"
685 (plus:DI (match_dup 3)
686 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
687 (sign_extend:DI (plus:SI (match_dup 4) (match_dup 1)))))
688 (set (match_operand:SI 0 "s_register_operand")
689 (plus:SI (match_dup 4) (match_dup 1)))])]
692 operands[2] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
693 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
694 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
695 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
699 (define_insn "*addsi3_cin_vout_0_insn"
700 [(set (reg:CC_V CC_REGNUM)
703 (match_operand:DI 2 "arm_carry_operation" "")
704 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
705 (sign_extend:DI (plus:SI
706 (match_operand:SI 3 "arm_carry_operation" "")
708 (set (match_operand:SI 0 "s_register_operand" "=r")
709 (plus:SI (match_dup 3) (match_dup 1)))]
711 "adcs%?\\t%0, %1, #0"
712 [(set_attr "type" "alus_imm")]
715 (define_expand "uaddvsi4"
716 [(match_operand:SI 0 "s_register_operand")
717 (match_operand:SI 1 "s_register_operand")
718 (match_operand:SI 2 "arm_add_operand")
719 (match_operand 3 "")]
722 emit_insn (gen_addsi3_compare_op1 (operands[0], operands[1], operands[2]));
723 arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
728 (define_expand "uaddvdi4"
729 [(match_operand:DI 0 "s_register_operand")
730 (match_operand:DI 1 "s_register_operand")
731 (match_operand:DI 2 "reg_or_int_operand")
732 (match_operand 3 "")]
735 rtx lo_result, hi_result;
736 rtx lo_op1, hi_op1, lo_op2, hi_op2;
737 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
739 lo_result = gen_lowpart (SImode, operands[0]);
740 hi_result = gen_highpart (SImode, operands[0]);
742 if (lo_op2 == const0_rtx)
744 emit_move_insn (lo_result, lo_op1);
745 if (!arm_add_operand (hi_op2, SImode))
746 hi_op2 = force_reg (SImode, hi_op2);
748 emit_insn (gen_uaddvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
752 if (!arm_add_operand (lo_op2, SImode))
753 lo_op2 = force_reg (SImode, lo_op2);
754 if (!arm_not_operand (hi_op2, SImode))
755 hi_op2 = force_reg (SImode, hi_op2);
757 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
759 if (hi_op2 == const0_rtx)
760 emit_insn (gen_addsi3_cin_cout_0 (hi_result, hi_op1));
761 else if (CONST_INT_P (hi_op2))
762 emit_insn (gen_addsi3_cin_cout_imm (hi_result, hi_op1, hi_op2));
764 emit_insn (gen_addsi3_cin_cout_reg (hi_result, hi_op1, hi_op2));
766 arm_gen_unlikely_cbranch (GEU, CC_ADCmode, operands[3]);
772 (define_expand "addsi3_cin_cout_reg"
777 (plus:DI (match_dup 4)
778 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
779 (zero_extend:DI (match_operand:SI 2 "s_register_operand")))
780 (const_int 4294967296)))
781 (set (match_operand:SI 0 "s_register_operand")
782 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
786 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
787 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
788 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
789 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
793 (define_insn "*addsi3_cin_cout_reg_insn"
794 [(set (reg:CC_ADC CC_REGNUM)
798 (match_operand:DI 3 "arm_carry_operation" "")
799 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
800 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
801 (const_int 4294967296)))
802 (set (match_operand:SI 0 "s_register_operand" "=l,r")
803 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
810 [(set_attr "type" "alus_sreg")
811 (set_attr "arch" "t2,*")
812 (set_attr "length" "2,4")]
815 (define_expand "addsi3_cin_cout_imm"
820 (plus:DI (match_dup 4)
821 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
823 (const_int 4294967296)))
824 (set (match_operand:SI 0 "s_register_operand")
825 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
826 (match_operand:SI 2 "arm_adcimm_operand")))])]
829 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
830 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
831 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
832 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
833 operands[6] = GEN_INT (UINTVAL (operands[2]) & 0xffffffff);
837 (define_insn "*addsi3_cin_cout_imm_insn"
838 [(set (reg:CC_ADC CC_REGNUM)
842 (match_operand:DI 3 "arm_carry_operation" "")
843 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
844 (match_operand:DI 5 "const_int_operand" "n,n"))
845 (const_int 4294967296)))
846 (set (match_operand:SI 0 "s_register_operand" "=r,r")
847 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
849 (match_operand:SI 2 "arm_adcimm_operand" "I,K")))]
851 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[5])"
854 sbcs%?\\t%0, %1, #%B2"
855 [(set_attr "type" "alus_imm")]
858 (define_expand "addsi3_cin_cout_0"
862 (plus:DI (match_dup 3)
863 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
864 (const_int 4294967296)))
865 (set (match_operand:SI 0 "s_register_operand")
866 (plus:SI (match_dup 4) (match_dup 1)))])]
869 operands[2] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
870 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
871 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
872 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
876 (define_insn "*addsi3_cin_cout_0_insn"
877 [(set (reg:CC_ADC CC_REGNUM)
880 (match_operand:DI 2 "arm_carry_operation" "")
881 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
882 (const_int 4294967296)))
883 (set (match_operand:SI 0 "s_register_operand" "=r")
884 (plus:SI (match_operand:SI 3 "arm_carry_operation" "") (match_dup 1)))]
886 "adcs%?\\t%0, %1, #0"
887 [(set_attr "type" "alus_imm")]
890 (define_expand "addsi3"
891 [(set (match_operand:SI 0 "s_register_operand")
892 (plus:SI (match_operand:SI 1 "s_register_operand")
893 (match_operand:SI 2 "reg_or_int_operand")))]
896 if (TARGET_32BIT && CONST_INT_P (operands[2]))
898 arm_split_constant (PLUS, SImode, NULL_RTX,
899 INTVAL (operands[2]), operands[0], operands[1],
900 optimize && can_create_pseudo_p ());
906 ; If there is a scratch available, this will be faster than synthesizing the
909 [(match_scratch:SI 3 "r")
910 (set (match_operand:SI 0 "arm_general_register_operand" "")
911 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
912 (match_operand:SI 2 "const_int_operand" "")))]
914 !(const_ok_for_arm (INTVAL (operands[2]))
915 || const_ok_for_arm (-INTVAL (operands[2])))
916 && const_ok_for_arm (~INTVAL (operands[2]))"
917 [(set (match_dup 3) (match_dup 2))
918 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
922 ;; The r/r/k alternative is required when reloading the address
923 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
924 ;; put the duplicated register first, and not try the commutative version.
925 (define_insn_and_split "*arm_addsi3"
926 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
927 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
928 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
944 subw%?\\t%0, %1, #%n2
945 subw%?\\t%0, %1, #%n2
948 && CONST_INT_P (operands[2])
949 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
950 && (reload_completed || !arm_eliminable_register (operands[1]))"
951 [(clobber (const_int 0))]
953 arm_split_constant (PLUS, SImode, curr_insn,
954 INTVAL (operands[2]), operands[0],
958 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
959 (set_attr "predicable" "yes")
960 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
961 (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
962 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
963 (const_string "alu_imm")
964 (const_string "alu_sreg")))
968 (define_insn "addsi3_compareV_reg"
969 [(set (reg:CC_V CC_REGNUM)
972 (sign_extend:DI (match_operand:SI 1 "register_operand" "%l,0,r"))
973 (sign_extend:DI (match_operand:SI 2 "register_operand" "l,r,r")))
974 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
975 (set (match_operand:SI 0 "register_operand" "=l,r,r")
976 (plus:SI (match_dup 1) (match_dup 2)))]
978 "adds%?\\t%0, %1, %2"
979 [(set_attr "conds" "set")
980 (set_attr "arch" "t2,t2,*")
981 (set_attr "length" "2,2,4")
982 (set_attr "type" "alus_sreg")]
985 (define_insn "*addsi3_compareV_reg_nosum"
986 [(set (reg:CC_V CC_REGNUM)
989 (sign_extend:DI (match_operand:SI 0 "register_operand" "%l,r"))
990 (sign_extend:DI (match_operand:SI 1 "register_operand" "l,r")))
991 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
994 [(set_attr "conds" "set")
995 (set_attr "arch" "t2,*")
996 (set_attr "length" "2,4")
997 (set_attr "type" "alus_sreg")]
1000 (define_insn "subvsi3_intmin"
1001 [(set (reg:CC_V CC_REGNUM)
1005 (match_operand:SI 1 "register_operand" "r"))
1006 (const_int 2147483648))
1007 (sign_extend:DI (plus:SI (match_dup 1) (const_int -2147483648)))))
1008 (set (match_operand:SI 0 "register_operand" "=r")
1009 (plus:SI (match_dup 1) (const_int -2147483648)))]
1011 "subs%?\\t%0, %1, #-2147483648"
1012 [(set_attr "conds" "set")
1013 (set_attr "type" "alus_imm")]
1016 (define_insn "addsi3_compareV_imm"
1017 [(set (reg:CC_V CC_REGNUM)
1021 (match_operand:SI 1 "register_operand" "l,0,l,0,r,r"))
1022 (match_operand 2 "arm_addimm_operand" "Pd,Py,Px,Pw,I,L"))
1023 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
1024 (set (match_operand:SI 0 "register_operand" "=l,l,l,l,r,r")
1025 (plus:SI (match_dup 1) (match_dup 2)))]
1027 && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
1031 subs%?\\t%0, %1, #%n2
1032 subs%?\\t%0, %0, #%n2
1034 subs%?\\t%0, %1, #%n2"
1035 [(set_attr "conds" "set")
1036 (set_attr "arch" "t2,t2,t2,t2,*,*")
1037 (set_attr "length" "2,2,2,2,4,4")
1038 (set_attr "type" "alus_imm")]
1041 (define_insn "addsi3_compareV_imm_nosum"
1042 [(set (reg:CC_V CC_REGNUM)
1046 (match_operand:SI 0 "register_operand" "l,r,r"))
1047 (match_operand 1 "arm_addimm_operand" "Pw,I,L"))
1048 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1050 && INTVAL (operands[1]) == ARM_SIGN_EXTEND (INTVAL (operands[1]))"
1055 [(set_attr "conds" "set")
1056 (set_attr "arch" "t2,*,*")
1057 (set_attr "length" "2,4,4")
1058 (set_attr "type" "alus_imm")]
1061 ;; We can handle more constants efficently if we can clobber either a scratch
1062 ;; or the other source operand. We deliberately leave this late as in
1063 ;; high register pressure situations it's not worth forcing any reloads.
1065 [(match_scratch:SI 2 "l")
1066 (set (reg:CC_V CC_REGNUM)
1070 (match_operand:SI 0 "low_register_operand"))
1071 (match_operand 1 "const_int_operand"))
1072 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1074 && satisfies_constraint_Pd (operands[1])"
1076 (set (reg:CC_V CC_REGNUM)
1078 (plus:DI (sign_extend:DI (match_dup 0))
1079 (sign_extend:DI (match_dup 1)))
1080 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1081 (set (match_dup 2) (plus:SI (match_dup 0) (match_dup 1)))])]
1085 [(set (reg:CC_V CC_REGNUM)
1089 (match_operand:SI 0 "low_register_operand"))
1090 (match_operand 1 "const_int_operand"))
1091 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1093 && dead_or_set_p (peep2_next_insn (0), operands[0])
1094 && satisfies_constraint_Py (operands[1])"
1096 (set (reg:CC_V CC_REGNUM)
1098 (plus:DI (sign_extend:DI (match_dup 0))
1099 (sign_extend:DI (match_dup 1)))
1100 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1101 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 1)))])]
1104 (define_insn "addsi3_compare0"
1105 [(set (reg:CC_NZ CC_REGNUM)
1107 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
1108 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
1110 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1111 (plus:SI (match_dup 1) (match_dup 2)))]
1115 subs%?\\t%0, %1, #%n2
1116 adds%?\\t%0, %1, %2"
1117 [(set_attr "conds" "set")
1118 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
1121 (define_insn "*addsi3_compare0_scratch"
1122 [(set (reg:CC_NZ CC_REGNUM)
1124 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
1125 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
1132 [(set_attr "conds" "set")
1133 (set_attr "predicable" "yes")
1134 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
1137 (define_insn "*compare_negsi_si"
1138 [(set (reg:CC_Z CC_REGNUM)
1140 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
1141 (match_operand:SI 1 "s_register_operand" "l,r")))]
1144 [(set_attr "conds" "set")
1145 (set_attr "predicable" "yes")
1146 (set_attr "arch" "t2,*")
1147 (set_attr "length" "2,4")
1148 (set_attr "predicable_short_it" "yes,no")
1149 (set_attr "type" "alus_sreg")]
1152 ;; This is the canonicalization of subsi3_compare when the
1153 ;; addend is a constant.
1154 (define_insn "cmpsi2_addneg"
1155 [(set (reg:CC CC_REGNUM)
1157 (match_operand:SI 1 "s_register_operand" "r,r")
1158 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
1159 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1160 (plus:SI (match_dup 1)
1161 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
1163 && (INTVAL (operands[2])
1164 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
1166 /* For 0 and INT_MIN it is essential that we use subs, as adds will result
1167 in different condition codes (like cmn rather than like cmp), so that
1168 alternative comes first. Both alternatives can match for any 0x??000000
1169 where except for 0 and INT_MIN it doesn't matter what we choose, and also
1170 for -1 and 1 with TARGET_THUMB2, in that case prefer instruction with #1
1171 as it is shorter. */
1172 if (which_alternative == 0 && operands[3] != const1_rtx)
1173 return "subs%?\\t%0, %1, #%n3";
1175 return "adds%?\\t%0, %1, %3";
1177 [(set_attr "conds" "set")
1178 (set_attr "type" "alus_sreg")]
1181 ;; Convert the sequence
1183 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
1187 ;; bcs dest ((unsigned)rn >= 1)
1188 ;; similarly for the beq variant using bcc.
1189 ;; This is a common looping idiom (while (n--))
1191 [(set (match_operand:SI 0 "arm_general_register_operand" "")
1192 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
1194 (set (match_operand 2 "cc_register" "")
1195 (compare (match_dup 0) (const_int -1)))
1197 (if_then_else (match_operator 3 "equality_operator"
1198 [(match_dup 2) (const_int 0)])
1199 (match_operand 4 "" "")
1200 (match_operand 5 "" "")))]
1201 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
1205 (match_dup 1) (const_int 1)))
1206 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
1208 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
1211 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
1212 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
1215 operands[2], const0_rtx);"
1218 ;; The next four insns work because they compare the result with one of
1219 ;; the operands, and we know that the use of the condition code is
1220 ;; either GEU or LTU, so we can use the carry flag from the addition
1221 ;; instead of doing the compare a second time.
1222 (define_insn "addsi3_compare_op1"
1223 [(set (reg:CC_C CC_REGNUM)
1225 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,rk,rk")
1226 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rkI,L"))
1228 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,rk,rk")
1229 (plus:SI (match_dup 1) (match_dup 2)))]
1234 subs%?\\t%0, %1, #%n2
1235 subs%?\\t%0, %0, #%n2
1237 subs%?\\t%0, %1, #%n2"
1238 [(set_attr "conds" "set")
1239 (set_attr "arch" "t2,t2,t2,t2,*,*")
1240 (set_attr "length" "2,2,2,2,4,4")
1242 (if_then_else (match_operand 2 "const_int_operand")
1243 (const_string "alu_imm")
1244 (const_string "alu_sreg")))]
1247 (define_insn "*addsi3_compare_op2"
1248 [(set (reg:CC_C CC_REGNUM)
1250 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r")
1251 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rI,L"))
1253 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r")
1254 (plus:SI (match_dup 1) (match_dup 2)))]
1259 subs%?\\t%0, %1, #%n2
1260 subs%?\\t%0, %0, #%n2
1262 subs%?\\t%0, %1, #%n2"
1263 [(set_attr "conds" "set")
1264 (set_attr "arch" "t2,t2,t2,t2,*,*")
1265 (set_attr "length" "2,2,2,2,4,4")
1267 (if_then_else (match_operand 2 "const_int_operand")
1268 (const_string "alu_imm")
1269 (const_string "alu_sreg")))]
1272 (define_insn "*compare_addsi2_op0"
1273 [(set (reg:CC_C CC_REGNUM)
1275 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1276 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1284 [(set_attr "conds" "set")
1285 (set_attr "predicable" "yes")
1286 (set_attr "arch" "t2,t2,*,*")
1287 (set_attr "predicable_short_it" "yes,yes,no,no")
1288 (set_attr "length" "2,2,4,4")
1290 (if_then_else (match_operand 1 "const_int_operand")
1291 (const_string "alu_imm")
1292 (const_string "alu_sreg")))]
1295 (define_insn "*compare_addsi2_op1"
1296 [(set (reg:CC_C CC_REGNUM)
1298 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1299 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1307 [(set_attr "conds" "set")
1308 (set_attr "predicable" "yes")
1309 (set_attr "arch" "t2,t2,*,*")
1310 (set_attr "predicable_short_it" "yes,yes,no,no")
1311 (set_attr "length" "2,2,4,4")
1313 (if_then_else (match_operand 1 "const_int_operand")
1314 (const_string "alu_imm")
1315 (const_string "alu_sreg")))]
1318 (define_insn "addsi3_carryin"
1319 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1320 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
1321 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
1322 (match_operand:SI 3 "arm_carry_operation" "")))]
1327 sbc%?\\t%0, %1, #%B2"
1328 [(set_attr "conds" "use")
1329 (set_attr "predicable" "yes")
1330 (set_attr "arch" "t2,*,*")
1331 (set_attr "length" "4")
1332 (set_attr "predicable_short_it" "yes,no,no")
1333 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1336 ;; Canonicalization of the above when the immediate is zero.
1337 (define_insn "add0si3_carryin"
1338 [(set (match_operand:SI 0 "s_register_operand" "=r")
1339 (plus:SI (match_operand:SI 2 "arm_carry_operation" "")
1340 (match_operand:SI 1 "arm_not_operand" "r")))]
1342 "adc%?\\t%0, %1, #0"
1343 [(set_attr "conds" "use")
1344 (set_attr "predicable" "yes")
1345 (set_attr "length" "4")
1346 (set_attr "type" "adc_imm")]
1349 (define_insn "*addsi3_carryin_alt2"
1350 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1351 (plus:SI (plus:SI (match_operand:SI 3 "arm_carry_operation" "")
1352 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
1353 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
1358 sbc%?\\t%0, %1, #%B2"
1359 [(set_attr "conds" "use")
1360 (set_attr "predicable" "yes")
1361 (set_attr "arch" "t2,*,*")
1362 (set_attr "length" "4")
1363 (set_attr "predicable_short_it" "yes,no,no")
1364 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1367 (define_insn "*addsi3_carryin_shift"
1368 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1370 (match_operator:SI 2 "shift_operator"
1371 [(match_operand:SI 3 "s_register_operand" "r,r")
1372 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1373 (match_operand:SI 5 "arm_carry_operation" ""))
1374 (match_operand:SI 1 "s_register_operand" "r,r")))]
1376 "adc%?\\t%0, %1, %3%S2"
1377 [(set_attr "conds" "use")
1378 (set_attr "arch" "32,a")
1379 (set_attr "shift" "3")
1380 (set_attr "predicable" "yes")
1381 (set_attr "autodetect_type" "alu_shift_operator2")]
1384 (define_insn "*addsi3_carryin_clobercc"
1385 [(set (match_operand:SI 0 "s_register_operand" "=r")
1386 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1387 (match_operand:SI 2 "arm_rhs_operand" "rI"))
1388 (match_operand:SI 3 "arm_carry_operation" "")))
1389 (clobber (reg:CC CC_REGNUM))]
1391 "adcs%?\\t%0, %1, %2"
1392 [(set_attr "conds" "set")
1393 (set_attr "type" "adcs_reg")]
1396 (define_expand "subvsi4"
1397 [(match_operand:SI 0 "s_register_operand")
1398 (match_operand:SI 1 "arm_rhs_operand")
1399 (match_operand:SI 2 "arm_add_operand")
1400 (match_operand 3 "")]
1403 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1405 /* If both operands are constants we can decide the result statically. */
1406 wi::overflow_type overflow;
1407 wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1408 rtx_mode_t (operands[2], SImode),
1410 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1411 if (overflow != wi::OVF_NONE)
1412 emit_jump_insn (gen_jump (operands[3]));
1415 else if (CONST_INT_P (operands[2]))
1417 operands[2] = GEN_INT (-INTVAL (operands[2]));
1418 /* Special case for INT_MIN. */
1419 if (INTVAL (operands[2]) == 0x80000000)
1420 emit_insn (gen_subvsi3_intmin (operands[0], operands[1]));
1422 emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1],
1425 else if (CONST_INT_P (operands[1]))
1426 emit_insn (gen_subvsi3_imm1 (operands[0], operands[1], operands[2]));
1428 emit_insn (gen_subvsi3 (operands[0], operands[1], operands[2]));
1430 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
1434 (define_expand "subvdi4"
1435 [(match_operand:DI 0 "s_register_operand")
1436 (match_operand:DI 1 "reg_or_int_operand")
1437 (match_operand:DI 2 "reg_or_int_operand")
1438 (match_operand 3 "")]
1441 rtx lo_result, hi_result;
1442 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1443 lo_result = gen_lowpart (SImode, operands[0]);
1444 hi_result = gen_highpart (SImode, operands[0]);
1445 machine_mode mode = CCmode;
1447 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1449 /* If both operands are constants we can decide the result statically. */
1450 wi::overflow_type overflow;
1451 wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1452 rtx_mode_t (operands[2], DImode),
1454 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1455 if (overflow != wi::OVF_NONE)
1456 emit_jump_insn (gen_jump (operands[3]));
1459 else if (CONST_INT_P (operands[1]))
1461 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1463 if (const_ok_for_arm (INTVAL (lo_op1)))
1465 emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1466 GEN_INT (~UINTVAL (lo_op1))));
1467 /* We could potentially use RSC here in Arm state, but not
1468 in Thumb, so it's probably not worth the effort of handling
1470 hi_op1 = force_reg (SImode, hi_op1);
1474 operands[1] = force_reg (DImode, operands[1]);
1477 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1479 if (lo_op2 == const0_rtx)
1481 emit_move_insn (lo_result, lo_op1);
1482 if (!arm_add_operand (hi_op2, SImode))
1483 hi_op2 = force_reg (SImode, hi_op2);
1484 emit_insn (gen_subvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1488 if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1489 lo_op2 = force_reg (SImode, lo_op2);
1490 if (CONST_INT_P (lo_op2))
1491 emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1492 gen_int_mode (-INTVAL (lo_op2), SImode)));
1494 emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1497 if (!arm_not_operand (hi_op2, SImode))
1498 hi_op2 = force_reg (SImode, hi_op2);
1499 rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1500 if (CONST_INT_P (hi_op2))
1501 emit_insn (gen_subvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1502 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1503 gen_rtx_LTU (DImode, ccreg,
1506 emit_insn (gen_subvsi3_borrow (hi_result, hi_op1, hi_op2,
1507 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1508 gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1509 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
1514 (define_expand "usubvsi4"
1515 [(match_operand:SI 0 "s_register_operand")
1516 (match_operand:SI 1 "arm_rhs_operand")
1517 (match_operand:SI 2 "arm_add_operand")
1518 (match_operand 3 "")]
1521 machine_mode mode = CCmode;
1522 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1524 /* If both operands are constants we can decide the result statically. */
1525 wi::overflow_type overflow;
1526 wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1527 rtx_mode_t (operands[2], SImode),
1528 UNSIGNED, &overflow);
1529 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1530 if (overflow != wi::OVF_NONE)
1531 emit_jump_insn (gen_jump (operands[3]));
1534 else if (CONST_INT_P (operands[2]))
1535 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
1536 gen_int_mode (-INTVAL (operands[2]),
1538 else if (CONST_INT_P (operands[1]))
1541 emit_insn (gen_rsb_imm_compare (operands[0], operands[1], operands[2],
1542 GEN_INT (~UINTVAL (operands[1]))));
1545 emit_insn (gen_subsi3_compare1 (operands[0], operands[1], operands[2]));
1546 arm_gen_unlikely_cbranch (LTU, mode, operands[3]);
1551 (define_expand "usubvdi4"
1552 [(match_operand:DI 0 "s_register_operand")
1553 (match_operand:DI 1 "reg_or_int_operand")
1554 (match_operand:DI 2 "reg_or_int_operand")
1555 (match_operand 3 "")]
1558 rtx lo_result, hi_result;
1559 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1560 lo_result = gen_lowpart (SImode, operands[0]);
1561 hi_result = gen_highpart (SImode, operands[0]);
1562 machine_mode mode = CCmode;
1564 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1566 /* If both operands are constants we can decide the result statically. */
1567 wi::overflow_type overflow;
1568 wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1569 rtx_mode_t (operands[2], DImode),
1570 UNSIGNED, &overflow);
1571 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1572 if (overflow != wi::OVF_NONE)
1573 emit_jump_insn (gen_jump (operands[3]));
1576 else if (CONST_INT_P (operands[1]))
1578 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1580 if (const_ok_for_arm (INTVAL (lo_op1)))
1582 emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1583 GEN_INT (~UINTVAL (lo_op1))));
1584 /* We could potentially use RSC here in Arm state, but not
1585 in Thumb, so it's probably not worth the effort of handling
1587 hi_op1 = force_reg (SImode, hi_op1);
1591 operands[1] = force_reg (DImode, operands[1]);
1594 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1596 if (lo_op2 == const0_rtx)
1598 emit_move_insn (lo_result, lo_op1);
1599 if (!arm_add_operand (hi_op2, SImode))
1600 hi_op2 = force_reg (SImode, hi_op2);
1601 emit_insn (gen_usubvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1605 if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1606 lo_op2 = force_reg (SImode, lo_op2);
1607 if (CONST_INT_P (lo_op2))
1608 emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1609 gen_int_mode (-INTVAL (lo_op2), SImode)));
1611 emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1614 if (!arm_not_operand (hi_op2, SImode))
1615 hi_op2 = force_reg (SImode, hi_op2);
1616 rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1617 if (CONST_INT_P (hi_op2))
1618 emit_insn (gen_usubvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1619 GEN_INT (UINTVAL (hi_op2) & 0xffffffff),
1620 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1621 gen_rtx_LTU (DImode, ccreg,
1624 emit_insn (gen_usubvsi3_borrow (hi_result, hi_op1, hi_op2,
1625 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1626 gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1627 arm_gen_unlikely_cbranch (LTU, CC_Bmode, operands[3]);
1632 (define_insn "subsi3_compare1"
1633 [(set (reg:CC CC_REGNUM)
1635 (match_operand:SI 1 "register_operand" "r")
1636 (match_operand:SI 2 "register_operand" "r")))
1637 (set (match_operand:SI 0 "register_operand" "=r")
1638 (minus:SI (match_dup 1) (match_dup 2)))]
1640 "subs%?\\t%0, %1, %2"
1641 [(set_attr "conds" "set")
1642 (set_attr "type" "alus_sreg")]
1645 (define_insn "subvsi3"
1646 [(set (reg:CC_V CC_REGNUM)
1649 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "l,r"))
1650 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
1651 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1652 (set (match_operand:SI 0 "s_register_operand" "=l,r")
1653 (minus:SI (match_dup 1) (match_dup 2)))]
1655 "subs%?\\t%0, %1, %2"
1656 [(set_attr "conds" "set")
1657 (set_attr "arch" "t2,*")
1658 (set_attr "length" "2,4")
1659 (set_attr "type" "alus_sreg")]
1662 (define_insn "subvsi3_imm1"
1663 [(set (reg:CC_V CC_REGNUM)
1666 (match_operand 1 "arm_immediate_operand" "I")
1667 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1668 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1669 (set (match_operand:SI 0 "s_register_operand" "=r")
1670 (minus:SI (match_dup 1) (match_dup 2)))]
1672 "rsbs%?\\t%0, %2, %1"
1673 [(set_attr "conds" "set")
1674 (set_attr "type" "alus_imm")]
1677 (define_insn "subsi3_carryin"
1678 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1679 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
1680 (match_operand:SI 2 "s_register_operand" "r,r,r"))
1681 (match_operand:SI 3 "arm_borrow_operation" "")))]
1686 sbc%?\\t%0, %2, %2, lsl #1"
1687 [(set_attr "conds" "use")
1688 (set_attr "arch" "*,a,t2")
1689 (set_attr "predicable" "yes")
1690 (set_attr "type" "adc_reg,adc_imm,alu_shift_imm_lsl_1to4")]
1693 ;; Special canonicalization of the above when operand1 == (const_int 1):
1694 ;; in this case the 'borrow' needs to treated like subtracting from the carry.
1695 (define_insn "rsbsi_carryin_reg"
1696 [(set (match_operand:SI 0 "s_register_operand" "=r")
1697 (minus:SI (match_operand:SI 1 "arm_carry_operation" "")
1698 (match_operand:SI 2 "s_register_operand" "r")))]
1700 "rsc%?\\t%0, %2, #1"
1701 [(set_attr "conds" "use")
1702 (set_attr "predicable" "yes")
1703 (set_attr "type" "adc_imm")]
1706 ;; SBC performs Rn - Rm - ~C, but -Rm = ~Rm + 1 => Rn + ~Rm + 1 - ~C
1707 ;; => Rn + ~Rm + C, which is essentially ADC Rd, Rn, ~Rm
1708 (define_insn "*add_not_cin"
1709 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1711 (plus:SI (not:SI (match_operand:SI 1 "s_register_operand" "r,r"))
1712 (match_operand:SI 3 "arm_carry_operation" ""))
1713 (match_operand:SI 2 "arm_rhs_operand" "r,I")))]
1714 "TARGET_ARM || (TARGET_THUMB2 && !CONST_INT_P (operands[2]))"
1718 [(set_attr "conds" "use")
1719 (set_attr "predicable" "yes")
1720 (set_attr "arch" "*,a")
1721 (set_attr "type" "adc_reg,adc_imm")]
1724 ;; On Arm we can also use the same trick when the non-inverted operand is
1725 ;; shifted, using RSC.
1726 (define_insn "add_not_shift_cin"
1727 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1729 (plus:SI (match_operator:SI 3 "shift_operator"
1730 [(match_operand:SI 1 "s_register_operand" "r,r")
1731 (match_operand:SI 2 "shift_amount_operand" "M,r")])
1732 (not:SI (match_operand:SI 4 "s_register_operand" "r,r")))
1733 (match_operand:SI 5 "arm_carry_operation" "")))]
1735 "rsc%?\\t%0, %4, %1%S3"
1736 [(set_attr "conds" "use")
1737 (set_attr "predicable" "yes")
1738 (set_attr "autodetect_type" "alu_shift_operator3")]
1741 (define_insn "cmpsi3_carryin_<CC_EXTEND>out"
1742 [(set (reg:<CC_EXTEND> CC_REGNUM)
1743 (compare:<CC_EXTEND>
1744 (SE:DI (match_operand:SI 1 "s_register_operand" "0,r"))
1745 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1746 (SE:DI (match_operand:SI 2 "s_register_operand" "l,r")))))
1747 (clobber (match_scratch:SI 0 "=l,r"))]
1750 [(set_attr "conds" "set")
1751 (set_attr "arch" "t2,*")
1752 (set_attr "length" "2,4")
1753 (set_attr "type" "adc_reg")]
1756 ;; Similar to the above, but handling a constant which has a different
1757 ;; canonicalization.
1758 (define_insn "cmpsi3_imm_carryin_<CC_EXTEND>out"
1759 [(set (reg:<CC_EXTEND> CC_REGNUM)
1760 (compare:<CC_EXTEND>
1761 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1762 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1763 (match_operand:DI 2 "arm_adcimm_operand" "I,K"))))
1764 (clobber (match_scratch:SI 0 "=l,r"))]
1768 adcs\\t%0, %1, #%B2"
1769 [(set_attr "conds" "set")
1770 (set_attr "type" "adc_imm")]
1773 ;; Further canonicalization when the constant is zero.
1774 (define_insn "cmpsi3_0_carryin_<CC_EXTEND>out"
1775 [(set (reg:<CC_EXTEND> CC_REGNUM)
1776 (compare:<CC_EXTEND>
1777 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1778 (match_operand:DI 2 "arm_borrow_operation" "")))
1779 (clobber (match_scratch:SI 0 "=l,r"))]
1782 [(set_attr "conds" "set")
1783 (set_attr "type" "adc_imm")]
1786 (define_insn "*subsi3_carryin_const"
1787 [(set (match_operand:SI 0 "s_register_operand" "=r")
1789 (match_operand:SI 1 "s_register_operand" "r")
1790 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1791 (match_operand:SI 3 "arm_borrow_operation" "")))]
1793 "sbc\\t%0, %1, #%n2"
1794 [(set_attr "conds" "use")
1795 (set_attr "type" "adc_imm")]
1798 (define_insn "*subsi3_carryin_const0"
1799 [(set (match_operand:SI 0 "s_register_operand" "=r")
1800 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1801 (match_operand:SI 2 "arm_borrow_operation" "")))]
1804 [(set_attr "conds" "use")
1805 (set_attr "type" "adc_imm")]
1808 (define_insn "*subsi3_carryin_shift"
1809 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1811 (match_operand:SI 1 "s_register_operand" "r,r")
1812 (match_operator:SI 2 "shift_operator"
1813 [(match_operand:SI 3 "s_register_operand" "r,r")
1814 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
1815 (match_operand:SI 5 "arm_borrow_operation" "")))]
1817 "sbc%?\\t%0, %1, %3%S2"
1818 [(set_attr "conds" "use")
1819 (set_attr "arch" "32,a")
1820 (set_attr "shift" "3")
1821 (set_attr "predicable" "yes")
1822 (set_attr "autodetect_type" "alu_shift_operator2")]
1825 (define_insn "*subsi3_carryin_shift_alt"
1826 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1828 (match_operand:SI 1 "s_register_operand" "r,r")
1829 (match_operand:SI 5 "arm_borrow_operation" ""))
1830 (match_operator:SI 2 "shift_operator"
1831 [(match_operand:SI 3 "s_register_operand" "r,r")
1832 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
1834 "sbc%?\\t%0, %1, %3%S2"
1835 [(set_attr "conds" "use")
1836 (set_attr "arch" "32,a")
1837 (set_attr "shift" "3")
1838 (set_attr "predicable" "yes")
1839 (set_attr "autodetect_type" "alu_shift_operator2")]
1843 (define_insn "*rsbsi3_carryin_shift"
1844 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1846 (match_operator:SI 2 "shift_operator"
1847 [(match_operand:SI 3 "s_register_operand" "r,r")
1848 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1849 (match_operand:SI 1 "s_register_operand" "r,r"))
1850 (match_operand:SI 5 "arm_borrow_operation" "")))]
1852 "rsc%?\\t%0, %1, %3%S2"
1853 [(set_attr "conds" "use")
1854 (set_attr "predicable" "yes")
1855 (set_attr "autodetect_type" "alu_shift_operator2")]
1858 (define_insn "*rsbsi3_carryin_shift_alt"
1859 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1861 (match_operator:SI 2 "shift_operator"
1862 [(match_operand:SI 3 "s_register_operand" "r,r")
1863 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1864 (match_operand:SI 5 "arm_borrow_operation" ""))
1865 (match_operand:SI 1 "s_register_operand" "r,r")))]
1867 "rsc%?\\t%0, %1, %3%S2"
1868 [(set_attr "conds" "use")
1869 (set_attr "predicable" "yes")
1870 (set_attr "autodetect_type" "alu_shift_operator2")]
1873 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
1875 [(set (match_operand:SI 0 "s_register_operand" "")
1876 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1877 (match_operand:SI 2 "s_register_operand" ""))
1879 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1881 [(set (match_dup 3) (match_dup 1))
1882 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1884 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1887 (define_expand "addsf3"
1888 [(set (match_operand:SF 0 "s_register_operand")
1889 (plus:SF (match_operand:SF 1 "s_register_operand")
1890 (match_operand:SF 2 "s_register_operand")))]
1891 "TARGET_32BIT && TARGET_HARD_FLOAT"
1895 (define_expand "adddf3"
1896 [(set (match_operand:DF 0 "s_register_operand")
1897 (plus:DF (match_operand:DF 1 "s_register_operand")
1898 (match_operand:DF 2 "s_register_operand")))]
1899 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1903 (define_expand "subdi3"
1905 [(set (match_operand:DI 0 "s_register_operand")
1906 (minus:DI (match_operand:DI 1 "reg_or_int_operand")
1907 (match_operand:DI 2 "s_register_operand")))
1908 (clobber (reg:CC CC_REGNUM))])]
1913 if (!REG_P (operands[1]))
1914 operands[1] = force_reg (DImode, operands[1]);
1918 rtx lo_result, hi_result, lo_dest, hi_dest;
1919 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1922 /* Since operands[1] may be an integer, pass it second, so that
1923 any necessary simplifications will be done on the decomposed
1925 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1927 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
1928 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
1930 if (!arm_rhs_operand (lo_op1, SImode))
1931 lo_op1 = force_reg (SImode, lo_op1);
1933 if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
1934 || !arm_rhs_operand (hi_op1, SImode))
1935 hi_op1 = force_reg (SImode, hi_op1);
1938 if (lo_op1 == const0_rtx)
1940 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1941 emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
1943 else if (CONST_INT_P (lo_op1))
1945 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1946 emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
1947 GEN_INT (~UINTVAL (lo_op1))));
1951 cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
1952 emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
1955 condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);
1957 if (hi_op1 == const0_rtx)
1958 emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
1960 emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));
1962 if (lo_result != lo_dest)
1963 emit_move_insn (lo_result, lo_dest);
1965 if (hi_result != hi_dest)
1966 emit_move_insn (hi_result, hi_dest);
1973 (define_expand "subsi3"
1974 [(set (match_operand:SI 0 "s_register_operand")
1975 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1976 (match_operand:SI 2 "s_register_operand")))]
1979 if (CONST_INT_P (operands[1]))
1983 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1984 operands[1] = force_reg (SImode, operands[1]);
1987 arm_split_constant (MINUS, SImode, NULL_RTX,
1988 INTVAL (operands[1]), operands[0],
1990 optimize && can_create_pseudo_p ());
1994 else /* TARGET_THUMB1 */
1995 operands[1] = force_reg (SImode, operands[1]);
2000 ; ??? Check Thumb-2 split length
2001 (define_insn_and_split "*arm_subsi3_insn"
2002 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
2003 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
2004 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
2016 "&& (CONST_INT_P (operands[1])
2017 && !const_ok_for_arm (INTVAL (operands[1])))"
2018 [(clobber (const_int 0))]
2020 arm_split_constant (MINUS, SImode, curr_insn,
2021 INTVAL (operands[1]), operands[0], operands[2], 0);
2024 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
2025 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
2026 (set_attr "predicable" "yes")
2027 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
2028 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
2032 [(match_scratch:SI 3 "r")
2033 (set (match_operand:SI 0 "arm_general_register_operand" "")
2034 (minus:SI (match_operand:SI 1 "const_int_operand" "")
2035 (match_operand:SI 2 "arm_general_register_operand" "")))]
2037 && !const_ok_for_arm (INTVAL (operands[1]))
2038 && const_ok_for_arm (~INTVAL (operands[1]))"
2039 [(set (match_dup 3) (match_dup 1))
2040 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
2044 (define_insn "subsi3_compare0"
2045 [(set (reg:CC_NZ CC_REGNUM)
2047 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
2048 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
2050 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2051 (minus:SI (match_dup 1) (match_dup 2)))]
2056 rsbs%?\\t%0, %2, %1"
2057 [(set_attr "conds" "set")
2058 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
2061 (define_insn "subsi3_compare"
2062 [(set (reg:CC CC_REGNUM)
2063 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
2064 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
2065 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2066 (minus:SI (match_dup 1) (match_dup 2)))]
2071 rsbs%?\\t%0, %2, %1"
2072 [(set_attr "conds" "set")
2073 (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
2076 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
2077 ;; rather than (0 cmp reg). This gives the same results for unsigned
2078 ;; and equality compares which is what we mostly need here.
2079 (define_insn "rsb_imm_compare"
2080 [(set (reg:CC_RSB CC_REGNUM)
2081 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2082 (match_operand 3 "const_int_operand" "")))
2083 (set (match_operand:SI 0 "s_register_operand" "=r")
2084 (minus:SI (match_operand 1 "arm_immediate_operand" "I")
2086 "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
2088 [(set_attr "conds" "set")
2089 (set_attr "type" "alus_imm")]
2092 ;; Similarly, but the result is unused.
2093 (define_insn "rsb_imm_compare_scratch"
2094 [(set (reg:CC_RSB CC_REGNUM)
2095 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2096 (match_operand 1 "arm_not_immediate_operand" "K")))
2097 (clobber (match_scratch:SI 0 "=r"))]
2099 "rsbs\\t%0, %2, #%B1"
2100 [(set_attr "conds" "set")
2101 (set_attr "type" "alus_imm")]
2104 ;; Compare the sum of a value plus a carry against a constant. Uses
2105 ;; RSC, so the result is swapped. Only available on Arm
2106 (define_insn "rscsi3_<CC_EXTEND>out_scratch"
2107 [(set (reg:CC_SWP CC_REGNUM)
2109 (plus:DI (SE:DI (match_operand:SI 2 "s_register_operand" "r"))
2110 (match_operand:DI 3 "arm_borrow_operation" ""))
2111 (match_operand 1 "arm_immediate_operand" "I")))
2112 (clobber (match_scratch:SI 0 "=r"))]
2115 [(set_attr "conds" "set")
2116 (set_attr "type" "alus_imm")]
2119 (define_insn "usubvsi3_borrow"
2120 [(set (reg:CC_B CC_REGNUM)
2122 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
2123 (plus:DI (match_operand:DI 4 "arm_borrow_operation" "")
2125 (match_operand:SI 2 "s_register_operand" "l,r")))))
2126 (set (match_operand:SI 0 "s_register_operand" "=l,r")
2127 (minus:SI (match_dup 1)
2128 (plus:SI (match_operand:SI 3 "arm_borrow_operation" "")
2131 "sbcs%?\\t%0, %1, %2"
2132 [(set_attr "conds" "set")
2133 (set_attr "arch" "t2,*")
2134 (set_attr "length" "2,4")]
2137 (define_insn "usubvsi3_borrow_imm"
2138 [(set (reg:CC_B CC_REGNUM)
2140 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
2141 (plus:DI (match_operand:DI 5 "arm_borrow_operation" "")
2142 (match_operand:DI 3 "const_int_operand" "n,n"))))
2143 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2144 (minus:SI (match_dup 1)
2145 (plus:SI (match_operand:SI 4 "arm_borrow_operation" "")
2146 (match_operand:SI 2 "arm_adcimm_operand" "I,K"))))]
2148 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[3])"
2151 adcs%?\\t%0, %1, #%B2"
2152 [(set_attr "conds" "set")
2153 (set_attr "type" "alus_imm")]
2156 (define_insn "subvsi3_borrow"
2157 [(set (reg:CC_V CC_REGNUM)
2161 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
2162 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
2163 (match_operand:DI 4 "arm_borrow_operation" ""))
2165 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2166 (match_operand:SI 3 "arm_borrow_operation" "")))))
2167 (set (match_operand:SI 0 "s_register_operand" "=l,r")
2168 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2171 "sbcs%?\\t%0, %1, %2"
2172 [(set_attr "conds" "set")
2173 (set_attr "arch" "t2,*")
2174 (set_attr "length" "2,4")]
2177 (define_insn "subvsi3_borrow_imm"
2178 [(set (reg:CC_V CC_REGNUM)
2182 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
2183 (match_operand 2 "arm_adcimm_operand" "I,K"))
2184 (match_operand:DI 4 "arm_borrow_operation" ""))
2186 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2187 (match_operand:SI 3 "arm_borrow_operation" "")))))
2188 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2189 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2192 && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
2195 adcs%?\\t%0, %1, #%B2"
2196 [(set_attr "conds" "set")
2197 (set_attr "type" "alus_imm")]
2200 (define_expand "subsf3"
2201 [(set (match_operand:SF 0 "s_register_operand")
2202 (minus:SF (match_operand:SF 1 "s_register_operand")
2203 (match_operand:SF 2 "s_register_operand")))]
2204 "TARGET_32BIT && TARGET_HARD_FLOAT"
2208 (define_expand "subdf3"
2209 [(set (match_operand:DF 0 "s_register_operand")
2210 (minus:DF (match_operand:DF 1 "s_register_operand")
2211 (match_operand:DF 2 "s_register_operand")))]
2212 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2217 ;; Multiplication insns
2219 (define_expand "mulhi3"
2220 [(set (match_operand:HI 0 "s_register_operand")
2221 (mult:HI (match_operand:HI 1 "s_register_operand")
2222 (match_operand:HI 2 "s_register_operand")))]
2223 "TARGET_DSP_MULTIPLY"
2226 rtx result = gen_reg_rtx (SImode);
2227 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
2228 emit_move_insn (operands[0], gen_lowpart (HImode, result));
2233 (define_expand "mulsi3"
2234 [(set (match_operand:SI 0 "s_register_operand")
2235 (mult:SI (match_operand:SI 2 "s_register_operand")
2236 (match_operand:SI 1 "s_register_operand")))]
2241 ;; Use `&' and then `0' to prevent operands 0 and 2 being the same
2243 [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
2244 (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
2245 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
2247 "mul%?\\t%0, %2, %1"
2248 [(set_attr "type" "mul")
2249 (set_attr "predicable" "yes")
2250 (set_attr "arch" "t2,v6,nov6,nov6")
2251 (set_attr "length" "4")
2252 (set_attr "predicable_short_it" "yes,no,*,*")]
2255 ;; MLA and MLS instruction. Use operand 1 for the accumulator to prefer
2256 ;; reusing the same register.
2259 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
2261 (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
2262 (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
2263 (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
2265 "mla%?\\t%0, %3, %2, %1"
2266 [(set_attr "type" "mla")
2267 (set_attr "predicable" "yes")
2268 (set_attr "arch" "v6,nov6,nov6,nov6")]
2272 [(set (match_operand:SI 0 "s_register_operand" "=r")
2274 (match_operand:SI 1 "s_register_operand" "r")
2275 (mult:SI (match_operand:SI 3 "s_register_operand" "r")
2276 (match_operand:SI 2 "s_register_operand" "r"))))]
2277 "TARGET_32BIT && arm_arch_thumb2"
2278 "mls%?\\t%0, %3, %2, %1"
2279 [(set_attr "type" "mla")
2280 (set_attr "predicable" "yes")]
2283 (define_insn "*mulsi3_compare0"
2284 [(set (reg:CC_NZ CC_REGNUM)
2285 (compare:CC_NZ (mult:SI
2286 (match_operand:SI 2 "s_register_operand" "r,r")
2287 (match_operand:SI 1 "s_register_operand" "%0,r"))
2289 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
2290 (mult:SI (match_dup 2) (match_dup 1)))]
2291 "TARGET_ARM && !arm_arch6"
2292 "muls%?\\t%0, %2, %1"
2293 [(set_attr "conds" "set")
2294 (set_attr "type" "muls")]
2297 (define_insn "*mulsi3_compare0_v6"
2298 [(set (reg:CC_NZ CC_REGNUM)
2299 (compare:CC_NZ (mult:SI
2300 (match_operand:SI 2 "s_register_operand" "r")
2301 (match_operand:SI 1 "s_register_operand" "r"))
2303 (set (match_operand:SI 0 "s_register_operand" "=r")
2304 (mult:SI (match_dup 2) (match_dup 1)))]
2305 "TARGET_ARM && arm_arch6 && optimize_size"
2306 "muls%?\\t%0, %2, %1"
2307 [(set_attr "conds" "set")
2308 (set_attr "type" "muls")]
2311 (define_insn "*mulsi_compare0_scratch"
2312 [(set (reg:CC_NZ CC_REGNUM)
2313 (compare:CC_NZ (mult:SI
2314 (match_operand:SI 2 "s_register_operand" "r,r")
2315 (match_operand:SI 1 "s_register_operand" "%0,r"))
2317 (clobber (match_scratch:SI 0 "=&r,&r"))]
2318 "TARGET_ARM && !arm_arch6"
2319 "muls%?\\t%0, %2, %1"
2320 [(set_attr "conds" "set")
2321 (set_attr "type" "muls")]
2324 (define_insn "*mulsi_compare0_scratch_v6"
2325 [(set (reg:CC_NZ CC_REGNUM)
2326 (compare:CC_NZ (mult:SI
2327 (match_operand:SI 2 "s_register_operand" "r")
2328 (match_operand:SI 1 "s_register_operand" "r"))
2330 (clobber (match_scratch:SI 0 "=r"))]
2331 "TARGET_ARM && arm_arch6 && optimize_size"
2332 "muls%?\\t%0, %2, %1"
2333 [(set_attr "conds" "set")
2334 (set_attr "type" "muls")]
2337 (define_insn "*mulsi3addsi_compare0"
2338 [(set (reg:CC_NZ CC_REGNUM)
2341 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2342 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2343 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
2345 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
2346 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2348 "TARGET_ARM && arm_arch6"
2349 "mlas%?\\t%0, %2, %1, %3"
2350 [(set_attr "conds" "set")
2351 (set_attr "type" "mlas")]
2354 (define_insn "*mulsi3addsi_compare0_v6"
2355 [(set (reg:CC_NZ CC_REGNUM)
2358 (match_operand:SI 2 "s_register_operand" "r")
2359 (match_operand:SI 1 "s_register_operand" "r"))
2360 (match_operand:SI 3 "s_register_operand" "r"))
2362 (set (match_operand:SI 0 "s_register_operand" "=r")
2363 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2365 "TARGET_ARM && arm_arch6 && optimize_size"
2366 "mlas%?\\t%0, %2, %1, %3"
2367 [(set_attr "conds" "set")
2368 (set_attr "type" "mlas")]
2371 (define_insn "*mulsi3addsi_compare0_scratch"
2372 [(set (reg:CC_NZ CC_REGNUM)
2375 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2376 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2377 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
2379 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
2380 "TARGET_ARM && !arm_arch6"
2381 "mlas%?\\t%0, %2, %1, %3"
2382 [(set_attr "conds" "set")
2383 (set_attr "type" "mlas")]
2386 (define_insn "*mulsi3addsi_compare0_scratch_v6"
2387 [(set (reg:CC_NZ CC_REGNUM)
2390 (match_operand:SI 2 "s_register_operand" "r")
2391 (match_operand:SI 1 "s_register_operand" "r"))
2392 (match_operand:SI 3 "s_register_operand" "r"))
2394 (clobber (match_scratch:SI 0 "=r"))]
2395 "TARGET_ARM && arm_arch6 && optimize_size"
2396 "mlas%?\\t%0, %2, %1, %3"
2397 [(set_attr "conds" "set")
2398 (set_attr "type" "mlas")]
2401 ;; 32x32->64 widening multiply.
2402 ;; The only difference between the v3-5 and v6+ versions is the requirement
2403 ;; that the output does not overlap with either input.
2405 (define_expand "<Us>mulsidi3"
2406 [(set (match_operand:DI 0 "s_register_operand")
2408 (SE:DI (match_operand:SI 1 "s_register_operand"))
2409 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
2412 emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
2413 gen_highpart (SImode, operands[0]),
2414 operands[1], operands[2]));
2419 (define_insn "<US>mull"
2420 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2422 (match_operand:SI 2 "s_register_operand" "%r,r")
2423 (match_operand:SI 3 "s_register_operand" "r,r")))
2424 (set (match_operand:SI 1 "s_register_operand" "=r,&r")
2427 (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
2430 "<US>mull%?\\t%0, %1, %2, %3"
2431 [(set_attr "type" "umull")
2432 (set_attr "predicable" "yes")
2433 (set_attr "arch" "v6,nov6")]
2436 (define_expand "<Us>maddsidi4"
2437 [(set (match_operand:DI 0 "s_register_operand")
2440 (SE:DI (match_operand:SI 1 "s_register_operand"))
2441 (SE:DI (match_operand:SI 2 "s_register_operand")))
2442 (match_operand:DI 3 "s_register_operand")))]
2445 emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
2446 gen_lowpart (SImode, operands[3]),
2447 gen_highpart (SImode, operands[0]),
2448 gen_highpart (SImode, operands[3]),
2449 operands[1], operands[2]));
2454 (define_insn "<US>mlal"
2455 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2458 (match_operand:SI 4 "s_register_operand" "%r,r")
2459 (match_operand:SI 5 "s_register_operand" "r,r"))
2460 (match_operand:SI 1 "s_register_operand" "0,0")))
2461 (set (match_operand:SI 2 "s_register_operand" "=r,&r")
2466 (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
2467 (zero_extend:DI (match_dup 1)))
2469 (match_operand:SI 3 "s_register_operand" "2,2")))]
2471 "<US>mlal%?\\t%0, %2, %4, %5"
2472 [(set_attr "type" "umlal")
2473 (set_attr "predicable" "yes")
2474 (set_attr "arch" "v6,nov6")]
2477 (define_expand "<US>mulsi3_highpart"
2479 [(set (match_operand:SI 0 "s_register_operand")
2483 (SE:DI (match_operand:SI 1 "s_register_operand"))
2484 (SE:DI (match_operand:SI 2 "s_register_operand")))
2486 (clobber (match_scratch:SI 3 ""))])]
2491 (define_insn "*<US>mull_high"
2492 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
2496 (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
2497 (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
2499 (clobber (match_scratch:SI 3 "=r,&r,&r"))]
2501 "<US>mull%?\\t%3, %0, %2, %1"
2502 [(set_attr "type" "umull")
2503 (set_attr "predicable" "yes")
2504 (set_attr "arch" "v6,nov6,nov6")]
2507 (define_insn "mulhisi3"
2508 [(set (match_operand:SI 0 "s_register_operand" "=r")
2509 (mult:SI (sign_extend:SI
2510 (match_operand:HI 1 "s_register_operand" "%r"))
2512 (match_operand:HI 2 "s_register_operand" "r"))))]
2513 "TARGET_DSP_MULTIPLY"
2514 "smulbb%?\\t%0, %1, %2"
2515 [(set_attr "type" "smulxy")
2516 (set_attr "predicable" "yes")]
2519 (define_insn "*mulhisi3tb"
2520 [(set (match_operand:SI 0 "s_register_operand" "=r")
2521 (mult:SI (ashiftrt:SI
2522 (match_operand:SI 1 "s_register_operand" "r")
2525 (match_operand:HI 2 "s_register_operand" "r"))))]
2526 "TARGET_DSP_MULTIPLY"
2527 "smultb%?\\t%0, %1, %2"
2528 [(set_attr "type" "smulxy")
2529 (set_attr "predicable" "yes")]
2532 (define_insn "*mulhisi3bt"
2533 [(set (match_operand:SI 0 "s_register_operand" "=r")
2534 (mult:SI (sign_extend:SI
2535 (match_operand:HI 1 "s_register_operand" "r"))
2537 (match_operand:SI 2 "s_register_operand" "r")
2539 "TARGET_DSP_MULTIPLY"
2540 "smulbt%?\\t%0, %1, %2"
2541 [(set_attr "type" "smulxy")
2542 (set_attr "predicable" "yes")]
2545 (define_insn "*mulhisi3tt"
2546 [(set (match_operand:SI 0 "s_register_operand" "=r")
2547 (mult:SI (ashiftrt:SI
2548 (match_operand:SI 1 "s_register_operand" "r")
2551 (match_operand:SI 2 "s_register_operand" "r")
2553 "TARGET_DSP_MULTIPLY"
2554 "smultt%?\\t%0, %1, %2"
2555 [(set_attr "type" "smulxy")
2556 (set_attr "predicable" "yes")]
2559 (define_expand "maddhisi4"
2560 [(set (match_operand:SI 0 "s_register_operand")
2561 (plus:SI (mult:SI (sign_extend:SI
2562 (match_operand:HI 1 "s_register_operand"))
2564 (match_operand:HI 2 "s_register_operand")))
2565 (match_operand:SI 3 "s_register_operand")))]
2566 "TARGET_DSP_MULTIPLY"
2568 /* If this function reads the Q bit from ACLE intrinsics break up the
2569 multiplication and accumulation as an overflow during accumulation will
2570 clobber the Q flag. */
2573 rtx tmp = gen_reg_rtx (SImode);
2574 emit_insn (gen_mulhisi3 (tmp, operands[1], operands[2]));
2575 emit_insn (gen_addsi3 (operands[0], tmp, operands[3]));
2581 (define_insn "*arm_maddhisi4"
2582 [(set (match_operand:SI 0 "s_register_operand" "=r")
2583 (plus:SI (mult:SI (sign_extend:SI
2584 (match_operand:HI 1 "s_register_operand" "r"))
2586 (match_operand:HI 2 "s_register_operand" "r")))
2587 (match_operand:SI 3 "s_register_operand" "r")))]
2588 "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2589 "smlabb%?\\t%0, %1, %2, %3"
2590 [(set_attr "type" "smlaxy")
2591 (set_attr "predicable" "yes")]
2594 (define_insn "arm_smlabb_setq"
2595 [(set (match_operand:SI 0 "s_register_operand" "=r")
2596 (plus:SI (mult:SI (sign_extend:SI
2597 (match_operand:HI 1 "s_register_operand" "r"))
2599 (match_operand:HI 2 "s_register_operand" "r")))
2600 (match_operand:SI 3 "s_register_operand" "r")))
2601 (set (reg:CC APSRQ_REGNUM)
2602 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2603 "TARGET_DSP_MULTIPLY"
2604 "smlabb%?\\t%0, %1, %2, %3"
2605 [(set_attr "type" "smlaxy")
2606 (set_attr "predicable" "yes")]
2609 (define_expand "arm_smlabb"
2610 [(match_operand:SI 0 "s_register_operand")
2611 (match_operand:SI 1 "s_register_operand")
2612 (match_operand:SI 2 "s_register_operand")
2613 (match_operand:SI 3 "s_register_operand")]
2614 "TARGET_DSP_MULTIPLY"
2616 rtx mult1 = gen_lowpart (HImode, operands[1]);
2617 rtx mult2 = gen_lowpart (HImode, operands[2]);
2619 emit_insn (gen_arm_smlabb_setq (operands[0], mult1, mult2, operands[3]));
2621 emit_insn (gen_maddhisi4 (operands[0], mult1, mult2, operands[3]));
2626 ;; Note: there is no maddhisi4ibt because this one is canonical form
2627 (define_insn "maddhisi4tb"
2628 [(set (match_operand:SI 0 "s_register_operand" "=r")
2629 (plus:SI (mult:SI (ashiftrt:SI
2630 (match_operand:SI 1 "s_register_operand" "r")
2633 (match_operand:HI 2 "s_register_operand" "r")))
2634 (match_operand:SI 3 "s_register_operand" "r")))]
2635 "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2636 "smlatb%?\\t%0, %1, %2, %3"
2637 [(set_attr "type" "smlaxy")
2638 (set_attr "predicable" "yes")]
2641 (define_insn "arm_smlatb_setq"
2642 [(set (match_operand:SI 0 "s_register_operand" "=r")
2643 (plus:SI (mult:SI (ashiftrt:SI
2644 (match_operand:SI 1 "s_register_operand" "r")
2647 (match_operand:HI 2 "s_register_operand" "r")))
2648 (match_operand:SI 3 "s_register_operand" "r")))
2649 (set (reg:CC APSRQ_REGNUM)
2650 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2651 "TARGET_DSP_MULTIPLY"
2652 "smlatb%?\\t%0, %1, %2, %3"
2653 [(set_attr "type" "smlaxy")
2654 (set_attr "predicable" "yes")]
2657 (define_expand "arm_smlatb"
2658 [(match_operand:SI 0 "s_register_operand")
2659 (match_operand:SI 1 "s_register_operand")
2660 (match_operand:SI 2 "s_register_operand")
2661 (match_operand:SI 3 "s_register_operand")]
2662 "TARGET_DSP_MULTIPLY"
2664 rtx mult2 = gen_lowpart (HImode, operands[2]);
2666 emit_insn (gen_arm_smlatb_setq (operands[0], operands[1],
2667 mult2, operands[3]));
2669 emit_insn (gen_maddhisi4tb (operands[0], operands[1],
2670 mult2, operands[3]));
2675 (define_insn "maddhisi4tt"
2676 [(set (match_operand:SI 0 "s_register_operand" "=r")
2677 (plus:SI (mult:SI (ashiftrt:SI
2678 (match_operand:SI 1 "s_register_operand" "r")
2681 (match_operand:SI 2 "s_register_operand" "r")
2683 (match_operand:SI 3 "s_register_operand" "r")))]
2684 "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2685 "smlatt%?\\t%0, %1, %2, %3"
2686 [(set_attr "type" "smlaxy")
2687 (set_attr "predicable" "yes")]
2690 (define_insn "arm_smlatt_setq"
2691 [(set (match_operand:SI 0 "s_register_operand" "=r")
2692 (plus:SI (mult:SI (ashiftrt:SI
2693 (match_operand:SI 1 "s_register_operand" "r")
2696 (match_operand:SI 2 "s_register_operand" "r")
2698 (match_operand:SI 3 "s_register_operand" "r")))
2699 (set (reg:CC APSRQ_REGNUM)
2700 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2701 "TARGET_DSP_MULTIPLY"
2702 "smlatt%?\\t%0, %1, %2, %3"
2703 [(set_attr "type" "smlaxy")
2704 (set_attr "predicable" "yes")]
2707 (define_expand "arm_smlatt"
2708 [(match_operand:SI 0 "s_register_operand")
2709 (match_operand:SI 1 "s_register_operand")
2710 (match_operand:SI 2 "s_register_operand")
2711 (match_operand:SI 3 "s_register_operand")]
2712 "TARGET_DSP_MULTIPLY"
2715 emit_insn (gen_arm_smlatt_setq (operands[0], operands[1],
2716 operands[2], operands[3]));
2718 emit_insn (gen_maddhisi4tt (operands[0], operands[1],
2719 operands[2], operands[3]));
2724 (define_insn "maddhidi4"
2725 [(set (match_operand:DI 0 "s_register_operand" "=r")
2727 (mult:DI (sign_extend:DI
2728 (match_operand:HI 1 "s_register_operand" "r"))
2730 (match_operand:HI 2 "s_register_operand" "r")))
2731 (match_operand:DI 3 "s_register_operand" "0")))]
2732 "TARGET_DSP_MULTIPLY"
2733 "smlalbb%?\\t%Q0, %R0, %1, %2"
2734 [(set_attr "type" "smlalxy")
2735 (set_attr "predicable" "yes")])
2737 ;; Note: there is no maddhidi4ibt because this one is canonical form
2738 (define_insn "*maddhidi4tb"
2739 [(set (match_operand:DI 0 "s_register_operand" "=r")
2741 (mult:DI (sign_extend:DI
2743 (match_operand:SI 1 "s_register_operand" "r")
2746 (match_operand:HI 2 "s_register_operand" "r")))
2747 (match_operand:DI 3 "s_register_operand" "0")))]
2748 "TARGET_DSP_MULTIPLY"
2749 "smlaltb%?\\t%Q0, %R0, %1, %2"
2750 [(set_attr "type" "smlalxy")
2751 (set_attr "predicable" "yes")])
2753 (define_insn "*maddhidi4tt"
2754 [(set (match_operand:DI 0 "s_register_operand" "=r")
2756 (mult:DI (sign_extend:DI
2758 (match_operand:SI 1 "s_register_operand" "r")
2762 (match_operand:SI 2 "s_register_operand" "r")
2764 (match_operand:DI 3 "s_register_operand" "0")))]
2765 "TARGET_DSP_MULTIPLY"
2766 "smlaltt%?\\t%Q0, %R0, %1, %2"
2767 [(set_attr "type" "smlalxy")
2768 (set_attr "predicable" "yes")])
2770 (define_insn "arm_<smlaw_op><add_clobber_q_name>_insn"
2771 [(set (match_operand:SI 0 "s_register_operand" "=r")
2773 [(match_operand:SI 1 "s_register_operand" "r")
2774 (match_operand:SI 2 "s_register_operand" "r")
2775 (match_operand:SI 3 "s_register_operand" "r")]
2777 "TARGET_DSP_MULTIPLY && <add_clobber_q_pred>"
2778 "<smlaw_op>%?\\t%0, %1, %2, %3"
2779 [(set_attr "type" "smlaxy")
2780 (set_attr "predicable" "yes")]
2783 (define_expand "arm_<smlaw_op>"
2784 [(set (match_operand:SI 0 "s_register_operand")
2786 [(match_operand:SI 1 "s_register_operand")
2787 (match_operand:SI 2 "s_register_operand")
2788 (match_operand:SI 3 "s_register_operand")]
2790 "TARGET_DSP_MULTIPLY"
2793 emit_insn (gen_arm_<smlaw_op>_setq_insn (operands[0], operands[1],
2794 operands[2], operands[3]));
2796 emit_insn (gen_arm_<smlaw_op>_insn (operands[0], operands[1],
2797 operands[2], operands[3]));
2802 (define_expand "mulsf3"
2803 [(set (match_operand:SF 0 "s_register_operand")
2804 (mult:SF (match_operand:SF 1 "s_register_operand")
2805 (match_operand:SF 2 "s_register_operand")))]
2806 "TARGET_32BIT && TARGET_HARD_FLOAT"
2810 (define_expand "muldf3"
2811 [(set (match_operand:DF 0 "s_register_operand")
2812 (mult:DF (match_operand:DF 1 "s_register_operand")
2813 (match_operand:DF 2 "s_register_operand")))]
2814 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2820 (define_expand "divsf3"
2821 [(set (match_operand:SF 0 "s_register_operand")
2822 (div:SF (match_operand:SF 1 "s_register_operand")
2823 (match_operand:SF 2 "s_register_operand")))]
2824 "TARGET_32BIT && TARGET_HARD_FLOAT"
2827 (define_expand "divdf3"
2828 [(set (match_operand:DF 0 "s_register_operand")
2829 (div:DF (match_operand:DF 1 "s_register_operand")
2830 (match_operand:DF 2 "s_register_operand")))]
2831 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
2835 ; Expand logical operations. The mid-end expander does not split off memory
2836 ; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
2837 ; So an explicit expander is needed to generate better code.
2839 (define_expand "<LOGICAL:optab>di3"
2840 [(set (match_operand:DI 0 "s_register_operand")
2841 (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
2842 (match_operand:DI 2 "arm_<optab>di_operand")))]
2845 rtx low = simplify_gen_binary (<CODE>, SImode,
2846 gen_lowpart (SImode, operands[1]),
2847 gen_lowpart (SImode, operands[2]));
2848 rtx high = simplify_gen_binary (<CODE>, SImode,
2849 gen_highpart (SImode, operands[1]),
2850 gen_highpart_mode (SImode, DImode,
2853 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2854 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2859 (define_expand "one_cmpldi2"
2860 [(set (match_operand:DI 0 "s_register_operand")
2861 (not:DI (match_operand:DI 1 "s_register_operand")))]
2864 rtx low = simplify_gen_unary (NOT, SImode,
2865 gen_lowpart (SImode, operands[1]),
2867 rtx high = simplify_gen_unary (NOT, SImode,
2868 gen_highpart_mode (SImode, DImode,
2872 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2873 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2878 ;; Split DImode and, ior, xor operations. Simply perform the logical
2879 ;; operation on the upper and lower halves of the registers.
2880 ;; This is needed for atomic operations in arm_split_atomic_op.
2881 ;; Avoid splitting IWMMXT instructions.
2883 [(set (match_operand:DI 0 "s_register_operand" "")
2884 (match_operator:DI 6 "logical_binary_operator"
2885 [(match_operand:DI 1 "s_register_operand" "")
2886 (match_operand:DI 2 "s_register_operand" "")]))]
2887 "TARGET_32BIT && reload_completed
2888 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2889 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2890 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
2893 operands[3] = gen_highpart (SImode, operands[0]);
2894 operands[0] = gen_lowpart (SImode, operands[0]);
2895 operands[4] = gen_highpart (SImode, operands[1]);
2896 operands[1] = gen_lowpart (SImode, operands[1]);
2897 operands[5] = gen_highpart (SImode, operands[2]);
2898 operands[2] = gen_lowpart (SImode, operands[2]);
2902 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
2903 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
2905 [(set (match_operand:DI 0 "s_register_operand")
2906 (not:DI (match_operand:DI 1 "s_register_operand")))]
2908 [(set (match_dup 0) (not:SI (match_dup 1)))
2909 (set (match_dup 2) (not:SI (match_dup 3)))]
2912 operands[2] = gen_highpart (SImode, operands[0]);
2913 operands[0] = gen_lowpart (SImode, operands[0]);
2914 operands[3] = gen_highpart (SImode, operands[1]);
2915 operands[1] = gen_lowpart (SImode, operands[1]);
2919 (define_expand "andsi3"
2920 [(set (match_operand:SI 0 "s_register_operand")
2921 (and:SI (match_operand:SI 1 "s_register_operand")
2922 (match_operand:SI 2 "reg_or_int_operand")))]
2927 if (CONST_INT_P (operands[2]))
2929 if (INTVAL (operands[2]) == 255 && arm_arch6)
2931 operands[1] = convert_to_mode (QImode, operands[1], 1);
2932 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2936 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
2937 operands[2] = force_reg (SImode, operands[2]);
2940 arm_split_constant (AND, SImode, NULL_RTX,
2941 INTVAL (operands[2]), operands[0],
2943 optimize && can_create_pseudo_p ());
2949 else /* TARGET_THUMB1 */
2951 if (!CONST_INT_P (operands[2]))
2953 rtx tmp = force_reg (SImode, operands[2]);
2954 if (rtx_equal_p (operands[0], operands[1]))
2958 operands[2] = operands[1];
2966 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2968 operands[2] = force_reg (SImode,
2969 GEN_INT (~INTVAL (operands[2])));
2971 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2976 for (i = 9; i <= 31; i++)
2978 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
2980 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2984 else if ((HOST_WIDE_INT_1 << i) - 1
2985 == ~INTVAL (operands[2]))
2987 rtx shift = GEN_INT (i);
2988 rtx reg = gen_reg_rtx (SImode);
2990 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2991 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2997 operands[2] = force_reg (SImode, operands[2]);
3003 ; ??? Check split length for Thumb-2
3004 (define_insn_and_split "*arm_andsi3_insn"
3005 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
3006 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
3007 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
3012 bic%?\\t%0, %1, #%B2
3016 && CONST_INT_P (operands[2])
3017 && !(const_ok_for_arm (INTVAL (operands[2]))
3018 || const_ok_for_arm (~INTVAL (operands[2])))"
3019 [(clobber (const_int 0))]
3021 arm_split_constant (AND, SImode, curr_insn,
3022 INTVAL (operands[2]), operands[0], operands[1], 0);
3025 [(set_attr "length" "4,4,4,4,16")
3026 (set_attr "predicable" "yes")
3027 (set_attr "predicable_short_it" "no,yes,no,no,no")
3028 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
3031 (define_insn "*andsi3_compare0"
3032 [(set (reg:CC_NZ CC_REGNUM)
3034 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
3035 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
3037 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3038 (and:SI (match_dup 1) (match_dup 2)))]
3042 bics%?\\t%0, %1, #%B2
3043 ands%?\\t%0, %1, %2"
3044 [(set_attr "conds" "set")
3045 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
3048 (define_insn "*andsi3_compare0_scratch"
3049 [(set (reg:CC_NZ CC_REGNUM)
3051 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
3052 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
3054 (clobber (match_scratch:SI 2 "=X,r,X"))]
3058 bics%?\\t%2, %0, #%B1
3060 [(set_attr "conds" "set")
3061 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
3064 (define_insn "*zeroextractsi_compare0_scratch"
3065 [(set (reg:CC_NZ CC_REGNUM)
3066 (compare:CC_NZ (zero_extract:SI
3067 (match_operand:SI 0 "s_register_operand" "r")
3068 (match_operand 1 "const_int_operand" "n")
3069 (match_operand 2 "const_int_operand" "n"))
3072 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
3073 && INTVAL (operands[1]) > 0
3074 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
3075 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
3077 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
3078 << INTVAL (operands[2]));
3079 output_asm_insn (\"tst%?\\t%0, %1\", operands);
3082 [(set_attr "conds" "set")
3083 (set_attr "predicable" "yes")
3084 (set_attr "type" "logics_imm")]
3087 (define_insn_and_split "*ne_zeroextractsi"
3088 [(set (match_operand:SI 0 "s_register_operand" "=r")
3089 (ne:SI (zero_extract:SI
3090 (match_operand:SI 1 "s_register_operand" "r")
3091 (match_operand:SI 2 "const_int_operand" "n")
3092 (match_operand:SI 3 "const_int_operand" "n"))
3094 (clobber (reg:CC CC_REGNUM))]
3096 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3097 && INTVAL (operands[2]) > 0
3098 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3099 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
3102 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3103 && INTVAL (operands[2]) > 0
3104 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3105 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
3106 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3107 (compare:CC_NZ (and:SI (match_dup 1) (match_dup 2))
3109 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
3111 (if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3112 (match_dup 0) (const_int 1)))]
3114 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
3115 << INTVAL (operands[3]));
3117 [(set_attr "conds" "clob")
3118 (set (attr "length")
3119 (if_then_else (eq_attr "is_thumb" "yes")
3122 (set_attr "type" "multiple")]
3125 (define_insn_and_split "*ne_zeroextractsi_shifted"
3126 [(set (match_operand:SI 0 "s_register_operand" "=r")
3127 (ne:SI (zero_extract:SI
3128 (match_operand:SI 1 "s_register_operand" "r")
3129 (match_operand:SI 2 "const_int_operand" "n")
3132 (clobber (reg:CC CC_REGNUM))]
3136 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3137 (compare:CC_NZ (ashift:SI (match_dup 1) (match_dup 2))
3139 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
3141 (if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3142 (match_dup 0) (const_int 1)))]
3144 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
3146 [(set_attr "conds" "clob")
3147 (set_attr "length" "8")
3148 (set_attr "type" "multiple")]
3151 (define_insn_and_split "*ite_ne_zeroextractsi"
3152 [(set (match_operand:SI 0 "s_register_operand" "=r")
3153 (if_then_else:SI (ne (zero_extract:SI
3154 (match_operand:SI 1 "s_register_operand" "r")
3155 (match_operand:SI 2 "const_int_operand" "n")
3156 (match_operand:SI 3 "const_int_operand" "n"))
3158 (match_operand:SI 4 "arm_not_operand" "rIK")
3160 (clobber (reg:CC CC_REGNUM))]
3162 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3163 && INTVAL (operands[2]) > 0
3164 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3165 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
3166 && !reg_overlap_mentioned_p (operands[0], operands[4])"
3169 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3170 && INTVAL (operands[2]) > 0
3171 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3172 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
3173 && !reg_overlap_mentioned_p (operands[0], operands[4])"
3174 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3175 (compare:CC_NZ (and:SI (match_dup 1) (match_dup 2))
3177 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
3179 (if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3180 (match_dup 0) (match_dup 4)))]
3182 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
3183 << INTVAL (operands[3]));
3185 [(set_attr "conds" "clob")
3186 (set_attr "length" "8")
3187 (set_attr "type" "multiple")]
3190 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
3191 [(set (match_operand:SI 0 "s_register_operand" "=r")
3192 (if_then_else:SI (ne (zero_extract:SI
3193 (match_operand:SI 1 "s_register_operand" "r")
3194 (match_operand:SI 2 "const_int_operand" "n")
3197 (match_operand:SI 3 "arm_not_operand" "rIK")
3199 (clobber (reg:CC CC_REGNUM))]
3200 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
3202 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
3203 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3204 (compare:CC_NZ (ashift:SI (match_dup 1) (match_dup 2))
3206 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
3208 (if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3209 (match_dup 0) (match_dup 3)))]
3211 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
3213 [(set_attr "conds" "clob")
3214 (set_attr "length" "8")
3215 (set_attr "type" "multiple")]
3218 ;; ??? Use the Thumb-2 bitfield insert/extract instructions instead?
;; Two splitters that rewrite "shiftable_op (extract (reg, width, pos), reg)"
;; as an ashift into the scratch (operand 6) followed by a right shift that
;; re-extracts the field: lshiftrt for zero_extract, ashiftrt for
;; sign_extract.  The preparation code converts the (width, position) pair
;; of the extract into the two shift amounts (32 - width - pos, 32 - width).
;; NOTE(review): the "(define_split" opener lines and split conditions are
;; missing from this extraction; code left untouched.
3220 [(set (match_operand:SI 0 "s_register_operand" "")
3221 (match_operator:SI 1 "shiftable_operator"
3222 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3223 (match_operand:SI 3 "const_int_operand" "")
3224 (match_operand:SI 4 "const_int_operand" ""))
3225 (match_operand:SI 5 "s_register_operand" "")]))
3226 (clobber (match_operand:SI 6 "s_register_operand" ""))]
3228 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
3231 [(lshiftrt:SI (match_dup 6) (match_dup 4))
3234 HOST_WIDE_INT temp = INTVAL (operands[3]);
3236 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
3237 operands[4] = GEN_INT (32 - temp);
;; Signed variant of the splitter above (sign_extract / ashiftrt).
3242 [(set (match_operand:SI 0 "s_register_operand" "")
3243 (match_operator:SI 1 "shiftable_operator"
3244 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3245 (match_operand:SI 3 "const_int_operand" "")
3246 (match_operand:SI 4 "const_int_operand" ""))
3247 (match_operand:SI 5 "s_register_operand" "")]))
3248 (clobber (match_operand:SI 6 "s_register_operand" ""))]
3250 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
3253 [(ashiftrt:SI (match_dup 6) (match_dup 4))
3256 HOST_WIDE_INT temp = INTVAL (operands[3]);
3258 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
3259 operands[4] = GEN_INT (32 - temp);
;; Bit-field insert expander.  Strategies, in order of preference:
;;   1. Thumb-2 + unaligned access + aligned 16/32-bit field in memory:
;;      use an unaligned store directly.
;;   2. Thumb-2 register destination: BFC (insv_zero) for a zero constant,
;;      a single ORR when possible, otherwise BFI (insv_t2).
;;   3. Otherwise synthesize the insert with AND/ORR/shift sequences,
;;      with special cases for a constant source, a field starting at
;;      bit 0, and a field ending at bit 31.
;; NOTE(review): many interior lines (braces, else-arms, FAIL paths) are
;; missing from this extraction; code left untouched.
3263 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
3264 ;;; represented by the bitfield, then this will produce incorrect results.
3265 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
3266 ;;; which have a real bit-field insert instruction, the truncation happens
3267 ;;; in the bit-field insert instruction itself. Since arm does not have a
3268 ;;; bit-field insert instruction, we would have to emit code here to truncate
3269 ;;; the value before we insert. This loses some of the advantage of having
3270 ;;; this insv pattern, so this pattern needs to be reevaluated.
3272 (define_expand "insv"
3273 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
3274 (match_operand 1 "general_operand")
3275 (match_operand 2 "general_operand"))
3276 (match_operand 3 "reg_or_int_operand"))]
3277 "TARGET_ARM || arm_arch_thumb2"
3280 int start_bit = INTVAL (operands[2]);
3281 int width = INTVAL (operands[1]);
3282 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
3283 rtx target, subtarget;
3285 if (arm_arch_thumb2)
3287 if (unaligned_access && MEM_P (operands[0])
3288 && s_register_operand (operands[3], GET_MODE (operands[3]))
3289 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
3293 if (BYTES_BIG_ENDIAN)
3294 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
3299 base_addr = adjust_address (operands[0], SImode,
3300 start_bit / BITS_PER_UNIT);
3301 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
3305 rtx tmp = gen_reg_rtx (HImode);
3307 base_addr = adjust_address (operands[0], HImode,
3308 start_bit / BITS_PER_UNIT);
3309 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
3310 emit_insn (gen_unaligned_storehi (base_addr, tmp));
3314 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
3316 bool use_bfi = TRUE;
3318 if (CONST_INT_P (operands[3]))
3320 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
3324 emit_insn (gen_insv_zero (operands[0], operands[1],
3329 /* See if the set can be done with a single orr instruction. */
3330 if (val == mask && const_ok_for_arm (val << start_bit))
3336 if (!REG_P (operands[3]))
3337 operands[3] = force_reg (SImode, operands[3]);
3339 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
3348 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
3351 target = copy_rtx (operands[0]);
3352 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
3353 subreg as the final target. */
3354 if (GET_CODE (target) == SUBREG)
3356 subtarget = gen_reg_rtx (SImode);
3357 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
3358 < GET_MODE_SIZE (SImode))
3359 target = SUBREG_REG (target);
3364 if (CONST_INT_P (operands[3]))
3366 /* Since we are inserting a known constant, we may be able to
3367 reduce the number of bits that we have to clear so that
3368 the mask becomes simple. */
3369 /* ??? This code does not check to see if the new mask is actually
3370 simpler. It may not be. */
3371 rtx op1 = gen_reg_rtx (SImode);
3372 /* ??? Truncate operand3 to fit in the bitfield. See comment before
3373 start of this pattern. */
3374 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
3375 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
3377 emit_insn (gen_andsi3 (op1, operands[0],
3378 gen_int_mode (~mask2, SImode)));
3379 emit_insn (gen_iorsi3 (subtarget, op1,
3380 gen_int_mode (op3_value << start_bit, SImode)));
3382 else if (start_bit == 0
3383 && !(const_ok_for_arm (mask)
3384 || const_ok_for_arm (~mask)))
3386 /* A Trick, since we are setting the bottom bits in the word,
3387 we can shift operand[3] up, operand[0] down, OR them together
3388 and rotate the result back again. This takes 3 insns, and
3389 the third might be mergeable into another op. */
3390 /* The shift up copes with the possibility that operand[3] is
3391 wider than the bitfield. */
3392 rtx op0 = gen_reg_rtx (SImode);
3393 rtx op1 = gen_reg_rtx (SImode);
3395 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3396 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
3397 emit_insn (gen_iorsi3 (op1, op1, op0));
3398 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
3400 else if ((width + start_bit == 32)
3401 && !(const_ok_for_arm (mask)
3402 || const_ok_for_arm (~mask)))
3404 /* Similar trick, but slightly less efficient. */
3406 rtx op0 = gen_reg_rtx (SImode);
3407 rtx op1 = gen_reg_rtx (SImode);
3409 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3410 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
3411 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
3412 emit_insn (gen_iorsi3 (subtarget, op1, op0));
3416 rtx op0 = gen_int_mode (mask, SImode);
3417 rtx op1 = gen_reg_rtx (SImode);
3418 rtx op2 = gen_reg_rtx (SImode);
3420 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
3422 rtx tmp = gen_reg_rtx (SImode);
3424 emit_insn (gen_movsi (tmp, op0));
3428 /* Mask out any bits in operand[3] that are not needed. */
3429 emit_insn (gen_andsi3 (op1, operands[3], op0));
3431 if (CONST_INT_P (op0)
3432 && (const_ok_for_arm (mask << start_bit)
3433 || const_ok_for_arm (~(mask << start_bit))))
3435 op0 = gen_int_mode (~(mask << start_bit), SImode);
3436 emit_insn (gen_andsi3 (op2, operands[0], op0));
3440 if (CONST_INT_P (op0))
3442 rtx tmp = gen_reg_rtx (SImode);
3444 emit_insn (gen_movsi (tmp, op0));
3449 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
3451 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
3455 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
3457 emit_insn (gen_iorsi3 (subtarget, op1, op2));
3460 if (subtarget != target)
3462 /* If TARGET is still a SUBREG, then it must be wider than a word,
3463 so we must be careful only to set the subword we were asked to. */
3464 if (GET_CODE (target) == SUBREG)
3465 emit_move_insn (target, subtarget);
3467 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
;; Insert zero into a bitfield (BFC) and insert a register into a
;; bitfield (BFI).  Both are single 4-byte predicable Thumb-2/ARMv6T2
;; instructions; operands 1 and 2 are the width and LSB position.
3474 (define_insn "insv_zero"
3475 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3476 (match_operand:SI 1 "const_int_M_operand" "M")
3477 (match_operand:SI 2 "const_int_M_operand" "M"))
3481 [(set_attr "length" "4")
3482 (set_attr "predicable" "yes")
3483 (set_attr "type" "bfm")]
;; BFI: insert the low bits of operand 3 into the field of operand 0.
3486 (define_insn "insv_t2"
3487 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3488 (match_operand:SI 1 "const_int_M_operand" "M")
3489 (match_operand:SI 2 "const_int_M_operand" "M"))
3490 (match_operand:SI 3 "s_register_operand" "r"))]
3492 "bfi%?\t%0, %3, %2, %1"
3493 [(set_attr "length" "4")
3494 (set_attr "predicable" "yes")
3495 (set_attr "type" "bfm")]
;; AND-with-complement (BIC) family.  Note the operand numbering of the
;; base pattern: operand 2 is the complemented input, operand 1 the plain
;; one, so the template is "bic %0, %1, %2".
3498 (define_insn "andsi_notsi_si"
3499 [(set (match_operand:SI 0 "s_register_operand" "=r")
3500 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3501 (match_operand:SI 1 "s_register_operand" "r")))]
3503 "bic%?\\t%0, %1, %2"
3504 [(set_attr "predicable" "yes")
3505 (set_attr "type" "logic_reg")]
;; BIC with a shifted second operand.  The "arch" attribute restricts the
;; register-shift alternative to ARM mode (Thumb-2 has no shift-by-register
;; form of BIC).
3508 (define_insn "andsi_not_shiftsi_si"
3509 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3510 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
3511 [(match_operand:SI 2 "s_register_operand" "r,r")
3512 (match_operand:SI 3 "shift_amount_operand" "M,r")]))
3513 (match_operand:SI 1 "s_register_operand" "r,r")))]
3515 "bic%?\\t%0, %1, %2%S4"
3516 [(set_attr "predicable" "yes")
3517 (set_attr "shift" "2")
3518 (set_attr "arch" "32,a")
3519 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3522 ;; Shifted bics pattern used to set up CC status register and not reusing
3523 ;; bics output. Pattern restricts Thumb2 shift operand as bics for Thumb2
3524 ;; does not support shift by register.
3525 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
3526 [(set (reg:CC_NZ CC_REGNUM)
3528 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3529 [(match_operand:SI 1 "s_register_operand" "r,r")
3530 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
3531 (match_operand:SI 3 "s_register_operand" "r,r"))
3533 (clobber (match_scratch:SI 4 "=r,r"))]
3535 "bics%?\\t%4, %3, %1%S0"
3536 [(set_attr "predicable" "yes")
3537 (set_attr "arch" "32,a")
3538 (set_attr "conds" "set")
3539 (set_attr "shift" "1")
3540 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3543 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the bics result is also
3544 ;; getting reused later.
3545 (define_insn "andsi_not_shiftsi_si_scc"
3546 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3548 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3549 [(match_operand:SI 1 "s_register_operand" "r,r")
3550 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
3551 (match_operand:SI 3 "s_register_operand" "r,r"))
3553 (set (match_operand:SI 4 "s_register_operand" "=r,r")
3554 (and:SI (not:SI (match_op_dup 0
3559 "bics%?\\t%4, %3, %1%S0"
3560 [(set_attr "predicable" "yes")
3561 (set_attr "arch" "32,a")
3562 (set_attr "conds" "set")
3563 (set_attr "shift" "1")
3564 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
;; BICS writing both the flags and the result register.
3567 (define_insn "*andsi_notsi_si_compare0"
3568 [(set (reg:CC_NZ CC_REGNUM)
3570 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3571 (match_operand:SI 1 "s_register_operand" "r"))
3573 (set (match_operand:SI 0 "s_register_operand" "=r")
3574 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
3577 [(set_attr "conds" "set")
3578 (set_attr "type" "logics_shift_reg")]
;; As above but the result is discarded (flags only).
3581 (define_insn "*andsi_notsi_si_compare0_scratch"
3582 [(set (reg:CC_NZ CC_REGNUM)
3584 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3585 (match_operand:SI 1 "s_register_operand" "r"))
3587 (clobber (match_scratch:SI 0 "=r"))]
3590 [(set_attr "conds" "set")
3591 (set_attr "type" "logics_shift_reg")]
;; Inclusive-OR.  The expander either forces an awkward constant into a
;; register or lets arm_split_constant synthesize it; the Thumb-1 path
;; (lines below the else) copies through a temporary register.
3594 (define_expand "iorsi3"
3595 [(set (match_operand:SI 0 "s_register_operand")
3596 (ior:SI (match_operand:SI 1 "s_register_operand")
3597 (match_operand:SI 2 "reg_or_int_operand")))]
3600 if (CONST_INT_P (operands[2]))
3604 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
3605 operands[2] = force_reg (SImode, operands[2]);
3608 arm_split_constant (IOR, SImode, NULL_RTX,
3609 INTVAL (operands[2]), operands[0],
3611 optimize && can_create_pseudo_p ());
3615 else /* TARGET_THUMB1 */
3617 rtx tmp = force_reg (SImode, operands[2]);
3618 if (rtx_equal_p (operands[0], operands[1]))
3622 operands[2] = operands[1];
;; ORR insn; splits a constant that neither ORR nor (Thumb-2) ORN can
;; encode into an arm_split_constant sequence.
3630 (define_insn_and_split "*iorsi3_insn"
3631 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
3632 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
3633 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
3638 orn%?\\t%0, %1, #%B2
3642 && CONST_INT_P (operands[2])
3643 && !(const_ok_for_arm (INTVAL (operands[2]))
3644 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
3645 [(clobber (const_int 0))]
3647 arm_split_constant (IOR, SImode, curr_insn,
3648 INTVAL (operands[2]), operands[0], operands[1], 0);
3651 [(set_attr "length" "4,4,4,4,16")
3652 (set_attr "arch" "32,t2,t2,32,32")
3653 (set_attr "predicable" "yes")
3654 (set_attr "predicable_short_it" "no,yes,no,no,no")
3655 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
;; Peephole: load an inverted-encodable IOR constant into a scratch
;; register first when the constant itself is not encodable.
3659 [(match_scratch:SI 3 "r")
3660 (set (match_operand:SI 0 "arm_general_register_operand" "")
3661 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
3662 (match_operand:SI 2 "const_int_operand" "")))]
3664 && !const_ok_for_arm (INTVAL (operands[2]))
3665 && const_ok_for_arm (~INTVAL (operands[2]))"
3666 [(set (match_dup 3) (match_dup 2))
3667 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
;; ORRS: OR writing both flags and result.
3671 (define_insn "*iorsi3_compare0"
3672 [(set (reg:CC_NZ CC_REGNUM)
3674 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3675 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3677 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
3678 (ior:SI (match_dup 1) (match_dup 2)))]
3680 "orrs%?\\t%0, %1, %2"
3681 [(set_attr "conds" "set")
3682 (set_attr "arch" "*,t2,*")
3683 (set_attr "length" "4,2,4")
3684 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
;; ORRS with the arithmetic result discarded (flags only).
3687 (define_insn "*iorsi3_compare0_scratch"
3688 [(set (reg:CC_NZ CC_REGNUM)
3690 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3691 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3693 (clobber (match_scratch:SI 0 "=r,l,r"))]
3695 "orrs%?\\t%0, %1, %2"
3696 [(set_attr "conds" "set")
3697 (set_attr "arch" "*,t2,*")
3698 (set_attr "length" "4,2,4")
3699 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
;; Exclusive-OR.  Mirrors the iorsi3 expander: constants that EOR cannot
;; encode are either forced into a register or synthesized by
;; arm_split_constant; Thumb-1 goes through a temporary.
3702 (define_expand "xorsi3"
3703 [(set (match_operand:SI 0 "s_register_operand")
3704 (xor:SI (match_operand:SI 1 "s_register_operand")
3705 (match_operand:SI 2 "reg_or_int_operand")))]
3707 "if (CONST_INT_P (operands[2]))
3711 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
3712 operands[2] = force_reg (SImode, operands[2]);
3715 arm_split_constant (XOR, SImode, NULL_RTX,
3716 INTVAL (operands[2]), operands[0],
3718 optimize && can_create_pseudo_p ());
3722 else /* TARGET_THUMB1 */
3724 rtx tmp = force_reg (SImode, operands[2]);
3725 if (rtx_equal_p (operands[0], operands[1]))
3729 operands[2] = operands[1];
;; EOR insn; unencodable constants are split via arm_split_constant.
3736 (define_insn_and_split "*arm_xorsi3"
3737 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
3738 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
3739 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
3747 && CONST_INT_P (operands[2])
3748 && !const_ok_for_arm (INTVAL (operands[2]))"
3749 [(clobber (const_int 0))]
3751 arm_split_constant (XOR, SImode, curr_insn,
3752 INTVAL (operands[2]), operands[0], operands[1], 0);
3755 [(set_attr "length" "4,4,4,16")
3756 (set_attr "predicable" "yes")
3757 (set_attr "predicable_short_it" "no,yes,no,no")
3758 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
;; EORS: XOR writing both flags and result.
3761 (define_insn "*xorsi3_compare0"
3762 [(set (reg:CC_NZ CC_REGNUM)
3763 (compare:CC_NZ (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
3764 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3766 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3767 (xor:SI (match_dup 1) (match_dup 2)))]
3769 "eors%?\\t%0, %1, %2"
3770 [(set_attr "conds" "set")
3771 (set_attr "type" "logics_imm,logics_reg")]
;; TEQ: XOR used purely to set the flags (no result register).
3774 (define_insn "*xorsi3_compare0_scratch"
3775 [(set (reg:CC_NZ CC_REGNUM)
3776 (compare:CC_NZ (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
3777 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
3781 [(set_attr "conds" "set")
3782 (set_attr "type" "logics_imm,logics_reg")]
3785 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
3786 ; (NOT D) we can sometimes merge the final NOT into one of the following
;; De Morgan rewrite splitter: uses operand 4 as the intermediate D.
3790 [(set (match_operand:SI 0 "s_register_operand" "")
3791 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3792 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3793 (match_operand:SI 3 "arm_rhs_operand" "")))
3794 (clobber (match_operand:SI 4 "s_register_operand" ""))]
3796 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3797 (not:SI (match_dup 3))))
3798 (set (match_dup 0) (not:SI (match_dup 4)))]
;; (A | B) & ~C as a two-insn ORR + BIC sequence, split after reload.
;; Operands 4/5 are arranged so that a constant ~C never appears wrapped
;; in a NOT (see the preparation code below).
3802 (define_insn_and_split "*andsi_iorsi3_notsi"
3803 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3804 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3805 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3806 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3808 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3809 "&& reload_completed"
3810 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
3811 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
3813 /* If operands[3] is a constant make sure to fold the NOT into it
3814 to avoid creating a NOT of a CONST_INT. */
3815 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
3816 if (CONST_INT_P (not_rtx))
3818 operands[4] = operands[0];
3819 operands[5] = not_rtx;
3823 operands[5] = operands[0];
3824 operands[4] = not_rtx;
3827 [(set_attr "length" "8")
3828 (set_attr "ce_count" "2")
3829 (set_attr "predicable" "yes")
3830 (set_attr "type" "multiple")]
3833 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3834 ; insns are available?
;; Four splitters covering the commutative placements of a zero_extract or
;; sign_extract combined (with the same logical_binary_operator, checked in
;; the condition) with a matching right shift.  Each rewrites the extract
;; as an ashift into the scratch (operand 8) plus a right shift, adjusting
;; operand 4 to 32 - (width + position).
;; NOTE(review): the "(define_split" opener lines are missing from this
;; extraction; code left untouched.
3836 [(set (match_operand:SI 0 "s_register_operand" "")
3837 (match_operator:SI 1 "logical_binary_operator"
3838 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3839 (match_operand:SI 3 "const_int_operand" "")
3840 (match_operand:SI 4 "const_int_operand" ""))
3841 (match_operator:SI 9 "logical_binary_operator"
3842 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3843 (match_operand:SI 6 "const_int_operand" ""))
3844 (match_operand:SI 7 "s_register_operand" "")])]))
3845 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3847 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3848 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3851 [(ashift:SI (match_dup 2) (match_dup 4))
3855 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3858 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Same as above with the extract as the second operand of the outer op.
3862 [(set (match_operand:SI 0 "s_register_operand" "")
3863 (match_operator:SI 1 "logical_binary_operator"
3864 [(match_operator:SI 9 "logical_binary_operator"
3865 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3866 (match_operand:SI 6 "const_int_operand" ""))
3867 (match_operand:SI 7 "s_register_operand" "")])
3868 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3869 (match_operand:SI 3 "const_int_operand" "")
3870 (match_operand:SI 4 "const_int_operand" ""))]))
3871 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3873 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3874 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3877 [(ashift:SI (match_dup 2) (match_dup 4))
3881 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3884 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Signed variants (sign_extract / ashiftrt) of the two splitters above.
3888 [(set (match_operand:SI 0 "s_register_operand" "")
3889 (match_operator:SI 1 "logical_binary_operator"
3890 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3891 (match_operand:SI 3 "const_int_operand" "")
3892 (match_operand:SI 4 "const_int_operand" ""))
3893 (match_operator:SI 9 "logical_binary_operator"
3894 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3895 (match_operand:SI 6 "const_int_operand" ""))
3896 (match_operand:SI 7 "s_register_operand" "")])]))
3897 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3899 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3900 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3903 [(ashift:SI (match_dup 2) (match_dup 4))
3907 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3910 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3914 [(set (match_operand:SI 0 "s_register_operand" "")
3915 (match_operator:SI 1 "logical_binary_operator"
3916 [(match_operator:SI 9 "logical_binary_operator"
3917 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3918 (match_operand:SI 6 "const_int_operand" ""))
3919 (match_operand:SI 7 "s_register_operand" "")])
3920 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3921 (match_operand:SI 3 "const_int_operand" "")
3922 (match_operand:SI 4 "const_int_operand" ""))]))
3923 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3925 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3926 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3929 [(ashift:SI (match_dup 2) (match_dup 4))
3933 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3936 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3940 ;; Minimum and maximum insns
;; smax expander: smax(x, 0) and smax(x, -1) have single-insn forms
;; (BIC/ORR with asr #31) that do not touch the flags, so emit those
;; without the CC clobber; everything else keeps the clobber for the
;; cmp + conditional-move sequence.
3942 (define_expand "smaxsi3"
3944 (set (match_operand:SI 0 "s_register_operand")
3945 (smax:SI (match_operand:SI 1 "s_register_operand")
3946 (match_operand:SI 2 "arm_rhs_operand")))
3947 (clobber (reg:CC CC_REGNUM))])]
3950 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3952 /* No need for a clobber of the condition code register here. */
3953 emit_insn (gen_rtx_SET (operands[0],
3954 gen_rtx_SMAX (SImode, operands[1],
;; max(x, 0): clear all bits when the sign bit is set.
3960 (define_insn "*smax_0"
3961 [(set (match_operand:SI 0 "s_register_operand" "=r")
3962 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3965 "bic%?\\t%0, %1, %1, asr #31"
3966 [(set_attr "predicable" "yes")
3967 (set_attr "type" "logic_shift_reg")]
;; max(x, -1): set all bits when the sign bit is set.
3970 (define_insn "*smax_m1"
3971 [(set (match_operand:SI 0 "s_register_operand" "=r")
3972 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3975 "orr%?\\t%0, %1, %1, asr #31"
3976 [(set_attr "predicable" "yes")
3977 (set_attr "type" "logic_shift_reg")]
;; General signed max: cmp followed by one or two conditional moves
;; (8 or 12 bytes depending on whether operand 0 already equals operand 1).
3980 (define_insn_and_split "*arm_smax_insn"
3981 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3982 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3983 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3984 (clobber (reg:CC CC_REGNUM))]
3987 ; cmp\\t%1, %2\;movlt\\t%0, %2
3988 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3990 [(set (reg:CC CC_REGNUM)
3991 (compare:CC (match_dup 1) (match_dup 2)))
3993 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
3997 [(set_attr "conds" "clob")
3998 (set_attr "length" "8,12")
3999 (set_attr "type" "multiple")]
;; smin expander: only smin(x, 0) has the flag-free single-insn form.
4002 (define_expand "sminsi3"
4004 (set (match_operand:SI 0 "s_register_operand")
4005 (smin:SI (match_operand:SI 1 "s_register_operand")
4006 (match_operand:SI 2 "arm_rhs_operand")))
4007 (clobber (reg:CC CC_REGNUM))])]
4010 if (operands[2] == const0_rtx)
4012 /* No need for a clobber of the condition code register here. */
4013 emit_insn (gen_rtx_SET (operands[0],
4014 gen_rtx_SMIN (SImode, operands[1],
;; min(x, 0): keep x only when the sign bit is set.
4020 (define_insn "*smin_0"
4021 [(set (match_operand:SI 0 "s_register_operand" "=r")
4022 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
4025 "and%?\\t%0, %1, %1, asr #31"
4026 [(set_attr "predicable" "yes")
4027 (set_attr "type" "logic_shift_reg")]
;; General signed min: cmp + conditional moves, like *arm_smax_insn.
4030 (define_insn_and_split "*arm_smin_insn"
4031 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4032 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
4033 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
4034 (clobber (reg:CC CC_REGNUM))]
4037 ; cmp\\t%1, %2\;movge\\t%0, %2
4038 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
4040 [(set (reg:CC CC_REGNUM)
4041 (compare:CC (match_dup 1) (match_dup 2)))
4043 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
4047 [(set_attr "conds" "clob")
4048 (set_attr "length" "8,12")
4049 (set_attr "type" "multiple,multiple")]
;; Unsigned max expander: no special cases, always clobbers CC.
4052 (define_expand "umaxsi3"
4054 (set (match_operand:SI 0 "s_register_operand")
4055 (umax:SI (match_operand:SI 1 "s_register_operand")
4056 (match_operand:SI 2 "arm_rhs_operand")))
4057 (clobber (reg:CC CC_REGNUM))])]
;; General unsigned max: cmp + conditional move(s), split after reload.
;; Fix: the scheduling type was "store_4" (a 4-register store multiple),
;; which misdescribes this cmp/mov sequence to the pipeline models; use
;; "multiple" for consistency with *arm_smax_insn / *arm_smin_insn.
4062 (define_insn_and_split "*arm_umaxsi3"
4063 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
4064 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
4065 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
4066 (clobber (reg:CC CC_REGNUM))]
4069 ; cmp\\t%1, %2\;movcc\\t%0, %2
4070 ; cmp\\t%1, %2\;movcs\\t%0, %1
4071 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
4073 [(set (reg:CC CC_REGNUM)
4074 (compare:CC (match_dup 1) (match_dup 2)))
4076 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
4080 [(set_attr "conds" "clob")
4081 (set_attr "length" "8,8,12")
4082 (set_attr "type" "multiple")]
;; Unsigned min expander: no special cases, always clobbers CC.
4085 (define_expand "uminsi3"
4087 (set (match_operand:SI 0 "s_register_operand")
4088 (umin:SI (match_operand:SI 1 "s_register_operand")
4089 (match_operand:SI 2 "arm_rhs_operand")))
4090 (clobber (reg:CC CC_REGNUM))])]
;; General unsigned min: cmp + conditional move(s), split after reload.
;; Fix: the scheduling type was "store_4" (a 4-register store multiple),
;; which misdescribes this cmp/mov sequence to the pipeline models; use
;; "multiple" for consistency with *arm_smax_insn / *arm_smin_insn.
4095 (define_insn_and_split "*arm_uminsi3"
4096 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
4097 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
4098 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
4099 (clobber (reg:CC CC_REGNUM))]
4102 ; cmp\\t%1, %2\;movcs\\t%0, %2
4103 ; cmp\\t%1, %2\;movcc\\t%0, %1
4104 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
4106 [(set (reg:CC CC_REGNUM)
4107 (compare:CC (match_dup 1) (match_dup 2)))
4109 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
4113 [(set_attr "conds" "clob")
4114 (set_attr "length" "8,8,12")
4115 (set_attr "type" "multiple")]
;; Store min(a,b)/max(a,b) straight to memory as cmp + two conditional
;; stores; only worthwhile when optimizing for size.  Here the "store_4"
;; type attribute is correct — the emitted insns really are stores.
4118 (define_insn "*store_minmaxsi"
4119 [(set (match_operand:SI 0 "memory_operand" "=m")
4120 (match_operator:SI 3 "minmax_operator"
4121 [(match_operand:SI 1 "s_register_operand" "r")
4122 (match_operand:SI 2 "s_register_operand" "r")]))
4123 (clobber (reg:CC CC_REGNUM))]
4124 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
4126 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
4127 operands[1], operands[2]);
4128 output_asm_insn (\"cmp\\t%1, %2\", operands);
4130 output_asm_insn (\"ite\t%d3\", operands);
4131 output_asm_insn (\"str%d3\\t%1, %0\", operands);
4132 output_asm_insn (\"str%D3\\t%2, %0\", operands);
4135 [(set_attr "conds" "clob")
4136 (set (attr "length")
4137 (if_then_else (eq_attr "is_thumb" "yes")
4140 (set_attr "type" "store_4")]
4143 ; Reject the frame pointer in operand[1], since reloading this after
4144 ; it has been eliminated can cause carnage.
;; Shiftable op combined with a min/max: cmp then two conditional ALU ops.
;; The C body degrades to "#" (forcing a split path) except for the
;; commutative PLUS/IOR/XOR alternative-0 case with a zero operand 3.
4145 (define_insn "*minmax_arithsi"
4146 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4147 (match_operator:SI 4 "shiftable_operator"
4148 [(match_operator:SI 5 "minmax_operator"
4149 [(match_operand:SI 2 "s_register_operand" "r,r")
4150 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
4151 (match_operand:SI 1 "s_register_operand" "0,?r")]))
4152 (clobber (reg:CC CC_REGNUM))]
4153 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
4156 enum rtx_code code = GET_CODE (operands[4]);
4159 if (which_alternative != 0 || operands[3] != const0_rtx
4160 || (code != PLUS && code != IOR && code != XOR))
4165 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
4166 operands[2], operands[3]);
4167 output_asm_insn (\"cmp\\t%2, %3\", operands);
4171 output_asm_insn (\"ite\\t%d5\", operands);
4173 output_asm_insn (\"it\\t%d5\", operands);
4175 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
4177 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
4180 [(set_attr "conds" "clob")
4181 (set (attr "length")
4182 (if_then_else (eq_attr "is_thumb" "yes")
4185 (set_attr "type" "multiple")]
4188 ; Reject the frame pointer in operand[1], since reloading this after
4189 ; it has been eliminated can cause carnage.
;; Non-canonical form: operand1 minus a min/max.  Split after reload into
;; a compare followed by two cond_exec'd alternatives (subtract vs. move),
;; with the condition of the second arm being the reverse (computed in the
;; preparation code, using the unordered-aware reversal for FP CC modes).
4190 (define_insn_and_split "*minmax_arithsi_non_canon"
4191 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
4193 (match_operand:SI 1 "s_register_operand" "0,?Ts")
4194 (match_operator:SI 4 "minmax_operator"
4195 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
4196 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
4197 (clobber (reg:CC CC_REGNUM))]
4198 "TARGET_32BIT && !arm_eliminable_register (operands[1])
4199 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
4201 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
4202 [(set (reg:CC CC_REGNUM)
4203 (compare:CC (match_dup 2) (match_dup 3)))
4205 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
4207 (minus:SI (match_dup 1)
4209 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
4213 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
4214 operands[2], operands[3]);
4215 enum rtx_code rc = minmax_code (operands[4]);
4216 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
4217 operands[2], operands[3]);
4219 if (mode == CCFPmode || mode == CCFPEmode)
4220 rc = reverse_condition_maybe_unordered (rc);
4222 rc = reverse_condition (rc);
4223 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
4224 if (CONST_INT_P (operands[3]))
4225 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
4227 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
4229 [(set_attr "conds" "clob")
4230 (set (attr "length")
4231 (if_then_else (eq_attr "is_thumb" "yes")
4234 (set_attr "type" "multiple")]
;; DSP saturating add/subtract (QADD/QSUB).  The expander selects the
;; Q-bit-setting variant when required (presumably when the Q flag is
;; observable — the guard line is missing from this extraction).
4238 (define_expand "arm_<ss_op>"
4239 [(set (match_operand:SI 0 "s_register_operand")
4240 (SSPLUSMINUS:SI (match_operand:SI 1 "s_register_operand")
4241 (match_operand:SI 2 "s_register_operand")))]
4242 "TARGET_DSP_MULTIPLY"
4245 emit_insn (gen_arm_<ss_op>_setq_insn (operands[0],
4246 operands[1], operands[2]));
4248 emit_insn (gen_arm_<ss_op>_insn (operands[0], operands[1], operands[2]));
;; The actual QADD/QSUB instruction (plain and Q-bit-clobbering variants
;; via the add_clobber_q iterator attributes).
4253 (define_insn "arm_<ss_op><add_clobber_q_name>_insn"
4254 [(set (match_operand:SI 0 "s_register_operand" "=r")
4255 (SSPLUSMINUS:SI (match_operand:SI 1 "s_register_operand" "r")
4256 (match_operand:SI 2 "s_register_operand" "r")))]
4257 "TARGET_DSP_MULTIPLY && <add_clobber_q_pred>"
4258 "<ss_op>%?\t%0, %1, %2"
4259 [(set_attr "predicable" "yes")
4260 (set_attr "type" "alu_dsp_reg")]
;; Iterator plumbing for the SSAT/USAT patterns below: SATrev maps each
;; min/max to its dual; SATlo/SAThi give the operand numbers holding the
;; lower/upper bound for each nesting order.
4263 (define_code_iterator SAT [smin smax])
4264 (define_code_attr SATrev [(smin "smax") (smax "smin")])
4265 (define_code_attr SATlo [(smin "1") (smax "2")])
4266 (define_code_attr SAThi [(smin "2") (smax "1")])
;; __ssat(x, n): saturate to the signed n-bit range
;; [-2^(n-1), 2^(n-1) - 1], expressed as smin(smax(...)) so it matches
;; the satsi_smin pattern below.
4268 (define_expand "arm_ssat"
4269 [(match_operand:SI 0 "s_register_operand")
4270 (match_operand:SI 1 "s_register_operand")
4271 (match_operand:SI 2 "const_int_operand")]
4272 "TARGET_32BIT && arm_arch6"
4274 HOST_WIDE_INT val = INTVAL (operands[2]);
4275 /* The builtin checking code should have ensured the right
4276 range for the immediate. */
4277 gcc_assert (IN_RANGE (val, 1, 32));
4278 HOST_WIDE_INT upper_bound = (HOST_WIDE_INT_1 << (val - 1)) - 1;
4279 HOST_WIDE_INT lower_bound = -upper_bound - 1;
4280 rtx up_rtx = gen_int_mode (upper_bound, SImode);
4281 rtx lo_rtx = gen_int_mode (lower_bound, SImode);
4283 emit_insn (gen_satsi_smin_setq (operands[0], lo_rtx,
4284 up_rtx, operands[1]));
4286 emit_insn (gen_satsi_smin (operands[0], lo_rtx, up_rtx, operands[1]));
;; __usat(x, n): saturate to the unsigned range [0, 2^n - 1].
4291 (define_expand "arm_usat"
4292 [(match_operand:SI 0 "s_register_operand")
4293 (match_operand:SI 1 "s_register_operand")
4294 (match_operand:SI 2 "const_int_operand")]
4295 "TARGET_32BIT && arm_arch6"
4297 HOST_WIDE_INT val = INTVAL (operands[2]);
4298 /* The builtin checking code should have ensured the right
4299 range for the immediate. */
4300 gcc_assert (IN_RANGE (val, 0, 31));
4301 HOST_WIDE_INT upper_bound = (HOST_WIDE_INT_1 << val) - 1;
4302 rtx up_rtx = gen_int_mode (upper_bound, SImode);
4303 rtx lo_rtx = CONST0_RTX (SImode);
4305 emit_insn (gen_satsi_smin_setq (operands[0], lo_rtx, up_rtx,
4308 emit_insn (gen_satsi_smin (operands[0], lo_rtx, up_rtx, operands[1]));
;; Read the APSR into a general register (MRS); modelled as depending on
;; the Q-bit pseudo register so reads are ordered after Q-bit writers.
4313 (define_insn "arm_get_apsr"
4314 [(set (match_operand:SI 0 "s_register_operand" "=r")
4315 (unspec:SI [(reg:CC APSRQ_REGNUM)] UNSPEC_APSR_READ))]
4318 [(set_attr "predicable" "yes")
4319 (set_attr "conds" "use")]
;; Write the NZCVQ flags of the APSR from a general register (MSR).
4322 (define_insn "arm_set_apsr"
4323 [(set (reg:CC APSRQ_REGNUM)
4325 [(match_operand:SI 0 "s_register_operand" "r")] VUNSPEC_APSR_WRITE))]
4327 "msr%?\tAPSR_nzcvq, %0"
4328 [(set_attr "predicable" "yes")
4329 (set_attr "conds" "set")]
4332 ;; Read the APSR and extract the Q bit (bit 27)
4333 (define_expand "arm_saturation_occurred"
4334 [(match_operand:SI 0 "s_register_operand")]
4337 rtx apsr = gen_reg_rtx (SImode);
4338 emit_insn (gen_arm_get_apsr (apsr));
4339 emit_insn (gen_extzv (operands[0], apsr, CONST1_RTX (SImode),
4340 gen_int_mode (27, SImode)));
4345 ;; Read the APSR and set the Q bit (bit position 27) according to operand 0
4346 (define_expand "arm_set_saturation"
4347 [(match_operand:SI 0 "reg_or_int_operand")]
4350 rtx apsr = gen_reg_rtx (SImode);
4351 emit_insn (gen_arm_get_apsr (apsr));
4352 rtx to_insert = gen_reg_rtx (SImode);
4353 if (CONST_INT_P (operands[0]))
4354 emit_move_insn (to_insert, operands[0] == CONST0_RTX (SImode)
4355 ? CONST0_RTX (SImode) : CONST1_RTX (SImode))
4358 rtx cmp = gen_rtx_NE (SImode, operands[0], CONST0_RTX (SImode));
4359 emit_insn (gen_cstoresi4 (to_insert, cmp, operands[0],
4360 CONST0_RTX (SImode)));
4362 emit_insn (gen_insv (apsr, CONST1_RTX (SImode),
4363 gen_int_mode (27, SImode), to_insert));
4364 emit_insn (gen_arm_set_apsr (apsr));
;; NOTE(review): lines elided from this listing (numbering gaps); the
;; signed/unsigned branch conditions around the return statements are
;; not all visible here.
;; Saturating min/max to a power-of-two range: matched against a
;; smin/smax pair (via the SAT/SATrev iterators) of a register with two
;; constant bounds; arm_sat_operator_match validates the bounds and
;; yields the SSAT/USAT bit-width mask.  Emits ssat/usat on the register.
4369 (define_insn "satsi_<SAT:code><add_clobber_q_name>"
4370 [(set (match_operand:SI 0 "s_register_operand" "=r")
4371 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
4372 (match_operand:SI 1 "const_int_operand" "i"))
4373 (match_operand:SI 2 "const_int_operand" "i")))]
4374 "TARGET_32BIT && arm_arch6 && <add_clobber_q_pred>
4375 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4379 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4380 &mask, &signed_sat))
4383 operands[1] = GEN_INT (mask);
4385 return "ssat%?\t%0, %1, %3";
4387 return "usat%?\t%0, %1, %3";
4389 [(set_attr "predicable" "yes")
4390 (set_attr "type" "alus_imm")]
;; Same saturation pattern, but the saturated value is itself a shifted
;; register (sat_shift_operator on operands 4/5); emits ssat/usat with a
;; shift applied via the %S3 output modifier.  Disabled when the Q bit
;; is live (!ARM_Q_BIT_READ).
4393 (define_insn "*satsi_<SAT:code>_shift"
4394 [(set (match_operand:SI 0 "s_register_operand" "=r")
4395 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
4396 [(match_operand:SI 4 "s_register_operand" "r")
4397 (match_operand:SI 5 "const_int_operand" "i")])
4398 (match_operand:SI 1 "const_int_operand" "i"))
4399 (match_operand:SI 2 "const_int_operand" "i")))]
4400 "TARGET_32BIT && arm_arch6 && !ARM_Q_BIT_READ
4401 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4405 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4406 &mask, &signed_sat))
4409 operands[1] = GEN_INT (mask);
4411 return "ssat%?\t%0, %1, %4%S3";
4413 return "usat%?\t%0, %1, %4%S3";
4415 [(set_attr "predicable" "yes")
4416 (set_attr "shift" "3")
4417 (set_attr "type" "logic_shift_reg")])
4419 ;; Custom Datapath Extension insns.
;; NOTE(review): each insn below is missing its condition string and the
;; unspec name in this elided listing (original numbering has gaps).
;; cx1: CDE instruction with coprocessor number (op 1) and an immediate
;; (op 2); <cde_suffix>/<cde_dest> come from the SIDI mode iterator.
4420 (define_insn "arm_cx1<mode>"
4421 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4422 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4423 (match_operand:SI 2 "const_int_ccde1_operand" "i")]
4426 "cx1<cde_suffix>\\tp%c1, <cde_dest>, %2"
4427 [(set_attr "type" "coproc")]
;; cx1a: accumulating variant — operand 2 is tied to the destination ("0").
4430 (define_insn "arm_cx1a<mode>"
4431 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4432 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4433 (match_operand:SIDI 2 "s_register_operand" "0")
4434 (match_operand:SI 3 "const_int_ccde1_operand" "i")]
4437 "cx1<cde_suffix>a\\tp%c1, <cde_dest>, %3"
4438 [(set_attr "type" "coproc")]
;; cx2: one register source (op 2) plus an immediate (op 3).
4441 (define_insn "arm_cx2<mode>"
4442 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4443 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4444 (match_operand:SI 2 "s_register_operand" "r")
4445 (match_operand:SI 3 "const_int_ccde2_operand" "i")]
4448 "cx2<cde_suffix>\\tp%c1, <cde_dest>, %2, %3"
4449 [(set_attr "type" "coproc")]
;; cx2a: accumulating cx2 (op 2 tied to dest, op 3 register, op 4 imm).
4452 (define_insn "arm_cx2a<mode>"
4453 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4454 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4455 (match_operand:SIDI 2 "s_register_operand" "0")
4456 (match_operand:SI 3 "s_register_operand" "r")
4457 (match_operand:SI 4 "const_int_ccde2_operand" "i")]
4460 "cx2<cde_suffix>a\\tp%c1, <cde_dest>, %3, %4"
4461 [(set_attr "type" "coproc")]
;; cx3: two register sources (ops 2-3) plus an immediate (op 4).
4464 (define_insn "arm_cx3<mode>"
4465 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4466 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4467 (match_operand:SI 2 "s_register_operand" "r")
4468 (match_operand:SI 3 "s_register_operand" "r")
4469 (match_operand:SI 4 "const_int_ccde3_operand" "i")]
4472 "cx3<cde_suffix>\\tp%c1, <cde_dest>, %2, %3, %4"
4473 [(set_attr "type" "coproc")]
;; cx3a: accumulating cx3 (op 2 tied to dest, ops 3-4 registers, op 5 imm).
4476 (define_insn "arm_cx3a<mode>"
4477 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4478 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4479 (match_operand:SIDI 2 "s_register_operand" "0")
4480 (match_operand:SI 3 "s_register_operand" "r")
4481 (match_operand:SI 4 "s_register_operand" "r")
4482 (match_operand:SI 5 "const_int_ccde3_operand" "i")]
4485 "cx3<cde_suffix>a\\tp%c1, <cde_dest>, %3, %4, %5"
4486 [(set_attr "type" "coproc")]
4489 ;; Shift and rotation insns
;; 64-bit left-shift expander.  On MVE little-endian targets with a
;; register or valid long-shift-immediate count it uses the Armv8.1-M
;; LSLL instruction (after copying src to dest if they don't overlap);
;; the INTVAL != 32 check excludes the one immediate LSLL can't take.
;; Otherwise falls back to the generic core-register 64-bit shift
;; expansion with two SImode scratch regs.
;; NOTE(review): some lines (closing braces/DONE) are elided in this listing.
4491 (define_expand "ashldi3"
4492 [(set (match_operand:DI 0 "s_register_operand")
4493 (ashift:DI (match_operand:DI 1 "s_register_operand")
4494 (match_operand:SI 2 "reg_or_int_operand")))]
4497 if (TARGET_HAVE_MVE && !BYTES_BIG_ENDIAN)
4499 if (!reg_or_int_operand (operands[2], SImode))
4500 operands[2] = force_reg (SImode, operands[2]);
4502 /* Armv8.1-M Mainline double shifts are not expanded.  */
4503 if (arm_reg_or_long_shift_imm (operands[2], GET_MODE (operands[2]))
4504 && (REG_P (operands[2]) || INTVAL(operands[2]) != 32))
4506 if (!reg_overlap_mentioned_p(operands[0], operands[1]))
4507 emit_insn (gen_movdi (operands[0], operands[1]));
4509 emit_insn (gen_thumb2_lsll (operands[0], operands[2]));
4514 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
4515 operands[2], gen_reg_rtx (SImode),
4516 gen_reg_rtx (SImode));
;; 32-bit left-shift expander: a constant shift count greater than 31
;; would be undefined, so it is folded to a move of zero.
4520 (define_expand "ashlsi3"
4521 [(set (match_operand:SI 0 "s_register_operand")
4522 (ashift:SI (match_operand:SI 1 "s_register_operand")
4523 (match_operand:SI 2 "arm_rhs_operand")))]
4526 if (CONST_INT_P (operands[2])
4527 && (UINTVAL (operands[2])) > 31)
4529 emit_insn (gen_movsi (operands[0], const0_rtx));
;; 64-bit arithmetic right-shift expander: MVE little-endian targets use
;; the Armv8.1-M ASRL instruction (copy-then-shift-in-place), otherwise
;; fall back to the generic core-register 64-bit shift expansion.
4535 (define_expand "ashrdi3"
4536 [(set (match_operand:DI 0 "s_register_operand")
4537 (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
4538 (match_operand:SI 2 "reg_or_int_operand")))]
4541 /* Armv8.1-M Mainline double shifts are not expanded.  */
4542 if (TARGET_HAVE_MVE && !BYTES_BIG_ENDIAN
4543 && arm_reg_or_long_shift_imm (operands[2], GET_MODE (operands[2])))
4545 if (!reg_overlap_mentioned_p(operands[0], operands[1]))
4546 emit_insn (gen_movdi (operands[0], operands[1]))
4548 emit_insn (gen_thumb2_asrl (operands[0], operands[2]));
4552 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
4553 operands[2], gen_reg_rtx (SImode),
4554 gen_reg_rtx (SImode));
;; 32-bit arithmetic right-shift expander: constant counts > 31 are
;; clamped to 31 (shifting in sign bits), unlike logical shifts which
;; become zero.
4558 (define_expand "ashrsi3"
4559 [(set (match_operand:SI 0 "s_register_operand")
4560 (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
4561 (match_operand:SI 2 "arm_rhs_operand")))]
4564 if (CONST_INT_P (operands[2])
4565 && UINTVAL (operands[2]) > 31)
4566 operands[2] = GEN_INT (31);
;; 64-bit logical right-shift expander: MVE little-endian targets with a
;; long-shift immediate use Armv8.1-M LSRL (note: long_shift_imm only,
;; unlike ashrdi3 which also accepts a register count); otherwise fall
;; back to the generic core-register 64-bit shift expansion.
4570 (define_expand "lshrdi3"
4571 [(set (match_operand:DI 0 "s_register_operand")
4572 (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
4573 (match_operand:SI 2 "reg_or_int_operand")))]
4576 /* Armv8.1-M Mainline double shifts are not expanded.  */
4577 if (TARGET_HAVE_MVE && !BYTES_BIG_ENDIAN
4578 && long_shift_imm (operands[2], GET_MODE (operands[2])))
4580 if (!reg_overlap_mentioned_p(operands[0], operands[1]))
4581 emit_insn (gen_movdi (operands[0], operands[1]));
4583 emit_insn (gen_thumb2_lsrl (operands[0], operands[2]));
4587 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
4588 operands[2], gen_reg_rtx (SImode),
4589 gen_reg_rtx (SImode));
;; 32-bit logical right-shift expander: constant counts > 31 shift out
;; every bit, so the result is folded to a move of zero.
4593 (define_expand "lshrsi3"
4594 [(set (match_operand:SI 0 "s_register_operand")
4595 (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
4596 (match_operand:SI 2 "arm_rhs_operand")))]
4599 if (CONST_INT_P (operands[2])
4600 && (UINTVAL (operands[2])) > 31)
4602 emit_insn (gen_movsi (operands[0], const0_rtx));
;; Rotate-left expander: ARM only has rotate-right, so the pattern uses
;; rotatert and converts the count — a constant n becomes (32 - n) % 32,
;; a register count becomes 32 - reg via subsi3 into a fresh temp.
;; NOTE(review): the lines that substitute the temp back into operands[2]
;; are elided in this listing.
4608 (define_expand "rotlsi3"
4609 [(set (match_operand:SI 0 "s_register_operand")
4610 (rotatert:SI (match_operand:SI 1 "s_register_operand")
4611 (match_operand:SI 2 "reg_or_int_operand")))]
4614 if (CONST_INT_P (operands[2]))
4615 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
4618 rtx reg = gen_reg_rtx (SImode);
4619 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
;; Rotate-right expander: constant counts > 31 are reduced modulo 32;
;; Thumb-1 has no immediate rotate, so constants are forced to a register.
4625 (define_expand "rotrsi3"
4626 [(set (match_operand:SI 0 "s_register_operand")
4627 (rotatert:SI (match_operand:SI 1 "s_register_operand")
4628 (match_operand:SI 2 "arm_rhs_operand")))]
4633 if (CONST_INT_P (operands[2])
4634 && UINTVAL (operands[2]) > 31)
4635 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
4637 else /* TARGET_THUMB1 */
4639 if (CONST_INT_P (operands [2]))
4640 operands [2] = force_reg (SImode, operands[2]);
;; Generic SImode shift insn: any shift_operator with register or
;; immediate (M constraint) count; assembly is produced by
;; arm_output_shift.  First two alternatives are Thumb-2 short forms.
4645 (define_insn "*arm_shiftsi3"
4646 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
4647 (match_operator:SI 3 "shift_operator"
4648 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
4649 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
4651 "* return arm_output_shift(operands, 0);"
4652 [(set_attr "predicable" "yes")
4653 (set_attr "arch" "t2,t2,*,*")
4654 (set_attr "predicable_short_it" "yes,yes,no,no")
4655 (set_attr "length" "4")
4656 (set_attr "shift" "1")
4657 (set_attr "autodetect_type" "alu_shift_operator3")]
;; Flag-setting variant: shift that also compares the result against
;; zero (CC_NZ) and keeps the shifted value in operand 0.
4660 (define_insn "*shiftsi3_compare0"
4661 [(set (reg:CC_NZ CC_REGNUM)
4662 (compare:CC_NZ (match_operator:SI 3 "shift_operator"
4663 [(match_operand:SI 1 "s_register_operand" "r,r")
4664 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4666 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4667 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
4669 "* return arm_output_shift(operands, 1);"
4670 [(set_attr "conds" "set")
4671 (set_attr "shift" "1")
4672 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
;; As above but the shifted value itself is dead (scratch destination):
;; only the condition flags are wanted.
4675 (define_insn "*shiftsi3_compare0_scratch"
4676 [(set (reg:CC_NZ CC_REGNUM)
4677 (compare:CC_NZ (match_operator:SI 3 "shift_operator"
4678 [(match_operand:SI 1 "s_register_operand" "r,r")
4679 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4681 (clobber (match_scratch:SI 0 "=r,r"))]
4683 "* return arm_output_shift(operands, 1);"
4684 [(set_attr "conds" "set")
4685 (set_attr "shift" "1")
4686 (set_attr "type" "shift_imm,shift_reg")]
;; MVN of a shifted register (single insn thanks to the ARM barrel
;; shifter).  Register-count alternative is ARM-state only ("a").
;; NOTE(review): the output template line is elided in this listing.
4689 (define_insn "*not_shiftsi"
4690 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4691 (not:SI (match_operator:SI 3 "shift_operator"
4692 [(match_operand:SI 1 "s_register_operand" "r,r")
4693 (match_operand:SI 2 "shift_amount_operand" "M,r")])))]
4696 [(set_attr "predicable" "yes")
4697 (set_attr "shift" "1")
4698 (set_attr "arch" "32,a")
4699 (set_attr "type" "mvn_shift,mvn_shift_reg")])
;; Flag-setting MVNS of a shifted register; result kept in operand 0.
4701 (define_insn "*not_shiftsi_compare0"
4702 [(set (reg:CC_NZ CC_REGNUM)
4704 (not:SI (match_operator:SI 3 "shift_operator"
4705 [(match_operand:SI 1 "s_register_operand" "r,r")
4706 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
4708 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4709 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
4711 "mvns%?\\t%0, %1%S3"
4712 [(set_attr "conds" "set")
4713 (set_attr "shift" "1")
4714 (set_attr "arch" "32,a")
4715 (set_attr "type" "mvn_shift,mvn_shift_reg")])
;; As above but only the flags are wanted; destination is a scratch.
4717 (define_insn "*not_shiftsi_compare0_scratch"
4718 [(set (reg:CC_NZ CC_REGNUM)
4720 (not:SI (match_operator:SI 3 "shift_operator"
4721 [(match_operand:SI 1 "s_register_operand" "r,r")
4722 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
4724 (clobber (match_scratch:SI 0 "=r,r"))]
4726 "mvns%?\\t%0, %1%S3"
4727 [(set_attr "conds" "set")
4728 (set_attr "shift" "1")
4729 (set_attr "arch" "32,a")
4730 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4732 ;; We don't really have extzv, but defining this using shifts helps
4733 ;; to reduce register pressure later on.
;; Zero-extract expander.  Thumb-2 path: 16/32-bit byte-aligned fields
;; from memory use the unaligned load insns (with big-endian bit-position
;; adjustment); register sources use UBFX via extzv_t2.  Thumb-1 path:
;; implement as shift-left then logical-shift-right via extzv_t1, or a
;; single lshrsi3 when the left shift would be zero.
;; NOTE(review): several control-flow lines (FAIL/DONE, some braces and
;; conditions) are elided in this listing — numbering is non-contiguous.
4735 (define_expand "extzv"
4736 [(set (match_operand 0 "s_register_operand")
4737 (zero_extract (match_operand 1 "nonimmediate_operand")
4738 (match_operand 2 "const_int_operand")
4739 (match_operand 3 "const_int_operand")))]
4740 "TARGET_THUMB1 || arm_arch_thumb2"
4743 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
4744 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
4746 if (arm_arch_thumb2)
4748 HOST_WIDE_INT width = INTVAL (operands[2]);
4749 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4751 if (unaligned_access && MEM_P (operands[1])
4752 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
4756 if (BYTES_BIG_ENDIAN)
4757 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
4762 base_addr = adjust_address (operands[1], SImode,
4763 bitpos / BITS_PER_UNIT);
4764 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4768 rtx dest = operands[0];
4769 rtx tmp = gen_reg_rtx (SImode);
4771 /* We may get a paradoxical subreg here.  Strip it off.  */
4772 if (GET_CODE (dest) == SUBREG
4773 && GET_MODE (dest) == SImode
4774 && GET_MODE (SUBREG_REG (dest)) == HImode)
4775 dest = SUBREG_REG (dest);
4777 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4780 base_addr = adjust_address (operands[1], HImode,
4781 bitpos / BITS_PER_UNIT);
4782 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
4783 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4787 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
4789 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
4797 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4800 operands[3] = GEN_INT (rshift);
4804 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
4808 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
4809 operands[3], gen_reg_rtx (SImode)));
4814 ;; Helper for extzv, for the Thumb-1 register-shifts case.
;; Two-insn sequence: shift left by op 2 into scratch op 4, then logical
;; shift right by op 3 into op 0 — isolates the bit-field with zero fill.
4816 (define_expand "extzv_t1"
4817 [(set (match_operand:SI 4 "s_register_operand")
4818 (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
4819 (match_operand:SI 2 "const_int_operand")))
4820 (set (match_operand:SI 0 "s_register_operand")
4821 (lshiftrt:SI (match_dup 4)
4822 (match_operand:SI 3 "const_int_operand")))]
;; Sign-extract expander (parallel to extzv above): byte-aligned 16/32-bit
;; memory fields use the signed unaligned load insns; SImode register
;; sources go through extv_regsi (SBFX).
;; NOTE(review): the insn condition and several FAIL/DONE lines are
;; elided in this listing — numbering is non-contiguous.
4826 (define_expand "extv"
4827 [(set (match_operand 0 "s_register_operand")
4828 (sign_extract (match_operand 1 "nonimmediate_operand")
4829 (match_operand 2 "const_int_operand")
4830 (match_operand 3 "const_int_operand")))]
4833 HOST_WIDE_INT width = INTVAL (operands[2]);
4834 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4836 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
4837 && (bitpos % BITS_PER_UNIT) == 0)
4841 if (BYTES_BIG_ENDIAN)
4842 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
4846 base_addr = adjust_address (operands[1], SImode,
4847 bitpos / BITS_PER_UNIT);
4848 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4852 rtx dest = operands[0];
4853 rtx tmp = gen_reg_rtx (SImode);
4855 /* We may get a paradoxical subreg here.  Strip it off.  */
4856 if (GET_CODE (dest) == SUBREG
4857 && GET_MODE (dest) == SImode
4858 && GET_MODE (SUBREG_REG (dest)) == HImode)
4859 dest = SUBREG_REG (dest);
4861 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4864 base_addr = adjust_address (operands[1], HImode,
4865 bitpos / BITS_PER_UNIT);
4866 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
4867 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4872 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4874 else if (GET_MODE (operands[0]) == SImode
4875 && GET_MODE (operands[1]) == SImode)
4877 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
4885 ; Helper to expand register forms of extv with the proper modes.
;; SImode-typed sign_extract pattern used by the extv expander above.
4887 (define_expand "extv_regsi"
4888 [(set (match_operand:SI 0 "s_register_operand")
4889 (sign_extract:SI (match_operand:SI 1 "s_register_operand")
4890 (match_operand 2 "const_int_operand")
4891 (match_operand 3 "const_int_operand")))]
4896 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
;; 64-bit unaligned load via LDRD-capable move; wrapped in an unspec so
;; the alias/alignment machinery treats it as opaque.
4898 (define_insn "unaligned_loaddi"
4899 [(set (match_operand:DI 0 "s_register_operand" "=r")
4900 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
4901 UNSPEC_UNALIGNED_LOAD))]
4902 "TARGET_32BIT && TARGET_LDRD"
4904 return output_move_double (operands, true, NULL);
4906 [(set_attr "length" "8")
4907 (set_attr "type" "load_8")])
;; 32-bit unaligned load: Thumb-1 short form, Thumb-2 short form (Uw
;; address constraint), and full 32-bit form.
;; NOTE(review): insn condition line elided in this listing.
4909 (define_insn "unaligned_loadsi"
4910 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4911 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
4912 UNSPEC_UNALIGNED_LOAD))]
4915 ldr\t%0, %1\t@ unaligned
4916 ldr%?\t%0, %1\t@ unaligned
4917 ldr%?\t%0, %1\t@ unaligned"
4918 [(set_attr "arch" "t1,t2,32")
4919 (set_attr "length" "2,2,4")
4920 (set_attr "predicable" "no,yes,yes")
4921 (set_attr "predicable_short_it" "no,yes,no")
4922 (set_attr "type" "load_4")])
4924 ;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
4925 ;; address (there's no immediate format).  That's tricky to support
4926 ;; here and we don't really need this pattern for that case, so only
4927 ;; enable for 32-bit ISAs.
;; Sign-extending 16-bit unaligned load (LDRSH) into an SImode register.
4928 (define_insn "unaligned_loadhis"
4929 [(set (match_operand:SI 0 "s_register_operand" "=r")
4931 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
4932 UNSPEC_UNALIGNED_LOAD)))]
4933 "unaligned_access && TARGET_32BIT"
4934 "ldrsh%?\t%0, %1\t@ unaligned"
4935 [(set_attr "predicable" "yes")
4936 (set_attr "type" "load_byte")])
;; Zero-extending 16-bit unaligned load (LDRH); same three-alternative
;; arch split as unaligned_loadsi.
4938 (define_insn "unaligned_loadhiu"
4939 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4941 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
4942 UNSPEC_UNALIGNED_LOAD)))]
4945 ldrh\t%0, %1\t@ unaligned
4946 ldrh%?\t%0, %1\t@ unaligned
4947 ldrh%?\t%0, %1\t@ unaligned"
4948 [(set_attr "arch" "t1,t2,32")
4949 (set_attr "length" "2,2,4")
4950 (set_attr "predicable" "no,yes,yes")
4951 (set_attr "predicable_short_it" "no,yes,no")
4952 (set_attr "type" "load_byte")])
;; 64-bit unaligned store, mirror of unaligned_loaddi.
4954 (define_insn "unaligned_storedi"
4955 [(set (match_operand:DI 0 "memory_operand" "=m")
4956 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
4957 UNSPEC_UNALIGNED_STORE))]
4958 "TARGET_32BIT && TARGET_LDRD"
4960 return output_move_double (operands, true, NULL);
4962 [(set_attr "length" "8")
4963 (set_attr "type" "store_8")])
;; 32-bit unaligned store (STR), mirror of unaligned_loadsi.
4965 (define_insn "unaligned_storesi"
4966 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
4967 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
4968 UNSPEC_UNALIGNED_STORE))]
4971 str\t%1, %0\t@ unaligned
4972 str%?\t%1, %0\t@ unaligned
4973 str%?\t%1, %0\t@ unaligned"
4974 [(set_attr "arch" "t1,t2,32")
4975 (set_attr "length" "2,2,4")
4976 (set_attr "predicable" "no,yes,yes")
4977 (set_attr "predicable_short_it" "no,yes,no")
4978 (set_attr "type" "store_4")])
;; 16-bit unaligned store (STRH).
4980 (define_insn "unaligned_storehi"
4981 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
4982 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
4983 UNSPEC_UNALIGNED_STORE))]
4986 strh\t%1, %0\t@ unaligned
4987 strh%?\t%1, %0\t@ unaligned
4988 strh%?\t%1, %0\t@ unaligned"
4989 [(set_attr "arch" "t1,t2,32")
4990 (set_attr "length" "2,2,4")
4991 (set_attr "predicable" "no,yes,yes")
4992 (set_attr "predicable_short_it" "no,yes,no")
4993 (set_attr "type" "store_4")])
;; Signed bit-field extract from a register: SBFX dest, src, lsb, width.
;; Conditions keep lsb in 0..31 and width in 1..(32-lsb) so the field
;; fits in the word.  NOTE(review): the arch condition line is elided.
4996 (define_insn "*extv_reg"
4997 [(set (match_operand:SI 0 "s_register_operand" "=r")
4998 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4999 (match_operand:SI 2 "const_int_operand" "n")
5000 (match_operand:SI 3 "const_int_operand" "n")))]
5002 && IN_RANGE (INTVAL (operands[3]), 0, 31)
5003 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
5004 "sbfx%?\t%0, %1, %3, %2"
5005 [(set_attr "length" "4")
5006 (set_attr "predicable" "yes")
5007 (set_attr "type" "bfm")]
;; Unsigned counterpart: UBFX with the same range constraints.
5010 (define_insn "extzv_t2"
5011 [(set (match_operand:SI 0 "s_register_operand" "=r")
5012 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
5013 (match_operand:SI 2 "const_int_operand" "n")
5014 (match_operand:SI 3 "const_int_operand" "n")))]
5016 && IN_RANGE (INTVAL (operands[3]), 0, 31)
5017 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
5018 "ubfx%?\t%0, %1, %3, %2"
5019 [(set_attr "length" "4")
5020 (set_attr "predicable" "yes")
5021 (set_attr "type" "bfm")]
5025 ;; Division instructions
;; Signed 32-bit hardware divide; alternatives for 32-bit ISAs and
;; Armv8-M Baseline ("v8mb").  NOTE(review): condition and output
;; template lines are elided in this listing.
5026 (define_insn "divsi3"
5027 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5028 (div:SI (match_operand:SI 1 "s_register_operand" "r,r")
5029 (match_operand:SI 2 "s_register_operand" "r,r")))]
5034 [(set_attr "arch" "32,v8mb")
5035 (set_attr "predicable" "yes")
5036 (set_attr "type" "sdiv")]
;; Unsigned 32-bit hardware divide, same shape as divsi3.
5039 (define_insn "udivsi3"
5040 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5041 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
5042 (match_operand:SI 2 "s_register_operand" "r,r")))]
5047 [(set_attr "arch" "32,v8mb")
5048 (set_attr "predicable" "yes")
5049 (set_attr "type" "udiv")]
5053 ;; Unary arithmetic insns
;; Overflow-checked negation: delegates to subv<mode>4 as (0 - op1) so
;; the subtraction's overflow check applies; operand 2 is the overflow
;; label passed through (trailing argument elided in this listing).
5055 (define_expand "negv<SIDI:mode>3"
5056 [(match_operand:SIDI 0 "s_register_operand")
5057 (match_operand:SIDI 1 "s_register_operand")
5058 (match_operand 2 "")]
5061 emit_insn (gen_subv<mode>4 (operands[0], const0_rtx, operands[1],
5066 (define_expand "negsi2"
5067 [(set (match_operand:SI 0 "s_register_operand")
5068 (neg:SI (match_operand:SI 1 "s_register_operand")))]
;; Plain SImode negate: RSB dest, src, #0.
5073 (define_insn "*arm_negsi2"
5074 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
5075 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
5077 "rsb%?\\t%0, %1, #0"
5078 [(set_attr "predicable" "yes")
5079 (set_attr "predicable_short_it" "yes,no")
5080 (set_attr "arch" "t2,*")
5081 (set_attr "length" "4")
5082 (set_attr "type" "alu_imm")]
5085 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
5086 ;; rather than (0 cmp reg).  This gives the same results for unsigned
5087 ;; and equality compares which is what we mostly need here.
;; Flag-setting negate: sets CC_RSB from the canonicalised comparison
;; while producing -op1 in operand 0.
;; NOTE(review): comparison second arg and output template elided here.
5088 (define_insn "negsi2_0compare"
5089 [(set (reg:CC_RSB CC_REGNUM)
5090 (compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r"))
5092 (set (match_operand:SI 0 "s_register_operand" "=l,r")
5093 (neg:SI (match_dup 1)))]
5098 [(set_attr "conds" "set")
5099 (set_attr "arch" "t2,*")
5100 (set_attr "length" "2,*")
5101 (set_attr "type" "alus_imm")]
;; Negate with borrow-in: -op1 - borrow, consuming the carry flag (SBC);
;; second alternative uses the lsl #1 trick for Thumb-2.
5104 (define_insn "negsi2_carryin"
5105 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5106 (minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r"))
5107 (match_operand:SI 2 "arm_borrow_operation" "")))]
5111 sbc\\t%0, %1, %1, lsl #1"
5112 [(set_attr "conds" "use")
5113 (set_attr "arch" "a,t2")
5114 (set_attr "type" "adc_imm,adc_reg")]
;; Floating-point negate expanders — gated on hard-float VFP; the
;; matching insns live elsewhere (vfp.md), so the expander bodies here
;; are empty pass-throughs.
5117 (define_expand "negsf2"
5118 [(set (match_operand:SF 0 "s_register_operand")
5119 (neg:SF (match_operand:SF 1 "s_register_operand")))]
5120 "TARGET_32BIT && TARGET_HARD_FLOAT"
;; DFmode variant additionally requires double-precision VFP hardware.
5124 (define_expand "negdf2"
5125 [(set (match_operand:DF 0 "s_register_operand")
5126 (neg:DF (match_operand:DF 1 "s_register_operand")))]
5127 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
5130 ;; abssi2 doesn't really clobber the condition codes if a different register
5131 ;; is being set.  To keep things simple, assume during rtl manipulations that
5132 ;; it does, but tell the final scan operator the truth.  Similarly for
;; abssi2 expander: operand 2 is the conservative CC clobber — a scratch
;; in one configuration, the real CC register in the other (the selecting
;; condition between 5143 and 5145 is elided in this listing).
5135 (define_expand "abssi2"
5137 [(set (match_operand:SI 0 "s_register_operand")
5138 (abs:SI (match_operand:SI 1 "s_register_operand")))
5139 (clobber (match_dup 2))])]
5143 operands[2] = gen_rtx_SCRATCH (SImode);
5145 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
;; abs split.  Alt 0 (dest == src): cmp #0 then conditional rsblt.
;; Alt 1 (distinct regs, early-clobber dest): two-insn branchless form
;; eor dest, src, src asr #31 ; sub dest, dest, src asr #31.
;; NOTE(review): several argument lines of the emitted RTXes are elided.
5148 (define_insn_and_split "*arm_abssi2"
5149 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
5150 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
5151 (clobber (reg:CC CC_REGNUM))]
5154 "&& reload_completed"
5157 /* if (which_alternative == 0) */
5158 if (REGNO(operands[0]) == REGNO(operands[1]))
5160 /* Emit the pattern:
5161 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
5162 [(set (reg:CC CC_REGNUM)
5163 (compare:CC (match_dup 0) (const_int 0)))
5164 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
5165 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
5167 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
5168 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
5169 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5170 (gen_rtx_LT (SImode,
5171 gen_rtx_REG (CCmode, CC_REGNUM),
5173 (gen_rtx_SET (operands[0],
5174 (gen_rtx_MINUS (SImode,
5181 /* Emit the pattern:
5182 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
5184 (xor:SI (match_dup 1)
5185 (ashiftrt:SI (match_dup 1) (const_int 31))))
5187 (minus:SI (match_dup 0)
5188 (ashiftrt:SI (match_dup 1) (const_int 31))))]
5190 emit_insn (gen_rtx_SET (operands[0],
5191 gen_rtx_XOR (SImode,
5192 gen_rtx_ASHIFTRT (SImode,
5196 emit_insn (gen_rtx_SET (operands[0],
5197 gen_rtx_MINUS (SImode,
5199 gen_rtx_ASHIFTRT (SImode,
5205 [(set_attr "conds" "clob,*")
5206 (set_attr "shift" "1")
5207 (set_attr "predicable" "no, yes")
5208 (set_attr "length" "8")
5209 (set_attr "type" "multiple")]
;; Negated abs (-|x|) split, mirror of *arm_abssi2.  Alt 0 (dest == src):
;; cmp #0 then conditional rsbgt.  Alt 1: eor with asr #31 then RSB.
;; NOTE(review): several argument lines of the emitted RTXes are elided
;; in this listing (original numbering has gaps).
5212 (define_insn_and_split "*arm_neg_abssi2"
5213 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
5214 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
5215 (clobber (reg:CC CC_REGNUM))]
5218 "&& reload_completed"
5221 /* if (which_alternative == 0) */
5222 if (REGNO (operands[0]) == REGNO (operands[1]))
5224 /* Emit the pattern:
5225 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
5227 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
5228 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
5229 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5231 gen_rtx_REG (CCmode, CC_REGNUM),
5233 gen_rtx_SET (operands[0],
5234 (gen_rtx_MINUS (SImode,
5240 /* Emit the pattern:
5241 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
5243 emit_insn (gen_rtx_SET (operands[0],
5244 gen_rtx_XOR (SImode,
5245 gen_rtx_ASHIFTRT (SImode,
5249 emit_insn (gen_rtx_SET (operands[0],
5250 gen_rtx_MINUS (SImode,
5251 gen_rtx_ASHIFTRT (SImode,
5258 [(set_attr "conds" "clob,*")
5259 (set_attr "shift" "1")
5260 (set_attr "predicable" "no, yes")
5261 (set_attr "length" "8")
5262 (set_attr "type" "multiple")]
;; FP abs/sqrt expanders — hard-float gated pass-throughs; the matching
;; insns are defined elsewhere (vfp.md).  DF variants require
;; double-precision VFP (note absdf2 spells it !TARGET_VFP_SINGLE while
;; sqrtdf2 uses TARGET_VFP_DOUBLE — both forms appear in this file).
5265 (define_expand "abssf2"
5266 [(set (match_operand:SF 0 "s_register_operand")
5267 (abs:SF (match_operand:SF 1 "s_register_operand")))]
5268 "TARGET_32BIT && TARGET_HARD_FLOAT"
5271 (define_expand "absdf2"
5272 [(set (match_operand:DF 0 "s_register_operand")
5273 (abs:DF (match_operand:DF 1 "s_register_operand")))]
5274 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5277 (define_expand "sqrtsf2"
5278 [(set (match_operand:SF 0 "s_register_operand")
5279 (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
5280 "TARGET_32BIT && TARGET_HARD_FLOAT"
5283 (define_expand "sqrtdf2"
5284 [(set (match_operand:DF 0 "s_register_operand")
5285 (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
5286 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
;; Bitwise-NOT expander and insns.
5289 (define_expand "one_cmplsi2"
5290 [(set (match_operand:SI 0 "s_register_operand")
5291 (not:SI (match_operand:SI 1 "s_register_operand")))]
;; MVN dest, src (output template line elided in this listing).
5296 (define_insn "*arm_one_cmplsi2"
5297 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
5298 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
5301 [(set_attr "predicable" "yes")
5302 (set_attr "predicable_short_it" "yes,no")
5303 (set_attr "arch" "t2,*")
5304 (set_attr "length" "4")
5305 (set_attr "type" "mvn_reg")]
;; MVNS: NOT that also sets the N/Z flags, keeping the result.
5308 (define_insn "*notsi_compare0"
5309 [(set (reg:CC_NZ CC_REGNUM)
5310 (compare:CC_NZ (not:SI (match_operand:SI 1 "s_register_operand" "r"))
5312 (set (match_operand:SI 0 "s_register_operand" "=r")
5313 (not:SI (match_dup 1)))]
5316 [(set_attr "conds" "set")
5317 (set_attr "type" "mvn_reg")]
;; MVNS for flags only; destination is a scratch.
5320 (define_insn "*notsi_compare0_scratch"
5321 [(set (reg:CC_NZ CC_REGNUM)
5322 (compare:CC_NZ (not:SI (match_operand:SI 1 "s_register_operand" "r"))
5324 (clobber (match_scratch:SI 0 "=r"))]
5327 [(set_attr "conds" "set")
5328 (set_attr "type" "mvn_reg")]
5331 ;; Fixed <--> Floating conversion insns
;; int -> HFmode: no direct instruction here, so convert via SFmode
;; (expand_float to SF, then convert_to_mode to HF).
;; NOTE(review): expander conditions are elided in this listing.
5333 (define_expand "floatsihf2"
5334 [(set (match_operand:HF 0 "general_operand")
5335 (float:HF (match_operand:SI 1 "general_operand")))]
5339 rtx op1 = gen_reg_rtx (SFmode);
5340 expand_float (op1, operands[1], 0);
5341 op1 = convert_to_mode (HFmode, op1, 0);
5342 emit_move_insn (operands[0], op1);
;; DImode -> HFmode: same two-step route through SFmode.
5347 (define_expand "floatdihf2"
5348 [(set (match_operand:HF 0 "general_operand")
5349 (float:HF (match_operand:DI 1 "general_operand")))]
5353 rtx op1 = gen_reg_rtx (SFmode);
5354 expand_float (op1, operands[1], 0);
5355 op1 = convert_to_mode (HFmode, op1, 0);
5356 emit_move_insn (operands[0], op1);
;; int -> SF/DF: hard-float gated pass-throughs (insns elsewhere).
5361 (define_expand "floatsisf2"
5362 [(set (match_operand:SF 0 "s_register_operand")
5363 (float:SF (match_operand:SI 1 "s_register_operand")))]
5364 "TARGET_32BIT && TARGET_HARD_FLOAT"
5368 (define_expand "floatsidf2"
5369 [(set (match_operand:DF 0 "s_register_operand")
5370 (float:DF (match_operand:SI 1 "s_register_operand")))]
5371 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; HF -> int truncation: widen HF to SF then expand_fix — mirror of the
;; float*hf2 expanders above.
5375 (define_expand "fix_trunchfsi2"
5376 [(set (match_operand:SI 0 "general_operand")
5377 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
5381 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5382 expand_fix (operands[0], op1, 0);
5387 (define_expand "fix_trunchfdi2"
5388 [(set (match_operand:DI 0 "general_operand")
5389 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
5393 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5394 expand_fix (operands[0], op1, 0);
;; SF/DF -> int truncation: hard-float gated pass-throughs.
5399 (define_expand "fix_truncsfsi2"
5400 [(set (match_operand:SI 0 "s_register_operand")
5401 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
5402 "TARGET_32BIT && TARGET_HARD_FLOAT"
5406 (define_expand "fix_truncdfsi2"
5407 [(set (match_operand:SI 0 "s_register_operand")
5408 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
5409 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5423 ;; DFmode to HFmode conversions on targets without a single-step hardware
5424 ;; instruction for it would have to go through SFmode. This is dangerous
5425 ;; as it introduces double rounding.
5427 ;; Disable this pattern unless we are in an unsafe math mode, or we have
5428 ;; a single-step instruction.
5430 (define_expand "truncdfhf2"
5431 [(set (match_operand:HF 0 "s_register_operand")
5433 (match_operand:DF 1 "s_register_operand")))]
5434 "(TARGET_EITHER && flag_unsafe_math_optimizations)
5435 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
5437 /* We don't have a direct instruction for this, so we must be in
5438 an unsafe math mode, and going via SFmode. */
5440 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
5443 op1 = convert_to_mode (SFmode, operands[1], 0);
5444 op1 = convert_to_mode (HFmode, op1, 0);
5445 emit_move_insn (operands[0], op1);
5448 /* Otherwise, we will pick this up as a single instruction with
5449 no intermediary rounding. */
5453 ;; Zero and sign extension instructions.
;; QI/HI/SI -> DI zero-extension expander: build the result from SImode
;; halves — extend (or copy, for SImode sources) into the low part and
;; move zero into the high part.  With pseudos available, fresh temps
;; are used and moved into place; otherwise the result's own subwords
;; serve as the temporaries.
5455 (define_expand "zero_extend<mode>di2"
5456 [(set (match_operand:DI 0 "s_register_operand" "")
5457 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
5458 "TARGET_32BIT <qhs_zextenddi_cond>"
5460 rtx res_lo, res_hi, op0_lo, op0_hi;
5461 res_lo = gen_lowpart (SImode, operands[0]);
5462 res_hi = gen_highpart (SImode, operands[0]);
5463 if (can_create_pseudo_p ())
5465 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
5466 op0_hi = gen_reg_rtx (SImode);
5470 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
5473 if (<MODE>mode != SImode)
5474 emit_insn (gen_rtx_SET (op0_lo,
5475 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5476 emit_insn (gen_movsi (op0_hi, const0_rtx));
5477 if (res_lo != op0_lo)
5478 emit_move_insn (res_lo, op0_lo);
5479 if (res_hi != op0_hi)
5480 emit_move_insn (res_hi, op0_hi);
;; Sign-extension counterpart: low part as above, high part filled with
;; the sign via an arithmetic shift right by 31 of the low part.
5485 (define_expand "extend<mode>di2"
5486 [(set (match_operand:DI 0 "s_register_operand" "")
5487 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
5488 "TARGET_32BIT <qhs_sextenddi_cond>"
5490 rtx res_lo, res_hi, op0_lo, op0_hi;
5491 res_lo = gen_lowpart (SImode, operands[0]);
5492 res_hi = gen_highpart (SImode, operands[0]);
5493 if (can_create_pseudo_p ())
5495 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
5496 op0_hi = gen_reg_rtx (SImode);
5500 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
5503 if (<MODE>mode != SImode)
5504 emit_insn (gen_rtx_SET (op0_lo,
5505 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5506 emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
5507 if (res_lo != op0_lo)
5508 emit_move_insn (res_lo, op0_lo);
5509 if (res_hi != op0_hi)
5510 emit_move_insn (res_hi, op0_hi);
5515 ;; Splits for all extensions to DImode
;; Anonymous split: decompose any (zero_extend:DI x) into an extend/move
;; into the low SImode word plus a move of zero into the high word (the
;; rewritten operands become the high-part set in the split pattern).
;; NOTE(review): the split condition line is elided in this listing.
5517 [(set (match_operand:DI 0 "s_register_operand" "")
5518 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5520 [(set (match_dup 0) (match_dup 1))]
5522 rtx lo_part = gen_lowpart (SImode, operands[0]);
5523 machine_mode src_mode = GET_MODE (operands[1]);
5525 if (src_mode == SImode)
5526 emit_move_insn (lo_part, operands[1]);
5528 emit_insn (gen_rtx_SET (lo_part,
5529 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5530 operands[0] = gen_highpart (SImode, operands[0]);
5531 operands[1] = const0_rtx;
;; Sign-extension split: low word as above, high word becomes
;; (ashiftrt low 31) per the split pattern.
5535 [(set (match_operand:DI 0 "s_register_operand" "")
5536 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5538 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
5540 rtx lo_part = gen_lowpart (SImode, operands[0]);
5541 machine_mode src_mode = GET_MODE (operands[1]);
5543 if (src_mode == SImode)
5544 emit_move_insn (lo_part, operands[1]);
5546 emit_insn (gen_rtx_SET (lo_part,
5547 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5548 operands[1] = lo_part;
5549 operands[0] = gen_highpart (SImode, operands[0]);
;; HI -> SI zero-extension.  Pre-ARMv4 cores have no halfword loads, so
;; a memory source goes through movhi_bytes; pre-ARMv6 register sources
;; use a shift-left/shift-right-logical pair instead of UXTH.
5552 (define_expand "zero_extendhisi2"
5553 [(set (match_operand:SI 0 "s_register_operand")
5554 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5557 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
5559 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
5562 if (!arm_arch6 && !MEM_P (operands[1]))
5564 rtx t = gen_lowpart (SImode, operands[1]);
5565 rtx tmp = gen_reg_rtx (SImode);
;; (x << 16) >>u 16 clears the top halfword.
5566 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5567 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
;; Split a register HI zero-extension into the shift pair when UXTH is
;; unavailable (pre-v6, non-Thumb-2).
5573 [(set (match_operand:SI 0 "s_register_operand" "")
5574 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
5575 "!TARGET_THUMB2 && !arm_arch6"
5576 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5577 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
5579 operands[2] = gen_lowpart (SImode, operands[1]);
;; ARMv4..v5: register form via shifts, memory form via LDRH.
5582 (define_insn "*arm_zero_extendhisi2"
5583 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5584 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5585 "TARGET_ARM && arm_arch4 && !arm_arch6"
5589 [(set_attr "type" "alu_shift_reg,load_byte")
5590 (set_attr "predicable" "yes")]
;; ARMv6+: single-instruction UXTH / LDRH forms.
5593 (define_insn "*arm_zero_extendhisi2_v6"
5594 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5595 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5596 "TARGET_ARM && arm_arch6"
5600 [(set_attr "predicable" "yes")
5601 (set_attr "type" "extend,load_byte")]
;; Fused zero-extend-and-add: UXTAH combines the extension with the add.
5604 (define_insn "*arm_zero_extendhisi2addsi"
5605 [(set (match_operand:SI 0 "s_register_operand" "=r")
5606 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5607 (match_operand:SI 2 "s_register_operand" "r")))]
5609 "uxtah%?\\t%0, %2, %1"
5610 [(set_attr "type" "alu_shift_reg")
5611 (set_attr "predicable" "yes")]
;; QI -> SI zero-extension.  In ARM state a register source can simply
;; be ANDed with 255; otherwise pre-v6 code uses a shift pair (24 bits)
;; and v6+ uses UXTB / LDRB.
5614 (define_expand "zero_extendqisi2"
5615 [(set (match_operand:SI 0 "s_register_operand")
5616 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
5619 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
5621 emit_insn (gen_andsi3 (operands[0],
5622 gen_lowpart (SImode, operands[1]),
5626 if (!arm_arch6 && !MEM_P (operands[1]))
5628 rtx t = gen_lowpart (SImode, operands[1]);
5629 rtx tmp = gen_reg_rtx (SImode);
;; (x << 24) >>u 24 clears the top three bytes.
5630 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5631 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
;; Split a register QI zero-extension; the AND #255 form is used where
;; visible below, with the shift pair as the split pattern.
5637 [(set (match_operand:SI 0 "s_register_operand" "")
5638 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
5640 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5641 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
5643 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5646 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
;; Pre-v6: register form takes two insns (length 8), LDRB for memory.
5651 (define_insn "*arm_zero_extendqisi2"
5652 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5653 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
5654 "TARGET_ARM && !arm_arch6"
5657 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5658 [(set_attr "length" "8,4")
5659 (set_attr "type" "alu_shift_reg,load_byte")
5660 (set_attr "predicable" "yes")]
;; ARMv6+: single-instruction UXTB / LDRB forms.
5663 (define_insn "*arm_zero_extendqisi2_v6"
5664 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5665 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
5666 "TARGET_ARM && arm_arch6"
5669 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5670 [(set_attr "type" "extend,load_byte")
5671 (set_attr "predicable" "yes")]
;; Fused zero-extend-and-add: UXTAB.
5674 (define_insn "*arm_zero_extendqisi2addsi"
5675 [(set (match_operand:SI 0 "s_register_operand" "=r")
5676 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5677 (match_operand:SI 2 "s_register_operand" "r")))]
5679 "uxtab%?\\t%0, %2, %1"
5680 [(set_attr "predicable" "yes")
5681 (set_attr "type" "alu_shift_reg")]
;; Zero-extending the low byte of an SImode register: copy then AND #255.
;; Subreg byte 0 is the low byte on little-endian...
5685 [(set (match_operand:SI 0 "s_register_operand" "")
5686 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
5687 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5688 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
5689 [(set (match_dup 2) (match_dup 1))
5690 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
;; ...and subreg byte 3 is the low byte on big-endian.
5695 [(set (match_operand:SI 0 "s_register_operand" "")
5696 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
5697 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5698 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
5699 [(set (match_dup 2) (match_dup 1))
5700 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
;; Combine (IOR/XOR of a shifted-and-masked value with a lowpart subreg)
;; into the IOR/XOR followed by a zero_extend of the destination.  The
;; condition checks that operand 3 is exactly the mask implied by the
;; subreg's mode shifted by operand 2, so no information is lost.
;; NOTE(review): the shifted-value operand of the IOR_XOR is not visible
;; in this excerpt — verify against the full pattern.
5706 [(set (match_operand:SI 0 "s_register_operand" "")
5707 (IOR_XOR:SI (and:SI (ashift:SI
5708 (match_operand:SI 1 "s_register_operand" "")
5709 (match_operand:SI 2 "const_int_operand" ""))
5710 (match_operand:SI 3 "const_int_operand" ""))
5712 (match_operator 5 "subreg_lowpart_operator"
5713 [(match_operand:SI 4 "s_register_operand" "")]))))]
5715 && (UINTVAL (operands[3])
5716 == (GET_MODE_MASK (GET_MODE (operands[5]))
5717 & (GET_MODE_MASK (GET_MODE (operands[5]))
5718 << (INTVAL (operands[2])))))"
5719 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
5721 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
5722 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
;; Compare a QImode register against zero, setting only the Z flag
;; (CC_Z mode).  NOTE(review): the output template and the second
;; compare operand are not visible in this excerpt.
5725 (define_insn "*compareqi_eq0"
5726 [(set (reg:CC_Z CC_REGNUM)
5727 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
5731 [(set_attr "conds" "set")
5732 (set_attr "predicable" "yes")
5733 (set_attr "type" "logic_imm")]
;; HI -> SI sign-extension.  Thumb-1 has its own expander; pre-ARMv4
;; memory sources (no LDRSH) go through extendhisi2_mem; pre-v6 register
;; sources use an ashift/ashiftrt pair.
5736 (define_expand "extendhisi2"
5737 [(set (match_operand:SI 0 "s_register_operand")
5738 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5743 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
5746 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
5748 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
5752 if (!arm_arch6 && !MEM_P (operands[1]))
5754 rtx t = gen_lowpart (SImode, operands[1]);
5755 rtx tmp = gen_reg_rtx (SImode);
;; (x << 16) >> 16 (arithmetic) replicates the sign of the halfword.
5756 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5757 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
;; Split (with scratch) for register HI sign-extension via shifts.
5764 [(set (match_operand:SI 0 "register_operand" "")
5765 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
5766 (clobber (match_scratch:SI 2 ""))])]
5768 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5769 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5771 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5774 ;; This pattern will only be used when ldsh is not available
;; Load the two bytes separately, shift the sign-carrying byte left by
;; 24, then combine with an arithmetic shift and OR.  Byte order is
;; selected below via BYTES_BIG_ENDIAN.
5775 (define_expand "extendhisi2_mem"
5776 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5778 (zero_extend:SI (match_dup 7)))
5779 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
5780 (set (match_operand:SI 0 "" "")
5781 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
5786 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5788 mem1 = change_address (operands[1], QImode, addr);
5789 mem2 = change_address (operands[1], QImode,
5790 plus_constant (Pmode, addr, 1));
5791 operands[0] = gen_lowpart (SImode, operands[0]);
5793 operands[2] = gen_reg_rtx (SImode);
5794 operands[3] = gen_reg_rtx (SImode);
5795 operands[6] = gen_reg_rtx (SImode);
;; Pick which loaded byte holds the sign depending on endianness.
5798 if (BYTES_BIG_ENDIAN)
5800 operands[4] = operands[2];
5801 operands[5] = operands[3];
5805 operands[4] = operands[3];
5806 operands[5] = operands[2];
;; Plain (no-scratch) split for register HI sign-extension via shifts.
5812 [(set (match_operand:SI 0 "register_operand" "")
5813 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
5815 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5816 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5818 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
;; ARMv4..v5: register HI sign-extend via shift pair (length 8),
;; memory via LDRSH (length 4).
5821 (define_insn "*arm_extendhisi2"
5822 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5823 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5824 "TARGET_ARM && arm_arch4 && !arm_arch6"
5828 [(set_attr "length" "8,4")
5829 (set_attr "type" "alu_shift_reg,load_byte")
5830 (set_attr "predicable" "yes")]
5833 ;; ??? Check Thumb-2 pool range
;; ARMv6+ (ARM or Thumb-2): single-instruction SXTH / LDRSH forms.
5834 (define_insn "*arm_extendhisi2_v6"
5835 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5836 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5837 "TARGET_32BIT && arm_arch6"
5841 [(set_attr "type" "extend,load_byte")
5842 (set_attr "predicable" "yes")]
;; Fused sign-extend-and-add: SXTAH.
5845 (define_insn "*arm_extendhisi2addsi"
5846 [(set (match_operand:SI 0 "s_register_operand" "=r")
5847 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5848 (match_operand:SI 2 "s_register_operand" "r")))]
5850 "sxtah%?\\t%0, %2, %1"
5851 [(set_attr "type" "alu_shift_reg")]
;; QI -> HI sign-extension.  With ARMv4 and a memory source this is a
;; direct LDRSB; otherwise the value is forced into a register and
;; extended with a 24-bit shift pair in SImode.
5854 (define_expand "extendqihi2"
5856 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
5858 (set (match_operand:HI 0 "s_register_operand")
5859 (ashiftrt:SI (match_dup 2)
5864 if (arm_arch4 && MEM_P (operands[1]))
5866 emit_insn (gen_rtx_SET (operands[0],
5867 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
5870 if (!s_register_operand (operands[1], QImode))
5871 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5872 operands[0] = gen_lowpart (SImode, operands[0]);
5873 operands[1] = gen_lowpart (SImode, operands[1]);
5874 operands[2] = gen_reg_rtx (SImode);
;; LDRSB form of QI -> HI sign-extension (memory source, Uq address).
5878 (define_insn "*arm_extendqihi_insn"
5879 [(set (match_operand:HI 0 "s_register_operand" "=r")
5880 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
5881 "TARGET_ARM && arm_arch4"
5883 [(set_attr "type" "load_byte")
5884 (set_attr "predicable" "yes")]
;; QI -> SI sign-extension.  Pre-v4 memory sources are first copied to
;; a register (no LDRSB); pre-v6 register sources use the shift pair.
5887 (define_expand "extendqisi2"
5888 [(set (match_operand:SI 0 "s_register_operand")
5889 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
5892 if (!arm_arch4 && MEM_P (operands[1]))
5893 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5895 if (!arm_arch6 && !MEM_P (operands[1]))
5897 rtx t = gen_lowpart (SImode, operands[1]);
5898 rtx tmp = gen_reg_rtx (SImode);
;; (x << 24) >> 24 (arithmetic) replicates the sign of the byte.
5899 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5900 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
;; Split for register QI sign-extension via shifts.
5906 [(set (match_operand:SI 0 "register_operand" "")
5907 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
5909 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5910 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
5912 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
;; ARMv4..v5: register form via shift pair (length 8), memory via LDRSB.
5915 (define_insn "*arm_extendqisi"
5916 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5917 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5918 "TARGET_ARM && arm_arch4 && !arm_arch6"
5922 [(set_attr "length" "8,4")
5923 (set_attr "type" "alu_shift_reg,load_byte")
5924 (set_attr "predicable" "yes")]
;; ARMv6+: single-instruction SXTB / LDRSB forms.
5927 (define_insn "*arm_extendqisi_v6"
5928 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5930 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5931 "TARGET_ARM && arm_arch6"
5935 [(set_attr "type" "extend,load_byte")
5936 (set_attr "predicable" "yes")]
;; Fused sign-extend-and-add: SXTAB.
5939 (define_insn "*arm_extendqisi2addsi"
5940 [(set (match_operand:SI 0 "s_register_operand" "=r")
5941 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5942 (match_operand:SI 2 "s_register_operand" "r")))]
5944 "sxtab%?\\t%0, %2, %1"
5945 [(set_attr "type" "alu_shift_reg")
5946 (set_attr "predicable" "yes")]
;; ARM DSP/SIMD32 intrinsic patterns, expressed as unspecs over SImode
;; (and one DImode accumulator form).  The <sup>/<simd32_op> iterators
;; expand to the signed/unsigned instruction mnemonics.
;; UXTB16/SXTB16: extract and extend two bytes into two halfwords.
5949 (define_insn "arm_<sup>xtb16"
5950 [(set (match_operand:SI 0 "s_register_operand" "=r")
5952 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
5954 "<sup>xtb16%?\\t%0, %1"
5955 [(set_attr "predicable" "yes")
5956 (set_attr "type" "alu_dsp_reg")])
;; Two-operand SIMD32 ops that do not touch the GE flags.
5958 (define_insn "arm_<simd32_op>"
5959 [(set (match_operand:SI 0 "s_register_operand" "=r")
5961 [(match_operand:SI 1 "s_register_operand" "r")
5962 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
5964 "<simd32_op>%?\\t%0, %1, %2"
5965 [(set_attr "predicable" "yes")
5966 (set_attr "type" "alu_dsp_reg")])
;; USADA8: sum of absolute byte differences, accumulated into operand 3.
5968 (define_insn "arm_usada8"
5969 [(set (match_operand:SI 0 "s_register_operand" "=r")
5971 [(match_operand:SI 1 "s_register_operand" "r")
5972 (match_operand:SI 2 "s_register_operand" "r")
5973 (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
5975 "usada8%?\\t%0, %1, %2, %3"
5976 [(set_attr "predicable" "yes")
5977 (set_attr "type" "alu_dsp_reg")])
;; DImode-accumulating forms (e.g. SMLALD family): operand 3 is tied to
;; the output register pair ("0").
5979 (define_insn "arm_<simd32_op>"
5980 [(set (match_operand:DI 0 "s_register_operand" "=r")
5982 [(match_operand:SI 1 "s_register_operand" "r")
5983 (match_operand:SI 2 "s_register_operand" "r")
5984 (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
5986 "<simd32_op>%?\\t%Q0, %R0, %1, %2"
5987 [(set_attr "predicable" "yes")
5988 (set_attr "type" "smlald")])
;; SIMD32 ops that also set the APSR GE bits (modelled as a parallel
;; set of the APSRGE pseudo register).
5990 (define_insn "arm_<simd32_op>"
5991 [(set (match_operand:SI 0 "s_register_operand" "=r")
5993 [(match_operand:SI 1 "s_register_operand" "r")
5994 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_GE))
5995 (set (reg:CC APSRGE_REGNUM)
5996 (unspec:CC [(reg:CC APSRGE_REGNUM)] UNSPEC_GE_SET))]
5998 "<simd32_op>%?\\t%0, %1, %2"
5999 [(set_attr "predicable" "yes")
6000 (set_attr "type" "alu_sreg")])
;; Saturating ternary ops; <add_clobber_q_name>/<add_clobber_q_pred>
;; generate plain and Q-flag-setting variants of each insn.
6002 (define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
6003 [(set (match_operand:SI 0 "s_register_operand" "=r")
6005 [(match_operand:SI 1 "s_register_operand" "r")
6006 (match_operand:SI 2 "s_register_operand" "r")
6007 (match_operand:SI 3 "s_register_operand" "r")] SIMD32_TERNOP_Q))]
6008 "TARGET_INT_SIMD && <add_clobber_q_pred>"
6009 "<simd32_op>%?\\t%0, %1, %2, %3"
6010 [(set_attr "predicable" "yes")
6011 (set_attr "type" "alu_sreg")])
;; Expander choosing the _setq or plain insn for the ternary ops.
6013 (define_expand "arm_<simd32_op>"
6014 [(set (match_operand:SI 0 "s_register_operand")
6016 [(match_operand:SI 1 "s_register_operand")
6017 (match_operand:SI 2 "s_register_operand")
6018 (match_operand:SI 3 "s_register_operand")] SIMD32_TERNOP_Q))]
6022 emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
6023 operands[2], operands[3]));
6025 emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
6026 operands[2], operands[3]));
;; Same scheme for the saturating binary ops.
6031 (define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
6032 [(set (match_operand:SI 0 "s_register_operand" "=r")
6034 [(match_operand:SI 1 "s_register_operand" "r")
6035 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_BINOP_Q))]
6036 "TARGET_INT_SIMD && <add_clobber_q_pred>"
6037 "<simd32_op>%?\\t%0, %1, %2"
6038 [(set_attr "predicable" "yes")
6039 (set_attr "type" "alu_sreg")])
6041 (define_expand "arm_<simd32_op>"
6042 [(set (match_operand:SI 0 "s_register_operand")
6044 [(match_operand:SI 1 "s_register_operand")
6045 (match_operand:SI 2 "s_register_operand")] SIMD32_BINOP_Q))]
6049 emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
6052 emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
;; SSAT16/USAT16: saturate two halfwords to a bit-width immediate.
;; Note the operand order in the output template: "%0, %2, %1".
6058 (define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
6059 [(set (match_operand:SI 0 "s_register_operand" "=r")
6061 [(match_operand:SI 1 "s_register_operand" "r")
6062 (match_operand:SI 2 "<sup>sat16_imm" "i")] USSAT16))]
6063 "TARGET_INT_SIMD && <add_clobber_q_pred>"
6064 "<simd32_op>%?\\t%0, %2, %1"
6065 [(set_attr "predicable" "yes")
6066 (set_attr "type" "alu_sreg")])
6068 (define_expand "arm_<simd32_op>"
6069 [(set (match_operand:SI 0 "s_register_operand")
6071 [(match_operand:SI 1 "s_register_operand")
6072 (match_operand:SI 2 "<sup>sat16_imm")] USSAT16))]
6076 emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
6079 emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
;; SEL: byte-wise select between operands 1 and 2 under the GE flags.
6085 (define_insn "arm_sel"
6086 [(set (match_operand:SI 0 "s_register_operand" "=r")
6088 [(match_operand:SI 1 "s_register_operand" "r")
6089 (match_operand:SI 2 "s_register_operand" "r")
6090 (reg:CC APSRGE_REGNUM)] UNSPEC_SEL))]
6092 "sel%?\\t%0, %1, %2"
6093 [(set_attr "predicable" "yes")
6094 (set_attr "type" "alu_sreg")])
;; SF -> DF extension, available with hard float (not single-only VFP).
6096 (define_expand "extendsfdf2"
6097 [(set (match_operand:DF 0 "s_register_operand")
6098 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
6099 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6103 ;; HFmode -> DFmode conversions where we don't have an instruction for it
6104 ;; must go through SFmode.
6106 ;; This is always safe for an extend.
6108 (define_expand "extendhfdf2"
6109 [(set (match_operand:DF 0 "s_register_operand")
6110 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
6113 /* We don't have a direct instruction for this, so go via SFmode.  */
6114 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
6117 op1 = convert_to_mode (SFmode, operands[1], 0);
6118 op1 = convert_to_mode (DFmode, op1, 0);
6119 emit_insn (gen_movdf (operands[0], op1));
6122 /* Otherwise, we're done producing RTL and will pick up the correct
6123 pattern to do this with one rounding-step in a single instruction.  */
6127 ;; Move insns (including loads and stores)
6129 ;; XXX Just some ideas about movti.
6130 ;; I don't think these are a good idea on the arm, there just aren't enough
6132 ;;(define_expand "loadti"
6133 ;; [(set (match_operand:TI 0 "s_register_operand")
6134 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
6137 ;;(define_expand "storeti"
6138 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
6139 ;; (match_operand:TI 1 "s_register_operand"))]
6142 ;;(define_expand "movti"
6143 ;; [(set (match_operand:TI 0 "general_operand")
6144 ;; (match_operand:TI 1 "general_operand"))]
6150 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
6151 ;; operands[1] = copy_to_reg (operands[1]);
6152 ;; if (MEM_P (operands[0]))
6153 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
6154 ;; else if (MEM_P (operands[1]))
6155 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
6159 ;; emit_insn (insn);
6163 ;; Recognize garbage generated above.
6166 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
6167 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
6171 ;; register mem = (which_alternative < 3);
6172 ;; register const char *template;
6174 ;; operands[mem] = XEXP (operands[mem], 0);
6175 ;; switch (which_alternative)
6177 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
6178 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
6179 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
6180 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
6181 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
6182 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
6184 ;; output_asm_insn (template, operands);
;; DImode move expander.  Besides the usual mem->reg forcing, it works
;; around hard register pairs that cannot hold DImode (odd-numbered
;; pairs in ARM state) by splitting into two SImode moves, taking care
;; not to split a volatile memory access.
6188 (define_expand "movdi"
6189 [(set (match_operand:DI 0 "general_operand")
6190 (match_operand:DI 1 "general_operand"))]
6193 gcc_checking_assert (aligned_operand (operands[0], DImode));
6194 gcc_checking_assert (aligned_operand (operands[1], DImode));
6195 if (can_create_pseudo_p ())
6197 if (!REG_P (operands[0]))
6198 operands[1] = force_reg (DImode, operands[1]);
6200 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
6201 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
6203 /* Avoid LDRD's into an odd-numbered register pair in ARM state
6204 when expanding function calls.  */
6205 gcc_assert (can_create_pseudo_p ());
;; Volatile loads must stay a single DI access: load a legal pair
;; first, then move word-by-word into the awkward destination.
6206 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
6208 /* Perform load into legal reg pair first, then move.  */
6209 rtx reg = gen_reg_rtx (DImode);
6210 emit_insn (gen_movdi (reg, operands[1]));
6213 emit_move_insn (gen_lowpart (SImode, operands[0]),
6214 gen_lowpart (SImode, operands[1]));
6215 emit_move_insn (gen_highpart (SImode, operands[0]),
6216 gen_highpart (SImode, operands[1]));
;; Mirror case: the *source* is an illegal DI hard register pair.
6219 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
6220 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
6222 /* Avoid STRD's from an odd-numbered register pair in ARM state
6223 when expanding function prologue.  */
6224 gcc_assert (can_create_pseudo_p ());
;; Volatile stores likewise go via a temporary legal pair.
6225 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
6226 ? gen_reg_rtx (DImode)
6228 emit_move_insn (gen_lowpart (SImode, split_dest),
6229 gen_lowpart (SImode, operands[1]));
6230 emit_move_insn (gen_highpart (SImode, split_dest),
6231 gen_highpart (SImode, operands[1]));
6232 if (split_dest != operands[0])
6233 emit_insn (gen_movdi (operands[0], split_dest));
;; Core-register DImode move (no FP/MVE registers involved).  Constant
;; alternatives (Da/Db/Dc) are emitted by parts; memory alternatives
;; use output_move_double, with literal-pool loads split to MOV/MOVT
;; when the literal pool is disabled.
6239 (define_insn "*arm_movdi"
6240 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
6241 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
6243 && !(TARGET_HARD_FLOAT)
6244 && !(TARGET_HAVE_MVE || TARGET_HAVE_MVE_FLOAT)
6246 && ( register_operand (operands[0], DImode)
6247 || register_operand (operands[1], DImode))"
6249 switch (which_alternative)
6256 /* Cannot load it directly, split to load it via MOV / MOVT.  */
6257 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6261 return output_move_double (operands, true, NULL);
6264 [(set_attr "length" "8,12,16,8,8")
6265 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6266 (set_attr "arm_pool_range" "*,*,*,1020,*")
6267 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6268 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
6269 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
;; Split a 64-bit immediate move into two arm_split_constant calls, one
;; per SImode half, when inlining the constant is cheap enough (or the
;; literal pool is disabled and there is no choice).
6273 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6274 (match_operand:ANY64 1 "immediate_operand" ""))]
6277 && (arm_disable_literal_pool
6278 || (arm_const_double_inline_cost (operands[1])
6279 <= arm_max_const_double_inline_cost ()))"
6282 arm_split_constant (SET, SImode, curr_insn,
6283 INTVAL (gen_lowpart (SImode, operands[1])),
6284 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
6285 arm_split_constant (SET, SImode, curr_insn,
6286 INTVAL (gen_highpart_mode (SImode,
6287 GET_MODE (operands[0]),
6289 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
6294 ; If optimizing for size, or if we have load delay slots, then
6295 ; we want to split the constant into two separate operations.
6296 ; In both cases this may split a trivial part into a single data op
6297 ; leaving a single complex constant to load.  We can also get longer
6298 ; offsets in a LDR which means we get better chances of sharing the pool
6299 ; entries.  Finally, we can normally do a better job of scheduling
6300 ; LDR instructions than we can with LDM.
6301 ; This pattern will only match if the one above did not.
;; Fall-back split of a const_double move into two SImode sets.
6303 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6304 (match_operand:ANY64 1 "const_double_operand" ""))]
6305 "TARGET_ARM && reload_completed
6306 && arm_const_double_by_parts (operands[1])"
6307 [(set (match_dup 0) (match_dup 1))
6308 (set (match_dup 2) (match_dup 3))]
6310 operands[2] = gen_highpart (SImode, operands[0]);
6311 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
6313 operands[0] = gen_lowpart (SImode, operands[0]);
6314 operands[1] = gen_lowpart (SImode, operands[1]);
;; Split a 64-bit register-to-register move into two SImode moves,
;; swapping the order when the low write would clobber the high source.
6319 [(set (match_operand:ANY64_BF 0 "arm_general_register_operand" "")
6320 (match_operand:ANY64_BF 1 "arm_general_register_operand" ""))]
6321 "TARGET_EITHER && reload_completed"
6322 [(set (match_dup 0) (match_dup 1))
6323 (set (match_dup 2) (match_dup 3))]
6325 operands[2] = gen_highpart (SImode, operands[0]);
6326 operands[3] = gen_highpart (SImode, operands[1]);
6327 operands[0] = gen_lowpart (SImode, operands[0]);
6328 operands[1] = gen_lowpart (SImode, operands[1]);
6330 /* Handle a partial overlap.  */
6331 if (rtx_equal_p (operands[0], operands[3]))
6333 rtx tmp0 = operands[0];
6334 rtx tmp1 = operands[1];
6336 operands[0] = operands[2];
6337 operands[1] = operands[3];
6344 ;; We can't actually do base+index doubleword loads if the index and
6345 ;; destination overlap.  Split here so that we at least have chance to
;; Split "rd = [r1 + r2]" when rd overlaps both address registers:
;; compute the sum into the first destination register, then load
;; through it.
6348 [(set (match_operand:DI 0 "s_register_operand" "")
6349 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
6350 (match_operand:SI 2 "s_register_operand" ""))))]
6352 && reg_overlap_mentioned_p (operands[0], operands[1])
6353 && reg_overlap_mentioned_p (operands[0], operands[2])"
6355 (plus:SI (match_dup 1)
6358 (mem:DI (match_dup 4)))]
;; Operand 4 is the low SImode register of the DI destination.
6360 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
;; SImode move expander.  Handles: forcing mem=const / mem=mem into
;; registers, early splitting of awkward immediates, symbol+offset
;; constants that cannot go in the pool, TLS references, and PIC
;; legitimization.
6364 (define_expand "movsi"
6365 [(set (match_operand:SI 0 "general_operand")
6366 (match_operand:SI 1 "general_operand"))]
6370 rtx base, offset, tmp;
6372 gcc_checking_assert (aligned_operand (operands[0], SImode));
6373 gcc_checking_assert (aligned_operand (operands[1], SImode));
6374 if (TARGET_32BIT || TARGET_HAVE_MOVT)
6376 /* Everything except mem = const or mem = mem can be done easily.  */
6377 if (MEM_P (operands[0]))
6378 operands[1] = force_reg (SImode, operands[1]);
;; Immediates not encodable as MOV or MVN either stay whole (split
;; later) or are expanded into a constant-building sequence now.
6379 if (arm_general_register_operand (operands[0], SImode)
6380 && CONST_INT_P (operands[1])
6381 && !(const_ok_for_arm (INTVAL (operands[1]))
6382 || const_ok_for_arm (~INTVAL (operands[1]))))
6384 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
6386 emit_insn (gen_rtx_SET (operands[0], operands[1]));
6391 arm_split_constant (SET, SImode, NULL_RTX,
6392 INTVAL (operands[1]), operands[0], NULL_RTX,
6393 optimize && can_create_pseudo_p ());
6398 else /* Target doesn't have MOVT...  */
6400 if (can_create_pseudo_p ())
6402 if (!REG_P (operands[0]))
6403 operands[1] = force_reg (SImode, operands[1]);
;; symbol+offset that cannot be forced to memory: load the symbol,
;; then add the offset separately.
6407 split_const (operands[1], &base, &offset);
6408 if (INTVAL (offset) != 0
6409 && targetm.cannot_force_const_mem (SImode, operands[1]))
6411 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6412 emit_move_insn (tmp, base);
6413 emit_insn (gen_addsi3 (operands[0], tmp, offset));
6417 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
6419 /* Recognize the case where operand[1] is a reference to thread-local
6420 data and load its address to a register.  Offsets have been split off
6422 if (arm_tls_referenced_p (operands[1]))
6423 operands[1] = legitimize_tls_address (operands[1], tmp);
;; PIC: symbolic constants must be legitimized through the PIC base.
6425 && (CONSTANT_P (operands[1])
6426 || symbol_mentioned_p (operands[1])
6427 || label_mentioned_p (operands[1])))
6429 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
6434 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
6435 ;; LO_SUM adds in the high bits.  Fortunately these are opaque operations
6436 ;; so this does not matter.
;; MOVT: set the top 16 bits of a register holding a symbolic address
;; (alternatives: 32-bit ARM/Thumb-2 predicable form, ARMv8-M Baseline).
6437 (define_insn "*arm_movt"
6438 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
6439 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
6440 (match_operand:SI 2 "general_operand" "i,i")))]
6441 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
6443 movt%?\t%0, #:upper16:%c2
6444 movt\t%0, #:upper16:%c2"
6445 [(set_attr "arch" "32,v8mb")
6446 (set_attr "predicable" "yes")
6447 (set_attr "length" "4")
6448 (set_attr "type" "alu_sreg")]
;; Basic ARM-state SImode move: reg, MOV-imm, MVN-imm, MOVW (v6t2),
;; load, store alternatives.
6451 (define_insn "*arm_movsi_insn"
6452 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
6453 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
6454 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
6455 && ( register_operand (operands[0], SImode)
6456 || register_operand (operands[1], SImode))"
6464 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
6465 (set_attr "predicable" "yes")
6466 (set_attr "arch" "*,*,*,v6t2,*,*")
6467 (set_attr "pool_range" "*,*,*,*,4096,*")
6468 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
;; Late split of an un-encodable SImode immediate into a constant
;; building sequence via arm_split_constant.
6472 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6473 (match_operand:SI 1 "const_int_operand" ""))]
6474 "(TARGET_32BIT || TARGET_HAVE_MOVT)
6475 && (!(const_ok_for_arm (INTVAL (operands[1]))
6476 || const_ok_for_arm (~INTVAL (operands[1]))))"
6477 [(clobber (const_int 0))]
6479 arm_split_constant (SET, SImode, NULL_RTX,
6480 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
6485 ;; A normal way to do (symbol + offset) requires three instructions at least
6486 ;; (depends on how big the offset is) as below:
6487 ;; movw r0, #:lower16:g
6488 ;; movw r0, #:upper16:g
6491 ;; A better way would be:
6492 ;; movw r0, #:lower16:g+4
6493 ;; movw r0, #:upper16:g+4
6495 ;; The limitation of this way is that the length of offset should be a 16-bit
6496 ;; signed value, because current assembler only supports REL type relocation for
6497 ;; such case.  If the more powerful RELA type is supported in future, we should
6498 ;; update this pattern to go with better way.
;; Split (const (plus symbol offset)): fold the offset into the
;; movw/movt pair when it fits in 16 signed bits, otherwise emit the
;; pair for the bare symbol followed by an explicit add.
6500 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6501 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
6502 (match_operand:SI 2 "const_int_operand" ""))))]
6505 && arm_disable_literal_pool
6507 && GET_CODE (operands[1]) == SYMBOL_REF"
6508 [(clobber (const_int 0))]
6510 int offset = INTVAL (operands[2]);
6512 if (offset < -0x8000 || offset > 0x7fff)
6514 arm_emit_movpair (operands[0], operands[1]);
6515 emit_insn (gen_rtx_SET (operands[0],
6516 gen_rtx_PLUS (SImode, operands[0], operands[2])));
6520 rtx op = gen_rtx_CONST (SImode,
6521 gen_rtx_PLUS (SImode, operands[1], operands[2]));
6522 arm_emit_movpair (operands[0], op);
6527 ;; Split symbol_refs at the later stage (after cprop), instead of generating
6528 ;; movt/movw pair directly at expand.  Otherwise corresponding high_sum
6529 ;; and lo_sum would be merged back into memory load at cprop.  However,
6530 ;; if the default is to prefer movt/movw rather than a load from the constant
6531 ;; pool, the performance is better.
;; Split a plain SYMBOL_REF load into a movw/movt pair (not for TLS or
;; when word relocations are disallowed).
6533 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6534 (match_operand:SI 1 "general_operand" ""))]
6535 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
6536 && !target_word_relocations
6537 && !arm_tls_referenced_p (operands[1])"
6538 [(clobber (const_int 0))]
6540 arm_emit_movpair (operands[0], operands[1]);
6544 ;; When generating pic, we need to load the symbol offset into a register.
6545 ;; So that the optimizer does not confuse this with a normal symbol load
6546 ;; we use an unspec.  The offset will be loaded from a constant pool entry,
6547 ;; since that is the only type of relocation we can use.
6549 ;; Wrap calculation of the whole PIC address in a single pattern for the
6550 ;; benefit of optimizers, particularly, PRE and HOIST.  Calculation of
6551 ;; a PIC address involves two loads from memory, so we want to CSE it
6552 ;; as often as possible.
6553 ;; This pattern will be split into one of the pic_load_addr_* patterns
6554 ;; and a move after GCSE optimizations.
6556 ;; Note: Update arm.cc: legitimize_pic_address() when changing this pattern.
6557 (define_expand "calculate_pic_address"
6558 [(set (match_operand:SI 0 "register_operand")
6559 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
6560 (unspec:SI [(match_operand:SI 2 "" "")]
6565 ;; Split calculate_pic_address into pic_load_addr_* and a move.
;; Operand 3 is the scratch holding the UNSPEC_PIC_SYM value; reuse the
;; destination when pseudos are not available.
6567 [(set (match_operand:SI 0 "register_operand" "")
6568 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
6569 (unspec:SI [(match_operand:SI 2 "" "")]
6572 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
6573 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
6574 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
6577 ;; operand1 is the memory address to go into
6578 ;; pic_load_addr_32bit.
6579 ;; operand2 is the PIC label to be emitted
6580 ;; from pic_add_dot_plus_eight.
6581 ;; We do this to allow hoisting of the entire insn.
;; Unified PIC load: after reload, splits into the pool load and the
;; pc-relative add; the pc offset is 4 for Thumb, 8 for ARM.
6582 (define_insn_and_split "pic_load_addr_unified"
6583 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
6584 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
6585 (match_operand:SI 2 "" "")]
6586 UNSPEC_PIC_UNIFIED))]
6589 "&& reload_completed"
6590 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
6591 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
6592 (match_dup 2)] UNSPEC_PIC_BASE))]
6593 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
6594 [(set_attr "type" "load_4,load_4,load_4")
6595 (set_attr "pool_range" "4096,4094,1022")
6596 (set_attr "neg_pool_range" "4084,0,0")
6597 (set_attr "arch" "a,t2,t1")
6598 (set_attr "length" "8,6,4")]
6601 ;; The rather odd constraints on the following are to force reload to leave
6602 ;; the insn alone, and to force the minipool generation pass to then move
6603 ;; the GOT symbol to memory.
6605 (define_insn "pic_load_addr_32bit"
6606 [(set (match_operand:SI 0 "s_register_operand" "=r")
6607 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6608 "TARGET_32BIT && flag_pic"
;; Pool ranges depend on whether we are in Arm or Thumb-2 state.
6610 [(set_attr "type" "load_4")
6611 (set (attr "pool_range")
6612 (if_then_else (eq_attr "is_thumb" "no")
6615 (set (attr "neg_pool_range")
6616 (if_then_else (eq_attr "is_thumb" "no")
;; Thumb-1 variant: destination limited to low registers ("l"),
;; with a correspondingly small literal-pool range.
6621 (define_insn "pic_load_addr_thumb1"
6622 [(set (match_operand:SI 0 "s_register_operand" "=l")
6623 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6624 "TARGET_THUMB1 && flag_pic"
6626 [(set_attr "type" "load_4")
6627 (set (attr "pool_range") (const_int 1018))]
;; Emit the local LPIC%2 label, then add the pc to the partial PIC
;; address already in operand 0 (tied to operand 1).  Thumb encoding.
6630 (define_insn "pic_add_dot_plus_four"
6631 [(set (match_operand:SI 0 "register_operand" "=r")
6632 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
6634 (match_operand 2 "" "")]
6638 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6639 INTVAL (operands[2]));
6640 return \"add\\t%0, %|pc\";
6642 [(set_attr "length" "2")
6643 (set_attr "type" "alu_sreg")]
;; Arm-state counterpart: emit the LPIC%2 label and add pc to operand 1.
6646 (define_insn "pic_add_dot_plus_eight"
6647 [(set (match_operand:SI 0 "register_operand" "=r")
6648 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6650 (match_operand 2 "" "")]
6654 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6655 INTVAL (operands[2]));
6656 return \"add%?\\t%0, %|pc, %1\";
6658 [(set_attr "predicable" "yes")
6659 (set_attr "type" "alu_sreg")]
;; Like pic_add_dot_plus_eight but with the following load folded in:
;; the pc-relative address is dereferenced directly (ldr %0, [pc, %1]).
6662 (define_insn "tls_load_dot_plus_eight"
6663 [(set (match_operand:SI 0 "register_operand" "=r")
6664 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6666 (match_operand 2 "" "")]
6670 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6671 INTVAL (operands[2]));
6672 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
6674 [(set_attr "predicable" "yes")
6675 (set_attr "type" "load_4")]
6678 ;; PIC references to local variables can generate pic_add_dot_plus_eight
6679 ;; followed by a load. These sequences can be crunched down to
6680 ;; tls_load_dot_plus_eight by a peephole.
;; Only valid when the intermediate address register dies in the load
;; (peep2_reg_dead_p check below).
6683 [(set (match_operand:SI 0 "register_operand" "")
6684 (unspec:SI [(match_operand:SI 3 "register_operand" "")
6686 (match_operand 1 "" "")]
6688 (set (match_operand:SI 2 "arm_general_register_operand" "")
6689 (mem:SI (match_dup 0)))]
6690 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
6692 (mem:SI (unspec:SI [(match_dup 3)
;; VxWorks RTP PIC: load through the table at operand 1 using an
;; UNSPEC_PIC_OFFSET addend.
6699 (define_insn "pic_offset_arm"
6700 [(set (match_operand:SI 0 "register_operand" "=r")
6701 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
6702 (unspec:SI [(match_operand:SI 2 "" "X")]
6703 UNSPEC_PIC_OFFSET))))]
6704 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
6705 "ldr%?\\t%0, [%1,%2]"
6706 [(set_attr "type" "load_4")]
;; Re-establish the PIC register at a builtin setjmp receiver label;
;; the 1UL << 3 mask offers r3 as the scratch register (see comment below).
6709 (define_expand "builtin_setjmp_receiver"
6710 [(label_ref (match_operand 0 "" ""))]
6714 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
6716 if (arm_pic_register != INVALID_REGNUM)
6717 arm_load_pic_register (1UL << 3, NULL_RTX);
6721 ;; If copying one reg to another we can set the condition codes according to
6722 ;; its value. Such a move is common after a return from subroutine and the
6723 ;; result is being tested against zero.
6725 (define_insn "*movsi_compare0"
6726 [(set (reg:CC CC_REGNUM)
6727 (compare:CC (match_operand:SI 1 "s_register_operand" "0,0,l,rk,rk")
6729 (set (match_operand:SI 0 "s_register_operand" "=l,rk,l,r,rk")
6737 subs%?\\t%0, %1, #0"
6738 [(set_attr "conds" "set")
6739 (set_attr "arch" "t2,*,t2,t2,a")
6740 (set_attr "type" "alus_imm")
6741 (set_attr "length" "2,4,2,4,4")]
6744 ;; Subroutine to store a half word from a register into memory.
6745 ;; Operand 0 is the source register (HImode)
6746 ;; Operand 1 is the destination address in a register (SImode)
6748 ;; In both this routine and the next, we must be careful not to spill
6749 ;; a memory address of reg+large_const into a separate PLUS insn, since this
6750 ;; can generate unrecognizable rtl.
6752 (define_expand "storehi"
6753 [;; store the low byte
6754 (set (match_operand 1 "" "") (match_dup 3))
6755 ;; extract the high byte
6757 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6758 ;; store the high byte
6759 (set (match_dup 4) (match_dup 5))]
6763 rtx op1 = operands[1];
6764 rtx addr = XEXP (op1, 0);
6765 enum rtx_code code = GET_CODE (addr);
6767 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6769 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
6771 operands[4] = adjust_address (op1, QImode, 1);
6772 operands[1] = adjust_address (operands[1], QImode, 0);
6773 operands[3] = gen_lowpart (QImode, operands[0]);
6774 operands[0] = gen_lowpart (SImode, operands[0]);
6775 operands[2] = gen_reg_rtx (SImode);
6776 operands[5] = gen_lowpart (QImode, operands[2]);
;; Big-endian counterpart of "storehi": the two byte stores are
;; issued in the opposite order (byte at offset 1 first).
6780 (define_expand "storehi_bigend"
6781 [(set (match_dup 4) (match_dup 3))
6783 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6784 (set (match_operand 1 "" "") (match_dup 5))]
6788 rtx op1 = operands[1];
6789 rtx addr = XEXP (op1, 0);
6790 enum rtx_code code = GET_CODE (addr);
6792 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6794 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
6796 operands[4] = adjust_address (op1, QImode, 1);
6797 operands[1] = adjust_address (operands[1], QImode, 0);
6798 operands[3] = gen_lowpart (QImode, operands[0]);
6799 operands[0] = gen_lowpart (SImode, operands[0]);
6800 operands[2] = gen_reg_rtx (SImode);
6801 operands[5] = gen_lowpart (QImode, operands[2]);
6805 ;; Subroutine to store a half word integer constant into memory.
6806 (define_expand "storeinthi"
6807 [(set (match_operand 0 "" "")
6808 (match_operand 1 "" ""))
6809 (set (match_dup 3) (match_dup 2))]
6813 HOST_WIDE_INT value = INTVAL (operands[1]);
6814 rtx addr = XEXP (operands[0], 0);
6815 rtx op0 = operands[0];
6816 enum rtx_code code = GET_CODE (addr);
6818 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6820 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
/* Split the constant into its two bytes, endian-aware; when both bytes
   are equal, reuse the same SImode register for both stores.  */
6822 operands[1] = gen_reg_rtx (SImode);
6823 if (BYTES_BIG_ENDIAN)
6825 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
6826 if ((value & 255) == ((value >> 8) & 255))
6827 operands[2] = operands[1];
6830 operands[2] = gen_reg_rtx (SImode);
6831 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
6836 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
6837 if ((value & 255) == ((value >> 8) & 255))
6838 operands[2] = operands[1];
6841 operands[2] = gen_reg_rtx (SImode);
6842 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
6846 operands[3] = adjust_address (op0, QImode, 1);
6847 operands[0] = adjust_address (operands[0], QImode, 0);
6848 operands[2] = gen_lowpart (QImode, operands[2]);
6849 operands[1] = gen_lowpart (QImode, operands[1]);
;; HImode store helper for ARMv4+: just legitimize the source into a
;; register so a single halfword store insn can match.
6853 (define_expand "storehi_single_op"
6854 [(set (match_operand:HI 0 "memory_operand")
6855 (match_operand:HI 1 "general_operand"))]
6856 "TARGET_32BIT && arm_arch4"
6858 if (!s_register_operand (operands[1], HImode))
6859 operands[1] = copy_to_mode_reg (HImode, operands[1]);
;; Main HImode move expander.  Dispatches on target state
;; (32-bit Arm, Thumb-2, Thumb-1) and on the kinds of the two operands.
6863 (define_expand "movhi"
6864 [(set (match_operand:HI 0 "general_operand")
6865 (match_operand:HI 1 "general_operand"))]
6868 gcc_checking_assert (aligned_operand (operands[0], HImode));
6869 gcc_checking_assert (aligned_operand (operands[1], HImode));
6872 if (can_create_pseudo_p ())
6874 if (MEM_P (operands[0]))
6878 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
6881 if (CONST_INT_P (operands[1]))
6882 emit_insn (gen_storeinthi (operands[0], operands[1]));
6885 if (MEM_P (operands[1]))
6886 operands[1] = force_reg (HImode, operands[1]);
6887 if (BYTES_BIG_ENDIAN)
6888 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
6890 emit_insn (gen_storehi (operands[1], operands[0]));
6894 /* Sign extend a constant, and keep it in an SImode reg. */
6895 else if (CONST_INT_P (operands[1]))
6897 rtx reg = gen_reg_rtx (SImode);
6898 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6900 /* If the constant is already valid, leave it alone. */
6901 if (!const_ok_for_arm (val))
6903 /* If setting all the top bits will make the constant
6904 loadable in a single instruction, then set them.
6905 Otherwise, sign extend the number. */
6907 if (const_ok_for_arm (~(val | ~0xffff)))
6909 else if (val & 0x8000)
6913 emit_insn (gen_movsi (reg, GEN_INT (val)));
6914 operands[1] = gen_lowpart (HImode, reg);
6916 else if (arm_arch4 && optimize && can_create_pseudo_p ()
6917 && MEM_P (operands[1]))
6919 rtx reg = gen_reg_rtx (SImode);
6921 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6922 operands[1] = gen_lowpart (HImode, reg);
6924 else if (!arm_arch4)
6926 if (MEM_P (operands[1]))
6929 rtx offset = const0_rtx;
6930 rtx reg = gen_reg_rtx (SImode);
/* Pre-ARMv4 (no ldrh): if the address is provably word-aligned,
   load the containing word and shift the halfword out; otherwise
   fall back to the two-byte sequence (movhi_bytes).  */
6932 if ((REG_P (base = XEXP (operands[1], 0))
6933 || (GET_CODE (base) == PLUS
6934 && (CONST_INT_P (offset = XEXP (base, 1)))
6935 && ((INTVAL(offset) & 1) != 1)
6936 && REG_P (base = XEXP (base, 0))))
6937 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
6941 new_rtx = widen_memory_access (operands[1], SImode,
6942 ((INTVAL (offset) & ~3)
6943 - INTVAL (offset)));
6944 emit_insn (gen_movsi (reg, new_rtx));
6945 if (((INTVAL (offset) & 2) != 0)
6946 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
6948 rtx reg2 = gen_reg_rtx (SImode);
6950 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
6955 emit_insn (gen_movhi_bytes (reg, operands[1]));
6957 operands[1] = gen_lowpart (HImode, reg);
6961 /* Handle loading a large integer during reload. */
6962 else if (CONST_INT_P (operands[1])
6963 && !const_ok_for_arm (INTVAL (operands[1]))
6964 && !const_ok_for_arm (~INTVAL (operands[1])))
6966 /* Writing a constant to memory needs a scratch, which should
6967 be handled with SECONDARY_RELOADs. */
6968 gcc_assert (REG_P (operands[0]));
6970 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6971 emit_insn (gen_movsi (operands[0], operands[1]));
6975 else if (TARGET_THUMB2)
6977 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
6978 if (can_create_pseudo_p ())
6980 if (!REG_P (operands[0]))
6981 operands[1] = force_reg (HImode, operands[1]);
6982 /* Zero extend a constant, and keep it in an SImode reg. */
6983 else if (CONST_INT_P (operands[1]))
6985 rtx reg = gen_reg_rtx (SImode);
6986 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6988 emit_insn (gen_movsi (reg, GEN_INT (val)));
6989 operands[1] = gen_lowpart (HImode, reg);
6993 else /* TARGET_THUMB1 */
6995 if (can_create_pseudo_p ())
6997 if (CONST_INT_P (operands[1]))
6999 rtx reg = gen_reg_rtx (SImode);
7001 emit_insn (gen_movsi (reg, operands[1]));
7002 operands[1] = gen_lowpart (HImode, reg);
7005 /* ??? We shouldn't really get invalid addresses here, but this can
7006 happen if we are passed a SP (never OK for HImode/QImode) or
7007 virtual register (also rejected as illegitimate for HImode/QImode)
7008 relative address. */
7009 /* ??? This should perhaps be fixed elsewhere, for instance, in
7010 fixup_stack_1, by checking for other kinds of invalid addresses,
7011 e.g. a bare reference to a virtual register. This may confuse the
7012 alpha though, which must handle this case differently. */
7013 if (MEM_P (operands[0])
7014 && !memory_address_p (GET_MODE (operands[0]),
7015 XEXP (operands[0], 0)))
7017 = replace_equiv_address (operands[0],
7018 copy_to_reg (XEXP (operands[0], 0)));
7020 if (MEM_P (operands[1])
7021 && !memory_address_p (GET_MODE (operands[1]),
7022 XEXP (operands[1], 0)))
7024 = replace_equiv_address (operands[1],
7025 copy_to_reg (XEXP (operands[1], 0)));
7027 if (MEM_P (operands[1]) && optimize > 0)
7029 rtx reg = gen_reg_rtx (SImode);
7031 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
7032 operands[1] = gen_lowpart (HImode, reg);
7035 if (MEM_P (operands[0]))
7036 operands[1] = force_reg (HImode, operands[1]);
7038 else if (CONST_INT_P (operands[1])
7039 && !satisfies_constraint_I (operands[1]))
7041 /* Handle loading a large integer during reload. */
7043 /* Writing a constant to memory needs a scratch, which should
7044 be handled with SECONDARY_RELOADs. */
7045 gcc_assert (REG_P (operands[0]));
7047 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7048 emit_insn (gen_movsi (operands[0], operands[1]));
;; Load an HImode value as two QImode loads combined with a shift and
;; an ior; used by "movhi" when !arm_arch4 (no halfword load available).
7055 (define_expand "movhi_bytes"
7056 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
7058 (zero_extend:SI (match_dup 6)))
7059 (set (match_operand:SI 0 "" "")
7060 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
7065 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
7067 mem1 = change_address (operands[1], QImode, addr);
7068 mem2 = change_address (operands[1], QImode,
7069 plus_constant (Pmode, addr, 1));
7070 operands[0] = gen_lowpart (SImode, operands[0]);
7072 operands[2] = gen_reg_rtx (SImode);
7073 operands[3] = gen_reg_rtx (SImode);
/* Choose which loaded byte becomes the high half of the result,
   depending on endianness.  */
7076 if (BYTES_BIG_ENDIAN)
7078 operands[4] = operands[2];
7079 operands[5] = operands[3];
7083 operands[4] = operands[3];
7084 operands[5] = operands[2];
;; Big-endian HImode load: rotate the containing SImode word and use an
;; arithmetic shift right by 16 to extract the halfword.
7089 (define_expand "movhi_bigend"
7091 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
7094 (ashiftrt:SI (match_dup 2) (const_int 16)))
7095 (set (match_operand:HI 0 "s_register_operand")
7099 operands[2] = gen_reg_rtx (SImode);
7100 operands[3] = gen_reg_rtx (SImode);
7101 operands[4] = gen_lowpart (HImode, operands[3]);
7105 ;; Pattern to recognize insn generated default case above
7106 (define_insn "*movhi_insn_arch4"
7107 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
7108 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))]
7110 && arm_arch4 && !TARGET_HARD_FLOAT
7111 && (register_operand (operands[0], HImode)
7112 || register_operand (operands[1], HImode))"
7114 mov%?\\t%0, %1\\t%@ movhi
7115 mvn%?\\t%0, #%B1\\t%@ movhi
7116 movw%?\\t%0, %L1\\t%@ movhi
7117 strh%?\\t%1, %0\\t%@ movhi
7118 ldrh%?\\t%0, %1\\t%@ movhi"
;; movw alternative requires v6t2 (see "arch" attr); only the ldrh
;; alternative uses the literal pool.
7119 [(set_attr "predicable" "yes")
7120 (set_attr "pool_range" "*,*,*,*,256")
7121 (set_attr "neg_pool_range" "*,*,*,*,244")
7122 (set_attr "arch" "*,*,v6t2,*,*")
7123 (set_attr_alternative "type"
7124 [(if_then_else (match_operand 1 "const_int_operand" "")
7125 (const_string "mov_imm" )
7126 (const_string "mov_reg"))
7127 (const_string "mvn_imm")
7128 (const_string "mov_imm")
7129 (const_string "store_4")
7130 (const_string "load_4")])]
;; HImode moves between registers and immediates only; memory operands
;; are handled by other patterns.
7133 (define_insn "*movhi_bytes"
7134 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
7135 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))]
7136 "TARGET_ARM && !TARGET_HARD_FLOAT"
7138 mov%?\\t%0, %1\\t%@ movhi
7139 mov%?\\t%0, %1\\t%@ movhi
7140 mvn%?\\t%0, #%B1\\t%@ movhi"
7141 [(set_attr "predicable" "yes")
7142 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
7145 ;; We use a DImode scratch because we may occasionally need an additional
7146 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
7147 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
7148 ;; The reload_in<m> and reload_out<m> patterns require special constraints
7149 ;; to be correctly handled in default_secondary_reload function.
7150 (define_expand "reload_outhi"
7151 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
7152 (match_operand:HI 1 "s_register_operand" "r")
7153 (match_operand:DI 2 "s_register_operand" "=&l")])]
7156 arm_reload_out_hi (operands);
7158 thumb_reload_out_hi (operands);
;; Input-reload counterpart: HImode load through a DImode scratch.
7163 (define_expand "reload_inhi"
7164 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
7165 (match_operand:HI 1 "arm_reload_memory_operand" "o")
7166 (match_operand:DI 2 "s_register_operand" "=&r")])]
7170 arm_reload_in_hi (operands);
7172 thumb_reload_out_hi (operands);
;; QImode move expander; see the first comment in the body for the
;; easy/hard case split.
7176 (define_expand "movqi"
7177 [(set (match_operand:QI 0 "general_operand")
7178 (match_operand:QI 1 "general_operand"))]
7181 /* Everything except mem = const or mem = mem can be done easily */
7183 if (can_create_pseudo_p ())
7185 if (CONST_INT_P (operands[1]))
7187 rtx reg = gen_reg_rtx (SImode);
7189 /* For thumb we want an unsigned immediate, then we are more likely
7190 to be able to use a movs insn. */
7192 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
7194 emit_insn (gen_movsi (reg, operands[1]));
7195 operands[1] = gen_lowpart (QImode, reg);
7200 /* ??? We shouldn't really get invalid addresses here, but this can
7201 happen if we are passed a SP (never OK for HImode/QImode) or
7202 virtual register (also rejected as illegitimate for HImode/QImode)
7203 relative address. */
7204 /* ??? This should perhaps be fixed elsewhere, for instance, in
7205 fixup_stack_1, by checking for other kinds of invalid addresses,
7206 e.g. a bare reference to a virtual register. This may confuse the
7207 alpha though, which must handle this case differently. */
7208 if (MEM_P (operands[0])
7209 && !memory_address_p (GET_MODE (operands[0]),
7210 XEXP (operands[0], 0)))
7212 = replace_equiv_address (operands[0],
7213 copy_to_reg (XEXP (operands[0], 0)));
7214 if (MEM_P (operands[1])
7215 && !memory_address_p (GET_MODE (operands[1]),
7216 XEXP (operands[1], 0)))
7218 = replace_equiv_address (operands[1],
7219 copy_to_reg (XEXP (operands[1], 0)));
/* When optimizing, prefer a zero-extending byte load kept in SImode.  */
7222 if (MEM_P (operands[1]) && optimize > 0)
7224 rtx reg = gen_reg_rtx (SImode);
7226 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
7227 operands[1] = gen_lowpart (QImode, reg);
7230 if (MEM_P (operands[0]))
7231 operands[1] = force_reg (QImode, operands[1]);
7233 else if (TARGET_THUMB
7234 && CONST_INT_P (operands[1])
7235 && !satisfies_constraint_I (operands[1]))
7237 /* Handle loading a large integer during reload. */
7239 /* Writing a constant to memory needs a scratch, which should
7240 be handled with SECONDARY_RELOADs. */
7241 gcc_assert (REG_P (operands[0]));
7243 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7244 emit_insn (gen_movsi (operands[0], operands[1]));
;; QImode move insn.  Alternatives cover reg-reg moves, immediates
;; (mvn for "K" constants), and byte loads/stores, with short Thumb-2
;; encodings where the "arch"/"length" attrs permit.
7250 (define_insn "*arm_movqi_insn"
7251 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
7252 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
7254 && ( register_operand (operands[0], QImode)
7255 || register_operand (operands[1], QImode))"
7266 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
7267 (set_attr "predicable" "yes")
7268 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
7269 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
7270 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
7273 ;; HFmode and BFmode moves.
7274 (define_expand "mov<mode>"
7275 [(set (match_operand:HFBF 0 "general_operand")
7276 (match_operand:HFBF 1 "general_operand"))]
7279 gcc_checking_assert (aligned_operand (operands[0], <MODE>mode));
7280 gcc_checking_assert (aligned_operand (operands[1], <MODE>mode));
7283 if (MEM_P (operands[0]))
7284 operands[1] = force_reg (<MODE>mode, operands[1]);
7286 else /* TARGET_THUMB1 */
7288 if (can_create_pseudo_p ())
7290 if (!REG_P (operands[0]))
7291 operands[1] = force_reg (<MODE>mode, operands[1]);
;; Soft-float HF/BF moves through core registers: ldrh/strh for memory,
;; mov for reg-reg, and constant synthesis (movw, or mov+orr) for case 3.
7297 (define_insn "*arm32_mov<mode>"
7298 [(set (match_operand:HFBF 0 "nonimmediate_operand" "=r,m,r,r")
7299 (match_operand:HFBF 1 "general_operand" " m,r,r,F"))]
7301 && !TARGET_HARD_FLOAT
7303 && ( s_register_operand (operands[0], <MODE>mode)
7304 || s_register_operand (operands[1], <MODE>mode))"
7306 switch (which_alternative)
7308 case 0: /* ARM register from memory */
7309 return \"ldrh%?\\t%0, %1\\t%@ __<fporbf>\";
7310 case 1: /* memory from ARM register */
7311 return \"strh%?\\t%1, %0\\t%@ __<fporbf>\";
7312 case 2: /* ARM register from ARM register */
7313 return \"mov%?\\t%0, %1\\t%@ __<fporbf>\";
7314 case 3: /* ARM register from constant */
7319 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
7321 ops[0] = operands[0];
7322 ops[1] = GEN_INT (bits);
7323 ops[2] = GEN_INT (bits & 0xff00);
7324 ops[3] = GEN_INT (bits & 0x00ff);
7326 if (arm_arch_thumb2)
7327 output_asm_insn (\"movw%?\\t%0, %1\", ops);
7329 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
7336 [(set_attr "conds" "unconditional")
7337 (set_attr "type" "load_4,store_4,mov_reg,multiple")
7338 (set_attr "length" "4,4,4,8")
7339 (set_attr "predicable" "yes")]
;; SFmode move expander.
7342 (define_expand "movsf"
7343 [(set (match_operand:SF 0 "general_operand")
7344 (match_operand:SF 1 "general_operand"))]
7347 gcc_checking_assert (aligned_operand (operands[0], SFmode));
7348 gcc_checking_assert (aligned_operand (operands[1], SFmode));
7351 if (MEM_P (operands[0]))
7352 operands[1] = force_reg (SFmode, operands[1]);
7354 else /* TARGET_THUMB1 */
7356 if (can_create_pseudo_p ())
7358 if (!REG_P (operands[0]))
7359 operands[1] = force_reg (SFmode, operands[1]);
7363 /* Cannot load it directly, generate a load with clobber so that it can be
7364 loaded via GPR with MOV / MOVT. */
7365 if (arm_disable_literal_pool
7366 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
7367 && CONST_DOUBLE_P (operands[1])
7369 && !vfp3_const_double_rtx (operands[1]))
7371 rtx clobreg = gen_reg_rtx (SFmode);
7372 emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
7379 ;; Transform a floating-point move of a constant into a core register into
7380 ;; an SImode operation.
7382 [(set (match_operand:SF 0 "arm_general_register_operand" "")
7383 (match_operand:SF 1 "immediate_operand" ""))]
7386 && CONST_DOUBLE_P (operands[1])"
7387 [(set (match_dup 2) (match_dup 3))]
7389 operands[2] = gen_lowpart (SImode, operands[0]);
7390 operands[3] = gen_lowpart (SImode, operands[1]);
7391 if (operands[2] == 0 || operands[3] == 0)
;; Soft-float SFmode moves in core registers: mov, literal-pool load, str.
7396 (define_insn "*arm_movsf_soft_insn"
7397 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
7398 (match_operand:SF 1 "general_operand" "r,mE,r"))]
7400 && TARGET_SOFT_FLOAT && !TARGET_HAVE_MVE
7401 && (!MEM_P (operands[0])
7402 || register_operand (operands[1], SFmode))"
7404 switch (which_alternative)
7406 case 0: return \"mov%?\\t%0, %1\";
7408 /* Cannot load it directly, split to load it via MOV / MOVT. */
7409 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
7411 return \"ldr%?\\t%0, %1\\t%@ float\";
7412 case 2: return \"str%?\\t%1, %0\\t%@ float\";
7413 default: gcc_unreachable ();
7416 [(set_attr "predicable" "yes")
7417 (set_attr "type" "mov_reg,load_4,store_4")
7418 (set_attr "arm_pool_range" "*,4096,*")
7419 (set_attr "thumb2_pool_range" "*,4094,*")
7420 (set_attr "arm_neg_pool_range" "*,4084,*")
7421 (set_attr "thumb2_neg_pool_range" "*,0,*")]
7424 ;; Splitter for the above.
;; Materializes the SF constant via its 32-bit target image moved as SImode.
7426 [(set (match_operand:SF 0 "s_register_operand")
7427 (match_operand:SF 1 "const_double_operand"))]
7428 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
7432 real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
7433 rtx cst = gen_int_mode (buf, SImode);
7434 emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
;; DFmode move expander; mirrors "movsf" above.
7439 (define_expand "movdf"
7440 [(set (match_operand:DF 0 "general_operand")
7441 (match_operand:DF 1 "general_operand"))]
7444 gcc_checking_assert (aligned_operand (operands[0], DFmode));
7445 gcc_checking_assert (aligned_operand (operands[1], DFmode));
7448 if (MEM_P (operands[0]))
7449 operands[1] = force_reg (DFmode, operands[1]);
7451 else /* TARGET_THUMB */
7453 if (can_create_pseudo_p ())
7455 if (!REG_P (operands[0]))
7456 operands[1] = force_reg (DFmode, operands[1]);
7460 /* Cannot load it directly, generate a load with clobber so that it can be
7461 loaded via GPR with MOV / MOVT. */
7462 if (arm_disable_literal_pool
7463 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
7464 && CONSTANT_P (operands[1])
7466 && !arm_const_double_rtx (operands[1])
7467 && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
7469 rtx clobreg = gen_reg_rtx (DFmode);
7470 emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
7477 ;; Reloading a df mode value stored in integer regs to memory can require a
7479 ;; Another reload_out<m> pattern that requires special constraints.
7480 (define_expand "reload_outdf"
7481 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
7482 (match_operand:DF 1 "s_register_operand" "r")
7483 (match_operand:SI 2 "s_register_operand" "=&r")]
/* Dispatch on the address form of the destination memory.  */
7487 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
7490 operands[2] = XEXP (operands[0], 0);
7491 else if (code == POST_INC || code == PRE_DEC)
7493 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
7494 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
7495 emit_insn (gen_movdi (operands[0], operands[1]));
7498 else if (code == PRE_INC)
7500 rtx reg = XEXP (XEXP (operands[0], 0), 0);
7502 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
7505 else if (code == POST_DEC)
7506 operands[2] = XEXP (XEXP (operands[0], 0), 0);
7508 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
7509 XEXP (XEXP (operands[0], 0), 1)));
7511 emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
/* POST_DEC: undo the decrement applied above after the store.  */
7514 if (code == POST_DEC)
7515 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
;; Soft-float DFmode moves through core register pairs.
7521 (define_insn "*movdf_soft_insn"
7522 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
7523 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
7524 "TARGET_32BIT && TARGET_SOFT_FLOAT && !TARGET_HAVE_MVE
7525 && ( register_operand (operands[0], DFmode)
7526 || register_operand (operands[1], DFmode))"
7528 switch (which_alternative)
7535 /* Cannot load it directly, split to load it via MOV / MOVT. */
7536 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
7540 return output_move_double (operands, true, NULL);
7543 [(set_attr "length" "8,12,16,8,8")
7544 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
7545 (set_attr "arm_pool_range" "*,*,*,1020,*")
7546 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
7547 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
7548 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
7551 ;; Splitter for the above.
;; Materializes the DF constant as its 64-bit integer image, honoring
;; the target's word order (BYTES_BIG_ENDIAN).
7553 [(set (match_operand:DF 0 "s_register_operand")
7554 (match_operand:DF 1 "const_double_operand"))]
7555 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
7559 int order = BYTES_BIG_ENDIAN ? 1 : 0;
7560 real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
7561 unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
7562 ival |= (zext_hwi (buf[1 - order], 32) << 32);
7563 rtx cst = gen_int_mode (ival, DImode);
7564 emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
7570 ;; load- and store-multiple insns
7571 ;; The arm can load/store any set of registers, provided that they are in
7572 ;; ascending order, but these expanders assume a contiguous set.
7574 (define_expand "load_multiple"
7575 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7576 (match_operand:SI 1 "" ""))
7577 (use (match_operand:SI 2 "" ""))])]
7580 HOST_WIDE_INT offset = 0;
7582 /* Support only fixed point registers. */
7583 if (!CONST_INT_P (operands[2])
7584 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
7585 || INTVAL (operands[2]) < 2
7586 || !MEM_P (operands[1])
7587 || !REG_P (operands[0])
7588 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
7589 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7593 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
7594 INTVAL (operands[2]),
7595 force_reg (SImode, XEXP (operands[1], 0)),
7596 FALSE, operands[1], &offset);
;; Store counterpart of "load_multiple": same operand checks with the
;; register/memory roles swapped.
7599 (define_expand "store_multiple"
7600 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7601 (match_operand:SI 1 "" ""))
7602 (use (match_operand:SI 2 "" ""))])]
7605 HOST_WIDE_INT offset = 0;
7607 /* Support only fixed point registers. */
7608 if (!CONST_INT_P (operands[2])
7609 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
7610 || INTVAL (operands[2]) < 2
7611 || !REG_P (operands[1])
7612 || !MEM_P (operands[0])
7613 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
7614 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7618 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
7619 INTVAL (operands[2]),
7620 force_reg (SImode, XEXP (operands[0], 0)),
7621 FALSE, operands[0], &offset);
;; Block-set (memset) expander: delegate entirely to arm_gen_setmem.
7625 (define_expand "setmemsi"
7626 [(match_operand:BLK 0 "general_operand")
7627 (match_operand:SI 1 "const_int_operand")
7628 (match_operand:SI 2 "const_int_operand")
7629 (match_operand:SI 3 "const_int_operand")]
7632 if (arm_gen_setmem (operands))
7639 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
7640 ;; We could let this apply for blocks of less than this, but it clobbers so
7641 ;; many registers that there is then probably a better way.
7643 (define_expand "cpymemqi"
7644 [(match_operand:BLK 0 "general_operand")
7645 (match_operand:BLK 1 "general_operand")
7646 (match_operand:SI 2 "const_int_operand")
7647 (match_operand:SI 3 "const_int_operand")]
/* Prefer the ldrd/strd sequence when tuning says so and we are not
   optimizing for size.  */
7652 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
7653 && !optimize_function_for_size_p (cfun))
7655 if (gen_cpymem_ldrd_strd (operands))
7660 if (arm_gen_cpymemqi (operands))
7664 else /* TARGET_THUMB1 */
7666 if ( INTVAL (operands[3]) != 4
7667 || INTVAL (operands[2]) > 48)
7670 thumb_expand_cpymemqi (operands);
7677 ;; Compare & branch insns
7678 ;; The range calculations are based as follows:
7679 ;; For forward branches, the address calculation returns the address of
7680 ;; the next instruction. This is 2 beyond the branch instruction.
7681 ;; For backward branches, the address calculation returns the address of
7682 ;; the first instruction in this pattern (cmp). This is 2 before the branch
7683 ;; instruction for the shortest sequence, and 4 before the branch instruction
7684 ;; if we have to jump around an unconditional branch.
7685 ;; To the basic branch range the PC offset must be added (this is +4).
7686 ;; So for forward branches we have
7687 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
7688 ;; And for backward branches we have
7689 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
7691 ;; In 16-bit Thumb these ranges are:
7692 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
7693 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
7695 ;; In 32-bit Thumb these ranges are:
7696 ;; For a 'b' +/- 16MB is not checked for.
7697 ;; For a 'b<cond>' pos_range = 1048574, neg_range = -1048576 giving
7698 ;; (-1048568 -> 1048576).
7700 (define_expand "cbranchsi4"
7701 [(set (pc) (if_then_else
7702 (match_operator 0 "expandable_comparison_operator"
7703 [(match_operand:SI 1 "s_register_operand")
7704 (match_operand:SI 2 "nonmemory_operand")])
7705 (label_ref (match_operand 3 "" ""))
7711 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7713 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7717 if (thumb1_cmpneg_operand (operands[2], SImode))
7719 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
7720 operands[3], operands[0]));
7723 if (!thumb1_cmp_operand (operands[2], SImode))
7724 operands[2] = force_reg (SImode, operands[2]);
7727 (define_expand "cbranchsf4"
7728 [(set (pc) (if_then_else
7729 (match_operator 0 "expandable_comparison_operator"
7730 [(match_operand:SF 1 "s_register_operand")
7731 (match_operand:SF 2 "vfp_compare_operand")])
7732 (label_ref (match_operand 3 "" ""))
7734 "TARGET_32BIT && TARGET_HARD_FLOAT"
7735 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7736 operands[3])); DONE;"
;; Expand a DFmode compare-and-branch by delegating to cbranch_cc;
;; requires double-precision VFP (!TARGET_VFP_SINGLE).
7739 (define_expand "cbranchdf4"
7740 [(set (pc) (if_then_else
7741 (match_operator 0 "expandable_comparison_operator"
7742 [(match_operand:DF 1 "s_register_operand")
7743 (match_operand:DF 2 "vfp_compare_operand")])
7744 (label_ref (match_operand 3 "" ""))
7746 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7747 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7748 operands[3])); DONE;"
;; Expand a DImode compare-and-branch: validize the comparison, then
;; delegate to cbranch_cc.
7751 (define_expand "cbranchdi4"
7752 [(set (pc) (if_then_else
7753 (match_operator 0 "expandable_comparison_operator"
7754 [(match_operand:DI 1 "s_register_operand")
7755 (match_operand:DI 2 "reg_or_int_operand")])
7756 (label_ref (match_operand 3 "" ""))
7760 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7762 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7768 ;; Comparison and test insns
;; SImode compare setting the full CC register; alternatives cover
;; 16-bit Thumb-2 forms (Py/low regs) and 32-bit register/immediate forms.
7770 (define_insn "*arm_cmpsi_insn"
7771 [(set (reg:CC CC_REGNUM)
7772 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
7773 (match_operand:SI 1 "arm_add_operand" "Py,r,r,I,L")))]
7781 [(set_attr "conds" "set")
7782 (set_attr "arch" "t2,t2,any,any,any")
7783 (set_attr "length" "2,2,4,4,4")
7784 (set_attr "predicable" "yes")
7785 (set_attr "predicable_short_it" "yes,yes,yes,no,no")
7786 (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
;; Compare a register against a shifted register (shift amount immediate
;; or register; register-shift form is A32 only per the "arch" attribute).
7789 (define_insn "*cmpsi_shiftsi"
7790 [(set (reg:CC CC_REGNUM)
7791 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7792 (match_operator:SI 3 "shift_operator"
7793 [(match_operand:SI 1 "s_register_operand" "r,r")
7794 (match_operand:SI 2 "shift_amount_operand" "M,r")])))]
7797 [(set_attr "conds" "set")
7798 (set_attr "shift" "1")
7799 (set_attr "arch" "32,a")
7800 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; As *cmpsi_shiftsi but with the operands swapped, hence CC_SWP mode
;; on the condition register.
7802 (define_insn "*cmpsi_shiftsi_swp"
7803 [(set (reg:CC_SWP CC_REGNUM)
7804 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7805 [(match_operand:SI 1 "s_register_operand" "r,r")
7806 (match_operand:SI 2 "shift_amount_operand" "M,r")])
7807 (match_operand:SI 0 "s_register_operand" "r,r")))]
7810 [(set_attr "conds" "set")
7811 (set_attr "shift" "1")
7812 (set_attr "arch" "32,a")
7813 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; Compare a register with a negated shifted register; only the Z flag
;; result is valid (CC_Z mode).
7815 (define_insn "*arm_cmpsi_negshiftsi_si"
7816 [(set (reg:CC_Z CC_REGNUM)
7818 (neg:SI (match_operator:SI 1 "shift_operator"
7819 [(match_operand:SI 2 "s_register_operand" "r,r")
7820 (match_operand:SI 3 "shift_amount_operand" "M,r")]))
7821 (match_operand:SI 0 "s_register_operand" "r,r")))]
7824 [(set_attr "conds" "set")
7825 (set_attr "arch" "32,a")
7826 (set_attr "shift" "2")
7827 (set_attr "type" "alus_shift_imm,alus_shift_reg")
7828 (set_attr "predicable" "yes")]
7831 ; This insn allows redundant compares to be removed by cse, nothing should
7832 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7833 ; is deleted later on. The match_dup will match the mode here, so that
7834 ; mode changes of the condition codes aren't lost by this even though we don't
7835 ; specify what they are.
;; Zero-length placeholder (see comment above): matches a no-op CC-to-CC
;; set so CSE can remove redundant compares; emits only an asm comment.
7837 (define_insn "*deleted_compare"
7838 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7840 "\\t%@ deleted compare"
7841 [(set_attr "conds" "set")
7842 (set_attr "length" "0")
7843 (set_attr "type" "no_insn")]
7847 ;; Conditional branch insns
;; Generic compare-and-branch helper: materialize the comparison into a
;; CC register via arm_gen_compare_reg, then branch on (CC cmp 0).
7849 (define_expand "cbranch_cc"
7851 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7852 (match_operand 2 "" "")])
7853 (label_ref (match_operand 3 "" ""))
7856 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7857 operands[1], operands[2], NULL_RTX);
7858 operands[2] = const0_rtx;"
7862 ;; Patterns to match conditional branch insns.
;; Conditional branch on a CC register.  The length attribute picks
;; 16-bit vs 32-bit Thumb-2 b<cond> by target distance (ranges documented
;; near line 7691 above); out-of-range targets use arm_gen_far_branch to
;; emit a reversed narrow b<cond> around an unconditional b.
7865 (define_insn "arm_cond_branch"
7867 (if_then_else (match_operator 1 "arm_comparison_operator"
7868 [(match_operand 2 "cc_register" "") (const_int 0)])
7869 (label_ref (match_operand 0 "" ""))
7873 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7875 arm_ccfsm_state += 2;
7878 switch (get_attr_length (insn))
7880 case 2: /* Thumb2 16-bit b{cond}. */
7881 case 4: /* Thumb2 32-bit b{cond} or A32 b{cond}. */
7885 /* Thumb2 b{cond} out of range. Use 16-bit b{cond} and
7886 unconditional branch b. */
7887 default: return arm_gen_far_branch (operands, 0, "Lbcond", "b%D1\t");
7890 [(set_attr "conds" "use")
7891 (set_attr "type" "branch")
7892 (set (attr "length")
7893 (if_then_else (match_test "!TARGET_THUMB2")
7895 ;;Target is not Thumb2, therefore is A32. Generate b{cond}.
7898 ;; Check if target is within 16-bit Thumb2 b{cond} range.
7899 (if_then_else (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7900 (le (minus (match_dup 0) (pc)) (const_int 256)))
7902 ;; Target is Thumb2, within narrow range.
7903 ;; Generate b{cond}.
7906 ;; Check if target is within 32-bit Thumb2 b{cond} range.
7907 (if_then_else (and (ge (minus (match_dup 0) (pc))(const_int -1048568))
7908 (le (minus (match_dup 0) (pc)) (const_int 1048576)))
7910 ;; Target is Thumb2, within wide range.
7913 ;; Target is Thumb2, out of range.
7914 ;; Generate narrow b{cond} and unconditional branch b.
;; Same as arm_cond_branch but the label is in the else-arm, so the
;; condition is emitted inverted (%d1 instead of %D1 in the far-branch
;; template).
7918 (define_insn "*arm_cond_branch_reversed"
7920 (if_then_else (match_operator 1 "arm_comparison_operator"
7921 [(match_operand 2 "cc_register" "") (const_int 0)])
7923 (label_ref (match_operand 0 "" ""))))]
7926 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7928 arm_ccfsm_state += 2;
7931 switch (get_attr_length (insn))
7933 case 2: /* Thumb2 16-bit b{cond}. */
7934 case 4: /* Thumb2 32-bit b{cond} or A32 b{cond}. */
7938 /* Thumb2 b{cond} out of range. Use 16-bit b{cond} and
7939 unconditional branch b. */
7940 default: return arm_gen_far_branch (operands, 0, "Lbcond", "b%d1\t");
7943 [(set_attr "conds" "use")
7944 (set_attr "type" "branch")
7945 (set (attr "length")
7946 (if_then_else (match_test "!TARGET_THUMB2")
7948 ;;Target is not Thumb2, therefore is A32. Generate b{cond}.
7951 ;; Check if target is within 16-bit Thumb2 b{cond} range.
7952 (if_then_else (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7953 (le (minus (match_dup 0) (pc)) (const_int 256)))
7955 ;; Target is Thumb2, within narrow range.
7956 ;; Generate b{cond}.
7959 ;; Check if target is within 32-bit Thumb2 b{cond} range.
7960 (if_then_else (and (ge (minus (match_dup 0) (pc))(const_int -1048568))
7961 (le (minus (match_dup 0) (pc)) (const_int 1048576)))
7963 ;; Target is Thumb2, within wide range.
7964 ;; Generate b{cond}.
7966 ;; Target is Thumb2, out of range.
7967 ;; Generate narrow b{cond} and unconditional branch b.
;; Generic conditional-store helper: materialize the comparison into a
;; CC register, then store the (CC cmp 0) result into operand 0.
7975 (define_expand "cstore_cc"
7976 [(set (match_operand:SI 0 "s_register_operand")
7977 (match_operator:SI 1 "" [(match_operand 2 "" "")
7978 (match_operand 3 "" "")]))]
7980 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7981 operands[2], operands[3], NULL_RTX);
7982 operands[3] = const0_rtx;"
;; Store 1/0 for a condition: split after reload into an if_then_else
;; that becomes a conditional mov #0 / mov #1 pair (8 bytes).
7985 (define_insn_and_split "*mov_scc"
7986 [(set (match_operand:SI 0 "s_register_operand" "=r")
7987 (match_operator:SI 1 "arm_comparison_operator_mode"
7988 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7990 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7993 (if_then_else:SI (match_dup 1)
7997 [(set_attr "conds" "use")
7998 (set_attr "length" "8")
7999 (set_attr "type" "multiple")]
;; Negate a borrow-flag operation directly (single 4-byte insn), avoiding
;; the generic *mov_negscc split for this case.
8002 (define_insn "*negscc_borrow"
8003 [(set (match_operand:SI 0 "s_register_operand" "=r")
8004 (neg:SI (match_operand:SI 1 "arm_borrow_operation" "")))]
8007 [(set_attr "conds" "use")
8008 (set_attr "length" "4")
8009 (set_attr "type" "adc_reg")]
;; Store 0/-1 for a condition (excluded when *negscc_borrow applies):
;; splits into conditional mov #0 / mvn #0, using ~0 for the -1 value.
8012 (define_insn_and_split "*mov_negscc"
8013 [(set (match_operand:SI 0 "s_register_operand" "=r")
8014 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
8015 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8016 "TARGET_ARM && !arm_borrow_operation (operands[1], SImode)"
8017 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
8020 (if_then_else:SI (match_dup 1)
8024 operands[3] = GEN_INT (~0);
8026 [(set_attr "conds" "use")
8027 (set_attr "length" "8")
8028 (set_attr "type" "multiple")]
;; Store ~1/~0 (the bitwise NOT of the 1/0 condition result): splits
;; into a conditional mvn pair using the ~1 and ~0 constants.
8031 (define_insn_and_split "*mov_notscc"
8032 [(set (match_operand:SI 0 "s_register_operand" "=r")
8033 (not:SI (match_operator:SI 1 "arm_comparison_operator"
8034 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8036 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
8039 (if_then_else:SI (match_dup 1)
8043 operands[3] = GEN_INT (~1);
8044 operands[4] = GEN_INT (~0);
8046 [(set_attr "conds" "use")
8047 (set_attr "length" "8")
8048 (set_attr "type" "multiple")]
;; Expand an SImode conditional store.  On 32-bit targets this goes
;; through cstore_cc; the remainder is a Thumb-1 open-coded expansion
;; that builds each comparison result from shifts/adds/subs (EQ/NE via
;; cstoresi_{eq0,ne0}_thumb1, GE/GEU/LE/LEU via thumb1_addsi3_addgeu,
;; LTU/GTU via cstoresi_ltu_thumb1), first handling compare-with-zero
;; specially.  GT/LT are left to generic code.
8051 (define_expand "cstoresi4"
8052 [(set (match_operand:SI 0 "s_register_operand")
8053 (match_operator:SI 1 "expandable_comparison_operator"
8054 [(match_operand:SI 2 "s_register_operand")
8055 (match_operand:SI 3 "reg_or_int_operand")]))]
8056 "TARGET_32BIT || TARGET_THUMB1"
8058 rtx op3, scratch, scratch2;
8062 if (!arm_add_operand (operands[3], SImode))
8063 operands[3] = force_reg (SImode, operands[3]);
8064 emit_insn (gen_cstore_cc (operands[0], operands[1],
8065 operands[2], operands[3]));
8069 if (operands[3] == const0_rtx)
8071 switch (GET_CODE (operands[1]))
8074 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
8078 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
8082 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
8083 NULL_RTX, 0, OPTAB_WIDEN);
8084 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
8085 NULL_RTX, 0, OPTAB_WIDEN);
8086 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8087 operands[0], 1, OPTAB_WIDEN);
8091 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
8093 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8094 NULL_RTX, 1, OPTAB_WIDEN);
8098 scratch = expand_binop (SImode, ashr_optab, operands[2],
8099 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
8100 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
8101 NULL_RTX, 0, OPTAB_WIDEN);
8102 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
8106 /* LT is handled by generic code. No need for unsigned with 0. */
8113 switch (GET_CODE (operands[1]))
8116 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8117 NULL_RTX, 0, OPTAB_WIDEN);
8118 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
8122 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8123 NULL_RTX, 0, OPTAB_WIDEN);
8124 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
8128 op3 = force_reg (SImode, operands[3]);
8130 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
8131 NULL_RTX, 1, OPTAB_WIDEN);
8132 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
8133 NULL_RTX, 0, OPTAB_WIDEN);
8134 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8140 if (!thumb1_cmp_operand (op3, SImode))
8141 op3 = force_reg (SImode, op3);
8142 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8143 NULL_RTX, 0, OPTAB_WIDEN);
8144 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8145 NULL_RTX, 1, OPTAB_WIDEN);
8146 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8151 op3 = force_reg (SImode, operands[3]);
8152 scratch = force_reg (SImode, const0_rtx);
8153 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8159 if (!thumb1_cmp_operand (op3, SImode))
8160 op3 = force_reg (SImode, op3);
8161 scratch = force_reg (SImode, const0_rtx);
8162 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8168 if (!thumb1_cmp_operand (op3, SImode))
8169 op3 = force_reg (SImode, op3);
8170 scratch = gen_reg_rtx (SImode);
8171 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
8175 op3 = force_reg (SImode, operands[3]);
8176 scratch = gen_reg_rtx (SImode);
8177 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
8180 /* No good sequences for GT, LT. */
;; Expand an HFmode conditional store (FP16 instructions required):
;; validize the comparison, then delegate to cstore_cc.
8187 (define_expand "cstorehf4"
8188 [(set (match_operand:SI 0 "s_register_operand")
8189 (match_operator:SI 1 "expandable_comparison_operator"
8190 [(match_operand:HF 2 "s_register_operand")
8191 (match_operand:HF 3 "vfp_compare_operand")]))]
8192 "TARGET_VFP_FP16INST"
8194 if (!arm_validize_comparison (&operands[1],
8199 emit_insn (gen_cstore_cc (operands[0], operands[1],
8200 operands[2], operands[3]));
;; Expand an SFmode conditional store via cstore_cc (hard-float only).
8205 (define_expand "cstoresf4"
8206 [(set (match_operand:SI 0 "s_register_operand")
8207 (match_operator:SI 1 "expandable_comparison_operator"
8208 [(match_operand:SF 2 "s_register_operand")
8209 (match_operand:SF 3 "vfp_compare_operand")]))]
8210 "TARGET_32BIT && TARGET_HARD_FLOAT"
8211 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8212 operands[2], operands[3])); DONE;"
;; Expand a DFmode conditional store via cstore_cc; requires
;; double-precision VFP.
8215 (define_expand "cstoredf4"
8216 [(set (match_operand:SI 0 "s_register_operand")
8217 (match_operator:SI 1 "expandable_comparison_operator"
8218 [(match_operand:DF 2 "s_register_operand")
8219 (match_operand:DF 3 "vfp_compare_operand")]))]
8220 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
8221 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8222 operands[2], operands[3])); DONE;"
;; Expand a DImode conditional store: validize the comparison, then
;; delegate to cstore_cc.
8225 (define_expand "cstoredi4"
8226 [(set (match_operand:SI 0 "s_register_operand")
8227 (match_operator:SI 1 "expandable_comparison_operator"
8228 [(match_operand:DI 2 "s_register_operand")
8229 (match_operand:DI 3 "reg_or_int_operand")]))]
8232 if (!arm_validize_comparison (&operands[1],
8236 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
8243 ;; Conditional move insns
;; Expand an SImode conditional move: validize the comparison, build the
;; CC register with arm_gen_compare_reg, and rewrite operand 1 as
;; (cond CC 0) for the *movsicc_insn pattern.
8245 (define_expand "movsicc"
8246 [(set (match_operand:SI 0 "s_register_operand")
8247 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
8248 (match_operand:SI 2 "arm_not_operand")
8249 (match_operand:SI 3 "arm_not_operand")))]
8256 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8257 &XEXP (operands[1], 1)))
8260 code = GET_CODE (operands[1]);
8261 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8262 XEXP (operands[1], 1), NULL_RTX);
8263 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; Expand an HFmode conditional move (FP16 instructions required); same
;; comparison-canonicalization scheme as movsicc.
8267 (define_expand "movhfcc"
8268 [(set (match_operand:HF 0 "s_register_operand")
8269 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
8270 (match_operand:HF 2 "s_register_operand")
8271 (match_operand:HF 3 "s_register_operand")))]
8272 "TARGET_VFP_FP16INST"
8275 enum rtx_code code = GET_CODE (operands[1]);
8278 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8279 &XEXP (operands[1], 1)))
8282 code = GET_CODE (operands[1]);
8283 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8284 XEXP (operands[1], 1), NULL_RTX);
8285 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; Expand an SFmode conditional move (hard-float); same
;; comparison-canonicalization scheme as movsicc.
8289 (define_expand "movsfcc"
8290 [(set (match_operand:SF 0 "s_register_operand")
8291 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
8292 (match_operand:SF 2 "s_register_operand")
8293 (match_operand:SF 3 "s_register_operand")))]
8294 "TARGET_32BIT && TARGET_HARD_FLOAT"
8297 enum rtx_code code = GET_CODE (operands[1]);
8300 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8301 &XEXP (operands[1], 1)))
8304 code = GET_CODE (operands[1]);
8305 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8306 XEXP (operands[1], 1), NULL_RTX);
8307 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; Expand a DFmode conditional move (double-precision VFP); same
;; comparison-canonicalization scheme as movsicc.
8311 (define_expand "movdfcc"
8312 [(set (match_operand:DF 0 "s_register_operand")
8313 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
8314 (match_operand:DF 2 "s_register_operand")
8315 (match_operand:DF 3 "s_register_operand")))]
8316 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
8319 enum rtx_code code = GET_CODE (operands[1]);
8322 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8323 &XEXP (operands[1], 1)))
8325 code = GET_CODE (operands[1]);
8326 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8327 XEXP (operands[1], 1), NULL_RTX);
8328 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; VSEL-based conditional move for SF/DF (ARMv8 VFP): emits vsel with
;; either the direct or the inverted condition, swapping source operands
;; accordingly.
8332 (define_insn "*cmov<mode>"
8333 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
8334 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
8335 [(match_operand 2 "cc_register" "") (const_int 0)])
8336 (match_operand:SDF 3 "s_register_operand"
8338 (match_operand:SDF 4 "s_register_operand"
8339 "<F_constraint>")))]
8340 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
8343 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
8350 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
8355 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
8361 [(set_attr "conds" "use")
8362 (set_attr "type" "fcsel")]
;; VSEL-based conditional move for HF (FP16 instructions): direct or
;; inverted condition with swapped sources, as in *cmov<mode>.
8365 (define_insn "*cmovhf"
8366 [(set (match_operand:HF 0 "s_register_operand" "=t")
8367 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
8368 [(match_operand 2 "cc_register" "") (const_int 0)])
8369 (match_operand:HF 3 "s_register_operand" "t")
8370 (match_operand:HF 4 "s_register_operand" "t")))]
8371 "TARGET_VFP_FP16INST"
8374 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
8381 return \"vsel%d1.f16\\t%0, %3, %4\";
8386 return \"vsel%D1.f16\\t%0, %4, %3\";
8392 [(set_attr "conds" "use")
8393 (set_attr "type" "fcsel")]
;; SImode conditional move on a CC register; alternatives cover every
;; mov/mvn combination of the two sources.  After reload it splits into
;; COND_EXEC sets: one for the then-arm and, when operand 1 is not tied
;; to the destination, one for the else-arm under the reversed condition
;; (using reverse_condition_maybe_unordered for FP CC modes).
8396 (define_insn_and_split "*movsicc_insn"
8397 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8399 (match_operator 3 "arm_comparison_operator"
8400 [(match_operand 4 "cc_register" "") (const_int 0)])
8401 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8402 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8413 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8414 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8415 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8416 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8417 "&& reload_completed"
8420 enum rtx_code rev_code;
8424 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
8426 gen_rtx_SET (operands[0], operands[1])));
8428 rev_code = GET_CODE (operands[3]);
8429 mode = GET_MODE (operands[4]);
8430 if (mode == CCFPmode || mode == CCFPEmode)
8431 rev_code = reverse_condition_maybe_unordered (rev_code);
8433 rev_code = reverse_condition (rev_code);
8435 rev_cond = gen_rtx_fmt_ee (rev_code,
8439 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
8441 gen_rtx_SET (operands[0], operands[2])));
8444 [(set_attr "length" "4,4,4,4,8,8,8,8")
8445 (set_attr "conds" "use")
8446 (set_attr_alternative "type"
8447 [(if_then_else (match_operand 2 "const_int_operand" "")
8448 (const_string "mov_imm")
8449 (const_string "mov_reg"))
8450 (const_string "mvn_imm")
8451 (if_then_else (match_operand 1 "const_int_operand" "")
8452 (const_string "mov_imm")
8453 (const_string "mov_reg"))
8454 (const_string "mvn_imm")
8455 (const_string "multiple")
8456 (const_string "multiple")
8457 (const_string "multiple")
8458 (const_string "multiple")])]
;; SFmode conditional move for soft-float targets: the value lives in
;; core registers, so a plain conditional register mov suffices.
8461 (define_insn "*movsfcc_soft_insn"
8462 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8463 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8464 [(match_operand 4 "cc_register" "") (const_int 0)])
8465 (match_operand:SF 1 "s_register_operand" "0,r")
8466 (match_operand:SF 2 "s_register_operand" "r,0")))]
8467 "TARGET_ARM && TARGET_SOFT_FLOAT"
8471 [(set_attr "conds" "use")
8472 (set_attr "type" "mov_reg")]
8476 ;; Jump and linkage insns
;; Standard unconditional-jump expander.
8478 (define_expand "jump"
8480 (label_ref (match_operand 0 "" "")))]
;; Unconditional branch; the length attribute selects the 16-bit Thumb-2
;; encoding when the target is within the narrow 'b' range.
8485 (define_insn "*arm_jump"
8487 (label_ref (match_operand 0 "" "")))]
8491 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8493 arm_ccfsm_state += 2;
8496 return \"b%?\\t%l0\";
8499 [(set_attr "predicable" "yes")
8500 (set (attr "length")
8502 (and (match_test "TARGET_THUMB2")
8503 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8504 (le (minus (match_dup 0) (pc)) (const_int 2048))))
8507 (set_attr "type" "branch")]
;; Expand a call with no return value: force long calls through a
;; register, load the FDPIC function descriptor for indirect calls,
;; route CMSE non-secure calls to nonsecure_call_internal, and restore
;; the FDPIC register (r9) afterwards when needed.
8510 (define_expand "call"
8511 [(parallel [(call (match_operand 0 "memory_operand")
8512 (match_operand 1 "general_operand"))
8513 (use (match_operand 2 "" ""))
8514 (clobber (reg:SI LR_REGNUM))])]
8519 tree addr = MEM_EXPR (operands[0]);
8521 /* In an untyped call, we can get NULL for operand 2. */
8522 if (operands[2] == NULL_RTX)
8523 operands[2] = const0_rtx;
8525 /* Decide if we should generate indirect calls by loading the
8526 32-bit address of the callee into a register before performing the
8528 callee = XEXP (operands[0], 0);
8529 if (GET_CODE (callee) == SYMBOL_REF
8530 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8532 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8534 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
8535 /* Indirect call: set r9 with FDPIC value of callee. */
8536 XEXP (operands[0], 0)
8537 = arm_load_function_descriptor (XEXP (operands[0], 0));
8539 if (detect_cmse_nonsecure_call (addr))
8541 pat = gen_nonsecure_call_internal (operands[0], operands[1],
8543 emit_call_insn (pat);
8547 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8548 arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
8551 /* Restore FDPIC register (r9) after call. */
8554 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8555 rtx initial_fdpic_reg
8556 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
8558 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
8559 initial_fdpic_reg));
;; Reload the PIC/FDPIC register (operand 0) from its saved value
;; (register or memory) after a call; wrapped in an unspec so it is not
;; optimized away.
8566 (define_insn "restore_pic_register_after_call"
8567 [(set (match_operand:SI 0 "s_register_operand" "+r,r")
8568 (unspec:SI [(match_dup 0)
8569 (match_operand:SI 1 "nonimmediate_operand" "r,m")]
8570 UNSPEC_PIC_RESTORE))]
;; Internal call expander matched by the *call_* insn patterns below.
8577 (define_expand "call_internal"
8578 [(parallel [(call (match_operand 0 "memory_operand")
8579 (match_operand 1 "general_operand"))
8580 (use (match_operand 2 "" ""))
8581 (clobber (reg:SI LR_REGNUM))])])
;; CMSE non-secure call (UNSPEC_NONSECURE_MEM): without the FPCXT
;; extension the target address must be in r4 for the library thunk.
8583 (define_expand "nonsecure_call_internal"
8584 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
8585 UNSPEC_NONSECURE_MEM)
8586 (match_operand 1 "general_operand"))
8587 (use (match_operand 2 "" ""))
8588 (clobber (reg:SI LR_REGNUM))])]
8591 rtx addr = XEXP (operands[0], 0);
8592 rtx tmp = REG_P (addr) ? addr : force_reg (SImode, addr);
8594 if (!TARGET_HAVE_FPCXT_CMSE)
8596 rtx r4 = gen_rtx_REG (SImode, R4_REGNUM);
8597 emit_move_insn (r4, tmp);
8602 operands[0] = replace_equiv_address (operands[0], tmp);
;; Indirect call through a register on ARMv5T+ (single blx insn).
8606 (define_insn "*call_reg_armv5"
8607 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8608 (match_operand 1 "" ""))
8609 (use (match_operand 2 "" ""))
8610 (clobber (reg:SI LR_REGNUM))]
8611 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
8613 [(set_attr "type" "call")]
;; Indirect call through a register on pre-ARMv5T targets; sequence
;; produced by output_call (length is the 12-byte worst case).
8616 (define_insn "*call_reg_arm"
8617 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8618 (match_operand 1 "" ""))
8619 (use (match_operand 2 "" ""))
8620 (clobber (reg:SI LR_REGNUM))]
8621 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
8623 return output_call (operands);
8625 ;; length is worst case, normally it is only two
8626 [(set_attr "length" "12")
8627 (set_attr "type" "call")]
;; Expand a value-returning call; mirrors the "call" expander (long-call
;; handling, FDPIC descriptor loading, CMSE routing, r9 restore) with
;; the callee in operand 1 and the result in operand 0.
8631 (define_expand "call_value"
8632 [(parallel [(set (match_operand 0 "" "")
8633 (call (match_operand 1 "memory_operand")
8634 (match_operand 2 "general_operand")))
8635 (use (match_operand 3 "" ""))
8636 (clobber (reg:SI LR_REGNUM))])]
8641 tree addr = MEM_EXPR (operands[1]);
8643 /* In an untyped call, we can get NULL for operand 2. */
8644 if (operands[3] == 0)
8645 operands[3] = const0_rtx;
8647 /* Decide if we should generate indirect calls by loading the
8648 32-bit address of the callee into a register before performing the
8650 callee = XEXP (operands[1], 0);
8651 if (GET_CODE (callee) == SYMBOL_REF
8652 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8654 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8656 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
8657 /* Indirect call: set r9 with FDPIC value of callee. */
8658 XEXP (operands[1], 0)
8659 = arm_load_function_descriptor (XEXP (operands[1], 0));
8661 if (detect_cmse_nonsecure_call (addr))
8663 pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
8664 operands[2], operands[3]);
8665 emit_call_insn (pat);
8669 pat = gen_call_value_internal (operands[0], operands[1],
8670 operands[2], operands[3]);
8671 arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
8674 /* Restore FDPIC register (r9) after call. */
8677 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8678 rtx initial_fdpic_reg
8679 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
8681 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
8682 initial_fdpic_reg));
;; Internal value-returning call expander matched by the
;; *call_value_* insn patterns below.
8689 (define_expand "call_value_internal"
8690 [(parallel [(set (match_operand 0 "" "")
8691 (call (match_operand 1 "memory_operand")
8692 (match_operand 2 "general_operand")))
8693 (use (match_operand 3 "" ""))
8694 (clobber (reg:SI LR_REGNUM))])])
;; CMSE non-secure value-returning call; without FPCXT the target
;; address is copied to r4, as in nonsecure_call_internal.
8696 (define_expand "nonsecure_call_value_internal"
8697 [(parallel [(set (match_operand 0 "" "")
8698 (call (unspec:SI [(match_operand 1 "memory_operand")]
8699 UNSPEC_NONSECURE_MEM)
8700 (match_operand 2 "general_operand")))
8701 (use (match_operand 3 "" ""))
8702 (clobber (reg:SI LR_REGNUM))])]
8706 if (!TARGET_HAVE_FPCXT_CMSE)
8709 copy_to_suggested_reg (XEXP (operands[1], 0),
8710 gen_rtx_REG (SImode, R4_REGNUM),
8713 operands[1] = replace_equiv_address (operands[1], tmp);
;; Value-returning indirect call through a register on ARMv5T+ (blx).
8717 (define_insn "*call_value_reg_armv5"
8718 [(set (match_operand 0 "" "")
8719 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8720 (match_operand 2 "" "")))
8721 (use (match_operand 3 "" ""))
8722 (clobber (reg:SI LR_REGNUM))]
8723 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
8725 [(set_attr "type" "call")]
;; Value-returning indirect call on pre-ARMv5T targets via output_call
;; (12-byte worst-case sequence).
8728 (define_insn "*call_value_reg_arm"
8729 [(set (match_operand 0 "" "")
8730 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8731 (match_operand 2 "" "")))
8732 (use (match_operand 3 "" ""))
8733 (clobber (reg:SI LR_REGNUM))]
8734 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
8736 return output_call (&operands[1]);
8738 [(set_attr "length" "12")
8739 (set_attr "type" "call")]
8742 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8743 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
;; Direct call to a SYMBOL_REF (not a long call): emits bl, or blx when
;; a local callee allows switching instruction set at the call site;
;; appends (PLT) when NEED_PLT_RELOC.
8745 (define_insn "*call_symbol"
8746 [(call (mem:SI (match_operand:SI 0 "" ""))
8747 (match_operand 1 "" ""))
8748 (use (match_operand 2 "" ""))
8749 (clobber (reg:SI LR_REGNUM))]
8751 && !SIBLING_CALL_P (insn)
8752 && (GET_CODE (operands[0]) == SYMBOL_REF)
8753 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8756 rtx op = operands[0];
8758 /* Switch mode now when possible. */
8759 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8760 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8761 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
8763 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8765 [(set_attr "type" "call")]
;; Value-returning direct call to a SYMBOL_REF; same bl/blx selection
;; as *call_symbol.
8768 (define_insn "*call_value_symbol"
8769 [(set (match_operand 0 "" "")
8770 (call (mem:SI (match_operand:SI 1 "" ""))
8771 (match_operand:SI 2 "" "")))
8772 (use (match_operand 3 "" ""))
8773 (clobber (reg:SI LR_REGNUM))]
8775 && !SIBLING_CALL_P (insn)
8776 && (GET_CODE (operands[1]) == SYMBOL_REF)
8777 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8780 rtx op = operands[1];
8782 /* Switch mode now when possible. */
8783 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8784 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8785 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
8787 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8789 [(set_attr "type" "call")]
;; Internal sibling-call expander matched by *sibcall_insn.
8792 (define_expand "sibcall_internal"
8793 [(parallel [(call (match_operand 0 "memory_operand")
8794 (match_operand 1 "general_operand"))
8796 (use (match_operand 2 "" ""))])])
8798 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
;; Expand a sibling (tail) call: force non-register, non-symbol targets
;; and long-call symbols into a register, then emit via
;; sibcall_internal / arm_emit_call_insn.
8799 (define_expand "sibcall"
8800 [(parallel [(call (match_operand 0 "memory_operand")
8801 (match_operand 1 "general_operand"))
8803 (use (match_operand 2 "" ""))])]
8809 if ((!REG_P (XEXP (operands[0], 0))
8810 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
8811 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
8812 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
8813 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
8815 if (operands[2] == NULL_RTX)
8816 operands[2] = const0_rtx;
8818 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
8819 arm_emit_call_insn (pat, operands[0], true);
;; Internal value-returning sibling-call expander matched by
;; *sibcall_value_insn.
8824 (define_expand "sibcall_value_internal"
8825 [(parallel [(set (match_operand 0 "" "")
8826 (call (match_operand 1 "memory_operand")
8827 (match_operand 2 "general_operand")))
8829 (use (match_operand 3 "" ""))])])
;; Expand a value-returning sibling call; mirrors "sibcall" with the
;; callee in operand 1 and the result in operand 0.
8831 (define_expand "sibcall_value"
8832 [(parallel [(set (match_operand 0 "" "")
8833 (call (match_operand 1 "memory_operand")
8834 (match_operand 2 "general_operand")))
8836 (use (match_operand 3 "" ""))])]
8842 if ((!REG_P (XEXP (operands[1], 0))
8843 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
8844 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
8845 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
8846 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
8848 if (operands[3] == NULL_RTX)
8849 operands[3] = const0_rtx;
8851 pat = gen_sibcall_value_internal (operands[0], operands[1],
8852 operands[2], operands[3]);
8853 arm_emit_call_insn (pat, operands[1], true);
;; Sibling-call insn: direct branch for symbols (alternative 1, with
;; optional PLT), bx or mov pc for register targets depending on
;; architecture support.
8858 (define_insn "*sibcall_insn"
8859 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
8860 (match_operand 1 "" ""))
8862 (use (match_operand 2 "" ""))]
8863 "TARGET_32BIT && SIBLING_CALL_P (insn)"
8865 if (which_alternative == 1)
8866 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8869 if (arm_arch5t || arm_arch4t)
8870 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
8872 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
8875 [(set_attr "type" "call")]
;; Value-returning sibling-call insn; same b / bx / mov-pc selection as
;; *sibcall_insn.
8878 (define_insn "*sibcall_value_insn"
8879 [(set (match_operand 0 "" "")
8880 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
8881 (match_operand 2 "" "")))
8883 (use (match_operand 3 "" ""))]
8884 "TARGET_32BIT && SIBLING_CALL_P (insn)"
8886 if (which_alternative == 1)
8887 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8890 if (arm_arch5t || arm_arch4t)
8891 return \"bx%?\\t%1\";
8893 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
8896 [(set_attr "type" "call")]
;; Expand a (simple_)return; on Thumb-2 non-stack-aligned normal
;; functions this is handled by thumb2_expand_return.
8899 (define_expand "<return_str>return"
8901 "(TARGET_ARM || (TARGET_THUMB2
8902 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
8903 && !IS_STACKALIGN (arm_current_func_type ())))
8904 <return_cond_false>"
8909 thumb2_expand_return (<return_simple_p>);
8916 ;; Often the return insn will be the same as loading from memory, so set attr
;; Unconditional ARM-mode return via output_return_instruction; typed
;; load_4 since the epilogue often restores from memory (see comment
;; above).
8917 (define_insn "*arm_return"
8919 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8922 if (arm_ccfsm_state == 2)
8924 arm_ccfsm_state += 2;
8927 return output_return_instruction (const_true_rtx, true, false, false);
8929 [(set_attr "type" "load_4")
8930 (set_attr "length" "12")
8931 (set_attr "predicable" "yes")]
;; Conditional return when the label is in the then-arm.
8934 (define_insn "*cond_<return_str>return"
8936 (if_then_else (match_operator 0 "arm_comparison_operator"
8937 [(match_operand 1 "cc_register" "") (const_int 0)])
8940 "TARGET_ARM <return_cond_true>"
8943 if (arm_ccfsm_state == 2)
8945 arm_ccfsm_state += 2;
8948 return output_return_instruction (operands[0], true, false,
8951 [(set_attr "conds" "use")
8952 (set_attr "length" "12")
8953 (set_attr "type" "load_4")]
;; Conditional return with the condition inverted (return in the
;; else-arm); note the 'true' reverse argument to
;; output_return_instruction.
8956 (define_insn "*cond_<return_str>return_inverted"
8958 (if_then_else (match_operator 0 "arm_comparison_operator"
8959 [(match_operand 1 "cc_register" "") (const_int 0)])
8962 "TARGET_ARM <return_cond_true>"
8965 if (arm_ccfsm_state == 2)
8967 arm_ccfsm_state += 2;
8970 return output_return_instruction (operands[0], true, true,
8973 [(set_attr "conds" "use")
8974 (set_attr "length" "12")
8975 (set_attr "type" "load_4")]
;; Simple return (no epilogue restores): single 4-byte branch.
8978 (define_insn "*arm_simple_return"
8983 if (arm_ccfsm_state == 2)
8985 arm_ccfsm_state += 2;
8988 return output_return_instruction (const_true_rtx, true, false, true);
8990 [(set_attr "type" "branch")
8991 (set_attr "length" "4")
8992 (set_attr "predicable" "yes")]
8995 ;; Generate a sequence of instructions to determine if the processor is
8996 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8999 (define_expand "return_addr_mask"
9001 (compare:CC_NZ (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
9003 (set (match_operand:SI 0 "s_register_operand")
9004 (if_then_else:SI (eq (match_dup 1) (const_int 0))
9006 (const_int 67108860)))] ; 0x03fffffc
9009 operands[1] = gen_rtx_REG (CC_NZmode, CC_REGNUM);
9012 (define_insn "*check_arch2"
9013 [(set (match_operand:CC_NZ 0 "cc_register" "")
9014 (compare:CC_NZ (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
9017 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
9018 [(set_attr "length" "8")
9019 (set_attr "conds" "set")
9020 (set_attr "type" "multiple")]
9023 ;; Call subroutine returning any type.
9025 (define_expand "untyped_call"
9026 [(parallel [(call (match_operand 0 "" "")
9028 (match_operand 1 "" "")
9029 (match_operand 2 "" "")])]
9030 "TARGET_EITHER && !TARGET_FDPIC"
9034 rtx par = gen_rtx_PARALLEL (VOIDmode,
9035 rtvec_alloc (XVECLEN (operands[2], 0)));
9036 rtx addr = gen_reg_rtx (Pmode);
9040 emit_move_insn (addr, XEXP (operands[1], 0));
9041 mem = change_address (operands[1], BLKmode, addr);
9043 for (i = 0; i < XVECLEN (operands[2], 0); i++)
9045 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
9047 /* Default code only uses r0 as a return value, but we could
9048 be using anything up to 4 registers. */
9049 if (REGNO (src) == R0_REGNUM)
9050 src = gen_rtx_REG (TImode, R0_REGNUM);
9052 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
9054 size += GET_MODE_SIZE (GET_MODE (src));
9057 emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
9061 for (i = 0; i < XVECLEN (par, 0); i++)
9063 HOST_WIDE_INT offset = 0;
9064 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
9067 emit_move_insn (addr, plus_constant (Pmode, addr, size));
9069 mem = change_address (mem, GET_MODE (reg), NULL);
9070 if (REGNO (reg) == R0_REGNUM)
9072 /* On thumb we have to use a write-back instruction. */
9073 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
9074 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
9075 size = TARGET_ARM ? 16 : 0;
9079 emit_move_insn (mem, reg);
9080 size = GET_MODE_SIZE (GET_MODE (reg));
9084 /* The optimizer does not know that the call sets the function value
9085 registers we stored in the result block. We avoid problems by
9086 claiming that all hard registers are used and clobbered at this
9088 emit_insn (gen_blockage ());
9094 (define_expand "untyped_return"
9095 [(match_operand:BLK 0 "memory_operand")
9096 (match_operand 1 "" "")]
9097 "TARGET_EITHER && !TARGET_FDPIC"
9101 rtx addr = gen_reg_rtx (Pmode);
9105 emit_move_insn (addr, XEXP (operands[0], 0));
9106 mem = change_address (operands[0], BLKmode, addr);
9108 for (i = 0; i < XVECLEN (operands[1], 0); i++)
9110 HOST_WIDE_INT offset = 0;
9111 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
9114 emit_move_insn (addr, plus_constant (Pmode, addr, size));
9116 mem = change_address (mem, GET_MODE (reg), NULL);
9117 if (REGNO (reg) == R0_REGNUM)
9119 /* On thumb we have to use a write-back instruction. */
9120 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
9121 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
9122 size = TARGET_ARM ? 16 : 0;
9126 emit_move_insn (reg, mem);
9127 size = GET_MODE_SIZE (GET_MODE (reg));
9131 /* Emit USE insns before the return. */
9132 for (i = 0; i < XVECLEN (operands[1], 0); i++)
9133 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
9135 /* Construct the return. */
9136 expand_naked_return ();
9142 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
9143 ;; all of memory. This blocks insns from being moved across this point.
9145 (define_insn "blockage"
9146 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
9149 [(set_attr "length" "0")
9150 (set_attr "type" "block")]
9153 ;; Since we hard code r0 here use the 'o' constraint to prevent
9154 ;; provoking undefined behaviour in the hardware with putting out
9155 ;; auto-increment operations with potentially r0 as the base register.
9156 (define_insn "probe_stack"
9157 [(set (match_operand:SI 0 "memory_operand" "=o")
9158 (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
9161 [(set_attr "type" "store_4")
9162 (set_attr "predicable" "yes")]
9165 (define_insn "probe_stack_range"
9166 [(set (match_operand:SI 0 "register_operand" "=r")
9167 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
9168 (match_operand:SI 2 "register_operand" "r")]
9169 VUNSPEC_PROBE_STACK_RANGE))]
9172 return output_probe_stack_range (operands[0], operands[2]);
9174 [(set_attr "type" "multiple")
9175 (set_attr "conds" "clob")]
9178 ;; Named patterns for stack smashing protection.
9179 (define_expand "stack_protect_combined_set"
9181 [(set (match_operand:SI 0 "memory_operand")
9182 (unspec:SI [(match_operand:SI 1 "guard_operand")]
9184 (clobber (match_scratch:SI 2 ""))
9185 (clobber (match_scratch:SI 3 ""))])]
9190 ;; Use a separate insn from the above expand to be able to have the mem outside
9191 ;; the operand #1 when register allocation comes. This is needed to avoid LRA
9192 ;; try to reload the guard since we need to control how PIC access is done in
9193 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
9194 ;; legitimize_pic_address ()).
9195 (define_insn_and_split "*stack_protect_combined_set_insn"
9196 [(set (match_operand:SI 0 "memory_operand" "=m,m")
9197 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
9199 (clobber (match_scratch:SI 2 "=&l,&r"))
9200 (clobber (match_scratch:SI 3 "=&l,&r"))]
9204 [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
9206 (clobber (match_dup 2))])]
9214 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
9216 pic_reg = operands[3];
9218 /* Forces recomputing of GOT base now. */
9219 legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
9220 true /*compute_now*/);
9224 if (address_operand (operands[1], SImode))
9225 operands[2] = operands[1];
9228 rtx mem = force_const_mem (SImode, operands[1]);
9229 if (!general_operand (mem, SImode))
9231 emit_move_insn (operands[2], XEXP (mem, 0));
9232 mem = replace_equiv_address (mem, operands[2], false);
9234 emit_move_insn (operands[2], mem);
9238 [(set_attr "arch" "t1,32")]
9241 ;; DO NOT SPLIT THIS INSN. It's important for security reasons that the
9242 ;; canary value does not live beyond the life of this sequence.
9243 (define_insn "*stack_protect_set_insn"
9244 [(set (match_operand:SI 0 "memory_operand" "=m,m")
9245 (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
9247 (clobber (match_dup 1))]
9250 ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
9251 ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
9252 [(set_attr "length" "8,12")
9253 (set_attr "conds" "clob,nocond")
9254 (set_attr "type" "multiple")
9255 (set_attr "arch" "t1,32")]
9258 (define_expand "stack_protect_combined_test"
9262 (eq (match_operand:SI 0 "memory_operand")
9263 (unspec:SI [(match_operand:SI 1 "guard_operand")]
9265 (label_ref (match_operand 2))
9267 (clobber (match_scratch:SI 3 ""))
9268 (clobber (match_scratch:SI 4 ""))
9269 (clobber (reg:CC CC_REGNUM))])]
9274 ;; Use a separate insn from the above expand to be able to have the mem outside
9275 ;; the operand #1 when register allocation comes. This is needed to avoid LRA
9276 ;; try to reload the guard since we need to control how PIC access is done in
9277 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
9278 ;; legitimize_pic_address ()).
9279 (define_insn_and_split "*stack_protect_combined_test_insn"
9282 (eq (match_operand:SI 0 "memory_operand" "m,m")
9283 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
9285 (label_ref (match_operand 2))
9287 (clobber (match_scratch:SI 3 "=&l,&r"))
9288 (clobber (match_scratch:SI 4 "=&l,&r"))
9289 (clobber (reg:CC CC_REGNUM))]
9302 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
9304 pic_reg = operands[4];
9306 /* Forces recomputing of GOT base now. */
9307 legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
9308 true /*compute_now*/);
9312 if (address_operand (operands[1], SImode))
9313 operands[3] = operands[1];
9316 rtx mem = force_const_mem (SImode, operands[1]);
9317 if (!general_operand (mem, SImode))
9319 emit_move_insn (operands[3], XEXP (mem, 0));
9320 mem = replace_equiv_address (mem, operands[3], false);
9322 emit_move_insn (operands[3], mem);
9327 emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
9329 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
9330 eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
9331 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
9335 emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
9337 eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
9338 emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
9343 [(set_attr "arch" "t1,32")]
9346 ;; DO NOT SPLIT THIS PATTERN. It is important for security reasons that the
9347 ;; canary value does not live beyond the end of this sequence.
9348 (define_insn "arm_stack_protect_test_insn"
9349 [(set (reg:CC_Z CC_REGNUM)
9350 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
9351 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
9354 (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
9355 (clobber (match_dup 2))]
9357 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0\;mov\t%2, #0"
9358 [(set_attr "length" "12,16")
9359 (set_attr "conds" "set")
9360 (set_attr "type" "multiple")
9361 (set_attr "arch" "t,32")]
9364 (define_expand "casesi"
9365 [(match_operand:SI 0 "s_register_operand") ; index to jump on
9366 (match_operand:SI 1 "const_int_operand") ; lower bound
9367 (match_operand:SI 2 "const_int_operand") ; total range
9368 (match_operand:SI 3 "" "") ; table label
9369 (match_operand:SI 4 "" "")] ; Out of range label
9370 "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
9373 enum insn_code code;
9374 if (operands[1] != const0_rtx)
9376 rtx reg = gen_reg_rtx (SImode);
9378 emit_insn (gen_addsi3 (reg, operands[0],
9379 gen_int_mode (-INTVAL (operands[1]),
9385 code = CODE_FOR_arm_casesi_internal;
9386 else if (TARGET_THUMB1)
9387 code = CODE_FOR_thumb1_casesi_internal_pic;
9389 code = CODE_FOR_thumb2_casesi_internal_pic;
9391 code = CODE_FOR_thumb2_casesi_internal;
9393 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
9394 operands[2] = force_reg (SImode, operands[2]);
9396 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
9397 operands[3], operands[4]));
9402 ;; The USE in this pattern is needed to tell flow analysis that this is
9403 ;; a CASESI insn. It has no other purpose.
9404 (define_expand "arm_casesi_internal"
9405 [(parallel [(set (pc)
9407 (leu (match_operand:SI 0 "s_register_operand")
9408 (match_operand:SI 1 "arm_rhs_operand"))
9410 (label_ref:SI (match_operand 3 ""))))
9411 (clobber (reg:CC CC_REGNUM))
9412 (use (label_ref:SI (match_operand 2 "")))])]
9415 operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
9416 operands[4] = gen_rtx_PLUS (SImode, operands[4],
9417 gen_rtx_LABEL_REF (SImode, operands[2]));
9418 operands[4] = gen_rtx_MEM (SImode, operands[4]);
9419 MEM_READONLY_P (operands[4]) = 1;
9420 MEM_NOTRAP_P (operands[4]) = 1;
9423 (define_insn "*arm_casesi_internal"
9424 [(parallel [(set (pc)
9426 (leu (match_operand:SI 0 "s_register_operand" "r")
9427 (match_operand:SI 1 "arm_rhs_operand" "rI"))
9428 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
9429 (label_ref:SI (match_operand 2 "" ""))))
9430 (label_ref:SI (match_operand 3 "" ""))))
9431 (clobber (reg:CC CC_REGNUM))
9432 (use (label_ref:SI (match_dup 2)))])]
9436 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
9437 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
9439 [(set_attr "conds" "clob")
9440 (set_attr "length" "12")
9441 (set_attr "type" "multiple")]
9444 (define_expand "indirect_jump"
9446 (match_operand:SI 0 "s_register_operand"))]
9449 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
9450 address and use bx. */
9454 tmp = gen_reg_rtx (SImode);
9455 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
9461 ;; NB Never uses BX.
9462 (define_insn "*arm_indirect_jump"
9464 (match_operand:SI 0 "s_register_operand" "r"))]
9466 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
9467 [(set_attr "predicable" "yes")
9468 (set_attr "type" "branch")]
9471 (define_insn "*load_indirect_jump"
9473 (match_operand:SI 0 "memory_operand" "m"))]
9475 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
9476 [(set_attr "type" "load_4")
9477 (set_attr "pool_range" "4096")
9478 (set_attr "neg_pool_range" "4084")
9479 (set_attr "predicable" "yes")]
9489 [(set (attr "length")
9490 (if_then_else (eq_attr "is_thumb" "yes")
9493 (set_attr "type" "mov_reg")]
9497 [(trap_if (const_int 1) (const_int 0))]
9501 return \".inst\\t0xe7f000f0\";
9503 return \".inst\\t0xdeff\";
9505 [(set (attr "length")
9506 (if_then_else (eq_attr "is_thumb" "yes")
9509 (set_attr "type" "trap")
9510 (set_attr "conds" "unconditional")]
9514 ;; Patterns to allow combination of arithmetic, cond code and shifts
9516 (define_insn "*<arith_shift_insn>_multsi"
9517 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9519 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
9520 (match_operand:SI 3 "power_of_two_operand" ""))
9521 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
9523 "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
9524 [(set_attr "predicable" "yes")
9525 (set_attr "shift" "2")
9526 (set_attr "arch" "a,t2")
9527 (set_attr "autodetect_type" "alu_shift_mul_op3")])
9529 (define_insn "*<arith_shift_insn>_shiftsi"
9530 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9532 (match_operator:SI 2 "shift_nomul_operator"
9533 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9534 (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
9535 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
9536 "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
9537 "<arith_shift_insn>%?\\t%0, %1, %3%S2"
9538 [(set_attr "predicable" "yes")
9539 (set_attr "shift" "3")
9540 (set_attr "arch" "a,t2,a")
9541 (set_attr "autodetect_type" "alu_shift_operator2")])
9544 [(set (match_operand:SI 0 "s_register_operand" "")
9545 (match_operator:SI 1 "shiftable_operator"
9546 [(match_operator:SI 2 "shiftable_operator"
9547 [(match_operator:SI 3 "shift_operator"
9548 [(match_operand:SI 4 "s_register_operand" "")
9549 (match_operand:SI 5 "reg_or_int_operand" "")])
9550 (match_operand:SI 6 "s_register_operand" "")])
9551 (match_operand:SI 7 "arm_rhs_operand" "")]))
9552 (clobber (match_operand:SI 8 "s_register_operand" ""))]
9555 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9558 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
9561 (define_insn "*arith_shiftsi_compare0"
9562 [(set (reg:CC_NZ CC_REGNUM)
9564 (match_operator:SI 1 "shiftable_operator"
9565 [(match_operator:SI 3 "shift_operator"
9566 [(match_operand:SI 4 "s_register_operand" "r,r")
9567 (match_operand:SI 5 "shift_amount_operand" "M,r")])
9568 (match_operand:SI 2 "s_register_operand" "r,r")])
9570 (set (match_operand:SI 0 "s_register_operand" "=r,r")
9571 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9574 "%i1s%?\\t%0, %2, %4%S3"
9575 [(set_attr "conds" "set")
9576 (set_attr "shift" "4")
9577 (set_attr "arch" "32,a")
9578 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9580 (define_insn "*arith_shiftsi_compare0_scratch"
9581 [(set (reg:CC_NZ CC_REGNUM)
9583 (match_operator:SI 1 "shiftable_operator"
9584 [(match_operator:SI 3 "shift_operator"
9585 [(match_operand:SI 4 "s_register_operand" "r,r")
9586 (match_operand:SI 5 "shift_amount_operand" "M,r")])
9587 (match_operand:SI 2 "s_register_operand" "r,r")])
9589 (clobber (match_scratch:SI 0 "=r,r"))]
9591 "%i1s%?\\t%0, %2, %4%S3"
9592 [(set_attr "conds" "set")
9593 (set_attr "shift" "4")
9594 (set_attr "arch" "32,a")
9595 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9597 (define_insn "*sub_shiftsi"
9598 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9599 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9600 (match_operator:SI 2 "shift_operator"
9601 [(match_operand:SI 3 "s_register_operand" "r,r")
9602 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
9604 "sub%?\\t%0, %1, %3%S2"
9605 [(set_attr "predicable" "yes")
9606 (set_attr "predicable_short_it" "no")
9607 (set_attr "shift" "3")
9608 (set_attr "arch" "32,a")
9609 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9611 (define_insn "*sub_shiftsi_compare0"
9612 [(set (reg:CC_NZ CC_REGNUM)
9614 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9615 (match_operator:SI 2 "shift_operator"
9616 [(match_operand:SI 3 "s_register_operand" "r,r")
9617 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
9619 (set (match_operand:SI 0 "s_register_operand" "=r,r")
9620 (minus:SI (match_dup 1)
9621 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
9623 "subs%?\\t%0, %1, %3%S2"
9624 [(set_attr "conds" "set")
9625 (set_attr "shift" "3")
9626 (set_attr "arch" "32,a")
9627 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9629 (define_insn "*sub_shiftsi_compare0_scratch"
9630 [(set (reg:CC_NZ CC_REGNUM)
9632 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9633 (match_operator:SI 2 "shift_operator"
9634 [(match_operand:SI 3 "s_register_operand" "r,r")
9635 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
9637 (clobber (match_scratch:SI 0 "=r,r"))]
9639 "subs%?\\t%0, %1, %3%S2"
9640 [(set_attr "conds" "set")
9641 (set_attr "shift" "3")
9642 (set_attr "arch" "32,a")
9643 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9646 (define_insn_and_split "*and_scc"
9647 [(set (match_operand:SI 0 "s_register_operand" "=r")
9648 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9649 [(match_operand 2 "cc_register" "") (const_int 0)])
9650 (match_operand:SI 3 "s_register_operand" "r")))]
9652 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
9653 "&& reload_completed"
9654 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
9655 (cond_exec (match_dup 4) (set (match_dup 0)
9656 (and:SI (match_dup 3) (const_int 1))))]
9658 machine_mode mode = GET_MODE (operands[2]);
9659 enum rtx_code rc = GET_CODE (operands[1]);
9661 /* Note that operands[4] is the same as operands[1],
9662 but with VOIDmode as the result. */
9663 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9664 if (mode == CCFPmode || mode == CCFPEmode)
9665 rc = reverse_condition_maybe_unordered (rc);
9667 rc = reverse_condition (rc);
9668 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9670 [(set_attr "conds" "use")
9671 (set_attr "type" "multiple")
9672 (set_attr "length" "8")]
9675 (define_insn_and_split "*ior_scc"
9676 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9677 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
9678 [(match_operand 2 "cc_register" "") (const_int 0)])
9679 (match_operand:SI 3 "s_register_operand" "0,?r")))]
9684 "&& reload_completed
9685 && REGNO (operands [0]) != REGNO (operands[3])"
9686 ;; && which_alternative == 1
9687 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
9688 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
9689 (cond_exec (match_dup 4) (set (match_dup 0)
9690 (ior:SI (match_dup 3) (const_int 1))))]
9692 machine_mode mode = GET_MODE (operands[2]);
9693 enum rtx_code rc = GET_CODE (operands[1]);
9695 /* Note that operands[4] is the same as operands[1],
9696 but with VOIDmode as the result. */
9697 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9698 if (mode == CCFPmode || mode == CCFPEmode)
9699 rc = reverse_condition_maybe_unordered (rc);
9701 rc = reverse_condition (rc);
9702 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9704 [(set_attr "conds" "use")
9705 (set_attr "length" "4,8")
9706 (set_attr "type" "logic_imm,multiple")]
9709 ; A series of splitters for the compare_scc pattern below. Note that
9710 ; order is important.
9712 [(set (match_operand:SI 0 "s_register_operand" "")
9713 (lt:SI (match_operand:SI 1 "s_register_operand" "")
9715 (clobber (reg:CC CC_REGNUM))]
9716 "TARGET_32BIT && reload_completed"
9717 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
9720 [(set (match_operand:SI 0 "s_register_operand" "")
9721 (ge:SI (match_operand:SI 1 "s_register_operand" "")
9723 (clobber (reg:CC CC_REGNUM))]
9724 "TARGET_32BIT && reload_completed"
9725 [(set (match_dup 0) (not:SI (match_dup 1)))
9726 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
9729 [(set (match_operand:SI 0 "s_register_operand" "")
9730 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9732 (clobber (reg:CC CC_REGNUM))]
9733 "arm_arch5t && TARGET_32BIT"
9734 [(set (match_dup 0) (clz:SI (match_dup 1)))
9735 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9739 [(set (match_operand:SI 0 "s_register_operand" "")
9740 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9742 (clobber (reg:CC CC_REGNUM))]
9743 "TARGET_32BIT && reload_completed"
9745 [(set (reg:CC CC_REGNUM)
9746 (compare:CC (const_int 1) (match_dup 1)))
9748 (minus:SI (const_int 1) (match_dup 1)))])
9749 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
9750 (set (match_dup 0) (const_int 0)))])
9753 [(set (match_operand:SI 0 "s_register_operand" "")
9754 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9755 (match_operand:SI 2 "const_int_operand" "")))
9756 (clobber (reg:CC CC_REGNUM))]
9757 "TARGET_32BIT && reload_completed"
9759 [(set (reg:CC CC_REGNUM)
9760 (compare:CC (match_dup 1) (match_dup 2)))
9761 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
9762 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
9763 (set (match_dup 0) (const_int 1)))]
9765 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
9769 [(set (match_operand:SI 0 "s_register_operand" "")
9770 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9771 (match_operand:SI 2 "arm_add_operand" "")))
9772 (clobber (reg:CC CC_REGNUM))]
9773 "TARGET_32BIT && reload_completed"
9775 [(set (reg:CC_NZ CC_REGNUM)
9776 (compare:CC_NZ (minus:SI (match_dup 1) (match_dup 2))
9778 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
9779 (cond_exec (ne:CC_NZ (reg:CC_NZ CC_REGNUM) (const_int 0))
9780 (set (match_dup 0) (const_int 1)))])
9782 (define_insn_and_split "*compare_scc"
9783 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9784 (match_operator:SI 1 "arm_comparison_operator"
9785 [(match_operand:SI 2 "s_register_operand" "r,r")
9786 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9787 (clobber (reg:CC CC_REGNUM))]
9790 "&& reload_completed"
9791 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
9792 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
9793 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
9796 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
9797 operands[2], operands[3]);
9798 enum rtx_code rc = GET_CODE (operands[1]);
9800 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
9802 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9803 if (mode == CCFPmode || mode == CCFPEmode)
9804 rc = reverse_condition_maybe_unordered (rc);
9806 rc = reverse_condition (rc);
9807 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9809 [(set_attr "type" "multiple")]
9812 ;; Attempt to improve the sequence generated by the compare_scc splitters
9813 ;; not to use conditional execution.
9815 ;; Rd = (eq (reg1) (const_int0)) // ARMv5
9819 [(set (reg:CC CC_REGNUM)
9820 (compare:CC (match_operand:SI 1 "register_operand" "")
9822 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9823 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9824 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9825 (set (match_dup 0) (const_int 1)))]
9826 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9827 [(set (match_dup 0) (clz:SI (match_dup 1)))
9828 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9831 ;; Rd = (eq (reg1) (const_int0)) // !ARMv5
9835 [(set (reg:CC CC_REGNUM)
9836 (compare:CC (match_operand:SI 1 "register_operand" "")
9838 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9839 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9840 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9841 (set (match_dup 0) (const_int 1)))
9842 (match_scratch:SI 2 "r")]
9843 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9845 [(set (reg:CC CC_REGNUM)
9846 (compare:CC (const_int 0) (match_dup 1)))
9847 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
9849 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
9850 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
9853 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
9854 ;; sub Rd, Reg1, reg2
9858 [(set (reg:CC CC_REGNUM)
9859 (compare:CC (match_operand:SI 1 "register_operand" "")
9860 (match_operand:SI 2 "arm_rhs_operand" "")))
9861 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9862 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9863 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9864 (set (match_dup 0) (const_int 1)))]
9865 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
9866 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
9867 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
9868 (set (match_dup 0) (clz:SI (match_dup 0)))
9869 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9873 ;; Rd = (eq (reg1) (reg2)) // ! ARMv5 or optimising for size.
9874 ;; sub T1, Reg1, reg2
9878 [(set (reg:CC CC_REGNUM)
9879 (compare:CC (match_operand:SI 1 "register_operand" "")
9880 (match_operand:SI 2 "arm_rhs_operand" "")))
9881 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9882 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9883 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9884 (set (match_dup 0) (const_int 1)))
9885 (match_scratch:SI 3 "r")]
9886 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9887 [(set (match_dup 3) (match_dup 4))
9889 [(set (reg:CC CC_REGNUM)
9890 (compare:CC (const_int 0) (match_dup 3)))
9891 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
9893 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
9894 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
9896 if (CONST_INT_P (operands[2]))
9897 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
9899 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
9902 (define_insn "*cond_move"
9903 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9904 (if_then_else:SI (match_operator 3 "equality_operator"
9905 [(match_operator 4 "arm_comparison_operator"
9906 [(match_operand 5 "cc_register" "") (const_int 0)])
9908 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9909 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9912 if (GET_CODE (operands[3]) == NE)
9914 if (which_alternative != 1)
9915 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9916 if (which_alternative != 0)
9917 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9920 if (which_alternative != 0)
9921 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9922 if (which_alternative != 1)
9923 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9926 [(set_attr "conds" "use")
9927 (set_attr_alternative "type"
9928 [(if_then_else (match_operand 2 "const_int_operand" "")
9929 (const_string "mov_imm")
9930 (const_string "mov_reg"))
9931 (if_then_else (match_operand 1 "const_int_operand" "")
9932 (const_string "mov_imm")
9933 (const_string "mov_reg"))
9934 (const_string "multiple")])
9935 (set_attr "length" "4,4,8")]
9938 (define_insn "*cond_arith"
9939 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9940 (match_operator:SI 5 "shiftable_operator"
9941 [(match_operator:SI 4 "arm_comparison_operator"
9942 [(match_operand:SI 2 "s_register_operand" "r,r")
9943 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9944 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9945 (clobber (reg:CC CC_REGNUM))]
9948 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9949 return \"%i5\\t%0, %1, %2, lsr #31\";
9951 output_asm_insn (\"cmp\\t%2, %3\", operands);
9952 if (GET_CODE (operands[5]) == AND)
9953 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9954 else if (GET_CODE (operands[5]) == MINUS)
9955 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9956 else if (which_alternative != 0)
9957 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9958 return \"%i5%d4\\t%0, %1, #1\";
9960 [(set_attr "conds" "clob")
9961 (set_attr "length" "12")
9962 (set_attr "type" "multiple")]
9965 (define_insn "*cond_sub"
9966 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9967 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9968 (match_operator:SI 4 "arm_comparison_operator"
9969 [(match_operand:SI 2 "s_register_operand" "r,r")
9970 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9971 (clobber (reg:CC CC_REGNUM))]
9974 output_asm_insn (\"cmp\\t%2, %3\", operands);
9975 if (which_alternative != 0)
9976 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9977 return \"sub%d4\\t%0, %1, #1\";
9979 [(set_attr "conds" "clob")
9980 (set_attr "length" "8,12")
9981 (set_attr "type" "multiple")]
9984 (define_insn "*cmp_ite0"
9985 [(set (match_operand 6 "dominant_cc_register" "")
9988 (match_operator 4 "arm_comparison_operator"
9989 [(match_operand:SI 0 "s_register_operand"
9990 "l,l,l,r,r,r,r,r,r")
9991 (match_operand:SI 1 "arm_add_operand"
9992 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9993 (match_operator:SI 5 "arm_comparison_operator"
9994 [(match_operand:SI 2 "s_register_operand"
9995 "l,r,r,l,l,r,r,r,r")
9996 (match_operand:SI 3 "arm_add_operand"
9997 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
10003 static const char * const cmp1[NUM_OF_COND_CMP][2] =
10005 {\"cmp%d5\\t%0, %1\",
10006 \"cmp%d4\\t%2, %3\"},
10007 {\"cmn%d5\\t%0, #%n1\",
10008 \"cmp%d4\\t%2, %3\"},
10009 {\"cmp%d5\\t%0, %1\",
10010 \"cmn%d4\\t%2, #%n3\"},
10011 {\"cmn%d5\\t%0, #%n1\",
10012 \"cmn%d4\\t%2, #%n3\"}
10014 static const char * const cmp2[NUM_OF_COND_CMP][2] =
10019 \"cmn\\t%0, #%n1\"},
10020 {\"cmn\\t%2, #%n3\",
10022 {\"cmn\\t%2, #%n3\",
10023 \"cmn\\t%0, #%n1\"}
10025 static const char * const ite[2] =
10030 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10031 CMP_CMP, CMN_CMP, CMP_CMP,
10032 CMN_CMP, CMP_CMN, CMN_CMN};
10034 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10036 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10037 if (TARGET_THUMB2) {
10038 output_asm_insn (ite[swap], operands);
10040 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10043 [(set_attr "conds" "set")
10044 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10045 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
10046 (set_attr "type" "multiple")
10047 (set_attr_alternative "length"
10053 (if_then_else (eq_attr "is_thumb" "no")
10056 (if_then_else (eq_attr "is_thumb" "no")
10059 (if_then_else (eq_attr "is_thumb" "no")
10062 (if_then_else (eq_attr "is_thumb" "no")
;; Two comparisons combined through an if-then-else into a dominant CC
;; register, where the second comparison must dominate the REVERSE of the
;; first (comparison_dominates_p/reverse_condition below).  Emits a cmp/cmn
;; for each arm; on Thumb-2 an IT block is placed between them.
;; NOTE(review): interior lines of this pattern are elided in this extract.
10067 (define_insn "*cmp_ite1"
10068 [(set (match_operand 6 "dominant_cc_register" "")
10071 (match_operator 4 "arm_comparison_operator"
10072 [(match_operand:SI 0 "s_register_operand"
10073 "l,l,l,r,r,r,r,r,r")
10074 (match_operand:SI 1 "arm_add_operand"
10075 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10076 (match_operator:SI 5 "arm_comparison_operator"
10077 [(match_operand:SI 2 "s_register_operand"
10078 "l,r,r,l,l,r,r,r,r")
10079 (match_operand:SI 3 "arm_add_operand"
10080 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
;; cmp1/cmp2 tables: conditional/unconditional compare forms indexed by
;; [cmp-vs-cmn combination][operand-swap flag].
10086 static const char * const cmp1[NUM_OF_COND_CMP][2] =
10090 {\"cmn\\t%0, #%n1\",
10093 \"cmn\\t%2, #%n3\"},
10094 {\"cmn\\t%0, #%n1\",
10095 \"cmn\\t%2, #%n3\"}
10097 static const char * const cmp2[NUM_OF_COND_CMP][2] =
10099 {\"cmp%d4\\t%2, %3\",
10100 \"cmp%D5\\t%0, %1\"},
10101 {\"cmp%d4\\t%2, %3\",
10102 \"cmn%D5\\t%0, #%n1\"},
10103 {\"cmn%d4\\t%2, #%n3\",
10104 \"cmp%D5\\t%0, %1\"},
10105 {\"cmn%d4\\t%2, #%n3\",
10106 \"cmn%D5\\t%0, #%n1\"}
10108 static const char * const ite[2] =
;; One cmp_idx entry per constraint alternative (9 alternatives).
10113 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10114 CMP_CMP, CMN_CMP, CMP_CMP,
10115 CMN_CMP, CMP_CMN, CMN_CMN};
10117 comparison_dominates_p (GET_CODE (operands[5]),
10118 reverse_condition (GET_CODE (operands[4])));
10120 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10121 if (TARGET_THUMB2) {
10122 output_asm_insn (ite[swap], operands);
10124 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10127 [(set_attr "conds" "set")
10128 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10129 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
10130 (set_attr_alternative "length"
10136 (if_then_else (eq_attr "is_thumb" "no")
10139 (if_then_else (eq_attr "is_thumb" "no")
10142 (if_then_else (eq_attr "is_thumb" "no")
10145 (if_then_else (eq_attr "is_thumb" "no")
10148 (set_attr "type" "multiple")]
;; Conditional-compare combination of two comparisons (AND-style domination:
;; operand 5 dominates operand 4 directly — no reversal, unlike *cmp_ite1).
;; Output picks cmp vs. cmn per alternative via cmp_idx; Thumb-2 adds an IT
;; block between the two compares.
;; NOTE(review): interior lines of this pattern are elided in this extract.
10151 (define_insn "*cmp_and"
10152 [(set (match_operand 6 "dominant_cc_register" "")
10155 (match_operator 4 "arm_comparison_operator"
10156 [(match_operand:SI 0 "s_register_operand"
10157 "l,l,l,r,r,r,r,r,r,r")
10158 (match_operand:SI 1 "arm_add_operand"
10159 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
10160 (match_operator:SI 5 "arm_comparison_operator"
10161 [(match_operand:SI 2 "s_register_operand"
10162 "l,r,r,l,l,r,r,r,r,r")
10163 (match_operand:SI 3 "arm_add_operand"
10164 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
10169 static const char *const cmp1[NUM_OF_COND_CMP][2] =
10171 {\"cmp%d5\\t%0, %1\",
10172 \"cmp%d4\\t%2, %3\"},
10173 {\"cmn%d5\\t%0, #%n1\",
10174 \"cmp%d4\\t%2, %3\"},
10175 {\"cmp%d5\\t%0, %1\",
10176 \"cmn%d4\\t%2, #%n3\"},
10177 {\"cmn%d5\\t%0, #%n1\",
10178 \"cmn%d4\\t%2, #%n3\"}
10180 static const char *const cmp2[NUM_OF_COND_CMP][2] =
10185 \"cmn\\t%0, #%n1\"},
10186 {\"cmn\\t%2, #%n3\",
10188 {\"cmn\\t%2, #%n3\",
10189 \"cmn\\t%0, #%n1\"}
10191 static const char *const ite[2] =
10196 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
10197 CMP_CMP, CMN_CMP, CMP_CMP,
10198 CMP_CMP, CMN_CMP, CMP_CMN,
10201 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10203 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10204 if (TARGET_THUMB2) {
10205 output_asm_insn (ite[swap], operands);
10207 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10210 [(set_attr "conds" "set")
10211 (set_attr "predicable" "no")
10212 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
10213 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
10214 (set_attr_alternative "length"
10221 (if_then_else (eq_attr "is_thumb" "no")
10224 (if_then_else (eq_attr "is_thumb" "no")
10227 (if_then_else (eq_attr "is_thumb" "no")
10230 (if_then_else (eq_attr "is_thumb" "no")
10233 (set_attr "type" "multiple")]
;; IOR counterpart of *cmp_and: combine two comparisons into a dominant CC
;; register.  Here the second compare of each pair is predicated on the
;; INVERSE condition (%D4/%D5 in cmp2), per IOR semantics.
;; NOTE(review): interior lines of this pattern are elided in this extract.
10236 (define_insn "*cmp_ior"
10237 [(set (match_operand 6 "dominant_cc_register" "")
10240 (match_operator 4 "arm_comparison_operator"
10241 [(match_operand:SI 0 "s_register_operand"
10242 "l,l,l,r,r,r,r,r,r,r")
10243 (match_operand:SI 1 "arm_add_operand"
10244 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
10245 (match_operator:SI 5 "arm_comparison_operator"
10246 [(match_operand:SI 2 "s_register_operand"
10247 "l,r,r,l,l,r,r,r,r,r")
10248 (match_operand:SI 3 "arm_add_operand"
10249 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
10254 static const char *const cmp1[NUM_OF_COND_CMP][2] =
10258 {\"cmn\\t%0, #%n1\",
10261 \"cmn\\t%2, #%n3\"},
10262 {\"cmn\\t%0, #%n1\",
10263 \"cmn\\t%2, #%n3\"}
10265 static const char *const cmp2[NUM_OF_COND_CMP][2] =
10267 {\"cmp%D4\\t%2, %3\",
10268 \"cmp%D5\\t%0, %1\"},
10269 {\"cmp%D4\\t%2, %3\",
10270 \"cmn%D5\\t%0, #%n1\"},
10271 {\"cmn%D4\\t%2, #%n3\",
10272 \"cmp%D5\\t%0, %1\"},
10273 {\"cmn%D4\\t%2, #%n3\",
10274 \"cmn%D5\\t%0, #%n1\"}
10276 static const char *const ite[2] =
10281 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
10282 CMP_CMP, CMN_CMP, CMP_CMP,
10283 CMP_CMP, CMN_CMP, CMP_CMN,
10286 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10288 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10289 if (TARGET_THUMB2) {
10290 output_asm_insn (ite[swap], operands);
10292 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10296 [(set_attr "conds" "set")
10297 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
10298 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
10299 (set_attr_alternative "length"
10306 (if_then_else (eq_attr "is_thumb" "no")
10309 (if_then_else (eq_attr "is_thumb" "no")
10312 (if_then_else (eq_attr "is_thumb" "no")
10315 (if_then_else (eq_attr "is_thumb" "no")
10318 (set_attr "type" "multiple")]
;; (scc | scc) without branches: only valid when a dominance CC mode exists
;; for the two comparisons (DOM_CC_X_OR_Y).  After reload this splits into a
;; single dominance compare setting match_dup 7 (a CC register created in
;; the split preparation code) followed by a NE store into operand 0.
10321 (define_insn_and_split "*ior_scc_scc"
10322 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
10323 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
10324 [(match_operand:SI 1 "s_register_operand" "l,r")
10325 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10326 (match_operator:SI 6 "arm_comparison_operator"
10327 [(match_operand:SI 4 "s_register_operand" "l,r")
10328 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
10329 (clobber (reg:CC CC_REGNUM))]
10331 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
10334 "TARGET_32BIT && reload_completed"
10335 [(set (match_dup 7)
10338 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10339 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10341 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10343 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10346 [(set_attr "conds" "clob")
10347 (set_attr "enabled_for_short_it" "yes,no")
10348 (set_attr "length" "16")
10349 (set_attr "type" "multiple")]
10352 ; If the above pattern is followed by a CMP insn, then the compare is
10353 ; redundant, since we can rework the conditional instruction that follows.
;; As *ior_scc_scc, but the dominant CC result (operand 0) is itself live,
;; making a following CMP redundant; operand 7 still receives the ORed scc
;; value via the NE store in the split.
10354 (define_insn_and_split "*ior_scc_scc_cmp"
10355 [(set (match_operand 0 "dominant_cc_register" "")
10356 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
10357 [(match_operand:SI 1 "s_register_operand" "l,r")
10358 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10359 (match_operator:SI 6 "arm_comparison_operator"
10360 [(match_operand:SI 4 "s_register_operand" "l,r")
10361 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
10363 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
10364 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10365 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10368 "TARGET_32BIT && reload_completed"
10369 [(set (match_dup 0)
10372 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10373 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10375 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10377 [(set_attr "conds" "set")
10378 (set_attr "enabled_for_short_it" "yes,no")
10379 (set_attr "length" "16")
10380 (set_attr "type" "multiple")]
;; AND counterpart of *ior_scc_scc: (scc & scc) via a DOM_CC_X_AND_Y
;; dominance compare plus a NE store.  Note the dominance test appears in
;; both the insn condition and the split condition.
10383 (define_insn_and_split "*and_scc_scc"
10384 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
10385 (and:SI (match_operator:SI 3 "arm_comparison_operator"
10386 [(match_operand:SI 1 "s_register_operand" "l,r")
10387 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10388 (match_operator:SI 6 "arm_comparison_operator"
10389 [(match_operand:SI 4 "s_register_operand" "l,r")
10390 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
10391 (clobber (reg:CC CC_REGNUM))]
10393 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10396 "TARGET_32BIT && reload_completed
10397 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10399 [(set (match_dup 7)
10402 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10403 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10405 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10407 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10410 [(set_attr "conds" "clob")
10411 (set_attr "enabled_for_short_it" "yes,no")
10412 (set_attr "length" "16")
10413 (set_attr "type" "multiple")]
10416 ; If the above pattern is followed by a CMP insn, then the compare is
10417 ; redundant, since we can rework the conditional instruction that follows.
;; As *and_scc_scc, but the dominant CC result (operand 0) is live so a
;; following CMP is redundant; operand 7 receives the ANDed scc value.
10418 (define_insn_and_split "*and_scc_scc_cmp"
10419 [(set (match_operand 0 "dominant_cc_register" "")
10420 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
10421 [(match_operand:SI 1 "s_register_operand" "l,r")
10422 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10423 (match_operator:SI 6 "arm_comparison_operator"
10424 [(match_operand:SI 4 "s_register_operand" "l,r")
10425 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
10427 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
10428 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10429 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10432 "TARGET_32BIT && reload_completed"
10433 [(set (match_dup 0)
10436 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10437 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10439 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10441 [(set_attr "conds" "set")
10442 (set_attr "enabled_for_short_it" "yes,no")
10443 (set_attr "length" "16")
10444 (set_attr "type" "multiple")]
10447 ;; If there is no dominance in the comparison, then we can still save an
10448 ;; instruction in the AND case, since we can know that the second compare
10449 ;; need only zero the value if false (if true, then the value is already
;; correct).
;; Non-dominance AND case (see comment above): compute the first scc into
;; operand 0, set CC from the second comparison (operands 7/8 built in the
;; preparation code), then conditionally clear the result.  Earlyclobber on
;; operand 0 since it is written before operands 4/5 are consumed.
10451 (define_insn_and_split "*and_scc_scc_nodom"
10452 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
10453 (and:SI (match_operator:SI 3 "arm_comparison_operator"
10454 [(match_operand:SI 1 "s_register_operand" "r,r,0")
10455 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
10456 (match_operator:SI 6 "arm_comparison_operator"
10457 [(match_operand:SI 4 "s_register_operand" "r,r,r")
10458 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
10459 (clobber (reg:CC CC_REGNUM))]
10461 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10464 "TARGET_32BIT && reload_completed"
10465 [(parallel [(set (match_dup 0)
10466 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
10467 (clobber (reg:CC CC_REGNUM))])
10468 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
10470 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
10473 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
10474 operands[4], operands[5]),
10476 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
10478 [(set_attr "conds" "clob")
10479 (set_attr "length" "20")
10480 (set_attr "type" "multiple")]
;; NOTE(review): the opening (define_split ...) line of this pattern is
;; elided in this extract.  Body: a CC_NZ compare of (IOR (AND reg ...) scc)
;; is split into an explicit scc OR into scratch operand 4, followed by a
;; test of bit 0 of that scratch.
10484 [(set (reg:CC_NZ CC_REGNUM)
10485 (compare:CC_NZ (ior:SI
10486 (and:SI (match_operand:SI 0 "s_register_operand" "")
10488 (match_operator:SI 1 "arm_comparison_operator"
10489 [(match_operand:SI 2 "s_register_operand" "")
10490 (match_operand:SI 3 "arm_add_operand" "")]))
10492 (clobber (match_operand:SI 4 "s_register_operand" ""))]
10494 [(set (match_dup 4)
10495 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10497 (set (reg:CC_NZ CC_REGNUM)
10498 (compare:CC_NZ (and:SI (match_dup 4) (const_int 1))
;; Commuted form of the split above: (IOR scc (AND reg ...)).  Splits to the
;; same scc-OR into scratch operand 4 plus a test of its low bit.
;; NOTE(review): the (define_split ...) opener is elided in this extract.
10503 [(set (reg:CC_NZ CC_REGNUM)
10504 (compare:CC_NZ (ior:SI
10505 (match_operator:SI 1 "arm_comparison_operator"
10506 [(match_operand:SI 2 "s_register_operand" "")
10507 (match_operand:SI 3 "arm_add_operand" "")])
10508 (and:SI (match_operand:SI 0 "s_register_operand" "")
10511 (clobber (match_operand:SI 4 "s_register_operand" ""))]
10513 [(set (match_dup 4)
10514 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10516 (set (reg:CC_NZ CC_REGNUM)
10517 (compare:CC_NZ (and:SI (match_dup 4) (const_int 1))
10520 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
;; -(a <cmp> b): after reload, expand to one of three sequences:
;;   LT against 0  -> single "mov %0, %1, asr #31";
;;   NE            -> "subs %0, %1, %2; mvnne %0, #0";
;;   otherwise     -> "cmp %1, %2; mov%D3 %0, #0; mvn%d3 %0, #0".
;; Clobbers CC in all cases.
10522 (define_insn_and_split "*negscc"
10523 [(set (match_operand:SI 0 "s_register_operand" "=r")
10524 (neg:SI (match_operator 3 "arm_comparison_operator"
10525 [(match_operand:SI 1 "s_register_operand" "r")
10526 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
10527 (clobber (reg:CC CC_REGNUM))]
10530 "&& reload_completed"
10533 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
10535 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
10537 /* Emit mov\\t%0, %1, asr #31 */
10538 emit_insn (gen_rtx_SET (operands[0],
10539 gen_rtx_ASHIFTRT (SImode,
10544 else if (GET_CODE (operands[3]) == NE)
10546 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
10547 if (CONST_INT_P (operands[2]))
10548 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
10549 gen_int_mode (-INTVAL (operands[2]),
10552 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
10554 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10555 gen_rtx_NE (SImode,
10558 gen_rtx_SET (operands[0],
10564 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
10565 emit_insn (gen_rtx_SET (cc_reg,
10566 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
10567 enum rtx_code rc = GET_CODE (operands[3]);
10569 rc = reverse_condition (rc);
10570 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10571 gen_rtx_fmt_ee (rc,
10575 gen_rtx_SET (operands[0], const0_rtx)));
10576 rc = GET_CODE (operands[3]);
10577 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10578 gen_rtx_fmt_ee (rc,
10582 gen_rtx_SET (operands[0],
10588 [(set_attr "conds" "clob")
10589 (set_attr "length" "12")
10590 (set_attr "type" "multiple")]
;; Conditional move whose condition is a comparison involving
;; (plus reg3 op4); splits after reload into a CC-setting add-compare plus
;; predicated moves.  The preparation code reverses the condition (and swaps
;; the arms) so the predicated move targets operand 2 when it does not
;; already live in operand 0.
;; NOTE(review): some interior lines are elided in this extract.
10593 (define_insn_and_split "movcond_addsi"
10594 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
10596 (match_operator 5 "comparison_operator"
10597 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
10598 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
10600 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
10601 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
10602 (clobber (reg:CC CC_REGNUM))]
10605 "&& reload_completed"
10606 [(set (reg:CC_NZ CC_REGNUM)
10608 (plus:SI (match_dup 3)
10611 (set (match_dup 0) (match_dup 1))
10612 (cond_exec (match_dup 6)
10613 (set (match_dup 0) (match_dup 2)))]
10616 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
10617 operands[3], operands[4]);
10618 enum rtx_code rc = GET_CODE (operands[5]);
10619 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10620 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
10621 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
10622 rc = reverse_condition (rc);
10624 std::swap (operands[1], operands[2]);
10626 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10629 [(set_attr "conds" "clob")
10630 (set_attr "enabled_for_short_it" "no,yes,yes")
10631 (set_attr "type" "multiple")]
;; General conditional move with an explicit compare.  LT/GE against zero
;; are special-cased via ASR #31 sign-masks (and/bic, or ands/bics plus a
;; predicated mov); otherwise emit cmp/cmn followed by up to two predicated
;; moves, as selected by which_alternative.
10634 (define_insn "movcond"
10635 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10637 (match_operator 5 "arm_comparison_operator"
10638 [(match_operand:SI 3 "s_register_operand" "r,r,r")
10639 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
10640 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
10641 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
10642 (clobber (reg:CC CC_REGNUM))]
10645 if (GET_CODE (operands[5]) == LT
10646 && (operands[4] == const0_rtx))
10648 if (which_alternative != 1 && REG_P (operands[1]))
10650 if (operands[2] == const0_rtx)
10651 return \"and\\t%0, %1, %3, asr #31\";
10652 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
10654 else if (which_alternative != 0 && REG_P (operands[2]))
10656 if (operands[1] == const0_rtx)
10657 return \"bic\\t%0, %2, %3, asr #31\";
10658 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
10660 /* The only case that falls through to here is when both ops 1 & 2
10664 if (GET_CODE (operands[5]) == GE
10665 && (operands[4] == const0_rtx))
10667 if (which_alternative != 1 && REG_P (operands[1]))
10669 if (operands[2] == const0_rtx)
10670 return \"bic\\t%0, %1, %3, asr #31\";
10671 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
10673 else if (which_alternative != 0 && REG_P (operands[2]))
10675 if (operands[1] == const0_rtx)
10676 return \"and\\t%0, %2, %3, asr #31\";
10677 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
10679 /* The only case that falls through to here is when both ops 1 & 2
10682 if (CONST_INT_P (operands[4])
10683 && !const_ok_for_arm (INTVAL (operands[4])))
10684 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
10686 output_asm_insn (\"cmp\\t%3, %4\", operands);
10687 if (which_alternative != 0)
10688 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
10689 if (which_alternative != 1)
10690 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
10693 [(set_attr "conds" "clob")
10694 (set_attr "length" "8,8,12")
10695 (set_attr "type" "multiple")]
10698 ;; ??? The patterns below need checking for Thumb-2 usefulness.
;; if (op4 <cmp> op5) then (op2 + op3) else op1 — the compare is emitted by
;; this insn, so CC is clobbered (conds "clob").
;; NOTE(review): the output template lines are elided in this extract.
10700 (define_insn "*ifcompare_plus_move"
10701 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10702 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10703 [(match_operand:SI 4 "s_register_operand" "r,r")
10704 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10706 (match_operand:SI 2 "s_register_operand" "r,r")
10707 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
10708 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10709 (clobber (reg:CC CC_REGNUM))]
10712 [(set_attr "conds" "clob")
10713 (set_attr "length" "8,12")
10714 (set_attr "type" "multiple")]
;; Same selection as *ifcompare_plus_move, but the condition already lives
;; in a CC register (operand 5, conds "use"): only predicated add/sub and an
;; optional predicated mov of the else-value are emitted.
10717 (define_insn "*if_plus_move"
10718 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10720 (match_operator 4 "arm_comparison_operator"
10721 [(match_operand 5 "cc_register" "") (const_int 0)])
10723 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10724 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
10725 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
10728 add%d4\\t%0, %2, %3
10729 sub%d4\\t%0, %2, #%n3
10730 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
10731 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
10732 [(set_attr "conds" "use")
10733 (set_attr "length" "4,4,8,8")
10734 (set_attr_alternative "type"
10735 [(if_then_else (match_operand 3 "const_int_operand" "")
10736 (const_string "alu_imm" )
10737 (const_string "alu_sreg"))
10738 (const_string "alu_imm")
10739 (const_string "multiple")
10740 (const_string "multiple")])]
;; Mirror of *ifcompare_plus_move with the arms swapped: then-arm is op1,
;; else-arm is (op2 + op3).  Emits its own compare, clobbering CC.
;; NOTE(review): the output template lines are elided in this extract.
10743 (define_insn "*ifcompare_move_plus"
10744 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10745 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10746 [(match_operand:SI 4 "s_register_operand" "r,r")
10747 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10748 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10750 (match_operand:SI 2 "s_register_operand" "r,r")
10751 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
10752 (clobber (reg:CC CC_REGNUM))]
10755 [(set_attr "conds" "clob")
10756 (set_attr "length" "8,12")
10757 (set_attr "type" "multiple")]
;; CC-register form of *ifcompare_move_plus: add/sub on the inverse
;; condition (%D4), optional mov of op1 on the direct condition.
10760 (define_insn "*if_move_plus"
10761 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10763 (match_operator 4 "arm_comparison_operator"
10764 [(match_operand 5 "cc_register" "") (const_int 0)])
10765 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
10767 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10768 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
10771 add%D4\\t%0, %2, %3
10772 sub%D4\\t%0, %2, #%n3
10773 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
10774 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
10775 [(set_attr "conds" "use")
10776 (set_attr "length" "4,4,8,8")
10777 (set_attr_alternative "type"
10778 [(if_then_else (match_operand 3 "const_int_operand" "")
10779 (const_string "alu_imm" )
10780 (const_string "alu_sreg"))
10781 (const_string "alu_imm")
10782 (const_string "multiple")
10783 (const_string "multiple")])]
;; Select between two shiftable-operator results (operators 8 and 7) under
;; an explicit compare; CC is clobbered.
;; NOTE(review): the output template lines are elided in this extract.
10786 (define_insn "*ifcompare_arith_arith"
10787 [(set (match_operand:SI 0 "s_register_operand" "=r")
10788 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
10789 [(match_operand:SI 5 "s_register_operand" "r")
10790 (match_operand:SI 6 "arm_add_operand" "rIL")])
10791 (match_operator:SI 8 "shiftable_operator"
10792 [(match_operand:SI 1 "s_register_operand" "r")
10793 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10794 (match_operator:SI 7 "shiftable_operator"
10795 [(match_operand:SI 3 "s_register_operand" "r")
10796 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
10797 (clobber (reg:CC CC_REGNUM))]
10800 [(set_attr "conds" "clob")
10801 (set_attr "length" "12")
10802 (set_attr "type" "multiple")]
;; CC-register form: both arms emitted predicated — operator 6 on the
;; direct condition, operator 7 on the inverse (%I expands the operator
;; mnemonic).
10805 (define_insn "*if_arith_arith"
10806 [(set (match_operand:SI 0 "s_register_operand" "=r")
10807 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
10808 [(match_operand 8 "cc_register" "") (const_int 0)])
10809 (match_operator:SI 6 "shiftable_operator"
10810 [(match_operand:SI 1 "s_register_operand" "r")
10811 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10812 (match_operator:SI 7 "shiftable_operator"
10813 [(match_operand:SI 3 "s_register_operand" "r")
10814 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
10816 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
10817 [(set_attr "conds" "use")
10818 (set_attr "length" "8")
10819 (set_attr "type" "multiple")]
;; if (op2 <cmp> op3) then (op4 <shiftable> op5) else op1, emitting its own
;; compare.  A two-insn ASR #31 mask sequence is used when the operator has
;; (op x 0) as identity and the condition is LT/GE against zero; otherwise
;; cmp/cmn plus a predicated operation (and an optional predicated mov).
10822 (define_insn "*ifcompare_arith_move"
10823 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10824 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10825 [(match_operand:SI 2 "s_register_operand" "r,r")
10826 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
10827 (match_operator:SI 7 "shiftable_operator"
10828 [(match_operand:SI 4 "s_register_operand" "r,r")
10829 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
10830 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10831 (clobber (reg:CC CC_REGNUM))]
10834 /* If we have an operation where (op x 0) is the identity operation and
10835 the conditional operator is LT or GE and we are comparing against zero and
10836 everything is in registers then we can do this in two instructions.  */
10837 if (operands[3] == const0_rtx
10838 && GET_CODE (operands[7]) != AND
10839 && REG_P (operands[5])
10840 && REG_P (operands[1])
10841 && REGNO (operands[1]) == REGNO (operands[4])
10842 && REGNO (operands[4]) != REGNO (operands[0]))
10844 if (GET_CODE (operands[6]) == LT)
10845 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10846 else if (GET_CODE (operands[6]) == GE)
10847 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10849 if (CONST_INT_P (operands[3])
10850 && !const_ok_for_arm (INTVAL (operands[3])))
10851 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
10853 output_asm_insn (\"cmp\\t%2, %3\", operands);
10854 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
10855 if (which_alternative != 0)
10856 return \"mov%D6\\t%0, %1\";
10859 [(set_attr "conds" "clob")
10860 (set_attr "length" "8,12")
10861 (set_attr "type" "multiple")]
;; CC-register form of *ifcompare_arith_move: predicated shiftable op, plus
;; an optional predicated mov of the else-value.
10864 (define_insn "*if_arith_move"
10865 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10866 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10867 [(match_operand 6 "cc_register" "") (const_int 0)])
10868 (match_operator:SI 5 "shiftable_operator"
10869 [(match_operand:SI 2 "s_register_operand" "r,r")
10870 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10871 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
10874 %I5%d4\\t%0, %2, %3
10875 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
10876 [(set_attr "conds" "use")
10877 (set_attr "length" "4,8")
10878 (set_attr_alternative "type"
10879 [(if_then_else (match_operand 3 "const_int_operand" "")
10880 (if_then_else (match_operand 5 "alu_shift_operator_lsl_1_to_4")
10881 (const_string "alu_shift_imm_lsl_1to4")
10882 (const_string "alu_shift_imm_other"))
10883 (const_string "alu_shift_reg"))
10884 (const_string "multiple")])]
;; Mirror of *ifcompare_arith_move with the arms swapped: then-arm is op1,
;; else-arm is (op2 <shiftable> op3).  Same ASR #31 identity-operator
;; shortcut (conditions swapped accordingly), else cmp/cmn plus predicated
;; operations.
10887 (define_insn "*ifcompare_move_arith"
10888 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10889 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10890 [(match_operand:SI 4 "s_register_operand" "r,r")
10891 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10892 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10893 (match_operator:SI 7 "shiftable_operator"
10894 [(match_operand:SI 2 "s_register_operand" "r,r")
10895 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10896 (clobber (reg:CC CC_REGNUM))]
10899 /* If we have an operation where (op x 0) is the identity operation and
10900 the conditional operator is LT or GE and we are comparing against zero and
10901 everything is in registers then we can do this in two instructions */
10902 if (operands[5] == const0_rtx
10903 && GET_CODE (operands[7]) != AND
10904 && REG_P (operands[3])
10905 && REG_P (operands[1])
10906 && REGNO (operands[1]) == REGNO (operands[2])
10907 && REGNO (operands[2]) != REGNO (operands[0]))
10909 if (GET_CODE (operands[6]) == GE)
10910 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10911 else if (GET_CODE (operands[6]) == LT)
10912 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10915 if (CONST_INT_P (operands[5])
10916 && !const_ok_for_arm (INTVAL (operands[5])))
10917 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
10919 output_asm_insn (\"cmp\\t%4, %5\", operands);
10921 if (which_alternative != 0)
10922 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
10923 return \"%I7%D6\\t%0, %2, %3\";
10925 [(set_attr "conds" "clob")
10926 (set_attr "length" "8,12")
10927 (set_attr "type" "multiple")]
;; CC-register form of *ifcompare_move_arith: shiftable op on the inverse
;; condition, optional mov of op1 on the direct condition.
10930 (define_insn "*if_move_arith"
10931 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10933 (match_operator 4 "arm_comparison_operator"
10934 [(match_operand 6 "cc_register" "") (const_int 0)])
10935 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10936 (match_operator:SI 5 "shiftable_operator"
10937 [(match_operand:SI 2 "s_register_operand" "r,r")
10938 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
10941 %I5%D4\\t%0, %2, %3
10942 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
10943 [(set_attr "conds" "use")
10944 (set_attr "length" "4,8")
10945 (set_attr_alternative "type"
10946 [(if_then_else (match_operand 3 "const_int_operand" "")
10947 (if_then_else (match_operand 5 "alu_shift_operator_lsl_1_to_4")
10948 (const_string "alu_shift_imm_lsl_1to4")
10949 (const_string "alu_shift_imm_other"))
10950 (const_string "alu_shift_reg"))
10951 (const_string "multiple")])]
;; if (op3 <cmp> op4) then op1 else ~op2, emitting its own compare
;; (CC clobbered).
;; NOTE(review): the output template lines are elided in this extract.
10954 (define_insn "*ifcompare_move_not"
10955 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10957 (match_operator 5 "arm_comparison_operator"
10958 [(match_operand:SI 3 "s_register_operand" "r,r")
10959 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10960 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10962 (match_operand:SI 2 "s_register_operand" "r,r"))))
10963 (clobber (reg:CC CC_REGNUM))]
10966 [(set_attr "conds" "clob")
10967 (set_attr "length" "8,12")
10968 (set_attr "type" "multiple")]
;; Conditional move/mvn with the condition already in a CC register
;; (operand 3, conds "use"): select op1 on the direct condition, ~op2 on
;; the inverse — only predicated mov/mvn instructions are emitted.
10971 (define_insn "*if_move_not"
10972 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10974 (match_operator 4 "arm_comparison_operator"
10975 [(match_operand 3 "cc_register" "") (const_int 0)])
10976 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10977 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10981 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
10982 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
10983 [(set_attr "conds" "use")
10985 (set_attr "length" "4,8,8")
;; The "type" attribute was previously set twice (a scalar "mvn_reg" and
;; this per-alternative form).  Keep only the per-alternative one: it
;; matches length "4,8,8" (alternatives 1 and 2 are two-insn sequences)
;; and is consistent with the sibling *if_not_move pattern.
10986 (set_attr "type" "mvn_reg,multiple,multiple")]
;; if (op3 <cmp> op4) then ~op2 else op1 — arms swapped relative to
;; *ifcompare_move_not.  Emits its own compare, clobbering CC.
;; NOTE(review): the output template lines are elided in this extract.
10989 (define_insn "*ifcompare_not_move"
10990 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10992 (match_operator 5 "arm_comparison_operator"
10993 [(match_operand:SI 3 "s_register_operand" "r,r")
10994 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10996 (match_operand:SI 2 "s_register_operand" "r,r"))
10997 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10998 (clobber (reg:CC CC_REGNUM))]
11001 [(set_attr "conds" "clob")
11002 (set_attr "length" "8,12")
11003 (set_attr "type" "multiple")]
;; CC-register form of *ifcompare_not_move: mvn on the direct condition,
;; mov/mvn of the else-value on the inverse.
11006 (define_insn "*if_not_move"
11007 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11009 (match_operator 4 "arm_comparison_operator"
11010 [(match_operand 3 "cc_register" "") (const_int 0)])
11011 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
11012 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11016 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
11017 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
11018 [(set_attr "conds" "use")
11019 (set_attr "type" "mvn_reg,multiple,multiple")
11020 (set_attr "length" "4,8,8")]
;; if (op4 <cmp> op5) then (op2 shifted by op3) else op1, emitting its own
;; compare (CC clobbered).
;; NOTE(review): the output template lines are elided in this extract.
11023 (define_insn "*ifcompare_shift_move"
11024 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11026 (match_operator 6 "arm_comparison_operator"
11027 [(match_operand:SI 4 "s_register_operand" "r,r")
11028 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11029 (match_operator:SI 7 "shift_operator"
11030 [(match_operand:SI 2 "s_register_operand" "r,r")
11031 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
11032 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11033 (clobber (reg:CC CC_REGNUM))]
11036 [(set_attr "conds" "clob")
11037 (set_attr "length" "8,12")
11038 (set_attr "type" "multiple")]
;; CC-register form of *ifcompare_shift_move: predicated shifted mov
;; (%S4 prints the shift), with an optional mov/mvn of the else-value.
11041 (define_insn "*if_shift_move"
11042 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11044 (match_operator 5 "arm_comparison_operator"
11045 [(match_operand 6 "cc_register" "") (const_int 0)])
11046 (match_operator:SI 4 "shift_operator"
11047 [(match_operand:SI 2 "s_register_operand" "r,r,r")
11048 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
11049 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11053 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
11054 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
11055 [(set_attr "conds" "use")
11056 (set_attr "shift" "2")
11057 (set_attr "length" "4,8,8")
11058 (set_attr_alternative "type"
11059 [(if_then_else (match_operand 3 "const_int_operand" "")
11060 (const_string "mov_shift" )
11061 (const_string "mov_shift_reg"))
11062 (const_string "multiple")
11063 (const_string "multiple")])]
;; Mirror of *ifcompare_shift_move with the arms swapped: then-arm is op1,
;; else-arm is the shifted value.  Emits its own compare, clobbering CC.
;; NOTE(review): the output template lines are elided in this extract.
11066 (define_insn "*ifcompare_move_shift"
11067 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11069 (match_operator 6 "arm_comparison_operator"
11070 [(match_operand:SI 4 "s_register_operand" "r,r")
11071 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11072 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11073 (match_operator:SI 7 "shift_operator"
11074 [(match_operand:SI 2 "s_register_operand" "r,r")
11075 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
11076 (clobber (reg:CC CC_REGNUM))]
11079 [(set_attr "conds" "clob")
11080 (set_attr "length" "8,12")
11081 (set_attr "type" "multiple")]
;; CC-register form of *ifcompare_move_shift: shifted mov on the inverse
;; condition, optional mov/mvn of op1 on the direct condition.
11084 (define_insn "*if_move_shift"
11085 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11087 (match_operator 5 "arm_comparison_operator"
11088 [(match_operand 6 "cc_register" "") (const_int 0)])
11089 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
11090 (match_operator:SI 4 "shift_operator"
11091 [(match_operand:SI 2 "s_register_operand" "r,r,r")
11092 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
11096 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
11097 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
11098 [(set_attr "conds" "use")
11099 (set_attr "shift" "2")
11100 (set_attr "length" "4,8,8")
11101 (set_attr_alternative "type"
11102 [(if_then_else (match_operand 3 "const_int_operand" "")
11103 (const_string "mov_shift" )
11104 (const_string "mov_shift_reg"))
11105 (const_string "multiple")
11106 (const_string "multiple")])]
;; Select between two shifted values (operators 8 and 9) under an explicit
;; compare; CC is clobbered.
;; NOTE(review): the output template lines are elided in this extract.
11109 (define_insn "*ifcompare_shift_shift"
11110 [(set (match_operand:SI 0 "s_register_operand" "=r")
11112 (match_operator 7 "arm_comparison_operator"
11113 [(match_operand:SI 5 "s_register_operand" "r")
11114 (match_operand:SI 6 "arm_add_operand" "rIL")])
11115 (match_operator:SI 8 "shift_operator"
11116 [(match_operand:SI 1 "s_register_operand" "r")
11117 (match_operand:SI 2 "arm_rhs_operand" "rM")])
11118 (match_operator:SI 9 "shift_operator"
11119 [(match_operand:SI 3 "s_register_operand" "r")
11120 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
11121 (clobber (reg:CC CC_REGNUM))]
11124 [(set_attr "conds" "clob")
11125 (set_attr "length" "12")
11126 (set_attr "type" "multiple")]
;; CC-register form: both arms emitted as predicated shifted movs.  Typed
;; mov_shift only when both shift amounts are immediates.
11129 (define_insn "*if_shift_shift"
11130 [(set (match_operand:SI 0 "s_register_operand" "=r")
11132 (match_operator 5 "arm_comparison_operator"
11133 [(match_operand 8 "cc_register" "") (const_int 0)])
11134 (match_operator:SI 6 "shift_operator"
11135 [(match_operand:SI 1 "s_register_operand" "r")
11136 (match_operand:SI 2 "arm_rhs_operand" "rM")])
11137 (match_operator:SI 7 "shift_operator"
11138 [(match_operand:SI 3 "s_register_operand" "r")
11139 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
11141 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
11142 [(set_attr "conds" "use")
11143 (set_attr "shift" "1")
11144 (set_attr "length" "8")
11145 (set (attr "type") (if_then_else
11146 (and (match_operand 2 "const_int_operand" "")
11147 (match_operand 4 "const_int_operand" ""))
11148 (const_string "mov_shift")
11149 (const_string "mov_shift_reg")))]
;; (not x) on the true arm vs. a shiftable arithmetic op on the false arm,
;; with an explicit comparison (CC clobbered).
11152 (define_insn "*ifcompare_not_arith"
11153 [(set (match_operand:SI 0 "s_register_operand" "=r")
11155 (match_operator 6 "arm_comparison_operator"
11156 [(match_operand:SI 4 "s_register_operand" "r")
11157 (match_operand:SI 5 "arm_add_operand" "rIL")])
11158 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
11159 (match_operator:SI 7 "shiftable_operator"
11160 [(match_operand:SI 2 "s_register_operand" "r")
11161 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
11162 (clobber (reg:CC CC_REGNUM))]
11165 [(set_attr "conds" "clob")
11166 (set_attr "length" "12")
11167 (set_attr "type" "multiple")]
;; Same but reusing an existing CC value: conditional mvn for the true arm,
;; the %I6 (operator-mnemonic) instruction for the false arm.
11170 (define_insn "*if_not_arith"
11171 [(set (match_operand:SI 0 "s_register_operand" "=r")
11173 (match_operator 5 "arm_comparison_operator"
11174 [(match_operand 4 "cc_register" "") (const_int 0)])
11175 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
11176 (match_operator:SI 6 "shiftable_operator"
11177 [(match_operand:SI 2 "s_register_operand" "r")
11178 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
11180 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
11181 [(set_attr "conds" "use")
11182 (set_attr "type" "mvn_reg")
11183 (set_attr "length" "8")]
;; Mirror image: arithmetic op on the true arm, (not x) on the false arm,
;; again with an explicit comparison (CC clobbered).
11186 (define_insn "*ifcompare_arith_not"
11187 [(set (match_operand:SI 0 "s_register_operand" "=r")
11189 (match_operator 6 "arm_comparison_operator"
11190 [(match_operand:SI 4 "s_register_operand" "r")
11191 (match_operand:SI 5 "arm_add_operand" "rIL")])
11192 (match_operator:SI 7 "shiftable_operator"
11193 [(match_operand:SI 2 "s_register_operand" "r")
11194 (match_operand:SI 3 "arm_rhs_operand" "rI")])
11195 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
11196 (clobber (reg:CC CC_REGNUM))]
11199 [(set_attr "conds" "clob")
11200 (set_attr "length" "12")
11201 (set_attr "type" "multiple")]
;; As above but using an existing CC value; note the condition suffixes are
;; swapped (%D5 on the mvn, %d5 on the arithmetic op) because the arms are.
11204 (define_insn "*if_arith_not"
11205 [(set (match_operand:SI 0 "s_register_operand" "=r")
11207 (match_operator 5 "arm_comparison_operator"
11208 [(match_operand 4 "cc_register" "") (const_int 0)])
11209 (match_operator:SI 6 "shiftable_operator"
11210 [(match_operand:SI 2 "s_register_operand" "r")
11211 (match_operand:SI 3 "arm_rhs_operand" "rI")])
11212 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
11214 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
11215 [(set_attr "conds" "use")
11216 (set_attr "type" "multiple")
11217 (set_attr "length" "8")]
;; Conditional negate vs. plain value with an explicit comparison
;; (CC clobbered).
11220 (define_insn "*ifcompare_neg_move"
11221 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11223 (match_operator 5 "arm_comparison_operator"
11224 [(match_operand:SI 3 "s_register_operand" "r,r")
11225 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11226 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
11227 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11228 (clobber (reg:CC CC_REGNUM))]
11231 [(set_attr "conds" "clob")
11232 (set_attr "length" "8,12")
11233 (set_attr "type" "multiple")]
;; Conditional negate reusing an existing CC value (operand 1 is tied to
;; the destination); split after reload into a cond_exec'd neg using the
;; original condition unchanged.
11236 (define_insn_and_split "*if_neg_move"
11237 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
11239 (match_operator 4 "arm_comparison_operator"
11240 [(match_operand 3 "cc_register" "") (const_int 0)])
11241 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
11242 (match_operand:SI 1 "s_register_operand" "0,0")))]
11243 "TARGET_32BIT && !TARGET_COND_ARITH"
11245 "&& reload_completed"
11246 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
11247 (set (match_dup 0) (neg:SI (match_dup 2))))]
11249 [(set_attr "conds" "use")
11250 (set_attr "length" "4")
11251 (set_attr "arch" "t2,32")
11252 (set_attr "enabled_for_short_it" "yes,no")
11253 (set_attr "type" "logic_shift_imm")]
;; Plain value vs. conditional negate with an explicit comparison
;; (CC clobbered) -- arms swapped relative to *ifcompare_neg_move.
11256 (define_insn "*ifcompare_move_neg"
11257 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11259 (match_operator 5 "arm_comparison_operator"
11260 [(match_operand:SI 3 "s_register_operand" "r,r")
11261 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11262 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11263 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
11264 (clobber (reg:CC CC_REGNUM))]
11267 [(set_attr "conds" "clob")
11268 (set_attr "length" "8,12")
11269 (set_attr "type" "multiple")]
;; As *if_neg_move but with the arms swapped, so the split must reverse
;; the condition -- using reverse_condition_maybe_unordered for the FP
;; CC modes where plain reversal would be wrong for unordered operands.
11272 (define_insn_and_split "*if_move_neg"
11273 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
11275 (match_operator 4 "arm_comparison_operator"
11276 [(match_operand 3 "cc_register" "") (const_int 0)])
11277 (match_operand:SI 1 "s_register_operand" "0,0")
11278 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
11281 "&& reload_completed"
11282 [(cond_exec (match_dup 5)
11283 (set (match_dup 0) (neg:SI (match_dup 2))))]
11285 machine_mode mode = GET_MODE (operands[3]);
11286 rtx_code rc = GET_CODE (operands[4]);
11288 if (mode == CCFPmode || mode == CCFPEmode)
11289 rc = reverse_condition_maybe_unordered (rc);
11291 rc = reverse_condition (rc);
11293 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
11295 [(set_attr "conds" "use")
11296 (set_attr "length" "4")
11297 (set_attr "arch" "t2,32")
11298 (set_attr "enabled_for_short_it" "yes,no")
11299 (set_attr "type" "logic_shift_imm")]
;; Apply a shiftable operator to two adjacent memory words, loading both
;; with a single ldm (ldmia/ldmib/ldmda chosen from the offsets) when
;; possible, falling back to an add-then-ldm or two ldr instructions when
;; the offset is out of immediate range.  The ldm register order depends
;; on REGNO(operands[0]) vs. the scratch, since ldm loads ascending regs.
;; NOTE(review): embedded original line numbers are non-contiguous, so
;; parts of this C output body appear to have been elided by extraction;
;; comments describe only the visible code.
11302 (define_insn "*arith_adjacentmem"
11303 [(set (match_operand:SI 0 "s_register_operand" "=r")
11304 (match_operator:SI 1 "shiftable_operator"
11305 [(match_operand:SI 2 "memory_operand" "m")
11306 (match_operand:SI 3 "memory_operand" "m")]))
11307 (clobber (match_scratch:SI 4 "=r"))]
11308 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
11314 HOST_WIDE_INT val1 = 0, val2 = 0;
11316 if (REGNO (operands[0]) > REGNO (operands[4]))
11318 ldm[1] = operands[4];
11319 ldm[2] = operands[0];
11323 ldm[1] = operands[0];
11324 ldm[2] = operands[4];
11327 base_reg = XEXP (operands[2], 0);
11329 if (!REG_P (base_reg))
11331 val1 = INTVAL (XEXP (base_reg, 1));
11332 base_reg = XEXP (base_reg, 0);
11335 if (!REG_P (XEXP (operands[3], 0)))
11336 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
11338 arith[0] = operands[0];
11339 arith[3] = operands[1];
11353 if (val1 !=0 && val2 != 0)
11357 if (val1 == 4 || val2 == 4)
11358 /* Other val must be 8, since we know they are adjacent and neither
11360 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
11361 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
11363 ldm[0] = ops[0] = operands[4];
11365 ops[2] = GEN_INT (val1);
11366 output_add_immediate (ops);
11368 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11370 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11374 /* Offset is out of range for a single add, so use two ldr.  */
11377 ops[2] = GEN_INT (val1);
11378 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11380 ops[2] = GEN_INT (val2);
11381 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11384 else if (val1 != 0)
11387 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11389 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11394 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11396 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11398 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
11401 [(set_attr "length" "12")
11402 (set_attr "predicable" "yes")
11403 (set_attr "type" "load_4")]
11406 ; This pattern is never tried by combine, so do it as a peephole
;; Fuse a register copy with a following compare-against-zero of the same
;; source into a single compare-and-set parallel.
;; NOTE(review): the define_peephole2 header line appears to have been
;; elided by extraction (embedded line numbers jump from 11406 to 11409).
11409 [(set (match_operand:SI 0 "arm_general_register_operand" "")
11410 (match_operand:SI 1 "arm_general_register_operand" ""))
11411 (set (reg:CC CC_REGNUM)
11412 (compare:CC (match_dup 1) (const_int 0)))]
11414 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
11415 (set (match_dup 0) (match_dup 1))])]
;; Split (and (ge x 0) (neg (cmp ...))) using a scratch: the not/ashiftrt
;; of bit 31 gives an all-ones mask when x >= 0, which is then ANDed with
;; the negated comparison result.
;; NOTE(review): the define_split header line appears to have been elided.
11420 [(set (match_operand:SI 0 "s_register_operand" "")
11421 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
11423 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
11424 [(match_operand:SI 3 "s_register_operand" "")
11425 (match_operand:SI 4 "arm_rhs_operand" "")]))))
11426 (clobber (match_operand:SI 5 "s_register_operand" ""))]
11428 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
11429 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
11434 ;; This split can be used because CC_Z mode implies that the following
11435 ;; branch will be an equality, or an unsigned inequality, so the sign
11436 ;; extension is not needed.
;; Replace (compare (ashift (subreg QImem) 24) const) with a zero_extend
;; into the scratch and a compare against the constant shifted down 24,
;; valid when the constant's low 24 bits are zero (checked below).
11439 [(set (reg:CC_Z CC_REGNUM)
11441 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
11443 (match_operand 1 "const_int_operand" "")))
11444 (clobber (match_scratch:SI 2 ""))]
11446 && ((UINTVAL (operands[1]))
11447 == ((UINTVAL (operands[1])) >> 24) << 24)"
11448 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
11449 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
11451 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
11454 ;; ??? Check the patterns above for Thumb-2 usefulness
;; Expand the function prologue: arm_expand_prologue for 32-bit targets,
;; thumb1_expand_prologue otherwise (visible branches only).
11456 (define_expand "prologue"
11457 [(clobber (const_int 0))]
11460 arm_expand_prologue ();
11462 thumb1_expand_prologue ();
;; Expand the function epilogue.  For eh_return, force r2 (the handler's
;; stack-adjust register) live first; then either emit the Thumb-1
;; epilogue jump, a simple return when HAVE_return, or the full 32-bit
;; epilogue via arm_expand_epilogue (true).
11467 (define_expand "epilogue"
11468 [(clobber (const_int 0))]
11471 if (crtl->calls_eh_return)
11472 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
11475 thumb1_expand_epilogue ();
11476 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
11477 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
11479 else if (HAVE_return)
11481 /* HAVE_return is testing for USE_RETURN_INSN (FALSE).  Hence,
11482 no need for explicit testing again.  */
11483 emit_jump_insn (gen_return ());
11485 else if (TARGET_32BIT)
11487 arm_expand_epilogue (true);
11493 ;; Note - although unspec_volatile's USE all hard registers,
11494 ;; USEs are ignored after reload has completed.  Thus we need
11495 ;; to add an unspec of the link register to ensure that flow
11496 ;; does not think that it is unused by the sibcall branch that
11497 ;; will replace the standard function epilogue.
;; Sibcall epilogue: restores state but does not return (the sibcall branch
;; does); the LR unspec keeps the link register live past reload.
11498 (define_expand "sibcall_epilogue"
11499 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
11500 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
11503 arm_expand_epilogue (false);
;; Exception-handler epilogue: record the stack offset, copy the handler
;; address into r2 if it is not already there, and clear the cached
;; function type so it is recomputed for the EH return path.
11508 (define_expand "eh_epilogue"
11509 [(use (match_operand:SI 0 "register_operand"))
11510 (use (match_operand:SI 1 "register_operand"))
11511 (use (match_operand:SI 2 "register_operand"))]
11515 cfun->machine->eh_epilogue_sp_ofs = operands[1];
11516 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
11518 rtx ra = gen_rtx_REG (Pmode, 2);
11520 emit_move_insn (ra, operands[2]);
11523 /* This is a hack -- we may have crystallized the function type too
11525 cfun->machine->func_type = 0;
11529 ;; This split is only used during output to reduce the number of patterns
11530 ;; that need assembler instructions adding to them. We allowed the setting
11531 ;; of the conditions to be implicit during rtl generation so that
11532 ;; the conditional compare patterns would work. However this conflicts to
11533 ;; some extent with the conditional data operations, so we have to split them
11536 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
11537 ;; conditional execution sufficient?
;; Split a conditional move whose comparison is still implicit into an
;; explicit CC-setting compare followed by a cond_exec'd move of the
;; false arm under the reversed condition (maybe-unordered reversal for
;; the FP CC modes).
;; NOTE(review): the define_split header lines for the four splits below
;; appear to have been elided by extraction (embedded line-number gaps).
11540 [(set (match_operand:SI 0 "s_register_operand" "")
11541 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11542 [(match_operand 2 "" "") (match_operand 3 "" "")])
11544 (match_operand 4 "" "")))
11545 (clobber (reg:CC CC_REGNUM))]
11546 "TARGET_ARM && reload_completed"
11547 [(set (match_dup 5) (match_dup 6))
11548 (cond_exec (match_dup 7)
11549 (set (match_dup 0) (match_dup 4)))]
11552 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11553 operands[2], operands[3]);
11554 enum rtx_code rc = GET_CODE (operands[1]);
11556 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11557 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11558 if (mode == CCFPmode || mode == CCFPEmode)
11559 rc = reverse_condition_maybe_unordered (rc);
11561 rc = reverse_condition (rc);
11563 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
;; Variant where only the true arm needs a conditional move: the compare
;; is made explicit and the original (unreversed) condition is reused.
11568 [(set (match_operand:SI 0 "s_register_operand" "")
11569 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11570 [(match_operand 2 "" "") (match_operand 3 "" "")])
11571 (match_operand 4 "" "")
11573 (clobber (reg:CC CC_REGNUM))]
11574 "TARGET_ARM && reload_completed"
11575 [(set (match_dup 5) (match_dup 6))
11576 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
11577 (set (match_dup 0) (match_dup 4)))]
11580 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11581 operands[2], operands[3]);
11583 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11584 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
;; Variant with two general arms: explicit compare, then one cond_exec
;; move per arm -- the true arm under the original condition, the false
;; arm under the reversed one.
11589 [(set (match_operand:SI 0 "s_register_operand" "")
11590 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11591 [(match_operand 2 "" "") (match_operand 3 "" "")])
11592 (match_operand 4 "" "")
11593 (match_operand 5 "" "")))
11594 (clobber (reg:CC CC_REGNUM))]
11595 "TARGET_ARM && reload_completed"
11596 [(set (match_dup 6) (match_dup 7))
11597 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
11598 (set (match_dup 0) (match_dup 4)))
11599 (cond_exec (match_dup 8)
11600 (set (match_dup 0) (match_dup 5)))]
11603 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11604 operands[2], operands[3]);
11605 enum rtx_code rc = GET_CODE (operands[1]);
11607 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11608 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11609 if (mode == CCFPmode || mode == CCFPEmode)
11610 rc = reverse_condition_maybe_unordered (rc);
11612 rc = reverse_condition (rc);
11614 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
;; Variant whose false arm is (not reg): same scheme, with the reversed
;; cond_exec wrapping an mvn-style (not) set.
11619 [(set (match_operand:SI 0 "s_register_operand" "")
11620 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11621 [(match_operand:SI 2 "s_register_operand" "")
11622 (match_operand:SI 3 "arm_add_operand" "")])
11623 (match_operand:SI 4 "arm_rhs_operand" "")
11625 (match_operand:SI 5 "s_register_operand" ""))))
11626 (clobber (reg:CC CC_REGNUM))]
11627 "TARGET_ARM && reload_completed"
11628 [(set (match_dup 6) (match_dup 7))
11629 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
11630 (set (match_dup 0) (match_dup 4)))
11631 (cond_exec (match_dup 8)
11632 (set (match_dup 0) (not:SI (match_dup 5))))]
11635 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11636 operands[2], operands[3]);
11637 enum rtx_code rc = GET_CODE (operands[1]);
11639 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11640 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11641 if (mode == CCFPmode || mode == CCFPEmode)
11642 rc = reverse_condition_maybe_unordered (rc);
11644 rc = reverse_condition (rc);
11646 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
;; Conditional select between a value and (not reg) reusing an existing
;; CC value: a single mvn%D4 when operand 1 is already in place, else a
;; mov/mvn pair.
11650 (define_insn "*cond_move_not"
11651 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11652 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
11653 [(match_operand 3 "cc_register" "") (const_int 0)])
11654 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11656 (match_operand:SI 2 "s_register_operand" "r,r"))))]
11660 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
11661 [(set_attr "conds" "use")
11662 (set_attr "type" "mvn_reg,multiple")
11663 (set_attr "length" "4,8")]
11666 ;; The next two patterns occur when an AND operation is followed by a
11667 ;; scc insn sequence
;; Extract a single bit sign-extended to 0 / -1: ands with a one-bit mask
;; sets Z, then mvnne writes -1 when the bit was set.  Clobbers CC.
11669 (define_insn "*sign_extract_onebit"
11670 [(set (match_operand:SI 0 "s_register_operand" "=r")
11671 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11673 (match_operand:SI 2 "const_int_operand" "n")))
11674 (clobber (reg:CC CC_REGNUM))]
11677 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11678 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
11679 return \"mvnne\\t%0, #0\";
11681 [(set_attr "conds" "clob")
11682 (set_attr "length" "8")
11683 (set_attr "type" "multiple")]
;; Negation of the above: tst the bit, then mvneq / movne give -1 when the
;; bit is clear and 0 when it is set.  Clobbers CC.
11686 (define_insn "*not_signextract_onebit"
11687 [(set (match_operand:SI 0 "s_register_operand" "=r")
11689 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11691 (match_operand:SI 2 "const_int_operand" "n"))))
11692 (clobber (reg:CC CC_REGNUM))]
11695 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11696 output_asm_insn (\"tst\\t%1, %2\", operands);
11697 output_asm_insn (\"mvneq\\t%0, #0\", operands);
11698 return \"movne\\t%0, #0\";
11700 [(set_attr "conds" "clob")
11701 (set_attr "length" "12")
11702 (set_attr "type" "multiple")]
11704 ;; ??? The above patterns need auditing for Thumb-2
11706 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
11707 ;; expressions. For simplicity, the first register is also in the unspec
11709 ;; To avoid the usage of GNU extension, the length attribute is computed
11710 ;; in a C function arm_attr_length_push_multi.
;; Push multiple registers: emits a single str for one register on ARM
;; (faster on StrongARM per the comment below), otherwise builds a
;; push {r...} list from the parallel's members.  Length comes from
;; arm_attr_length_push_multi.
11711 (define_insn "*push_multi"
11712 [(match_parallel 2 "multi_register_push"
11713 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
11714 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
11715 UNSPEC_PUSH_MULT))])]
11719 int num_saves = XVECLEN (operands[2], 0);
11721 /* For the StrongARM at least it is faster to
11722 use STR to store only a single register.
11723 In Thumb mode always use push, and the assembler will pick
11724 something appropriate.  */
11725 if (num_saves == 1 && TARGET_ARM)
11726 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
11733 strcpy (pattern, \"push%?\\t{%1\");
11735 strcpy (pattern, \"push\\t{%1\");
11737 for (i = 1; i < num_saves; i++)
11739 strcat (pattern, \", %|\");
11741 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
11744 strcat (pattern, \"}\");
11745 output_asm_insn (pattern, operands);
11750 [(set_attr "type" "store_16")
11751 (set (attr "length")
11752 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
;; Zero-length scheduling barrier tying two stack registers together so
;; the scheduler cannot move stack accesses across it; emits no code.
11755 (define_insn "stack_tie"
11756 [(set (mem:BLK (scratch))
11757 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
11758 (match_operand:SI 1 "s_register_operand" "rk")]
11762 [(set_attr "length" "0")
11763 (set_attr "type" "block")]
11766 ;; Pop (as used in epilogue RTL)
;; Pop (load-multiple with base writeback), only valid during/after
;; reload; assembly produced by arm_output_multireg_pop.
11768 (define_insn "*load_multiple_with_writeback"
11769 [(match_parallel 0 "load_multiple_operation"
11770 [(set (match_operand:SI 1 "s_register_operand" "+rk")
11771 (plus:SI (match_dup 1)
11772 (match_operand:SI 2 "const_int_I_operand" "I")))
11773 (set (match_operand:SI 3 "s_register_operand" "=rk")
11774 (mem:SI (match_dup 1)))
11776 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11779 arm_output_multireg_pop (operands, /*return_pc=*/false,
11780 /*cond=*/const_true_rtx,
11786 [(set_attr "type" "load_16")
11787 (set_attr "predicable" "yes")
11788 (set (attr "length")
11789 (symbol_ref "arm_attr_length_pop_multi (operands,
11790 /*return_pc=*/false,
11791 /*write_back_p=*/true)"))]
11794 ;; Pop with return (as used in epilogue RTL)
11796 ;; This instruction is generated when the registers are popped at the end of
11797 ;; epilogue.  Here, instead of popping the value into LR and then generating
11798 ;; jump to LR, value is popped into PC directly.  Hence, the pattern is combined
;; Pop-with-return, base writeback variant (return_pc=true).
11800 (define_insn "*pop_multiple_with_writeback_and_return"
11801 [(match_parallel 0 "pop_multiple_return"
11803 (set (match_operand:SI 1 "s_register_operand" "+rk")
11804 (plus:SI (match_dup 1)
11805 (match_operand:SI 2 "const_int_I_operand" "I")))
11806 (set (match_operand:SI 3 "s_register_operand" "=rk")
11807 (mem:SI (match_dup 1)))
11809 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11812 arm_output_multireg_pop (operands, /*return_pc=*/true,
11813 /*cond=*/const_true_rtx,
11819 [(set_attr "type" "load_16")
11820 (set_attr "predicable" "yes")
11821 (set (attr "length")
11822 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11823 /*write_back_p=*/true)"))]
;; Pop-with-return without base writeback.
11826 (define_insn "*pop_multiple_with_return"
11827 [(match_parallel 0 "pop_multiple_return"
11829 (set (match_operand:SI 2 "s_register_operand" "=rk")
11830 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11832 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11835 arm_output_multireg_pop (operands, /*return_pc=*/true,
11836 /*cond=*/const_true_rtx,
11842 [(set_attr "type" "load_16")
11843 (set_attr "predicable" "yes")
11844 (set (attr "length")
11845 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11846 /*write_back_p=*/false)"))]
11849 ;; Load into PC and return
;; Single-register return: post-increment load straight into the PC.
11850 (define_insn "*ldr_with_return"
11852 (set (reg:SI PC_REGNUM)
11853 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
11854 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11855 "ldr%?\t%|pc, [%0], #4"
11856 [(set_attr "type" "load_4")
11857 (set_attr "predicable" "yes")]
11859 ;; Pop for floating point registers (as used in epilogue RTL)
;; Pop VFP (DF) registers in the epilogue: builds "vldm <base>!, {%P0[-%P1]}"
;; from the first and last registers of the parallel, using a range form
;; when more than two registers are popped.
11860 (define_insn "*vfp_pop_multiple_with_writeback"
11861 [(match_parallel 0 "pop_multiple_fp"
11862 [(set (match_operand:SI 1 "s_register_operand" "+rk")
11863 (plus:SI (match_dup 1)
11864 (match_operand:SI 2 "const_int_I_operand" "I")))
11865 (set (match_operand:DF 3 "vfp_hard_register_operand" "")
11866 (mem:DF (match_dup 1)))])]
11867 "TARGET_32BIT && TARGET_VFP_BASE"
11870 int num_regs = XVECLEN (operands[0], 0);
11873 strcpy (pattern, \"vldm\\t\");
11874 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
11875 strcat (pattern, \"!, {\");
11876 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
11877 strcat (pattern, \"%P0\");
11878 if ((num_regs - 1) > 1)
11880 strcat (pattern, \"-%P1\");
11881 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
11884 strcat (pattern, \"}\");
11885 output_asm_insn (pattern, op_list);
11889 [(set_attr "type" "load_16")
11890 (set_attr "conds" "unconditional")
11891 (set_attr "predicable" "no")]
11894 ;; Special patterns for dealing with the constant pool
;; Constant-pool support: alignment directives and pool-entry emitters.
;; Align the constant pool to 32 bits.
11896 (define_insn "align_4"
11897 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
11900 assemble_align (32);
11903 [(set_attr "type" "no_insn")]
;; Align the constant pool to 64 bits.
11906 (define_insn "align_8"
11907 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
11910 assemble_align (64);
11913 [(set_attr "type" "no_insn")]
;; Mark the end of the constant table.
11916 (define_insn "consttable_end"
11917 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
11920 making_const_table = FALSE;
11923 [(set_attr "type" "no_insn")]
;; 1-byte pool entry, zero-padded to a 4-byte slot.
11926 (define_insn "consttable_1"
11927 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
11930 making_const_table = TRUE;
11931 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
11932 assemble_zeros (3);
11935 [(set_attr "length" "4")
11936 (set_attr "type" "no_insn")]
;; 2-byte pool entry (FP16 constants handled specially), padded to 4 bytes.
11939 (define_insn "consttable_2"
11940 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
11944 rtx x = operands[0];
11945 making_const_table = TRUE;
11946 switch (GET_MODE_CLASS (GET_MODE (x)))
11949 arm_emit_fp16_const (x);
11952 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
11953 assemble_zeros (2);
11958 [(set_attr "length" "4")
11959 (set_attr "type" "no_insn")]
;; 4-byte pool entry: floats via assemble_real; a stray HIGH is stripped
;; (see the comment below) before assemble_integer.
11962 (define_insn "consttable_4"
11963 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
11967 rtx x = operands[0];
11968 making_const_table = TRUE;
11969 scalar_float_mode float_mode;
11970 if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
11971 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
11974 /* XXX: Sometimes gcc does something really dumb and ends up with
11975 a HIGH in a constant pool entry, usually because it's trying to
11976 load into a VFP register.  We know this will always be used in
11977 combination with a LO_SUM which ignores the high bits, so just
11978 strip off the HIGH.  */
11979 if (GET_CODE (x) == HIGH)
11981 assemble_integer (x, 4, BITS_PER_WORD, 1);
11982 mark_symbol_refs_as_used (x);
11986 [(set_attr "length" "4")
11987 (set_attr "type" "no_insn")]
;; 8-byte pool entry.
11990 (define_insn "consttable_8"
11991 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
11995 making_const_table = TRUE;
11996 scalar_float_mode float_mode;
11997 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
11998 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
11999 float_mode, BITS_PER_WORD);
12001 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
12004 [(set_attr "length" "8")
12005 (set_attr "type" "no_insn")]
;; 16-byte pool entry.
12008 (define_insn "consttable_16"
12009 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
12013 making_const_table = TRUE;
12014 scalar_float_mode float_mode;
12015 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
12016 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
12017 float_mode, BITS_PER_WORD);
12019 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
12022 [(set_attr "length" "16")
12023 (set_attr "type" "no_insn")]
12026 ;; V5 Instructions,
;; Count leading zeros (ARMv5T+ clz).
12028 (define_insn "clzsi2"
12029 [(set (match_operand:SI 0 "s_register_operand" "=r")
12030 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
12031 "TARGET_32BIT && arm_arch5t"
12033 [(set_attr "predicable" "yes")
12034 (set_attr "type" "clz")])
;; Bit reverse (Thumb-2 rbit), modelled as an unspec.
12036 (define_insn "rbitsi2"
12037 [(set (match_operand:SI 0 "s_register_operand" "=r")
12038 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
12039 "TARGET_32BIT && arm_arch_thumb2"
12041 [(set_attr "predicable" "yes")
12042 (set_attr "type" "clz")])
12044 ;; Keep this as a CTZ expression until after reload and then split
12045 ;; into RBIT + CLZ.  Since RBIT is represented as an UNSPEC it is unlikely
12046 ;; to fold with any other expression.
;; Count trailing zeros = clz (rbit x); split after reload as per the
;; comment above.
12048 (define_insn_and_split "ctzsi2"
12049 [(set (match_operand:SI 0 "s_register_operand" "=r")
12050 (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
12051 "TARGET_32BIT && arm_arch_thumb2"
12053 "&& reload_completed"
12056 emit_insn (gen_rbitsi2 (operands[0], operands[1]));
12057 emit_insn (gen_clzsi2 (operands[0], operands[0]));
12061 ;; V5E instructions.
;; Data prefetch (ARMv5TE pld); operands 1/2 are the standard rw/locality
;; hints of the prefetch rtx.
12063 (define_insn "prefetch"
12064 [(prefetch (match_operand:SI 0 "address_operand" "p")
12065 (match_operand:SI 1 "" "")
12066 (match_operand:SI 2 "" ""))]
12067 "TARGET_32BIT && arm_arch5te"
12069 [(set_attr "type" "load_4")]
12072 ;; General predication pattern
;; General predication: any comparison of a CC register against zero may
;; guard an insn, except volatile references when TARGET_NO_VOLATILE_CE.
;; NOTE(review): the define_cond_exec header line appears to have been
;; elided by extraction (embedded line-number gap before 12075).
12075 [(match_operator 0 "arm_comparison_operator"
12076 [(match_operand 1 "cc_register" "")
12079 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
12081 [(set_attr "predicated" "yes")]
;; Zero-length marker that keeps a register live (emits no code).
12084 (define_insn "force_register_use"
12085 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
12088 [(set_attr "length" "0")
12089 (set_attr "type" "no_insn")]
12093 ;; Patterns for exception handling
;; Exception-handling return: dispatch to the ARM or Thumb expander.
12095 (define_expand "eh_return"
12096 [(use (match_operand 0 "general_operand"))]
12101 emit_insn (gen_arm_eh_return (operands[0]));
12103 emit_insn (gen_thumb_eh_return (operands[0]));
12108 ;; We can't expand this before we know where the link register is stored.
;; Store the EH return address once the frame layout is fixed; the split
;; runs after reload and calls arm_set_return_address with a scratch.
12109 (define_insn_and_split "arm_eh_return"
12110 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
12112 (clobber (match_scratch:SI 1 "=&r"))]
12115 "&& reload_completed"
12119 arm_set_return_address (operands[0], operands[1]);
;; Read the thread pointer from CP15 (hardware TLS register).
12127 (define_insn "load_tp_hard"
12128 [(set (match_operand:SI 0 "register_operand" "=r")
12129 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
12131 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
12132 [(set_attr "predicable" "yes")
12133 (set_attr "type" "mrs")]
12136 ;; Doesn't clobber R1-R3.  Must use r0 for the first operand.
;; Software TLS read via __aeabi_read_tp, FDPIC variant (also clobbers the
;; FDPIC register).
12137 (define_insn "load_tp_soft_fdpic"
12138 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
12139 (clobber (reg:SI FDPIC_REGNUM))
12140 (clobber (reg:SI LR_REGNUM))
12141 (clobber (reg:SI IP_REGNUM))
12142 (clobber (reg:CC CC_REGNUM))]
12143 "TARGET_SOFT_TP && TARGET_FDPIC"
12144 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
12145 [(set_attr "conds" "clob")
12146 (set_attr "type" "branch")]
12149 ;; Doesn't clobber R1-R3.  Must use r0 for the first operand.
;; Software TLS read via __aeabi_read_tp, non-FDPIC variant.
12150 (define_insn "load_tp_soft"
12151 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
12152 (clobber (reg:SI LR_REGNUM))
12153 (clobber (reg:SI IP_REGNUM))
12154 (clobber (reg:CC CC_REGNUM))]
12155 "TARGET_SOFT_TP && !TARGET_FDPIC"
12156 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
12157 [(set_attr "conds" "clob")
12158 (set_attr "type" "branch")]
12161 ;; tls descriptor call
;; TLS descriptor call: emits the LPIC<n> label (operand 1) then
;; "bl %c0(tlscall)"; result in r0, clobbers r1/LR/CC.
12162 (define_insn "tlscall"
12163 [(set (reg:SI R0_REGNUM)
12164 (unspec:SI [(reg:SI R0_REGNUM)
12165 (match_operand:SI 0 "" "X")
12166 (match_operand 1 "" "")] UNSPEC_TLS))
12167 (clobber (reg:SI R1_REGNUM))
12168 (clobber (reg:SI LR_REGNUM))
12169 (clobber (reg:SI CC_REGNUM))]
12172 targetm.asm_out.internal_label (asm_out_file, "LPIC",
12173 INTVAL (operands[1]));
12174 return "bl\\t%c0(tlscall)";
12176 [(set_attr "conds" "clob")
12177 (set_attr "length" "4")
12178 (set_attr "type" "branch")]
12181 ;; For thread pointer builtin
;; __builtin_thread_pointer: defer to arm_load_tp.
12182 (define_expand "get_thread_pointersi"
12183 [(match_operand:SI 0 "s_register_operand")]
12187 arm_load_tp (operands[0]);
12193 ;; We only care about the lower 16 bits of the constant
12194 ;; being inserted into the upper 16 bits of the register.
;; Insert a 16-bit constant into the upper half of a register (movt-style
;; zero_extract store); only the low 16 bits of operand 1 matter, per the
;; comment above this pattern.
12195 (define_insn "*arm_movtas_ze"
12196 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
12199 (match_operand:SI 1 "const_int_operand" ""))]
12204 [(set_attr "arch" "32,v8mb")
12205 (set_attr "predicable" "yes")
12206 (set_attr "length" "4")
12207 (set_attr "type" "alu_sreg")]
;; 32-bit byte swap via the rev instruction (Thumb-1 / Thumb-2 / ARM
;; alternatives).
12210 (define_insn "*arm_rev"
12211 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12212 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
12218 [(set_attr "arch" "t1,t2,32")
12219 (set_attr "length" "2,2,4")
12220 (set_attr "predicable" "no,yes,yes")
12221 (set_attr "type" "rev")]
;; Byte-swap sequence for ARM targets without the rev instruction:
;; xor/rotate/shift/and steps through temporaries 2 and 3.
;; NOTE(review): embedded original line numbers are non-contiguous, so
;; some steps of these two expand templates appear to have been elided.
12224 (define_expand "arm_legacy_rev"
12225 [(set (match_operand:SI 2 "s_register_operand")
12226 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
12230 (lshiftrt:SI (match_dup 2)
12232 (set (match_operand:SI 3 "s_register_operand")
12233 (rotatert:SI (match_dup 1)
12236 (and:SI (match_dup 2)
12237 (const_int -65281)))
12238 (set (match_operand:SI 0 "s_register_operand")
12239 (xor:SI (match_dup 3)
12245 ;; Reuse temporaries to keep register pressure down.
;; Thumb-1 byte-swap sequence using shifts, iors and rotates across
;; temporaries 2-5 (no rev instruction available).
12246 (define_expand "thumb_legacy_rev"
12247 [(set (match_operand:SI 2 "s_register_operand")
12248 (ashift:SI (match_operand:SI 1 "s_register_operand")
12250 (set (match_operand:SI 3 "s_register_operand")
12251 (lshiftrt:SI (match_dup 1)
12254 (ior:SI (match_dup 3)
12256 (set (match_operand:SI 4 "s_register_operand")
12258 (set (match_operand:SI 5 "s_register_operand")
12259 (rotatert:SI (match_dup 1)
12262 (ashift:SI (match_dup 5)
12265 (lshiftrt:SI (match_dup 5)
12268 (ior:SI (match_dup 5)
12271 (rotatert:SI (match_dup 5)
12273 (set (match_operand:SI 0 "s_register_operand")
12274 (ior:SI (match_dup 5)
12280 ;; ARM-specific expansion of signed mod by power of 2
12281 ;; using conditional negate.
12282 ;; For r0 % n where n is a power of 2 produce:
12284 ;; and r0, r0, #(n - 1)
12285 ;; and r1, r1, #(n - 1)
12286 ;; rsbpl r0, r1, #0
;; Signed modulo by a power of two using a conditional negate, as sketched
;; in the comment above: mask both the value and its negation, then select
;; the negated masked value when the input is negative.  FAILs (falls back
;; to the generic expansion) unless operand 2 is a power of two.
12288 (define_expand "modsi3"
12289 [(match_operand:SI 0 "register_operand")
12290 (match_operand:SI 1 "register_operand")
12291 (match_operand:SI 2 "const_int_operand")]
12294 HOST_WIDE_INT val = INTVAL (operands[2]);
12297 || exact_log2 (val) <= 0)
12300 rtx mask = GEN_INT (val - 1);
12302 /* In the special case of x0 % 2 we can do the even shorter:
12305 rsblt r0, r0, #0.  */
12309 rtx cc_reg = arm_gen_compare_reg (LT,
12310 operands[1], const0_rtx, NULL_RTX);
12311 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
12312 rtx masked = gen_reg_rtx (SImode);
12314 emit_insn (gen_andsi3 (masked, operands[1], mask));
12315 emit_move_insn (operands[0],
12316 gen_rtx_IF_THEN_ELSE (SImode, cond,
12317 gen_rtx_NEG (SImode,
12323 rtx neg_op = gen_reg_rtx (SImode);
12324 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
12327 /* Extract the condition register and mode.  */
12328 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
12329 rtx cc_reg = SET_DEST (cmp);
12330 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
12332 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
12334 rtx masked_neg = gen_reg_rtx (SImode);
12335 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
12337 /* We want a conditional negate here, but emitting COND_EXEC rtxes
12338 during expand does not always work.  Do an IF_THEN_ELSE instead.  */
12339 emit_move_insn (operands[0],
12340 gen_rtx_IF_THEN_ELSE (SImode, cond,
12341 gen_rtx_NEG (SImode, masked_neg),
;; 32-bit byte swap.  On arm_arch6+ a single REV instruction matches
;; elsewhere; otherwise (and only when not optimising for size) this
;; expander allocates scratch registers and emits the multi-insn
;; legacy sequence — thumb_legacy_rev (needs op2-op5) for Thumb,
;; arm_legacy_rev for ARM state.
12349 (define_expand "bswapsi2"
12350 [(set (match_operand:SI 0 "s_register_operand")
12351 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
12352 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
12356 rtx op2 = gen_reg_rtx (SImode);
12357 rtx op3 = gen_reg_rtx (SImode);
12361 rtx op4 = gen_reg_rtx (SImode);
12362 rtx op5 = gen_reg_rtx (SImode);
12364 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
12365 op2, op3, op4, op5));
12369 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
12378 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
12379 ;; and unsigned variants, respectively. For rev16, expose
12380 ;; byte-swapping in the lower 16 bits only.
;; Byte-swap the low halfword and sign-extend the result to SImode:
;; maps to the REVSH instruction.  Three alternatives: Thumb-1 low
;; regs (2 bytes), Thumb-2 low regs (2 bytes), 32-bit encoding (4).
;; NOTE(review): the insn condition and output templates fall in
;; lines missing from this excerpt.
12381 (define_insn "*arm_revsh"
12382 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12383 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
12389 [(set_attr "arch" "t1,t2,32")
12390 (set_attr "length" "2,2,4")
12391 (set_attr "type" "rev")]
;; HImode byte swap (swap the two bytes of a halfword); per the file
;; comment above, this is the unsigned bswap16 variant using REV16
;; with byte-swapping exposed in the lower 16 bits only.
12394 (define_insn "*arm_rev16"
12395 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
12396 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
12402 [(set_attr "arch" "t1,t2,32")
12403 (set_attr "length" "2,2,4")
12404 (set_attr "type" "rev")]
12407 ;; There are no canonicalisation rules for the position of the lshiftrt, ashift
12408 ;; operations within an IOR/AND RTX, therefore we have two patterns matching
12409 ;; each valid permutation.
;; SImode REV16 recognised from its open-coded form:
;; (x << 8 & mask3) | (x >> 8 & mask2), with the shift/mask constants
;; validated by the aarch_rev16_*_mask_imm_p predicates.  The ashift
;; appears first here; see *_alt below for the other IOR ordering.
12411 (define_insn "arm_rev16si2"
12412 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
12413 (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
12415 (match_operand:SI 3 "const_int_operand" "n,n,n"))
12416 (and:SI (lshiftrt:SI (match_dup 1)
12418 (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
12420 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
12421 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
12423 [(set_attr "arch" "t1,t2,32")
12424 (set_attr "length" "2,2,4")
12425 (set_attr "type" "rev")]
;; Same as arm_rev16si2 but with the IOR operands in the opposite
;; order (lshiftrt term first).  Needed because, per the comment
;; above, there is no canonical ordering of lshiftrt/ashift inside
;; an IOR.
12428 (define_insn "arm_rev16si2_alt"
12429 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
12430 (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
12432 (match_operand:SI 2 "const_int_operand" "n,n,n"))
12433 (and:SI (ashift:SI (match_dup 1)
12435 (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
12437 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
12438 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
12440 [(set_attr "arch" "t1,t2,32")
12441 (set_attr "length" "2,2,4")
12442 (set_attr "type" "rev")]
;; 16-bit byte swap expander.
;; NOTE(review): its condition string and any preparation statements
;; fall in lines missing from this excerpt.
12445 (define_expand "bswaphi2"
12446 [(set (match_operand:HI 0 "s_register_operand")
12447 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
12452 ;; Patterns for LDRD/STRD in Thumb2 mode
;; Load-pair from [base + imm] / [base + imm + 4] as a single LDRD.
;; Only after reload, and only when the two offsets are consecutive
;; words and operands_ok_ldrd_strd approves the register pairing and
;; offset range.
12454 (define_insn "*thumb2_ldrd"
12455 [(set (match_operand:SI 0 "s_register_operand" "=r")
12456 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12457 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
12458 (set (match_operand:SI 3 "s_register_operand" "=r")
12459 (mem:SI (plus:SI (match_dup 1)
12460 (match_operand:SI 4 "const_int_operand" ""))))]
12461 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12462 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
12463 && (operands_ok_ldrd_strd (operands[0], operands[3],
12464 operands[1], INTVAL (operands[2]),
12466 "ldrd%?\t%0, %3, [%1, %2]"
12467 [(set_attr "type" "load_8")
12468 (set_attr "predicable" "yes")])
;; LDRD with zero base offset: loads [base] and [base + 4]
;; (the const_int 4 in the second mem is in a line missing from
;; this excerpt, implied by the 0 passed to operands_ok_ldrd_strd).
12470 (define_insn "*thumb2_ldrd_base"
12471 [(set (match_operand:SI 0 "s_register_operand" "=r")
12472 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12473 (set (match_operand:SI 2 "s_register_operand" "=r")
12474 (mem:SI (plus:SI (match_dup 1)
12476 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12477 && (operands_ok_ldrd_strd (operands[0], operands[2],
12478 operands[1], 0, false, true))"
12479 "ldrd%?\t%0, %2, [%1]"
12480 [(set_attr "type" "load_8")
12481 (set_attr "predicable" "yes")])
;; LDRD for the pair [base - 4] / [base]: emitted with an explicit
;; #-4 offset (the -4 passed to operands_ok_ldrd_strd matches the
;; first load's offset).
12483 (define_insn "*thumb2_ldrd_base_neg"
12484 [(set (match_operand:SI 0 "s_register_operand" "=r")
12485 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12487 (set (match_operand:SI 2 "s_register_operand" "=r")
12488 (mem:SI (match_dup 1)))]
12489 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12490 && (operands_ok_ldrd_strd (operands[0], operands[2],
12491 operands[1], -4, false, true))"
12492 "ldrd%?\t%0, %2, [%1, #-4]"
12493 [(set_attr "type" "load_8")
12494 (set_attr "predicable" "yes")])
;; Store-pair to [base + imm] / [base + imm + 4] as a single STRD;
;; mirror image of *thumb2_ldrd above (operand roles: 0 = base,
;; 1/3 = offsets, 2/4 = stored registers).
12496 (define_insn "*thumb2_strd"
12497 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12498 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
12499 (match_operand:SI 2 "s_register_operand" "r"))
12500 (set (mem:SI (plus:SI (match_dup 0)
12501 (match_operand:SI 3 "const_int_operand" "")))
12502 (match_operand:SI 4 "s_register_operand" "r"))]
12503 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12504 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
12505 && (operands_ok_ldrd_strd (operands[2], operands[4],
12506 operands[0], INTVAL (operands[1]),
12508 "strd%?\t%2, %4, [%0, %1]"
12509 [(set_attr "type" "store_8")
12510 (set_attr "predicable" "yes")])
;; STRD with zero base offset: stores to [base] and [base + 4].
12512 (define_insn "*thumb2_strd_base"
12513 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
12514 (match_operand:SI 1 "s_register_operand" "r"))
12515 (set (mem:SI (plus:SI (match_dup 0)
12517 (match_operand:SI 2 "s_register_operand" "r"))]
12518 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12519 && (operands_ok_ldrd_strd (operands[1], operands[2],
12520 operands[0], 0, false, false))"
12521 "strd%?\t%1, %2, [%0]"
12522 [(set_attr "type" "store_8")
12523 (set_attr "predicable" "yes")])
;; STRD for the pair [base - 4] / [base], emitted with an explicit
;; #-4 offset.
12525 (define_insn "*thumb2_strd_base_neg"
12526 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12528 (match_operand:SI 1 "s_register_operand" "r"))
12529 (set (mem:SI (match_dup 0))
12530 (match_operand:SI 2 "s_register_operand" "r"))]
12531 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12532 && (operands_ok_ldrd_strd (operands[1], operands[2],
12533 operands[0], -4, false, false))"
12534 "strd%?\t%1, %2, [%0, #-4]"
12535 [(set_attr "type" "store_8")
12536 (set_attr "predicable" "yes")])
12538 ;; ARMv8 CRC32 instructions.
;; One pattern per CRC32/CRC32C variant via the <crc_variant>
;; iterator; operand 1 is the accumulated CRC, operand 2 the new
;; data in the variant's width (<crc_mode>).  NOTE(review): the
;; unspec code and insn condition are in lines missing from this
;; excerpt.
12539 (define_insn "arm_<crc_variant>"
12540 [(set (match_operand:SI 0 "s_register_operand" "=r")
12541 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
12542 (match_operand:<crc_mode> 2 "s_register_operand" "r")]
12545 "<crc_variant>\\t%0, %1, %2"
12546 [(set_attr "type" "crc")
12547 (set_attr "conds" "unconditional")]
12550 ;; Load the load/store double peephole optimizations.
12551 (include "ldrdstrd.md")
12553 ;; Load the load/store multiple patterns
12554 (include "ldmstm.md")
12556 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
12557 ;; large lists without explicit writeback generated for APCS_FRAME epilogue.
12558 ;; The operands are validated through the load_multiple_operation
12559 ;; match_parallel predicate rather than through constraints so enable it only
;; Whole-parallel match: operand 0 is the full PARALLEL, checked by
;; load_multiple_operation; the output is produced in C by
;; arm_output_multireg_pop.
12561 (define_insn "*load_multiple"
12562 [(match_parallel 0 "load_multiple_operation"
12563 [(set (match_operand:SI 2 "s_register_operand" "=rk")
12564 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12566 "TARGET_32BIT && reload_completed"
12569 arm_output_multireg_pop (operands, /*return_pc=*/false,
12570 /*cond=*/const_true_rtx,
12576 [(set_attr "predicable" "yes")]
;; Soft-float copysign for SFmode: copy operand 2 (magnitude source)
;; to operand 0, then overwrite bits 0..30 with those of operand 1
;; via a Thumb-2 bitfield insert on the SImode view, leaving operand
;; 2's sign bit (bit 31) in place.
12579 (define_expand "copysignsf3"
12580 [(match_operand:SF 0 "register_operand")
12581 (match_operand:SF 1 "register_operand")
12582 (match_operand:SF 2 "register_operand")]
12583 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
12585 emit_move_insn (operands[0], operands[2]);
12586 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
12587 GEN_INT (31), GEN_INT (0),
12588 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
;; Soft-float copysign for DFmode.  Only the high word carries the
;; sign: shift operand 2's high word right by 31 to isolate its sign
;; bit, insert that single bit at position 31 of a copy of operand
;; 1's high word, then assemble the result from operand 1's low word
;; and the patched high word.
12593 (define_expand "copysigndf3"
12594 [(match_operand:DF 0 "register_operand")
12595 (match_operand:DF 1 "register_operand")
12596 (match_operand:DF 2 "register_operand")]
12597 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
12599 rtx op0_low = gen_lowpart (SImode, operands[0]);
12600 rtx op0_high = gen_highpart (SImode, operands[0]);
12601 rtx op1_low = gen_lowpart (SImode, operands[1]);
12602 rtx op1_high = gen_highpart (SImode, operands[1]);
12603 rtx op2_high = gen_highpart (SImode, operands[2]);
12605 rtx scratch1 = gen_reg_rtx (SImode);
12606 rtx scratch2 = gen_reg_rtx (SImode);
12607 emit_move_insn (scratch1, op2_high);
12608 emit_move_insn (scratch2, op1_high);
/* scratch1 = sign bit of op2 (bit 31 shifted down to bit 0).  */
12610 emit_insn(gen_rtx_SET(scratch1,
12611 gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT(31))));
/* Insert that bit at position 31 of op1's high word.  */
12612 emit_insn(gen_insv_t2(scratch2, GEN_INT(1), GEN_INT(31), scratch1));
12613 emit_move_insn (op0_low, op1_low);
12614 emit_move_insn (op0_high, scratch2);
12620 ;; movmisalign for DImode
;; Split a misaligned 64-bit move into two misaligned SImode moves
;; on the low and high halves; the SImode expander below does the
;; real work.
12621 (define_expand "movmisaligndi"
12622 [(match_operand:DI 0 "general_operand")
12623 (match_operand:DI 1 "general_operand")]
12626 rtx lo_op0 = gen_lowpart (SImode, operands[0]);
12627 rtx lo_op1 = gen_lowpart (SImode, operands[1]);
12628 rtx hi_op0 = gen_highpart_mode (SImode, DImode, operands[0]);
12629 rtx hi_op1 = gen_highpart_mode (SImode, DImode, operands[1]);
12631 emit_insn (gen_movmisalignsi (lo_op0, lo_op1));
12632 emit_insn (gen_movmisalignsi (hi_op0, hi_op1));
12636 ;; movmisalign patterns for HImode and SImode.
;; Misaligned loads go through the unaligned_load* insns (HImode
;; loads zero-extend into an SImode temporary and are then narrowed
;; back); anything else goes through unaligned_store<mode>.  If
;; neither side is a register, operand 1 is forced into one first so
;; the expansion cannot fail.
12637 (define_expand "movmisalign<mode>"
12638 [(match_operand:HSI 0 "general_operand")
12639 (match_operand:HSI 1 "general_operand")]
12642 /* This pattern is not permitted to fail during expansion: if both arguments
12643 are non-registers (e.g. memory := constant), force operand 1 into a
12645 rtx (* gen_unaligned_load)(rtx, rtx);
12646 rtx tmp_dest = operands[0];
12647 if (!s_register_operand (operands[0], <MODE>mode)
12648 && !s_register_operand (operands[1], <MODE>mode))
12649 operands[1] = force_reg (<MODE>mode, operands[1]);
12651 if (<MODE>mode == HImode)
12653 gen_unaligned_load = gen_unaligned_loadhiu;
12654 tmp_dest = gen_reg_rtx (SImode);
12657 gen_unaligned_load = gen_unaligned_loadsi;
12659 if (MEM_P (operands[1]))
12661 emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
12662 if (<MODE>mode == HImode)
12663 emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
12666 emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
;; Coprocessor data-processing (CDP/CDP2 via the CDPI iterator).
;; Operands: 0 = coprocessor number, 1 = opc1, 2-4 = CRd/CRn/CRm
;; register numbers, 5 = opc2; arm_const_bounds diagnoses
;; out-of-range immediates at output time.
12671 (define_insn "arm_<cdp>"
12672 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12673 (match_operand:SI 1 "immediate_operand" "n")
12674 (match_operand:SI 2 "immediate_operand" "n")
12675 (match_operand:SI 3 "immediate_operand" "n")
12676 (match_operand:SI 4 "immediate_operand" "n")
12677 (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
12678 "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
12680 arm_const_bounds (operands[0], 0, 16);
12681 arm_const_bounds (operands[1], 0, 16);
12682 arm_const_bounds (operands[2], 0, (1 << 5));
12683 arm_const_bounds (operands[3], 0, (1 << 5));
12684 arm_const_bounds (operands[4], 0, (1 << 5));
12685 arm_const_bounds (operands[5], 0, 8);
12686 return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
12688 [(set_attr "length" "4")
12689 (set_attr "type" "coproc")])
;; Coprocessor load (LDC family via LDCI): operand 0 = coprocessor
;; number, 1 = destination coprocessor register, 2 = memory source
;; (Uz constraint restricts the addressing mode).
12691 (define_insn "*ldc"
12692 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12693 (match_operand:SI 1 "immediate_operand" "n")
12694 (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
12695 "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
12697 arm_const_bounds (operands[0], 0, 16);
12698 arm_const_bounds (operands[1], 0, (1 << 5));
12699 return "<ldc>\\tp%c0, CR%c1, %2";
12701 [(set_attr "length" "4")
12702 (set_attr "type" "coproc")])
;; Coprocessor store (STC family via STCI); mirror of *ldc with
;; operand 2 as the memory destination.
12704 (define_insn "*stc"
12705 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12706 (match_operand:SI 1 "immediate_operand" "n")
12707 (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
12708 "arm_coproc_builtin_available (VUNSPEC_<STC>)"
12710 arm_const_bounds (operands[0], 0, 16);
12711 arm_const_bounds (operands[1], 0, (1 << 5));
12712 return "<stc>\\tp%c0, CR%c1, %2";
12714 [(set_attr "length" "4")
12715 (set_attr "type" "coproc")])
;; Builtin entry point for LDC: wraps the register address in a mem
;; so the *ldc insn above can match it.
12717 (define_expand "arm_<ldc>"
12718 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
12719 (match_operand:SI 1 "immediate_operand")
12720 (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
12721 "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
;; Builtin entry point for STC; mirror of arm_<ldc> above.
12723 (define_expand "arm_<stc>"
12724 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
12725 (match_operand:SI 1 "immediate_operand")
12726 (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
12727 "arm_coproc_builtin_available (VUNSPEC_<STC>)")
;; Move core register to coprocessor (MCR/MCR2 via MCRI).  Operand 2
;; is the core source register; the (use (match_dup 2)) keeps it
;; live.  Immediates: 0 = coproc number, 1 = opc1, 3/4 = CRn/CRm,
;; 5 = opc2.
12729 (define_insn "arm_<mcr>"
12730 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12731 (match_operand:SI 1 "immediate_operand" "n")
12732 (match_operand:SI 2 "s_register_operand" "r")
12733 (match_operand:SI 3 "immediate_operand" "n")
12734 (match_operand:SI 4 "immediate_operand" "n")
12735 (match_operand:SI 5 "immediate_operand" "n")] MCRI)
12736 (use (match_dup 2))]
12737 "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
12739 arm_const_bounds (operands[0], 0, 16);
12740 arm_const_bounds (operands[1], 0, 8);
12741 arm_const_bounds (operands[3], 0, (1 << 5));
12742 arm_const_bounds (operands[4], 0, (1 << 5));
12743 arm_const_bounds (operands[5], 0, 8);
12744 return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
12746 [(set_attr "length" "4")
12747 (set_attr "type" "coproc")])
;; Move coprocessor register to core register (MRC/MRC2 via MRCI);
;; operand 0 is the core destination, immediates as for arm_<mcr>.
12749 (define_insn "arm_<mrc>"
12750 [(set (match_operand:SI 0 "s_register_operand" "=r")
12751 (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
12752 (match_operand:SI 2 "immediate_operand" "n")
12753 (match_operand:SI 3 "immediate_operand" "n")
12754 (match_operand:SI 4 "immediate_operand" "n")
12755 (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
12756 "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
12758 arm_const_bounds (operands[1], 0, 16);
12759 arm_const_bounds (operands[2], 0, 8);
12760 arm_const_bounds (operands[3], 0, (1 << 5));
12761 arm_const_bounds (operands[4], 0, (1 << 5));
12762 arm_const_bounds (operands[5], 0, 8);
12763 return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
12765 [(set_attr "length" "4")
12766 (set_attr "type" "coproc")])
;; Move a core register pair to a coprocessor (MCRR/MCRR2 via
;; MCRRI): operand 2 is a DImode value emitted as its two halves
;; (%Q2 low, %R2 high).
12768 (define_insn "arm_<mcrr>"
12769 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12770 (match_operand:SI 1 "immediate_operand" "n")
12771 (match_operand:DI 2 "s_register_operand" "r")
12772 (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
12773 (use (match_dup 2))]
12774 "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
12776 arm_const_bounds (operands[0], 0, 16);
12777 arm_const_bounds (operands[1], 0, 8);
12778 arm_const_bounds (operands[3], 0, (1 << 5));
12779 return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
12781 [(set_attr "length" "4")
12782 (set_attr "type" "coproc")])
;; Move a coprocessor register to a core register pair (MRRC/MRRC2
;; via MRRCI); DImode operand 0 receives the pair (%Q0 low, %R0
;; high).
12784 (define_insn "arm_<mrrc>"
12785 [(set (match_operand:DI 0 "s_register_operand" "=r")
12786 (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
12787 (match_operand:SI 2 "immediate_operand" "n")
12788 (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
12789 "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
12791 arm_const_bounds (operands[1], 0, 16);
12792 arm_const_bounds (operands[2], 0, 8);
12793 arm_const_bounds (operands[3], 0, (1 << 5));
12794 return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
12796 [(set_attr "length" "4")
12797 (set_attr "type" "coproc")])
;; Speculation barrier expander.  On architectures without a usable
;; barrier instruction (neither arm_arch7 nor arm_arch8) it falls
;; back to a libgcc helper; otherwise the pattern matches
;; *speculation_barrier_insn below.
12799 (define_expand "speculation_barrier"
12800 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
12803 /* For thumb1 (except Armv8 derivatives), and for pre-Armv7 we don't
12804 have a usable barrier (and probably don't need one in practice).
12805 But to be safe if such code is run on later architectures, call a
12806 helper function in libgcc that will do the thing for the active
12808 if (!(arm_arch7 || arm_arch8))
12810 arm_emit_speculation_barrier_function ();
12816 ;; Generate a hard speculation barrier when we have not enabled speculation
;; 8-byte sequence on arm_arch7/arm_arch8 targets; the output
;; template lies in a line missing from this excerpt (presumably
;; a DSB/ISB pair — verify against the full file).
12818 (define_insn "*speculation_barrier_insn"
12819 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
12820 "arm_arch7 || arm_arch8"
12822 [(set_attr "type" "block")
12823 (set_attr "length" "8")]
12826 ;; Vector bits common to IWMMXT, Neon and MVE
12827 (include "vec-common.md")
12828 ;; Load the Intel Wireless Multimedia Extension patterns
12829 (include "iwmmxt.md")
12830 ;; Load the VFP co-processor patterns
12832 ;; Thumb-1 patterns
12833 (include "thumb1.md")
12834 ;; Thumb-2 patterns
12835 (include "thumb2.md")
12837 (include "neon.md")
12839 (include "crypto.md")
12840 ;; Synchronization Primitives
12841 (include "sync.md")
12842 ;; Fixed-point patterns
12843 (include "arm-fixed.md")
12844 ;; M-profile Vector Extension