1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2021 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
7 ;; This file is part of GCC.
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
26 ;;---------------------------------------------------------------------------
29 ;; Register numbers -- All machine registers should be defined here
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (R4_REGNUM 4) ; Fifth CORE register
34 (FDPIC_REGNUM 9) ; FDPIC register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (LAST_ARM_REGNUM 15) ;
40 (CC_REGNUM 100) ; Condition code pseudo register
41 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
42 (APSRQ_REGNUM 104) ; Q bit pseudo register
43 (APSRGE_REGNUM 105) ; GE bits pseudo register
44 (VPR_REGNUM 106) ; Vector Predication Register - MVE register.
47 ;; 3rd operand to select_dominance_cc_mode
54 ;; conditional compare combination
65 ;;---------------------------------------------------------------------------
68 ;; Processor type. This is created automatically from arm-cores.def.
69 (include "arm-tune.md")
71 ;; Instruction classification types
74 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
75 ; generating ARM code. This is used to control the length of some insn
76 ; patterns that share the same RTL in both ARM and Thumb code.
77 (define_attr "is_thumb" "yes,no"
78 (const (if_then_else (symbol_ref "TARGET_THUMB")
79 (const_string "yes") (const_string "no"))))
81 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
82 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
84 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
85 (define_attr "is_thumb1" "yes,no"
86 (const (if_then_else (symbol_ref "TARGET_THUMB1")
87 (const_string "yes") (const_string "no"))))
89 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
90 ; The arm_restrict_it flag enables the "short IT" feature which
91 ; restricts IT blocks to a single 16-bit instruction.
92 ; This attribute should only be used on 16-bit Thumb-2 instructions
93 ; which may be predicated (the "predicable" attribute must be set).
94 (define_attr "predicable_short_it" "no,yes" (const_string "no"))
96 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
97 ; This attribute should only be used on instructions which may emit
98 ; an IT block in their expansion which is not a short IT.
99 (define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
101 ; Mark an instruction sequence as the required way of loading a
102 ; constant when -mpure-code is enabled (which implies
103 ; arm_disable_literal_pool)
104 (define_attr "required_for_purecode" "no,yes" (const_string "no"))
106 ;; Operand number of an input operand that is shifted. Zero if the
107 ;; given instruction does not shift one of its input operands.
108 (define_attr "shift" "" (const_int 0))
110 ;; [For compatibility with AArch64 in pipeline models]
111 ;; Attribute that specifies whether or not the instruction touches fp
113 (define_attr "fp" "no,yes" (const_string "no"))
115 ; Floating Point Unit. If we only have floating point emulation, then there
116 ; is no point in scheduling the floating point insns. (Well, for best
117 ; performance we should try and group them together).
118 (define_attr "fpu" "none,vfp"
119 (const (symbol_ref "arm_fpu_attr")))
121 ; Predicated means that the insn form is conditionally executed based on a
122 ; predicate. We default to 'no' because no Thumb patterns match this rule
123 ; and not all ARM insns do.
124 (define_attr "predicated" "yes,no" (const_string "no"))
126 ; LENGTH of an instruction (in bytes)
127 (define_attr "length" ""
130 ; The architecture which supports the instruction (or alternative).
131 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
132 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
133 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
134 ; arm_arch6. "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
135 ; Baseline. This attribute is used to compute attribute "enabled",
136 ; use type "any" to enable an alternative in all cases.
137 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,v8mb,iwmmxt,iwmmxt2,armv6_or_vfpv3,neon,mve"
138 (const_string "any"))
140 (define_attr "arch_enabled" "no,yes"
141 (cond [(eq_attr "arch" "any")
144 (and (eq_attr "arch" "a")
145 (match_test "TARGET_ARM"))
148 (and (eq_attr "arch" "t")
149 (match_test "TARGET_THUMB"))
152 (and (eq_attr "arch" "t1")
153 (match_test "TARGET_THUMB1"))
156 (and (eq_attr "arch" "t2")
157 (match_test "TARGET_THUMB2"))
160 (and (eq_attr "arch" "32")
161 (match_test "TARGET_32BIT"))
164 (and (eq_attr "arch" "v6")
165 (match_test "TARGET_32BIT && arm_arch6"))
168 (and (eq_attr "arch" "nov6")
169 (match_test "TARGET_32BIT && !arm_arch6"))
172 (and (eq_attr "arch" "v6t2")
173 (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
176 (and (eq_attr "arch" "v8mb")
177 (match_test "TARGET_THUMB1 && arm_arch8"))
180 (and (eq_attr "arch" "iwmmxt2")
181 (match_test "TARGET_REALLY_IWMMXT2"))
184 (and (eq_attr "arch" "armv6_or_vfpv3")
185 (match_test "arm_arch6 || TARGET_VFP3"))
188 (and (eq_attr "arch" "neon")
189 (match_test "TARGET_NEON"))
192 (and (eq_attr "arch" "mve")
193 (match_test "TARGET_HAVE_MVE"))
197 (const_string "no")))
199 (define_attr "opt" "any,speed,size"
200 (const_string "any"))
202 (define_attr "opt_enabled" "no,yes"
203 (cond [(eq_attr "opt" "any")
206 (and (eq_attr "opt" "speed")
207 (match_test "optimize_function_for_speed_p (cfun)"))
210 (and (eq_attr "opt" "size")
211 (match_test "optimize_function_for_size_p (cfun)"))
212 (const_string "yes")]
213 (const_string "no")))
215 (define_attr "use_literal_pool" "no,yes"
216 (cond [(and (eq_attr "type" "f_loads,f_loadd")
217 (match_test "CONSTANT_P (operands[1])"))
218 (const_string "yes")]
219 (const_string "no")))
221 ; Enable all alternatives that are both arch_enabled and insn_enabled.
222 ; FIXME:: opt_enabled has been temporarily removed till the time we have
223 ; an attribute that allows the use of such alternatives.
224 ; This depends on caching of speed_p, size_p on a per
225 ; alternative basis. The problem is that the enabled attribute
226 ; cannot depend on any state that is not cached or is not constant
227 ; for a compilation unit. We probably need a generic "hot/cold"
228 ; alternative which if implemented can help with this. We disable this
229 ; until such a time as this is implemented and / or the improvements or
230 ; regressions with removing this attribute are double checked.
231 ; See ashldi3_neon and <shift>di3_neon in neon.md.
233 (define_attr "enabled" "no,yes"
234 (cond [(and (eq_attr "predicable_short_it" "no")
235 (and (eq_attr "predicated" "yes")
236 (match_test "arm_restrict_it")))
239 (and (eq_attr "enabled_for_short_it" "no")
240 (match_test "arm_restrict_it"))
243 (and (eq_attr "required_for_purecode" "yes")
244 (not (match_test "arm_disable_literal_pool")))
247 (eq_attr "arch_enabled" "no")
249 (const_string "yes")))
251 ; POOL_RANGE is how far away from a constant pool entry that this insn
252 ; can be placed. If the distance is zero, then this insn will never
253 ; reference the pool.
254 ; Note that for Thumb constant pools the PC value is rounded down to the
255 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
256 ; Thumb insns) should be set to <max_range> - 2.
257 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
258 ; before its address. It is set to <max_range> - (8 + <data_size>).
259 (define_attr "arm_pool_range" "" (const_int 0))
260 (define_attr "thumb2_pool_range" "" (const_int 0))
261 (define_attr "arm_neg_pool_range" "" (const_int 0))
262 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
264 (define_attr "pool_range" ""
265 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
266 (attr "arm_pool_range")))
267 (define_attr "neg_pool_range" ""
268 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
269 (attr "arm_neg_pool_range")))
271 ; An assembler sequence may clobber the condition codes without us knowing.
272 ; If such an insn references the pool, then we have no way of knowing how,
273 ; so use the most conservative value for pool_range.
274 (define_asm_attributes
275 [(set_attr "conds" "clob")
276 (set_attr "length" "4")
277 (set_attr "pool_range" "250")])
279 ; Load scheduling, set from the arm_ld_sched variable
280 ; initialized by arm_option_override()
281 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
283 ; condition codes: this one is used by final_prescan_insn to speed up
284 ; conditionalizing instructions. It saves having to scan the rtl to see if
285 ; it uses or alters the condition codes.
287 ; USE means that the condition codes are used by the insn in the process of
288 ; outputting code, this means (at present) that we can't use the insn in
291 ; SET means that the purpose of the insn is to set the condition codes in a
292 ; well defined manner.
294 ; CLOB means that the condition codes are altered in an undefined manner, if
295 ; they are altered at all
297 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
298 ; that the instruction does not use or alter the condition codes.
300 ; NOCOND means that the instruction does not use or alter the condition
301 ; codes but can be converted into a conditionally executed instruction.
303 (define_attr "conds" "use,set,clob,unconditional,nocond"
305 (ior (eq_attr "is_thumb1" "yes")
306 (eq_attr "type" "call"))
307 (const_string "clob")
309 (ior (eq_attr "is_neon_type" "yes")
310 (eq_attr "is_mve_type" "yes"))
311 (const_string "unconditional")
312 (const_string "nocond"))))
314 ; Predicable means that the insn can be conditionally executed based on
315 ; an automatically added predicate (additional patterns are generated by
316 ; gen...). We default to 'no' because no Thumb patterns match this rule
317 ; and not all ARM patterns do.
318 (define_attr "predicable" "no,yes" (const_string "no"))
320 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
321 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
322 ; suffer blockages enough to warrant modelling this (and it can adversely
323 ; affect the schedule).
324 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
326 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
327 ; to stall the processor. Used with model_wbuf above.
328 (define_attr "write_conflict" "no,yes"
329 (if_then_else (eq_attr "type"
332 (const_string "no")))
334 ; Classify the insns into those that take one cycle and those that take more
335 ; than one on the main cpu execution unit.
336 (define_attr "core_cycles" "single,multi"
337 (if_then_else (eq_attr "type"
338 "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
339 alu_shift_imm_lsl_1to4, alu_shift_imm_other, alu_shift_reg, alu_dsp_reg,\
340 alus_ext, alus_imm, alus_sreg,\
341 alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
342 logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
343 logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
344 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
345 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
346 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
347 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
348 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
349 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
350 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
351 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
352 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
353 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
354 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
355 (const_string "single")
356 (const_string "multi")))
358 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
359 ;; distant label. Only applicable to Thumb code.
360 (define_attr "far_jump" "yes,no" (const_string "no"))
363 ;; The number of machine instructions this pattern expands to.
364 ;; Used for Thumb-2 conditional execution.
365 (define_attr "ce_count" "" (const_int 1))
367 ;;---------------------------------------------------------------------------
370 (include "unspecs.md")
372 ;;---------------------------------------------------------------------------
375 (include "iterators.md")
377 ;;---------------------------------------------------------------------------
380 (include "predicates.md")
381 (include "constraints.md")
383 ;;---------------------------------------------------------------------------
384 ;; Pipeline descriptions
386 (define_attr "tune_cortexr4" "yes,no"
388 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
390 (const_string "no"))))
392 ;; True if the generic scheduling description should be used.
394 (define_attr "generic_sched" "yes,no"
396 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
397 arm926ejs,arm10e,arm1026ejs,arm1136js,\
398 arm1136jfs,cortexa5,cortexa7,cortexa8,\
399 cortexa9,cortexa12,cortexa15,cortexa17,\
400 cortexa53,cortexa57,cortexm4,cortexm7,\
401 exynosm1,marvell_pj4,xgene1")
402 (eq_attr "tune_cortexr4" "yes"))
404 (const_string "yes"))))
406 (define_attr "generic_vfp" "yes,no"
408 (and (eq_attr "fpu" "vfp")
409 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
410 cortexa8,cortexa9,cortexa53,cortexm4,\
411 cortexm7,marvell_pj4,xgene1")
412 (eq_attr "tune_cortexr4" "no"))
414 (const_string "no"))))
416 (include "marvell-f-iwmmxt.md")
417 (include "arm-generic.md")
418 (include "arm926ejs.md")
419 (include "arm1020e.md")
420 (include "arm1026ejs.md")
421 (include "arm1136jfs.md")
423 (include "fa606te.md")
424 (include "fa626te.md")
425 (include "fmp626.md")
426 (include "fa726te.md")
427 (include "cortex-a5.md")
428 (include "cortex-a7.md")
429 (include "cortex-a8.md")
430 (include "cortex-a9.md")
431 (include "cortex-a15.md")
432 (include "cortex-a17.md")
433 (include "cortex-a53.md")
434 (include "cortex-a57.md")
435 (include "cortex-r4.md")
436 (include "cortex-r4f.md")
437 (include "cortex-m7.md")
438 (include "cortex-m4.md")
439 (include "cortex-m4-fpu.md")
440 (include "exynos-m1.md")
442 (include "marvell-pj4.md")
443 (include "xgene1.md")
445 ;; define_subst and associated attributes
447 (define_subst "add_setq"
448 [(set (match_operand:SI 0 "" "")
449 (match_operand:SI 1 "" ""))]
453 (set (reg:CC APSRQ_REGNUM)
454 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))])
456 (define_subst_attr "add_clobber_q_name" "add_setq" "" "_setq")
457 (define_subst_attr "add_clobber_q_pred" "add_setq" "!ARM_Q_BIT_READ"
460 ;;---------------------------------------------------------------------------
465 ;; Note: For DImode insns, there is normally no reason why operands should
466 ;; not be in the same register, what we don't want is for something being
467 ;; written to partially overlap something that is an input.
469 (define_expand "adddi3"
471 [(set (match_operand:DI 0 "s_register_operand")
472 (plus:DI (match_operand:DI 1 "s_register_operand")
473 (match_operand:DI 2 "reg_or_int_operand")))
474 (clobber (reg:CC CC_REGNUM))])]
479 if (!REG_P (operands[2]))
480 operands[2] = force_reg (DImode, operands[2]);
484 rtx lo_result, hi_result, lo_dest, hi_dest;
485 rtx lo_op1, hi_op1, lo_op2, hi_op2;
486 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
488 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
489 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
491 if (lo_op2 == const0_rtx)
494 if (!arm_add_operand (hi_op2, SImode))
495 hi_op2 = force_reg (SImode, hi_op2);
496 /* Assume hi_op2 won't also be zero. */
497 emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
501 if (!arm_add_operand (lo_op2, SImode))
502 lo_op2 = force_reg (SImode, lo_op2);
503 if (!arm_not_operand (hi_op2, SImode))
504 hi_op2 = force_reg (SImode, hi_op2);
506 emit_insn (gen_addsi3_compare_op1 (lo_dest, lo_op1, lo_op2));
507 rtx carry = gen_rtx_LTU (SImode, gen_rtx_REG (CC_Cmode, CC_REGNUM),
509 if (hi_op2 == const0_rtx)
510 emit_insn (gen_add0si3_carryin (hi_dest, hi_op1, carry));
512 emit_insn (gen_addsi3_carryin (hi_dest, hi_op1, hi_op2, carry));
515 if (lo_result != lo_dest)
516 emit_move_insn (lo_result, lo_dest);
517 if (hi_result != hi_dest)
518 emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
524 (define_expand "addvsi4"
525 [(match_operand:SI 0 "s_register_operand")
526 (match_operand:SI 1 "s_register_operand")
527 (match_operand:SI 2 "arm_add_operand")
528 (match_operand 3 "")]
531 if (CONST_INT_P (operands[2]))
532 emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1], operands[2]));
534 emit_insn (gen_addsi3_compareV_reg (operands[0], operands[1], operands[2]));
535 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
540 (define_expand "addvdi4"
541 [(match_operand:DI 0 "s_register_operand")
542 (match_operand:DI 1 "s_register_operand")
543 (match_operand:DI 2 "reg_or_int_operand")
544 (match_operand 3 "")]
547 rtx lo_result, hi_result;
548 rtx lo_op1, hi_op1, lo_op2, hi_op2;
549 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
551 lo_result = gen_lowpart (SImode, operands[0]);
552 hi_result = gen_highpart (SImode, operands[0]);
554 if (lo_op2 == const0_rtx)
556 emit_move_insn (lo_result, lo_op1);
557 if (!arm_add_operand (hi_op2, SImode))
558 hi_op2 = force_reg (SImode, hi_op2);
560 emit_insn (gen_addvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
564 if (!arm_add_operand (lo_op2, SImode))
565 lo_op2 = force_reg (SImode, lo_op2);
566 if (!arm_not_operand (hi_op2, SImode))
567 hi_op2 = force_reg (SImode, hi_op2);
569 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
571 if (hi_op2 == const0_rtx)
572 emit_insn (gen_addsi3_cin_vout_0 (hi_result, hi_op1));
573 else if (CONST_INT_P (hi_op2))
574 emit_insn (gen_addsi3_cin_vout_imm (hi_result, hi_op1, hi_op2));
576 emit_insn (gen_addsi3_cin_vout_reg (hi_result, hi_op1, hi_op2));
578 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
584 (define_expand "addsi3_cin_vout_reg"
589 (plus:DI (match_dup 4)
590 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
591 (sign_extend:DI (match_operand:SI 2 "s_register_operand")))
592 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
594 (set (match_operand:SI 0 "s_register_operand")
595 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
599 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
600 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
601 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
602 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
606 (define_insn "*addsi3_cin_vout_reg_insn"
607 [(set (reg:CC_V CC_REGNUM)
611 (match_operand:DI 3 "arm_carry_operation" "")
612 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
613 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
615 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
618 (set (match_operand:SI 0 "s_register_operand" "=l,r")
619 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
625 [(set_attr "type" "alus_sreg")
626 (set_attr "arch" "t2,*")
627 (set_attr "length" "2,4")]
630 (define_expand "addsi3_cin_vout_imm"
635 (plus:DI (match_dup 4)
636 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
638 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
640 (set (match_operand:SI 0 "s_register_operand")
641 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
642 (match_operand 2 "arm_adcimm_operand")))])]
645 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
646 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
647 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
648 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
652 (define_insn "*addsi3_cin_vout_imm_insn"
653 [(set (reg:CC_V CC_REGNUM)
657 (match_operand:DI 3 "arm_carry_operation" "")
658 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
659 (match_operand 2 "arm_adcimm_operand" "I,K"))
661 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
664 (set (match_operand:SI 0 "s_register_operand" "=r,r")
665 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
670 sbcs%?\\t%0, %1, #%B2"
671 [(set_attr "type" "alus_imm")]
674 (define_expand "addsi3_cin_vout_0"
678 (plus:DI (match_dup 3)
679 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
680 (sign_extend:DI (plus:SI (match_dup 4) (match_dup 1)))))
681 (set (match_operand:SI 0 "s_register_operand")
682 (plus:SI (match_dup 4) (match_dup 1)))])]
685 operands[2] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
686 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
687 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
688 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
692 (define_insn "*addsi3_cin_vout_0_insn"
693 [(set (reg:CC_V CC_REGNUM)
696 (match_operand:DI 2 "arm_carry_operation" "")
697 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
698 (sign_extend:DI (plus:SI
699 (match_operand:SI 3 "arm_carry_operation" "")
701 (set (match_operand:SI 0 "s_register_operand" "=r")
702 (plus:SI (match_dup 3) (match_dup 1)))]
704 "adcs%?\\t%0, %1, #0"
705 [(set_attr "type" "alus_imm")]
708 (define_expand "uaddvsi4"
709 [(match_operand:SI 0 "s_register_operand")
710 (match_operand:SI 1 "s_register_operand")
711 (match_operand:SI 2 "arm_add_operand")
712 (match_operand 3 "")]
715 emit_insn (gen_addsi3_compare_op1 (operands[0], operands[1], operands[2]));
716 arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
721 (define_expand "uaddvdi4"
722 [(match_operand:DI 0 "s_register_operand")
723 (match_operand:DI 1 "s_register_operand")
724 (match_operand:DI 2 "reg_or_int_operand")
725 (match_operand 3 "")]
728 rtx lo_result, hi_result;
729 rtx lo_op1, hi_op1, lo_op2, hi_op2;
730 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
732 lo_result = gen_lowpart (SImode, operands[0]);
733 hi_result = gen_highpart (SImode, operands[0]);
735 if (lo_op2 == const0_rtx)
737 emit_move_insn (lo_result, lo_op1);
738 if (!arm_add_operand (hi_op2, SImode))
739 hi_op2 = force_reg (SImode, hi_op2);
741 emit_insn (gen_uaddvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
745 if (!arm_add_operand (lo_op2, SImode))
746 lo_op2 = force_reg (SImode, lo_op2);
747 if (!arm_not_operand (hi_op2, SImode))
748 hi_op2 = force_reg (SImode, hi_op2);
750 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
752 if (hi_op2 == const0_rtx)
753 emit_insn (gen_addsi3_cin_cout_0 (hi_result, hi_op1));
754 else if (CONST_INT_P (hi_op2))
755 emit_insn (gen_addsi3_cin_cout_imm (hi_result, hi_op1, hi_op2));
757 emit_insn (gen_addsi3_cin_cout_reg (hi_result, hi_op1, hi_op2));
759 arm_gen_unlikely_cbranch (GEU, CC_ADCmode, operands[3]);
765 (define_expand "addsi3_cin_cout_reg"
770 (plus:DI (match_dup 4)
771 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
772 (zero_extend:DI (match_operand:SI 2 "s_register_operand")))
773 (const_int 4294967296)))
774 (set (match_operand:SI 0 "s_register_operand")
775 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
779 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
780 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
781 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
782 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
786 (define_insn "*addsi3_cin_cout_reg_insn"
787 [(set (reg:CC_ADC CC_REGNUM)
791 (match_operand:DI 3 "arm_carry_operation" "")
792 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
793 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
794 (const_int 4294967296)))
795 (set (match_operand:SI 0 "s_register_operand" "=l,r")
796 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
803 [(set_attr "type" "alus_sreg")
804 (set_attr "arch" "t2,*")
805 (set_attr "length" "2,4")]
808 (define_expand "addsi3_cin_cout_imm"
813 (plus:DI (match_dup 4)
814 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
816 (const_int 4294967296)))
817 (set (match_operand:SI 0 "s_register_operand")
818 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
819 (match_operand:SI 2 "arm_adcimm_operand")))])]
822 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
823 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
824 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
825 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
826 operands[6] = GEN_INT (UINTVAL (operands[2]) & 0xffffffff);
830 (define_insn "*addsi3_cin_cout_imm_insn"
831 [(set (reg:CC_ADC CC_REGNUM)
835 (match_operand:DI 3 "arm_carry_operation" "")
836 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
837 (match_operand:DI 5 "const_int_operand" "n,n"))
838 (const_int 4294967296)))
839 (set (match_operand:SI 0 "s_register_operand" "=r,r")
840 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
842 (match_operand:SI 2 "arm_adcimm_operand" "I,K")))]
844 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[5])"
847 sbcs%?\\t%0, %1, #%B2"
848 [(set_attr "type" "alus_imm")]
851 (define_expand "addsi3_cin_cout_0"
855 (plus:DI (match_dup 3)
856 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
857 (const_int 4294967296)))
858 (set (match_operand:SI 0 "s_register_operand")
859 (plus:SI (match_dup 4) (match_dup 1)))])]
862 operands[2] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
863 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
864 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
865 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
869 (define_insn "*addsi3_cin_cout_0_insn"
870 [(set (reg:CC_ADC CC_REGNUM)
873 (match_operand:DI 2 "arm_carry_operation" "")
874 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
875 (const_int 4294967296)))
876 (set (match_operand:SI 0 "s_register_operand" "=r")
877 (plus:SI (match_operand:SI 3 "arm_carry_operation" "") (match_dup 1)))]
879 "adcs%?\\t%0, %1, #0"
880 [(set_attr "type" "alus_imm")]
883 (define_expand "addsi3"
884 [(set (match_operand:SI 0 "s_register_operand")
885 (plus:SI (match_operand:SI 1 "s_register_operand")
886 (match_operand:SI 2 "reg_or_int_operand")))]
889 if (TARGET_32BIT && CONST_INT_P (operands[2]))
891 arm_split_constant (PLUS, SImode, NULL_RTX,
892 INTVAL (operands[2]), operands[0], operands[1],
893 optimize && can_create_pseudo_p ());
899 ; If there is a scratch available, this will be faster than synthesizing the
902 [(match_scratch:SI 3 "r")
903 (set (match_operand:SI 0 "arm_general_register_operand" "")
904 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
905 (match_operand:SI 2 "const_int_operand" "")))]
907 !(const_ok_for_arm (INTVAL (operands[2]))
908 || const_ok_for_arm (-INTVAL (operands[2])))
909 && const_ok_for_arm (~INTVAL (operands[2]))"
910 [(set (match_dup 3) (match_dup 2))
911 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
915 ;; The r/r/k alternative is required when reloading the address
916 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
917 ;; put the duplicated register first, and not try the commutative version.
918 (define_insn_and_split "*arm_addsi3"
919 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
920 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
921 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
937 subw%?\\t%0, %1, #%n2
938 subw%?\\t%0, %1, #%n2
941 && CONST_INT_P (operands[2])
942 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
943 && (reload_completed || !arm_eliminable_register (operands[1]))"
944 [(clobber (const_int 0))]
946 arm_split_constant (PLUS, SImode, curr_insn,
947 INTVAL (operands[2]), operands[0],
951 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
952 (set_attr "predicable" "yes")
953 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
954 (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
955 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
956 (const_string "alu_imm")
957 (const_string "alu_sreg")))
961 (define_insn "addsi3_compareV_reg"
962 [(set (reg:CC_V CC_REGNUM)
965 (sign_extend:DI (match_operand:SI 1 "register_operand" "%l,0,r"))
966 (sign_extend:DI (match_operand:SI 2 "register_operand" "l,r,r")))
967 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
968 (set (match_operand:SI 0 "register_operand" "=l,r,r")
969 (plus:SI (match_dup 1) (match_dup 2)))]
971 "adds%?\\t%0, %1, %2"
972 [(set_attr "conds" "set")
973 (set_attr "arch" "t2,t2,*")
974 (set_attr "length" "2,2,4")
975 (set_attr "type" "alus_sreg")]
978 (define_insn "*addsi3_compareV_reg_nosum"
979 [(set (reg:CC_V CC_REGNUM)
982 (sign_extend:DI (match_operand:SI 0 "register_operand" "%l,r"))
983 (sign_extend:DI (match_operand:SI 1 "register_operand" "l,r")))
984 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
987 [(set_attr "conds" "set")
988 (set_attr "arch" "t2,*")
989 (set_attr "length" "2,4")
990 (set_attr "type" "alus_sreg")]
993 (define_insn "subvsi3_intmin"
994 [(set (reg:CC_V CC_REGNUM)
998 (match_operand:SI 1 "register_operand" "r"))
999 (const_int 2147483648))
1000 (sign_extend:DI (plus:SI (match_dup 1) (const_int -2147483648)))))
1001 (set (match_operand:SI 0 "register_operand" "=r")
1002 (plus:SI (match_dup 1) (const_int -2147483648)))]
1004 "subs%?\\t%0, %1, #-2147483648"
1005 [(set_attr "conds" "set")
1006 (set_attr "type" "alus_imm")]
1009 (define_insn "addsi3_compareV_imm"
1010 [(set (reg:CC_V CC_REGNUM)
1014 (match_operand:SI 1 "register_operand" "l,0,l,0,r,r"))
1015 (match_operand 2 "arm_addimm_operand" "Pd,Py,Px,Pw,I,L"))
1016 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
1017 (set (match_operand:SI 0 "register_operand" "=l,l,l,l,r,r")
1018 (plus:SI (match_dup 1) (match_dup 2)))]
1020 && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
1024 subs%?\\t%0, %1, #%n2
1025 subs%?\\t%0, %0, #%n2
1027 subs%?\\t%0, %1, #%n2"
1028 [(set_attr "conds" "set")
1029 (set_attr "arch" "t2,t2,t2,t2,*,*")
1030 (set_attr "length" "2,2,2,2,4,4")
1031 (set_attr "type" "alus_imm")]
1034 (define_insn "addsi3_compareV_imm_nosum"
1035 [(set (reg:CC_V CC_REGNUM)
1039 (match_operand:SI 0 "register_operand" "l,r,r"))
1040 (match_operand 1 "arm_addimm_operand" "Pw,I,L"))
1041 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1043 && INTVAL (operands[1]) == ARM_SIGN_EXTEND (INTVAL (operands[1]))"
1048 [(set_attr "conds" "set")
1049 (set_attr "arch" "t2,*,*")
1050 (set_attr "length" "2,4,4")
1051 (set_attr "type" "alus_imm")]
1054 ;; We can handle more constants efficiently if we can clobber either a scratch
1055 ;; or the other source operand. We deliberately leave this late as in
1056 ;; high register pressure situations it's not worth forcing any reloads.
1058 [(match_scratch:SI 2 "l")
1059 (set (reg:CC_V CC_REGNUM)
1063 (match_operand:SI 0 "low_register_operand"))
1064 (match_operand 1 "const_int_operand"))
1065 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1067 && satisfies_constraint_Pd (operands[1])"
1069 (set (reg:CC_V CC_REGNUM)
1071 (plus:DI (sign_extend:DI (match_dup 0))
1072 (sign_extend:DI (match_dup 1)))
1073 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1074 (set (match_dup 2) (plus:SI (match_dup 0) (match_dup 1)))])]
1078 [(set (reg:CC_V CC_REGNUM)
1082 (match_operand:SI 0 "low_register_operand"))
1083 (match_operand 1 "const_int_operand"))
1084 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1086 && dead_or_set_p (peep2_next_insn (0), operands[0])
1087 && satisfies_constraint_Py (operands[1])"
1089 (set (reg:CC_V CC_REGNUM)
1091 (plus:DI (sign_extend:DI (match_dup 0))
1092 (sign_extend:DI (match_dup 1)))
1093 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1094 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 1)))])]
;; Add and set the N/Z flags (CC_NZ), also storing the sum.  Negative
;; immediates are emitted as SUBS with the negated constant (#%n2).
1097 (define_insn "addsi3_compare0"
1098 [(set (reg:CC_NZ CC_REGNUM)
1100 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
1101 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
1103 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1104 (plus:SI (match_dup 1) (match_dup 2)))]
1108 subs%?\\t%0, %1, #%n2
1109 adds%?\\t%0, %1, %2"
1110 [(set_attr "conds" "set")
1111 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
;;
;; As above but only the flags are needed; the sum is discarded.
1114 (define_insn "*addsi3_compare0_scratch"
1115 [(set (reg:CC_NZ CC_REGNUM)
1117 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
1118 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
1125 [(set_attr "conds" "set")
1126 (set_attr "predicable" "yes")
1127 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
;;
;; Compare a register against the negation of another, setting only the
;; Z flag (CC_Z) — the result is valid only for equality tests.
1130 (define_insn "*compare_negsi_si"
1131 [(set (reg:CC_Z CC_REGNUM)
1133 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
1134 (match_operand:SI 1 "s_register_operand" "l,r")))]
1137 [(set_attr "conds" "set")
1138 (set_attr "predicable" "yes")
1139 (set_attr "arch" "t2,*")
1140 (set_attr "length" "2,4")
1141 (set_attr "predicable_short_it" "yes,no")
1142 (set_attr "type" "alus_sreg")]
1145 ;; This is the canonicalization of subsi3_compare when the
1146 ;; addend is a constant.
;; Invariant (enforced by the insn condition): operands[3] is the
;; truncated negation of operands[2], so the ADDS/SUBS pair below compute
;; the same result while setting the flags for the subtraction compare.
1147 (define_insn "cmpsi2_addneg"
1148 [(set (reg:CC CC_REGNUM)
1150 (match_operand:SI 1 "s_register_operand" "r,r")
1151 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
1152 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1153 (plus:SI (match_dup 1)
1154 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
1156 && (INTVAL (operands[2])
1157 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
1159 /* For 0 and INT_MIN it is essential that we use subs, as adds will result
1160 in different condition codes (like cmn rather than like cmp), so that
1161 alternative comes first. Both alternatives can match for any 0x??000000
1162 where except for 0 and INT_MIN it doesn't matter what we choose, and also
1163 for -1 and 1 with TARGET_THUMB2, in that case prefer instruction with #1
1164 as it is shorter. */
1165 if (which_alternative == 0 && operands[3] != const1_rtx)
1166 return "subs%?\\t%0, %1, #%n3";
1168 return "adds%?\\t%0, %1, %3";
1170 [(set_attr "conds" "set")
1171 (set_attr "type" "alus_sreg")]
1174 ;; Convert the sequence
1176 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
1180 ;; bcs dest ((unsigned)rn >= 1)
1181 ;; similarly for the beq variant using bcc.
1182 ;; This is a common looping idiom (while (n--))
;; Peephole: fold the decrement and the compare-against--1 into a single
;; flag-setting subtract, then rewrite the branch condition to test the
;; carry/zero result directly.  Requires the CC register to be dead after
;; the branch (peep2_reg_dead_p).  (The define_peephole2 opener and some
;; replacement-template lines are elided in this extract.)
1184 [(set (match_operand:SI 0 "arm_general_register_operand" "")
1185 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
1187 (set (match_operand 2 "cc_register" "")
1188 (compare (match_dup 0) (const_int -1)))
1190 (if_then_else (match_operator 3 "equality_operator"
1191 [(match_dup 2) (const_int 0)])
1192 (match_operand 4 "" "")
1193 (match_operand 5 "" "")))]
1194 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
1198 (match_dup 1) (const_int 1)))
1199 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
1201 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
1204 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
1205 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
1208 operands[2], const0_rtx);"
1211 ;; The next four insns work because they compare the result with one of
1212 ;; the operands, and we know that the use of the condition code is
1213 ;; either GEU or LTU, so we can use the carry flag from the addition
1214 ;; instead of doing the compare a second time.
;; Add, store the sum, and set CC_C (carry) by comparing the sum with an
;; input operand.  The `type' attribute is computed: alu_imm when the
;; addend is a constant, alu_sreg otherwise.
1215 (define_insn "addsi3_compare_op1"
1216 [(set (reg:CC_C CC_REGNUM)
1218 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,rk,rk")
1219 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rkI,L"))
1221 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,rk,rk")
1222 (plus:SI (match_dup 1) (match_dup 2)))]
1227 subs%?\\t%0, %1, #%n2
1228 subs%?\\t%0, %0, #%n2
1230 subs%?\\t%0, %1, #%n2"
1231 [(set_attr "conds" "set")
1232 (set_attr "arch" "t2,t2,t2,t2,*,*")
1233 (set_attr "length" "2,2,2,2,4,4")
1235 (if_then_else (match_operand 2 "const_int_operand")
1236 (const_string "alu_imm")
1237 (const_string "alu_sreg")))]
;; Companion to addsi3_compare_op1; by its name the compare uses the
;; other operand ordering (the compare RTL lines are elided here).
1240 (define_insn "*addsi3_compare_op2"
1241 [(set (reg:CC_C CC_REGNUM)
1243 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r")
1244 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rI,L"))
1246 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r")
1247 (plus:SI (match_dup 1) (match_dup 2)))]
1252 subs%?\\t%0, %1, #%n2
1253 subs%?\\t%0, %0, #%n2
1255 subs%?\\t%0, %1, #%n2"
1256 [(set_attr "conds" "set")
1257 (set_attr "arch" "t2,t2,t2,t2,*,*")
1258 (set_attr "length" "2,2,2,2,4,4")
1260 (if_then_else (match_operand 2 "const_int_operand")
1261 (const_string "alu_imm")
1262 (const_string "alu_sreg")))]
;; Flag-only forms of the add-compare above: the sum is not stored
;; (no result SET is present in either pattern).
1265 (define_insn "*compare_addsi2_op0"
1266 [(set (reg:CC_C CC_REGNUM)
1268 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1269 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1277 [(set_attr "conds" "set")
1278 (set_attr "predicable" "yes")
1279 (set_attr "arch" "t2,t2,*,*")
1280 (set_attr "predicable_short_it" "yes,yes,no,no")
1281 (set_attr "length" "2,2,4,4")
1283 (if_then_else (match_operand 1 "const_int_operand")
1284 (const_string "alu_imm")
1285 (const_string "alu_sreg")))]
;;
;; As *compare_addsi2_op0 but comparing against the other operand.
1288 (define_insn "*compare_addsi2_op1"
1289 [(set (reg:CC_C CC_REGNUM)
1291 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1292 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1300 [(set_attr "conds" "set")
1301 (set_attr "predicable" "yes")
1302 (set_attr "arch" "t2,t2,*,*")
1303 (set_attr "predicable_short_it" "yes,yes,no,no")
1304 (set_attr "length" "2,2,4,4")
1306 (if_then_else (match_operand 1 "const_int_operand")
1307 (const_string "alu_imm")
1308 (const_string "alu_sreg")))]
;; Add-with-carry (ADC).  The K-constraint alternative negates via SBC
;; with the bitwise-inverted immediate (#%B2).
1311 (define_insn "addsi3_carryin"
1312 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1313 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
1314 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
1315 (match_operand:SI 3 "arm_carry_operation" "")))]
1320 sbc%?\\t%0, %1, #%B2"
1321 [(set_attr "conds" "use")
1322 (set_attr "predicable" "yes")
1323 (set_attr "arch" "t2,*,*")
1324 (set_attr "length" "4")
1325 (set_attr "predicable_short_it" "yes,no,no")
1326 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1329 ;; Canonicalization of the above when the immediate is zero.
1330 (define_insn "add0si3_carryin"
1331 [(set (match_operand:SI 0 "s_register_operand" "=r")
1332 (plus:SI (match_operand:SI 2 "arm_carry_operation" "")
1333 (match_operand:SI 1 "arm_not_operand" "r")))]
1335 "adc%?\\t%0, %1, #0"
1336 [(set_attr "conds" "use")
1337 (set_attr "predicable" "yes")
1338 (set_attr "length" "4")
1339 (set_attr "type" "adc_imm")]
;;
;; Commuted form of addsi3_carryin: the carry operand appears first in
;; the inner PLUS.
1342 (define_insn "*addsi3_carryin_alt2"
1343 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1344 (plus:SI (plus:SI (match_operand:SI 3 "arm_carry_operation" "")
1345 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
1346 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
1351 sbc%?\\t%0, %1, #%B2"
1352 [(set_attr "conds" "use")
1353 (set_attr "predicable" "yes")
1354 (set_attr "arch" "t2,*,*")
1355 (set_attr "length" "4")
1356 (set_attr "predicable_short_it" "yes,no,no")
1357 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
;; ADC where one addend is a shifted register (%3 shifted by operator %2).
;; Register-specified shift amounts (second alternative) are Arm-only ("a").
1360 (define_insn "*addsi3_carryin_shift"
1361 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1363 (match_operator:SI 2 "shift_operator"
1364 [(match_operand:SI 3 "s_register_operand" "r,r")
1365 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1366 (match_operand:SI 5 "arm_carry_operation" ""))
1367 (match_operand:SI 1 "s_register_operand" "r,r")))]
1369 "adc%?\\t%0, %1, %3%S2"
1370 [(set_attr "conds" "use")
1371 (set_attr "arch" "32,a")
1372 (set_attr "shift" "3")
1373 (set_attr "predicable" "yes")
1374 (set_attr "autodetect_type" "alu_shift_operator2")]
;;
;; ADCS: add with carry-in that also rewrites the flags (CC clobbered).
1377 (define_insn "*addsi3_carryin_clobercc"
1378 [(set (match_operand:SI 0 "s_register_operand" "=r")
1379 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1380 (match_operand:SI 2 "arm_rhs_operand" "rI"))
1381 (match_operand:SI 3 "arm_carry_operation" "")))
1382 (clobber (reg:CC CC_REGNUM))]
1384 "adcs%?\\t%0, %1, %2"
1385 [(set_attr "conds" "set")
1386 (set_attr "type" "adcs_reg")]
;; Expand signed subtract with overflow check: compute %0 = %1 - %2 and
;; branch to operands[3] if signed overflow occurred (CC_V / NE branch).
;; Constant-constant is folded at expand time; constant subtrahends are
;; negated and routed through the add-compare patterns, with INT_MIN
;; special-cased (negation overflows).
1389 (define_expand "subvsi4"
1390 [(match_operand:SI 0 "s_register_operand")
1391 (match_operand:SI 1 "arm_rhs_operand")
1392 (match_operand:SI 2 "arm_add_operand")
1393 (match_operand 3 "")]
1396 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1398 /* If both operands are constants we can decide the result statically. */
1399 wi::overflow_type overflow;
1400 wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1401 rtx_mode_t (operands[2], SImode),
1403 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1404 if (overflow != wi::OVF_NONE)
1405 emit_jump_insn (gen_jump (operands[3]));
1408 else if (CONST_INT_P (operands[2]))
1410 operands[2] = GEN_INT (-INTVAL (operands[2]));
1411 /* Special case for INT_MIN.  */
1412 if (INTVAL (operands[2]) == 0x80000000)
1413 emit_insn (gen_subvsi3_intmin (operands[0], operands[1]));
1415 emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1],
1418 else if (CONST_INT_P (operands[1]))
1419 emit_insn (gen_subvsi3_imm1 (operands[0], operands[1], operands[2]));
1421 emit_insn (gen_subvsi3 (operands[0], operands[1], operands[2]));
1423 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
;; 64-bit signed subtract with overflow check.  Decomposes into a low-word
;; flag-setting subtract followed by a high-word subtract-with-borrow that
;; sets CC_V; branches to operands[3] on overflow.  A zero low subtrahend
;; degenerates to a 32-bit subvsi4 on the high words.
1427 (define_expand "subvdi4"
1428 [(match_operand:DI 0 "s_register_operand")
1429 (match_operand:DI 1 "reg_or_int_operand")
1430 (match_operand:DI 2 "reg_or_int_operand")
1431 (match_operand 3 "")]
1434 rtx lo_result, hi_result;
1435 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1436 lo_result = gen_lowpart (SImode, operands[0]);
1437 hi_result = gen_highpart (SImode, operands[0]);
1438 machine_mode mode = CCmode;
1440 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1442 /* If both operands are constants we can decide the result statically. */
1443 wi::overflow_type overflow;
1444 wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1445 rtx_mode_t (operands[2], DImode),
1447 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1448 if (overflow != wi::OVF_NONE)
1449 emit_jump_insn (gen_jump (operands[3]));
1452 else if (CONST_INT_P (operands[1]))
1454 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1456 if (const_ok_for_arm (INTVAL (lo_op1)))
1458 emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1459 GEN_INT (~UINTVAL (lo_op1))));
1460 /* We could potentially use RSC here in Arm state, but not
1461 in Thumb, so it's probably not worth the effort of handling
1463 hi_op1 = force_reg (SImode, hi_op1);
1467 operands[1] = force_reg (DImode, operands[1]);
1470 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1472 if (lo_op2 == const0_rtx)
1474 emit_move_insn (lo_result, lo_op1);
1475 if (!arm_add_operand (hi_op2, SImode))
1476 hi_op2 = force_reg (SImode, hi_op2);
1477 emit_insn (gen_subvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1481 if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1482 lo_op2 = force_reg (SImode, lo_op2);
1483 if (CONST_INT_P (lo_op2))
1484 emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1485 gen_int_mode (-INTVAL (lo_op2), SImode)));
1487 emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1490 if (!arm_not_operand (hi_op2, SImode))
1491 hi_op2 = force_reg (SImode, hi_op2);
1492 rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1493 if (CONST_INT_P (hi_op2))
1494 emit_insn (gen_subvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1495 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1496 gen_rtx_LTU (DImode, ccreg,
1499 emit_insn (gen_subvsi3_borrow (hi_result, hi_op1, hi_op2,
1500 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1501 gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1502 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
;; Unsigned 32-bit subtract with overflow (borrow) check: compute
;; %0 = %1 - %2 and branch to operands[3] on borrow (LTU on CC).
1507 (define_expand "usubvsi4"
1508 [(match_operand:SI 0 "s_register_operand")
1509 (match_operand:SI 1 "arm_rhs_operand")
1510 (match_operand:SI 2 "arm_add_operand")
1511 (match_operand 3 "")]
1514 machine_mode mode = CCmode;
1515 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1517 /* If both operands are constants we can decide the result statically. */
1518 wi::overflow_type overflow;
1519 wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1520 rtx_mode_t (operands[2], SImode),
1521 UNSIGNED, &overflow);
1522 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1523 if (overflow != wi::OVF_NONE)
1524 emit_jump_insn (gen_jump (operands[3]));
1527 else if (CONST_INT_P (operands[2]))
1528 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
1529 gen_int_mode (-INTVAL (operands[2]),
1531 else if (CONST_INT_P (operands[1]))
1534 emit_insn (gen_rsb_imm_compare (operands[0], operands[1], operands[2],
1535 GEN_INT (~UINTVAL (operands[1]))));
1538 emit_insn (gen_subsi3_compare1 (operands[0], operands[1], operands[2]));
1539 arm_gen_unlikely_cbranch (LTU, mode, operands[3]);
;; Unsigned 64-bit subtract with borrow check; mirrors subvdi4 but the
;; final branch tests LTU on CC_B (borrow out of the high word).
1544 (define_expand "usubvdi4"
1545 [(match_operand:DI 0 "s_register_operand")
1546 (match_operand:DI 1 "reg_or_int_operand")
1547 (match_operand:DI 2 "reg_or_int_operand")
1548 (match_operand 3 "")]
1551 rtx lo_result, hi_result;
1552 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1553 lo_result = gen_lowpart (SImode, operands[0]);
1554 hi_result = gen_highpart (SImode, operands[0]);
1555 machine_mode mode = CCmode;
1557 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1559 /* If both operands are constants we can decide the result statically. */
1560 wi::overflow_type overflow;
1561 wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1562 rtx_mode_t (operands[2], DImode),
1563 UNSIGNED, &overflow);
1564 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1565 if (overflow != wi::OVF_NONE)
1566 emit_jump_insn (gen_jump (operands[3]));
1569 else if (CONST_INT_P (operands[1]))
1571 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1573 if (const_ok_for_arm (INTVAL (lo_op1)))
1575 emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1576 GEN_INT (~UINTVAL (lo_op1))));
1577 /* We could potentially use RSC here in Arm state, but not
1578 in Thumb, so it's probably not worth the effort of handling
1580 hi_op1 = force_reg (SImode, hi_op1);
1584 operands[1] = force_reg (DImode, operands[1]);
1587 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1589 if (lo_op2 == const0_rtx)
1591 emit_move_insn (lo_result, lo_op1);
1592 if (!arm_add_operand (hi_op2, SImode))
1593 hi_op2 = force_reg (SImode, hi_op2);
1594 emit_insn (gen_usubvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1598 if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1599 lo_op2 = force_reg (SImode, lo_op2);
1600 if (CONST_INT_P (lo_op2))
1601 emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1602 gen_int_mode (-INTVAL (lo_op2), SImode)));
1604 emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1607 if (!arm_not_operand (hi_op2, SImode))
1608 hi_op2 = force_reg (SImode, hi_op2);
1609 rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1610 if (CONST_INT_P (hi_op2))
1611 emit_insn (gen_usubvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1612 GEN_INT (UINTVAL (hi_op2) & 0xffffffff),
1613 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1614 gen_rtx_LTU (DImode, ccreg,
1617 emit_insn (gen_usubvsi3_borrow (hi_result, hi_op1, hi_op2,
1618 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1619 gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1620 arm_gen_unlikely_cbranch (LTU, CC_Bmode, operands[3]);
;; Register-register subtract that sets the full CC flags (SUBS).
1625 (define_insn "subsi3_compare1"
1626 [(set (reg:CC CC_REGNUM)
1628 (match_operand:SI 1 "register_operand" "r")
1629 (match_operand:SI 2 "register_operand" "r")))
1630 (set (match_operand:SI 0 "register_operand" "=r")
1631 (minus:SI (match_dup 1) (match_dup 2)))]
1633 "subs%?\\t%0, %1, %2"
1634 [(set_attr "conds" "set")
1635 (set_attr "type" "alus_sreg")]
;;
;; Subtract setting the signed-overflow flag (CC_V), storing the result.
1638 (define_insn "subvsi3"
1639 [(set (reg:CC_V CC_REGNUM)
1642 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "l,r"))
1643 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
1644 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1645 (set (match_operand:SI 0 "s_register_operand" "=l,r")
1646 (minus:SI (match_dup 1) (match_dup 2)))]
1648 "subs%?\\t%0, %1, %2"
1649 [(set_attr "conds" "set")
1650 (set_attr "arch" "t2,*")
1651 (set_attr "length" "2,4")
1652 (set_attr "type" "alus_sreg")]
;;
;; Overflow-setting subtract with a constant minuend: emitted as a
;; reverse subtract (RSBS) of the register from the immediate.
1655 (define_insn "subvsi3_imm1"
1656 [(set (reg:CC_V CC_REGNUM)
1659 (match_operand 1 "arm_immediate_operand" "I")
1660 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1661 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1662 (set (match_operand:SI 0 "s_register_operand" "=r")
1663 (minus:SI (match_dup 1) (match_dup 2)))]
1665 "rsbs%?\\t%0, %2, %1"
1666 [(set_attr "conds" "set")
1667 (set_attr "type" "alus_imm")]
;; Subtract with borrow-in (SBC).  The Pz alternative (Thumb-2) uses the
;; "shift-left-and-subtract" trick on the same register.
1670 (define_insn "subsi3_carryin"
1671 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1672 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
1673 (match_operand:SI 2 "s_register_operand" "r,r,r"))
1674 (match_operand:SI 3 "arm_borrow_operation" "")))]
1679 sbc%?\\t%0, %2, %2, lsl #1"
1680 [(set_attr "conds" "use")
1681 (set_attr "arch" "*,a,t2")
1682 (set_attr "predicable" "yes")
1683 (set_attr "type" "adc_reg,adc_imm,alu_shift_imm_lsl_1to4")]
1686 ;; Special canonicalization of the above when operand1 == (const_int 1):
1687 ;; in this case the 'borrow' needs to be treated like subtracting from the carry.
;; Arm-only: RSC reverse-subtract-with-carry from #1.
1688 (define_insn "rsbsi_carryin_reg"
1689 [(set (match_operand:SI 0 "s_register_operand" "=r")
1690 (minus:SI (match_operand:SI 1 "arm_carry_operation" "")
1691 (match_operand:SI 2 "s_register_operand" "r")))]
1693 "rsc%?\\t%0, %2, #1"
1694 [(set_attr "conds" "use")
1695 (set_attr "predicable" "yes")
1696 (set_attr "type" "adc_imm")]
1699 ;; SBC performs Rn - Rm - ~C, but -Rm = ~Rm + 1 => Rn + ~Rm + 1 - ~C
1700 ;; => Rn + ~Rm + C, which is essentially ADC Rd, Rn, ~Rm
1701 (define_insn "*add_not_cin"
1702 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1704 (plus:SI (not:SI (match_operand:SI 1 "s_register_operand" "r,r"))
1705 (match_operand:SI 3 "arm_carry_operation" ""))
1706 (match_operand:SI 2 "arm_rhs_operand" "r,I")))]
1707 "TARGET_ARM || (TARGET_THUMB2 && !CONST_INT_P (operands[2]))"
1711 [(set_attr "conds" "use")
1712 (set_attr "predicable" "yes")
1713 (set_attr "arch" "*,a")
1714 (set_attr "type" "adc_reg,adc_imm")]
1717 ;; On Arm we can also use the same trick when the non-inverted operand is
1718 ;; shifted, using RSC.
1719 (define_insn "add_not_shift_cin"
1720 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1722 (plus:SI (match_operator:SI 3 "shift_operator"
1723 [(match_operand:SI 1 "s_register_operand" "r,r")
1724 (match_operand:SI 2 "shift_amount_operand" "M,r")])
1725 (not:SI (match_operand:SI 4 "s_register_operand" "r,r")))
1726 (match_operand:SI 5 "arm_carry_operation" "")))]
1728 "rsc%?\\t%0, %4, %1%S3"
1729 [(set_attr "conds" "use")
1730 (set_attr "predicable" "yes")
1731 (set_attr "autodetect_type" "alu_shift_operator3")]
;; Compare with borrow-in, result discarded (operand 0 is only a scratch).
;; Iterated over SE (sign/zero extend) to produce the matching CC mode.
1734 (define_insn "cmpsi3_carryin_<CC_EXTEND>out"
1735 [(set (reg:<CC_EXTEND> CC_REGNUM)
1736 (compare:<CC_EXTEND>
1737 (SE:DI (match_operand:SI 1 "s_register_operand" "0,r"))
1738 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1739 (SE:DI (match_operand:SI 2 "s_register_operand" "l,r")))))
1740 (clobber (match_scratch:SI 0 "=l,r"))]
1743 [(set_attr "conds" "set")
1744 (set_attr "arch" "t2,*")
1745 (set_attr "length" "2,4")
1746 (set_attr "type" "adc_reg")]
1749 ;; Similar to the above, but handling a constant which has a different
1750 ;; canonicalization.
1751 (define_insn "cmpsi3_imm_carryin_<CC_EXTEND>out"
1752 [(set (reg:<CC_EXTEND> CC_REGNUM)
1753 (compare:<CC_EXTEND>
1754 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1755 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1756 (match_operand:DI 2 "arm_adcimm_operand" "I,K"))))
1757 (clobber (match_scratch:SI 0 "=l,r"))]
1761 adcs\\t%0, %1, #%B2"
1762 [(set_attr "conds" "set")
1763 (set_attr "type" "adc_imm")]
1766 ;; Further canonicalization when the constant is zero.
1767 (define_insn "cmpsi3_0_carryin_<CC_EXTEND>out"
1768 [(set (reg:<CC_EXTEND> CC_REGNUM)
1769 (compare:<CC_EXTEND>
1770 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1771 (match_operand:DI 2 "arm_borrow_operation" "")))
1772 (clobber (match_scratch:SI 0 "=l,r"))]
1775 [(set_attr "conds" "set")
1776 (set_attr "type" "adc_imm")]
;; SBC with a negatable immediate: emitted with the negated constant (#%n2).
1779 (define_insn "*subsi3_carryin_const"
1780 [(set (match_operand:SI 0 "s_register_operand" "=r")
1782 (match_operand:SI 1 "s_register_operand" "r")
1783 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1784 (match_operand:SI 3 "arm_borrow_operation" "")))]
1786 "sbc\\t%0, %1, #%n2"
1787 [(set_attr "conds" "use")
1788 (set_attr "type" "adc_imm")]
;;
;; Degenerate form: subtract only the borrow from a register.
1791 (define_insn "*subsi3_carryin_const0"
1792 [(set (match_operand:SI 0 "s_register_operand" "=r")
1793 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1794 (match_operand:SI 2 "arm_borrow_operation" "")))]
1797 [(set_attr "conds" "use")
1798 (set_attr "type" "adc_imm")]
;;
;; SBC where the subtrahend is a shifted register.
1801 (define_insn "*subsi3_carryin_shift"
1802 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1804 (match_operand:SI 1 "s_register_operand" "r,r")
1805 (match_operator:SI 2 "shift_operator"
1806 [(match_operand:SI 3 "s_register_operand" "r,r")
1807 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
1808 (match_operand:SI 5 "arm_borrow_operation" "")))]
1810 "sbc%?\\t%0, %1, %3%S2"
1811 [(set_attr "conds" "use")
1812 (set_attr "arch" "32,a")
1813 (set_attr "shift" "3")
1814 (set_attr "predicable" "yes")
1815 (set_attr "autodetect_type" "alu_shift_operator2")]
;;
;; Same operation with the borrow associated differently in the RTL.
1818 (define_insn "*subsi3_carryin_shift_alt"
1819 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1821 (match_operand:SI 1 "s_register_operand" "r,r")
1822 (match_operand:SI 5 "arm_borrow_operation" ""))
1823 (match_operator:SI 2 "shift_operator"
1824 [(match_operand:SI 3 "s_register_operand" "r,r")
1825 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
1827 "sbc%?\\t%0, %1, %3%S2"
1828 [(set_attr "conds" "use")
1829 (set_attr "arch" "32,a")
1830 (set_attr "shift" "3")
1831 (set_attr "predicable" "yes")
1832 (set_attr "autodetect_type" "alu_shift_operator2")]
;; Reverse subtract with carry (RSC) of a shifted register; Arm-only
;; instruction (RSC does not exist in Thumb).
1836 (define_insn "*rsbsi3_carryin_shift"
1837 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1839 (match_operator:SI 2 "shift_operator"
1840 [(match_operand:SI 3 "s_register_operand" "r,r")
1841 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1842 (match_operand:SI 1 "s_register_operand" "r,r"))
1843 (match_operand:SI 5 "arm_borrow_operation" "")))]
1845 "rsc%?\\t%0, %1, %3%S2"
1846 [(set_attr "conds" "use")
1847 (set_attr "predicable" "yes")
1848 (set_attr "autodetect_type" "alu_shift_operator2")]
;;
;; Same as above with the borrow associated differently in the RTL.
1851 (define_insn "*rsbsi3_carryin_shift_alt"
1852 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1854 (match_operator:SI 2 "shift_operator"
1855 [(match_operand:SI 3 "s_register_operand" "r,r")
1856 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1857 (match_operand:SI 5 "arm_borrow_operation" ""))
1858 (match_operand:SI 1 "s_register_operand" "r,r")))]
1860 "rsc%?\\t%0, %1, %3%S2"
1861 [(set_attr "conds" "use")
1862 (set_attr "predicable" "yes")
1863 (set_attr "autodetect_type" "alu_shift_operator2")]
1866 ; transform ((x << y) - 1) to ~(~(x-1) << y)  Where X is a constant.
;; Split: load ~(X-1) into the scratch, then emit a single MVN-with-shift.
;; (The define_split opener is elided in this extract.)
1868 [(set (match_operand:SI 0 "s_register_operand" "")
1869 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1870 (match_operand:SI 2 "s_register_operand" ""))
1872 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1874 [(set (match_dup 3) (match_dup 1))
1875 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1877 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
;;
;; Single-precision FP add: expander only, requires hard float.
1880 (define_expand "addsf3"
1881 [(set (match_operand:SF 0 "s_register_operand")
1882 (plus:SF (match_operand:SF 1 "s_register_operand")
1883 (match_operand:SF 2 "s_register_operand")))]
1884 "TARGET_32BIT && TARGET_HARD_FLOAT"
;;
;; Double-precision FP add: additionally excluded for single-only VFP.
1888 (define_expand "adddf3"
1889 [(set (match_operand:DF 0 "s_register_operand")
1890 (plus:DF (match_operand:DF 1 "s_register_operand")
1891 (match_operand:DF 2 "s_register_operand")))]
1892 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; 64-bit subtract expander: split into a low-word flag-setting subtract
;; (SUBS / RSBS / NEGS depending on the low minuend) followed by a
;; high-word subtract-with-borrow keyed off the LTU condition.
1896 (define_expand "subdi3"
1898 [(set (match_operand:DI 0 "s_register_operand")
1899 (minus:DI (match_operand:DI 1 "reg_or_int_operand")
1900 (match_operand:DI 2 "s_register_operand")))
1901 (clobber (reg:CC CC_REGNUM))])]
1906 if (!REG_P (operands[1]))
1907 operands[1] = force_reg (DImode, operands[1]);
1911 rtx lo_result, hi_result, lo_dest, hi_dest;
1912 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1915 /* Since operands[1] may be an integer, pass it second, so that
1916 any necessary simplifications will be done on the decomposed
1918 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1920 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
1921 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
1923 if (!arm_rhs_operand (lo_op1, SImode))
1924 lo_op1 = force_reg (SImode, lo_op1);
1926 if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
1927 || !arm_rhs_operand (hi_op1, SImode))
1928 hi_op1 = force_reg (SImode, hi_op1);
1931 if (lo_op1 == const0_rtx)
1933 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1934 emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
1936 else if (CONST_INT_P (lo_op1))
1938 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1939 emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
1940 GEN_INT (~UINTVAL (lo_op1))));
1944 cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
1945 emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
1948 condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);
1950 if (hi_op1 == const0_rtx)
1951 emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
1953 emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));
1955 if (lo_result != lo_dest)
1956 emit_move_insn (lo_result, lo_dest);
1958 if (hi_result != hi_dest)
1959 emit_move_insn (hi_result, hi_dest);
;; 32-bit subtract expander.  A constant minuend is either forced to a
;; register (when early splitting is undesirable) or split into an
;; instruction sequence via arm_split_constant.
1966 (define_expand "subsi3"
1967 [(set (match_operand:SI 0 "s_register_operand")
1968 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1969 (match_operand:SI 2 "s_register_operand")))]
1972 if (CONST_INT_P (operands[1]))
1976 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1977 operands[1] = force_reg (SImode, operands[1]);
1980 arm_split_constant (MINUS, SImode, NULL_RTX,
1981 INTVAL (operands[1]), operands[0],
1983 optimize && can_create_pseudo_p ());
1987 else /* TARGET_THUMB1 */
1988 operands[1] = force_reg (SImode, operands[1]);
1993 ; ??? Check Thumb-2 split length
;; Subtract insn with post-reload split for non-encodable constant
;; minuends (alternative ?n, length 16).
1994 (define_insn_and_split "*arm_subsi3_insn"
1995 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
1996 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
1997 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
2009 "&& (CONST_INT_P (operands[1])
2010 && !const_ok_for_arm (INTVAL (operands[1])))"
2011 [(clobber (const_int 0))]
2013 arm_split_constant (MINUS, SImode, curr_insn,
2014 INTVAL (operands[1]), operands[0], operands[2], 0);
2017 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
2018 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
2019 (set_attr "predicable" "yes")
2020 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
2021 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
;;
;; Peephole: when the constant minuend is not ADD/SUB-encodable but its
;; complement is (MVN-loadable), materialise it in a scratch register
;; first.  (The define_peephole2 opener is elided in this extract.)
2025 [(match_scratch:SI 3 "r")
2026 (set (match_operand:SI 0 "arm_general_register_operand" "")
2027 (minus:SI (match_operand:SI 1 "const_int_operand" "")
2028 (match_operand:SI 2 "arm_general_register_operand" "")))]
2030 && !const_ok_for_arm (INTVAL (operands[1]))
2031 && const_ok_for_arm (~INTVAL (operands[1]))"
2032 [(set (match_dup 3) (match_dup 1))
2033 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
;; Subtract setting N/Z flags only (CC_NZ); third alternative is RSBS
;; with a constant minuend.
2037 (define_insn "subsi3_compare0"
2038 [(set (reg:CC_NZ CC_REGNUM)
2040 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
2041 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
2043 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2044 (minus:SI (match_dup 1) (match_dup 2)))]
2049 rsbs%?\\t%0, %2, %1"
2050 [(set_attr "conds" "set")
2051 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
;;
;; As above but setting the full CC flags.  NOTE(review): the `type'
;; attribute here lists alus_imm for the RSBS alternative while
;; subsi3_compare0 above lists alus_sreg for the same operand shape —
;; one of the two may be inaccurate; confirm against upstream.
2054 (define_insn "subsi3_compare"
2055 [(set (reg:CC CC_REGNUM)
2056 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
2057 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
2058 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2059 (minus:SI (match_dup 1) (match_dup 2)))]
2064 rsbs%?\\t%0, %2, %1"
2065 [(set_attr "conds" "set")
2066 (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
2069 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
2070 ;; rather than (0 cmp reg). This gives the same results for unsigned
2071 ;; and equality compares which is what we mostly need here.
;; Invariant (insn condition): operands[3] == ~operands[1].
2072 (define_insn "rsb_imm_compare"
2073 [(set (reg:CC_RSB CC_REGNUM)
2074 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2075 (match_operand 3 "const_int_operand" "")))
2076 (set (match_operand:SI 0 "s_register_operand" "=r")
2077 (minus:SI (match_operand 1 "arm_immediate_operand" "I")
2079 "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
2081 [(set_attr "conds" "set")
2082 (set_attr "type" "alus_imm")]
2085 ;; Similarly, but the result is unused.
2086 (define_insn "rsb_imm_compare_scratch"
2087 [(set (reg:CC_RSB CC_REGNUM)
2088 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2089 (match_operand 1 "arm_not_immediate_operand" "K")))
2090 (clobber (match_scratch:SI 0 "=r"))]
2092 "rsbs\\t%0, %2, #%B1"
2093 [(set_attr "conds" "set")
2094 (set_attr "type" "alus_imm")]
2097 ;; Compare the sum of a value plus a carry against a constant.  Uses
2098 ;; RSC, so the result is swapped.  Only available on Arm
2099 (define_insn "rscsi3_<CC_EXTEND>out_scratch"
2100 [(set (reg:CC_SWP CC_REGNUM)
2102 (plus:DI (SE:DI (match_operand:SI 2 "s_register_operand" "r"))
2103 (match_operand:DI 3 "arm_borrow_operation" ""))
2104 (match_operand 1 "arm_immediate_operand" "I")))
2105 (clobber (match_scratch:SI 0 "=r"))]
2108 [(set_attr "conds" "set")
2109 (set_attr "type" "alus_imm")]
;; SBCS: subtract with borrow, setting the borrow flag (CC_B) for the
;; unsigned overflow check in usubvdi4.
2112 (define_insn "usubvsi3_borrow"
2113 [(set (reg:CC_B CC_REGNUM)
2115 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
2116 (plus:DI (match_operand:DI 4 "arm_borrow_operation" "")
2118 (match_operand:SI 2 "s_register_operand" "l,r")))))
2119 (set (match_operand:SI 0 "s_register_operand" "=l,r")
2120 (minus:SI (match_dup 1)
2121 (plus:SI (match_operand:SI 3 "arm_borrow_operation" "")
2124 "sbcs%?\\t%0, %1, %2"
2125 [(set_attr "conds" "set")
2126 (set_attr "arch" "t2,*")
2127 (set_attr "length" "2,4")]
;;
;; Immediate variant: operand 3 is the 32-bit zero-extended image of the
;; SImode constant in operand 2 (enforced by the insn condition); the K
;; alternative uses ADCS with the inverted immediate (#%B2).
2130 (define_insn "usubvsi3_borrow_imm"
2131 [(set (reg:CC_B CC_REGNUM)
2133 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
2134 (plus:DI (match_operand:DI 5 "arm_borrow_operation" "")
2135 (match_operand:DI 3 "const_int_operand" "n,n"))))
2136 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2137 (minus:SI (match_dup 1)
2138 (plus:SI (match_operand:SI 4 "arm_borrow_operation" "")
2139 (match_operand:SI 2 "arm_adcimm_operand" "I,K"))))]
2141 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[3])"
2144 adcs%?\\t%0, %1, #%B2"
2145 [(set_attr "conds" "set")
2146 (set_attr "type" "alus_imm")]
;;
;; Signed counterpart: SBCS setting the overflow flag (CC_V) for subvdi4.
2149 (define_insn "subvsi3_borrow"
2150 [(set (reg:CC_V CC_REGNUM)
2154 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
2155 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
2156 (match_operand:DI 4 "arm_borrow_operation" ""))
2158 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2159 (match_operand:SI 3 "arm_borrow_operation" "")))))
2160 (set (match_operand:SI 0 "s_register_operand" "=l,r")
2161 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2164 "sbcs%?\\t%0, %1, %2"
2165 [(set_attr "conds" "set")
2166 (set_attr "arch" "t2,*")
2167 (set_attr "length" "2,4")]
;;
;; Signed immediate variant; requires the constant to be its own
;; sign-extension.
2170 (define_insn "subvsi3_borrow_imm"
2171 [(set (reg:CC_V CC_REGNUM)
2175 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
2176 (match_operand 2 "arm_adcimm_operand" "I,K"))
2177 (match_operand:DI 4 "arm_borrow_operation" ""))
2179 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2180 (match_operand:SI 3 "arm_borrow_operation" "")))))
2181 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2182 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2185 && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
2188 adcs%?\\t%0, %1, #%B2"
2189 [(set_attr "conds" "set")
2190 (set_attr "type" "alus_imm")]
;; Floating-point subtraction expanders.  SFmode requires hard float;
;; DFmode additionally requires double-precision VFP
;; (!TARGET_VFP_SINGLE).
2193 (define_expand "subsf3"
2194 [(set (match_operand:SF 0 "s_register_operand")
2195 (minus:SF (match_operand:SF 1 "s_register_operand")
2196 (match_operand:SF 2 "s_register_operand")))]
2197 "TARGET_32BIT && TARGET_HARD_FLOAT"
2201 (define_expand "subdf3"
2202 [(set (match_operand:DF 0 "s_register_operand")
2203 (minus:DF (match_operand:DF 1 "s_register_operand")
2204 (match_operand:DF 2 "s_register_operand")))]
2205 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2210 ;; Multiplication insns
;; HImode multiply: done as a 16x16->32 multiply (mulhisi3) into an
;; SImode temporary, whose low half becomes the HImode result.
;; Requires the DSP multiply extension.
2212 (define_expand "mulhi3"
2213 [(set (match_operand:HI 0 "s_register_operand")
2214 (mult:HI (match_operand:HI 1 "s_register_operand")
2215 (match_operand:HI 2 "s_register_operand")))]
2216 "TARGET_DSP_MULTIPLY"
2219 rtx result = gen_reg_rtx (SImode);
2220 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
2221 emit_move_insn (operands[0], gen_lowpart (HImode, result));
;; SImode multiply expander.
2226 (define_expand "mulsi3"
2227 [(set (match_operand:SI 0 "s_register_operand")
2228 (mult:SI (match_operand:SI 2 "s_register_operand")
2229 (match_operand:SI 1 "s_register_operand")))]
2234 ;; Use `&' and then `0' to prevent operands 0 and 2 being the same
;; MUL instruction.  The nov6 alternatives use earlyclobber (&) plus
;; tying ("0") to honour the pre-v6 restriction mentioned above;
;; alternative 0 is the narrow Thumb-2 form.
2236 [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
2237 (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
2238 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
2240 "mul%?\\t%0, %2, %1"
2241 [(set_attr "type" "mul")
2242 (set_attr "predicable" "yes")
2243 (set_attr "arch" "t2,v6,nov6,nov6")
2244 (set_attr "length" "4")
2245 (set_attr "predicable_short_it" "yes,no,*,*")]
2248 ;; MLA and MLS instruction. Use operand 1 for the accumulator to prefer
2249 ;; reusing the same register.
;; Multiply-accumulate: operand0 = operand1 + operand3 * operand2,
;; emitted as MLA.
2252 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
2254 (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
2255 (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
2256 (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
2258 "mla%?\\t%0, %3, %2, %1"
2259 [(set_attr "type" "mla")
2260 (set_attr "predicable" "yes")
2261 (set_attr "arch" "v6,nov6,nov6,nov6")]
;; Multiply-subtract: operand0 = operand1 - operand3 * operand2,
;; emitted as MLS; only available with Thumb-2 capable architectures
;; (arm_arch_thumb2).
2265 [(set (match_operand:SI 0 "s_register_operand" "=r")
2267 (match_operand:SI 1 "s_register_operand" "r")
2268 (mult:SI (match_operand:SI 3 "s_register_operand" "r")
2269 (match_operand:SI 2 "s_register_operand" "r"))))]
2270 "TARGET_32BIT && arm_arch_thumb2"
2271 "mls%?\\t%0, %3, %2, %1"
2272 [(set_attr "type" "mla")
2273 (set_attr "predicable" "yes")]
;; MULS: multiply that also sets the N/Z flags (CC_NZ).  Pre-v6 ARM
;; variant: the output is earlyclobbered because older cores require
;; Rd != Rm.
2276 (define_insn "*mulsi3_compare0"
2277 [(set (reg:CC_NZ CC_REGNUM)
2278 (compare:CC_NZ (mult:SI
2279 (match_operand:SI 2 "s_register_operand" "r,r")
2280 (match_operand:SI 1 "s_register_operand" "%0,r"))
2282 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
2283 (mult:SI (match_dup 2) (match_dup 1)))]
2284 "TARGET_ARM && !arm_arch6"
2285 "muls%?\\t%0, %2, %1"
2286 [(set_attr "conds" "set")
2287 (set_attr "type" "muls")]
;; v6 MULS variant; only used when optimizing for size.
2290 (define_insn "*mulsi3_compare0_v6"
2291 [(set (reg:CC_NZ CC_REGNUM)
2292 (compare:CC_NZ (mult:SI
2293 (match_operand:SI 2 "s_register_operand" "r")
2294 (match_operand:SI 1 "s_register_operand" "r"))
2296 (set (match_operand:SI 0 "s_register_operand" "=r")
2297 (mult:SI (match_dup 2) (match_dup 1)))]
2298 "TARGET_ARM && arm_arch6 && optimize_size"
2299 "muls%?\\t%0, %2, %1"
2300 [(set_attr "conds" "set")
2301 (set_attr "type" "muls")]
;; Flags-only MULS (pre-v6): the product register is a clobbered
;; scratch.
2304 (define_insn "*mulsi_compare0_scratch"
2305 [(set (reg:CC_NZ CC_REGNUM)
2306 (compare:CC_NZ (mult:SI
2307 (match_operand:SI 2 "s_register_operand" "r,r")
2308 (match_operand:SI 1 "s_register_operand" "%0,r"))
2310 (clobber (match_scratch:SI 0 "=&r,&r"))]
2311 "TARGET_ARM && !arm_arch6"
2312 "muls%?\\t%0, %2, %1"
2313 [(set_attr "conds" "set")
2314 (set_attr "type" "muls")]
;; Flags-only MULS (v6, size-optimized compilations only).
2317 (define_insn "*mulsi_compare0_scratch_v6"
2318 [(set (reg:CC_NZ CC_REGNUM)
2319 (compare:CC_NZ (mult:SI
2320 (match_operand:SI 2 "s_register_operand" "r")
2321 (match_operand:SI 1 "s_register_operand" "r"))
2323 (clobber (match_scratch:SI 0 "=r"))]
2324 "TARGET_ARM && arm_arch6 && optimize_size"
2325 "muls%?\\t%0, %2, %1"
2326 [(set_attr "conds" "set")
2327 (set_attr "type" "muls")]
;; MLAS: multiply-accumulate that also sets the N/Z flags.
2330 (define_insn "*mulsi3addsi_compare0"
2331 [(set (reg:CC_NZ CC_REGNUM)
2334 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2335 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2336 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
2338 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
2339 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2341 "TARGET_ARM && arm_arch6"
2342 "mlas%?\\t%0, %2, %1, %3"
2343 [(set_attr "conds" "set")
2344 (set_attr "type" "mlas")]
;; MLAS (v6), size-optimized compilations only.
2347 (define_insn "*mulsi3addsi_compare0_v6"
2348 [(set (reg:CC_NZ CC_REGNUM)
2351 (match_operand:SI 2 "s_register_operand" "r")
2352 (match_operand:SI 1 "s_register_operand" "r"))
2353 (match_operand:SI 3 "s_register_operand" "r"))
2355 (set (match_operand:SI 0 "s_register_operand" "=r")
2356 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2358 "TARGET_ARM && arm_arch6 && optimize_size"
2359 "mlas%?\\t%0, %2, %1, %3"
2360 [(set_attr "conds" "set")
2361 (set_attr "type" "mlas")]
;; Flags-only MLAS (pre-v6): the accumulate result is a scratch.
2364 (define_insn "*mulsi3addsi_compare0_scratch"
2365 [(set (reg:CC_NZ CC_REGNUM)
2368 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2369 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2370 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
2372 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
2373 "TARGET_ARM && !arm_arch6"
2374 "mlas%?\\t%0, %2, %1, %3"
2375 [(set_attr "conds" "set")
2376 (set_attr "type" "mlas")]
;; Flags-only MLAS (v6, size-optimized compilations only).
2379 (define_insn "*mulsi3addsi_compare0_scratch_v6"
2380 [(set (reg:CC_NZ CC_REGNUM)
2383 (match_operand:SI 2 "s_register_operand" "r")
2384 (match_operand:SI 1 "s_register_operand" "r"))
2385 (match_operand:SI 3 "s_register_operand" "r"))
2387 (clobber (match_scratch:SI 0 "=r"))]
2388 "TARGET_ARM && arm_arch6 && optimize_size"
2389 "mlas%?\\t%0, %2, %1, %3"
2390 [(set_attr "conds" "set")
2391 (set_attr "type" "mlas")]
2394 ;; 32x32->64 widening multiply.
2395 ;; The only difference between the v3-5 and v6+ versions is the requirement
2396 ;; that the output does not overlap with either input.
;; Expander: splits the DImode destination into its low/high SImode
;; halves and emits <US>mull directly (SE/Us select signed/unsigned).
2398 (define_expand "<Us>mulsidi3"
2399 [(set (match_operand:DI 0 "s_register_operand")
2401 (SE:DI (match_operand:SI 1 "s_register_operand"))
2402 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
2405 emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
2406 gen_highpart (SImode, operands[0]),
2407 operands[1], operands[2]));
;; UMULL/SMULL: operand 0 receives the low word and operand 1 the high
;; word of the 64-bit product; the nov6 alternative earlyclobbers both
;; outputs per the overlap restriction above.
2412 (define_insn "<US>mull"
2413 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2415 (match_operand:SI 2 "s_register_operand" "%r,r")
2416 (match_operand:SI 3 "s_register_operand" "r,r")))
2417 (set (match_operand:SI 1 "s_register_operand" "=r,&r")
2420 (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
2423 "<US>mull%?\\t%0, %1, %2, %3"
2424 [(set_attr "type" "umull")
2425 (set_attr "predicable" "yes")
2426 (set_attr "arch" "v6,nov6")]
;; Widening multiply-accumulate expander: forwards the low/high halves
;; of both the destination and the DImode addend to <US>mlal.
2429 (define_expand "<Us>maddsidi4"
2430 [(set (match_operand:DI 0 "s_register_operand")
2433 (SE:DI (match_operand:SI 1 "s_register_operand"))
2434 (SE:DI (match_operand:SI 2 "s_register_operand")))
2435 (match_operand:DI 3 "s_register_operand")))]
2438 emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
2439 gen_lowpart (SImode, operands[3]),
2440 gen_highpart (SImode, operands[0]),
2441 gen_highpart (SImode, operands[3]),
2442 operands[1], operands[2]));
;; UMLAL/SMLAL: accumulates a 32x32 widening product into a 64-bit
;; value held in two SImode register pairs (low: 0/1, high: 2/3).
2447 (define_insn "<US>mlal"
2448 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2451 (match_operand:SI 4 "s_register_operand" "%r,r")
2452 (match_operand:SI 5 "s_register_operand" "r,r"))
2453 (match_operand:SI 1 "s_register_operand" "0,0")))
2454 (set (match_operand:SI 2 "s_register_operand" "=r,&r")
2459 (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
2460 (zero_extend:DI (match_dup 1)))
2462 (match_operand:SI 3 "s_register_operand" "2,2")))]
2464 "<US>mlal%?\\t%0, %2, %4, %5"
2465 [(set_attr "type" "umlal")
2466 (set_attr "predicable" "yes")
2467 (set_attr "arch" "v6,nov6")]
;; High-part-only widening multiply expander.
2470 (define_expand "<US>mulsi3_highpart"
2472 [(set (match_operand:SI 0 "s_register_operand")
2476 (SE:DI (match_operand:SI 1 "s_register_operand"))
2477 (SE:DI (match_operand:SI 2 "s_register_operand")))
2479 (clobber (match_scratch:SI 3 ""))])]
;; UMULL/SMULL with only the high word live: the low word goes to the
;; scratch register (%3 in the template).
2484 (define_insn "*<US>mull_high"
2485 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
2489 (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
2490 (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
2492 (clobber (match_scratch:SI 3 "=r,&r,&r"))]
2494 "<US>mull%?\\t%3, %0, %2, %1"
2495 [(set_attr "type" "umull")
2496 (set_attr "predicable" "yes")
2497 (set_attr "arch" "v6,nov6,nov6")]
;; SMULBB: signed 16x16->32 multiply of the bottom halfwords of both
;; operands.  Requires the DSP multiply extension.
2500 (define_insn "mulhisi3"
2501 [(set (match_operand:SI 0 "s_register_operand" "=r")
2502 (mult:SI (sign_extend:SI
2503 (match_operand:HI 1 "s_register_operand" "%r"))
2505 (match_operand:HI 2 "s_register_operand" "r"))))]
2506 "TARGET_DSP_MULTIPLY"
2507 "smulbb%?\\t%0, %1, %2"
2508 [(set_attr "type" "smulxy")
2509 (set_attr "predicable" "yes")]
;; SMULTB: top halfword of operand 1 (expressed as an arithmetic right
;; shift in the RTL) times bottom halfword of operand 2.
2512 (define_insn "*mulhisi3tb"
2513 [(set (match_operand:SI 0 "s_register_operand" "=r")
2514 (mult:SI (ashiftrt:SI
2515 (match_operand:SI 1 "s_register_operand" "r")
2518 (match_operand:HI 2 "s_register_operand" "r"))))]
2519 "TARGET_DSP_MULTIPLY"
2520 "smultb%?\\t%0, %1, %2"
2521 [(set_attr "type" "smulxy")
2522 (set_attr "predicable" "yes")]
;; SMULBT: bottom halfword of operand 1 times top halfword of
;; operand 2.
2525 (define_insn "*mulhisi3bt"
2526 [(set (match_operand:SI 0 "s_register_operand" "=r")
2527 (mult:SI (sign_extend:SI
2528 (match_operand:HI 1 "s_register_operand" "r"))
2530 (match_operand:SI 2 "s_register_operand" "r")
2532 "TARGET_DSP_MULTIPLY"
2533 "smulbt%?\\t%0, %1, %2"
2534 [(set_attr "type" "smulxy")
2535 (set_attr "predicable" "yes")]
;; SMULTT: top halfwords of both operands.
2538 (define_insn "*mulhisi3tt"
2539 [(set (match_operand:SI 0 "s_register_operand" "=r")
2540 (mult:SI (ashiftrt:SI
2541 (match_operand:SI 1 "s_register_operand" "r")
2544 (match_operand:SI 2 "s_register_operand" "r")
2546 "TARGET_DSP_MULTIPLY"
2547 "smultt%?\\t%0, %1, %2"
2548 [(set_attr "type" "smulxy")
2549 (set_attr "predicable" "yes")]
;; 16x16+32 multiply-accumulate expander (SMLABB).  Falls back to a
;; separate multiply + add when the function reads the Q bit, as the
;; in-function comment below explains.
2552 (define_expand "maddhisi4"
2553 [(set (match_operand:SI 0 "s_register_operand")
2554 (plus:SI (mult:SI (sign_extend:SI
2555 (match_operand:HI 1 "s_register_operand"))
2557 (match_operand:HI 2 "s_register_operand")))
2558 (match_operand:SI 3 "s_register_operand")))]
2559 "TARGET_DSP_MULTIPLY"
2561 /* If this function reads the Q bit from ACLE intrinsics break up the
2562 multiplication and accumulation as an overflow during accumulation will
2563 clobber the Q flag. */
2566 rtx tmp = gen_reg_rtx (SImode);
2567 emit_insn (gen_mulhisi3 (tmp, operands[1], operands[2]));
2568 emit_insn (gen_addsi3 (operands[0], tmp, operands[3]));
;; SMLABB insn; disabled when the Q bit is read (!ARM_Q_BIT_READ)
;; because the accumulate may set Q.
2574 (define_insn "*arm_maddhisi4"
2575 [(set (match_operand:SI 0 "s_register_operand" "=r")
2576 (plus:SI (mult:SI (sign_extend:SI
2577 (match_operand:HI 1 "s_register_operand" "r"))
2579 (match_operand:HI 2 "s_register_operand" "r")))
2580 (match_operand:SI 3 "s_register_operand" "r")))]
2581 "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2582 "smlabb%?\\t%0, %1, %2, %3"
2583 [(set_attr "type" "smlaxy")
2584 (set_attr "predicable" "yes")]
;; SMLABB variant that also models its effect on the Q (saturation)
;; flag via the APSRQ pseudo register and UNSPEC_Q_SET.
2587 (define_insn "arm_smlabb_setq"
2588 [(set (match_operand:SI 0 "s_register_operand" "=r")
2589 (plus:SI (mult:SI (sign_extend:SI
2590 (match_operand:HI 1 "s_register_operand" "r"))
2592 (match_operand:HI 2 "s_register_operand" "r")))
2593 (match_operand:SI 3 "s_register_operand" "r")))
2594 (set (reg:CC APSRQ_REGNUM)
2595 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2596 "TARGET_DSP_MULTIPLY"
2597 "smlabb%?\\t%0, %1, %2, %3"
2598 [(set_attr "type" "smlaxy")
2599 (set_attr "predicable" "yes")]
;; ACLE __smlabb expander: narrows the inputs to HImode and emits
;; either the Q-setting insn or the plain maddhisi4 pattern.
2602 (define_expand "arm_smlabb"
2603 [(match_operand:SI 0 "s_register_operand")
2604 (match_operand:SI 1 "s_register_operand")
2605 (match_operand:SI 2 "s_register_operand")
2606 (match_operand:SI 3 "s_register_operand")]
2607 "TARGET_DSP_MULTIPLY"
2609 rtx mult1 = gen_lowpart (HImode, operands[1]);
2610 rtx mult2 = gen_lowpart (HImode, operands[2]);
2612 emit_insn (gen_arm_smlabb_setq (operands[0], mult1, mult2, operands[3]));
2614 emit_insn (gen_maddhisi4 (operands[0], mult1, mult2, operands[3]));
2619 ;; Note: there is no maddhisi4ibt because this one is the canonical form
;; SMLATB: top halfword of operand 1 times bottom halfword of
;; operand 2, plus accumulator; disabled when the Q bit is read.
2620 (define_insn "maddhisi4tb"
2621 [(set (match_operand:SI 0 "s_register_operand" "=r")
2622 (plus:SI (mult:SI (ashiftrt:SI
2623 (match_operand:SI 1 "s_register_operand" "r")
2626 (match_operand:HI 2 "s_register_operand" "r")))
2627 (match_operand:SI 3 "s_register_operand" "r")))]
2628 "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2629 "smlatb%?\\t%0, %1, %2, %3"
2630 [(set_attr "type" "smlaxy")
2631 (set_attr "predicable" "yes")]
;; SMLATB variant that also models setting the Q flag.
2634 (define_insn "arm_smlatb_setq"
2635 [(set (match_operand:SI 0 "s_register_operand" "=r")
2636 (plus:SI (mult:SI (ashiftrt:SI
2637 (match_operand:SI 1 "s_register_operand" "r")
2640 (match_operand:HI 2 "s_register_operand" "r")))
2641 (match_operand:SI 3 "s_register_operand" "r")))
2642 (set (reg:CC APSRQ_REGNUM)
2643 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2644 "TARGET_DSP_MULTIPLY"
2645 "smlatb%?\\t%0, %1, %2, %3"
2646 [(set_attr "type" "smlaxy")
2647 (set_attr "predicable" "yes")]
;; ACLE __smlatb expander: picks the Q-setting or plain insn.
2650 (define_expand "arm_smlatb"
2651 [(match_operand:SI 0 "s_register_operand")
2652 (match_operand:SI 1 "s_register_operand")
2653 (match_operand:SI 2 "s_register_operand")
2654 (match_operand:SI 3 "s_register_operand")]
2655 "TARGET_DSP_MULTIPLY"
2657 rtx mult2 = gen_lowpart (HImode, operands[2]);
2659 emit_insn (gen_arm_smlatb_setq (operands[0], operands[1],
2660 mult2, operands[3]));
2662 emit_insn (gen_maddhisi4tb (operands[0], operands[1],
2663 mult2, operands[3]));
;; SMLATT: top halfwords of both operands, plus accumulator; disabled
;; when the Q bit is read.
2668 (define_insn "maddhisi4tt"
2669 [(set (match_operand:SI 0 "s_register_operand" "=r")
2670 (plus:SI (mult:SI (ashiftrt:SI
2671 (match_operand:SI 1 "s_register_operand" "r")
2674 (match_operand:SI 2 "s_register_operand" "r")
2676 (match_operand:SI 3 "s_register_operand" "r")))]
2677 "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2678 "smlatt%?\\t%0, %1, %2, %3"
2679 [(set_attr "type" "smlaxy")
2680 (set_attr "predicable" "yes")]
;; SMLATT variant that also models setting the Q flag.
2683 (define_insn "arm_smlatt_setq"
2684 [(set (match_operand:SI 0 "s_register_operand" "=r")
2685 (plus:SI (mult:SI (ashiftrt:SI
2686 (match_operand:SI 1 "s_register_operand" "r")
2689 (match_operand:SI 2 "s_register_operand" "r")
2691 (match_operand:SI 3 "s_register_operand" "r")))
2692 (set (reg:CC APSRQ_REGNUM)
2693 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2694 "TARGET_DSP_MULTIPLY"
2695 "smlatt%?\\t%0, %1, %2, %3"
2696 [(set_attr "type" "smlaxy")
2697 (set_attr "predicable" "yes")]
;; ACLE __smlatt expander: picks the Q-setting or plain insn.
2700 (define_expand "arm_smlatt"
2701 [(match_operand:SI 0 "s_register_operand")
2702 (match_operand:SI 1 "s_register_operand")
2703 (match_operand:SI 2 "s_register_operand")
2704 (match_operand:SI 3 "s_register_operand")]
2705 "TARGET_DSP_MULTIPLY"
2708 emit_insn (gen_arm_smlatt_setq (operands[0], operands[1],
2709 operands[2], operands[3]));
2711 emit_insn (gen_maddhisi4tt (operands[0], operands[1],
2712 operands[2], operands[3]));
;; SMLALBB: 16x16->32 product accumulated into a 64-bit value; the
;; DImode accumulator is tied to the output ("0") and printed as the
;; %Q0/%R0 register pair.
2717 (define_insn "maddhidi4"
2718 [(set (match_operand:DI 0 "s_register_operand" "=r")
2720 (mult:DI (sign_extend:DI
2721 (match_operand:HI 1 "s_register_operand" "r"))
2723 (match_operand:HI 2 "s_register_operand" "r")))
2724 (match_operand:DI 3 "s_register_operand" "0")))]
2725 "TARGET_DSP_MULTIPLY"
2726 "smlalbb%?\\t%Q0, %R0, %1, %2"
2727 [(set_attr "type" "smlalxy")
2728 (set_attr "predicable" "yes")])
2730 ;; Note: there is no maddhidi4ibt because this one is the canonical form
;; SMLALTB: top halfword of operand 1 times bottom halfword of
;; operand 2, accumulated into 64 bits.
2731 (define_insn "*maddhidi4tb"
2732 [(set (match_operand:DI 0 "s_register_operand" "=r")
2734 (mult:DI (sign_extend:DI
2736 (match_operand:SI 1 "s_register_operand" "r")
2739 (match_operand:HI 2 "s_register_operand" "r")))
2740 (match_operand:DI 3 "s_register_operand" "0")))]
2741 "TARGET_DSP_MULTIPLY"
2742 "smlaltb%?\\t%Q0, %R0, %1, %2"
2743 [(set_attr "type" "smlalxy")
2744 (set_attr "predicable" "yes")])
;; SMLALTT: top halfwords of both operands, accumulated into 64 bits.
2746 (define_insn "*maddhidi4tt"
2747 [(set (match_operand:DI 0 "s_register_operand" "=r")
2749 (mult:DI (sign_extend:DI
2751 (match_operand:SI 1 "s_register_operand" "r")
2755 (match_operand:SI 2 "s_register_operand" "r")
2757 (match_operand:DI 3 "s_register_operand" "0")))]
2758 "TARGET_DSP_MULTIPLY"
2759 "smlaltt%?\\t%Q0, %R0, %1, %2"
2760 [(set_attr "type" "smlalxy")
2761 (set_attr "predicable" "yes")])
;; SMLAW-family insn parameterized by iterators: <smlaw_op> selects
;; the mnemonic and the <add_clobber_q_name>/<add_clobber_q_pred>
;; attribute pair generates both the plain and the Q-setting forms.
2763 (define_insn "arm_<smlaw_op><add_clobber_q_name>_insn"
2764 [(set (match_operand:SI 0 "s_register_operand" "=r")
2766 [(match_operand:SI 1 "s_register_operand" "r")
2767 (match_operand:SI 2 "s_register_operand" "r")
2768 (match_operand:SI 3 "s_register_operand" "r")]
2770 "TARGET_DSP_MULTIPLY && <add_clobber_q_pred>"
2771 "<smlaw_op>%?\\t%0, %1, %2, %3"
2772 [(set_attr "type" "smlaxy")
2773 (set_attr "predicable" "yes")]
;; ACLE expander: emits the _setq_insn or plain _insn form of the
;; pattern above.
2776 (define_expand "arm_<smlaw_op>"
2777 [(set (match_operand:SI 0 "s_register_operand")
2779 [(match_operand:SI 1 "s_register_operand")
2780 (match_operand:SI 2 "s_register_operand")
2781 (match_operand:SI 3 "s_register_operand")]
2783 "TARGET_DSP_MULTIPLY"
2786 emit_insn (gen_arm_<smlaw_op>_setq_insn (operands[0], operands[1],
2787 operands[2], operands[3]));
2789 emit_insn (gen_arm_<smlaw_op>_insn (operands[0], operands[1],
2790 operands[2], operands[3]));
;; Floating-point multiply and divide expanders.  SFmode forms require
;; hard float; the DFmode forms additionally require double-precision
;; VFP (expressed as !TARGET_VFP_SINGLE for muldf3 and
;; TARGET_VFP_DOUBLE for divdf3).
2795 (define_expand "mulsf3"
2796 [(set (match_operand:SF 0 "s_register_operand")
2797 (mult:SF (match_operand:SF 1 "s_register_operand")
2798 (match_operand:SF 2 "s_register_operand")))]
2799 "TARGET_32BIT && TARGET_HARD_FLOAT"
2803 (define_expand "muldf3"
2804 [(set (match_operand:DF 0 "s_register_operand")
2805 (mult:DF (match_operand:DF 1 "s_register_operand")
2806 (match_operand:DF 2 "s_register_operand")))]
2807 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2813 (define_expand "divsf3"
2814 [(set (match_operand:SF 0 "s_register_operand")
2815 (div:SF (match_operand:SF 1 "s_register_operand")
2816 (match_operand:SF 2 "s_register_operand")))]
2817 "TARGET_32BIT && TARGET_HARD_FLOAT"
2820 (define_expand "divdf3"
2821 [(set (match_operand:DF 0 "s_register_operand")
2822 (div:DF (match_operand:DF 1 "s_register_operand")
2823 (match_operand:DF 2 "s_register_operand")))]
2824 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
2828 ; Expand logical operations. The mid-end expander does not split off memory
2829 ; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
2830 ; So an explicit expander is needed to generate better code.
;; DImode and/ior/xor expander: the LOGICAL iterator builds all three;
;; the operation is performed as two SImode operations on the low and
;; high halves of the operands.
2832 (define_expand "<LOGICAL:optab>di3"
2833 [(set (match_operand:DI 0 "s_register_operand")
2834 (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
2835 (match_operand:DI 2 "arm_<optab>di_operand")))]
2838 rtx low = simplify_gen_binary (<CODE>, SImode,
2839 gen_lowpart (SImode, operands[1]),
2840 gen_lowpart (SImode, operands[2]));
2841 rtx high = simplify_gen_binary (<CODE>, SImode,
2842 gen_highpart (SImode, operands[1]),
2843 gen_highpart_mode (SImode, DImode,
2846 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2847 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
;; DImode bitwise NOT, likewise split into two SImode NOTs.
2852 (define_expand "one_cmpldi2"
2853 [(set (match_operand:DI 0 "s_register_operand")
2854 (not:DI (match_operand:DI 1 "s_register_operand")))]
2857 rtx low = simplify_gen_unary (NOT, SImode,
2858 gen_lowpart (SImode, operands[1]),
2860 rtx high = simplify_gen_unary (NOT, SImode,
2861 gen_highpart_mode (SImode, DImode,
2865 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2866 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2871 ;; Split DImode and, ior, xor operations. Simply perform the logical
2872 ;; operation on the upper and lower halves of the registers.
2873 ;; This is needed for atomic operations in arm_split_atomic_op.
2874 ;; Avoid splitting IWMMXT instructions.
2876 [(set (match_operand:DI 0 "s_register_operand" "")
2877 (match_operator:DI 6 "logical_binary_operator"
2878 [(match_operand:DI 1 "s_register_operand" "")
2879 (match_operand:DI 2 "s_register_operand" "")]))]
2880 "TARGET_32BIT && reload_completed
2881 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2882 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2883 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
/* Rewrite the DImode operands as their SImode low (0/1/2) and high
   (3/4/5) parts for the two split instructions above.  */
2886 operands[3] = gen_highpart (SImode, operands[0]);
2887 operands[0] = gen_lowpart (SImode, operands[0]);
2888 operands[4] = gen_highpart (SImode, operands[1]);
2889 operands[1] = gen_lowpart (SImode, operands[1]);
2890 operands[5] = gen_highpart (SImode, operands[2]);
2891 operands[2] = gen_lowpart (SImode, operands[2]);
2895 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
2896 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
2898 [(set (match_operand:DI 0 "s_register_operand")
2899 (not:DI (match_operand:DI 1 "s_register_operand")))]
2901 [(set (match_dup 0) (not:SI (match_dup 1)))
2902 (set (match_dup 2) (not:SI (match_dup 3)))]
/* Operands 0/1 become the low halves; 2/3 the high halves.  */
2905 operands[2] = gen_highpart (SImode, operands[0]);
2906 operands[0] = gen_lowpart (SImode, operands[0]);
2907 operands[3] = gen_highpart (SImode, operands[1]);
2908 operands[1] = gen_lowpart (SImode, operands[1]);
;; AND expander.  For constant masks it special-cases: 0xff on v6
;; becomes a QImode zero-extend; on Thumb-1, small inverted constants
;; become BIC, and low-bit masks (2^i - 1) become an extract or an
;; LSR/LSL shift pair; otherwise the constant is either forced into a
;; register or synthesized via arm_split_constant.
;; NOTE(review): several interior lines of this expander are not
;; visible in this extract, so the branch structure between the cases
;; above is partly inferred — verify against the full file.
2912 (define_expand "andsi3"
2913 [(set (match_operand:SI 0 "s_register_operand")
2914 (and:SI (match_operand:SI 1 "s_register_operand")
2915 (match_operand:SI 2 "reg_or_int_operand")))]
2920 if (CONST_INT_P (operands[2]))
2922 if (INTVAL (operands[2]) == 255 && arm_arch6)
2924 operands[1] = convert_to_mode (QImode, operands[1], 1);
2925 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2929 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
2930 operands[2] = force_reg (SImode, operands[2]);
2933 arm_split_constant (AND, SImode, NULL_RTX,
2934 INTVAL (operands[2]), operands[0],
2936 optimize && can_create_pseudo_p ());
2942 else /* TARGET_THUMB1 */
2944 if (!CONST_INT_P (operands[2]))
2946 rtx tmp = force_reg (SImode, operands[2]);
2947 if (rtx_equal_p (operands[0], operands[1]))
2951 operands[2] = operands[1];
2959 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2961 operands[2] = force_reg (SImode,
2962 GEN_INT (~INTVAL (operands[2])));
2964 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2969 for (i = 9; i <= 31; i++)
2971 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
2973 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2977 else if ((HOST_WIDE_INT_1 << i) - 1
2978 == ~INTVAL (operands[2]))
2980 rtx shift = GEN_INT (i);
2981 rtx reg = gen_reg_rtx (SImode);
2983 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2984 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2990 operands[2] = force_reg (SImode, operands[2]);
2996 ; ??? Check split length for Thumb-2
;; AND insn with post-reload split for hard constants.  Alternatives:
;; encodable immediate (I), narrow Thumb register form, BIC of the
;; inverted immediate (K), register AND, and a 16-byte multi-insn
;; constant synthesis handled by the split via arm_split_constant.
2997 (define_insn_and_split "*arm_andsi3_insn"
2998 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2999 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
3000 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
3005 bic%?\\t%0, %1, #%B2
3009 && CONST_INT_P (operands[2])
3010 && !(const_ok_for_arm (INTVAL (operands[2]))
3011 || const_ok_for_arm (~INTVAL (operands[2])))"
3012 [(clobber (const_int 0))]
3014 arm_split_constant (AND, SImode, curr_insn,
3015 INTVAL (operands[2]), operands[0], operands[1], 0);
3018 [(set_attr "length" "4,4,4,4,16")
3019 (set_attr "predicable" "yes")
3020 (set_attr "predicable_short_it" "no,yes,no,no,no")
3021 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
;; ANDS: AND that also sets the N/Z flags; the K alternative emits
;; BICS with the inverted immediate instead.
3024 (define_insn "*andsi3_compare0"
3025 [(set (reg:CC_NZ CC_REGNUM)
3027 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
3028 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
3030 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3031 (and:SI (match_dup 1) (match_dup 2)))]
3035 bics%?\\t%0, %1, #%B2
3036 ands%?\\t%0, %1, %2"
3037 [(set_attr "conds" "set")
3038 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
;; Flags-only AND: the scratch (operand 2 here) is only needed for the
;; BICS alternative ("=X,r,X"); the other alternatives can use TST-
;; style forms that need no destination.
3041 (define_insn "*andsi3_compare0_scratch"
3042 [(set (reg:CC_NZ CC_REGNUM)
3044 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
3045 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
3047 (clobber (match_scratch:SI 2 "=X,r,X"))]
3051 bics%?\\t%2, %0, #%B1
3053 [(set_attr "conds" "set")
3054 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
;; Flags-only test of a contiguous bit-field: the zero_extract is
;; rewritten at output time into the equivalent shifted immediate
;; mask, then emitted as TST.
3057 (define_insn "*zeroextractsi_compare0_scratch"
3058 [(set (reg:CC_NZ CC_REGNUM)
3059 (compare:CC_NZ (zero_extract:SI
3060 (match_operand:SI 0 "s_register_operand" "r")
3061 (match_operand 1 "const_int_operand" "n")
3062 (match_operand 2 "const_int_operand" "n"))
3065 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
3066 && INTVAL (operands[1]) > 0
3067 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
3068 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
3070 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
3071 << INTVAL (operands[2]));
3072 output_asm_insn (\"tst%?\\t%0, %1\", operands);
3075 [(set_attr "conds" "set")
3076 (set_attr "predicable" "yes")
3077 (set_attr "type" "logics_imm")]
;; (bit-field != 0) as an SImode value: split after matching into an
;; ANDS against the shifted field mask followed by a conditional
;; select between the AND result and the constant 1.
3080 (define_insn_and_split "*ne_zeroextractsi"
3081 [(set (match_operand:SI 0 "s_register_operand" "=r")
3082 (ne:SI (zero_extract:SI
3083 (match_operand:SI 1 "s_register_operand" "r")
3084 (match_operand:SI 2 "const_int_operand" "n")
3085 (match_operand:SI 3 "const_int_operand" "n"))
3087 (clobber (reg:CC CC_REGNUM))]
3089 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3090 && INTVAL (operands[2]) > 0
3091 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3092 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
3095 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3096 && INTVAL (operands[2]) > 0
3097 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3098 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
3099 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3100 (compare:CC_NZ (and:SI (match_dup 1) (match_dup 2))
3102 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
3104 (if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3105 (match_dup 0) (const_int 1)))]
3107 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
3108 << INTVAL (operands[3]));
3110 [(set_attr "conds" "clob")
3111 (set (attr "length")
3112 (if_then_else (eq_attr "is_thumb" "yes")
3115 (set_attr "type" "multiple")]
;; As above, but for a field that can be tested by shifting it to the
;; top of the register: the split uses an ASHIFT by (32 - width)
;; instead of an AND mask.
3118 (define_insn_and_split "*ne_zeroextractsi_shifted"
3119 [(set (match_operand:SI 0 "s_register_operand" "=r")
3120 (ne:SI (zero_extract:SI
3121 (match_operand:SI 1 "s_register_operand" "r")
3122 (match_operand:SI 2 "const_int_operand" "n")
3125 (clobber (reg:CC CC_REGNUM))]
3129 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3130 (compare:CC_NZ (ashift:SI (match_dup 1) (match_dup 2))
3132 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
3134 (if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3135 (match_dup 0) (const_int 1)))]
3137 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
3139 [(set_attr "conds" "clob")
3140 (set_attr "length" "8")
3141 (set_attr "type" "multiple")]
;; if_then_else on a bit-field test: ANDS of the shifted mask, then a
;; conditional select involving operand 4.  Operand 0 must not overlap
;; operand 4 (see the reg_overlap_mentioned_p condition).
3144 (define_insn_and_split "*ite_ne_zeroextractsi"
3145 [(set (match_operand:SI 0 "s_register_operand" "=r")
3146 (if_then_else:SI (ne (zero_extract:SI
3147 (match_operand:SI 1 "s_register_operand" "r")
3148 (match_operand:SI 2 "const_int_operand" "n")
3149 (match_operand:SI 3 "const_int_operand" "n"))
3151 (match_operand:SI 4 "arm_not_operand" "rIK")
3153 (clobber (reg:CC CC_REGNUM))]
3155 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3156 && INTVAL (operands[2]) > 0
3157 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3158 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
3159 && !reg_overlap_mentioned_p (operands[0], operands[4])"
3162 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3163 && INTVAL (operands[2]) > 0
3164 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3165 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
3166 && !reg_overlap_mentioned_p (operands[0], operands[4])"
3167 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3168 (compare:CC_NZ (and:SI (match_dup 1) (match_dup 2))
3170 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
3172 (if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3173 (match_dup 0) (match_dup 4)))]
3175 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
3176 << INTVAL (operands[3]));
3178 [(set_attr "conds" "clob")
3179 (set_attr "length" "8")
3180 (set_attr "type" "multiple")]
;; As above for a field testable by a left shift; conditional select
;; involves operand 3, which must not overlap operand 0.  ARM mode
;; only.
3183 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
3184 [(set (match_operand:SI 0 "s_register_operand" "=r")
3185 (if_then_else:SI (ne (zero_extract:SI
3186 (match_operand:SI 1 "s_register_operand" "r")
3187 (match_operand:SI 2 "const_int_operand" "n")
3190 (match_operand:SI 3 "arm_not_operand" "rIK")
3192 (clobber (reg:CC CC_REGNUM))]
3193 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
3195 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
3196 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3197 (compare:CC_NZ (ashift:SI (match_dup 1) (match_dup 2))
3199 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
3201 (if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3202 (match_dup 0) (match_dup 3)))]
3204 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
3206 [(set_attr "conds" "clob")
3207 (set_attr "length" "8")
3208 (set_attr "type" "multiple")]
3211 ;; ??? Use the Thumb-2 bitfield insert/extract instructions where available.
;; Split (shiftable_op (zero_extract reg width pos) reg2) using the
;; scratch operand 6: shift the source left by (32 - width - pos) and
;; feed the operator a logical right shift by (32 - width), isolating
;; the field with zero extension.  (The define_split header line is
;; not visible in this extract.)
3213 [(set (match_operand:SI 0 "s_register_operand" "")
3214 (match_operator:SI 1 "shiftable_operator"
3215 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3216 (match_operand:SI 3 "const_int_operand" "")
3217 (match_operand:SI 4 "const_int_operand" ""))
3218 (match_operand:SI 5 "s_register_operand" "")]))
3219 (clobber (match_operand:SI 6 "s_register_operand" ""))]
3221 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
3224 [(lshiftrt:SI (match_dup 6) (match_dup 4))
3227 HOST_WIDE_INT temp = INTVAL (operands[3]);
3229 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
3230 operands[4] = GEN_INT (32 - temp);
;; Same transformation for sign_extract: the second shift is an
;; arithmetic right shift so the field's sign bit is propagated.
3235 [(set (match_operand:SI 0 "s_register_operand" "")
3236 (match_operator:SI 1 "shiftable_operator"
3237 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3238 (match_operand:SI 3 "const_int_operand" "")
3239 (match_operand:SI 4 "const_int_operand" ""))
3240 (match_operand:SI 5 "s_register_operand" "")]))
3241 (clobber (match_operand:SI 6 "s_register_operand" ""))]
3243 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
3246 [(ashiftrt:SI (match_dup 6) (match_dup 4))
3249 HOST_WIDE_INT temp = INTVAL (operands[3]);
3251 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
3252 operands[4] = GEN_INT (32 - temp);
3256 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
3257 ;;; represented by the bitfield, then this will produce incorrect results.
3258 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
3259 ;;; which have a real bit-field insert instruction, the truncation happens
3260 ;;; in the bit-field insert instruction itself. Since arm does not have a
3261 ;;; bit-field insert instruction, we would have to emit code here to truncate
3262 ;;; the value before we insert. This loses some of the advantage of having
3263 ;;; this insv pattern, so this pattern needs to be reevaluated.
;; Bit-field insert expander.  Visible strategy, in order:
;;   1. Thumb-2 + unaligned access + MEM destination, 16/32-bit field on a
;;      byte boundary: emit an unaligned store (SImode or HImode).
;;   2. Thumb-2 + register destination: insv_zero / a single ORR for
;;      suitable constants, else BFI via insv_t2.
;;   3. Otherwise: synthesize the insert with shift/AND/ORR sequences.
;; NOTE(review): many interior lines (braces, some emit calls, the final
;; DONE/FAIL) are absent from this extract — see the jumps in the embedded
;; original line numbers.  Confirm against the upstream file.
3265 (define_expand "insv"
3266 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
3267 (match_operand 1 "general_operand")
3268 (match_operand 2 "general_operand"))
3269 (match_operand 3 "reg_or_int_operand"))]
3270 "TARGET_ARM || arm_arch_thumb2"
3273 int start_bit = INTVAL (operands[2]);
3274 int width = INTVAL (operands[1]);
3275 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
3276 rtx target, subtarget;
3278 if (arm_arch_thumb2)
3280 if (unaligned_access && MEM_P (operands[0])
3281 && s_register_operand (operands[3], GET_MODE (operands[3]))
3282 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
3286 if (BYTES_BIG_ENDIAN)
3287 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
3292 base_addr = adjust_address (operands[0], SImode,
3293 start_bit / BITS_PER_UNIT);
3294 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
3298 rtx tmp = gen_reg_rtx (HImode);
3300 base_addr = adjust_address (operands[0], HImode,
3301 start_bit / BITS_PER_UNIT);
3302 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
3303 emit_insn (gen_unaligned_storehi (base_addr, tmp));
3307 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
3309 bool use_bfi = TRUE;
/* Known-constant source: try insv_zero or a single ORR before
   falling back to a BFI via insv_t2.  */
3311 if (CONST_INT_P (operands[3]))
3313 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
3317 emit_insn (gen_insv_zero (operands[0], operands[1],
3322 /* See if the set can be done with a single orr instruction. */
3323 if (val == mask && const_ok_for_arm (val << start_bit))
3329 if (!REG_P (operands[3]))
3330 operands[3] = force_reg (SImode, operands[3]);
3332 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
/* Non-Thumb-2 fallback: build the insertion out of AND/ORR/shift
   sequences into SUBTARGET, then copy to the real destination.  */
3341 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
3344 target = copy_rtx (operands[0]);
3345 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
3346 subreg as the final target. */
3347 if (GET_CODE (target) == SUBREG)
3349 subtarget = gen_reg_rtx (SImode);
3350 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
3351 < GET_MODE_SIZE (SImode))
3352 target = SUBREG_REG (target);
3357 if (CONST_INT_P (operands[3]))
3359 /* Since we are inserting a known constant, we may be able to
3360 reduce the number of bits that we have to clear so that
3361 the mask becomes simple. */
3362 /* ??? This code does not check to see if the new mask is actually
3363 simpler. It may not be. */
3364 rtx op1 = gen_reg_rtx (SImode);
3365 /* ??? Truncate operand3 to fit in the bitfield. See comment before
3366 start of this pattern. */
3367 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
3368 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
3370 emit_insn (gen_andsi3 (op1, operands[0],
3371 gen_int_mode (~mask2, SImode)));
3372 emit_insn (gen_iorsi3 (subtarget, op1,
3373 gen_int_mode (op3_value << start_bit, SImode)));
3375 else if (start_bit == 0
3376 && !(const_ok_for_arm (mask)
3377 || const_ok_for_arm (~mask)))
3379 /* A Trick, since we are setting the bottom bits in the word,
3380 we can shift operand[3] up, operand[0] down, OR them together
3381 and rotate the result back again. This takes 3 insns, and
3382 the third might be mergeable into another op. */
3383 /* The shift up copes with the possibility that operand[3] is
3384 wider than the bitfield. */
3385 rtx op0 = gen_reg_rtx (SImode);
3386 rtx op1 = gen_reg_rtx (SImode);
3388 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3389 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
3390 emit_insn (gen_iorsi3 (op1, op1, op0));
3391 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
3393 else if ((width + start_bit == 32)
3394 && !(const_ok_for_arm (mask)
3395 || const_ok_for_arm (~mask)))
3397 /* Similar trick, but slightly less efficient. */
3399 rtx op0 = gen_reg_rtx (SImode);
3400 rtx op1 = gen_reg_rtx (SImode);
3402 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3403 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
3404 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
3405 emit_insn (gen_iorsi3 (subtarget, op1, op0));
3409 rtx op0 = gen_int_mode (mask, SImode);
3410 rtx op1 = gen_reg_rtx (SImode);
3411 rtx op2 = gen_reg_rtx (SImode);
3413 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
3415 rtx tmp = gen_reg_rtx (SImode);
3417 emit_insn (gen_movsi (tmp, op0));
3421 /* Mask out any bits in operand[3] that are not needed. */
3422 emit_insn (gen_andsi3 (op1, operands[3], op0))
3424 if (CONST_INT_P (op0)
3425 && (const_ok_for_arm (mask << start_bit)
3426 || const_ok_for_arm (~(mask << start_bit))))
3428 op0 = gen_int_mode (~(mask << start_bit), SImode);
3429 emit_insn (gen_andsi3 (op2, operands[0], op0));
3433 if (CONST_INT_P (op0))
3435 rtx tmp = gen_reg_rtx (SImode);
3437 emit_insn (gen_movsi (tmp, op0));
3442 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
3444 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
3448 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
3450 emit_insn (gen_iorsi3 (subtarget, op1, op2));
3453 if (subtarget != target)
3455 /* If TARGET is still a SUBREG, then it must be wider than a word,
3456 so we must be careful only to set the subword we were asked to. */
3457 if (GET_CODE (target) == SUBREG)
3458 emit_move_insn (target, subtarget);
3460 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
;; Clear a constant-width bitfield in a register (operands 1/2 are the
;; constant width and position).  NOTE(review): the output template and
;; condition lines are missing from this extract — presumably a BFC;
;; confirm against the upstream file.
3467 (define_insn "insv_zero"
3468 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3469 (match_operand:SI 1 "const_int_M_operand" "M")
3470 (match_operand:SI 2 "const_int_M_operand" "M"))
3474 [(set_attr "length" "4")
3475 (set_attr "predicable" "yes")
3476 (set_attr "type" "bfm")]
;; Bit-field insert from a register: emits BFI (width %1 at position %2).
3479 (define_insn "insv_t2"
3480 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3481 (match_operand:SI 1 "const_int_M_operand" "M")
3482 (match_operand:SI 2 "const_int_M_operand" "M"))
3483 (match_operand:SI 3 "s_register_operand" "r"))]
3485 "bfi%?\t%0, %3, %2, %1"
3486 [(set_attr "length" "4")
3487 (set_attr "predicable" "yes")
3488 (set_attr "type" "bfm")]
;; AND with a complemented register -> single BIC instruction.
3491 (define_insn "andsi_notsi_si"
3492 [(set (match_operand:SI 0 "s_register_operand" "=r")
3493 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3494 (match_operand:SI 1 "s_register_operand" "r")))]
3496 "bic%?\\t%0, %1, %2"
3497 [(set_attr "predicable" "yes")
3498 (set_attr "type" "logic_reg")]
;; BIC with a shifted second operand.  Alternative 1 (shift amount in a
;; register) is ARM-only per the "32,a" arch attribute.
3501 (define_insn "andsi_not_shiftsi_si"
3502 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3503 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
3504 [(match_operand:SI 2 "s_register_operand" "r,r")
3505 (match_operand:SI 3 "shift_amount_operand" "M,r")]))
3506 (match_operand:SI 1 "s_register_operand" "r,r")))]
3508 "bic%?\\t%0, %1, %2%S4"
3509 [(set_attr "predicable" "yes")
3510 (set_attr "shift" "2")
3511 (set_attr "arch" "32,a")
3512 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3515 ;; Shifted bics pattern used to set up CC status register and not reusing
3516 ;; bics output. Pattern restricts Thumb2 shift operand as bics for Thumb2
3517 ;; does not support shift by register.
;; Flag-setting BICS; the data result goes to a scratch and is discarded.
3518 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
3519 [(set (reg:CC_NZ CC_REGNUM)
3521 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3522 [(match_operand:SI 1 "s_register_operand" "r,r")
3523 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
3524 (match_operand:SI 3 "s_register_operand" "r,r"))
3526 (clobber (match_scratch:SI 4 "=r,r"))]
3528 "bics%?\\t%4, %3, %1%S0"
3529 [(set_attr "predicable" "yes")
3530 (set_attr "arch" "32,a")
3531 (set_attr "conds" "set")
3532 (set_attr "shift" "1")
3533 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3536 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the bics result is also
3537 ;; getting reused later.
3538 (define_insn "andsi_not_shiftsi_si_scc"
3539 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3541 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3542 [(match_operand:SI 1 "s_register_operand" "r,r")
3543 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
3544 (match_operand:SI 3 "s_register_operand" "r,r"))
3546 (set (match_operand:SI 4 "s_register_operand" "=r,r")
3547 (and:SI (not:SI (match_op_dup 0
3552 "bics%?\\t%4, %3, %1%S0"
3553 [(set_attr "predicable" "yes")
3554 (set_attr "arch" "32,a")
3555 (set_attr "conds" "set")
3556 (set_attr "shift" "1")
3557 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
;; Unshifted BIC that sets CC_NZ and keeps the result in operand 0.
;; NOTE(review): the output template line is missing from this extract.
3560 (define_insn "*andsi_notsi_si_compare0"
3561 [(set (reg:CC_NZ CC_REGNUM)
3563 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3564 (match_operand:SI 1 "s_register_operand" "r"))
3566 (set (match_operand:SI 0 "s_register_operand" "=r")
3567 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
3570 [(set_attr "conds" "set")
3571 (set_attr "type" "logics_shift_reg")]
;; As above, but only the flags are needed; the result goes to a scratch.
3574 (define_insn "*andsi_notsi_si_compare0_scratch"
3575 [(set (reg:CC_NZ CC_REGNUM)
3577 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3578 (match_operand:SI 1 "s_register_operand" "r"))
3580 (clobber (match_scratch:SI 0 "=r"))]
3583 [(set_attr "conds" "set")
3584 (set_attr "type" "logics_shift_reg")]
;; SImode inclusive-OR expander.  A constant operand 2 is either forced
;; into a register (when DONT_EARLY_SPLIT_CONSTANT says so) or split
;; into an instruction sequence by arm_split_constant; the Thumb-1 path
;; always forces the constant into a register.
3587 (define_expand "iorsi3"
3588 [(set (match_operand:SI 0 "s_register_operand")
3589 (ior:SI (match_operand:SI 1 "s_register_operand")
3590 (match_operand:SI 2 "reg_or_int_operand")))]
3593 if (CONST_INT_P (operands[2]))
3597 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
3598 operands[2] = force_reg (SImode, operands[2]);
3601 arm_split_constant (IOR, SImode, NULL_RTX,
3602 INTVAL (operands[2]), operands[0],
3604 optimize && can_create_pseudo_p ());
3608 else /* TARGET_THUMB1 */
3610 rtx tmp = force_reg (SImode, operands[2]);
3611 if (rtx_equal_p (operands[0], operands[1]))
3615 operands[2] = operands[1];
;; ORR (and Thumb-2 ORN for complemented immediates, constraint K /
;; the %B2 modifier).  The final ?n alternative accepts an arbitrary
;; constant and is split late by arm_split_constant.
3623 (define_insn_and_split "*iorsi3_insn"
3624 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
3625 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
3626 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
3631 orn%?\\t%0, %1, #%B2
3635 && CONST_INT_P (operands[2])
3636 && !(const_ok_for_arm (INTVAL (operands[2]))
3637 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
3638 [(clobber (const_int 0))]
3640 arm_split_constant (IOR, SImode, curr_insn,
3641 INTVAL (operands[2]), operands[0], operands[1], 0);
3644 [(set_attr "length" "4,4,4,4,16")
3645 (set_attr "arch" "32,t2,t2,32,32")
3646 (set_attr "predicable" "yes")
3647 (set_attr "predicable_short_it" "no,yes,no,no,no")
3648 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
;; With a free scratch, an IOR with a constant that is only valid when
;; complemented is done by materialising the constant first (MVN-style
;; load into the scratch) and then ORRing registers.
;; NOTE(review): the define_peephole2/define_split header line is
;; missing from this extract.
3652 [(match_scratch:SI 3 "r")
3653 (set (match_operand:SI 0 "arm_general_register_operand" "")
3654 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
3655 (match_operand:SI 2 "const_int_operand" "")))]
3657 && !const_ok_for_arm (INTVAL (operands[2]))
3658 && const_ok_for_arm (~INTVAL (operands[2]))"
3659 [(set (match_dup 3) (match_dup 2))
3660 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
;; ORRS: IOR that sets CC_NZ and keeps the result.
3664 (define_insn "*iorsi3_compare0"
3665 [(set (reg:CC_NZ CC_REGNUM)
3667 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3668 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3670 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
3671 (ior:SI (match_dup 1) (match_dup 2)))]
3673 "orrs%?\\t%0, %1, %2"
3674 [(set_attr "conds" "set")
3675 (set_attr "arch" "*,t2,*")
3676 (set_attr "length" "4,2,4")
3677 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
;; ORRS where only the flags are used; result discarded to a scratch.
3680 (define_insn "*iorsi3_compare0_scratch"
3681 [(set (reg:CC_NZ CC_REGNUM)
3683 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3684 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3686 (clobber (match_scratch:SI 0 "=r,l,r"))]
3688 "orrs%?\\t%0, %1, %2"
3689 [(set_attr "conds" "set")
3690 (set_attr "arch" "*,t2,*")
3691 (set_attr "length" "4,2,4")
3692 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
;; SImode exclusive-OR expander; mirrors iorsi3 above: constants are
;; either forced into a register or split via arm_split_constant, with a
;; Thumb-1 fallback that always uses a register.
3695 (define_expand "xorsi3"
3696 [(set (match_operand:SI 0 "s_register_operand")
3697 (xor:SI (match_operand:SI 1 "s_register_operand")
3698 (match_operand:SI 2 "reg_or_int_operand")))]
3700 "if (CONST_INT_P (operands[2]))
3704 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
3705 operands[2] = force_reg (SImode, operands[2]);
3708 arm_split_constant (XOR, SImode, NULL_RTX,
3709 INTVAL (operands[2]), operands[0],
3711 optimize && can_create_pseudo_p ());
3715 else /* TARGET_THUMB1 */
3717 rtx tmp = force_reg (SImode, operands[2]);
3718 if (rtx_equal_p (operands[0], operands[1]))
3722 operands[2] = operands[1];
;; EOR; the ?n alternative takes any constant and is split late through
;; arm_split_constant (hence the 16-byte worst-case length).
3729 (define_insn_and_split "*arm_xorsi3"
3730 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
3731 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
3732 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
3740 && CONST_INT_P (operands[2])
3741 && !const_ok_for_arm (INTVAL (operands[2]))"
3742 [(clobber (const_int 0))]
3744 arm_split_constant (XOR, SImode, curr_insn,
3745 INTVAL (operands[2]), operands[0], operands[1], 0);
3748 [(set_attr "length" "4,4,4,16")
3749 (set_attr "predicable" "yes")
3750 (set_attr "predicable_short_it" "no,yes,no,no")
3751 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
;; EORS: XOR that sets CC_NZ and keeps the result.
3754 (define_insn "*xorsi3_compare0"
3755 [(set (reg:CC_NZ CC_REGNUM)
3756 (compare:CC_NZ (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
3757 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3759 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3760 (xor:SI (match_dup 1) (match_dup 2)))]
3762 "eors%?\\t%0, %1, %2"
3763 [(set_attr "conds" "set")
3764 (set_attr "type" "logics_imm,logics_reg")]
;; Flags-only XOR compare; NOTE(review): the output template (TEQ-style)
;; is missing from this extract.
3767 (define_insn "*xorsi3_compare0_scratch"
3768 [(set (reg:CC_NZ CC_REGNUM)
3769 (compare:CC_NZ (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
3770 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
3774 [(set_attr "conds" "set")
3775 (set_attr "type" "logics_imm,logics_reg")]
3778 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
3779 ; (NOT D) we can sometimes merge the final NOT into one of the following
;; De Morgan splitter implementing the rewrite described above, using
;; operand 4 as the intermediate D.
3783 [(set (match_operand:SI 0 "s_register_operand" "")
3784 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3785 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3786 (match_operand:SI 3 "arm_rhs_operand" "")))
3787 (clobber (match_operand:SI 4 "s_register_operand" ""))]
3789 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3790 (not:SI (match_dup 3))))
3791 (set (match_dup 0) (not:SI (match_dup 4)))]
;; (A | B) & ~C as a two-insn ORR + BIC sequence, split after reload.
3795 (define_insn_and_split "*andsi_iorsi3_notsi"
3796 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3797 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3798 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3799 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3801 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3802 "&& reload_completed"
3803 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
3804 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
3806 /* If operands[3] is a constant make sure to fold the NOT into it
3807 to avoid creating a NOT of a CONST_INT. */
3808 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
3809 if (CONST_INT_P (not_rtx))
3811 operands[4] = operands[0];
3812 operands[5] = not_rtx;
3816 operands[5] = operands[0];
3817 operands[4] = not_rtx;
3820 [(set_attr "length" "8")
3821 (set_attr "ce_count" "2")
3822 (set_attr "predicable" "yes")
3823 (set_attr "type" "multiple")]
3826 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3827 ; insns are available?
;; Four parallel splitters combining a bitfield extract with a logical
;; op whose other input is a shifted register.  They differ only in
;; zero_extract vs sign_extract and in operand order within the outer
;; operator.  All require the two logical operators to match and the
;; extract width to equal 32 minus the shift count
;; (INTVAL (operands[3]) == 32 - INTVAL (operands[6])).
;; NOTE(review): the define_split headers and parts of the replacement
;; RTL are missing from this extract.
3829 [(set (match_operand:SI 0 "s_register_operand" "")
3830 (match_operator:SI 1 "logical_binary_operator"
3831 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3832 (match_operand:SI 3 "const_int_operand" "")
3833 (match_operand:SI 4 "const_int_operand" ""))
3834 (match_operator:SI 9 "logical_binary_operator"
3835 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3836 (match_operand:SI 6 "const_int_operand" ""))
3837 (match_operand:SI 7 "s_register_operand" "")])]))
3838 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3840 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3841 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3844 [(ashift:SI (match_dup 2) (match_dup 4))
3848 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3851 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Variant: the shifted-register sub-expression comes first.
3855 [(set (match_operand:SI 0 "s_register_operand" "")
3856 (match_operator:SI 1 "logical_binary_operator"
3857 [(match_operator:SI 9 "logical_binary_operator"
3858 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3859 (match_operand:SI 6 "const_int_operand" ""))
3860 (match_operand:SI 7 "s_register_operand" "")])
3861 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3862 (match_operand:SI 3 "const_int_operand" "")
3863 (match_operand:SI 4 "const_int_operand" ""))]))
3864 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3866 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3867 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3870 [(ashift:SI (match_dup 2) (match_dup 4))
3874 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3877 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Signed variant: sign_extract paired with an arithmetic right shift.
3881 [(set (match_operand:SI 0 "s_register_operand" "")
3882 (match_operator:SI 1 "logical_binary_operator"
3883 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3884 (match_operand:SI 3 "const_int_operand" "")
3885 (match_operand:SI 4 "const_int_operand" ""))
3886 (match_operator:SI 9 "logical_binary_operator"
3887 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3888 (match_operand:SI 6 "const_int_operand" ""))
3889 (match_operand:SI 7 "s_register_operand" "")])]))
3890 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3892 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3893 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3896 [(ashift:SI (match_dup 2) (match_dup 4))
3900 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3903 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Signed variant with the shifted-register sub-expression first.
3907 [(set (match_operand:SI 0 "s_register_operand" "")
3908 (match_operator:SI 1 "logical_binary_operator"
3909 [(match_operator:SI 9 "logical_binary_operator"
3910 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3911 (match_operand:SI 6 "const_int_operand" ""))
3912 (match_operand:SI 7 "s_register_operand" "")])
3913 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3914 (match_operand:SI 3 "const_int_operand" "")
3915 (match_operand:SI 4 "const_int_operand" ""))]))
3916 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3918 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3919 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3922 [(ashift:SI (match_dup 2) (match_dup 4))
3926 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3929 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3933 ;; Minimum and maximum insns
;; Signed max expander.  For the special bounds 0 and -1, emit a plain
;; SET with no CC clobber — those cases match *smax_0 / *smax_m1 below,
;; which use a single shift-operand logical insn instead of cmp+movcc.
3935 (define_expand "smaxsi3"
3937 (set (match_operand:SI 0 "s_register_operand")
3938 (smax:SI (match_operand:SI 1 "s_register_operand")
3939 (match_operand:SI 2 "arm_rhs_operand")))
3940 (clobber (reg:CC CC_REGNUM))])]
3943 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3945 /* No need for a clobber of the condition code register here. */
3946 emit_insn (gen_rtx_SET (operands[0],
3947 gen_rtx_SMAX (SImode, operands[1],
;; max(x, 0): x & ~(x >> 31) — BIC clears all bits when x is negative.
3953 (define_insn "*smax_0"
3954 [(set (match_operand:SI 0 "s_register_operand" "=r")
3955 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3958 "bic%?\\t%0, %1, %1, asr #31"
3959 [(set_attr "predicable" "yes")
3960 (set_attr "type" "logic_shift_reg")]
;; max(x, -1): x | (x >> 31) — ORR sets all bits when x is negative.
3963 (define_insn "*smax_m1"
3964 [(set (match_operand:SI 0 "s_register_operand" "=r")
3965 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3968 "orr%?\\t%0, %1, %1, asr #31"
3969 [(set_attr "predicable" "yes")
3970 (set_attr "type" "logic_shift_reg")]
;; General signed max: cmp then conditional move(s); splits after the
;; compare into an if_then_else on GE.
3973 (define_insn_and_split "*arm_smax_insn"
3974 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3975 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3976 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3977 (clobber (reg:CC CC_REGNUM))]
3980 ; cmp\\t%1, %2\;movlt\\t%0, %2
3981 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3983 [(set (reg:CC CC_REGNUM)
3984 (compare:CC (match_dup 1) (match_dup 2)))
3986 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
3990 [(set_attr "conds" "clob")
3991 (set_attr "length" "8,12")
3992 (set_attr "type" "multiple")]
;; Signed min expander; only the 0 bound gets the no-clobber fast path
;; (there is no single-insn smin with -1).
3995 (define_expand "sminsi3"
3997 (set (match_operand:SI 0 "s_register_operand")
3998 (smin:SI (match_operand:SI 1 "s_register_operand")
3999 (match_operand:SI 2 "arm_rhs_operand")))
4000 (clobber (reg:CC CC_REGNUM))])]
4003 if (operands[2] == const0_rtx)
4005 /* No need for a clobber of the condition code register here. */
4006 emit_insn (gen_rtx_SET (operands[0],
4007 gen_rtx_SMIN (SImode, operands[1],
;; min(x, 0): x & (x >> 31) — zero unless x is negative.
4013 (define_insn "*smin_0"
4014 [(set (match_operand:SI 0 "s_register_operand" "=r")
4015 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
4018 "and%?\\t%0, %1, %1, asr #31"
4019 [(set_attr "predicable" "yes")
4020 (set_attr "type" "logic_shift_reg")]
;; General signed min: cmp + conditional moves, split on LT.
4023 (define_insn_and_split "*arm_smin_insn"
4024 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4025 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
4026 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
4027 (clobber (reg:CC CC_REGNUM))]
4030 ; cmp\\t%1, %2\;movge\\t%0, %2
4031 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
4033 [(set (reg:CC CC_REGNUM)
4034 (compare:CC (match_dup 1) (match_dup 2)))
4036 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
4040 [(set_attr "conds" "clob")
4041 (set_attr "length" "8,12")
4042 (set_attr "type" "multiple,multiple")]
;; Unsigned max expander; no special-case bounds, always clobbers CC.
4045 (define_expand "umaxsi3"
4047 (set (match_operand:SI 0 "s_register_operand")
4048 (umax:SI (match_operand:SI 1 "s_register_operand")
4049 (match_operand:SI 2 "arm_rhs_operand")))
4050 (clobber (reg:CC CC_REGNUM))])]
;; Unsigned max: cmp + movcc/movcs, split on GEU after reload.
;; NOTE(review): "type" "store_4" looks suspicious for a cmp/mov
;; sequence — confirm against upstream before changing.
4055 (define_insn_and_split "*arm_umaxsi3"
4056 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
4057 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
4058 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
4059 (clobber (reg:CC CC_REGNUM))]
4062 ; cmp\\t%1, %2\;movcc\\t%0, %2
4063 ; cmp\\t%1, %2\;movcs\\t%0, %1
4064 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
4066 [(set (reg:CC CC_REGNUM)
4067 (compare:CC (match_dup 1) (match_dup 2)))
4069 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
4073 [(set_attr "conds" "clob")
4074 (set_attr "length" "8,8,12")
4075 (set_attr "type" "store_4")]
;; Unsigned min expander; mirrors umaxsi3.
4078 (define_expand "uminsi3"
4080 (set (match_operand:SI 0 "s_register_operand")
4081 (umin:SI (match_operand:SI 1 "s_register_operand")
4082 (match_operand:SI 2 "arm_rhs_operand")))
4083 (clobber (reg:CC CC_REGNUM))])]
;; Unsigned min: cmp + conditional moves, split on LTU.
4088 (define_insn_and_split "*arm_uminsi3"
4089 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
4090 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
4091 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
4092 (clobber (reg:CC CC_REGNUM))]
4095 ; cmp\\t%1, %2\;movcs\\t%0, %2
4096 ; cmp\\t%1, %2\;movcc\\t%0, %1
4097 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
4099 [(set (reg:CC CC_REGNUM)
4100 (compare:CC (match_dup 1) (match_dup 2)))
4102 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
4106 [(set_attr "conds" "clob")
4107 (set_attr "length" "8,8,12")
4108 (set_attr "type" "store_4")]
;; Store min/max directly to memory: cmp, then two conditional STRs
;; (with an IT block on Thumb-2).  Size optimisation only, and disabled
;; under -mrestrict-it.
4111 (define_insn "*store_minmaxsi"
4112 [(set (match_operand:SI 0 "memory_operand" "=m")
4113 (match_operator:SI 3 "minmax_operator"
4114 [(match_operand:SI 1 "s_register_operand" "r")
4115 (match_operand:SI 2 "s_register_operand" "r")]))
4116 (clobber (reg:CC CC_REGNUM))]
4117 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
4119 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
4120 operands[1], operands[2]);
4121 output_asm_insn (\"cmp\\t%1, %2\", operands);
4123 output_asm_insn (\"ite\t%d3\", operands);
4124 output_asm_insn (\"str%d3\\t%1, %0\", operands);
4125 output_asm_insn (\"str%D3\\t%2, %0\", operands);
4128 [(set_attr "conds" "clob")
4129 (set (attr "length")
4130 (if_then_else (eq_attr "is_thumb" "yes")
4133 (set_attr "type" "store_4")]
4136 ; Reject the frame pointer in operand[1], since reloading this after
4137 ; it has been eliminated can cause carnage.
;; Combine a min/max with a following shiftable op: cmp then two
;; conditionally-executed ALU insns.  The first alternative with a
;; PLUS/IOR/XOR against 0 collapses to a single conditional op.
4138 (define_insn "*minmax_arithsi"
4139 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4140 (match_operator:SI 4 "shiftable_operator"
4141 [(match_operator:SI 5 "minmax_operator"
4142 [(match_operand:SI 2 "s_register_operand" "r,r")
4143 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
4144 (match_operand:SI 1 "s_register_operand" "0,?r")]))
4145 (clobber (reg:CC CC_REGNUM))]
4146 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
4149 enum rtx_code code = GET_CODE (operands[4]);
4152 if (which_alternative != 0 || operands[3] != const0_rtx
4153 || (code != PLUS && code != IOR && code != XOR))
4158 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
4159 operands[2], operands[3]);
4160 output_asm_insn (\"cmp\\t%2, %3\", operands);
4164 output_asm_insn (\"ite\\t%d5\", operands);
4166 output_asm_insn (\"it\\t%d5\", operands);
4168 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
4170 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
4173 [(set_attr "conds" "clob")
4174 (set (attr "length")
4175 (if_then_else (eq_attr "is_thumb" "yes")
4178 (set_attr "type" "multiple")]
4181 ; Reject the frame pointer in operand[1], since reloading this after
4182 ; it has been eliminated can cause carnage.
;; Non-canonical form (minus with min/max as the second operand): split
;; after reload into a compare plus two cond_exec insns; the condition
;; codes are derived by reversing the min/max comparison below.
4183 (define_insn_and_split "*minmax_arithsi_non_canon"
4184 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
4186 (match_operand:SI 1 "s_register_operand" "0,?Ts")
4187 (match_operator:SI 4 "minmax_operator"
4188 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
4189 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
4190 (clobber (reg:CC CC_REGNUM))]
4191 "TARGET_32BIT && !arm_eliminable_register (operands[1])
4192 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
4194 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
4195 [(set (reg:CC CC_REGNUM)
4196 (compare:CC (match_dup 2) (match_dup 3)))
4198 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
4200 (minus:SI (match_dup 1)
4202 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
4206 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
4207 operands[2], operands[3]);
4208 enum rtx_code rc = minmax_code (operands[4]);
4209 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
4210 operands[2], operands[3]);
4212 if (mode == CCFPmode || mode == CCFPEmode)
4213 rc = reverse_condition_maybe_unordered (rc);
4215 rc = reverse_condition (rc);
4216 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
4217 if (CONST_INT_P (operands[3]))
4218 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
4220 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
4222 [(set_attr "conds" "clob")
4223 (set (attr "length")
4224 (if_then_else (eq_attr "is_thumb" "yes")
4227 (set_attr "type" "multiple")]
;; Saturating add/subtract expander over the SSPLUSMINUS code iterator
;; (defined elsewhere in this file).  Dispatches to the Q-flag-clobbering
;; variant when required, otherwise to the plain insn.
4231 (define_expand "arm_<ss_op>"
4232 [(set (match_operand:SI 0 "s_register_operand")
4233 (SSPLUSMINUS:SI (match_operand:SI 1 "s_register_operand")
4234 (match_operand:SI 2 "s_register_operand")))]
4235 "TARGET_DSP_MULTIPLY"
4238 emit_insn (gen_arm_<ss_op>_setq_insn (operands[0],
4239 operands[1], operands[2]));
4241 emit_insn (gen_arm_<ss_op>_insn (operands[0], operands[1], operands[2]));
;; The saturating add/sub instruction itself (e.g. qadd/qsub); the
;; <add_clobber_q_*> attributes select the Q-flag-tracking variant.
4246 (define_insn "arm_<ss_op><add_clobber_q_name>_insn"
4247 [(set (match_operand:SI 0 "s_register_operand" "=r")
4248 (SSPLUSMINUS:SI (match_operand:SI 1 "s_register_operand" "r")
4249 (match_operand:SI 2 "s_register_operand" "r")))]
4250 "TARGET_DSP_MULTIPLY && <add_clobber_q_pred>"
4251 "<ss_op>%?\t%0, %1, %2"
4252 [(set_attr "predicable" "yes")
4253 (set_attr "type" "alu_dsp_reg")]
;; Iterators/attrs used by the saturation patterns below: SATrev is the
;; opposite bound's code; SATlo/SAThi give the operand number holding the
;; lower/upper bound for each outer code.
4256 (define_code_iterator SAT [smin smax])
4257 (define_code_attr SATrev [(smin "smax") (smax "smin")])
4258 (define_code_attr SATlo [(smin "1") (smax "2")])
4259 (define_code_attr SAThi [(smin "2") (smax "1")])
;; SSAT builtin expander: for a bit-width VAL in [1,32], saturate
;; operand 1 to [-2^(VAL-1), 2^(VAL-1)-1] via the satsi_smin pattern,
;; using the _setq variant when the Q flag must be tracked.
4261 (define_expand "arm_ssat"
4262 [(match_operand:SI 0 "s_register_operand")
4263 (match_operand:SI 1 "s_register_operand")
4264 (match_operand:SI 2 "const_int_operand")]
4265 "TARGET_32BIT && arm_arch6"
4267 HOST_WIDE_INT val = INTVAL (operands[2]);
4268 /* The builtin checking code should have ensured the right
4269 range for the immediate. */
4270 gcc_assert (IN_RANGE (val, 1, 32));
4271 HOST_WIDE_INT upper_bound = (HOST_WIDE_INT_1 << (val - 1)) - 1;
4272 HOST_WIDE_INT lower_bound = -upper_bound - 1;
4273 rtx up_rtx = gen_int_mode (upper_bound, SImode);
4274 rtx lo_rtx = gen_int_mode (lower_bound, SImode);
4276 emit_insn (gen_satsi_smin_setq (operands[0], lo_rtx,
4277 up_rtx, operands[1]));
4279 emit_insn (gen_satsi_smin (operands[0], lo_rtx, up_rtx, operands[1]));
;; USAT builtin expander: for VAL in [0,31], saturate to [0, 2^VAL - 1].
4284 (define_expand "arm_usat"
4285 [(match_operand:SI 0 "s_register_operand")
4286 (match_operand:SI 1 "s_register_operand")
4287 (match_operand:SI 2 "const_int_operand")]
4288 "TARGET_32BIT && arm_arch6"
4290 HOST_WIDE_INT val = INTVAL (operands[2]);
4291 /* The builtin checking code should have ensured the right
4292 range for the immediate. */
4293 gcc_assert (IN_RANGE (val, 0, 31));
4294 HOST_WIDE_INT upper_bound = (HOST_WIDE_INT_1 << val) - 1;
4295 rtx up_rtx = gen_int_mode (upper_bound, SImode);
4296 rtx lo_rtx = CONST0_RTX (SImode);
4298 emit_insn (gen_satsi_smin_setq (operands[0], lo_rtx, up_rtx,
4301 emit_insn (gen_satsi_smin (operands[0], lo_rtx, up_rtx, operands[1]));
;; Read the APSR into a register.  NOTE(review): the output template
;; (presumably an MRS) is missing from this extract.
4306 (define_insn "arm_get_apsr"
4307 [(set (match_operand:SI 0 "s_register_operand" "=r")
4308 (unspec:SI [(reg:CC APSRQ_REGNUM)] UNSPEC_APSR_READ))]
4311 [(set_attr "predicable" "yes")
4312 (set_attr "conds" "use")]
;; Write the NZCVQ fields of the APSR from a register (MSR).
4315 (define_insn "arm_set_apsr"
4316 [(set (reg:CC APSRQ_REGNUM)
4318 [(match_operand:SI 0 "s_register_operand" "r")] VUNSPEC_APSR_WRITE))]
4320 "msr%?\tAPSR_nzcvq, %0"
4321 [(set_attr "predicable" "yes")
4322 (set_attr "conds" "set")]
4325 ;; Read the APSR and extract the Q bit (bit 27)
4326 (define_expand "arm_saturation_occurred"
4327 [(match_operand:SI 0 "s_register_operand")]
4330 rtx apsr = gen_reg_rtx (SImode);
4331 emit_insn (gen_arm_get_apsr (apsr));
4332 emit_insn (gen_extzv (operands[0], apsr, CONST1_RTX (SImode),
4333 gen_int_mode (27, SImode)));
4338 ;; Read the APSR and set the Q bit (bit position 27) according to operand 0
4339 (define_expand "arm_set_saturation"
4340 [(match_operand:SI 0 "reg_or_int_operand")]
4343 rtx apsr = gen_reg_rtx (SImode);
4344 emit_insn (gen_arm_get_apsr (apsr));
4345 rtx to_insert = gen_reg_rtx (SImode);
4346 if (CONST_INT_P (operands[0]))
4347 emit_move_insn (to_insert, operands[0] == CONST0_RTX (SImode)
4348 ? CONST0_RTX (SImode) : CONST1_RTX (SImode))
4351 rtx cmp = gen_rtx_NE (SImode, operands[0], CONST0_RTX (SImode));
4352 emit_insn (gen_cstoresi4 (to_insert, cmp, operands[0],
4353 CONST0_RTX (SImode)));
4355 emit_insn (gen_insv (apsr, CONST1_RTX (SImode),
4356 gen_int_mode (27, SImode), to_insert));
4357 emit_insn (gen_arm_set_apsr (apsr));
;; SSAT/USAT: saturate a register to a signed/unsigned range described by
;; the smin/smax bound operands; arm_sat_operator_match validates the
;; bounds and recovers the bit-width ("mask") used in the mnemonic.
;; The <add_clobber_q_name>/<add_clobber_q_pred> iterators generate both
;; the plain variant and the variant that clobbers the Q flag.
4362 (define_insn "satsi_<SAT:code><add_clobber_q_name>"
4363 [(set (match_operand:SI 0 "s_register_operand" "=r")
4364 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
4365 (match_operand:SI 1 "const_int_operand" "i"))
4366 (match_operand:SI 2 "const_int_operand" "i")))]
4367 "TARGET_32BIT && arm_arch6 && <add_clobber_q_pred>
4368 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4372 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4373 &mask, &signed_sat))
;; operand 1 is rewritten to the saturation bit-width for the %1 field.
4376 operands[1] = GEN_INT (mask);
4378 return "ssat%?\t%0, %1, %3";
4380 return "usat%?\t%0, %1, %3";
4382 [(set_attr "predicable" "yes")
4383 (set_attr "type" "alus_imm")]
;; As above, but the saturated value is itself a shifted register
;; (SSAT/USAT accept an LSL/ASR-shifted source operand, printed via %S3).
;; Disabled when the Q bit is observable (!ARM_Q_BIT_READ).
4386 (define_insn "*satsi_<SAT:code>_shift"
4387 [(set (match_operand:SI 0 "s_register_operand" "=r")
4388 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
4389 [(match_operand:SI 4 "s_register_operand" "r")
4390 (match_operand:SI 5 "const_int_operand" "i")])
4391 (match_operand:SI 1 "const_int_operand" "i"))
4392 (match_operand:SI 2 "const_int_operand" "i")))]
4393 "TARGET_32BIT && arm_arch6 && !ARM_Q_BIT_READ
4394 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4398 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4399 &mask, &signed_sat))
4402 operands[1] = GEN_INT (mask);
4404 return "ssat%?\t%0, %1, %4%S3";
4406 return "usat%?\t%0, %1, %4%S3";
4408 [(set_attr "predicable" "yes")
4409 (set_attr "shift" "3")
4410 (set_attr "type" "logic_shift_reg")])
;; Arm Custom Datapath Extension (CDE): CX1/CX2/CX3 take a coprocessor
;; number (%c1) plus 0/1/2 register sources and an immediate; the "a"
;; (accumulate) variants also tie the destination to an input via
;; constraint "0".  SIDI iterates SImode/DImode; <cde_suffix>/<cde_dest>
;; select the single/double-register form of the mnemonic.
4412 ;; Custom Datapath Extension insns.
4413 (define_insn "arm_cx1<mode>"
4414 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4415 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4416 (match_operand:SI 2 "const_int_ccde1_operand" "i")]
4419 "cx1<cde_suffix>\\tp%c1, <cde_dest>, %2"
4420 [(set_attr "type" "coproc")]
;; CX1A: accumulating form — operand 2 is the prior destination value.
4423 (define_insn "arm_cx1a<mode>"
4424 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4425 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4426 (match_operand:SIDI 2 "s_register_operand" "0")
4427 (match_operand:SI 3 "const_int_ccde1_operand" "i")]
4430 "cx1<cde_suffix>a\\tp%c1, <cde_dest>, %3"
4431 [(set_attr "type" "coproc")]
;; CX2: one register source plus immediate.
4434 (define_insn "arm_cx2<mode>"
4435 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4436 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4437 (match_operand:SI 2 "s_register_operand" "r")
4438 (match_operand:SI 3 "const_int_ccde2_operand" "i")]
4441 "cx2<cde_suffix>\\tp%c1, <cde_dest>, %2, %3"
4442 [(set_attr "type" "coproc")]
;; CX2A: accumulating form of CX2.
4445 (define_insn "arm_cx2a<mode>"
4446 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4447 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4448 (match_operand:SIDI 2 "s_register_operand" "0")
4449 (match_operand:SI 3 "s_register_operand" "r")
4450 (match_operand:SI 4 "const_int_ccde2_operand" "i")]
4453 "cx2<cde_suffix>a\\tp%c1, <cde_dest>, %3, %4"
4454 [(set_attr "type" "coproc")]
;; CX3: two register sources plus immediate.
4457 (define_insn "arm_cx3<mode>"
4458 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4459 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4460 (match_operand:SI 2 "s_register_operand" "r")
4461 (match_operand:SI 3 "s_register_operand" "r")
4462 (match_operand:SI 4 "const_int_ccde3_operand" "i")]
4465 "cx3<cde_suffix>\\tp%c1, <cde_dest>, %2, %3, %4"
4466 [(set_attr "type" "coproc")]
;; CX3A: accumulating form of CX3.
4469 (define_insn "arm_cx3a<mode>"
4470 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4471 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4472 (match_operand:SIDI 2 "s_register_operand" "0")
4473 (match_operand:SI 3 "s_register_operand" "r")
4474 (match_operand:SI 4 "s_register_operand" "r")
4475 (match_operand:SI 5 "const_int_ccde3_operand" "i")]
4478 "cx3<cde_suffix>a\\tp%c1, <cde_dest>, %3, %4, %5"
4479 [(set_attr "type" "coproc")]
4482 ;; Shift and rotation insns
;; DImode left shift.  On little-endian MVE (Armv8.1-M Mainline) a
;; register or long-immediate count (except exactly 32) maps onto the
;; two-register LSLL instruction, after copying src to dest when they
;; don't overlap; otherwise fall back to the generic core-register
;; 64-bit shift sequence.
4484 (define_expand "ashldi3"
4485 [(set (match_operand:DI 0 "s_register_operand")
4486 (ashift:DI (match_operand:DI 1 "s_register_operand")
4487 (match_operand:SI 2 "reg_or_int_operand")))]
4490 if (TARGET_HAVE_MVE && !BYTES_BIG_ENDIAN)
4492 if (!reg_or_int_operand (operands[2], SImode))
4493 operands[2] = force_reg (SImode, operands[2]);
4495 /* Armv8.1-M Mainline double shifts are not expanded. */
4496 if (arm_reg_or_long_shift_imm (operands[2], GET_MODE (operands[2]))
4497 && (REG_P (operands[2]) || INTVAL(operands[2]) != 32))
4499 if (!reg_overlap_mentioned_p(operands[0], operands[1]))
4500 emit_insn (gen_movdi (operands[0], operands[1]));
4502 emit_insn (gen_thumb2_lsll (operands[0], operands[2]));
4507 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
4508 operands[2], gen_reg_rtx (SImode),
4509 gen_reg_rtx (SImode));
;; SImode left shift: a constant count > 31 yields 0 directly.
4513 (define_expand "ashlsi3"
4514 [(set (match_operand:SI 0 "s_register_operand")
4515 (ashift:SI (match_operand:SI 1 "s_register_operand")
4516 (match_operand:SI 2 "arm_rhs_operand")))]
4519 if (CONST_INT_P (operands[2])
4520 && (UINTVAL (operands[2])) > 31)
4522 emit_insn (gen_movsi (operands[0], const0_rtx));
;; DImode arithmetic right shift; MVE ASRL analogue of ashldi3 above.
4528 (define_expand "ashrdi3"
4529 [(set (match_operand:DI 0 "s_register_operand")
4530 (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
4531 (match_operand:SI 2 "reg_or_int_operand")))]
4534 /* Armv8.1-M Mainline double shifts are not expanded. */
4535 if (TARGET_HAVE_MVE && !BYTES_BIG_ENDIAN
4536 && arm_reg_or_long_shift_imm (operands[2], GET_MODE (operands[2])))
4538 if (!reg_overlap_mentioned_p(operands[0], operands[1]))
4539 emit_insn (gen_movdi (operands[0], operands[1]))
4541 emit_insn (gen_thumb2_asrl (operands[0], operands[2]));
4545 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
4546 operands[2], gen_reg_rtx (SImode),
4547 gen_reg_rtx (SImode));
;; SImode arithmetic right shift: clamp a constant count > 31 to 31,
;; which gives the same (sign-fill) result.
4551 (define_expand "ashrsi3"
4552 [(set (match_operand:SI 0 "s_register_operand")
4553 (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
4554 (match_operand:SI 2 "arm_rhs_operand")))]
4557 if (CONST_INT_P (operands[2])
4558 && UINTVAL (operands[2]) > 31)
4559 operands[2] = GEN_INT (31);
;; DImode logical right shift; MVE LSRL analogue (immediate counts only
;; here — note the long_shift_imm predicate, unlike ashrdi3's
;; arm_reg_or_long_shift_imm).
4563 (define_expand "lshrdi3"
4564 [(set (match_operand:DI 0 "s_register_operand")
4565 (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
4566 (match_operand:SI 2 "reg_or_int_operand")))]
4569 /* Armv8.1-M Mainline double shifts are not expanded. */
4570 if (TARGET_HAVE_MVE && !BYTES_BIG_ENDIAN
4571 && long_shift_imm (operands[2], GET_MODE (operands[2])))
4573 if (!reg_overlap_mentioned_p(operands[0], operands[1]))
4574 emit_insn (gen_movdi (operands[0], operands[1]));
4576 emit_insn (gen_thumb2_lsrl (operands[0], operands[2]));
4580 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
4581 operands[2], gen_reg_rtx (SImode),
4582 gen_reg_rtx (SImode));
;; SImode logical right shift: a constant count > 31 yields 0 directly.
4586 (define_expand "lshrsi3"
4587 [(set (match_operand:SI 0 "s_register_operand")
4588 (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
4589 (match_operand:SI 2 "arm_rhs_operand")))]
4592 if (CONST_INT_P (operands[2])
4593 && (UINTVAL (operands[2])) > 31)
4595 emit_insn (gen_movsi (operands[0], const0_rtx));
;; Rotate left is synthesized as rotate right by (32 - n) % 32; for a
;; register count the complement is computed at runtime via subsi3.
4601 (define_expand "rotlsi3"
4602 [(set (match_operand:SI 0 "s_register_operand")
4603 (rotatert:SI (match_operand:SI 1 "s_register_operand")
4604 (match_operand:SI 2 "reg_or_int_operand")))]
4607 if (CONST_INT_P (operands[2]))
4608 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
4611 rtx reg = gen_reg_rtx (SImode);
4612 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
;; Rotate right: reduce a constant count mod 32; Thumb-1 has no
;; immediate rotate, so constants are forced into a register there.
4618 (define_expand "rotrsi3"
4619 [(set (match_operand:SI 0 "s_register_operand")
4620 (rotatert:SI (match_operand:SI 1 "s_register_operand")
4621 (match_operand:SI 2 "arm_rhs_operand")))]
4626 if (CONST_INT_P (operands[2])
4627 && UINTVAL (operands[2]) > 31)
4628 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
4630 else /* TARGET_THUMB1 */
4632 if (CONST_INT_P (operands [2]))
4633 operands [2] = force_reg (SImode, operands[2]);
;; Generic SImode shift insn: any shift_operator with an immediate (M)
;; or register count.  Assembly is produced by arm_output_shift; the
;; first two alternatives are 16-bit Thumb-2 encodings.
4638 (define_insn "*arm_shiftsi3"
4639 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
4640 (match_operator:SI 3 "shift_operator"
4641 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
4642 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
4644 "* return arm_output_shift(operands, 0);"
4645 [(set_attr "predicable" "yes")
4646 (set_attr "arch" "t2,t2,*,*")
4647 (set_attr "predicable_short_it" "yes,yes,no,no")
4648 (set_attr "length" "4")
4649 (set_attr "shift" "1")
4650 (set_attr "autodetect_type" "alu_shift_operator3")]
;; Flag-setting variant: shift and compare the result against zero
;; (N/Z flags), keeping the shifted value.
4653 (define_insn "*shiftsi3_compare0"
4654 [(set (reg:CC_NZ CC_REGNUM)
4655 (compare:CC_NZ (match_operator:SI 3 "shift_operator"
4656 [(match_operand:SI 1 "s_register_operand" "r,r")
4657 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4659 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4660 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
4662 "* return arm_output_shift(operands, 1);"
4663 [(set_attr "conds" "set")
4664 (set_attr "shift" "1")
4665 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
;; As above but the shifted value itself is dead — only the flags
;; survive (destination is a scratch).
4668 (define_insn "*shiftsi3_compare0_scratch"
4669 [(set (reg:CC_NZ CC_REGNUM)
4670 (compare:CC_NZ (match_operator:SI 3 "shift_operator"
4671 [(match_operand:SI 1 "s_register_operand" "r,r")
4672 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4674 (clobber (match_scratch:SI 0 "=r,r"))]
4676 "* return arm_output_shift(operands, 1);"
4677 [(set_attr "conds" "set")
4678 (set_attr "shift" "1")
4679 (set_attr "type" "shift_imm,shift_reg")]
;; MVN with a shifted source operand (one combined instruction).
;; NOTE(review): the insn condition and template lines are elided here.
4682 (define_insn "*not_shiftsi"
4683 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4684 (not:SI (match_operator:SI 3 "shift_operator"
4685 [(match_operand:SI 1 "s_register_operand" "r,r")
4686 (match_operand:SI 2 "shift_amount_operand" "M,r")])))]
4689 [(set_attr "predicable" "yes")
4690 (set_attr "shift" "1")
4691 (set_attr "arch" "32,a")
4692 (set_attr "type" "mvn_shift,mvn_shift_reg")])
;; MVNS: as *not_shiftsi but also sets the N/Z flags from the result.
4694 (define_insn "*not_shiftsi_compare0"
4695 [(set (reg:CC_NZ CC_REGNUM)
4697 (not:SI (match_operator:SI 3 "shift_operator"
4698 [(match_operand:SI 1 "s_register_operand" "r,r")
4699 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
4701 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4702 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
4704 "mvns%?\\t%0, %1%S3"
4705 [(set_attr "conds" "set")
4706 (set_attr "shift" "1")
4707 (set_attr "arch" "32,a")
4708 (set_attr "type" "mvn_shift,mvn_shift_reg")])
;; MVNS for flags only — result register is a scratch.
4710 (define_insn "*not_shiftsi_compare0_scratch"
4711 [(set (reg:CC_NZ CC_REGNUM)
4713 (not:SI (match_operator:SI 3 "shift_operator"
4714 [(match_operand:SI 1 "s_register_operand" "r,r")
4715 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
4717 (clobber (match_scratch:SI 0 "=r,r"))]
4719 "mvns%?\\t%0, %1%S3"
4720 [(set_attr "conds" "set")
4721 (set_attr "shift" "1")
4722 (set_attr "arch" "32,a")
4723 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4725 ;; We don't really have extzv, but defining this using shifts helps
4726 ;; to reduce register pressure later on.
;; Zero-extract expander.  Thumb-2 uses UBFX (or unaligned loads for
;; byte-aligned 16/32-bit fields in memory); Thumb-1 synthesizes the
;; extract as shift-left then logical-shift-right via extzv_t1.
4728 (define_expand "extzv"
4729 [(set (match_operand 0 "s_register_operand")
4730 (zero_extract (match_operand 1 "nonimmediate_operand")
4731 (match_operand 2 "const_int_operand")
4732 (match_operand 3 "const_int_operand")))]
4733 "TARGET_THUMB1 || arm_arch_thumb2"
4736 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
4737 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
4739 if (arm_arch_thumb2)
4741 HOST_WIDE_INT width = INTVAL (operands[2]);
4742 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
;; Byte-aligned 16/32-bit fields in memory become unaligned loads.
4744 if (unaligned_access && MEM_P (operands[1])
4745 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
4749 if (BYTES_BIG_ENDIAN)
4750 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
4755 base_addr = adjust_address (operands[1], SImode,
4756 bitpos / BITS_PER_UNIT);
4757 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4761 rtx dest = operands[0];
4762 rtx tmp = gen_reg_rtx (SImode);
4764 /* We may get a paradoxical subreg here. Strip it off. */
4765 if (GET_CODE (dest) == SUBREG
4766 && GET_MODE (dest) == SImode
4767 && GET_MODE (SUBREG_REG (dest)) == HImode)
4768 dest = SUBREG_REG (dest);
4770 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4773 base_addr = adjust_address (operands[1], HImode,
4774 bitpos / BITS_PER_UNIT);
4775 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
4776 emit_move_insn (gen_lowpart (SImode, dest), tmp);
;; Register source on Thumb-2: use the UBFX pattern directly.
4780 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
4782 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
;; Thumb-1 fallback: register operands only; a field ending at bit 31
;; degenerates to a single right shift, otherwise use extzv_t1.
4790 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4793 operands[3] = GEN_INT (rshift);
4797 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
4801 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
4802 operands[3], gen_reg_rtx (SImode)));
4807 ;; Helper for extzv, for the Thumb-1 register-shifts case.
;; Two-insn template: shift the field to the top (operand 2), then
;; shift it down to bit 0 (operand 3), through scratch operand 4.
4809 (define_expand "extzv_t1"
4810 [(set (match_operand:SI 4 "s_register_operand")
4811 (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
4812 (match_operand:SI 2 "const_int_operand")))
4813 (set (match_operand:SI 0 "s_register_operand")
4814 (lshiftrt:SI (match_dup 4)
4815 (match_operand:SI 3 "const_int_operand")))]
;; Sign-extract expander: unaligned signed loads for byte-aligned
;; 16/32-bit memory fields, else the SBFX path via extv_regsi.
4819 (define_expand "extv"
4820 [(set (match_operand 0 "s_register_operand")
4821 (sign_extract (match_operand 1 "nonimmediate_operand")
4822 (match_operand 2 "const_int_operand")
4823 (match_operand 3 "const_int_operand")))]
4826 HOST_WIDE_INT width = INTVAL (operands[2]);
4827 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4829 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
4830 && (bitpos % BITS_PER_UNIT) == 0)
4834 if (BYTES_BIG_ENDIAN)
4835 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
4839 base_addr = adjust_address (operands[1], SImode,
4840 bitpos / BITS_PER_UNIT);
4841 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4845 rtx dest = operands[0];
4846 rtx tmp = gen_reg_rtx (SImode);
4848 /* We may get a paradoxical subreg here. Strip it off. */
4849 if (GET_CODE (dest) == SUBREG
4850 && GET_MODE (dest) == SImode
4851 && GET_MODE (SUBREG_REG (dest)) == HImode)
4852 dest = SUBREG_REG (dest);
4854 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4857 base_addr = adjust_address (operands[1], HImode,
4858 bitpos / BITS_PER_UNIT);
4859 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
4860 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4865 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4867 else if (GET_MODE (operands[0]) == SImode
4868 && GET_MODE (operands[1]) == SImode)
4870 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
4878 ; Helper to expand register forms of extv with the proper modes.
4880 (define_expand "extv_regsi"
4881 [(set (match_operand:SI 0 "s_register_operand")
4882 (sign_extract:SI (match_operand:SI 1 "s_register_operand")
4883 (match_operand 2 "const_int_operand")
4884 (match_operand 3 "const_int_operand")))]
4889 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
;; 64-bit unaligned load via LDRD (output_move_double emits the pair).
4891 (define_insn "unaligned_loaddi"
4892 [(set (match_operand:DI 0 "s_register_operand" "=r")
4893 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
4894 UNSPEC_UNALIGNED_LOAD))]
4895 "TARGET_32BIT && TARGET_LDRD"
4897 return output_move_double (operands, true, NULL);
4899 [(set_attr "length" "8")
4900 (set_attr "type" "load_8")])
;; 32-bit unaligned load; alternatives cover Thumb-1 (16-bit, not
;; predicable), Thumb-2 short form, and the generic 32-bit encoding.
4902 (define_insn "unaligned_loadsi"
4903 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4904 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
4905 UNSPEC_UNALIGNED_LOAD))]
4908 ldr\t%0, %1\t@ unaligned
4909 ldr%?\t%0, %1\t@ unaligned
4910 ldr%?\t%0, %1\t@ unaligned"
4911 [(set_attr "arch" "t1,t2,32")
4912 (set_attr "length" "2,2,4")
4913 (set_attr "predicable" "no,yes,yes")
4914 (set_attr "predicable_short_it" "no,yes,no")
4915 (set_attr "type" "load_4")])
4917 ;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
4918 ;; address (there's no immediate format). That's tricky to support
4919 ;; here and we don't really need this pattern for that case, so only
4920 ;; enable for 32-bit ISAs.
4921 (define_insn "unaligned_loadhis"
4922 [(set (match_operand:SI 0 "s_register_operand" "=r")
4924 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
4925 UNSPEC_UNALIGNED_LOAD)))]
4926 "unaligned_access && TARGET_32BIT"
4927 "ldrsh%?\t%0, %1\t@ unaligned"
4928 [(set_attr "predicable" "yes")
4929 (set_attr "type" "load_byte")])
;; Zero-extending 16-bit unaligned load (LDRH), same three
;; alternatives as unaligned_loadsi.
4931 (define_insn "unaligned_loadhiu"
4932 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4934 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
4935 UNSPEC_UNALIGNED_LOAD)))]
4938 ldrh\t%0, %1\t@ unaligned
4939 ldrh%?\t%0, %1\t@ unaligned
4940 ldrh%?\t%0, %1\t@ unaligned"
4941 [(set_attr "arch" "t1,t2,32")
4942 (set_attr "length" "2,2,4")
4943 (set_attr "predicable" "no,yes,yes")
4944 (set_attr "predicable_short_it" "no,yes,no")
4945 (set_attr "type" "load_byte")])
;; 64-bit unaligned store via STRD.
4947 (define_insn "unaligned_storedi"
4948 [(set (match_operand:DI 0 "memory_operand" "=m")
4949 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
4950 UNSPEC_UNALIGNED_STORE))]
4951 "TARGET_32BIT && TARGET_LDRD"
4953 return output_move_double (operands, true, NULL);
4955 [(set_attr "length" "8")
4956 (set_attr "type" "store_8")])
;; 32-bit unaligned store (STR), Thumb-1/Thumb-2/ARM alternatives.
4958 (define_insn "unaligned_storesi"
4959 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
4960 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
4961 UNSPEC_UNALIGNED_STORE))]
4964 str\t%1, %0\t@ unaligned
4965 str%?\t%1, %0\t@ unaligned
4966 str%?\t%1, %0\t@ unaligned"
4967 [(set_attr "arch" "t1,t2,32")
4968 (set_attr "length" "2,2,4")
4969 (set_attr "predicable" "no,yes,yes")
4970 (set_attr "predicable_short_it" "no,yes,no")
4971 (set_attr "type" "store_4")])
;; 16-bit unaligned store (STRH).
4973 (define_insn "unaligned_storehi"
4974 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
4975 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
4976 UNSPEC_UNALIGNED_STORE))]
4979 strh\t%1, %0\t@ unaligned
4980 strh%?\t%1, %0\t@ unaligned
4981 strh%?\t%1, %0\t@ unaligned"
4982 [(set_attr "arch" "t1,t2,32")
4983 (set_attr "length" "2,2,4")
4984 (set_attr "predicable" "no,yes,yes")
4985 (set_attr "predicable_short_it" "no,yes,no")
4986 (set_attr "type" "store_4")])
;; SBFX: signed bitfield extract from a register.  Operand 3 is the LSB
;; (0..31) and operand 2 the width (1..32-lsb), matching the encoding
;; constraints of the instruction.
4989 (define_insn "*extv_reg"
4990 [(set (match_operand:SI 0 "s_register_operand" "=r")
4991 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4992 (match_operand:SI 2 "const_int_operand" "n")
4993 (match_operand:SI 3 "const_int_operand" "n")))]
4995 && IN_RANGE (INTVAL (operands[3]), 0, 31)
4996 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
4997 "sbfx%?\t%0, %1, %3, %2"
4998 [(set_attr "length" "4")
4999 (set_attr "predicable" "yes")
5000 (set_attr "type" "bfm")]
;; UBFX: unsigned counterpart of *extv_reg with identical range checks.
5003 (define_insn "extzv_t2"
5004 [(set (match_operand:SI 0 "s_register_operand" "=r")
5005 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
5006 (match_operand:SI 2 "const_int_operand" "n")
5007 (match_operand:SI 3 "const_int_operand" "n")))]
5009 && IN_RANGE (INTVAL (operands[3]), 0, 31)
5010 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
5011 "ubfx%?\t%0, %1, %3, %2"
5012 [(set_attr "length" "4")
5013 (set_attr "predicable" "yes")
5014 (set_attr "type" "bfm")]
5018 ;; Division instructions
;; Signed hardware divide; second alternative is the Armv8-M Baseline
;; encoding.  NOTE(review): condition and template lines are elided —
;; presumably SDIV; confirm against the full file.
5019 (define_insn "divsi3"
5020 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5021 (div:SI (match_operand:SI 1 "s_register_operand" "r,r")
5022 (match_operand:SI 2 "s_register_operand" "r,r")))]
5027 [(set_attr "arch" "32,v8mb")
5028 (set_attr "predicable" "yes")
5029 (set_attr "type" "sdiv")]
;; Unsigned hardware divide (UDIV), same alternatives as divsi3.
5032 (define_insn "udivsi3"
5033 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5034 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
5035 (match_operand:SI 2 "s_register_operand" "r,r")))]
5040 [(set_attr "arch" "32,v8mb")
5041 (set_attr "predicable" "yes")
5042 (set_attr "type" "udiv")]
5046 ;; Unary arithmetic insns
;; Overflow-trapping negate: implemented as (0 - x) through the
;; overflow-checking subtract expander.
5048 (define_expand "negv<SIDI:mode>3"
5049 [(match_operand:SIDI 0 "s_register_operand")
5050 (match_operand:SIDI 1 "s_register_operand")
5051 (match_operand 2 "")]
5054 emit_insn (gen_subv<mode>4 (operands[0], const0_rtx, operands[1],
;; Plain SImode negate expander.
5059 (define_expand "negsi2"
5060 [(set (match_operand:SI 0 "s_register_operand")
5061 (neg:SI (match_operand:SI 1 "s_register_operand")))]
;; Negate via reverse-subtract from zero (RSB).
5066 (define_insn "*arm_negsi2"
5067 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
5068 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
5070 "rsb%?\\t%0, %1, #0"
5071 [(set_attr "predicable" "yes")
5072 (set_attr "predicable_short_it" "yes,no")
5073 (set_attr "arch" "t2,*")
5074 (set_attr "length" "4")
5075 (set_attr "type" "alu_imm")]
5078 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
5079 ;; rather than (0 cmp reg). This gives the same results for unsigned
5080 ;; and equality compares which is what we mostly need here.
;; Negate that also sets the flags for a reverse-subtract compare.
5081 (define_insn "negsi2_0compare"
5082 [(set (reg:CC_RSB CC_REGNUM)
5083 (compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r"))
5085 (set (match_operand:SI 0 "s_register_operand" "=l,r")
5086 (neg:SI (match_dup 1)))]
5091 [(set_attr "conds" "set")
5092 (set_attr "arch" "t2,*")
5093 (set_attr "length" "2,*")
5094 (set_attr "type" "alus_imm")]
;; Negate minus an incoming borrow, consuming the carry flag (SBC).
5097 (define_insn "negsi2_carryin"
5098 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5099 (minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r"))
5100 (match_operand:SI 2 "arm_borrow_operation" "")))]
5104 sbc\\t%0, %1, %1, lsl #1"
5105 [(set_attr "conds" "use")
5106 (set_attr "arch" "a,t2")
5107 (set_attr "type" "adc_imm,adc_reg")]
;; FP negation expanders — bodies are elided here; the patterns are
;; gated on hard-float (and double-precision VFP for DFmode).
5110 (define_expand "negsf2"
5111 [(set (match_operand:SF 0 "s_register_operand")
5112 (neg:SF (match_operand:SF 1 "s_register_operand")))]
5113 "TARGET_32BIT && TARGET_HARD_FLOAT"
5117 (define_expand "negdf2"
5118 [(set (match_operand:DF 0 "s_register_operand")
5119 (neg:DF (match_operand:DF 1 "s_register_operand")))]
5120 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
5123 ;; abssi2 doesn't really clobber the condition codes if a different register
5124 ;; is being set. To keep things simple, assume during rtl manipulations that
5125 ;; it does, but tell the final scan operator the truth. Similarly for
;; abssi2 expander: operand 2 is a scratch that becomes either a real
;; SCRATCH or the CC register depending on the (elided) condition.
5128 (define_expand "abssi2"
5130 [(set (match_operand:SI 0 "s_register_operand")
5131 (abs:SI (match_operand:SI 1 "s_register_operand")))
5132 (clobber (match_dup 2))])]
5136 operands[2] = gen_rtx_SCRATCH (SImode);
5138 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
;; abs: split after reload into either "cmp; rsblt" (dest == src,
;; conditional execution) or the two-insn "eor/sub with asr #31"
;; branch-free sequence (distinct dest).
5141 (define_insn_and_split "*arm_abssi2"
5142 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
5143 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
5144 (clobber (reg:CC CC_REGNUM))]
5147 "&& reload_completed"
5150 /* if (which_alternative == 0) */
5151 if (REGNO(operands[0]) == REGNO(operands[1]))
5153 /* Emit the pattern:
5154 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
5155 [(set (reg:CC CC_REGNUM)
5156 (compare:CC (match_dup 0) (const_int 0)))
5157 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
5158 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
5160 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
5161 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
5162 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5163 (gen_rtx_LT (SImode,
5164 gen_rtx_REG (CCmode, CC_REGNUM),
5166 (gen_rtx_SET (operands[0],
5167 (gen_rtx_MINUS (SImode,
5174 /* Emit the pattern:
5175 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
5177 (xor:SI (match_dup 1)
5178 (ashiftrt:SI (match_dup 1) (const_int 31))))
5180 (minus:SI (match_dup 0)
5181 (ashiftrt:SI (match_dup 1) (const_int 31))))]
5183 emit_insn (gen_rtx_SET (operands[0],
5184 gen_rtx_XOR (SImode,
5185 gen_rtx_ASHIFTRT (SImode,
5189 emit_insn (gen_rtx_SET (operands[0],
5190 gen_rtx_MINUS (SImode,
5192 gen_rtx_ASHIFTRT (SImode,
5198 [(set_attr "conds" "clob,*")
5199 (set_attr "shift" "1")
5200 (set_attr "predicable" "no, yes")
5201 (set_attr "length" "8")
5202 (set_attr "type" "multiple")]
;; -abs(x): mirror of *arm_abssi2; the conditional alternative uses
;; "cmp; rsbgt" and the branch-free one "eor; rsb" with asr #31.
5205 (define_insn_and_split "*arm_neg_abssi2"
5206 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
5207 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
5208 (clobber (reg:CC CC_REGNUM))]
5211 "&& reload_completed"
5214 /* if (which_alternative == 0) */
5215 if (REGNO (operands[0]) == REGNO (operands[1]))
5217 /* Emit the pattern:
5218 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
5220 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
5221 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
5222 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5224 gen_rtx_REG (CCmode, CC_REGNUM),
5226 gen_rtx_SET (operands[0],
5227 (gen_rtx_MINUS (SImode,
5233 /* Emit the pattern:
5234 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
5236 emit_insn (gen_rtx_SET (operands[0],
5237 gen_rtx_XOR (SImode,
5238 gen_rtx_ASHIFTRT (SImode,
5242 emit_insn (gen_rtx_SET (operands[0],
5243 gen_rtx_MINUS (SImode,
5244 gen_rtx_ASHIFTRT (SImode,
5251 [(set_attr "conds" "clob,*")
5252 (set_attr "shift" "1")
5253 (set_attr "predicable" "no, yes")
5254 (set_attr "length" "8")
5255 (set_attr "type" "multiple")]
;; FP abs/sqrt expanders — bodies elided here; each is gated on
;; hard-float, with DFmode additionally requiring double-precision VFP.
5258 (define_expand "abssf2"
5259 [(set (match_operand:SF 0 "s_register_operand")
5260 (abs:SF (match_operand:SF 1 "s_register_operand")))]
5261 "TARGET_32BIT && TARGET_HARD_FLOAT"
5264 (define_expand "absdf2"
5265 [(set (match_operand:DF 0 "s_register_operand")
5266 (abs:DF (match_operand:DF 1 "s_register_operand")))]
5267 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5270 (define_expand "sqrtsf2"
5271 [(set (match_operand:SF 0 "s_register_operand")
5272 (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
5273 "TARGET_32BIT && TARGET_HARD_FLOAT"
5276 (define_expand "sqrtdf2"
5277 [(set (match_operand:DF 0 "s_register_operand")
5278 (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
5279 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
;; One's complement (bitwise NOT) expander and its MVN insn.
5282 (define_expand "one_cmplsi2"
5283 [(set (match_operand:SI 0 "s_register_operand")
5284 (not:SI (match_operand:SI 1 "s_register_operand")))]
;; NOTE(review): the output template line is elided — presumably MVN.
5289 (define_insn "*arm_one_cmplsi2"
5290 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
5291 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
5294 [(set_attr "predicable" "yes")
5295 (set_attr "predicable_short_it" "yes,no")
5296 (set_attr "arch" "t2,*")
5297 (set_attr "length" "4")
5298 (set_attr "type" "mvn_reg")]
;; MVNS: NOT that also sets the N/Z flags, result kept.
5301 (define_insn "*notsi_compare0"
5302 [(set (reg:CC_NZ CC_REGNUM)
5303 (compare:CC_NZ (not:SI (match_operand:SI 1 "s_register_operand" "r"))
5305 (set (match_operand:SI 0 "s_register_operand" "=r")
5306 (not:SI (match_dup 1)))]
5309 [(set_attr "conds" "set")
5310 (set_attr "type" "mvn_reg")]
;; MVNS for flags only — result register is a scratch.
5313 (define_insn "*notsi_compare0_scratch"
5314 [(set (reg:CC_NZ CC_REGNUM)
5315 (compare:CC_NZ (not:SI (match_operand:SI 1 "s_register_operand" "r"))
5317 (clobber (match_scratch:SI 0 "=r"))]
5320 [(set_attr "conds" "set")
5321 (set_attr "type" "mvn_reg")]
5324 ;; Fixed <--> Floating conversion insns
;; int -> HFmode conversions go through SFmode: expand_float to SF,
;; then narrow to HF and move into place.
5326 (define_expand "floatsihf2"
5327 [(set (match_operand:HF 0 "general_operand")
5328 (float:HF (match_operand:SI 1 "general_operand")))]
5332 rtx op1 = gen_reg_rtx (SFmode);
5333 expand_float (op1, operands[1], 0);
5334 op1 = convert_to_mode (HFmode, op1, 0);
5335 emit_move_insn (operands[0], op1);
;; DImode variant of the same two-step conversion.
5340 (define_expand "floatdihf2"
5341 [(set (match_operand:HF 0 "general_operand")
5342 (float:HF (match_operand:DI 1 "general_operand")))]
5346 rtx op1 = gen_reg_rtx (SFmode);
5347 expand_float (op1, operands[1], 0);
5348 op1 = convert_to_mode (HFmode, op1, 0);
5349 emit_move_insn (operands[0], op1);
;; int -> SF/DF expanders (bodies elided), hard-float gated.
5354 (define_expand "floatsisf2"
5355 [(set (match_operand:SF 0 "s_register_operand")
5356 (float:SF (match_operand:SI 1 "s_register_operand")))]
5357 "TARGET_32BIT && TARGET_HARD_FLOAT"
5361 (define_expand "floatsidf2"
5362 [(set (match_operand:DF 0 "s_register_operand")
5363 (float:DF (match_operand:SI 1 "s_register_operand")))]
5364 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; HF -> int truncation also routes through SFmode, then expand_fix.
5368 (define_expand "fix_trunchfsi2"
5369 [(set (match_operand:SI 0 "general_operand")
5370 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
5374 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5375 expand_fix (operands[0], op1, 0);
5380 (define_expand "fix_trunchfdi2"
5381 [(set (match_operand:DI 0 "general_operand")
5382 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
5386 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5387 expand_fix (operands[0], op1, 0);
;; SF/DF -> int truncation expanders (bodies elided).
5392 (define_expand "fix_truncsfsi2"
5393 [(set (match_operand:SI 0 "s_register_operand")
5394 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
5395 "TARGET_32BIT && TARGET_HARD_FLOAT"
5399 (define_expand "fix_truncdfsi2"
5400 [(set (match_operand:SI 0 "s_register_operand")
5401 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
5402 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; DF -> SF narrowing (body elided).
5408 (define_expand "truncdfsf2"
5409 [(set (match_operand:SF 0 "s_register_operand")
5411 (match_operand:DF 1 "s_register_operand")))]
5412 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5416 ;; DFmode to HFmode conversions on targets without a single-step hardware
5417 ;; instruction for it would have to go through SFmode. This is dangerous
5418 ;; as it introduces double rounding.
5420 ;; Disable this pattern unless we are in an unsafe math mode, or we have
5421 ;; a single-step instruction.
5423 (define_expand "truncdfhf2"
5424 [(set (match_operand:HF 0 "s_register_operand")
5426 (match_operand:DF 1 "s_register_operand")))]
5427 "(TARGET_EITHER && flag_unsafe_math_optimizations)
5428 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
5430 /* We don't have a direct instruction for this, so we must be in
5431 an unsafe math mode, and going via SFmode. */
5433 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
5436 op1 = convert_to_mode (SFmode, operands[1], 0);
5437 op1 = convert_to_mode (HFmode, op1, 0);
5438 emit_move_insn (operands[0], op1);
5441 /* Otherwise, we will pick this up as a single instruction with
5442 no intermediary rounding. */
5446 ;; Zero and sign extension instructions.
;; QI/HI/SI -> DI zero extension: extend (if needed) into the low half,
;; zero the high half.  Uses fresh pseudos when allowed so the halves
;; can be allocated independently; otherwise reuses the result's low
;; part as the intermediate.
5448 (define_expand "zero_extend<mode>di2"
5449 [(set (match_operand:DI 0 "s_register_operand" "")
5450 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
5451 "TARGET_32BIT <qhs_zextenddi_cond>"
5453 rtx res_lo, res_hi, op0_lo, op0_hi;
5454 res_lo = gen_lowpart (SImode, operands[0]);
5455 res_hi = gen_highpart (SImode, operands[0]);
5456 if (can_create_pseudo_p ())
5458 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
5459 op0_hi = gen_reg_rtx (SImode);
5463 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
5466 if (<MODE>mode != SImode)
5467 emit_insn (gen_rtx_SET (op0_lo,
5468 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5469 emit_insn (gen_movsi (op0_hi, const0_rtx));
5470 if (res_lo != op0_lo)
5471 emit_move_insn (res_lo, op0_lo);
5472 if (res_hi != op0_hi)
5473 emit_move_insn (res_hi, op0_hi);
;; Sign-extension counterpart: high half is the low half shifted
;; arithmetically right by 31 (sign replication).
5478 (define_expand "extend<mode>di2"
5479 [(set (match_operand:DI 0 "s_register_operand" "")
5480 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
5481 "TARGET_32BIT <qhs_sextenddi_cond>"
5483 rtx res_lo, res_hi, op0_lo, op0_hi;
5484 res_lo = gen_lowpart (SImode, operands[0]);
5485 res_hi = gen_highpart (SImode, operands[0]);
5486 if (can_create_pseudo_p ())
5488 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
5489 op0_hi = gen_reg_rtx (SImode);
5493 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
5496 if (<MODE>mode != SImode)
5497 emit_insn (gen_rtx_SET (op0_lo,
5498 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5499 emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
5500 if (res_lo != op0_lo)
5501 emit_move_insn (res_lo, op0_lo);
5502 if (res_hi != op0_hi)
5503 emit_move_insn (res_hi, op0_hi);
5508 ;; Splits for all extensions to DImode
;; Split zero_extend-to-DI into low-part extend/move + high part := 0.
5510 [(set (match_operand:DI 0 "s_register_operand" "")
5511 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5513 [(set (match_dup 0) (match_dup 1))]
5515 rtx lo_part = gen_lowpart (SImode, operands[0]);
5516 machine_mode src_mode = GET_MODE (operands[1]);
5518 if (src_mode == SImode)
5519 emit_move_insn (lo_part, operands[1]);
5521 emit_insn (gen_rtx_SET (lo_part,
5522 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5523 operands[0] = gen_highpart (SImode, operands[0]);
5524 operands[1] = const0_rtx;
;; Split sign_extend-to-DI: low-part extend/move + high part :=
;; low >> 31 (arithmetic).
5528 [(set (match_operand:DI 0 "s_register_operand" "")
5529 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5531 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
5533 rtx lo_part = gen_lowpart (SImode, operands[0]);
5534 machine_mode src_mode = GET_MODE (operands[1]);
5536 if (src_mode == SImode)
5537 emit_move_insn (lo_part, operands[1]);
5539 emit_insn (gen_rtx_SET (lo_part,
5540 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5541 operands[1] = lo_part;
5542 operands[0] = gen_highpart (SImode, operands[0]);
/* zero_extendhisi2: SImode := zero-extended HImode.  Pre-v6 targets with no
   uxth synthesize it as (x << 16) >> 16 (logical).  */
5545 (define_expand "zero_extendhisi2"
5546 [(set (match_operand:SI 0 "s_register_operand")
5547 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
/* Pre-ARMv4 has no halfword loads; load the two bytes separately.  */
5550 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
5552 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
5555 if (!arm_arch6 && !MEM_P (operands[1]))
5557 rtx t = gen_lowpart (SImode, operands[1]);
5558 rtx tmp = gen_reg_rtx (SImode);
5559 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5560 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
/* Split a register HI zero-extend into the shift pair on pre-v6.  */
5566 [(set (match_operand:SI 0 "s_register_operand" "")
5567 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
5568 "!TARGET_THUMB2 && !arm_arch6"
5569 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5570 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
5572 operands[2] = gen_lowpart (SImode, operands[1]);
/* v4..v5: register form needs the shift pair; memory form can use ldrh.  */
5575 (define_insn "*arm_zero_extendhisi2"
5576 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5577 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5578 "TARGET_ARM && arm_arch4 && !arm_arch6"
5582 [(set_attr "type" "alu_shift_reg,load_byte")
5583 (set_attr "predicable" "yes")]
/* v6+: single-instruction extend (uxth) or a load.  */
5586 (define_insn "*arm_zero_extendhisi2_v6"
5587 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5588 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5589 "TARGET_ARM && arm_arch6"
5593 [(set_attr "predicable" "yes")
5594 (set_attr "type" "extend,load_byte")]
/* Fused zero-extend-halfword-and-add: uxtah rd, rn, rm.  */
5597 (define_insn "*arm_zero_extendhisi2addsi"
5598 [(set (match_operand:SI 0 "s_register_operand" "=r")
5599 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5600 (match_operand:SI 2 "s_register_operand" "r")))]
5602 "uxtah%?\\t%0, %2, %1"
5603 [(set_attr "type" "alu_shift_reg")
5604 (set_attr "predicable" "yes")]
/* zero_extendqisi2: SImode := zero-extended QImode.  ARM mode pre-v6 can use
   an AND with 255; other pre-v6 cases use the (x << 24) >> 24 shift pair.  */
5607 (define_expand "zero_extendqisi2"
5608 [(set (match_operand:SI 0 "s_register_operand")
5609 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
5612 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
5614 emit_insn (gen_andsi3 (operands[0],
5615 gen_lowpart (SImode, operands[1]),
5619 if (!arm_arch6 && !MEM_P (operands[1]))
5621 rtx t = gen_lowpart (SImode, operands[1]);
5622 rtx tmp = gen_reg_rtx (SImode);
5623 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5624 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
/* Split a register QI zero-extend into the 24-bit shift pair, or an
   AND with 255 (see the gen_andsi3 call below).  */
5630 [(set (match_operand:SI 0 "s_register_operand" "")
5631 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
5633 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5634 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
5636 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5639 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
/* Pre-v6: register form (length 8) vs. ldrb from memory (length 4).  */
5644 (define_insn "*arm_zero_extendqisi2"
5645 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5646 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
5647 "TARGET_ARM && !arm_arch6"
5650 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5651 [(set_attr "length" "8,4")
5652 (set_attr "type" "alu_shift_reg,load_byte")
5653 (set_attr "predicable" "yes")]
/* v6+: single-instruction extend or ldrb.  */
5656 (define_insn "*arm_zero_extendqisi2_v6"
5657 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5658 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
5659 "TARGET_ARM && arm_arch6"
5662 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5663 [(set_attr "type" "extend,load_byte")
5664 (set_attr "predicable" "yes")]
/* Fused zero-extend-byte-and-add: uxtab rd, rn, rm.  */
5667 (define_insn "*arm_zero_extendqisi2addsi"
5668 [(set (match_operand:SI 0 "s_register_operand" "=r")
5669 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5670 (match_operand:SI 2 "s_register_operand" "r")))]
5672 "uxtab%?\\t%0, %2, %1"
5673 [(set_attr "predicable" "yes")
5674 (set_attr "type" "alu_shift_reg")]
/* Zero-extend of the low byte of an SImode value, little-endian: the low
   byte is subreg byte 0; rewrite as a move plus AND with 255.  */
5678 [(set (match_operand:SI 0 "s_register_operand" "")
5679 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
5680 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5681 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
5682 [(set (match_dup 2) (match_dup 1))
5683 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
/* Same transformation, big-endian: the low byte is subreg byte 3.  */
5688 [(set (match_operand:SI 0 "s_register_operand" "")
5689 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
5690 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5691 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
5692 [(set (match_dup 2) (match_dup 1))
5693 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
/* Fold (IOR/XOR (and (shift x) mask) (zero_extend y)) into the IOR/XOR of
   the shift followed by a zero-extend of the combined low part, provided
   the mask equals the lowpart mode's mask shifted into place (checked by
   the UINTVAL condition below).  */
5699 [(set (match_operand:SI 0 "s_register_operand" "")
5700 (IOR_XOR:SI (and:SI (ashift:SI
5701 (match_operand:SI 1 "s_register_operand" "")
5702 (match_operand:SI 2 "const_int_operand" ""))
5703 (match_operand:SI 3 "const_int_operand" ""))
5705 (match_operator 5 "subreg_lowpart_operator"
5706 [(match_operand:SI 4 "s_register_operand" "")]))))]
5708 && (UINTVAL (operands[3])
5709 == (GET_MODE_MASK (GET_MODE (operands[5]))
5710 & (GET_MODE_MASK (GET_MODE (operands[5]))
5711 << (INTVAL (operands[2])))))"
5712 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
5714 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
5715 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
/* Compare a QImode register against zero, setting only the Z flag
   (CC_Zmode on CC_REGNUM).  */
5718 (define_insn "*compareqi_eq0"
5719 [(set (reg:CC_Z CC_REGNUM)
5720 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
5724 [(set_attr "conds" "set")
5725 (set_attr "predicable" "yes")
5726 (set_attr "type" "logic_imm")]
/* extendhisi2: SImode := sign-extended HImode.  Thumb-1 and pre-ARMv4
   memory sources get dedicated expanders; pre-v6 register sources use the
   (x << 16) >> 16 arithmetic-shift pair.  */
5729 (define_expand "extendhisi2"
5730 [(set (match_operand:SI 0 "s_register_operand")
5731 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5736 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
/* Pre-ARMv4 has no ldrsh; go through extendhisi2_mem below.  */
5739 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
5741 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
5745 if (!arm_arch6 && !MEM_P (operands[1]))
5747 rtx t = gen_lowpart (SImode, operands[1]);
5748 rtx tmp = gen_reg_rtx (SImode);
5749 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5750 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
/* Split (with scratch clobber) into the shift pair.  */
5757 [(set (match_operand:SI 0 "register_operand" "")
5758 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
5759 (clobber (match_scratch:SI 2 ""))])]
5761 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5762 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5764 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5767 ;; This pattern will only be used when ldsh is not available
/* Load the halfword as two separate bytes (mem and mem+1), then combine:
   the byte chosen as operands[4]/[5] depends on endianness (see below).  */
5768 (define_expand "extendhisi2_mem"
5769 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5771 (zero_extend:SI (match_dup 7)))
5772 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
5773 (set (match_operand:SI 0 "" "")
5774 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
5779 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5781 mem1 = change_address (operands[1], QImode, addr);
5782 mem2 = change_address (operands[1], QImode,
5783 plus_constant (Pmode, addr, 1));
5784 operands[0] = gen_lowpart (SImode, operands[0]);
5786 operands[2] = gen_reg_rtx (SImode);
5787 operands[3] = gen_reg_rtx (SImode);
5788 operands[6] = gen_reg_rtx (SImode);
/* Pick which loaded byte is the sign-carrying (high) byte.  */
5791 if (BYTES_BIG_ENDIAN)
5793 operands[4] = operands[2];
5794 operands[5] = operands[3];
5798 operands[4] = operands[3];
5799 operands[5] = operands[2];
/* Split without scratch: same shift-pair expansion.  */
5805 [(set (match_operand:SI 0 "register_operand" "")
5806 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
5808 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5809 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5811 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
/* v4..v5 HI sign-extend: register form via shifts, memory form via ldrsh.  */
5814 (define_insn "*arm_extendhisi2"
5815 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5816 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5817 "TARGET_ARM && arm_arch4 && !arm_arch6"
5821 [(set_attr "length" "8,4")
5822 (set_attr "type" "alu_shift_reg,load_byte")
5823 (set_attr "predicable" "yes")]
5826 ;; ??? Check Thumb-2 pool range
/* v6+ (ARM or Thumb-2): single-instruction extend or a load.  */
5827 (define_insn "*arm_extendhisi2_v6"
5828 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5829 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5830 "TARGET_32BIT && arm_arch6"
5834 [(set_attr "type" "extend,load_byte")
5835 (set_attr "predicable" "yes")]
/* Fused sign-extend-halfword-and-add: sxtah rd, rn, rm.  */
5838 (define_insn "*arm_extendhisi2addsi"
5839 [(set (match_operand:SI 0 "s_register_operand" "=r")
5840 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5841 (match_operand:SI 2 "s_register_operand" "r")))]
5843 "sxtah%?\\t%0, %2, %1"
5844 [(set_attr "type" "alu_shift_reg")]
/* extendqihi2: HImode := sign-extended QImode, synthesized through SImode
   shifts; ARMv4+ memory sources use the direct sign-extending load.  */
5847 (define_expand "extendqihi2"
5849 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
5851 (set (match_operand:HI 0 "s_register_operand")
5852 (ashiftrt:SI (match_dup 2)
5857 if (arm_arch4 && MEM_P (operands[1]))
5859 emit_insn (gen_rtx_SET (operands[0],
5860 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
/* Register path: force the source into a register, then operate on the
   SImode low parts with a fresh scratch.  */
5863 if (!s_register_operand (operands[1], QImode))
5864 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5865 operands[0] = gen_lowpart (SImode, operands[0]);
5866 operands[1] = gen_lowpart (SImode, operands[1]);
5867 operands[2] = gen_reg_rtx (SImode);
/* QI -> HI sign-extending load (ldrsb-addressable memory only, "Uq").  */
5871 (define_insn "*arm_extendqihi_insn"
5872 [(set (match_operand:HI 0 "s_register_operand" "=r")
5873 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
5874 "TARGET_ARM && arm_arch4"
5876 [(set_attr "type" "load_byte")
5877 (set_attr "predicable" "yes")]
/* extendqisi2: SImode := sign-extended QImode.  Pre-v6 register sources use
   the (x << 24) >> 24 arithmetic-shift pair.  */
5880 (define_expand "extendqisi2"
5881 [(set (match_operand:SI 0 "s_register_operand")
5882 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
/* Pre-ARMv4 has no ldrsb; force memory sources into a register first.  */
5885 if (!arm_arch4 && MEM_P (operands[1]))
5886 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5888 if (!arm_arch6 && !MEM_P (operands[1]))
5890 rtx t = gen_lowpart (SImode, operands[1]);
5891 rtx tmp = gen_reg_rtx (SImode);
5892 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5893 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
/* Split a register QI sign-extend into the 24-bit shift pair.  */
5899 [(set (match_operand:SI 0 "register_operand" "")
5900 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
5902 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5903 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
5905 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
/* v4..v5: shifts for registers (length 8), ldrsb for memory (length 4).  */
5908 (define_insn "*arm_extendqisi"
5909 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5910 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5911 "TARGET_ARM && arm_arch4 && !arm_arch6"
5915 [(set_attr "length" "8,4")
5916 (set_attr "type" "alu_shift_reg,load_byte")
5917 (set_attr "predicable" "yes")]
/* v6+: single-instruction extend or ldrsb.  */
5920 (define_insn "*arm_extendqisi_v6"
5921 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5923 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5924 "TARGET_ARM && arm_arch6"
5928 [(set_attr "type" "extend,load_byte")
5929 (set_attr "predicable" "yes")]
/* Fused sign-extend-byte-and-add: sxtab rd, rn, rm.  */
5932 (define_insn "*arm_extendqisi2addsi"
5933 [(set (match_operand:SI 0 "s_register_operand" "=r")
5934 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5935 (match_operand:SI 2 "s_register_operand" "r")))]
5937 "sxtab%?\\t%0, %2, %1"
5938 [(set_attr "type" "alu_shift_reg")
5939 (set_attr "predicable" "yes")]
/* sxtb16/uxtb16: dual byte-to-halfword extension, via unspec USXTB16.  */
5942 (define_insn "arm_<sup>xtb16"
5943 [(set (match_operand:SI 0 "s_register_operand" "=r")
5945 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
5947 "<sup>xtb16%?\\t%0, %1"
5948 [(set_attr "predicable" "yes")
5949 (set_attr "type" "alu_dsp_reg")])
/* Generic two-operand SIMD32 ops that do not set the GE bits.  */
5951 (define_insn "arm_<simd32_op>"
5952 [(set (match_operand:SI 0 "s_register_operand" "=r")
5954 [(match_operand:SI 1 "s_register_operand" "r")
5955 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
5957 "<simd32_op>%?\\t%0, %1, %2"
5958 [(set_attr "predicable" "yes")
5959 (set_attr "type" "alu_dsp_reg")])
/* usada8: sum of absolute byte differences plus accumulator.  */
5961 (define_insn "arm_usada8"
5962 [(set (match_operand:SI 0 "s_register_operand" "=r")
5964 [(match_operand:SI 1 "s_register_operand" "r")
5965 (match_operand:SI 2 "s_register_operand" "r")
5966 (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
5968 "usada8%?\\t%0, %1, %2, %3"
5969 [(set_attr "predicable" "yes")
5970 (set_attr "type" "alu_dsp_reg")])
/* SIMD32 ops with a DImode accumulator tied to the output ("0").  */
5972 (define_insn "arm_<simd32_op>"
5973 [(set (match_operand:DI 0 "s_register_operand" "=r")
5975 [(match_operand:SI 1 "s_register_operand" "r")
5976 (match_operand:SI 2 "s_register_operand" "r")
5977 (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
5979 "<simd32_op>%?\\t%Q0, %R0, %1, %2"
5980 [(set_attr "predicable" "yes")
5981 (set_attr "type" "smlald")])
/* SIMD32 ops that also update the GE bits (APSRGE_REGNUM).  */
5983 (define_insn "arm_<simd32_op>"
5984 [(set (match_operand:SI 0 "s_register_operand" "=r")
5986 [(match_operand:SI 1 "s_register_operand" "r")
5987 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_GE))
5988 (set (reg:CC APSRGE_REGNUM)
5989 (unspec:CC [(reg:CC APSRGE_REGNUM)] UNSPEC_GE_SET))]
5991 "<simd32_op>%?\\t%0, %1, %2"
5992 [(set_attr "predicable" "yes")
5993 (set_attr "type" "alu_sreg")])
/* Q-flag-affecting SIMD32 ternary ops: the insn exists in plain and
   setq variants (name/predicate supplied by <add_clobber_q_name/pred>).  */
5995 (define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
5996 [(set (match_operand:SI 0 "s_register_operand" "=r")
5998 [(match_operand:SI 1 "s_register_operand" "r")
5999 (match_operand:SI 2 "s_register_operand" "r")
6000 (match_operand:SI 3 "s_register_operand" "r")] SIMD32_TERNOP_Q))]
6001 "TARGET_INT_SIMD && <add_clobber_q_pred>"
6002 "<simd32_op>%?\\t%0, %1, %2, %3"
6003 [(set_attr "predicable" "yes")
6004 (set_attr "type" "alu_sreg")])
/* Expander dispatching to the setq or plain variant of the ternary op.  */
6006 (define_expand "arm_<simd32_op>"
6007 [(set (match_operand:SI 0 "s_register_operand")
6009 [(match_operand:SI 1 "s_register_operand")
6010 (match_operand:SI 2 "s_register_operand")
6011 (match_operand:SI 3 "s_register_operand")] SIMD32_TERNOP_Q))]
6015 emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
6016 operands[2], operands[3]));
6018 emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
6019 operands[2], operands[3]));
/* Same plain/setq pairing for Q-flag binary ops.  */
6024 (define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
6025 [(set (match_operand:SI 0 "s_register_operand" "=r")
6027 [(match_operand:SI 1 "s_register_operand" "r")
6028 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_BINOP_Q))]
6029 "TARGET_INT_SIMD && <add_clobber_q_pred>"
6030 "<simd32_op>%?\\t%0, %1, %2"
6031 [(set_attr "predicable" "yes")
6032 (set_attr "type" "alu_sreg")])
6034 (define_expand "arm_<simd32_op>"
6035 [(set (match_operand:SI 0 "s_register_operand")
6037 [(match_operand:SI 1 "s_register_operand")
6038 (match_operand:SI 2 "s_register_operand")] SIMD32_BINOP_Q))]
6042 emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
6045 emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
/* ssat16/usat16: dual halfword saturation; note operand order %2, %1 in the
   output template (saturation bound before source).  */
6051 (define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
6052 [(set (match_operand:SI 0 "s_register_operand" "=r")
6054 [(match_operand:SI 1 "s_register_operand" "r")
6055 (match_operand:SI 2 "<sup>sat16_imm" "i")] USSAT16))]
6056 "TARGET_INT_SIMD && <add_clobber_q_pred>"
6057 "<simd32_op>%?\\t%0, %2, %1"
6058 [(set_attr "predicable" "yes")
6059 (set_attr "type" "alu_sreg")])
6061 (define_expand "arm_<simd32_op>"
6062 [(set (match_operand:SI 0 "s_register_operand")
6064 [(match_operand:SI 1 "s_register_operand")
6065 (match_operand:SI 2 "<sup>sat16_imm")] USSAT16))]
6069 emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
6072 emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
/* sel: byte-wise select between %1 and %2 controlled by the GE bits.  */
6078 (define_insn "arm_sel"
6079 [(set (match_operand:SI 0 "s_register_operand" "=r")
6081 [(match_operand:SI 1 "s_register_operand" "r")
6082 (match_operand:SI 2 "s_register_operand" "r")
6083 (reg:CC APSRGE_REGNUM)] UNSPEC_SEL))]
6085 "sel%?\\t%0, %1, %2"
6086 [(set_attr "predicable" "yes")
6087 (set_attr "type" "alu_sreg")])
/* SFmode -> DFmode extension (double-precision VFP only).  */
6089 (define_expand "extendsfdf2"
6090 [(set (match_operand:DF 0 "s_register_operand")
6091 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
6092 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6096 ;; HFmode -> DFmode conversions where we don't have an instruction for it
6097 ;; must go through SFmode.
6099 ;; This is always safe for an extend.
6101 (define_expand "extendhfdf2"
6102 [(set (match_operand:DF 0 "s_register_operand")
6103 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
6106 /* We don't have a direct instruction for this, so go via SFmode. */
6107 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
/* Two-step widening HF -> SF -> DF, then store to the destination.  */
6110 op1 = convert_to_mode (SFmode, operands[1], 0);
6111 op1 = convert_to_mode (DFmode, op1, 0);
6112 emit_insn (gen_movdf (operands[0], op1));
6115 /* Otherwise, we're done producing RTL and will pick up the correct
6116 pattern to do this with one rounding-step in a single instruction. */
6120 ;; Move insns (including loads and stores)
6122 ;; XXX Just some ideas about movti.
6123 ;; I don't think these are a good idea on the arm, there just aren't enough
6125 ;;(define_expand "loadti"
6126 ;; [(set (match_operand:TI 0 "s_register_operand")
6127 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
6130 ;;(define_expand "storeti"
6131 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
6132 ;; (match_operand:TI 1 "s_register_operand"))]
6135 ;;(define_expand "movti"
6136 ;; [(set (match_operand:TI 0 "general_operand")
6137 ;; (match_operand:TI 1 "general_operand"))]
6143 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
6144 ;; operands[1] = copy_to_reg (operands[1]);
6145 ;; if (MEM_P (operands[0]))
6146 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
6147 ;; else if (MEM_P (operands[1]))
6148 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
6152 ;; emit_insn (insn);
6156 ;; Recognize garbage generated above.
6159 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
6160 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
6164 ;; register mem = (which_alternative < 3);
6165 ;; register const char *template;
6167 ;; operands[mem] = XEXP (operands[mem], 0);
6168 ;; switch (which_alternative)
6170 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
6171 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
6172 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
6173 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
6174 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
6175 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
6177 ;; output_asm_insn (template, operands);
/* movdi expander.  Splits DImode moves into SImode halves when one side is
   a core register pair that hard_regno_mode_ok rejects for DImode, taking
   care to keep volatile memory accesses whole.  */
6181 (define_expand "movdi"
6182 [(set (match_operand:DI 0 "general_operand")
6183 (match_operand:DI 1 "general_operand"))]
6186 gcc_checking_assert (aligned_operand (operands[0], DImode));
6187 gcc_checking_assert (aligned_operand (operands[1], DImode));
6188 if (can_create_pseudo_p ())
6190 if (!REG_P (operands[0]))
6191 operands[1] = force_reg (DImode, operands[1]);
6193 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
6194 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
6196 /* Avoid LDRD's into an odd-numbered register pair in ARM state
6197 when expanding function calls. */
6198 gcc_assert (can_create_pseudo_p ());
6199 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
6201 /* Perform load into legal reg pair first, then move. */
6202 rtx reg = gen_reg_rtx (DImode);
6203 emit_insn (gen_movdi (reg, operands[1]));
/* Non-volatile source: move the two SImode halves separately.  */
6206 emit_move_insn (gen_lowpart (SImode, operands[0]),
6207 gen_lowpart (SImode, operands[1]));
6208 emit_move_insn (gen_highpart (SImode, operands[0]),
6209 gen_highpart (SImode, operands[1]));
6212 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
6213 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
6215 /* Avoid STRD's from an odd-numbered register pair in ARM state
6216 when expanding function prologue. */
6217 gcc_assert (can_create_pseudo_p ());
/* Volatile destination: assemble in a temporary, then store whole.  */
6218 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
6219 ? gen_reg_rtx (DImode)
6221 emit_move_insn (gen_lowpart (SImode, split_dest),
6222 gen_lowpart (SImode, operands[1]));
6223 emit_move_insn (gen_highpart (SImode, split_dest),
6224 gen_highpart (SImode, operands[1]));
6225 if (split_dest != operands[0])
6226 emit_insn (gen_movdi (operands[0], split_dest));
/* DImode move for targets without hard-float/iWMMXt/MVE register moves;
   at least one operand must be a register.  */
6232 (define_insn "*arm_movdi"
6233 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
6234 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
6236 && !(TARGET_HARD_FLOAT)
6237 && !(TARGET_HAVE_MVE || TARGET_HAVE_MVE_FLOAT)
6239 && ( register_operand (operands[0], DImode)
6240 || register_operand (operands[1], DImode))"
6242 switch (which_alternative)
6249 /* Cannot load it directly, split to load it via MOV / MOVT. */
6250 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6254 return output_move_double (operands, true, NULL);
6257 [(set_attr "length" "8,12,16,8,8")
6258 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6259 (set_attr "arm_pool_range" "*,*,*,1020,*")
6260 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6261 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
6262 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
/* Split a 64-bit immediate move into two arm_split_constant calls, one per
   SImode half, when inlining the constant is cheap enough (or the literal
   pool is disabled).  */
6266 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6267 (match_operand:ANY64 1 "immediate_operand" ""))]
6270 && (arm_disable_literal_pool
6271 || (arm_const_double_inline_cost (operands[1])
6272 <= arm_max_const_double_inline_cost ()))"
6275 arm_split_constant (SET, SImode, curr_insn,
6276 INTVAL (gen_lowpart (SImode, operands[1])),
6277 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
6278 arm_split_constant (SET, SImode, curr_insn,
6279 INTVAL (gen_highpart_mode (SImode,
6280 GET_MODE (operands[0]),
6282 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
6287 ; If optimizing for size, or if we have load delay slots, then
6288 ; we want to split the constant into two separate operations.
6289 ; In both cases this may split a trivial part into a single data op
6290 ; leaving a single complex constant to load. We can also get longer
6291 ; offsets in a LDR which means we get better chances of sharing the pool
6292 ; entries. Finally, we can normally do a better job of scheduling
6293 ; LDR instructions than we can with LDM.
6294 ; This pattern will only match if the one above did not.
/* Fallback: split the const_double into two SImode sets (low, then high).  */
6296 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6297 (match_operand:ANY64 1 "const_double_operand" ""))]
6298 "TARGET_ARM && reload_completed
6299 && arm_const_double_by_parts (operands[1])"
6300 [(set (match_dup 0) (match_dup 1))
6301 (set (match_dup 2) (match_dup 3))]
6303 operands[2] = gen_highpart (SImode, operands[0]);
6304 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
6306 operands[0] = gen_lowpart (SImode, operands[0]);
6307 operands[1] = gen_lowpart (SImode, operands[1]);
/* Split a 64-bit register-to-register move into two SImode moves after
   reload, swapping the order when the first move would clobber the source
   of the second (partial overlap).  */
6312 [(set (match_operand:ANY64_BF 0 "arm_general_register_operand" "")
6313 (match_operand:ANY64_BF 1 "arm_general_register_operand" ""))]
6314 "TARGET_EITHER && reload_completed"
6315 [(set (match_dup 0) (match_dup 1))
6316 (set (match_dup 2) (match_dup 3))]
6318 operands[2] = gen_highpart (SImode, operands[0]);
6319 operands[3] = gen_highpart (SImode, operands[1]);
6320 operands[0] = gen_lowpart (SImode, operands[0]);
6321 operands[1] = gen_lowpart (SImode, operands[1]);
6323 /* Handle a partial overlap. */
6324 if (rtx_equal_p (operands[0], operands[3]))
6326 rtx tmp0 = operands[0];
6327 rtx tmp1 = operands[1];
6329 operands[0] = operands[2];
6330 operands[1] = operands[3];
6337 ;; We can't actually do base+index doubleword loads if the index and
6338 ;; destination overlap. Split here so that we at least have chance to
/* Compute base+index into the destination's first register, then load the
   doubleword from that single base register.  */
6341 [(set (match_operand:DI 0 "s_register_operand" "")
6342 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
6343 (match_operand:SI 2 "s_register_operand" ""))))]
6345 && reg_overlap_mentioned_p (operands[0], operands[1])
6346 && reg_overlap_mentioned_p (operands[0], operands[2])"
6348 (plus:SI (match_dup 1)
6351 (mem:DI (match_dup 4)))]
6353 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
/* movsi expander: legitimizes constants (split or movw/movt), TLS symbol
   references and PIC addresses before emitting the move.  */
6357 (define_expand "movsi"
6358 [(set (match_operand:SI 0 "general_operand")
6359 (match_operand:SI 1 "general_operand"))]
6363 rtx base, offset, tmp;
6365 gcc_checking_assert (aligned_operand (operands[0], SImode));
6366 gcc_checking_assert (aligned_operand (operands[1], SImode));
6367 if (TARGET_32BIT || TARGET_HAVE_MOVT)
6369 /* Everything except mem = const or mem = mem can be done easily. */
6370 if (MEM_P (operands[0]))
6371 operands[1] = force_reg (SImode, operands[1]);
/* Constants not encodable as an immediate (directly or inverted) are
   split now unless DONT_EARLY_SPLIT_CONSTANT defers them.  */
6372 if (arm_general_register_operand (operands[0], SImode)
6373 && CONST_INT_P (operands[1])
6374 && !(const_ok_for_arm (INTVAL (operands[1]))
6375 || const_ok_for_arm (~INTVAL (operands[1]))))
6377 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
6379 emit_insn (gen_rtx_SET (operands[0], operands[1]));
6384 arm_split_constant (SET, SImode, NULL_RTX,
6385 INTVAL (operands[1]), operands[0], NULL_RTX,
6386 optimize && can_create_pseudo_p ());
6391 else /* Target doesn't have MOVT... */
6393 if (can_create_pseudo_p ())
6395 if (!REG_P (operands[0]))
6396 operands[1] = force_reg (SImode, operands[1]);
/* Split symbol+offset when the combined constant can't go in memory;
   load the base, then add the offset.  */
6400 split_const (operands[1], &base, &offset);
6401 if (INTVAL (offset) != 0
6402 && targetm.cannot_force_const_mem (SImode, operands[1]))
6404 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6405 emit_move_insn (tmp, base);
6406 emit_insn (gen_addsi3 (operands[0], tmp, offset));
6410 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
6412 /* Recognize the case where operand[1] is a reference to thread-local
6413 data and load its address to a register. Offsets have been split off
6415 if (arm_tls_referenced_p (operands[1]))
6416 operands[1] = legitimize_tls_address (operands[1], tmp);
6418 && (CONSTANT_P (operands[1])
6419 || symbol_mentioned_p (operands[1])
6420 || label_mentioned_p (operands[1])))
6422 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
6427 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
6428 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
6429 ;; so this does not matter.
/* movt: set the top 16 bits of %0 (tied to %1, "0") from the upper half of
   a symbolic constant; alternatives cover 32-bit and v8-M Baseline.  */
6430 (define_insn "*arm_movt"
6431 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
6432 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
6433 (match_operand:SI 2 "general_operand" "i,i")))]
6434 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
6436 movt%?\t%0, #:upper16:%c2
6437 movt\t%0, #:upper16:%c2"
6438 [(set_attr "arch" "32,v8mb")
6439 (set_attr "predicable" "yes")
6440 (set_attr "length" "4")
6441 (set_attr "type" "alu_sreg")]
/* Basic ARM-state SImode move: mov/mvn/movw immediates, loads and stores;
   one operand must be a register.  */
6444 (define_insn "*arm_movsi_insn"
6445 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
6446 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
6447 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
6448 && ( register_operand (operands[0], SImode)
6449 || register_operand (operands[1], SImode))"
6457 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
6458 (set_attr "predicable" "yes")
6459 (set_attr "arch" "*,*,*,v6t2,*,*")
6460 (set_attr "pool_range" "*,*,*,*,4096,*")
6461 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
/* Split a non-encodable SImode constant via arm_split_constant; the
   replacement pattern is a dummy clobber since all RTL is emitted here.  */
6465 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6466 (match_operand:SI 1 "const_int_operand" ""))]
6467 "(TARGET_32BIT || TARGET_HAVE_MOVT)
6468 && (!(const_ok_for_arm (INTVAL (operands[1]))
6469 || const_ok_for_arm (~INTVAL (operands[1]))))"
6470 [(clobber (const_int 0))]
6472 arm_split_constant (SET, SImode, NULL_RTX,
6473 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
6478 ;; A normal way to do (symbol + offset) requires three instructions at least
6479 ;; (depends on how big the offset is) as below:
6480 ;; movw r0, #:lower16:g
6481 ;; movw r0, #:upper16:g
6484 ;; A better way would be:
6485 ;; movw r0, #:lower16:g+4
6486 ;; movw r0, #:upper16:g+4
6488 ;; The limitation of this way is that the length of offset should be a 16-bit
6489 ;; signed value, because current assembler only supports REL type relocation for
6490 ;; such case. If the more powerful RELA type is supported in future, we should
6491 ;; update this pattern to go with better way.
6493 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6494 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
6495 (match_operand:SI 2 "const_int_operand" ""))))]
6498 && arm_disable_literal_pool
6500 && GET_CODE (operands[1]) == SYMBOL_REF"
6501 [(clobber (const_int 0))]
6503 int offset = INTVAL (operands[2]);
/* Offsets outside 16-bit signed range: movw/movt the bare symbol, then add
   the offset as a separate instruction.  */
6505 if (offset < -0x8000 || offset > 0x7fff)
6507 arm_emit_movpair (operands[0], operands[1]);
6508 emit_insn (gen_rtx_SET (operands[0],
6509 gen_rtx_PLUS (SImode, operands[0], operands[2])));
/* Small offsets: fold the offset into the movw/movt relocation itself.  */
6513 rtx op = gen_rtx_CONST (SImode,
6514 gen_rtx_PLUS (SImode, operands[1], operands[2]));
6515 arm_emit_movpair (operands[0], op);
6520 ;; Split symbol_refs at the later stage (after cprop), instead of generating
6521 ;; movt/movw pair directly at expand. Otherwise corresponding high_sum
6522 ;; and lo_sum would be merged back into memory load at cprop. However,
6523 ;; if the default is to prefer movt/movw rather than a load from the constant
6524 ;; pool, the performance is better.
/* Excludes word-relocation builds and TLS symbols, which need different
   sequences.  */
6526 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6527 (match_operand:SI 1 "general_operand" ""))]
6528 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
6529 && !target_word_relocations
6530 && !arm_tls_referenced_p (operands[1])"
6531 [(clobber (const_int 0))]
6533 arm_emit_movpair (operands[0], operands[1]);
6537 ;; When generating pic, we need to load the symbol offset into a register.
6538 ;; So that the optimizer does not confuse this with a normal symbol load
6539 ;; we use an unspec. The offset will be loaded from a constant pool entry,
6540 ;; since that is the only type of relocation we can use.
6542 ;; Wrap calculation of the whole PIC address in a single pattern for the
6543 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
6544 ;; a PIC address involves two loads from memory, so we want to CSE it
6545 ;; as often as possible.
6546 ;; This pattern will be split into one of the pic_load_addr_* patterns
6547 ;; and a move after GCSE optimizations.
6549 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
6550 (define_expand "calculate_pic_address"
6551 [(set (match_operand:SI 0 "register_operand")
6552 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
6553 (unspec:SI [(match_operand:SI 2 "" "")]
6558 ;; Split calculate_pic_address into pic_load_addr_* and a move.
/* operands[3] is the scratch for the UNSPEC_PIC_SYM load; before reload a
   fresh pseudo, afterwards the destination itself.  */
6560 [(set (match_operand:SI 0 "register_operand" "")
6561 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
6562 (unspec:SI [(match_operand:SI 2 "" "")]
6565 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
6566 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
6567 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
6570 ;; operand1 is the memory address to go into
6571 ;; pic_load_addr_32bit.
6572 ;; operand2 is the PIC label to be emitted
6573 ;; from pic_add_dot_plus_eight.
6574 ;; We do this to allow hoisting of the entire insn.
/* After reload, splits into the UNSPEC_PIC_SYM load and the pc-relative
   UNSPEC_PIC_BASE add; operands[3] is the pc offset (4 for Thumb, 8 for
   ARM, matching the respective pipelines' pc read-ahead).  */
6575 (define_insn_and_split "pic_load_addr_unified"
6576 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
6577 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
6578 (match_operand:SI 2 "" "")]
6579 UNSPEC_PIC_UNIFIED))]
6582 "&& reload_completed"
6583 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
6584 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
6585 (match_dup 2)] UNSPEC_PIC_BASE))]
6586 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
6587 [(set_attr "type" "load_4,load_4,load_4")
6588 (set_attr "pool_range" "4096,4094,1022")
6589 (set_attr "neg_pool_range" "4084,0,0")
6590 (set_attr "arch" "a,t2,t1")
6591 (set_attr "length" "8,6,4")]
6594 ;; The rather odd constraints on the following are to force reload to leave
6595 ;; the insn alone, and to force the minipool generation pass to then move
6596 ;; the GOT symbol to memory.
;; Load the UNSPEC_PIC_SYM (GOT symbol offset) from the literal pool,
;; 32-bit (ARM/Thumb-2) variant.
6598 (define_insn "pic_load_addr_32bit"
6599 [(set (match_operand:SI 0 "s_register_operand" "=r")
6600 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6601 "TARGET_32BIT && flag_pic"
;; NOTE(review): the output template line (presumably an ldr) is missing
;; from this extract.
6603 [(set_attr "type" "load_4")
;; Pool ranges depend on whether we are ARM or Thumb-2; the const_int
;; values inside the if_then_else arms are missing from this extract.
6604 (set (attr "pool_range")
6605 (if_then_else (eq_attr "is_thumb" "no")
6608 (set (attr "neg_pool_range")
6609 (if_then_else (eq_attr "is_thumb" "no")
;; Thumb-1 variant of the same UNSPEC_PIC_SYM literal-pool load.
6614 (define_insn "pic_load_addr_thumb1"
6615 [(set (match_operand:SI 0 "s_register_operand" "=l")
6616 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6617 "TARGET_THUMB1 && flag_pic"
;; NOTE(review): output template line missing here as well.
6619 [(set_attr "type" "load_4")
6620 (set (attr "pool_range") (const_int 1018))]
;; Add pc to a register holding a pc-relative offset; emits the local
;; "LPICn" label (numbered by operand 2) immediately before the add.
;; Thumb variant: pc reads as . + 4.
6623 (define_insn "pic_add_dot_plus_four"
6624 [(set (match_operand:SI 0 "register_operand" "=r")
6625 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
6627 (match_operand 2 "" "")]
;; NOTE(review): the UNSPEC tag, condition string and output-statement
;; opening are missing between the operands and the C fragment below.
6631 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6632 INTVAL (operands[2]));
6633 return \"add\\t%0, %|pc\";
6635 [(set_attr "length" "2")
6636 (set_attr "type" "alu_sreg")]
;; ARM variant: pc reads as . + 8, and the add is predicable.
6639 (define_insn "pic_add_dot_plus_eight"
6640 [(set (match_operand:SI 0 "register_operand" "=r")
6641 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6643 (match_operand 2 "" "")]
6647 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6648 INTVAL (operands[2]));
6649 return \"add%?\\t%0, %|pc, %1\";
6651 [(set_attr "predicable" "yes")
6652 (set_attr "type" "alu_sreg")]
;; Fused form: load through [pc + reg] in one insn (used for TLS accesses;
;; produced by the peephole below).
6655 (define_insn "tls_load_dot_plus_eight"
6656 [(set (match_operand:SI 0 "register_operand" "=r")
6657 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6659 (match_operand 2 "" "")]
6663 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6664 INTVAL (operands[2]));
6665 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
6667 [(set_attr "predicable" "yes")
6668 (set_attr "type" "load_4")]
6671 ;; PIC references to local variables can generate pic_add_dot_plus_eight
6672 ;; followed by a load. These sequences can be crunched down to
6673 ;; tls_load_dot_plus_eight by a peephole.
;; NOTE(review): the define_peephole2 header line is missing from this
;; extract; what follows is its match template.  It fires only when the
;; intermediate address register (operand 0) dies after the load.
6676 [(set (match_operand:SI 0 "register_operand" "")
6677 (unspec:SI [(match_operand:SI 3 "register_operand" "")
6679 (match_operand 1 "" "")]
6681 (set (match_operand:SI 2 "arm_general_register_operand" "")
6682 (mem:SI (match_dup 0)))]
6683 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
6685 (mem:SI (unspec:SI [(match_dup 3)
;; Load a VxWorks RTP GOT offset: ldr from base reg + UNSPEC_PIC_OFFSET.
6692 (define_insn "pic_offset_arm"
6693 [(set (match_operand:SI 0 "register_operand" "=r")
6694 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
6695 (unspec:SI [(match_operand:SI 2 "" "X")]
6696 UNSPEC_PIC_OFFSET))))]
6697 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
6698 "ldr%?\\t%0, [%1,%2]"
6699 [(set_attr "type" "load_4")]
;; Re-establish the PIC register after a longjmp / nonlocal goto lands here.
6702 (define_expand "builtin_setjmp_receiver"
6703 [(label_ref (match_operand 0 "" ""))]
6707 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
6709 if (arm_pic_register != INVALID_REGNUM)
6710 arm_load_pic_register (1UL << 3, NULL_RTX);
6714 ;; If copying one reg to another we can set the condition codes according to
6715 ;; its value. Such a move is common after a return from subroutine and the
6716 ;; result is being tested against zero.
;; Move + compare-against-zero in one insn (subs/cmp forms); sets CC.
6718 (define_insn "*movsi_compare0"
6719 [(set (reg:CC CC_REGNUM)
6720 (compare:CC (match_operand:SI 1 "s_register_operand" "0,0,l,rk,rk")
;; NOTE(review): the (const_int 0) comparison operand and part of the
;; output alternatives are missing from this extract.
6722 (set (match_operand:SI 0 "s_register_operand" "=l,rk,l,r,rk")
6730 subs%?\\t%0, %1, #0"
6731 [(set_attr "conds" "set")
6732 (set_attr "arch" "t2,*,t2,t2,a")
6733 (set_attr "type" "alus_imm")
6734 (set_attr "length" "2,4,2,4,4")]
6737 ;; Subroutine to store a half word from a register into memory.
6738 ;; Operand 0 is the source register (HImode)
6739 ;; Operand 1 is the destination address in a register (SImode)
6741 ;; In both this routine and the next, we must be careful not to spill
6742 ;; a memory address of reg+large_const into a separate PLUS insn, since this
6743 ;; can generate unrecognizable rtl.
;; Little-endian byte-wise HImode store: low byte to addr, high byte
;; (operand 0 >> 8, via scratch operand 2) to addr + 1.
6745 (define_expand "storehi"
6746 [;; store the low byte
6747 (set (match_operand 1 "" "") (match_dup 3))
6748 ;; extract the high byte
6750 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6751 ;; store the high byte
6752 (set (match_dup 4) (match_dup 5))]
6756 rtx op1 = operands[1];
6757 rtx addr = XEXP (op1, 0);
6758 enum rtx_code code = GET_CODE (addr);
;; Force non-trivial addresses into a register so reload cannot split a
;; reg+large_const address into an unrecognizable PLUS insn (see above).
;; NOTE(review): the second half of this condition is missing from the
;; extract.
6760 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6762 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
;; op 4/1: QImode high/low byte addresses; op 3/5: QImode low parts of the
;; source and of the high-byte scratch.
6764 operands[4] = adjust_address (op1, QImode, 1);
6765 operands[1] = adjust_address (operands[1], QImode, 0);
6766 operands[3] = gen_lowpart (QImode, operands[0]);
6767 operands[0] = gen_lowpart (SImode, operands[0]);
6768 operands[2] = gen_reg_rtx (SImode);
6769 operands[5] = gen_lowpart (QImode, operands[2]);
;; Big-endian counterpart: high byte goes to the lower address.
6773 (define_expand "storehi_bigend"
6774 [(set (match_dup 4) (match_dup 3))
6776 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6777 (set (match_operand 1 "" "") (match_dup 5))]
6781 rtx op1 = operands[1];
6782 rtx addr = XEXP (op1, 0);
6783 enum rtx_code code = GET_CODE (addr);
6785 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6787 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
6789 operands[4] = adjust_address (op1, QImode, 1);
6790 operands[1] = adjust_address (operands[1], QImode, 0);
6791 operands[3] = gen_lowpart (QImode, operands[0]);
6792 operands[0] = gen_lowpart (SImode, operands[0]);
6793 operands[2] = gen_reg_rtx (SImode);
6794 operands[5] = gen_lowpart (QImode, operands[2]);
6798 ;; Subroutine to store a half word integer constant into memory.
;; Stores a CONST_INT halfword as two QImode byte stores, loading each byte
;; value into a register first; reuses one register when both bytes match.
6799 (define_expand "storeinthi"
6800 [(set (match_operand 0 "" "")
6801 (match_operand 1 "" ""))
6802 (set (match_dup 3) (match_dup 2))]
6806 HOST_WIDE_INT value = INTVAL (operands[1]);
6807 rtx addr = XEXP (operands[0], 0);
6808 rtx op0 = operands[0];
6809 enum rtx_code code = GET_CODE (addr);
;; Same address-legitimization guard as storehi; second half of the
;; condition is missing from this extract.
6811 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6813 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
6815 operands[1] = gen_reg_rtx (SImode);
;; Big-endian: first store gets the high byte; little-endian: the low byte.
6816 if (BYTES_BIG_ENDIAN)
6818 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
6819 if ((value & 255) == ((value >> 8) & 255))
6820 operands[2] = operands[1];
6823 operands[2] = gen_reg_rtx (SImode);
6824 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
6829 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
6830 if ((value & 255) == ((value >> 8) & 255))
6831 operands[2] = operands[1];
6834 operands[2] = gen_reg_rtx (SImode);
6835 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
6839 operands[3] = adjust_address (op0, QImode, 1);
6840 operands[0] = adjust_address (operands[0], QImode, 0);
6841 operands[2] = gen_lowpart (QImode, operands[2]);
6842 operands[1] = gen_lowpart (QImode, operands[1]);
;; On ARMv4+ (32-bit) a halfword can be stored with a single strh; just
;; make sure the source is in a register.
6846 (define_expand "storehi_single_op"
6847 [(set (match_operand:HI 0 "memory_operand")
6848 (match_operand:HI 1 "general_operand"))]
6849 "TARGET_32BIT && arm_arch4"
6851 if (!s_register_operand (operands[1], HImode))
6852 operands[1] = copy_to_mode_reg (HImode, operands[1]);
;; General HImode move expander.  Dispatches on target (32-bit ARM/Thumb-2
;; vs. Thumb-1) and on whether we may create pseudos (i.e. not reloading).
;; NOTE(review): several branch/else lines are missing from this extract;
;; the structure annotated below follows the visible code only.
6856 (define_expand "movhi"
6857 [(set (match_operand:HI 0 "general_operand")
6858 (match_operand:HI 1 "general_operand"))]
6861 gcc_checking_assert (aligned_operand (operands[0], HImode));
6862 gcc_checking_assert (aligned_operand (operands[1], HImode));
6865 if (can_create_pseudo_p ())
;; Storing to memory: single-op strh when available, else the byte-wise
;; storehi/storeinthi/storehi_bigend expanders above.
6867 if (MEM_P (operands[0]))
6871 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
6874 if (CONST_INT_P (operands[1]))
6875 emit_insn (gen_storeinthi (operands[0], operands[1]));
6878 if (MEM_P (operands[1]))
6879 operands[1] = force_reg (HImode, operands[1]);
6880 if (BYTES_BIG_ENDIAN)
6881 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
6883 emit_insn (gen_storehi (operands[1], operands[0]));
6887 /* Sign extend a constant, and keep it in an SImode reg. */
6888 else if (CONST_INT_P (operands[1]))
6890 rtx reg = gen_reg_rtx (SImode);
6891 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6893 /* If the constant is already valid, leave it alone. */
6894 if (!const_ok_for_arm (val))
6896 /* If setting all the top bits will make the constant
6897 loadable in a single instruction, then set them.
6898 Otherwise, sign extend the number. */
6900 if (const_ok_for_arm (~(val | ~0xffff)))
6902 else if (val & 0x8000)
6906 emit_insn (gen_movsi (reg, GEN_INT (val)));
6907 operands[1] = gen_lowpart (HImode, reg);
;; Loading from memory with ARMv4+: go via zero_extendhisi2 so the load
;; can use ldrh.
6909 else if (arm_arch4 && optimize && can_create_pseudo_p ()
6910 && MEM_P (operands[1]))
6912 rtx reg = gen_reg_rtx (SImode);
6914 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6915 operands[1] = gen_lowpart (HImode, reg);
;; Pre-ARMv4: no ldrh.  If the address is provably word-aligned, do a
;; word load and shift out the wanted halfword; otherwise fall back to
;; movhi_bytes (two byte loads).
6917 else if (!arm_arch4)
6919 if (MEM_P (operands[1]))
6922 rtx offset = const0_rtx;
6923 rtx reg = gen_reg_rtx (SImode);
6925 if ((REG_P (base = XEXP (operands[1], 0))
6926 || (GET_CODE (base) == PLUS
6927 && (CONST_INT_P (offset = XEXP (base, 1)))
6928 && ((INTVAL(offset) & 1) != 1)
6929 && REG_P (base = XEXP (base, 0))))
6930 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
6934 new_rtx = widen_memory_access (operands[1], SImode,
6935 ((INTVAL (offset) & ~3)
6936 - INTVAL (offset)));
6937 emit_insn (gen_movsi (reg, new_rtx));
;; Pick the correct half of the loaded word (depends on offset bit 1
;; and endianness).
6938 if (((INTVAL (offset) & 2) != 0)
6939 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
6941 rtx reg2 = gen_reg_rtx (SImode);
6943 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
6948 emit_insn (gen_movhi_bytes (reg, operands[1]));
6950 operands[1] = gen_lowpart (HImode, reg);
6954 /* Handle loading a large integer during reload. */
6955 else if (CONST_INT_P (operands[1])
6956 && !const_ok_for_arm (INTVAL (operands[1]))
6957 && !const_ok_for_arm (~INTVAL (operands[1])))
6959 /* Writing a constant to memory needs a scratch, which should
6960 be handled with SECONDARY_RELOADs. */
6961 gcc_assert (REG_P (operands[0]));
6963 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6964 emit_insn (gen_movsi (operands[0], operands[1]));
6968 else if (TARGET_THUMB2)
6970 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
6971 if (can_create_pseudo_p ())
6973 if (!REG_P (operands[0]))
6974 operands[1] = force_reg (HImode, operands[1]);
6975 /* Zero extend a constant, and keep it in an SImode reg. */
6976 else if (CONST_INT_P (operands[1]))
6978 rtx reg = gen_reg_rtx (SImode);
6979 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6981 emit_insn (gen_movsi (reg, GEN_INT (val)));
6982 operands[1] = gen_lowpart (HImode, reg);
6986 else /* TARGET_THUMB1 */
6988 if (can_create_pseudo_p ())
6990 if (CONST_INT_P (operands[1]))
6992 rtx reg = gen_reg_rtx (SImode);
6994 emit_insn (gen_movsi (reg, operands[1]));
6995 operands[1] = gen_lowpart (HImode, reg);
6998 /* ??? We shouldn't really get invalid addresses here, but this can
6999 happen if we are passed a SP (never OK for HImode/QImode) or
7000 virtual register (also rejected as illegitimate for HImode/QImode)
7001 relative address. */
7002 /* ??? This should perhaps be fixed elsewhere, for instance, in
7003 fixup_stack_1, by checking for other kinds of invalid addresses,
7004 e.g. a bare reference to a virtual register. This may confuse the
7005 alpha though, which must handle this case differently. */
7006 if (MEM_P (operands[0])
7007 && !memory_address_p (GET_MODE (operands[0]),
7008 XEXP (operands[0], 0)))
7010 = replace_equiv_address (operands[0],
7011 copy_to_reg (XEXP (operands[0], 0)));
7013 if (MEM_P (operands[1])
7014 && !memory_address_p (GET_MODE (operands[1]),
7015 XEXP (operands[1], 0)))
7017 = replace_equiv_address (operands[1],
7018 copy_to_reg (XEXP (operands[1], 0)));
7020 if (MEM_P (operands[1]) && optimize > 0)
7022 rtx reg = gen_reg_rtx (SImode);
7024 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
7025 operands[1] = gen_lowpart (HImode, reg);
7028 if (MEM_P (operands[0]))
7029 operands[1] = force_reg (HImode, operands[1]);
7031 else if (CONST_INT_P (operands[1])
7032 && !satisfies_constraint_I (operands[1]))
7034 /* Handle loading a large integer during reload. */
7036 /* Writing a constant to memory needs a scratch, which should
7037 be handled with SECONDARY_RELOADs. */
7038 gcc_assert (REG_P (operands[0]));
7040 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7041 emit_insn (gen_movsi (operands[0], operands[1]));
;; Load an HImode value as two QImode loads plus shift/ior (for targets
;; without ldrh).  Operands 4/5 select which byte becomes the high part,
;; depending on endianness.
7048 (define_expand "movhi_bytes"
7049 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
7051 (zero_extend:SI (match_dup 6)))
7052 (set (match_operand:SI 0 "" "")
7053 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
7058 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
;; NOTE(review): the declarations of mem1/mem2 and the assignment of
;; operands 1/6 to them appear to be partially missing from this extract.
7060 mem1 = change_address (operands[1], QImode, addr);
7061 mem2 = change_address (operands[1], QImode,
7062 plus_constant (Pmode, addr, 1));
7063 operands[0] = gen_lowpart (SImode, operands[0]);
7065 operands[2] = gen_reg_rtx (SImode);
7066 operands[3] = gen_reg_rtx (SImode);
7069 if (BYTES_BIG_ENDIAN)
7071 operands[4] = operands[2];
7072 operands[5] = operands[3];
7076 operands[4] = operands[3];
7077 operands[5] = operands[2];
;; Big-endian HImode load via SImode rotate + arithmetic shift.
7082 (define_expand "movhi_bigend"
7084 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
7087 (ashiftrt:SI (match_dup 2) (const_int 16)))
7088 (set (match_operand:HI 0 "s_register_operand")
7092 operands[2] = gen_reg_rtx (SImode);
7093 operands[3] = gen_reg_rtx (SImode);
7094 operands[4] = gen_lowpart (HImode, operands[3]);
7098 ;; Pattern to recognize insn generated default case above
;; HImode reg<->reg/const/mem moves for ARMv4+ soft-float:
;; mov / mvn / movw (v6t2 only) / strh / ldrh.
7099 (define_insn "*movhi_insn_arch4"
7100 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
7101 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))]
;; NOTE(review): the first line of the insn condition (target test) is
;; missing from this extract.
7103 && arm_arch4 && !TARGET_HARD_FLOAT
7104 && (register_operand (operands[0], HImode)
7105 || register_operand (operands[1], HImode))"
7107 mov%?\\t%0, %1\\t%@ movhi
7108 mvn%?\\t%0, #%B1\\t%@ movhi
7109 movw%?\\t%0, %L1\\t%@ movhi
7110 strh%?\\t%1, %0\\t%@ movhi
7111 ldrh%?\\t%0, %1\\t%@ movhi"
7112 [(set_attr "predicable" "yes")
7113 (set_attr "pool_range" "*,*,*,*,256")
7114 (set_attr "neg_pool_range" "*,*,*,*,244")
7115 (set_attr "arch" "*,*,v6t2,*,*")
;; First alternative may carry either an immediate or a register.
7116 (set_attr_alternative "type"
7117 [(if_then_else (match_operand 1 "const_int_operand" "")
7118 (const_string "mov_imm" )
7119 (const_string "mov_reg"))
7120 (const_string "mvn_imm")
7121 (const_string "mov_imm")
7122 (const_string "store_4")
7123 (const_string "load_4")])]
;; Register-only HImode moves for pre-v4 ARM soft-float (no memory forms).
7126 (define_insn "*movhi_bytes"
7127 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
7128 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))]
7129 "TARGET_ARM && !TARGET_HARD_FLOAT"
7131 mov%?\\t%0, %1\\t%@ movhi
7132 mov%?\\t%0, %1\\t%@ movhi
7133 mvn%?\\t%0, #%B1\\t%@ movhi"
7134 [(set_attr "predicable" "yes")
7135 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
7138 ;; We use a DImode scratch because we may occasionally need an additional
7139 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
7140 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
7141 ;; The reload_in<m> and reload_out<m> patterns require special constraints
7142 ;; to be correctly handled in default_secondary_reload function.
;; Secondary-reload store of an HImode value; dispatches to the arm_ or
;; thumb_ helper (the target test between the two calls is missing from
;; this extract).
7143 (define_expand "reload_outhi"
7144 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
7145 (match_operand:HI 1 "s_register_operand" "r")
7146 (match_operand:DI 2 "s_register_operand" "=&l")])]
7149 arm_reload_out_hi (operands);
7151 thumb_reload_out_hi (operands);
;; Secondary-reload load of an HImode value.
7156 (define_expand "reload_inhi"
7157 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
7158 (match_operand:HI 1 "arm_reload_memory_operand" "o")
7159 (match_operand:DI 2 "s_register_operand" "=&r")])]
7163 arm_reload_in_hi (operands);
;; NOTE(review): calls thumb_reload_out_hi here in the visible text —
;; confirm against the full arm.md (the in/out asymmetry is surprising).
7165 thumb_reload_out_hi (operands);
;; General QImode move expander.
7169 (define_expand "movqi"
7170 [(set (match_operand:QI 0 "general_operand")
7171 (match_operand:QI 1 "general_operand"))]
7174 /* Everything except mem = const or mem = mem can be done easily */
7176 if (can_create_pseudo_p ())
;; Constants go through an SImode pseudo; on Thumb we mask to 8 bits so a
;; movs immediate is more likely to match.
7178 if (CONST_INT_P (operands[1]))
7180 rtx reg = gen_reg_rtx (SImode);
7182 /* For thumb we want an unsigned immediate, then we are more likely
7183 to be able to use a movs insn. */
7185 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
7187 emit_insn (gen_movsi (reg, operands[1]));
7188 operands[1] = gen_lowpart (QImode, reg);
7193 /* ??? We shouldn't really get invalid addresses here, but this can
7194 happen if we are passed a SP (never OK for HImode/QImode) or
7195 virtual register (also rejected as illegitimate for HImode/QImode)
7196 relative address. */
7197 /* ??? This should perhaps be fixed elsewhere, for instance, in
7198 fixup_stack_1, by checking for other kinds of invalid addresses,
7199 e.g. a bare reference to a virtual register. This may confuse the
7200 alpha though, which must handle this case differently. */
;; Legitimize any invalid memory addresses on either side.
7201 if (MEM_P (operands[0])
7202 && !memory_address_p (GET_MODE (operands[0]),
7203 XEXP (operands[0], 0)))
7205 = replace_equiv_address (operands[0],
7206 copy_to_reg (XEXP (operands[0], 0)));
7207 if (MEM_P (operands[1])
7208 && !memory_address_p (GET_MODE (operands[1]),
7209 XEXP (operands[1], 0)))
7211 = replace_equiv_address (operands[1],
7212 copy_to_reg (XEXP (operands[1], 0)));
;; When optimizing, load via zero_extendqisi2 (ldrb into an SImode reg).
7215 if (MEM_P (operands[1]) && optimize > 0)
7217 rtx reg = gen_reg_rtx (SImode);
7219 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
7220 operands[1] = gen_lowpart (QImode, reg);
7223 if (MEM_P (operands[0]))
7224 operands[1] = force_reg (QImode, operands[1]);
7226 else if (TARGET_THUMB
7227 && CONST_INT_P (operands[1])
7228 && !satisfies_constraint_I (operands[1]))
7230 /* Handle loading a large integer during reload. */
7232 /* Writing a constant to memory needs a scratch, which should
7233 be handled with SECONDARY_RELOADs. */
7234 gcc_assert (REG_P (operands[0]));
7236 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7237 emit_insn (gen_movsi (operands[0], operands[1]));
;; QImode move insn: mov/mvn + ldrb/strb alternatives spanning ARM,
;; Thumb-2 and 16-bit encodings (arch attribute selects per alternative).
7243 (define_insn "*arm_movqi_insn"
7244 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
7245 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
;; NOTE(review): the target-test line of the condition and the output
;; templates for the nine alternatives are missing from this extract.
7247 && ( register_operand (operands[0], QImode)
7248 || register_operand (operands[1], QImode))"
7259 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
7260 (set_attr "predicable" "yes")
7261 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
7262 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
7263 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
7266 ;; HFmode and BFmode moves.
;; Expander for half-precision float / bfloat16 moves (HFBF iterator).
7267 (define_expand "mov<mode>"
7268 [(set (match_operand:HFBF 0 "general_operand")
7269 (match_operand:HFBF 1 "general_operand"))]
7272 gcc_checking_assert (aligned_operand (operands[0], <MODE>mode));
7273 gcc_checking_assert (aligned_operand (operands[1], <MODE>mode));
;; Stores need the source in a register.
7276 if (MEM_P (operands[0]))
7277 operands[1] = force_reg (<MODE>mode, operands[1]);
7279 else /* TARGET_THUMB1 */
7281 if (can_create_pseudo_p ())
7283 if (!REG_P (operands[0]))
7284 operands[1] = force_reg (<MODE>mode, operands[1]);
;; Soft-float HF/BF move insn: ldrh/strh/mov, plus materializing a
;; constant either with movw (Thumb-2) or mov+orr (two-insn fallback).
7290 (define_insn "*arm32_mov<mode>"
7291 [(set (match_operand:HFBF 0 "nonimmediate_operand" "=r,m,r,r")
7292 (match_operand:HFBF 1 "general_operand" " m,r,r,F"))]
;; NOTE(review): the first line of the insn condition is missing here.
7294 && !TARGET_HARD_FLOAT
7296 && ( s_register_operand (operands[0], <MODE>mode)
7297 || s_register_operand (operands[1], <MODE>mode))"
7299 switch (which_alternative)
7301 case 0: /* ARM register from memory */
7302 return \"ldrh%?\\t%0, %1\\t%@ __<fporbf>\";
7303 case 1: /* memory from ARM register */
7304 return \"strh%?\\t%1, %0\\t%@ __<fporbf>\";
7305 case 2: /* ARM register from ARM register */
7306 return \"mov%?\\t%0, %1\\t%@ __<fporbf>\";
7307 case 3: /* ARM register from constant */
;; Convert the FP constant to its 16-bit target representation, then
;; emit it as one movw or as mov(high byte)+orr(low byte).
7312 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
7314 ops[0] = operands[0];
7315 ops[1] = GEN_INT (bits);
7316 ops[2] = GEN_INT (bits & 0xff00);
7317 ops[3] = GEN_INT (bits & 0x00ff);
7319 if (arm_arch_thumb2)
7320 output_asm_insn (\"movw%?\\t%0, %1\", ops);
7322 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
7329 [(set_attr "conds" "unconditional")
7330 (set_attr "type" "load_4,store_4,mov_reg,multiple")
7331 (set_attr "length" "4,4,4,8")
7332 (set_attr "predicable" "yes")]
;; SFmode move expander.
7335 (define_expand "movsf"
7336 [(set (match_operand:SF 0 "general_operand")
7337 (match_operand:SF 1 "general_operand"))]
7340 gcc_checking_assert (aligned_operand (operands[0], SFmode));
7341 gcc_checking_assert (aligned_operand (operands[1], SFmode));
7344 if (MEM_P (operands[0]))
7345 operands[1] = force_reg (SFmode, operands[1]);
7347 else /* TARGET_THUMB1 */
7349 if (can_create_pseudo_p ())
7351 if (!REG_P (operands[0]))
7352 operands[1] = force_reg (SFmode, operands[1]);
7356 /* Cannot load it directly, generate a load with clobber so that it can be
7357 loaded via GPR with MOV / MOVT. */
;; With literal pools disabled, route FP constants through the
;; no_literal_pool pattern so MOV/MOVT can synthesize them.
7358 if (arm_disable_literal_pool
7359 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
7360 && CONST_DOUBLE_P (operands[1])
7362 && !vfp3_const_double_rtx (operands[1]))
7364 rtx clobreg = gen_reg_rtx (SFmode);
7365 emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
7372 ;; Transform a floating-point move of a constant into a core register into
7373 ;; an SImode operation.
;; NOTE(review): the define_split header line is missing from this extract.
7375 [(set (match_operand:SF 0 "arm_general_register_operand" "")
7376 (match_operand:SF 1 "immediate_operand" ""))]
7379 && CONST_DOUBLE_P (operands[1])"
7380 [(set (match_dup 2) (match_dup 3))]
7382 operands[2] = gen_lowpart (SImode, operands[0]);
7383 operands[3] = gen_lowpart (SImode, operands[1]);
;; Abort the split if either lowpart could not be formed.
7384 if (operands[2] == 0 || operands[3] == 0)
;; Soft-float SFmode move insn: mov / ldr / str.
7389 (define_insn "*arm_movsf_soft_insn"
7390 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
7391 (match_operand:SF 1 "general_operand" "r,mE,r"))]
7393 && TARGET_SOFT_FLOAT && !TARGET_HAVE_MVE
7394 && (!MEM_P (operands[0])
7395 || register_operand (operands[1], SFmode))"
7397 switch (which_alternative)
7399 case 0: return \"mov%?\\t%0, %1\";
7401 /* Cannot load it directly, split to load it via MOV / MOVT. */
7402 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
7404 return \"ldr%?\\t%0, %1\\t%@ float\";
7405 case 2: return \"str%?\\t%1, %0\\t%@ float\";
7406 default: gcc_unreachable ();
7409 [(set_attr "predicable" "yes")
7410 (set_attr "type" "mov_reg,load_4,store_4")
7411 (set_attr "arm_pool_range" "*,4096,*")
7412 (set_attr "thumb2_pool_range" "*,4094,*")
7413 (set_attr "arm_neg_pool_range" "*,4084,*")
7414 (set_attr "thumb2_neg_pool_range" "*,0,*")]
7417 ;; Splitter for the above.
;; Splits an SF constant move into an SImode immediate move of its target
;; bit pattern (literal pool disabled, soft-float).
7419 [(set (match_operand:SF 0 "s_register_operand")
7420 (match_operand:SF 1 "const_double_operand"))]
7421 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
7425 real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
7426 rtx cst = gen_int_mode (buf, SImode);
7427 emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
;; DFmode move expander; mirrors movsf above for double precision.
7432 (define_expand "movdf"
7433 [(set (match_operand:DF 0 "general_operand")
7434 (match_operand:DF 1 "general_operand"))]
7437 gcc_checking_assert (aligned_operand (operands[0], DFmode));
7438 gcc_checking_assert (aligned_operand (operands[1], DFmode));
7441 if (MEM_P (operands[0]))
7442 operands[1] = force_reg (DFmode, operands[1]);
7444 else /* TARGET_THUMB */
7446 if (can_create_pseudo_p ())
7448 if (!REG_P (operands[0]))
7449 operands[1] = force_reg (DFmode, operands[1]);
7453 /* Cannot load it directly, generate a load with clobber so that it can be
7454 loaded via GPR with MOV / MOVT. */
7455 if (arm_disable_literal_pool
7456 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
7457 && CONSTANT_P (operands[1])
7459 && !arm_const_double_rtx (operands[1])
7460 && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
7462 rtx clobreg = gen_reg_rtx (DFmode);
7463 emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
7470 ;; Reloading a df mode value stored in integer regs to memory can require a
7472 ;; Another reload_out<m> pattern that requires special constraints.
;; Secondary-reload DF store; handles the auto-modify address codes that a
;; plain DImode move cannot, using operand 2 as an address scratch.
7473 (define_expand "reload_outdf"
7474 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
7475 (match_operand:DF 1 "s_register_operand" "r")
7476 (match_operand:SI 2 "s_register_operand" "=&r")]
7480 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
7483 operands[2] = XEXP (operands[0], 0);
;; POST_INC / PRE_DEC addressing is directly expressible as a DImode move.
7484 else if (code == POST_INC || code == PRE_DEC)
7486 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
7487 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
7488 emit_insn (gen_movdi (operands[0], operands[1]));
7491 else if (code == PRE_INC)
7493 rtx reg = XEXP (XEXP (operands[0], 0), 0);
7495 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
7498 else if (code == POST_DEC)
7499 operands[2] = XEXP (XEXP (operands[0], 0), 0);
7501 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
7502 XEXP (XEXP (operands[0], 0), 1)));
7504 emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
;; POST_DEC: undo the decrement applied above after the store.
7507 if (code == POST_DEC)
7508 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
;; Soft-float DFmode move insn over core register pairs / memory.
7514 (define_insn "*movdf_soft_insn"
7515 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
7516 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
7517 "TARGET_32BIT && TARGET_SOFT_FLOAT && !TARGET_HAVE_MVE
7518 && ( register_operand (operands[0], DFmode)
7519 || register_operand (operands[1], DFmode))"
7521 switch (which_alternative)
;; NOTE(review): the case labels before this comment are missing from
;; this extract.
7528 /* Cannot load it directly, split to load it via MOV / MOVT. */
7529 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
7533 return output_move_double (operands, true, NULL);
7536 [(set_attr "length" "8,12,16,8,8")
7537 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
7538 (set_attr "arm_pool_range" "*,*,*,1020,*")
7539 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
7540 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
7541 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
7544 ;; Splitter for the above.
;; Splits a DF constant move into a DImode immediate move of its 64-bit
;; target bit pattern, assembled word-by-word with endian-aware ordering.
7546 [(set (match_operand:DF 0 "s_register_operand")
7547 (match_operand:DF 1 "const_double_operand"))]
7548 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
7552 int order = BYTES_BIG_ENDIAN ? 1 : 0;
7553 real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
7554 unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
7555 ival |= (zext_hwi (buf[1 - order], 32) << 32);
7556 rtx cst = gen_int_mode (ival, DImode);
7557 emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
7563 ;; load- and store-multiple insns
7564 ;; The arm can load/store any set of registers, provided that they are in
7565 ;; ascending order, but these expanders assume a contiguous set.
;; Expand ldm: operand 0 = first destination reg, operand 1 = base memory,
;; operand 2 = register count (2..MAX_LDM_STM_OPS, contiguous, all core
;; regs below pc).
7567 (define_expand "load_multiple"
7568 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7569 (match_operand:SI 1 "" ""))
7570 (use (match_operand:SI 2 "" ""))])]
7573 HOST_WIDE_INT offset = 0;
7575 /* Support only fixed point registers. */
7576 if (!CONST_INT_P (operands[2])
7577 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
7578 || INTVAL (operands[2]) < 2
7579 || !MEM_P (operands[1])
7580 || !REG_P (operands[0])
7581 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
7582 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
;; (FAIL on any violation — the FAIL line is missing from this extract.)
7586 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
7587 INTVAL (operands[2]),
7588 force_reg (SImode, XEXP (operands[1], 0)),
7589 FALSE, operands[1], &offset);
;; Expand stm: mirror of load_multiple with source/dest roles swapped.
7592 (define_expand "store_multiple"
7593 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7594 (match_operand:SI 1 "" ""))
7595 (use (match_operand:SI 2 "" ""))])]
7598 HOST_WIDE_INT offset = 0;
7600 /* Support only fixed point registers. */
7601 if (!CONST_INT_P (operands[2])
7602 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
7603 || INTVAL (operands[2]) < 2
7604 || !REG_P (operands[1])
7605 || !MEM_P (operands[0])
7606 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
7607 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7611 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
7612 INTVAL (operands[2]),
7613 force_reg (SImode, XEXP (operands[0], 0)),
7614 FALSE, operands[0], &offset);
;; memset expander: operand 0 = dest block, 1 = length, 2 = fill value,
;; 3 = alignment; DONE if arm_gen_setmem emits a sequence, else FAIL
;; to the library call (the DONE/FAIL lines are outside this extract).
7618 (define_expand "setmemsi"
7619 [(match_operand:BLK 0 "general_operand")
7620 (match_operand:SI 1 "const_int_operand")
7621 (match_operand:SI 2 "const_int_operand")
7622 (match_operand:SI 3 "const_int_operand")]
7625 if (arm_gen_setmem (operands))
7632 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
7633 ;; We could let this apply for blocks of less than this, but it clobbers so
7634 ;; many registers that there is then probably a better way.
;; memcpy expander: prefers the ldrd/strd sequence when tuned for it and
;; not optimizing for size; Thumb-1 handles only small 4-aligned copies.
7636 (define_expand "cpymemqi"
7637 [(match_operand:BLK 0 "general_operand")
7638 (match_operand:BLK 1 "general_operand")
7639 (match_operand:SI 2 "const_int_operand")
7640 (match_operand:SI 3 "const_int_operand")]
7645 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
7646 && !optimize_function_for_size_p (cfun))
7648 if (gen_cpymem_ldrd_strd (operands))
7653 if (arm_gen_cpymemqi (operands))
7657 else /* TARGET_THUMB1 */
7659 if ( INTVAL (operands[3]) != 4
7660 || INTVAL (operands[2]) > 48)
7663 thumb_expand_cpymemqi (operands);
7670 ;; Compare & branch insns
7671 ;; The range calculations are based as follows:
7672 ;; For forward branches, the address calculation returns the address of
7673 ;; the next instruction. This is 2 beyond the branch instruction.
7674 ;; For backward branches, the address calculation returns the address of
7675 ;; the first instruction in this pattern (cmp). This is 2 before the branch
7676 ;; instruction for the shortest sequence, and 4 before the branch instruction
7677 ;; if we have to jump around an unconditional branch.
7678 ;; To the basic branch range the PC offset must be added (this is +4).
7679 ;; So for forward branches we have
7680 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
7681 ;; And for backward branches we have
7682 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
7684 ;; In 16-bit Thumb these ranges are:
7685 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
7686 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
7688 ;; In 32-bit Thumb these ranges are:
7689 ;; For a 'b' +/- 16MB is not checked for.
7690 ;; For a 'b<cond>' pos_range = 1048574, neg_range = -1048576 giving
7691 ;; (-1048568 -> 1048576).
;; SImode compare-and-branch: validates the comparison, then emits
;; cbranch_cc; Thumb-1 negated-immediate compares use the scratch form.
7693 (define_expand "cbranchsi4"
7694 [(set (pc) (if_then_else
7695 (match_operator 0 "expandable_comparison_operator"
7696 [(match_operand:SI 1 "s_register_operand")
7697 (match_operand:SI 2 "nonmemory_operand")])
7698 (label_ref (match_operand 3 "" ""))
7704 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7706 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7710 if (thumb1_cmpneg_operand (operands[2], SImode))
7712 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
7713 operands[3], operands[0]));
7716 if (!thumb1_cmp_operand (operands[2], SImode))
7717 operands[2] = force_reg (SImode, operands[2]);
;; SFmode compare-and-branch (hard-float only): defer to cbranch_cc.
7720 (define_expand "cbranchsf4"
7721 [(set (pc) (if_then_else
7722 (match_operator 0 "expandable_comparison_operator"
7723 [(match_operand:SF 1 "s_register_operand")
7724 (match_operand:SF 2 "vfp_compare_operand")])
7725 (label_ref (match_operand 3 "" ""))
7727 "TARGET_32BIT && TARGET_HARD_FLOAT"
7728 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7729 operands[3])); DONE;"
;; DFmode variant; requires double-precision VFP.
7732 (define_expand "cbranchdf4"
7733 [(set (pc) (if_then_else
7734 (match_operator 0 "expandable_comparison_operator"
7735 [(match_operand:DF 1 "s_register_operand")
7736 (match_operand:DF 2 "vfp_compare_operand")])
7737 (label_ref (match_operand 3 "" ""))
7739 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7740 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7741 operands[3])); DONE;"
;; DImode variant: validates, then emits cbranch_cc.
7744 (define_expand "cbranchdi4"
7745 [(set (pc) (if_then_else
7746 (match_operator 0 "expandable_comparison_operator"
7747 [(match_operand:DI 1 "s_register_operand")
7748 (match_operand:DI 2 "reg_or_int_operand")])
7749 (label_ref (match_operand 3 "" ""))
7753 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7755 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7761 ;; Comparison and test insns

;; Basic SImode compare setting the full CC register.  Alternatives
;; cover Thumb-2 narrow forms (constraints "l"/"Py", length 2) and
;; 32-bit forms with register or immediate ("I"/"L") operands.
7763 (define_insn "*arm_cmpsi_insn"
7764 [(set (reg:CC CC_REGNUM)
7765 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
7766 (match_operand:SI 1 "arm_add_operand" "Py,r,r,I,L")))]
7774 [(set_attr "conds" "set")
7775 (set_attr "arch" "t2,t2,any,any,any")
7776 (set_attr "length" "2,2,4,4,4")
7777 (set_attr "predicable" "yes")
7778 (set_attr "predicable_short_it" "yes,yes,yes,no,no")
7779 (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]

;; Compare of a register with a shifted register (reg cmp (reg shift amt)).
;; Second alternative allows a register shift amount, A32 only ("a").
7782 (define_insn "*cmpsi_shiftsi"
7783 [(set (reg:CC CC_REGNUM)
7784 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7785 (match_operator:SI 3 "shift_operator"
7786 [(match_operand:SI 1 "s_register_operand" "r,r")
7787 (match_operand:SI 2 "shift_amount_operand" "M,r")])))]
7790 [(set_attr "conds" "set")
7791 (set_attr "shift" "1")
7792 (set_attr "arch" "32,a")
7793 (set_attr "type" "alus_shift_imm,alus_shift_reg")])

;; As above but with operands swapped; uses CC_SWP mode so later users
;; of the flags know the comparison sense is reversed.
7795 (define_insn "*cmpsi_shiftsi_swp"
7796 [(set (reg:CC_SWP CC_REGNUM)
7797 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7798 [(match_operand:SI 1 "s_register_operand" "r,r")
7799 (match_operand:SI 2 "shift_amount_operand" "M,r")])
7800 (match_operand:SI 0 "s_register_operand" "r,r")))]
7803 [(set_attr "conds" "set")
7804 (set_attr "shift" "1")
7805 (set_attr "arch" "32,a")
7806 (set_attr "type" "alus_shift_imm,alus_shift_reg")])

;; Compare of a negated shifted register against a register, producing
;; only valid Z-flag information (CC_Z mode).
7808 (define_insn "*arm_cmpsi_negshiftsi_si"
7809 [(set (reg:CC_Z CC_REGNUM)
7811 (neg:SI (match_operator:SI 1 "shift_operator"
7812 [(match_operand:SI 2 "s_register_operand" "r,r")
7813 (match_operand:SI 3 "shift_amount_operand" "M,r")]))
7814 (match_operand:SI 0 "s_register_operand" "r,r")))]
7817 [(set_attr "conds" "set")
7818 (set_attr "arch" "32,a")
7819 (set_attr "shift" "2")
7820 (set_attr "type" "alus_shift_imm,alus_shift_reg")
7821 (set_attr "predicable" "yes")]

7824 ; This insn allows redundant compares to be removed by cse, nothing should
7825 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7826 ; is deleted later on. The match_dup will match the mode here, so that
7827 ; mode changes of the condition codes aren't lost by this even though we don't
7828 ; specify what they are.

;; Zero-length placeholder; emits only an assembler comment.
7830 (define_insn "*deleted_compare"
7831 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7833 "\\t%@ deleted compare"
7834 [(set_attr "conds" "set")
7835 (set_attr "length" "0")
7836 (set_attr "type" "no_insn")]
7840 ;; Conditional branch insns

;; cbranch_cc: helper expander used by the cbranch*4 patterns above.
;; Replaces the raw comparison with a compare against the CC register
;; (arm_gen_compare_reg) and rewrites operand 2 to const0_rtx so the
;; branch tests CC against zero.
7842 (define_expand "cbranch_cc"
7844 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7845 (match_operand 2 "" "")])
7846 (label_ref (match_operand 3 "" ""))
7849 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7850 operands[1], operands[2], NULL_RTX);
7851 operands[2] = const0_rtx;"
7855 ;; Patterns to match conditional branch insns.

;; Conditional branch on the CC register.  The C body tracks the
;; ccfsm state machine (conditional-execution conversion) and selects
;; the output based on get_attr_length: 2/4-byte direct b{cond}, or a
;; far-branch sequence via arm_gen_far_branch when out of Thumb-2
;; b{cond} range.  The length attribute below encodes the range checks
;; described in the comment block near the top of this section.
7858 (define_insn "arm_cond_branch"
7860 (if_then_else (match_operator 1 "arm_comparison_operator"
7861 [(match_operand 2 "cc_register" "") (const_int 0)])
7862 (label_ref (match_operand 0 "" ""))
7866 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7868 arm_ccfsm_state += 2;
7871 switch (get_attr_length (insn))
7873 case 2: /* Thumb2 16-bit b{cond}. */
7874 case 4: /* Thumb2 32-bit b{cond} or A32 b{cond}. */
7878 /* Thumb2 b{cond} out of range. Use 16-bit b{cond} and
7879 unconditional branch b. */
7880 default: return arm_gen_far_branch (operands, 0, "Lbcond", "b%D1\t");
7883 [(set_attr "conds" "use")
7884 (set_attr "type" "branch")
7885 (set (attr "length")
7886 (if_then_else (match_test "!TARGET_THUMB2")
7888 ;;Target is not Thumb2, therefore is A32. Generate b{cond}.
7891 ;; Check if target is within 16-bit Thumb2 b{cond} range.
7892 (if_then_else (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7893 (le (minus (match_dup 0) (pc)) (const_int 256)))
7895 ;; Target is Thumb2, within narrow range.
7896 ;; Generate b{cond}.
7899 ;; Check if target is within 32-bit Thumb2 b{cond} range.
7900 (if_then_else (and (ge (minus (match_dup 0) (pc))(const_int -1048568))
7901 (le (minus (match_dup 0) (pc)) (const_int 1048576)))
7903 ;; Target is Thumb2, within wide range.
7906 ;; Target is Thumb2, out of range.
7907 ;; Generate narrow b{cond} and unconditional branch b.

;; Same as arm_cond_branch but the branch is taken when the condition
;; is FALSE (label_ref is in the else arm).  NOTE(review): the far
;; branch here uses "b%d1" (non-inverted) versus "b%D1" above,
;; consistent with the reversed sense of the pattern.
7911 (define_insn "*arm_cond_branch_reversed"
7913 (if_then_else (match_operator 1 "arm_comparison_operator"
7914 [(match_operand 2 "cc_register" "") (const_int 0)])
7916 (label_ref (match_operand 0 "" ""))))]
7919 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7921 arm_ccfsm_state += 2;
7924 switch (get_attr_length (insn))
7926 case 2: /* Thumb2 16-bit b{cond}. */
7927 case 4: /* Thumb2 32-bit b{cond} or A32 b{cond}. */
7931 /* Thumb2 b{cond} out of range. Use 16-bit b{cond} and
7932 unconditional branch b. */
7933 default: return arm_gen_far_branch (operands, 0, "Lbcond", "b%d1\t");
7936 [(set_attr "conds" "use")
7937 (set_attr "type" "branch")
7938 (set (attr "length")
7939 (if_then_else (match_test "!TARGET_THUMB2")
7941 ;;Target is not Thumb2, therefore is A32. Generate b{cond}.
7944 ;; Check if target is within 16-bit Thumb2 b{cond} range.
7945 (if_then_else (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7946 (le (minus (match_dup 0) (pc)) (const_int 256)))
7948 ;; Target is Thumb2, within narrow range.
7949 ;; Generate b{cond}.
7952 ;; Check if target is within 32-bit Thumb2 b{cond} range.
7953 (if_then_else (and (ge (minus (match_dup 0) (pc))(const_int -1048568))
7954 (le (minus (match_dup 0) (pc)) (const_int 1048576)))
7956 ;; Target is Thumb2, within wide range.
7957 ;; Generate b{cond}.
7959 ;; Target is Thumb2, out of range.
7960 ;; Generate narrow b{cond} and unconditional branch b.
;; cstore_cc: helper expander for the cstore*4 patterns.  Like
;; cbranch_cc, it materializes the comparison into the CC register and
;; rewrites the second compare operand to const0_rtx.
7968 (define_expand "cstore_cc"
7969 [(set (match_operand:SI 0 "s_register_operand")
7970 (match_operator:SI 1 "" [(match_operand 2 "" "")
7971 (match_operand 3 "" "")]))]
7973 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7974 operands[2], operands[3], NULL_RTX);
7975 operands[3] = const0_rtx;"

;; Store 0/1 according to a CC-register condition.  Split after reload
;; into an if_then_else (conditional move pair); the commented-out
;; template shows the eventual two-instruction form.
7978 (define_insn_and_split "*mov_scc"
7979 [(set (match_operand:SI 0 "s_register_operand" "=r")
7980 (match_operator:SI 1 "arm_comparison_operator_mode"
7981 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7983 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7986 (if_then_else:SI (match_dup 1)
7990 [(set_attr "conds" "use")
7991 (set_attr "length" "8")
7992 (set_attr "type" "multiple")]

;; Negate a borrow flag (0/-1 result) in a single 4-byte insn.
7995 (define_insn "*negscc_borrow"
7996 [(set (match_operand:SI 0 "s_register_operand" "=r")
7997 (neg:SI (match_operand:SI 1 "arm_borrow_operation" "")))]
8000 [(set_attr "conds" "use")
8001 (set_attr "length" "4")
8002 (set_attr "type" "adc_reg")]

;; Store 0/-1 according to a condition; excluded when the borrow form
;; above applies (see the !arm_borrow_operation guard).
8005 (define_insn_and_split "*mov_negscc"
8006 [(set (match_operand:SI 0 "s_register_operand" "=r")
8007 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
8008 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8009 "TARGET_ARM && !arm_borrow_operation (operands[1], SImode)"
8010 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
8013 (if_then_else:SI (match_dup 1)
8017 operands[3] = GEN_INT (~0);
8019 [(set_attr "conds" "use")
8020 (set_attr "length" "8")
8021 (set_attr "type" "multiple")]

;; Store ~1/~0 according to a condition (bitwise NOT of the scc
;; result); split into a conditional move of GEN_INT(~1)/GEN_INT(~0).
8024 (define_insn_and_split "*mov_notscc"
8025 [(set (match_operand:SI 0 "s_register_operand" "=r")
8026 (not:SI (match_operator:SI 1 "arm_comparison_operator"
8027 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8029 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
8032 (if_then_else:SI (match_dup 1)
8036 operands[3] = GEN_INT (~1);
8037 operands[4] = GEN_INT (~0);
8039 [(set_attr "conds" "use")
8040 (set_attr "length" "8")
8041 (set_attr "type" "multiple")]
;; cstoresi4: set operand 0 to the 0/1 result of an SImode comparison.
;; For 32-bit targets this delegates to cstore_cc.  For Thumb-1 there
;; is no direct scc instruction, so each comparison code is open-coded
;; below with shift/add/subtract identities (e.g. NE via
;; (x | (x-1)) >> 31, GE/LT via sign-bit arithmetic, LTU/GEU via the
;; addgeu helper).  NOTE(review): several control-flow lines (switch
;; cases, braces) are elided in this excerpt; the comments describe
;; only the visible expansion steps.
8044 (define_expand "cstoresi4"
8045 [(set (match_operand:SI 0 "s_register_operand")
8046 (match_operator:SI 1 "expandable_comparison_operator"
8047 [(match_operand:SI 2 "s_register_operand")
8048 (match_operand:SI 3 "reg_or_int_operand")]))]
8049 "TARGET_32BIT || TARGET_THUMB1"
8051 rtx op3, scratch, scratch2;
8055 if (!arm_add_operand (operands[3], SImode))
8056 operands[3] = force_reg (SImode, operands[3]);
8057 emit_insn (gen_cstore_cc (operands[0], operands[1],
8058 operands[2], operands[3]));
8062 if (operands[3] == const0_rtx)
8064 switch (GET_CODE (operands[1]))
8067 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
8071 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
8075 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
8076 NULL_RTX, 0, OPTAB_WIDEN);
8077 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
8078 NULL_RTX, 0, OPTAB_WIDEN);
8079 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8080 operands[0], 1, OPTAB_WIDEN);
8084 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
8086 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8087 NULL_RTX, 1, OPTAB_WIDEN);
8091 scratch = expand_binop (SImode, ashr_optab, operands[2],
8092 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
8093 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
8094 NULL_RTX, 0, OPTAB_WIDEN);
8095 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
8099 /* LT is handled by generic code. No need for unsigned with 0. */
8106 switch (GET_CODE (operands[1]))
8109 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8110 NULL_RTX, 0, OPTAB_WIDEN);
8111 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
8115 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8116 NULL_RTX, 0, OPTAB_WIDEN);
8117 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
8121 op3 = force_reg (SImode, operands[3]);
8123 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
8124 NULL_RTX, 1, OPTAB_WIDEN);
8125 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
8126 NULL_RTX, 0, OPTAB_WIDEN);
8127 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8133 if (!thumb1_cmp_operand (op3, SImode))
8134 op3 = force_reg (SImode, op3);
8135 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8136 NULL_RTX, 0, OPTAB_WIDEN);
8137 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8138 NULL_RTX, 1, OPTAB_WIDEN);
8139 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8144 op3 = force_reg (SImode, operands[3]);
8145 scratch = force_reg (SImode, const0_rtx);
8146 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8152 if (!thumb1_cmp_operand (op3, SImode))
8153 op3 = force_reg (SImode, op3);
8154 scratch = force_reg (SImode, const0_rtx);
8155 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8161 if (!thumb1_cmp_operand (op3, SImode))
8162 op3 = force_reg (SImode, op3);
8163 scratch = gen_reg_rtx (SImode);
8164 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
8168 op3 = force_reg (SImode, operands[3]);
8169 scratch = gen_reg_rtx (SImode);
8170 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
8173 /* No good sequences for GT, LT. */
;; Floating-point and DImode cstore expanders; all funnel into
;; cstore_cc after validizing the comparison where needed.

;; cstorehf4: HFmode compare-to-0/1; requires the FP16 VFP extension.
8180 (define_expand "cstorehf4"
8181 [(set (match_operand:SI 0 "s_register_operand")
8182 (match_operator:SI 1 "expandable_comparison_operator"
8183 [(match_operand:HF 2 "s_register_operand")
8184 (match_operand:HF 3 "vfp_compare_operand")]))]
8185 "TARGET_VFP_FP16INST"
8187 if (!arm_validize_comparison (&operands[1],
8192 emit_insn (gen_cstore_cc (operands[0], operands[1],
8193 operands[2], operands[3]));

;; cstoresf4: SFmode variant; hard-float only.
8198 (define_expand "cstoresf4"
8199 [(set (match_operand:SI 0 "s_register_operand")
8200 (match_operator:SI 1 "expandable_comparison_operator"
8201 [(match_operand:SF 2 "s_register_operand")
8202 (match_operand:SF 3 "vfp_compare_operand")]))]
8203 "TARGET_32BIT && TARGET_HARD_FLOAT"
8204 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8205 operands[2], operands[3])); DONE;"

;; cstoredf4: DFmode variant; needs double-precision VFP.
8208 (define_expand "cstoredf4"
8209 [(set (match_operand:SI 0 "s_register_operand")
8210 (match_operator:SI 1 "expandable_comparison_operator"
8211 [(match_operand:DF 2 "s_register_operand")
8212 (match_operand:DF 3 "vfp_compare_operand")]))]
8213 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
8214 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8215 operands[2], operands[3])); DONE;"

;; cstoredi4: DImode variant; validizes the comparison first.
8218 (define_expand "cstoredi4"
8219 [(set (match_operand:SI 0 "s_register_operand")
8220 (match_operator:SI 1 "expandable_comparison_operator"
8221 [(match_operand:DI 2 "s_register_operand")
8222 (match_operand:DI 3 "reg_or_int_operand")]))]
8225 if (!arm_validize_comparison (&operands[1],
8229 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
8236 ;; Conditional move insns

;; movsicc / mov{hf,sf,df}cc: conditional-move expanders.  Each
;; validizes the comparison, materializes it into a CC register with
;; arm_gen_compare_reg, and rewrites operand 1 into (code CC 0) so the
;; cmov insn patterns below can match.
8238 (define_expand "movsicc"
8239 [(set (match_operand:SI 0 "s_register_operand")
8240 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
8241 (match_operand:SI 2 "arm_not_operand")
8242 (match_operand:SI 3 "arm_not_operand")))]
8249 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8250 &XEXP (operands[1], 1)))
8253 code = GET_CODE (operands[1]);
8254 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8255 XEXP (operands[1], 1), NULL_RTX);
8256 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);

;; HFmode conditional move; requires FP16 VFP instructions.
8260 (define_expand "movhfcc"
8261 [(set (match_operand:HF 0 "s_register_operand")
8262 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
8263 (match_operand:HF 2 "s_register_operand")
8264 (match_operand:HF 3 "s_register_operand")))]
8265 "TARGET_VFP_FP16INST"
8268 enum rtx_code code = GET_CODE (operands[1]);
8271 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8272 &XEXP (operands[1], 1)))
8275 code = GET_CODE (operands[1]);
8276 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8277 XEXP (operands[1], 1), NULL_RTX);
8278 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);

;; SFmode conditional move; hard-float only.
8282 (define_expand "movsfcc"
8283 [(set (match_operand:SF 0 "s_register_operand")
8284 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
8285 (match_operand:SF 2 "s_register_operand")
8286 (match_operand:SF 3 "s_register_operand")))]
8287 "TARGET_32BIT && TARGET_HARD_FLOAT"
8290 enum rtx_code code = GET_CODE (operands[1]);
8293 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8294 &XEXP (operands[1], 1)))
8297 code = GET_CODE (operands[1]);
8298 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8299 XEXP (operands[1], 1), NULL_RTX);
8300 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);

;; DFmode conditional move; needs double-precision VFP.
8304 (define_expand "movdfcc"
8305 [(set (match_operand:DF 0 "s_register_operand")
8306 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
8307 (match_operand:DF 2 "s_register_operand")
8308 (match_operand:DF 3 "s_register_operand")))]
8309 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
8312 enum rtx_code code = GET_CODE (operands[1]);
8315 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8316 &XEXP (operands[1], 1)))
8318 code = GET_CODE (operands[1]);
8319 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8320 XEXP (operands[1], 1), NULL_RTX);
8321 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; VSEL-based conditional moves for SF/DF (mode iterator SDF) on
;; ARMv8 VFP (TARGET_VFP5).  The C body picks the direct or inverted
;; vsel form depending on whether the condition code is directly
;; representable (maybe_get_arm_condition_code).
8325 (define_insn "*cmov<mode>"
8326 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
8327 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
8328 [(match_operand 2 "cc_register" "") (const_int 0)])
8329 (match_operand:SDF 3 "s_register_operand"
8331 (match_operand:SDF 4 "s_register_operand"
8332 "<F_constraint>")))]
8333 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
8336 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
8343 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
8348 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
8354 [(set_attr "conds" "use")
8355 (set_attr "type" "fcsel")]

;; HFmode VSEL conditional move; same direct/inverted selection logic.
8358 (define_insn "*cmovhf"
8359 [(set (match_operand:HF 0 "s_register_operand" "=t")
8360 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
8361 [(match_operand 2 "cc_register" "") (const_int 0)])
8362 (match_operand:HF 3 "s_register_operand" "t")
8363 (match_operand:HF 4 "s_register_operand" "t")))]
8364 "TARGET_VFP_FP16INST"
8367 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
8374 return \"vsel%d1.f16\\t%0, %3, %4\";
8379 return \"vsel%D1.f16\\t%0, %4, %3\";
8385 [(set_attr "conds" "use")
8386 (set_attr "type" "fcsel")]
;; SImode conditional move matched after movsicc.  Eight alternatives
;; cover combinations of register/immediate (I) and inverted-immediate
;; (K) operands; alternatives 4-7 need two conditional instructions
;; (length 8) and are split after reload into a pair of COND_EXEC
;; sets, the second predicated on the reversed condition.
8389 (define_insn_and_split "*movsicc_insn"
8390 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8392 (match_operator 3 "arm_comparison_operator"
8393 [(match_operand 4 "cc_register" "") (const_int 0)])
8394 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8395 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8406 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8407 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8408 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8409 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8410 "&& reload_completed"
8413 enum rtx_code rev_code;
8417 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
8419 gen_rtx_SET (operands[0], operands[1])));
8421 rev_code = GET_CODE (operands[3]);
8422 mode = GET_MODE (operands[4]);
8423 if (mode == CCFPmode || mode == CCFPEmode)
8424 rev_code = reverse_condition_maybe_unordered (rev_code);
8426 rev_code = reverse_condition (rev_code);
8428 rev_cond = gen_rtx_fmt_ee (rev_code,
8432 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
8434 gen_rtx_SET (operands[0], operands[2])));
8437 [(set_attr "length" "4,4,4,4,8,8,8,8")
8438 (set_attr "conds" "use")
8439 (set_attr_alternative "type"
8440 [(if_then_else (match_operand 2 "const_int_operand" "")
8441 (const_string "mov_imm")
8442 (const_string "mov_reg"))
8443 (const_string "mvn_imm")
8444 (if_then_else (match_operand 1 "const_int_operand" "")
8445 (const_string "mov_imm")
8446 (const_string "mov_reg"))
8447 (const_string "mvn_imm")
8448 (const_string "multiple")
8449 (const_string "multiple")
8450 (const_string "multiple")
8451 (const_string "multiple")])]

;; Soft-float SFmode conditional move: the SF values live in core
;; registers, so a plain conditional mov suffices.
8454 (define_insn "*movsfcc_soft_insn"
8455 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8456 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8457 [(match_operand 4 "cc_register" "") (const_int 0)])
8458 (match_operand:SF 1 "s_register_operand" "0,r")
8459 (match_operand:SF 2 "s_register_operand" "r,0")))]
8460 "TARGET_ARM && TARGET_SOFT_FLOAT"
8464 [(set_attr "conds" "use")
8465 (set_attr "type" "mov_reg")]
8469 ;; Jump and linkage insns

;; Generic unconditional jump expander.
8471 (define_expand "jump"
8473 (label_ref (match_operand 0 "" "")))]

;; Unconditional branch.  The ccfsm check cooperates with conditional
;; execution conversion; the length attr picks the 2-byte Thumb-2 "b"
;; when the target is within +/-2KB, else the 4-byte form.
8478 (define_insn "*arm_jump"
8480 (label_ref (match_operand 0 "" "")))]
8484 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8486 arm_ccfsm_state += 2;
8489 return \"b%?\\t%l0\";
8492 [(set_attr "predicable" "yes")
8493 (set (attr "length")
8495 (and (match_test "TARGET_THUMB2")
8496 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8497 (le (minus (match_dup 0) (pc)) (const_int 2048))))
8500 (set_attr "type" "branch")]
;; call: expander for a void call.  Handles long calls (forcing the
;; callee address into a register), FDPIC function descriptors and r9
;; save/restore, and CMSE non-secure calls, then emits the matching
;; internal pattern.
8503 (define_expand "call"
8504 [(parallel [(call (match_operand 0 "memory_operand")
8505 (match_operand 1 "general_operand"))
8506 (use (match_operand 2 "" ""))
8507 (clobber (reg:SI LR_REGNUM))])]
8512 tree addr = MEM_EXPR (operands[0]);
8514 /* In an untyped call, we can get NULL for operand 2. */
8515 if (operands[2] == NULL_RTX)
8516 operands[2] = const0_rtx;
8518 /* Decide if we should generate indirect calls by loading the
8519 32-bit address of the callee into a register before performing the
8521 callee = XEXP (operands[0], 0);
8522 if (GET_CODE (callee) == SYMBOL_REF
8523 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8525 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8527 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
8528 /* Indirect call: set r9 with FDPIC value of callee. */
8529 XEXP (operands[0], 0)
8530 = arm_load_function_descriptor (XEXP (operands[0], 0));
8532 if (detect_cmse_nonsecure_call (addr))
8534 pat = gen_nonsecure_call_internal (operands[0], operands[1],
8536 emit_call_insn (pat);
8540 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8541 arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
8544 /* Restore FDPIC register (r9) after call. */
8547 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8548 rtx initial_fdpic_reg
8549 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
8551 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
8552 initial_fdpic_reg));

;; Reload r9 (FDPIC register) from its entry value after a call; the
;; unspec ties the old value in so the restore is not deleted as dead.
8559 (define_insn "restore_pic_register_after_call"
8560 [(set (match_operand:SI 0 "s_register_operand" "+r,r")
8561 (unspec:SI [(match_dup 0)
8562 (match_operand:SI 1 "nonimmediate_operand" "r,m")]
8563 UNSPEC_PIC_RESTORE))]

;; Internal call pattern matched by the insns below.
8570 (define_expand "call_internal"
8571 [(parallel [(call (match_operand 0 "memory_operand")
8572 (match_operand 1 "general_operand"))
8573 (use (match_operand 2 "" ""))
8574 (clobber (reg:SI LR_REGNUM))])])

;; CMSE non-secure call: wraps the callee address in
;; UNSPEC_NONSECURE_MEM; without FPCXT the address is forced into r4
;; as required by the non-secure call sequence.
8576 (define_expand "nonsecure_call_internal"
8577 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
8578 UNSPEC_NONSECURE_MEM)
8579 (match_operand 1 "general_operand"))
8580 (use (match_operand 2 "" ""))
8581 (clobber (reg:SI LR_REGNUM))])]
8585 if (!TARGET_HAVE_FPCXT_CMSE)
8588 copy_to_suggested_reg (XEXP (operands[0], 0),
8589 gen_rtx_REG (SImode, R4_REGNUM),
8592 operands[0] = replace_equiv_address (operands[0], tmp);
;; Indirect call through a register on ARMv5T+ (single blx).
8596 (define_insn "*call_reg_armv5"
8597 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8598 (match_operand 1 "" ""))
8599 (use (match_operand 2 "" ""))
8600 (clobber (reg:SI LR_REGNUM))]
8601 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
8603 [(set_attr "type" "call")]

;; Pre-ARMv5T indirect call; output_call emits the mov-lr/branch
;; sequence, hence the 12-byte worst-case length.
8606 (define_insn "*call_reg_arm"
8607 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8608 (match_operand 1 "" ""))
8609 (use (match_operand 2 "" ""))
8610 (clobber (reg:SI LR_REGNUM))]
8611 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
8613 return output_call (operands);
8615 ;; length is worst case, normally it is only two
8616 [(set_attr "length" "12")
8617 (set_attr "type" "call")]
;; call_value: as the "call" expander above but for calls returning a
;; value (operand 0).  Mirrors the long-call, FDPIC and CMSE handling.
8621 (define_expand "call_value"
8622 [(parallel [(set (match_operand 0 "" "")
8623 (call (match_operand 1 "memory_operand")
8624 (match_operand 2 "general_operand")))
8625 (use (match_operand 3 "" ""))
8626 (clobber (reg:SI LR_REGNUM))])]
8631 tree addr = MEM_EXPR (operands[1]);
8633 /* In an untyped call, we can get NULL for operand 2. */
8634 if (operands[3] == 0)
8635 operands[3] = const0_rtx;
8637 /* Decide if we should generate indirect calls by loading the
8638 32-bit address of the callee into a register before performing the
8640 callee = XEXP (operands[1], 0);
8641 if (GET_CODE (callee) == SYMBOL_REF
8642 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8644 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8646 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
8647 /* Indirect call: set r9 with FDPIC value of callee. */
8648 XEXP (operands[1], 0)
8649 = arm_load_function_descriptor (XEXP (operands[1], 0));
8651 if (detect_cmse_nonsecure_call (addr))
8653 pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
8654 operands[2], operands[3]);
8655 emit_call_insn (pat);
8659 pat = gen_call_value_internal (operands[0], operands[1],
8660 operands[2], operands[3]);
8661 arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
8664 /* Restore FDPIC register (r9) after call. */
8667 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8668 rtx initial_fdpic_reg
8669 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
8671 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
8672 initial_fdpic_reg));

;; Internal value-returning call pattern.
8679 (define_expand "call_value_internal"
8680 [(parallel [(set (match_operand 0 "" "")
8681 (call (match_operand 1 "memory_operand")
8682 (match_operand 2 "general_operand")))
8683 (use (match_operand 3 "" ""))
8684 (clobber (reg:SI LR_REGNUM))])])

;; CMSE non-secure value-returning call; see nonsecure_call_internal.
8686 (define_expand "nonsecure_call_value_internal"
8687 [(parallel [(set (match_operand 0 "" "")
8688 (call (unspec:SI [(match_operand 1 "memory_operand")]
8689 UNSPEC_NONSECURE_MEM)
8690 (match_operand 2 "general_operand")))
8691 (use (match_operand 3 "" ""))
8692 (clobber (reg:SI LR_REGNUM))])]
8696 if (!TARGET_HAVE_FPCXT_CMSE)
8699 copy_to_suggested_reg (XEXP (operands[1], 0),
8700 gen_rtx_REG (SImode, R4_REGNUM),
8703 operands[1] = replace_equiv_address (operands[1], tmp);
;; Value-returning indirect call through a register, ARMv5T+ (blx).
8707 (define_insn "*call_value_reg_armv5"
8708 [(set (match_operand 0 "" "")
8709 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8710 (match_operand 2 "" "")))
8711 (use (match_operand 3 "" ""))
8712 (clobber (reg:SI LR_REGNUM))]
8713 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
8715 [(set_attr "type" "call")]

;; Pre-ARMv5T value-returning indirect call; note output_call is
;; passed &operands[1] so the callee address is its operand 0.
8718 (define_insn "*call_value_reg_arm"
8719 [(set (match_operand 0 "" "")
8720 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8721 (match_operand 2 "" "")))
8722 (use (match_operand 3 "" ""))
8723 (clobber (reg:SI LR_REGNUM))]
8724 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
8726 return output_call (&operands[1]);
8728 [(set_attr "length" "12")
8729 (set_attr "type" "call")]
8732 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8733 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.

;; Direct call to a symbol.  Uses blx to switch instruction set when
;; the callee is local and compiled in the other mode (arm_change_mode_p),
;; otherwise a plain bl, with (PLT) suffix when PLT relocs are needed.
8735 (define_insn "*call_symbol"
8736 [(call (mem:SI (match_operand:SI 0 "" ""))
8737 (match_operand 1 "" ""))
8738 (use (match_operand 2 "" ""))
8739 (clobber (reg:SI LR_REGNUM))]
8741 && !SIBLING_CALL_P (insn)
8742 && (GET_CODE (operands[0]) == SYMBOL_REF)
8743 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8746 rtx op = operands[0];
8748 /* Switch mode now when possible. */
8749 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8750 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8751 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
8753 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8755 [(set_attr "type" "call")]

;; Value-returning direct call to a symbol; same mode-switch logic.
8758 (define_insn "*call_value_symbol"
8759 [(set (match_operand 0 "" "")
8760 (call (mem:SI (match_operand:SI 1 "" ""))
8761 (match_operand:SI 2 "" "")))
8762 (use (match_operand 3 "" ""))
8763 (clobber (reg:SI LR_REGNUM))]
8765 && !SIBLING_CALL_P (insn)
8766 && (GET_CODE (operands[1]) == SYMBOL_REF)
8767 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8770 rtx op = operands[1];
8772 /* Switch mode now when possible. */
8773 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8774 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8775 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
8777 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8779 [(set_attr "type" "call")]
;; Internal sibling-call pattern (no LR clobber — the call reuses the
;; caller's return).
8782 (define_expand "sibcall_internal"
8783 [(parallel [(call (match_operand 0 "memory_operand")
8784 (match_operand 1 "general_operand"))
8786 (use (match_operand 2 "" ""))])])

8788 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
;; sibcall: tail-call expander.  Forces non-symbol / long-call callee
;; addresses into a register, then emits sibcall_internal through
;; arm_emit_call_insn with sibcall=true.
8789 (define_expand "sibcall"
8790 [(parallel [(call (match_operand 0 "memory_operand")
8791 (match_operand 1 "general_operand"))
8793 (use (match_operand 2 "" ""))])]
8799 if ((!REG_P (XEXP (operands[0], 0))
8800 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
8801 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
8802 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
8803 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
8805 if (operands[2] == NULL_RTX)
8806 operands[2] = const0_rtx;
8808 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
8809 arm_emit_call_insn (pat, operands[0], true);

;; Internal value-returning sibling-call pattern.
8814 (define_expand "sibcall_value_internal"
8815 [(parallel [(set (match_operand 0 "" "")
8816 (call (match_operand 1 "memory_operand")
8817 (match_operand 2 "general_operand")))
8819 (use (match_operand 3 "" ""))])])

;; sibcall_value: value-returning tail call; mirrors "sibcall".
8821 (define_expand "sibcall_value"
8822 [(parallel [(set (match_operand 0 "" "")
8823 (call (match_operand 1 "memory_operand")
8824 (match_operand 2 "general_operand")))
8826 (use (match_operand 3 "" ""))])]
8832 if ((!REG_P (XEXP (operands[1], 0))
8833 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
8834 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
8835 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
8836 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
8838 if (operands[3] == NULL_RTX)
8839 operands[3] = const0_rtx;
8841 pat = gen_sibcall_value_internal (operands[0], operands[1],
8842 operands[2], operands[3]);
8843 arm_emit_call_insn (pat, operands[1], true);
;; Sibling call: direct ("US" alternative, plain b) or indirect
;; ("Cs", bx on v4T/v5T+ else mov pc).
8848 (define_insn "*sibcall_insn"
8849 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
8850 (match_operand 1 "" ""))
8852 (use (match_operand 2 "" ""))]
8853 "TARGET_32BIT && SIBLING_CALL_P (insn)"
8855 if (which_alternative == 1)
8856 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8859 if (arm_arch5t || arm_arch4t)
8860 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
8862 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
8865 [(set_attr "type" "call")]

;; Value-returning sibling call; same alternatives as above.
8868 (define_insn "*sibcall_value_insn"
8869 [(set (match_operand 0 "" "")
8870 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
8871 (match_operand 2 "" "")))
8873 (use (match_operand 3 "" ""))]
8874 "TARGET_32BIT && SIBLING_CALL_P (insn)"
8876 if (which_alternative == 1)
8877 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8880 if (arm_arch5t || arm_arch4t)
8881 return \"bx%?\\t%1\";
8883 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
8886 [(set_attr "type" "call")]
;; return/simple_return expander (iterator <return_str>).  For Thumb-2
;; it delegates to thumb2_expand_return; ARM falls through to the
;; patterns below.  Stack-aligned Thumb-2 functions are excluded.
8889 (define_expand "<return_str>return"
8891 "(TARGET_ARM || (TARGET_THUMB2
8892 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
8893 && !IS_STACKALIGN (arm_current_func_type ())))
8894 <return_cond_false>"
8899 thumb2_expand_return (<return_simple_p>);

8906 ;; Often the return insn will be the same as loading from memory, so set attr

;; Unconditional ARM return; output_return_instruction emits the
;; appropriate epilogue instruction (hence type load_4 / length 12
;; worst case).
8907 (define_insn "*arm_return"
8909 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8912 if (arm_ccfsm_state == 2)
8914 arm_ccfsm_state += 2;
8917 return output_return_instruction (const_true_rtx, true, false, false);
8919 [(set_attr "type" "load_4")
8920 (set_attr "length" "12")
8921 (set_attr "predicable" "yes")]

;; Conditional return when the condition in the CC register holds.
8924 (define_insn "*cond_<return_str>return"
8926 (if_then_else (match_operator 0 "arm_comparison_operator"
8927 [(match_operand 1 "cc_register" "") (const_int 0)])
8930 "TARGET_ARM <return_cond_true>"
8933 if (arm_ccfsm_state == 2)
8935 arm_ccfsm_state += 2;
8938 return output_return_instruction (operands[0], true, false,
8941 [(set_attr "conds" "use")
8942 (set_attr "length" "12")
8943 (set_attr "type" "load_4")]

;; As above but returns when the condition does NOT hold (note the
;; inverted=true argument to output_return_instruction).
8946 (define_insn "*cond_<return_str>return_inverted"
8948 (if_then_else (match_operator 0 "arm_comparison_operator"
8949 [(match_operand 1 "cc_register" "") (const_int 0)])
8952 "TARGET_ARM <return_cond_true>"
8955 if (arm_ccfsm_state == 2)
8957 arm_ccfsm_state += 2;
8960 return output_return_instruction (operands[0], true, true,
8963 [(set_attr "conds" "use")
8964 (set_attr "length" "12")
8965 (set_attr "type" "load_4")]

;; Simple return (no epilogue restores needed); single 4-byte branch.
8968 (define_insn "*arm_simple_return"
8973 if (arm_ccfsm_state == 2)
8975 arm_ccfsm_state += 2;
8978 return output_return_instruction (const_true_rtx, true, false, true);
8980 [(set_attr "type" "branch")
8981 (set_attr "length" "4")
8982 (set_attr "predicable" "yes")]
8985 ;; Generate a sequence of instructions to determine if the processor is
8986 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8989 (define_expand "return_addr_mask"
8991 (compare:CC_NZ (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8993 (set (match_operand:SI 0 "s_register_operand")
8994 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8996 (const_int 67108860)))] ; 0x03fffffc
8999 operands[1] = gen_rtx_REG (CC_NZmode, CC_REGNUM);
;; Probe the execution mode for return_addr_mask above: the classic
;; 'teq pc, pc' idiom sets the condition codes according to whether the
;; PC reads back clean (32-bit mode) or with PSR bits folded in (26-bit
;; mode).  The expander above then selects the address mask on EQ.
;; NOTE(review): interior lines of this pattern are missing from this
;; extract; the visible lines are kept byte-identical.
9002 (define_insn "*check_arch2"
9003 [(set (match_operand:CC_NZ 0 "cc_register" "")
9004 (compare:CC_NZ (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
9007 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
9008 [(set_attr "length" "8")
9009 (set_attr "conds" "set")
9010 (set_attr "type" "multiple")]
9013 ;; Call subroutine returning any type.
9015 (define_expand "untyped_call"
9016 [(parallel [(call (match_operand 0 "" "")
9018 (match_operand 1 "" "")
9019 (match_operand 2 "" "")])]
9020 "TARGET_EITHER && !TARGET_FDPIC"
9024 rtx par = gen_rtx_PARALLEL (VOIDmode,
9025 rtvec_alloc (XVECLEN (operands[2], 0)));
9026 rtx addr = gen_reg_rtx (Pmode);
9030 emit_move_insn (addr, XEXP (operands[1], 0));
9031 mem = change_address (operands[1], BLKmode, addr);
9033 for (i = 0; i < XVECLEN (operands[2], 0); i++)
9035 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
9037 /* Default code only uses r0 as a return value, but we could
9038 be using anything up to 4 registers. */
9039 if (REGNO (src) == R0_REGNUM)
9040 src = gen_rtx_REG (TImode, R0_REGNUM);
9042 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
9044 size += GET_MODE_SIZE (GET_MODE (src));
9047 emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
9051 for (i = 0; i < XVECLEN (par, 0); i++)
9053 HOST_WIDE_INT offset = 0;
9054 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
9057 emit_move_insn (addr, plus_constant (Pmode, addr, size));
9059 mem = change_address (mem, GET_MODE (reg), NULL);
9060 if (REGNO (reg) == R0_REGNUM)
9062 /* On thumb we have to use a write-back instruction. */
9063 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
9064 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
9065 size = TARGET_ARM ? 16 : 0;
9069 emit_move_insn (mem, reg);
9070 size = GET_MODE_SIZE (GET_MODE (reg));
9074 /* The optimizer does not know that the call sets the function value
9075 registers we stored in the result block. We avoid problems by
9076 claiming that all hard registers are used and clobbered at this
9078 emit_insn (gen_blockage ());
9084 (define_expand "untyped_return"
9085 [(match_operand:BLK 0 "memory_operand")
9086 (match_operand 1 "" "")]
9087 "TARGET_EITHER && !TARGET_FDPIC"
9091 rtx addr = gen_reg_rtx (Pmode);
9095 emit_move_insn (addr, XEXP (operands[0], 0));
9096 mem = change_address (operands[0], BLKmode, addr);
9098 for (i = 0; i < XVECLEN (operands[1], 0); i++)
9100 HOST_WIDE_INT offset = 0;
9101 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
9104 emit_move_insn (addr, plus_constant (Pmode, addr, size));
9106 mem = change_address (mem, GET_MODE (reg), NULL);
9107 if (REGNO (reg) == R0_REGNUM)
9109 /* On thumb we have to use a write-back instruction. */
9110 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
9111 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
9112 size = TARGET_ARM ? 16 : 0;
9116 emit_move_insn (reg, mem);
9117 size = GET_MODE_SIZE (GET_MODE (reg));
9121 /* Emit USE insns before the return. */
9122 for (i = 0; i < XVECLEN (operands[1], 0); i++)
9123 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
9125 /* Construct the return. */
9126 expand_naked_return ();
9132 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
9133 ;; all of memory. This blocks insns from being moved across this point.
;; Scheduling/motion barrier: an unspec_volatile that emits no code at
;; all (length 0) but, per the comment above, is treated as using and
;; clobbering all hard registers and memory, so nothing moves across it.
9135 (define_insn "blockage"
9136 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
9139 [(set_attr "length" "0")
9140 (set_attr "type" "block")]
9143 ;; Since we hard-code r0 here, use the 'o' constraint to avoid
9144 ;; provoking undefined behaviour in the hardware by emitting
9145 ;; auto-increment operations with potentially r0 as the base register.
;; Single stack probe: a store whose value is wrapped in
;; UNSPEC_PROBE_STACK so the dead-store eliminators cannot remove it.
;; NOTE(review): the condition and output-template lines are missing
;; from this extract; visible lines kept byte-identical.
9146 (define_insn "probe_stack"
9147 [(set (match_operand:SI 0 "memory_operand" "=o")
9148 (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
9151 [(set_attr "type" "store_4")
9152 (set_attr "predicable" "yes")]
9155 (define_insn "probe_stack_range"
9156 [(set (match_operand:SI 0 "register_operand" "=r")
9157 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
9158 (match_operand:SI 2 "register_operand" "r")]
9159 VUNSPEC_PROBE_STACK_RANGE))]
9162 return output_probe_stack_range (operands[0], operands[2]);
9164 [(set_attr "type" "multiple")
9165 (set_attr "conds" "clob")]
9168 ;; Named patterns for stack smashing protection.
9169 (define_expand "stack_protect_combined_set"
9171 [(set (match_operand:SI 0 "memory_operand")
9172 (unspec:SI [(match_operand:SI 1 "guard_operand")]
9174 (clobber (match_scratch:SI 2 ""))
9175 (clobber (match_scratch:SI 3 ""))])]
9180 ;; Use a separate insn from the above expand to be able to have the mem outside
9181 ;; the operand #1 when register allocation comes. This is needed to keep LRA
9182 ;; from trying to reload the guard since we need to control how PIC access is done in
9183 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
9184 ;; legitimize_pic_address ()).
9185 (define_insn_and_split "*stack_protect_combined_set_insn"
9186 [(set (match_operand:SI 0 "memory_operand" "=m,m")
9187 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
9189 (clobber (match_scratch:SI 2 "=&l,&r"))
9190 (clobber (match_scratch:SI 3 "=&l,&r"))]
9194 [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
9196 (clobber (match_dup 2))])]
9204 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
9206 pic_reg = operands[3];
9208 /* Forces recomputing of GOT base now. */
9209 legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
9210 true /*compute_now*/);
9214 if (address_operand (operands[1], SImode))
9215 operands[2] = operands[1];
9218 rtx mem = force_const_mem (SImode, operands[1]);
9219 emit_move_insn (operands[2], mem);
9223 [(set_attr "arch" "t1,32")]
9226 ;; DO NOT SPLIT THIS INSN. It's important for security reasons that the
9227 ;; canary value does not live beyond the life of this sequence.
9228 (define_insn "*stack_protect_set_insn"
9229 [(set (match_operand:SI 0 "memory_operand" "=m,m")
9230 (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
9232 (clobber (match_dup 1))]
9235 ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
9236 ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
9237 [(set_attr "length" "8,12")
9238 (set_attr "conds" "clob,nocond")
9239 (set_attr "type" "multiple")
9240 (set_attr "arch" "t1,32")]
9243 (define_expand "stack_protect_combined_test"
9247 (eq (match_operand:SI 0 "memory_operand")
9248 (unspec:SI [(match_operand:SI 1 "guard_operand")]
9250 (label_ref (match_operand 2))
9252 (clobber (match_scratch:SI 3 ""))
9253 (clobber (match_scratch:SI 4 ""))
9254 (clobber (reg:CC CC_REGNUM))])]
9259 ;; Use a separate insn from the above expand to be able to have the mem outside
9260 ;; the operand #1 when register allocation comes. This is needed to keep LRA
9261 ;; from trying to reload the guard since we need to control how PIC access is done in
9262 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
9263 ;; legitimize_pic_address ()).
9264 (define_insn_and_split "*stack_protect_combined_test_insn"
9267 (eq (match_operand:SI 0 "memory_operand" "m,m")
9268 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
9270 (label_ref (match_operand 2))
9272 (clobber (match_scratch:SI 3 "=&l,&r"))
9273 (clobber (match_scratch:SI 4 "=&l,&r"))
9274 (clobber (reg:CC CC_REGNUM))]
9287 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
9289 pic_reg = operands[4];
9291 /* Forces recomputing of GOT base now. */
9292 legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
9293 true /*compute_now*/);
9297 if (address_operand (operands[1], SImode))
9298 operands[3] = operands[1];
9301 rtx mem = force_const_mem (SImode, operands[1]);
9302 emit_move_insn (operands[3], mem);
9307 emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
9309 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
9310 eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
9311 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
9315 emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
9317 eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
9318 emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
9323 [(set_attr "arch" "t1,32")]
9326 ;; DO NOT SPLIT THIS PATTERN. It is important for security reasons that the
9327 ;; canary value does not live beyond the end of this sequence.
9328 (define_insn "arm_stack_protect_test_insn"
9329 [(set (reg:CC_Z CC_REGNUM)
9330 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
9331 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
9334 (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
9335 (clobber (match_dup 2))]
9337 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0\;mov\t%2, #0"
9338 [(set_attr "length" "12,16")
9339 (set_attr "conds" "set")
9340 (set_attr "type" "multiple")
9341 (set_attr "arch" "t,32")]
9344 (define_expand "casesi"
9345 [(match_operand:SI 0 "s_register_operand") ; index to jump on
9346 (match_operand:SI 1 "const_int_operand") ; lower bound
9347 (match_operand:SI 2 "const_int_operand") ; total range
9348 (match_operand:SI 3 "" "") ; table label
9349 (match_operand:SI 4 "" "")] ; Out of range label
9350 "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
9353 enum insn_code code;
9354 if (operands[1] != const0_rtx)
9356 rtx reg = gen_reg_rtx (SImode);
9358 emit_insn (gen_addsi3 (reg, operands[0],
9359 gen_int_mode (-INTVAL (operands[1]),
9365 code = CODE_FOR_arm_casesi_internal;
9366 else if (TARGET_THUMB1)
9367 code = CODE_FOR_thumb1_casesi_internal_pic;
9369 code = CODE_FOR_thumb2_casesi_internal_pic;
9371 code = CODE_FOR_thumb2_casesi_internal;
9373 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
9374 operands[2] = force_reg (SImode, operands[2]);
9376 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
9377 operands[3], operands[4]));
9382 ;; The USE in this pattern is needed to tell flow analysis that this is
9383 ;; a CASESI insn. It has no other purpose.
9384 (define_expand "arm_casesi_internal"
9385 [(parallel [(set (pc)
9387 (leu (match_operand:SI 0 "s_register_operand")
9388 (match_operand:SI 1 "arm_rhs_operand"))
9390 (label_ref:SI (match_operand 3 ""))))
9391 (clobber (reg:CC CC_REGNUM))
9392 (use (label_ref:SI (match_operand 2 "")))])]
9395 operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
9396 operands[4] = gen_rtx_PLUS (SImode, operands[4],
9397 gen_rtx_LABEL_REF (SImode, operands[2]));
9398 operands[4] = gen_rtx_MEM (SImode, operands[4]);
9399 MEM_READONLY_P (operands[4]) = 1;
9400 MEM_NOTRAP_P (operands[4]) = 1;
9403 (define_insn "*arm_casesi_internal"
9404 [(parallel [(set (pc)
9406 (leu (match_operand:SI 0 "s_register_operand" "r")
9407 (match_operand:SI 1 "arm_rhs_operand" "rI"))
9408 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
9409 (label_ref:SI (match_operand 2 "" ""))))
9410 (label_ref:SI (match_operand 3 "" ""))))
9411 (clobber (reg:CC CC_REGNUM))
9412 (use (label_ref:SI (match_dup 2)))])]
9416 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
9417 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
9419 [(set_attr "conds" "clob")
9420 (set_attr "length" "12")
9421 (set_attr "type" "multiple")]
9424 (define_expand "indirect_jump"
9426 (match_operand:SI 0 "s_register_operand"))]
9429 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
9430 address and use bx. */
9434 tmp = gen_reg_rtx (SImode);
9435 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
9441 ;; NB Never uses BX.
;; ARM-state indirect jump through a register, emitted as
;; 'mov pc, <reg>' (BX is deliberately not used here, see note above).
9442 (define_insn "*arm_indirect_jump"
9444 (match_operand:SI 0 "s_register_operand" "r"))]
9446 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
9447 [(set_attr "predicable" "yes")
9448 (set_attr "type" "branch")]
;; Indirect jump whose target sits in memory: load the destination
;; address straight into the PC with a single predicable LDR.
9451 (define_insn "*load_indirect_jump"
9453 (match_operand:SI 0 "memory_operand" "m"))]
9455 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
9456 [(set_attr "type" "load_4")
9457 (set_attr "pool_range" "4096")
9458 (set_attr "neg_pool_range" "4084")
9459 (set_attr "predicable" "yes")]
9469 [(set (attr "length")
9470 (if_then_else (eq_attr "is_thumb" "yes")
9473 (set_attr "type" "mov_reg")]
9477 [(trap_if (const_int 1) (const_int 0))]
9481 return \".inst\\t0xe7f000f0\";
9483 return \".inst\\t0xdeff\";
9485 [(set (attr "length")
9486 (if_then_else (eq_attr "is_thumb" "yes")
9489 (set_attr "type" "trap")
9490 (set_attr "conds" "unconditional")]
9494 ;; Patterns to allow combination of arithmetic, cond code and shifts
9496 (define_insn "*<arith_shift_insn>_multsi"
9497 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9499 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
9500 (match_operand:SI 3 "power_of_two_operand" ""))
9501 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
9503 "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
9504 [(set_attr "predicable" "yes")
9505 (set_attr "shift" "2")
9506 (set_attr "arch" "a,t2")
9507 (set_attr "autodetect_type" "alu_shift_mul_op3")])
9509 (define_insn "*<arith_shift_insn>_shiftsi"
9510 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9512 (match_operator:SI 2 "shift_nomul_operator"
9513 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9514 (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
9515 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
9516 "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
9517 "<arith_shift_insn>%?\\t%0, %1, %3%S2"
9518 [(set_attr "predicable" "yes")
9519 (set_attr "shift" "3")
9520 (set_attr "arch" "a,t2,a")
9521 (set_attr "autodetect_type" "alu_shift_operator2")])
9524 [(set (match_operand:SI 0 "s_register_operand" "")
9525 (match_operator:SI 1 "shiftable_operator"
9526 [(match_operator:SI 2 "shiftable_operator"
9527 [(match_operator:SI 3 "shift_operator"
9528 [(match_operand:SI 4 "s_register_operand" "")
9529 (match_operand:SI 5 "reg_or_int_operand" "")])
9530 (match_operand:SI 6 "s_register_operand" "")])
9531 (match_operand:SI 7 "arm_rhs_operand" "")]))
9532 (clobber (match_operand:SI 8 "s_register_operand" ""))]
9535 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9538 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
9541 (define_insn "*arith_shiftsi_compare0"
9542 [(set (reg:CC_NZ CC_REGNUM)
9544 (match_operator:SI 1 "shiftable_operator"
9545 [(match_operator:SI 3 "shift_operator"
9546 [(match_operand:SI 4 "s_register_operand" "r,r")
9547 (match_operand:SI 5 "shift_amount_operand" "M,r")])
9548 (match_operand:SI 2 "s_register_operand" "r,r")])
9550 (set (match_operand:SI 0 "s_register_operand" "=r,r")
9551 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9554 "%i1s%?\\t%0, %2, %4%S3"
9555 [(set_attr "conds" "set")
9556 (set_attr "shift" "4")
9557 (set_attr "arch" "32,a")
9558 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9560 (define_insn "*arith_shiftsi_compare0_scratch"
9561 [(set (reg:CC_NZ CC_REGNUM)
9563 (match_operator:SI 1 "shiftable_operator"
9564 [(match_operator:SI 3 "shift_operator"
9565 [(match_operand:SI 4 "s_register_operand" "r,r")
9566 (match_operand:SI 5 "shift_amount_operand" "M,r")])
9567 (match_operand:SI 2 "s_register_operand" "r,r")])
9569 (clobber (match_scratch:SI 0 "=r,r"))]
9571 "%i1s%?\\t%0, %2, %4%S3"
9572 [(set_attr "conds" "set")
9573 (set_attr "shift" "4")
9574 (set_attr "arch" "32,a")
9575 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; Subtract a shift-modified register from a register in one insn:
;; operand 3 shifted per operator 2 by amount 4 (immediate 'M' or, on
;; ARM-state only, a register), subtracted from operand 1.
;; NOTE(review): the insn condition line is missing from this extract.
9577 (define_insn "*sub_shiftsi"
9578 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9579 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9580 (match_operator:SI 2 "shift_operator"
9581 [(match_operand:SI 3 "s_register_operand" "r,r")
9582 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
9584 "sub%?\\t%0, %1, %3%S2"
9585 [(set_attr "predicable" "yes")
9586 (set_attr "predicable_short_it" "no")
9587 (set_attr "shift" "3")
9588 (set_attr "arch" "32,a")
9589 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
9591 (define_insn "*sub_shiftsi_compare0"
9592 [(set (reg:CC_NZ CC_REGNUM)
9594 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9595 (match_operator:SI 2 "shift_operator"
9596 [(match_operand:SI 3 "s_register_operand" "r,r")
9597 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
9599 (set (match_operand:SI 0 "s_register_operand" "=r,r")
9600 (minus:SI (match_dup 1)
9601 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
9603 "subs%?\\t%0, %1, %3%S2"
9604 [(set_attr "conds" "set")
9605 (set_attr "shift" "3")
9606 (set_attr "arch" "32,a")
9607 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9609 (define_insn "*sub_shiftsi_compare0_scratch"
9610 [(set (reg:CC_NZ CC_REGNUM)
9612 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9613 (match_operator:SI 2 "shift_operator"
9614 [(match_operand:SI 3 "s_register_operand" "r,r")
9615 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
9617 (clobber (match_scratch:SI 0 "=r,r"))]
9619 "subs%?\\t%0, %1, %3%S2"
9620 [(set_attr "conds" "set")
9621 (set_attr "shift" "3")
9622 (set_attr "arch" "32,a")
9623 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9626 (define_insn_and_split "*and_scc"
9627 [(set (match_operand:SI 0 "s_register_operand" "=r")
9628 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9629 [(match_operand 2 "cc_register" "") (const_int 0)])
9630 (match_operand:SI 3 "s_register_operand" "r")))]
9632 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
9633 "&& reload_completed"
9634 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
9635 (cond_exec (match_dup 4) (set (match_dup 0)
9636 (and:SI (match_dup 3) (const_int 1))))]
9638 machine_mode mode = GET_MODE (operands[2]);
9639 enum rtx_code rc = GET_CODE (operands[1]);
9641 /* Note that operands[4] is the same as operands[1],
9642 but with VOIDmode as the result. */
9643 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9644 if (mode == CCFPmode || mode == CCFPEmode)
9645 rc = reverse_condition_maybe_unordered (rc);
9647 rc = reverse_condition (rc);
9648 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9650 [(set_attr "conds" "use")
9651 (set_attr "type" "multiple")
9652 (set_attr "length" "8")]
9655 (define_insn_and_split "*ior_scc"
9656 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9657 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
9658 [(match_operand 2 "cc_register" "") (const_int 0)])
9659 (match_operand:SI 3 "s_register_operand" "0,?r")))]
9664 "&& reload_completed
9665 && REGNO (operands [0]) != REGNO (operands[3])"
9666 ;; && which_alternative == 1
9667 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
9668 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
9669 (cond_exec (match_dup 4) (set (match_dup 0)
9670 (ior:SI (match_dup 3) (const_int 1))))]
9672 machine_mode mode = GET_MODE (operands[2]);
9673 enum rtx_code rc = GET_CODE (operands[1]);
9675 /* Note that operands[4] is the same as operands[1],
9676 but with VOIDmode as the result. */
9677 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9678 if (mode == CCFPmode || mode == CCFPEmode)
9679 rc = reverse_condition_maybe_unordered (rc);
9681 rc = reverse_condition (rc);
9682 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9684 [(set_attr "conds" "use")
9685 (set_attr "length" "4,8")
9686 (set_attr "type" "logic_imm,multiple")]
9689 ; A series of splitters for the compare_scc pattern below. Note that
9690 ; order is important.
9692 [(set (match_operand:SI 0 "s_register_operand" "")
9693 (lt:SI (match_operand:SI 1 "s_register_operand" "")
9695 (clobber (reg:CC CC_REGNUM))]
9696 "TARGET_32BIT && reload_completed"
9697 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
9700 [(set (match_operand:SI 0 "s_register_operand" "")
9701 (ge:SI (match_operand:SI 1 "s_register_operand" "")
9703 (clobber (reg:CC CC_REGNUM))]
9704 "TARGET_32BIT && reload_completed"
9705 [(set (match_dup 0) (not:SI (match_dup 1)))
9706 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
9709 [(set (match_operand:SI 0 "s_register_operand" "")
9710 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9712 (clobber (reg:CC CC_REGNUM))]
9713 "arm_arch5t && TARGET_32BIT"
9714 [(set (match_dup 0) (clz:SI (match_dup 1)))
9715 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9719 [(set (match_operand:SI 0 "s_register_operand" "")
9720 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9722 (clobber (reg:CC CC_REGNUM))]
9723 "TARGET_32BIT && reload_completed"
9725 [(set (reg:CC CC_REGNUM)
9726 (compare:CC (const_int 1) (match_dup 1)))
9728 (minus:SI (const_int 1) (match_dup 1)))])
9729 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
9730 (set (match_dup 0) (const_int 0)))])
9733 [(set (match_operand:SI 0 "s_register_operand" "")
9734 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9735 (match_operand:SI 2 "const_int_operand" "")))
9736 (clobber (reg:CC CC_REGNUM))]
9737 "TARGET_32BIT && reload_completed"
9739 [(set (reg:CC CC_REGNUM)
9740 (compare:CC (match_dup 1) (match_dup 2)))
9741 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
9742 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
9743 (set (match_dup 0) (const_int 1)))]
9745 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
9749 [(set (match_operand:SI 0 "s_register_operand" "")
9750 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9751 (match_operand:SI 2 "arm_add_operand" "")))
9752 (clobber (reg:CC CC_REGNUM))]
9753 "TARGET_32BIT && reload_completed"
9755 [(set (reg:CC_NZ CC_REGNUM)
9756 (compare:CC_NZ (minus:SI (match_dup 1) (match_dup 2))
9758 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
9759 (cond_exec (ne:CC_NZ (reg:CC_NZ CC_REGNUM) (const_int 0))
9760 (set (match_dup 0) (const_int 1)))])
9762 (define_insn_and_split "*compare_scc"
9763 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9764 (match_operator:SI 1 "arm_comparison_operator"
9765 [(match_operand:SI 2 "s_register_operand" "r,r")
9766 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9767 (clobber (reg:CC CC_REGNUM))]
9770 "&& reload_completed"
9771 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
9772 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
9773 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
9776 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
9777 operands[2], operands[3]);
9778 enum rtx_code rc = GET_CODE (operands[1]);
9780 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
9782 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9783 if (mode == CCFPmode || mode == CCFPEmode)
9784 rc = reverse_condition_maybe_unordered (rc);
9786 rc = reverse_condition (rc);
9787 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9789 [(set_attr "type" "multiple")]
9792 ;; Attempt to improve the sequence generated by the compare_scc splitters
9793 ;; not to use conditional execution.
9795 ;; Rd = (eq (reg1) (const_int0)) // ARMv5
9799 [(set (reg:CC CC_REGNUM)
9800 (compare:CC (match_operand:SI 1 "register_operand" "")
9802 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9803 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9804 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9805 (set (match_dup 0) (const_int 1)))]
9806 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9807 [(set (match_dup 0) (clz:SI (match_dup 1)))
9808 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9811 ;; Rd = (eq (reg1) (const_int0)) // !ARMv5
9815 [(set (reg:CC CC_REGNUM)
9816 (compare:CC (match_operand:SI 1 "register_operand" "")
9818 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9819 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9820 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9821 (set (match_dup 0) (const_int 1)))
9822 (match_scratch:SI 2 "r")]
9823 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9825 [(set (reg:CC CC_REGNUM)
9826 (compare:CC (const_int 0) (match_dup 1)))
9827 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
9829 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
9830 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
9833 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
9834 ;; sub Rd, Reg1, reg2
9838 [(set (reg:CC CC_REGNUM)
9839 (compare:CC (match_operand:SI 1 "register_operand" "")
9840 (match_operand:SI 2 "arm_rhs_operand" "")))
9841 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9842 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9843 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9844 (set (match_dup 0) (const_int 1)))]
9845 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
9846 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
9847 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
9848 (set (match_dup 0) (clz:SI (match_dup 0)))
9849 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9853 ;; Rd = (eq (reg1) (reg2)) // ! ARMv5 or optimising for size.
9854 ;; sub T1, Reg1, reg2
9858 [(set (reg:CC CC_REGNUM)
9859 (compare:CC (match_operand:SI 1 "register_operand" "")
9860 (match_operand:SI 2 "arm_rhs_operand" "")))
9861 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9862 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9863 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9864 (set (match_dup 0) (const_int 1)))
9865 (match_scratch:SI 3 "r")]
9866 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9867 [(set (match_dup 3) (match_dup 4))
9869 [(set (reg:CC CC_REGNUM)
9870 (compare:CC (const_int 0) (match_dup 3)))
9871 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
9873 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
9874 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
9876 if (CONST_INT_P (operands[2]))
9877 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
9879 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
9882 (define_insn "*cond_move"
9883 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9884 (if_then_else:SI (match_operator 3 "equality_operator"
9885 [(match_operator 4 "arm_comparison_operator"
9886 [(match_operand 5 "cc_register" "") (const_int 0)])
9888 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9889 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9892 if (GET_CODE (operands[3]) == NE)
9894 if (which_alternative != 1)
9895 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9896 if (which_alternative != 0)
9897 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9900 if (which_alternative != 0)
9901 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9902 if (which_alternative != 1)
9903 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9906 [(set_attr "conds" "use")
9907 (set_attr_alternative "type"
9908 [(if_then_else (match_operand 2 "const_int_operand" "")
9909 (const_string "mov_imm")
9910 (const_string "mov_reg"))
9911 (if_then_else (match_operand 1 "const_int_operand" "")
9912 (const_string "mov_imm")
9913 (const_string "mov_reg"))
9914 (const_string "multiple")])
9915 (set_attr "length" "4,4,8")]
9918 (define_insn "*cond_arith"
9919 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9920 (match_operator:SI 5 "shiftable_operator"
9921 [(match_operator:SI 4 "arm_comparison_operator"
9922 [(match_operand:SI 2 "s_register_operand" "r,r")
9923 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9924 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9925 (clobber (reg:CC CC_REGNUM))]
9928 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9929 return \"%i5\\t%0, %1, %2, lsr #31\";
9931 output_asm_insn (\"cmp\\t%2, %3\", operands);
9932 if (GET_CODE (operands[5]) == AND)
9933 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9934 else if (GET_CODE (operands[5]) == MINUS)
9935 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9936 else if (which_alternative != 0)
9937 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9938 return \"%i5%d4\\t%0, %1, #1\";
9940 [(set_attr "conds" "clob")
9941 (set_attr "length" "12")
9942 (set_attr "type" "multiple")]
9945 (define_insn "*cond_sub"
9946 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9947 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9948 (match_operator:SI 4 "arm_comparison_operator"
9949 [(match_operand:SI 2 "s_register_operand" "r,r")
9950 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9951 (clobber (reg:CC CC_REGNUM))]
9954 output_asm_insn (\"cmp\\t%2, %3\", operands);
9955 if (which_alternative != 0)
9956 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9957 return \"sub%d4\\t%0, %1, #1\";
9959 [(set_attr "conds" "clob")
9960 (set_attr "length" "8,12")
9961 (set_attr "type" "multiple")]
9964 (define_insn "*cmp_ite0"
9965 [(set (match_operand 6 "dominant_cc_register" "")
9968 (match_operator 4 "arm_comparison_operator"
9969 [(match_operand:SI 0 "s_register_operand"
9970 "l,l,l,r,r,r,r,r,r")
9971 (match_operand:SI 1 "arm_add_operand"
9972 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9973 (match_operator:SI 5 "arm_comparison_operator"
9974 [(match_operand:SI 2 "s_register_operand"
9975 "l,r,r,l,l,r,r,r,r")
9976 (match_operand:SI 3 "arm_add_operand"
9977 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9983 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9985 {\"cmp%d5\\t%0, %1\",
9986 \"cmp%d4\\t%2, %3\"},
9987 {\"cmn%d5\\t%0, #%n1\",
9988 \"cmp%d4\\t%2, %3\"},
9989 {\"cmp%d5\\t%0, %1\",
9990 \"cmn%d4\\t%2, #%n3\"},
9991 {\"cmn%d5\\t%0, #%n1\",
9992 \"cmn%d4\\t%2, #%n3\"}
9994 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9999 \"cmn\\t%0, #%n1\"},
10000 {\"cmn\\t%2, #%n3\",
10002 {\"cmn\\t%2, #%n3\",
10003 \"cmn\\t%0, #%n1\"}
10005 static const char * const ite[2] =
10010 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10011 CMP_CMP, CMN_CMP, CMP_CMP,
10012 CMN_CMP, CMP_CMN, CMN_CMN};
10014 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10016 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10017 if (TARGET_THUMB2) {
10018 output_asm_insn (ite[swap], operands);
10020 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10023 [(set_attr "conds" "set")
10024 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10025 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
10026 (set_attr "type" "multiple")
10027 (set_attr_alternative "length"
10033 (if_then_else (eq_attr "is_thumb" "no")
10036 (if_then_else (eq_attr "is_thumb" "no")
10039 (if_then_else (eq_attr "is_thumb" "no")
10042 (if_then_else (eq_attr "is_thumb" "no")
;; NOTE(review): conditional-compare pattern combining two comparisons into a
;; dominant CC result.  Emits a plain cmp/cmn for the first comparison, an IT
;; block when TARGET_THUMB2, then a predicated cmp/cmn for the second; cmp_idx
;; maps the alternative to CMP/CMN template pairs and `swap' (from
;; comparison_dominates_p against the reversed first condition) selects operand
;; order.  This excerpt is elided (embedded line numbers skip), so some pattern
;; text is missing here — do not edit the template without the full source.
10047 (define_insn "*cmp_ite1"
10048 [(set (match_operand 6 "dominant_cc_register" "")
10051 (match_operator 4 "arm_comparison_operator"
10052 [(match_operand:SI 0 "s_register_operand"
10053 "l,l,l,r,r,r,r,r,r")
10054 (match_operand:SI 1 "arm_add_operand"
10055 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10056 (match_operator:SI 5 "arm_comparison_operator"
10057 [(match_operand:SI 2 "s_register_operand"
10058 "l,r,r,l,l,r,r,r,r")
10059 (match_operand:SI 3 "arm_add_operand"
10060 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
10066 static const char * const cmp1[NUM_OF_COND_CMP][2] =
10070 {\"cmn\\t%0, #%n1\",
10073 \"cmn\\t%2, #%n3\"},
10074 {\"cmn\\t%0, #%n1\",
10075 \"cmn\\t%2, #%n3\"}
10077 static const char * const cmp2[NUM_OF_COND_CMP][2] =
10079 {\"cmp%d4\\t%2, %3\",
10080 \"cmp%D5\\t%0, %1\"},
10081 {\"cmp%d4\\t%2, %3\",
10082 \"cmn%D5\\t%0, #%n1\"},
10083 {\"cmn%d4\\t%2, #%n3\",
10084 \"cmp%D5\\t%0, %1\"},
10085 {\"cmn%d4\\t%2, #%n3\",
10086 \"cmn%D5\\t%0, #%n1\"}
10088 static const char * const ite[2] =
10093 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10094 CMP_CMP, CMN_CMP, CMP_CMP,
10095 CMN_CMP, CMP_CMN, CMN_CMN};
10097 comparison_dominates_p (GET_CODE (operands[5]),
10098 reverse_condition (GET_CODE (operands[4])));
10100 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10101 if (TARGET_THUMB2) {
10102 output_asm_insn (ite[swap], operands);
10104 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10107 [(set_attr "conds" "set")
10108 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10109 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
10110 (set_attr_alternative "length"
10116 (if_then_else (eq_attr "is_thumb" "no")
10119 (if_then_else (eq_attr "is_thumb" "no")
10122 (if_then_else (eq_attr "is_thumb" "no")
10125 (if_then_else (eq_attr "is_thumb" "no")
10128 (set_attr "type" "multiple")]
;; NOTE(review): AND-combination of two comparisons into one dominant CC
;; register.  cmp2 holds the unconditional first compare, cmp1 the predicated
;; (%d5/%d4) second compare; swap comes from comparison_dominates_p on the two
;; comparison codes.  Note "predicable" is explicitly "no" here.  Excerpt is
;; elided (embedded line numbers skip) — some pattern text is missing.
10131 (define_insn "*cmp_and"
10132 [(set (match_operand 6 "dominant_cc_register" "")
10135 (match_operator 4 "arm_comparison_operator"
10136 [(match_operand:SI 0 "s_register_operand"
10137 "l,l,l,r,r,r,r,r,r,r")
10138 (match_operand:SI 1 "arm_add_operand"
10139 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
10140 (match_operator:SI 5 "arm_comparison_operator"
10141 [(match_operand:SI 2 "s_register_operand"
10142 "l,r,r,l,l,r,r,r,r,r")
10143 (match_operand:SI 3 "arm_add_operand"
10144 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
10149 static const char *const cmp1[NUM_OF_COND_CMP][2] =
10151 {\"cmp%d5\\t%0, %1\",
10152 \"cmp%d4\\t%2, %3\"},
10153 {\"cmn%d5\\t%0, #%n1\",
10154 \"cmp%d4\\t%2, %3\"},
10155 {\"cmp%d5\\t%0, %1\",
10156 \"cmn%d4\\t%2, #%n3\"},
10157 {\"cmn%d5\\t%0, #%n1\",
10158 \"cmn%d4\\t%2, #%n3\"}
10160 static const char *const cmp2[NUM_OF_COND_CMP][2] =
10165 \"cmn\\t%0, #%n1\"},
10166 {\"cmn\\t%2, #%n3\",
10168 {\"cmn\\t%2, #%n3\",
10169 \"cmn\\t%0, #%n1\"}
10171 static const char *const ite[2] =
10176 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
10177 CMP_CMP, CMN_CMP, CMP_CMP,
10178 CMP_CMP, CMN_CMP, CMP_CMN,
10181 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10183 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10184 if (TARGET_THUMB2) {
10185 output_asm_insn (ite[swap], operands);
10187 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10190 [(set_attr "conds" "set")
10191 (set_attr "predicable" "no")
10192 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
10193 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
10194 (set_attr_alternative "length"
10201 (if_then_else (eq_attr "is_thumb" "no")
10204 (if_then_else (eq_attr "is_thumb" "no")
10207 (if_then_else (eq_attr "is_thumb" "no")
10210 (if_then_else (eq_attr "is_thumb" "no")
10213 (set_attr "type" "multiple")]
;; NOTE(review): IOR-combination of two comparisons; mirrors *cmp_and but the
;; second compare is predicated on the INVERSE condition (%D4/%D5 in cmp2).
;; First compare comes from cmp1 (unconditional), then an optional Thumb-2 IT,
;; then the predicated compare.  Excerpt is elided (embedded line numbers
;; skip) — some pattern text is missing here.
10216 (define_insn "*cmp_ior"
10217 [(set (match_operand 6 "dominant_cc_register" "")
10220 (match_operator 4 "arm_comparison_operator"
10221 [(match_operand:SI 0 "s_register_operand"
10222 "l,l,l,r,r,r,r,r,r,r")
10223 (match_operand:SI 1 "arm_add_operand"
10224 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
10225 (match_operator:SI 5 "arm_comparison_operator"
10226 [(match_operand:SI 2 "s_register_operand"
10227 "l,r,r,l,l,r,r,r,r,r")
10228 (match_operand:SI 3 "arm_add_operand"
10229 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
10234 static const char *const cmp1[NUM_OF_COND_CMP][2] =
10238 {\"cmn\\t%0, #%n1\",
10241 \"cmn\\t%2, #%n3\"},
10242 {\"cmn\\t%0, #%n1\",
10243 \"cmn\\t%2, #%n3\"}
10245 static const char *const cmp2[NUM_OF_COND_CMP][2] =
10247 {\"cmp%D4\\t%2, %3\",
10248 \"cmp%D5\\t%0, %1\"},
10249 {\"cmp%D4\\t%2, %3\",
10250 \"cmn%D5\\t%0, #%n1\"},
10251 {\"cmn%D4\\t%2, #%n3\",
10252 \"cmp%D5\\t%0, %1\"},
10253 {\"cmn%D4\\t%2, #%n3\",
10254 \"cmn%D5\\t%0, #%n1\"}
10256 static const char *const ite[2] =
10261 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
10262 CMP_CMP, CMN_CMP, CMP_CMP,
10263 CMP_CMP, CMN_CMP, CMP_CMN,
10266 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10268 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10269 if (TARGET_THUMB2) {
10270 output_asm_insn (ite[swap], operands);
10272 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10276 [(set_attr "conds" "set")
10277 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
10278 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
10279 (set_attr_alternative "length"
10286 (if_then_else (eq_attr "is_thumb" "no")
10289 (if_then_else (eq_attr "is_thumb" "no")
10292 (if_then_else (eq_attr "is_thumb" "no")
10295 (if_then_else (eq_attr "is_thumb" "no")
10298 (set_attr "type" "multiple")]
;; NOTE(review): OR of two store-condition (scc) results.  After reload this
;; splits into a dominant-CC compare (operand 7, a new CC reg chosen by
;; arm_select_dominance_cc_mode with DOM_CC_X_OR_Y) followed by a NE-based scc
;; into operand 0.  Clobbers CC.  Excerpt is elided — some split text missing.
10301 (define_insn_and_split "*ior_scc_scc"
10302 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
10303 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
10304 [(match_operand:SI 1 "s_register_operand" "l,r")
10305 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10306 (match_operator:SI 6 "arm_comparison_operator"
10307 [(match_operand:SI 4 "s_register_operand" "l,r")
10308 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
10309 (clobber (reg:CC CC_REGNUM))]
10311 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
10314 "TARGET_32BIT && reload_completed"
10315 [(set (match_dup 7)
10318 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10319 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10321 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10323 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10326 [(set_attr "conds" "clob")
10327 (set_attr "enabled_for_short_it" "yes,no")
10328 (set_attr "length" "16")
10329 (set_attr "type" "multiple")]
10332 ; If the above pattern is followed by a CMP insn, then the compare is
10333 ; redundant, since we can rework the conditional instruction that follows.
;; NOTE(review): as *ior_scc_scc but when the OR result is itself compared:
;; the redundant compare is folded away — the split sets the dominant CC
;; register directly (operand 0) and derives the scc value from it.
;; Excerpt is elided — some split text missing.
10334 (define_insn_and_split "*ior_scc_scc_cmp"
10335 [(set (match_operand 0 "dominant_cc_register" "")
10336 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
10337 [(match_operand:SI 1 "s_register_operand" "l,r")
10338 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10339 (match_operator:SI 6 "arm_comparison_operator"
10340 [(match_operand:SI 4 "s_register_operand" "l,r")
10341 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
10343 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
10344 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10345 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10348 "TARGET_32BIT && reload_completed"
10349 [(set (match_dup 0)
10352 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10353 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10355 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10357 [(set_attr "conds" "set")
10358 (set_attr "enabled_for_short_it" "yes,no")
10359 (set_attr "length" "16")
10360 (set_attr "type" "multiple")]
;; NOTE(review): AND of two scc results; the DOM_CC_X_AND_Y analogue of
;; *ior_scc_scc.  Splits after reload into a dominant-CC compare into a fresh
;; CC reg (operand 7) plus an NE scc.  The split condition repeats the
;; dominance check.  Excerpt is elided — some pattern text missing.
10363 (define_insn_and_split "*and_scc_scc"
10364 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
10365 (and:SI (match_operator:SI 3 "arm_comparison_operator"
10366 [(match_operand:SI 1 "s_register_operand" "l,r")
10367 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10368 (match_operator:SI 6 "arm_comparison_operator"
10369 [(match_operand:SI 4 "s_register_operand" "l,r")
10370 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
10371 (clobber (reg:CC CC_REGNUM))]
10373 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10376 "TARGET_32BIT && reload_completed
10377 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10379 [(set (match_dup 7)
10382 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10383 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10385 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10387 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10390 [(set_attr "conds" "clob")
10391 (set_attr "enabled_for_short_it" "yes,no")
10392 (set_attr "length" "16")
10393 (set_attr "type" "multiple")]
10396 ; If the above pattern is followed by a CMP insn, then the compare is
10397 ; redundant, since we can rework the conditional instruction that follows.
;; NOTE(review): as *and_scc_scc but with the following compare folded in —
;; the dominant CC register (operand 0) is set directly and operand 7 receives
;; the NE-derived value.  Excerpt is elided — some pattern text missing.
10398 (define_insn_and_split "*and_scc_scc_cmp"
10399 [(set (match_operand 0 "dominant_cc_register" "")
10400 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
10401 [(match_operand:SI 1 "s_register_operand" "l,r")
10402 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10403 (match_operator:SI 6 "arm_comparison_operator"
10404 [(match_operand:SI 4 "s_register_operand" "l,r")
10405 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
10407 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
10408 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10409 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10412 "TARGET_32BIT && reload_completed"
10413 [(set (match_dup 0)
10416 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10417 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10419 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10421 [(set_attr "conds" "set")
10422 (set_attr "enabled_for_short_it" "yes,no")
10423 (set_attr "length" "16")
10424 (set_attr "type" "multiple")]
10427 ;; If there is no dominance in the comparison, then we can still save an
10428 ;; instruction in the AND case, since we can know that the second compare
10429 ;; need only zero the value if false (if true, then the value is already
;; known to be correct).
;; NOTE(review): AND of two scc results when NO dominance relation exists.
;; Splits into: scc for the first comparison (clobbering CC), a compare for
;; the second (operands 7/8 built in the preparation code via SELECT_CC_MODE
;; and gen_rtx_COMPARE), and a conditional zeroing of the result.  Uses
;; earlyclobber "&Ts" outputs.  Excerpt is elided — some text missing.
10431 (define_insn_and_split "*and_scc_scc_nodom"
10432 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
10433 (and:SI (match_operator:SI 3 "arm_comparison_operator"
10434 [(match_operand:SI 1 "s_register_operand" "r,r,0")
10435 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
10436 (match_operator:SI 6 "arm_comparison_operator"
10437 [(match_operand:SI 4 "s_register_operand" "r,r,r")
10438 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
10439 (clobber (reg:CC CC_REGNUM))]
10441 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10444 "TARGET_32BIT && reload_completed"
10445 [(parallel [(set (match_dup 0)
10446 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
10447 (clobber (reg:CC CC_REGNUM))])
10448 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
10450 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
10453 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
10454 operands[4], operands[5]),
10456 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
10458 [(set_attr "conds" "clob")
10459 (set_attr "length" "20")
10460 (set_attr "type" "multiple")]
;; NOTE(review): fragment of a define_split — its opening "(define_split"
;; line is not visible in this excerpt.  Pattern: a CC_NZ compare of
;; (ior (and reg ...) (comparison ...)) with a scratch clobber is split into
;; an scc-OR into the scratch (operand 4) followed by a tst of its low bit.
;; Verify against the full source before editing.
10464 [(set (reg:CC_NZ CC_REGNUM)
10465 (compare:CC_NZ (ior:SI
10466 (and:SI (match_operand:SI 0 "s_register_operand" "")
10468 (match_operator:SI 1 "arm_comparison_operator"
10469 [(match_operand:SI 2 "s_register_operand" "")
10470 (match_operand:SI 3 "arm_add_operand" "")]))
10472 (clobber (match_operand:SI 4 "s_register_operand" ""))]
10474 [(set (match_dup 4)
10475 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10477 (set (reg:CC_NZ CC_REGNUM)
10478 (compare:CC_NZ (and:SI (match_dup 4) (const_int 1))
;; NOTE(review): fragment of the commuted variant of the preceding split —
;; the comparison appears as the first IOR arm and the AND as the second.
;; Its "(define_split" header is likewise not visible in this excerpt.
10483 [(set (reg:CC_NZ CC_REGNUM)
10484 (compare:CC_NZ (ior:SI
10485 (match_operator:SI 1 "arm_comparison_operator"
10486 [(match_operand:SI 2 "s_register_operand" "")
10487 (match_operand:SI 3 "arm_add_operand" "")])
10488 (and:SI (match_operand:SI 0 "s_register_operand" "")
10491 (clobber (match_operand:SI 4 "s_register_operand" ""))]
10493 [(set (match_dup 4)
10494 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10496 (set (reg:CC_NZ CC_REGNUM)
10497 (compare:CC_NZ (and:SI (match_dup 4) (const_int 1))
10500 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
;; NOTE(review): negated store-condition: reg0 = -(reg1 <op3> op2).  Split
;; after reload into one of three sequences: LT against 0 -> asr #31; NE ->
;; subs/cmpsi2_addneg plus conditional mvn; otherwise cmp + conditional
;; mov #0 / mvn #0 pair (reversed condition first).  Excerpt is elided —
;; several emitted-RTL lines are missing here.
10502 (define_insn_and_split "*negscc"
10503 [(set (match_operand:SI 0 "s_register_operand" "=r")
10504 (neg:SI (match_operator 3 "arm_comparison_operator"
10505 [(match_operand:SI 1 "s_register_operand" "r")
10506 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
10507 (clobber (reg:CC CC_REGNUM))]
10510 "&& reload_completed"
10513 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
10515 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
10517 /* Emit mov\\t%0, %1, asr #31 */
10518 emit_insn (gen_rtx_SET (operands[0],
10519 gen_rtx_ASHIFTRT (SImode,
10524 else if (GET_CODE (operands[3]) == NE)
10526 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
10527 if (CONST_INT_P (operands[2]))
10528 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
10529 gen_int_mode (-INTVAL (operands[2]),
10532 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
10534 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10535 gen_rtx_NE (SImode,
10538 gen_rtx_SET (operands[0],
10544 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
10545 emit_insn (gen_rtx_SET (cc_reg,
10546 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
10547 enum rtx_code rc = GET_CODE (operands[3]);
10549 rc = reverse_condition (rc);
10550 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10551 gen_rtx_fmt_ee (rc,
10555 gen_rtx_SET (operands[0], const0_rtx)));
10556 rc = GET_CODE (operands[3]);
10557 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10558 gen_rtx_fmt_ee (rc,
10562 gen_rtx_SET (operands[0],
10568 [(set_attr "conds" "clob")
10569 (set_attr "length" "12")
10570 (set_attr "type" "multiple")]
;; NOTE(review): conditional move where the condition is a comparison of
;; (reg3 + op4) against something (elided here).  Splits into a CC_NZ-setting
;; add-compare, an unconditional move of operand 1, and a cond_exec move of
;; operand 2.  The preparation code reverses the condition (and swaps the
;; source operands) when operand 2 is not already the destination, and
;; asserts the chosen CC mode is not a floating-point one.
10573 (define_insn_and_split "movcond_addsi"
10574 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
10576 (match_operator 5 "comparison_operator"
10577 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
10578 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
10580 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
10581 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
10582 (clobber (reg:CC CC_REGNUM))]
10585 "&& reload_completed"
10586 [(set (reg:CC_NZ CC_REGNUM)
10588 (plus:SI (match_dup 3)
10591 (set (match_dup 0) (match_dup 1))
10592 (cond_exec (match_dup 6)
10593 (set (match_dup 0) (match_dup 2)))]
10596 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
10597 operands[3], operands[4]);
10598 enum rtx_code rc = GET_CODE (operands[5]);
10599 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10600 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
10601 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
10602 rc = reverse_condition (rc);
10604 std::swap (operands[1], operands[2]);
10606 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10609 [(set_attr "conds" "clob")
10610 (set_attr "enabled_for_short_it" "no,yes,yes")
10611 (set_attr "type" "multiple")]
;; NOTE(review): general SImode conditional move.  Special-cases LT/GE
;; comparisons against zero using asr #31 masking (and/bic/ands/bics) when a
;; register source allows it; otherwise emits cmp (or cmn for negatable
;; immediates) followed by up to two conditional movs.  Alternatives 0/1 tie
;; one source to the destination (length 8); alternative 2 needs both movs
;; (length 12).
10614 (define_insn "movcond"
10615 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10617 (match_operator 5 "arm_comparison_operator"
10618 [(match_operand:SI 3 "s_register_operand" "r,r,r")
10619 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
10620 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
10621 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
10622 (clobber (reg:CC CC_REGNUM))]
10625 if (GET_CODE (operands[5]) == LT
10626 && (operands[4] == const0_rtx))
10628 if (which_alternative != 1 && REG_P (operands[1]))
10630 if (operands[2] == const0_rtx)
10631 return \"and\\t%0, %1, %3, asr #31\";
10632 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
10634 else if (which_alternative != 0 && REG_P (operands[2]))
10636 if (operands[1] == const0_rtx)
10637 return \"bic\\t%0, %2, %3, asr #31\";
10638 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
10640 /* The only case that falls through to here is when both ops 1 & 2
10644 if (GET_CODE (operands[5]) == GE
10645 && (operands[4] == const0_rtx))
10647 if (which_alternative != 1 && REG_P (operands[1]))
10649 if (operands[2] == const0_rtx)
10650 return \"bic\\t%0, %1, %3, asr #31\";
10651 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
10653 else if (which_alternative != 0 && REG_P (operands[2]))
10655 if (operands[1] == const0_rtx)
10656 return \"and\\t%0, %2, %3, asr #31\";
10657 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
10659 /* The only case that falls through to here is when both ops 1 & 2
10662 if (CONST_INT_P (operands[4])
10663 && !const_ok_for_arm (INTVAL (operands[4])))
10664 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
10666 output_asm_insn (\"cmp\\t%3, %4\", operands);
10667 if (which_alternative != 0)
10668 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
10669 if (which_alternative != 1)
10670 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
10673 [(set_attr "conds" "clob")
10674 (set_attr "length" "8,8,12")
10675 (set_attr "type" "multiple")]
10678 ;; ??? The patterns below need checking for Thumb-2 usefulness.
;; NOTE(review): if-then-else of (plus reg2 op3) vs operand 1, with the
;; comparison operands still live (operands 4/5) — CC is clobbered.  The
;; output template for this insn is elided in this excerpt.
10680 (define_insn "*ifcompare_plus_move"
10681 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10682 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10683 [(match_operand:SI 4 "s_register_operand" "r,r")
10684 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10686 (match_operand:SI 2 "s_register_operand" "r,r")
10687 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
10688 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10689 (clobber (reg:CC CC_REGNUM))]
10692 [(set_attr "conds" "clob")
10693 (set_attr "length" "8,12")
10694 (set_attr "type" "multiple")]
;; NOTE(review): same selection but the CC register is already set (operand 5
;; is a cc_register), so only predicated add/sub (and possibly a mov for the
;; else-value) are emitted; "conds" is "use".  sub with #%n3 handles the L
;; (negatable immediate) alternatives.
10697 (define_insn "*if_plus_move"
10698 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10700 (match_operator 4 "arm_comparison_operator"
10701 [(match_operand 5 "cc_register" "") (const_int 0)])
10703 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10704 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
10705 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
10708 add%d4\\t%0, %2, %3
10709 sub%d4\\t%0, %2, #%n3
10710 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
10711 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
10712 [(set_attr "conds" "use")
10713 (set_attr "length" "4,4,8,8")
10714 (set_attr_alternative "type"
10715 [(if_then_else (match_operand 3 "const_int_operand" "")
10716 (const_string "alu_imm" )
10717 (const_string "alu_sreg"))
10718 (const_string "alu_imm")
10719 (const_string "multiple")
10720 (const_string "multiple")])]
;; NOTE(review): mirror of *ifcompare_plus_move with the plus in the
;; else-arm.  Output template elided in this excerpt; CC is clobbered.
10723 (define_insn "*ifcompare_move_plus"
10724 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10725 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10726 [(match_operand:SI 4 "s_register_operand" "r,r")
10727 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10728 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10730 (match_operand:SI 2 "s_register_operand" "r,r")
10731 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
10732 (clobber (reg:CC CC_REGNUM))]
10735 [(set_attr "conds" "clob")
10736 (set_attr "length" "8,12")
10737 (set_attr "type" "multiple")]
;; NOTE(review): CC-using mirror of *if_plus_move: the plus is in the
;; else-arm, so the inverse condition (%D4) guards the add/sub and the direct
;; condition (%d4) the mov.
10740 (define_insn "*if_move_plus"
10741 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10743 (match_operator 4 "arm_comparison_operator"
10744 [(match_operand 5 "cc_register" "") (const_int 0)])
10745 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
10747 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10748 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
10751 add%D4\\t%0, %2, %3
10752 sub%D4\\t%0, %2, #%n3
10753 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
10754 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
10755 [(set_attr "conds" "use")
10756 (set_attr "length" "4,4,8,8")
10757 (set_attr_alternative "type"
10758 [(if_then_else (match_operand 3 "const_int_operand" "")
10759 (const_string "alu_imm" )
10760 (const_string "alu_sreg"))
10761 (const_string "alu_imm")
10762 (const_string "multiple")
10763 (const_string "multiple")])]
;; NOTE(review): select between two shiftable-operator results under a fresh
;; comparison; CC clobbered, 12 bytes.  Output template elided here.
10766 (define_insn "*ifcompare_arith_arith"
10767 [(set (match_operand:SI 0 "s_register_operand" "=r")
10768 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
10769 [(match_operand:SI 5 "s_register_operand" "r")
10770 (match_operand:SI 6 "arm_add_operand" "rIL")])
10771 (match_operator:SI 8 "shiftable_operator"
10772 [(match_operand:SI 1 "s_register_operand" "r")
10773 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10774 (match_operator:SI 7 "shiftable_operator"
10775 [(match_operand:SI 3 "s_register_operand" "r")
10776 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
10777 (clobber (reg:CC CC_REGNUM))]
10780 [(set_attr "conds" "clob")
10781 (set_attr "length" "12")
10782 (set_attr "type" "multiple")]
;; NOTE(review): CC-using form: one predicated shiftable op for each arm
;; (%I6 under %d5, %I7 under the inverse %D5); 8 bytes.
10785 (define_insn "*if_arith_arith"
10786 [(set (match_operand:SI 0 "s_register_operand" "=r")
10787 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
10788 [(match_operand 8 "cc_register" "") (const_int 0)])
10789 (match_operator:SI 6 "shiftable_operator"
10790 [(match_operand:SI 1 "s_register_operand" "r")
10791 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10792 (match_operator:SI 7 "shiftable_operator"
10793 [(match_operand:SI 3 "s_register_operand" "r")
10794 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
10796 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
10797 [(set_attr "conds" "use")
10798 (set_attr "length" "8")
10799 (set_attr "type" "multiple")]
;; NOTE(review): arith-or-move select with its own compare.  Fast path: for
;; LT/GE against zero, non-AND identity operators, and suitable register tie,
;; uses an asr #31 mask + one op (two insns); otherwise cmp/cmn then a
;; predicated arith op and an optional inverse mov.
10802 (define_insn "*ifcompare_arith_move"
10803 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10804 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10805 [(match_operand:SI 2 "s_register_operand" "r,r")
10806 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
10807 (match_operator:SI 7 "shiftable_operator"
10808 [(match_operand:SI 4 "s_register_operand" "r,r")
10809 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
10810 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10811 (clobber (reg:CC CC_REGNUM))]
10814 /* If we have an operation where (op x 0) is the identity operation and
10815 the conditional operator is LT or GE and we are comparing against zero and
10816 everything is in registers then we can do this in two instructions.  */
10817 if (operands[3] == const0_rtx
10818 && GET_CODE (operands[7]) != AND
10819 && REG_P (operands[5])
10820 && REG_P (operands[1])
10821 && REGNO (operands[1]) == REGNO (operands[4])
10822 && REGNO (operands[4]) != REGNO (operands[0]))
10824 if (GET_CODE (operands[6]) == LT)
10825 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10826 else if (GET_CODE (operands[6]) == GE)
10827 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10829 if (CONST_INT_P (operands[3])
10830 && !const_ok_for_arm (INTVAL (operands[3])))
10831 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
10833 output_asm_insn (\"cmp\\t%2, %3\", operands);
10834 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
10835 if (which_alternative != 0)
10836 return \"mov%D6\\t%0, %1\";
10839 [(set_attr "conds" "clob")
10840 (set_attr "length" "8,12")
10841 (set_attr "type" "multiple")]
;; NOTE(review): CC-using arith-or-move: predicated shiftable op, plus an
;; inverse mov when the else-value isn't tied to the destination.
10844 (define_insn "*if_arith_move"
10845 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10846 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10847 [(match_operand 6 "cc_register" "") (const_int 0)])
10848 (match_operator:SI 5 "shiftable_operator"
10849 [(match_operand:SI 2 "s_register_operand" "r,r")
10850 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10851 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
10854 %I5%d4\\t%0, %2, %3
10855 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
10856 [(set_attr "conds" "use")
10857 (set_attr "length" "4,8")
10858 (set_attr_alternative "type"
10859 [(if_then_else (match_operand 3 "const_int_operand" "")
10860 (if_then_else (match_operand 5 "alu_shift_operator_lsl_1_to_4")
10861 (const_string "alu_shift_imm_lsl_1to4")
10862 (const_string "alu_shift_imm_other"))
10863 (const_string "alu_shift_reg"))
10864 (const_string "multiple")])]
;; NOTE(review): mirror of *ifcompare_arith_move with the arith op in the
;; else-arm, so the identity fast path uses GE->and / LT->bic and the final
;; op is predicated on the inverse condition (%D6).
10867 (define_insn "*ifcompare_move_arith"
10868 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10869 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10870 [(match_operand:SI 4 "s_register_operand" "r,r")
10871 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10872 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10873 (match_operator:SI 7 "shiftable_operator"
10874 [(match_operand:SI 2 "s_register_operand" "r,r")
10875 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10876 (clobber (reg:CC CC_REGNUM))]
10879 /* If we have an operation where (op x 0) is the identity operation and
10880 the conditional operator is LT or GE and we are comparing against zero and
10881 everything is in registers then we can do this in two instructions */
10882 if (operands[5] == const0_rtx
10883 && GET_CODE (operands[7]) != AND
10884 && REG_P (operands[3])
10885 && REG_P (operands[1])
10886 && REGNO (operands[1]) == REGNO (operands[2])
10887 && REGNO (operands[2]) != REGNO (operands[0]))
10889 if (GET_CODE (operands[6]) == GE)
10890 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10891 else if (GET_CODE (operands[6]) == LT)
10892 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10895 if (CONST_INT_P (operands[5])
10896 && !const_ok_for_arm (INTVAL (operands[5])))
10897 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
10899 output_asm_insn (\"cmp\\t%4, %5\", operands);
10901 if (which_alternative != 0)
10902 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
10903 return \"%I7%D6\\t%0, %2, %3\";
10905 [(set_attr "conds" "clob")
10906 (set_attr "length" "8,12")
10907 (set_attr "type" "multiple")]
;; NOTE(review): CC-using move-or-arith: inverse-predicated shiftable op,
;; plus a direct-condition mov when the then-value isn't tied.
10910 (define_insn "*if_move_arith"
10911 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10913 (match_operator 4 "arm_comparison_operator"
10914 [(match_operand 6 "cc_register" "") (const_int 0)])
10915 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10916 (match_operator:SI 5 "shiftable_operator"
10917 [(match_operand:SI 2 "s_register_operand" "r,r")
10918 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
10921 %I5%D4\\t%0, %2, %3
10922 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
10923 [(set_attr "conds" "use")
10924 (set_attr "length" "4,8")
10925 (set_attr_alternative "type"
10926 [(if_then_else (match_operand 3 "const_int_operand" "")
10927 (if_then_else (match_operand 5 "alu_shift_operator_lsl_1_to_4")
10928 (const_string "alu_shift_imm_lsl_1to4")
10929 (const_string "alu_shift_imm_other"))
10930 (const_string "alu_shift_reg"))
10931 (const_string "multiple")])]
;; NOTE(review): select between operand 1 and (not reg2) with a fresh
;; compare; output template elided in this excerpt, CC clobbered.
10934 (define_insn "*ifcompare_move_not"
10935 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10937 (match_operator 5 "arm_comparison_operator"
10938 [(match_operand:SI 3 "s_register_operand" "r,r")
10939 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10940 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10942 (match_operand:SI 2 "s_register_operand" "r,r"))))
10943 (clobber (reg:CC CC_REGNUM))]
10946 [(set_attr "conds" "clob")
10947 (set_attr "length" "8,12")
10948 (set_attr "type" "multiple")]
;; NOTE(review): CC-using move-or-mvn: inverse-predicated mvn of operand 2,
;; preceded by a mov/mvn of operand 1 when it isn't tied to the destination
;; (K alternative uses mvn of the bitwise-inverted immediate, %B1).
;; Fix: the "type" attribute was set twice — a single-value
;; (set_attr "type" "mvn_reg") followed by the per-alternative
;; (set_attr "type" "mvn_reg,multiple,multiple").  The single-value form is
;; wrong for the two-instruction alternatives and shadows/conflicts with the
;; per-alternative one (compare the sibling *if_not_move pattern, which has
;; exactly one per-alternative "type").  The duplicate is removed.
10951 (define_insn "*if_move_not"
10952 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10954 (match_operator 4 "arm_comparison_operator"
10955 [(match_operand 3 "cc_register" "") (const_int 0)])
10956 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10957 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10961 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
10962 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
10963 [(set_attr "conds" "use")
10965 (set_attr "length" "4,8,8")
10966 (set_attr "type" "mvn_reg,multiple,multiple")]
;; NOTE(review): select between (not reg2) and operand 1 with a fresh
;; compare; output template elided in this excerpt, CC clobbered.
10969 (define_insn "*ifcompare_not_move"
10970 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10972 (match_operator 5 "arm_comparison_operator"
10973 [(match_operand:SI 3 "s_register_operand" "r,r")
10974 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10976 (match_operand:SI 2 "s_register_operand" "r,r"))
10977 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10978 (clobber (reg:CC CC_REGNUM))]
10981 [(set_attr "conds" "clob")
10982 (set_attr "length" "8,12")
10983 (set_attr "type" "multiple")]
;; NOTE(review): CC-using not-or-move: direct-predicated mvn of operand 2
;; with optional inverse mov/mvn of operand 1; single per-alternative "type"
;; attribute (the layout the *if_move_not pattern should match).
10986 (define_insn "*if_not_move"
10987 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10989 (match_operator 4 "arm_comparison_operator"
10990 [(match_operand 3 "cc_register" "") (const_int 0)])
10991 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10992 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10996 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
10997 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
10998 [(set_attr "conds" "use")
10999 (set_attr "type" "mvn_reg,multiple,multiple")
11000 (set_attr "length" "4,8,8")]
;; NOTE(review): select between a shifted value and operand 1 with a fresh
;; compare; output template elided in this excerpt, CC clobbered.
11003 (define_insn "*ifcompare_shift_move"
11004 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11006 (match_operator 6 "arm_comparison_operator"
11007 [(match_operand:SI 4 "s_register_operand" "r,r")
11008 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11009 (match_operator:SI 7 "shift_operator"
11010 [(match_operand:SI 2 "s_register_operand" "r,r")
11011 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
11012 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11013 (clobber (reg:CC CC_REGNUM))]
11016 [(set_attr "conds" "clob")
11017 (set_attr "length" "8,12")
11018 (set_attr "type" "multiple")]
;; NOTE(review): CC-using shift-or-move: predicated mov-with-shift (%S4)
;; plus optional inverse mov/mvn of operand 1.
11021 (define_insn "*if_shift_move"
11022 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11024 (match_operator 5 "arm_comparison_operator"
11025 [(match_operand 6 "cc_register" "") (const_int 0)])
11026 (match_operator:SI 4 "shift_operator"
11027 [(match_operand:SI 2 "s_register_operand" "r,r,r")
11028 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
11029 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11033 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
11034 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
11035 [(set_attr "conds" "use")
11036 (set_attr "shift" "2")
11037 (set_attr "length" "4,8,8")
11038 (set_attr_alternative "type"
11039 [(if_then_else (match_operand 3 "const_int_operand" "")
11040 (const_string "mov_shift" )
11041 (const_string "mov_shift_reg"))
11042 (const_string "multiple")
11043 (const_string "multiple")])]
;; NOTE(review): mirror of *ifcompare_shift_move with the shift in the
;; else-arm; output template elided in this excerpt, CC clobbered.
11046 (define_insn "*ifcompare_move_shift"
11047 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11049 (match_operator 6 "arm_comparison_operator"
11050 [(match_operand:SI 4 "s_register_operand" "r,r")
11051 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11052 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11053 (match_operator:SI 7 "shift_operator"
11054 [(match_operand:SI 2 "s_register_operand" "r,r")
11055 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
11056 (clobber (reg:CC CC_REGNUM))]
11059 [(set_attr "conds" "clob")
11060 (set_attr "length" "8,12")
11061 (set_attr "type" "multiple")]
;; NOTE(review): CC-using move-or-shift: inverse-predicated mov-with-shift
;; (%S4) plus optional direct mov/mvn of operand 1.
11064 (define_insn "*if_move_shift"
11065 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11067 (match_operator 5 "arm_comparison_operator"
11068 [(match_operand 6 "cc_register" "") (const_int 0)])
11069 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
11070 (match_operator:SI 4 "shift_operator"
11071 [(match_operand:SI 2 "s_register_operand" "r,r,r")
11072 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
11076 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
11077 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
11078 [(set_attr "conds" "use")
11079 (set_attr "shift" "2")
11080 (set_attr "length" "4,8,8")
11081 (set_attr_alternative "type"
11082 [(if_then_else (match_operand 3 "const_int_operand" "")
11083 (const_string "mov_shift" )
11084 (const_string "mov_shift_reg"))
11085 (const_string "multiple")
11086 (const_string "multiple")])]
;; NOTE(review): select between two shifted values under a fresh comparison;
;; output template elided in this excerpt, CC clobbered, 12 bytes.
11089 (define_insn "*ifcompare_shift_shift"
11090 [(set (match_operand:SI 0 "s_register_operand" "=r")
11092 (match_operator 7 "arm_comparison_operator"
11093 [(match_operand:SI 5 "s_register_operand" "r")
11094 (match_operand:SI 6 "arm_add_operand" "rIL")])
11095 (match_operator:SI 8 "shift_operator"
11096 [(match_operand:SI 1 "s_register_operand" "r")
11097 (match_operand:SI 2 "arm_rhs_operand" "rM")])
11098 (match_operator:SI 9 "shift_operator"
11099 [(match_operand:SI 3 "s_register_operand" "r")
11100 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
11101 (clobber (reg:CC CC_REGNUM))]
11104 [(set_attr "conds" "clob")
11105 (set_attr "length" "12")
11106 (set_attr "type" "multiple")]
;; NOTE(review): CC-using shift-or-shift: one predicated shifted mov per arm
;; (%S6 under %d5, %S7 under %D5); "type" is mov_shift only when BOTH shift
;; amounts are immediates.
11109 (define_insn "*if_shift_shift"
11110 [(set (match_operand:SI 0 "s_register_operand" "=r")
11112 (match_operator 5 "arm_comparison_operator"
11113 [(match_operand 8 "cc_register" "") (const_int 0)])
11114 (match_operator:SI 6 "shift_operator"
11115 [(match_operand:SI 1 "s_register_operand" "r")
11116 (match_operand:SI 2 "arm_rhs_operand" "rM")])
11117 (match_operator:SI 7 "shift_operator"
11118 [(match_operand:SI 3 "s_register_operand" "r")
11119 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
11121 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
11122 [(set_attr "conds" "use")
11123 (set_attr "shift" "1")
11124 (set_attr "length" "8")
11125 (set (attr "type") (if_then_else
11126 (and (match_operand 2 "const_int_operand" "")
11127 (match_operand 4 "const_int_operand" ""))
11128 (const_string "mov_shift")
11129 (const_string "mov_shift_reg")))]
;; NOTE(review): select between (not reg1) and a shiftable op under a fresh
;; comparison; output template elided in this excerpt, CC clobbered.
11132 (define_insn "*ifcompare_not_arith"
11133 [(set (match_operand:SI 0 "s_register_operand" "=r")
11135 (match_operator 6 "arm_comparison_operator"
11136 [(match_operand:SI 4 "s_register_operand" "r")
11137 (match_operand:SI 5 "arm_add_operand" "rIL")])
11138 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
11139 (match_operator:SI 7 "shiftable_operator"
11140 [(match_operand:SI 2 "s_register_operand" "r")
11141 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
11142 (clobber (reg:CC CC_REGNUM))]
11145 [(set_attr "conds" "clob")
11146 (set_attr "length" "12")
11147 (set_attr "type" "multiple")]
11150 (define_insn "*if_not_arith"
11151 [(set (match_operand:SI 0 "s_register_operand" "=r")
11153 (match_operator 5 "arm_comparison_operator"
11154 [(match_operand 4 "cc_register" "") (const_int 0)])
11155 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
11156 (match_operator:SI 6 "shiftable_operator"
11157 [(match_operand:SI 2 "s_register_operand" "r")
11158 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
11160 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
11161 [(set_attr "conds" "use")
11162 (set_attr "type" "mvn_reg")
11163 (set_attr "length" "8")]
11166 (define_insn "*ifcompare_arith_not"
11167 [(set (match_operand:SI 0 "s_register_operand" "=r")
11169 (match_operator 6 "arm_comparison_operator"
11170 [(match_operand:SI 4 "s_register_operand" "r")
11171 (match_operand:SI 5 "arm_add_operand" "rIL")])
11172 (match_operator:SI 7 "shiftable_operator"
11173 [(match_operand:SI 2 "s_register_operand" "r")
11174 (match_operand:SI 3 "arm_rhs_operand" "rI")])
11175 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
11176 (clobber (reg:CC CC_REGNUM))]
11179 [(set_attr "conds" "clob")
11180 (set_attr "length" "12")
11181 (set_attr "type" "multiple")]
11184 (define_insn "*if_arith_not"
11185 [(set (match_operand:SI 0 "s_register_operand" "=r")
11187 (match_operator 5 "arm_comparison_operator"
11188 [(match_operand 4 "cc_register" "") (const_int 0)])
11189 (match_operator:SI 6 "shiftable_operator"
11190 [(match_operand:SI 2 "s_register_operand" "r")
11191 (match_operand:SI 3 "arm_rhs_operand" "rI")])
11192 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
11194 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
11195 [(set_attr "conds" "use")
11196 (set_attr "type" "multiple")
11197 (set_attr "length" "8")]
11200 (define_insn "*ifcompare_neg_move"
11201 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11203 (match_operator 5 "arm_comparison_operator"
11204 [(match_operand:SI 3 "s_register_operand" "r,r")
11205 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11206 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
11207 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11208 (clobber (reg:CC CC_REGNUM))]
11211 [(set_attr "conds" "clob")
11212 (set_attr "length" "8,12")
11213 (set_attr "type" "multiple")]
;; Conditional negate: when the comparison (already computed into the CC
;; register, operand 3) holds, store -op2 into op0; otherwise op0 keeps
;; its old value (operand 1 is tied to operand 0 via the "0,0"
;; constraints).  After reload this splits into a single conditionally
;; executed negate.  Alternative 0 uses low registers only so it is
;; eligible for a short Thumb-2 IT block (see "enabled_for_short_it").
11216 (define_insn_and_split "*if_neg_move"
11217 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
11219 (match_operator 4 "arm_comparison_operator"
11220 [(match_operand 3 "cc_register" "") (const_int 0)])
11221 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
11222 (match_operand:SI 1 "s_register_operand" "0,0")))]
11223 "TARGET_32BIT && !TARGET_COND_ARITH"
11225 "&& reload_completed"
11226 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
11227 (set (match_dup 0) (neg:SI (match_dup 2))))]
11229 [(set_attr "conds" "use")
11230 (set_attr "length" "4")
11231 (set_attr "arch" "t2,32")
11232 (set_attr "enabled_for_short_it" "yes,no")
11233 (set_attr "type" "logic_shift_imm")]
11236 (define_insn "*ifcompare_move_neg"
11237 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11239 (match_operator 5 "arm_comparison_operator"
11240 [(match_operand:SI 3 "s_register_operand" "r,r")
11241 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11242 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11243 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
11244 (clobber (reg:CC CC_REGNUM))]
11247 [(set_attr "conds" "clob")
11248 (set_attr "length" "8,12")
11249 (set_attr "type" "multiple")]
;; Mirror image of *if_neg_move: the negate sits on the FALSE arm of the
;; if_then_else, so the split must invert the comparison before emitting
;; the conditionally executed negate.  For floating-point CC modes the
;; inversion uses reverse_condition_maybe_unordered so that unordered
;; results are handled correctly; operand 5 is the rebuilt (reversed)
;; comparison rtx used by the cond_exec.
11252 (define_insn_and_split "*if_move_neg"
11253 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
11255 (match_operator 4 "arm_comparison_operator"
11256 [(match_operand 3 "cc_register" "") (const_int 0)])
11257 (match_operand:SI 1 "s_register_operand" "0,0")
11258 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
11261 "&& reload_completed"
11262 [(cond_exec (match_dup 5)
11263 (set (match_dup 0) (neg:SI (match_dup 2))))]
11265 machine_mode mode = GET_MODE (operands[3]);
11266 rtx_code rc = GET_CODE (operands[4]);
11268 if (mode == CCFPmode || mode == CCFPEmode)
11269 rc = reverse_condition_maybe_unordered (rc);
11271 rc = reverse_condition (rc);
11273 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
11275 [(set_attr "conds" "use")
11276 (set_attr "length" "4")
11277 (set_attr "arch" "t2,32")
11278 (set_attr "enabled_for_short_it" "yes,no")
11279 (set_attr "type" "logic_shift_imm")]
11282 (define_insn "*arith_adjacentmem"
11283 [(set (match_operand:SI 0 "s_register_operand" "=r")
11284 (match_operator:SI 1 "shiftable_operator"
11285 [(match_operand:SI 2 "memory_operand" "m")
11286 (match_operand:SI 3 "memory_operand" "m")]))
11287 (clobber (match_scratch:SI 4 "=r"))]
11288 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
11294 HOST_WIDE_INT val1 = 0, val2 = 0;
11296 if (REGNO (operands[0]) > REGNO (operands[4]))
11298 ldm[1] = operands[4];
11299 ldm[2] = operands[0];
11303 ldm[1] = operands[0];
11304 ldm[2] = operands[4];
11307 base_reg = XEXP (operands[2], 0);
11309 if (!REG_P (base_reg))
11311 val1 = INTVAL (XEXP (base_reg, 1));
11312 base_reg = XEXP (base_reg, 0);
11315 if (!REG_P (XEXP (operands[3], 0)))
11316 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
11318 arith[0] = operands[0];
11319 arith[3] = operands[1];
11333 if (val1 !=0 && val2 != 0)
11337 if (val1 == 4 || val2 == 4)
11338 /* Other val must be 8, since we know they are adjacent and neither
11340 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
11341 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
11343 ldm[0] = ops[0] = operands[4];
11345 ops[2] = GEN_INT (val1);
11346 output_add_immediate (ops);
11348 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11350 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11354 /* Offset is out of range for a single add, so use two ldr. */
11357 ops[2] = GEN_INT (val1);
11358 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11360 ops[2] = GEN_INT (val2);
11361 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11364 else if (val1 != 0)
11367 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11369 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11374 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11376 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11378 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
11381 [(set_attr "length" "12")
11382 (set_attr "predicable" "yes")
11383 (set_attr "type" "load_4")]
11386 ; This pattern is never tried by combine, so do it as a peephole
11389 [(set (match_operand:SI 0 "arm_general_register_operand" "")
11390 (match_operand:SI 1 "arm_general_register_operand" ""))
11391 (set (reg:CC CC_REGNUM)
11392 (compare:CC (match_dup 1) (const_int 0)))]
11394 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
11395 (set (match_dup 0) (match_dup 1))])]
11400 [(set (match_operand:SI 0 "s_register_operand" "")
11401 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
11403 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
11404 [(match_operand:SI 3 "s_register_operand" "")
11405 (match_operand:SI 4 "arm_rhs_operand" "")]))))
11406 (clobber (match_operand:SI 5 "s_register_operand" ""))]
11408 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
11409 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
11414 ;; This split can be used because CC_Z mode implies that the following
11415 ;; branch will be an equality, or an unsigned inequality, so the sign
11416 ;; extension is not needed.
11419 [(set (reg:CC_Z CC_REGNUM)
11421 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
11423 (match_operand 1 "const_int_operand" "")))
11424 (clobber (match_scratch:SI 2 ""))]
11426 && ((UINTVAL (operands[1]))
11427 == ((UINTVAL (operands[1])) >> 24) << 24)"
11428 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
11429 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
11431 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
11434 ;; ??? Check the patterns above for Thumb-2 usefulness
11436 (define_expand "prologue"
11437 [(clobber (const_int 0))]
11440 arm_expand_prologue ();
11442 thumb1_expand_prologue ();
11447 (define_expand "epilogue"
11448 [(clobber (const_int 0))]
11451 if (crtl->calls_eh_return)
11452 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
11455 thumb1_expand_epilogue ();
11456 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
11457 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
11459 else if (HAVE_return)
11461 /* HAVE_return is testing for USE_RETURN_INSN (FALSE). Hence,
11462 no need for explicit testing again. */
11463 emit_jump_insn (gen_return ());
11465 else if (TARGET_32BIT)
11467 arm_expand_epilogue (true);
11473 ;; Note - although unspec_volatile's USE all hard registers,
11474 ;; USEs are ignored after reload has completed. Thus we need
11475 ;; to add an unspec of the link register to ensure that flow
11476 ;; does not think that it is unused by the sibcall branch that
11477 ;; will replace the standard function epilogue.
11478 (define_expand "sibcall_epilogue"
11479 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
11480 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
11483 arm_expand_epilogue (false);
11488 (define_expand "eh_epilogue"
11489 [(use (match_operand:SI 0 "register_operand"))
11490 (use (match_operand:SI 1 "register_operand"))
11491 (use (match_operand:SI 2 "register_operand"))]
11495 cfun->machine->eh_epilogue_sp_ofs = operands[1];
11496 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
11498 rtx ra = gen_rtx_REG (Pmode, 2);
11500 emit_move_insn (ra, operands[2]);
11503 /* This is a hack -- we may have crystallized the function type too
11505 cfun->machine->func_type = 0;
11509 ;; This split is only used during output to reduce the number of patterns
11510 ;; that need assembler instructions adding to them. We allowed the setting
11511 ;; of the conditions to be implicit during rtl generation so that
11512 ;; the conditional compare patterns would work. However this conflicts to
11513 ;; some extent with the conditional data operations, so we have to split them
11516 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
11517 ;; conditional execution sufficient?
11520 [(set (match_operand:SI 0 "s_register_operand" "")
11521 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11522 [(match_operand 2 "" "") (match_operand 3 "" "")])
11524 (match_operand 4 "" "")))
11525 (clobber (reg:CC CC_REGNUM))]
11526 "TARGET_ARM && reload_completed"
11527 [(set (match_dup 5) (match_dup 6))
11528 (cond_exec (match_dup 7)
11529 (set (match_dup 0) (match_dup 4)))]
11532 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11533 operands[2], operands[3]);
11534 enum rtx_code rc = GET_CODE (operands[1]);
11536 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11537 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11538 if (mode == CCFPmode || mode == CCFPEmode)
11539 rc = reverse_condition_maybe_unordered (rc);
11541 rc = reverse_condition (rc);
11543 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
11548 [(set (match_operand:SI 0 "s_register_operand" "")
11549 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11550 [(match_operand 2 "" "") (match_operand 3 "" "")])
11551 (match_operand 4 "" "")
11553 (clobber (reg:CC CC_REGNUM))]
11554 "TARGET_ARM && reload_completed"
11555 [(set (match_dup 5) (match_dup 6))
11556 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
11557 (set (match_dup 0) (match_dup 4)))]
11560 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11561 operands[2], operands[3]);
11563 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11564 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11569 [(set (match_operand:SI 0 "s_register_operand" "")
11570 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11571 [(match_operand 2 "" "") (match_operand 3 "" "")])
11572 (match_operand 4 "" "")
11573 (match_operand 5 "" "")))
11574 (clobber (reg:CC CC_REGNUM))]
11575 "TARGET_ARM && reload_completed"
11576 [(set (match_dup 6) (match_dup 7))
11577 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
11578 (set (match_dup 0) (match_dup 4)))
11579 (cond_exec (match_dup 8)
11580 (set (match_dup 0) (match_dup 5)))]
11583 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11584 operands[2], operands[3]);
11585 enum rtx_code rc = GET_CODE (operands[1]);
11587 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11588 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11589 if (mode == CCFPmode || mode == CCFPEmode)
11590 rc = reverse_condition_maybe_unordered (rc);
11592 rc = reverse_condition (rc);
11594 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
11599 [(set (match_operand:SI 0 "s_register_operand" "")
11600 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11601 [(match_operand:SI 2 "s_register_operand" "")
11602 (match_operand:SI 3 "arm_add_operand" "")])
11603 (match_operand:SI 4 "arm_rhs_operand" "")
11605 (match_operand:SI 5 "s_register_operand" ""))))
11606 (clobber (reg:CC CC_REGNUM))]
11607 "TARGET_ARM && reload_completed"
11608 [(set (match_dup 6) (match_dup 7))
11609 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
11610 (set (match_dup 0) (match_dup 4)))
11611 (cond_exec (match_dup 8)
11612 (set (match_dup 0) (not:SI (match_dup 5))))]
11615 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11616 operands[2], operands[3]);
11617 enum rtx_code rc = GET_CODE (operands[1]);
11619 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11620 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11621 if (mode == CCFPmode || mode == CCFPEmode)
11622 rc = reverse_condition_maybe_unordered (rc);
11624 rc = reverse_condition (rc);
11626 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
11630 (define_insn "*cond_move_not"
11631 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11632 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
11633 [(match_operand 3 "cc_register" "") (const_int 0)])
11634 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11636 (match_operand:SI 2 "s_register_operand" "r,r"))))]
11640 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
11641 [(set_attr "conds" "use")
11642 (set_attr "type" "mvn_reg,multiple")
11643 (set_attr "length" "4,8")]
11646 ;; The next two patterns occur when an AND operation is followed by a
11647 ;; scc insn sequence
11649 (define_insn "*sign_extract_onebit"
11650 [(set (match_operand:SI 0 "s_register_operand" "=r")
11651 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11653 (match_operand:SI 2 "const_int_operand" "n")))
11654 (clobber (reg:CC CC_REGNUM))]
11657 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11658 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
11659 return \"mvnne\\t%0, #0\";
11661 [(set_attr "conds" "clob")
11662 (set_attr "length" "8")
11663 (set_attr "type" "multiple")]
11666 (define_insn "*not_signextract_onebit"
11667 [(set (match_operand:SI 0 "s_register_operand" "=r")
11669 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11671 (match_operand:SI 2 "const_int_operand" "n"))))
11672 (clobber (reg:CC CC_REGNUM))]
11675 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11676 output_asm_insn (\"tst\\t%1, %2\", operands);
11677 output_asm_insn (\"mvneq\\t%0, #0\", operands);
11678 return \"movne\\t%0, #0\";
11680 [(set_attr "conds" "clob")
11681 (set_attr "length" "12")
11682 (set_attr "type" "multiple")]
11684 ;; ??? The above patterns need auditing for Thumb-2
11686 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
11687 ;; expressions. For simplicity, the first register is also in the unspec
11689 ;; To avoid the usage of GNU extension, the length attribute is computed
11690 ;; in a C function arm_attr_length_push_multi.
11691 (define_insn "*push_multi"
11692 [(match_parallel 2 "multi_register_push"
11693 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
11694 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
11695 UNSPEC_PUSH_MULT))])]
11699 int num_saves = XVECLEN (operands[2], 0);
11701 /* For the StrongARM at least it is faster to
11702 use STR to store only a single register.
11703 In Thumb mode always use push, and the assembler will pick
11704 something appropriate. */
11705 if (num_saves == 1 && TARGET_ARM)
11706 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
11713 strcpy (pattern, \"push%?\\t{%1\");
11715 strcpy (pattern, \"push\\t{%1\");
11717 for (i = 1; i < num_saves; i++)
11719 strcat (pattern, \", %|\");
11721 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
11724 strcat (pattern, \"}\");
11725 output_asm_insn (pattern, operands);
11730 [(set_attr "type" "store_16")
11731 (set (attr "length")
11732 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
;; A zero-length insn (emits no code, "length" 0) whose pattern writes a
;; wildcard BLK memory from an unspec of two (stack) registers.  The
;; standard stack-tie idiom: its only purpose is to act as a dependence
;; barrier so the scheduler cannot move memory accesses made through
;; those registers across this point (e.g. across a stack adjustment).
11735 (define_insn "stack_tie"
11736 [(set (mem:BLK (scratch))
11737 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
11738 (match_operand:SI 1 "s_register_operand" "rk")]
11742 [(set_attr "length" "0")
11743 (set_attr "type" "block")]
11746 ;; Pop (as used in epilogue RTL)
11748 (define_insn "*load_multiple_with_writeback"
11749 [(match_parallel 0 "load_multiple_operation"
11750 [(set (match_operand:SI 1 "s_register_operand" "+rk")
11751 (plus:SI (match_dup 1)
11752 (match_operand:SI 2 "const_int_I_operand" "I")))
11753 (set (match_operand:SI 3 "s_register_operand" "=rk")
11754 (mem:SI (match_dup 1)))
11756 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11759 arm_output_multireg_pop (operands, /*return_pc=*/false,
11760 /*cond=*/const_true_rtx,
11766 [(set_attr "type" "load_16")
11767 (set_attr "predicable" "yes")
11768 (set (attr "length")
11769 (symbol_ref "arm_attr_length_pop_multi (operands,
11770 /*return_pc=*/false,
11771 /*write_back_p=*/true)"))]
11774 ;; Pop with return (as used in epilogue RTL)
11776 ;; This instruction is generated when the registers are popped at the end of
11777 ;; epilogue. Here, instead of popping the value into LR and then generating
11778 ;; jump to LR, value is popped into PC directly. Hence, the pattern is combined
11780 (define_insn "*pop_multiple_with_writeback_and_return"
11781 [(match_parallel 0 "pop_multiple_return"
11783 (set (match_operand:SI 1 "s_register_operand" "+rk")
11784 (plus:SI (match_dup 1)
11785 (match_operand:SI 2 "const_int_I_operand" "I")))
11786 (set (match_operand:SI 3 "s_register_operand" "=rk")
11787 (mem:SI (match_dup 1)))
11789 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11792 arm_output_multireg_pop (operands, /*return_pc=*/true,
11793 /*cond=*/const_true_rtx,
11799 [(set_attr "type" "load_16")
11800 (set_attr "predicable" "yes")
11801 (set (attr "length")
11802 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11803 /*write_back_p=*/true)"))]
11806 (define_insn "*pop_multiple_with_return"
11807 [(match_parallel 0 "pop_multiple_return"
11809 (set (match_operand:SI 2 "s_register_operand" "=rk")
11810 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11812 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11815 arm_output_multireg_pop (operands, /*return_pc=*/true,
11816 /*cond=*/const_true_rtx,
11822 [(set_attr "type" "load_16")
11823 (set_attr "predicable" "yes")
11824 (set (attr "length")
11825 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11826 /*write_back_p=*/false)"))]
11829 ;; Load into PC and return
11830 (define_insn "*ldr_with_return"
11832 (set (reg:SI PC_REGNUM)
11833 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
11834 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11835 "ldr%?\t%|pc, [%0], #4"
11836 [(set_attr "type" "load_4")
11837 (set_attr "predicable" "yes")]
11839 ;; Pop for floating point registers (as used in epilogue RTL)
11840 (define_insn "*vfp_pop_multiple_with_writeback"
11841 [(match_parallel 0 "pop_multiple_fp"
11842 [(set (match_operand:SI 1 "s_register_operand" "+rk")
11843 (plus:SI (match_dup 1)
11844 (match_operand:SI 2 "const_int_I_operand" "I")))
11845 (set (match_operand:DF 3 "vfp_hard_register_operand" "")
11846 (mem:DF (match_dup 1)))])]
11847 "TARGET_32BIT && TARGET_VFP_BASE"
11850 int num_regs = XVECLEN (operands[0], 0);
11853 strcpy (pattern, \"vldm\\t\");
11854 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
11855 strcat (pattern, \"!, {\");
11856 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
11857 strcat (pattern, \"%P0\");
11858 if ((num_regs - 1) > 1)
11860 strcat (pattern, \"-%P1\");
11861 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
11864 strcat (pattern, \"}\");
11865 output_asm_insn (pattern, op_list);
11869 [(set_attr "type" "load_16")
11870 (set_attr "conds" "unconditional")
11871 (set_attr "predicable" "no")]
11874 ;; Special patterns for dealing with the constant pool
11876 (define_insn "align_4"
11877 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
11880 assemble_align (32);
11883 [(set_attr "type" "no_insn")]
11886 (define_insn "align_8"
11887 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
11890 assemble_align (64);
11893 [(set_attr "type" "no_insn")]
11896 (define_insn "consttable_end"
11897 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
11900 making_const_table = FALSE;
11903 [(set_attr "type" "no_insn")]
11906 (define_insn "consttable_1"
11907 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
11910 making_const_table = TRUE;
11911 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
11912 assemble_zeros (3);
11915 [(set_attr "length" "4")
11916 (set_attr "type" "no_insn")]
11919 (define_insn "consttable_2"
11920 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
11924 rtx x = operands[0];
11925 making_const_table = TRUE;
11926 switch (GET_MODE_CLASS (GET_MODE (x)))
11929 arm_emit_fp16_const (x);
11932 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
11933 assemble_zeros (2);
11938 [(set_attr "length" "4")
11939 (set_attr "type" "no_insn")]
11942 (define_insn "consttable_4"
11943 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
11947 rtx x = operands[0];
11948 making_const_table = TRUE;
11949 scalar_float_mode float_mode;
11950 if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
11951 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
11954 /* XXX: Sometimes gcc does something really dumb and ends up with
11955 a HIGH in a constant pool entry, usually because it's trying to
11956 load into a VFP register. We know this will always be used in
11957 combination with a LO_SUM which ignores the high bits, so just
11958 strip off the HIGH. */
11959 if (GET_CODE (x) == HIGH)
11961 assemble_integer (x, 4, BITS_PER_WORD, 1);
11962 mark_symbol_refs_as_used (x);
11966 [(set_attr "length" "4")
11967 (set_attr "type" "no_insn")]
11970 (define_insn "consttable_8"
11971 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
11975 making_const_table = TRUE;
11976 scalar_float_mode float_mode;
11977 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
11978 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
11979 float_mode, BITS_PER_WORD);
11981 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
11984 [(set_attr "length" "8")
11985 (set_attr "type" "no_insn")]
11988 (define_insn "consttable_16"
11989 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
11993 making_const_table = TRUE;
11994 scalar_float_mode float_mode;
11995 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
11996 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
11997 float_mode, BITS_PER_WORD);
11999 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
12002 [(set_attr "length" "16")
12003 (set_attr "type" "no_insn")]
12006 ;; V5 Instructions,
;; Count leading zeros.  Maps directly onto the CLZ instruction, which
;; requires ARMv5T or later ("arm_arch5t") in a 32-bit (ARM/Thumb-2)
;; mode; the insn is predicable.
12008 (define_insn "clzsi2"
12009 [(set (match_operand:SI 0 "s_register_operand" "=r")
12010 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
12011 "TARGET_32BIT && arm_arch5t"
12013 [(set_attr "predicable" "yes")
12014 (set_attr "type" "clz")])
;; Bit-reverse (RBIT), modeled as an unspec since RTL has no bit-reverse
;; code.  Requires the Thumb-2 instruction set ("arm_arch_thumb2").
;; Note the "type" attribute reuses "clz" for scheduling purposes.
12016 (define_insn "rbitsi2"
12017 [(set (match_operand:SI 0 "s_register_operand" "=r")
12018 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
12019 "TARGET_32BIT && arm_arch_thumb2"
12021 [(set_attr "predicable" "yes")
12022 (set_attr "type" "clz")])
12024 ;; Keep this as a CTZ expression until after reload and then split
12025 ;; into RBIT + CLZ. Since RBIT is represented as an UNSPEC it is unlikely
12026 ;; to fold with any other expression.
;;
;; ctz (x) == clz (rbit (x)): the split emits rbitsi2 into the
;; destination register and then clzsi2 on that same register, so no
;; scratch is needed.
12028 (define_insn_and_split "ctzsi2"
12029 [(set (match_operand:SI 0 "s_register_operand" "=r")
12030 (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
12031 "TARGET_32BIT && arm_arch_thumb2"
12033 "&& reload_completed"
12036 emit_insn (gen_rbitsi2 (operands[0], operands[1]));
12037 emit_insn (gen_clzsi2 (operands[0], operands[0]));
12041 ;; V5E instructions.
12043 (define_insn "prefetch"
12044 [(prefetch (match_operand:SI 0 "address_operand" "p")
12045 (match_operand:SI 1 "" "")
12046 (match_operand:SI 2 "" ""))]
12047 "TARGET_32BIT && arm_arch5te"
12049 [(set_attr "type" "load_4")]
12052 ;; General predication pattern
12055 [(match_operator 0 "arm_comparison_operator"
12056 [(match_operand 1 "cc_register" "")
12059 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
12061 [(set_attr "predicated" "yes")]
12064 (define_insn "force_register_use"
12065 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
12068 [(set_attr "length" "0")
12069 (set_attr "type" "no_insn")]
12073 ;; Patterns for exception handling
12075 (define_expand "eh_return"
12076 [(use (match_operand 0 "general_operand"))]
12081 emit_insn (gen_arm_eh_return (operands[0]));
12083 emit_insn (gen_thumb_eh_return (operands[0]));
12088 ;; We can't expand this before we know where the link register is stored.
12089 (define_insn_and_split "arm_eh_return"
12090 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
12092 (clobber (match_scratch:SI 1 "=&r"))]
12095 "&& reload_completed"
12099 arm_set_return_address (operands[0], operands[1]);
;; Hardware thread-pointer read: a single MRC from CP15 c13, c0, opc2 3
;; (the user read-only thread ID register, TPIDRURO — confirm against
;; the target ARM ARM revision).  The TLS value is modeled as an unspec
;; of (const_int 0).
12107 (define_insn "load_tp_hard"
12108 [(set (match_operand:SI 0 "register_operand" "=r")
12109 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
12111 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
12112 [(set_attr "predicable" "yes")
12113 (set_attr "type" "mrs")]
12116 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
;;
;; Software thread-pointer read for FDPIC targets: calls the
;; __aeabi_read_tp helper, which returns the thread pointer in r0
;; (hence the hard-wired (reg:SI 0) destination).  In addition to the
;; usual lr/ip/CC clobbers of the call, the FDPIC register is clobbered.
12117 (define_insn "load_tp_soft_fdpic"
12118 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
12119 (clobber (reg:SI FDPIC_REGNUM))
12120 (clobber (reg:SI LR_REGNUM))
12121 (clobber (reg:SI IP_REGNUM))
12122 (clobber (reg:CC CC_REGNUM))]
12123 "TARGET_SOFT_TP && TARGET_FDPIC"
12124 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
12125 [(set_attr "conds" "clob")
12126 (set_attr "type" "branch")]
12129 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
;;
;; Software thread-pointer read (non-FDPIC): calls __aeabi_read_tp,
;; which returns the thread pointer in r0 — hence the hard-wired
;; (reg:SI 0) destination.  Only lr, ip and the condition codes are
;; clobbered by the helper call.
12130 (define_insn "load_tp_soft"
12131 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
12132 (clobber (reg:SI LR_REGNUM))
12133 (clobber (reg:SI IP_REGNUM))
12134 (clobber (reg:CC CC_REGNUM))]
12135 "TARGET_SOFT_TP && !TARGET_FDPIC"
12136 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
12137 [(set_attr "conds" "clob")
12138 (set_attr "type" "branch")]
12141 ;; tls descriptor call
;;
;; TLS-descriptor resolution: r0 holds the descriptor argument and
;; receives the result (both modeled by the UNSPEC_TLS set of
;; R0_REGNUM).  The output routine first emits the local "LPIC"
;; numbered label (operand 1 carries the label number) and then
;; branches to the resolver via "bl %c0(tlscall)".  r1, lr and the
;; condition codes are clobbered by the call.
12142 (define_insn "tlscall"
12143 [(set (reg:SI R0_REGNUM)
12144 (unspec:SI [(reg:SI R0_REGNUM)
12145 (match_operand:SI 0 "" "X")
12146 (match_operand 1 "" "")] UNSPEC_TLS))
12147 (clobber (reg:SI R1_REGNUM))
12148 (clobber (reg:SI LR_REGNUM))
12149 (clobber (reg:SI CC_REGNUM))]
12152 targetm.asm_out.internal_label (asm_out_file, "LPIC",
12153 INTVAL (operands[1]));
12154 return "bl\\t%c0(tlscall)";
12156 [(set_attr "conds" "clob")
12157 (set_attr "length" "4")
12158 (set_attr "type" "branch")]
12161 ;; For thread pointer builtin
12162 (define_expand "get_thread_pointersi"
12163 [(match_operand:SI 0 "s_register_operand")]
12167 arm_load_tp (operands[0]);
12173 ;; We only care about the lower 16 bits of the constant
12174 ;; being inserted into the upper 16 bits of the register.
12175 (define_insn "*arm_movtas_ze"
12176 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
12179 (match_operand:SI 1 "const_int_operand" ""))]
12184 [(set_attr "arch" "32,v8mb")
12185 (set_attr "predicable" "yes")
12186 (set_attr "length" "4")
12187 (set_attr "type" "alu_sreg")]
12190 (define_insn "*arm_rev"
12191 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12192 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
12198 [(set_attr "arch" "t1,t2,32")
12199 (set_attr "length" "2,2,4")
12200 (set_attr "predicable" "no,yes,yes")
12201 (set_attr "type" "rev")]
12204 (define_expand "arm_legacy_rev"
12205 [(set (match_operand:SI 2 "s_register_operand")
12206 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
12210 (lshiftrt:SI (match_dup 2)
12212 (set (match_operand:SI 3 "s_register_operand")
12213 (rotatert:SI (match_dup 1)
12216 (and:SI (match_dup 2)
12217 (const_int -65281)))
12218 (set (match_operand:SI 0 "s_register_operand")
12219 (xor:SI (match_dup 3)
12225 ;; Reuse temporaries to keep register pressure down.
12226 (define_expand "thumb_legacy_rev"
12227 [(set (match_operand:SI 2 "s_register_operand")
12228 (ashift:SI (match_operand:SI 1 "s_register_operand")
12230 (set (match_operand:SI 3 "s_register_operand")
12231 (lshiftrt:SI (match_dup 1)
12234 (ior:SI (match_dup 3)
12236 (set (match_operand:SI 4 "s_register_operand")
12238 (set (match_operand:SI 5 "s_register_operand")
12239 (rotatert:SI (match_dup 1)
12242 (ashift:SI (match_dup 5)
12245 (lshiftrt:SI (match_dup 5)
12248 (ior:SI (match_dup 5)
12251 (rotatert:SI (match_dup 5)
12253 (set (match_operand:SI 0 "s_register_operand")
12254 (ior:SI (match_dup 5)
12260 ;; ARM-specific expansion of signed mod by power of 2
12261 ;; using conditional negate.
12262 ;; For r0 % n where n is a power of 2 produce:
12264 ;; and r0, r0, #(n - 1)
12265 ;; and r1, r1, #(n - 1)
12266 ;; rsbpl r0, r1, #0
;; Expand signed SImode modulo by a power of 2 without a division,
;; using an AND to mask the low bits plus a conditional negate
;; (see the instruction sequence sketched in the comment above).
;; Falls through to the generic path when operand 2 is not a
;; suitable power-of-2 constant.
12268 (define_expand "modsi3"
12269 [(match_operand:SI 0 "register_operand")
12270 (match_operand:SI 1 "register_operand")
12271 (match_operand:SI 2 "const_int_operand")]
12274 HOST_WIDE_INT val = INTVAL (operands[2]);
12277 || exact_log2 (val) <= 0)
12280 rtx mask = GEN_INT (val - 1);
12282 /* In the special case of x0 % 2 we can do the even shorter:
12285 rsblt r0, r0, #0. */
/* x % 2 case: compare operand 1 against zero, mask to one bit, and
   select the negated value when the input was negative.  */
12289 rtx cc_reg = arm_gen_compare_reg (LT,
12290 operands[1], const0_rtx, NULL_RTX);
12291 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
12292 rtx masked = gen_reg_rtx (SImode);
12294 emit_insn (gen_andsi3 (masked, operands[1], mask));
12295 emit_move_insn (operands[0],
12296 gen_rtx_IF_THEN_ELSE (SImode, cond,
12297 gen_rtx_NEG (SImode,
/* General power-of-2 case: compute 0 - op1 with the flags set, mask
   both the original and the negated value, then pick one by sign.  */
12303 rtx neg_op = gen_reg_rtx (SImode);
12304 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
12307 /* Extract the condition register and mode.  */
12308 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
12309 rtx cc_reg = SET_DEST (cmp);
12310 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
12312 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
12314 rtx masked_neg = gen_reg_rtx (SImode);
12315 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
12317 /* We want a conditional negate here, but emitting COND_EXEC rtxes
12318 during expand does not always work.  Do an IF_THEN_ELSE instead.  */
12319 emit_move_insn (operands[0],
12320 gen_rtx_IF_THEN_ELSE (SImode, cond,
12321 gen_rtx_NEG (SImode, masked_neg),
;; Byte-swap a 32-bit value.  When the single-instruction REV is not
;; available the expander falls back to multi-instruction "legacy"
;; sequences (Thumb and ARM variants) that need scratch registers,
;; hence the guard against optimize_size on pre-arch6 cores.
12329 (define_expand "bswapsi2"
12330 [(set (match_operand:SI 0 "s_register_operand")
12331 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
12332 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
/* Scratch registers for the legacy (pre-REV) expansion paths.  */
12336 rtx op2 = gen_reg_rtx (SImode);
12337 rtx op3 = gen_reg_rtx (SImode);
12341 rtx op4 = gen_reg_rtx (SImode);
12342 rtx op5 = gen_reg_rtx (SImode);
/* Thumb legacy sequence needs more scratches than the ARM one.  */
12344 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
12345 op2, op3, op4, op5));
12349 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
12358 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
12359 ;; and unsigned variants, respectively. For rev16, expose
12360 ;; byte-swapping in the lower 16 bits only.
;; Byte-swap the low halfword and sign-extend the result to 32 bits
;; (maps to the REVSH instruction).  Three alternatives: Thumb-1,
;; Thumb-2 narrow, and 32-bit encodings.
12361 (define_insn "*arm_revsh"
12362 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12363 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
12369 [(set_attr "arch" "t1,t2,32")
12370 (set_attr "length" "2,2,4")
12371 (set_attr "type" "rev")]
;; Byte-swap a halfword in place (unsigned variant); alternatives for
;; Thumb-1, Thumb-2 and 32-bit ARM encodings.
12374 (define_insn "*arm_rev16"
12375 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
12376 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
12382 [(set_attr "arch" "t1,t2,32")
12383 (set_attr "length" "2,2,4")
12384 (set_attr "type" "rev")]
12387 ;; There are no canonicalisation rules for the position of the lshiftrt, ashift
12388 ;; operations within an IOR/AND RTX, therefore we have two patterns matching
12389 ;; each valid permutation.
;; Match (ashift & mask3) | (lshiftrt & mask2) as a REV16 on SImode.
;; The aarch_rev16_* predicates in the condition check that the shift
;; amounts and masks form a valid byte-swap of each 16-bit half.
12391 (define_insn "arm_rev16si2"
12392 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
12393 (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
12395 (match_operand:SI 3 "const_int_operand" "n,n,n"))
12396 (and:SI (lshiftrt:SI (match_dup 1)
12398 (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
12400 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
12401 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
12403 [(set_attr "arch" "t1,t2,32")
12404 (set_attr "length" "2,2,4")
12405 (set_attr "type" "rev")]
;; Same as arm_rev16si2 but with the IOR operands in the opposite
;; order (lshiftrt term first); needed because there is no canonical
;; ordering for these operands inside an IOR (see comment above).
12408 (define_insn "arm_rev16si2_alt"
12409 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
12410 (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
12412 (match_operand:SI 2 "const_int_operand" "n,n,n"))
12413 (and:SI (ashift:SI (match_dup 1)
12415 (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
12417 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
12418 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
12420 [(set_attr "arch" "t1,t2,32")
12421 (set_attr "length" "2,2,4")
12422 (set_attr "type" "rev")]
;; Byte-swap a halfword; expander for the HImode bswap optab
;; (continuation of this expand is outside this view).
12425 (define_expand "bswaphi2"
12426 [(set (match_operand:HI 0 "s_register_operand")
12427 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
12432 ;; Patterns for LDRD/STRD in Thumb2 mode
;; Combine two SImode loads at base+ofs and base+ofs+4 into a single
;; LDRD.  Only after reload, and only when operands_ok_ldrd_strd
;; accepts the destination register pair and offset.
12434 (define_insn "*thumb2_ldrd"
12435 [(set (match_operand:SI 0 "s_register_operand" "=r")
12436 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12437 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
12438 (set (match_operand:SI 3 "s_register_operand" "=r")
12439 (mem:SI (plus:SI (match_dup 1)
12440 (match_operand:SI 4 "const_int_operand" ""))))]
12441 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12442 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
12443 && (operands_ok_ldrd_strd (operands[0], operands[3],
12444 operands[1], INTVAL (operands[2]),
12446 "ldrd%?\t%0, %3, [%1, %2]"
12447 [(set_attr "type" "load_8")
12448 (set_attr "predicable" "yes")])
;; LDRD of two consecutive words at [base] and [base, #4]
;; (zero-offset variant of *thumb2_ldrd).
12450 (define_insn "*thumb2_ldrd_base"
12451 [(set (match_operand:SI 0 "s_register_operand" "=r")
12452 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12453 (set (match_operand:SI 2 "s_register_operand" "=r")
12454 (mem:SI (plus:SI (match_dup 1)
12456 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12457 && (operands_ok_ldrd_strd (operands[0], operands[2],
12458 operands[1], 0, false, true))"
12459 "ldrd%?\t%0, %2, [%1]"
12460 [(set_attr "type" "load_8")
12461 (set_attr "predicable" "yes")])
;; LDRD of two consecutive words at [base, #-4] and [base]
;; (negative-offset variant; note -4 passed to operands_ok_ldrd_strd).
12463 (define_insn "*thumb2_ldrd_base_neg"
12464 [(set (match_operand:SI 0 "s_register_operand" "=r")
12465 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12467 (set (match_operand:SI 2 "s_register_operand" "=r")
12468 (mem:SI (match_dup 1)))]
12469 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12470 && (operands_ok_ldrd_strd (operands[0], operands[2],
12471 operands[1], -4, false, true))"
12472 "ldrd%?\t%0, %2, [%1, #-4]"
12473 [(set_attr "type" "load_8")
12474 (set_attr "predicable" "yes")])
;; Combine two SImode stores at base+ofs and base+ofs+4 into a single
;; STRD, mirroring *thumb2_ldrd.
12476 (define_insn "*thumb2_strd"
12477 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12478 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
12479 (match_operand:SI 2 "s_register_operand" "r"))
12480 (set (mem:SI (plus:SI (match_dup 0)
12481 (match_operand:SI 3 "const_int_operand" "")))
12482 (match_operand:SI 4 "s_register_operand" "r"))]
12483 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12484 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
12485 && (operands_ok_ldrd_strd (operands[2], operands[4],
12486 operands[0], INTVAL (operands[1]),
12488 "strd%?\t%2, %4, [%0, %1]"
12489 [(set_attr "type" "store_8")
12490 (set_attr "predicable" "yes")])
;; STRD of two consecutive words at [base] and [base, #4]
;; (zero-offset variant of *thumb2_strd).
12492 (define_insn "*thumb2_strd_base"
12493 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
12494 (match_operand:SI 1 "s_register_operand" "r"))
12495 (set (mem:SI (plus:SI (match_dup 0)
12497 (match_operand:SI 2 "s_register_operand" "r"))]
12498 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12499 && (operands_ok_ldrd_strd (operands[1], operands[2],
12500 operands[0], 0, false, false))"
12501 "strd%?\t%1, %2, [%0]"
12502 [(set_attr "type" "store_8")
12503 (set_attr "predicable" "yes")])
;; STRD of two consecutive words at [base, #-4] and [base]
;; (negative-offset variant of *thumb2_strd).
12505 (define_insn "*thumb2_strd_base_neg"
12506 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12508 (match_operand:SI 1 "s_register_operand" "r"))
12509 (set (mem:SI (match_dup 0))
12510 (match_operand:SI 2 "s_register_operand" "r"))]
12511 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12512 && (operands_ok_ldrd_strd (operands[1], operands[2],
12513 operands[0], -4, false, false))"
12514 "strd%?\t%1, %2, [%0, #-4]"
12515 [(set_attr "type" "store_8")
12516 (set_attr "predicable" "yes")])
12518 ;; ARMv8 CRC32 instructions.
;; ARMv8 CRC32 instruction, parameterised over the crc_variant /
;; crc_mode iterator attributes (e.g. crc32b..crc32cw); the accumulator
;; is SImode while operand 2's mode follows the variant.
12519 (define_insn "arm_<crc_variant>"
12520 [(set (match_operand:SI 0 "s_register_operand" "=r")
12521 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
12522 (match_operand:<crc_mode> 2 "s_register_operand" "r")]
12525 "<crc_variant>\\t%0, %1, %2"
12526 [(set_attr "type" "crc")
12527 (set_attr "conds" "unconditional")]
12530 ;; Load the load/store double peephole optimizations.
12531 (include "ldrdstrd.md")
12533 ;; Load the load/store multiple patterns
12534 (include "ldmstm.md")
12536 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
12537 ;; large lists without explicit writeback generated for APCS_FRAME epilogue.
12538 ;; The operands are validated through the load_multiple_operation
12539 ;; match_parallel predicate rather than through constraints so enable it only
;; Pop/load-multiple for register lists larger than the ldmstm.md
;; patterns cover; the whole parallel is validated by the
;; load_multiple_operation predicate rather than by constraints.
12541 (define_insn "*load_multiple"
12542 [(match_parallel 0 "load_multiple_operation"
12543 [(set (match_operand:SI 2 "s_register_operand" "=rk")
12544 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12546 "TARGET_32BIT && reload_completed"
/* Assembly text is produced by the backend helper, unconditionally
   predicated with const_true_rtx here.  */
12549 arm_output_multireg_pop (operands, /*return_pc=*/false,
12550 /*cond=*/const_true_rtx,
12556 [(set_attr "predicable" "yes")]
;; Soft-float copysign for SFmode: copy the magnitude source into
;; operand 0, then overwrite the sign bit (bit 31) from operand 1's
;; bit pattern using the Thumb-2 bit-field insert pattern.
12559 (define_expand "copysignsf3"
12560 [(match_operand:SF 0 "register_operand")
12561 (match_operand:SF 1 "register_operand")
12562 (match_operand:SF 2 "register_operand")]
12563 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
12565 emit_move_insn (operands[0], operands[2]);
/* View the SF registers as SImode so insv can poke bit 31.  */
12566 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
12567 GEN_INT (31), GEN_INT (0),
12568 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
;; Soft-float copysign for DFmode: the sign lives in bit 31 of the
;; high word, so shift op2's high word down to isolate its sign bit,
;; insert it into a copy of op1's high word, and reassemble the
;; result from op1's low word plus the patched high word.
12573 (define_expand "copysigndf3"
12574 [(match_operand:DF 0 "register_operand")
12575 (match_operand:DF 1 "register_operand")
12576 (match_operand:DF 2 "register_operand")]
12577 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
12579 rtx op0_low = gen_lowpart (SImode, operands[0]);
12580 rtx op0_high = gen_highpart (SImode, operands[0]);
12581 rtx op1_low = gen_lowpart (SImode, operands[1]);
12582 rtx op1_high = gen_highpart (SImode, operands[1]);
12583 rtx op2_high = gen_highpart (SImode, operands[2]);
12585 rtx scratch1 = gen_reg_rtx (SImode);
12586 rtx scratch2 = gen_reg_rtx (SImode);
12587 emit_move_insn (scratch1, op2_high);
12588 emit_move_insn (scratch2, op1_high);
/* scratch1 = sign bit of op2 (logical shift right by 31).  */
12590 emit_insn(gen_rtx_SET(scratch1,
12591 gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT(31))));
/* Insert that single bit at position 31 of op1's high word.  */
12592 emit_insn(gen_insv_t2(scratch2, GEN_INT(1), GEN_INT(31), scratch1));
12593 emit_move_insn (op0_low, op1_low);
12594 emit_move_insn (op0_high, scratch2);
12600 ;; movmisalign patterns for HImode and SImode.
;; Misaligned move for HImode/SImode.  Loads go through an unaligned
;; load pattern (HImode via a zero-extending SImode load and a
;; lowpart move); anything else is an unaligned store.
12601 (define_expand "movmisalign<mode>"
12602 [(match_operand:HSI 0 "general_operand")
12603 (match_operand:HSI 1 "general_operand")]
12606 /* This pattern is not permitted to fail during expansion: if both arguments
12607 are non-registers (e.g. memory := constant), force operand 1 into a
12609 rtx (* gen_unaligned_load)(rtx, rtx);
12610 rtx tmp_dest = operands[0];
12611 if (!s_register_operand (operands[0], <MODE>mode)
12612 && !s_register_operand (operands[1], <MODE>mode))
12613 operands[1] = force_reg (<MODE>mode, operands[1]);
/* HImode loads land in an SImode temporary first.  */
12615 if (<MODE>mode == HImode)
12617 gen_unaligned_load = gen_unaligned_loadhiu;
12618 tmp_dest = gen_reg_rtx (SImode);
12621 gen_unaligned_load = gen_unaligned_loadsi;
12623 if (MEM_P (operands[1]))
12625 emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
12626 if (<MODE>mode == HImode)
12627 emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
12630 emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
;; Coprocessor data-processing (CDP/CDP2) builtin.  All six operands
;; are immediates; arm_const_bounds range-checks each at output time
;; (coprocessor number < 16, opcodes < 32 or < 8 as encoded).
12635 (define_insn "arm_<cdp>"
12636 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12637 (match_operand:SI 1 "immediate_operand" "n")
12638 (match_operand:SI 2 "immediate_operand" "n")
12639 (match_operand:SI 3 "immediate_operand" "n")
12640 (match_operand:SI 4 "immediate_operand" "n")
12641 (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
12642 "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
12644 arm_const_bounds (operands[0], 0, 16);
12645 arm_const_bounds (operands[1], 0, 16);
12646 arm_const_bounds (operands[2], 0, (1 << 5));
12647 arm_const_bounds (operands[3], 0, (1 << 5));
12648 arm_const_bounds (operands[4], 0, (1 << 5));
12649 arm_const_bounds (operands[5], 0, 8);
12650 return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
12652 [(set_attr "length" "4")
12653 (set_attr "type" "coproc")])
;; Coprocessor load (LDC family): load coprocessor register CR%1 of
;; coprocessor p%0 from the memory operand (constraint Uz).
12655 (define_insn "*ldc"
12656 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12657 (match_operand:SI 1 "immediate_operand" "n")
12658 (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
12659 "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
12661 arm_const_bounds (operands[0], 0, 16);
12662 arm_const_bounds (operands[1], 0, (1 << 5));
12663 return "<ldc>\\tp%c0, CR%c1, %2";
12665 [(set_attr "length" "4")
12666 (set_attr "type" "coproc")])
;; Coprocessor store (STC family): store coprocessor register CR%1 of
;; coprocessor p%0 to the memory operand (note "=Uz": memory output).
12668 (define_insn "*stc"
12669 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12670 (match_operand:SI 1 "immediate_operand" "n")
12671 (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
12672 "arm_coproc_builtin_available (VUNSPEC_<STC>)"
12674 arm_const_bounds (operands[0], 0, 16);
12675 arm_const_bounds (operands[1], 0, (1 << 5));
12676 return "<stc>\\tp%c0, CR%c1, %2";
12678 [(set_attr "length" "4")
12679 (set_attr "type" "coproc")])
;; Builtin entry point for LDC: wraps the register address in a MEM so
;; the *ldc insn above can match it.
12681 (define_expand "arm_<ldc>"
12682 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
12683 (match_operand:SI 1 "immediate_operand")
12684 (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
12685 "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
;; Builtin entry point for STC, mirroring arm_<ldc> above.
12687 (define_expand "arm_<stc>"
12688 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
12689 (match_operand:SI 1 "immediate_operand")
12690 (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
12691 "arm_coproc_builtin_available (VUNSPEC_<STC>)")
;; Move core register to coprocessor (MCR/MCR2).  Operand 2 is the
;; source core register; the (use (match_dup 2)) keeps that register
;; live across the unspec_volatile.
12693 (define_insn "arm_<mcr>"
12694 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12695 (match_operand:SI 1 "immediate_operand" "n")
12696 (match_operand:SI 2 "s_register_operand" "r")
12697 (match_operand:SI 3 "immediate_operand" "n")
12698 (match_operand:SI 4 "immediate_operand" "n")
12699 (match_operand:SI 5 "immediate_operand" "n")] MCRI)
12700 (use (match_dup 2))]
12701 "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
12703 arm_const_bounds (operands[0], 0, 16);
12704 arm_const_bounds (operands[1], 0, 8);
12705 arm_const_bounds (operands[3], 0, (1 << 5));
12706 arm_const_bounds (operands[4], 0, (1 << 5));
12707 arm_const_bounds (operands[5], 0, 8);
12708 return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
12710 [(set_attr "length" "4")
12711 (set_attr "type" "coproc")])
;; Move coprocessor register to core register (MRC/MRC2); result is
;; written to operand 0, all other operands are bounds-checked
;; immediates encoding coprocessor, opcodes and CRn/CRm.
12713 (define_insn "arm_<mrc>"
12714 [(set (match_operand:SI 0 "s_register_operand" "=r")
12715 (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
12716 (match_operand:SI 2 "immediate_operand" "n")
12717 (match_operand:SI 3 "immediate_operand" "n")
12718 (match_operand:SI 4 "immediate_operand" "n")
12719 (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
12720 "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
12722 arm_const_bounds (operands[1], 0, 16);
12723 arm_const_bounds (operands[2], 0, 8);
12724 arm_const_bounds (operands[3], 0, (1 << 5));
12725 arm_const_bounds (operands[4], 0, (1 << 5));
12726 arm_const_bounds (operands[5], 0, 8);
12727 return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
12729 [(set_attr "length" "4")
12730 (set_attr "type" "coproc")])
;; Move a core register pair to a coprocessor (MCRR/MCRR2).  The DI
;; source is printed as its low/high halves via %Q2/%R2; the
;; (use (match_dup 2)) keeps the pair live.
12732 (define_insn "arm_<mcrr>"
12733 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12734 (match_operand:SI 1 "immediate_operand" "n")
12735 (match_operand:DI 2 "s_register_operand" "r")
12736 (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
12737 (use (match_dup 2))]
12738 "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
12740 arm_const_bounds (operands[0], 0, 16);
12741 arm_const_bounds (operands[1], 0, 8);
12742 arm_const_bounds (operands[3], 0, (1 << 5));
12743 return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
12745 [(set_attr "length" "4")
12746 (set_attr "type" "coproc")])
;; Move a coprocessor value to a core register pair (MRRC/MRRC2);
;; DImode result printed as low/high halves via %Q0/%R0.
12748 (define_insn "arm_<mrrc>"
12749 [(set (match_operand:DI 0 "s_register_operand" "=r")
12750 (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
12751 (match_operand:SI 2 "immediate_operand" "n")
12752 (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
12753 "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
12755 arm_const_bounds (operands[1], 0, 16);
12756 arm_const_bounds (operands[2], 0, 8);
12757 arm_const_bounds (operands[3], 0, (1 << 5));
12758 return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
12760 [(set_attr "length" "4")
12761 (set_attr "type" "coproc")])
;; Expand a speculation barrier.  On cores without a usable barrier
;; (pre-Armv7, non-Armv8 Thumb-1) call a libgcc helper instead of
;; emitting the insn, in case the code later runs on a newer core.
12763 (define_expand "speculation_barrier"
12764 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
12767 /* For thumb1 (except Armv8 derivatives), and for pre-Armv7 we don't
12768 have a usable barrier (and probably don't need one in practice).
12769 But to be safe if such code is run on later architectures, call a
12770 helper function in libgcc that will do the thing for the active
12772 if (!(arm_arch7 || arm_arch8))
12774 arm_emit_speculation_barrier_function ();
12780 ;; Generate a hard speculation barrier when we have not enabled speculation
;; Hard speculation barrier insn for Armv7/Armv8 (8-byte sequence;
;; assembly template is outside this view).
12782 (define_insn "*speculation_barrier_insn"
12783 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
12784 "arm_arch7 || arm_arch8"
12786 [(set_attr "type" "block")
12787 (set_attr "length" "8")]
12790 ;; Vector bits common to IWMMXT, Neon and MVE
12791 (include "vec-common.md")
12792 ;; Load the Intel Wireless Multimedia Extension patterns
12793 (include "iwmmxt.md")
12794 ;; Load the VFP co-processor patterns
12796 ;; Thumb-1 patterns
12797 (include "thumb1.md")
12798 ;; Thumb-2 patterns
12799 (include "thumb2.md")
12801 (include "neon.md")
12803 (include "crypto.md")
12804 ;; Synchronization Primitives
12805 (include "sync.md")
12806 ;; Fixed-point patterns
12807 (include "arm-fixed.md")
12808 ;; M-profile Vector Extension