1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2019 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
7 ;; This file is part of GCC.
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
26 ;;---------------------------------------------------------------------------
29 ;; Register numbers -- All machine registers should be defined here
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (R4_REGNUM 4) ; Fifth CORE register
34 (FDPIC_REGNUM 9) ; FDPIC register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (LAST_ARM_REGNUM 15) ;
40 (CC_REGNUM 100) ; Condition code pseudo register
41 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
42 (APSRQ_REGNUM 104) ; Q bit pseudo register
43 (APSRGE_REGNUM 105) ; GE bits pseudo register
46 ;; 3rd operand to select_dominance_cc_mode
53 ;; conditional compare combination
64 ;;---------------------------------------------------------------------------
67 ;; Processor type. This is created automatically from arm-cores.def.
68 (include "arm-tune.md")
70 ;; Instruction classification types
73 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
74 ; generating ARM code. This is used to control the length of some insn
75 ; patterns that share the same RTL in both ARM and Thumb code.
; Constant for the whole compilation (wrapped in `const'): depends only on
; the TARGET_THUMB setting, not on any property of an individual insn.
76 (define_attr "is_thumb" "yes,no"
77 (const (if_then_else (symbol_ref "TARGET_THUMB")
78 (const_string "yes") (const_string "no"))))
80 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
81 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
83 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
; Like is_thumb above, but true only for Thumb-1 (TARGET_THUMB1).
84 (define_attr "is_thumb1" "yes,no"
85 (const (if_then_else (symbol_ref "TARGET_THUMB1")
86 (const_string "yes") (const_string "no"))))
88 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
89 ; The arm_restrict_it flag enables the "short IT" feature which
90 ; restricts IT blocks to a single 16-bit instruction.
91 ; This attribute should only be used on 16-bit Thumb-2 instructions
92 ; which may be predicated (the "predicable" attribute must be set).
93 (define_attr "predicable_short_it" "no,yes" (const_string "no"))
95 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
96 ; This attribute should only be used on instructions which may emit
97 ; an IT block in their expansion which is not a short IT.
98 (define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
100 ;; Operand number of an input operand that is shifted. Zero if the
101 ;; given instruction does not shift one of its input operands.
102 (define_attr "shift" "" (const_int 0))
104 ;; [For compatibility with AArch64 in pipeline models]
105 ;; Attribute that specifies whether or not the instruction touches fp
107 (define_attr "fp" "no,yes" (const_string "no"))
109 ; Floating Point Unit. If we only have floating point emulation, then there
110 ; is no point in scheduling the floating point insns. (Well, for best
111 ; performance we should try and group them together).
; Reflects the arm_fpu_attr variable; constant for the whole compilation.
112 (define_attr "fpu" "none,vfp"
113 (const (symbol_ref "arm_fpu_attr")))
115 ; Predicated means that the insn form is conditionally executed based on a
116 ; predicate. We default to 'no' because no Thumb patterns match this rule
117 ; and not all ARM insns do.
118 (define_attr "predicated" "yes,no" (const_string "no"))
120 ; LENGTH of an instruction (in bytes)
121 (define_attr "length" ""
124 ; The architecture which supports the instruction (or alternative).
125 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
126 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
127 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
128 ; arm_arch6. "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
129 ; Baseline. This attribute is used to compute attribute "enabled",
130 ; use type "any" to enable an alternative in all cases.
; See the comment block above for the meaning of each architecture token.
131 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,v8mb,iwmmxt,iwmmxt2,armv6_or_vfpv3,neon"
132 (const_string "any"))
; Maps each "arch" token to a target test; used to compute "enabled" below.
; NOTE(review): this copy appears truncated -- the (const_string "yes")
; result lines between the cond arms are missing.  Text kept byte-for-byte.
; NOTE(review): no arm for plain "iwmmxt" is visible here; only "iwmmxt2"
; is tested -- confirm against the master copy.
134 (define_attr "arch_enabled" "no,yes"
135 (cond [(eq_attr "arch" "any")
138 (and (eq_attr "arch" "a")
139 (match_test "TARGET_ARM"))
142 (and (eq_attr "arch" "t")
143 (match_test "TARGET_THUMB"))
146 (and (eq_attr "arch" "t1")
147 (match_test "TARGET_THUMB1"))
150 (and (eq_attr "arch" "t2")
151 (match_test "TARGET_THUMB2"))
154 (and (eq_attr "arch" "32")
155 (match_test "TARGET_32BIT"))
158 (and (eq_attr "arch" "v6")
159 (match_test "TARGET_32BIT && arm_arch6"))
162 (and (eq_attr "arch" "nov6")
163 (match_test "TARGET_32BIT && !arm_arch6"))
166 (and (eq_attr "arch" "v6t2")
167 (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
170 (and (eq_attr "arch" "v8mb")
171 (match_test "TARGET_THUMB1 && arm_arch8"))
174 (and (eq_attr "arch" "iwmmxt2")
175 (match_test "TARGET_REALLY_IWMMXT2"))
178 (and (eq_attr "arch" "armv6_or_vfpv3")
179 (match_test "arm_arch6 || TARGET_VFP3"))
182 (and (eq_attr "arch" "neon")
183 (match_test "TARGET_NEON"))
187 (const_string "no")))
; Optimization preference of an alternative; paired with opt_enabled below.
189 (define_attr "opt" "any,speed,size"
190 (const_string "any"))
; "yes" when the alternative's "opt" preference matches the current
; per-function speed/size optimization choice.
; NOTE(review): this copy appears truncated -- result lines between the
; cond arms seem to be missing.  Text kept byte-for-byte.
192 (define_attr "opt_enabled" "no,yes"
193 (cond [(eq_attr "opt" "any")
196 (and (eq_attr "opt" "speed")
197 (match_test "optimize_function_for_speed_p (cfun)"))
200 (and (eq_attr "opt" "size")
201 (match_test "optimize_function_for_size_p (cfun)"))
202 (const_string "yes")]
203 (const_string "no")))
; "yes" for FP loads whose source operand is a constant, i.e. loads that
; will be satisfied from the literal pool.
205 (define_attr "use_literal_pool" "no,yes"
206 (cond [(and (eq_attr "type" "f_loads,f_loadd")
207 (match_test "CONSTANT_P (operands[1])"))
208 (const_string "yes")]
209 (const_string "no")))
211 ; Enable all alternatives that are both arch_enabled and insn_enabled.
212 ; FIXME:: opt_enabled has been temporarily removed till the time we have
213 ; an attribute that allows the use of such alternatives.
214 ; This depends on caching of speed_p, size_p on a per
215 ; alternative basis. The problem is that the enabled attribute
216 ; cannot depend on any state that is not cached or is not constant
217 ; for a compilation unit. We probably need a generic "hot/cold"
218 ; alternative which if implemented can help with this. We disable this
219 ; until such a time as this is implemented and / or the improvements or
220 ; regressions with removing this attribute are double checked.
221 ; See ashldi3_neon and <shift>di3_neon in neon.md.
; Master enable: disables alternatives that violate arm_restrict_it rules
; or whose "arch_enabled" is "no".  See the FIXME above about opt_enabled.
; NOTE(review): this copy appears truncated -- the (const_string "no")
; results of the first cond arms are missing.  Text kept byte-for-byte.
223 (define_attr "enabled" "no,yes"
224 (cond [(and (eq_attr "predicable_short_it" "no")
225 (and (eq_attr "predicated" "yes")
226 (match_test "arm_restrict_it")))
229 (and (eq_attr "enabled_for_short_it" "no")
230 (match_test "arm_restrict_it"))
233 (eq_attr "arch_enabled" "no")
235 (const_string "yes")))
237 ; POOL_RANGE is how far away from a constant pool entry that this insn
238 ; can be placed. If the distance is zero, then this insn will never
239 ; reference the pool.
240 ; Note that for Thumb constant pools the PC value is rounded down to the
241 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
242 ; Thumb insns) should be set to <max_range> - 2.
243 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
244 ; before its address. It is set to <max_range> - (8 + <data_size>).
; Per-mode pool ranges; zero means "never references the pool" (see above).
; Individual load patterns override these with their real ranges.
245 (define_attr "arm_pool_range" "" (const_int 0))
246 (define_attr "thumb2_pool_range" "" (const_int 0))
247 (define_attr "arm_neg_pool_range" "" (const_int 0))
248 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
; Select the Thumb or ARM range depending on the is_thumb attribute.
250 (define_attr "pool_range" ""
251 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
252 (attr "arm_pool_range")))
253 (define_attr "neg_pool_range" ""
254 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
255 (attr "arm_neg_pool_range")))
257 ; An assembler sequence may clobber the condition codes without us knowing.
258 ; If such an insn references the pool, then we have no way of knowing how,
259 ; so use the most conservative value for pool_range.
; Conservative defaults for inline asm: clobbers the condition codes and
; uses a small pool_range, per the rationale in the comment above.
260 (define_asm_attributes
261 [(set_attr "conds" "clob")
262 (set_attr "length" "4")
263 (set_attr "pool_range" "250")])
265 ; Load scheduling, set from the arm_ld_sched variable
266 ; initialized by arm_option_override()
267 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
269 ; condition codes: this one is used by final_prescan_insn to speed up
270 ; conditionalizing instructions. It saves having to scan the rtl to see if
271 ; it uses or alters the condition codes.
273 ; USE means that the condition codes are used by the insn in the process of
274 ; outputting code, this means (at present) that we can't use the insn in
277 ; SET means that the purpose of the insn is to set the condition codes in a
278 ; well defined manner.
280 ; CLOB means that the condition codes are altered in an undefined manner, if
281 ; they are altered at all
283 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
284 ; that the instruction does not use or alter the condition codes.
286 ; NOCOND means that the instruction does not use or alter the condition
287 ; codes but can be converted into a conditionally executed instruction.
; Default: Thumb-1 insns and calls clobber the flags; otherwise non-NEON
; insns are "nocond" and NEON insns are "unconditional".
; NOTE(review): this copy appears truncated -- the line between 289 and 291
; (presumably the opening if_then_else) is missing.  Text kept byte-for-byte.
289 (define_attr "conds" "use,set,clob,unconditional,nocond"
291 (ior (eq_attr "is_thumb1" "yes")
292 (eq_attr "type" "call"))
293 (const_string "clob")
294 (if_then_else (eq_attr "is_neon_type" "no")
295 (const_string "nocond")
296 (const_string "unconditional"))))
298 ; Predicable means that the insn can be conditionally executed based on
299 ; an automatically added predicate (additional patterns are generated by
300 ; gen...). We default to 'no' because no Thumb patterns match this rule
301 ; and not all ARM patterns do.
302 (define_attr "predicable" "no,yes" (const_string "no"))
304 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
305 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
306 ; suffer blockages enough to warrant modelling this (and it can adversely
307 ; affect the schedule).
308 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
310 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
311 ; to stall the processor. Used with model_wbuf above.
; NOTE(review): this copy appears truncated -- the insn-type list tested
; by eq_attr and the "yes" result line are missing.  Text kept byte-for-byte.
312 (define_attr "write_conflict" "no,yes"
313 (if_then_else (eq_attr "type"
316 (const_string "no")))
318 ; Classify the insns into those that take one cycle and those that take more
319 ; than one on the main cpu execution unit.
; "single" for the listed one-cycle ALU/iWMMXt insn types; everything else
; is assumed multi-cycle on the main execution unit.
320 (define_attr "core_cycles" "single,multi"
321 (if_then_else (eq_attr "type"
322 "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
323 alu_shift_imm, alu_shift_reg, alu_dsp_reg, alus_ext, alus_imm, alus_sreg,\
324 alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
325 logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
326 logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
327 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
328 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
329 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
330 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
331 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
332 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
333 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
334 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
335 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
336 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
337 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
338 (const_string "single")
339 (const_string "multi")))
341 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
342 ;; distant label. Only applicable to Thumb code.
343 (define_attr "far_jump" "yes,no" (const_string "no"))
346 ;; The number of machine instructions this pattern expands to.
347 ;; Used for Thumb-2 conditional execution.
348 (define_attr "ce_count" "" (const_int 1))
350 ;;---------------------------------------------------------------------------
353 (include "unspecs.md")
355 ;;---------------------------------------------------------------------------
358 (include "iterators.md")
360 ;;---------------------------------------------------------------------------
363 (include "predicates.md")
364 (include "constraints.md")
366 ;;---------------------------------------------------------------------------
367 ;; Pipeline descriptions
; "yes" when tuning for any Cortex-R4/R4F/R5 variant.
; NOTE(review): this copy appears truncated -- the lines between 369/371
; and 371/373 (presumably if_then_else and the "yes" result) are missing.
369 (define_attr "tune_cortexr4" "yes,no"
371 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
373 (const_string "no"))))
375 ;; True if the generic scheduling description should be used.
; "yes" unless tuning for a core that has its own pipeline description.
; NOTE(review): this copy appears truncated -- the if_then_else opener and
; the "no" result line are missing.  Text kept byte-for-byte.
377 (define_attr "generic_sched" "yes,no"
379 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
380 arm926ejs,arm10e,arm1026ejs,arm1136js,\
381 arm1136jfs,cortexa5,cortexa7,cortexa8,\
382 cortexa9,cortexa12,cortexa15,cortexa17,\
383 cortexa53,cortexa57,cortexm4,cortexm7,\
384 exynosm1,marvell_pj4,xgene1")
385 (eq_attr "tune_cortexr4" "yes"))
387 (const_string "yes"))))
; "yes" when the generic VFP pipeline model applies: VFP FPU and no
; core-specific VFP description.
; NOTE(review): this copy appears truncated -- the if_then_else opener and
; the "yes" result line are missing.  Text kept byte-for-byte.
389 (define_attr "generic_vfp" "yes,no"
391 (and (eq_attr "fpu" "vfp")
392 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
393 cortexa8,cortexa9,cortexa53,cortexm4,\
394 cortexm7,marvell_pj4,xgene1")
395 (eq_attr "tune_cortexr4" "no"))
397 (const_string "no"))))
399 (include "marvell-f-iwmmxt.md")
400 (include "arm-generic.md")
401 (include "arm926ejs.md")
402 (include "arm1020e.md")
403 (include "arm1026ejs.md")
404 (include "arm1136jfs.md")
406 (include "fa606te.md")
407 (include "fa626te.md")
408 (include "fmp626.md")
409 (include "fa726te.md")
410 (include "cortex-a5.md")
411 (include "cortex-a7.md")
412 (include "cortex-a8.md")
413 (include "cortex-a9.md")
414 (include "cortex-a15.md")
415 (include "cortex-a17.md")
416 (include "cortex-a53.md")
417 (include "cortex-a57.md")
418 (include "cortex-r4.md")
419 (include "cortex-r4f.md")
420 (include "cortex-m7.md")
421 (include "cortex-m4.md")
422 (include "cortex-m4-fpu.md")
423 (include "exynos-m1.md")
425 (include "marvell-pj4.md")
426 (include "xgene1.md")
428 ;; define_subst and associated attributes
; Substitution that adds a set of the Q-flag pseudo register (saturation
; sticky bit) to a pattern, producing a "_setq" variant; the subst_attrs
; below give the name suffix and the !ARM_Q_BIT_READ predicate condition.
; NOTE(review): this copy appears truncated -- lines between 432 and 436
; (the output-pattern replacement) and the tail of line 440 are missing.
430 (define_subst "add_setq"
431 [(set (match_operand:SI 0 "" "")
432 (match_operand:SI 1 "" ""))]
436 (set (reg:CC APSRQ_REGNUM)
437 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))])
439 (define_subst_attr "add_clobber_q_name" "add_setq" "" "_setq")
440 (define_subst_attr "add_clobber_q_pred" "add_setq" "!ARM_Q_BIT_READ"
443 ;;---------------------------------------------------------------------------
448 ;; Note: For DImode insns, there is normally no reason why operands should
449 ;; not be in the same register, what we don't want is for something being
450 ;; written to partially overlap something that is an input.
; Expand a 64-bit add as two 32-bit adds: an ADDS on the low words setting
; the carry, then a carry-in add on the high words.  A zero low half of
; operand 2 lets the low word be copied and only the high words added.
; NOTE(review): this copy appears truncated -- the expander condition,
; several brace/blank lines and the continuation of the
; arm_decompose_di_binop and gen_rtx_LTU calls are missing.
452 (define_expand "adddi3"
454 [(set (match_operand:DI 0 "s_register_operand")
455 (plus:DI (match_operand:DI 1 "s_register_operand")
456 (match_operand:DI 2 "reg_or_int_operand")))
457 (clobber (reg:CC CC_REGNUM))])]
462 if (!REG_P (operands[2]))
463 operands[2] = force_reg (DImode, operands[2]);
467 rtx lo_result, hi_result, lo_dest, hi_dest;
468 rtx lo_op1, hi_op1, lo_op2, hi_op2;
469 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
471 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
472 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
474 if (lo_op2 == const0_rtx)
477 if (!arm_add_operand (hi_op2, SImode))
478 hi_op2 = force_reg (SImode, hi_op2);
479 /* Assume hi_op2 won't also be zero.  */
480 emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
484 if (!arm_add_operand (lo_op2, SImode))
485 lo_op2 = force_reg (SImode, lo_op2);
486 if (!arm_not_operand (hi_op2, SImode))
487 hi_op2 = force_reg (SImode, hi_op2);
489 emit_insn (gen_addsi3_compare_op1 (lo_dest, lo_op1, lo_op2));
490 rtx carry = gen_rtx_LTU (SImode, gen_rtx_REG (CC_Cmode, CC_REGNUM),
492 if (hi_op2 == const0_rtx)
493 emit_insn (gen_add0si3_carryin (hi_dest, hi_op1, carry));
495 emit_insn (gen_addsi3_carryin (hi_dest, hi_op1, hi_op2, carry))
498 if (lo_result != lo_dest)
499 emit_move_insn (lo_result, lo_dest);
500 if (hi_result != hi_dest)
501 emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
; Signed-overflow add: emit the V-flag-setting add (imm or reg form), then
; branch to operands[3] if the V flag (CC_Vmode, NE) is set.
; NOTE(review): this copy appears truncated -- the expander condition,
; opening brace and the else line are missing.  Text kept byte-for-byte.
507 (define_expand "addvsi4"
508 [(match_operand:SI 0 "s_register_operand")
509 (match_operand:SI 1 "s_register_operand")
510 (match_operand:SI 2 "arm_add_operand")
511 (match_operand 3 "")]
514 if (CONST_INT_P (operands[2]))
515 emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1], operands[2]));
517 emit_insn (gen_addsi3_compareV_reg (operands[0], operands[1], operands[2]));
518 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
; 64-bit signed-overflow add: low words via ADDS (carry out), high words
; via the carry-in, V-flag-out patterns below; finally branch on V set.
; NOTE(review): this copy appears truncated -- the expander condition,
; braces/blank lines and continuation of arm_decompose_di_binop are missing.
523 (define_expand "addvdi4"
524 [(match_operand:DI 0 "s_register_operand")
525 (match_operand:DI 1 "s_register_operand")
526 (match_operand:DI 2 "reg_or_int_operand")
527 (match_operand 3 "")]
530 rtx lo_result, hi_result;
531 rtx lo_op1, hi_op1, lo_op2, hi_op2;
532 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
534 lo_result = gen_lowpart (SImode, operands[0]);
535 hi_result = gen_highpart (SImode, operands[0]);
537 if (lo_op2 == const0_rtx)
539 emit_move_insn (lo_result, lo_op1);
540 if (!arm_add_operand (hi_op2, SImode))
541 hi_op2 = force_reg (SImode, hi_op2);
543 emit_insn (gen_addvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
547 if (!arm_add_operand (lo_op2, SImode))
548 lo_op2 = force_reg (SImode, lo_op2);
549 if (!arm_not_operand (hi_op2, SImode))
550 hi_op2 = force_reg (SImode, hi_op2);
552 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
554 if (hi_op2 == const0_rtx)
555 emit_insn (gen_addsi3_cin_vout_0 (hi_result, hi_op1));
556 else if (CONST_INT_P (hi_op2))
557 emit_insn (gen_addsi3_cin_vout_imm (hi_result, hi_op1, hi_op2));
559 emit_insn (gen_addsi3_cin_vout_reg (hi_result, hi_op1, hi_op2));
561 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
; Carry-in add of two registers with signed-overflow (V) flag output:
; expander builds the carry rtxes (LTU on CC_Cmode), matching insn emits
; ADCS.  Alternatives: 16-bit Thumb-2 form and generic 32-bit form.
; NOTE(review): this copy appears truncated -- several interior lines of
; both the expander pattern and the insn (parallel opener, condition
; string, output template) are missing.  Text kept byte-for-byte.
567 (define_expand "addsi3_cin_vout_reg"
572 (plus:DI (match_dup 4)
573 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
574 (sign_extend:DI (match_operand:SI 2 "s_register_operand")))
575 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
577 (set (match_operand:SI 0 "s_register_operand")
578 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
582 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
583 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
584 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
585 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
589 (define_insn "*addsi3_cin_vout_reg_insn"
590 [(set (reg:CC_V CC_REGNUM)
594 (match_operand:DI 3 "arm_carry_operation" "")
595 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
596 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
598 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
601 (set (match_operand:SI 0 "s_register_operand" "=l,r")
602 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
608 [(set_attr "type" "alus_sreg")
609 (set_attr "arch" "t2,*")
610 (set_attr "length" "2,4")]
; Carry-in add of register plus immediate with V-flag output; the insn's
; second alternative uses SBCS with the bitwise-complemented immediate
; (%B2) for constraints matching "K".
; NOTE(review): this copy appears truncated -- interior pattern lines and
; the first output-template line are missing.  Text kept byte-for-byte.
613 (define_expand "addsi3_cin_vout_imm"
618 (plus:DI (match_dup 4)
619 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
621 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
623 (set (match_operand:SI 0 "s_register_operand")
624 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
625 (match_operand 2 "arm_adcimm_operand")))])]
628 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
629 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
630 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
631 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
635 (define_insn "*addsi3_cin_vout_imm_insn"
636 [(set (reg:CC_V CC_REGNUM)
640 (match_operand:DI 3 "arm_carry_operation" "")
641 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
642 (match_operand 2 "arm_adcimm_operand" "I,K"))
644 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
647 (set (match_operand:SI 0 "s_register_operand" "=r,r")
648 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
653 sbcs%?\\t%0, %1, #%B2"
654 [(set_attr "type" "alus_imm")]
; Carry-in add of zero with V-flag output; emitted as ADCS rd, rn, #0.
; NOTE(review): this copy appears truncated -- interior pattern lines and
; the insn condition are missing.  Text kept byte-for-byte.
657 (define_expand "addsi3_cin_vout_0"
661 (plus:DI (match_dup 3)
662 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
663 (sign_extend:DI (plus:SI (match_dup 4) (match_dup 1)))))
664 (set (match_operand:SI 0 "s_register_operand")
665 (plus:SI (match_dup 4) (match_dup 1)))])]
668 operands[2] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
669 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
670 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
671 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
675 (define_insn "*addsi3_cin_vout_0_insn"
676 [(set (reg:CC_V CC_REGNUM)
679 (match_operand:DI 2 "arm_carry_operation" "")
680 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
681 (sign_extend:DI (plus:SI
682 (match_operand:SI 3 "arm_carry_operation" "")
684 (set (match_operand:SI 0 "s_register_operand" "=r")
685 (plus:SI (match_dup 3) (match_dup 1)))]
687 "adcs%?\\t%0, %1, #0"
688 [(set_attr "type" "alus_imm")]
; Unsigned-overflow add: ADDS via addsi3_compare_op1, then branch to
; operands[3] on carry set (LTU on CC_Cmode).
; NOTE(review): this copy appears truncated -- the expander condition and
; braces are missing.  Text kept byte-for-byte.
691 (define_expand "uaddvsi4"
692 [(match_operand:SI 0 "s_register_operand")
693 (match_operand:SI 1 "s_register_operand")
694 (match_operand:SI 2 "arm_add_operand")
695 (match_operand 3 "")]
698 emit_insn (gen_addsi3_compare_op1 (operands[0], operands[1], operands[2]));
699 arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
; 64-bit unsigned-overflow add; mirrors addvdi4 above but uses the
; carry-out (CC_ADCmode, GEU) patterns for the high word.
; NOTE(review): this copy appears truncated -- the expander condition,
; braces and the arm_decompose_di_binop continuation are missing.  Line 724
; visibly lacks the emit_insn wrapper present in the analogous call at 543;
; cannot tell from here whether that is real or a truncation artefact.
704 (define_expand "uaddvdi4"
705 [(match_operand:DI 0 "s_register_operand")
706 (match_operand:DI 1 "s_register_operand")
707 (match_operand:DI 2 "reg_or_int_operand")
708 (match_operand 3 "")]
711 rtx lo_result, hi_result;
712 rtx lo_op1, hi_op1, lo_op2, hi_op2;
713 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
715 lo_result = gen_lowpart (SImode, operands[0]);
716 hi_result = gen_highpart (SImode, operands[0]);
718 if (lo_op2 == const0_rtx)
720 emit_move_insn (lo_result, lo_op1);
721 if (!arm_add_operand (hi_op2, SImode))
722 hi_op2 = force_reg (SImode, hi_op2);
724 gen_uaddvsi4 (hi_result, hi_op1, hi_op2, operands[3]);
728 if (!arm_add_operand (lo_op2, SImode))
729 lo_op2 = force_reg (SImode, lo_op2);
730 if (!arm_not_operand (hi_op2, SImode))
731 hi_op2 = force_reg (SImode, hi_op2);
733 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
735 if (hi_op2 == const0_rtx)
736 emit_insn (gen_addsi3_cin_cout_0 (hi_result, hi_op1));
737 else if (CONST_INT_P (hi_op2))
738 emit_insn (gen_addsi3_cin_cout_imm (hi_result, hi_op1, hi_op2));
740 emit_insn (gen_addsi3_cin_cout_reg (hi_result, hi_op1, hi_op2));
742 arm_gen_unlikely_cbranch (GEU, CC_ADCmode, operands[3]);
; Carry-in add of two registers with carry-out: compares the 65-bit sum
; (zero-extended operands) against 2^32 in CC_ADCmode.
; NOTE(review): this copy appears truncated -- interior pattern lines,
; condition strings and output templates are missing.  Text kept
; byte-for-byte.
748 (define_expand "addsi3_cin_cout_reg"
753 (plus:DI (match_dup 4)
754 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
755 (zero_extend:DI (match_operand:SI 2 "s_register_operand")))
756 (const_int 4294967296)))
757 (set (match_operand:SI 0 "s_register_operand")
758 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
762 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
763 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
764 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
765 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
769 (define_insn "*addsi3_cin_cout_reg_insn"
770 [(set (reg:CC_ADC CC_REGNUM)
774 (match_operand:DI 3 "arm_carry_operation" "")
775 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
776 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
777 (const_int 4294967296)))
778 (set (match_operand:SI 0 "s_register_operand" "=l,r")
779 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
786 [(set_attr "type" "alus_sreg")
787 (set_attr "arch" "t2,*")
788 (set_attr "length" "2,4")]
; Carry-in add of register plus immediate with carry-out.  operands[6] is
; the immediate masked to 32 bits for the DI-mode comparison half; the
; insn condition re-checks that masked value against operand 5.
; NOTE(review): this copy appears truncated -- interior pattern lines and
; the first output-template line are missing.  Text kept byte-for-byte.
791 (define_expand "addsi3_cin_cout_imm"
796 (plus:DI (match_dup 4)
797 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
799 (const_int 4294967296)))
800 (set (match_operand:SI 0 "s_register_operand")
801 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
802 (match_operand:SI 2 "arm_adcimm_operand")))])]
805 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
806 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
807 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
808 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
809 operands[6] = GEN_INT (UINTVAL (operands[2]) & 0xffffffff);
813 (define_insn "*addsi3_cin_cout_imm_insn"
814 [(set (reg:CC_ADC CC_REGNUM)
818 (match_operand:DI 3 "arm_carry_operation" "")
819 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
820 (match_operand:DI 5 "const_int_operand" "n,n"))
821 (const_int 4294967296)))
822 (set (match_operand:SI 0 "s_register_operand" "=r,r")
823 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
825 (match_operand:SI 2 "arm_adcimm_operand" "I,K")))]
827 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[5])"
830 sbcs%?\\t%0, %1, #%B2"
831 [(set_attr "type" "alus_imm")]
; Carry-in add of zero with carry-out; emitted as ADCS rd, rn, #0.
; NOTE(review): this copy appears truncated -- interior pattern lines and
; the insn condition are missing.  Text kept byte-for-byte.
834 (define_expand "addsi3_cin_cout_0"
838 (plus:DI (match_dup 3)
839 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
840 (const_int 4294967296)))
841 (set (match_operand:SI 0 "s_register_operand")
842 (plus:SI (match_dup 4) (match_dup 1)))])]
845 operands[2] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
846 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
847 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
848 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
852 (define_insn "*addsi3_cin_cout_0_insn"
853 [(set (reg:CC_ADC CC_REGNUM)
856 (match_operand:DI 2 "arm_carry_operation" "")
857 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
858 (const_int 4294967296)))
859 (set (match_operand:SI 0 "s_register_operand" "=r")
860 (plus:SI (match_operand:SI 3 "arm_carry_operation" "") (match_dup 1)))]
862 "adcs%?\\t%0, %1, #0"
863 [(set_attr "type" "alus_imm")]
; 32-bit add expander: constant addends on 32-bit targets are synthesised
; via arm_split_constant rather than forced into a register.
; NOTE(review): this copy appears truncated -- the expander condition,
; braces and the tail (e.g. any DONE) are missing.  Text kept byte-for-byte.
866 (define_expand "addsi3"
867 [(set (match_operand:SI 0 "s_register_operand")
868 (plus:SI (match_operand:SI 1 "s_register_operand")
869 (match_operand:SI 2 "reg_or_int_operand")))]
872 if (TARGET_32BIT && CONST_INT_P (operands[2]))
874 arm_split_constant (PLUS, SImode, NULL_RTX,
875 INTVAL (operands[2]), operands[0], operands[1],
876 optimize && can_create_pseudo_p ());
882 ; If there is a scratch available, this will be faster than synthesizing the
; Peephole: when a constant addend is not a valid (or negated-valid) ARM
; immediate but its one's complement is, load the constant into a scratch
; register and add it.  See the comment above about the scratch being
; faster than synthesising the constant.
; NOTE(review): the opening (define_peephole2 line and the condition's
; first line appear to be missing from this copy.  Text kept byte-for-byte.
885 [(match_scratch:SI 3 "r")
886 (set (match_operand:SI 0 "arm_general_register_operand" "")
887 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
888 (match_operand:SI 2 "const_int_operand" "")))]
890 !(const_ok_for_arm (INTVAL (operands[2]))
891 || const_ok_for_arm (-INTVAL (operands[2])))
892 && const_ok_for_arm (~INTVAL (operands[2]))"
893 [(set (match_dup 3) (match_dup 2))
894 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
898 ;; The r/r/k alternative is required when reloading the address
899 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
900 ;; put the duplicated register first, and not try the commutative version.
; Main 32-bit add insn with many constraint alternatives (Thumb-2 short
; forms, SP-relative forms, addw/subw, and a split for arbitrary
; constants).  The split re-synthesises out-of-range constants with
; arm_split_constant once elimination/reload state allows it.
; NOTE(review): this copy appears truncated -- the insn condition, most of
; the output-template alternatives, split braces and the final close are
; missing.  Text kept byte-for-byte.
901 (define_insn_and_split "*arm_addsi3"
902 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
903 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
904 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
920 subw%?\\t%0, %1, #%n2
921 subw%?\\t%0, %1, #%n2
924 && CONST_INT_P (operands[2])
925 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
926 && (reload_completed || !arm_eliminable_register (operands[1]))"
927 [(clobber (const_int 0))]
929 arm_split_constant (PLUS, SImode, curr_insn,
930 INTVAL (operands[2]), operands[0],
934 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
935 (set_attr "predicable" "yes")
936 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
937 (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
938 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
939 (const_string "alu_imm")
940 (const_string "alu_sreg")))
; ADDS of two registers that also records signed overflow: the CC_V
; comparison checks the DI-mode sign-extended sum against the
; sign-extended SI sum.
; NOTE(review): this copy appears truncated -- the parallel/compare opener
; and the insn condition are missing.  Text kept byte-for-byte.
944 (define_insn "addsi3_compareV_reg"
945 [(set (reg:CC_V CC_REGNUM)
948 (sign_extend:DI (match_operand:SI 1 "register_operand" "%l,0,r"))
949 (sign_extend:DI (match_operand:SI 2 "register_operand" "l,r,r")))
950 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
951 (set (match_operand:SI 0 "register_operand" "=l,r,r")
952 (plus:SI (match_dup 1) (match_dup 2)))]
954 "adds%?\\t%0, %1, %2"
955 [(set_attr "conds" "set")
956 (set_attr "arch" "t2,t2,*")
957 (set_attr "length" "2,2,4")
958 (set_attr "type" "alus_sreg")]
; As addsi3_compareV_reg but discarding the sum -- only the V flag is set.
; NOTE(review): this copy appears truncated -- the compare opener, insn
; condition and output template are missing.  Text kept byte-for-byte.
961 (define_insn "*addsi3_compareV_reg_nosum"
962 [(set (reg:CC_V CC_REGNUM)
965 (sign_extend:DI (match_operand:SI 0 "register_operand" "%l,r"))
966 (sign_extend:DI (match_operand:SI 1 "register_operand" "l,r")))
967 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
970 [(set_attr "conds" "set")
971 (set_attr "arch" "t2,*")
972 (set_attr "length" "2,4")
973 (set_attr "type" "alus_sreg")]
; Overflow-checked add of INT_MIN, emitted as SUBS with #-2147483648
; (adding INT_MIN equals subtracting it in 32-bit arithmetic).
; NOTE(review): this copy appears truncated -- the compare opener and the
; insn condition are missing.  Text kept byte-for-byte.
976 (define_insn "subvsi3_intmin"
977 [(set (reg:CC_V CC_REGNUM)
981 (match_operand:SI 1 "register_operand" "r"))
982 (const_int 2147483648))
983 (sign_extend:DI (plus:SI (match_dup 1) (const_int -2147483648)))))
984 (set (match_operand:SI 0 "register_operand" "=r")
985 (plus:SI (match_dup 1) (const_int -2147483648)))]
987 "subs%?\\t%0, %1, #-2147483648"
988 [(set_attr "conds" "set")
989 (set_attr "type" "alus_imm")]
; ADDS/SUBS of register plus immediate with V-flag output.  The condition
; requires the immediate to equal its own 32-bit sign extension so the
; DI-mode overflow comparison is meaningful.
; NOTE(review): this copy appears truncated -- the compare opener, the
; condition's first line and some output-template alternatives are missing.
992 (define_insn "addsi3_compareV_imm"
993 [(set (reg:CC_V CC_REGNUM)
997 (match_operand:SI 1 "register_operand" "l,0,l,0,r,r"))
998 (match_operand 2 "arm_addimm_operand" "Pd,Py,Px,Pw,I,L"))
999 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
1000 (set (match_operand:SI 0 "register_operand" "=l,l,l,l,r,r")
1001 (plus:SI (match_dup 1) (match_dup 2)))]
1003 && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
1007 subs%?\\t%0, %1, #%n2
1008 subs%?\\t%0, %0, #%n2
1010 subs%?\\t%0, %1, #%n2"
1011 [(set_attr "conds" "set")
1012 (set_attr "arch" "t2,t2,t2,t2,*,*")
1013 (set_attr "length" "2,2,2,2,4,4")
1014 (set_attr "type" "alus_imm")]
; As addsi3_compareV_imm but with the sum discarded (flags only).
; NOTE(review): this copy appears truncated -- the compare opener, part of
; the condition and the output templates are missing.  Text kept
; byte-for-byte.
1017 (define_insn "addsi3_compareV_imm_nosum"
1018 [(set (reg:CC_V CC_REGNUM)
1022 (match_operand:SI 0 "register_operand" "l,r,r"))
1023 (match_operand 1 "arm_addimm_operand" "Pw,I,L"))
1024 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1026 && INTVAL (operands[1]) == ARM_SIGN_EXTEND (INTVAL (operands[1]))"
1031 [(set_attr "conds" "set")
1032 (set_attr "arch" "t2,*,*")
1033 (set_attr "length" "2,4,4")
1034 (set_attr "type" "alus_imm")]
1037 ;; We can handle more constants efficiently if we can clobber either a scratch
1038 ;; or the other source operand. We deliberately leave this late as in
1039 ;; high register pressure situations it's not worth forcing any reloads.
; Peephole: re-shape a flags-only overflow compare with a Pd-constraint
; constant into the summing form, using a low scratch register to hold the
; (dead) sum so the shorter Thumb-2 encoding can be used.
; NOTE(review): the opening (define_peephole2 line, the compare opener and
; the condition's first line appear to be missing from this copy.
1041 [(match_scratch:SI 2 "l")
1042 (set (reg:CC_V CC_REGNUM)
1046 (match_operand:SI 0 "low_register_operand"))
1047 (match_operand 1 "const_int_operand"))
1048 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1050 && satisfies_constraint_Pd (operands[1])"
1052 (set (reg:CC_V CC_REGNUM)
1054 (plus:DI (sign_extend:DI (match_dup 0))
1055 (sign_extend:DI (match_dup 1)))
1056 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1057 (set (match_dup 2) (plus:SI (match_dup 0) (match_dup 1)))])]
1061 [(set (reg:CC_V CC_REGNUM)
1065 (match_operand:SI 0 "low_register_operand"))
1066 (match_operand 1 "const_int_operand"))
1067 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1069 && dead_or_set_p (peep2_next_insn (0), operands[0])
1070 && satisfies_constraint_Py (operands[1])"
1072 (set (reg:CC_V CC_REGNUM)
1074 (plus:DI (sign_extend:DI (match_dup 0))
1075 (sign_extend:DI (match_dup 1)))
1076 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1077 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 1)))])]
;; Add that also sets the condition codes (CC_NOOV: overflow not usable),
;; writing the sum to operand 0.  Emits adds/subs depending on the
;; immediate's sign (alternatives I, L, r).
1080 (define_insn "addsi3_compare0"
1081 [(set (reg:CC_NOOV CC_REGNUM)
1083 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
1084 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
1086 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1087 (plus:SI (match_dup 1) (match_dup 2)))]
1091 subs%?\\t%0, %1, #%n2
1092 adds%?\\t%0, %1, %2"
1093 [(set_attr "conds" "set")
1094 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
;; Same comparison, but the sum itself is not stored (compare-only form).
1097 (define_insn "*addsi3_compare0_scratch"
1098 [(set (reg:CC_NOOV CC_REGNUM)
1100 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
1101 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
1108 [(set_attr "conds" "set")
1109 (set_attr "predicable" "yes")
1110 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
;; Compare the negation of one register against another, setting only the
;; Z flag (CC_Z).  Thumb-2 gets a 2-byte encoding for low registers.
1113 (define_insn "*compare_negsi_si"
1114 [(set (reg:CC_Z CC_REGNUM)
1116 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
1117 (match_operand:SI 1 "s_register_operand" "l,r")))]
1120 [(set_attr "conds" "set")
1121 (set_attr "predicable" "yes")
1122 (set_attr "arch" "t2,*")
1123 (set_attr "length" "2,4")
1124 (set_attr "predicable_short_it" "yes,no")
1125 (set_attr "type" "alus_sreg")]
1128 ;; This is the canonicalization of subsi3_compare when the
1129 ;; addend is a constant.
;; Requires operands[2] == -operands[3] (checked in the condition), so the
;; compare against one constant equals the add of the negated constant.
1130 (define_insn "cmpsi2_addneg"
1131 [(set (reg:CC CC_REGNUM)
1133 (match_operand:SI 1 "s_register_operand" "r,r")
1134 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
1135 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1136 (plus:SI (match_dup 1)
1137 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
1139 && (INTVAL (operands[2])
1140 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
1142 /* For 0 and INT_MIN it is essential that we use subs, as adds will result
1143 in different condition codes (like cmn rather than like cmp), so that
1144 alternative comes first. Both alternatives can match for any 0x??000000
1145 where except for 0 and INT_MIN it doesn't matter what we choose, and also
1146 for -1 and 1 with TARGET_THUMB2, in that case prefer instruction with #1
1147 as it is shorter. */
1148 if (which_alternative == 0 && operands[3] != const1_rtx)
1149 return "subs%?\\t%0, %1, #%n3";
1151 return "adds%?\\t%0, %1, %3";
1153 [(set_attr "conds" "set")
1154 (set_attr "type" "alus_sreg")]
1157 ;; Convert the sequence
1159 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
1163 ;; bcs dest ((unsigned)rn >= 1)
1164 ;; similarly for the beq variant using bcc.
1165 ;; This is a common looping idiom (while (n--))
;; NOTE(review): the "(define_peephole2" opener line appears to be missing
;; from this extract — the pattern below matches add/compare/branch and
;; rewrites the branch to use the CC result of the decrement directly.
1167 [(set (match_operand:SI 0 "arm_general_register_operand" "")
1168 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
1170 (set (match_operand 2 "cc_register" "")
1171 (compare (match_dup 0) (const_int -1)))
1173 (if_then_else (match_operator 3 "equality_operator"
1174 [(match_dup 2) (const_int 0)])
1175 (match_operand 4 "" "")
1176 (match_operand 5 "" "")))]
1177 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
1181 (match_dup 1) (const_int 1)))
1182 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
1184 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
;; Rebuild operand 2 as the hard CC register and flip NE<->the carry-based
;; comparison in operand 3 (preparation statements of the peephole).
1187 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
1188 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
1191 operands[2], const0_rtx);"
1194 ;; The next four insns work because they compare the result with one of
1195 ;; the operands, and we know that the use of the condition code is
1196 ;; either GEU or LTU, so we can use the carry flag from the addition
1197 ;; instead of doing the compare a second time.
;; Add, setting CC_C (carry) by comparing against operand 1; the sum is
;; also written.  Six alternatives cover Thumb-2 short encodings and
;; positive/negative immediates.
1198 (define_insn "addsi3_compare_op1"
1199 [(set (reg:CC_C CC_REGNUM)
1201 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,rk,rk")
1202 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rkI,L"))
1204 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,rk,rk")
1205 (plus:SI (match_dup 1) (match_dup 2)))]
1210 subs%?\\t%0, %1, #%n2
1211 subs%?\\t%0, %0, #%n2
1213 subs%?\\t%0, %1, #%n2"
1214 [(set_attr "conds" "set")
1215 (set_attr "arch" "t2,t2,t2,t2,*,*")
1216 (set_attr "length" "2,2,2,2,4,4")
1218 (if_then_else (match_operand 2 "const_int_operand")
1219 (const_string "alu_imm")
1220 (const_string "alu_sreg")))]
;; Same as addsi3_compare_op1, but the compare is against operand 2.
1223 (define_insn "*addsi3_compare_op2"
1224 [(set (reg:CC_C CC_REGNUM)
1226 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r")
1227 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rI,L"))
1229 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r")
1230 (plus:SI (match_dup 1) (match_dup 2)))]
1235 subs%?\\t%0, %1, #%n2
1236 subs%?\\t%0, %0, #%n2
1238 subs%?\\t%0, %1, #%n2"
1239 [(set_attr "conds" "set")
1240 (set_attr "arch" "t2,t2,t2,t2,*,*")
1241 (set_attr "length" "2,2,2,2,4,4")
1243 (if_then_else (match_operand 2 "const_int_operand")
1244 (const_string "alu_imm")
1245 (const_string "alu_sreg")))]
;; Compare-only versions of the carry-setting add: the sum is discarded,
;; only CC_C is produced.  Compared against operand 0 here...
1248 (define_insn "*compare_addsi2_op0"
1249 [(set (reg:CC_C CC_REGNUM)
1251 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1252 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1260 [(set_attr "conds" "set")
1261 (set_attr "predicable" "yes")
1262 (set_attr "arch" "t2,t2,*,*")
1263 (set_attr "predicable_short_it" "yes,yes,no,no")
1264 (set_attr "length" "2,2,4,4")
1266 (if_then_else (match_operand 1 "const_int_operand")
1267 (const_string "alu_imm")
1268 (const_string "alu_sreg")))]
;; ...and against operand 1 here.
1271 (define_insn "*compare_addsi2_op1"
1272 [(set (reg:CC_C CC_REGNUM)
1274 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1275 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1283 [(set_attr "conds" "set")
1284 (set_attr "predicable" "yes")
1285 (set_attr "arch" "t2,t2,*,*")
1286 (set_attr "predicable_short_it" "yes,yes,no,no")
1287 (set_attr "length" "2,2,4,4")
1289 (if_then_else (match_operand 1 "const_int_operand")
1290 (const_string "alu_imm")
1291 (const_string "alu_sreg")))]
;; Add-with-carry: rd = rn + op2 + C.  The K alternative uses SBC with the
;; bitwise-inverted immediate (%B2), since sbc rd, rn, ~imm == adc rd, rn, imm.
1294 (define_insn "addsi3_carryin"
1295 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1296 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
1297 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
1298 (match_operand:SI 3 "arm_carry_operation" "")))]
1303 sbc%?\\t%0, %1, #%B2"
1304 [(set_attr "conds" "use")
1305 (set_attr "predicable" "yes")
1306 (set_attr "arch" "t2,*,*")
1307 (set_attr "length" "4")
1308 (set_attr "predicable_short_it" "yes,no,no")
1309 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1312 ;; Canonicalization of the above when the immediate is zero.
1313 (define_insn "add0si3_carryin"
1314 [(set (match_operand:SI 0 "s_register_operand" "=r")
1315 (plus:SI (match_operand:SI 2 "arm_carry_operation" "")
1316 (match_operand:SI 1 "arm_not_operand" "r")))]
1318 "adc%?\\t%0, %1, #0"
1319 [(set_attr "conds" "use")
1320 (set_attr "predicable" "yes")
1321 (set_attr "length" "4")
1322 (set_attr "type" "adc_imm")]
;; Alternative association of the same add-with-carry: (C + rn) + op2.
1325 (define_insn "*addsi3_carryin_alt2"
1326 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1327 (plus:SI (plus:SI (match_operand:SI 3 "arm_carry_operation" "")
1328 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
1329 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
1334 sbc%?\\t%0, %1, #%B2"
1335 [(set_attr "conds" "use")
1336 (set_attr "predicable" "yes")
1337 (set_attr "arch" "t2,*,*")
1338 (set_attr "length" "4")
1339 (set_attr "predicable_short_it" "yes,no,no")
1340 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
;; Add-with-carry where one addend is a shifted register:
;; ADC rd, rn, rm <shift>; register-shift form is Arm-only (arch "a").
1343 (define_insn "*addsi3_carryin_shift"
1344 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1346 (match_operator:SI 2 "shift_operator"
1347 [(match_operand:SI 3 "s_register_operand" "r,r")
1348 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1349 (match_operand:SI 5 "arm_carry_operation" ""))
1350 (match_operand:SI 1 "s_register_operand" "r,r")))]
1352 "adc%?\\t%0, %1, %3%S2"
1353 [(set_attr "conds" "use")
1354 (set_attr "arch" "32,a")
1355 (set_attr "shift" "3")
1356 (set_attr "predicable" "yes")
1357 (set_attr "type" "alu_shift_imm,alu_shift_reg")]
;; ADCS form: add-with-carry that also clobbers (re-sets) the flags.
1360 (define_insn "*addsi3_carryin_clobercc"
1361 [(set (match_operand:SI 0 "s_register_operand" "=r")
1362 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1363 (match_operand:SI 2 "arm_rhs_operand" "rI"))
1364 (match_operand:SI 3 "arm_carry_operation" "")))
1365 (clobber (reg:CC CC_REGNUM))]
1367 "adcs%?\\t%0, %1, %2"
1368 [(set_attr "conds" "set")
1369 (set_attr "type" "adcs_reg")]
;; Expand signed-overflow-checked SI subtraction (subv): compute
;; operands[0] = operands[1] - operands[2] and branch to operands[3] on
;; signed overflow.  Constant cases are folded or mapped to add-of-negated
;; immediate; INT_MIN needs its own pattern since -INT_MIN overflows.
1372 (define_expand "subvsi4"
1373 [(match_operand:SI 0 "s_register_operand")
1374 (match_operand:SI 1 "arm_rhs_operand")
1375 (match_operand:SI 2 "arm_add_operand")
1376 (match_operand 3 "")]
1379 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1381 /* If both operands are constants we can decide the result statically. */
1382 wi::overflow_type overflow;
1383 wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1384 rtx_mode_t (operands[2], SImode),
1386 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1387 if (overflow != wi::OVF_NONE)
1388 emit_jump_insn (gen_jump (operands[3]));
1391 else if (CONST_INT_P (operands[2]))
1393 operands[2] = GEN_INT (-INTVAL (operands[2]));
1394 /* Special case for INT_MIN. */
1395 if (INTVAL (operands[2]) == 0x80000000)
1396 emit_insn (gen_subvsi3_intmin (operands[0], operands[1]));
1398 emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1],
1401 else if (CONST_INT_P (operands[1]))
1402 emit_insn (gen_subvsi3_imm1 (operands[0], operands[1], operands[2]));
1404 emit_insn (gen_subvsi3 (operands[0], operands[1], operands[2]));
1406 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
;; Expand signed-overflow-checked DI subtraction as an SI pair:
;; SUBS/compare on the low words, then a borrow-consuming subtract on the
;; high words whose CC_V result drives the overflow branch.
1410 (define_expand "subvdi4"
1411 [(match_operand:DI 0 "s_register_operand")
1412 (match_operand:DI 1 "reg_or_int_operand")
1413 (match_operand:DI 2 "reg_or_int_operand")
1414 (match_operand 3 "")]
1417 rtx lo_result, hi_result;
1418 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1419 lo_result = gen_lowpart (SImode, operands[0]);
1420 hi_result = gen_highpart (SImode, operands[0]);
1421 machine_mode mode = CCmode;
1423 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1425 /* If both operands are constants we can decide the result statically. */
1426 wi::overflow_type overflow;
1427 wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1428 rtx_mode_t (operands[2], DImode),
1430 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1431 if (overflow != wi::OVF_NONE)
1432 emit_jump_insn (gen_jump (operands[3]));
1435 else if (CONST_INT_P (operands[1]))
1437 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1439 if (const_ok_for_arm (INTVAL (lo_op1)))
1441 emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1442 GEN_INT (~UINTVAL (lo_op1))));
1443 /* We could potentially use RSC here in Arm state, but not
1444 in Thumb, so it's probably not worth the effort of handling
1446 hi_op1 = force_reg (SImode, hi_op1);
1450 operands[1] = force_reg (DImode, operands[1]);
1453 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1455 if (lo_op2 == const0_rtx)
;; Low word of op2 is zero: low result is just a move, and the whole
;; overflow check reduces to an SI subv on the high words.
1457 emit_move_insn (lo_result, lo_op1);
1458 if (!arm_add_operand (hi_op2, SImode))
1459 hi_op2 = force_reg (SImode, hi_op2);
1460 emit_insn (gen_subvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1464 if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1465 lo_op2 = force_reg (SImode, lo_op2);
1466 if (CONST_INT_P (lo_op2))
1467 emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1468 GEN_INT (-INTVAL (lo_op2))));
1470 emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2))/* borrow-producing low subtract */;
1473 if (!arm_not_operand (hi_op2, SImode))
1474 hi_op2 = force_reg (SImode, hi_op2);
1475 rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1476 if (CONST_INT_P (hi_op2))
1477 emit_insn (gen_subvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1478 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1479 gen_rtx_LTU (DImode, ccreg,
1482 emit_insn (gen_subvsi3_borrow (hi_result, hi_op1, hi_op2,
1483 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1484 gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1485 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
;; Expand unsigned-overflow-checked SI subtraction: result in operands[0],
;; branch to operands[3] on borrow (LTU on the carry flag).
1490 (define_expand "usubvsi4"
1491 [(match_operand:SI 0 "s_register_operand")
1492 (match_operand:SI 1 "arm_rhs_operand")
1493 (match_operand:SI 2 "arm_add_operand")
1494 (match_operand 3 "")]
1497 machine_mode mode = CCmode;
1498 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1500 /* If both operands are constants we can decide the result statically. */
1501 wi::overflow_type overflow;
1502 wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1503 rtx_mode_t (operands[2], SImode),
1504 UNSIGNED, &overflow);
1505 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1506 if (overflow != wi::OVF_NONE)
1507 emit_jump_insn (gen_jump (operands[3]));
1510 else if (CONST_INT_P (operands[2]))
1511 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
1512 GEN_INT (-INTVAL (operands[2]))));
1513 else if (CONST_INT_P (operands[1]))
1516 emit_insn (gen_rsb_imm_compare (operands[0], operands[1], operands[2],
1517 GEN_INT (~UINTVAL (operands[1]))));
1520 emit_insn (gen_subsi3_compare1 (operands[0], operands[1], operands[2]));
1521 arm_gen_unlikely_cbranch (LTU, mode, operands[3]);
;; Expand unsigned-overflow-checked DI subtraction: mirrors subvdi4, but
;; the final branch tests the borrow (LTU, CC_Bmode) instead of CC_V.
1526 (define_expand "usubvdi4"
1527 [(match_operand:DI 0 "s_register_operand")
1528 (match_operand:DI 1 "reg_or_int_operand")
1529 (match_operand:DI 2 "reg_or_int_operand")
1530 (match_operand 3 "")]
1533 rtx lo_result, hi_result;
1534 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1535 lo_result = gen_lowpart (SImode, operands[0]);
1536 hi_result = gen_highpart (SImode, operands[0]);
1537 machine_mode mode = CCmode;
1539 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1541 /* If both operands are constants we can decide the result statically. */
1542 wi::overflow_type overflow;
1543 wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1544 rtx_mode_t (operands[2], DImode),
1545 UNSIGNED, &overflow);
1546 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1547 if (overflow != wi::OVF_NONE)
1548 emit_jump_insn (gen_jump (operands[3]));
1551 else if (CONST_INT_P (operands[1]))
1553 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1555 if (const_ok_for_arm (INTVAL (lo_op1)))
1557 emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1558 GEN_INT (~UINTVAL (lo_op1))));
1559 /* We could potentially use RSC here in Arm state, but not
1560 in Thumb, so it's probably not worth the effort of handling
1562 hi_op1 = force_reg (SImode, hi_op1);
1566 operands[1] = force_reg (DImode, operands[1]);
1569 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1571 if (lo_op2 == const0_rtx)
;; Zero low word of op2: the check reduces to an SI usubv on high words.
1573 emit_move_insn (lo_result, lo_op1);
1574 if (!arm_add_operand (hi_op2, SImode))
1575 hi_op2 = force_reg (SImode, hi_op2);
1576 emit_insn (gen_usubvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1580 if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1581 lo_op2 = force_reg (SImode, lo_op2);
1582 if (CONST_INT_P (lo_op2))
1583 emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1584 GEN_INT (-INTVAL (lo_op2))));
1586 emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1589 if (!arm_not_operand (hi_op2, SImode))
1590 hi_op2 = force_reg (SImode, hi_op2);
1591 rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1592 if (CONST_INT_P (hi_op2))
1593 emit_insn (gen_usubvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1594 GEN_INT (UINTVAL (hi_op2) & 0xffffffff),
1595 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1596 gen_rtx_LTU (DImode, ccreg,
1599 emit_insn (gen_usubvsi3_borrow (hi_result, hi_op1, hi_op2,
1600 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1601 gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1602 arm_gen_unlikely_cbranch (LTU, CC_Bmode, operands[3]);
;; Register-register SUBS: full CC compare of op1 against op2 plus the
;; difference in operand 0.
1607 (define_insn "subsi3_compare1"
1608 [(set (reg:CC CC_REGNUM)
1610 (match_operand:SI 1 "register_operand" "r")
1611 (match_operand:SI 2 "register_operand" "r")))
1612 (set (match_operand:SI 0 "register_operand" "=r")
1613 (minus:SI (match_dup 1) (match_dup 2)))]
1615 "subs%?\\t%0, %1, %2"
1616 [(set_attr "conds" "set")
1617 (set_attr "type" "alus_sreg")]
;; SUBS with signed-overflow detection: CC_V is set from a DImode
;; sign-extended comparison against the SImode difference.
1620 (define_insn "subvsi3"
1621 [(set (reg:CC_V CC_REGNUM)
1624 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "l,r"))
1625 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
1626 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1627 (set (match_operand:SI 0 "s_register_operand" "=l,r")
1628 (minus:SI (match_dup 1) (match_dup 2)))]
1630 "subs%?\\t%0, %1, %2"
1631 [(set_attr "conds" "set")
1632 (set_attr "arch" "t2,*")
1633 (set_attr "length" "2,4")
1634 (set_attr "type" "alus_sreg")]
;; Overflow-detecting subtract with a constant minuend: RSBS (reverse
;; subtract), since the immediate is operand 1.
1637 (define_insn "subvsi3_imm1"
1638 [(set (reg:CC_V CC_REGNUM)
1641 (match_operand 1 "arm_immediate_operand" "I")
1642 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1643 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1644 (set (match_operand:SI 0 "s_register_operand" "=r")
1645 (minus:SI (match_dup 1) (match_dup 2)))]
1647 "rsbs%?\\t%0, %2, %1"
1648 [(set_attr "conds" "set")
1649 (set_attr "type" "alus_imm")]
;; Subtract-with-borrow: rd = op1 - op2 - borrow.  Alternatives allow an
;; immediate or Pz minuend; the last uses the shifted-register SBC trick.
1652 (define_insn "subsi3_carryin"
1653 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1654 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
1655 (match_operand:SI 2 "s_register_operand" "r,r,r"))
1656 (match_operand:SI 3 "arm_borrow_operation" "")))]
1661 sbc%?\\t%0, %2, %2, lsl #1"
1662 [(set_attr "conds" "use")
1663 (set_attr "arch" "*,a,t2")
1664 (set_attr "predicable" "yes")
1665 (set_attr "type" "adc_reg,adc_imm,alu_shift_imm")]
1668 ;; Special canonicalization of the above when operand1 == (const_int 1):
1669 ;; in this case the 'borrow' needs to be treated like subtracting from the carry.
;; RSC: reverse subtract with carry against the constant 1 (Arm-only insn).
1670 (define_insn "rsbsi_carryin_reg"
1671 [(set (match_operand:SI 0 "s_register_operand" "=r")
1672 (minus:SI (match_operand:SI 1 "arm_carry_operation" "")
1673 (match_operand:SI 2 "s_register_operand" "r")))]
1675 "rsc%?\\t%0, %2, #1"
1676 [(set_attr "conds" "use")
1677 (set_attr "predicable" "yes")
1678 (set_attr "type" "adc_imm")]
1681 ;; SBC performs Rn - Rm - ~C, but -Rm = ~Rm + 1 => Rn + ~Rm + 1 - ~C
1682 ;; => Rn + ~Rm + C, which is essentially ADC Rd, Rn, ~Rm
1683 (define_insn "*add_not_cin"
1684 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1686 (plus:SI (not:SI (match_operand:SI 1 "s_register_operand" "r,r"))
1687 (match_operand:SI 3 "arm_carry_operation" ""))
1688 (match_operand:SI 2 "arm_rhs_operand" "r,I")))]
;; Thumb-2 has no RSC, hence the immediate alternative is excluded there.
1689 "TARGET_ARM || (TARGET_THUMB2 && !CONST_INT_P (operands[2]))"
1693 [(set_attr "conds" "use")
1694 (set_attr "predicable" "yes")
1695 (set_attr "arch" "*,a")
1696 (set_attr "type" "adc_reg,adc_imm")]
1699 ;; On Arm we can also use the same trick when the non-inverted operand is
1700 ;; shifted, using RSC.
1701 (define_insn "add_not_shift_cin"
1702 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1704 (plus:SI (match_operator:SI 3 "shift_operator"
1705 [(match_operand:SI 1 "s_register_operand" "r,r")
1706 (match_operand:SI 2 "shift_amount_operand" "M,r")])
1707 (not:SI (match_operand:SI 4 "s_register_operand" "r,r")))
1708 (match_operand:SI 5 "arm_carry_operation" "")))]
1710 "rsc%?\\t%0, %4, %1%S3"
1711 [(set_attr "conds" "use")
1712 (set_attr "predicable" "yes")
1713 (set_attr "type" "alu_shift_imm,alu_shift_reg")]
;; Compare op1 against (op2 + borrow), extending via the SE iterator
;; (sign/zero extend) and producing the matching <CC_EXTEND> CC mode; the
;; arithmetic result is only a scratch.
1716 (define_insn "cmpsi3_carryin_<CC_EXTEND>out"
1717 [(set (reg:<CC_EXTEND> CC_REGNUM)
1718 (compare:<CC_EXTEND>
1719 (SE:DI (match_operand:SI 1 "s_register_operand" "0,r"))
1720 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1721 (SE:DI (match_operand:SI 2 "s_register_operand" "l,r")))))
1722 (clobber (match_scratch:SI 0 "=l,r"))]
1725 [(set_attr "conds" "set")
1726 (set_attr "arch" "t2,*")
1727 (set_attr "length" "2,4")
1728 (set_attr "type" "adc_reg")]
1731 ;; Similar to the above, but handling a constant which has a different
1732 ;; canonicalization.
1733 (define_insn "cmpsi3_imm_carryin_<CC_EXTEND>out"
1734 [(set (reg:<CC_EXTEND> CC_REGNUM)
1735 (compare:<CC_EXTEND>
1736 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1737 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1738 (match_operand:DI 2 "arm_adcimm_operand" "I,K"))))
1739 (clobber (match_scratch:SI 0 "=l,r"))]
;; K alternative: ADCS with the bitwise-inverted immediate (%B2).
1743 adcs\\t%0, %1, #%B2"
1744 [(set_attr "conds" "set")
1745 (set_attr "type" "adc_imm")]
1748 ;; Further canonicalization when the constant is zero.
1749 (define_insn "cmpsi3_0_carryin_<CC_EXTEND>out"
1750 [(set (reg:<CC_EXTEND> CC_REGNUM)
1751 (compare:<CC_EXTEND>
1752 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1753 (match_operand:DI 2 "arm_borrow_operation" "")))
1754 (clobber (match_scratch:SI 0 "=l,r"))]
1757 [(set_attr "conds" "set")
1758 (set_attr "type" "adc_imm")]
;; Subtract-with-borrow of a negatable immediate: SBC rd, rn, #-imm (%n2).
1761 (define_insn "*subsi3_carryin_const"
1762 [(set (match_operand:SI 0 "s_register_operand" "=r")
1764 (match_operand:SI 1 "s_register_operand" "r")
1765 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1766 (match_operand:SI 3 "arm_borrow_operation" "")))]
1768 "sbc\\t%0, %1, #%n2"
1769 [(set_attr "conds" "use")
1770 (set_attr "type" "adc_imm")]
;; Degenerate form: subtract only the borrow from a register.
1773 (define_insn "*subsi3_carryin_const0"
1774 [(set (match_operand:SI 0 "s_register_operand" "=r")
1775 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1776 (match_operand:SI 2 "arm_borrow_operation" "")))]
1779 [(set_attr "conds" "use")
1780 (set_attr "type" "adc_imm")]
;; Subtract-with-borrow where the subtrahend is a shifted register:
;; SBC rd, rn, rm <shift>; register-shift form is Arm-only (arch "a").
1783 (define_insn "*subsi3_carryin_shift"
1784 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1786 (match_operand:SI 1 "s_register_operand" "r,r")
1787 (match_operator:SI 2 "shift_operator"
1788 [(match_operand:SI 3 "s_register_operand" "r,r")
1789 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
1790 (match_operand:SI 5 "arm_borrow_operation" "")))]
1792 "sbc%?\\t%0, %1, %3%S2"
1793 [(set_attr "conds" "use")
1794 (set_attr "arch" "32,a")
1795 (set_attr "shift" "3")
1796 (set_attr "predicable" "yes")
1797 (set_attr "type" "alu_shift_imm,alu_shift_reg")]
;; Alternate association of the shifted subtract-with-borrow:
;; (op1 - borrow) - shifted-op; same SBC instruction as the base pattern.
1800 (define_insn "*subsi3_carryin_shift_alt"
1801 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1803 (match_operand:SI 1 "s_register_operand" "r,r")
1804 (match_operand:SI 5 "arm_borrow_operation" ""))
1805 (match_operator:SI 2 "shift_operator"
1806 [(match_operand:SI 3 "s_register_operand" "r,r")
1807 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
1809 "sbc%?\\t%0, %1, %3%S2"
1810 [(set_attr "conds" "use")
1811 (set_attr "arch" "32,a")
1812 (set_attr "shift" "3")
1813 (set_attr "predicable" "yes")
1814 (set_attr "type" "alu_shift_imm,alu_shift_reg")]
;; Reverse subtract-with-borrow of a shifted register: RSC (Arm-only).
1818 (define_insn "*rsbsi3_carryin_shift"
1819 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1821 (match_operator:SI 2 "shift_operator"
1822 [(match_operand:SI 3 "s_register_operand" "r,r")
1823 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1824 (match_operand:SI 1 "s_register_operand" "r,r"))
1825 (match_operand:SI 5 "arm_borrow_operation" "")))]
1827 "rsc%?\\t%0, %1, %3%S2"
1828 [(set_attr "conds" "use")
1829 (set_attr "predicable" "yes")
1830 (set_attr "type" "alu_shift_imm,alu_shift_reg")]
;; Same RSC, alternate association with the borrow inside the inner minus.
1833 (define_insn "*rsbsi3_carryin_shift_alt"
1834 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1836 (match_operator:SI 2 "shift_operator"
1837 [(match_operand:SI 3 "s_register_operand" "r,r")
1838 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1839 (match_operand:SI 5 "arm_borrow_operation" ""))
1840 (match_operand:SI 1 "s_register_operand" "r,r")))]
1842 "rsc%?\\t%0, %1, %3%S2"
1843 [(set_attr "conds" "use")
1844 (set_attr "predicable" "yes")
1845 (set_attr "type" "alu_shift_imm,alu_shift_reg")]
1848 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
;; NOTE(review): the "(define_peephole2" (or define_split) opener line for
;; this pattern appears to be missing from this extract.
;; The preparation statement rewrites the constant: ~(x - 1), so the MVN'd
;; shifted value reproduces (x << y) - 1.
1850 [(set (match_operand:SI 0 "s_register_operand" "")
1851 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1852 (match_operand:SI 2 "s_register_operand" ""))
1854 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1856 [(set (match_dup 3) (match_dup 1))
1857 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1859 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
;; Floating-point add expanders; gated on hard-float, and (for DF) on
;; having double-precision VFP (!TARGET_VFP_SINGLE).
1862 (define_expand "addsf3"
1863 [(set (match_operand:SF 0 "s_register_operand")
1864 (plus:SF (match_operand:SF 1 "s_register_operand")
1865 (match_operand:SF 2 "s_register_operand")))]
1866 "TARGET_32BIT && TARGET_HARD_FLOAT"
1870 (define_expand "adddf3"
1871 [(set (match_operand:DF 0 "s_register_operand")
1872 (plus:DF (match_operand:DF 1 "s_register_operand")
1873 (match_operand:DF 2 "s_register_operand")))]
1874 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; DImode subtract, expanded as an SI pair: a flag-setting low-word
;; subtract (SUBS / RSBS / NEGS depending on the low minuend) followed by
;; a borrow-consuming high-word subtract.
1878 (define_expand "subdi3"
1880 [(set (match_operand:DI 0 "s_register_operand")
1881 (minus:DI (match_operand:DI 1 "reg_or_int_operand")
1882 (match_operand:DI 2 "s_register_operand")))
1883 (clobber (reg:CC CC_REGNUM))])]
1888 if (!REG_P (operands[1]))
1889 operands[1] = force_reg (DImode, operands[1]);
1893 rtx lo_result, hi_result, lo_dest, hi_dest;
1894 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1897 /* Since operands[1] may be an integer, pass it second, so that
1898 any necessary simplifications will be done on the decomposed
1900 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1902 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
1903 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
1905 if (!arm_rhs_operand (lo_op1, SImode))
1906 lo_op1 = force_reg (SImode, lo_op1);
1908 if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
1909 || !arm_rhs_operand (hi_op1, SImode))
1910 hi_op1 = force_reg (SImode, hi_op1);
;; Choose the low-word form: negate, reverse-subtract-from-immediate, or
;; plain subtract; each selects the matching CC mode for the borrow.
1913 if (lo_op1 == const0_rtx)
1915 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1916 emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
1918 else if (CONST_INT_P (lo_op1))
1920 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1921 emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
1922 GEN_INT (~UINTVAL (lo_op1))));
1926 cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
1927 emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
1930 condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);
1932 if (hi_op1 == const0_rtx)
1933 emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
1935 emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));
1937 if (lo_result != lo_dest)
1938 emit_move_insn (lo_result, lo_dest);
1940 if (hi_result != hi_dest)
1941 emit_move_insn (hi_result, hi_dest);
;; SImode subtract expander: constant minuends are either forced to a
;; register (when early splitting is unprofitable) or split via
;; arm_split_constant into a synthesized sequence.
1948 (define_expand "subsi3"
1949 [(set (match_operand:SI 0 "s_register_operand")
1950 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1951 (match_operand:SI 2 "s_register_operand")))]
1954 if (CONST_INT_P (operands[1]))
1958 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1959 operands[1] = force_reg (SImode, operands[1]);
1962 arm_split_constant (MINUS, SImode, NULL_RTX,
1963 INTVAL (operands[1]), operands[0],
1965 optimize && can_create_pseudo_p ());
1969 else /* TARGET_THUMB1 */
1970 operands[1] = force_reg (SImode, operands[1]);
1975 ; ??? Check Thumb-2 split length
;; General SI subtract insn; the ?n alternative carries an arbitrary
;; constant minuend which is split post-reload via arm_split_constant.
1976 (define_insn_and_split "*arm_subsi3_insn"
1977 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
1978 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
1979 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
1991 "&& (CONST_INT_P (operands[1])
1992 && !const_ok_for_arm (INTVAL (operands[1])))"
1993 [(clobber (const_int 0))]
1995 arm_split_constant (MINUS, SImode, curr_insn,
1996 INTVAL (operands[1]), operands[0], operands[2], 0);
1999 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
2000 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
2001 (set_attr "predicable" "yes")
2002 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
2003 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
;; NOTE(review): opener line for the pattern below ("(define_peephole2")
;; appears to be missing from this extract.  It materializes a non-encodable
;; constant minuend (whose complement IS encodable) into a scratch first.
2007 [(match_scratch:SI 3 "r")
2008 (set (match_operand:SI 0 "arm_general_register_operand" "")
2009 (minus:SI (match_operand:SI 1 "const_int_operand" "")
2010 (match_operand:SI 2 "arm_general_register_operand" "")))]
2012 && !const_ok_for_arm (INTVAL (operands[1]))
2013 && const_ok_for_arm (~INTVAL (operands[1]))"
2014 [(set (match_dup 3) (match_dup 1))
2015 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
;; Flag-setting subtract (CC_NOOV): SUBS/RSBS depending on which operand
;; is the immediate; result also written.
2019 (define_insn "subsi3_compare0"
2020 [(set (reg:CC_NOOV CC_REGNUM)
2022 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
2023 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
2025 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2026 (minus:SI (match_dup 1) (match_dup 2)))]
2031 rsbs%?\\t%0, %2, %1"
2032 [(set_attr "conds" "set")
2033 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
;; As above but producing the full CC compare of op1 against op2.
2036 (define_insn "subsi3_compare"
2037 [(set (reg:CC CC_REGNUM)
2038 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
2039 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
2040 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2041 (minus:SI (match_dup 1) (match_dup 2)))]
2046 rsbs%?\\t%0, %2, %1"
2047 [(set_attr "conds" "set")
2048 (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
2051 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
2052 ;; rather than (0 cmp reg). This gives the same results for unsigned
2053 ;; and equality compares which is what we mostly need here.
;; RSBS rd, rm, #imm: operands[3] must be ~operands[1] (enforced by the
;; insn condition) so the canonical not-compare matches the real flags.
2054 (define_insn "rsb_imm_compare"
2055 [(set (reg:CC_RSB CC_REGNUM)
2056 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2057 (match_operand 3 "const_int_operand" "")))
2058 (set (match_operand:SI 0 "s_register_operand" "=r")
2059 (minus:SI (match_operand 1 "arm_immediate_operand" "I")
2061 "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
2063 [(set_attr "conds" "set")
2064 (set_attr "type" "alus_imm")]
2067 ;; Similarly, but the result is unused.
2068 (define_insn "rsb_imm_compare_scratch"
2069 [(set (reg:CC_RSB CC_REGNUM)
2070 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2071 (match_operand 1 "arm_not_immediate_operand" "K")))
2072 (clobber (match_scratch:SI 0 "=r"))]
2074 "rsbs\\t%0, %2, #%B1"
2075 [(set_attr "conds" "set")
2076 (set_attr "type" "alus_imm")]
2079 ;; Compare the sum of a value plus a carry against a constant. Uses
2080 ;; RSC, so the result is swapped. Only available on Arm
2081 (define_insn "rscsi3_<CC_EXTEND>out_scratch"
2082 [(set (reg:CC_SWP CC_REGNUM)
2084 (plus:DI (SE:DI (match_operand:SI 2 "s_register_operand" "r"))
2085 (match_operand:DI 3 "arm_borrow_operation" ""))
2086 (match_operand 1 "arm_immediate_operand" "I")))
2087 (clobber (match_scratch:SI 0 "=r"))]
2090 [(set_attr "conds" "set")
2091 (set_attr "type" "alus_imm")]
;; SBCS: subtract-with-borrow that also sets CC_B (unsigned borrow) from a
;; zero-extended DImode comparison; used for the high word of usubvdi4.
2094 (define_insn "usubvsi3_borrow"
2095 [(set (reg:CC_B CC_REGNUM)
2097 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
2098 (plus:DI (match_operand:DI 4 "arm_borrow_operation" "")
2100 (match_operand:SI 2 "s_register_operand" "l,r")))))
2101 (set (match_operand:SI 0 "s_register_operand" "=l,r")
2102 (minus:SI (match_dup 1)
2103 (plus:SI (match_operand:SI 3 "arm_borrow_operation" "")
2106 "sbcs%?\\t%0, %1, %2"
2107 [(set_attr "conds" "set")
2108 (set_attr "arch" "t2,*")
2109 (set_attr "length" "2,4")]
;; Immediate form; operand 3 is operand 2 masked to 32 bits (checked in the
;; condition).  K alternative emits ADCS with the inverted immediate (%B2).
2112 (define_insn "usubvsi3_borrow_imm"
2113 [(set (reg:CC_B CC_REGNUM)
2115 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
2116 (plus:DI (match_operand:DI 5 "arm_borrow_operation" "")
2117 (match_operand:DI 3 "const_int_operand" "n,n"))))
2118 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2119 (minus:SI (match_dup 1)
2120 (plus:SI (match_operand:SI 4 "arm_borrow_operation" "")
2121 (match_operand:SI 2 "arm_adcimm_operand" "I,K"))))]
2123 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[3])"
2126 adcs%?\\t%0, %1, #%B2"
2127 [(set_attr "conds" "set")
2128 (set_attr "type" "alus_imm")]
;; SBCS setting CC_V: the sign-extended DImode difference (including the
;; incoming borrow) is compared against the sign-extended SImode result to
;; detect signed overflow; used for the high word of subvdi4.
2131 (define_insn "subvsi3_borrow"
2132 [(set (reg:CC_V CC_REGNUM)
2136 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
2137 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
2138 (match_operand:DI 4 "arm_borrow_operation" ""))
2140 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2141 (match_operand:SI 3 "arm_borrow_operation" "")))))
2142 (set (match_operand:SI 0 "s_register_operand" "=l,r")
2143 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2146 "sbcs%?\\t%0, %1, %2"
2147 [(set_attr "conds" "set")
2148 (set_attr "arch" "t2,*")
2149 (set_attr "length" "2,4")]
;; Immediate form of the above; constant must already be sign-extended
;; (checked in the condition).  K alternative: ADCS with ~imm (%B2).
2152 (define_insn "subvsi3_borrow_imm"
2153 [(set (reg:CC_V CC_REGNUM)
2157 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
2158 (match_operand 2 "arm_adcimm_operand" "I,K"))
2159 (match_operand:DI 4 "arm_borrow_operation" ""))
2161 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2162 (match_operand:SI 3 "arm_borrow_operation" "")))))
2163 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2164 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2167 && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
2170 adcs%?\\t%0, %1, #%B2"
2171 [(set_attr "conds" "set")
2172 (set_attr "type" "alus_imm")]
;; Standard-named FP subtract expanders.  Both require hard float;
;; double-precision additionally requires a double-capable VFP
;; (!TARGET_VFP_SINGLE).  The actual insns are provided elsewhere
;; (vfp.md); these expanders only gate on the target condition.
;; NOTE(review): the expander bodies/closers after the condition lines
;; are missing from this listing (gaps after 2179 and 2187).
2175 (define_expand "subsf3"
2176 [(set (match_operand:SF 0 "s_register_operand")
2177 (minus:SF (match_operand:SF 1 "s_register_operand")
2178 (match_operand:SF 2 "s_register_operand")))]
2179 "TARGET_32BIT && TARGET_HARD_FLOAT"
2183 (define_expand "subdf3"
2184 [(set (match_operand:DF 0 "s_register_operand")
2185 (minus:DF (match_operand:DF 1 "s_register_operand")
2186 (match_operand:DF 2 "s_register_operand")))]
2187 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2192 ;; Multiplication insns
2194 (define_expand "mulhi3"
2195 [(set (match_operand:HI 0 "s_register_operand")
2196 (mult:HI (match_operand:HI 1 "s_register_operand")
2197 (match_operand:HI 2 "s_register_operand")))]
2198 "TARGET_DSP_MULTIPLY"
2201 rtx result = gen_reg_rtx (SImode);
2202 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
2203 emit_move_insn (operands[0], gen_lowpart (HImode, result));
;; Standard-named 32-bit multiply expander.  Note the operands are listed
;; with op2 first: MUL on pre-v6 ARM cannot have Rd == Rm, and the insn
;; patterns below use '%' commutativity plus earlyclobbers to enforce that.
;; NOTE(review): the expander's condition line(s) after 2211 are missing
;; from this listing.
2208 (define_expand "mulsi3"
2209 [(set (match_operand:SI 0 "s_register_operand")
2210 (mult:SI (match_operand:SI 2 "s_register_operand")
2211 (match_operand:SI 1 "s_register_operand")))]
2216 ;; Use `&' and then `0' to prevent operands 0 and 2 being the same
2218 [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
2219 (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
2220 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
2222 "mul%?\\t%0, %2, %1"
2223 [(set_attr "type" "mul")
2224 (set_attr "predicable" "yes")
2225 (set_attr "arch" "t2,v6,nov6,nov6")
2226 (set_attr "length" "4")
2227 (set_attr "predicable_short_it" "yes,no,*,*")]
2230 ;; MLA and MLS instruction. Use operand 1 for the accumulator to prefer
2231 ;; reusing the same register.
2234 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
2236 (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
2237 (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
2238 (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
2240 "mla%?\\t%0, %3, %2, %1"
2241 [(set_attr "type" "mla")
2242 (set_attr "predicable" "yes")
2243 (set_attr "arch" "v6,nov6,nov6,nov6")]
2247 [(set (match_operand:SI 0 "s_register_operand" "=r")
2249 (match_operand:SI 1 "s_register_operand" "r")
2250 (mult:SI (match_operand:SI 3 "s_register_operand" "r")
2251 (match_operand:SI 2 "s_register_operand" "r"))))]
2252 "TARGET_32BIT && arm_arch_thumb2"
2253 "mls%?\\t%0, %3, %2, %1"
2254 [(set_attr "type" "mla")
2255 (set_attr "predicable" "yes")]
2258 (define_insn "*mulsi3_compare0"
2259 [(set (reg:CC_NOOV CC_REGNUM)
2260 (compare:CC_NOOV (mult:SI
2261 (match_operand:SI 2 "s_register_operand" "r,r")
2262 (match_operand:SI 1 "s_register_operand" "%0,r"))
2264 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
2265 (mult:SI (match_dup 2) (match_dup 1)))]
2266 "TARGET_ARM && !arm_arch6"
2267 "muls%?\\t%0, %2, %1"
2268 [(set_attr "conds" "set")
2269 (set_attr "type" "muls")]
2272 (define_insn "*mulsi3_compare0_v6"
2273 [(set (reg:CC_NOOV CC_REGNUM)
2274 (compare:CC_NOOV (mult:SI
2275 (match_operand:SI 2 "s_register_operand" "r")
2276 (match_operand:SI 1 "s_register_operand" "r"))
2278 (set (match_operand:SI 0 "s_register_operand" "=r")
2279 (mult:SI (match_dup 2) (match_dup 1)))]
2280 "TARGET_ARM && arm_arch6 && optimize_size"
2281 "muls%?\\t%0, %2, %1"
2282 [(set_attr "conds" "set")
2283 (set_attr "type" "muls")]
2286 (define_insn "*mulsi_compare0_scratch"
2287 [(set (reg:CC_NOOV CC_REGNUM)
2288 (compare:CC_NOOV (mult:SI
2289 (match_operand:SI 2 "s_register_operand" "r,r")
2290 (match_operand:SI 1 "s_register_operand" "%0,r"))
2292 (clobber (match_scratch:SI 0 "=&r,&r"))]
2293 "TARGET_ARM && !arm_arch6"
2294 "muls%?\\t%0, %2, %1"
2295 [(set_attr "conds" "set")
2296 (set_attr "type" "muls")]
2299 (define_insn "*mulsi_compare0_scratch_v6"
2300 [(set (reg:CC_NOOV CC_REGNUM)
2301 (compare:CC_NOOV (mult:SI
2302 (match_operand:SI 2 "s_register_operand" "r")
2303 (match_operand:SI 1 "s_register_operand" "r"))
2305 (clobber (match_scratch:SI 0 "=r"))]
2306 "TARGET_ARM && arm_arch6 && optimize_size"
2307 "muls%?\\t%0, %2, %1"
2308 [(set_attr "conds" "set")
2309 (set_attr "type" "muls")]
2312 (define_insn "*mulsi3addsi_compare0"
2313 [(set (reg:CC_NOOV CC_REGNUM)
2316 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2317 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2318 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
2320 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
2321 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2323 "TARGET_ARM && arm_arch6"
2324 "mlas%?\\t%0, %2, %1, %3"
2325 [(set_attr "conds" "set")
2326 (set_attr "type" "mlas")]
2329 (define_insn "*mulsi3addsi_compare0_v6"
2330 [(set (reg:CC_NOOV CC_REGNUM)
2333 (match_operand:SI 2 "s_register_operand" "r")
2334 (match_operand:SI 1 "s_register_operand" "r"))
2335 (match_operand:SI 3 "s_register_operand" "r"))
2337 (set (match_operand:SI 0 "s_register_operand" "=r")
2338 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2340 "TARGET_ARM && arm_arch6 && optimize_size"
2341 "mlas%?\\t%0, %2, %1, %3"
2342 [(set_attr "conds" "set")
2343 (set_attr "type" "mlas")]
2346 (define_insn "*mulsi3addsi_compare0_scratch"
2347 [(set (reg:CC_NOOV CC_REGNUM)
2350 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2351 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2352 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
2354 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
2355 "TARGET_ARM && !arm_arch6"
2356 "mlas%?\\t%0, %2, %1, %3"
2357 [(set_attr "conds" "set")
2358 (set_attr "type" "mlas")]
2361 (define_insn "*mulsi3addsi_compare0_scratch_v6"
2362 [(set (reg:CC_NOOV CC_REGNUM)
2365 (match_operand:SI 2 "s_register_operand" "r")
2366 (match_operand:SI 1 "s_register_operand" "r"))
2367 (match_operand:SI 3 "s_register_operand" "r"))
2369 (clobber (match_scratch:SI 0 "=r"))]
2370 "TARGET_ARM && arm_arch6 && optimize_size"
2371 "mlas%?\\t%0, %2, %1, %3"
2372 [(set_attr "conds" "set")
2373 (set_attr "type" "mlas")]
2376 ;; 32x32->64 widening multiply.
2377 ;; The only difference between the v3-5 and v6+ versions is the requirement
2378 ;; that the output does not overlap with either input.
2380 (define_expand "<Us>mulsidi3"
2381 [(set (match_operand:DI 0 "s_register_operand")
2383 (SE:DI (match_operand:SI 1 "s_register_operand"))
2384 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
2387 emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
2388 gen_highpart (SImode, operands[0]),
2389 operands[1], operands[2]));
2394 (define_insn "<US>mull"
2395 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2397 (match_operand:SI 2 "s_register_operand" "%r,r")
2398 (match_operand:SI 3 "s_register_operand" "r,r")))
2399 (set (match_operand:SI 1 "s_register_operand" "=r,&r")
2402 (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
2405 "<US>mull%?\\t%0, %1, %2, %3"
2406 [(set_attr "type" "umull")
2407 (set_attr "predicable" "yes")
2408 (set_attr "arch" "v6,nov6")]
2411 (define_expand "<Us>maddsidi4"
2412 [(set (match_operand:DI 0 "s_register_operand")
2415 (SE:DI (match_operand:SI 1 "s_register_operand"))
2416 (SE:DI (match_operand:SI 2 "s_register_operand")))
2417 (match_operand:DI 3 "s_register_operand")))]
2420 emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
2421 gen_lowpart (SImode, operands[3]),
2422 gen_highpart (SImode, operands[0]),
2423 gen_highpart (SImode, operands[3]),
2424 operands[1], operands[2]));
2429 (define_insn "<US>mlal"
2430 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2433 (match_operand:SI 4 "s_register_operand" "%r,r")
2434 (match_operand:SI 5 "s_register_operand" "r,r"))
2435 (match_operand:SI 1 "s_register_operand" "0,0")))
2436 (set (match_operand:SI 2 "s_register_operand" "=r,&r")
2441 (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
2442 (zero_extend:DI (match_dup 1)))
2444 (match_operand:SI 3 "s_register_operand" "2,2")))]
2446 "<US>mlal%?\\t%0, %2, %4, %5"
2447 [(set_attr "type" "umlal")
2448 (set_attr "predicable" "yes")
2449 (set_attr "arch" "v6,nov6")]
2452 (define_expand "<US>mulsi3_highpart"
2454 [(set (match_operand:SI 0 "s_register_operand")
2458 (SE:DI (match_operand:SI 1 "s_register_operand"))
2459 (SE:DI (match_operand:SI 2 "s_register_operand")))
2461 (clobber (match_scratch:SI 3 ""))])]
2466 (define_insn "*<US>mull_high"
2467 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
2471 (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
2472 (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
2474 (clobber (match_scratch:SI 3 "=r,&r,&r"))]
2476 "<US>mull%?\\t%3, %0, %2, %1"
2477 [(set_attr "type" "umull")
2478 (set_attr "predicable" "yes")
2479 (set_attr "arch" "v6,nov6,nov6")]
;; 16x16->32 signed multiply (SMULBB: bottom halfword of each operand),
;; available when the target has the DSP multiply extension.
;; NOTE(review): line 2486 (presumably the second sign_extend:SI) and the
;; closing lines after 2491 are missing from this listing.
2482 (define_insn "mulhisi3"
2483 [(set (match_operand:SI 0 "s_register_operand" "=r")
2484 (mult:SI (sign_extend:SI
2485 (match_operand:HI 1 "s_register_operand" "%r"))
2487 (match_operand:HI 2 "s_register_operand" "r"))))]
2488 "TARGET_DSP_MULTIPLY"
2489 "smulbb%?\\t%0, %1, %2"
2490 [(set_attr "type" "smulxy")
2491 (set_attr "predicable" "yes")]
2494 (define_insn "*mulhisi3tb"
2495 [(set (match_operand:SI 0 "s_register_operand" "=r")
2496 (mult:SI (ashiftrt:SI
2497 (match_operand:SI 1 "s_register_operand" "r")
2500 (match_operand:HI 2 "s_register_operand" "r"))))]
2501 "TARGET_DSP_MULTIPLY"
2502 "smultb%?\\t%0, %1, %2"
2503 [(set_attr "type" "smulxy")
2504 (set_attr "predicable" "yes")]
2507 (define_insn "*mulhisi3bt"
2508 [(set (match_operand:SI 0 "s_register_operand" "=r")
2509 (mult:SI (sign_extend:SI
2510 (match_operand:HI 1 "s_register_operand" "r"))
2512 (match_operand:SI 2 "s_register_operand" "r")
2514 "TARGET_DSP_MULTIPLY"
2515 "smulbt%?\\t%0, %1, %2"
2516 [(set_attr "type" "smulxy")
2517 (set_attr "predicable" "yes")]
2520 (define_insn "*mulhisi3tt"
2521 [(set (match_operand:SI 0 "s_register_operand" "=r")
2522 (mult:SI (ashiftrt:SI
2523 (match_operand:SI 1 "s_register_operand" "r")
2526 (match_operand:SI 2 "s_register_operand" "r")
2528 "TARGET_DSP_MULTIPLY"
2529 "smultt%?\\t%0, %1, %2"
2530 [(set_attr "type" "smulxy")
2531 (set_attr "predicable" "yes")]
2534 (define_expand "maddhisi4"
2535 [(set (match_operand:SI 0 "s_register_operand")
2536 (plus:SI (mult:SI (sign_extend:SI
2537 (match_operand:HI 1 "s_register_operand"))
2539 (match_operand:HI 2 "s_register_operand")))
2540 (match_operand:SI 3 "s_register_operand")))]
2541 "TARGET_DSP_MULTIPLY"
2543 /* If this function reads the Q bit from ACLE intrinsics break up the
2544 multiplication and accumulation as an overflow during accumulation will
2545 clobber the Q flag. */
2548 rtx tmp = gen_reg_rtx (SImode);
2549 emit_insn (gen_mulhisi3 (tmp, operands[1], operands[2]));
2550 emit_insn (gen_addsi3 (operands[0], tmp, operands[3]));
2556 (define_insn "*arm_maddhisi4"
2557 [(set (match_operand:SI 0 "s_register_operand" "=r")
2558 (plus:SI (mult:SI (sign_extend:SI
2559 (match_operand:HI 1 "s_register_operand" "r"))
2561 (match_operand:HI 2 "s_register_operand" "r")))
2562 (match_operand:SI 3 "s_register_operand" "r")))]
2563 "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2564 "smlabb%?\\t%0, %1, %2, %3"
2565 [(set_attr "type" "smlaxy")
2566 (set_attr "predicable" "yes")]
2569 (define_insn "arm_smlabb_setq"
2570 [(set (match_operand:SI 0 "s_register_operand" "=r")
2571 (plus:SI (mult:SI (sign_extend:SI
2572 (match_operand:HI 1 "s_register_operand" "r"))
2574 (match_operand:HI 2 "s_register_operand" "r")))
2575 (match_operand:SI 3 "s_register_operand" "r")))
2576 (set (reg:CC APSRQ_REGNUM)
2577 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2578 "TARGET_DSP_MULTIPLY"
2579 "smlabb%?\\t%0, %1, %2, %3"
2580 [(set_attr "type" "smlaxy")
2581 (set_attr "predicable" "yes")]
;; ACLE __smlabb builtin expander: takes the low halfwords of operands 1
;; and 2 and dispatches either to the Q-flag-setting pattern
;; (arm_smlabb_setq) or the plain maddhisi4 pattern -- presumably chosen
;; by whether the function reads the Q bit, as in the maddhisi4 expander
;; above; the selecting condition line is missing from this listing
;; (gaps at 2590, 2593, 2595, and the closer after 2596), so confirm
;; against upstream arm.md.
2584 (define_expand "arm_smlabb"
2585 [(match_operand:SI 0 "s_register_operand")
2586 (match_operand:SI 1 "s_register_operand")
2587 (match_operand:SI 2 "s_register_operand")
2588 (match_operand:SI 3 "s_register_operand")]
2589 "TARGET_DSP_MULTIPLY"
2591 rtx mult1 = gen_lowpart (HImode, operands[1]);
2592 rtx mult2 = gen_lowpart (HImode, operands[2]);
2594 emit_insn (gen_arm_smlabb_setq (operands[0], mult1, mult2, operands[3]));
2596 emit_insn (gen_maddhisi4 (operands[0], mult1, mult2, operands[3]));
2601 ;; Note: there is no maddhisi4ibt because this one is canonical form
2602 (define_insn "maddhisi4tb"
2603 [(set (match_operand:SI 0 "s_register_operand" "=r")
2604 (plus:SI (mult:SI (ashiftrt:SI
2605 (match_operand:SI 1 "s_register_operand" "r")
2608 (match_operand:HI 2 "s_register_operand" "r")))
2609 (match_operand:SI 3 "s_register_operand" "r")))]
2610 "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2611 "smlatb%?\\t%0, %1, %2, %3"
2612 [(set_attr "type" "smlaxy")
2613 (set_attr "predicable" "yes")]
2616 (define_insn "arm_smlatb_setq"
2617 [(set (match_operand:SI 0 "s_register_operand" "=r")
2618 (plus:SI (mult:SI (ashiftrt:SI
2619 (match_operand:SI 1 "s_register_operand" "r")
2622 (match_operand:HI 2 "s_register_operand" "r")))
2623 (match_operand:SI 3 "s_register_operand" "r")))
2624 (set (reg:CC APSRQ_REGNUM)
2625 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2626 "TARGET_DSP_MULTIPLY"
2627 "smlatb%?\\t%0, %1, %2, %3"
2628 [(set_attr "type" "smlaxy")
2629 (set_attr "predicable" "yes")]
2632 (define_expand "arm_smlatb"
2633 [(match_operand:SI 0 "s_register_operand")
2634 (match_operand:SI 1 "s_register_operand")
2635 (match_operand:SI 2 "s_register_operand")
2636 (match_operand:SI 3 "s_register_operand")]
2637 "TARGET_DSP_MULTIPLY"
2639 rtx mult2 = gen_lowpart (HImode, operands[2]);
2641 emit_insn (gen_arm_smlatb_setq (operands[0], operands[1],
2642 mult2, operands[3]));
2644 emit_insn (gen_maddhisi4tb (operands[0], operands[1],
2645 mult2, operands[3]));
2650 (define_insn "maddhisi4tt"
2651 [(set (match_operand:SI 0 "s_register_operand" "=r")
2652 (plus:SI (mult:SI (ashiftrt:SI
2653 (match_operand:SI 1 "s_register_operand" "r")
2656 (match_operand:SI 2 "s_register_operand" "r")
2658 (match_operand:SI 3 "s_register_operand" "r")))]
2659 "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2660 "smlatt%?\\t%0, %1, %2, %3"
2661 [(set_attr "type" "smlaxy")
2662 (set_attr "predicable" "yes")]
2665 (define_insn "arm_smlatt_setq"
2666 [(set (match_operand:SI 0 "s_register_operand" "=r")
2667 (plus:SI (mult:SI (ashiftrt:SI
2668 (match_operand:SI 1 "s_register_operand" "r")
2671 (match_operand:SI 2 "s_register_operand" "r")
2673 (match_operand:SI 3 "s_register_operand" "r")))
2674 (set (reg:CC APSRQ_REGNUM)
2675 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2676 "TARGET_DSP_MULTIPLY"
2677 "smlatt%?\\t%0, %1, %2, %3"
2678 [(set_attr "type" "smlaxy")
2679 (set_attr "predicable" "yes")]
2682 (define_expand "arm_smlatt"
2683 [(match_operand:SI 0 "s_register_operand")
2684 (match_operand:SI 1 "s_register_operand")
2685 (match_operand:SI 2 "s_register_operand")
2686 (match_operand:SI 3 "s_register_operand")]
2687 "TARGET_DSP_MULTIPLY"
2690 emit_insn (gen_arm_smlatt_setq (operands[0], operands[1],
2691 operands[2], operands[3]));
2693 emit_insn (gen_maddhisi4tt (operands[0], operands[1],
2694 operands[2], operands[3]));
;; 16x16->64 signed multiply-accumulate (SMLALBB): op0(DI) = op3(DI) +
;; sext(op1[15:0]) * sext(op2[15:0]); the accumulator is tied to the
;; output ("0" constraint on operand 3).  %Q0/%R0 print the low/high
;; SImode halves of the DI destination.
;; NOTE(review): lines 2701 and 2704 of the RTL pattern are missing from
;; this listing.
2699 (define_insn "maddhidi4"
2700 [(set (match_operand:DI 0 "s_register_operand" "=r")
2702 (mult:DI (sign_extend:DI
2703 (match_operand:HI 1 "s_register_operand" "r"))
2705 (match_operand:HI 2 "s_register_operand" "r")))
2706 (match_operand:DI 3 "s_register_operand" "0")))]
2707 "TARGET_DSP_MULTIPLY"
2708 "smlalbb%?\\t%Q0, %R0, %1, %2"
2709 [(set_attr "type" "smlalxy")
2710 (set_attr "predicable" "yes")])
2712 ;; Note: there is no maddhidi4ibt because this one is canonical form
2713 (define_insn "*maddhidi4tb"
2714 [(set (match_operand:DI 0 "s_register_operand" "=r")
2716 (mult:DI (sign_extend:DI
2718 (match_operand:SI 1 "s_register_operand" "r")
2721 (match_operand:HI 2 "s_register_operand" "r")))
2722 (match_operand:DI 3 "s_register_operand" "0")))]
2723 "TARGET_DSP_MULTIPLY"
2724 "smlaltb%?\\t%Q0, %R0, %1, %2"
2725 [(set_attr "type" "smlalxy")
2726 (set_attr "predicable" "yes")])
2728 (define_insn "*maddhidi4tt"
2729 [(set (match_operand:DI 0 "s_register_operand" "=r")
2731 (mult:DI (sign_extend:DI
2733 (match_operand:SI 1 "s_register_operand" "r")
2737 (match_operand:SI 2 "s_register_operand" "r")
2739 (match_operand:DI 3 "s_register_operand" "0")))]
2740 "TARGET_DSP_MULTIPLY"
2741 "smlaltt%?\\t%Q0, %R0, %1, %2"
2742 [(set_attr "type" "smlalxy")
2743 (set_attr "predicable" "yes")])
2745 (define_insn "arm_<smlaw_op><add_clobber_q_name>_insn"
2746 [(set (match_operand:SI 0 "s_register_operand" "=r")
2748 [(match_operand:SI 1 "s_register_operand" "r")
2749 (match_operand:SI 2 "s_register_operand" "r")
2750 (match_operand:SI 3 "s_register_operand" "r")]
2752 "TARGET_DSP_MULTIPLY && <add_clobber_q_pred>"
2753 "<smlaw_op>%?\\t%0, %1, %2, %3"
2754 [(set_attr "type" "smlaxy")
2755 (set_attr "predicable" "yes")]
2758 (define_expand "arm_<smlaw_op>"
2759 [(set (match_operand:SI 0 "s_register_operand")
2761 [(match_operand:SI 1 "s_register_operand")
2762 (match_operand:SI 2 "s_register_operand")
2763 (match_operand:SI 3 "s_register_operand")]
2765 "TARGET_DSP_MULTIPLY"
2768 emit_insn (gen_arm_<smlaw_op>_setq_insn (operands[0], operands[1],
2769 operands[2], operands[3]));
2771 emit_insn (gen_arm_<smlaw_op>_insn (operands[0], operands[1],
2772 operands[2], operands[3]));
;; Standard-named FP multiply and divide expanders, gating only on target
;; conditions (the insns themselves live in vfp.md).  Single precision
;; needs hard float; double precision needs a double-capable VFP
;; (!TARGET_VFP_SINGLE / TARGET_VFP_DOUBLE).
;; NOTE(review): the bodies/closers after each condition line are missing
;; from this listing (gaps after 2781, 2789, 2799, 2806).
2777 (define_expand "mulsf3"
2778 [(set (match_operand:SF 0 "s_register_operand")
2779 (mult:SF (match_operand:SF 1 "s_register_operand")
2780 (match_operand:SF 2 "s_register_operand")))]
2781 "TARGET_32BIT && TARGET_HARD_FLOAT"
2785 (define_expand "muldf3"
2786 [(set (match_operand:DF 0 "s_register_operand")
2787 (mult:DF (match_operand:DF 1 "s_register_operand")
2788 (match_operand:DF 2 "s_register_operand")))]
2789 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2795 (define_expand "divsf3"
2796 [(set (match_operand:SF 0 "s_register_operand")
2797 (div:SF (match_operand:SF 1 "s_register_operand")
2798 (match_operand:SF 2 "s_register_operand")))]
2799 "TARGET_32BIT && TARGET_HARD_FLOAT"
2802 (define_expand "divdf3"
2803 [(set (match_operand:DF 0 "s_register_operand")
2804 (div:DF (match_operand:DF 1 "s_register_operand")
2805 (match_operand:DF 2 "s_register_operand")))]
2806 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
2810 ; Expand logical operations. The mid-end expander does not split off memory
2811 ; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
2812 ; So an explicit expander is needed to generate better code.
2814 (define_expand "<LOGICAL:optab>di3"
2815 [(set (match_operand:DI 0 "s_register_operand")
2816 (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
2817 (match_operand:DI 2 "arm_<optab>di_operand")))]
2820 rtx low = simplify_gen_binary (<CODE>, SImode,
2821 gen_lowpart (SImode, operands[1]),
2822 gen_lowpart (SImode, operands[2]));
2823 rtx high = simplify_gen_binary (<CODE>, SImode,
2824 gen_highpart (SImode, operands[1]),
2825 gen_highpart_mode (SImode, DImode,
2828 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2829 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2834 (define_expand "one_cmpldi2"
2835 [(set (match_operand:DI 0 "s_register_operand")
2836 (not:DI (match_operand:DI 1 "s_register_operand")))]
2839 rtx low = simplify_gen_unary (NOT, SImode,
2840 gen_lowpart (SImode, operands[1]),
2842 rtx high = simplify_gen_unary (NOT, SImode,
2843 gen_highpart_mode (SImode, DImode,
2847 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2848 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2853 ;; Split DImode and, ior, xor operations. Simply perform the logical
2854 ;; operation on the upper and lower halves of the registers.
2855 ;; This is needed for atomic operations in arm_split_atomic_op.
2856 ;; Avoid splitting IWMMXT instructions.
2858 [(set (match_operand:DI 0 "s_register_operand" "")
2859 (match_operator:DI 6 "logical_binary_operator"
2860 [(match_operand:DI 1 "s_register_operand" "")
2861 (match_operand:DI 2 "s_register_operand" "")]))]
2862 "TARGET_32BIT && reload_completed
2863 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2864 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2865 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
2868 operands[3] = gen_highpart (SImode, operands[0]);
2869 operands[0] = gen_lowpart (SImode, operands[0]);
2870 operands[4] = gen_highpart (SImode, operands[1]);
2871 operands[1] = gen_lowpart (SImode, operands[1]);
2872 operands[5] = gen_highpart (SImode, operands[2]);
2873 operands[2] = gen_lowpart (SImode, operands[2]);
2877 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
2878 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
2880 [(set (match_operand:DI 0 "s_register_operand")
2881 (not:DI (match_operand:DI 1 "s_register_operand")))]
2883 [(set (match_dup 0) (not:SI (match_dup 1)))
2884 (set (match_dup 2) (not:SI (match_dup 3)))]
2887 operands[2] = gen_highpart (SImode, operands[0]);
2888 operands[0] = gen_lowpart (SImode, operands[0]);
2889 operands[3] = gen_highpart (SImode, operands[1]);
2890 operands[1] = gen_lowpart (SImode, operands[1]);
2894 (define_expand "andsi3"
2895 [(set (match_operand:SI 0 "s_register_operand")
2896 (and:SI (match_operand:SI 1 "s_register_operand")
2897 (match_operand:SI 2 "reg_or_int_operand")))]
2902 if (CONST_INT_P (operands[2]))
2904 if (INTVAL (operands[2]) == 255 && arm_arch6)
2906 operands[1] = convert_to_mode (QImode, operands[1], 1);
2907 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2911 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
2912 operands[2] = force_reg (SImode, operands[2]);
2915 arm_split_constant (AND, SImode, NULL_RTX,
2916 INTVAL (operands[2]), operands[0],
2918 optimize && can_create_pseudo_p ());
2924 else /* TARGET_THUMB1 */
2926 if (!CONST_INT_P (operands[2]))
2928 rtx tmp = force_reg (SImode, operands[2]);
2929 if (rtx_equal_p (operands[0], operands[1]))
2933 operands[2] = operands[1];
2941 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2943 operands[2] = force_reg (SImode,
2944 GEN_INT (~INTVAL (operands[2])));
2946 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2951 for (i = 9; i <= 31; i++)
2953 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
2955 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2959 else if ((HOST_WIDE_INT_1 << i) - 1
2960 == ~INTVAL (operands[2]))
2962 rtx shift = GEN_INT (i);
2963 rtx reg = gen_reg_rtx (SImode);
2965 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2966 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2972 operands[2] = force_reg (SImode, operands[2]);
2978 ; ??? Check split length for Thumb-2
2979 (define_insn_and_split "*arm_andsi3_insn"
2980 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2981 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2982 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2987 bic%?\\t%0, %1, #%B2
2991 && CONST_INT_P (operands[2])
2992 && !(const_ok_for_arm (INTVAL (operands[2]))
2993 || const_ok_for_arm (~INTVAL (operands[2])))"
2994 [(clobber (const_int 0))]
2996 arm_split_constant (AND, SImode, curr_insn,
2997 INTVAL (operands[2]), operands[0], operands[1], 0);
3000 [(set_attr "length" "4,4,4,4,16")
3001 (set_attr "predicable" "yes")
3002 (set_attr "predicable_short_it" "no,yes,no,no,no")
3003 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
3006 (define_insn "*andsi3_compare0"
3007 [(set (reg:CC_NOOV CC_REGNUM)
3009 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
3010 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
3012 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3013 (and:SI (match_dup 1) (match_dup 2)))]
3017 bics%?\\t%0, %1, #%B2
3018 ands%?\\t%0, %1, %2"
3019 [(set_attr "conds" "set")
3020 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
3023 (define_insn "*andsi3_compare0_scratch"
3024 [(set (reg:CC_NOOV CC_REGNUM)
3026 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
3027 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
3029 (clobber (match_scratch:SI 2 "=X,r,X"))]
3033 bics%?\\t%2, %0, #%B1
3035 [(set_attr "conds" "set")
3036 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
3039 (define_insn "*zeroextractsi_compare0_scratch"
3040 [(set (reg:CC_NOOV CC_REGNUM)
3041 (compare:CC_NOOV (zero_extract:SI
3042 (match_operand:SI 0 "s_register_operand" "r")
3043 (match_operand 1 "const_int_operand" "n")
3044 (match_operand 2 "const_int_operand" "n"))
3047 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
3048 && INTVAL (operands[1]) > 0
3049 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
3050 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
3052 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
3053 << INTVAL (operands[2]));
3054 output_asm_insn (\"tst%?\\t%0, %1\", operands);
3057 [(set_attr "conds" "set")
3058 (set_attr "predicable" "yes")
3059 (set_attr "type" "logics_imm")]
3062 (define_insn_and_split "*ne_zeroextractsi"
3063 [(set (match_operand:SI 0 "s_register_operand" "=r")
3064 (ne:SI (zero_extract:SI
3065 (match_operand:SI 1 "s_register_operand" "r")
3066 (match_operand:SI 2 "const_int_operand" "n")
3067 (match_operand:SI 3 "const_int_operand" "n"))
3069 (clobber (reg:CC CC_REGNUM))]
3071 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3072 && INTVAL (operands[2]) > 0
3073 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3074 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
3077 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3078 && INTVAL (operands[2]) > 0
3079 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3080 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
3081 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
3082 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
3084 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
3086 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
3087 (match_dup 0) (const_int 1)))]
3089 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
3090 << INTVAL (operands[3]));
3092 [(set_attr "conds" "clob")
3093 (set (attr "length")
3094 (if_then_else (eq_attr "is_thumb" "yes")
3097 (set_attr "type" "multiple")]
3100 (define_insn_and_split "*ne_zeroextractsi_shifted"
3101 [(set (match_operand:SI 0 "s_register_operand" "=r")
3102 (ne:SI (zero_extract:SI
3103 (match_operand:SI 1 "s_register_operand" "r")
3104 (match_operand:SI 2 "const_int_operand" "n")
3107 (clobber (reg:CC CC_REGNUM))]
3111 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
3112 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
3114 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
3116 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
3117 (match_dup 0) (const_int 1)))]
3119 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
3121 [(set_attr "conds" "clob")
3122 (set_attr "length" "8")
3123 (set_attr "type" "multiple")]
3126 (define_insn_and_split "*ite_ne_zeroextractsi"
3127 [(set (match_operand:SI 0 "s_register_operand" "=r")
3128 (if_then_else:SI (ne (zero_extract:SI
3129 (match_operand:SI 1 "s_register_operand" "r")
3130 (match_operand:SI 2 "const_int_operand" "n")
3131 (match_operand:SI 3 "const_int_operand" "n"))
3133 (match_operand:SI 4 "arm_not_operand" "rIK")
3135 (clobber (reg:CC CC_REGNUM))]
3137 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3138 && INTVAL (operands[2]) > 0
3139 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3140 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
3141 && !reg_overlap_mentioned_p (operands[0], operands[4])"
3144 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3145 && INTVAL (operands[2]) > 0
3146 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3147 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
3148 && !reg_overlap_mentioned_p (operands[0], operands[4])"
3149 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
3150 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
3152 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
3154 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
3155 (match_dup 0) (match_dup 4)))]
3157 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
3158 << INTVAL (operands[3]));
3160 [(set_attr "conds" "clob")
3161 (set_attr "length" "8")
3162 (set_attr "type" "multiple")]
3165 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
3166 [(set (match_operand:SI 0 "s_register_operand" "=r")
3167 (if_then_else:SI (ne (zero_extract:SI
3168 (match_operand:SI 1 "s_register_operand" "r")
3169 (match_operand:SI 2 "const_int_operand" "n")
3172 (match_operand:SI 3 "arm_not_operand" "rIK")
3174 (clobber (reg:CC CC_REGNUM))]
3175 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
3177 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
3178 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
3179 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
3181 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
3183 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
3184 (match_dup 0) (match_dup 3)))]
3186 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
3188 [(set_attr "conds" "clob")
3189 (set_attr "length" "8")
3190 (set_attr "type" "multiple")]
3193 ;; ??? Use the Thumb-2 bitfield insert/extract instructions.
3195 [(set (match_operand:SI 0 "s_register_operand" "")
3196 (match_operator:SI 1 "shiftable_operator"
3197 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3198 (match_operand:SI 3 "const_int_operand" "")
3199 (match_operand:SI 4 "const_int_operand" ""))
3200 (match_operand:SI 5 "s_register_operand" "")]))
3201 (clobber (match_operand:SI 6 "s_register_operand" ""))]
3203 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
3206 [(lshiftrt:SI (match_dup 6) (match_dup 4))
3209 HOST_WIDE_INT temp = INTVAL (operands[3]);
3211 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
3212 operands[4] = GEN_INT (32 - temp);
3217 [(set (match_operand:SI 0 "s_register_operand" "")
3218 (match_operator:SI 1 "shiftable_operator"
3219 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3220 (match_operand:SI 3 "const_int_operand" "")
3221 (match_operand:SI 4 "const_int_operand" ""))
3222 (match_operand:SI 5 "s_register_operand" "")]))
3223 (clobber (match_operand:SI 6 "s_register_operand" ""))]
3225 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
3228 [(ashiftrt:SI (match_dup 6) (match_dup 4))
3231 HOST_WIDE_INT temp = INTVAL (operands[3]);
3233 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
3234 operands[4] = GEN_INT (32 - temp);
3238 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
3239 ;;; represented by the bitfield, then this will produce incorrect results.
3240 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
3241 ;;; which have a real bit-field insert instruction, the truncation happens
3242 ;;; in the bit-field insert instruction itself. Since arm does not have a
3243 ;;; bit-field insert instruction, we would have to emit code here to truncate
3244 ;;; the value before we insert. This loses some of the advantage of having
3245 ;;; this insv pattern, so this pattern needs to be reevaluated.
;; insv: expand a bit-field insertion (store into a zero_extract) of
;; operand 3 into operand 0 at bit position operand 2, width operand 1.
;; Strategy, as visible below: (a) Thumb-2 unaligned MEM stores via
;; unaligned_storesi/storehi; (b) Thumb-2 register destinations via
;; insv_zero / single ORR / insv_t2 (BFI); (c) otherwise an open-coded
;; mask-and-or sequence.  See the ??? note above this pattern: operand 3
;; is not truncated to the field width, so stray high bits can produce
;; wrong results.
;; NOTE(review): many interior lines (braces, else-arms, FAIL/DONE exits)
;; are missing from this extract -- the control structure below cannot be
;; fully verified here; confirm against upstream arm.md.
3247 (define_expand "insv"
3248 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
3249 (match_operand 1 "general_operand")
3250 (match_operand 2 "general_operand"))
3251 (match_operand 3 "reg_or_int_operand"))]
3252 "TARGET_ARM || arm_arch_thumb2"
3255 int start_bit = INTVAL (operands[2]);
3256 int width = INTVAL (operands[1]);
3257 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
3258 rtx target, subtarget;
3260 if (arm_arch_thumb2)
/* Thumb-2 with unaligned access: store 16/32-bit byte-aligned fields
   directly to memory.  */
3262 if (unaligned_access && MEM_P (operands[0])
3263 && s_register_operand (operands[3], GET_MODE (operands[3]))
3264 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
3268 if (BYTES_BIG_ENDIAN)
3269 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
3274 base_addr = adjust_address (operands[0], SImode,
3275 start_bit / BITS_PER_UNIT);
3276 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
3280 rtx tmp = gen_reg_rtx (HImode);
3282 base_addr = adjust_address (operands[0], HImode,
3283 start_bit / BITS_PER_UNIT);
3284 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
3285 emit_insn (gen_unaligned_storehi (base_addr, tmp));
/* Thumb-2 register destination: prefer BFC (insv_zero), a single ORR for
   an all-ones field with an encodable immediate, else BFI (insv_t2).  */
3289 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
3291 bool use_bfi = TRUE;
3293 if (CONST_INT_P (operands[3]))
3295 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
3299 emit_insn (gen_insv_zero (operands[0], operands[1],
3304 /* See if the set can be done with a single orr instruction. */
3305 if (val == mask && const_ok_for_arm (val << start_bit))
3311 if (!REG_P (operands[3]))
3312 operands[3] = force_reg (SImode, operands[3]);
3314 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
/* Fallback path (ARM, or Thumb-2 cases not handled above).  */
3323 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
3326 target = copy_rtx (operands[0]);
3327 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
3328 subreg as the final target. */
3329 if (GET_CODE (target) == SUBREG)
3331 subtarget = gen_reg_rtx (SImode);
3332 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
3333 < GET_MODE_SIZE (SImode))
3334 target = SUBREG_REG (target);
3339 if (CONST_INT_P (operands[3]))
3341 /* Since we are inserting a known constant, we may be able to
3342 reduce the number of bits that we have to clear so that
3343 the mask becomes simple. */
3344 /* ??? This code does not check to see if the new mask is actually
3345 simpler. It may not be. */
3346 rtx op1 = gen_reg_rtx (SImode);
3347 /* ??? Truncate operand3 to fit in the bitfield. See comment before
3348 start of this pattern. */
3349 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
3350 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
3352 emit_insn (gen_andsi3 (op1, operands[0],
3353 gen_int_mode (~mask2, SImode)));
3354 emit_insn (gen_iorsi3 (subtarget, op1,
3355 gen_int_mode (op3_value << start_bit, SImode)));
3357 else if (start_bit == 0
3358 && !(const_ok_for_arm (mask)
3359 || const_ok_for_arm (~mask)))
3361 /* A Trick, since we are setting the bottom bits in the word,
3362 we can shift operand[3] up, operand[0] down, OR them together
3363 and rotate the result back again. This takes 3 insns, and
3364 the third might be mergeable into another op. */
3365 /* The shift up copes with the possibility that operand[3] is
3366 wider than the bitfield. */
3367 rtx op0 = gen_reg_rtx (SImode);
3368 rtx op1 = gen_reg_rtx (SImode);
3370 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)))
3371 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
3372 emit_insn (gen_iorsi3 (op1, op1, op0));
3373 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
3375 else if ((width + start_bit == 32)
3376 && !(const_ok_for_arm (mask)
3377 || const_ok_for_arm (~mask)))
3379 /* Similar trick, but slightly less efficient. */
3381 rtx op0 = gen_reg_rtx (SImode);
3382 rtx op1 = gen_reg_rtx (SImode);
3384 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3385 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
3386 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
3387 emit_insn (gen_iorsi3 (subtarget, op1, op0));
/* General case: mask operand 3, clear the field in operand 0 with BIC
   (forcing the mask into a register when not encodable), then OR in.  */
3391 rtx op0 = gen_int_mode (mask, SImode);
3392 rtx op1 = gen_reg_rtx (SImode);
3393 rtx op2 = gen_reg_rtx (SImode);
3395 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
3397 rtx tmp = gen_reg_rtx (SImode);
3399 emit_insn (gen_movsi (tmp, op0));
3403 /* Mask out any bits in operand[3] that are not needed. */
3404 emit_insn (gen_andsi3 (op1, operands[3], op0));
3406 if (CONST_INT_P (op0)
3407 && (const_ok_for_arm (mask << start_bit)
3408 || const_ok_for_arm (~(mask << start_bit))))
3410 op0 = gen_int_mode (~(mask << start_bit), SImode);
3411 emit_insn (gen_andsi3 (op2, operands[0], op0));
3415 if (CONST_INT_P (op0))
3417 rtx tmp = gen_reg_rtx (SImode);
3419 emit_insn (gen_movsi (tmp, op0));
3424 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
3426 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
3430 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
3432 emit_insn (gen_iorsi3 (subtarget, op1, op2));
3435 if (subtarget != target)
3437 /* If TARGET is still a SUBREG, then it must be wider than a word,
3438 so we must be careful only to set the subword we were asked to. */
3439 if (GET_CODE (target) == SUBREG)
3440 emit_move_insn (target, subtarget);
3442 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
;; insv_zero: clear a bit-field of operand 0 (a zero_extract store),
;; width operand 1 at position operand 2; 'M' constrains both to valid
;; immediates.  Attribute type "bfm" marks it as a bit-field-move class
;; insn -- presumably emitted as BFC, but the template line is missing.
;; NOTE(review): lines 3453-3455 (stored value, insn condition, assembler
;; template) are absent from this extract -- confirm against arm.md.
3449 (define_insn "insv_zero"
3450 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3451 (match_operand:SI 1 "const_int_M_operand" "M")
3452 (match_operand:SI 2 "const_int_M_operand" "M"))
3456 [(set_attr "length" "4")
3457 (set_attr "predicable" "yes")
3458 (set_attr "type" "bfm")]
;; insv_t2: Thumb-2 bit-field insert.  BFI places the low 'width' (%1)
;; bits of register %3 into operand 0 at bit position %2.
;; NOTE(review): the insn condition line (3466) is missing from this
;; extract -- upstream guards this with arm_arch_thumb2; confirm.
3461 (define_insn "insv_t2"
3462 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3463 (match_operand:SI 1 "const_int_M_operand" "M")
3464 (match_operand:SI 2 "const_int_M_operand" "M"))
3465 (match_operand:SI 3 "s_register_operand" "r"))]
3467 "bfi%?\t%0, %3, %2, %1"
3468 [(set_attr "length" "4")
3469 (set_attr "predicable" "yes")
3470 (set_attr "type" "bfm")]
;; andsi_notsi_si: operand 0 = operand 1 AND NOT operand 2, emitted as a
;; single BIC (bit-clear).  Note the operand numbering: %2 is the
;; complemented operand, matching BIC's "Rd, Rn, Rm" order.
;; NOTE(review): the insn condition line (3477) is missing -- confirm.
3473 (define_insn "andsi_notsi_si"
3474 [(set (match_operand:SI 0 "s_register_operand" "=r")
3475 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3476 (match_operand:SI 1 "s_register_operand" "r")))]
3478 "bic%?\\t%0, %1, %2"
3479 [(set_attr "predicable" "yes")
3480 (set_attr "type" "logic_reg")]
;; andsi_not_shiftsi_si: BIC with a shifted second operand, folding the
;; shift (operator 4, amount operand 3) into the shifter operand (%S4).
;; Alternative 1 (register shift amount) is ARM-state only -- see
;; arch "32,a"; Thumb-2 BIC does not take a register-shifted register.
3483 (define_insn "andsi_not_shiftsi_si"
3484 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3485 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
3486 [(match_operand:SI 2 "s_register_operand" "r,r")
3487 (match_operand:SI 3 "shift_amount_operand" "M,r")]))
3488 (match_operand:SI 1 "s_register_operand" "r,r")))]
3490 "bic%?\\t%0, %1, %2%S4"
3491 [(set_attr "predicable" "yes")
3492 (set_attr "shift" "2")
3493 (set_attr "arch" "32,a")
3494 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3497 ;; Shifted bics pattern used to set up CC status register and not reusing
3498 ;; bics output. Pattern restricts Thumb2 shift operand as bics for Thumb2
3499 ;; does not support shift by register.
;; Sets CC_NOOV from "r3 AND NOT (shifted r1)"; the BICS result itself is
;; written to a scratch (operand 4) and discarded.
;; NOTE(review): lines 3502 (compare:CC_NOOV opener), 3507 (const_int 0)
;; and 3509 (insn condition) are missing from this extract -- confirm.
3500 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
3501 [(set (reg:CC_NOOV CC_REGNUM)
3503 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3504 [(match_operand:SI 1 "s_register_operand" "r,r")
3505 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
3506 (match_operand:SI 3 "s_register_operand" "r,r"))
3508 (clobber (match_scratch:SI 4 "=r,r"))]
3510 "bics%?\\t%4, %3, %1%S0"
3511 [(set_attr "predicable" "yes")
3512 (set_attr "arch" "32,a")
3513 (set_attr "conds" "set")
3514 (set_attr "shift" "1")
3515 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3518 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the bics result is also
3519 ;; getting reused later.
;; Parallel: set CC_NOOV from the BICS and keep the result in operand 4
;; (a real output, not a scratch) for later use.
;; NOTE(review): interior lines 3522, 3527, 3530-3533 (compare opener,
;; const_int 0, the match_op_dup arguments and insn condition) are missing
;; from this extract -- confirm against arm.md.
3520 (define_insn "andsi_not_shiftsi_si_scc"
3521 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
3523 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3524 [(match_operand:SI 1 "s_register_operand" "r,r")
3525 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
3526 (match_operand:SI 3 "s_register_operand" "r,r"))
3528 (set (match_operand:SI 4 "s_register_operand" "=r,r")
3529 (and:SI (not:SI (match_op_dup 0
3534 "bics%?\\t%4, %3, %1%S0"
3535 [(set_attr "predicable" "yes")
3536 (set_attr "arch" "32,a")
3537 (set_attr "conds" "set")
3538 (set_attr "shift" "1")
3539 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
;; *andsi_notsi_si_compare0: BIC that both sets the condition codes
;; (CC_NOOV compare against 0) and writes the result to operand 0 --
;; presumably emitted as BICS; the template line is missing.
;; NOTE(review): lines 3544, 3547, 3550-3551 (compare opener, const_int 0,
;; condition and template) are absent from this extract -- confirm.
3542 (define_insn "*andsi_notsi_si_compare0"
3543 [(set (reg:CC_NOOV CC_REGNUM)
3545 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3546 (match_operand:SI 1 "s_register_operand" "r"))
3548 (set (match_operand:SI 0 "s_register_operand" "=r")
3549 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
3552 [(set_attr "conds" "set")
3553 (set_attr "type" "logics_shift_reg")]
;; *andsi_notsi_si_compare0_scratch: as *andsi_notsi_si_compare0, but only
;; the flags are wanted -- the BIC result goes to a scratch register.
;; NOTE(review): the compare opener, condition and template lines are
;; missing from this extract -- confirm against arm.md.
3556 (define_insn "*andsi_notsi_si_compare0_scratch"
3557 [(set (reg:CC_NOOV CC_REGNUM)
3559 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3560 (match_operand:SI 1 "s_register_operand" "r"))
3562 (clobber (match_scratch:SI 0 "=r"))]
3565 [(set_attr "conds" "set")
3566 (set_attr "type" "logics_shift_reg")]
;; iorsi3: expander for 32-bit inclusive OR.  For constant RHS it either
;; forces the constant into a register (when early splitting is not
;; wanted) or lets arm_split_constant break it into an ORR sequence.
;; The Thumb-1 tail forces the constant to a register and normalizes the
;; commutative operands when operand 0 aliases operand 1.
;; NOTE(review): several interior lines (braces, the TARGET_32BIT arm of
;; the if, DONE) are missing from this extract -- confirm against arm.md.
3569 (define_expand "iorsi3"
3570 [(set (match_operand:SI 0 "s_register_operand")
3571 (ior:SI (match_operand:SI 1 "s_register_operand")
3572 (match_operand:SI 2 "reg_or_int_operand")))]
3575 if (CONST_INT_P (operands[2]))
3579 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
3580 operands[2] = force_reg (SImode, operands[2]);
3583 arm_split_constant (IOR, SImode, NULL_RTX,
3584 INTVAL (operands[2]), operands[0],
3586 optimize && can_create_pseudo_p ());
3590 else /* TARGET_THUMB1 */
3592 rtx tmp = force_reg (SImode, operands[2]);
3593 if (rtx_equal_p (operands[0], operands[1]))
3597 operands[2] = operands[1];
;; *iorsi3_insn: ORR with five alternatives -- ARM immediate 'I', Thumb-2
;; short form (op0 == op1), inverted immediate 'K' via ORN (%B2 prints the
;; bitwise NOT of the constant), register, and arbitrary constant '?n'
;; which is split post-match by arm_split_constant when neither ORR nor
;; (on Thumb-2) ORN can encode it.
;; NOTE(review): the insn condition, most template alternatives and the
;; split-condition opener are missing from this extract -- confirm.
3605 (define_insn_and_split "*iorsi3_insn"
3606 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
3607 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
3608 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
3613 orn%?\\t%0, %1, #%B2
3617 && CONST_INT_P (operands[2])
3618 && !(const_ok_for_arm (INTVAL (operands[2]))
3619 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
3620 [(clobber (const_int 0))]
3622 arm_split_constant (IOR, SImode, curr_insn,
3623 INTVAL (operands[2]), operands[0], operands[1], 0);
3626 [(set_attr "length" "4,4,4,4,16")
3627 (set_attr "arch" "32,t2,t2,32,32")
3628 (set_attr "predicable" "yes")
3629 (set_attr "predicable_short_it" "no,yes,no,no,no")
3630 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
;; Peephole: an OR with a constant that is not ORR-encodable but whose
;; complement is -- materialize the constant into a scratch register
;; first (the movsi presumably becomes MVN) and OR register-to-register.
;; NOTE(review): the "(define_peephole2" opener (line 3633) and the start
;; of the condition (line 3638) are missing from this extract -- confirm.
3634 [(match_scratch:SI 3 "r")
3635 (set (match_operand:SI 0 "arm_general_register_operand" "")
3636 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
3637 (match_operand:SI 2 "const_int_operand" "")))]
3639 && !const_ok_for_arm (INTVAL (operands[2]))
3640 && const_ok_for_arm (~INTVAL (operands[2]))"
3641 [(set (match_dup 3) (match_dup 2))
3642 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
;; *iorsi3_compare0: ORRS -- sets CC_NOOV from the OR result and writes
;; the result to operand 0.  Middle alternative is the 2-byte Thumb-2
;; short form (op0 == op1, low registers).
;; NOTE(review): the compare opener (3648), const_int 0 (3651) and insn
;; condition (3654) lines are missing from this extract -- confirm.
3646 (define_insn "*iorsi3_compare0"
3647 [(set (reg:CC_NOOV CC_REGNUM)
3649 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3650 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3652 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
3653 (ior:SI (match_dup 1) (match_dup 2)))]
3655 "orrs%?\\t%0, %1, %2"
3656 [(set_attr "conds" "set")
3657 (set_attr "arch" "*,t2,*")
3658 (set_attr "length" "4,2,4")
3659 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
;; *iorsi3_compare0_scratch: as *iorsi3_compare0, but only the flags are
;; needed -- the ORRS result is written to a scratch register.
;; NOTE(review): the compare opener, const_int 0 and condition lines are
;; missing from this extract -- confirm against arm.md.
3662 (define_insn "*iorsi3_compare0_scratch"
3663 [(set (reg:CC_NOOV CC_REGNUM)
3665 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3666 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3668 (clobber (match_scratch:SI 0 "=r,l,r"))]
3670 "orrs%?\\t%0, %1, %2"
3671 [(set_attr "conds" "set")
3672 (set_attr "arch" "*,t2,*")
3673 (set_attr "length" "4,2,4")
3674 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
;; xorsi3: expander for 32-bit exclusive OR; structurally parallel to
;; iorsi3 above -- constants are either forced to a register or split
;; into an EOR sequence by arm_split_constant, with the same Thumb-1
;; commutative-operand normalization.
;; NOTE(review): interior lines (braces, the 32-bit arm of the if, DONE)
;; are missing from this extract -- confirm against arm.md.
3677 (define_expand "xorsi3"
3678 [(set (match_operand:SI 0 "s_register_operand")
3679 (xor:SI (match_operand:SI 1 "s_register_operand")
3680 (match_operand:SI 2 "reg_or_int_operand")))]
3682 "if (CONST_INT_P (operands[2]))
3686 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
3687 operands[2] = force_reg (SImode, operands[2]);
3690 arm_split_constant (XOR, SImode, NULL_RTX,
3691 INTVAL (operands[2]), operands[0],
3693 optimize && can_create_pseudo_p ());
3697 else /* TARGET_THUMB1 */
3699 rtx tmp = force_reg (SImode, operands[2]);
3700 if (rtx_equal_p (operands[0], operands[1]))
3704 operands[2] = operands[1];
;; *arm_xorsi3: EOR with immediate 'I', Thumb-2 short form, register, and
;; arbitrary constant '?n' alternatives; the '?n' case is split via
;; arm_split_constant when the constant is not EOR-encodable.  Unlike the
;; ORR pattern there is no inverted-immediate form (no EON counterpart
;; used here).
;; NOTE(review): the insn condition, template alternatives and the start
;; of the split condition are missing from this extract -- confirm.
3711 (define_insn_and_split "*arm_xorsi3"
3712 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
3713 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
3714 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
3722 && CONST_INT_P (operands[2])
3723 && !const_ok_for_arm (INTVAL (operands[2]))"
3724 [(clobber (const_int 0))]
3726 arm_split_constant (XOR, SImode, curr_insn,
3727 INTVAL (operands[2]), operands[0], operands[1], 0);
3730 [(set_attr "length" "4,4,4,16")
3731 (set_attr "predicable" "yes")
3732 (set_attr "predicable_short_it" "no,yes,no,no")
3733 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
;; *xorsi3_compare0: EORS -- sets CC_NOOV from the XOR result and also
;; writes the result to operand 0.
;; NOTE(review): the const_int 0 line (3740) and insn condition (3743)
;; are missing from this extract -- confirm against arm.md.
3736 (define_insn "*xorsi3_compare0"
3737 [(set (reg:CC_NOOV CC_REGNUM)
3738 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
3739 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3741 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3742 (xor:SI (match_dup 1) (match_dup 2)))]
3744 "eors%?\\t%0, %1, %2"
3745 [(set_attr "conds" "set")
3746 (set_attr "type" "logics_imm,logics_reg")]
;; *xorsi3_compare0_scratch: flags-only XOR compare (the TEQ idiom) --
;; no result register is kept.
;; NOTE(review): lines 3753-3755 (const_int 0, condition, template --
;; presumably "teq") are missing from this extract -- confirm.
3749 (define_insn "*xorsi3_compare0_scratch"
3750 [(set (reg:CC_NOOV CC_REGNUM)
3751 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
3752 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
3756 [(set_attr "conds" "set")
3757 (set_attr "type" "logics_imm,logics_reg")]
3760 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
3761 ; (NOT D) we can sometimes merge the final NOT into one of the following
;; Split implementing the De Morgan rewrite described above, using
;; operand 4 as the intermediate D.
;; NOTE(review): the "(define_split" opener (line 3764) and condition line
;; are missing from this extract -- confirm against arm.md.
3765 [(set (match_operand:SI 0 "s_register_operand" "")
3766 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3767 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3768 (match_operand:SI 3 "arm_rhs_operand" "")))
3769 (clobber (match_operand:SI 4 "s_register_operand" ""))]
3771 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3772 (not:SI (match_dup 3))))
3773 (set (match_dup 0) (not:SI (match_dup 4)))]
;; *andsi_iorsi3_notsi: (A | B) & ~C, split after reload into an ORR
;; followed by an AND/BIC.  The preparation code folds the NOT into
;; operand 3 when it is a constant, so the second insn never contains a
;; NOT of a CONST_INT (operands 4/5 are swapped accordingly).
3777 (define_insn_and_split "*andsi_iorsi3_notsi"
3778 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3779 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3780 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3781 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3783 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3784 "&& reload_completed"
3785 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
3786 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
3788 /* If operands[3] is a constant make sure to fold the NOT into it
3789 to avoid creating a NOT of a CONST_INT. */
3790 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
3791 if (CONST_INT_P (not_rtx))
3793 operands[4] = operands[0];
3794 operands[5] = not_rtx;
3798 operands[5] = operands[0];
3799 operands[4] = not_rtx;
3802 [(set_attr "length" "8")
3803 (set_attr "ce_count" "2")
3804 (set_attr "predicable" "yes")
3805 (set_attr "type" "multiple")]
3808 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3809 ; insns are available?
3811 [(set (match_operand:SI 0 "s_register_operand" "")
3812 (match_operator:SI 1 "logical_binary_operator"
3813 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3814 (match_operand:SI 3 "const_int_operand" "")
3815 (match_operand:SI 4 "const_int_operand" ""))
3816 (match_operator:SI 9 "logical_binary_operator"
3817 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3818 (match_operand:SI 6 "const_int_operand" ""))
3819 (match_operand:SI 7 "s_register_operand" "")])]))
3820 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3822 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3823 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3826 [(ashift:SI (match_dup 2) (match_dup 4))
3830 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3833 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3837 [(set (match_operand:SI 0 "s_register_operand" "")
3838 (match_operator:SI 1 "logical_binary_operator"
3839 [(match_operator:SI 9 "logical_binary_operator"
3840 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3841 (match_operand:SI 6 "const_int_operand" ""))
3842 (match_operand:SI 7 "s_register_operand" "")])
3843 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3844 (match_operand:SI 3 "const_int_operand" "")
3845 (match_operand:SI 4 "const_int_operand" ""))]))
3846 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3848 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3849 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3852 [(ashift:SI (match_dup 2) (match_dup 4))
3856 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3859 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3863 [(set (match_operand:SI 0 "s_register_operand" "")
3864 (match_operator:SI 1 "logical_binary_operator"
3865 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3866 (match_operand:SI 3 "const_int_operand" "")
3867 (match_operand:SI 4 "const_int_operand" ""))
3868 (match_operator:SI 9 "logical_binary_operator"
3869 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3870 (match_operand:SI 6 "const_int_operand" ""))
3871 (match_operand:SI 7 "s_register_operand" "")])]))
3872 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3874 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3875 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3878 [(ashift:SI (match_dup 2) (match_dup 4))
3882 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3885 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3889 [(set (match_operand:SI 0 "s_register_operand" "")
3890 (match_operator:SI 1 "logical_binary_operator"
3891 [(match_operator:SI 9 "logical_binary_operator"
3892 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3893 (match_operand:SI 6 "const_int_operand" ""))
3894 (match_operand:SI 7 "s_register_operand" "")])
3895 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3896 (match_operand:SI 3 "const_int_operand" "")
3897 (match_operand:SI 4 "const_int_operand" ""))]))
3898 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3900 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3901 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3904 [(ashift:SI (match_dup 2) (match_dup 4))
3908 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3911 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3915 ;; Minimum and maximum insns
3917 (define_expand "smaxsi3"
3919 (set (match_operand:SI 0 "s_register_operand")
3920 (smax:SI (match_operand:SI 1 "s_register_operand")
3921 (match_operand:SI 2 "arm_rhs_operand")))
3922 (clobber (reg:CC CC_REGNUM))])]
3925 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3927 /* No need for a clobber of the condition code register here. */
3928 emit_insn (gen_rtx_SET (operands[0],
3929 gen_rtx_SMAX (SImode, operands[1],
;; *smax_0: smax (x, 0) in one insn -- "x ASR #31" is all-ones exactly
;; when x is negative, so BIC clears x to 0 in that case.
;; NOTE(review): lines 3938-3939 (the const_int 0 operand and insn
;; condition) are missing from this extract -- confirm against arm.md.
3935 (define_insn "*smax_0"
3936 [(set (match_operand:SI 0 "s_register_operand" "=r")
3937 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3940 "bic%?\\t%0, %1, %1, asr #31"
3941 [(set_attr "predicable" "yes")
3942 (set_attr "type" "logic_shift_reg")]
;; *smax_m1: smax (x, -1) in one insn -- ORR with "x ASR #31" forces the
;; result to all-ones (-1) exactly when x is negative.
;; NOTE(review): lines 3948-3949 (the const_int -1 operand and insn
;; condition) are missing from this extract -- confirm against arm.md.
3945 (define_insn "*smax_m1"
3946 [(set (match_operand:SI 0 "s_register_operand" "=r")
3947 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3950 "orr%?\\t%0, %1, %1, asr #31"
3951 [(set_attr "predicable" "yes")
3952 (set_attr "type" "logic_shift_reg")]
3955 (define_insn_and_split "*arm_smax_insn"
3956 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3957 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3958 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3959 (clobber (reg:CC CC_REGNUM))]
3962 ; cmp\\t%1, %2\;movlt\\t%0, %2
3963 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3965 [(set (reg:CC CC_REGNUM)
3966 (compare:CC (match_dup 1) (match_dup 2)))
3968 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
3972 [(set_attr "conds" "clob")
3973 (set_attr "length" "8,12")
3974 (set_attr "type" "multiple")]
3977 (define_expand "sminsi3"
3979 (set (match_operand:SI 0 "s_register_operand")
3980 (smin:SI (match_operand:SI 1 "s_register_operand")
3981 (match_operand:SI 2 "arm_rhs_operand")))
3982 (clobber (reg:CC CC_REGNUM))])]
3985 if (operands[2] == const0_rtx)
3987 /* No need for a clobber of the condition code register here. */
3988 emit_insn (gen_rtx_SET (operands[0],
3989 gen_rtx_SMIN (SImode, operands[1],
;; *smin_0: smin (x, 0) in one insn -- AND with "x ASR #31" keeps x only
;; when it is negative, yielding 0 otherwise.
;; NOTE(review): lines 3998-3999 (the const_int 0 operand and insn
;; condition) are missing from this extract -- confirm against arm.md.
3995 (define_insn "*smin_0"
3996 [(set (match_operand:SI 0 "s_register_operand" "=r")
3997 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
4000 "and%?\\t%0, %1, %1, asr #31"
4001 [(set_attr "predicable" "yes")
4002 (set_attr "type" "logic_shift_reg")]
4005 (define_insn_and_split "*arm_smin_insn"
4006 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4007 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
4008 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
4009 (clobber (reg:CC CC_REGNUM))]
4012 ; cmp\\t%1, %2\;movge\\t%0, %2
4013 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
4015 [(set (reg:CC CC_REGNUM)
4016 (compare:CC (match_dup 1) (match_dup 2)))
4018 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
4022 [(set_attr "conds" "clob")
4023 (set_attr "length" "8,12")
4024 (set_attr "type" "multiple,multiple")]
4027 (define_expand "umaxsi3"
4029 (set (match_operand:SI 0 "s_register_operand")
4030 (umax:SI (match_operand:SI 1 "s_register_operand")
4031 (match_operand:SI 2 "arm_rhs_operand")))
4032 (clobber (reg:CC CC_REGNUM))])]
4037 (define_insn_and_split "*arm_umaxsi3"
4038 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
4039 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
4040 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
4041 (clobber (reg:CC CC_REGNUM))]
4044 ; cmp\\t%1, %2\;movcc\\t%0, %2
4045 ; cmp\\t%1, %2\;movcs\\t%0, %1
4046 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
4048 [(set (reg:CC CC_REGNUM)
4049 (compare:CC (match_dup 1) (match_dup 2)))
4051 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
4055 [(set_attr "conds" "clob")
4056 (set_attr "length" "8,8,12")
4057 (set_attr "type" "store_4")]
4060 (define_expand "uminsi3"
4062 (set (match_operand:SI 0 "s_register_operand")
4063 (umin:SI (match_operand:SI 1 "s_register_operand")
4064 (match_operand:SI 2 "arm_rhs_operand")))
4065 (clobber (reg:CC CC_REGNUM))])]
4070 (define_insn_and_split "*arm_uminsi3"
4071 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
4072 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
4073 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
4074 (clobber (reg:CC CC_REGNUM))]
4077 ; cmp\\t%1, %2\;movcs\\t%0, %2
4078 ; cmp\\t%1, %2\;movcc\\t%0, %1
4079 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
4081 [(set (reg:CC CC_REGNUM)
4082 (compare:CC (match_dup 1) (match_dup 2)))
4084 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
4088 [(set_attr "conds" "clob")
4089 (set_attr "length" "8,8,12")
4090 (set_attr "type" "store_4")]
4093 (define_insn "*store_minmaxsi"
4094 [(set (match_operand:SI 0 "memory_operand" "=m")
4095 (match_operator:SI 3 "minmax_operator"
4096 [(match_operand:SI 1 "s_register_operand" "r")
4097 (match_operand:SI 2 "s_register_operand" "r")]))
4098 (clobber (reg:CC CC_REGNUM))]
4099 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
4101 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
4102 operands[1], operands[2]);
4103 output_asm_insn (\"cmp\\t%1, %2\", operands);
4105 output_asm_insn (\"ite\t%d3\", operands);
4106 output_asm_insn (\"str%d3\\t%1, %0\", operands);
4107 output_asm_insn (\"str%D3\\t%2, %0\", operands);
4110 [(set_attr "conds" "clob")
4111 (set (attr "length")
4112 (if_then_else (eq_attr "is_thumb" "yes")
4115 (set_attr "type" "store_4")]
4118 ; Reject the frame pointer in operand[1], since reloading this after
4119 ; it has been eliminated can cause carnage.
4120 (define_insn "*minmax_arithsi"
4121 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4122 (match_operator:SI 4 "shiftable_operator"
4123 [(match_operator:SI 5 "minmax_operator"
4124 [(match_operand:SI 2 "s_register_operand" "r,r")
4125 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
4126 (match_operand:SI 1 "s_register_operand" "0,?r")]))
4127 (clobber (reg:CC CC_REGNUM))]
4128 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
4131 enum rtx_code code = GET_CODE (operands[4]);
4134 if (which_alternative != 0 || operands[3] != const0_rtx
4135 || (code != PLUS && code != IOR && code != XOR))
4140 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
4141 operands[2], operands[3]);
4142 output_asm_insn (\"cmp\\t%2, %3\", operands);
4146 output_asm_insn (\"ite\\t%d5\", operands);
4148 output_asm_insn (\"it\\t%d5\", operands);
4150 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
4152 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
4155 [(set_attr "conds" "clob")
4156 (set (attr "length")
4157 (if_then_else (eq_attr "is_thumb" "yes")
4160 (set_attr "type" "multiple")]
4163 ; Reject the frame pointer in operand[1], since reloading this after
4164 ; it has been eliminated can cause carnage.
4165 (define_insn_and_split "*minmax_arithsi_non_canon"
4166 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
4168 (match_operand:SI 1 "s_register_operand" "0,?Ts")
4169 (match_operator:SI 4 "minmax_operator"
4170 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
4171 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
4172 (clobber (reg:CC CC_REGNUM))]
4173 "TARGET_32BIT && !arm_eliminable_register (operands[1])
4174 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
4176 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
4177 [(set (reg:CC CC_REGNUM)
4178 (compare:CC (match_dup 2) (match_dup 3)))
4180 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
4182 (minus:SI (match_dup 1)
4184 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
4188 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
4189 operands[2], operands[3]);
4190 enum rtx_code rc = minmax_code (operands[4]);
4191 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
4192 operands[2], operands[3]);
4194 if (mode == CCFPmode || mode == CCFPEmode)
4195 rc = reverse_condition_maybe_unordered (rc);
4197 rc = reverse_condition (rc);
4198 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
4199 if (CONST_INT_P (operands[3]))
4200 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
4202 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
4204 [(set_attr "conds" "clob")
4205 (set (attr "length")
4206 (if_then_else (eq_attr "is_thumb" "yes")
4209 (set_attr "type" "multiple")]
4213 (define_expand "arm_<ss_op>"
4214 [(set (match_operand:SI 0 "s_register_operand")
4215 (SSPLUSMINUS:SI (match_operand:SI 1 "s_register_operand")
4216 (match_operand:SI 2 "s_register_operand")))]
4217 "TARGET_DSP_MULTIPLY"
4220 emit_insn (gen_arm_<ss_op>_setq_insn (operands[0],
4221 operands[1], operands[2]));
4223 emit_insn (gen_arm_<ss_op>_insn (operands[0], operands[1], operands[2]));
4228 (define_insn "arm_<ss_op><add_clobber_q_name>_insn"
4229 [(set (match_operand:SI 0 "s_register_operand" "=r")
4230 (SSPLUSMINUS:SI (match_operand:SI 1 "s_register_operand" "r")
4231 (match_operand:SI 2 "s_register_operand" "r")))]
4232 "TARGET_DSP_MULTIPLY && <add_clobber_q_pred>"
4233 "<ss_op>%?\t%0, %1, %2"
4234 [(set_attr "predicable" "yes")
4235 (set_attr "type" "alu_dsp_reg")]
;; Code iterator and attributes for the saturation (SSAT/USAT) patterns
;; below: SAT iterates over smin/smax; SATrev gives the opposite code,
;; and SATlo/SAThi select which operand number holds the lower/upper
;; bound for each orientation (used via operands[<SAT:SATlo>] etc.).
4238 (define_code_iterator SAT [smin smax])
4239 (define_code_attr SATrev [(smin "smax") (smax "smin")])
4240 (define_code_attr SATlo [(smin "1") (smax "2")])
4241 (define_code_attr SAThi [(smin "2") (smax "1")])
4243 (define_expand "arm_ssat"
4244 [(match_operand:SI 0 "s_register_operand")
4245 (match_operand:SI 1 "s_register_operand")
4246 (match_operand:SI 2 "const_int_operand")]
4247 "TARGET_32BIT && arm_arch6"
4249 HOST_WIDE_INT val = INTVAL (operands[2]);
4250 /* The builtin checking code should have ensured the right
4251 range for the immediate. */
4252 gcc_assert (IN_RANGE (val, 1, 32));
4253 HOST_WIDE_INT upper_bound = (HOST_WIDE_INT_1 << (val - 1)) - 1;
4254 HOST_WIDE_INT lower_bound = -upper_bound - 1;
4255 rtx up_rtx = gen_int_mode (upper_bound, SImode);
4256 rtx lo_rtx = gen_int_mode (lower_bound, SImode);
4258 emit_insn (gen_satsi_smin_setq (operands[0], lo_rtx,
4259 up_rtx, operands[1]));
4261 emit_insn (gen_satsi_smin (operands[0], lo_rtx, up_rtx, operands[1]));
4266 (define_expand "arm_usat"
4267 [(match_operand:SI 0 "s_register_operand")
4268 (match_operand:SI 1 "s_register_operand")
4269 (match_operand:SI 2 "const_int_operand")]
4270 "TARGET_32BIT && arm_arch6"
4272 HOST_WIDE_INT val = INTVAL (operands[2]);
4273 /* The builtin checking code should have ensured the right
4274 range for the immediate. */
4275 gcc_assert (IN_RANGE (val, 0, 31));
4276 HOST_WIDE_INT upper_bound = (HOST_WIDE_INT_1 << val) - 1;
4277 rtx up_rtx = gen_int_mode (upper_bound, SImode);
4278 rtx lo_rtx = CONST0_RTX (SImode);
4280 emit_insn (gen_satsi_smin_setq (operands[0], lo_rtx, up_rtx,
4283 emit_insn (gen_satsi_smin (operands[0], lo_rtx, up_rtx, operands[1]));
;; arm_get_apsr: read the APSR into a core register (unspec sourcing the
;; APSRQ pseudo register); conds "use" marks it as a flags consumer.
;; NOTE(review): lines 4291-4292 (insn condition and template --
;; presumably an MRS) are missing from this extract -- confirm.
4288 (define_insn "arm_get_apsr"
4289 [(set (match_operand:SI 0 "s_register_operand" "=r")
4290 (unspec:SI [(reg:CC APSRQ_REGNUM)] UNSPEC_APSR_READ))]
4293 [(set_attr "predicable" "yes")
4294 (set_attr "conds" "use")]
;; arm_set_apsr: write operand 0 to the APSR NZCVQ fields via MSR,
;; modelled as a volatile-style unspec store to the APSRQ pseudo reg;
;; conds "set" marks it as a flags producer.
;; NOTE(review): lines 4299 and 4301 (unspec_volatile opener and insn
;; condition) are missing from this extract -- confirm against arm.md.
4297 (define_insn "arm_set_apsr"
4298 [(set (reg:CC APSRQ_REGNUM)
4300 [(match_operand:SI 0 "s_register_operand" "r")] VUNSPEC_APSR_WRITE))]
4302 "msr%?\tAPSR_nzcvq, %0"
4303 [(set_attr "predicable" "yes")
4304 (set_attr "conds" "set")]
4307 ;; Read the APSR and extract the Q bit (bit 27)
4308 (define_expand "arm_saturation_occurred"
4309 [(match_operand:SI 0 "s_register_operand")]
4312 rtx apsr = gen_reg_rtx (SImode);
4313 emit_insn (gen_arm_get_apsr (apsr));
4314 emit_insn (gen_extzv (operands[0], apsr, CONST1_RTX (SImode),
4315 gen_int_mode (27, SImode)));
4320 ;; Read the APSR and set the Q bit (bit position 27) according to operand 0
4321 (define_expand "arm_set_saturation"
4322 [(match_operand:SI 0 "reg_or_int_operand")]
4325 rtx apsr = gen_reg_rtx (SImode);
4326 emit_insn (gen_arm_get_apsr (apsr));
4327 rtx to_insert = gen_reg_rtx (SImode);
4328 if (CONST_INT_P (operands[0]))
4329 emit_move_insn (to_insert, operands[0] == CONST0_RTX (SImode)
4330 ? CONST0_RTX (SImode) : CONST1_RTX (SImode));
4333 rtx cmp = gen_rtx_NE (SImode, operands[0], CONST0_RTX (SImode));
4334 emit_insn (gen_cstoresi4 (to_insert, cmp, operands[0],
4335 CONST0_RTX (SImode)));
4337 emit_insn (gen_insv (apsr, CONST1_RTX (SImode),
4338 gen_int_mode (27, SImode), to_insert));
4339 emit_insn (gen_arm_set_apsr (apsr));
;; NOTE(review): lines are missing from this extract (embedded numbering
;; skips); the ssat/usat insns below lack some of their C body lines.
;;
;; Saturate a register to a signed/unsigned range: matches the canonical
;; smin/smax (or umin/umax) pair via the SAT iterator and emits SSAT/USAT.
;; arm_sat_operator_match validates the bounds and recovers the bit-width
;; (operand 1 is rewritten to that width before output).
4344 (define_insn "satsi_<SAT:code><add_clobber_q_name>"
4345   [(set (match_operand:SI 0 "s_register_operand" "=r")
4346 	(SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
4347 			     (match_operand:SI 1 "const_int_operand" "i"))
4348 		(match_operand:SI 2 "const_int_operand" "i")))]
4349   "TARGET_32BIT && arm_arch6 && <add_clobber_q_pred>
4350    && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4354    if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4355 				&mask, &signed_sat))
4358    operands[1] = GEN_INT (mask);
4360      return "ssat%?\t%0, %1, %3";
4362      return "usat%?\t%0, %1, %3";
4364   [(set_attr "predicable" "yes")
4365    (set_attr "type" "alus_imm")]
;; As above but the saturated value comes through a shift (SSAT/USAT accept
;; a shifted source operand, emitted via the %S3 output modifier).
4368 (define_insn "*satsi_<SAT:code>_shift"
4369   [(set (match_operand:SI 0 "s_register_operand" "=r")
4370 	(SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
4371 			     [(match_operand:SI 4 "s_register_operand" "r")
4372 			      (match_operand:SI 5 "const_int_operand" "i")])
4373 			     (match_operand:SI 1 "const_int_operand" "i"))
4374 		(match_operand:SI 2 "const_int_operand" "i")))]
4375   "TARGET_32BIT && arm_arch6 && !ARM_Q_BIT_READ
4376    && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4380    if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4381 				&mask, &signed_sat))
4384    operands[1] = GEN_INT (mask);
4386      return "ssat%?\t%0, %1, %4%S3";
4388      return "usat%?\t%0, %1, %4%S3";
4390   [(set_attr "predicable" "yes")
4391    (set_attr "shift" "3")
4392    (set_attr "type" "logic_shift_reg")])
;; NOTE(review): this extract drops lines (conditions and some emitted code
;; are missing, per the gaps in the embedded numbering).
4394 ;; Shift and rotation insns
;; 64-bit left shift: synthesised from 32-bit core-register operations by
;; arm_emit_coreregs_64bit_shift, using two SImode scratch registers.
4396 (define_expand "ashldi3"
4397   [(set (match_operand:DI 0 "s_register_operand")
4398 	(ashift:DI (match_operand:DI 1 "s_register_operand")
4399 		   (match_operand:SI 2 "reg_or_int_operand")))]
4402       arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
4403 				     operands[2], gen_reg_rtx (SImode),
4404 				     gen_reg_rtx (SImode));
;; 32-bit left shift: a constant count > 31 yields zero directly.
4408 (define_expand "ashlsi3"
4409   [(set (match_operand:SI 0 "s_register_operand")
4410 	(ashift:SI (match_operand:SI 1 "s_register_operand")
4411 		   (match_operand:SI 2 "arm_rhs_operand")))]
4414   if (CONST_INT_P (operands[2])
4415       && (UINTVAL (operands[2])) > 31)
4417       emit_insn (gen_movsi (operands[0], const0_rtx));
;; 64-bit arithmetic right shift, via the same core-register helper.
4423 (define_expand "ashrdi3"
4424   [(set (match_operand:DI 0 "s_register_operand")
4425 	(ashiftrt:DI (match_operand:DI 1 "s_register_operand")
4426 		     (match_operand:SI 2 "reg_or_int_operand")))]
4429       arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
4430 				     operands[2], gen_reg_rtx (SImode),
4431 				     gen_reg_rtx (SImode));
;; 32-bit arithmetic right shift: counts > 31 are clamped to 31 (the result
;; is then all sign bits), rather than producing zero.
4435 (define_expand "ashrsi3"
4436   [(set (match_operand:SI 0 "s_register_operand")
4437 	(ashiftrt:SI (match_operand:SI 1 "s_register_operand")
4438 		     (match_operand:SI 2 "arm_rhs_operand")))]
4441   if (CONST_INT_P (operands[2])
4442       && UINTVAL (operands[2]) > 31)
4443     operands[2] = GEN_INT (31);
;; 64-bit logical right shift, via the core-register helper.
4447 (define_expand "lshrdi3"
4448   [(set (match_operand:DI 0 "s_register_operand")
4449 	(lshiftrt:DI (match_operand:DI 1 "s_register_operand")
4450 		     (match_operand:SI 2 "reg_or_int_operand")))]
4453       arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
4454 				     operands[2], gen_reg_rtx (SImode),
4455 				     gen_reg_rtx (SImode));
;; 32-bit logical right shift: a constant count > 31 yields zero directly.
4459 (define_expand "lshrsi3"
4460   [(set (match_operand:SI 0 "s_register_operand")
4461 	(lshiftrt:SI (match_operand:SI 1 "s_register_operand")
4462 		     (match_operand:SI 2 "arm_rhs_operand")))]
4465   if (CONST_INT_P (operands[2])
4466       && (UINTVAL (operands[2])) > 31)
4468       emit_insn (gen_movsi (operands[0], const0_rtx));
;; Rotate left: ARM only has rotate-right, so the count is converted to
;; (32 - n) % 32 for constants, or 32 - reg at run time, and the pattern
;; body is expressed with rotatert.
4474 (define_expand "rotlsi3"
4475   [(set (match_operand:SI 0 "s_register_operand")
4476 	(rotatert:SI (match_operand:SI 1 "s_register_operand")
4477 		     (match_operand:SI 2 "reg_or_int_operand")))]
4480   if (CONST_INT_P (operands[2]))
4481     operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
4484       rtx reg = gen_reg_rtx (SImode);
4485       emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
;; Rotate right: constant counts are reduced mod 32; on Thumb-1 the count
;; must live in a register, so constants are forced into one.
4491 (define_expand "rotrsi3"
4492   [(set (match_operand:SI 0 "s_register_operand")
4493 	(rotatert:SI (match_operand:SI 1 "s_register_operand")
4494 		     (match_operand:SI 2 "arm_rhs_operand")))]
4499       if (CONST_INT_P (operands[2])
4500 	  && UINTVAL (operands[2]) > 31)
4501 	operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
4503   else /* TARGET_THUMB1 */
4505       if (CONST_INT_P (operands [2]))
4506 	operands [2] = force_reg (SImode, operands[2]);
;; NOTE(review): some lines (conditions, comparison operands) are missing
;; from this extract; see the gaps in the embedded original numbering.
;;
;; Generic 32-bit shift by register or immediate; assembly is produced by
;; arm_output_shift.  Alternatives cover Thumb-2 short forms and ARM forms.
4511 (define_insn "*arm_shiftsi3"
4512   [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
4513 	(match_operator:SI 3 "shift_operator"
4514 	 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
4515 	  (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
4517   "* return arm_output_shift(operands, 0);"
4518   [(set_attr "predicable" "yes")
4519    (set_attr "arch" "t2,t2,*,*")
4520    (set_attr "predicable_short_it" "yes,yes,no,no")
4521    (set_attr "length" "4")
4522    (set_attr "shift" "1")
4523    (set_attr "type" "alu_shift_reg,alu_shift_imm,alu_shift_imm,alu_shift_reg")]
;; Shift that also sets the condition codes (flag-setting variant, the "1"
;; argument to arm_output_shift selects the S form).
4526 (define_insn "*shiftsi3_compare0"
4527   [(set (reg:CC_NOOV CC_REGNUM)
4528 	(compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4529 			  [(match_operand:SI 1 "s_register_operand" "r,r")
4530 			   (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4532    (set (match_operand:SI 0 "s_register_operand" "=r,r")
4533 	(match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
4535   "* return arm_output_shift(operands, 1);"
4536   [(set_attr "conds" "set")
4537    (set_attr "shift" "1")
4538    (set_attr "type" "alus_shift_imm,alus_shift_reg")]
;; As above but only the flags are wanted; the shifted value goes to a
;; scratch register.
4541 (define_insn "*shiftsi3_compare0_scratch"
4542   [(set (reg:CC_NOOV CC_REGNUM)
4543 	(compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4544 			  [(match_operand:SI 1 "s_register_operand" "r,r")
4545 			   (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4547    (clobber (match_scratch:SI 0 "=r,r"))]
4549   "* return arm_output_shift(operands, 1);"
4550   [(set_attr "conds" "set")
4551    (set_attr "shift" "1")
4552    (set_attr "type" "shift_imm,shift_reg")]
;; MVN with a shifted operand (NOT of a shift), using the %S3 modifier to
;; print the shift.  Register-specified shifts are ARM-state only ("a").
4555 (define_insn "*not_shiftsi"
4556   [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4557 	(not:SI (match_operator:SI 3 "shift_operator"
4558 		 [(match_operand:SI 1 "s_register_operand" "r,r")
4559 		  (match_operand:SI 2 "shift_amount_operand" "M,r")])))]
4562   [(set_attr "predicable" "yes")
4563    (set_attr "shift" "1")
4564    (set_attr "arch" "32,a")
4565    (set_attr "type" "mvn_shift,mvn_shift_reg")])
;; MVNS variant: same operation, also setting the condition codes.
4567 (define_insn "*not_shiftsi_compare0"
4568   [(set (reg:CC_NOOV CC_REGNUM)
4570 	 (not:SI (match_operator:SI 3 "shift_operator"
4571 		  [(match_operand:SI 1 "s_register_operand" "r,r")
4572 		   (match_operand:SI 2 "shift_amount_operand" "M,r")]))
4574    (set (match_operand:SI 0 "s_register_operand" "=r,r")
4575 	(not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
4577   "mvns%?\\t%0, %1%S3"
4578   [(set_attr "conds" "set")
4579    (set_attr "shift" "1")
4580    (set_attr "arch" "32,a")
4581    (set_attr "type" "mvn_shift,mvn_shift_reg")])
;; MVNS for the flags only; result discarded into a scratch.
4583 (define_insn "*not_shiftsi_compare0_scratch"
4584   [(set (reg:CC_NOOV CC_REGNUM)
4586 	 (not:SI (match_operator:SI 3 "shift_operator"
4587 		  [(match_operand:SI 1 "s_register_operand" "r,r")
4588 		   (match_operand:SI 2 "shift_amount_operand" "M,r")]))
4590    (clobber (match_scratch:SI 0 "=r,r"))]
4592   "mvns%?\\t%0, %1%S3"
4593   [(set_attr "conds" "set")
4594    (set_attr "shift" "1")
4595    (set_attr "arch" "32,a")
4596    (set_attr "type" "mvn_shift,mvn_shift_reg")])
;; NOTE(review): several lines of the C expanders below are missing from
;; this extract (control-flow is visibly incomplete); consult full arm.md.
4598 ;; We don't really have extzv, but defining this using shifts helps
4599 ;; to reduce register pressure later on.
;; Zero-extract expander.  Strategy:
;;  - Thumb-2/ARMv6T2: use UBFX (via extzv_t2) for register sources, and
;;    unaligned half/word loads for suitably aligned 16/32-bit MEM extracts.
;;  - Otherwise: synthesise with a left shift then logical right shift
;;    (extzv_t1), computed from width (op 2) and bit position (op 3).
4601 (define_expand "extzv"
4602   [(set (match_operand 0 "s_register_operand")
4603 	(zero_extract (match_operand 1 "nonimmediate_operand")
4604 		      (match_operand 2 "const_int_operand")
4605 		      (match_operand 3 "const_int_operand")))]
4606   "TARGET_THUMB1 || arm_arch_thumb2"
4609   HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
4610   HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
4612   if (arm_arch_thumb2)
4614       HOST_WIDE_INT width = INTVAL (operands[2]);
4615       HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4617       if (unaligned_access && MEM_P (operands[1])
4618 	  && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
4622 	  if (BYTES_BIG_ENDIAN)
4623 	    bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
4628 	      base_addr = adjust_address (operands[1], SImode,
4629 					  bitpos / BITS_PER_UNIT);
4630 	      emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4634 	      rtx dest = operands[0];
4635 	      rtx tmp = gen_reg_rtx (SImode);
4637 	      /* We may get a paradoxical subreg here.  Strip it off.  */
4638 	      if (GET_CODE (dest) == SUBREG
4639 		  && GET_MODE (dest) == SImode
4640 		  && GET_MODE (SUBREG_REG (dest)) == HImode)
4641 		dest = SUBREG_REG (dest);
4643 	      if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4646 	      base_addr = adjust_address (operands[1], HImode,
4647 					  bitpos / BITS_PER_UNIT);
4648 	      emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
4649 	      emit_move_insn (gen_lowpart (SImode, dest), tmp);
4653       else if (s_register_operand (operands[1], GET_MODE (operands[1])))
4655 	  emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
4663   if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4666   operands[3] = GEN_INT (rshift);
4670       emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
4674   emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
4675 			   operands[3], gen_reg_rtx (SImode)));
4680 ;; Helper for extzv, for the Thumb-1 register-shifts case.
;; Two-insn sequence: shift left to drop high bits into op 4, then shift
;; right to place the field at bit 0 in op 0.
4682 (define_expand "extzv_t1"
4683   [(set (match_operand:SI 4 "s_register_operand")
4684 	(ashift:SI (match_operand:SI 1 "nonimmediate_operand")
4685 		   (match_operand:SI 2 "const_int_operand")))
4686    (set (match_operand:SI 0 "s_register_operand")
4687 	(lshiftrt:SI (match_dup 4)
4688 		     (match_operand:SI 3 "const_int_operand")))]
;; Sign-extract expander, parallel to extzv above: signed unaligned loads
;; for aligned 16/32-bit MEM extracts, otherwise SBFX via extv_regsi for
;; SImode register operands.
4692 (define_expand "extv"
4693   [(set (match_operand 0 "s_register_operand")
4694 	(sign_extract (match_operand 1 "nonimmediate_operand")
4695 		      (match_operand 2 "const_int_operand")
4696 		      (match_operand 3 "const_int_operand")))]
4699   HOST_WIDE_INT width = INTVAL (operands[2]);
4700   HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4702   if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
4703       && (bitpos % BITS_PER_UNIT) == 0)
4707       if (BYTES_BIG_ENDIAN)
4708 	bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
4712 	  base_addr = adjust_address (operands[1], SImode,
4713 				      bitpos / BITS_PER_UNIT);
4714 	  emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4718 	  rtx dest = operands[0];
4719 	  rtx tmp = gen_reg_rtx (SImode);
4721 	  /* We may get a paradoxical subreg here.  Strip it off.  */
4722 	  if (GET_CODE (dest) == SUBREG
4723 	      && GET_MODE (dest) == SImode
4724 	      && GET_MODE (SUBREG_REG (dest)) == HImode)
4725 	    dest = SUBREG_REG (dest);
4727 	  if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4730 	  base_addr = adjust_address (operands[1], HImode,
4731 				      bitpos / BITS_PER_UNIT);
4732 	  emit_insn (gen_unaligned_loadhis (tmp, base_addr));
4733 	  emit_move_insn (gen_lowpart (SImode, dest), tmp);
4738   else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4740   else if (GET_MODE (operands[0]) == SImode
4741 	   && GET_MODE (operands[1]) == SImode)
4743       emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
4751 ; Helper to expand register forms of extv with the proper modes.
4753 (define_expand "extv_regsi"
4754   [(set (match_operand:SI 0 "s_register_operand")
4755 	(sign_extract:SI (match_operand:SI 1 "s_register_operand")
4756 			 (match_operand 2 "const_int_operand")
4757 			 (match_operand 3 "const_int_operand")))]
;; NOTE(review): insn conditions for several patterns below are absent in
;; this extract (embedded numbering skips) — check the full file.
4762 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
;; 64-bit unaligned load via LDRD/two loads; output_move_double handles the
;; register-pair/ordering details.
4764 (define_insn "unaligned_loaddi"
4765   [(set (match_operand:DI 0 "s_register_operand" "=r")
4766 	(unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
4767 		   UNSPEC_UNALIGNED_LOAD))]
4768   "TARGET_32BIT && TARGET_LDRD"
4770   return output_move_double (operands, true, NULL);
4772   [(set_attr "length" "8")
4773    (set_attr "type" "load_8")])
;; 32-bit unaligned load; alternatives cover Thumb-1 (not predicable),
;; 16-bit Thumb-2, and 32-bit encodings.
4775 (define_insn "unaligned_loadsi"
4776   [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4777 	(unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
4778 		   UNSPEC_UNALIGNED_LOAD))]
4781    ldr\t%0, %1\t@ unaligned
4782    ldr%?\t%0, %1\t@ unaligned
4783    ldr%?\t%0, %1\t@ unaligned"
4784   [(set_attr "arch" "t1,t2,32")
4785    (set_attr "length" "2,2,4")
4786    (set_attr "predicable" "no,yes,yes")
4787    (set_attr "predicable_short_it" "no,yes,no")
4788    (set_attr "type" "load_4")])
4790 ;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
4791 ;; address (there's no immediate format).  That's tricky to support
4792 ;; here and we don't really need this pattern for that case, so only
4793 ;; enable for 32-bit ISAs.
;; Sign-extending unaligned halfword load (LDRSH), 32-bit ISAs only.
4794 (define_insn "unaligned_loadhis"
4795   [(set (match_operand:SI 0 "s_register_operand" "=r")
4797 	  (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
4798 		     UNSPEC_UNALIGNED_LOAD)))]
4799   "unaligned_access && TARGET_32BIT"
4800   "ldrsh%?\t%0, %1\t@ unaligned"
4801   [(set_attr "predicable" "yes")
4802    (set_attr "type" "load_byte")])
;; Zero-extending unaligned halfword load (LDRH); same three-alternative
;; structure as unaligned_loadsi.
4804 (define_insn "unaligned_loadhiu"
4805   [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4807 	  (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
4808 		     UNSPEC_UNALIGNED_LOAD)))]
4811    ldrh\t%0, %1\t@ unaligned
4812    ldrh%?\t%0, %1\t@ unaligned
4813    ldrh%?\t%0, %1\t@ unaligned"
4814   [(set_attr "arch" "t1,t2,32")
4815    (set_attr "length" "2,2,4")
4816    (set_attr "predicable" "no,yes,yes")
4817    (set_attr "predicable_short_it" "no,yes,no")
4818    (set_attr "type" "load_byte")])
;; 64-bit unaligned store, mirror of unaligned_loaddi.
4820 (define_insn "unaligned_storedi"
4821   [(set (match_operand:DI 0 "memory_operand" "=m")
4822 	(unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
4823 		   UNSPEC_UNALIGNED_STORE))]
4824   "TARGET_32BIT && TARGET_LDRD"
4826   return output_move_double (operands, true, NULL);
4828   [(set_attr "length" "8")
4829    (set_attr "type" "store_8")])
;; 32-bit unaligned store (STR), three alternatives as for the loads.
4831 (define_insn "unaligned_storesi"
4832   [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
4833 	(unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
4834 		   UNSPEC_UNALIGNED_STORE))]
4837    str\t%1, %0\t@ unaligned
4838    str%?\t%1, %0\t@ unaligned
4839    str%?\t%1, %0\t@ unaligned"
4840   [(set_attr "arch" "t1,t2,32")
4841    (set_attr "length" "2,2,4")
4842    (set_attr "predicable" "no,yes,yes")
4843    (set_attr "predicable_short_it" "no,yes,no")
4844    (set_attr "type" "store_4")])
;; Unaligned halfword store (STRH).
4846 (define_insn "unaligned_storehi"
4847   [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
4848 	(unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
4849 		   UNSPEC_UNALIGNED_STORE))]
4852    strh\t%1, %0\t@ unaligned
4853    strh%?\t%1, %0\t@ unaligned
4854    strh%?\t%1, %0\t@ unaligned"
4855   [(set_attr "arch" "t1,t2,32")
4856    (set_attr "length" "2,2,4")
4857    (set_attr "predicable" "no,yes,yes")
4858    (set_attr "predicable_short_it" "no,yes,no")
4859    (set_attr "type" "store_4")])
;; Signed bit-field extract via SBFX: position (op 3) in 0..31 and width
;; (op 2) within the word.  (The architecture condition line is missing
;; from this extract.)
4862 (define_insn "*extv_reg"
4863   [(set (match_operand:SI 0 "s_register_operand" "=r")
4864 	(sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4865 			 (match_operand:SI 2 "const_int_operand" "n")
4866 			 (match_operand:SI 3 "const_int_operand" "n")))]
4868   && IN_RANGE (INTVAL (operands[3]), 0, 31)
4869   && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
4870   "sbfx%?\t%0, %1, %3, %2"
4871   [(set_attr "length" "4")
4872    (set_attr "predicable" "yes")
4873    (set_attr "type" "bfm")]
;; Unsigned counterpart: UBFX, same operand-range constraints.
4876 (define_insn "extzv_t2"
4877   [(set (match_operand:SI 0 "s_register_operand" "=r")
4878 	(zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4879 			 (match_operand:SI 2 "const_int_operand" "n")
4880 			 (match_operand:SI 3 "const_int_operand" "n")))]
4882   && IN_RANGE (INTVAL (operands[3]), 0, 31)
4883   && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
4884   "ubfx%?\t%0, %1, %3, %2"
4885   [(set_attr "length" "4")
4886    (set_attr "predicable" "yes")
4887    (set_attr "type" "bfm")]
4891 ;; Division instructions
;; Hardware signed divide; alternatives for 32-bit ISAs and ARMv8-M
;; Baseline ("v8mb").  Condition and template lines are missing from this
;; extract (presumably SDIV — verify in full arm.md).
4892 (define_insn "divsi3"
4893   [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4894 	(div:SI (match_operand:SI 1 "s_register_operand" "r,r")
4895 		(match_operand:SI 2 "s_register_operand" "r,r")))]
4900   [(set_attr "arch" "32,v8mb")
4901    (set_attr "predicable" "yes")
4902    (set_attr "type" "sdiv")]
;; Hardware unsigned divide (UDIV), same structure as divsi3.
4905 (define_insn "udivsi3"
4906   [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4907 	(udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
4908 		 (match_operand:SI 2 "s_register_operand" "r,r")))]
4913   [(set_attr "arch" "32,v8mb")
4914    (set_attr "predicable" "yes")
4915    (set_attr "type" "udiv")]
4919 ;; Unary arithmetic insns
;; Overflow-trapping negate: rewritten as 0 - x using the subv<mode>4
;; pattern (operand 2 is the overflow label).
4921 (define_expand "negv<SIDI:mode>3"
4922   [(match_operand:SIDI 0 "s_register_operand")
4923    (match_operand:SIDI 1 "s_register_operand")
4924    (match_operand 2 "")]
4927   emit_insn (gen_subv<mode>4 (operands[0], const0_rtx, operands[1],
;; Plain SImode negate expander.
4932 (define_expand "negsi2"
4933   [(set (match_operand:SI 0 "s_register_operand")
4934 	(neg:SI (match_operand:SI 1 "s_register_operand")))]
;; Negate via reverse-subtract from zero (RSB).
4939 (define_insn "*arm_negsi2"
4940   [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4941 	(neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4943   "rsb%?\\t%0, %1, #0"
4944   [(set_attr "predicable" "yes")
4945    (set_attr "predicable_short_it" "yes,no")
4946    (set_attr "arch" "t2,*")
4947    (set_attr "length" "4")
4948    (set_attr "type" "alu_imm")]
4951 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
4952 ;; rather than (0 cmp reg).  This gives the same results for unsigned
4953 ;; and equality compares which is what we mostly need here.
;; Flag-setting negate: the CC_RSB compare models the flags of RSBS/NEGS.
4954 (define_insn "negsi2_0compare"
4955   [(set (reg:CC_RSB CC_REGNUM)
4956 	(compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r"))
4958    (set (match_operand:SI 0 "s_register_operand" "=l,r")
4959 	(neg:SI (match_dup 1)))]
4964   [(set_attr "conds" "set")
4965    (set_attr "arch" "t2,*")
4966    (set_attr "length" "2,*")
4967    (set_attr "type" "alus_imm")]
;; Negate minus a borrow (carry-in), used in multi-word negation; second
;; alternative uses the SBC reg, reg, reg lsl #1 trick on Thumb-2.
4970 (define_insn "negsi2_carryin"
4971   [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4972 	(minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r"))
4973 		  (match_operand:SI 2 "arm_borrow_operation" "")))]
4977    sbc\\t%0, %1, %1, lsl #1"
4978   [(set_attr "conds" "use")
4979    (set_attr "arch" "a,t2")
4980    (set_attr "type" "adc_imm,adc_reg")]
;; FP negation expanders — handled by the VFP patterns when enabled.
4983 (define_expand "negsf2"
4984   [(set (match_operand:SF 0 "s_register_operand")
4985 	(neg:SF (match_operand:SF 1 "s_register_operand")))]
4986   "TARGET_32BIT && TARGET_HARD_FLOAT"
4990 (define_expand "negdf2"
4991   [(set (match_operand:DF 0 "s_register_operand")
4992 	(neg:DF (match_operand:DF 1 "s_register_operand")))]
4993   "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4996 ;; abssi2 doesn't really clobber the condition codes if a different register
4997 ;; is being set.  To keep things simple, assume during rtl manipulations that
4998 ;; it does, but tell the final scan operator the truth.  Similarly for
;; abssi2 expander: operand 2 is a scratch (or the CC register, depending
;; on the missing condition at line 5010) clobbered by the abs sequence.
5001 (define_expand "abssi2"
5003    [(set (match_operand:SI 0 "s_register_operand")
5004 	 (abs:SI (match_operand:SI 1 "s_register_operand")))
5005     (clobber (match_dup 2))])]
5009     operands[2] = gen_rtx_SCRATCH (SImode);
5011     operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
;; abs(x) as a post-reload split.  Two strategies, chosen by whether dest
;; and source are the same register:
;;  - same reg:  CMP x, #0 then conditional RSBLT x, x, #0;
;;  - diff regs: EOR dst, x, x ASR #31 then SUB dst, dst, x ASR #31
;;    (flag-free sign-mask trick).
;; NOTE(review): extract is missing lines (e.g. the insn condition); the
;; emitted RTL below is built piecewise and some gen_rtx_* argument lines
;; are absent.
5014 (define_insn_and_split "*arm_abssi2"
5015   [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
5016 	(abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
5017    (clobber (reg:CC CC_REGNUM))]
5020   "&& reload_completed"
5023   /* if (which_alternative == 0) */
5024   if (REGNO(operands[0]) == REGNO(operands[1]))
5026       /* Emit the pattern:
5027 	 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
5028 	 [(set (reg:CC CC_REGNUM)
5029 	       (compare:CC (match_dup 0) (const_int 0)))
5030 	  (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
5031 		     (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
5033       emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
5034 			      gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
5035       emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5036 				    (gen_rtx_LT (SImode,
5037 						 gen_rtx_REG (CCmode, CC_REGNUM),
5039 				    (gen_rtx_SET (operands[0],
5040 						  (gen_rtx_MINUS (SImode,
5047       /* Emit the pattern:
5048 	 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
5050 	       (xor:SI (match_dup 1)
5051 		       (ashiftrt:SI (match_dup 1) (const_int 31))))
5053 	       (minus:SI (match_dup 0)
5054 			 (ashiftrt:SI (match_dup 1) (const_int 31))))]
5056       emit_insn (gen_rtx_SET (operands[0],
5057 			      gen_rtx_XOR (SImode,
5058 					   gen_rtx_ASHIFTRT (SImode,
5062       emit_insn (gen_rtx_SET (operands[0],
5063 			      gen_rtx_MINUS (SImode,
5065 					     gen_rtx_ASHIFTRT (SImode,
5071   [(set_attr "conds" "clob,*")
5072    (set_attr "shift" "1")
5073    (set_attr "predicable" "no, yes")
5074    (set_attr "length" "8")
5075    (set_attr "type" "multiple")]
;; -abs(x): mirror of *arm_abssi2 with the sense inverted —
;;  - same reg:  CMP x, #0 then RSBGT (negate when positive);
;;  - diff regs: EOR then RSB with the ASR #31 sign mask.
;; NOTE(review): lines missing from this extract, as in *arm_abssi2.
5078 (define_insn_and_split "*arm_neg_abssi2"
5079   [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
5080 	(neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
5081    (clobber (reg:CC CC_REGNUM))]
5084   "&& reload_completed"
5087   /* if (which_alternative == 0) */
5088   if (REGNO (operands[0]) == REGNO (operands[1]))
5090       /* Emit the pattern:
5091 	 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
5093       emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
5094 			      gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
5095       emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5097 						 gen_rtx_REG (CCmode, CC_REGNUM),
5099 				    gen_rtx_SET (operands[0],
5100 						 (gen_rtx_MINUS (SImode,
5106       /* Emit the pattern:
5107 	 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
5109       emit_insn (gen_rtx_SET (operands[0],
5110 			      gen_rtx_XOR (SImode,
5111 					   gen_rtx_ASHIFTRT (SImode,
5115       emit_insn (gen_rtx_SET (operands[0],
5116 			      gen_rtx_MINUS (SImode,
5117 					     gen_rtx_ASHIFTRT (SImode,
5124   [(set_attr "conds" "clob,*")
5125    (set_attr "shift" "1")
5126    (set_attr "predicable" "no, yes")
5127    (set_attr "length" "8")
5128    (set_attr "type" "multiple")]
;; FP abs/sqrt expanders: matched by the VFP patterns when hard-float is
;; enabled; DFmode additionally needs double-precision VFP.
5131 (define_expand "abssf2"
5132   [(set (match_operand:SF 0 "s_register_operand")
5133 	(abs:SF (match_operand:SF 1 "s_register_operand")))]
5134   "TARGET_32BIT && TARGET_HARD_FLOAT"
5137 (define_expand "absdf2"
5138   [(set (match_operand:DF 0 "s_register_operand")
5139 	(abs:DF (match_operand:DF 1 "s_register_operand")))]
5140   "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5143 (define_expand "sqrtsf2"
5144   [(set (match_operand:SF 0 "s_register_operand")
5145 	(sqrt:SF (match_operand:SF 1 "s_register_operand")))]
5146   "TARGET_32BIT && TARGET_HARD_FLOAT"
5149 (define_expand "sqrtdf2"
5150   [(set (match_operand:DF 0 "s_register_operand")
5151 	(sqrt:DF (match_operand:DF 1 "s_register_operand")))]
5152   "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
;; One's complement: expander plus the MVN insn.
5155 (define_expand "one_cmplsi2"
5156   [(set (match_operand:SI 0 "s_register_operand")
5157 	(not:SI (match_operand:SI 1 "s_register_operand")))]
5162 (define_insn "*arm_one_cmplsi2"
5163   [(set (match_operand:SI 0 "s_register_operand" "=l,r")
5164 	(not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
5167   [(set_attr "predicable" "yes")
5168    (set_attr "predicable_short_it" "yes,no")
5169    (set_attr "arch" "t2,*")
5170    (set_attr "length" "4")
5171    (set_attr "type" "mvn_reg")]
;; MVNS: one's complement that also sets the flags (compare against 0).
5174 (define_insn "*notsi_compare0"
5175   [(set (reg:CC_NOOV CC_REGNUM)
5176 	(compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
5178    (set (match_operand:SI 0 "s_register_operand" "=r")
5179 	(not:SI (match_dup 1)))]
5182   [(set_attr "conds" "set")
5183    (set_attr "type" "mvn_reg")]
;; As above, flags only; result goes to a scratch register.
5186 (define_insn "*notsi_compare0_scratch"
5187   [(set (reg:CC_NOOV CC_REGNUM)
5188 	(compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
5190    (clobber (match_scratch:SI 0 "=r"))]
5193   [(set_attr "conds" "set")
5194    (set_attr "type" "mvn_reg")]
5197 ;; Fixed <--> Floating conversion insns
;; int -> HFmode: no direct pattern, so convert via SFmode (int -> SF,
;; then narrow SF -> HF) and move the result into place.
5199 (define_expand "floatsihf2"
5200   [(set (match_operand:HF 0 "general_operand")
5201 	(float:HF (match_operand:SI 1 "general_operand")))]
5205     rtx op1 = gen_reg_rtx (SFmode);
5206     expand_float (op1, operands[1], 0);
5207     op1 = convert_to_mode (HFmode, op1, 0);
5208     emit_move_insn (operands[0], op1);
;; DImode -> HFmode, same two-step route through SFmode.
5213 (define_expand "floatdihf2"
5214   [(set (match_operand:HF 0 "general_operand")
5215 	(float:HF (match_operand:DI 1 "general_operand")))]
5219     rtx op1 = gen_reg_rtx (SFmode);
5220     expand_float (op1, operands[1], 0);
5221     op1 = convert_to_mode (HFmode, op1, 0);
5222     emit_move_insn (operands[0], op1);
;; int -> SF/DF: straight to the hard-float patterns.
5227 (define_expand "floatsisf2"
5228   [(set (match_operand:SF 0 "s_register_operand")
5229 	(float:SF (match_operand:SI 1 "s_register_operand")))]
5230   "TARGET_32BIT && TARGET_HARD_FLOAT"
5234 (define_expand "floatsidf2"
5235   [(set (match_operand:DF 0 "s_register_operand")
5236 	(float:DF (match_operand:SI 1 "s_register_operand")))]
5237   "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; HF -> int truncation: widen HF to SF first, then fix that.
5241 (define_expand "fix_trunchfsi2"
5242   [(set (match_operand:SI 0 "general_operand")
5243 	(fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
5247     rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5248     expand_fix (operands[0], op1, 0);
5253 (define_expand "fix_trunchfdi2"
5254   [(set (match_operand:DI 0 "general_operand")
5255 	(fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
5259     rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5260     expand_fix (operands[0], op1, 0);
;; SF/DF -> int truncation via the hard-float patterns.
5265 (define_expand "fix_truncsfsi2"
5266   [(set (match_operand:SI 0 "s_register_operand")
5267 	(fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
5268   "TARGET_32BIT && TARGET_HARD_FLOAT"
5272 (define_expand "fix_truncdfsi2"
5273   [(set (match_operand:SI 0 "s_register_operand")
5274 	(fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
5275   "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; DF -> SF narrowing.
5281 (define_expand "truncdfsf2"
5282   [(set (match_operand:SF 0 "s_register_operand")
5284 	 (match_operand:DF 1 "s_register_operand")))]
5285   "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5289 ;; DFmode to HFmode conversions on targets without a single-step hardware
5290 ;; instruction for it would have to go through SFmode.  This is dangerous
5291 ;; as it introduces double rounding.
5293 ;; Disable this pattern unless we are in an unsafe math mode, or we have
5294 ;; a single-step instruction.
5296 (define_expand "truncdfhf2"
5297   [(set (match_operand:HF 0 "s_register_operand")
5299 	 (match_operand:DF 1 "s_register_operand")))]
5300   "(TARGET_EITHER && flag_unsafe_math_optimizations)
5301    || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
5303   /* We don't have a direct instruction for this, so we must be in
5304      an unsafe math mode, and going via SFmode.  */
5306   if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
5309       op1 = convert_to_mode (SFmode, operands[1], 0);
5310       op1 = convert_to_mode (HFmode, op1, 0);
5311       emit_move_insn (operands[0], op1);
5314   /* Otherwise, we will pick this up as a single instruction with
5315      no intermediary rounding.  */
5319 ;; Zero and sign extension instructions.
;; Zero-extend QI/HI/SI to DI: extend (or move) into the low word, zero
;; the high word.  When no pseudos may be created the destination's own
;; subwords serve as the intermediates.
5321 (define_expand "zero_extend<mode>di2"
5322   [(set (match_operand:DI 0 "s_register_operand" "")
5323 	(zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
5324   "TARGET_32BIT <qhs_zextenddi_cond>"
5326   rtx res_lo, res_hi, op0_lo, op0_hi;
5327   res_lo = gen_lowpart (SImode, operands[0]);
5328   res_hi = gen_highpart (SImode, operands[0]);
5329   if (can_create_pseudo_p ())
5331       op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
5332       op0_hi = gen_reg_rtx (SImode);
5336       op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
5339   if (<MODE>mode != SImode)
5340     emit_insn (gen_rtx_SET (op0_lo,
5341 			    gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5342   emit_insn (gen_movsi (op0_hi, const0_rtx));
5343   if (res_lo != op0_lo)
5344     emit_move_insn (res_lo, op0_lo);
5345   if (res_hi != op0_hi)
5346     emit_move_insn (res_hi, op0_hi);
;; Sign-extend to DI: same structure, but the high word is filled with the
;; sign bit via an arithmetic right shift by 31 of the low word.
5351 (define_expand "extend<mode>di2"
5352   [(set (match_operand:DI 0 "s_register_operand" "")
5353 	(sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
5354   "TARGET_32BIT <qhs_sextenddi_cond>"
5356   rtx res_lo, res_hi, op0_lo, op0_hi;
5357   res_lo = gen_lowpart (SImode, operands[0]);
5358   res_hi = gen_highpart (SImode, operands[0]);
5359   if (can_create_pseudo_p ())
5361       op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
5362       op0_hi = gen_reg_rtx (SImode);
5366       op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
5369   if (<MODE>mode != SImode)
5370     emit_insn (gen_rtx_SET (op0_lo,
5371 			    gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5372   emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
5373   if (res_lo != op0_lo)
5374     emit_move_insn (res_lo, op0_lo);
5375   if (res_hi != op0_hi)
5376     emit_move_insn (res_hi, op0_hi);
5381 ;; Splits for all extensions to DImode
;; Anonymous split: a zero_extend-to-DI becomes an extend/move of the low
;; word plus a store of const0 into the high word.
5383   [(set (match_operand:DI 0 "s_register_operand" "")
5384 	(zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5386   [(set (match_dup 0) (match_dup 1))]
5388   rtx lo_part = gen_lowpart (SImode, operands[0]);
5389   machine_mode src_mode = GET_MODE (operands[1]);
5391   if (src_mode == SImode)
5392     emit_move_insn (lo_part, operands[1]);
5394     emit_insn (gen_rtx_SET (lo_part,
5395 			    gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5396   operands[0] = gen_highpart (SImode, operands[0]);
5397   operands[1] = const0_rtx;
;; Anonymous split: a sign_extend-to-DI becomes the low-word extend plus
;; ashiftrt #31 into the high word.
5401   [(set (match_operand:DI 0 "s_register_operand" "")
5402 	(sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5404   [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
5406   rtx lo_part = gen_lowpart (SImode, operands[0]);
5407   machine_mode src_mode = GET_MODE (operands[1]);
5409   if (src_mode == SImode)
5410     emit_move_insn (lo_part, operands[1]);
5412     emit_insn (gen_rtx_SET (lo_part,
5413 			    gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5414   operands[1] = lo_part;
5415   operands[0] = gen_highpart (SImode, operands[0]);
;; HI -> SI zero-extend.  Pre-v4 ARM has no LDRH so MEM sources go through
;; movhi_bytes; pre-v6 register sources are done as shift-left-16 then
;; logical-shift-right-16 (no UXTH available).
5418 (define_expand "zero_extendhisi2"
5419   [(set (match_operand:SI 0 "s_register_operand")
5420 	(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5423   if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
5425       emit_insn (gen_movhi_bytes (operands[0], operands[1]));
5428   if (!arm_arch6 && !MEM_P (operands[1]))
5430       rtx t = gen_lowpart (SImode, operands[1]);
5431       rtx tmp = gen_reg_rtx (SImode);
5432       emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5433       emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
;; Anonymous split implementing the pre-v6 shift-pair form.
5439   [(set (match_operand:SI 0 "s_register_operand" "")
5440 	(zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
5441   "!TARGET_THUMB2 && !arm_arch6"
5442   [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5443    (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
5445   operands[2] = gen_lowpart (SImode, operands[1]);
;; ARMv4..v5: register source split at combine time; MEM source via LDRH.
5448 (define_insn "*arm_zero_extendhisi2"
5449   [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5450 	(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5451   "TARGET_ARM && arm_arch4 && !arm_arch6"
5455   [(set_attr "type" "alu_shift_reg,load_byte")
5456    (set_attr "predicable" "yes")]
;; ARMv6+: single-instruction UXTH / LDRH alternatives (templates missing
;; from this extract).
5459 (define_insn "*arm_zero_extendhisi2_v6"
5460   [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5461 	(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5462   "TARGET_ARM && arm_arch6"
5466   [(set_attr "predicable" "yes")
5467    (set_attr "type" "extend,load_byte")]
;; Fused zero-extend + add: UXTAH.
5470 (define_insn "*arm_zero_extendhisi2addsi"
5471   [(set (match_operand:SI 0 "s_register_operand" "=r")
5472 	(plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5473 		 (match_operand:SI 2 "s_register_operand" "r")))]
5475   "uxtah%?\\t%0, %2, %1"
5476   [(set_attr "type" "alu_shift_reg")
5477    (set_attr "predicable" "yes")]
5480 (define_expand "zero_extendqisi2"
5481 [(set (match_operand:SI 0 "s_register_operand")
5482 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
5485 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
5487 emit_insn (gen_andsi3 (operands[0],
5488 gen_lowpart (SImode, operands[1]),
5492 if (!arm_arch6 && !MEM_P (operands[1]))
5494 rtx t = gen_lowpart (SImode, operands[1]);
5495 rtx tmp = gen_reg_rtx (SImode);
5496 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5497 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
5503 [(set (match_operand:SI 0 "s_register_operand" "")
5504 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
5506 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5507 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
5509 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5512 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
5517 (define_insn "*arm_zero_extendqisi2"
5518 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5519 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
5520 "TARGET_ARM && !arm_arch6"
5523 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5524 [(set_attr "length" "8,4")
5525 (set_attr "type" "alu_shift_reg,load_byte")
5526 (set_attr "predicable" "yes")]
;; ARMv6+ QI->SI zero-extend: register alternative (presumably UXTB —
;; the first template line, orig. 5533-5534, is missing from this
;; extract) or LDRB from a "Uh"-constrained memory operand.
5529 (define_insn "*arm_zero_extendqisi2_v6"
5530 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5531 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
5532 "TARGET_ARM && arm_arch6"
5535 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5536 [(set_attr "type" "extend,load_byte")
5537 (set_attr "predicable" "yes")]
;; Fused zero-extend-and-add for bytes: UXTAB adds the zero-extended
;; byte in operand 1 to operand 2.
;; NOTE(review): the insn condition (orig. 5544) is missing from this
;; extract — confirm the guard against the full file.
5540 (define_insn "*arm_zero_extendqisi2addsi"
5541 [(set (match_operand:SI 0 "s_register_operand" "=r")
5542 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5543 (match_operand:SI 2 "s_register_operand" "r")))]
5545 "uxtab%?\\t%0, %2, %1"
5546 [(set_attr "predicable" "yes")
5547 (set_attr "type" "alu_shift_reg")]
5551 [(set (match_operand:SI 0 "s_register_operand" "")
5552 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
5553 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5554 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
5555 [(set (match_dup 2) (match_dup 1))
5556 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5561 [(set (match_operand:SI 0 "s_register_operand" "")
5562 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
5563 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5564 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
5565 [(set (match_dup 2) (match_dup 1))
5566 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5572 [(set (match_operand:SI 0 "s_register_operand" "")
5573 (IOR_XOR:SI (and:SI (ashift:SI
5574 (match_operand:SI 1 "s_register_operand" "")
5575 (match_operand:SI 2 "const_int_operand" ""))
5576 (match_operand:SI 3 "const_int_operand" ""))
5578 (match_operator 5 "subreg_lowpart_operator"
5579 [(match_operand:SI 4 "s_register_operand" "")]))))]
5581 && (UINTVAL (operands[3])
5582 == (GET_MODE_MASK (GET_MODE (operands[5]))
5583 & (GET_MODE_MASK (GET_MODE (operands[5]))
5584 << (INTVAL (operands[2])))))"
5585 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
5587 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
5588 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
5591 (define_insn "*compareqi_eq0"
5592 [(set (reg:CC_Z CC_REGNUM)
5593 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
5597 [(set_attr "conds" "set")
5598 (set_attr "predicable" "yes")
5599 (set_attr "type" "logic_imm")]
;; Expander for HImode -> SImode sign-extension.  Dispatches on target:
;; Thumb-1 uses its own pattern; pre-v4 ARM memory sources go through
;; extendhisi2_mem (no LDRSH available); pre-v6 register sources are
;; open-coded as shift-left-16 / arithmetic-shift-right-16.
;; NOTE(review): the surrounding if/DONE structure (orig. 5605-5625,
;; partially elided) is incomplete in this extract.
5602 (define_expand "extendhisi2"
5603 [(set (match_operand:SI 0 "s_register_operand")
5604 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5609 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
5612 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
5614 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
5618 if (!arm_arch6 && !MEM_P (operands[1]))
5620 rtx t = gen_lowpart (SImode, operands[1]);
5621 rtx tmp = gen_reg_rtx (SImode);
5622 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5623 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
5630 [(set (match_operand:SI 0 "register_operand" "")
5631 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
5632 (clobber (match_scratch:SI 2 ""))])]
5634 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5635 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5637 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5640 ;; This pattern will only be used when ldsh is not available
;; Sign-extending HI load for targets without LDRSH: loads the two bytes
;; separately (addresses addr and addr+1), then reassembles with
;; shift/or so the high byte is sign-extended.  operands[4]/[5] are the
;; high/low byte temporaries, swapped according to BYTES_BIG_ENDIAN.
;; NOTE(review): several preparation lines (mem1/mem2 emission, braces,
;; DONE) are missing from this extract — treat the body as partial.
5641 (define_expand "extendhisi2_mem"
5642 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5644 (zero_extend:SI (match_dup 7)))
5645 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
5646 (set (match_operand:SI 0 "" "")
5647 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
5652 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5654 mem1 = change_address (operands[1], QImode, addr);
5655 mem2 = change_address (operands[1], QImode,
5656 plus_constant (Pmode, addr, 1));
5657 operands[0] = gen_lowpart (SImode, operands[0]);
5659 operands[2] = gen_reg_rtx (SImode);
5660 operands[3] = gen_reg_rtx (SImode);
5661 operands[6] = gen_reg_rtx (SImode);
5664 if (BYTES_BIG_ENDIAN)
5666 operands[4] = operands[2];
5667 operands[5] = operands[3];
5671 operands[4] = operands[3];
5672 operands[5] = operands[2];
5678 [(set (match_operand:SI 0 "register_operand" "")
5679 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
5681 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5682 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5684 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
;; ARMv4..v5 HI->SI sign-extend: register alternative (length 8, a
;; two-insn shift sequence) or a single load from a "Uh" memory operand.
;; NOTE(review): the output template lines (orig. 5691-5693) are missing
;; from this extract.
5687 (define_insn "*arm_extendhisi2"
5688 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5689 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5690 "TARGET_ARM && arm_arch4 && !arm_arch6"
5694 [(set_attr "length" "8,4")
5695 (set_attr "type" "alu_shift_reg,load_byte")
5696 (set_attr "predicable" "yes")]
5699 ;; ??? Check Thumb-2 pool range
5700 (define_insn "*arm_extendhisi2_v6"
5701 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5702 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5703 "TARGET_32BIT && arm_arch6"
5707 [(set_attr "type" "extend,load_byte")
5708 (set_attr "predicable" "yes")]
;; Fused sign-extend-and-add: SXTAH adds the sign-extended half-word in
;; operand 1 to operand 2.
;; NOTE(review): the insn condition (orig. 5715) is missing from this
;; extract — confirm the guard against the full file.
5711 (define_insn "*arm_extendhisi2addsi"
5712 [(set (match_operand:SI 0 "s_register_operand" "=r")
5713 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5714 (match_operand:SI 2 "s_register_operand" "r")))]
5716 "sxtah%?\\t%0, %2, %1"
5717 [(set_attr "type" "alu_shift_reg")]
5720 (define_expand "extendqihi2"
5722 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
5724 (set (match_operand:HI 0 "s_register_operand")
5725 (ashiftrt:SI (match_dup 2)
5730 if (arm_arch4 && MEM_P (operands[1]))
5732 emit_insn (gen_rtx_SET (operands[0],
5733 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
5736 if (!s_register_operand (operands[1], QImode))
5737 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5738 operands[0] = gen_lowpart (SImode, operands[0]);
5739 operands[1] = gen_lowpart (SImode, operands[1]);
5740 operands[2] = gen_reg_rtx (SImode);
5744 (define_insn "*arm_extendqihi_insn"
5745 [(set (match_operand:HI 0 "s_register_operand" "=r")
5746 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
5747 "TARGET_ARM && arm_arch4"
5749 [(set_attr "type" "load_byte")
5750 (set_attr "predicable" "yes")]
;; Expander for QImode -> SImode sign-extension.  Pre-v4 targets cannot
;; load a sign-extended byte, so memory sources are forced into a
;; register first; pre-v6 register sources are open-coded as a
;; shift-left-24 / arithmetic-shift-right-24 pair.
;; NOTE(review): braces/DONE lines between the visible statements are
;; missing from this extract.
5753 (define_expand "extendqisi2"
5754 [(set (match_operand:SI 0 "s_register_operand")
5755 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
5758 if (!arm_arch4 && MEM_P (operands[1]))
5759 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5761 if (!arm_arch6 && !MEM_P (operands[1]))
5763 rtx t = gen_lowpart (SImode, operands[1]);
5764 rtx tmp = gen_reg_rtx (SImode);
5765 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5766 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
5772 [(set (match_operand:SI 0 "register_operand" "")
5773 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
5775 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5776 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
5778 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5781 (define_insn "*arm_extendqisi"
5782 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5783 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5784 "TARGET_ARM && arm_arch4 && !arm_arch6"
5788 [(set_attr "length" "8,4")
5789 (set_attr "type" "alu_shift_reg,load_byte")
5790 (set_attr "predicable" "yes")]
5793 (define_insn "*arm_extendqisi_v6"
5794 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5796 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5797 "TARGET_ARM && arm_arch6"
5801 [(set_attr "type" "extend,load_byte")
5802 (set_attr "predicable" "yes")]
;; Fused sign-extend-and-add for bytes: SXTAB adds the sign-extended
;; byte in operand 1 to operand 2.
;; NOTE(review): the insn condition (orig. 5809) is missing from this
;; extract — confirm the guard against the full file.
5805 (define_insn "*arm_extendqisi2addsi"
5806 [(set (match_operand:SI 0 "s_register_operand" "=r")
5807 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5808 (match_operand:SI 2 "s_register_operand" "r")))]
5810 "sxtab%?\\t%0, %2, %1"
5811 [(set_attr "type" "alu_shift_reg")
5812 (set_attr "predicable" "yes")]
5815 (define_insn "arm_<sup>xtb16"
5816 [(set (match_operand:SI 0 "s_register_operand" "=r")
5818 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
5820 "<sup>xtb16%?\\t%0, %1"
5821 [(set_attr "predicable" "yes")
5822 (set_attr "type" "alu_dsp_reg")])
5824 (define_insn "arm_<simd32_op>"
5825 [(set (match_operand:SI 0 "s_register_operand" "=r")
5827 [(match_operand:SI 1 "s_register_operand" "r")
5828 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
5830 "<simd32_op>%?\\t%0, %1, %2"
5831 [(set_attr "predicable" "yes")
5832 (set_attr "type" "alu_dsp_reg")])
5834 (define_insn "arm_usada8"
5835 [(set (match_operand:SI 0 "s_register_operand" "=r")
5837 [(match_operand:SI 1 "s_register_operand" "r")
5838 (match_operand:SI 2 "s_register_operand" "r")
5839 (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
5841 "usada8%?\\t%0, %1, %2, %3"
5842 [(set_attr "predicable" "yes")
5843 (set_attr "type" "alu_dsp_reg")])
5845 (define_insn "arm_<simd32_op>"
5846 [(set (match_operand:DI 0 "s_register_operand" "=r")
5848 [(match_operand:SI 1 "s_register_operand" "r")
5849 (match_operand:SI 2 "s_register_operand" "r")
5850 (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
5852 "<simd32_op>%?\\t%Q0, %R0, %1, %2"
5853 [(set_attr "predicable" "yes")
5854 (set_attr "type" "smlald")])
5856 (define_insn "arm_<simd32_op>"
5857 [(set (match_operand:SI 0 "s_register_operand" "=r")
5859 [(match_operand:SI 1 "s_register_operand" "r")
5860 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_GE))
5861 (set (reg:CC APSRGE_REGNUM)
5862 (unspec:CC [(reg:CC APSRGE_REGNUM)] UNSPEC_GE_SET))]
5864 "<simd32_op>%?\\t%0, %1, %2"
5865 [(set_attr "predicable" "yes")
5866 (set_attr "type" "alu_sreg")])
5868 (define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
5869 [(set (match_operand:SI 0 "s_register_operand" "=r")
5871 [(match_operand:SI 1 "s_register_operand" "r")
5872 (match_operand:SI 2 "s_register_operand" "r")
5873 (match_operand:SI 3 "s_register_operand" "r")] SIMD32_TERNOP_Q))]
5874 "TARGET_INT_SIMD && <add_clobber_q_pred>"
5875 "<simd32_op>%?\\t%0, %1, %2, %3"
5876 [(set_attr "predicable" "yes")
5877 (set_attr "type" "alu_sreg")])
5879 (define_expand "arm_<simd32_op>"
5880 [(set (match_operand:SI 0 "s_register_operand")
5882 [(match_operand:SI 1 "s_register_operand")
5883 (match_operand:SI 2 "s_register_operand")
5884 (match_operand:SI 3 "s_register_operand")] SIMD32_TERNOP_Q))]
5888 emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
5889 operands[2], operands[3]));
5891 emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
5892 operands[2], operands[3]));
5897 (define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
5898 [(set (match_operand:SI 0 "s_register_operand" "=r")
5900 [(match_operand:SI 1 "s_register_operand" "r")
5901 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_BINOP_Q))]
5902 "TARGET_INT_SIMD && <add_clobber_q_pred>"
5903 "<simd32_op>%?\\t%0, %1, %2"
5904 [(set_attr "predicable" "yes")
5905 (set_attr "type" "alu_sreg")])
5907 (define_expand "arm_<simd32_op>"
5908 [(set (match_operand:SI 0 "s_register_operand")
5910 [(match_operand:SI 1 "s_register_operand")
5911 (match_operand:SI 2 "s_register_operand")] SIMD32_BINOP_Q))]
5915 emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
5918 emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
5924 (define_insn "arm_sel"
5925 [(set (match_operand:SI 0 "s_register_operand" "=r")
5927 [(match_operand:SI 1 "s_register_operand" "r")
5928 (match_operand:SI 2 "s_register_operand" "r")
5929 (reg:CC APSRGE_REGNUM)] UNSPEC_SEL))]
5931 "sel%?\\t%0, %1, %2"
5932 [(set_attr "predicable" "yes")
5933 (set_attr "type" "alu_sreg")])
;; Standard-named expander for SF -> DF float extension; only enabled
;; with hardware double-precision FP (excluded for single-precision-only
;; VFP).  No preparation statements — the RTL template stands alone.
5935 (define_expand "extendsfdf2"
5936 [(set (match_operand:DF 0 "s_register_operand")
5937 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
5938 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5942 ;; HFmode -> DFmode conversions where we don't have an instruction for it
5943 ;; must go through SFmode.
5945 ;; This is always safe for an extend.
;; HF -> DF extension.  Without a direct HF->DF instruction
;; (TARGET_FP16_TO_DOUBLE), the conversion is done in two rounding-free
;; steps via SFmode; extension is always exact, so this is safe.
;; NOTE(review): the enabling condition and the braces/DONE around the
;; visible statements are missing from this extract.
5947 (define_expand "extendhfdf2"
5948 [(set (match_operand:DF 0 "s_register_operand")
5949 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
5952 /* We don't have a direct instruction for this, so go via SFmode. */
5953 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
5956 op1 = convert_to_mode (SFmode, operands[1], 0);
5957 op1 = convert_to_mode (DFmode, op1, 0);
5958 emit_insn (gen_movdf (operands[0], op1));
5961 /* Otherwise, we're done producing RTL and will pick up the correct
5962 pattern to do this with one rounding-step in a single instruction. */
5966 ;; Move insns (including loads and stores)
5968 ;; XXX Just some ideas about movti.
5969 ;; I don't think these are a good idea on the arm, there just aren't enough
5971 ;;(define_expand "loadti"
5972 ;; [(set (match_operand:TI 0 "s_register_operand")
5973 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
5976 ;;(define_expand "storeti"
5977 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
5978 ;; (match_operand:TI 1 "s_register_operand"))]
5981 ;;(define_expand "movti"
5982 ;; [(set (match_operand:TI 0 "general_operand")
5983 ;; (match_operand:TI 1 "general_operand"))]
5989 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
5990 ;; operands[1] = copy_to_reg (operands[1]);
5991 ;; if (MEM_P (operands[0]))
5992 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
5993 ;; else if (MEM_P (operands[1]))
5994 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
5998 ;; emit_insn (insn);
6002 ;; Recognize garbage generated above.
6005 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
6006 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
6010 ;; register mem = (which_alternative < 3);
6011 ;; register const char *template;
6013 ;; operands[mem] = XEXP (operands[mem], 0);
6014 ;; switch (which_alternative)
6016 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
6017 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
6018 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
6019 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
6020 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
6021 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
6023 ;; output_asm_insn (template, operands);
;; DImode move expander.  Legalizes mem-to-mem moves by forcing the
;; source into a register, then handles hard registers for which DImode
;; is not valid (odd-numbered pairs in ARM state) by splitting into two
;; SImode moves of the low and high parts.  Volatile memory operands are
;; staged through a fresh DImode pseudo so the access stays a single
;; LDRD/STRD rather than two word accesses.
;; NOTE(review): braces, DONE statements and some connective lines are
;; missing from this extract; the visible statements are a partial view
;; of the full control flow.
6027 (define_expand "movdi"
6028 [(set (match_operand:DI 0 "general_operand")
6029 (match_operand:DI 1 "general_operand"))]
6032 gcc_checking_assert (aligned_operand (operands[0], DImode));
6033 gcc_checking_assert (aligned_operand (operands[1], DImode));
6034 if (can_create_pseudo_p ())
6036 if (!REG_P (operands[0]))
6037 operands[1] = force_reg (DImode, operands[1]);
6039 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
6040 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
6042 /* Avoid LDRD's into an odd-numbered register pair in ARM state
6043 when expanding function calls. */
6044 gcc_assert (can_create_pseudo_p ());
6045 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
6047 /* Perform load into legal reg pair first, then move. */
6048 rtx reg = gen_reg_rtx (DImode);
6049 emit_insn (gen_movdi (reg, operands[1]));
6052 emit_move_insn (gen_lowpart (SImode, operands[0]),
6053 gen_lowpart (SImode, operands[1]));
6054 emit_move_insn (gen_highpart (SImode, operands[0]),
6055 gen_highpart (SImode, operands[1]));
6058 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
6059 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
6061 /* Avoid STRD's from an odd-numbered register pair in ARM state
6062 when expanding function prologue. */
6063 gcc_assert (can_create_pseudo_p ());
6064 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
6065 ? gen_reg_rtx (DImode)
6067 emit_move_insn (gen_lowpart (SImode, split_dest),
6068 gen_lowpart (SImode, operands[1]));
6069 emit_move_insn (gen_highpart (SImode, split_dest),
6070 gen_highpart (SImode, operands[1]));
6071 if (split_dest != operands[0])
6072 emit_insn (gen_movdi (operands[0], split_dest));
;; Soft-float DImode move insn: five alternatives (three classes of
;; immediate/register source plus load and store), lengths 8-16 bytes.
;; Output goes through output_move_double; when literal pools are
;; disabled a constant that would need the pool is instead materialized
;; via MOV/MOVT.  Pool-range attributes bound how far a literal-pool
;; entry may be placed for the load alternative.
;; NOTE(review): parts of the insn condition and the switch body (orig.
;; 6081-6101, partially elided) are missing from this extract.
6078 (define_insn "*arm_movdi"
6079 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
6080 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
6082 && !(TARGET_HARD_FLOAT)
6084 && ( register_operand (operands[0], DImode)
6085 || register_operand (operands[1], DImode))"
6087 switch (which_alternative)
6094 /* Cannot load it directly, split to load it via MOV / MOVT. */
6095 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6099 return output_move_double (operands, true, NULL);
6102 [(set_attr "length" "8,12,16,8,8")
6103 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6104 (set_attr "arm_pool_range" "*,*,*,1020,*")
6105 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6106 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
6107 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6111 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6112 (match_operand:ANY64 1 "immediate_operand" ""))]
6115 && (arm_disable_literal_pool
6116 || (arm_const_double_inline_cost (operands[1])
6117 <= arm_max_const_double_inline_cost ()))"
6120 arm_split_constant (SET, SImode, curr_insn,
6121 INTVAL (gen_lowpart (SImode, operands[1])),
6122 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
6123 arm_split_constant (SET, SImode, curr_insn,
6124 INTVAL (gen_highpart_mode (SImode,
6125 GET_MODE (operands[0]),
6127 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
6132 ; If optimizing for size, or if we have load delay slots, then
6133 ; we want to split the constant into two separate operations.
6134 ; In both cases this may split a trivial part into a single data op
6135 ; leaving a single complex constant to load. We can also get longer
6136 ; offsets in a LDR which means we get better chances of sharing the pool
6137 ; entries. Finally, we can normally do a better job of scheduling
6138 ; LDR instructions than we can with LDM.
6139 ; This pattern will only match if the one above did not.
6141 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6142 (match_operand:ANY64 1 "const_double_operand" ""))]
6143 "TARGET_ARM && reload_completed
6144 && arm_const_double_by_parts (operands[1])"
6145 [(set (match_dup 0) (match_dup 1))
6146 (set (match_dup 2) (match_dup 3))]
6148 operands[2] = gen_highpart (SImode, operands[0]);
6149 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
6151 operands[0] = gen_lowpart (SImode, operands[0]);
6152 operands[1] = gen_lowpart (SImode, operands[1]);
6157 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6158 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
6159 "TARGET_EITHER && reload_completed"
6160 [(set (match_dup 0) (match_dup 1))
6161 (set (match_dup 2) (match_dup 3))]
6163 operands[2] = gen_highpart (SImode, operands[0]);
6164 operands[3] = gen_highpart (SImode, operands[1]);
6165 operands[0] = gen_lowpart (SImode, operands[0]);
6166 operands[1] = gen_lowpart (SImode, operands[1]);
6168 /* Handle a partial overlap. */
6169 if (rtx_equal_p (operands[0], operands[3]))
6171 rtx tmp0 = operands[0];
6172 rtx tmp1 = operands[1];
6174 operands[0] = operands[2];
6175 operands[1] = operands[3];
6182 ;; We can't actually do base+index doubleword loads if the index and
6183 ;; destination overlap. Split here so that we at least have chance to
6186 [(set (match_operand:DI 0 "s_register_operand" "")
6187 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
6188 (match_operand:SI 2 "s_register_operand" ""))))]
6190 && reg_overlap_mentioned_p (operands[0], operands[1])
6191 && reg_overlap_mentioned_p (operands[0], operands[2])"
6193 (plus:SI (match_dup 1)
6196 (mem:DI (match_dup 4)))]
6198 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
;; SImode move expander.  Forces mem=const / mem=mem into legal shapes,
;; early-splits constants that are not valid ARM immediates (either as a
;; literal or inverted) via arm_split_constant, handles symbol+offset
;; constants the backend cannot force into the constant pool, and
;; legitimizes TLS and PIC references.
;; NOTE(review): braces, DONE statements and several connective lines
;; (e.g. the flag_pic condition preceding orig. 6263) are missing from
;; this extract; the visible statements are a partial view.
6202 (define_expand "movsi"
6203 [(set (match_operand:SI 0 "general_operand")
6204 (match_operand:SI 1 "general_operand"))]
6208 rtx base, offset, tmp;
6210 gcc_checking_assert (aligned_operand (operands[0], SImode));
6211 gcc_checking_assert (aligned_operand (operands[1], SImode));
6212 if (TARGET_32BIT || TARGET_HAVE_MOVT)
6214 /* Everything except mem = const or mem = mem can be done easily. */
6215 if (MEM_P (operands[0]))
6216 operands[1] = force_reg (SImode, operands[1]);
6217 if (arm_general_register_operand (operands[0], SImode)
6218 && CONST_INT_P (operands[1])
6219 && !(const_ok_for_arm (INTVAL (operands[1]))
6220 || const_ok_for_arm (~INTVAL (operands[1]))))
6222 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
6224 emit_insn (gen_rtx_SET (operands[0], operands[1]));
6229 arm_split_constant (SET, SImode, NULL_RTX,
6230 INTVAL (operands[1]), operands[0], NULL_RTX,
6231 optimize && can_create_pseudo_p ());
6236 else /* Target doesn't have MOVT... */
6238 if (can_create_pseudo_p ())
6240 if (!REG_P (operands[0]))
6241 operands[1] = force_reg (SImode, operands[1]);
6245 split_const (operands[1], &base, &offset);
6246 if (INTVAL (offset) != 0
6247 && targetm.cannot_force_const_mem (SImode, operands[1]))
6249 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6250 emit_move_insn (tmp, base);
6251 emit_insn (gen_addsi3 (operands[0], tmp, offset));
6255 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
6257 /* Recognize the case where operand[1] is a reference to thread-local
6258 data and load its address to a register. Offsets have been split off
6260 if (arm_tls_referenced_p (operands[1]))
6261 operands[1] = legitimize_tls_address (operands[1], tmp);
6263 && (CONSTANT_P (operands[1])
6264 || symbol_mentioned_p (operands[1])
6265 || label_mentioned_p (operands[1])))
6267 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
6272 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
6273 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
6274 ;; so this does not matter.
;; MOVT insn: writes the upper 16 bits of operand 0 with #:upper16: of a
;; symbolic constant, keeping the low half (operand 1 is tied to the
;; destination via the "0" constraint).  Modelled as lo_sum — per the
;; file comment above, ARM's HIGH/LO_SUM are deliberately "backwards".
;; Two alternatives: predicable 32-bit encoding vs. v8-M.base.
6275 (define_insn "*arm_movt"
6276 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
6277 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
6278 (match_operand:SI 2 "general_operand" "i,i")))]
6279 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
6281 movt%?\t%0, #:upper16:%c2
6282 movt\t%0, #:upper16:%c2"
6283 [(set_attr "arch" "32,v8mb")
6284 (set_attr "predicable" "yes")
6285 (set_attr "length" "4")
6286 (set_attr "type" "alu_sreg")]
;; Core ARM-state SImode move: register copy, MOV/MVN immediates, MOVW
;; ("j", v6t2-only alternative), literal-pool load, and store.  Requires
;; at least one register operand; disabled when IWMMXT/VFP patterns
;; handle SImode moves instead.
;; NOTE(review): the multi-alternative output template (orig. 6295-6301)
;; is missing from this extract.
6289 (define_insn "*arm_movsi_insn"
6290 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
6291 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
6292 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
6293 && ( register_operand (operands[0], SImode)
6294 || register_operand (operands[1], SImode))"
6302 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
6303 (set_attr "predicable" "yes")
6304 (set_attr "arch" "*,*,*,v6t2,*,*")
6305 (set_attr "pool_range" "*,*,*,*,4096,*")
6306 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
6310 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6311 (match_operand:SI 1 "const_int_operand" ""))]
6312 "(TARGET_32BIT || TARGET_HAVE_MOVT)
6313 && (!(const_ok_for_arm (INTVAL (operands[1]))
6314 || const_ok_for_arm (~INTVAL (operands[1]))))"
6315 [(clobber (const_int 0))]
6317 arm_split_constant (SET, SImode, NULL_RTX,
6318 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
6323 ;; A normal way to do (symbol + offset) requires three instructions at least
6324 ;; (depends on how big the offset is) as below:
6325 ;; movw r0, #:lower16:g
6326 ;; movt r0, #:upper16:g
6329 ;; A better way would be:
6330 ;; movw r0, #:lower16:g+4
6331 ;; movw r0, #:upper16:g+4
6333 ;; The limitation of this way is that the length of offset should be a 16-bit
6334 ;; signed value, because current assembler only supports REL type relocation for
6335 ;; such case. If the more powerful RELA type is supported in future, we should
6336 ;; update this pattern to go with better way.
6338 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6339 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
6340 (match_operand:SI 2 "const_int_operand" ""))))]
6343 && arm_disable_literal_pool
6345 && GET_CODE (operands[1]) == SYMBOL_REF"
6346 [(clobber (const_int 0))]
6348 int offset = INTVAL (operands[2]);
6350 if (offset < -0x8000 || offset > 0x7fff)
6352 arm_emit_movpair (operands[0], operands[1]);
6353 emit_insn (gen_rtx_SET (operands[0],
6354 gen_rtx_PLUS (SImode, operands[0], operands[2])));
6358 rtx op = gen_rtx_CONST (SImode,
6359 gen_rtx_PLUS (SImode, operands[1], operands[2]));
6360 arm_emit_movpair (operands[0], op);
6365 ;; Split symbol_refs at the later stage (after cprop), instead of generating
6366 ;; movt/movw pair directly at expand. Otherwise corresponding high_sum
6367 ;; and lo_sum would be merged back into memory load at cprop. However,
6368 ;; if the default is to prefer movt/movw rather than a load from the constant
6369 ;; pool, the performance is better.
6371 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6372 (match_operand:SI 1 "general_operand" ""))]
6373 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
6374 && !target_word_relocations
6375 && !arm_tls_referenced_p (operands[1])"
6376 [(clobber (const_int 0))]
6378 arm_emit_movpair (operands[0], operands[1]);
6382 ;; When generating pic, we need to load the symbol offset into a register.
6383 ;; So that the optimizer does not confuse this with a normal symbol load
6384 ;; we use an unspec. The offset will be loaded from a constant pool entry,
6385 ;; since that is the only type of relocation we can use.
6387 ;; Wrap calculation of the whole PIC address in a single pattern for the
6388 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
6389 ;; a PIC address involves two loads from memory, so we want to CSE it
6390 ;; as often as possible.
6391 ;; This pattern will be split into one of the pic_load_addr_* patterns
6392 ;; and a move after GCSE optimizations.
6394 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
6395 (define_expand "calculate_pic_address"
6396 [(set (match_operand:SI 0 "register_operand")
6397 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
6398 (unspec:SI [(match_operand:SI 2 "" "")]
6403 ;; Split calculate_pic_address into pic_load_addr_* and a move.
6405 [(set (match_operand:SI 0 "register_operand" "")
6406 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
6407 (unspec:SI [(match_operand:SI 2 "" "")]
6410 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
6411 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
6412 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
6415 ;; operand1 is the memory address to go into
6416 ;; pic_load_addr_32bit.
6417 ;; operand2 is the PIC label to be emitted
6418 ;; from pic_add_dot_plus_eight.
6419 ;; We do this to allow hoisting of the entire insn.
6420 (define_insn_and_split "pic_load_addr_unified"
6421 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
6422 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
6423 (match_operand:SI 2 "" "")]
6424 UNSPEC_PIC_UNIFIED))]
6427 "&& reload_completed"
6428 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
6429 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
6430 (match_dup 2)] UNSPEC_PIC_BASE))]
6431 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
6432 [(set_attr "type" "load_4,load_4,load_4")
6433 (set_attr "pool_range" "4096,4094,1022")
6434 (set_attr "neg_pool_range" "4084,0,0")
6435 (set_attr "arch" "a,t2,t1")
6436 (set_attr "length" "8,6,4")]
6439 ;; The rather odd constraints on the following are to force reload to leave
6440 ;; the insn alone, and to force the minipool generation pass to then move
6441 ;; the GOT symbol to memory.
6443 (define_insn "pic_load_addr_32bit"
6444 [(set (match_operand:SI 0 "s_register_operand" "=r")
6445 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6446 "TARGET_32BIT && flag_pic"
6448 [(set_attr "type" "load_4")
6449 (set (attr "pool_range")
6450 (if_then_else (eq_attr "is_thumb" "no")
6453 (set (attr "neg_pool_range")
6454 (if_then_else (eq_attr "is_thumb" "no")
6459 (define_insn "pic_load_addr_thumb1"
6460 [(set (match_operand:SI 0 "s_register_operand" "=l")
6461 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6462 "TARGET_THUMB1 && flag_pic"
6464 [(set_attr "type" "load_4")
6465 (set (attr "pool_range") (const_int 1018))]
6468 (define_insn "pic_add_dot_plus_four"
6469 [(set (match_operand:SI 0 "register_operand" "=r")
6470 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
6472 (match_operand 2 "" "")]
6476 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6477 INTVAL (operands[2]));
6478 return \"add\\t%0, %|pc\";
6480 [(set_attr "length" "2")
6481 (set_attr "type" "alu_sreg")]
6484 (define_insn "pic_add_dot_plus_eight"
6485 [(set (match_operand:SI 0 "register_operand" "=r")
6486 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6488 (match_operand 2 "" "")]
6492 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6493 INTVAL (operands[2]));
6494 return \"add%?\\t%0, %|pc, %1\";
6496 [(set_attr "predicable" "yes")
6497 (set_attr "type" "alu_sreg")]
;; TLS address load: emits the local label "LPICn" (n = operand 2) that
;; anchors the pc-relative offset, then a single pc-relative LDR.  This
;; is the crunched form of pic_add_dot_plus_eight + load produced by the
;; peephole below.
;; NOTE(review): the unspec tail and insn condition (orig. 6503-6507)
;; are missing from this extract.
6500 (define_insn "tls_load_dot_plus_eight"
6501 [(set (match_operand:SI 0 "register_operand" "=r")
6502 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6504 (match_operand 2 "" "")]
6508 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6509 INTVAL (operands[2]));
6510 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
6512 [(set_attr "predicable" "yes")
6513 (set_attr "type" "load_4")]
6516 ;; PIC references to local variables can generate pic_add_dot_plus_eight
6517 ;; followed by a load. These sequences can be crunched down to
6518 ;; tls_load_dot_plus_eight by a peephole.
6521 [(set (match_operand:SI 0 "register_operand" "")
6522 (unspec:SI [(match_operand:SI 3 "register_operand" "")
6524 (match_operand 1 "" "")]
6526 (set (match_operand:SI 2 "arm_general_register_operand" "")
6527 (mem:SI (match_dup 0)))]
6528 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
6530 (mem:SI (unspec:SI [(match_dup 3)
;; VxWorks RTP PIC: load a word at base-register + GOT-offset unspec
;; with a single register-offset LDR.  Only for ARM state with -fpic
;; under TARGET_VXWORKS_RTP.
6537 (define_insn "pic_offset_arm"
6538 [(set (match_operand:SI 0 "register_operand" "=r")
6539 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
6540 (unspec:SI [(match_operand:SI 2 "" "X")]
6541 UNSPEC_PIC_OFFSET))))]
6542 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
6543 "ldr%?\\t%0, [%1,%2]"
6544 [(set_attr "type" "load_4")]
6547 (define_expand "builtin_setjmp_receiver"
6548 [(label_ref (match_operand 0 "" ""))]
6552 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
6554 if (arm_pic_register != INVALID_REGNUM)
6555 arm_load_pic_register (1UL << 3, NULL_RTX);
6559 ;; If copying one reg to another we can set the condition codes according to
6560 ;; its value. Such a move is common after a return from subroutine and the
6561 ;; result is being tested against zero.
;; Move-and-test: copies operand 1 while setting the condition codes
;; from a compare against zero (the visible alternative uses
;; SUBS %0, %1, #0).  Used when a register copy is immediately followed
;; by a test against zero, e.g. after a call returns.
;; NOTE(review): the compare's zero operand, condition and first
;; template line (orig. 6566, 6568-6571) are missing from this extract.
6563 (define_insn "*movsi_compare0"
6564 [(set (reg:CC CC_REGNUM)
6565 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
6567 (set (match_operand:SI 0 "s_register_operand" "=r,r")
6572 subs%?\\t%0, %1, #0"
6573 [(set_attr "conds" "set")
6574 (set_attr "type" "alus_imm,alus_imm")]
6577 ;; Subroutine to store a half word from a register into memory.
6578 ;; Operand 0 is the source register (HImode)
6579 ;; Operand 1 is the destination address in a register (SImode)
6581 ;; In both this routine and the next, we must be careful not to spill
6582 ;; a memory address of reg+large_const into a separate PLUS insn, since this
6583 ;; can generate unrecognizable rtl.
;; Store a half-word as two byte stores (little-endian order): low byte
;; to the original address, high byte (value >> 8, staged through
;; operand 2) to address+1.  Complex addresses (reg+reg, etc.) are
;; forced into a plain register first so adjust_address cannot spill a
;; reg+large_const sum into a separate, unrecognizable PLUS insn (see
;; the file comment above).
;; NOTE(review): parts of the RTL template and preparation statements
;; (orig. 6589, 6593-6601) are missing from this extract.
6585 (define_expand "storehi"
6586 [;; store the low byte
6587 (set (match_operand 1 "" "") (match_dup 3))
6588 ;; extract the high byte
6590 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6591 ;; store the high byte
6592 (set (match_dup 4) (match_dup 5))]
6596 rtx op1 = operands[1];
6597 rtx addr = XEXP (op1, 0);
6598 enum rtx_code code = GET_CODE (addr);
6600 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6602 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
6604 operands[4] = adjust_address (op1, QImode, 1);
6605 operands[1] = adjust_address (operands[1], QImode, 0);
6606 operands[3] = gen_lowpart (QImode, operands[0]);
6607 operands[0] = gen_lowpart (SImode, operands[0]);
6608 operands[2] = gen_reg_rtx (SImode);
6609 operands[5] = gen_lowpart (QImode, operands[2]);
;; Big-endian counterpart of "storehi": byte order of the two QImode stores is
;; swapped (match_dup 4 is written first).  Same address-legitimization logic.
6613 (define_expand "storehi_bigend"
6614 [(set (match_dup 4) (match_dup 3))
6616 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6617 (set (match_operand 1 "" "") (match_dup 5))]
6621 rtx op1 = operands[1];
6622 rtx addr = XEXP (op1, 0);
6623 enum rtx_code code = GET_CODE (addr);
6625 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6627 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
6629 operands[4] = adjust_address (op1, QImode, 1);
6630 operands[1] = adjust_address (operands[1], QImode, 0);
6631 operands[3] = gen_lowpart (QImode, operands[0]);
6632 operands[0] = gen_lowpart (SImode, operands[0]);
6633 operands[2] = gen_reg_rtx (SImode);
6634 operands[5] = gen_lowpart (QImode, operands[2]);
6638 ;; Subroutine to store a half word integer constant into memory.
;; Splits a CONST_INT halfword store into two byte stores.  When both bytes of
;; the constant are equal, the second store reuses the first register instead
;; of materializing a second constant.  Byte order depends on BYTES_BIG_ENDIAN.
6639 (define_expand "storeinthi"
6640 [(set (match_operand 0 "" "")
6641 (match_operand 1 "" ""))
6642 (set (match_dup 3) (match_dup 2))]
6646 HOST_WIDE_INT value = INTVAL (operands[1]);
6647 rtx addr = XEXP (operands[0], 0);
6648 rtx op0 = operands[0];
6649 enum rtx_code code = GET_CODE (addr);
6651 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6653 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
6655 operands[1] = gen_reg_rtx (SImode);
6656 if (BYTES_BIG_ENDIAN)
6658 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
6659 if ((value & 255) == ((value >> 8) & 255))
6660 operands[2] = operands[1];
6663 operands[2] = gen_reg_rtx (SImode);
6664 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
6669 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
6670 if ((value & 255) == ((value >> 8) & 255))
6671 operands[2] = operands[1];
6674 operands[2] = gen_reg_rtx (SImode);
6675 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
6679 operands[3] = adjust_address (op0, QImode, 1);
6680 operands[0] = adjust_address (operands[0], QImode, 0);
6681 operands[2] = gen_lowpart (QImode, operands[2]);
6682 operands[1] = gen_lowpart (QImode, operands[1]);
;; Single halfword store for targets with ARMv4 halfword memory ops; the
;; source is forced into a register first if it is not already one.
6686 (define_expand "storehi_single_op"
6687 [(set (match_operand:HI 0 "memory_operand")
6688 (match_operand:HI 1 "general_operand"))]
6689 "TARGET_32BIT && arm_arch4"
6691 if (!s_register_operand (operands[1], HImode))
6692 operands[1] = copy_to_mode_reg (HImode, operands[1]);
;; Main HImode move expander.  Dispatches on target flavor: the first arm
;; handles stores via the storehi* helpers, constants via sign-extension into
;; an SImode pseudo, and pre-ARMv4 loads via aligned-word tricks or
;; movhi_bytes; later arms handle Thumb-2 and Thumb-1 respectively.
;; NOTE(review): braces/else lines were elided by extraction; kept verbatim.
6696 (define_expand "movhi"
6697 [(set (match_operand:HI 0 "general_operand")
6698 (match_operand:HI 1 "general_operand"))]
6701 gcc_checking_assert (aligned_operand (operands[0], HImode));
6702 gcc_checking_assert (aligned_operand (operands[1], HImode));
6705 if (can_create_pseudo_p ())
6707 if (MEM_P (operands[0]))
6711 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
6714 if (CONST_INT_P (operands[1]))
6715 emit_insn (gen_storeinthi (operands[0], operands[1]));
6718 if (MEM_P (operands[1]))
6719 operands[1] = force_reg (HImode, operands[1]);
6720 if (BYTES_BIG_ENDIAN)
6721 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
6723 emit_insn (gen_storehi (operands[1], operands[0]));
6727 /* Sign extend a constant, and keep it in an SImode reg. */
6728 else if (CONST_INT_P (operands[1]))
6730 rtx reg = gen_reg_rtx (SImode);
6731 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6733 /* If the constant is already valid, leave it alone. */
6734 if (!const_ok_for_arm (val))
6736 /* If setting all the top bits will make the constant
6737 loadable in a single instruction, then set them.
6738 Otherwise, sign extend the number. */
6740 if (const_ok_for_arm (~(val | ~0xffff)))
6742 else if (val & 0x8000)
6746 emit_insn (gen_movsi (reg, GEN_INT (val)));
6747 operands[1] = gen_lowpart (HImode, reg);
6749 else if (arm_arch4 && optimize && can_create_pseudo_p ()
6750 && MEM_P (operands[1]))
6752 rtx reg = gen_reg_rtx (SImode);
6754 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6755 operands[1] = gen_lowpart (HImode, reg);
6757 else if (!arm_arch4)
6759 if (MEM_P (operands[1]))
6762 rtx offset = const0_rtx;
6763 rtx reg = gen_reg_rtx (SImode);
6765 if ((REG_P (base = XEXP (operands[1], 0))
6766 || (GET_CODE (base) == PLUS
6767 && (CONST_INT_P (offset = XEXP (base, 1)))
6768 && ((INTVAL(offset) & 1) != 1)
6769 && REG_P (base = XEXP (base, 0))))
6770 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
6774 new_rtx = widen_memory_access (operands[1], SImode,
6775 ((INTVAL (offset) & ~3)
6776 - INTVAL (offset)));
6777 emit_insn (gen_movsi (reg, new_rtx));
6778 if (((INTVAL (offset) & 2) != 0)
6779 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
6781 rtx reg2 = gen_reg_rtx (SImode);
6783 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
6788 emit_insn (gen_movhi_bytes (reg, operands[1]));
6790 operands[1] = gen_lowpart (HImode, reg);
6794 /* Handle loading a large integer during reload. */
6795 else if (CONST_INT_P (operands[1])
6796 && !const_ok_for_arm (INTVAL (operands[1]))
6797 && !const_ok_for_arm (~INTVAL (operands[1])))
6799 /* Writing a constant to memory needs a scratch, which should
6800 be handled with SECONDARY_RELOADs. */
6801 gcc_assert (REG_P (operands[0]));
6803 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6804 emit_insn (gen_movsi (operands[0], operands[1]));
6808 else if (TARGET_THUMB2)
6810 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
6811 if (can_create_pseudo_p ())
6813 if (!REG_P (operands[0]))
6814 operands[1] = force_reg (HImode, operands[1]);
6815 /* Zero extend a constant, and keep it in an SImode reg. */
6816 else if (CONST_INT_P (operands[1]))
6818 rtx reg = gen_reg_rtx (SImode);
6819 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6821 emit_insn (gen_movsi (reg, GEN_INT (val)));
6822 operands[1] = gen_lowpart (HImode, reg);
6826 else /* TARGET_THUMB1 */
6828 if (can_create_pseudo_p ())
6830 if (CONST_INT_P (operands[1]))
6832 rtx reg = gen_reg_rtx (SImode);
6834 emit_insn (gen_movsi (reg, operands[1]));
6835 operands[1] = gen_lowpart (HImode, reg);
6838 /* ??? We shouldn't really get invalid addresses here, but this can
6839 happen if we are passed a SP (never OK for HImode/QImode) or
6840 virtual register (also rejected as illegitimate for HImode/QImode)
6841 relative address. */
6842 /* ??? This should perhaps be fixed elsewhere, for instance, in
6843 fixup_stack_1, by checking for other kinds of invalid addresses,
6844 e.g. a bare reference to a virtual register. This may confuse the
6845 alpha though, which must handle this case differently. */
6846 if (MEM_P (operands[0])
6847 && !memory_address_p (GET_MODE (operands[0]),
6848 XEXP (operands[0], 0)))
6850 = replace_equiv_address (operands[0],
6851 copy_to_reg (XEXP (operands[0], 0)));
6853 if (MEM_P (operands[1])
6854 && !memory_address_p (GET_MODE (operands[1]),
6855 XEXP (operands[1], 0)))
6857 = replace_equiv_address (operands[1],
6858 copy_to_reg (XEXP (operands[1], 0)));
6860 if (MEM_P (operands[1]) && optimize > 0)
6862 rtx reg = gen_reg_rtx (SImode);
6864 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6865 operands[1] = gen_lowpart (HImode, reg);
6868 if (MEM_P (operands[0]))
6869 operands[1] = force_reg (HImode, operands[1]);
6871 else if (CONST_INT_P (operands[1])
6872 && !satisfies_constraint_I (operands[1]))
6874 /* Handle loading a large integer during reload. */
6876 /* Writing a constant to memory needs a scratch, which should
6877 be handled with SECONDARY_RELOADs. */
6878 gcc_assert (REG_P (operands[0]));
6880 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6881 emit_insn (gen_movsi (operands[0], operands[1]));
;; Load an HImode value as two QImode loads combined with shift/ior; used by
;; movhi for !arm_arch4 targets (see the gen_movhi_bytes call above).
;; Operands 4/5 select which temporary is the high byte per BYTES_BIG_ENDIAN.
6888 (define_expand "movhi_bytes"
6889 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
6891 (zero_extend:SI (match_dup 6)))
6892 (set (match_operand:SI 0 "" "")
6893 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
6898 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
6900 mem1 = change_address (operands[1], QImode, addr);
6901 mem2 = change_address (operands[1], QImode,
6902 plus_constant (Pmode, addr, 1));
6903 operands[0] = gen_lowpart (SImode, operands[0]);
6905 operands[2] = gen_reg_rtx (SImode);
6906 operands[3] = gen_reg_rtx (SImode);
6909 if (BYTES_BIG_ENDIAN)
6911 operands[4] = operands[2];
6912 operands[5] = operands[3];
6916 operands[4] = operands[3];
6917 operands[5] = operands[2];
;; Big-endian HImode load: rotate the containing SImode word, then arithmetic
;; shift right by 16 and take the low part.  NOTE(review): interior lines were
;; elided by extraction; kept verbatim.
6922 (define_expand "movhi_bigend"
6924 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
6927 (ashiftrt:SI (match_dup 2) (const_int 16)))
6928 (set (match_operand:HI 0 "s_register_operand")
6932 operands[2] = gen_reg_rtx (SImode);
6933 operands[3] = gen_reg_rtx (SImode);
6934 operands[4] = gen_lowpart (HImode, operands[3]);
6938 ;; Pattern to recognize insn generated default case above
;; Five alternatives: mov reg/imm, mvn of inverted imm, movw (v6t2 only),
;; strh to memory, ldrh from memory/pool.
6939 (define_insn "*movhi_insn_arch4"
6940 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
6941 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))]
6943 && arm_arch4 && !TARGET_HARD_FLOAT
6944 && (register_operand (operands[0], HImode)
6945 || register_operand (operands[1], HImode))"
6947 mov%?\\t%0, %1\\t%@ movhi
6948 mvn%?\\t%0, #%B1\\t%@ movhi
6949 movw%?\\t%0, %L1\\t%@ movhi
6950 strh%?\\t%1, %0\\t%@ movhi
6951 ldrh%?\\t%0, %1\\t%@ movhi"
6952 [(set_attr "predicable" "yes")
6953 (set_attr "pool_range" "*,*,*,*,256")
6954 (set_attr "neg_pool_range" "*,*,*,*,244")
6955 (set_attr "arch" "*,*,v6t2,*,*")
6956 (set_attr_alternative "type"
6957 [(if_then_else (match_operand 1 "const_int_operand" "")
6958 (const_string "mov_imm" )
6959 (const_string "mov_reg"))
6960 (const_string "mvn_imm")
6961 (const_string "mov_imm")
6962 (const_string "store_4")
6963 (const_string "load_4")])]
;; Register/immediate HImode moves for ARM state without hardware float:
;; mov of an immediate, mov of a register, or mvn of an inverted immediate.
6966 (define_insn "*movhi_bytes"
6967 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
6968 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))]
6969 "TARGET_ARM && !TARGET_HARD_FLOAT"
6971 mov%?\\t%0, %1\\t%@ movhi
6972 mov%?\\t%0, %1\\t%@ movhi
6973 mvn%?\\t%0, #%B1\\t%@ movhi"
6974 [(set_attr "predicable" "yes")
6975 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
6978 ;; We use a DImode scratch because we may occasionally need an additional
6979 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
6980 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
6981 ;; The reload_in<m> and reload_out<m> patterns require special constraints
6982 ;; to be correctly handled in default_secondary_reload function.
;; Secondary-reload halfword store; defers to the ARM or Thumb C helper.
6983 (define_expand "reload_outhi"
6984 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
6985 (match_operand:HI 1 "s_register_operand" "r")
6986 (match_operand:DI 2 "s_register_operand" "=&l")])]
6989 arm_reload_out_hi (operands);
6991 thumb_reload_out_hi (operands);
;; Secondary-reload halfword load; same DImode-scratch scheme as reload_outhi.
6996 (define_expand "reload_inhi"
6997 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
6998 (match_operand:HI 1 "arm_reload_memory_operand" "o")
6999 (match_operand:DI 2 "s_register_operand" "=&r")])]
7003 arm_reload_in_hi (operands);
7005 thumb_reload_out_hi (operands);
;; QImode move expander.  With pseudos available, constants are masked to
;; 8 bits for Thumb and loaded via SImode; invalid addresses are legitimized;
;; optimized memory loads go through zero_extendqisi2.  The final arm handles
;; large constants during reload.  NOTE(review): interior lines were elided by
;; extraction; kept verbatim.
7009 (define_expand "movqi"
7010 [(set (match_operand:QI 0 "general_operand")
7011 (match_operand:QI 1 "general_operand"))]
7014 /* Everything except mem = const or mem = mem can be done easily */
7016 if (can_create_pseudo_p ())
7018 if (CONST_INT_P (operands[1]))
7020 rtx reg = gen_reg_rtx (SImode);
7022 /* For thumb we want an unsigned immediate, then we are more likely
7023 to be able to use a movs insn. */
7025 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
7027 emit_insn (gen_movsi (reg, operands[1]));
7028 operands[1] = gen_lowpart (QImode, reg);
7033 /* ??? We shouldn't really get invalid addresses here, but this can
7034 happen if we are passed a SP (never OK for HImode/QImode) or
7035 virtual register (also rejected as illegitimate for HImode/QImode)
7036 relative address. */
7037 /* ??? This should perhaps be fixed elsewhere, for instance, in
7038 fixup_stack_1, by checking for other kinds of invalid addresses,
7039 e.g. a bare reference to a virtual register. This may confuse the
7040 alpha though, which must handle this case differently. */
7041 if (MEM_P (operands[0])
7042 && !memory_address_p (GET_MODE (operands[0]),
7043 XEXP (operands[0], 0)))
7045 = replace_equiv_address (operands[0],
7046 copy_to_reg (XEXP (operands[0], 0)));
7047 if (MEM_P (operands[1])
7048 && !memory_address_p (GET_MODE (operands[1]),
7049 XEXP (operands[1], 0)))
7051 = replace_equiv_address (operands[1],
7052 copy_to_reg (XEXP (operands[1], 0)));
7055 if (MEM_P (operands[1]) && optimize > 0)
7057 rtx reg = gen_reg_rtx (SImode);
7059 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
7060 operands[1] = gen_lowpart (QImode, reg);
7063 if (MEM_P (operands[0]))
7064 operands[1] = force_reg (QImode, operands[1]);
7066 else if (TARGET_THUMB
7067 && CONST_INT_P (operands[1])
7068 && !satisfies_constraint_I (operands[1]))
7070 /* Handle loading a large integer during reload. */
7072 /* Writing a constant to memory needs a scratch, which should
7073 be handled with SECONDARY_RELOADs. */
7074 gcc_assert (REG_P (operands[0]));
7076 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7077 emit_insn (gen_movsi (operands[0], operands[1]));
;; QImode move insn with nine alternatives (reg/imm moves, ldrb/strb forms);
;; the output-template lines were elided by extraction — attributes kept
;; verbatim.  NOTE(review): see type/arch/length attrs for per-alternative
;; classification.
7083 (define_insn "*arm_movqi_insn"
7084 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
7085 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
7087 && ( register_operand (operands[0], QImode)
7088 || register_operand (operands[1], QImode))"
7099 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
7100 (set_attr "predicable" "yes")
7101 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
7102 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
7103 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
;; HFmode (__fp16) move expander: force the source into a register when the
;; destination is memory (32-bit) or a non-register (Thumb-1 with pseudos).
7107 (define_expand "movhf"
7108 [(set (match_operand:HF 0 "general_operand")
7109 (match_operand:HF 1 "general_operand"))]
7112 gcc_checking_assert (aligned_operand (operands[0], HFmode));
7113 gcc_checking_assert (aligned_operand (operands[1], HFmode));
7116 if (MEM_P (operands[0]))
7117 operands[1] = force_reg (HFmode, operands[1]);
7119 else /* TARGET_THUMB1 */
7121 if (can_create_pseudo_p ())
7123 if (!REG_P (operands[0]))
7124 operands[1] = force_reg (HFmode, operands[1]);
;; Soft-float __fp16 moves: ldrh/strh for memory, mov for reg-reg, and for a
;; constant either a single movw (Thumb-2-capable cores) or mov+orr of the
;; two bytes of the target-format bit pattern.
7130 (define_insn "*arm32_movhf"
7131 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
7132 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
7133 "TARGET_32BIT && !TARGET_HARD_FLOAT
7134 && ( s_register_operand (operands[0], HFmode)
7135 || s_register_operand (operands[1], HFmode))"
7137 switch (which_alternative)
7139 case 0: /* ARM register from memory */
7140 return \"ldrh%?\\t%0, %1\\t%@ __fp16\";
7141 case 1: /* memory from ARM register */
7142 return \"strh%?\\t%1, %0\\t%@ __fp16\";
7143 case 2: /* ARM register from ARM register */
7144 return \"mov%?\\t%0, %1\\t%@ __fp16\";
7145 case 3: /* ARM register from constant */
7150 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
7152 ops[0] = operands[0];
7153 ops[1] = GEN_INT (bits);
7154 ops[2] = GEN_INT (bits & 0xff00);
7155 ops[3] = GEN_INT (bits & 0x00ff);
7157 if (arm_arch_thumb2)
7158 output_asm_insn (\"movw%?\\t%0, %1\", ops);
7160 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
7167 [(set_attr "conds" "unconditional")
7168 (set_attr "type" "load_4,store_4,mov_reg,multiple")
7169 (set_attr "length" "4,4,4,8")
7170 (set_attr "predicable" "yes")]
;; SFmode move expander.  Besides the usual force-to-register cases, when the
;; literal pool is disabled a hard-float constant that vfp3 cannot encode is
;; loaded through a clobber pattern so it can be built in a GPR via MOV/MOVT.
7173 (define_expand "movsf"
7174 [(set (match_operand:SF 0 "general_operand")
7175 (match_operand:SF 1 "general_operand"))]
7178 gcc_checking_assert (aligned_operand (operands[0], SFmode));
7179 gcc_checking_assert (aligned_operand (operands[1], SFmode));
7182 if (MEM_P (operands[0]))
7183 operands[1] = force_reg (SFmode, operands[1]);
7185 else /* TARGET_THUMB1 */
7187 if (can_create_pseudo_p ())
7189 if (!REG_P (operands[0]))
7190 operands[1] = force_reg (SFmode, operands[1]);
7194 /* Cannot load it directly, generate a load with clobber so that it can be
7195 loaded via GPR with MOV / MOVT. */
7196 if (arm_disable_literal_pool
7197 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
7198 && CONST_DOUBLE_P (operands[1])
7199 && TARGET_HARD_FLOAT
7200 && !vfp3_const_double_rtx (operands[1]))
7202 rtx clobreg = gen_reg_rtx (SFmode);
7203 emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
7210 ;; Transform a floating-point move of a constant into a core register into
7211 ;; an SImode operation.
;; NOTE(review): the define_split header and part of its condition were elided
;; by extraction; kept verbatim.
7213 [(set (match_operand:SF 0 "arm_general_register_operand" "")
7214 (match_operand:SF 1 "immediate_operand" ""))]
7217 && CONST_DOUBLE_P (operands[1])"
7218 [(set (match_dup 2) (match_dup 3))]
7220 operands[2] = gen_lowpart (SImode, operands[0]);
7221 operands[3] = gen_lowpart (SImode, operands[1]);
7222 if (operands[2] == 0 || operands[3] == 0)
;; Soft-float SFmode moves: mov reg-reg, ldr from memory/pool (split instead
;; when the literal pool is disabled), str to memory.
7227 (define_insn "*arm_movsf_soft_insn"
7228 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
7229 (match_operand:SF 1 "general_operand" "r,mE,r"))]
7231 && TARGET_SOFT_FLOAT
7232 && (!MEM_P (operands[0])
7233 || register_operand (operands[1], SFmode))"
7235 switch (which_alternative)
7237 case 0: return \"mov%?\\t%0, %1\";
7239 /* Cannot load it directly, split to load it via MOV / MOVT. */
7240 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
7242 return \"ldr%?\\t%0, %1\\t%@ float\";
7243 case 2: return \"str%?\\t%1, %0\\t%@ float\";
7244 default: gcc_unreachable ();
7247 [(set_attr "predicable" "yes")
7248 (set_attr "type" "mov_reg,load_4,store_4")
7249 (set_attr "arm_pool_range" "*,4096,*")
7250 (set_attr "thumb2_pool_range" "*,4094,*")
7251 (set_attr "arm_neg_pool_range" "*,4084,*")
7252 (set_attr "thumb2_neg_pool_range" "*,0,*")]
7255 ;; Splitter for the above.
;; Converts an SF constant load into an SImode immediate move of the
;; target-format bit pattern when the literal pool is disabled.
7257 [(set (match_operand:SF 0 "s_register_operand")
7258 (match_operand:SF 1 "const_double_operand"))]
7259 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
7263 real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
7264 rtx cst = gen_int_mode (buf, SImode);
7265 emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
;; DFmode move expander; mirrors movsf, with the no-literal-pool path also
;; rejecting constants encodable by arm_const_double_rtx or (with double-
;; precision VFP) vfp3_const_double_rtx.
7270 (define_expand "movdf"
7271 [(set (match_operand:DF 0 "general_operand")
7272 (match_operand:DF 1 "general_operand"))]
7275 gcc_checking_assert (aligned_operand (operands[0], DFmode));
7276 gcc_checking_assert (aligned_operand (operands[1], DFmode));
7279 if (MEM_P (operands[0]))
7280 operands[1] = force_reg (DFmode, operands[1]);
7282 else /* TARGET_THUMB */
7284 if (can_create_pseudo_p ())
7286 if (!REG_P (operands[0]))
7287 operands[1] = force_reg (DFmode, operands[1]);
7291 /* Cannot load it directly, generate a load with clobber so that it can be
7292 loaded via GPR with MOV / MOVT. */
7293 if (arm_disable_literal_pool
7294 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
7295 && CONSTANT_P (operands[1])
7296 && TARGET_HARD_FLOAT
7297 && !arm_const_double_rtx (operands[1])
7298 && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1]))
7300 rtx clobreg = gen_reg_rtx (DFmode);
7301 emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
7308 ;; Reloading a df mode value stored in integer regs to memory can require a
7310 ;; Another reload_out<m> pattern that requires special constraints.
;; Handles the address-mode cases individually: plain reg, POST_INC/PRE_DEC
;; (emit as DImode move), PRE_INC (pre-adjust by 8), POST_DEC (post-adjust by
;; -8), otherwise compute the address into the scratch.  NOTE(review):
;; interior lines elided by extraction; kept verbatim.
7311 (define_expand "reload_outdf"
7312 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
7313 (match_operand:DF 1 "s_register_operand" "r")
7314 (match_operand:SI 2 "s_register_operand" "=&r")]
7318 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
7321 operands[2] = XEXP (operands[0], 0);
7322 else if (code == POST_INC || code == PRE_DEC)
7324 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
7325 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
7326 emit_insn (gen_movdi (operands[0], operands[1]));
7329 else if (code == PRE_INC)
7331 rtx reg = XEXP (XEXP (operands[0], 0), 0);
7333 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
7336 else if (code == POST_DEC)
7337 operands[2] = XEXP (XEXP (operands[0], 0), 0);
7339 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
7340 XEXP (XEXP (operands[0], 0), 1)));
7342 emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
7345 if (code == POST_DEC)
7346 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
;; Soft-float DFmode moves in register pairs (three Dx constant classes, plus
;; load_8/store_8 memory forms); constant loads split via MOV/MOVT when the
;; literal pool is disabled.
7352 (define_insn "*movdf_soft_insn"
7353 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
7354 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
7355 "TARGET_32BIT && TARGET_SOFT_FLOAT
7356 && ( register_operand (operands[0], DFmode)
7357 || register_operand (operands[1], DFmode))"
7359 switch (which_alternative)
7366 /* Cannot load it directly, split to load it via MOV / MOVT. */
7367 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
7371 return output_move_double (operands, true, NULL);
7374 [(set_attr "length" "8,12,16,8,8")
7375 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
7376 (set_attr "arm_pool_range" "*,*,*,1020,*")
7377 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
7378 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
7379 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
7382 ;; Splitter for the above.
;; Converts a DF constant load into a DImode immediate move, assembling the
;; 64-bit bit pattern from the two 32-bit halves in endian order.
7384 [(set (match_operand:DF 0 "s_register_operand")
7385 (match_operand:DF 1 "const_double_operand"))]
7386 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
7390 int order = BYTES_BIG_ENDIAN ? 1 : 0;
7391 real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
7392 unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
7393 ival |= (zext_hwi (buf[1 - order], 32) << 32);
7394 rtx cst = gen_int_mode (ival, DImode);
7395 emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
7401 ;; load- and store-multiple insns
7402 ;; The arm can load/store any set of registers, provided that they are in
7403 ;; ascending order, but these expanders assume a contiguous set.
;; ldm expander: op0 = first destination reg, op1 = base memory, op2 = count.
;; Validates count (2..MAX_LDM_STM_OPS) and that the register run stays below
;; LAST_ARM_REGNUM, then defers to arm_gen_load_multiple.
7405 (define_expand "load_multiple"
7406 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7407 (match_operand:SI 1 "" ""))
7408 (use (match_operand:SI 2 "" ""))])]
7411 HOST_WIDE_INT offset = 0;
7413 /* Support only fixed point registers. */
7414 if (!CONST_INT_P (operands[2])
7415 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
7416 || INTVAL (operands[2]) < 2
7417 || !MEM_P (operands[1])
7418 || !REG_P (operands[0])
7419 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
7420 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7424 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
7425 INTVAL (operands[2]),
7426 force_reg (SImode, XEXP (operands[1], 0)),
7427 FALSE, operands[1], &offset);
;; stm expander: mirror image of load_multiple (op0 = base memory, op1 =
;; first source reg, op2 = count); defers to arm_gen_store_multiple.
7430 (define_expand "store_multiple"
7431 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7432 (match_operand:SI 1 "" ""))
7433 (use (match_operand:SI 2 "" ""))])]
7436 HOST_WIDE_INT offset = 0;
7438 /* Support only fixed point registers. */
7439 if (!CONST_INT_P (operands[2])
7440 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
7441 || INTVAL (operands[2]) < 2
7442 || !REG_P (operands[1])
7443 || !MEM_P (operands[0])
7444 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
7445 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7449 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
7450 INTVAL (operands[2]),
7451 force_reg (SImode, XEXP (operands[0], 0)),
7452 FALSE, operands[0], &offset);
;; memset expander: succeeds only when arm_gen_setmem can emit the block set.
7456 (define_expand "setmemsi"
7457 [(match_operand:BLK 0 "general_operand")
7458 (match_operand:SI 1 "const_int_operand")
7459 (match_operand:SI 2 "const_int_operand")
7460 (match_operand:SI 3 "const_int_operand")]
7463 if (arm_gen_setmem (operands))
7470 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
7471 ;; We could let this apply for blocks of less than this, but it clobbers so
7472 ;; many registers that there is then probably a better way.
;; Block-copy expander: prefers the ldrd/strd path on cores tuned for it
;; (unless optimizing for size), otherwise arm_gen_cpymemqi; Thumb-1 uses
;; thumb_expand_cpymemqi for word-aligned blocks up to 48 bytes.
7474 (define_expand "cpymemqi"
7475 [(match_operand:BLK 0 "general_operand")
7476 (match_operand:BLK 1 "general_operand")
7477 (match_operand:SI 2 "const_int_operand")
7478 (match_operand:SI 3 "const_int_operand")]
7483 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
7484 && !optimize_function_for_size_p (cfun))
7486 if (gen_cpymem_ldrd_strd (operands))
7491 if (arm_gen_cpymemqi (operands))
7495 else /* TARGET_THUMB1 */
7497 if ( INTVAL (operands[3]) != 4
7498 || INTVAL (operands[2]) > 48)
7501 thumb_expand_cpymemqi (operands);
7508 ;; Compare & branch insns
7509 ;; The range calculations are based as follows:
7510 ;; For forward branches, the address calculation returns the address of
7511 ;; the next instruction. This is 2 beyond the branch instruction.
7512 ;; For backward branches, the address calculation returns the address of
7513 ;; the first instruction in this pattern (cmp). This is 2 before the branch
7514 ;; instruction for the shortest sequence, and 4 before the branch instruction
7515 ;; if we have to jump around an unconditional branch.
7516 ;; To the basic branch range the PC offset must be added (this is +4).
7517 ;; So for forward branches we have
7518 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
7519 ;; And for backward branches we have
7520 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
7522 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
7523 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
;; SImode compare-and-branch: validize the comparison and emit via
;; cbranch_cc; Thumb-1 also handles negatable immediates via the scratch
;; pattern and legitimizes op2 for thumb1_cmp_operand.
7525 (define_expand "cbranchsi4"
7526 [(set (pc) (if_then_else
7527 (match_operator 0 "expandable_comparison_operator"
7528 [(match_operand:SI 1 "s_register_operand")
7529 (match_operand:SI 2 "nonmemory_operand")])
7530 (label_ref (match_operand 3 "" ""))
7536 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7538 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7542 if (thumb1_cmpneg_operand (operands[2], SImode))
7544 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
7545 operands[3], operands[0]));
7548 if (!thumb1_cmp_operand (operands[2], SImode))
7549 operands[2] = force_reg (SImode, operands[2]);
;; SFmode compare-and-branch for hard-float: emit directly via cbranch_cc.
7552 (define_expand "cbranchsf4"
7553 [(set (pc) (if_then_else
7554 (match_operator 0 "expandable_comparison_operator"
7555 [(match_operand:SF 1 "s_register_operand")
7556 (match_operand:SF 2 "vfp_compare_operand")])
7557 (label_ref (match_operand 3 "" ""))
7559 "TARGET_32BIT && TARGET_HARD_FLOAT"
7560 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7561 operands[3])); DONE;"
;; DFmode compare-and-branch; requires double-precision VFP
;; (!TARGET_VFP_SINGLE).
7564 (define_expand "cbranchdf4"
7565 [(set (pc) (if_then_else
7566 (match_operator 0 "expandable_comparison_operator"
7567 [(match_operand:DF 1 "s_register_operand")
7568 (match_operand:DF 2 "vfp_compare_operand")])
7569 (label_ref (match_operand 3 "" ""))
7571 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7572 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7573 operands[3])); DONE;"
;; DImode compare-and-branch: validize, then emit via cbranch_cc.
7576 (define_expand "cbranchdi4"
7577 [(set (pc) (if_then_else
7578 (match_operator 0 "expandable_comparison_operator"
7579 [(match_operand:DI 1 "s_register_operand")
7580 (match_operand:DI 2 "reg_or_int_operand")])
7581 (label_ref (match_operand 3 "" ""))
7585 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7587 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7593 ;; Comparison and test insns
;; SImode compare against register or add-operand immediate; five
;; alternatives spanning Thumb-2 short forms and ARM.  NOTE(review): the
;; output templates were elided by extraction; attributes kept verbatim.
7595 (define_insn "*arm_cmpsi_insn"
7596 [(set (reg:CC CC_REGNUM)
7597 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
7598 (match_operand:SI 1 "arm_add_operand" "Py,r,r,I,L")))]
7606 [(set_attr "conds" "set")
7607 (set_attr "arch" "t2,t2,any,any,any")
7608 (set_attr "length" "2,2,4,4,4")
7609 (set_attr "predicable" "yes")
7610 (set_attr "predicable_short_it" "yes,yes,yes,no,no")
7611 (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
;; Compare a register against a shifted register (shift by immediate or, on
;; ARM ("a" arch), by register).
7614 (define_insn "*cmpsi_shiftsi"
7615 [(set (reg:CC CC_REGNUM)
7616 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7617 (match_operator:SI 3 "shift_operator"
7618 [(match_operand:SI 1 "s_register_operand" "r,r")
7619 (match_operand:SI 2 "shift_amount_operand" "M,r")])))]
7622 [(set_attr "conds" "set")
7623 (set_attr "shift" "1")
7624 (set_attr "arch" "32,a")
7625 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; Same comparison with operands swapped, using the CC_SWP mode so the
;; condition sense is adjusted accordingly.
7627 (define_insn "*cmpsi_shiftsi_swp"
7628 [(set (reg:CC_SWP CC_REGNUM)
7629 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7630 [(match_operand:SI 1 "s_register_operand" "r,r")
7631 (match_operand:SI 2 "shift_amount_operand" "M,r")])
7632 (match_operand:SI 0 "s_register_operand" "r,r")))]
7635 [(set_attr "conds" "set")
7636 (set_attr "shift" "1")
7637 (set_attr "arch" "32,a")
7638 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; Zero-compare of a register against the negation of a shifted register
;; (CC_Z: only the Z flag result is valid).
7640 (define_insn "*arm_cmpsi_negshiftsi_si"
7641 [(set (reg:CC_Z CC_REGNUM)
7643 (neg:SI (match_operator:SI 1 "shift_operator"
7644 [(match_operand:SI 2 "s_register_operand" "r,r")
7645 (match_operand:SI 3 "shift_amount_operand" "M,r")]))
7646 (match_operand:SI 0 "s_register_operand" "r,r")))]
7649 [(set_attr "conds" "set")
7650 (set_attr "arch" "32,a")
7651 (set_attr "shift" "2")
7652 (set_attr "type" "alus_shift_imm,alus_shift_reg")
7653 (set_attr "predicable" "yes")]
7656 ; This insn allows redundant compares to be removed by cse, nothing should
7657 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7658 ; is deleted later on. The match_dup will match the mode here, so that
7659 ; mode changes of the condition codes aren't lost by this even though we don't
7660 ; specify what they are.
;; Emits only an assembly comment; length 0 so it costs nothing if it survives.
7662 (define_insn "*deleted_compare"
7663 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7665 "\\t%@ deleted compare"
7666 [(set_attr "conds" "set")
7667 (set_attr "length" "0")
7668 (set_attr "type" "no_insn")]
7672 ;; Conditional branch insns
;; Common compare-and-branch entry: materialize the comparison in a CC
;; register via arm_gen_compare_reg, then branch on CC against zero.
7674 (define_expand "cbranch_cc"
7676 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7677 (match_operand 2 "" "")])
7678 (label_ref (match_operand 3 "" ""))
7681 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7682 operands[1], operands[2], NULL_RTX);
7683 operands[2] = const0_rtx;"
7687 ;; Patterns to match conditional branch insns.
;; Conditional branch on CC; cooperates with the ccfsm conditionalization
;; state machine, and the length attr picks the short Thumb-2 b<cond> when
;; the target is within -250..256 bytes.
7690 (define_insn "arm_cond_branch"
7692 (if_then_else (match_operator 1 "arm_comparison_operator"
7693 [(match_operand 2 "cc_register" "") (const_int 0)])
7694 (label_ref (match_operand 0 "" ""))
7698 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7700 arm_ccfsm_state += 2;
7703 return \"b%d1\\t%l0\";
7705 [(set_attr "conds" "use")
7706 (set_attr "type" "branch")
7707 (set (attr "length")
7709 (and (match_test "TARGET_THUMB2")
7710 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7711 (le (minus (match_dup 0) (pc)) (const_int 256))))
;; As arm_cond_branch, but the branch is taken on the inverse condition
;; (%D1 instead of %d1) because the label is in the else arm.
7716 (define_insn "*arm_cond_branch_reversed"
7718 (if_then_else (match_operator 1 "arm_comparison_operator"
7719 [(match_operand 2 "cc_register" "") (const_int 0)])
7721 (label_ref (match_operand 0 "" ""))))]
7724 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7726 arm_ccfsm_state += 2;
7729 return \"b%D1\\t%l0\";
7731 [(set_attr "conds" "use")
7732 (set_attr "type" "branch")
7733 (set (attr "length")
7735 (and (match_test "TARGET_THUMB2")
7736 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7737 (le (minus (match_dup 0) (pc)) (const_int 256))))
;; Common store-flag entry: materialize the comparison in a CC register,
;; then let the *mov_*scc patterns produce the 0/1 (or 0/-1) result.
7746 (define_expand "cstore_cc"
7747 [(set (match_operand:SI 0 "s_register_operand")
7748 (match_operator:SI 1 "" [(match_operand 2 "" "")
7749 (match_operand 3 "" "")]))]
7751 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7752 operands[2], operands[3], NULL_RTX);
7753 operands[3] = const0_rtx;"
;; Store-flag as 0/1: kept as "#" and split into a conditional-move
;; if_then_else after reload (equivalent to mov%D1 #0 / mov%d1 #1).
7756 (define_insn_and_split "*mov_scc"
7757 [(set (match_operand:SI 0 "s_register_operand" "=r")
7758 (match_operator:SI 1 "arm_comparison_operator_mode"
7759 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7761 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7764 (if_then_else:SI (match_dup 1)
7768 [(set_attr "conds" "use")
7769 (set_attr "length" "8")
7770 (set_attr "type" "multiple")]
7773 (define_insn "*negscc_borrow"
7774 [(set (match_operand:SI 0 "s_register_operand" "=r")
7775 (neg:SI (match_operand:SI 1 "arm_borrow_operation" "")))]
7778 [(set_attr "conds" "use")
7779 (set_attr "length" "4")
7780 (set_attr "type" "adc_reg")]
7783 (define_insn_and_split "*mov_negscc"
7784 [(set (match_operand:SI 0 "s_register_operand" "=r")
7785 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
7786 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7787 "TARGET_ARM && !arm_borrow_operation (operands[1], SImode)"
7788 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7791 (if_then_else:SI (match_dup 1)
7795 operands[3] = GEN_INT (~0);
7797 [(set_attr "conds" "use")
7798 (set_attr "length" "8")
7799 (set_attr "type" "multiple")]
7802 (define_insn_and_split "*mov_notscc"
7803 [(set (match_operand:SI 0 "s_register_operand" "=r")
7804 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7805 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7807 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7810 (if_then_else:SI (match_dup 1)
7814 operands[3] = GEN_INT (~1);
7815 operands[4] = GEN_INT (~0);
7817 [(set_attr "conds" "use")
7818 (set_attr "length" "8")
7819 (set_attr "type" "multiple")]
7822 (define_expand "cstoresi4"
7823 [(set (match_operand:SI 0 "s_register_operand")
7824 (match_operator:SI 1 "expandable_comparison_operator"
7825 [(match_operand:SI 2 "s_register_operand")
7826 (match_operand:SI 3 "reg_or_int_operand")]))]
7827 "TARGET_32BIT || TARGET_THUMB1"
7829 rtx op3, scratch, scratch2;
7833 if (!arm_add_operand (operands[3], SImode))
7834 operands[3] = force_reg (SImode, operands[3]);
7835 emit_insn (gen_cstore_cc (operands[0], operands[1],
7836 operands[2], operands[3]));
7840 if (operands[3] == const0_rtx)
7842 switch (GET_CODE (operands[1]))
7845 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7849 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7853 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7854 NULL_RTX, 0, OPTAB_WIDEN);
7855 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7856 NULL_RTX, 0, OPTAB_WIDEN);
7857 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7858 operands[0], 1, OPTAB_WIDEN);
7862 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7864 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7865 NULL_RTX, 1, OPTAB_WIDEN);
7869 scratch = expand_binop (SImode, ashr_optab, operands[2],
7870 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7871 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7872 NULL_RTX, 0, OPTAB_WIDEN);
7873 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7877 /* LT is handled by generic code. No need for unsigned with 0. */
7884 switch (GET_CODE (operands[1]))
7887 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7888 NULL_RTX, 0, OPTAB_WIDEN);
7889 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7893 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7894 NULL_RTX, 0, OPTAB_WIDEN);
7895 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7899 op3 = force_reg (SImode, operands[3]);
7901 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7902 NULL_RTX, 1, OPTAB_WIDEN);
7903 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7904 NULL_RTX, 0, OPTAB_WIDEN);
7905 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7911 if (!thumb1_cmp_operand (op3, SImode))
7912 op3 = force_reg (SImode, op3);
7913 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7914 NULL_RTX, 0, OPTAB_WIDEN);
7915 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7916 NULL_RTX, 1, OPTAB_WIDEN);
7917 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7922 op3 = force_reg (SImode, operands[3]);
7923 scratch = force_reg (SImode, const0_rtx);
7924 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7930 if (!thumb1_cmp_operand (op3, SImode))
7931 op3 = force_reg (SImode, op3);
7932 scratch = force_reg (SImode, const0_rtx);
7933 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7939 if (!thumb1_cmp_operand (op3, SImode))
7940 op3 = force_reg (SImode, op3);
7941 scratch = gen_reg_rtx (SImode);
7942 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
7946 op3 = force_reg (SImode, operands[3]);
7947 scratch = gen_reg_rtx (SImode);
7948 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
7951 /* No good sequences for GT, LT. */
7958 (define_expand "cstorehf4"
7959 [(set (match_operand:SI 0 "s_register_operand")
7960 (match_operator:SI 1 "expandable_comparison_operator"
7961 [(match_operand:HF 2 "s_register_operand")
7962 (match_operand:HF 3 "vfp_compare_operand")]))]
7963 "TARGET_VFP_FP16INST"
7965 if (!arm_validize_comparison (&operands[1],
7970 emit_insn (gen_cstore_cc (operands[0], operands[1],
7971 operands[2], operands[3]));
7976 (define_expand "cstoresf4"
7977 [(set (match_operand:SI 0 "s_register_operand")
7978 (match_operator:SI 1 "expandable_comparison_operator"
7979 [(match_operand:SF 2 "s_register_operand")
7980 (match_operand:SF 3 "vfp_compare_operand")]))]
7981 "TARGET_32BIT && TARGET_HARD_FLOAT"
7982 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7983 operands[2], operands[3])); DONE;"
7986 (define_expand "cstoredf4"
7987 [(set (match_operand:SI 0 "s_register_operand")
7988 (match_operator:SI 1 "expandable_comparison_operator"
7989 [(match_operand:DF 2 "s_register_operand")
7990 (match_operand:DF 3 "vfp_compare_operand")]))]
7991 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7992 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7993 operands[2], operands[3])); DONE;"
7996 (define_expand "cstoredi4"
7997 [(set (match_operand:SI 0 "s_register_operand")
7998 (match_operator:SI 1 "expandable_comparison_operator"
7999 [(match_operand:DI 2 "s_register_operand")
8000 (match_operand:DI 3 "reg_or_int_operand")]))]
8003 if (!arm_validize_comparison (&operands[1],
8007 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
8014 ;; Conditional move insns
8016 (define_expand "movsicc"
8017 [(set (match_operand:SI 0 "s_register_operand")
8018 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
8019 (match_operand:SI 2 "arm_not_operand")
8020 (match_operand:SI 3 "arm_not_operand")))]
8027 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8028 &XEXP (operands[1], 1)))
8031 code = GET_CODE (operands[1]);
8032 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8033 XEXP (operands[1], 1), NULL_RTX);
8034 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8038 (define_expand "movhfcc"
8039 [(set (match_operand:HF 0 "s_register_operand")
8040 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
8041 (match_operand:HF 2 "s_register_operand")
8042 (match_operand:HF 3 "s_register_operand")))]
8043 "TARGET_VFP_FP16INST"
8046 enum rtx_code code = GET_CODE (operands[1]);
8049 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8050 &XEXP (operands[1], 1)))
8053 code = GET_CODE (operands[1]);
8054 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8055 XEXP (operands[1], 1), NULL_RTX);
8056 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8060 (define_expand "movsfcc"
8061 [(set (match_operand:SF 0 "s_register_operand")
8062 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
8063 (match_operand:SF 2 "s_register_operand")
8064 (match_operand:SF 3 "s_register_operand")))]
8065 "TARGET_32BIT && TARGET_HARD_FLOAT"
8068 enum rtx_code code = GET_CODE (operands[1]);
8071 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8072 &XEXP (operands[1], 1)))
8075 code = GET_CODE (operands[1]);
8076 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8077 XEXP (operands[1], 1), NULL_RTX);
8078 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8082 (define_expand "movdfcc"
8083 [(set (match_operand:DF 0 "s_register_operand")
8084 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
8085 (match_operand:DF 2 "s_register_operand")
8086 (match_operand:DF 3 "s_register_operand")))]
8087 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
8090 enum rtx_code code = GET_CODE (operands[1]);
8093 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8094 &XEXP (operands[1], 1)))
8096 code = GET_CODE (operands[1]);
8097 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8098 XEXP (operands[1], 1), NULL_RTX);
8099 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8103 (define_insn "*cmov<mode>"
8104 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
8105 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
8106 [(match_operand 2 "cc_register" "") (const_int 0)])
8107 (match_operand:SDF 3 "s_register_operand"
8109 (match_operand:SDF 4 "s_register_operand"
8110 "<F_constraint>")))]
8111 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
8114 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
8121 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
8126 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
8132 [(set_attr "conds" "use")
8133 (set_attr "type" "fcsel")]
8136 (define_insn "*cmovhf"
8137 [(set (match_operand:HF 0 "s_register_operand" "=t")
8138 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
8139 [(match_operand 2 "cc_register" "") (const_int 0)])
8140 (match_operand:HF 3 "s_register_operand" "t")
8141 (match_operand:HF 4 "s_register_operand" "t")))]
8142 "TARGET_VFP_FP16INST"
8145 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
8152 return \"vsel%d1.f16\\t%0, %3, %4\";
8157 return \"vsel%D1.f16\\t%0, %4, %3\";
8163 [(set_attr "conds" "use")
8164 (set_attr "type" "fcsel")]
8167 (define_insn_and_split "*movsicc_insn"
8168 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8170 (match_operator 3 "arm_comparison_operator"
8171 [(match_operand 4 "cc_register" "") (const_int 0)])
8172 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8173 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8184 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8185 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8186 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8187 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8188 "&& reload_completed"
8191 enum rtx_code rev_code;
8195 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
8197 gen_rtx_SET (operands[0], operands[1])));
8199 rev_code = GET_CODE (operands[3]);
8200 mode = GET_MODE (operands[4]);
8201 if (mode == CCFPmode || mode == CCFPEmode)
8202 rev_code = reverse_condition_maybe_unordered (rev_code);
8204 rev_code = reverse_condition (rev_code);
8206 rev_cond = gen_rtx_fmt_ee (rev_code,
8210 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
8212 gen_rtx_SET (operands[0], operands[2])));
8215 [(set_attr "length" "4,4,4,4,8,8,8,8")
8216 (set_attr "conds" "use")
8217 (set_attr_alternative "type"
8218 [(if_then_else (match_operand 2 "const_int_operand" "")
8219 (const_string "mov_imm")
8220 (const_string "mov_reg"))
8221 (const_string "mvn_imm")
8222 (if_then_else (match_operand 1 "const_int_operand" "")
8223 (const_string "mov_imm")
8224 (const_string "mov_reg"))
8225 (const_string "mvn_imm")
8226 (const_string "multiple")
8227 (const_string "multiple")
8228 (const_string "multiple")
8229 (const_string "multiple")])]
8232 (define_insn "*movsfcc_soft_insn"
8233 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8234 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8235 [(match_operand 4 "cc_register" "") (const_int 0)])
8236 (match_operand:SF 1 "s_register_operand" "0,r")
8237 (match_operand:SF 2 "s_register_operand" "r,0")))]
8238 "TARGET_ARM && TARGET_SOFT_FLOAT"
8242 [(set_attr "conds" "use")
8243 (set_attr "type" "mov_reg")]
8247 ;; Jump and linkage insns
8249 (define_expand "jump"
8251 (label_ref (match_operand 0 "" "")))]
8256 (define_insn "*arm_jump"
8258 (label_ref (match_operand 0 "" "")))]
8262 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8264 arm_ccfsm_state += 2;
8267 return \"b%?\\t%l0\";
8270 [(set_attr "predicable" "yes")
8271 (set (attr "length")
8273 (and (match_test "TARGET_THUMB2")
8274 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8275 (le (minus (match_dup 0) (pc)) (const_int 2048))))
8278 (set_attr "type" "branch")]
8281 (define_expand "call"
8282 [(parallel [(call (match_operand 0 "memory_operand")
8283 (match_operand 1 "general_operand"))
8284 (use (match_operand 2 "" ""))
8285 (clobber (reg:SI LR_REGNUM))])]
8290 tree addr = MEM_EXPR (operands[0]);
8292 /* In an untyped call, we can get NULL for operand 2. */
8293 if (operands[2] == NULL_RTX)
8294 operands[2] = const0_rtx;
8296 /* Decide if we should generate indirect calls by loading the
8297 32-bit address of the callee into a register before performing the
8299 callee = XEXP (operands[0], 0);
8300 if (GET_CODE (callee) == SYMBOL_REF
8301 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8303 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8305 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
8306 /* Indirect call: set r9 with FDPIC value of callee. */
8307 XEXP (operands[0], 0)
8308 = arm_load_function_descriptor (XEXP (operands[0], 0));
8310 if (detect_cmse_nonsecure_call (addr))
8312 pat = gen_nonsecure_call_internal (operands[0], operands[1],
8314 emit_call_insn (pat);
8318 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8319 arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
8322 /* Restore FDPIC register (r9) after call. */
8325 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8326 rtx initial_fdpic_reg
8327 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
8329 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
8330 initial_fdpic_reg));
8337 (define_insn "restore_pic_register_after_call"
8338 [(set (match_operand:SI 0 "s_register_operand" "+r,r")
8339 (unspec:SI [(match_dup 0)
8340 (match_operand:SI 1 "nonimmediate_operand" "r,m")]
8341 UNSPEC_PIC_RESTORE))]
;; Plain subroutine call, used by the "call" expander above once the callee
;; address has been prepared.  The pattern is a PARALLEL of:
;;   - the CALL itself: operand 0 is the callee (a MEM), operand 1 the
;;     argument information passed by the generic expander,
;;   - a (use) of operand 2 (may be const0_rtx for an untyped call),
;;   - a clobber of LR, since the call writes the return address there.
;; No C preparation code: the matching *call_* insns supply the assembly.
8348 (define_expand "call_internal"
8349 [(parallel [(call (match_operand 0 "memory_operand")
8350 (match_operand 1 "general_operand"))
8351 (use (match_operand 2 "" ""))
8352 (clobber (reg:SI LR_REGNUM))])])
8354 (define_expand "nonsecure_call_internal"
8355 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
8356 UNSPEC_NONSECURE_MEM)
8357 (match_operand 1 "general_operand"))
8358 (use (match_operand 2 "" ""))
8359 (clobber (reg:SI LR_REGNUM))])]
8364 tmp = copy_to_suggested_reg (XEXP (operands[0], 0),
8365 gen_rtx_REG (SImode, R4_REGNUM),
8368 operands[0] = replace_equiv_address (operands[0], tmp);
8371 (define_insn "*call_reg_armv5"
8372 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8373 (match_operand 1 "" ""))
8374 (use (match_operand 2 "" ""))
8375 (clobber (reg:SI LR_REGNUM))]
8376 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
8378 [(set_attr "type" "call")]
8381 (define_insn "*call_reg_arm"
8382 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8383 (match_operand 1 "" ""))
8384 (use (match_operand 2 "" ""))
8385 (clobber (reg:SI LR_REGNUM))]
8386 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
8388 return output_call (operands);
8390 ;; length is worst case, normally it is only two
8391 [(set_attr "length" "12")
8392 (set_attr "type" "call")]
8396 (define_expand "call_value"
8397 [(parallel [(set (match_operand 0 "" "")
8398 (call (match_operand 1 "memory_operand")
8399 (match_operand 2 "general_operand")))
8400 (use (match_operand 3 "" ""))
8401 (clobber (reg:SI LR_REGNUM))])]
8406 tree addr = MEM_EXPR (operands[1]);
8408 /* In an untyped call, we can get NULL for operand 2. */
8409 if (operands[3] == 0)
8410 operands[3] = const0_rtx;
8412 /* Decide if we should generate indirect calls by loading the
8413 32-bit address of the callee into a register before performing the
8415 callee = XEXP (operands[1], 0);
8416 if (GET_CODE (callee) == SYMBOL_REF
8417 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8419 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8421 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
8422 /* Indirect call: set r9 with FDPIC value of callee. */
8423 XEXP (operands[1], 0)
8424 = arm_load_function_descriptor (XEXP (operands[1], 0));
8426 if (detect_cmse_nonsecure_call (addr))
8428 pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
8429 operands[2], operands[3]);
8430 emit_call_insn (pat);
8434 pat = gen_call_value_internal (operands[0], operands[1],
8435 operands[2], operands[3]);
8436 arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
8439 /* Restore FDPIC register (r9) after call. */
8442 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8443 rtx initial_fdpic_reg
8444 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
8446 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
8447 initial_fdpic_reg));
;; Value-returning subroutine call, used by the "call_value" expander above.
;; Same shape as "call_internal" but the CALL result is stored into
;; operand 0; operand 1 is the callee (a MEM), operand 2 the argument
;; information, operand 3 the (use) operand, and LR is clobbered because
;; the call writes the return address there.
8454 (define_expand "call_value_internal"
8455 [(parallel [(set (match_operand 0 "" "")
8456 (call (match_operand 1 "memory_operand")
8457 (match_operand 2 "general_operand")))
8458 (use (match_operand 3 "" ""))
8459 (clobber (reg:SI LR_REGNUM))])])
8461 (define_expand "nonsecure_call_value_internal"
8462 [(parallel [(set (match_operand 0 "" "")
8463 (call (unspec:SI [(match_operand 1 "memory_operand")]
8464 UNSPEC_NONSECURE_MEM)
8465 (match_operand 2 "general_operand")))
8466 (use (match_operand 3 "" ""))
8467 (clobber (reg:SI LR_REGNUM))])]
8472 tmp = copy_to_suggested_reg (XEXP (operands[1], 0),
8473 gen_rtx_REG (SImode, R4_REGNUM),
8476 operands[1] = replace_equiv_address (operands[1], tmp);
8479 (define_insn "*call_value_reg_armv5"
8480 [(set (match_operand 0 "" "")
8481 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8482 (match_operand 2 "" "")))
8483 (use (match_operand 3 "" ""))
8484 (clobber (reg:SI LR_REGNUM))]
8485 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
8487 [(set_attr "type" "call")]
8490 (define_insn "*call_value_reg_arm"
8491 [(set (match_operand 0 "" "")
8492 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8493 (match_operand 2 "" "")))
8494 (use (match_operand 3 "" ""))
8495 (clobber (reg:SI LR_REGNUM))]
8496 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
8498 return output_call (&operands[1]);
8500 [(set_attr "length" "12")
8501 (set_attr "type" "call")]
8504 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8505 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8507 (define_insn "*call_symbol"
8508 [(call (mem:SI (match_operand:SI 0 "" ""))
8509 (match_operand 1 "" ""))
8510 (use (match_operand 2 "" ""))
8511 (clobber (reg:SI LR_REGNUM))]
8513 && !SIBLING_CALL_P (insn)
8514 && (GET_CODE (operands[0]) == SYMBOL_REF)
8515 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8518 rtx op = operands[0];
8520 /* Switch mode now when possible. */
8521 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8522 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8523 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
8525 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8527 [(set_attr "type" "call")]
8530 (define_insn "*call_value_symbol"
8531 [(set (match_operand 0 "" "")
8532 (call (mem:SI (match_operand:SI 1 "" ""))
8533 (match_operand:SI 2 "" "")))
8534 (use (match_operand 3 "" ""))
8535 (clobber (reg:SI LR_REGNUM))]
8537 && !SIBLING_CALL_P (insn)
8538 && (GET_CODE (operands[1]) == SYMBOL_REF)
8539 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8542 rtx op = operands[1];
8544 /* Switch mode now when possible. */
8545 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8546 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8547 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
8549 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8551 [(set_attr "type" "call")]
8554 (define_expand "sibcall_internal"
8555 [(parallel [(call (match_operand 0 "memory_operand")
8556 (match_operand 1 "general_operand"))
8558 (use (match_operand 2 "" ""))])])
8560 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8561 (define_expand "sibcall"
8562 [(parallel [(call (match_operand 0 "memory_operand")
8563 (match_operand 1 "general_operand"))
8565 (use (match_operand 2 "" ""))])]
8571 if ((!REG_P (XEXP (operands[0], 0))
8572 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
8573 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
8574 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
8575 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
8577 if (operands[2] == NULL_RTX)
8578 operands[2] = const0_rtx;
8580 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
8581 arm_emit_call_insn (pat, operands[0], true);
8586 (define_expand "sibcall_value_internal"
8587 [(parallel [(set (match_operand 0 "" "")
8588 (call (match_operand 1 "memory_operand")
8589 (match_operand 2 "general_operand")))
8591 (use (match_operand 3 "" ""))])])
8593 (define_expand "sibcall_value"
8594 [(parallel [(set (match_operand 0 "" "")
8595 (call (match_operand 1 "memory_operand")
8596 (match_operand 2 "general_operand")))
8598 (use (match_operand 3 "" ""))])]
8604 if ((!REG_P (XEXP (operands[1], 0))
8605 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
8606 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
8607 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
8608 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
8610 if (operands[3] == NULL_RTX)
8611 operands[3] = const0_rtx;
8613 pat = gen_sibcall_value_internal (operands[0], operands[1],
8614 operands[2], operands[3]);
8615 arm_emit_call_insn (pat, operands[1], true);
8620 (define_insn "*sibcall_insn"
8621 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
8622 (match_operand 1 "" ""))
8624 (use (match_operand 2 "" ""))]
8625 "TARGET_32BIT && SIBLING_CALL_P (insn)"
8627 if (which_alternative == 1)
8628 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8631 if (arm_arch5t || arm_arch4t)
8632 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
8634 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
8637 [(set_attr "type" "call")]
8640 (define_insn "*sibcall_value_insn"
8641 [(set (match_operand 0 "" "")
8642 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
8643 (match_operand 2 "" "")))
8645 (use (match_operand 3 "" ""))]
8646 "TARGET_32BIT && SIBLING_CALL_P (insn)"
8648 if (which_alternative == 1)
8649 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8652 if (arm_arch5t || arm_arch4t)
8653 return \"bx%?\\t%1\";
8655 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
8658 [(set_attr "type" "call")]
8661 (define_expand "<return_str>return"
8663 "(TARGET_ARM || (TARGET_THUMB2
8664 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
8665 && !IS_STACKALIGN (arm_current_func_type ())))
8666 <return_cond_false>"
8671 thumb2_expand_return (<return_simple_p>);
8678 ;; Often the return insn will be the same as loading from memory, so set the "type" attribute accordingly.
8679 (define_insn "*arm_return"
8681 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8684 if (arm_ccfsm_state == 2)
8686 arm_ccfsm_state += 2;
8689 return output_return_instruction (const_true_rtx, true, false, false);
8691 [(set_attr "type" "load_4")
8692 (set_attr "length" "12")
8693 (set_attr "predicable" "yes")]
8696 (define_insn "*cond_<return_str>return"
8698 (if_then_else (match_operator 0 "arm_comparison_operator"
8699 [(match_operand 1 "cc_register" "") (const_int 0)])
8702 "TARGET_ARM <return_cond_true>"
8705 if (arm_ccfsm_state == 2)
8707 arm_ccfsm_state += 2;
8710 return output_return_instruction (operands[0], true, false,
8713 [(set_attr "conds" "use")
8714 (set_attr "length" "12")
8715 (set_attr "type" "load_4")]
8718 (define_insn "*cond_<return_str>return_inverted"
8720 (if_then_else (match_operator 0 "arm_comparison_operator"
8721 [(match_operand 1 "cc_register" "") (const_int 0)])
8724 "TARGET_ARM <return_cond_true>"
8727 if (arm_ccfsm_state == 2)
8729 arm_ccfsm_state += 2;
8732 return output_return_instruction (operands[0], true, true,
8735 [(set_attr "conds" "use")
8736 (set_attr "length" "12")
8737 (set_attr "type" "load_4")]
8740 (define_insn "*arm_simple_return"
8745 if (arm_ccfsm_state == 2)
8747 arm_ccfsm_state += 2;
8750 return output_return_instruction (const_true_rtx, true, false, true);
8752 [(set_attr "type" "branch")
8753 (set_attr "length" "4")
8754 (set_attr "predicable" "yes")]
8757 ;; Generate a sequence of instructions to determine if the processor is
8758 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8761 (define_expand "return_addr_mask"
8763 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8765 (set (match_operand:SI 0 "s_register_operand")
8766 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8768 (const_int 67108860)))] ; 0x03fffffc
8771 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8774 (define_insn "*check_arch2"
8775 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8776 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8779 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8780 [(set_attr "length" "8")
8781 (set_attr "conds" "set")
8782 (set_attr "type" "multiple")]
8785 ;; Call subroutine returning any type.
8787 (define_expand "untyped_call"
8788 [(parallel [(call (match_operand 0 "" "")
8790 (match_operand 1 "" "")
8791 (match_operand 2 "" "")])]
8792 "TARGET_EITHER && !TARGET_FDPIC"
8796 rtx par = gen_rtx_PARALLEL (VOIDmode,
8797 rtvec_alloc (XVECLEN (operands[2], 0)));
8798 rtx addr = gen_reg_rtx (Pmode);
8802 emit_move_insn (addr, XEXP (operands[1], 0));
8803 mem = change_address (operands[1], BLKmode, addr);
8805 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8807 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8809 /* Default code only uses r0 as a return value, but we could
8810 be using anything up to 4 registers. */
8811 if (REGNO (src) == R0_REGNUM)
8812 src = gen_rtx_REG (TImode, R0_REGNUM);
8814 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8816 size += GET_MODE_SIZE (GET_MODE (src));
8819 emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
8823 for (i = 0; i < XVECLEN (par, 0); i++)
8825 HOST_WIDE_INT offset = 0;
8826 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8829 emit_move_insn (addr, plus_constant (Pmode, addr, size));
8831 mem = change_address (mem, GET_MODE (reg), NULL);
8832 if (REGNO (reg) == R0_REGNUM)
8834 /* On thumb we have to use a write-back instruction. */
8835 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
8836 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8837 size = TARGET_ARM ? 16 : 0;
8841 emit_move_insn (mem, reg);
8842 size = GET_MODE_SIZE (GET_MODE (reg));
8846 /* The optimizer does not know that the call sets the function value
8847 registers we stored in the result block. We avoid problems by
8848 claiming that all hard registers are used and clobbered at this
8850 emit_insn (gen_blockage ());
8856 (define_expand "untyped_return"
8857 [(match_operand:BLK 0 "memory_operand")
8858 (match_operand 1 "" "")]
8859 "TARGET_EITHER && !TARGET_FDPIC"
8863 rtx addr = gen_reg_rtx (Pmode);
8867 emit_move_insn (addr, XEXP (operands[0], 0));
8868 mem = change_address (operands[0], BLKmode, addr);
8870 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8872 HOST_WIDE_INT offset = 0;
8873 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8876 emit_move_insn (addr, plus_constant (Pmode, addr, size));
8878 mem = change_address (mem, GET_MODE (reg), NULL);
8879 if (REGNO (reg) == R0_REGNUM)
8881 /* On thumb we have to use a write-back instruction. */
8882 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
8883 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8884 size = TARGET_ARM ? 16 : 0;
8888 emit_move_insn (reg, mem);
8889 size = GET_MODE_SIZE (GET_MODE (reg));
8893 /* Emit USE insns before the return. */
8894 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8895 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8897 /* Construct the return. */
8898 expand_naked_return ();
8904 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8905 ;; all of memory. This blocks insns from being moved across this point.
8907 (define_insn "blockage"
8908 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8911 [(set_attr "length" "0")
8912 (set_attr "type" "block")]
8915 ;; Since we hard code r0 here use the 'o' constraint to prevent
8916 ;; provoking undefined behaviour in the hardware with putting out
8917 ;; auto-increment operations with potentially r0 as the base register.
8918 (define_insn "probe_stack"
8919 [(set (match_operand:SI 0 "memory_operand" "=o")
8920 (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
8923 [(set_attr "type" "store_4")
8924 (set_attr "predicable" "yes")]
8927 (define_insn "probe_stack_range"
8928 [(set (match_operand:SI 0 "register_operand" "=r")
8929 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
8930 (match_operand:SI 2 "register_operand" "r")]
8931 VUNSPEC_PROBE_STACK_RANGE))]
8934 return output_probe_stack_range (operands[0], operands[2]);
8936 [(set_attr "type" "multiple")
8937 (set_attr "conds" "clob")]
8940 ;; Named patterns for stack smashing protection.
8941 (define_expand "stack_protect_combined_set"
8943 [(set (match_operand:SI 0 "memory_operand")
8944 (unspec:SI [(match_operand:SI 1 "guard_operand")]
8946 (clobber (match_scratch:SI 2 ""))
8947 (clobber (match_scratch:SI 3 ""))])]
8952 ;; Use a separate insn from the above expand to be able to have the mem outside
8953 ;; the operand #1 when register allocation comes. This is needed to prevent LRA
8954 ;; from trying to reload the guard, since we need to control how PIC access is done in
8955 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
8956 ;; legitimize_pic_address ()).
8957 (define_insn_and_split "*stack_protect_combined_set_insn"
8958 [(set (match_operand:SI 0 "memory_operand" "=m,m")
8959 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
8961 (clobber (match_scratch:SI 2 "=&l,&r"))
8962 (clobber (match_scratch:SI 3 "=&l,&r"))]
8966 [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
8968 (clobber (match_dup 2))])]
8976 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8978 pic_reg = operands[3];
8980 /* Forces recomputing of GOT base now. */
8981 legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
8982 true /*compute_now*/);
8986 if (address_operand (operands[1], SImode))
8987 operands[2] = operands[1];
8990 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
8991 emit_move_insn (operands[2], mem);
8995 [(set_attr "arch" "t1,32")]
8998 ;; DO NOT SPLIT THIS INSN. It's important for security reasons that the
8999 ;; canary value does not live beyond the life of this sequence.
9000 (define_insn "*stack_protect_set_insn"
9001 [(set (match_operand:SI 0 "memory_operand" "=m,m")
9002 (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
9004 (clobber (match_dup 1))]
9007 ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
9008 ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
9009 [(set_attr "length" "8,12")
9010 (set_attr "conds" "clob,nocond")
9011 (set_attr "type" "multiple")
9012 (set_attr "arch" "t1,32")]
9015 (define_expand "stack_protect_combined_test"
9019 (eq (match_operand:SI 0 "memory_operand")
9020 (unspec:SI [(match_operand:SI 1 "guard_operand")]
9022 (label_ref (match_operand 2))
9024 (clobber (match_scratch:SI 3 ""))
9025 (clobber (match_scratch:SI 4 ""))
9026 (clobber (reg:CC CC_REGNUM))])]
9031 ;; Use a separate insn from the above expand to be able to have the mem outside
9032 ;; the operand #1 when register allocation comes. This is needed to prevent LRA
9033 ;; from trying to reload the guard, since we need to control how PIC access is done in
9034 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
9035 ;; legitimize_pic_address ()).
9036 (define_insn_and_split "*stack_protect_combined_test_insn"
9039 (eq (match_operand:SI 0 "memory_operand" "m,m")
9040 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
9042 (label_ref (match_operand 2))
9044 (clobber (match_scratch:SI 3 "=&l,&r"))
9045 (clobber (match_scratch:SI 4 "=&l,&r"))
9046 (clobber (reg:CC CC_REGNUM))]
9059 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
9061 pic_reg = operands[4];
9063 /* Forces recomputing of GOT base now. */
9064 legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
9065 true /*compute_now*/);
9069 if (address_operand (operands[1], SImode))
9070 operands[3] = operands[1];
9073 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
9074 emit_move_insn (operands[3], mem);
9079 emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
9081 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
9082 eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
9083 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
9087 emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
9089 eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
9090 emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
9095 [(set_attr "arch" "t1,32")]
;; 32-bit stack-protector test: load the guard through its address
;; register, load the canary slot, and EOR them with flags set —
;; CC_Z is EQ exactly when canary == guard.  Using EORS (rather than a
;; compare after reloading the secret) keeps the guard value's lifetime
;; minimal; operand 2 is clobbered so the guard address dies here too.
9098 (define_insn "arm_stack_protect_test_insn"
9099 [(set (reg:CC_Z CC_REGNUM)
9100 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
9101 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
9104 (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
9105 (clobber (match_dup 2))]
9107 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0"
9108 [(set_attr "length" "8,12")
9109 (set_attr "conds" "set")
9110 (set_attr "type" "multiple")
9111 (set_attr "arch" "t,32")]
;; Standard "casesi" expander: dispatch on index operand 0 with lower
;; bound operand 1, range operand 2, jump-table label operand 3 and
;; out-of-range label operand 4.  Not used for Thumb-1 pure-code, where
;; the table-based dispatch would place data in the text section.
9114 (define_expand "casesi"
9115 [(match_operand:SI 0 "s_register_operand") ; index to jump on
9116 (match_operand:SI 1 "const_int_operand") ; lower bound
9117 (match_operand:SI 2 "const_int_operand") ; total range
9118 (match_operand:SI 3 "" "") ; table label
9119 (match_operand:SI 4 "" "")] ; Out of range label
9120 "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
9123 enum insn_code code;
;; Bias the index so the table is zero-based: reg = index - lower_bound.
9124 if (operands[1] != const0_rtx)
9126 rtx reg = gen_reg_rtx (SImode);
9128 emit_insn (gen_addsi3 (reg, operands[0],
9129 gen_int_mode (-INTVAL (operands[1]),
;; Select the dispatch pattern for the current ISA/PIC combination.
9135 code = CODE_FOR_arm_casesi_internal;
9136 else if (TARGET_THUMB1)
9137 code = CODE_FOR_thumb1_casesi_internal_pic;
9139 code = CODE_FOR_thumb2_casesi_internal_pic;
9141 code = CODE_FOR_thumb2_casesi_internal;
;; The range may not fit the chosen pattern's operand predicate
;; (e.g. too-large immediate); force it into a register if so.
9143 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
9144 operands[2] = force_reg (SImode, operands[2]);
9146 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
9147 operands[3], operands[4]));
9152 ;; The USE in this pattern is needed to tell flow analysis that this is
9153 ;; a CASESI insn. It has no other purpose.
;; Expander that builds the table-load address for the ARM-mode casesi
;; pattern: MEM[table_label + index*4], marked read-only and non-trapping
;; so the optimizers may freely reorder around it.
9154 (define_expand "arm_casesi_internal"
9155 [(parallel [(set (pc)
9157 (leu (match_operand:SI 0 "s_register_operand")
9158 (match_operand:SI 1 "arm_rhs_operand"))
9160 (label_ref:SI (match_operand 3 ""))))
9161 (clobber (reg:CC CC_REGNUM))
9162 (use (label_ref:SI (match_operand 2 "")))])]
9165 operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
9166 operands[4] = gen_rtx_PLUS (SImode, operands[4],
9167 gen_rtx_LABEL_REF (SImode, operands[2]));
9168 operands[4] = gen_rtx_MEM (SImode, operands[4]);
9169 MEM_READONLY_P (operands[4]) = 1;
9170 MEM_NOTRAP_P (operands[4]) = 1;
;; ARM-mode casesi dispatch: compare index against the range, then on
;; LS either add the scaled index to PC (branch-offset table) or load PC
;; from the table, and fall through to a branch to the default label.
;; Reading/writing PC directly is ARM-mode only (no Thumb equivalent).
9173 (define_insn "*arm_casesi_internal"
9174 [(parallel [(set (pc)
9176 (leu (match_operand:SI 0 "s_register_operand" "r")
9177 (match_operand:SI 1 "arm_rhs_operand" "rI"))
9178 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
9179 (label_ref:SI (match_operand 2 "" ""))))
9180 (label_ref:SI (match_operand 3 "" ""))))
9181 (clobber (reg:CC CC_REGNUM))
9182 (use (label_ref:SI (match_dup 2)))])]
;; First form appears to be the PIC/relative-table variant (addls),
;; second the absolute-table variant (ldrls) — the selecting condition
;; line is missing from this view; confirm against upstream arm.md.
9186 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
9187 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
9189 [(set_attr "conds" "clob")
9190 (set_attr "length" "12")
9191 (set_attr "type" "multiple")]
;; Indirect jumps.  The expander fixes up Thumb-2, which cannot write PC
;; with "mov pc, reg": it sets bit 0 of the target (Thumb state bit) and
;; jumps via BX instead.
9194 (define_expand "indirect_jump"
9196 (match_operand:SI 0 "s_register_operand"))]
9199 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
9200 address and use bx. */
9204 tmp = gen_reg_rtx (SImode);
9205 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
9211 ;; NB Never uses BX.
;; ARM-mode register-indirect jump via a direct write to PC.
9212 (define_insn "*arm_indirect_jump"
9214 (match_operand:SI 0 "s_register_operand" "r"))]
9216 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
9217 [(set_attr "predicable" "yes")
9218 (set_attr "type" "branch")]
;; ARM-mode memory-indirect jump: load PC straight from memory.
9221 (define_insn "*load_indirect_jump"
9223 (match_operand:SI 0 "memory_operand" "m"))]
9225 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
9226 [(set_attr "type" "load_4")
9227 (set_attr "pool_range" "4096")
9228 (set_attr "neg_pool_range" "4084")
9229 (set_attr "predicable" "yes")]
;; NOTE(review): the define_insn headers for the next two patterns were
;; lost in extraction (lines 9230-9238, 9244-9246 missing).  The first
;; fragment is the tail of a move-like insn (type mov_reg) with a
;; Thumb/ARM length selection; the second is the trap pattern.
9239 [(set (attr "length")
9240 (if_then_else (eq_attr "is_thumb" "yes")
9243 (set_attr "type" "mov_reg")]
;; Unconditional trap: emitted as raw UDF encodings via .inst —
;; 0xe7f000f0 in ARM state, 0xdeff in Thumb state.
9247 [(trap_if (const_int 1) (const_int 0))]
9251 return \".inst\\t0xe7f000f0\";
9253 return \".inst\\t0xdeff\";
9255 [(set (attr "length")
9256 (if_then_else (eq_attr "is_thumb" "yes")
9259 (set_attr "type" "trap")
9260 (set_attr "conds" "unconditional")]
9264 ;; Patterns to allow combination of arithmetic, cond code and shifts
;; Combine an arithmetic op with a multiply-by-power-of-two, emitted as
;; the op with an LSL-immediate shifted operand (%b3 prints log2 of the
;; power-of-two constant).  Alternative 0 is ARM, 1 is Thumb-2.
9266 (define_insn "*<arith_shift_insn>_multsi"
9267 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9269 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
9270 (match_operand:SI 3 "power_of_two_operand" ""))
9271 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
9273 "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
9274 [(set_attr "predicable" "yes")
9275 (set_attr "shift" "2")
9276 (set_attr "arch" "a,t2")
9277 (set_attr "type" "alu_shift_imm")])
;; Same combination for a general (non-MULT) shift operator; the third
;; alternative allows a register shift amount (ARM only, alu_shift_reg).
;; MULT is excluded here because the pattern above handles it.
9279 (define_insn "*<arith_shift_insn>_shiftsi"
9280 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9282 (match_operator:SI 2 "shift_nomul_operator"
9283 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9284 (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
9285 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
9286 "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
9287 "<arith_shift_insn>%?\\t%0, %1, %3%S2"
9288 [(set_attr "predicable" "yes")
9289 (set_attr "shift" "3")
9290 (set_attr "arch" "a,t2,a")
9291 (set_attr "type" "alu_shift_imm,alu_shift_imm,alu_shift_reg")])
;; Splitter (define_split header line lost in extraction): rewrite
;; op1(op2(shift(r4,r5), r6), r7) using scratch operand 8 so the inner
;; op-with-shift becomes one insn and the outer op a second.
9294 [(set (match_operand:SI 0 "s_register_operand" "")
9295 (match_operator:SI 1 "shiftable_operator"
9296 [(match_operator:SI 2 "shiftable_operator"
9297 [(match_operator:SI 3 "shift_operator"
9298 [(match_operand:SI 4 "s_register_operand" "")
9299 (match_operand:SI 5 "reg_or_int_operand" "")])
9300 (match_operand:SI 6 "s_register_operand" "")])
9301 (match_operand:SI 7 "arm_rhs_operand" "")]))
9302 (clobber (match_operand:SI 8 "s_register_operand" ""))]
9305 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9308 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
;; Flag-setting variant of the arith+shift combination: performs
;; op(shift(r4,r5), r2), writes the result to operand 0 and compares it
;; against zero in CC_NOOV mode (%i1s prints the op with the S suffix).
9311 (define_insn "*arith_shiftsi_compare0"
9312 [(set (reg:CC_NOOV CC_REGNUM)
9314 (match_operator:SI 1 "shiftable_operator"
9315 [(match_operator:SI 3 "shift_operator"
9316 [(match_operand:SI 4 "s_register_operand" "r,r")
9317 (match_operand:SI 5 "shift_amount_operand" "M,r")])
9318 (match_operand:SI 2 "s_register_operand" "r,r")])
9320 (set (match_operand:SI 0 "s_register_operand" "=r,r")
9321 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9324 "%i1s%?\\t%0, %2, %4%S3"
9325 [(set_attr "conds" "set")
9326 (set_attr "shift" "4")
9327 (set_attr "arch" "32,a")
9328 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; As above, but only the condition codes are wanted: the arithmetic
;; result goes to a scratch register.
9330 (define_insn "*arith_shiftsi_compare0_scratch"
9331 [(set (reg:CC_NOOV CC_REGNUM)
9333 (match_operator:SI 1 "shiftable_operator"
9334 [(match_operator:SI 3 "shift_operator"
9335 [(match_operand:SI 4 "s_register_operand" "r,r")
9336 (match_operand:SI 5 "shift_amount_operand" "M,r")])
9337 (match_operand:SI 2 "s_register_operand" "r,r")])
9339 (clobber (match_scratch:SI 0 "=r,r"))]
9341 "%i1s%?\\t%0, %2, %4%S3"
9342 [(set_attr "conds" "set")
9343 (set_attr "shift" "4")
9344 (set_attr "arch" "32,a")
9345 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; Subtract with a shifted second operand: SUB Rd, Rn, Rm <shift>.
;; Register shift amounts (alt 1) are ARM-mode only.
9347 (define_insn "*sub_shiftsi"
9348 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9349 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9350 (match_operator:SI 2 "shift_operator"
9351 [(match_operand:SI 3 "s_register_operand" "r,r")
9352 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
9354 "sub%?\\t%0, %1, %3%S2"
9355 [(set_attr "predicable" "yes")
9356 (set_attr "predicable_short_it" "no")
9357 (set_attr "shift" "3")
9358 (set_attr "arch" "32,a")
9359 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; Flag-setting form (SUBS): result written and compared against zero.
9361 (define_insn "*sub_shiftsi_compare0"
9362 [(set (reg:CC_NOOV CC_REGNUM)
9364 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9365 (match_operator:SI 2 "shift_operator"
9366 [(match_operand:SI 3 "s_register_operand" "r,r")
9367 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
9369 (set (match_operand:SI 0 "s_register_operand" "=r,r")
9370 (minus:SI (match_dup 1)
9371 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
9373 "subs%?\\t%0, %1, %3%S2"
9374 [(set_attr "conds" "set")
9375 (set_attr "shift" "3")
9376 (set_attr "arch" "32,a")
9377 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; Flag-setting form where only the flags matter (result to scratch).
9379 (define_insn "*sub_shiftsi_compare0_scratch"
9380 [(set (reg:CC_NOOV CC_REGNUM)
9382 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9383 (match_operator:SI 2 "shift_operator"
9384 [(match_operand:SI 3 "s_register_operand" "r,r")
9385 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
9387 (clobber (match_scratch:SI 0 "=r,r"))]
9389 "subs%?\\t%0, %1, %3%S2"
9390 [(set_attr "conds" "set")
9391 (set_attr "shift" "3")
9392 (set_attr "arch" "32,a")
9393 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; AND of a store-flag value with a register: Rd = (cond ? 1 : 0) & Rm.
;; After reload it splits into two conditionally-executed moves:
;; on the reversed condition set Rd to 0, on the condition set
;; Rd = Rm & 1.  Operands 4/5 are the condition and its reverse rebuilt
;; in VOIDmode; FP compare modes need the maybe-unordered reversal.
9396 (define_insn_and_split "*and_scc"
9397 [(set (match_operand:SI 0 "s_register_operand" "=r")
9398 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9399 [(match_operand 2 "cc_register" "") (const_int 0)])
9400 (match_operand:SI 3 "s_register_operand" "r")))]
9402 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
9403 "&& reload_completed"
9404 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
9405 (cond_exec (match_dup 4) (set (match_dup 0)
9406 (and:SI (match_dup 3) (const_int 1))))]
9408 machine_mode mode = GET_MODE (operands[2]);
9409 enum rtx_code rc = GET_CODE (operands[1]);
9411 /* Note that operands[4] is the same as operands[1],
9412 but with VOIDmode as the result. */
9413 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9414 if (mode == CCFPmode || mode == CCFPEmode)
9415 rc = reverse_condition_maybe_unordered (rc);
9417 rc = reverse_condition (rc);
9418 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9420 [(set_attr "conds" "use")
9421 (set_attr "type" "multiple")
9422 (set_attr "length" "8")]
;; IOR of a store-flag value with a register: Rd = (cond ? 1 : 0) | Rm.
;; Alternative 0 ties Rd to Rm (single conditional ORR, length 4);
;; alternative 1 splits after reload into a conditional copy plus a
;; conditional ORR-with-1, using the condition (op 4) and its reverse
;; (op 5) exactly as in *and_scc above.
9425 (define_insn_and_split "*ior_scc"
9426 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9427 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
9428 [(match_operand 2 "cc_register" "") (const_int 0)])
9429 (match_operand:SI 3 "s_register_operand" "0,?r")))]
9434 "&& reload_completed
9435 && REGNO (operands [0]) != REGNO (operands[3])"
9436 ;; && which_alternative == 1
9437 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
9438 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
9439 (cond_exec (match_dup 4) (set (match_dup 0)
9440 (ior:SI (match_dup 3) (const_int 1))))]
9442 machine_mode mode = GET_MODE (operands[2]);
9443 enum rtx_code rc = GET_CODE (operands[1]);
9445 /* Note that operands[4] is the same as operands[1],
9446 but with VOIDmode as the result. */
9447 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9448 if (mode == CCFPmode || mode == CCFPEmode)
9449 rc = reverse_condition_maybe_unordered (rc);
9451 rc = reverse_condition (rc);
9452 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9454 [(set_attr "conds" "use")
9455 (set_attr "length" "4,8")
9456 (set_attr "type" "logic_imm,multiple")]
9459 ; A series of splitters for the compare_scc pattern below. Note that
9460 ; order is important.
;; (lt x 0) -> logical shift right by 31 (copies the sign bit).
9462 [(set (match_operand:SI 0 "s_register_operand" "")
9463 (lt:SI (match_operand:SI 1 "s_register_operand" "")
9465 (clobber (reg:CC CC_REGNUM))]
9466 "TARGET_32BIT && reload_completed"
9467 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
;; (ge x 0) -> invert, then shift the (now inverted) sign bit down.
9470 [(set (match_operand:SI 0 "s_register_operand" "")
9471 (ge:SI (match_operand:SI 1 "s_register_operand" "")
9473 (clobber (reg:CC CC_REGNUM))]
9474 "TARGET_32BIT && reload_completed"
9475 [(set (match_dup 0) (not:SI (match_dup 1)))
9476 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
;; (eq x 0) on ARMv5T+: CLZ gives 32 only for x == 0, so
;; clz(x) >> 5 is exactly the boolean result.
9479 [(set (match_operand:SI 0 "s_register_operand" "")
9480 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9482 (clobber (reg:CC CC_REGNUM))]
9483 "arm_arch5t && TARGET_32BIT"
9484 [(set (match_dup 0) (clz:SI (match_dup 1)))
9485 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
;; (eq x 0) fallback: RSBS/conditional-zero sequence via carry.
9489 [(set (match_operand:SI 0 "s_register_operand" "")
9490 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9492 (clobber (reg:CC CC_REGNUM))]
9493 "TARGET_32BIT && reload_completed"
9495 [(set (reg:CC CC_REGNUM)
9496 (compare:CC (const_int 1) (match_dup 1)))
9498 (minus:SI (const_int 1) (match_dup 1)))])
9499 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
9500 (set (match_dup 0) (const_int 0)))])
;; (ne x const): subtract the constant with flags, then conditionally
;; force the result to 1 when non-zero (operand 3 = -const).
9503 [(set (match_operand:SI 0 "s_register_operand" "")
9504 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9505 (match_operand:SI 2 "const_int_operand" "")))
9506 (clobber (reg:CC CC_REGNUM))]
9507 "TARGET_32BIT && reload_completed"
9509 [(set (reg:CC CC_REGNUM)
9510 (compare:CC (match_dup 1) (match_dup 2)))
9511 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
9512 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
9513 (set (match_dup 0) (const_int 1)))]
9515 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
;; (ne x y) general form: SUBS then conditional set-to-1.
9519 [(set (match_operand:SI 0 "s_register_operand" "")
9520 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9521 (match_operand:SI 2 "arm_add_operand" "")))
9522 (clobber (reg:CC CC_REGNUM))]
9523 "TARGET_32BIT && reload_completed"
9525 [(set (reg:CC_NOOV CC_REGNUM)
9526 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
9528 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
9529 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
9530 (set (match_dup 0) (const_int 1)))])
;; General store-flag: Rd = (r2 <cond> r3) ? 1 : 0.  Splits after
;; reload into a compare followed by two conditionally-executed moves
;; of 0 (reversed condition, operand 4) and 1 (condition, operand 5).
;; The CC mode is chosen per-comparison; FP modes reverse with the
;; maybe-unordered variant.
9532 (define_insn_and_split "*compare_scc"
9533 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9534 (match_operator:SI 1 "arm_comparison_operator"
9535 [(match_operand:SI 2 "s_register_operand" "r,r")
9536 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9537 (clobber (reg:CC CC_REGNUM))]
9540 "&& reload_completed"
9541 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
9542 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
9543 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
9546 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
9547 operands[2], operands[3]);
9548 enum rtx_code rc = GET_CODE (operands[1]);
9550 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
9552 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9553 if (mode == CCFPmode || mode == CCFPEmode)
9554 rc = reverse_condition_maybe_unordered (rc);
9556 rc = reverse_condition (rc);
9557 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9559 [(set_attr "type" "multiple")]
9562 ;; Attempt to improve the sequence generated by the compare_scc splitters
9563 ;; not to use conditional execution.
9565 ;; Rd = (eq (reg1) (const_int0)) // ARMv5
;; Peephole: cmp-against-0 + cond-exec 0/1 moves -> CLZ + LSR #5,
;; provided the flags are dead afterwards.
9569 [(set (reg:CC CC_REGNUM)
9570 (compare:CC (match_operand:SI 1 "register_operand" "")
9572 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9573 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9574 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9575 (set (match_dup 0) (const_int 1)))]
9576 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9577 [(set (match_dup 0) (clz:SI (match_dup 1)))
9578 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9581 ;; Rd = (eq (reg1) (const_int0)) // !ARMv5
;; Pre-ARMv5 variant: RSBS into a scratch, then ADC-style add of the
;; carry (geu of the compare) to synthesize the boolean without
;; conditional execution.
9585 [(set (reg:CC CC_REGNUM)
9586 (compare:CC (match_operand:SI 1 "register_operand" "")
9588 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9589 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9590 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9591 (set (match_dup 0) (const_int 1)))
9592 (match_scratch:SI 2 "r")]
9593 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9595 [(set (reg:CC CC_REGNUM)
9596 (compare:CC (const_int 0) (match_dup 1)))
9597 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
9599 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
9600 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
9603 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
9604 ;; sub Rd, Reg1, reg2
;; General-compare ARMv5 variant: SUB + CLZ + LSR #5.  Skipped for
;; Thumb-2 when optimizing for size (the cond-exec form is shorter).
9608 [(set (reg:CC CC_REGNUM)
9609 (compare:CC (match_operand:SI 1 "register_operand" "")
9610 (match_operand:SI 2 "arm_rhs_operand" "")))
9611 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9612 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9613 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9614 (set (match_dup 0) (const_int 1)))]
9615 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
9616 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
9617 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
9618 (set (match_dup 0) (clz:SI (match_dup 0)))
9619 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9623 ;; Rd = (eq (reg1) (reg2)) // ! ARMv5 or optimising for size.
9624 ;; sub T1, Reg1, reg2
;; Fallback: compute the difference into scratch 3 (operand 4 =
;; reg1 - reg2, folded to a plus-constant for immediates), then the
;; RSBS/carry-add trick as above.
9628 [(set (reg:CC CC_REGNUM)
9629 (compare:CC (match_operand:SI 1 "register_operand" "")
9630 (match_operand:SI 2 "arm_rhs_operand" "")))
9631 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9632 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9633 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9634 (set (match_dup 0) (const_int 1)))
9635 (match_scratch:SI 3 "r")]
9636 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9637 [(set (match_dup 3) (match_dup 4))
9639 [(set (reg:CC CC_REGNUM)
9640 (compare:CC (const_int 0) (match_dup 3)))
9641 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
9643 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
9644 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
9646 if (CONST_INT_P (operands[2]))
9647 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
9649 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
;; Conditional move on an existing flag result:
;; Rd = (flags <cond> 0) ? Rm : Rn, emitted as one or two predicated
;; MOVs.  Alternatives tie Rd to one source so only the other needs a
;; conditional move (length 4); the third alternative needs both
;; (length 8).  The NE/EQ outer operator selects which source goes with
;; the asserted condition.
9652 (define_insn "*cond_move"
9653 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9654 (if_then_else:SI (match_operator 3 "equality_operator"
9655 [(match_operator 4 "arm_comparison_operator"
9656 [(match_operand 5 "cc_register" "") (const_int 0)])
9658 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9659 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9662 if (GET_CODE (operands[3]) == NE)
9664 if (which_alternative != 1)
9665 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9666 if (which_alternative != 0)
9667 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9670 if (which_alternative != 0)
9671 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9672 if (which_alternative != 1)
9673 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9676 [(set_attr "conds" "use")
;; Scheduling type depends on whether the moved source is an immediate.
9677 (set_attr_alternative "type"
9678 [(if_then_else (match_operand 2 "const_int_operand" "")
9679 (const_string "mov_imm")
9680 (const_string "mov_reg"))
9681 (if_then_else (match_operand 1 "const_int_operand" "")
9682 (const_string "mov_imm")
9683 (const_string "mov_reg"))
9684 (const_string "multiple")])
9685 (set_attr "length" "4,4,8")]
;; Rd = Rn <op> (r2 <cond> r3 ? 1 : 0) for a shiftable operator.
;; Special case: LT against 0 becomes op with "Rm, lsr #31" (the sign
;; bit IS the boolean), avoiding the compare entirely.  Otherwise:
;; cmp, then a predicated fixup — AND zeroes on false, MINUS uses RSB,
;; and the untied alternative needs an extra conditional copy —
;; followed by the predicated op-with-#1.
9688 (define_insn "*cond_arith"
9689 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9690 (match_operator:SI 5 "shiftable_operator"
9691 [(match_operator:SI 4 "arm_comparison_operator"
9692 [(match_operand:SI 2 "s_register_operand" "r,r")
9693 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9694 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9695 (clobber (reg:CC CC_REGNUM))]
9698 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9699 return \"%i5\\t%0, %1, %2, lsr #31\";
9701 output_asm_insn (\"cmp\\t%2, %3\", operands);
9702 if (GET_CODE (operands[5]) == AND)
9703 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9704 else if (GET_CODE (operands[5]) == MINUS)
9705 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9706 else if (which_alternative != 0)
9707 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9708 return \"%i5%d4\\t%0, %1, #1\";
9710 [(set_attr "conds" "clob")
9711 (set_attr "length" "12")
9712 (set_attr "type" "multiple")]
;; Rd = Rn - (r2 <cond> r3 ? 1 : 0): cmp, optional conditional copy
;; for the untied alternative, then a predicated SUB #1.
9715 (define_insn "*cond_sub"
9716 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9717 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9718 (match_operator:SI 4 "arm_comparison_operator"
9719 [(match_operand:SI 2 "s_register_operand" "r,r")
9720 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9721 (clobber (reg:CC CC_REGNUM))]
9724 output_asm_insn (\"cmp\\t%2, %3\", operands);
9725 if (which_alternative != 0)
9726 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9727 return \"sub%d4\\t%0, %1, #1\";
9729 [(set_attr "conds" "clob")
9730 (set_attr "length" "8,12")
9731 (set_attr "type" "multiple")]
;; Conditional-compare combination (ITE form 0): set a dominance CC
;; register from two comparisons.  Emits an unconditional cmp/cmn for
;; one comparison and a predicated cmp/cmn for the other; cmp_idx maps
;; each constraint alternative to the CMP/CMN pairing needed for its
;; immediate forms (L constraints use CMN with the negated immediate).
;; "swap" selects operand order based on which comparison dominates;
;; Thumb-2 additionally needs an IT-block pseudo-op between the two.
;; NOTE(review): several table rows and the arm_asm-block braces are
;; missing from this extraction — see upstream arm.md for the full text.
9734 (define_insn "*cmp_ite0"
9735 [(set (match_operand 6 "dominant_cc_register" "")
9738 (match_operator 4 "arm_comparison_operator"
9739 [(match_operand:SI 0 "s_register_operand"
9740 "l,l,l,r,r,r,r,r,r")
9741 (match_operand:SI 1 "arm_add_operand"
9742 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9743 (match_operator:SI 5 "arm_comparison_operator"
9744 [(match_operand:SI 2 "s_register_operand"
9745 "l,r,r,l,l,r,r,r,r")
9746 (match_operand:SI 3 "arm_add_operand"
9747 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
;; Predicated second-compare templates, indexed [pairing][swap].
9753 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9755 {\"cmp%d5\\t%0, %1\",
9756 \"cmp%d4\\t%2, %3\"},
9757 {\"cmn%d5\\t%0, #%n1\",
9758 \"cmp%d4\\t%2, %3\"},
9759 {\"cmp%d5\\t%0, %1\",
9760 \"cmn%d4\\t%2, #%n3\"},
9761 {\"cmn%d5\\t%0, #%n1\",
9762 \"cmn%d4\\t%2, #%n3\"}
;; Unconditional first-compare templates.
9764 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9769 \"cmn\\t%0, #%n1\"},
9770 {\"cmn\\t%2, #%n3\",
9772 {\"cmn\\t%2, #%n3\",
9775 static const char * const ite[2] =
;; Map constraint alternative -> CMP/CMN pairing.
9780 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9781 CMP_CMP, CMN_CMP, CMP_CMP,
9782 CMN_CMP, CMP_CMN, CMN_CMN};
9784 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9786 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9787 if (TARGET_THUMB2) {
9788 output_asm_insn (ite[swap], operands);
9790 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9793 [(set_attr "conds" "set")
9794 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9795 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
9796 (set_attr "type" "multiple")
;; Thumb needs the extra IT instruction, hence longer encodings.
9797 (set_attr_alternative "length"
9803 (if_then_else (eq_attr "is_thumb" "no")
9806 (if_then_else (eq_attr "is_thumb" "no")
9809 (if_then_else (eq_attr "is_thumb" "no")
9812 (if_then_else (eq_attr "is_thumb" "no")
;; Conditional-compare combination (ITE form 1).  Same structure as
;; *cmp_ite0 but the dominance test reverses the first comparison, so
;; here cmp1 holds the unconditional templates and cmp2 the predicated
;; ones (using %d4 / inverted %D5 predicates).
;; NOTE(review): table rows and brace lines missing from this view —
;; consult upstream arm.md before modifying.
9817 (define_insn "*cmp_ite1"
9818 [(set (match_operand 6 "dominant_cc_register" "")
9821 (match_operator 4 "arm_comparison_operator"
9822 [(match_operand:SI 0 "s_register_operand"
9823 "l,l,l,r,r,r,r,r,r")
9824 (match_operand:SI 1 "arm_add_operand"
9825 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9826 (match_operator:SI 5 "arm_comparison_operator"
9827 [(match_operand:SI 2 "s_register_operand"
9828 "l,r,r,l,l,r,r,r,r")
9829 (match_operand:SI 3 "arm_add_operand"
9830 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
;; Unconditional first-compare templates, indexed [pairing][swap].
9836 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9840 {\"cmn\\t%0, #%n1\",
9843 \"cmn\\t%2, #%n3\"},
9844 {\"cmn\\t%0, #%n1\",
;; Predicated second-compare templates.
9847 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9849 {\"cmp%d4\\t%2, %3\",
9850 \"cmp%D5\\t%0, %1\"},
9851 {\"cmp%d4\\t%2, %3\",
9852 \"cmn%D5\\t%0, #%n1\"},
9853 {\"cmn%d4\\t%2, #%n3\",
9854 \"cmp%D5\\t%0, %1\"},
9855 {\"cmn%d4\\t%2, #%n3\",
9856 \"cmn%D5\\t%0, #%n1\"}
9858 static const char * const ite[2] =
9863 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9864 CMP_CMP, CMN_CMP, CMP_CMP,
9865 CMN_CMP, CMP_CMN, CMN_CMN};
;; Dominance test uses the REVERSED first comparison here.
9867 comparison_dominates_p (GET_CODE (operands[5]),
9868 reverse_condition (GET_CODE (operands[4])));
9870 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9871 if (TARGET_THUMB2) {
9872 output_asm_insn (ite[swap], operands);
9874 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9877 [(set_attr "conds" "set")
9878 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9879 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
9880 (set_attr_alternative "length"
9886 (if_then_else (eq_attr "is_thumb" "no")
9889 (if_then_else (eq_attr "is_thumb" "no")
9892 (if_then_else (eq_attr "is_thumb" "no")
9895 (if_then_else (eq_attr "is_thumb" "no")
9898 (set_attr "type" "multiple")]
;; AND of two comparisons into a dominance CC register: unconditional
;; cmp/cmn for one, predicated cmp/cmn for the other (only executed
;; when the first holds).  Ten alternatives: extra "r,r" rows allow
;; register-register forms; cmp_idx maps each to its CMP/CMN pairing.
;; NOTE(review): some table rows and braces missing from this view.
9901 (define_insn "*cmp_and"
9902 [(set (match_operand 6 "dominant_cc_register" "")
9905 (match_operator 4 "arm_comparison_operator"
9906 [(match_operand:SI 0 "s_register_operand"
9907 "l,l,l,r,r,r,r,r,r,r")
9908 (match_operand:SI 1 "arm_add_operand"
9909 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
9910 (match_operator:SI 5 "arm_comparison_operator"
9911 [(match_operand:SI 2 "s_register_operand"
9912 "l,r,r,l,l,r,r,r,r,r")
9913 (match_operand:SI 3 "arm_add_operand"
9914 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
;; Predicated second-compare templates, indexed [pairing][swap].
9919 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9921 {\"cmp%d5\\t%0, %1\",
9922 \"cmp%d4\\t%2, %3\"},
9923 {\"cmn%d5\\t%0, #%n1\",
9924 \"cmp%d4\\t%2, %3\"},
9925 {\"cmp%d5\\t%0, %1\",
9926 \"cmn%d4\\t%2, #%n3\"},
9927 {\"cmn%d5\\t%0, #%n1\",
9928 \"cmn%d4\\t%2, #%n3\"}
;; Unconditional first-compare templates.
9930 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9935 \"cmn\\t%0, #%n1\"},
9936 {\"cmn\\t%2, #%n3\",
9938 {\"cmn\\t%2, #%n3\",
9941 static const char *const ite[2] =
9946 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
9947 CMP_CMP, CMN_CMP, CMP_CMP,
9948 CMP_CMP, CMN_CMP, CMP_CMN,
9951 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9953 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9954 if (TARGET_THUMB2) {
9955 output_asm_insn (ite[swap], operands);
9957 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9960 [(set_attr "conds" "set")
9961 (set_attr "predicable" "no")
9962 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
9963 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
9964 (set_attr_alternative "length"
9971 (if_then_else (eq_attr "is_thumb" "no")
9974 (if_then_else (eq_attr "is_thumb" "no")
9977 (if_then_else (eq_attr "is_thumb" "no")
9980 (if_then_else (eq_attr "is_thumb" "no")
9983 (set_attr "type" "multiple")]
;; IOR of two comparisons into a dominance CC register.  Mirrors
;; *cmp_and, but the second compare is predicated on the INVERSE of the
;; first (%D4 / %D5): it only needs to run when the first failed.
;; NOTE(review): some table rows and braces missing from this view.
9986 (define_insn "*cmp_ior"
9987 [(set (match_operand 6 "dominant_cc_register" "")
9990 (match_operator 4 "arm_comparison_operator"
9991 [(match_operand:SI 0 "s_register_operand"
9992 "l,l,l,r,r,r,r,r,r,r")
9993 (match_operand:SI 1 "arm_add_operand"
9994 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
9995 (match_operator:SI 5 "arm_comparison_operator"
9996 [(match_operand:SI 2 "s_register_operand"
9997 "l,r,r,l,l,r,r,r,r,r")
9998 (match_operand:SI 3 "arm_add_operand"
9999 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
;; Unconditional first-compare templates, indexed [pairing][swap].
10004 static const char *const cmp1[NUM_OF_COND_CMP][2] =
10008 {\"cmn\\t%0, #%n1\",
10011 \"cmn\\t%2, #%n3\"},
10012 {\"cmn\\t%0, #%n1\",
10013 \"cmn\\t%2, #%n3\"}
;; Second compare predicated on the inverted condition (%D4/%D5).
10015 static const char *const cmp2[NUM_OF_COND_CMP][2] =
10017 {\"cmp%D4\\t%2, %3\",
10018 \"cmp%D5\\t%0, %1\"},
10019 {\"cmp%D4\\t%2, %3\",
10020 \"cmn%D5\\t%0, #%n1\"},
10021 {\"cmn%D4\\t%2, #%n3\",
10022 \"cmp%D5\\t%0, %1\"},
10023 {\"cmn%D4\\t%2, #%n3\",
10024 \"cmn%D5\\t%0, #%n1\"}
10026 static const char *const ite[2] =
10031 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
10032 CMP_CMP, CMN_CMP, CMP_CMP,
10033 CMP_CMP, CMN_CMP, CMP_CMN,
10036 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10038 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10039 if (TARGET_THUMB2) {
10040 output_asm_insn (ite[swap], operands);
10042 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10046 [(set_attr "conds" "set")
10047 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
10048 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
10049 (set_attr_alternative "length"
10056 (if_then_else (eq_attr "is_thumb" "no")
10059 (if_then_else (eq_attr "is_thumb" "no")
10062 (if_then_else (eq_attr "is_thumb" "no")
10065 (if_then_else (eq_attr "is_thumb" "no")
10068 (set_attr "type" "multiple")]
;; Rd = (r1 <cc3> r2) | (r4 <cc6> r5) when the two conditions have a
;; dominance CC mode.  Splits after reload into: a *cmp_ior-style
;; combined compare into the dominance CC register (operand 7, created
;; in the prep statements) followed by a store-flag of NE on it.
10071 (define_insn_and_split "*ior_scc_scc"
10072 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
10073 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
10074 [(match_operand:SI 1 "s_register_operand" "l,r")
10075 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10076 (match_operator:SI 6 "arm_comparison_operator"
10077 [(match_operand:SI 4 "s_register_operand" "l,r")
10078 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
10079 (clobber (reg:CC CC_REGNUM))]
10081 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
10084 "TARGET_32BIT && reload_completed"
10085 [(set (match_dup 7)
10088 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10089 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10091 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10093 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10096 [(set_attr "conds" "clob")
10097 (set_attr "enabled_for_short_it" "yes,no")
10098 (set_attr "length" "16")
10099 (set_attr "type" "multiple")]
10102 ; If the above pattern is followed by a CMP insn, then the compare is
10103 ; redundant, since we can rework the conditional instruction that follows.
;; Compare-consuming variant: produces BOTH the dominance CC result
;; (operand 0) and the IOR of store-flags (operand 7), so a following
;; conditional insn can use the flags directly and the extra CMP dies.
10104 (define_insn_and_split "*ior_scc_scc_cmp"
10105 [(set (match_operand 0 "dominant_cc_register" "")
10106 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
10107 [(match_operand:SI 1 "s_register_operand" "l,r")
10108 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10109 (match_operator:SI 6 "arm_comparison_operator"
10110 [(match_operand:SI 4 "s_register_operand" "l,r")
10111 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
10113 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
10114 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10115 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10118 "TARGET_32BIT && reload_completed"
10119 [(set (match_dup 0)
10122 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10123 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10125 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10127 [(set_attr "conds" "set")
10128 (set_attr "enabled_for_short_it" "yes,no")
10129 (set_attr "length" "16")
10130 (set_attr "type" "multiple")]
;; AND of two store-flag values; requires a dominance relationship between
;; the two comparisons (arm_select_dominance_cc_mode with DOM_CC_X_AND_Y),
;; clobbers the condition codes, and splits after reload.
;; NOTE(review): interior lines are missing from this extract (original
;; numbering skips), including parts of the insn/split conditions.
10133 (define_insn_and_split "*and_scc_scc"
10134 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
10135 (and:SI (match_operator:SI 3 "arm_comparison_operator"
10136 [(match_operand:SI 1 "s_register_operand" "l,r")
10137 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10138 (match_operator:SI 6 "arm_comparison_operator"
10139 [(match_operand:SI 4 "s_register_operand" "l,r")
10140 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
10141 (clobber (reg:CC CC_REGNUM))]
10143 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10146 "TARGET_32BIT && reload_completed
10147 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10149 [(set (match_dup 7)
10152 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10153 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10155 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10157 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10160 [(set_attr "conds" "clob")
10161 (set_attr "enabled_for_short_it" "yes,no")
10162 (set_attr "length" "16")
10163 (set_attr "type" "multiple")]
;; NOTE(review): interior lines appear to be missing from this extract.
10166 ; If the above pattern is followed by a CMP insn, then the compare is
10167 ; redundant, since we can rework the conditional instruction that follows.
10168 (define_insn_and_split "*and_scc_scc_cmp"
10169 [(set (match_operand 0 "dominant_cc_register" "")
10170 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
10171 [(match_operand:SI 1 "s_register_operand" "l,r")
10172 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10173 (match_operator:SI 6 "arm_comparison_operator"
10174 [(match_operand:SI 4 "s_register_operand" "l,r")
10175 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
10177 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
10178 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10179 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10182 "TARGET_32BIT && reload_completed"
10183 [(set (match_dup 0)
10186 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10187 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10189 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10191 [(set_attr "conds" "set")
10192 (set_attr "enabled_for_short_it" "yes,no")
10193 (set_attr "length" "16")
10194 (set_attr "type" "multiple")]
;; AND of two store-flag values without a dominance relationship: splits
;; into a store-flag for the first comparison, a real compare for the
;; second, and a conditional move.  Output register is earlyclobber (&).
;; NOTE(review): interior lines are missing from this extract (original
;; numbering skips), including part of the split sequence.
10197 ;; If there is no dominance in the comparison, then we can still save an
10198 ;; instruction in the AND case, since we can know that the second compare
10199 ;; need only zero the value if false (if true, then the value is already
10201 (define_insn_and_split "*and_scc_scc_nodom"
10202 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
10203 (and:SI (match_operator:SI 3 "arm_comparison_operator"
10204 [(match_operand:SI 1 "s_register_operand" "r,r,0")
10205 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
10206 (match_operator:SI 6 "arm_comparison_operator"
10207 [(match_operand:SI 4 "s_register_operand" "r,r,r")
10208 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
10209 (clobber (reg:CC CC_REGNUM))]
10211 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10214 "TARGET_32BIT && reload_completed"
10215 [(parallel [(set (match_dup 0)
10216 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
10217 (clobber (reg:CC CC_REGNUM))])
10218 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
10220 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
10223 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
10224 operands[4], operands[5]),
10226 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
10228 [(set_attr "conds" "clob")
10229 (set_attr "length" "20")
10230 (set_attr "type" "multiple")]
;; Two mirror-image define_split bodies rewriting a CC_NOOV compare of an
;; IOR of a masked value with a store-flag result into an IOR into a
;; scratch register followed by a test of its low bit.
;; NOTE(review): the "(define_split" header lines and several interior
;; lines are missing from this extract -- only the pattern bodies remain.
10234 [(set (reg:CC_NOOV CC_REGNUM)
10235 (compare:CC_NOOV (ior:SI
10236 (and:SI (match_operand:SI 0 "s_register_operand" "")
10238 (match_operator:SI 1 "arm_comparison_operator"
10239 [(match_operand:SI 2 "s_register_operand" "")
10240 (match_operand:SI 3 "arm_add_operand" "")]))
10242 (clobber (match_operand:SI 4 "s_register_operand" ""))]
10244 [(set (match_dup 4)
10245 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10247 (set (reg:CC_NOOV CC_REGNUM)
10248 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
;; Second split: same transformation with the store-flag operand and the
;; AND operand in the opposite order inside the IOR.
10253 [(set (reg:CC_NOOV CC_REGNUM)
10254 (compare:CC_NOOV (ior:SI
10255 (match_operator:SI 1 "arm_comparison_operator"
10256 [(match_operand:SI 2 "s_register_operand" "")
10257 (match_operand:SI 3 "arm_add_operand" "")])
10258 (and:SI (match_operand:SI 0 "s_register_operand" "")
10261 (clobber (match_operand:SI 4 "s_register_operand" ""))]
10263 [(set (match_dup 4)
10264 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10266 (set (reg:CC_NOOV CC_REGNUM)
10267 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
;; Negated store-flag: reg0 = -(reg1 <cmp> op2).  The C split body picks
;; one of three expansions: an arithmetic shift right by 31 for LT against
;; zero, a subs/mvnne pair for NE, or a generic cmp plus two conditional
;; moves (clear on the reversed condition, set all-ones on the condition).
;; NOTE(review): interior lines are missing from this extract, including
;; braces and parts of the emitted RTX expressions.
10270 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
10272 (define_insn_and_split "*negscc"
10273 [(set (match_operand:SI 0 "s_register_operand" "=r")
10274 (neg:SI (match_operator 3 "arm_comparison_operator"
10275 [(match_operand:SI 1 "s_register_operand" "r")
10276 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
10277 (clobber (reg:CC CC_REGNUM))]
10280 "&& reload_completed"
10283 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
10285 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
10287 /* Emit mov\\t%0, %1, asr #31 */
10288 emit_insn (gen_rtx_SET (operands[0],
10289 gen_rtx_ASHIFTRT (SImode,
10294 else if (GET_CODE (operands[3]) == NE)
10296 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
10297 if (CONST_INT_P (operands[2]))
10298 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
10299 gen_int_mode (-INTVAL (operands[2]),
10302 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
10304 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10305 gen_rtx_NE (SImode,
10308 gen_rtx_SET (operands[0],
10314 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
10315 emit_insn (gen_rtx_SET (cc_reg,
10316 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
10317 enum rtx_code rc = GET_CODE (operands[3]);
10319 rc = reverse_condition (rc);
10320 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10321 gen_rtx_fmt_ee (rc,
10325 gen_rtx_SET (operands[0], const0_rtx)));
10326 rc = GET_CODE (operands[3]);
10327 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10328 gen_rtx_fmt_ee (rc,
10332 gen_rtx_SET (operands[0],
10338 [(set_attr "conds" "clob")
10339 (set_attr "length" "12")
10340 (set_attr "type" "multiple")]
;; Conditional move whose condition is a comparison of (reg3 + op4);
;; splits after reload into a CC_NOOV-setting add-compare followed by two
;; cond_exec moves.  The C body reverses the condition (and swaps the two
;; move sources) when operand 2 is not already the destination register,
;; and asserts the chosen CC mode is not a floating-point one.
;; NOTE(review): interior lines are missing from this extract.
10343 (define_insn_and_split "movcond_addsi"
10344 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
10346 (match_operator 5 "comparison_operator"
10347 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
10348 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
10350 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
10351 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
10352 (clobber (reg:CC CC_REGNUM))]
10355 "&& reload_completed"
10356 [(set (reg:CC_NOOV CC_REGNUM)
10358 (plus:SI (match_dup 3)
10361 (set (match_dup 0) (match_dup 1))
10362 (cond_exec (match_dup 6)
10363 (set (match_dup 0) (match_dup 2)))]
10366 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
10367 operands[3], operands[4]);
10368 enum rtx_code rc = GET_CODE (operands[5]);
10369 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10370 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
10371 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
10372 rc = reverse_condition (rc);
10374 std::swap (operands[1], operands[2]);
10376 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10379 [(set_attr "conds" "clob")
10380 (set_attr "enabled_for_short_it" "no,yes,yes")
10381 (set_attr "type" "multiple")]
;; General SImode conditional move.  The output body special-cases LT/GE
;; comparisons against zero, where an and/bic (optionally flag-setting,
;; with "asr #31"/"asr #32" masks) plus at most one conditional move does
;; the job; otherwise it emits cmp (or cmn for an un-encodable negative
;; immediate) followed by up to two conditional moves.
;; NOTE(review): interior lines (braces, fall-through comments, the insn
;; condition) are missing from this extract.
10384 (define_insn "movcond"
10385 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10387 (match_operator 5 "arm_comparison_operator"
10388 [(match_operand:SI 3 "s_register_operand" "r,r,r")
10389 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
10390 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
10391 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
10392 (clobber (reg:CC CC_REGNUM))]
10395 if (GET_CODE (operands[5]) == LT
10396 && (operands[4] == const0_rtx))
10398 if (which_alternative != 1 && REG_P (operands[1]))
10400 if (operands[2] == const0_rtx)
10401 return \"and\\t%0, %1, %3, asr #31\";
10402 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
10404 else if (which_alternative != 0 && REG_P (operands[2]))
10406 if (operands[1] == const0_rtx)
10407 return \"bic\\t%0, %2, %3, asr #31\";
10408 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
10410 /* The only case that falls through to here is when both ops 1 & 2
10414 if (GET_CODE (operands[5]) == GE
10415 && (operands[4] == const0_rtx))
10417 if (which_alternative != 1 && REG_P (operands[1]))
10419 if (operands[2] == const0_rtx)
10420 return \"bic\\t%0, %1, %3, asr #31\";
10421 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
10423 else if (which_alternative != 0 && REG_P (operands[2]))
10425 if (operands[1] == const0_rtx)
10426 return \"and\\t%0, %2, %3, asr #31\";
10427 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
10429 /* The only case that falls through to here is when both ops 1 & 2
10432 if (CONST_INT_P (operands[4])
10433 && !const_ok_for_arm (INTVAL (operands[4])))
10434 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
10436 output_asm_insn (\"cmp\\t%3, %4\", operands);
10437 if (which_alternative != 0)
10438 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
10439 if (which_alternative != 1)
10440 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
10443 [(set_attr "conds" "clob")
10444 (set_attr "length" "8,8,12")
10445 (set_attr "type" "multiple")]
;; Pair of patterns for "if (cmp) x = a + b; else x = c":
;; *ifcompare_plus_move does its own compare (clobbers CC);
;; *if_plus_move reuses a live CC register and emits a conditional
;; add/sub plus an optional conditional move for the else-value.
;; NOTE(review): interior lines (conditions, output templates for the
;; first pattern) are missing from this extract.
10448 ;; ??? The patterns below need checking for Thumb-2 usefulness.
10450 (define_insn "*ifcompare_plus_move"
10451 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10452 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10453 [(match_operand:SI 4 "s_register_operand" "r,r")
10454 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10456 (match_operand:SI 2 "s_register_operand" "r,r")
10457 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
10458 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10459 (clobber (reg:CC CC_REGNUM))]
10462 [(set_attr "conds" "clob")
10463 (set_attr "length" "8,12")
10464 (set_attr "type" "multiple")]
10467 (define_insn "*if_plus_move"
10468 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10470 (match_operator 4 "arm_comparison_operator"
10471 [(match_operand 5 "cc_register" "") (const_int 0)])
10473 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10474 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
10475 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
10478 add%d4\\t%0, %2, %3
10479 sub%d4\\t%0, %2, #%n3
10480 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
10481 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
10482 [(set_attr "conds" "use")
10483 (set_attr "length" "4,4,8,8")
10484 (set_attr_alternative "type"
10485 [(if_then_else (match_operand 3 "const_int_operand" "")
10486 (const_string "alu_imm" )
10487 (const_string "alu_sreg"))
10488 (const_string "alu_imm")
10489 (const_string "multiple")
10490 (const_string "multiple")])]
;; Mirror of the previous pair: the addition sits in the else-arm, so
;; *if_plus_move's templates reappear with %d4/%D4 condition suffixes
;; swapped.  *ifcompare_move_plus performs its own compare (clobbers CC).
;; NOTE(review): interior lines are missing from this extract.
10493 (define_insn "*ifcompare_move_plus"
10494 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10495 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10496 [(match_operand:SI 4 "s_register_operand" "r,r")
10497 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10498 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10500 (match_operand:SI 2 "s_register_operand" "r,r")
10501 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
10502 (clobber (reg:CC CC_REGNUM))]
10505 [(set_attr "conds" "clob")
10506 (set_attr "length" "8,12")
10507 (set_attr "type" "multiple")]
10510 (define_insn "*if_move_plus"
10511 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10513 (match_operator 4 "arm_comparison_operator"
10514 [(match_operand 5 "cc_register" "") (const_int 0)])
10515 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
10517 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10518 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
10521 add%D4\\t%0, %2, %3
10522 sub%D4\\t%0, %2, #%n3
10523 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
10524 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
10525 [(set_attr "conds" "use")
10526 (set_attr "length" "4,4,8,8")
10527 (set_attr_alternative "type"
10528 [(if_then_else (match_operand 3 "const_int_operand" "")
10529 (const_string "alu_imm" )
10530 (const_string "alu_sreg"))
10531 (const_string "alu_imm")
10532 (const_string "multiple")
10533 (const_string "multiple")])]
;; Conditional select between two shiftable-operator results.
;; *ifcompare_arith_arith does the compare itself (clobbers CC, 3 insns);
;; *if_arith_arith reuses a live CC register: one conditional op for the
;; then-arm (%I6%d5) and one for the else-arm (%I7%D5).
;; NOTE(review): interior lines are missing from this extract.
10536 (define_insn "*ifcompare_arith_arith"
10537 [(set (match_operand:SI 0 "s_register_operand" "=r")
10538 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
10539 [(match_operand:SI 5 "s_register_operand" "r")
10540 (match_operand:SI 6 "arm_add_operand" "rIL")])
10541 (match_operator:SI 8 "shiftable_operator"
10542 [(match_operand:SI 1 "s_register_operand" "r")
10543 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10544 (match_operator:SI 7 "shiftable_operator"
10545 [(match_operand:SI 3 "s_register_operand" "r")
10546 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
10547 (clobber (reg:CC CC_REGNUM))]
10550 [(set_attr "conds" "clob")
10551 (set_attr "length" "12")
10552 (set_attr "type" "multiple")]
10555 (define_insn "*if_arith_arith"
10556 [(set (match_operand:SI 0 "s_register_operand" "=r")
10557 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
10558 [(match_operand 8 "cc_register" "") (const_int 0)])
10559 (match_operator:SI 6 "shiftable_operator"
10560 [(match_operand:SI 1 "s_register_operand" "r")
10561 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10562 (match_operator:SI 7 "shiftable_operator"
10563 [(match_operand:SI 3 "s_register_operand" "r")
10564 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
10566 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
10567 [(set_attr "conds" "use")
10568 (set_attr "length" "8")
10569 (set_attr "type" "multiple")]
;; "if (cmp) x = a OP b; else x = c" where OP is a shiftable operator.
;; The ifcompare form's output body has a fast path for LT/GE against
;; zero when the else-value aliases operand 4 (and/bic mask with asr #31
;; then the operation); otherwise cmp/cmn plus conditional op and move.
;; *if_arith_move reuses a live CC register.
;; NOTE(review): interior lines are missing from this extract.
10572 (define_insn "*ifcompare_arith_move"
10573 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10574 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10575 [(match_operand:SI 2 "s_register_operand" "r,r")
10576 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
10577 (match_operator:SI 7 "shiftable_operator"
10578 [(match_operand:SI 4 "s_register_operand" "r,r")
10579 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
10580 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10581 (clobber (reg:CC CC_REGNUM))]
10584 /* If we have an operation where (op x 0) is the identity operation and
10585 the conditional operator is LT or GE and we are comparing against zero and
10586 everything is in registers then we can do this in two instructions. */
10587 if (operands[3] == const0_rtx
10588 && GET_CODE (operands[7]) != AND
10589 && REG_P (operands[5])
10590 && REG_P (operands[1])
10591 && REGNO (operands[1]) == REGNO (operands[4])
10592 && REGNO (operands[4]) != REGNO (operands[0]))
10594 if (GET_CODE (operands[6]) == LT)
10595 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10596 else if (GET_CODE (operands[6]) == GE)
10597 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10599 if (CONST_INT_P (operands[3])
10600 && !const_ok_for_arm (INTVAL (operands[3])))
10601 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
10603 output_asm_insn (\"cmp\\t%2, %3\", operands);
10604 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
10605 if (which_alternative != 0)
10606 return \"mov%D6\\t%0, %1\";
10609 [(set_attr "conds" "clob")
10610 (set_attr "length" "8,12")
10611 (set_attr "type" "multiple")]
10614 (define_insn "*if_arith_move"
10615 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10616 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10617 [(match_operand 6 "cc_register" "") (const_int 0)])
10618 (match_operator:SI 5 "shiftable_operator"
10619 [(match_operand:SI 2 "s_register_operand" "r,r")
10620 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10621 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
10624 %I5%d4\\t%0, %2, %3
10625 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
10626 [(set_attr "conds" "use")
10627 (set_attr "length" "4,8")
10628 (set_attr_alternative "type"
10629 [(if_then_else (match_operand 3 "const_int_operand" "")
10630 (const_string "alu_shift_imm" )
10631 (const_string "alu_shift_reg"))
10632 (const_string "multiple")])]
;; Mirror of the arith_move pair: the shiftable operation is in the
;; else-arm.  Note the LT/GE mask choice is inverted relative to
;; *ifcompare_arith_move (GE -> and, LT -> bic) because the operation
;; executes on the opposite side of the condition.
;; NOTE(review): interior lines are missing from this extract.
10635 (define_insn "*ifcompare_move_arith"
10636 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10637 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10638 [(match_operand:SI 4 "s_register_operand" "r,r")
10639 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10640 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10641 (match_operator:SI 7 "shiftable_operator"
10642 [(match_operand:SI 2 "s_register_operand" "r,r")
10643 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10644 (clobber (reg:CC CC_REGNUM))]
10647 /* If we have an operation where (op x 0) is the identity operation and
10648 the conditional operator is LT or GE and we are comparing against zero and
10649 everything is in registers then we can do this in two instructions */
10650 if (operands[5] == const0_rtx
10651 && GET_CODE (operands[7]) != AND
10652 && REG_P (operands[3])
10653 && REG_P (operands[1])
10654 && REGNO (operands[1]) == REGNO (operands[2])
10655 && REGNO (operands[2]) != REGNO (operands[0]))
10657 if (GET_CODE (operands[6]) == GE)
10658 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10659 else if (GET_CODE (operands[6]) == LT)
10660 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10663 if (CONST_INT_P (operands[5])
10664 && !const_ok_for_arm (INTVAL (operands[5])))
10665 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
10667 output_asm_insn (\"cmp\\t%4, %5\", operands);
10669 if (which_alternative != 0)
10670 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
10671 return \"%I7%D6\\t%0, %2, %3\";
10673 [(set_attr "conds" "clob")
10674 (set_attr "length" "8,12")
10675 (set_attr "type" "multiple")]
10678 (define_insn "*if_move_arith"
10679 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10681 (match_operator 4 "arm_comparison_operator"
10682 [(match_operand 6 "cc_register" "") (const_int 0)])
10683 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10684 (match_operator:SI 5 "shiftable_operator"
10685 [(match_operand:SI 2 "s_register_operand" "r,r")
10686 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
10689 %I5%D4\\t%0, %2, %3
10690 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
10691 [(set_attr "conds" "use")
10692 (set_attr "length" "4,8")
10693 (set_attr_alternative "type"
10694 [(if_then_else (match_operand 3 "const_int_operand" "")
10695 (const_string "alu_shift_imm" )
10696 (const_string "alu_shift_reg"))
10697 (const_string "multiple")])]
;; "if (cmp) x = a; else x = ~b" with the compare done in-pattern
;; (clobbers CC).  Output template lines are not visible in this extract.
;; NOTE(review): interior lines are missing (original numbering skips).
10700 (define_insn "*ifcompare_move_not"
10701 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10703 (match_operator 5 "arm_comparison_operator"
10704 [(match_operand:SI 3 "s_register_operand" "r,r")
10705 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10706 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10708 (match_operand:SI 2 "s_register_operand" "r,r"))))
10709 (clobber (reg:CC CC_REGNUM))]
10712 [(set_attr "conds" "clob")
10713 (set_attr "length" "8,12")
10714 (set_attr "type" "multiple")]
;; "if (cmp) x = a; else x = ~b" reusing a live CC register: a
;; conditional mov/mvn for operand 1 followed by a conditional mvn of
;; operand 2 on the opposite condition.
;; Fix: the attribute list previously set the "type" attribute twice
;; ((set_attr "type" "mvn_reg") followed by the per-alternative value);
;; the scalar duplicate is removed, keeping the per-alternative setting,
;; matching the sibling *if_not_move pattern.
10717 (define_insn "*if_move_not"
10718 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10720 (match_operator 4 "arm_comparison_operator"
10721 [(match_operand 3 "cc_register" "") (const_int 0)])
10722 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10723 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10727 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
10728 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
10729 [(set_attr "conds" "use")
10731 (set_attr "length" "4,8,8")
10732 (set_attr "type" "mvn_reg,multiple,multiple")]
;; Mirror pair: the bitwise NOT is in the then-arm.  *ifcompare_not_move
;; does its own compare (clobbers CC); *if_not_move reuses a live CC
;; register with condition suffixes swapped (%D4 for the move, %d4 for
;; the mvn).  NOTE(review): interior lines are missing from this extract.
10735 (define_insn "*ifcompare_not_move"
10736 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10738 (match_operator 5 "arm_comparison_operator"
10739 [(match_operand:SI 3 "s_register_operand" "r,r")
10740 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10742 (match_operand:SI 2 "s_register_operand" "r,r"))
10743 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10744 (clobber (reg:CC CC_REGNUM))]
10747 [(set_attr "conds" "clob")
10748 (set_attr "length" "8,12")
10749 (set_attr "type" "multiple")]
10752 (define_insn "*if_not_move"
10753 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10755 (match_operator 4 "arm_comparison_operator"
10756 [(match_operand 3 "cc_register" "") (const_int 0)])
10757 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10758 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10762 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
10763 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
10764 [(set_attr "conds" "use")
10765 (set_attr "type" "mvn_reg,multiple,multiple")
10766 (set_attr "length" "4,8,8")]
;; "if (cmp) x = b SHIFT c; else x = a".  The ifcompare form clobbers CC;
;; *if_shift_move reuses a live CC register, emitting the conditional
;; shifted move (%S4 prints the shift) plus an optional mov/mvn for the
;; else-value.  NOTE(review): interior lines are missing from this extract.
10769 (define_insn "*ifcompare_shift_move"
10770 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10772 (match_operator 6 "arm_comparison_operator"
10773 [(match_operand:SI 4 "s_register_operand" "r,r")
10774 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10775 (match_operator:SI 7 "shift_operator"
10776 [(match_operand:SI 2 "s_register_operand" "r,r")
10777 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
10778 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10779 (clobber (reg:CC CC_REGNUM))]
10782 [(set_attr "conds" "clob")
10783 (set_attr "length" "8,12")
10784 (set_attr "type" "multiple")]
10787 (define_insn "*if_shift_move"
10788 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10790 (match_operator 5 "arm_comparison_operator"
10791 [(match_operand 6 "cc_register" "") (const_int 0)])
10792 (match_operator:SI 4 "shift_operator"
10793 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10794 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
10795 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10799 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
10800 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
10801 [(set_attr "conds" "use")
10802 (set_attr "shift" "2")
10803 (set_attr "length" "4,8,8")
10804 (set_attr_alternative "type"
10805 [(if_then_else (match_operand 3 "const_int_operand" "")
10806 (const_string "mov_shift" )
10807 (const_string "mov_shift_reg"))
10808 (const_string "multiple")
10809 (const_string "multiple")])]
;; Mirror pair: the shifted value is in the else-arm, so the condition
;; suffixes in *if_move_shift are swapped relative to *if_shift_move.
;; NOTE(review): interior lines are missing from this extract.
10812 (define_insn "*ifcompare_move_shift"
10813 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10815 (match_operator 6 "arm_comparison_operator"
10816 [(match_operand:SI 4 "s_register_operand" "r,r")
10817 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10818 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10819 (match_operator:SI 7 "shift_operator"
10820 [(match_operand:SI 2 "s_register_operand" "r,r")
10821 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
10822 (clobber (reg:CC CC_REGNUM))]
10825 [(set_attr "conds" "clob")
10826 (set_attr "length" "8,12")
10827 (set_attr "type" "multiple")]
10830 (define_insn "*if_move_shift"
10831 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10833 (match_operator 5 "arm_comparison_operator"
10834 [(match_operand 6 "cc_register" "") (const_int 0)])
10835 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10836 (match_operator:SI 4 "shift_operator"
10837 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10838 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10842 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10843 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10844 [(set_attr "conds" "use")
10845 (set_attr "shift" "2")
10846 (set_attr "length" "4,8,8")
10847 (set_attr_alternative "type"
10848 [(if_then_else (match_operand 3 "const_int_operand" "")
10849 (const_string "mov_shift" )
10850 (const_string "mov_shift_reg"))
10851 (const_string "multiple")
10852 (const_string "multiple")])]
;; Conditional select between two shifted values.  The ifcompare form
;; does its own compare (3 insns, clobbers CC); *if_shift_shift reuses a
;; live CC register and emits two conditional shifted moves.
;; NOTE(review): interior lines are missing from this extract.
10855 (define_insn "*ifcompare_shift_shift"
10856 [(set (match_operand:SI 0 "s_register_operand" "=r")
10858 (match_operator 7 "arm_comparison_operator"
10859 [(match_operand:SI 5 "s_register_operand" "r")
10860 (match_operand:SI 6 "arm_add_operand" "rIL")])
10861 (match_operator:SI 8 "shift_operator"
10862 [(match_operand:SI 1 "s_register_operand" "r")
10863 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10864 (match_operator:SI 9 "shift_operator"
10865 [(match_operand:SI 3 "s_register_operand" "r")
10866 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10867 (clobber (reg:CC CC_REGNUM))]
10870 [(set_attr "conds" "clob")
10871 (set_attr "length" "12")
10872 (set_attr "type" "multiple")]
10875 (define_insn "*if_shift_shift"
10876 [(set (match_operand:SI 0 "s_register_operand" "=r")
10878 (match_operator 5 "arm_comparison_operator"
10879 [(match_operand 8 "cc_register" "") (const_int 0)])
10880 (match_operator:SI 6 "shift_operator"
10881 [(match_operand:SI 1 "s_register_operand" "r")
10882 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10883 (match_operator:SI 7 "shift_operator"
10884 [(match_operand:SI 3 "s_register_operand" "r")
10885 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10887 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10888 [(set_attr "conds" "use")
10889 (set_attr "shift" "1")
10890 (set_attr "length" "8")
10891 (set (attr "type") (if_then_else
10892 (and (match_operand 2 "const_int_operand" "")
10893 (match_operand 4 "const_int_operand" ""))
10894 (const_string "mov_shift")
10895 (const_string "mov_shift_reg")))]
;; "if (cmp) x = ~a; else x = b OP c" (OP shiftable).  The ifcompare form
;; clobbers CC; *if_not_arith reuses a live CC register with a
;; conditional mvn then the conditional operation on the opposite
;; condition.  NOTE(review): interior lines are missing from this extract.
10898 (define_insn "*ifcompare_not_arith"
10899 [(set (match_operand:SI 0 "s_register_operand" "=r")
10901 (match_operator 6 "arm_comparison_operator"
10902 [(match_operand:SI 4 "s_register_operand" "r")
10903 (match_operand:SI 5 "arm_add_operand" "rIL")])
10904 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10905 (match_operator:SI 7 "shiftable_operator"
10906 [(match_operand:SI 2 "s_register_operand" "r")
10907 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10908 (clobber (reg:CC CC_REGNUM))]
10911 [(set_attr "conds" "clob")
10912 (set_attr "length" "12")
10913 (set_attr "type" "multiple")]
10916 (define_insn "*if_not_arith"
10917 [(set (match_operand:SI 0 "s_register_operand" "=r")
10919 (match_operator 5 "arm_comparison_operator"
10920 [(match_operand 4 "cc_register" "") (const_int 0)])
10921 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10922 (match_operator:SI 6 "shiftable_operator"
10923 [(match_operand:SI 2 "s_register_operand" "r")
10924 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10926 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10927 [(set_attr "conds" "use")
10928 (set_attr "type" "mvn_reg")
10929 (set_attr "length" "8")]
;; Mirror pair: the NOT is in the else-arm; condition suffixes in
;; *if_arith_not are swapped accordingly (mvn%D5 then %I6%d5).
;; NOTE(review): interior lines are missing from this extract.
10932 (define_insn "*ifcompare_arith_not"
10933 [(set (match_operand:SI 0 "s_register_operand" "=r")
10935 (match_operator 6 "arm_comparison_operator"
10936 [(match_operand:SI 4 "s_register_operand" "r")
10937 (match_operand:SI 5 "arm_add_operand" "rIL")])
10938 (match_operator:SI 7 "shiftable_operator"
10939 [(match_operand:SI 2 "s_register_operand" "r")
10940 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10941 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10942 (clobber (reg:CC CC_REGNUM))]
10945 [(set_attr "conds" "clob")
10946 (set_attr "length" "12")
10947 (set_attr "type" "multiple")]
10950 (define_insn "*if_arith_not"
10951 [(set (match_operand:SI 0 "s_register_operand" "=r")
10953 (match_operator 5 "arm_comparison_operator"
10954 [(match_operand 4 "cc_register" "") (const_int 0)])
10955 (match_operator:SI 6 "shiftable_operator"
10956 [(match_operand:SI 2 "s_register_operand" "r")
10957 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10958 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10960 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10961 [(set_attr "conds" "use")
10962 (set_attr "type" "multiple")
10963 (set_attr "length" "8")]
;; "if (cmp) x = -b; else x = a".  The ifcompare form clobbers CC.
;; *if_neg_move reuses a live CC register and, after reload, splits into
;; a single cond_exec negate (the else-value is tied to the destination
;; via the "0" constraint, so no else-move is needed).
;; NOTE(review): interior lines are missing from this extract.
10966 (define_insn "*ifcompare_neg_move"
10967 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10969 (match_operator 5 "arm_comparison_operator"
10970 [(match_operand:SI 3 "s_register_operand" "r,r")
10971 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10972 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10973 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10974 (clobber (reg:CC CC_REGNUM))]
10977 [(set_attr "conds" "clob")
10978 (set_attr "length" "8,12")
10979 (set_attr "type" "multiple")]
10982 (define_insn_and_split "*if_neg_move"
10983 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
10985 (match_operator 4 "arm_comparison_operator"
10986 [(match_operand 3 "cc_register" "") (const_int 0)])
10987 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
10988 (match_operand:SI 1 "s_register_operand" "0,0")))]
10991 "&& reload_completed"
10992 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
10993 (set (match_dup 0) (neg:SI (match_dup 2))))]
10995 [(set_attr "conds" "use")
10996 (set_attr "length" "4")
10997 (set_attr "arch" "t2,32")
10998 (set_attr "enabled_for_short_it" "yes,no")
10999 (set_attr "type" "logic_shift_imm")]
;; Mirror pair: negate in the else-arm.  *if_move_neg splits into a
;; cond_exec negate on the REVERSED condition (the C body reverses the
;; comparison code, using the unordered-aware reversal for FP CC modes).
;; NOTE(review): interior lines are missing from this extract.
11002 (define_insn "*ifcompare_move_neg"
11003 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11005 (match_operator 5 "arm_comparison_operator"
11006 [(match_operand:SI 3 "s_register_operand" "r,r")
11007 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11008 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11009 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
11010 (clobber (reg:CC CC_REGNUM))]
11013 [(set_attr "conds" "clob")
11014 (set_attr "length" "8,12")
11015 (set_attr "type" "multiple")]
11018 (define_insn_and_split "*if_move_neg"
11019 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
11021 (match_operator 4 "arm_comparison_operator"
11022 [(match_operand 3 "cc_register" "") (const_int 0)])
11023 (match_operand:SI 1 "s_register_operand" "0,0")
11024 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
11027 "&& reload_completed"
11028 [(cond_exec (match_dup 5)
11029 (set (match_dup 0) (neg:SI (match_dup 2))))]
11031 machine_mode mode = GET_MODE (operands[3]);
11032 rtx_code rc = GET_CODE (operands[4]);
11034 if (mode == CCFPmode || mode == CCFPEmode)
11035 rc = reverse_condition_maybe_unordered (rc);
11037 rc = reverse_condition (rc);
11039 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
11041 [(set_attr "conds" "use")
11042 (set_attr "length" "4")
11043 (set_attr "arch" "t2,32")
11044 (set_attr "enabled_for_short_it" "yes,no")
11045 (set_attr "type" "logic_shift_imm")]
;; ARM-only: fold two loads from adjacent memory locations plus a
;; shiftable operation into an ldm (ldmia/ldmib/ldmda depending on the
;; offsets) followed by the operation; falls back to two ldr (or an
;; address add) when the offset cannot be encoded.  The ldm register list
;; must be ascending, hence the REGNO ordering of operands 0 and 4.
;; NOTE(review): interior lines (braces, else-branches, some offset
;; cases) are missing from this extract; the visible logic is partial.
11048 (define_insn "*arith_adjacentmem"
11049 [(set (match_operand:SI 0 "s_register_operand" "=r")
11050 (match_operator:SI 1 "shiftable_operator"
11051 [(match_operand:SI 2 "memory_operand" "m")
11052 (match_operand:SI 3 "memory_operand" "m")]))
11053 (clobber (match_scratch:SI 4 "=r"))]
11054 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
11060 HOST_WIDE_INT val1 = 0, val2 = 0;
11062 if (REGNO (operands[0]) > REGNO (operands[4]))
11064 ldm[1] = operands[4];
11065 ldm[2] = operands[0];
11069 ldm[1] = operands[0];
11070 ldm[2] = operands[4];
11073 base_reg = XEXP (operands[2], 0);
11075 if (!REG_P (base_reg))
11077 val1 = INTVAL (XEXP (base_reg, 1));
11078 base_reg = XEXP (base_reg, 0);
11081 if (!REG_P (XEXP (operands[3], 0)))
11082 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
11084 arith[0] = operands[0];
11085 arith[3] = operands[1];
11099 if (val1 !=0 && val2 != 0)
11103 if (val1 == 4 || val2 == 4)
11104 /* Other val must be 8, since we know they are adjacent and neither
11106 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
11107 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
11109 ldm[0] = ops[0] = operands[4];
11111 ops[2] = GEN_INT (val1);
11112 output_add_immediate (ops);
11114 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11116 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11120 /* Offset is out of range for a single add, so use two ldr. */
11123 ops[2] = GEN_INT (val1);
11124 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11126 ops[2] = GEN_INT (val2);
11127 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11130 else if (val1 != 0)
11133 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11135 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11140 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11142 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11144 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
11147 [(set_attr "length" "12")
11148 (set_attr "predicable" "yes")
11149 (set_attr "type" "load_4")]
;; Peephole2: fuse "mov rD, rS; cmp rS, #0" into a single flag-setting
;; parallel (movs).  Followed by a define_split turning
;; "x = (a >= 0) & -(b cmp c)" into a NOT-of-asr mask and an AND with the
;; store-flag result, using a scratch register.
;; NOTE(review): the introducer lines of both patterns and some interior
;; lines are missing from this extract (original numbering skips).
11152 ; This pattern is never tried by combine, so do it as a peephole
11155 [(set (match_operand:SI 0 "arm_general_register_operand" "")
11156 (match_operand:SI 1 "arm_general_register_operand" ""))
11157 (set (reg:CC CC_REGNUM)
11158 (compare:CC (match_dup 1) (const_int 0)))]
11160 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
11161 (set (match_dup 0) (match_dup 1))])]
11166 [(set (match_operand:SI 0 "s_register_operand" "")
11167 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
11169 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
11170 [(match_operand:SI 3 "s_register_operand" "")
11171 (match_operand:SI 4 "arm_rhs_operand" "")]))))
11172 (clobber (match_operand:SI 5 "s_register_operand" ""))]
11174 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
11175 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
11180 ;; This split can be used because CC_Z mode implies that the following
11181 ;; branch will be an equality, or an unsigned inequality, so the sign
11182 ;; extension is not needed.
11185 [(set (reg:CC_Z CC_REGNUM)
11187 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
11189 (match_operand 1 "const_int_operand" "")))
11190 (clobber (match_scratch:SI 2 ""))]
11192 && ((UINTVAL (operands[1]))
11193 == ((UINTVAL (operands[1])) >> 24) << 24)"
11194 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
11195 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
11197 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
11200 ;; ??? Check the patterns above for Thumb-2 usefulness
11202 (define_expand "prologue"
11203 [(clobber (const_int 0))]
11206 arm_expand_prologue ();
11208 thumb1_expand_prologue ();
11213 (define_expand "epilogue"
11214 [(clobber (const_int 0))]
11217 if (crtl->calls_eh_return)
11218 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
11221 thumb1_expand_epilogue ();
11222 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
11223 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
11225 else if (HAVE_return)
11227 /* HAVE_return is testing for USE_RETURN_INSN (FALSE). Hence,
11228 no need for explicit testing again. */
11229 emit_jump_insn (gen_return ());
11231 else if (TARGET_32BIT)
11233 arm_expand_epilogue (true);
11239 ;; Note - although unspec_volatile's USE all hard registers,
11240 ;; USEs are ignored after reload has completed.  Thus we need
11241 ;; to add an unspec of the link register to ensure that flow
11242 ;; does not think that it is unused by the sibcall branch that
11243 ;; will replace the standard function epilogue.
11244 (define_expand "sibcall_epilogue"
11245 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
11246 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
11249 arm_expand_epilogue (false);
11254 (define_expand "eh_epilogue"
11255 [(use (match_operand:SI 0 "register_operand"))
11256 (use (match_operand:SI 1 "register_operand"))
11257 (use (match_operand:SI 2 "register_operand"))]
11261 cfun->machine->eh_epilogue_sp_ofs = operands[1];
11262 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
11264 rtx ra = gen_rtx_REG (Pmode, 2);
11266 emit_move_insn (ra, operands[2]);
11269     /* This is a hack -- we may have crystallized the function type too
11271 cfun->machine->func_type = 0;
11275 ;; This split is only used during output to reduce the number of patterns
11276 ;; that need assembler instructions adding to them. We allowed the setting
11277 ;; of the conditions to be implicit during rtl generation so that
11278 ;; the conditional compare patterns would work. However this conflicts to
11279 ;; some extent with the conditional data operations, so we have to split them
11282 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
11283 ;; conditional execution sufficient?
11286 [(set (match_operand:SI 0 "s_register_operand" "")
11287 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11288 [(match_operand 2 "" "") (match_operand 3 "" "")])
11290 (match_operand 4 "" "")))
11291 (clobber (reg:CC CC_REGNUM))]
11292 "TARGET_ARM && reload_completed"
11293 [(set (match_dup 5) (match_dup 6))
11294 (cond_exec (match_dup 7)
11295 (set (match_dup 0) (match_dup 4)))]
11298 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11299 operands[2], operands[3]);
11300 enum rtx_code rc = GET_CODE (operands[1]);
11302 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11303 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11304 if (mode == CCFPmode || mode == CCFPEmode)
11305 rc = reverse_condition_maybe_unordered (rc);
11307 rc = reverse_condition (rc);
11309 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
11314 [(set (match_operand:SI 0 "s_register_operand" "")
11315 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11316 [(match_operand 2 "" "") (match_operand 3 "" "")])
11317 (match_operand 4 "" "")
11319 (clobber (reg:CC CC_REGNUM))]
11320 "TARGET_ARM && reload_completed"
11321 [(set (match_dup 5) (match_dup 6))
11322 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
11323 (set (match_dup 0) (match_dup 4)))]
11326 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11327 operands[2], operands[3]);
11329 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11330 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11335 [(set (match_operand:SI 0 "s_register_operand" "")
11336 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11337 [(match_operand 2 "" "") (match_operand 3 "" "")])
11338 (match_operand 4 "" "")
11339 (match_operand 5 "" "")))
11340 (clobber (reg:CC CC_REGNUM))]
11341 "TARGET_ARM && reload_completed"
11342 [(set (match_dup 6) (match_dup 7))
11343 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
11344 (set (match_dup 0) (match_dup 4)))
11345 (cond_exec (match_dup 8)
11346 (set (match_dup 0) (match_dup 5)))]
11349 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11350 operands[2], operands[3]);
11351 enum rtx_code rc = GET_CODE (operands[1]);
11353 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11354 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11355 if (mode == CCFPmode || mode == CCFPEmode)
11356 rc = reverse_condition_maybe_unordered (rc);
11358 rc = reverse_condition (rc);
11360 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
11365 [(set (match_operand:SI 0 "s_register_operand" "")
11366 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11367 [(match_operand:SI 2 "s_register_operand" "")
11368 (match_operand:SI 3 "arm_add_operand" "")])
11369 (match_operand:SI 4 "arm_rhs_operand" "")
11371 (match_operand:SI 5 "s_register_operand" ""))))
11372 (clobber (reg:CC CC_REGNUM))]
11373 "TARGET_ARM && reload_completed"
11374 [(set (match_dup 6) (match_dup 7))
11375 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
11376 (set (match_dup 0) (match_dup 4)))
11377 (cond_exec (match_dup 8)
11378 (set (match_dup 0) (not:SI (match_dup 5))))]
11381 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11382 operands[2], operands[3]);
11383 enum rtx_code rc = GET_CODE (operands[1]);
11385 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11386 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11387 if (mode == CCFPmode || mode == CCFPEmode)
11388 rc = reverse_condition_maybe_unordered (rc);
11390 rc = reverse_condition (rc);
11392 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
11396 (define_insn "*cond_move_not"
11397 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11398 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
11399 [(match_operand 3 "cc_register" "") (const_int 0)])
11400 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11402 (match_operand:SI 2 "s_register_operand" "r,r"))))]
11406 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
11407 [(set_attr "conds" "use")
11408 (set_attr "type" "mvn_reg,multiple")
11409 (set_attr "length" "4,8")]
11412 ;; The next two patterns occur when an AND operation is followed by a
11413 ;; scc insn sequence
11415 (define_insn "*sign_extract_onebit"
11416 [(set (match_operand:SI 0 "s_register_operand" "=r")
11417 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11419 (match_operand:SI 2 "const_int_operand" "n")))
11420 (clobber (reg:CC CC_REGNUM))]
11423 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11424 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
11425 return \"mvnne\\t%0, #0\";
11427 [(set_attr "conds" "clob")
11428 (set_attr "length" "8")
11429 (set_attr "type" "multiple")]
11432 (define_insn "*not_signextract_onebit"
11433 [(set (match_operand:SI 0 "s_register_operand" "=r")
11435 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11437 (match_operand:SI 2 "const_int_operand" "n"))))
11438 (clobber (reg:CC CC_REGNUM))]
11441 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11442 output_asm_insn (\"tst\\t%1, %2\", operands);
11443 output_asm_insn (\"mvneq\\t%0, #0\", operands);
11444 return \"movne\\t%0, #0\";
11446 [(set_attr "conds" "clob")
11447 (set_attr "length" "12")
11448 (set_attr "type" "multiple")]
11450 ;; ??? The above patterns need auditing for Thumb-2
11452 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
11453 ;; expressions. For simplicity, the first register is also in the unspec
11455 ;; To avoid the usage of GNU extension, the length attribute is computed
11456 ;; in a C function arm_attr_length_push_multi.
;; *push_multi: store multiple core registers to the stack in one
;; PUSH/STM.  The registers travel in a match_parallel recognized by
;; multi_register_push; the output routine builds the "push {...}"
;; operand list as a string from the parallel's elements.
;; NOTE(review): several interior lines of this pattern are missing
;; from this fragment (buffer declarations, strcat of the register
;; name, thumb/arm branch structure) -- documented only as visible.
11457 (define_insn "*push_multi"
11458   [(match_parallel 2 "multi_register_push"
11459     [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
11460 	  (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
11461 		      UNSPEC_PUSH_MULT))])]
11465     int num_saves = XVECLEN (operands[2], 0);
11467     /* For the StrongARM at least it is faster to
11468        use STR to store only a single register.
11469        In Thumb mode always use push, and the assembler will pick
11470        something appropriate.  */
11471     if (num_saves == 1 && TARGET_ARM)
11472       output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
11479 	  strcpy (pattern, \"push%?\\t{%1\");
11481 	  strcpy (pattern, \"push\\t{%1\");
11483 	for (i = 1; i < num_saves; i++)
11485 	    strcat (pattern, \", %|\");
11487 		    reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
11490 	strcat (pattern, \"}\");
11491 	output_asm_insn (pattern, operands);
11496   [(set_attr "type" "store_16")
11497    (set (attr "length")
11498 	(symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
;; stack_tie: a zero-length insn (length attr is 0, so it emits no
;; code) that ties a BLKmode scratch memory to two stack-related
;; registers via an unspec.  Presumably this acts as a scheduling
;; barrier between stack-pointer updates and stack memory accesses --
;; confirm against the uses in the prologue/epilogue expanders.
11501 (define_insn "stack_tie"
11502   [(set (mem:BLK (scratch))
11503 	(unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
11504 		     (match_operand:SI 1 "s_register_operand" "rk")]
11508   [(set_attr "length" "0")
11509    (set_attr "type" "block")]
11512 ;; Pop (as used in epilogue RTL)
;; *load_multiple_with_writeback: LDM with base-register writeback, as
;; used in epilogue RTL.  Operand 1 is both the base and the register
;; being incremented by the constant in operand 2; output is delegated
;; to arm_output_multireg_pop with return_pc=false.  Only valid during
;; or after reload, since the operands are validated by the
;; load_multiple_operation match_parallel predicate, not constraints.
11514 (define_insn "*load_multiple_with_writeback"
11515   [(match_parallel 0 "load_multiple_operation"
11516     [(set (match_operand:SI 1 "s_register_operand" "+rk")
11517 	  (plus:SI (match_dup 1)
11518 		   (match_operand:SI 2 "const_int_I_operand" "I")))
11519      (set (match_operand:SI 3 "s_register_operand" "=rk")
11520 	  (mem:SI (match_dup 1)))
11522   "TARGET_32BIT && (reload_in_progress || reload_completed)"
11525     arm_output_multireg_pop (operands, /*return_pc=*/false,
11526 			     /*cond=*/const_true_rtx,
11532   [(set_attr "type" "load_16")
11533    (set_attr "predicable" "yes")
11534    (set (attr "length")
11535 	(symbol_ref "arm_attr_length_pop_multi (operands,
11536 						/*return_pc=*/false,
11537 						/*write_back_p=*/true)"))]
11540 ;; Pop with return (as used in epilogue RTL)
11542 ;; This instruction is generated when the registers are popped at the end of
11543 ;; epilogue. Here, instead of popping the value into LR and then generating
11544 ;; jump to LR, value is popped into PC directly. Hence, the pattern is combined
;; *pop_multiple_with_writeback_and_return: epilogue pop that loads PC
;; directly (return_pc=true), fusing the register restore and the
;; function return into one instruction.  Base register (operand 1) is
;; written back by the constant in operand 2.
11546 (define_insn "*pop_multiple_with_writeback_and_return"
11547   [(match_parallel 0 "pop_multiple_return"
11549      (set (match_operand:SI 1 "s_register_operand" "+rk")
11550 	  (plus:SI (match_dup 1)
11551 		   (match_operand:SI 2 "const_int_I_operand" "I")))
11552      (set (match_operand:SI 3 "s_register_operand" "=rk")
11553 	  (mem:SI (match_dup 1)))
11555   "TARGET_32BIT && (reload_in_progress || reload_completed)"
11558     arm_output_multireg_pop (operands, /*return_pc=*/true,
11559 			     /*cond=*/const_true_rtx,
11565   [(set_attr "type" "load_16")
11566    (set_attr "predicable" "yes")
11567    (set (attr "length")
11568 	(symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11569 						/*write_back_p=*/true)"))]
;; *pop_multiple_with_return: same as the pattern above but without
;; base-register writeback (write_back_p=false); the base in operand 1
;; is only read.
11572 (define_insn "*pop_multiple_with_return"
11573   [(match_parallel 0 "pop_multiple_return"
11575      (set (match_operand:SI 2 "s_register_operand" "=rk")
11576 	  (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11578   "TARGET_32BIT && (reload_in_progress || reload_completed)"
11581     arm_output_multireg_pop (operands, /*return_pc=*/true,
11582 			     /*cond=*/const_true_rtx,
11588   [(set_attr "type" "load_16")
11589    (set_attr "predicable" "yes")
11590    (set (attr "length")
11591 	(symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11592 						/*write_back_p=*/false)"))]
11595 ;; Load into PC and return
;; *ldr_with_return: single-register return -- load PC from the stack
;; with a post-incremented base: "ldr pc, [rN], #4".  Combines the pop
;; of the return address with the return itself.
11596 (define_insn "*ldr_with_return"
11598    (set (reg:SI PC_REGNUM)
11599 	(mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
11600   "TARGET_32BIT && (reload_in_progress || reload_completed)"
11601   "ldr%?\t%|pc, [%0], #4"
11602   [(set_attr "type" "load_4")
11603    (set_attr "predicable" "yes")]
11605 ;; Pop for floating point registers (as used in epilogue RTL)
;; *vfp_pop_multiple_with_writeback: restore a contiguous run of VFP
;; double registers with a single "vldm <base>!, {dA-dB}".  The output
;; routine builds the mnemonic into a string; %P prints a D register.
;; NOTE(review): buffer/op_list declarations and some control lines are
;; missing from this fragment -- documented only as far as visible.
11606 (define_insn "*vfp_pop_multiple_with_writeback"
11607   [(match_parallel 0 "pop_multiple_fp"
11608     [(set (match_operand:SI 1 "s_register_operand" "+rk")
11609 	  (plus:SI (match_dup 1)
11610 		   (match_operand:SI 2 "const_int_I_operand" "I")))
11611      (set (match_operand:DF 3 "vfp_hard_register_operand" "")
11612 	  (mem:DF (match_dup 1)))])]
11613   "TARGET_32BIT && TARGET_HARD_FLOAT"
11616     int num_regs = XVECLEN (operands[0], 0);
11619     strcpy (pattern, \"vldm\\t\");
11620     strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
11621     strcat (pattern, \"!, {\");
11622     op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
11623     strcat (pattern, \"%P0\");
11624     if ((num_regs - 1) > 1)
11626 	strcat (pattern, \"-%P1\");
11627 	op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
11630     strcat (pattern, \"}\");
11631     output_asm_insn (pattern, op_list);
11635   [(set_attr "type" "load_16")
11636    (set_attr "conds" "unconditional")
11637    (set_attr "predicable" "no")]
11640 ;; Special patterns for dealing with the constant pool
;; align_4: constant-pool marker insn that emits a 32-bit (4-byte)
;; alignment directive into the assembly output.
11642 (define_insn "align_4"
11643   [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
11646     assemble_align (32);
11649   [(set_attr "type" "no_insn")]
;; align_8: like align_4, but emits a 64-bit (8-byte) alignment
;; directive for pool entries needing doubleword alignment.
11652 (define_insn "align_8"
11653   [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
11656     assemble_align (64);
11659   [(set_attr "type" "no_insn")]
;; consttable_end: marks the end of a minipool; clears the global
;; making_const_table flag and emits no code.
11662 (define_insn "consttable_end"
11663   [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
11666     making_const_table = FALSE;
11669   [(set_attr "type" "no_insn")]
;; consttable_1: emit a 1-byte constant-pool entry, padded with three
;; zero bytes so the pool stays word-aligned (hence length "4").
11672 (define_insn "consttable_1"
11673   [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
11676     making_const_table = TRUE;
11677     assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
11678     assemble_zeros (3);
11681   [(set_attr "length" "4")
11682    (set_attr "type" "no_insn")]
;; consttable_2: emit a 2-byte constant-pool entry, padded with two
;; zero bytes to keep word alignment.  Float-class modes (HFmode
;; constants) are routed through arm_emit_fp16_const instead of
;; assemble_integer.
11685 (define_insn "consttable_2"
11686   [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
11690     rtx x = operands[0];
11691     making_const_table = TRUE;
11692     switch (GET_MODE_CLASS (GET_MODE (x)))
11695 	arm_emit_fp16_const (x);
11698 	assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
11699 	assemble_zeros (2);
11704   [(set_attr "length" "4")
11705    (set_attr "type" "no_insn")]
;; consttable_4: emit a 4-byte constant-pool entry.  Scalar float
;; constants go through assemble_real; anything else through
;; assemble_integer, after stripping a stray HIGH wrapper (see the
;; original XXX comment below) and marking symbol refs as used.
11708 (define_insn "consttable_4"
11709   [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
11713     rtx x = operands[0];
11714     making_const_table = TRUE;
11715     scalar_float_mode float_mode;
11716     if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
11717       assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
11720 	/* XXX: Sometimes gcc does something really dumb and ends up with
11721 	   a HIGH in a constant pool entry, usually because it's trying to
11722 	   load into a VFP register.  We know this will always be used in
11723 	   combination with a LO_SUM which ignores the high bits, so just
11724 	   strip off the HIGH.  */
11725 	if (GET_CODE (x) == HIGH)
11727 	assemble_integer (x, 4, BITS_PER_WORD, 1);
11728 	mark_symbol_refs_as_used (x);
11732   [(set_attr "length" "4")
11733    (set_attr "type" "no_insn")]
;; consttable_8: emit an 8-byte constant-pool entry; scalar floats via
;; assemble_real, everything else via assemble_integer.
11736 (define_insn "consttable_8"
11737   [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
11741     making_const_table = TRUE;
11742     scalar_float_mode float_mode;
11743     if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
11744       assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
11745 		     float_mode, BITS_PER_WORD);
11747       assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
11750   [(set_attr "length" "8")
11751    (set_attr "type" "no_insn")]
;; consttable_16: emit a 16-byte constant-pool entry (e.g. for vector
;; constants); same float/integer split as consttable_8.
11754 (define_insn "consttable_16"
11755   [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
11759     making_const_table = TRUE;
11760     scalar_float_mode float_mode;
11761     if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
11762       assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
11763 		     float_mode, BITS_PER_WORD);
11765       assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
11768   [(set_attr "length" "16")
11769    (set_attr "type" "no_insn")]
11772 ;; V5 Instructions,
;; clzsi2: count leading zeros -- operand 0 receives CLZ of operand 1.
;; Enabled on 32-bit targets with the Armv5T CLZ instruction
;; (arm_arch5t).  NOTE(review): the assembler template line is not
;; visible in this fragment.
11774 (define_insn "clzsi2"
11775   [(set (match_operand:SI 0 "s_register_operand" "=r")
11776 	(clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11777   "TARGET_32BIT && arm_arch5t"
11779   [(set_attr "predicable" "yes")
11780    (set_attr "type" "clz")])
;; rbitsi2: bit-reverse operand 1 into operand 0, modeled as an
;; UNSPEC_RBIT since RTL has no bit-reverse code.  Requires Thumb-2
;; era architecture (arm_arch_thumb2).  Used by the ctzsi2 split below.
11782 (define_insn "rbitsi2"
11783   [(set (match_operand:SI 0 "s_register_operand" "=r")
11784 	(unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
11785   "TARGET_32BIT && arm_arch_thumb2"
11787   [(set_attr "predicable" "yes")
11788    (set_attr "type" "clz")])
11790 ;; Keep this as a CTZ expression until after reload and then split
11791 ;; into RBIT + CLZ. Since RBIT is represented as an UNSPEC it is unlikely
11792 ;; to fold with any other expression.
;; ctzsi2: count trailing zeros.  Kept as a single CTZ insn until
;; after reload, then split into RBIT followed by CLZ (the RBIT
;; result is reused as the CLZ input via operand 0), since
;; ctz(x) == clz(bit_reverse(x)).
11794 (define_insn_and_split "ctzsi2"
11795  [(set (match_operand:SI 0 "s_register_operand" "=r")
11796        (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11797   "TARGET_32BIT && arm_arch_thumb2"
11799   "&& reload_completed"
11802   emit_insn (gen_rbitsi2 (operands[0], operands[1]));
11803   emit_insn (gen_clzsi2 (operands[0], operands[0]));
11807 ;; V5E instructions.
;; prefetch: standard-named pattern mapping the RTL prefetch (address,
;; rw-flag, locality) onto the Armv5TE preload facility.  The
;; rw/locality operands are matched but the visible attrs suggest a
;; plain data preload; the output template is missing from this
;; fragment -- presumably PLD; confirm upstream.
11809 (define_insn "prefetch"
11810   [(prefetch (match_operand:SI 0 "address_operand" "p")
11811 	     (match_operand:SI 1 "" "")
11812 	     (match_operand:SI 2 "" ""))]
11813   "TARGET_32BIT && arm_arch5te"
11815   [(set_attr "type" "load_4")]
11818 ;; General predication pattern
11821 [(match_operator 0 "arm_comparison_operator"
11822 [(match_operand 1 "cc_register" "")
11825 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
11827 [(set_attr "predicated" "yes")]
11830 (define_insn "force_register_use"
11831 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
11834 [(set_attr "length" "0")
11835 (set_attr "type" "no_insn")]
11839 ;; Patterns for exception handling
11841 (define_expand "eh_return"
11842 [(use (match_operand 0 "general_operand"))]
11847 emit_insn (gen_arm_eh_return (operands[0]));
11849 emit_insn (gen_thumb_eh_return (operands[0]));
11854 ;; We can't expand this before we know where the link register is stored.
11855 (define_insn_and_split "arm_eh_return"
11856 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
11858 (clobber (match_scratch:SI 1 "=&r"))]
11861 "&& reload_completed"
11865 arm_set_return_address (operands[0], operands[1]);
;; load_tp_hard: read the thread pointer with a hardware coprocessor
;; access -- MRC from CP15, c13, c0, opcode2 3 (the user read-only
;; thread ID register).  Modeled as UNSPEC_TLS.
11873 (define_insn "load_tp_hard"
11874   [(set (match_operand:SI 0 "register_operand" "=r")
11875 	(unspec:SI [(const_int 0)] UNSPEC_TLS))]
11877   "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
11878   [(set_attr "predicable" "yes")
11879    (set_attr "type" "mrs")]
11882 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
;; load_tp_soft_fdpic: software thread-pointer read for FDPIC targets:
;; call the EABI helper __aeabi_read_tp, which returns in r0 (hence
;; the hard-wired set of reg 0).  Clobbers the FDPIC register, LR, IP
;; and the condition codes, per the helper's call convention.
11883 (define_insn "load_tp_soft_fdpic"
11884   [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11885    (clobber (reg:SI FDPIC_REGNUM))
11886    (clobber (reg:SI LR_REGNUM))
11887    (clobber (reg:SI IP_REGNUM))
11888    (clobber (reg:CC CC_REGNUM))]
11889   "TARGET_SOFT_TP && TARGET_FDPIC"
11890   "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11891   [(set_attr "conds" "clob")
11892    (set_attr "type" "branch")]
11895 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
;; load_tp_soft: non-FDPIC variant of the software thread-pointer
;; read; same __aeabi_read_tp call but without the FDPIC register
;; clobber.
11896 (define_insn "load_tp_soft"
11897   [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11898    (clobber (reg:SI LR_REGNUM))
11899    (clobber (reg:SI IP_REGNUM))
11900    (clobber (reg:CC CC_REGNUM))]
11901   "TARGET_SOFT_TP && !TARGET_FDPIC"
11902   "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11903   [(set_attr "conds" "clob")
11904    (set_attr "type" "branch")]
11907 ;; tls descriptor call
;; tlscall: TLS-descriptor call.  r0 carries the descriptor in and the
;; resolved value out; operand 1 is the PIC label number, emitted as a
;; local "LPIC<n>" label before the "bl <sym>(tlscall)" relocation.
;; Clobbers r1, LR and the condition codes.
11908 (define_insn "tlscall"
11909   [(set (reg:SI R0_REGNUM)
11910 	(unspec:SI [(reg:SI R0_REGNUM)
11911 		    (match_operand:SI 0 "" "X")
11912 		    (match_operand 1 "" "")] UNSPEC_TLS))
11913    (clobber (reg:SI R1_REGNUM))
11914    (clobber (reg:SI LR_REGNUM))
11915    (clobber (reg:SI CC_REGNUM))]
11918     targetm.asm_out.internal_label (asm_out_file, "LPIC",
11919 				    INTVAL (operands[1]));
11920     return "bl\\t%c0(tlscall)";
11922   [(set_attr "conds" "clob")
11923    (set_attr "length" "4")
11924    (set_attr "type" "branch")]
11927 ;; For thread pointer builtin
11928 (define_expand "get_thread_pointersi"
11929 [(match_operand:SI 0 "s_register_operand")]
11933 arm_load_tp (operands[0]);
11939 ;; We only care about the lower 16 bits of the constant
11940 ;; being inserted into the upper 16 bits of the register.
11941 (define_insn "*arm_movtas_ze"
11942 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
11945 (match_operand:SI 1 "const_int_operand" ""))]
11950 [(set_attr "arch" "32,v8mb")
11951 (set_attr "predicable" "yes")
11952 (set_attr "length" "4")
11953 (set_attr "type" "alu_sreg")]
;; *arm_rev: 32-bit byte swap (bswap:SI) with three alternatives --
;; Thumb-1, Thumb-2 and Arm encodings, per the "arch"/"length" attrs.
;; NOTE(review): the per-alternative assembler templates are missing
;; from this fragment.
11956 (define_insn "*arm_rev"
11957   [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
11958 	(bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
11964   [(set_attr "arch" "t1,t2,32")
11965    (set_attr "length" "2,2,4")
11966    (set_attr "predicable" "no,yes,yes")
11967    (set_attr "type" "rev")]
11970 (define_expand "arm_legacy_rev"
11971 [(set (match_operand:SI 2 "s_register_operand")
11972 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
11976 (lshiftrt:SI (match_dup 2)
11978 (set (match_operand:SI 3 "s_register_operand")
11979 (rotatert:SI (match_dup 1)
11982 (and:SI (match_dup 2)
11983 (const_int -65281)))
11984 (set (match_operand:SI 0 "s_register_operand")
11985 (xor:SI (match_dup 3)
11991 ;; Reuse temporaries to keep register pressure down.
11992 (define_expand "thumb_legacy_rev"
11993 [(set (match_operand:SI 2 "s_register_operand")
11994 (ashift:SI (match_operand:SI 1 "s_register_operand")
11996 (set (match_operand:SI 3 "s_register_operand")
11997 (lshiftrt:SI (match_dup 1)
12000 (ior:SI (match_dup 3)
12002 (set (match_operand:SI 4 "s_register_operand")
12004 (set (match_operand:SI 5 "s_register_operand")
12005 (rotatert:SI (match_dup 1)
12008 (ashift:SI (match_dup 5)
12011 (lshiftrt:SI (match_dup 5)
12014 (ior:SI (match_dup 5)
12017 (rotatert:SI (match_dup 5)
12019 (set (match_operand:SI 0 "s_register_operand")
12020 (ior:SI (match_dup 5)
12026 ;; ARM-specific expansion of signed mod by power of 2
12027 ;; using conditional negate.
12028 ;; For r0 % n where n is a power of 2 produce:
12030 ;; and r0, r0, #(n - 1)
12031 ;; and r1, r1, #(n - 1)
12032 ;; rsbpl r0, r1, #0
12034 (define_expand "modsi3"
12035 [(match_operand:SI 0 "register_operand")
12036 (match_operand:SI 1 "register_operand")
12037 (match_operand:SI 2 "const_int_operand")]
12040 HOST_WIDE_INT val = INTVAL (operands[2]);
12043 || exact_log2 (val) <= 0)
12046 rtx mask = GEN_INT (val - 1);
12048 /* In the special case of x0 % 2 we can do the even shorter:
12051 rsblt r0, r0, #0. */
12055 rtx cc_reg = arm_gen_compare_reg (LT,
12056 operands[1], const0_rtx, NULL_RTX);
12057 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
12058 rtx masked = gen_reg_rtx (SImode);
12060 emit_insn (gen_andsi3 (masked, operands[1], mask));
12061 emit_move_insn (operands[0],
12062 gen_rtx_IF_THEN_ELSE (SImode, cond,
12063 gen_rtx_NEG (SImode,
12069 rtx neg_op = gen_reg_rtx (SImode);
12070 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
12073 /* Extract the condition register and mode. */
12074 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
12075 rtx cc_reg = SET_DEST (cmp);
12076 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
12078 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
12080 rtx masked_neg = gen_reg_rtx (SImode);
12081 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
12083 /* We want a conditional negate here, but emitting COND_EXEC rtxes
12084 during expand does not always work. Do an IF_THEN_ELSE instead. */
12085 emit_move_insn (operands[0],
12086 gen_rtx_IF_THEN_ELSE (SImode, cond,
12087 gen_rtx_NEG (SImode, masked_neg),
12095 (define_expand "bswapsi2"
12096 [(set (match_operand:SI 0 "s_register_operand")
12097 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
12098 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
12102 rtx op2 = gen_reg_rtx (SImode);
12103 rtx op3 = gen_reg_rtx (SImode);
12107 rtx op4 = gen_reg_rtx (SImode);
12108 rtx op5 = gen_reg_rtx (SImode);
12110 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
12111 op2, op3, op4, op5));
12115 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
12124 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
12125 ;; and unsigned variants, respectively. For rev16, expose
12126 ;; byte-swapping in the lower 16 bits only.
;; *arm_revsh: signed 16-bit byte swap -- bswap:HI sign-extended to
;; SImode, matching the REVSH instruction's semantics.  Templates for
;; the three alternatives are not visible in this fragment.
12127 (define_insn "*arm_revsh"
12128   [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12129 	(sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
12135   [(set_attr "arch" "t1,t2,32")
12136    (set_attr "length" "2,2,4")
12137    (set_attr "type" "rev")]
;; *arm_rev16: unsigned 16-bit byte swap in HImode (lower 16 bits
;; only, per the section comment above).  Templates for the three
;; alternatives are not visible in this fragment.
12140 (define_insn "*arm_rev16"
12141   [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
12142 	(bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
12148   [(set_attr "arch" "t1,t2,32")
12149    (set_attr "length" "2,2,4")
12150    (set_attr "type" "rev")]
12153 ;; There are no canonicalisation rules for the position of the lshiftrt, ashift
12154 ;; operations within an IOR/AND RTX, therefore we have two patterns matching
12155 ;; each valid permutation.
;; arm_rev16si2: recognize the open-coded form of REV16 on SImode --
;; (x << s) & mask3 | (x >> s) & mask2 -- with ashift first in the
;; IOR.  The aarch_rev16_sh{left,right}_mask_imm_p checks validate the
;; two masks against the byte-swap-within-halfwords pattern.
12157 (define_insn "arm_rev16si2"
12158   [(set (match_operand:SI 0 "register_operand" "=l,l,r")
12159         (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
12161                         (match_operand:SI 3 "const_int_operand" "n,n,n"))
12162                 (and:SI (lshiftrt:SI (match_dup 1)
12164                         (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
12166    && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
12167    && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
12169   [(set_attr "arch" "t1,t2,32")
12170    (set_attr "length" "2,2,4")
12171    (set_attr "type" "rev")]
;; arm_rev16si2_alt: identical to arm_rev16si2 but with the lshiftrt
;; term first in the IOR -- needed because there is no canonical
;; ordering for the two permutations (see the comment above
;; arm_rev16si2).
12174 (define_insn "arm_rev16si2_alt"
12175   [(set (match_operand:SI 0 "register_operand" "=l,l,r")
12176         (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
12178                         (match_operand:SI 2 "const_int_operand" "n,n,n"))
12179                 (and:SI (ashift:SI (match_dup 1)
12181                         (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
12183    && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
12184    && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
12186   [(set_attr "arch" "t1,t2,32")
12187    (set_attr "length" "2,2,4")
12188    (set_attr "type" "rev")]
12191 (define_expand "bswaphi2"
12192 [(set (match_operand:HI 0 "s_register_operand")
12193 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
12198 ;; Patterns for LDRD/STRD in Thumb2 mode
;; *thumb2_ldrd: fuse two adjacent SImode loads, from base+imm and
;; base+imm+4, into one LDRD.  Only after reload; the INTVAL check
;; enforces adjacency and operands_ok_ldrd_strd validates the
;; destination register pair for the LDRD encoding (presumably
;; rejecting illegal pairs/overlap with the base -- see arm.c).
12200 (define_insn "*thumb2_ldrd"
12201   [(set (match_operand:SI 0 "s_register_operand" "=r")
12202         (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12203                          (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
12204    (set (match_operand:SI 3 "s_register_operand" "=r")
12205         (mem:SI (plus:SI (match_dup 1)
12206                          (match_operand:SI 4 "const_int_operand" ""))))]
12207   "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12208      && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
12209      && (operands_ok_ldrd_strd (operands[0], operands[3],
12210                                   operands[1], INTVAL (operands[2]),
12212   "ldrd%?\t%0, %3, [%1, %2]"
12213   [(set_attr "type" "load_8")
12214    (set_attr "predicable" "yes")])
;; *thumb2_ldrd_base: LDRD variant with zero offset -- loads from
;; [base] and [base + 4] (the +4 constant line is not visible in this
;; fragment but is implied by the offset 0 passed to
;; operands_ok_ldrd_strd and the "[%1]" template).
12216 (define_insn "*thumb2_ldrd_base"
12217   [(set (match_operand:SI 0 "s_register_operand" "=r")
12218         (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12219    (set (match_operand:SI 2 "s_register_operand" "=r")
12220         (mem:SI (plus:SI (match_dup 1)
12222   "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12223      && (operands_ok_ldrd_strd (operands[0], operands[2],
12224                                   operands[1], 0, false, true))"
12225   "ldrd%?\t%0, %2, [%1]"
12226   [(set_attr "type" "load_8")
12227    (set_attr "predicable" "yes")])
;; *thumb2_ldrd_base_neg: LDRD variant for the pair [base - 4] and
;; [base] -- i.e. the lower word sits at a -4 offset, matching the
;; -4 passed to operands_ok_ldrd_strd and the "#-4" in the template.
12229 (define_insn "*thumb2_ldrd_base_neg"
12230   [(set (match_operand:SI 0 "s_register_operand" "=r")
12231 	(mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12233    (set (match_operand:SI 2 "s_register_operand" "=r")
12234 	(mem:SI (match_dup 1)))]
12235   "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12236      && (operands_ok_ldrd_strd (operands[0], operands[2],
12237                                   operands[1], -4, false, true))"
12238   "ldrd%?\t%0, %2, [%1, #-4]"
12239   [(set_attr "type" "load_8")
12240    (set_attr "predicable" "yes")])
;; *thumb2_strd: store-pair counterpart of *thumb2_ldrd -- fuse two
;; adjacent SImode stores, to base+imm and base+imm+4, into one STRD.
;; Adjacency enforced by the INTVAL check; register pair validated by
;; operands_ok_ldrd_strd.
12242 (define_insn "*thumb2_strd"
12243   [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12244                          (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
12245         (match_operand:SI 2 "s_register_operand" "r"))
12246    (set (mem:SI (plus:SI (match_dup 0)
12247                          (match_operand:SI 3 "const_int_operand" "")))
12248         (match_operand:SI 4 "s_register_operand" "r"))]
12249   "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12250      && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
12251      && (operands_ok_ldrd_strd (operands[2], operands[4],
12252                                   operands[0], INTVAL (operands[1]),
12254   "strd%?\t%2, %4, [%0, %1]"
12255   [(set_attr "type" "store_8")
12256    (set_attr "predicable" "yes")])
;; *thumb2_strd_base: STRD variant with zero offset -- stores to
;; [base] and [base + 4] (the +4 constant line is not visible in this
;; fragment but is implied by the offset 0 check and the template).
12258 (define_insn "*thumb2_strd_base"
12259   [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
12260         (match_operand:SI 1 "s_register_operand" "r"))
12261    (set (mem:SI (plus:SI (match_dup 0)
12263         (match_operand:SI 2 "s_register_operand" "r"))]
12264   "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12265      && (operands_ok_ldrd_strd (operands[1], operands[2],
12266                                   operands[0], 0, false, false))"
12267   "strd%?\t%1, %2, [%0]"
12268   [(set_attr "type" "store_8")
12269    (set_attr "predicable" "yes")])
;; *thumb2_strd_base_neg: STRD variant for the pair [base - 4] and
;; [base], mirroring *thumb2_ldrd_base_neg for stores.
12271 (define_insn "*thumb2_strd_base_neg"
12272   [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12274         (match_operand:SI 1 "s_register_operand" "r"))
12275    (set (mem:SI (match_dup 0))
12276         (match_operand:SI 2 "s_register_operand" "r"))]
12277   "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12278      && (operands_ok_ldrd_strd (operands[1], operands[2],
12279                                   operands[0], -4, false, false))"
12280   "strd%?\t%1, %2, [%0, #-4]"
12281   [(set_attr "type" "store_8")
12282    (set_attr "predicable" "yes")])
12284 ;; ARMv8 CRC32 instructions.
;; arm_<crc_variant>: ARMv8 CRC32 instructions, generated from a mode
;; iterator -- <crc_variant> expands to the mnemonic (crc32b/h/w and
;; the crc32c forms) and <crc_mode> to the data operand's mode.
;; Modeled as an unspec over the accumulator (op 1) and data (op 2).
12285 (define_insn "arm_<crc_variant>"
12286   [(set (match_operand:SI 0 "s_register_operand" "=r")
12287         (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
12288                     (match_operand:<crc_mode> 2 "s_register_operand" "r")]
12291    "<crc_variant>\\t%0, %1, %2"
12292    [(set_attr "type" "crc")
12293     (set_attr "conds" "unconditional")]
12296 ;; Load the load/store double peephole optimizations.
12297 (include "ldrdstrd.md")
12299 ;; Load the load/store multiple patterns
12300 (include "ldmstm.md")
12302 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
12303 ;; large lists without explicit writeback generated for APCS_FRAME epilogue.
12304 ;; The operands are validated through the load_multiple_operation
12305 ;; match_parallel predicate rather than through constraints so enable it only
;; *load_multiple: LDM without writeback for register lists larger
;; than the 4 covered by ldmstm.md (per the comment above); validated
;; by the load_multiple_operation predicate and enabled only after
;; reload.  Output goes through arm_output_multireg_pop with
;; return_pc=false.
12307 (define_insn "*load_multiple"
12308   [(match_parallel 0 "load_multiple_operation"
12309     [(set (match_operand:SI 2 "s_register_operand" "=rk")
12310           (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12312   "TARGET_32BIT && reload_completed"
12315     arm_output_multireg_pop (operands, /*return_pc=*/false,
12316                              /*cond=*/const_true_rtx,
12322   [(set_attr "predicable" "yes")]
;; Soft-float single-precision copysign: first copy operand 2 into the
;; result, then use the Thumb-2 bit-field-insert pattern (insv_t2) to
;; overwrite the low 31 bits (the magnitude) with operand 1's, leaving
;; operand 2's sign bit in place.  Both SF values are accessed as SImode
;; via subregs since this is the soft-float path.
12325 (define_expand "copysignsf3"
12326 [(match_operand:SF 0 "register_operand")
12327 (match_operand:SF 1 "register_operand")
12328 (match_operand:SF 2 "register_operand")]
12329 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
12331 emit_move_insn (operands[0], operands[2]);
12332 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
12333 GEN_INT (31), GEN_INT (0),
12334 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
;; Soft-float double-precision copysign.  The sign lives in bit 31 of the
;; high word: shift operand 2's high word right by 31 to isolate its sign
;; bit, insert that single bit at position 31 of a copy of operand 1's
;; high word, then assemble the result from operand 1's low word and the
;; patched high word.
12339 (define_expand "copysigndf3"
12340 [(match_operand:DF 0 "register_operand")
12341 (match_operand:DF 1 "register_operand")
12342 (match_operand:DF 2 "register_operand")]
12343 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
12345 rtx op0_low = gen_lowpart (SImode, operands[0]);
12346 rtx op0_high = gen_highpart (SImode, operands[0]);
12347 rtx op1_low = gen_lowpart (SImode, operands[1]);
12348 rtx op1_high = gen_highpart (SImode, operands[1]);
12349 rtx op2_high = gen_highpart (SImode, operands[2]);
;; Fresh pseudos so the inserts don't clobber the input operands.
12351 rtx scratch1 = gen_reg_rtx (SImode);
12352 rtx scratch2 = gen_reg_rtx (SImode);
12353 emit_move_insn (scratch1, op2_high);
12354 emit_move_insn (scratch2, op1_high);
;; scratch1 = op2_high >> 31 (logical), i.e. op2's sign in bit 0;
;; then insert that one bit at position 31 of scratch2.
12356 emit_insn(gen_rtx_SET(scratch1,
12357 gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT(31))));
12358 emit_insn(gen_insv_t2(scratch2, GEN_INT(1), GEN_INT(31), scratch1));
12359 emit_move_insn (op0_low, op1_low);
12360 emit_move_insn (op0_high, scratch2);
12366 ;; movmisalign patterns for HImode and SImode.
;; Expand a misaligned HI/SI move via the unaligned load/store patterns.
;; Loads of HImode go through an SImode temporary (unaligned_loadhiu
;; zero-extends) and are narrowed back with a lowpart move; stores use
;; gen_unaligned_store<mode> directly.
12367 (define_expand "movmisalign<mode>"
12368 [(match_operand:HSI 0 "general_operand")
12369 (match_operand:HSI 1 "general_operand")]
12372 /* This pattern is not permitted to fail during expansion: if both arguments
12373 are non-registers (e.g. memory := constant), force operand 1 into a
12375 rtx (* gen_unaligned_load)(rtx, rtx);
12376 rtx tmp_dest = operands[0];
;; Guarantee at least one register operand so expansion cannot fail.
12377 if (!s_register_operand (operands[0], <MODE>mode)
12378 && !s_register_operand (operands[1], <MODE>mode))
12379 operands[1] = force_reg (<MODE>mode, operands[1]);
12381 if (<MODE>mode == HImode)
12383 gen_unaligned_load = gen_unaligned_loadhiu;
12384 tmp_dest = gen_reg_rtx (SImode);
12387 gen_unaligned_load = gen_unaligned_loadsi;
;; MEM source => unaligned load; otherwise treat as unaligned store.
12389 if (MEM_P (operands[1]))
12391 emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
12392 if (<MODE>mode == HImode)
12393 emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
12396 emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
;; Coprocessor data-processing (CDP/CDP2, selected by the CDPI iterator).
;; Operands: 0 = coprocessor number, 1 = opc1, 2/3/4 = coprocessor
;; registers CRd/CRn/CRm, 5 = opc2.  arm_const_bounds diagnoses
;; out-of-range immediates at output time.
12401 (define_insn "arm_<cdp>"
12402 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12403 (match_operand:SI 1 "immediate_operand" "n")
12404 (match_operand:SI 2 "immediate_operand" "n")
12405 (match_operand:SI 3 "immediate_operand" "n")
12406 (match_operand:SI 4 "immediate_operand" "n")
12407 (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
12408 "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
12410 arm_const_bounds (operands[0], 0, 16);
12411 arm_const_bounds (operands[1], 0, 16);
12412 arm_const_bounds (operands[2], 0, (1 << 5));
12413 arm_const_bounds (operands[3], 0, (1 << 5));
12414 arm_const_bounds (operands[4], 0, (1 << 5));
12415 arm_const_bounds (operands[5], 0, 8);
12416 return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
12418 [(set_attr "length" "4")
12419 (set_attr "type" "coproc")])
;; Coprocessor load (LDC family via the LDCI iterator): operand 0 is the
;; coprocessor number, operand 1 the destination coprocessor register,
;; operand 2 the memory source (Uz constraint).  Matched by the arm_<ldc>
;; expander below.
12421 (define_insn "*ldc"
12422 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12423 (match_operand:SI 1 "immediate_operand" "n")
12424 (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
12425 "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
12427 arm_const_bounds (operands[0], 0, 16);
12428 arm_const_bounds (operands[1], 0, (1 << 5));
12429 return "<ldc>\\tp%c0, CR%c1, %2";
12431 [(set_attr "length" "4")
12432 (set_attr "type" "coproc")])
;; Coprocessor store (STC family via the STCI iterator): operand 0 is the
;; coprocessor number, operand 1 the source coprocessor register,
;; operand 2 the memory destination.  Matched by the arm_<stc> expander
;; below.
12434 (define_insn "*stc"
12435 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12436 (match_operand:SI 1 "immediate_operand" "n")
12437 (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
12438 "arm_coproc_builtin_available (VUNSPEC_<STC>)"
12440 arm_const_bounds (operands[0], 0, 16);
12441 arm_const_bounds (operands[1], 0, (1 << 5));
12442 return "<stc>\\tp%c0, CR%c1, %2";
12444 [(set_attr "length" "4")
12445 (set_attr "type" "coproc")])
;; Expander for the __builtin_arm_ldc* builtins; wraps the address
;; register in a MEM so the *ldc insn above can match it.
12447 (define_expand "arm_<ldc>"
12448 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
12449 (match_operand:SI 1 "immediate_operand")
12450 (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
12451 "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
;; Expander for the __builtin_arm_stc* builtins; wraps the address
;; register in a MEM so the *stc insn above can match it.
12453 (define_expand "arm_<stc>"
12454 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
12455 (match_operand:SI 1 "immediate_operand")
12456 (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
12457 "arm_coproc_builtin_available (VUNSPEC_<STC>)")
;; Move core register to coprocessor (MCR/MCR2 via the MCRI iterator).
;; Operands: 0 = coprocessor number, 1 = opc1, 2 = core source register,
;; 3/4 = coprocessor registers CRn/CRm, 5 = opc2.  The (use (match_dup 2))
;; keeps the source register live across the volatile unspec.
12459 (define_insn "arm_<mcr>"
12460 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12461 (match_operand:SI 1 "immediate_operand" "n")
12462 (match_operand:SI 2 "s_register_operand" "r")
12463 (match_operand:SI 3 "immediate_operand" "n")
12464 (match_operand:SI 4 "immediate_operand" "n")
12465 (match_operand:SI 5 "immediate_operand" "n")] MCRI)
12466 (use (match_dup 2))]
12467 "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
12469 arm_const_bounds (operands[0], 0, 16);
12470 arm_const_bounds (operands[1], 0, 8);
12471 arm_const_bounds (operands[3], 0, (1 << 5));
12472 arm_const_bounds (operands[4], 0, (1 << 5));
12473 arm_const_bounds (operands[5], 0, 8);
12474 return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
12476 [(set_attr "length" "4")
12477 (set_attr "type" "coproc")])
;; Move coprocessor register to core register (MRC/MRC2 via the MRCI
;; iterator).  Operands: 0 = core destination register, 1 = coprocessor
;; number, 2 = opc1, 3/4 = coprocessor registers CRn/CRm, 5 = opc2.
12479 (define_insn "arm_<mrc>"
12480 [(set (match_operand:SI 0 "s_register_operand" "=r")
12481 (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
12482 (match_operand:SI 2 "immediate_operand" "n")
12483 (match_operand:SI 3 "immediate_operand" "n")
12484 (match_operand:SI 4 "immediate_operand" "n")
12485 (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
12486 "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
12488 arm_const_bounds (operands[1], 0, 16);
12489 arm_const_bounds (operands[2], 0, 8);
12490 arm_const_bounds (operands[3], 0, (1 << 5));
12491 arm_const_bounds (operands[4], 0, (1 << 5));
12492 arm_const_bounds (operands[5], 0, 8);
12493 return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
12495 [(set_attr "length" "4")
12496 (set_attr "type" "coproc")])
;; Move a core register pair to a coprocessor (MCRR/MCRR2 via the MCRRI
;; iterator).  Operands: 0 = coprocessor number, 1 = opc1, 2 = DImode
;; core source pair (%Q2 low word, %R2 high word), 3 = coprocessor
;; register CRm.  The (use (match_dup 2)) keeps the pair live.
12498 (define_insn "arm_<mcrr>"
12499 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12500 (match_operand:SI 1 "immediate_operand" "n")
12501 (match_operand:DI 2 "s_register_operand" "r")
12502 (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
12503 (use (match_dup 2))]
12504 "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
12506 arm_const_bounds (operands[0], 0, 16);
12507 arm_const_bounds (operands[1], 0, 8);
12508 arm_const_bounds (operands[3], 0, (1 << 5));
12509 return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
12511 [(set_attr "length" "4")
12512 (set_attr "type" "coproc")])
;; Move a coprocessor register to a core register pair (MRRC/MRRC2 via
;; the MRRCI iterator).  Operands: 0 = DImode core destination pair
;; (%Q0 low word, %R0 high word), 1 = coprocessor number, 2 = opc1,
;; 3 = coprocessor register CRm.
12514 (define_insn "arm_<mrrc>"
12515 [(set (match_operand:DI 0 "s_register_operand" "=r")
12516 (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
12517 (match_operand:SI 2 "immediate_operand" "n")
12518 (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
12519 "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
12521 arm_const_bounds (operands[1], 0, 16);
12522 arm_const_bounds (operands[2], 0, 8);
12523 arm_const_bounds (operands[3], 0, (1 << 5));
12524 return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
12526 [(set_attr "length" "4")
12527 (set_attr "type" "coproc")])
;; Expand a speculation barrier.  On Armv7/Armv8 the insn pattern below
;; handles it directly; on older architectures we instead call a libgcc
;; helper (arm_emit_speculation_barrier_function) so the code stays safe
;; if executed on a later core.
12529 (define_expand "speculation_barrier"
12530 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
12533 /* For thumb1 (except Armv8 derivatives), and for pre-Armv7 we don't
12534 have a usable barrier (and probably don't need one in practice).
12535 But to be safe if such code is run on later architectures, call a
12536 helper function in libgcc that will do the thing for the active
12538 if (!(arm_arch7 || arm_arch8))
12540 arm_emit_speculation_barrier_function ();
12546 ;; Generate a hard speculation barrier when we have not enabled speculation
;; Inline speculation barrier for Armv7/Armv8; length 8 implies a
;; two-instruction sequence.  NOTE(review): the output template line is
;; not visible in this view -- confirm against upstream arm.md.
12548 (define_insn "*speculation_barrier_insn"
12549 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
12550 "arm_arch7 || arm_arch8"
12552 [(set_attr "type" "block")
12553 (set_attr "length" "8")]
12556 ;; Vector bits common to IWMMXT and Neon
12557 (include "vec-common.md")
12558 ;; Load the Intel Wireless Multimedia Extension patterns
12559 (include "iwmmxt.md")
12560 ;; Load the VFP co-processor patterns
12562 ;; Thumb-1 patterns
12563 (include "thumb1.md")
12564 ;; Thumb-2 patterns
12565 (include "thumb2.md")
12567 (include "neon.md")
12569 (include "crypto.md")
12570 ;; Synchronization Primitives
12571 (include "sync.md")
12572 ;; Fixed-point patterns
12573 (include "arm-fixed.md")