1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2019 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
7 ;; This file is part of GCC.
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
26 ;;---------------------------------------------------------------------------
29 ;; Register numbers -- All machine registers should be defined here
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (R4_REGNUM 4) ; Fifth CORE register
34 (FDPIC_REGNUM 9) ; FDPIC register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (LAST_ARM_REGNUM 15) ;
40 (CC_REGNUM 100) ; Condition code pseudo register
41 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
44 ;; 3rd operand to select_dominance_cc_mode
51 ;; conditional compare combination
62 ;;---------------------------------------------------------------------------
65 ;; Processor type. This is created automatically from arm-cores.def.
66 (include "arm-tune.md")
68 ;; Instruction classification types
71 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
72 ; generating ARM code. This is used to control the length of some insn
73 ; patterns that share the same RTL in both ARM and Thumb code.
74 (define_attr "is_thumb" "yes,no"
75 (const (if_then_else (symbol_ref "TARGET_THUMB")
76 (const_string "yes") (const_string "no"))))
78 ; IS_ARCH6 is set to 'yes' when we are generating code form ARMv6.
79 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
81 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
82 (define_attr "is_thumb1" "yes,no"
83 (const (if_then_else (symbol_ref "TARGET_THUMB1")
84 (const_string "yes") (const_string "no"))))
86 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
87 ; The arm_restrict_it flag enables the "short IT" feature which
88 ; restricts IT blocks to a single 16-bit instruction.
89 ; This attribute should only be used on 16-bit Thumb-2 instructions
90 ; which may be predicated (the "predicable" attribute must be set).
91 (define_attr "predicable_short_it" "no,yes" (const_string "no"))
93 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
94 ; This attribute should only be used on instructions which may emit
95 ; an IT block in their expansion which is not a short IT.
96 (define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
98 ;; Operand number of an input operand that is shifted. Zero if the
99 ;; given instruction does not shift one of its input operands.
100 (define_attr "shift" "" (const_int 0))
102 ;; [For compatibility with AArch64 in pipeline models]
103 ;; Attribute that specifies whether or not the instruction touches fp
105 (define_attr "fp" "no,yes" (const_string "no"))
107 ; Floating Point Unit. If we only have floating point emulation, then there
108 ; is no point in scheduling the floating point insns. (Well, for best
109 ; performance we should try and group them together).
110 (define_attr "fpu" "none,vfp"
111 (const (symbol_ref "arm_fpu_attr")))
113 ; Predicated means that the insn form is conditionally executed based on a
114 ; predicate. We default to 'no' because no Thumb patterns match this rule
115 ; and not all ARM insns do.
116 (define_attr "predicated" "yes,no" (const_string "no"))
118 ; LENGTH of an instruction (in bytes)
119 (define_attr "length" ""
122 ; The architecture which supports the instruction (or alternative).
123 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
124 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
125 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
126 ; arm_arch6. "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
127 ; Baseline. This attribute is used to compute attribute "enabled",
128 ; use type "any" to enable an alternative in all cases.
129 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,v8mb,iwmmxt,iwmmxt2,armv6_or_vfpv3,neon"
130 (const_string "any"))
132 (define_attr "arch_enabled" "no,yes"
133 (cond [(eq_attr "arch" "any")
136 (and (eq_attr "arch" "a")
137 (match_test "TARGET_ARM"))
140 (and (eq_attr "arch" "t")
141 (match_test "TARGET_THUMB"))
144 (and (eq_attr "arch" "t1")
145 (match_test "TARGET_THUMB1"))
148 (and (eq_attr "arch" "t2")
149 (match_test "TARGET_THUMB2"))
152 (and (eq_attr "arch" "32")
153 (match_test "TARGET_32BIT"))
156 (and (eq_attr "arch" "v6")
157 (match_test "TARGET_32BIT && arm_arch6"))
160 (and (eq_attr "arch" "nov6")
161 (match_test "TARGET_32BIT && !arm_arch6"))
164 (and (eq_attr "arch" "v6t2")
165 (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
168 (and (eq_attr "arch" "v8mb")
169 (match_test "TARGET_THUMB1 && arm_arch8"))
172 (and (eq_attr "arch" "iwmmxt2")
173 (match_test "TARGET_REALLY_IWMMXT2"))
176 (and (eq_attr "arch" "armv6_or_vfpv3")
177 (match_test "arm_arch6 || TARGET_VFP3"))
180 (and (eq_attr "arch" "neon")
181 (match_test "TARGET_NEON"))
185 (const_string "no")))
187 (define_attr "opt" "any,speed,size"
188 (const_string "any"))
190 (define_attr "opt_enabled" "no,yes"
191 (cond [(eq_attr "opt" "any")
194 (and (eq_attr "opt" "speed")
195 (match_test "optimize_function_for_speed_p (cfun)"))
198 (and (eq_attr "opt" "size")
199 (match_test "optimize_function_for_size_p (cfun)"))
200 (const_string "yes")]
201 (const_string "no")))
203 (define_attr "use_literal_pool" "no,yes"
204 (cond [(and (eq_attr "type" "f_loads,f_loadd")
205 (match_test "CONSTANT_P (operands[1])"))
206 (const_string "yes")]
207 (const_string "no")))
209 ; Enable all alternatives that are both arch_enabled and insn_enabled.
210 ; FIXME:: opt_enabled has been temporarily removed till the time we have
211 ; an attribute that allows the use of such alternatives.
212 ; This depends on caching of speed_p, size_p on a per
213 ; alternative basis. The problem is that the enabled attribute
214 ; cannot depend on any state that is not cached or is not constant
215 ; for a compilation unit. We probably need a generic "hot/cold"
216 ; alternative which if implemented can help with this. We disable this
217 ; until such a time as this is implemented and / or the improvements or
218 ; regressions with removing this attribute are double checked.
219 ; See ashldi3_neon and <shift>di3_neon in neon.md.
221 (define_attr "enabled" "no,yes"
222 (cond [(and (eq_attr "predicable_short_it" "no")
223 (and (eq_attr "predicated" "yes")
224 (match_test "arm_restrict_it")))
227 (and (eq_attr "enabled_for_short_it" "no")
228 (match_test "arm_restrict_it"))
231 (eq_attr "arch_enabled" "no")
233 (const_string "yes")))
235 ; POOL_RANGE is how far away from a constant pool entry that this insn
236 ; can be placed. If the distance is zero, then this insn will never
237 ; reference the pool.
238 ; Note that for Thumb constant pools the PC value is rounded down to the
239 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
240 ; Thumb insns) should be set to <max_range> - 2.
241 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
242 ; before its address. It is set to <max_range> - (8 + <data_size>).
243 (define_attr "arm_pool_range" "" (const_int 0))
244 (define_attr "thumb2_pool_range" "" (const_int 0))
245 (define_attr "arm_neg_pool_range" "" (const_int 0))
246 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
248 (define_attr "pool_range" ""
249 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
250 (attr "arm_pool_range")))
251 (define_attr "neg_pool_range" ""
252 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
253 (attr "arm_neg_pool_range")))
255 ; An assembler sequence may clobber the condition codes without us knowing.
256 ; If such an insn references the pool, then we have no way of knowing how,
257 ; so use the most conservative value for pool_range.
258 (define_asm_attributes
259 [(set_attr "conds" "clob")
260 (set_attr "length" "4")
261 (set_attr "pool_range" "250")])
263 ; Load scheduling, set from the arm_ld_sched variable
264 ; initialized by arm_option_override()
265 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
267 ; condition codes: this one is used by final_prescan_insn to speed up
268 ; conditionalizing instructions. It saves having to scan the rtl to see if
269 ; it uses or alters the condition codes.
271 ; USE means that the condition codes are used by the insn in the process of
272 ; outputting code, this means (at present) that we can't use the insn in
275 ; SET means that the purpose of the insn is to set the condition codes in a
276 ; well defined manner.
278 ; CLOB means that the condition codes are altered in an undefined manner, if
279 ; they are altered at all
281 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
282 ; that the instruction does not use or alter the condition codes.
284 ; NOCOND means that the instruction does not use or alter the condition
285 ; codes but can be converted into a conditionally exectuted instruction.
287 (define_attr "conds" "use,set,clob,unconditional,nocond"
289 (ior (eq_attr "is_thumb1" "yes")
290 (eq_attr "type" "call"))
291 (const_string "clob")
292 (if_then_else (eq_attr "is_neon_type" "no")
293 (const_string "nocond")
294 (const_string "unconditional"))))
296 ; Predicable means that the insn can be conditionally executed based on
297 ; an automatically added predicate (additional patterns are generated by
298 ; gen...). We default to 'no' because no Thumb patterns match this rule
299 ; and not all ARM patterns do.
300 (define_attr "predicable" "no,yes" (const_string "no"))
302 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
303 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
304 ; suffer blockages enough to warrant modelling this (and it can adversely
305 ; affect the schedule).
306 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
308 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
309 ; to stall the processor. Used with model_wbuf above.
310 (define_attr "write_conflict" "no,yes"
311 (if_then_else (eq_attr "type"
314 (const_string "no")))
316 ; Classify the insns into those that take one cycle and those that take more
317 ; than one on the main cpu execution unit.
318 (define_attr "core_cycles" "single,multi"
319 (if_then_else (eq_attr "type"
320 "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
321 alu_shift_imm, alu_shift_reg, alu_dsp_reg, alus_ext, alus_imm, alus_sreg,\
322 alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
323 logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
324 logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
325 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
326 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
327 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
328 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
329 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
330 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
331 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
332 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
333 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
334 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
335 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
336 (const_string "single")
337 (const_string "multi")))
339 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
340 ;; distant label. Only applicable to Thumb code.
341 (define_attr "far_jump" "yes,no" (const_string "no"))
344 ;; The number of machine instructions this pattern expands to.
345 ;; Used for Thumb-2 conditional execution.
346 (define_attr "ce_count" "" (const_int 1))
348 ;;---------------------------------------------------------------------------
351 (include "unspecs.md")
353 ;;---------------------------------------------------------------------------
356 (include "iterators.md")
358 ;;---------------------------------------------------------------------------
361 (include "predicates.md")
362 (include "constraints.md")
364 ;;---------------------------------------------------------------------------
365 ;; Pipeline descriptions
367 (define_attr "tune_cortexr4" "yes,no"
369 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
371 (const_string "no"))))
373 ;; True if the generic scheduling description should be used.
375 (define_attr "generic_sched" "yes,no"
377 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
378 arm926ejs,arm10e,arm1026ejs,arm1136js,\
379 arm1136jfs,cortexa5,cortexa7,cortexa8,\
380 cortexa9,cortexa12,cortexa15,cortexa17,\
381 cortexa53,cortexa57,cortexm4,cortexm7,\
382 exynosm1,marvell_pj4,xgene1")
383 (eq_attr "tune_cortexr4" "yes"))
385 (const_string "yes"))))
387 (define_attr "generic_vfp" "yes,no"
389 (and (eq_attr "fpu" "vfp")
390 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
391 cortexa8,cortexa9,cortexa53,cortexm4,\
392 cortexm7,marvell_pj4,xgene1")
393 (eq_attr "tune_cortexr4" "no"))
395 (const_string "no"))))
397 (include "marvell-f-iwmmxt.md")
398 (include "arm-generic.md")
399 (include "arm926ejs.md")
400 (include "arm1020e.md")
401 (include "arm1026ejs.md")
402 (include "arm1136jfs.md")
404 (include "fa606te.md")
405 (include "fa626te.md")
406 (include "fmp626.md")
407 (include "fa726te.md")
408 (include "cortex-a5.md")
409 (include "cortex-a7.md")
410 (include "cortex-a8.md")
411 (include "cortex-a9.md")
412 (include "cortex-a15.md")
413 (include "cortex-a17.md")
414 (include "cortex-a53.md")
415 (include "cortex-a57.md")
416 (include "cortex-r4.md")
417 (include "cortex-r4f.md")
418 (include "cortex-m7.md")
419 (include "cortex-m4.md")
420 (include "cortex-m4-fpu.md")
421 (include "exynos-m1.md")
423 (include "marvell-pj4.md")
424 (include "xgene1.md")
427 ;;---------------------------------------------------------------------------
432 ;; Note: For DImode insns, there is normally no reason why operands should
433 ;; not be in the same register, what we don't want is for something being
434 ;; written to partially overlap something that is an input.
436 (define_expand "adddi3"
438 [(set (match_operand:DI 0 "s_register_operand")
439 (plus:DI (match_operand:DI 1 "s_register_operand")
440 (match_operand:DI 2 "reg_or_int_operand")))
441 (clobber (reg:CC CC_REGNUM))])]
446 if (!REG_P (operands[2]))
447 operands[2] = force_reg (DImode, operands[2]);
451 rtx lo_result, hi_result, lo_dest, hi_dest;
452 rtx lo_op1, hi_op1, lo_op2, hi_op2;
453 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
455 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
456 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
458 if (lo_op2 == const0_rtx)
461 if (!arm_add_operand (hi_op2, SImode))
462 hi_op2 = force_reg (SImode, hi_op2);
463 /* Assume hi_op2 won't also be zero. */
464 emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
468 if (!arm_add_operand (lo_op2, SImode))
469 lo_op2 = force_reg (SImode, lo_op2);
470 if (!arm_not_operand (hi_op2, SImode))
471 hi_op2 = force_reg (SImode, hi_op2);
473 emit_insn (gen_addsi3_compare_op1 (lo_dest, lo_op1, lo_op2));
474 rtx carry = gen_rtx_LTU (SImode, gen_rtx_REG (CC_Cmode, CC_REGNUM),
476 if (hi_op2 == const0_rtx)
477 emit_insn (gen_add0si3_carryin (hi_dest, hi_op1, carry));
479 emit_insn (gen_addsi3_carryin (hi_dest, hi_op1, hi_op2, carry));
482 if (lo_result != lo_dest)
483 emit_move_insn (lo_result, lo_dest);
484 if (hi_result != hi_dest)
485 emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
491 (define_expand "addvsi4"
492 [(match_operand:SI 0 "s_register_operand")
493 (match_operand:SI 1 "s_register_operand")
494 (match_operand:SI 2 "arm_add_operand")
495 (match_operand 3 "")]
498 if (CONST_INT_P (operands[2]))
499 emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1], operands[2]));
501 emit_insn (gen_addsi3_compareV_reg (operands[0], operands[1], operands[2]));
502 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
507 (define_expand "addvdi4"
508 [(match_operand:DI 0 "s_register_operand")
509 (match_operand:DI 1 "s_register_operand")
510 (match_operand:DI 2 "reg_or_int_operand")
511 (match_operand 3 "")]
514 rtx lo_result, hi_result;
515 rtx lo_op1, hi_op1, lo_op2, hi_op2;
516 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
518 lo_result = gen_lowpart (SImode, operands[0]);
519 hi_result = gen_highpart (SImode, operands[0]);
521 if (lo_op2 == const0_rtx)
523 emit_move_insn (lo_result, lo_op1);
524 if (!arm_add_operand (hi_op2, SImode))
525 hi_op2 = force_reg (SImode, hi_op2);
527 emit_insn (gen_addvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
531 if (!arm_add_operand (lo_op2, SImode))
532 lo_op2 = force_reg (SImode, lo_op2);
533 if (!arm_not_operand (hi_op2, SImode))
534 hi_op2 = force_reg (SImode, hi_op2);
536 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
538 if (hi_op2 == const0_rtx)
539 emit_insn (gen_addsi3_cin_vout_0 (hi_result, hi_op1));
540 else if (CONST_INT_P (hi_op2))
541 emit_insn (gen_addsi3_cin_vout_imm (hi_result, hi_op1, hi_op2));
543 emit_insn (gen_addsi3_cin_vout_reg (hi_result, hi_op1, hi_op2));
545 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
551 (define_expand "addsi3_cin_vout_reg"
556 (plus:DI (match_dup 4)
557 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
558 (sign_extend:DI (match_operand:SI 2 "s_register_operand")))
559 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
561 (set (match_operand:SI 0 "s_register_operand")
562 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
566 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
567 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
568 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
569 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
573 (define_insn "*addsi3_cin_vout_reg_insn"
574 [(set (reg:CC_V CC_REGNUM)
578 (match_operand:DI 3 "arm_carry_operation" "")
579 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
580 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
582 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
585 (set (match_operand:SI 0 "s_register_operand" "=l,r")
586 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
592 [(set_attr "type" "alus_sreg")
593 (set_attr "arch" "t2,*")
594 (set_attr "length" "2,4")]
597 (define_expand "addsi3_cin_vout_imm"
602 (plus:DI (match_dup 4)
603 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
605 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
607 (set (match_operand:SI 0 "s_register_operand")
608 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
609 (match_operand 2 "arm_adcimm_operand")))])]
612 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
613 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
614 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
615 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
619 (define_insn "*addsi3_cin_vout_imm_insn"
620 [(set (reg:CC_V CC_REGNUM)
624 (match_operand:DI 3 "arm_carry_operation" "")
625 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
626 (match_operand 2 "arm_adcimm_operand" "I,K"))
628 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
631 (set (match_operand:SI 0 "s_register_operand" "=r,r")
632 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
637 sbcs%?\\t%0, %1, #%B2"
638 [(set_attr "type" "alus_imm")]
641 (define_expand "addsi3_cin_vout_0"
645 (plus:DI (match_dup 3)
646 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
647 (sign_extend:DI (plus:SI (match_dup 4) (match_dup 1)))))
648 (set (match_operand:SI 0 "s_register_operand")
649 (plus:SI (match_dup 4) (match_dup 1)))])]
652 operands[2] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
653 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
654 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
655 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
659 (define_insn "*addsi3_cin_vout_0_insn"
660 [(set (reg:CC_V CC_REGNUM)
663 (match_operand:DI 2 "arm_carry_operation" "")
664 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
665 (sign_extend:DI (plus:SI
666 (match_operand:SI 3 "arm_carry_operation" "")
668 (set (match_operand:SI 0 "s_register_operand" "=r")
669 (plus:SI (match_dup 3) (match_dup 1)))]
671 "adcs%?\\t%0, %1, #0"
672 [(set_attr "type" "alus_imm")]
675 (define_expand "uaddvsi4"
676 [(match_operand:SI 0 "s_register_operand")
677 (match_operand:SI 1 "s_register_operand")
678 (match_operand:SI 2 "arm_add_operand")
679 (match_operand 3 "")]
682 emit_insn (gen_addsi3_compare_op1 (operands[0], operands[1], operands[2]));
683 arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
688 (define_expand "uaddvdi4"
689 [(match_operand:DI 0 "s_register_operand")
690 (match_operand:DI 1 "s_register_operand")
691 (match_operand:DI 2 "reg_or_int_operand")
692 (match_operand 3 "")]
695 rtx lo_result, hi_result;
696 rtx lo_op1, hi_op1, lo_op2, hi_op2;
697 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
699 lo_result = gen_lowpart (SImode, operands[0]);
700 hi_result = gen_highpart (SImode, operands[0]);
702 if (lo_op2 == const0_rtx)
704 emit_move_insn (lo_result, lo_op1);
705 if (!arm_add_operand (hi_op2, SImode))
706 hi_op2 = force_reg (SImode, hi_op2);
708 gen_uaddvsi4 (hi_result, hi_op1, hi_op2, operands[3]);
712 if (!arm_add_operand (lo_op2, SImode))
713 lo_op2 = force_reg (SImode, lo_op2);
714 if (!arm_not_operand (hi_op2, SImode))
715 hi_op2 = force_reg (SImode, hi_op2);
717 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
719 if (hi_op2 == const0_rtx)
720 emit_insn (gen_addsi3_cin_cout_0 (hi_result, hi_op1));
721 else if (CONST_INT_P (hi_op2))
722 emit_insn (gen_addsi3_cin_cout_imm (hi_result, hi_op1, hi_op2));
724 emit_insn (gen_addsi3_cin_cout_reg (hi_result, hi_op1, hi_op2));
726 arm_gen_unlikely_cbranch (GEU, CC_ADCmode, operands[3]);
732 (define_expand "addsi3_cin_cout_reg"
737 (plus:DI (match_dup 4)
738 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
739 (zero_extend:DI (match_operand:SI 2 "s_register_operand")))
740 (const_int 4294967296)))
741 (set (match_operand:SI 0 "s_register_operand")
742 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
746 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
747 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
748 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
749 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
753 (define_insn "*addsi3_cin_cout_reg_insn"
754 [(set (reg:CC_ADC CC_REGNUM)
758 (match_operand:DI 3 "arm_carry_operation" "")
759 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
760 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
761 (const_int 4294967296)))
762 (set (match_operand:SI 0 "s_register_operand" "=l,r")
763 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
770 [(set_attr "type" "alus_sreg")
771 (set_attr "arch" "t2,*")
772 (set_attr "length" "2,4")]
775 (define_expand "addsi3_cin_cout_imm"
780 (plus:DI (match_dup 4)
781 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
783 (const_int 4294967296)))
784 (set (match_operand:SI 0 "s_register_operand")
785 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
786 (match_operand:SI 2 "arm_adcimm_operand")))])]
789 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
790 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
791 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
792 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
793 operands[6] = GEN_INT (UINTVAL (operands[2]) & 0xffffffff);
797 (define_insn "*addsi3_cin_cout_imm_insn"
798 [(set (reg:CC_ADC CC_REGNUM)
802 (match_operand:DI 3 "arm_carry_operation" "")
803 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
804 (match_operand:DI 5 "const_int_operand" "n,n"))
805 (const_int 4294967296)))
806 (set (match_operand:SI 0 "s_register_operand" "=r,r")
807 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
809 (match_operand:SI 2 "arm_adcimm_operand" "I,K")))]
811 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[5])"
814 sbcs%?\\t%0, %1, #%B2"
815 [(set_attr "type" "alus_imm")]
818 (define_expand "addsi3_cin_cout_0"
822 (plus:DI (match_dup 3)
823 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
824 (const_int 4294967296)))
825 (set (match_operand:SI 0 "s_register_operand")
826 (plus:SI (match_dup 4) (match_dup 1)))])]
829 operands[2] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
830 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
831 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
832 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
836 (define_insn "*addsi3_cin_cout_0_insn"
837 [(set (reg:CC_ADC CC_REGNUM)
840 (match_operand:DI 2 "arm_carry_operation" "")
841 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
842 (const_int 4294967296)))
843 (set (match_operand:SI 0 "s_register_operand" "=r")
844 (plus:SI (match_operand:SI 3 "arm_carry_operation" "") (match_dup 1)))]
846 "adcs%?\\t%0, %1, #0"
847 [(set_attr "type" "alus_imm")]
850 (define_expand "addsi3"
851 [(set (match_operand:SI 0 "s_register_operand")
852 (plus:SI (match_operand:SI 1 "s_register_operand")
853 (match_operand:SI 2 "reg_or_int_operand")))]
856 if (TARGET_32BIT && CONST_INT_P (operands[2]))
858 arm_split_constant (PLUS, SImode, NULL_RTX,
859 INTVAL (operands[2]), operands[0], operands[1],
860 optimize && can_create_pseudo_p ());
866 ; If there is a scratch available, this will be faster than synthesizing the
869 [(match_scratch:SI 3 "r")
870 (set (match_operand:SI 0 "arm_general_register_operand" "")
871 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
872 (match_operand:SI 2 "const_int_operand" "")))]
874 !(const_ok_for_arm (INTVAL (operands[2]))
875 || const_ok_for_arm (-INTVAL (operands[2])))
876 && const_ok_for_arm (~INTVAL (operands[2]))"
877 [(set (match_dup 3) (match_dup 2))
878 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
882 ;; The r/r/k alternative is required when reloading the address
883 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
884 ;; put the duplicated register first, and not try the commutative version.
885 (define_insn_and_split "*arm_addsi3"
886 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
887 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
888 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
904 subw%?\\t%0, %1, #%n2
905 subw%?\\t%0, %1, #%n2
908 && CONST_INT_P (operands[2])
909 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
910 && (reload_completed || !arm_eliminable_register (operands[1]))"
911 [(clobber (const_int 0))]
913 arm_split_constant (PLUS, SImode, curr_insn,
914 INTVAL (operands[2]), operands[0],
918 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
919 (set_attr "predicable" "yes")
920 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
921 (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
922 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
923 (const_string "alu_imm")
924 (const_string "alu_sreg")))
928 (define_insn "addsi3_compareV_reg"
929 [(set (reg:CC_V CC_REGNUM)
932 (sign_extend:DI (match_operand:SI 1 "register_operand" "%l,0,r"))
933 (sign_extend:DI (match_operand:SI 2 "register_operand" "l,r,r")))
934 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
935 (set (match_operand:SI 0 "register_operand" "=l,r,r")
936 (plus:SI (match_dup 1) (match_dup 2)))]
938 "adds%?\\t%0, %1, %2"
939 [(set_attr "conds" "set")
940 (set_attr "arch" "t2,t2,*")
941 (set_attr "length" "2,2,4")
942 (set_attr "type" "alus_sreg")]
945 (define_insn "*addsi3_compareV_reg_nosum"
946 [(set (reg:CC_V CC_REGNUM)
949 (sign_extend:DI (match_operand:SI 0 "register_operand" "%l,r"))
950 (sign_extend:DI (match_operand:SI 1 "register_operand" "l,r")))
951 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
954 [(set_attr "conds" "set")
955 (set_attr "arch" "t2,*")
956 (set_attr "length" "2,4")
957 (set_attr "type" "alus_sreg")]
960 (define_insn "addsi3_compareV_imm"
961 [(set (reg:CC_V CC_REGNUM)
965 (match_operand:SI 1 "register_operand" "l,0,l,0,r,r"))
966 (match_operand 2 "arm_addimm_operand" "Pd,Py,Px,Pw,I,L"))
967 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
968 (set (match_operand:SI 0 "register_operand" "=l,l,l,l,r,r")
969 (plus:SI (match_dup 1) (match_dup 2)))]
971 && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
975 subs%?\\t%0, %1, #%n2
976 subs%?\\t%0, %0, #%n2
978 subs%?\\t%0, %1, #%n2"
979 [(set_attr "conds" "set")
980 (set_attr "arch" "t2,t2,t2,t2,*,*")
981 (set_attr "length" "2,2,2,2,4,4")
982 (set_attr "type" "alus_imm")]
985 (define_insn "addsi3_compareV_imm_nosum"
986 [(set (reg:CC_V CC_REGNUM)
990 (match_operand:SI 0 "register_operand" "l,r,r"))
991 (match_operand 1 "arm_addimm_operand" "Pw,I,L"))
992 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
994 && INTVAL (operands[1]) == ARM_SIGN_EXTEND (INTVAL (operands[1]))"
999 [(set_attr "conds" "set")
1000 (set_attr "arch" "t2,*,*")
1001 (set_attr "length" "2,4,4")
1002 (set_attr "type" "alus_imm")]
1005 ;; We can handle more constants efficently if we can clobber either a scratch
1006 ;; or the other source operand. We deliberately leave this late as in
1007 ;; high register pressure situations it's not worth forcing any reloads.
1009 [(match_scratch:SI 2 "l")
1010 (set (reg:CC_V CC_REGNUM)
1014 (match_operand:SI 0 "low_register_operand"))
1015 (match_operand 1 "const_int_operand"))
1016 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1018 && satisfies_constraint_Pd (operands[1])"
1020 (set (reg:CC_V CC_REGNUM)
1022 (plus:DI (sign_extend:DI (match_dup 0))
1023 (sign_extend:DI (match_dup 1)))
1024 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1025 (set (match_dup 2) (plus:SI (match_dup 0) (match_dup 1)))])]
1029 [(set (reg:CC_V CC_REGNUM)
1033 (match_operand:SI 0 "low_register_operand"))
1034 (match_operand 1 "const_int_operand"))
1035 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1037 && dead_or_set_p (peep2_next_insn (0), operands[0])
1038 && satisfies_constraint_Py (operands[1])"
1040 (set (reg:CC_V CC_REGNUM)
1042 (plus:DI (sign_extend:DI (match_dup 0))
1043 (sign_extend:DI (match_dup 1)))
1044 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1045 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 1)))])]
1048 (define_insn "addsi3_compare0"
1049 [(set (reg:CC_NOOV CC_REGNUM)
1051 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
1052 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
1054 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1055 (plus:SI (match_dup 1) (match_dup 2)))]
1059 subs%?\\t%0, %1, #%n2
1060 adds%?\\t%0, %1, %2"
1061 [(set_attr "conds" "set")
1062 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
1065 (define_insn "*addsi3_compare0_scratch"
1066 [(set (reg:CC_NOOV CC_REGNUM)
1068 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
1069 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
1076 [(set_attr "conds" "set")
1077 (set_attr "predicable" "yes")
1078 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
1081 (define_insn "*compare_negsi_si"
1082 [(set (reg:CC_Z CC_REGNUM)
1084 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
1085 (match_operand:SI 1 "s_register_operand" "l,r")))]
1088 [(set_attr "conds" "set")
1089 (set_attr "predicable" "yes")
1090 (set_attr "arch" "t2,*")
1091 (set_attr "length" "2,4")
1092 (set_attr "predicable_short_it" "yes,no")
1093 (set_attr "type" "alus_sreg")]
1096 ;; This is the canonicalization of subsi3_compare when the
1097 ;; addend is a constant.
1098 (define_insn "cmpsi2_addneg"
1099 [(set (reg:CC CC_REGNUM)
1101 (match_operand:SI 1 "s_register_operand" "r,r")
1102 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
1103 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1104 (plus:SI (match_dup 1)
1105 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
1107 && (INTVAL (operands[2])
1108 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
1110 /* For 0 and INT_MIN it is essential that we use subs, as adds will result
1111 in different condition codes (like cmn rather than like cmp), so that
1112 alternative comes first. Both alternatives can match for any 0x??000000
1113 where except for 0 and INT_MIN it doesn't matter what we choose, and also
1114 for -1 and 1 with TARGET_THUMB2, in that case prefer instruction with #1
1115 as it is shorter. */
1116 if (which_alternative == 0 && operands[3] != const1_rtx)
1117 return "subs%?\\t%0, %1, #%n3";
1119 return "adds%?\\t%0, %1, %3";
1121 [(set_attr "conds" "set")
1122 (set_attr "type" "alus_sreg")]
1125 ;; Convert the sequence
1127 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
1131 ;; bcs dest ((unsigned)rn >= 1)
1132 ;; similarly for the beq variant using bcc.
1133 ;; This is a common looping idiom (while (n--))
1135 [(set (match_operand:SI 0 "arm_general_register_operand" "")
1136 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
1138 (set (match_operand 2 "cc_register" "")
1139 (compare (match_dup 0) (const_int -1)))
1141 (if_then_else (match_operator 3 "equality_operator"
1142 [(match_dup 2) (const_int 0)])
1143 (match_operand 4 "" "")
1144 (match_operand 5 "" "")))]
1145 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
1149 (match_dup 1) (const_int 1)))
1150 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
1152 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
1155 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
1156 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
1159 operands[2], const0_rtx);"
1162 ;; The next four insns work because they compare the result with one of
1163 ;; the operands, and we know that the use of the condition code is
1164 ;; either GEU or LTU, so we can use the carry flag from the addition
1165 ;; instead of doing the compare a second time.
1166 (define_insn "addsi3_compare_op1"
1167 [(set (reg:CC_C CC_REGNUM)
1169 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,rk,rk")
1170 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rkI,L"))
1172 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,rk,rk")
1173 (plus:SI (match_dup 1) (match_dup 2)))]
1178 subs%?\\t%0, %1, #%n2
1179 subs%?\\t%0, %0, #%n2
1181 subs%?\\t%0, %1, #%n2"
1182 [(set_attr "conds" "set")
1183 (set_attr "arch" "t2,t2,t2,t2,*,*")
1184 (set_attr "length" "2,2,2,2,4,4")
1186 (if_then_else (match_operand 2 "const_int_operand")
1187 (const_string "alu_imm")
1188 (const_string "alu_sreg")))]
1191 (define_insn "*addsi3_compare_op2"
1192 [(set (reg:CC_C CC_REGNUM)
1194 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r")
1195 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rI,L"))
1197 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r")
1198 (plus:SI (match_dup 1) (match_dup 2)))]
1203 subs%?\\t%0, %1, #%n2
1204 subs%?\\t%0, %0, #%n2
1206 subs%?\\t%0, %1, #%n2"
1207 [(set_attr "conds" "set")
1208 (set_attr "arch" "t2,t2,t2,t2,*,*")
1209 (set_attr "length" "2,2,2,2,4,4")
1211 (if_then_else (match_operand 2 "const_int_operand")
1212 (const_string "alu_imm")
1213 (const_string "alu_sreg")))]
1216 (define_insn "*compare_addsi2_op0"
1217 [(set (reg:CC_C CC_REGNUM)
1219 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1220 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1228 [(set_attr "conds" "set")
1229 (set_attr "predicable" "yes")
1230 (set_attr "arch" "t2,t2,*,*")
1231 (set_attr "predicable_short_it" "yes,yes,no,no")
1232 (set_attr "length" "2,2,4,4")
1234 (if_then_else (match_operand 1 "const_int_operand")
1235 (const_string "alu_imm")
1236 (const_string "alu_sreg")))]
1239 (define_insn "*compare_addsi2_op1"
1240 [(set (reg:CC_C CC_REGNUM)
1242 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1243 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1251 [(set_attr "conds" "set")
1252 (set_attr "predicable" "yes")
1253 (set_attr "arch" "t2,t2,*,*")
1254 (set_attr "predicable_short_it" "yes,yes,no,no")
1255 (set_attr "length" "2,2,4,4")
1257 (if_then_else (match_operand 1 "const_int_operand")
1258 (const_string "alu_imm")
1259 (const_string "alu_sreg")))]
1262 (define_insn "addsi3_carryin"
1263 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1264 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
1265 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
1266 (match_operand:SI 3 "arm_carry_operation" "")))]
1271 sbc%?\\t%0, %1, #%B2"
1272 [(set_attr "conds" "use")
1273 (set_attr "predicable" "yes")
1274 (set_attr "arch" "t2,*,*")
1275 (set_attr "length" "4")
1276 (set_attr "predicable_short_it" "yes,no,no")
1277 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1280 ;; Canonicalization of the above when the immediate is zero.
1281 (define_insn "add0si3_carryin"
1282 [(set (match_operand:SI 0 "s_register_operand" "=r")
1283 (plus:SI (match_operand:SI 2 "arm_carry_operation" "")
1284 (match_operand:SI 1 "arm_not_operand" "r")))]
1286 "adc%?\\t%0, %1, #0"
1287 [(set_attr "conds" "use")
1288 (set_attr "predicable" "yes")
1289 (set_attr "length" "4")
1290 (set_attr "type" "adc_imm")]
1293 (define_insn "*addsi3_carryin_alt2"
1294 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1295 (plus:SI (plus:SI (match_operand:SI 3 "arm_carry_operation" "")
1296 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
1297 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
1302 sbc%?\\t%0, %1, #%B2"
1303 [(set_attr "conds" "use")
1304 (set_attr "predicable" "yes")
1305 (set_attr "arch" "t2,*,*")
1306 (set_attr "length" "4")
1307 (set_attr "predicable_short_it" "yes,no,no")
1308 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1311 (define_insn "*addsi3_carryin_shift"
1312 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1314 (match_operator:SI 2 "shift_operator"
1315 [(match_operand:SI 3 "s_register_operand" "r,r")
1316 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1317 (match_operand:SI 5 "arm_carry_operation" ""))
1318 (match_operand:SI 1 "s_register_operand" "r,r")))]
1320 "adc%?\\t%0, %1, %3%S2"
1321 [(set_attr "conds" "use")
1322 (set_attr "arch" "32,a")
1323 (set_attr "shift" "3")
1324 (set_attr "predicable" "yes")
1325 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1326 (const_string "alu_shift_imm")
1327 (const_string "alu_shift_reg")))]
1330 (define_insn "*addsi3_carryin_clobercc"
1331 [(set (match_operand:SI 0 "s_register_operand" "=r")
1332 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1333 (match_operand:SI 2 "arm_rhs_operand" "rI"))
1334 (match_operand:SI 3 "arm_carry_operation" "")))
1335 (clobber (reg:CC CC_REGNUM))]
1337 "adcs%?\\t%0, %1, %2"
1338 [(set_attr "conds" "set")
1339 (set_attr "type" "adcs_reg")]
1342 (define_expand "subv<mode>4"
1343 [(match_operand:SIDI 0 "register_operand")
1344 (match_operand:SIDI 1 "register_operand")
1345 (match_operand:SIDI 2 "register_operand")
1346 (match_operand 3 "")]
1349 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
1350 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
1355 (define_expand "usubvsi4"
1356 [(match_operand:SI 0 "s_register_operand")
1357 (match_operand:SI 1 "arm_rhs_operand")
1358 (match_operand:SI 2 "arm_add_operand")
1359 (match_operand 3 "")]
1362 machine_mode mode = CCmode;
1363 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1365 /* If both operands are constants we can decide the result statically. */
1366 wi::overflow_type overflow;
1367 wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1368 rtx_mode_t (operands[2], SImode),
1369 UNSIGNED, &overflow);
1370 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1371 if (overflow != wi::OVF_NONE)
1372 emit_jump_insn (gen_jump (operands[3]));
1375 else if (CONST_INT_P (operands[2]))
1376 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
1377 GEN_INT (-INTVAL (operands[2]))));
1378 else if (CONST_INT_P (operands[1]))
1381 emit_insn (gen_rsb_imm_compare (operands[0], operands[1], operands[2],
1382 GEN_INT (~UINTVAL (operands[1]))));
1385 emit_insn (gen_subsi3_compare1 (operands[0], operands[1], operands[2]));
1386 arm_gen_unlikely_cbranch (LTU, mode, operands[3]);
1391 (define_expand "usubvdi4"
1392 [(match_operand:DI 0 "s_register_operand")
1393 (match_operand:DI 1 "reg_or_int_operand")
1394 (match_operand:DI 2 "reg_or_int_operand")
1395 (match_operand 3 "")]
1398 rtx lo_result, hi_result;
1399 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1400 lo_result = gen_lowpart (SImode, operands[0]);
1401 hi_result = gen_highpart (SImode, operands[0]);
1402 machine_mode mode = CCmode;
1404 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1406 /* If both operands are constants we can decide the result statically. */
1407 wi::overflow_type overflow;
1408 wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1409 rtx_mode_t (operands[2], DImode),
1410 UNSIGNED, &overflow);
1411 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1412 if (overflow != wi::OVF_NONE)
1413 emit_jump_insn (gen_jump (operands[3]));
1416 else if (CONST_INT_P (operands[1]))
1418 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1420 if (const_ok_for_arm (INTVAL (lo_op1)))
1422 emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1423 GEN_INT (~UINTVAL (lo_op1))));
1424 /* We could potentially use RSC here in Arm state, but not
1425 in Thumb, so it's probably not worth the effort of handling
1427 hi_op1 = force_reg (SImode, hi_op1);
1431 operands[1] = force_reg (DImode, operands[1]);
1434 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1436 if (lo_op2 == const0_rtx)
1438 emit_move_insn (lo_result, lo_op1);
1439 if (!arm_add_operand (hi_op2, SImode))
1440 hi_op2 = force_reg (SImode, hi_op2);
1441 emit_insn (gen_usubvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1445 if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1446 lo_op2 = force_reg (SImode, lo_op2);
1447 if (CONST_INT_P (lo_op2))
1448 emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1449 GEN_INT (-INTVAL (lo_op2))));
1451 emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1454 if (!arm_not_operand (hi_op2, SImode))
1455 hi_op2 = force_reg (SImode, hi_op2);
1456 rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1457 if (CONST_INT_P (hi_op2))
1458 emit_insn (gen_usubvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1459 GEN_INT (UINTVAL (hi_op2) & 0xffffffff),
1460 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1461 gen_rtx_LTU (DImode, ccreg,
1464 emit_insn (gen_usubvsi3_borrow (hi_result, hi_op1, hi_op2,
1465 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1466 gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1467 arm_gen_unlikely_cbranch (LTU, CC_Bmode, operands[3]);
1472 (define_insn "subdi3_compare1"
1473 [(set (reg:CC CC_REGNUM)
1475 (match_operand:DI 1 "s_register_operand" "r")
1476 (match_operand:DI 2 "s_register_operand" "r")))
1477 (set (match_operand:DI 0 "s_register_operand" "=&r")
1478 (minus:DI (match_dup 1) (match_dup 2)))]
1480 "subs\\t%Q0, %Q1, %Q2;sbcs\\t%R0, %R1, %R2"
1481 [(set_attr "conds" "set")
1482 (set_attr "length" "8")
1483 (set_attr "type" "multiple")]
1486 (define_insn "subsi3_compare1"
1487 [(set (reg:CC CC_REGNUM)
1489 (match_operand:SI 1 "register_operand" "r")
1490 (match_operand:SI 2 "register_operand" "r")))
1491 (set (match_operand:SI 0 "register_operand" "=r")
1492 (minus:SI (match_dup 1) (match_dup 2)))]
1494 "subs%?\\t%0, %1, %2"
1495 [(set_attr "conds" "set")
1496 (set_attr "type" "alus_sreg")]
1499 (define_insn "subsi3_carryin"
1500 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1501 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
1502 (match_operand:SI 2 "s_register_operand" "r,r,r"))
1503 (match_operand:SI 3 "arm_borrow_operation" "")))]
1508 sbc%?\\t%0, %2, %2, lsl #1"
1509 [(set_attr "conds" "use")
1510 (set_attr "arch" "*,a,t2")
1511 (set_attr "predicable" "yes")
1512 (set_attr "type" "adc_reg,adc_imm,alu_shift_imm")]
1515 (define_insn "cmpsi3_carryin_<CC_EXTEND>out"
1516 [(set (reg:<CC_EXTEND> CC_REGNUM)
1517 (compare:<CC_EXTEND>
1518 (SE:DI (match_operand:SI 1 "s_register_operand" "0,r"))
1519 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1520 (SE:DI (match_operand:SI 2 "s_register_operand" "l,r")))))
1521 (clobber (match_scratch:SI 0 "=l,r"))]
1524 [(set_attr "conds" "set")
1525 (set_attr "arch" "t2,*")
1526 (set_attr "length" "2,4")
1527 (set_attr "type" "adc_reg")]
1530 ;; Similar to the above, but handling a constant which has a different
1531 ;; canonicalization.
1532 (define_insn "cmpsi3_imm_carryin_<CC_EXTEND>out"
1533 [(set (reg:<CC_EXTEND> CC_REGNUM)
1534 (compare:<CC_EXTEND>
1535 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1536 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1537 (match_operand:DI 2 "arm_adcimm_operand" "I,K"))))
1538 (clobber (match_scratch:SI 0 "=l,r"))]
1542 adcs\\t%0, %1, #%B2"
1543 [(set_attr "conds" "set")
1544 (set_attr "type" "adc_imm")]
1547 ;; Further canonicalization when the constant is zero.
1548 (define_insn "cmpsi3_0_carryin_<CC_EXTEND>out"
1549 [(set (reg:<CC_EXTEND> CC_REGNUM)
1550 (compare:<CC_EXTEND>
1551 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1552 (match_operand:DI 2 "arm_borrow_operation" "")))
1553 (clobber (match_scratch:SI 0 "=l,r"))]
1556 [(set_attr "conds" "set")
1557 (set_attr "type" "adc_imm")]
1560 (define_insn "*subsi3_carryin_const"
1561 [(set (match_operand:SI 0 "s_register_operand" "=r")
1563 (match_operand:SI 1 "s_register_operand" "r")
1564 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1565 (match_operand:SI 3 "arm_borrow_operation" "")))]
1567 "sbc\\t%0, %1, #%n2"
1568 [(set_attr "conds" "use")
1569 (set_attr "type" "adc_imm")]
1572 (define_insn "*subsi3_carryin_const0"
1573 [(set (match_operand:SI 0 "s_register_operand" "=r")
1574 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1575 (match_operand:SI 2 "arm_borrow_operation" "")))]
1578 [(set_attr "conds" "use")
1579 (set_attr "type" "adc_imm")]
1582 (define_insn "*subsi3_carryin_shift"
1583 [(set (match_operand:SI 0 "s_register_operand" "=r")
1585 (match_operand:SI 1 "s_register_operand" "r")
1586 (match_operator:SI 2 "shift_operator"
1587 [(match_operand:SI 3 "s_register_operand" "r")
1588 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
1589 (match_operand:SI 5 "arm_borrow_operation" "")))]
1591 "sbc%?\\t%0, %1, %3%S2"
1592 [(set_attr "conds" "use")
1593 (set_attr "predicable" "yes")
1594 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1595 (const_string "alu_shift_imm")
1596 (const_string "alu_shift_reg")))]
1599 (define_insn "*subsi3_carryin_shift_alt"
1600 [(set (match_operand:SI 0 "s_register_operand" "=r")
1602 (match_operand:SI 1 "s_register_operand" "r")
1603 (match_operand:SI 5 "arm_borrow_operation" ""))
1604 (match_operator:SI 2 "shift_operator"
1605 [(match_operand:SI 3 "s_register_operand" "r")
1606 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
1608 "sbc%?\\t%0, %1, %3%S2"
1609 [(set_attr "conds" "use")
1610 (set_attr "predicable" "yes")
1611 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1612 (const_string "alu_shift_imm")
1613 (const_string "alu_shift_reg")))]
1616 (define_insn "*rsbsi3_carryin_shift"
1617 [(set (match_operand:SI 0 "s_register_operand" "=r")
1619 (match_operator:SI 2 "shift_operator"
1620 [(match_operand:SI 3 "s_register_operand" "r")
1621 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1622 (match_operand:SI 1 "s_register_operand" "r"))
1623 (match_operand:SI 5 "arm_borrow_operation" "")))]
1625 "rsc%?\\t%0, %1, %3%S2"
1626 [(set_attr "conds" "use")
1627 (set_attr "predicable" "yes")
1628 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1629 (const_string "alu_shift_imm")
1630 (const_string "alu_shift_reg")))]
1633 (define_insn "*rsbsi3_carryin_shift_alt"
1634 [(set (match_operand:SI 0 "s_register_operand" "=r")
1636 (match_operator:SI 2 "shift_operator"
1637 [(match_operand:SI 3 "s_register_operand" "r")
1638 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1639 (match_operand:SI 5 "arm_borrow_operation" ""))
1640 (match_operand:SI 1 "s_register_operand" "r")))]
1642 "rsc%?\\t%0, %1, %3%S2"
1643 [(set_attr "conds" "use")
1644 (set_attr "predicable" "yes")
1645 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1646 (const_string "alu_shift_imm")
1647 (const_string "alu_shift_reg")))]
1650 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
1652 [(set (match_operand:SI 0 "s_register_operand" "")
1653 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1654 (match_operand:SI 2 "s_register_operand" ""))
1656 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1658 [(set (match_dup 3) (match_dup 1))
1659 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1661 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1664 (define_expand "addsf3"
1665 [(set (match_operand:SF 0 "s_register_operand")
1666 (plus:SF (match_operand:SF 1 "s_register_operand")
1667 (match_operand:SF 2 "s_register_operand")))]
1668 "TARGET_32BIT && TARGET_HARD_FLOAT"
1672 (define_expand "adddf3"
1673 [(set (match_operand:DF 0 "s_register_operand")
1674 (plus:DF (match_operand:DF 1 "s_register_operand")
1675 (match_operand:DF 2 "s_register_operand")))]
1676 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1680 (define_expand "subdi3"
1682 [(set (match_operand:DI 0 "s_register_operand")
1683 (minus:DI (match_operand:DI 1 "reg_or_int_operand")
1684 (match_operand:DI 2 "s_register_operand")))
1685 (clobber (reg:CC CC_REGNUM))])]
1690 if (!REG_P (operands[1]))
1691 operands[1] = force_reg (DImode, operands[1]);
1695 rtx lo_result, hi_result, lo_dest, hi_dest;
1696 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1699 /* Since operands[1] may be an integer, pass it second, so that
1700 any necessary simplifications will be done on the decomposed
1702 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1704 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
1705 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
1707 if (!arm_rhs_operand (lo_op1, SImode))
1708 lo_op1 = force_reg (SImode, lo_op1);
1710 if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
1711 || !arm_rhs_operand (hi_op1, SImode))
1712 hi_op1 = force_reg (SImode, hi_op1);
1715 if (lo_op1 == const0_rtx)
1717 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1718 emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
1720 else if (CONST_INT_P (lo_op1))
1722 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1723 emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
1724 GEN_INT (~UINTVAL (lo_op1))));
1728 cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
1729 emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
1732 condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);
1734 if (hi_op1 == const0_rtx)
1735 emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
1737 emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));
1739 if (lo_result != lo_dest)
1740 emit_move_insn (lo_result, lo_dest);
1742 if (hi_result != hi_dest)
1743 emit_move_insn (hi_result, hi_dest);
1750 (define_expand "subsi3"
1751 [(set (match_operand:SI 0 "s_register_operand")
1752 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1753 (match_operand:SI 2 "s_register_operand")))]
1756 if (CONST_INT_P (operands[1]))
1760 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1761 operands[1] = force_reg (SImode, operands[1]);
1764 arm_split_constant (MINUS, SImode, NULL_RTX,
1765 INTVAL (operands[1]), operands[0],
1767 optimize && can_create_pseudo_p ());
1771 else /* TARGET_THUMB1 */
1772 operands[1] = force_reg (SImode, operands[1]);
1777 ; ??? Check Thumb-2 split length
1778 (define_insn_and_split "*arm_subsi3_insn"
1779 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
1780 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
1781 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
1793 "&& (CONST_INT_P (operands[1])
1794 && !const_ok_for_arm (INTVAL (operands[1])))"
1795 [(clobber (const_int 0))]
1797 arm_split_constant (MINUS, SImode, curr_insn,
1798 INTVAL (operands[1]), operands[0], operands[2], 0);
1801 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
1802 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
1803 (set_attr "predicable" "yes")
1804 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
1805 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
1809 [(match_scratch:SI 3 "r")
1810 (set (match_operand:SI 0 "arm_general_register_operand" "")
1811 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1812 (match_operand:SI 2 "arm_general_register_operand" "")))]
1814 && !const_ok_for_arm (INTVAL (operands[1]))
1815 && const_ok_for_arm (~INTVAL (operands[1]))"
1816 [(set (match_dup 3) (match_dup 1))
1817 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1821 (define_insn "subsi3_compare0"
1822 [(set (reg:CC_NOOV CC_REGNUM)
1824 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1825 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1827 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1828 (minus:SI (match_dup 1) (match_dup 2)))]
1833 rsbs%?\\t%0, %2, %1"
1834 [(set_attr "conds" "set")
1835 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
1838 (define_insn "subsi3_compare"
1839 [(set (reg:CC CC_REGNUM)
1840 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1841 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1842 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1843 (minus:SI (match_dup 1) (match_dup 2)))]
1848 rsbs%?\\t%0, %2, %1"
1849 [(set_attr "conds" "set")
1850 (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
1853 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
1854 ;; rather than (0 cmp reg). This gives the same results for unsigned
1855 ;; and equality compares which is what we mostly need here.
1856 (define_insn "rsb_imm_compare"
1857 [(set (reg:CC_RSB CC_REGNUM)
1858 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
1859 (match_operand 3 "const_int_operand" "")))
1860 (set (match_operand:SI 0 "s_register_operand" "=r")
1861 (minus:SI (match_operand 1 "arm_immediate_operand" "I")
1863 "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
1865 [(set_attr "conds" "set")
1866 (set_attr "type" "alus_imm")]
1869 ;; Similarly, but the result is unused.
1870 (define_insn "rsb_imm_compare_scratch"
1871 [(set (reg:CC_RSB CC_REGNUM)
1872 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
1873 (match_operand 1 "arm_not_immediate_operand" "K")))
1874 (clobber (match_scratch:SI 0 "=r"))]
1876 "rsbs\\t%0, %2, #%B1"
1877 [(set_attr "conds" "set")
1878 (set_attr "type" "alus_imm")]
1881 ;; Compare the sum of a value plus a carry against a constant. Uses
1882 ;; RSC, so the result is swapped. Only available on Arm
1883 (define_insn "rscsi3_<CC_EXTEND>out_scratch"
1884 [(set (reg:CC_SWP CC_REGNUM)
1886 (plus:DI (SE:DI (match_operand:SI 2 "s_register_operand" "r"))
1887 (match_operand:DI 3 "arm_borrow_operation" ""))
1888 (match_operand 1 "arm_immediate_operand" "I")))
1889 (clobber (match_scratch:SI 0 "=r"))]
1892 [(set_attr "conds" "set")
1893 (set_attr "type" "alus_imm")]
1896 (define_insn "usubvsi3_borrow"
1897 [(set (reg:CC_B CC_REGNUM)
1899 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
1900 (plus:DI (match_operand:DI 4 "arm_borrow_operation" "")
1902 (match_operand:SI 2 "s_register_operand" "l,r")))))
1903 (set (match_operand:SI 0 "s_register_operand" "=l,r")
1904 (minus:SI (match_dup 1)
1905 (plus:SI (match_operand:SI 3 "arm_borrow_operation" "")
1908 "sbcs%?\\t%0, %1, %2"
1909 [(set_attr "conds" "set")
1910 (set_attr "arch" "t2,*")
1911 (set_attr "length" "2,4")]
1914 (define_insn "usubvsi3_borrow_imm"
1915 [(set (reg:CC_B CC_REGNUM)
1917 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1918 (plus:DI (match_operand:DI 5 "arm_borrow_operation" "")
1919 (match_operand:DI 3 "const_int_operand" "n,n"))))
1920 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1921 (minus:SI (match_dup 1)
1922 (plus:SI (match_operand:SI 4 "arm_borrow_operation" "")
1923 (match_operand:SI 2 "arm_adcimm_operand" "I,K"))))]
1925 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[3])"
1928 adcs%?\\t%0, %1, #%B2"
1929 [(set_attr "conds" "set")
1930 (set_attr "type" "alus_imm")]
1933 (define_expand "subsf3"
1934 [(set (match_operand:SF 0 "s_register_operand")
1935 (minus:SF (match_operand:SF 1 "s_register_operand")
1936 (match_operand:SF 2 "s_register_operand")))]
1937 "TARGET_32BIT && TARGET_HARD_FLOAT"
1941 (define_expand "subdf3"
1942 [(set (match_operand:DF 0 "s_register_operand")
1943 (minus:DF (match_operand:DF 1 "s_register_operand")
1944 (match_operand:DF 2 "s_register_operand")))]
1945 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1950 ;; Multiplication insns
1952 (define_expand "mulhi3"
1953 [(set (match_operand:HI 0 "s_register_operand")
1954 (mult:HI (match_operand:HI 1 "s_register_operand")
1955 (match_operand:HI 2 "s_register_operand")))]
1956 "TARGET_DSP_MULTIPLY"
1959 rtx result = gen_reg_rtx (SImode);
1960 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
1961 emit_move_insn (operands[0], gen_lowpart (HImode, result));
1966 (define_expand "mulsi3"
1967 [(set (match_operand:SI 0 "s_register_operand")
1968 (mult:SI (match_operand:SI 2 "s_register_operand")
1969 (match_operand:SI 1 "s_register_operand")))]
1974 ;; Use `&' and then `0' to prevent operands 0 and 2 being the same
1976 [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
1977 (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
1978 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
1980 "mul%?\\t%0, %2, %1"
1981 [(set_attr "type" "mul")
1982 (set_attr "predicable" "yes")
1983 (set_attr "arch" "t2,v6,nov6,nov6")
1984 (set_attr "length" "4")
1985 (set_attr "predicable_short_it" "yes,no,*,*")]
1988 ;; MLA and MLS instruction. Use operand 1 for the accumulator to prefer
1989 ;; reusing the same register.
1992 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
1994 (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
1995 (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
1996 (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
1998 "mla%?\\t%0, %3, %2, %1"
1999 [(set_attr "type" "mla")
2000 (set_attr "predicable" "yes")
2001 (set_attr "arch" "v6,nov6,nov6,nov6")]
2005 [(set (match_operand:SI 0 "s_register_operand" "=r")
2007 (match_operand:SI 1 "s_register_operand" "r")
2008 (mult:SI (match_operand:SI 3 "s_register_operand" "r")
2009 (match_operand:SI 2 "s_register_operand" "r"))))]
2010 "TARGET_32BIT && arm_arch_thumb2"
2011 "mls%?\\t%0, %3, %2, %1"
2012 [(set_attr "type" "mla")
2013 (set_attr "predicable" "yes")]
2016 (define_insn "*mulsi3_compare0"
2017 [(set (reg:CC_NOOV CC_REGNUM)
2018 (compare:CC_NOOV (mult:SI
2019 (match_operand:SI 2 "s_register_operand" "r,r")
2020 (match_operand:SI 1 "s_register_operand" "%0,r"))
2022 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
2023 (mult:SI (match_dup 2) (match_dup 1)))]
2024 "TARGET_ARM && !arm_arch6"
2025 "muls%?\\t%0, %2, %1"
2026 [(set_attr "conds" "set")
2027 (set_attr "type" "muls")]
2030 (define_insn "*mulsi3_compare0_v6"
2031 [(set (reg:CC_NOOV CC_REGNUM)
2032 (compare:CC_NOOV (mult:SI
2033 (match_operand:SI 2 "s_register_operand" "r")
2034 (match_operand:SI 1 "s_register_operand" "r"))
2036 (set (match_operand:SI 0 "s_register_operand" "=r")
2037 (mult:SI (match_dup 2) (match_dup 1)))]
2038 "TARGET_ARM && arm_arch6 && optimize_size"
2039 "muls%?\\t%0, %2, %1"
2040 [(set_attr "conds" "set")
2041 (set_attr "type" "muls")]
2044 (define_insn "*mulsi_compare0_scratch"
2045 [(set (reg:CC_NOOV CC_REGNUM)
2046 (compare:CC_NOOV (mult:SI
2047 (match_operand:SI 2 "s_register_operand" "r,r")
2048 (match_operand:SI 1 "s_register_operand" "%0,r"))
2050 (clobber (match_scratch:SI 0 "=&r,&r"))]
2051 "TARGET_ARM && !arm_arch6"
2052 "muls%?\\t%0, %2, %1"
2053 [(set_attr "conds" "set")
2054 (set_attr "type" "muls")]
2057 (define_insn "*mulsi_compare0_scratch_v6"
2058 [(set (reg:CC_NOOV CC_REGNUM)
2059 (compare:CC_NOOV (mult:SI
2060 (match_operand:SI 2 "s_register_operand" "r")
2061 (match_operand:SI 1 "s_register_operand" "r"))
2063 (clobber (match_scratch:SI 0 "=r"))]
2064 "TARGET_ARM && arm_arch6 && optimize_size"
2065 "muls%?\\t%0, %2, %1"
2066 [(set_attr "conds" "set")
2067 (set_attr "type" "muls")]
2070 (define_insn "*mulsi3addsi_compare0"
2071 [(set (reg:CC_NOOV CC_REGNUM)
2074 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2075 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2076 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
2078 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
2079 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2081 "TARGET_ARM && arm_arch6"
2082 "mlas%?\\t%0, %2, %1, %3"
2083 [(set_attr "conds" "set")
2084 (set_attr "type" "mlas")]
2087 (define_insn "*mulsi3addsi_compare0_v6"
2088 [(set (reg:CC_NOOV CC_REGNUM)
2091 (match_operand:SI 2 "s_register_operand" "r")
2092 (match_operand:SI 1 "s_register_operand" "r"))
2093 (match_operand:SI 3 "s_register_operand" "r"))
2095 (set (match_operand:SI 0 "s_register_operand" "=r")
2096 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2098 "TARGET_ARM && arm_arch6 && optimize_size"
2099 "mlas%?\\t%0, %2, %1, %3"
2100 [(set_attr "conds" "set")
2101 (set_attr "type" "mlas")]
2104 (define_insn "*mulsi3addsi_compare0_scratch"
2105 [(set (reg:CC_NOOV CC_REGNUM)
2108 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2109 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2110 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
2112 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
2113 "TARGET_ARM && !arm_arch6"
2114 "mlas%?\\t%0, %2, %1, %3"
2115 [(set_attr "conds" "set")
2116 (set_attr "type" "mlas")]
2119 (define_insn "*mulsi3addsi_compare0_scratch_v6"
2120 [(set (reg:CC_NOOV CC_REGNUM)
2123 (match_operand:SI 2 "s_register_operand" "r")
2124 (match_operand:SI 1 "s_register_operand" "r"))
2125 (match_operand:SI 3 "s_register_operand" "r"))
2127 (clobber (match_scratch:SI 0 "=r"))]
2128 "TARGET_ARM && arm_arch6 && optimize_size"
2129 "mlas%?\\t%0, %2, %1, %3"
2130 [(set_attr "conds" "set")
2131 (set_attr "type" "mlas")]
2134 ;; 32x32->64 widening multiply.
2135 ;; The only difference between the v3-5 and v6+ versions is the requirement
2136 ;; that the output does not overlap with either input.
2138 (define_expand "<Us>mulsidi3"
2139 [(set (match_operand:DI 0 "s_register_operand")
2141 (SE:DI (match_operand:SI 1 "s_register_operand"))
2142 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
2145 emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
2146 gen_highpart (SImode, operands[0]),
2147 operands[1], operands[2]));
2152 (define_insn "<US>mull"
2153 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2155 (match_operand:SI 2 "s_register_operand" "%r,r")
2156 (match_operand:SI 3 "s_register_operand" "r,r")))
2157 (set (match_operand:SI 1 "s_register_operand" "=r,&r")
2160 (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
2163 "<US>mull%?\\t%0, %1, %2, %3"
2164 [(set_attr "type" "umull")
2165 (set_attr "predicable" "yes")
2166 (set_attr "arch" "v6,nov6")]
2169 (define_expand "<Us>maddsidi4"
2170 [(set (match_operand:DI 0 "s_register_operand")
2173 (SE:DI (match_operand:SI 1 "s_register_operand"))
2174 (SE:DI (match_operand:SI 2 "s_register_operand")))
2175 (match_operand:DI 3 "s_register_operand")))]
2178 emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
2179 gen_lowpart (SImode, operands[3]),
2180 gen_highpart (SImode, operands[0]),
2181 gen_highpart (SImode, operands[3]),
2182 operands[1], operands[2]));
2187 (define_insn "<US>mlal"
2188 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2191 (match_operand:SI 4 "s_register_operand" "%r,r")
2192 (match_operand:SI 5 "s_register_operand" "r,r"))
2193 (match_operand:SI 1 "s_register_operand" "0,0")))
2194 (set (match_operand:SI 2 "s_register_operand" "=r,&r")
2199 (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
2200 (zero_extend:DI (match_dup 1)))
2202 (match_operand:SI 3 "s_register_operand" "2,2")))]
2204 "<US>mlal%?\\t%0, %2, %4, %5"
2205 [(set_attr "type" "umlal")
2206 (set_attr "predicable" "yes")
2207 (set_attr "arch" "v6,nov6")]
2210 (define_expand "<US>mulsi3_highpart"
2212 [(set (match_operand:SI 0 "s_register_operand")
2216 (SE:DI (match_operand:SI 1 "s_register_operand"))
2217 (SE:DI (match_operand:SI 2 "s_register_operand")))
2219 (clobber (match_scratch:SI 3 ""))])]
2224 (define_insn "*<US>mull_high"
2225 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
2229 (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
2230 (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
2232 (clobber (match_scratch:SI 3 "=r,&r,&r"))]
2234 "<US>mull%?\\t%3, %0, %2, %1"
2235 [(set_attr "type" "umull")
2236 (set_attr "predicable" "yes")
2237 (set_attr "arch" "v6,nov6,nov6")]
2240 (define_insn "mulhisi3"
2241 [(set (match_operand:SI 0 "s_register_operand" "=r")
2242 (mult:SI (sign_extend:SI
2243 (match_operand:HI 1 "s_register_operand" "%r"))
2245 (match_operand:HI 2 "s_register_operand" "r"))))]
2246 "TARGET_DSP_MULTIPLY"
2247 "smulbb%?\\t%0, %1, %2"
2248 [(set_attr "type" "smulxy")
2249 (set_attr "predicable" "yes")]
2252 (define_insn "*mulhisi3tb"
2253 [(set (match_operand:SI 0 "s_register_operand" "=r")
2254 (mult:SI (ashiftrt:SI
2255 (match_operand:SI 1 "s_register_operand" "r")
2258 (match_operand:HI 2 "s_register_operand" "r"))))]
2259 "TARGET_DSP_MULTIPLY"
2260 "smultb%?\\t%0, %1, %2"
2261 [(set_attr "type" "smulxy")
2262 (set_attr "predicable" "yes")]
2265 (define_insn "*mulhisi3bt"
2266 [(set (match_operand:SI 0 "s_register_operand" "=r")
2267 (mult:SI (sign_extend:SI
2268 (match_operand:HI 1 "s_register_operand" "r"))
2270 (match_operand:SI 2 "s_register_operand" "r")
2272 "TARGET_DSP_MULTIPLY"
2273 "smulbt%?\\t%0, %1, %2"
2274 [(set_attr "type" "smulxy")
2275 (set_attr "predicable" "yes")]
2278 (define_insn "*mulhisi3tt"
2279 [(set (match_operand:SI 0 "s_register_operand" "=r")
2280 (mult:SI (ashiftrt:SI
2281 (match_operand:SI 1 "s_register_operand" "r")
2284 (match_operand:SI 2 "s_register_operand" "r")
2286 "TARGET_DSP_MULTIPLY"
2287 "smultt%?\\t%0, %1, %2"
2288 [(set_attr "type" "smulxy")
2289 (set_attr "predicable" "yes")]
2292 (define_insn "maddhisi4"
2293 [(set (match_operand:SI 0 "s_register_operand" "=r")
2294 (plus:SI (mult:SI (sign_extend:SI
2295 (match_operand:HI 1 "s_register_operand" "r"))
2297 (match_operand:HI 2 "s_register_operand" "r")))
2298 (match_operand:SI 3 "s_register_operand" "r")))]
2299 "TARGET_DSP_MULTIPLY"
2300 "smlabb%?\\t%0, %1, %2, %3"
2301 [(set_attr "type" "smlaxy")
2302 (set_attr "predicable" "yes")]
2305 ;; Note: there is no maddhisi4ibt because this one is canonical form
2306 (define_insn "*maddhisi4tb"
2307 [(set (match_operand:SI 0 "s_register_operand" "=r")
2308 (plus:SI (mult:SI (ashiftrt:SI
2309 (match_operand:SI 1 "s_register_operand" "r")
2312 (match_operand:HI 2 "s_register_operand" "r")))
2313 (match_operand:SI 3 "s_register_operand" "r")))]
2314 "TARGET_DSP_MULTIPLY"
2315 "smlatb%?\\t%0, %1, %2, %3"
2316 [(set_attr "type" "smlaxy")
2317 (set_attr "predicable" "yes")]
2320 (define_insn "*maddhisi4tt"
2321 [(set (match_operand:SI 0 "s_register_operand" "=r")
2322 (plus:SI (mult:SI (ashiftrt:SI
2323 (match_operand:SI 1 "s_register_operand" "r")
2326 (match_operand:SI 2 "s_register_operand" "r")
2328 (match_operand:SI 3 "s_register_operand" "r")))]
2329 "TARGET_DSP_MULTIPLY"
2330 "smlatt%?\\t%0, %1, %2, %3"
2331 [(set_attr "type" "smlaxy")
2332 (set_attr "predicable" "yes")]
2335 (define_insn "maddhidi4"
2336 [(set (match_operand:DI 0 "s_register_operand" "=r")
2338 (mult:DI (sign_extend:DI
2339 (match_operand:HI 1 "s_register_operand" "r"))
2341 (match_operand:HI 2 "s_register_operand" "r")))
2342 (match_operand:DI 3 "s_register_operand" "0")))]
2343 "TARGET_DSP_MULTIPLY"
2344 "smlalbb%?\\t%Q0, %R0, %1, %2"
2345 [(set_attr "type" "smlalxy")
2346 (set_attr "predicable" "yes")])
2348 ;; Note: there is no maddhidi4ibt because this one is canonical form
2349 (define_insn "*maddhidi4tb"
2350 [(set (match_operand:DI 0 "s_register_operand" "=r")
2352 (mult:DI (sign_extend:DI
2354 (match_operand:SI 1 "s_register_operand" "r")
2357 (match_operand:HI 2 "s_register_operand" "r")))
2358 (match_operand:DI 3 "s_register_operand" "0")))]
2359 "TARGET_DSP_MULTIPLY"
2360 "smlaltb%?\\t%Q0, %R0, %1, %2"
2361 [(set_attr "type" "smlalxy")
2362 (set_attr "predicable" "yes")])
2364 (define_insn "*maddhidi4tt"
2365 [(set (match_operand:DI 0 "s_register_operand" "=r")
2367 (mult:DI (sign_extend:DI
2369 (match_operand:SI 1 "s_register_operand" "r")
2373 (match_operand:SI 2 "s_register_operand" "r")
2375 (match_operand:DI 3 "s_register_operand" "0")))]
2376 "TARGET_DSP_MULTIPLY"
2377 "smlaltt%?\\t%Q0, %R0, %1, %2"
2378 [(set_attr "type" "smlalxy")
2379 (set_attr "predicable" "yes")])
2381 (define_expand "mulsf3"
2382 [(set (match_operand:SF 0 "s_register_operand")
2383 (mult:SF (match_operand:SF 1 "s_register_operand")
2384 (match_operand:SF 2 "s_register_operand")))]
2385 "TARGET_32BIT && TARGET_HARD_FLOAT"
2389 (define_expand "muldf3"
2390 [(set (match_operand:DF 0 "s_register_operand")
2391 (mult:DF (match_operand:DF 1 "s_register_operand")
2392 (match_operand:DF 2 "s_register_operand")))]
2393 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2399 (define_expand "divsf3"
2400 [(set (match_operand:SF 0 "s_register_operand")
2401 (div:SF (match_operand:SF 1 "s_register_operand")
2402 (match_operand:SF 2 "s_register_operand")))]
2403 "TARGET_32BIT && TARGET_HARD_FLOAT"
2406 (define_expand "divdf3"
2407 [(set (match_operand:DF 0 "s_register_operand")
2408 (div:DF (match_operand:DF 1 "s_register_operand")
2409 (match_operand:DF 2 "s_register_operand")))]
2410 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
2414 ; Expand logical operations. The mid-end expander does not split off memory
2415 ; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
2416 ; So an explicit expander is needed to generate better code.
2418 (define_expand "<LOGICAL:optab>di3"
2419 [(set (match_operand:DI 0 "s_register_operand")
2420 (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
2421 (match_operand:DI 2 "arm_<optab>di_operand")))]
2424 rtx low = simplify_gen_binary (<CODE>, SImode,
2425 gen_lowpart (SImode, operands[1]),
2426 gen_lowpart (SImode, operands[2]));
2427 rtx high = simplify_gen_binary (<CODE>, SImode,
2428 gen_highpart (SImode, operands[1]),
2429 gen_highpart_mode (SImode, DImode,
2432 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2433 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2438 (define_expand "one_cmpldi2"
2439 [(set (match_operand:DI 0 "s_register_operand")
2440 (not:DI (match_operand:DI 1 "s_register_operand")))]
2443 rtx low = simplify_gen_unary (NOT, SImode,
2444 gen_lowpart (SImode, operands[1]),
2446 rtx high = simplify_gen_unary (NOT, SImode,
2447 gen_highpart_mode (SImode, DImode,
2451 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2452 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2457 ;; Split DImode and, ior, xor operations. Simply perform the logical
2458 ;; operation on the upper and lower halves of the registers.
2459 ;; This is needed for atomic operations in arm_split_atomic_op.
2460 ;; Avoid splitting IWMMXT instructions.
2462 [(set (match_operand:DI 0 "s_register_operand" "")
2463 (match_operator:DI 6 "logical_binary_operator"
2464 [(match_operand:DI 1 "s_register_operand" "")
2465 (match_operand:DI 2 "s_register_operand" "")]))]
2466 "TARGET_32BIT && reload_completed
2467 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2468 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2469 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
2472 operands[3] = gen_highpart (SImode, operands[0]);
2473 operands[0] = gen_lowpart (SImode, operands[0]);
2474 operands[4] = gen_highpart (SImode, operands[1]);
2475 operands[1] = gen_lowpart (SImode, operands[1]);
2476 operands[5] = gen_highpart (SImode, operands[2]);
2477 operands[2] = gen_lowpart (SImode, operands[2]);
2481 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
2482 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
2484 [(set (match_operand:DI 0 "s_register_operand")
2485 (not:DI (match_operand:DI 1 "s_register_operand")))]
2487 [(set (match_dup 0) (not:SI (match_dup 1)))
2488 (set (match_dup 2) (not:SI (match_dup 3)))]
2491 operands[2] = gen_highpart (SImode, operands[0]);
2492 operands[0] = gen_lowpart (SImode, operands[0]);
2493 operands[3] = gen_highpart (SImode, operands[1]);
2494 operands[1] = gen_lowpart (SImode, operands[1]);
2498 (define_expand "andsi3"
2499 [(set (match_operand:SI 0 "s_register_operand")
2500 (and:SI (match_operand:SI 1 "s_register_operand")
2501 (match_operand:SI 2 "reg_or_int_operand")))]
2506 if (CONST_INT_P (operands[2]))
2508 if (INTVAL (operands[2]) == 255 && arm_arch6)
2510 operands[1] = convert_to_mode (QImode, operands[1], 1);
2511 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2515 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
2516 operands[2] = force_reg (SImode, operands[2]);
2519 arm_split_constant (AND, SImode, NULL_RTX,
2520 INTVAL (operands[2]), operands[0],
2522 optimize && can_create_pseudo_p ());
2528 else /* TARGET_THUMB1 */
2530 if (!CONST_INT_P (operands[2]))
2532 rtx tmp = force_reg (SImode, operands[2]);
2533 if (rtx_equal_p (operands[0], operands[1]))
2537 operands[2] = operands[1];
2545 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2547 operands[2] = force_reg (SImode,
2548 GEN_INT (~INTVAL (operands[2])));
2550 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2555 for (i = 9; i <= 31; i++)
2557 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
2559 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2563 else if ((HOST_WIDE_INT_1 << i) - 1
2564 == ~INTVAL (operands[2]))
2566 rtx shift = GEN_INT (i);
2567 rtx reg = gen_reg_rtx (SImode);
2569 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2570 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2576 operands[2] = force_reg (SImode, operands[2]);
2582 ; ??? Check split length for Thumb-2
2583 (define_insn_and_split "*arm_andsi3_insn"
2584 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2585 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2586 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2591 bic%?\\t%0, %1, #%B2
2595 && CONST_INT_P (operands[2])
2596 && !(const_ok_for_arm (INTVAL (operands[2]))
2597 || const_ok_for_arm (~INTVAL (operands[2])))"
2598 [(clobber (const_int 0))]
2600 arm_split_constant (AND, SImode, curr_insn,
2601 INTVAL (operands[2]), operands[0], operands[1], 0);
2604 [(set_attr "length" "4,4,4,4,16")
2605 (set_attr "predicable" "yes")
2606 (set_attr "predicable_short_it" "no,yes,no,no,no")
2607 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
2610 (define_insn "*andsi3_compare0"
2611 [(set (reg:CC_NOOV CC_REGNUM)
2613 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2614 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
2616 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2617 (and:SI (match_dup 1) (match_dup 2)))]
2621 bics%?\\t%0, %1, #%B2
2622 ands%?\\t%0, %1, %2"
2623 [(set_attr "conds" "set")
2624 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
2627 (define_insn "*andsi3_compare0_scratch"
2628 [(set (reg:CC_NOOV CC_REGNUM)
2630 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
2631 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
2633 (clobber (match_scratch:SI 2 "=X,r,X"))]
2637 bics%?\\t%2, %0, #%B1
2639 [(set_attr "conds" "set")
2640 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
2643 (define_insn "*zeroextractsi_compare0_scratch"
2644 [(set (reg:CC_NOOV CC_REGNUM)
2645 (compare:CC_NOOV (zero_extract:SI
2646 (match_operand:SI 0 "s_register_operand" "r")
2647 (match_operand 1 "const_int_operand" "n")
2648 (match_operand 2 "const_int_operand" "n"))
2651 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2652 && INTVAL (operands[1]) > 0
2653 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2654 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2656 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2657 << INTVAL (operands[2]));
2658 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2661 [(set_attr "conds" "set")
2662 (set_attr "predicable" "yes")
2663 (set_attr "type" "logics_imm")]
2666 (define_insn_and_split "*ne_zeroextractsi"
2667 [(set (match_operand:SI 0 "s_register_operand" "=r")
2668 (ne:SI (zero_extract:SI
2669 (match_operand:SI 1 "s_register_operand" "r")
2670 (match_operand:SI 2 "const_int_operand" "n")
2671 (match_operand:SI 3 "const_int_operand" "n"))
2673 (clobber (reg:CC CC_REGNUM))]
2675 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2676 && INTVAL (operands[2]) > 0
2677 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2678 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2681 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2682 && INTVAL (operands[2]) > 0
2683 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2684 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2685 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2686 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2688 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2690 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2691 (match_dup 0) (const_int 1)))]
2693 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2694 << INTVAL (operands[3]));
2696 [(set_attr "conds" "clob")
2697 (set (attr "length")
2698 (if_then_else (eq_attr "is_thumb" "yes")
2701 (set_attr "type" "multiple")]
2704 (define_insn_and_split "*ne_zeroextractsi_shifted"
2705 [(set (match_operand:SI 0 "s_register_operand" "=r")
2706 (ne:SI (zero_extract:SI
2707 (match_operand:SI 1 "s_register_operand" "r")
2708 (match_operand:SI 2 "const_int_operand" "n")
2711 (clobber (reg:CC CC_REGNUM))]
2715 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2716 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2718 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2720 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2721 (match_dup 0) (const_int 1)))]
2723 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2725 [(set_attr "conds" "clob")
2726 (set_attr "length" "8")
2727 (set_attr "type" "multiple")]
2730 (define_insn_and_split "*ite_ne_zeroextractsi"
2731 [(set (match_operand:SI 0 "s_register_operand" "=r")
2732 (if_then_else:SI (ne (zero_extract:SI
2733 (match_operand:SI 1 "s_register_operand" "r")
2734 (match_operand:SI 2 "const_int_operand" "n")
2735 (match_operand:SI 3 "const_int_operand" "n"))
2737 (match_operand:SI 4 "arm_not_operand" "rIK")
2739 (clobber (reg:CC CC_REGNUM))]
2741 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2742 && INTVAL (operands[2]) > 0
2743 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2744 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2745 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2748 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2749 && INTVAL (operands[2]) > 0
2750 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2751 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2752 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2753 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2754 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2756 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2758 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2759 (match_dup 0) (match_dup 4)))]
2761 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2762 << INTVAL (operands[3]));
2764 [(set_attr "conds" "clob")
2765 (set_attr "length" "8")
2766 (set_attr "type" "multiple")]
2769 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2770 [(set (match_operand:SI 0 "s_register_operand" "=r")
2771 (if_then_else:SI (ne (zero_extract:SI
2772 (match_operand:SI 1 "s_register_operand" "r")
2773 (match_operand:SI 2 "const_int_operand" "n")
2776 (match_operand:SI 3 "arm_not_operand" "rIK")
2778 (clobber (reg:CC CC_REGNUM))]
2779 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2781 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2782 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2783 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2785 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2787 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2788 (match_dup 0) (match_dup 3)))]
2790 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2792 [(set_attr "conds" "clob")
2793 (set_attr "length" "8")
2794 (set_attr "type" "multiple")]
2797 ;; ??? Use Thumb-2 has bitfield insert/extract instructions.
2799 [(set (match_operand:SI 0 "s_register_operand" "")
2800 (match_operator:SI 1 "shiftable_operator"
2801 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2802 (match_operand:SI 3 "const_int_operand" "")
2803 (match_operand:SI 4 "const_int_operand" ""))
2804 (match_operand:SI 5 "s_register_operand" "")]))
2805 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2807 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2810 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2813 HOST_WIDE_INT temp = INTVAL (operands[3]);
2815 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2816 operands[4] = GEN_INT (32 - temp);
2821 [(set (match_operand:SI 0 "s_register_operand" "")
2822 (match_operator:SI 1 "shiftable_operator"
2823 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2824 (match_operand:SI 3 "const_int_operand" "")
2825 (match_operand:SI 4 "const_int_operand" ""))
2826 (match_operand:SI 5 "s_register_operand" "")]))
2827 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2829 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2832 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2835 HOST_WIDE_INT temp = INTVAL (operands[3]);
2837 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2838 operands[4] = GEN_INT (32 - temp);
2842 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2843 ;;; represented by the bitfield, then this will produce incorrect results.
2844 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2845 ;;; which have a real bit-field insert instruction, the truncation happens
2846 ;;; in the bit-field insert instruction itself. Since arm does not have a
2847 ;;; bit-field insert instruction, we would have to emit code here to truncate
2848 ;;; the value before we insert. This loses some of the advantage of having
2849 ;;; this insv pattern, so this pattern needs to be reevalutated.
2851 (define_expand "insv"
2852 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
2853 (match_operand 1 "general_operand")
2854 (match_operand 2 "general_operand"))
2855 (match_operand 3 "reg_or_int_operand"))]
2856 "TARGET_ARM || arm_arch_thumb2"
2859 int start_bit = INTVAL (operands[2]);
2860 int width = INTVAL (operands[1]);
2861 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
2862 rtx target, subtarget;
2864 if (arm_arch_thumb2)
2866 if (unaligned_access && MEM_P (operands[0])
2867 && s_register_operand (operands[3], GET_MODE (operands[3]))
2868 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2872 if (BYTES_BIG_ENDIAN)
2873 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2878 base_addr = adjust_address (operands[0], SImode,
2879 start_bit / BITS_PER_UNIT);
2880 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2884 rtx tmp = gen_reg_rtx (HImode);
2886 base_addr = adjust_address (operands[0], HImode,
2887 start_bit / BITS_PER_UNIT);
2888 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2889 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2893 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2895 bool use_bfi = TRUE;
2897 if (CONST_INT_P (operands[3]))
2899 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2903 emit_insn (gen_insv_zero (operands[0], operands[1],
2908 /* See if the set can be done with a single orr instruction. */
2909 if (val == mask && const_ok_for_arm (val << start_bit))
2915 if (!REG_P (operands[3]))
2916 operands[3] = force_reg (SImode, operands[3]);
2918 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2927 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
2930 target = copy_rtx (operands[0]);
2931 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2932 subreg as the final target. */
2933 if (GET_CODE (target) == SUBREG)
2935 subtarget = gen_reg_rtx (SImode);
2936 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2937 < GET_MODE_SIZE (SImode))
2938 target = SUBREG_REG (target);
2943 if (CONST_INT_P (operands[3]))
2945 /* Since we are inserting a known constant, we may be able to
2946 reduce the number of bits that we have to clear so that
2947 the mask becomes simple. */
2948 /* ??? This code does not check to see if the new mask is actually
2949 simpler. It may not be. */
2950 rtx op1 = gen_reg_rtx (SImode);
2951 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2952 start of this pattern. */
2953 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2954 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2956 emit_insn (gen_andsi3 (op1, operands[0],
2957 gen_int_mode (~mask2, SImode)));
2958 emit_insn (gen_iorsi3 (subtarget, op1,
2959 gen_int_mode (op3_value << start_bit, SImode)));
2961 else if (start_bit == 0
2962 && !(const_ok_for_arm (mask)
2963 || const_ok_for_arm (~mask)))
2965 /* A Trick, since we are setting the bottom bits in the word,
2966 we can shift operand[3] up, operand[0] down, OR them together
2967 and rotate the result back again. This takes 3 insns, and
2968 the third might be mergeable into another op. */
2969 /* The shift up copes with the possibility that operand[3] is
2970 wider than the bitfield. */
2971 rtx op0 = gen_reg_rtx (SImode);
2972 rtx op1 = gen_reg_rtx (SImode);
2974 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2975 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2976 emit_insn (gen_iorsi3 (op1, op1, op0));
2977 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2979 else if ((width + start_bit == 32)
2980 && !(const_ok_for_arm (mask)
2981 || const_ok_for_arm (~mask)))
2983 /* Similar trick, but slightly less efficient. */
2985 rtx op0 = gen_reg_rtx (SImode);
2986 rtx op1 = gen_reg_rtx (SImode);
2988 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2989 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2990 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2991 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2995 rtx op0 = gen_int_mode (mask, SImode);
2996 rtx op1 = gen_reg_rtx (SImode);
2997 rtx op2 = gen_reg_rtx (SImode);
2999 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
3001 rtx tmp = gen_reg_rtx (SImode);
3003 emit_insn (gen_movsi (tmp, op0));
3007 /* Mask out any bits in operand[3] that are not needed. */
3008 emit_insn (gen_andsi3 (op1, operands[3], op0));
3010 if (CONST_INT_P (op0)
3011 && (const_ok_for_arm (mask << start_bit)
3012 || const_ok_for_arm (~(mask << start_bit))))
3014 op0 = gen_int_mode (~(mask << start_bit), SImode);
3015 emit_insn (gen_andsi3 (op2, operands[0], op0));
3019 if (CONST_INT_P (op0))
3021 rtx tmp = gen_reg_rtx (SImode);
3023 emit_insn (gen_movsi (tmp, op0));
3028 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
3030 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
3034 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
3036 emit_insn (gen_iorsi3 (subtarget, op1, op2));
3039 if (subtarget != target)
3041 /* If TARGET is still a SUBREG, then it must be wider than a word,
3042 so we must be careful only to set the subword we were asked to. */
3043 if (GET_CODE (target) == SUBREG)
3044 emit_move_insn (target, subtarget);
3046 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
3053 (define_insn "insv_zero"
3054 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3055 (match_operand:SI 1 "const_int_M_operand" "M")
3056 (match_operand:SI 2 "const_int_M_operand" "M"))
3060 [(set_attr "length" "4")
3061 (set_attr "predicable" "yes")
3062 (set_attr "type" "bfm")]
3065 (define_insn "insv_t2"
3066 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3067 (match_operand:SI 1 "const_int_M_operand" "M")
3068 (match_operand:SI 2 "const_int_M_operand" "M"))
3069 (match_operand:SI 3 "s_register_operand" "r"))]
3071 "bfi%?\t%0, %3, %2, %1"
3072 [(set_attr "length" "4")
3073 (set_attr "predicable" "yes")
3074 (set_attr "type" "bfm")]
3077 (define_insn "andsi_notsi_si"
3078 [(set (match_operand:SI 0 "s_register_operand" "=r")
3079 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3080 (match_operand:SI 1 "s_register_operand" "r")))]
3082 "bic%?\\t%0, %1, %2"
3083 [(set_attr "predicable" "yes")
3084 (set_attr "type" "logic_reg")]
3087 (define_insn "andsi_not_shiftsi_si"
3088 [(set (match_operand:SI 0 "s_register_operand" "=r")
3089 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
3090 [(match_operand:SI 2 "s_register_operand" "r")
3091 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
3092 (match_operand:SI 1 "s_register_operand" "r")))]
3094 "bic%?\\t%0, %1, %2%S4"
3095 [(set_attr "predicable" "yes")
3096 (set_attr "shift" "2")
3097 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
3098 (const_string "logic_shift_imm")
3099 (const_string "logic_shift_reg")))]
3102 ;; Shifted bics pattern used to set up CC status register and not reusing
3103 ;; bics output. Pattern restricts Thumb2 shift operand as bics for Thumb2
3104 ;; does not support shift by register.
3105 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
3106 [(set (reg:CC_NOOV CC_REGNUM)
3108 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3109 [(match_operand:SI 1 "s_register_operand" "r")
3110 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3111 (match_operand:SI 3 "s_register_operand" "r"))
3113 (clobber (match_scratch:SI 4 "=r"))]
3114 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
3115 "bics%?\\t%4, %3, %1%S0"
3116 [(set_attr "predicable" "yes")
3117 (set_attr "conds" "set")
3118 (set_attr "shift" "1")
3119 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3120 (const_string "logic_shift_imm")
3121 (const_string "logic_shift_reg")))]
3124 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the bics result is also
3125 ;; getting reused later.
3126 (define_insn "andsi_not_shiftsi_si_scc"
3127 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
3129 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3130 [(match_operand:SI 1 "s_register_operand" "r")
3131 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3132 (match_operand:SI 3 "s_register_operand" "r"))
3134 (set (match_operand:SI 4 "s_register_operand" "=r")
3135 (and:SI (not:SI (match_op_dup 0
3139 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
3140 "bics%?\\t%4, %3, %1%S0"
3141 [(set_attr "predicable" "yes")
3142 (set_attr "conds" "set")
3143 (set_attr "shift" "1")
3144 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3145 (const_string "logic_shift_imm")
3146 (const_string "logic_shift_reg")))]
3149 (define_insn "*andsi_notsi_si_compare0"
3150 [(set (reg:CC_NOOV CC_REGNUM)
3152 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3153 (match_operand:SI 1 "s_register_operand" "r"))
3155 (set (match_operand:SI 0 "s_register_operand" "=r")
3156 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
3159 [(set_attr "conds" "set")
3160 (set_attr "type" "logics_shift_reg")]
3163 (define_insn "*andsi_notsi_si_compare0_scratch"
3164 [(set (reg:CC_NOOV CC_REGNUM)
3166 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3167 (match_operand:SI 1 "s_register_operand" "r"))
3169 (clobber (match_scratch:SI 0 "=r"))]
3172 [(set_attr "conds" "set")
3173 (set_attr "type" "logics_shift_reg")]
3176 (define_expand "iorsi3"
3177 [(set (match_operand:SI 0 "s_register_operand")
3178 (ior:SI (match_operand:SI 1 "s_register_operand")
3179 (match_operand:SI 2 "reg_or_int_operand")))]
3182 if (CONST_INT_P (operands[2]))
3186 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
3187 operands[2] = force_reg (SImode, operands[2]);
3190 arm_split_constant (IOR, SImode, NULL_RTX,
3191 INTVAL (operands[2]), operands[0],
3193 optimize && can_create_pseudo_p ());
3197 else /* TARGET_THUMB1 */
3199 rtx tmp = force_reg (SImode, operands[2]);
3200 if (rtx_equal_p (operands[0], operands[1]))
3204 operands[2] = operands[1];
3212 (define_insn_and_split "*iorsi3_insn"
3213 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
3214 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
3215 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
3220 orn%?\\t%0, %1, #%B2
3224 && CONST_INT_P (operands[2])
3225 && !(const_ok_for_arm (INTVAL (operands[2]))
3226 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
3227 [(clobber (const_int 0))]
3229 arm_split_constant (IOR, SImode, curr_insn,
3230 INTVAL (operands[2]), operands[0], operands[1], 0);
3233 [(set_attr "length" "4,4,4,4,16")
3234 (set_attr "arch" "32,t2,t2,32,32")
3235 (set_attr "predicable" "yes")
3236 (set_attr "predicable_short_it" "no,yes,no,no,no")
3237 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
3241 [(match_scratch:SI 3 "r")
3242 (set (match_operand:SI 0 "arm_general_register_operand" "")
3243 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
3244 (match_operand:SI 2 "const_int_operand" "")))]
3246 && !const_ok_for_arm (INTVAL (operands[2]))
3247 && const_ok_for_arm (~INTVAL (operands[2]))"
3248 [(set (match_dup 3) (match_dup 2))
3249 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
3253 (define_insn "*iorsi3_compare0"
3254 [(set (reg:CC_NOOV CC_REGNUM)
3256 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3257 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3259 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
3260 (ior:SI (match_dup 1) (match_dup 2)))]
3262 "orrs%?\\t%0, %1, %2"
3263 [(set_attr "conds" "set")
3264 (set_attr "arch" "*,t2,*")
3265 (set_attr "length" "4,2,4")
3266 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
3269 (define_insn "*iorsi3_compare0_scratch"
3270 [(set (reg:CC_NOOV CC_REGNUM)
3272 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3273 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3275 (clobber (match_scratch:SI 0 "=r,l,r"))]
3277 "orrs%?\\t%0, %1, %2"
3278 [(set_attr "conds" "set")
3279 (set_attr "arch" "*,t2,*")
3280 (set_attr "length" "4,2,4")
3281 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
3284 (define_expand "xorsi3"
3285 [(set (match_operand:SI 0 "s_register_operand")
3286 (xor:SI (match_operand:SI 1 "s_register_operand")
3287 (match_operand:SI 2 "reg_or_int_operand")))]
3289 "if (CONST_INT_P (operands[2]))
3293 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
3294 operands[2] = force_reg (SImode, operands[2]);
3297 arm_split_constant (XOR, SImode, NULL_RTX,
3298 INTVAL (operands[2]), operands[0],
3300 optimize && can_create_pseudo_p ());
3304 else /* TARGET_THUMB1 */
3306 rtx tmp = force_reg (SImode, operands[2]);
3307 if (rtx_equal_p (operands[0], operands[1]))
3311 operands[2] = operands[1];
3318 (define_insn_and_split "*arm_xorsi3"
3319 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
3320 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
3321 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
3329 && CONST_INT_P (operands[2])
3330 && !const_ok_for_arm (INTVAL (operands[2]))"
3331 [(clobber (const_int 0))]
3333 arm_split_constant (XOR, SImode, curr_insn,
3334 INTVAL (operands[2]), operands[0], operands[1], 0);
3337 [(set_attr "length" "4,4,4,16")
3338 (set_attr "predicable" "yes")
3339 (set_attr "predicable_short_it" "no,yes,no,no")
3340 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
3343 (define_insn "*xorsi3_compare0"
3344 [(set (reg:CC_NOOV CC_REGNUM)
3345 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
3346 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3348 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3349 (xor:SI (match_dup 1) (match_dup 2)))]
3351 "eors%?\\t%0, %1, %2"
3352 [(set_attr "conds" "set")
3353 (set_attr "type" "logics_imm,logics_reg")]
3356 (define_insn "*xorsi3_compare0_scratch"
3357 [(set (reg:CC_NOOV CC_REGNUM)
3358 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
3359 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
3363 [(set_attr "conds" "set")
3364 (set_attr "type" "logics_imm,logics_reg")]
3367 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
3368 ; (NOT D) we can sometimes merge the final NOT into one of the following
3372 [(set (match_operand:SI 0 "s_register_operand" "")
3373 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3374 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3375 (match_operand:SI 3 "arm_rhs_operand" "")))
3376 (clobber (match_operand:SI 4 "s_register_operand" ""))]
3378 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3379 (not:SI (match_dup 3))))
3380 (set (match_dup 0) (not:SI (match_dup 4)))]
3384 (define_insn_and_split "*andsi_iorsi3_notsi"
3385 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3386 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3387 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3388 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3390 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3391 "&& reload_completed"
3392 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
3393 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
3395 /* If operands[3] is a constant make sure to fold the NOT into it
3396 to avoid creating a NOT of a CONST_INT. */
3397 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
3398 if (CONST_INT_P (not_rtx))
3400 operands[4] = operands[0];
3401 operands[5] = not_rtx;
3405 operands[5] = operands[0];
3406 operands[4] = not_rtx;
3409 [(set_attr "length" "8")
3410 (set_attr "ce_count" "2")
3411 (set_attr "predicable" "yes")
3412 (set_attr "type" "multiple")]
3415 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3416 ; insns are available?
3418 [(set (match_operand:SI 0 "s_register_operand" "")
3419 (match_operator:SI 1 "logical_binary_operator"
3420 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3421 (match_operand:SI 3 "const_int_operand" "")
3422 (match_operand:SI 4 "const_int_operand" ""))
3423 (match_operator:SI 9 "logical_binary_operator"
3424 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3425 (match_operand:SI 6 "const_int_operand" ""))
3426 (match_operand:SI 7 "s_register_operand" "")])]))
3427 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3429 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3430 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3433 [(ashift:SI (match_dup 2) (match_dup 4))
3437 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3440 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3444 [(set (match_operand:SI 0 "s_register_operand" "")
3445 (match_operator:SI 1 "logical_binary_operator"
3446 [(match_operator:SI 9 "logical_binary_operator"
3447 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3448 (match_operand:SI 6 "const_int_operand" ""))
3449 (match_operand:SI 7 "s_register_operand" "")])
3450 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3451 (match_operand:SI 3 "const_int_operand" "")
3452 (match_operand:SI 4 "const_int_operand" ""))]))
3453 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3455 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3456 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3459 [(ashift:SI (match_dup 2) (match_dup 4))
3463 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3466 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3470 [(set (match_operand:SI 0 "s_register_operand" "")
3471 (match_operator:SI 1 "logical_binary_operator"
3472 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3473 (match_operand:SI 3 "const_int_operand" "")
3474 (match_operand:SI 4 "const_int_operand" ""))
3475 (match_operator:SI 9 "logical_binary_operator"
3476 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3477 (match_operand:SI 6 "const_int_operand" ""))
3478 (match_operand:SI 7 "s_register_operand" "")])]))
3479 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3481 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3482 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3485 [(ashift:SI (match_dup 2) (match_dup 4))
3489 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3492 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3496 [(set (match_operand:SI 0 "s_register_operand" "")
3497 (match_operator:SI 1 "logical_binary_operator"
3498 [(match_operator:SI 9 "logical_binary_operator"
3499 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3500 (match_operand:SI 6 "const_int_operand" ""))
3501 (match_operand:SI 7 "s_register_operand" "")])
3502 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3503 (match_operand:SI 3 "const_int_operand" "")
3504 (match_operand:SI 4 "const_int_operand" ""))]))
3505 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3507 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3508 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3511 [(ashift:SI (match_dup 2) (match_dup 4))
3515 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3518 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3522 ;; Minimum and maximum insns
3524 (define_expand "smaxsi3"
3526 (set (match_operand:SI 0 "s_register_operand")
3527 (smax:SI (match_operand:SI 1 "s_register_operand")
3528 (match_operand:SI 2 "arm_rhs_operand")))
3529 (clobber (reg:CC CC_REGNUM))])]
3532 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3534 /* No need for a clobber of the condition code register here. */
3535 emit_insn (gen_rtx_SET (operands[0],
3536 gen_rtx_SMAX (SImode, operands[1],
3542 (define_insn "*smax_0"
3543 [(set (match_operand:SI 0 "s_register_operand" "=r")
3544 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3547 "bic%?\\t%0, %1, %1, asr #31"
3548 [(set_attr "predicable" "yes")
3549 (set_attr "type" "logic_shift_reg")]
3552 (define_insn "*smax_m1"
3553 [(set (match_operand:SI 0 "s_register_operand" "=r")
3554 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3557 "orr%?\\t%0, %1, %1, asr #31"
3558 [(set_attr "predicable" "yes")
3559 (set_attr "type" "logic_shift_reg")]
3562 (define_insn_and_split "*arm_smax_insn"
3563 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3564 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3565 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3566 (clobber (reg:CC CC_REGNUM))]
3569 ; cmp\\t%1, %2\;movlt\\t%0, %2
3570 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3572 [(set (reg:CC CC_REGNUM)
3573 (compare:CC (match_dup 1) (match_dup 2)))
3575 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
3579 [(set_attr "conds" "clob")
3580 (set_attr "length" "8,12")
3581 (set_attr "type" "multiple")]
3584 (define_expand "sminsi3"
3586 (set (match_operand:SI 0 "s_register_operand")
3587 (smin:SI (match_operand:SI 1 "s_register_operand")
3588 (match_operand:SI 2 "arm_rhs_operand")))
3589 (clobber (reg:CC CC_REGNUM))])]
3592 if (operands[2] == const0_rtx)
3594 /* No need for a clobber of the condition code register here. */
3595 emit_insn (gen_rtx_SET (operands[0],
3596 gen_rtx_SMIN (SImode, operands[1],
3602 (define_insn "*smin_0"
3603 [(set (match_operand:SI 0 "s_register_operand" "=r")
3604 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3607 "and%?\\t%0, %1, %1, asr #31"
3608 [(set_attr "predicable" "yes")
3609 (set_attr "type" "logic_shift_reg")]
3612 (define_insn_and_split "*arm_smin_insn"
3613 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3614 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3615 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3616 (clobber (reg:CC CC_REGNUM))]
3619 ; cmp\\t%1, %2\;movge\\t%0, %2
3620 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3622 [(set (reg:CC CC_REGNUM)
3623 (compare:CC (match_dup 1) (match_dup 2)))
3625 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
3629 [(set_attr "conds" "clob")
3630 (set_attr "length" "8,12")
3631 (set_attr "type" "multiple,multiple")]
3634 (define_expand "umaxsi3"
3636 (set (match_operand:SI 0 "s_register_operand")
3637 (umax:SI (match_operand:SI 1 "s_register_operand")
3638 (match_operand:SI 2 "arm_rhs_operand")))
3639 (clobber (reg:CC CC_REGNUM))])]
3644 (define_insn_and_split "*arm_umaxsi3"
3645 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3646 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3647 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3648 (clobber (reg:CC CC_REGNUM))]
3651 ; cmp\\t%1, %2\;movcc\\t%0, %2
3652 ; cmp\\t%1, %2\;movcs\\t%0, %1
3653 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3655 [(set (reg:CC CC_REGNUM)
3656 (compare:CC (match_dup 1) (match_dup 2)))
3658 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
3662 [(set_attr "conds" "clob")
3663 (set_attr "length" "8,8,12")
3664 (set_attr "type" "store_4")]
3667 (define_expand "uminsi3"
3669 (set (match_operand:SI 0 "s_register_operand")
3670 (umin:SI (match_operand:SI 1 "s_register_operand")
3671 (match_operand:SI 2 "arm_rhs_operand")))
3672 (clobber (reg:CC CC_REGNUM))])]
3677 (define_insn_and_split "*arm_uminsi3"
3678 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3679 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3680 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3681 (clobber (reg:CC CC_REGNUM))]
3684 ; cmp\\t%1, %2\;movcs\\t%0, %2
3685 ; cmp\\t%1, %2\;movcc\\t%0, %1
3686 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3688 [(set (reg:CC CC_REGNUM)
3689 (compare:CC (match_dup 1) (match_dup 2)))
3691 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
3695 [(set_attr "conds" "clob")
3696 (set_attr "length" "8,8,12")
3697 (set_attr "type" "store_4")]
3700 (define_insn "*store_minmaxsi"
3701 [(set (match_operand:SI 0 "memory_operand" "=m")
3702 (match_operator:SI 3 "minmax_operator"
3703 [(match_operand:SI 1 "s_register_operand" "r")
3704 (match_operand:SI 2 "s_register_operand" "r")]))
3705 (clobber (reg:CC CC_REGNUM))]
3706 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
3708 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3709 operands[1], operands[2]);
3710 output_asm_insn (\"cmp\\t%1, %2\", operands);
3712 output_asm_insn (\"ite\t%d3\", operands);
3713 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3714 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3717 [(set_attr "conds" "clob")
3718 (set (attr "length")
3719 (if_then_else (eq_attr "is_thumb" "yes")
3722 (set_attr "type" "store_4")]
3725 ; Reject the frame pointer in operand[1], since reloading this after
3726 ; it has been eliminated can cause carnage.
3727 (define_insn "*minmax_arithsi"
3728 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3729 (match_operator:SI 4 "shiftable_operator"
3730 [(match_operator:SI 5 "minmax_operator"
3731 [(match_operand:SI 2 "s_register_operand" "r,r")
3732 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3733 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3734 (clobber (reg:CC CC_REGNUM))]
3735 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
3738 enum rtx_code code = GET_CODE (operands[4]);
3741 if (which_alternative != 0 || operands[3] != const0_rtx
3742 || (code != PLUS && code != IOR && code != XOR))
3747 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3748 operands[2], operands[3]);
3749 output_asm_insn (\"cmp\\t%2, %3\", operands);
3753 output_asm_insn (\"ite\\t%d5\", operands);
3755 output_asm_insn (\"it\\t%d5\", operands);
3757 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3759 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3762 [(set_attr "conds" "clob")
3763 (set (attr "length")
3764 (if_then_else (eq_attr "is_thumb" "yes")
3767 (set_attr "type" "multiple")]
3770 ; Reject the frame pointer in operand[1], since reloading this after
3771 ; it has been eliminated can cause carnage.
3772 (define_insn_and_split "*minmax_arithsi_non_canon"
3773 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
3775 (match_operand:SI 1 "s_register_operand" "0,?Ts")
3776 (match_operator:SI 4 "minmax_operator"
3777 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
3778 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
3779 (clobber (reg:CC CC_REGNUM))]
3780 "TARGET_32BIT && !arm_eliminable_register (operands[1])
3781 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
3783 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
3784 [(set (reg:CC CC_REGNUM)
3785 (compare:CC (match_dup 2) (match_dup 3)))
3787 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
3789 (minus:SI (match_dup 1)
3791 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
3795 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
3796 operands[2], operands[3]);
3797 enum rtx_code rc = minmax_code (operands[4]);
3798 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
3799 operands[2], operands[3]);
3801 if (mode == CCFPmode || mode == CCFPEmode)
3802 rc = reverse_condition_maybe_unordered (rc);
3804 rc = reverse_condition (rc);
3805 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
3806 if (CONST_INT_P (operands[3]))
3807 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
3809 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
3811 [(set_attr "conds" "clob")
3812 (set (attr "length")
3813 (if_then_else (eq_attr "is_thumb" "yes")
3816 (set_attr "type" "multiple")]
3819 (define_code_iterator SAT [smin smax])
3820 (define_code_attr SATrev [(smin "smax") (smax "smin")])
3821 (define_code_attr SATlo [(smin "1") (smax "2")])
3822 (define_code_attr SAThi [(smin "2") (smax "1")])
3824 (define_insn "*satsi_<SAT:code>"
3825 [(set (match_operand:SI 0 "s_register_operand" "=r")
3826 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
3827 (match_operand:SI 1 "const_int_operand" "i"))
3828 (match_operand:SI 2 "const_int_operand" "i")))]
3829 "TARGET_32BIT && arm_arch6
3830 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3834 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3835 &mask, &signed_sat))
3838 operands[1] = GEN_INT (mask);
3840 return "ssat%?\t%0, %1, %3";
3842 return "usat%?\t%0, %1, %3";
3844 [(set_attr "predicable" "yes")
3845 (set_attr "type" "alus_imm")]
3848 (define_insn "*satsi_<SAT:code>_shift"
3849 [(set (match_operand:SI 0 "s_register_operand" "=r")
3850 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
3851 [(match_operand:SI 4 "s_register_operand" "r")
3852 (match_operand:SI 5 "const_int_operand" "i")])
3853 (match_operand:SI 1 "const_int_operand" "i"))
3854 (match_operand:SI 2 "const_int_operand" "i")))]
3855 "TARGET_32BIT && arm_arch6
3856 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3860 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3861 &mask, &signed_sat))
3864 operands[1] = GEN_INT (mask);
3866 return "ssat%?\t%0, %1, %4%S3";
3868 return "usat%?\t%0, %1, %4%S3";
3870 [(set_attr "predicable" "yes")
3871 (set_attr "shift" "3")
3872 (set_attr "type" "logic_shift_reg")])
3874 ;; Shift and rotation insns
3876 (define_expand "ashldi3"
3877 [(set (match_operand:DI 0 "s_register_operand")
3878 (ashift:DI (match_operand:DI 1 "s_register_operand")
3879 (match_operand:SI 2 "reg_or_int_operand")))]
3882 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
3883 operands[2], gen_reg_rtx (SImode),
3884 gen_reg_rtx (SImode));
3888 (define_expand "ashlsi3"
3889 [(set (match_operand:SI 0 "s_register_operand")
3890 (ashift:SI (match_operand:SI 1 "s_register_operand")
3891 (match_operand:SI 2 "arm_rhs_operand")))]
3894 if (CONST_INT_P (operands[2])
3895 && (UINTVAL (operands[2])) > 31)
3897 emit_insn (gen_movsi (operands[0], const0_rtx));
3903 (define_expand "ashrdi3"
3904 [(set (match_operand:DI 0 "s_register_operand")
3905 (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
3906 (match_operand:SI 2 "reg_or_int_operand")))]
3909 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
3910 operands[2], gen_reg_rtx (SImode),
3911 gen_reg_rtx (SImode));
3915 (define_expand "ashrsi3"
3916 [(set (match_operand:SI 0 "s_register_operand")
3917 (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
3918 (match_operand:SI 2 "arm_rhs_operand")))]
3921 if (CONST_INT_P (operands[2])
3922 && UINTVAL (operands[2]) > 31)
3923 operands[2] = GEN_INT (31);
3927 (define_expand "lshrdi3"
3928 [(set (match_operand:DI 0 "s_register_operand")
3929 (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
3930 (match_operand:SI 2 "reg_or_int_operand")))]
3933 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
3934 operands[2], gen_reg_rtx (SImode),
3935 gen_reg_rtx (SImode));
3939 (define_expand "lshrsi3"
3940 [(set (match_operand:SI 0 "s_register_operand")
3941 (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
3942 (match_operand:SI 2 "arm_rhs_operand")))]
3945 if (CONST_INT_P (operands[2])
3946 && (UINTVAL (operands[2])) > 31)
3948 emit_insn (gen_movsi (operands[0], const0_rtx));
3954 (define_expand "rotlsi3"
3955 [(set (match_operand:SI 0 "s_register_operand")
3956 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3957 (match_operand:SI 2 "reg_or_int_operand")))]
3960 if (CONST_INT_P (operands[2]))
3961 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3964 rtx reg = gen_reg_rtx (SImode);
3965 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3971 (define_expand "rotrsi3"
3972 [(set (match_operand:SI 0 "s_register_operand")
3973 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3974 (match_operand:SI 2 "arm_rhs_operand")))]
3979 if (CONST_INT_P (operands[2])
3980 && UINTVAL (operands[2]) > 31)
3981 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3983 else /* TARGET_THUMB1 */
3985 if (CONST_INT_P (operands [2]))
3986 operands [2] = force_reg (SImode, operands[2]);
3991 (define_insn "*arm_shiftsi3"
3992 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
3993 (match_operator:SI 3 "shift_operator"
3994 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
3995 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
3997 "* return arm_output_shift(operands, 0);"
3998 [(set_attr "predicable" "yes")
3999 (set_attr "arch" "t2,t2,*,*")
4000 (set_attr "predicable_short_it" "yes,yes,no,no")
4001 (set_attr "length" "4")
4002 (set_attr "shift" "1")
4003 (set_attr "type" "alu_shift_reg,alu_shift_imm,alu_shift_imm,alu_shift_reg")]
4006 (define_insn "*shiftsi3_compare0"
4007 [(set (reg:CC_NOOV CC_REGNUM)
4008 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4009 [(match_operand:SI 1 "s_register_operand" "r,r")
4010 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4012 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4013 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
4015 "* return arm_output_shift(operands, 1);"
4016 [(set_attr "conds" "set")
4017 (set_attr "shift" "1")
4018 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
4021 (define_insn "*shiftsi3_compare0_scratch"
4022 [(set (reg:CC_NOOV CC_REGNUM)
4023 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4024 [(match_operand:SI 1 "s_register_operand" "r,r")
4025 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4027 (clobber (match_scratch:SI 0 "=r,r"))]
4029 "* return arm_output_shift(operands, 1);"
4030 [(set_attr "conds" "set")
4031 (set_attr "shift" "1")
4032 (set_attr "type" "shift_imm,shift_reg")]
4035 (define_insn "*not_shiftsi"
4036 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4037 (not:SI (match_operator:SI 3 "shift_operator"
4038 [(match_operand:SI 1 "s_register_operand" "r,r")
4039 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
4042 [(set_attr "predicable" "yes")
4043 (set_attr "shift" "1")
4044 (set_attr "arch" "32,a")
4045 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4047 (define_insn "*not_shiftsi_compare0"
4048 [(set (reg:CC_NOOV CC_REGNUM)
4050 (not:SI (match_operator:SI 3 "shift_operator"
4051 [(match_operand:SI 1 "s_register_operand" "r,r")
4052 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
4054 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4055 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
4057 "mvns%?\\t%0, %1%S3"
4058 [(set_attr "conds" "set")
4059 (set_attr "shift" "1")
4060 (set_attr "arch" "32,a")
4061 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4063 (define_insn "*not_shiftsi_compare0_scratch"
4064 [(set (reg:CC_NOOV CC_REGNUM)
4066 (not:SI (match_operator:SI 3 "shift_operator"
4067 [(match_operand:SI 1 "s_register_operand" "r,r")
4068 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
4070 (clobber (match_scratch:SI 0 "=r,r"))]
4072 "mvns%?\\t%0, %1%S3"
4073 [(set_attr "conds" "set")
4074 (set_attr "shift" "1")
4075 (set_attr "arch" "32,a")
4076 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4078 ;; We don't really have extzv, but defining this using shifts helps
4079 ;; to reduce register pressure later on.
4081 (define_expand "extzv"
4082 [(set (match_operand 0 "s_register_operand")
4083 (zero_extract (match_operand 1 "nonimmediate_operand")
4084 (match_operand 2 "const_int_operand")
4085 (match_operand 3 "const_int_operand")))]
4086 "TARGET_THUMB1 || arm_arch_thumb2"
4089 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
4090 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
4092 if (arm_arch_thumb2)
4094 HOST_WIDE_INT width = INTVAL (operands[2]);
4095 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4097 if (unaligned_access && MEM_P (operands[1])
4098 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
4102 if (BYTES_BIG_ENDIAN)
4103 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
4108 base_addr = adjust_address (operands[1], SImode,
4109 bitpos / BITS_PER_UNIT);
4110 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4114 rtx dest = operands[0];
4115 rtx tmp = gen_reg_rtx (SImode);
4117 /* We may get a paradoxical subreg here. Strip it off. */
4118 if (GET_CODE (dest) == SUBREG
4119 && GET_MODE (dest) == SImode
4120 && GET_MODE (SUBREG_REG (dest)) == HImode)
4121 dest = SUBREG_REG (dest);
4123 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4126 base_addr = adjust_address (operands[1], HImode,
4127 bitpos / BITS_PER_UNIT);
4128 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
4129 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4133 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
4135 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
4143 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4146 operands[3] = GEN_INT (rshift);
4150 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
4154 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
4155 operands[3], gen_reg_rtx (SImode)));
4160 ;; Helper for extzv, for the Thumb-1 register-shifts case.
4162 (define_expand "extzv_t1"
4163 [(set (match_operand:SI 4 "s_register_operand")
4164 (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
4165 (match_operand:SI 2 "const_int_operand")))
4166 (set (match_operand:SI 0 "s_register_operand")
4167 (lshiftrt:SI (match_dup 4)
4168 (match_operand:SI 3 "const_int_operand")))]
4172 (define_expand "extv"
4173 [(set (match_operand 0 "s_register_operand")
4174 (sign_extract (match_operand 1 "nonimmediate_operand")
4175 (match_operand 2 "const_int_operand")
4176 (match_operand 3 "const_int_operand")))]
4179 HOST_WIDE_INT width = INTVAL (operands[2]);
4180 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4182 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
4183 && (bitpos % BITS_PER_UNIT) == 0)
4187 if (BYTES_BIG_ENDIAN)
4188 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
4192 base_addr = adjust_address (operands[1], SImode,
4193 bitpos / BITS_PER_UNIT);
4194 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4198 rtx dest = operands[0];
4199 rtx tmp = gen_reg_rtx (SImode);
4201 /* We may get a paradoxical subreg here. Strip it off. */
4202 if (GET_CODE (dest) == SUBREG
4203 && GET_MODE (dest) == SImode
4204 && GET_MODE (SUBREG_REG (dest)) == HImode)
4205 dest = SUBREG_REG (dest);
4207 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4210 base_addr = adjust_address (operands[1], HImode,
4211 bitpos / BITS_PER_UNIT);
4212 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
4213 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4218 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4220 else if (GET_MODE (operands[0]) == SImode
4221 && GET_MODE (operands[1]) == SImode)
4223 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
4231 ; Helper to expand register forms of extv with the proper modes.
4233 (define_expand "extv_regsi"
4234 [(set (match_operand:SI 0 "s_register_operand")
4235 (sign_extract:SI (match_operand:SI 1 "s_register_operand")
4236 (match_operand 2 "const_int_operand")
4237 (match_operand 3 "const_int_operand")))]
4242 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
4244 (define_insn "unaligned_loaddi"
4245 [(set (match_operand:DI 0 "s_register_operand" "=r")
4246 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
4247 UNSPEC_UNALIGNED_LOAD))]
4248 "TARGET_32BIT && TARGET_LDRD"
4250 return output_move_double (operands, true, NULL);
4252 [(set_attr "length" "8")
4253 (set_attr "type" "load_8")])
4255 (define_insn "unaligned_loadsi"
4256 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4257 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
4258 UNSPEC_UNALIGNED_LOAD))]
4261 ldr\t%0, %1\t@ unaligned
4262 ldr%?\t%0, %1\t@ unaligned
4263 ldr%?\t%0, %1\t@ unaligned"
4264 [(set_attr "arch" "t1,t2,32")
4265 (set_attr "length" "2,2,4")
4266 (set_attr "predicable" "no,yes,yes")
4267 (set_attr "predicable_short_it" "no,yes,no")
4268 (set_attr "type" "load_4")])
4270 ;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
4271 ;; address (there's no immediate format). That's tricky to support
4272 ;; here and we don't really need this pattern for that case, so only
4273 ;; enable for 32-bit ISAs.
4274 (define_insn "unaligned_loadhis"
4275 [(set (match_operand:SI 0 "s_register_operand" "=r")
4277 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
4278 UNSPEC_UNALIGNED_LOAD)))]
4279 "unaligned_access && TARGET_32BIT"
4280 "ldrsh%?\t%0, %1\t@ unaligned"
4281 [(set_attr "predicable" "yes")
4282 (set_attr "type" "load_byte")])
4284 (define_insn "unaligned_loadhiu"
4285 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4287 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
4288 UNSPEC_UNALIGNED_LOAD)))]
4291 ldrh\t%0, %1\t@ unaligned
4292 ldrh%?\t%0, %1\t@ unaligned
4293 ldrh%?\t%0, %1\t@ unaligned"
4294 [(set_attr "arch" "t1,t2,32")
4295 (set_attr "length" "2,2,4")
4296 (set_attr "predicable" "no,yes,yes")
4297 (set_attr "predicable_short_it" "no,yes,no")
4298 (set_attr "type" "load_byte")])
4300 (define_insn "unaligned_storedi"
4301 [(set (match_operand:DI 0 "memory_operand" "=m")
4302 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
4303 UNSPEC_UNALIGNED_STORE))]
4304 "TARGET_32BIT && TARGET_LDRD"
4306 return output_move_double (operands, true, NULL);
4308 [(set_attr "length" "8")
4309 (set_attr "type" "store_8")])
4311 (define_insn "unaligned_storesi"
4312 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
4313 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
4314 UNSPEC_UNALIGNED_STORE))]
4317 str\t%1, %0\t@ unaligned
4318 str%?\t%1, %0\t@ unaligned
4319 str%?\t%1, %0\t@ unaligned"
4320 [(set_attr "arch" "t1,t2,32")
4321 (set_attr "length" "2,2,4")
4322 (set_attr "predicable" "no,yes,yes")
4323 (set_attr "predicable_short_it" "no,yes,no")
4324 (set_attr "type" "store_4")])
4326 (define_insn "unaligned_storehi"
4327 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
4328 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
4329 UNSPEC_UNALIGNED_STORE))]
4332 strh\t%1, %0\t@ unaligned
4333 strh%?\t%1, %0\t@ unaligned
4334 strh%?\t%1, %0\t@ unaligned"
4335 [(set_attr "arch" "t1,t2,32")
4336 (set_attr "length" "2,2,4")
4337 (set_attr "predicable" "no,yes,yes")
4338 (set_attr "predicable_short_it" "no,yes,no")
4339 (set_attr "type" "store_4")])
4342 (define_insn "*extv_reg"
4343 [(set (match_operand:SI 0 "s_register_operand" "=r")
4344 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4345 (match_operand:SI 2 "const_int_operand" "n")
4346 (match_operand:SI 3 "const_int_operand" "n")))]
4348 && IN_RANGE (INTVAL (operands[3]), 0, 31)
4349 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
4350 "sbfx%?\t%0, %1, %3, %2"
4351 [(set_attr "length" "4")
4352 (set_attr "predicable" "yes")
4353 (set_attr "type" "bfm")]
4356 (define_insn "extzv_t2"
4357 [(set (match_operand:SI 0 "s_register_operand" "=r")
4358 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4359 (match_operand:SI 2 "const_int_operand" "n")
4360 (match_operand:SI 3 "const_int_operand" "n")))]
4362 && IN_RANGE (INTVAL (operands[3]), 0, 31)
4363 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
4364 "ubfx%?\t%0, %1, %3, %2"
4365 [(set_attr "length" "4")
4366 (set_attr "predicable" "yes")
4367 (set_attr "type" "bfm")]
4371 ;; Division instructions
4372 (define_insn "divsi3"
4373 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4374 (div:SI (match_operand:SI 1 "s_register_operand" "r,r")
4375 (match_operand:SI 2 "s_register_operand" "r,r")))]
4380 [(set_attr "arch" "32,v8mb")
4381 (set_attr "predicable" "yes")
4382 (set_attr "type" "sdiv")]
4385 (define_insn "udivsi3"
4386 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4387 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
4388 (match_operand:SI 2 "s_register_operand" "r,r")))]
4393 [(set_attr "arch" "32,v8mb")
4394 (set_attr "predicable" "yes")
4395 (set_attr "type" "udiv")]
4399 ;; Unary arithmetic insns
4401 (define_expand "negvsi3"
4402 [(match_operand:SI 0 "register_operand")
4403 (match_operand:SI 1 "register_operand")
4404 (match_operand 2 "")]
4407 emit_insn (gen_subsi3_compare (operands[0], const0_rtx, operands[1]));
4408 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
4413 (define_expand "negvdi3"
4414 [(match_operand:DI 0 "s_register_operand")
4415 (match_operand:DI 1 "s_register_operand")
4416 (match_operand 2 "")]
4419 emit_insn (gen_negdi2_compare (operands[0], operands[1]));
4420 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
4426 (define_insn "negdi2_compare"
4427 [(set (reg:CC CC_REGNUM)
4430 (match_operand:DI 1 "register_operand" "r,r")))
4431 (set (match_operand:DI 0 "register_operand" "=&r,&r")
4432 (minus:DI (const_int 0) (match_dup 1)))]
4435 rsbs\\t%Q0, %Q1, #0;rscs\\t%R0, %R1, #0
4436 rsbs\\t%Q0, %Q1, #0;sbcs\\t%R0, %R1, %R1, lsl #1"
4437 [(set_attr "conds" "set")
4438 (set_attr "arch" "a,t2")
4439 (set_attr "length" "8")
4440 (set_attr "type" "multiple")]
4443 (define_expand "negsi2"
4444 [(set (match_operand:SI 0 "s_register_operand")
4445 (neg:SI (match_operand:SI 1 "s_register_operand")))]
4450 (define_insn "*arm_negsi2"
4451 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4452 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4454 "rsb%?\\t%0, %1, #0"
4455 [(set_attr "predicable" "yes")
4456 (set_attr "predicable_short_it" "yes,no")
4457 (set_attr "arch" "t2,*")
4458 (set_attr "length" "4")
4459 (set_attr "type" "alu_imm")]
4462 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
4463 ;; rather than (0 cmp reg). This gives the same results for unsigned
4464 ;; and equality compares which is what we mostly need here.
4465 (define_insn "negsi2_0compare"
4466 [(set (reg:CC_RSB CC_REGNUM)
4467 (compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r"))
4469 (set (match_operand:SI 0 "s_register_operand" "=l,r")
4470 (neg:SI (match_dup 1)))]
4475 [(set_attr "conds" "set")
4476 (set_attr "arch" "t2,*")
4477 (set_attr "length" "2,*")
4478 (set_attr "type" "alus_imm")]
4481 (define_insn "negsi2_carryin"
4482 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4483 (minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r"))
4484 (match_operand:SI 2 "arm_borrow_operation" "")))]
4488 sbc\\t%0, %1, %1, lsl #1"
4489 [(set_attr "conds" "use")
4490 (set_attr "arch" "a,t2")
4491 (set_attr "type" "adc_imm,adc_reg")]
4494 (define_expand "negsf2"
4495 [(set (match_operand:SF 0 "s_register_operand")
4496 (neg:SF (match_operand:SF 1 "s_register_operand")))]
4497 "TARGET_32BIT && TARGET_HARD_FLOAT"
4501 (define_expand "negdf2"
4502 [(set (match_operand:DF 0 "s_register_operand")
4503 (neg:DF (match_operand:DF 1 "s_register_operand")))]
4504 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4507 ;; abssi2 doesn't really clobber the condition codes if a different register
4508 ;; is being set. To keep things simple, assume during rtl manipulations that
4509 ;; it does, but tell the final scan operator the truth. Similarly for
4512 (define_expand "abssi2"
4514 [(set (match_operand:SI 0 "s_register_operand")
4515 (abs:SI (match_operand:SI 1 "s_register_operand")))
4516 (clobber (match_dup 2))])]
4520 operands[2] = gen_rtx_SCRATCH (SImode);
4522 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
4525 (define_insn_and_split "*arm_abssi2"
4526 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4527 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
4528 (clobber (reg:CC CC_REGNUM))]
4531 "&& reload_completed"
4534 /* if (which_alternative == 0) */
4535 if (REGNO(operands[0]) == REGNO(operands[1]))
4537 /* Emit the pattern:
4538 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
4539 [(set (reg:CC CC_REGNUM)
4540 (compare:CC (match_dup 0) (const_int 0)))
4541 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
4542 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
4544 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
4545 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4546 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4547 (gen_rtx_LT (SImode,
4548 gen_rtx_REG (CCmode, CC_REGNUM),
4550 (gen_rtx_SET (operands[0],
4551 (gen_rtx_MINUS (SImode,
4558 /* Emit the pattern:
4559 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
4561 (xor:SI (match_dup 1)
4562 (ashiftrt:SI (match_dup 1) (const_int 31))))
4564 (minus:SI (match_dup 0)
4565 (ashiftrt:SI (match_dup 1) (const_int 31))))]
4567 emit_insn (gen_rtx_SET (operands[0],
4568 gen_rtx_XOR (SImode,
4569 gen_rtx_ASHIFTRT (SImode,
4573 emit_insn (gen_rtx_SET (operands[0],
4574 gen_rtx_MINUS (SImode,
4576 gen_rtx_ASHIFTRT (SImode,
4582 [(set_attr "conds" "clob,*")
4583 (set_attr "shift" "1")
4584 (set_attr "predicable" "no, yes")
4585 (set_attr "length" "8")
4586 (set_attr "type" "multiple")]
4589 (define_insn_and_split "*arm_neg_abssi2"
4590 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4591 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
4592 (clobber (reg:CC CC_REGNUM))]
4595 "&& reload_completed"
4598 /* if (which_alternative == 0) */
4599 if (REGNO (operands[0]) == REGNO (operands[1]))
4601 /* Emit the pattern:
4602 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
4604 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
4605 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4606 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4608 gen_rtx_REG (CCmode, CC_REGNUM),
4610 gen_rtx_SET (operands[0],
4611 (gen_rtx_MINUS (SImode,
4617 /* Emit the pattern:
4618 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
4620 emit_insn (gen_rtx_SET (operands[0],
4621 gen_rtx_XOR (SImode,
4622 gen_rtx_ASHIFTRT (SImode,
4626 emit_insn (gen_rtx_SET (operands[0],
4627 gen_rtx_MINUS (SImode,
4628 gen_rtx_ASHIFTRT (SImode,
4635 [(set_attr "conds" "clob,*")
4636 (set_attr "shift" "1")
4637 (set_attr "predicable" "no, yes")
4638 (set_attr "length" "8")
4639 (set_attr "type" "multiple")]
4642 (define_expand "abssf2"
4643 [(set (match_operand:SF 0 "s_register_operand")
4644 (abs:SF (match_operand:SF 1 "s_register_operand")))]
4645 "TARGET_32BIT && TARGET_HARD_FLOAT"
4648 (define_expand "absdf2"
4649 [(set (match_operand:DF 0 "s_register_operand")
4650 (abs:DF (match_operand:DF 1 "s_register_operand")))]
4651 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4654 (define_expand "sqrtsf2"
4655 [(set (match_operand:SF 0 "s_register_operand")
4656 (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
4657 "TARGET_32BIT && TARGET_HARD_FLOAT"
4660 (define_expand "sqrtdf2"
4661 [(set (match_operand:DF 0 "s_register_operand")
4662 (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
4663 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4666 (define_expand "one_cmplsi2"
4667 [(set (match_operand:SI 0 "s_register_operand")
4668 (not:SI (match_operand:SI 1 "s_register_operand")))]
4673 (define_insn "*arm_one_cmplsi2"
4674 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4675 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4678 [(set_attr "predicable" "yes")
4679 (set_attr "predicable_short_it" "yes,no")
4680 (set_attr "arch" "t2,*")
4681 (set_attr "length" "4")
4682 (set_attr "type" "mvn_reg")]
4685 (define_insn "*notsi_compare0"
4686 [(set (reg:CC_NOOV CC_REGNUM)
4687 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4689 (set (match_operand:SI 0 "s_register_operand" "=r")
4690 (not:SI (match_dup 1)))]
4693 [(set_attr "conds" "set")
4694 (set_attr "type" "mvn_reg")]
4697 (define_insn "*notsi_compare0_scratch"
4698 [(set (reg:CC_NOOV CC_REGNUM)
4699 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4701 (clobber (match_scratch:SI 0 "=r"))]
4704 [(set_attr "conds" "set")
4705 (set_attr "type" "mvn_reg")]
4708 ;; Fixed <--> Floating conversion insns
4710 (define_expand "floatsihf2"
4711 [(set (match_operand:HF 0 "general_operand")
4712 (float:HF (match_operand:SI 1 "general_operand")))]
4716 rtx op1 = gen_reg_rtx (SFmode);
4717 expand_float (op1, operands[1], 0);
4718 op1 = convert_to_mode (HFmode, op1, 0);
4719 emit_move_insn (operands[0], op1);
4724 (define_expand "floatdihf2"
4725 [(set (match_operand:HF 0 "general_operand")
4726 (float:HF (match_operand:DI 1 "general_operand")))]
4730 rtx op1 = gen_reg_rtx (SFmode);
4731 expand_float (op1, operands[1], 0);
4732 op1 = convert_to_mode (HFmode, op1, 0);
4733 emit_move_insn (operands[0], op1);
4738 (define_expand "floatsisf2"
4739 [(set (match_operand:SF 0 "s_register_operand")
4740 (float:SF (match_operand:SI 1 "s_register_operand")))]
4741 "TARGET_32BIT && TARGET_HARD_FLOAT"
4745 (define_expand "floatsidf2"
4746 [(set (match_operand:DF 0 "s_register_operand")
4747 (float:DF (match_operand:SI 1 "s_register_operand")))]
4748 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4752 (define_expand "fix_trunchfsi2"
4753 [(set (match_operand:SI 0 "general_operand")
4754 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
4758 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4759 expand_fix (operands[0], op1, 0);
4764 (define_expand "fix_trunchfdi2"
4765 [(set (match_operand:DI 0 "general_operand")
4766 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
4770 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4771 expand_fix (operands[0], op1, 0);
4776 (define_expand "fix_truncsfsi2"
4777 [(set (match_operand:SI 0 "s_register_operand")
4778 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
4779 "TARGET_32BIT && TARGET_HARD_FLOAT"
4783 (define_expand "fix_truncdfsi2"
4784 [(set (match_operand:SI 0 "s_register_operand")
4785 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
4786 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4792 (define_expand "truncdfsf2"
4793 [(set (match_operand:SF 0 "s_register_operand")
4795 (match_operand:DF 1 "s_register_operand")))]
4796 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4800 ;; DFmode to HFmode conversions on targets without a single-step hardware
4801 ;; instruction for it would have to go through SFmode. This is dangerous
4802 ;; as it introduces double rounding.
4804 ;; Disable this pattern unless we are in an unsafe math mode, or we have
4805 ;; a single-step instruction.
4807 (define_expand "truncdfhf2"
4808 [(set (match_operand:HF 0 "s_register_operand")
4810 (match_operand:DF 1 "s_register_operand")))]
4811 "(TARGET_EITHER && flag_unsafe_math_optimizations)
4812 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
4814 /* We don't have a direct instruction for this, so we must be in
4815 an unsafe math mode, and going via SFmode. */
4817 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4820 op1 = convert_to_mode (SFmode, operands[1], 0);
4821 op1 = convert_to_mode (HFmode, op1, 0);
4822 emit_move_insn (operands[0], op1);
4825 /* Otherwise, we will pick this up as a single instruction with
4826 no intermediary rounding. */
4830 ;; Zero and sign extension instructions.
4832 (define_expand "zero_extend<mode>di2"
4833 [(set (match_operand:DI 0 "s_register_operand" "")
4834 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
4835 "TARGET_32BIT <qhs_zextenddi_cond>"
4837 rtx res_lo, res_hi, op0_lo, op0_hi;
4838 res_lo = gen_lowpart (SImode, operands[0]);
4839 res_hi = gen_highpart (SImode, operands[0]);
4840 if (can_create_pseudo_p ())
4842 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4843 op0_hi = gen_reg_rtx (SImode);
4847 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4850 if (<MODE>mode != SImode)
4851 emit_insn (gen_rtx_SET (op0_lo,
4852 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4853 emit_insn (gen_movsi (op0_hi, const0_rtx));
4854 if (res_lo != op0_lo)
4855 emit_move_insn (res_lo, op0_lo);
4856 if (res_hi != op0_hi)
4857 emit_move_insn (res_hi, op0_hi);
4862 (define_expand "extend<mode>di2"
4863 [(set (match_operand:DI 0 "s_register_operand" "")
4864 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
4865 "TARGET_32BIT <qhs_sextenddi_cond>"
4867 rtx res_lo, res_hi, op0_lo, op0_hi;
4868 res_lo = gen_lowpart (SImode, operands[0]);
4869 res_hi = gen_highpart (SImode, operands[0]);
4870 if (can_create_pseudo_p ())
4872 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4873 op0_hi = gen_reg_rtx (SImode);
4877 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4880 if (<MODE>mode != SImode)
4881 emit_insn (gen_rtx_SET (op0_lo,
4882 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4883 emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
4884 if (res_lo != op0_lo)
4885 emit_move_insn (res_lo, op0_lo);
4886 if (res_hi != op0_hi)
4887 emit_move_insn (res_hi, op0_hi);
4892 ;; Splits for all extensions to DImode
4894 [(set (match_operand:DI 0 "s_register_operand" "")
4895 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4897 [(set (match_dup 0) (match_dup 1))]
4899 rtx lo_part = gen_lowpart (SImode, operands[0]);
4900 machine_mode src_mode = GET_MODE (operands[1]);
4902 if (src_mode == SImode)
4903 emit_move_insn (lo_part, operands[1]);
4905 emit_insn (gen_rtx_SET (lo_part,
4906 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4907 operands[0] = gen_highpart (SImode, operands[0]);
4908 operands[1] = const0_rtx;
4912 [(set (match_operand:DI 0 "s_register_operand" "")
4913 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4915 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4917 rtx lo_part = gen_lowpart (SImode, operands[0]);
4918 machine_mode src_mode = GET_MODE (operands[1]);
4920 if (src_mode == SImode)
4921 emit_move_insn (lo_part, operands[1]);
4923 emit_insn (gen_rtx_SET (lo_part,
4924 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4925 operands[1] = lo_part;
4926 operands[0] = gen_highpart (SImode, operands[0]);
4929 (define_expand "zero_extendhisi2"
4930 [(set (match_operand:SI 0 "s_register_operand")
4931 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4934 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4936 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4939 if (!arm_arch6 && !MEM_P (operands[1]))
4941 rtx t = gen_lowpart (SImode, operands[1]);
4942 rtx tmp = gen_reg_rtx (SImode);
4943 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4944 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
4950 [(set (match_operand:SI 0 "s_register_operand" "")
4951 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
4952 "!TARGET_THUMB2 && !arm_arch6"
4953 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4954 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4956 operands[2] = gen_lowpart (SImode, operands[1]);
4959 (define_insn "*arm_zero_extendhisi2"
4960 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4961 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4962 "TARGET_ARM && arm_arch4 && !arm_arch6"
4966 [(set_attr "type" "alu_shift_reg,load_byte")
4967 (set_attr "predicable" "yes")]
4970 (define_insn "*arm_zero_extendhisi2_v6"
4971 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4972 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4973 "TARGET_ARM && arm_arch6"
4977 [(set_attr "predicable" "yes")
4978 (set_attr "type" "extend,load_byte")]
4981 (define_insn "*arm_zero_extendhisi2addsi"
4982 [(set (match_operand:SI 0 "s_register_operand" "=r")
4983 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4984 (match_operand:SI 2 "s_register_operand" "r")))]
4986 "uxtah%?\\t%0, %2, %1"
4987 [(set_attr "type" "alu_shift_reg")
4988 (set_attr "predicable" "yes")]
4991 (define_expand "zero_extendqisi2"
4992 [(set (match_operand:SI 0 "s_register_operand")
4993 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
4996 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
4998 emit_insn (gen_andsi3 (operands[0],
4999 gen_lowpart (SImode, operands[1]),
5003 if (!arm_arch6 && !MEM_P (operands[1]))
5005 rtx t = gen_lowpart (SImode, operands[1]);
5006 rtx tmp = gen_reg_rtx (SImode);
5007 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5008 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
5014 [(set (match_operand:SI 0 "s_register_operand" "")
5015 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
5017 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5018 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
5020 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5023 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
5028 (define_insn "*arm_zero_extendqisi2"
5029 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5030 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
5031 "TARGET_ARM && !arm_arch6"
5034 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5035 [(set_attr "length" "8,4")
5036 (set_attr "type" "alu_shift_reg,load_byte")
5037 (set_attr "predicable" "yes")]
5040 (define_insn "*arm_zero_extendqisi2_v6"
5041 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5042 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
5043 "TARGET_ARM && arm_arch6"
5046 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5047 [(set_attr "type" "extend,load_byte")
5048 (set_attr "predicable" "yes")]
5051 (define_insn "*arm_zero_extendqisi2addsi"
5052 [(set (match_operand:SI 0 "s_register_operand" "=r")
5053 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5054 (match_operand:SI 2 "s_register_operand" "r")))]
5056 "uxtab%?\\t%0, %2, %1"
5057 [(set_attr "predicable" "yes")
5058 (set_attr "type" "alu_shift_reg")]
5062 [(set (match_operand:SI 0 "s_register_operand" "")
5063 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
5064 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5065 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
5066 [(set (match_dup 2) (match_dup 1))
5067 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5072 [(set (match_operand:SI 0 "s_register_operand" "")
5073 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
5074 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5075 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
5076 [(set (match_dup 2) (match_dup 1))
5077 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5083 [(set (match_operand:SI 0 "s_register_operand" "")
5084 (IOR_XOR:SI (and:SI (ashift:SI
5085 (match_operand:SI 1 "s_register_operand" "")
5086 (match_operand:SI 2 "const_int_operand" ""))
5087 (match_operand:SI 3 "const_int_operand" ""))
5089 (match_operator 5 "subreg_lowpart_operator"
5090 [(match_operand:SI 4 "s_register_operand" "")]))))]
5092 && (UINTVAL (operands[3])
5093 == (GET_MODE_MASK (GET_MODE (operands[5]))
5094 & (GET_MODE_MASK (GET_MODE (operands[5]))
5095 << (INTVAL (operands[2])))))"
5096 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
5098 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
5099 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
5102 (define_insn "*compareqi_eq0"
5103 [(set (reg:CC_Z CC_REGNUM)
5104 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
5108 [(set_attr "conds" "set")
5109 (set_attr "predicable" "yes")
5110 (set_attr "type" "logic_imm")]
5113 (define_expand "extendhisi2"
5114 [(set (match_operand:SI 0 "s_register_operand")
5115 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5120 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
5123 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
5125 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
5129 if (!arm_arch6 && !MEM_P (operands[1]))
5131 rtx t = gen_lowpart (SImode, operands[1]);
5132 rtx tmp = gen_reg_rtx (SImode);
5133 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5134 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
5141 [(set (match_operand:SI 0 "register_operand" "")
5142 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
5143 (clobber (match_scratch:SI 2 ""))])]
5145 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5146 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5148 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5151 ;; This pattern will only be used when ldsh is not available
5152 (define_expand "extendhisi2_mem"
5153 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5155 (zero_extend:SI (match_dup 7)))
5156 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
5157 (set (match_operand:SI 0 "" "")
5158 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
5163 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5165 mem1 = change_address (operands[1], QImode, addr);
5166 mem2 = change_address (operands[1], QImode,
5167 plus_constant (Pmode, addr, 1));
5168 operands[0] = gen_lowpart (SImode, operands[0]);
5170 operands[2] = gen_reg_rtx (SImode);
5171 operands[3] = gen_reg_rtx (SImode);
5172 operands[6] = gen_reg_rtx (SImode);
5175 if (BYTES_BIG_ENDIAN)
5177 operands[4] = operands[2];
5178 operands[5] = operands[3];
5182 operands[4] = operands[3];
5183 operands[5] = operands[2];
5189 [(set (match_operand:SI 0 "register_operand" "")
5190 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
5192 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5193 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5195 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5198 (define_insn "*arm_extendhisi2"
5199 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5200 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5201 "TARGET_ARM && arm_arch4 && !arm_arch6"
5205 [(set_attr "length" "8,4")
5206 (set_attr "type" "alu_shift_reg,load_byte")
5207 (set_attr "predicable" "yes")]
5210 ;; ??? Check Thumb-2 pool range
5211 (define_insn "*arm_extendhisi2_v6"
5212 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5213 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5214 "TARGET_32BIT && arm_arch6"
5218 [(set_attr "type" "extend,load_byte")
5219 (set_attr "predicable" "yes")]
5222 (define_insn "*arm_extendhisi2addsi"
5223 [(set (match_operand:SI 0 "s_register_operand" "=r")
5224 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5225 (match_operand:SI 2 "s_register_operand" "r")))]
5227 "sxtah%?\\t%0, %2, %1"
5228 [(set_attr "type" "alu_shift_reg")]
5231 (define_expand "extendqihi2"
5233 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
5235 (set (match_operand:HI 0 "s_register_operand")
5236 (ashiftrt:SI (match_dup 2)
5241 if (arm_arch4 && MEM_P (operands[1]))
5243 emit_insn (gen_rtx_SET (operands[0],
5244 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
5247 if (!s_register_operand (operands[1], QImode))
5248 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5249 operands[0] = gen_lowpart (SImode, operands[0]);
5250 operands[1] = gen_lowpart (SImode, operands[1]);
5251 operands[2] = gen_reg_rtx (SImode);
5255 (define_insn "*arm_extendqihi_insn"
5256 [(set (match_operand:HI 0 "s_register_operand" "=r")
5257 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
5258 "TARGET_ARM && arm_arch4"
5260 [(set_attr "type" "load_byte")
5261 (set_attr "predicable" "yes")]
5264 (define_expand "extendqisi2"
5265 [(set (match_operand:SI 0 "s_register_operand")
5266 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
5269 if (!arm_arch4 && MEM_P (operands[1]))
5270 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5272 if (!arm_arch6 && !MEM_P (operands[1]))
5274 rtx t = gen_lowpart (SImode, operands[1]);
5275 rtx tmp = gen_reg_rtx (SImode);
5276 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5277 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
5283 [(set (match_operand:SI 0 "register_operand" "")
5284 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
5286 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5287 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
5289 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5292 (define_insn "*arm_extendqisi"
5293 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5294 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5295 "TARGET_ARM && arm_arch4 && !arm_arch6"
5299 [(set_attr "length" "8,4")
5300 (set_attr "type" "alu_shift_reg,load_byte")
5301 (set_attr "predicable" "yes")]
5304 (define_insn "*arm_extendqisi_v6"
5305 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5307 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5308 "TARGET_ARM && arm_arch6"
5312 [(set_attr "type" "extend,load_byte")
5313 (set_attr "predicable" "yes")]
5316 (define_insn "*arm_extendqisi2addsi"
5317 [(set (match_operand:SI 0 "s_register_operand" "=r")
5318 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5319 (match_operand:SI 2 "s_register_operand" "r")))]
5321 "sxtab%?\\t%0, %2, %1"
5322 [(set_attr "type" "alu_shift_reg")
5323 (set_attr "predicable" "yes")]
5326 (define_insn "arm_<sup>xtb16"
5327 [(set (match_operand:SI 0 "s_register_operand" "=r")
5329 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
5331 "<sup>xtb16%?\\t%0, %1"
5332 [(set_attr "predicable" "yes")
5333 (set_attr "type" "alu_dsp_reg")])
5335 (define_insn "arm_<simd32_op>"
5336 [(set (match_operand:SI 0 "s_register_operand" "=r")
5338 [(match_operand:SI 1 "s_register_operand" "r")
5339 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
5341 "<simd32_op>%?\\t%0, %1, %2"
5342 [(set_attr "predicable" "yes")
5343 (set_attr "type" "alu_dsp_reg")])
5345 (define_insn "arm_usada8"
5346 [(set (match_operand:SI 0 "s_register_operand" "=r")
5348 [(match_operand:SI 1 "s_register_operand" "r")
5349 (match_operand:SI 2 "s_register_operand" "r")
5350 (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
5352 "usada8%?\\t%0, %1, %2, %3"
5353 [(set_attr "predicable" "yes")
5354 (set_attr "type" "alu_dsp_reg")])
5356 (define_insn "arm_<simd32_op>"
5357 [(set (match_operand:DI 0 "s_register_operand" "=r")
5359 [(match_operand:SI 1 "s_register_operand" "r")
5360 (match_operand:SI 2 "s_register_operand" "r")
5361 (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
5363 "<simd32_op>%?\\t%Q0, %R0, %1, %2"
5364 [(set_attr "predicable" "yes")
5365 (set_attr "type" "smlald")])
5367 (define_expand "extendsfdf2"
5368 [(set (match_operand:DF 0 "s_register_operand")
5369 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
5370 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5374 ;; HFmode -> DFmode conversions where we don't have an instruction for it
5375 ;; must go through SFmode.
5377 ;; This is always safe for an extend.
5379 (define_expand "extendhfdf2"
5380 [(set (match_operand:DF 0 "s_register_operand")
5381 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
5384 /* We don't have a direct instruction for this, so go via SFmode. */
5385 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
5388 op1 = convert_to_mode (SFmode, operands[1], 0);
5389 op1 = convert_to_mode (DFmode, op1, 0);
5390 emit_insn (gen_movdf (operands[0], op1));
5393 /* Otherwise, we're done producing RTL and will pick up the correct
5394 pattern to do this with one rounding-step in a single instruction. */
5398 ;; Move insns (including loads and stores)
5400 ;; XXX Just some ideas about movti.
5401 ;; I don't think these are a good idea on the arm, there just aren't enough
5403 ;;(define_expand "loadti"
5404 ;; [(set (match_operand:TI 0 "s_register_operand")
5405 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
5408 ;;(define_expand "storeti"
5409 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
5410 ;; (match_operand:TI 1 "s_register_operand"))]
5413 ;;(define_expand "movti"
5414 ;; [(set (match_operand:TI 0 "general_operand")
5415 ;; (match_operand:TI 1 "general_operand"))]
5421 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
5422 ;; operands[1] = copy_to_reg (operands[1]);
5423 ;; if (MEM_P (operands[0]))
5424 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
5425 ;; else if (MEM_P (operands[1]))
5426 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
5430 ;; emit_insn (insn);
5434 ;; Recognize garbage generated above.
5437 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
5438 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
5442 ;; register mem = (which_alternative < 3);
5443 ;; register const char *template;
5445 ;; operands[mem] = XEXP (operands[mem], 0);
5446 ;; switch (which_alternative)
5448 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
5449 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
5450 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
5451 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
5452 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
5453 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
5455 ;; output_asm_insn (template, operands);
5459 (define_expand "movdi"
5460 [(set (match_operand:DI 0 "general_operand")
5461 (match_operand:DI 1 "general_operand"))]
5464 gcc_checking_assert (aligned_operand (operands[0], DImode));
5465 gcc_checking_assert (aligned_operand (operands[1], DImode));
5466 if (can_create_pseudo_p ())
5468 if (!REG_P (operands[0]))
5469 operands[1] = force_reg (DImode, operands[1]);
5471 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
5472 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
5474 /* Avoid LDRD's into an odd-numbered register pair in ARM state
5475 when expanding function calls. */
5476 gcc_assert (can_create_pseudo_p ());
5477 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
5479 /* Perform load into legal reg pair first, then move. */
5480 rtx reg = gen_reg_rtx (DImode);
5481 emit_insn (gen_movdi (reg, operands[1]));
5484 emit_move_insn (gen_lowpart (SImode, operands[0]),
5485 gen_lowpart (SImode, operands[1]));
5486 emit_move_insn (gen_highpart (SImode, operands[0]),
5487 gen_highpart (SImode, operands[1]));
5490 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
5491 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
5493 /* Avoid STRD's from an odd-numbered register pair in ARM state
5494 when expanding function prologue. */
5495 gcc_assert (can_create_pseudo_p ());
5496 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
5497 ? gen_reg_rtx (DImode)
5499 emit_move_insn (gen_lowpart (SImode, split_dest),
5500 gen_lowpart (SImode, operands[1]));
5501 emit_move_insn (gen_highpart (SImode, split_dest),
5502 gen_highpart (SImode, operands[1]));
5503 if (split_dest != operands[0])
5504 emit_insn (gen_movdi (operands[0], split_dest));
5510 (define_insn "*arm_movdi"
5511 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
5512 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
5514 && !(TARGET_HARD_FLOAT)
5516 && ( register_operand (operands[0], DImode)
5517 || register_operand (operands[1], DImode))"
5519 switch (which_alternative)
5526 /* Cannot load it directly, split to load it via MOV / MOVT. */
5527 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
5531 return output_move_double (operands, true, NULL);
5534 [(set_attr "length" "8,12,16,8,8")
5535 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
5536 (set_attr "arm_pool_range" "*,*,*,1020,*")
5537 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
5538 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
5539 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
5543 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5544 (match_operand:ANY64 1 "immediate_operand" ""))]
5547 && (arm_disable_literal_pool
5548 || (arm_const_double_inline_cost (operands[1])
5549 <= arm_max_const_double_inline_cost ()))"
5552 arm_split_constant (SET, SImode, curr_insn,
5553 INTVAL (gen_lowpart (SImode, operands[1])),
5554 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
5555 arm_split_constant (SET, SImode, curr_insn,
5556 INTVAL (gen_highpart_mode (SImode,
5557 GET_MODE (operands[0]),
5559 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
5564 ; If optimizing for size, or if we have load delay slots, then
5565 ; we want to split the constant into two separate operations.
5566 ; In both cases this may split a trivial part into a single data op
5567 ; leaving a single complex constant to load. We can also get longer
5568 ; offsets in a LDR which means we get better chances of sharing the pool
5569 ; entries. Finally, we can normally do a better job of scheduling
5570 ; LDR instructions than we can with LDM.
5571 ; This pattern will only match if the one above did not.
5573 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5574 (match_operand:ANY64 1 "const_double_operand" ""))]
5575 "TARGET_ARM && reload_completed
5576 && arm_const_double_by_parts (operands[1])"
5577 [(set (match_dup 0) (match_dup 1))
5578 (set (match_dup 2) (match_dup 3))]
5580 operands[2] = gen_highpart (SImode, operands[0]);
5581 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
5583 operands[0] = gen_lowpart (SImode, operands[0]);
5584 operands[1] = gen_lowpart (SImode, operands[1]);
5589 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5590 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
5591 "TARGET_EITHER && reload_completed"
5592 [(set (match_dup 0) (match_dup 1))
5593 (set (match_dup 2) (match_dup 3))]
5595 operands[2] = gen_highpart (SImode, operands[0]);
5596 operands[3] = gen_highpart (SImode, operands[1]);
5597 operands[0] = gen_lowpart (SImode, operands[0]);
5598 operands[1] = gen_lowpart (SImode, operands[1]);
5600 /* Handle a partial overlap. */
5601 if (rtx_equal_p (operands[0], operands[3]))
5603 rtx tmp0 = operands[0];
5604 rtx tmp1 = operands[1];
5606 operands[0] = operands[2];
5607 operands[1] = operands[3];
5614 ;; We can't actually do base+index doubleword loads if the index and
5615 ;; destination overlap. Split here so that we at least have chance to
5618 [(set (match_operand:DI 0 "s_register_operand" "")
5619 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
5620 (match_operand:SI 2 "s_register_operand" ""))))]
5622 && reg_overlap_mentioned_p (operands[0], operands[1])
5623 && reg_overlap_mentioned_p (operands[0], operands[2])"
5625 (plus:SI (match_dup 1)
5628 (mem:DI (match_dup 4)))]
5630 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
5634 (define_expand "movsi"
5635 [(set (match_operand:SI 0 "general_operand")
5636 (match_operand:SI 1 "general_operand"))]
5640 rtx base, offset, tmp;
5642 gcc_checking_assert (aligned_operand (operands[0], SImode));
5643 gcc_checking_assert (aligned_operand (operands[1], SImode));
5644 if (TARGET_32BIT || TARGET_HAVE_MOVT)
5646 /* Everything except mem = const or mem = mem can be done easily. */
5647 if (MEM_P (operands[0]))
5648 operands[1] = force_reg (SImode, operands[1]);
5649 if (arm_general_register_operand (operands[0], SImode)
5650 && CONST_INT_P (operands[1])
5651 && !(const_ok_for_arm (INTVAL (operands[1]))
5652 || const_ok_for_arm (~INTVAL (operands[1]))))
5654 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
5656 emit_insn (gen_rtx_SET (operands[0], operands[1]));
5661 arm_split_constant (SET, SImode, NULL_RTX,
5662 INTVAL (operands[1]), operands[0], NULL_RTX,
5663 optimize && can_create_pseudo_p ());
5668 else /* Target doesn't have MOVT... */
5670 if (can_create_pseudo_p ())
5672 if (!REG_P (operands[0]))
5673 operands[1] = force_reg (SImode, operands[1]);
5677 split_const (operands[1], &base, &offset);
5678 if (INTVAL (offset) != 0
5679 && targetm.cannot_force_const_mem (SImode, operands[1]))
5681 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5682 emit_move_insn (tmp, base);
5683 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5687 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
5689 /* Recognize the case where operand[1] is a reference to thread-local
5690 data and load its address to a register. Offsets have been split off
5692 if (arm_tls_referenced_p (operands[1]))
5693 operands[1] = legitimize_tls_address (operands[1], tmp);
5695 && (CONSTANT_P (operands[1])
5696 || symbol_mentioned_p (operands[1])
5697 || label_mentioned_p (operands[1])))
5699 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
5704 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5705 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5706 ;; so this does not matter.
5707 (define_insn "*arm_movt"
5708 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
5709 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
5710 (match_operand:SI 2 "general_operand" "i,i")))]
5711 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
5713 movt%?\t%0, #:upper16:%c2
5714 movt\t%0, #:upper16:%c2"
5715 [(set_attr "arch" "32,v8mb")
5716 (set_attr "predicable" "yes")
5717 (set_attr "length" "4")
5718 (set_attr "type" "alu_sreg")]
5721 (define_insn "*arm_movsi_insn"
5722 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5723 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5724 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
5725 && ( register_operand (operands[0], SImode)
5726 || register_operand (operands[1], SImode))"
5734 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
5735 (set_attr "predicable" "yes")
5736 (set_attr "arch" "*,*,*,v6t2,*,*")
5737 (set_attr "pool_range" "*,*,*,*,4096,*")
5738 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5742 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5743 (match_operand:SI 1 "const_int_operand" ""))]
5744 "(TARGET_32BIT || TARGET_HAVE_MOVT)
5745 && (!(const_ok_for_arm (INTVAL (operands[1]))
5746 || const_ok_for_arm (~INTVAL (operands[1]))))"
5747 [(clobber (const_int 0))]
5749 arm_split_constant (SET, SImode, NULL_RTX,
5750 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5755 ;; A normal way to do (symbol + offset) requires three instructions at least
5756 ;; (depends on how big the offset is) as below:
5757 ;; movw r0, #:lower16:g
5758 ;; movw r0, #:upper16:g
5761 ;; A better way would be:
5762 ;; movw r0, #:lower16:g+4
5763 ;; movw r0, #:upper16:g+4
5765 ;; The limitation of this way is that the length of offset should be a 16-bit
5766 ;; signed value, because current assembler only supports REL type relocation for
5767 ;; such case. If the more powerful RELA type is supported in future, we should
5768 ;; update this pattern to go with better way.
5770 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5771 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
5772 (match_operand:SI 2 "const_int_operand" ""))))]
5775 && arm_disable_literal_pool
5777 && GET_CODE (operands[1]) == SYMBOL_REF"
5778 [(clobber (const_int 0))]
5780 int offset = INTVAL (operands[2]);
5782 if (offset < -0x8000 || offset > 0x7fff)
5784 arm_emit_movpair (operands[0], operands[1]);
5785 emit_insn (gen_rtx_SET (operands[0],
5786 gen_rtx_PLUS (SImode, operands[0], operands[2])));
5790 rtx op = gen_rtx_CONST (SImode,
5791 gen_rtx_PLUS (SImode, operands[1], operands[2]));
5792 arm_emit_movpair (operands[0], op);
5797 ;; Split symbol_refs at the later stage (after cprop), instead of generating
5798 ;; movt/movw pair directly at expand. Otherwise corresponding high_sum
5799 ;; and lo_sum would be merged back into memory load at cprop. However,
5800 ;; if the default is to prefer movt/movw rather than a load from the constant
5801 ;; pool, the performance is better.
5803 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5804 (match_operand:SI 1 "general_operand" ""))]
5805 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
5806 && !target_word_relocations
5807 && !arm_tls_referenced_p (operands[1])"
5808 [(clobber (const_int 0))]
5810 arm_emit_movpair (operands[0], operands[1]);
5814 ;; When generating pic, we need to load the symbol offset into a register.
5815 ;; So that the optimizer does not confuse this with a normal symbol load
5816 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5817 ;; since that is the only type of relocation we can use.
5819 ;; Wrap calculation of the whole PIC address in a single pattern for the
5820 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5821 ;; a PIC address involves two loads from memory, so we want to CSE it
5822 ;; as often as possible.
5823 ;; This pattern will be split into one of the pic_load_addr_* patterns
5824 ;; and a move after GCSE optimizations.
5826 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
5827 (define_expand "calculate_pic_address"
5828 [(set (match_operand:SI 0 "register_operand")
5829 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
5830 (unspec:SI [(match_operand:SI 2 "" "")]
5835 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5837 [(set (match_operand:SI 0 "register_operand" "")
5838 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5839 (unspec:SI [(match_operand:SI 2 "" "")]
5842 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5843 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5844 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5847 ;; operand1 is the memory address to go into
5848 ;; pic_load_addr_32bit.
5849 ;; operand2 is the PIC label to be emitted
5850 ;; from pic_add_dot_plus_eight.
5851 ;; We do this to allow hoisting of the entire insn.
5852 (define_insn_and_split "pic_load_addr_unified"
5853 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
5854 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
5855 (match_operand:SI 2 "" "")]
5856 UNSPEC_PIC_UNIFIED))]
5859 "&& reload_completed"
5860 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
5861 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
5862 (match_dup 2)] UNSPEC_PIC_BASE))]
5863 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
5864 [(set_attr "type" "load_4,load_4,load_4")
5865 (set_attr "pool_range" "4096,4094,1022")
5866 (set_attr "neg_pool_range" "4084,0,0")
5867 (set_attr "arch" "a,t2,t1")
5868 (set_attr "length" "8,6,4")]
5871 ;; The rather odd constraints on the following are to force reload to leave
5872 ;; the insn alone, and to force the minipool generation pass to then move
5873 ;; the GOT symbol to memory.
5875 (define_insn "pic_load_addr_32bit"
5876 [(set (match_operand:SI 0 "s_register_operand" "=r")
5877 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5878 "TARGET_32BIT && flag_pic"
5880 [(set_attr "type" "load_4")
5881 (set (attr "pool_range")
5882 (if_then_else (eq_attr "is_thumb" "no")
5885 (set (attr "neg_pool_range")
5886 (if_then_else (eq_attr "is_thumb" "no")
5891 (define_insn "pic_load_addr_thumb1"
5892 [(set (match_operand:SI 0 "s_register_operand" "=l")
5893 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5894 "TARGET_THUMB1 && flag_pic"
5896 [(set_attr "type" "load_4")
5897 (set (attr "pool_range") (const_int 1018))]
5900 (define_insn "pic_add_dot_plus_four"
5901 [(set (match_operand:SI 0 "register_operand" "=r")
5902 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5904 (match_operand 2 "" "")]
5908 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5909 INTVAL (operands[2]));
5910 return \"add\\t%0, %|pc\";
5912 [(set_attr "length" "2")
5913 (set_attr "type" "alu_sreg")]
5916 (define_insn "pic_add_dot_plus_eight"
5917 [(set (match_operand:SI 0 "register_operand" "=r")
5918 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5920 (match_operand 2 "" "")]
5924 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5925 INTVAL (operands[2]));
5926 return \"add%?\\t%0, %|pc, %1\";
5928 [(set_attr "predicable" "yes")
5929 (set_attr "type" "alu_sreg")]
5932 (define_insn "tls_load_dot_plus_eight"
5933 [(set (match_operand:SI 0 "register_operand" "=r")
5934 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5936 (match_operand 2 "" "")]
5940 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5941 INTVAL (operands[2]));
5942 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5944 [(set_attr "predicable" "yes")
5945 (set_attr "type" "load_4")]
5948 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5949 ;; followed by a load. These sequences can be crunched down to
5950 ;; tls_load_dot_plus_eight by a peephole.
5953 [(set (match_operand:SI 0 "register_operand" "")
5954 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5956 (match_operand 1 "" "")]
5958 (set (match_operand:SI 2 "arm_general_register_operand" "")
5959 (mem:SI (match_dup 0)))]
5960 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5962 (mem:SI (unspec:SI [(match_dup 3)
5969 (define_insn "pic_offset_arm"
5970 [(set (match_operand:SI 0 "register_operand" "=r")
5971 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5972 (unspec:SI [(match_operand:SI 2 "" "X")]
5973 UNSPEC_PIC_OFFSET))))]
5974 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5975 "ldr%?\\t%0, [%1,%2]"
5976 [(set_attr "type" "load_4")]
5979 (define_expand "builtin_setjmp_receiver"
5980 [(label_ref (match_operand 0 "" ""))]
5984 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5986 if (arm_pic_register != INVALID_REGNUM)
5987 arm_load_pic_register (1UL << 3, NULL_RTX);
5991 ;; If copying one reg to another we can set the condition codes according to
5992 ;; its value. Such a move is common after a return from subroutine and the
5993 ;; result is being tested against zero.
5995 (define_insn "*movsi_compare0"
5996 [(set (reg:CC CC_REGNUM)
5997 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5999 (set (match_operand:SI 0 "s_register_operand" "=r,r")
6004 subs%?\\t%0, %1, #0"
6005 [(set_attr "conds" "set")
6006 (set_attr "type" "alus_imm,alus_imm")]
6009 ;; Subroutine to store a half word from a register into memory.
6010 ;; Operand 0 is the source register (HImode)
6011 ;; Operand 1 is the destination address in a register (SImode)
6013 ;; In both this routine and the next, we must be careful not to spill
6014 ;; a memory address of reg+large_const into a separate PLUS insn, since this
6015 ;; can generate unrecognizable rtl.
6017 (define_expand "storehi"
6018 [;; store the low byte
6019 (set (match_operand 1 "" "") (match_dup 3))
6020 ;; extract the high byte
6022 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6023 ;; store the high byte
6024 (set (match_dup 4) (match_dup 5))]
6028 rtx op1 = operands[1];
6029 rtx addr = XEXP (op1, 0);
6030 enum rtx_code code = GET_CODE (addr);
6032 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6034 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
6036 operands[4] = adjust_address (op1, QImode, 1);
6037 operands[1] = adjust_address (operands[1], QImode, 0);
6038 operands[3] = gen_lowpart (QImode, operands[0]);
6039 operands[0] = gen_lowpart (SImode, operands[0]);
6040 operands[2] = gen_reg_rtx (SImode);
6041 operands[5] = gen_lowpart (QImode, operands[2]);
6045 (define_expand "storehi_bigend"
6046 [(set (match_dup 4) (match_dup 3))
6048 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6049 (set (match_operand 1 "" "") (match_dup 5))]
6053 rtx op1 = operands[1];
6054 rtx addr = XEXP (op1, 0);
6055 enum rtx_code code = GET_CODE (addr);
6057 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6059 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
6061 operands[4] = adjust_address (op1, QImode, 1);
6062 operands[1] = adjust_address (operands[1], QImode, 0);
6063 operands[3] = gen_lowpart (QImode, operands[0]);
6064 operands[0] = gen_lowpart (SImode, operands[0]);
6065 operands[2] = gen_reg_rtx (SImode);
6066 operands[5] = gen_lowpart (QImode, operands[2]);
6070 ;; Subroutine to store a half word integer constant into memory.
6071 (define_expand "storeinthi"
6072 [(set (match_operand 0 "" "")
6073 (match_operand 1 "" ""))
6074 (set (match_dup 3) (match_dup 2))]
6078 HOST_WIDE_INT value = INTVAL (operands[1]);
6079 rtx addr = XEXP (operands[0], 0);
6080 rtx op0 = operands[0];
6081 enum rtx_code code = GET_CODE (addr);
6083 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6085 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
6087 operands[1] = gen_reg_rtx (SImode);
6088 if (BYTES_BIG_ENDIAN)
6090 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
6091 if ((value & 255) == ((value >> 8) & 255))
6092 operands[2] = operands[1];
6095 operands[2] = gen_reg_rtx (SImode);
6096 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
6101 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
6102 if ((value & 255) == ((value >> 8) & 255))
6103 operands[2] = operands[1];
6106 operands[2] = gen_reg_rtx (SImode);
6107 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
6111 operands[3] = adjust_address (op0, QImode, 1);
6112 operands[0] = adjust_address (operands[0], QImode, 0);
6113 operands[2] = gen_lowpart (QImode, operands[2]);
6114 operands[1] = gen_lowpart (QImode, operands[1]);
6118 (define_expand "storehi_single_op"
6119 [(set (match_operand:HI 0 "memory_operand")
6120 (match_operand:HI 1 "general_operand"))]
6121 "TARGET_32BIT && arm_arch4"
6123 if (!s_register_operand (operands[1], HImode))
6124 operands[1] = copy_to_mode_reg (HImode, operands[1]);
6128 (define_expand "movhi"
6129 [(set (match_operand:HI 0 "general_operand")
6130 (match_operand:HI 1 "general_operand"))]
6133 gcc_checking_assert (aligned_operand (operands[0], HImode));
6134 gcc_checking_assert (aligned_operand (operands[1], HImode));
6137 if (can_create_pseudo_p ())
6139 if (MEM_P (operands[0]))
6143 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
6146 if (CONST_INT_P (operands[1]))
6147 emit_insn (gen_storeinthi (operands[0], operands[1]));
6150 if (MEM_P (operands[1]))
6151 operands[1] = force_reg (HImode, operands[1]);
6152 if (BYTES_BIG_ENDIAN)
6153 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
6155 emit_insn (gen_storehi (operands[1], operands[0]));
6159 /* Sign extend a constant, and keep it in an SImode reg. */
6160 else if (CONST_INT_P (operands[1]))
6162 rtx reg = gen_reg_rtx (SImode);
6163 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6165 /* If the constant is already valid, leave it alone. */
6166 if (!const_ok_for_arm (val))
6168 /* If setting all the top bits will make the constant
6169 loadable in a single instruction, then set them.
6170 Otherwise, sign extend the number. */
6172 if (const_ok_for_arm (~(val | ~0xffff)))
6174 else if (val & 0x8000)
6178 emit_insn (gen_movsi (reg, GEN_INT (val)));
6179 operands[1] = gen_lowpart (HImode, reg);
6181 else if (arm_arch4 && optimize && can_create_pseudo_p ()
6182 && MEM_P (operands[1]))
6184 rtx reg = gen_reg_rtx (SImode);
6186 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6187 operands[1] = gen_lowpart (HImode, reg);
6189 else if (!arm_arch4)
6191 if (MEM_P (operands[1]))
6194 rtx offset = const0_rtx;
6195 rtx reg = gen_reg_rtx (SImode);
6197 if ((REG_P (base = XEXP (operands[1], 0))
6198 || (GET_CODE (base) == PLUS
6199 && (CONST_INT_P (offset = XEXP (base, 1)))
6200 && ((INTVAL(offset) & 1) != 1)
6201 && REG_P (base = XEXP (base, 0))))
6202 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
6206 new_rtx = widen_memory_access (operands[1], SImode,
6207 ((INTVAL (offset) & ~3)
6208 - INTVAL (offset)));
6209 emit_insn (gen_movsi (reg, new_rtx));
6210 if (((INTVAL (offset) & 2) != 0)
6211 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
6213 rtx reg2 = gen_reg_rtx (SImode);
6215 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
6220 emit_insn (gen_movhi_bytes (reg, operands[1]));
6222 operands[1] = gen_lowpart (HImode, reg);
6226 /* Handle loading a large integer during reload. */
6227 else if (CONST_INT_P (operands[1])
6228 && !const_ok_for_arm (INTVAL (operands[1]))
6229 && !const_ok_for_arm (~INTVAL (operands[1])))
6231 /* Writing a constant to memory needs a scratch, which should
6232 be handled with SECONDARY_RELOADs. */
6233 gcc_assert (REG_P (operands[0]));
6235 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6236 emit_insn (gen_movsi (operands[0], operands[1]));
6240 else if (TARGET_THUMB2)
6242 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
6243 if (can_create_pseudo_p ())
6245 if (!REG_P (operands[0]))
6246 operands[1] = force_reg (HImode, operands[1]);
6247 /* Zero extend a constant, and keep it in an SImode reg. */
6248 else if (CONST_INT_P (operands[1]))
6250 rtx reg = gen_reg_rtx (SImode);
6251 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6253 emit_insn (gen_movsi (reg, GEN_INT (val)));
6254 operands[1] = gen_lowpart (HImode, reg);
6258 else /* TARGET_THUMB1 */
6260 if (can_create_pseudo_p ())
6262 if (CONST_INT_P (operands[1]))
6264 rtx reg = gen_reg_rtx (SImode);
6266 emit_insn (gen_movsi (reg, operands[1]));
6267 operands[1] = gen_lowpart (HImode, reg);
6270 /* ??? We shouldn't really get invalid addresses here, but this can
6271 happen if we are passed a SP (never OK for HImode/QImode) or
6272 virtual register (also rejected as illegitimate for HImode/QImode)
6273 relative address. */
6274 /* ??? This should perhaps be fixed elsewhere, for instance, in
6275 fixup_stack_1, by checking for other kinds of invalid addresses,
6276 e.g. a bare reference to a virtual register. This may confuse the
6277 alpha though, which must handle this case differently. */
6278 if (MEM_P (operands[0])
6279 && !memory_address_p (GET_MODE (operands[0]),
6280 XEXP (operands[0], 0)))
6282 = replace_equiv_address (operands[0],
6283 copy_to_reg (XEXP (operands[0], 0)));
6285 if (MEM_P (operands[1])
6286 && !memory_address_p (GET_MODE (operands[1]),
6287 XEXP (operands[1], 0)))
6289 = replace_equiv_address (operands[1],
6290 copy_to_reg (XEXP (operands[1], 0)));
6292 if (MEM_P (operands[1]) && optimize > 0)
6294 rtx reg = gen_reg_rtx (SImode);
6296 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6297 operands[1] = gen_lowpart (HImode, reg);
6300 if (MEM_P (operands[0]))
6301 operands[1] = force_reg (HImode, operands[1]);
6303 else if (CONST_INT_P (operands[1])
6304 && !satisfies_constraint_I (operands[1]))
6306 /* Handle loading a large integer during reload. */
6308 /* Writing a constant to memory needs a scratch, which should
6309 be handled with SECONDARY_RELOADs. */
6310 gcc_assert (REG_P (operands[0]));
6312 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6313 emit_insn (gen_movsi (operands[0], operands[1]));
6320 (define_expand "movhi_bytes"
6321 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
6323 (zero_extend:SI (match_dup 6)))
6324 (set (match_operand:SI 0 "" "")
6325 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
6330 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
6332 mem1 = change_address (operands[1], QImode, addr);
6333 mem2 = change_address (operands[1], QImode,
6334 plus_constant (Pmode, addr, 1));
6335 operands[0] = gen_lowpart (SImode, operands[0]);
6337 operands[2] = gen_reg_rtx (SImode);
6338 operands[3] = gen_reg_rtx (SImode);
6341 if (BYTES_BIG_ENDIAN)
6343 operands[4] = operands[2];
6344 operands[5] = operands[3];
6348 operands[4] = operands[3];
6349 operands[5] = operands[2];
6354 (define_expand "movhi_bigend"
6356 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
6359 (ashiftrt:SI (match_dup 2) (const_int 16)))
6360 (set (match_operand:HI 0 "s_register_operand")
6364 operands[2] = gen_reg_rtx (SImode);
6365 operands[3] = gen_reg_rtx (SImode);
6366 operands[4] = gen_lowpart (HImode, operands[3]);
6370 ;; Pattern to recognize insn generated default case above
6371 (define_insn "*movhi_insn_arch4"
6372 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
6373 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))]
6375 && arm_arch4 && !TARGET_HARD_FLOAT
6376 && (register_operand (operands[0], HImode)
6377 || register_operand (operands[1], HImode))"
6379 mov%?\\t%0, %1\\t%@ movhi
6380 mvn%?\\t%0, #%B1\\t%@ movhi
6381 movw%?\\t%0, %L1\\t%@ movhi
6382 strh%?\\t%1, %0\\t%@ movhi
6383 ldrh%?\\t%0, %1\\t%@ movhi"
6384 [(set_attr "predicable" "yes")
6385 (set_attr "pool_range" "*,*,*,*,256")
6386 (set_attr "neg_pool_range" "*,*,*,*,244")
6387 (set_attr "arch" "*,*,v6t2,*,*")
6388 (set_attr_alternative "type"
6389 [(if_then_else (match_operand 1 "const_int_operand" "")
6390 (const_string "mov_imm" )
6391 (const_string "mov_reg"))
6392 (const_string "mvn_imm")
6393 (const_string "mov_imm")
6394 (const_string "store_4")
6395 (const_string "load_4")])]
6398 (define_insn "*movhi_bytes"
6399 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
6400 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))]
6401 "TARGET_ARM && !TARGET_HARD_FLOAT"
6403 mov%?\\t%0, %1\\t%@ movhi
6404 mov%?\\t%0, %1\\t%@ movhi
6405 mvn%?\\t%0, #%B1\\t%@ movhi"
6406 [(set_attr "predicable" "yes")
6407 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
6410 ;; We use a DImode scratch because we may occasionally need an additional
6411 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
6412 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
6413 ;; The reload_in<m> and reload_out<m> patterns require special constraints
6414 ;; to be correctly handled in default_secondary_reload function.
6415 (define_expand "reload_outhi"
6416 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
6417 (match_operand:HI 1 "s_register_operand" "r")
6418 (match_operand:DI 2 "s_register_operand" "=&l")])]
6421 arm_reload_out_hi (operands);
6423 thumb_reload_out_hi (operands);
6428 (define_expand "reload_inhi"
6429 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
6430 (match_operand:HI 1 "arm_reload_memory_operand" "o")
6431 (match_operand:DI 2 "s_register_operand" "=&r")])]
6435 arm_reload_in_hi (operands);
6437 thumb_reload_out_hi (operands);
6441 (define_expand "movqi"
6442 [(set (match_operand:QI 0 "general_operand")
6443 (match_operand:QI 1 "general_operand"))]
6446 /* Everything except mem = const or mem = mem can be done easily */
6448 if (can_create_pseudo_p ())
6450 if (CONST_INT_P (operands[1]))
6452 rtx reg = gen_reg_rtx (SImode);
6454 /* For thumb we want an unsigned immediate, then we are more likely
6455 to be able to use a movs insn. */
6457 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
6459 emit_insn (gen_movsi (reg, operands[1]));
6460 operands[1] = gen_lowpart (QImode, reg);
6465 /* ??? We shouldn't really get invalid addresses here, but this can
6466 happen if we are passed a SP (never OK for HImode/QImode) or
6467 virtual register (also rejected as illegitimate for HImode/QImode)
6468 relative address. */
6469 /* ??? This should perhaps be fixed elsewhere, for instance, in
6470 fixup_stack_1, by checking for other kinds of invalid addresses,
6471 e.g. a bare reference to a virtual register. This may confuse the
6472 alpha though, which must handle this case differently. */
6473 if (MEM_P (operands[0])
6474 && !memory_address_p (GET_MODE (operands[0]),
6475 XEXP (operands[0], 0)))
6477 = replace_equiv_address (operands[0],
6478 copy_to_reg (XEXP (operands[0], 0)));
6479 if (MEM_P (operands[1])
6480 && !memory_address_p (GET_MODE (operands[1]),
6481 XEXP (operands[1], 0)))
6483 = replace_equiv_address (operands[1],
6484 copy_to_reg (XEXP (operands[1], 0)));
6487 if (MEM_P (operands[1]) && optimize > 0)
6489 rtx reg = gen_reg_rtx (SImode);
6491 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
6492 operands[1] = gen_lowpart (QImode, reg);
6495 if (MEM_P (operands[0]))
6496 operands[1] = force_reg (QImode, operands[1]);
6498 else if (TARGET_THUMB
6499 && CONST_INT_P (operands[1])
6500 && !satisfies_constraint_I (operands[1]))
6502 /* Handle loading a large integer during reload. */
6504 /* Writing a constant to memory needs a scratch, which should
6505 be handled with SECONDARY_RELOADs. */
6506 gcc_assert (REG_P (operands[0]));
6508 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6509 emit_insn (gen_movsi (operands[0], operands[1]));
6515 (define_insn "*arm_movqi_insn"
6516 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
6517 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
6519 && ( register_operand (operands[0], QImode)
6520 || register_operand (operands[1], QImode))"
6531 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
6532 (set_attr "predicable" "yes")
6533 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
6534 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
6535 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
6539 (define_expand "movhf"
6540 [(set (match_operand:HF 0 "general_operand")
6541 (match_operand:HF 1 "general_operand"))]
6544 gcc_checking_assert (aligned_operand (operands[0], HFmode));
6545 gcc_checking_assert (aligned_operand (operands[1], HFmode));
6548 if (MEM_P (operands[0]))
6549 operands[1] = force_reg (HFmode, operands[1]);
6551 else /* TARGET_THUMB1 */
6553 if (can_create_pseudo_p ())
6555 if (!REG_P (operands[0]))
6556 operands[1] = force_reg (HFmode, operands[1]);
6562 (define_insn "*arm32_movhf"
6563 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
6564 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
6565 "TARGET_32BIT && !TARGET_HARD_FLOAT
6566 && ( s_register_operand (operands[0], HFmode)
6567 || s_register_operand (operands[1], HFmode))"
6569 switch (which_alternative)
6571 case 0: /* ARM register from memory */
6572 return \"ldrh%?\\t%0, %1\\t%@ __fp16\";
6573 case 1: /* memory from ARM register */
6574 return \"strh%?\\t%1, %0\\t%@ __fp16\";
6575 case 2: /* ARM register from ARM register */
6576 return \"mov%?\\t%0, %1\\t%@ __fp16\";
6577 case 3: /* ARM register from constant */
6582 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
6584 ops[0] = operands[0];
6585 ops[1] = GEN_INT (bits);
6586 ops[2] = GEN_INT (bits & 0xff00);
6587 ops[3] = GEN_INT (bits & 0x00ff);
6589 if (arm_arch_thumb2)
6590 output_asm_insn (\"movw%?\\t%0, %1\", ops);
6592 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6599 [(set_attr "conds" "unconditional")
6600 (set_attr "type" "load_4,store_4,mov_reg,multiple")
6601 (set_attr "length" "4,4,4,8")
6602 (set_attr "predicable" "yes")]
6605 (define_expand "movsf"
6606 [(set (match_operand:SF 0 "general_operand")
6607 (match_operand:SF 1 "general_operand"))]
6610 gcc_checking_assert (aligned_operand (operands[0], SFmode));
6611 gcc_checking_assert (aligned_operand (operands[1], SFmode));
6614 if (MEM_P (operands[0]))
6615 operands[1] = force_reg (SFmode, operands[1]);
6617 else /* TARGET_THUMB1 */
6619 if (can_create_pseudo_p ())
6621 if (!REG_P (operands[0]))
6622 operands[1] = force_reg (SFmode, operands[1]);
6626 /* Cannot load it directly, generate a load with clobber so that it can be
6627 loaded via GPR with MOV / MOVT. */
6628 if (arm_disable_literal_pool
6629 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6630 && CONST_DOUBLE_P (operands[1])
6631 && TARGET_HARD_FLOAT
6632 && !vfp3_const_double_rtx (operands[1]))
6634 rtx clobreg = gen_reg_rtx (SFmode);
6635 emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
6642 ;; Transform a floating-point move of a constant into a core register into
6643 ;; an SImode operation.
6645 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6646 (match_operand:SF 1 "immediate_operand" ""))]
6649 && CONST_DOUBLE_P (operands[1])"
6650 [(set (match_dup 2) (match_dup 3))]
6652 operands[2] = gen_lowpart (SImode, operands[0]);
6653 operands[3] = gen_lowpart (SImode, operands[1]);
6654 if (operands[2] == 0 || operands[3] == 0)
6659 (define_insn "*arm_movsf_soft_insn"
6660 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6661 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6663 && TARGET_SOFT_FLOAT
6664 && (!MEM_P (operands[0])
6665 || register_operand (operands[1], SFmode))"
6667 switch (which_alternative)
6669 case 0: return \"mov%?\\t%0, %1\";
6671 /* Cannot load it directly, split to load it via MOV / MOVT. */
6672 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6674 return \"ldr%?\\t%0, %1\\t%@ float\";
6675 case 2: return \"str%?\\t%1, %0\\t%@ float\";
6676 default: gcc_unreachable ();
6679 [(set_attr "predicable" "yes")
6680 (set_attr "type" "mov_reg,load_4,store_4")
6681 (set_attr "arm_pool_range" "*,4096,*")
6682 (set_attr "thumb2_pool_range" "*,4094,*")
6683 (set_attr "arm_neg_pool_range" "*,4084,*")
6684 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6687 ;; Splitter for the above.
6689 [(set (match_operand:SF 0 "s_register_operand")
6690 (match_operand:SF 1 "const_double_operand"))]
6691 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6695 real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
6696 rtx cst = gen_int_mode (buf, SImode);
6697 emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
6702 (define_expand "movdf"
6703 [(set (match_operand:DF 0 "general_operand")
6704 (match_operand:DF 1 "general_operand"))]
6707 gcc_checking_assert (aligned_operand (operands[0], DFmode));
6708 gcc_checking_assert (aligned_operand (operands[1], DFmode));
6711 if (MEM_P (operands[0]))
6712 operands[1] = force_reg (DFmode, operands[1]);
6714 else /* TARGET_THUMB */
6716 if (can_create_pseudo_p ())
6718 if (!REG_P (operands[0]))
6719 operands[1] = force_reg (DFmode, operands[1]);
6723 /* Cannot load it directly, generate a load with clobber so that it can be
6724 loaded via GPR with MOV / MOVT. */
6725 if (arm_disable_literal_pool
6726 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6727 && CONSTANT_P (operands[1])
6728 && TARGET_HARD_FLOAT
6729 && !arm_const_double_rtx (operands[1])
6730 && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
6732 rtx clobreg = gen_reg_rtx (DFmode);
6733 emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
6740 ;; Reloading a df mode value stored in integer regs to memory can require a
6742 ;; Another reload_out<m> pattern that requires special constraints.
6743 (define_expand "reload_outdf"
6744 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6745 (match_operand:DF 1 "s_register_operand" "r")
6746 (match_operand:SI 2 "s_register_operand" "=&r")]
6750 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6753 operands[2] = XEXP (operands[0], 0);
6754 else if (code == POST_INC || code == PRE_DEC)
6756 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6757 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6758 emit_insn (gen_movdi (operands[0], operands[1]));
6761 else if (code == PRE_INC)
6763 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6765 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6768 else if (code == POST_DEC)
6769 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6771 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6772 XEXP (XEXP (operands[0], 0), 1)));
6774 emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
6777 if (code == POST_DEC)
6778 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6784 (define_insn "*movdf_soft_insn"
6785 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6786 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6787 "TARGET_32BIT && TARGET_SOFT_FLOAT
6788 && ( register_operand (operands[0], DFmode)
6789 || register_operand (operands[1], DFmode))"
6791 switch (which_alternative)
6798 /* Cannot load it directly, split to load it via MOV / MOVT. */
6799 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6803 return output_move_double (operands, true, NULL);
6806 [(set_attr "length" "8,12,16,8,8")
6807 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6808 (set_attr "arm_pool_range" "*,*,*,1020,*")
6809 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
6810 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6811 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6814 ;; Splitter for the above.
6816 [(set (match_operand:DF 0 "s_register_operand")
6817 (match_operand:DF 1 "const_double_operand"))]
6818 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6822 int order = BYTES_BIG_ENDIAN ? 1 : 0;
6823 real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
6824 unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
6825 ival |= (zext_hwi (buf[1 - order], 32) << 32);
6826 rtx cst = gen_int_mode (ival, DImode);
6827 emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
6833 ;; load- and store-multiple insns
6834 ;; The arm can load/store any set of registers, provided that they are in
6835 ;; ascending order, but these expanders assume a contiguous set.
6837 (define_expand "load_multiple"
6838 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6839 (match_operand:SI 1 "" ""))
6840 (use (match_operand:SI 2 "" ""))])]
6843 HOST_WIDE_INT offset = 0;
6845 /* Support only fixed point registers. */
6846 if (!CONST_INT_P (operands[2])
6847 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6848 || INTVAL (operands[2]) < 2
6849 || !MEM_P (operands[1])
6850 || !REG_P (operands[0])
6851 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6852 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6856 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6857 INTVAL (operands[2]),
6858 force_reg (SImode, XEXP (operands[1], 0)),
6859 FALSE, operands[1], &offset);
6862 (define_expand "store_multiple"
6863 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6864 (match_operand:SI 1 "" ""))
6865 (use (match_operand:SI 2 "" ""))])]
6868 HOST_WIDE_INT offset = 0;
6870 /* Support only fixed point registers. */
6871 if (!CONST_INT_P (operands[2])
6872 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6873 || INTVAL (operands[2]) < 2
6874 || !REG_P (operands[1])
6875 || !MEM_P (operands[0])
6876 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6877 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6881 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6882 INTVAL (operands[2]),
6883 force_reg (SImode, XEXP (operands[0], 0)),
6884 FALSE, operands[0], &offset);
6888 (define_expand "setmemsi"
6889 [(match_operand:BLK 0 "general_operand")
6890 (match_operand:SI 1 "const_int_operand")
6891 (match_operand:SI 2 "const_int_operand")
6892 (match_operand:SI 3 "const_int_operand")]
6895 if (arm_gen_setmem (operands))
6902 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6903 ;; We could let this apply for blocks of less than this, but it clobbers so
6904 ;; many registers that there is then probably a better way.
6906 (define_expand "cpymemqi"
6907 [(match_operand:BLK 0 "general_operand")
6908 (match_operand:BLK 1 "general_operand")
6909 (match_operand:SI 2 "const_int_operand")
6910 (match_operand:SI 3 "const_int_operand")]
6915 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
6916 && !optimize_function_for_size_p (cfun))
6918 if (gen_cpymem_ldrd_strd (operands))
6923 if (arm_gen_cpymemqi (operands))
6927 else /* TARGET_THUMB1 */
6929 if ( INTVAL (operands[3]) != 4
6930 || INTVAL (operands[2]) > 48)
6933 thumb_expand_cpymemqi (operands);
6940 ;; Compare & branch insns
6941 ;; The range calculations are based as follows:
6942 ;; For forward branches, the address calculation returns the address of
6943 ;; the next instruction. This is 2 beyond the branch instruction.
6944 ;; For backward branches, the address calculation returns the address of
6945 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6946 ;; instruction for the shortest sequence, and 4 before the branch instruction
6947 ;; if we have to jump around an unconditional branch.
6948 ;; To the basic branch range the PC offset must be added (this is +4).
6949 ;; So for forward branches we have
6950 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6951 ;; And for backward branches we have
6952 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6954 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6955 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
6957 (define_expand "cbranchsi4"
6958 [(set (pc) (if_then_else
6959 (match_operator 0 "expandable_comparison_operator"
6960 [(match_operand:SI 1 "s_register_operand")
6961 (match_operand:SI 2 "nonmemory_operand")])
6962 (label_ref (match_operand 3 "" ""))
6968 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6970 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6974 if (thumb1_cmpneg_operand (operands[2], SImode))
6976 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6977 operands[3], operands[0]));
6980 if (!thumb1_cmp_operand (operands[2], SImode))
6981 operands[2] = force_reg (SImode, operands[2]);
6984 (define_expand "cbranchsf4"
6985 [(set (pc) (if_then_else
6986 (match_operator 0 "expandable_comparison_operator"
6987 [(match_operand:SF 1 "s_register_operand")
6988 (match_operand:SF 2 "vfp_compare_operand")])
6989 (label_ref (match_operand 3 "" ""))
6991 "TARGET_32BIT && TARGET_HARD_FLOAT"
6992 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6993 operands[3])); DONE;"
6996 (define_expand "cbranchdf4"
6997 [(set (pc) (if_then_else
6998 (match_operator 0 "expandable_comparison_operator"
6999 [(match_operand:DF 1 "s_register_operand")
7000 (match_operand:DF 2 "vfp_compare_operand")])
7001 (label_ref (match_operand 3 "" ""))
7003 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7004 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7005 operands[3])); DONE;"
7008 (define_expand "cbranchdi4"
7009 [(set (pc) (if_then_else
7010 (match_operator 0 "expandable_comparison_operator"
7011 [(match_operand:DI 1 "s_register_operand")
7012 (match_operand:DI 2 "reg_or_int_operand")])
7013 (label_ref (match_operand 3 "" ""))
7017 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7019 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7025 ;; Comparison and test insns
7027 (define_insn "*arm_cmpsi_insn"
7028 [(set (reg:CC CC_REGNUM)
7029 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
7030 (match_operand:SI 1 "arm_add_operand" "Py,r,r,I,L")))]
7038 [(set_attr "conds" "set")
7039 (set_attr "arch" "t2,t2,any,any,any")
7040 (set_attr "length" "2,2,4,4,4")
7041 (set_attr "predicable" "yes")
7042 (set_attr "predicable_short_it" "yes,yes,yes,no,no")
7043 (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
7046 (define_insn "*cmpsi_shiftsi"
7047 [(set (reg:CC CC_REGNUM)
7048 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r,r")
7049 (match_operator:SI 3 "shift_operator"
7050 [(match_operand:SI 1 "s_register_operand" "r,r,r")
7051 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])))]
7054 [(set_attr "conds" "set")
7055 (set_attr "shift" "1")
7056 (set_attr "arch" "32,a,a")
7057 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
7059 (define_insn "*cmpsi_shiftsi_swp"
7060 [(set (reg:CC_SWP CC_REGNUM)
7061 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7062 [(match_operand:SI 1 "s_register_operand" "r,r,r")
7063 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])
7064 (match_operand:SI 0 "s_register_operand" "r,r,r")))]
7067 [(set_attr "conds" "set")
7068 (set_attr "shift" "1")
7069 (set_attr "arch" "32,a,a")
7070 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
7072 (define_insn "*arm_cmpsi_negshiftsi_si"
7073 [(set (reg:CC_Z CC_REGNUM)
7075 (neg:SI (match_operator:SI 1 "shift_operator"
7076 [(match_operand:SI 2 "s_register_operand" "r")
7077 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7078 (match_operand:SI 0 "s_register_operand" "r")))]
7081 [(set_attr "conds" "set")
7082 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7083 (const_string "alus_shift_imm")
7084 (const_string "alus_shift_reg")))
7085 (set_attr "predicable" "yes")]
7088 ; This insn allows redundant compares to be removed by cse, nothing should
7089 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7090 ; is deleted later on. The match_dup will match the mode here, so that
7091 ; mode changes of the condition codes aren't lost by this even though we don't
7092 ; specify what they are.
7094 (define_insn "*deleted_compare"
7095 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7097 "\\t%@ deleted compare"
7098 [(set_attr "conds" "set")
7099 (set_attr "length" "0")
7100 (set_attr "type" "no_insn")]
7104 ;; Conditional branch insns
7106 (define_expand "cbranch_cc"
7108 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7109 (match_operand 2 "" "")])
7110 (label_ref (match_operand 3 "" ""))
7113 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7114 operands[1], operands[2], NULL_RTX);
7115 operands[2] = const0_rtx;"
7119 ;; Patterns to match conditional branch insns.
7122 (define_insn "arm_cond_branch"
7124 (if_then_else (match_operator 1 "arm_comparison_operator"
7125 [(match_operand 2 "cc_register" "") (const_int 0)])
7126 (label_ref (match_operand 0 "" ""))
7130 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7132 arm_ccfsm_state += 2;
7135 return \"b%d1\\t%l0\";
7137 [(set_attr "conds" "use")
7138 (set_attr "type" "branch")
7139 (set (attr "length")
7141 (and (match_test "TARGET_THUMB2")
7142 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7143 (le (minus (match_dup 0) (pc)) (const_int 256))))
7148 (define_insn "*arm_cond_branch_reversed"
7150 (if_then_else (match_operator 1 "arm_comparison_operator"
7151 [(match_operand 2 "cc_register" "") (const_int 0)])
7153 (label_ref (match_operand 0 "" ""))))]
7156 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7158 arm_ccfsm_state += 2;
7161 return \"b%D1\\t%l0\";
7163 [(set_attr "conds" "use")
7164 (set_attr "type" "branch")
7165 (set (attr "length")
7167 (and (match_test "TARGET_THUMB2")
7168 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7169 (le (minus (match_dup 0) (pc)) (const_int 256))))
7178 (define_expand "cstore_cc"
7179 [(set (match_operand:SI 0 "s_register_operand")
7180 (match_operator:SI 1 "" [(match_operand 2 "" "")
7181 (match_operand 3 "" "")]))]
7183 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7184 operands[2], operands[3], NULL_RTX);
7185 operands[3] = const0_rtx;"
7188 (define_insn_and_split "*mov_scc"
7189 [(set (match_operand:SI 0 "s_register_operand" "=r")
7190 (match_operator:SI 1 "arm_comparison_operator_mode"
7191 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7193 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7196 (if_then_else:SI (match_dup 1)
7200 [(set_attr "conds" "use")
7201 (set_attr "length" "8")
7202 (set_attr "type" "multiple")]
7205 (define_insn "*negscc_borrow"
7206 [(set (match_operand:SI 0 "s_register_operand" "=r")
7207 (neg:SI (match_operand:SI 1 "arm_borrow_operation" "")))]
7210 [(set_attr "conds" "use")
7211 (set_attr "length" "4")
7212 (set_attr "type" "adc_reg")]
7215 (define_insn_and_split "*mov_negscc"
7216 [(set (match_operand:SI 0 "s_register_operand" "=r")
7217 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
7218 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7219 "TARGET_ARM && !arm_borrow_operation (operands[1], SImode)"
7220 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7223 (if_then_else:SI (match_dup 1)
7227 operands[3] = GEN_INT (~0);
7229 [(set_attr "conds" "use")
7230 (set_attr "length" "8")
7231 (set_attr "type" "multiple")]
7234 (define_insn_and_split "*mov_notscc"
7235 [(set (match_operand:SI 0 "s_register_operand" "=r")
7236 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7237 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7239 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7242 (if_then_else:SI (match_dup 1)
7246 operands[3] = GEN_INT (~1);
7247 operands[4] = GEN_INT (~0);
7249 [(set_attr "conds" "use")
7250 (set_attr "length" "8")
7251 (set_attr "type" "multiple")]
7254 (define_expand "cstoresi4"
7255 [(set (match_operand:SI 0 "s_register_operand")
7256 (match_operator:SI 1 "expandable_comparison_operator"
7257 [(match_operand:SI 2 "s_register_operand")
7258 (match_operand:SI 3 "reg_or_int_operand")]))]
7259 "TARGET_32BIT || TARGET_THUMB1"
7261 rtx op3, scratch, scratch2;
7265 if (!arm_add_operand (operands[3], SImode))
7266 operands[3] = force_reg (SImode, operands[3]);
7267 emit_insn (gen_cstore_cc (operands[0], operands[1],
7268 operands[2], operands[3]));
7272 if (operands[3] == const0_rtx)
7274 switch (GET_CODE (operands[1]))
7277 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7281 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7285 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7286 NULL_RTX, 0, OPTAB_WIDEN);
7287 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7288 NULL_RTX, 0, OPTAB_WIDEN);
7289 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7290 operands[0], 1, OPTAB_WIDEN);
7294 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7296 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7297 NULL_RTX, 1, OPTAB_WIDEN);
7301 scratch = expand_binop (SImode, ashr_optab, operands[2],
7302 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7303 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7304 NULL_RTX, 0, OPTAB_WIDEN);
7305 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7309 /* LT is handled by generic code. No need for unsigned with 0. */
7316 switch (GET_CODE (operands[1]))
7319 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7320 NULL_RTX, 0, OPTAB_WIDEN);
7321 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7325 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7326 NULL_RTX, 0, OPTAB_WIDEN);
7327 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7331 op3 = force_reg (SImode, operands[3]);
7333 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7334 NULL_RTX, 1, OPTAB_WIDEN);
7335 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7336 NULL_RTX, 0, OPTAB_WIDEN);
7337 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7343 if (!thumb1_cmp_operand (op3, SImode))
7344 op3 = force_reg (SImode, op3);
7345 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7346 NULL_RTX, 0, OPTAB_WIDEN);
7347 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7348 NULL_RTX, 1, OPTAB_WIDEN);
7349 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7354 op3 = force_reg (SImode, operands[3]);
7355 scratch = force_reg (SImode, const0_rtx);
7356 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7362 if (!thumb1_cmp_operand (op3, SImode))
7363 op3 = force_reg (SImode, op3);
7364 scratch = force_reg (SImode, const0_rtx);
7365 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7371 if (!thumb1_cmp_operand (op3, SImode))
7372 op3 = force_reg (SImode, op3);
7373 scratch = gen_reg_rtx (SImode);
7374 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
7378 op3 = force_reg (SImode, operands[3]);
7379 scratch = gen_reg_rtx (SImode);
7380 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
7383 /* No good sequences for GT, LT. */
7390 (define_expand "cstorehf4"
7391 [(set (match_operand:SI 0 "s_register_operand")
7392 (match_operator:SI 1 "expandable_comparison_operator"
7393 [(match_operand:HF 2 "s_register_operand")
7394 (match_operand:HF 3 "vfp_compare_operand")]))]
7395 "TARGET_VFP_FP16INST"
7397 if (!arm_validize_comparison (&operands[1],
7402 emit_insn (gen_cstore_cc (operands[0], operands[1],
7403 operands[2], operands[3]));
7408 (define_expand "cstoresf4"
7409 [(set (match_operand:SI 0 "s_register_operand")
7410 (match_operator:SI 1 "expandable_comparison_operator"
7411 [(match_operand:SF 2 "s_register_operand")
7412 (match_operand:SF 3 "vfp_compare_operand")]))]
7413 "TARGET_32BIT && TARGET_HARD_FLOAT"
7414 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7415 operands[2], operands[3])); DONE;"
7418 (define_expand "cstoredf4"
7419 [(set (match_operand:SI 0 "s_register_operand")
7420 (match_operator:SI 1 "expandable_comparison_operator"
7421 [(match_operand:DF 2 "s_register_operand")
7422 (match_operand:DF 3 "vfp_compare_operand")]))]
7423 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7424 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7425 operands[2], operands[3])); DONE;"
7428 (define_expand "cstoredi4"
7429 [(set (match_operand:SI 0 "s_register_operand")
7430 (match_operator:SI 1 "expandable_comparison_operator"
7431 [(match_operand:DI 2 "s_register_operand")
7432 (match_operand:DI 3 "reg_or_int_operand")]))]
7435 if (!arm_validize_comparison (&operands[1],
7439 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
7446 ;; Conditional move insns
7448 (define_expand "movsicc"
7449 [(set (match_operand:SI 0 "s_register_operand")
7450 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
7451 (match_operand:SI 2 "arm_not_operand")
7452 (match_operand:SI 3 "arm_not_operand")))]
7459 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7460 &XEXP (operands[1], 1)))
7463 code = GET_CODE (operands[1]);
7464 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7465 XEXP (operands[1], 1), NULL_RTX);
7466 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7470 (define_expand "movhfcc"
7471 [(set (match_operand:HF 0 "s_register_operand")
7472 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
7473 (match_operand:HF 2 "s_register_operand")
7474 (match_operand:HF 3 "s_register_operand")))]
7475 "TARGET_VFP_FP16INST"
7478 enum rtx_code code = GET_CODE (operands[1]);
7481 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7482 &XEXP (operands[1], 1)))
7485 code = GET_CODE (operands[1]);
7486 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7487 XEXP (operands[1], 1), NULL_RTX);
7488 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7492 (define_expand "movsfcc"
7493 [(set (match_operand:SF 0 "s_register_operand")
7494 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
7495 (match_operand:SF 2 "s_register_operand")
7496 (match_operand:SF 3 "s_register_operand")))]
7497 "TARGET_32BIT && TARGET_HARD_FLOAT"
7500 enum rtx_code code = GET_CODE (operands[1]);
7503 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7504 &XEXP (operands[1], 1)))
7507 code = GET_CODE (operands[1]);
7508 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7509 XEXP (operands[1], 1), NULL_RTX);
7510 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7514 (define_expand "movdfcc"
7515 [(set (match_operand:DF 0 "s_register_operand")
7516 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
7517 (match_operand:DF 2 "s_register_operand")
7518 (match_operand:DF 3 "s_register_operand")))]
7519 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
7522 enum rtx_code code = GET_CODE (operands[1]);
7525 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7526 &XEXP (operands[1], 1)))
7528 code = GET_CODE (operands[1]);
7529 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7530 XEXP (operands[1], 1), NULL_RTX);
7531 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7535 (define_insn "*cmov<mode>"
7536 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
7537 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
7538 [(match_operand 2 "cc_register" "") (const_int 0)])
7539 (match_operand:SDF 3 "s_register_operand"
7541 (match_operand:SDF 4 "s_register_operand"
7542 "<F_constraint>")))]
7543 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
7546 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7553 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
7558 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
7564 [(set_attr "conds" "use")
7565 (set_attr "type" "fcsel")]
7568 (define_insn "*cmovhf"
7569 [(set (match_operand:HF 0 "s_register_operand" "=t")
7570 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
7571 [(match_operand 2 "cc_register" "") (const_int 0)])
7572 (match_operand:HF 3 "s_register_operand" "t")
7573 (match_operand:HF 4 "s_register_operand" "t")))]
7574 "TARGET_VFP_FP16INST"
7577 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7584 return \"vsel%d1.f16\\t%0, %3, %4\";
7589 return \"vsel%D1.f16\\t%0, %4, %3\";
7595 [(set_attr "conds" "use")
7596 (set_attr "type" "fcsel")]
7599 (define_insn_and_split "*movsicc_insn"
7600 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7602 (match_operator 3 "arm_comparison_operator"
7603 [(match_operand 4 "cc_register" "") (const_int 0)])
7604 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7605 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7616 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7617 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7618 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7619 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7620 "&& reload_completed"
7623 enum rtx_code rev_code;
7627 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7629 gen_rtx_SET (operands[0], operands[1])));
7631 rev_code = GET_CODE (operands[3]);
7632 mode = GET_MODE (operands[4]);
7633 if (mode == CCFPmode || mode == CCFPEmode)
7634 rev_code = reverse_condition_maybe_unordered (rev_code);
7636 rev_code = reverse_condition (rev_code);
7638 rev_cond = gen_rtx_fmt_ee (rev_code,
7642 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7644 gen_rtx_SET (operands[0], operands[2])));
7647 [(set_attr "length" "4,4,4,4,8,8,8,8")
7648 (set_attr "conds" "use")
7649 (set_attr_alternative "type"
7650 [(if_then_else (match_operand 2 "const_int_operand" "")
7651 (const_string "mov_imm")
7652 (const_string "mov_reg"))
7653 (const_string "mvn_imm")
7654 (if_then_else (match_operand 1 "const_int_operand" "")
7655 (const_string "mov_imm")
7656 (const_string "mov_reg"))
7657 (const_string "mvn_imm")
7658 (const_string "multiple")
7659 (const_string "multiple")
7660 (const_string "multiple")
7661 (const_string "multiple")])]
7664 (define_insn "*movsfcc_soft_insn"
7665 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7666 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7667 [(match_operand 4 "cc_register" "") (const_int 0)])
7668 (match_operand:SF 1 "s_register_operand" "0,r")
7669 (match_operand:SF 2 "s_register_operand" "r,0")))]
7670 "TARGET_ARM && TARGET_SOFT_FLOAT"
7674 [(set_attr "conds" "use")
7675 (set_attr "type" "mov_reg")]
7679 ;; Jump and linkage insns
7681 (define_expand "jump"
7683 (label_ref (match_operand 0 "" "")))]
7688 (define_insn "*arm_jump"
7690 (label_ref (match_operand 0 "" "")))]
7694 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7696 arm_ccfsm_state += 2;
7699 return \"b%?\\t%l0\";
7702 [(set_attr "predicable" "yes")
7703 (set (attr "length")
7705 (and (match_test "TARGET_THUMB2")
7706 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7707 (le (minus (match_dup 0) (pc)) (const_int 2048))))
7710 (set_attr "type" "branch")]
7713 (define_expand "call"
7714 [(parallel [(call (match_operand 0 "memory_operand")
7715 (match_operand 1 "general_operand"))
7716 (use (match_operand 2 "" ""))
7717 (clobber (reg:SI LR_REGNUM))])]
7722 tree addr = MEM_EXPR (operands[0]);
7724 /* In an untyped call, we can get NULL for operand 2. */
7725 if (operands[2] == NULL_RTX)
7726 operands[2] = const0_rtx;
7728 /* Decide if we should generate indirect calls by loading the
7729 32-bit address of the callee into a register before performing the
7731 callee = XEXP (operands[0], 0);
7732 if (GET_CODE (callee) == SYMBOL_REF
7733 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7735 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7737 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
7738 /* Indirect call: set r9 with FDPIC value of callee. */
7739 XEXP (operands[0], 0)
7740 = arm_load_function_descriptor (XEXP (operands[0], 0));
7742 if (detect_cmse_nonsecure_call (addr))
7744 pat = gen_nonsecure_call_internal (operands[0], operands[1],
7746 emit_call_insn (pat);
7750 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7751 arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
7754 /* Restore FDPIC register (r9) after call. */
7757 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7758 rtx initial_fdpic_reg
7759 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7761 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7762 initial_fdpic_reg));
7769 (define_insn "restore_pic_register_after_call"
7770 [(set (match_operand:SI 0 "s_register_operand" "+r,r")
7771 (unspec:SI [(match_dup 0)
7772 (match_operand:SI 1 "nonimmediate_operand" "r,m")]
7773 UNSPEC_PIC_RESTORE))]
7780 (define_expand "call_internal"
7781 [(parallel [(call (match_operand 0 "memory_operand")
7782 (match_operand 1 "general_operand"))
7783 (use (match_operand 2 "" ""))
7784 (clobber (reg:SI LR_REGNUM))])])
7786 (define_expand "nonsecure_call_internal"
7787 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
7788 UNSPEC_NONSECURE_MEM)
7789 (match_operand 1 "general_operand"))
7790 (use (match_operand 2 "" ""))
7791 (clobber (reg:SI LR_REGNUM))])]
7796 tmp = copy_to_suggested_reg (XEXP (operands[0], 0),
7797 gen_rtx_REG (SImode, R4_REGNUM),
7800 operands[0] = replace_equiv_address (operands[0], tmp);
7803 (define_insn "*call_reg_armv5"
7804 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7805 (match_operand 1 "" ""))
7806 (use (match_operand 2 "" ""))
7807 (clobber (reg:SI LR_REGNUM))]
7808 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7810 [(set_attr "type" "call")]
7813 (define_insn "*call_reg_arm"
7814 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7815 (match_operand 1 "" ""))
7816 (use (match_operand 2 "" ""))
7817 (clobber (reg:SI LR_REGNUM))]
7818 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7820 return output_call (operands);
7822 ;; length is worst case, normally it is only two
7823 [(set_attr "length" "12")
7824 (set_attr "type" "call")]
7828 (define_expand "call_value"
7829 [(parallel [(set (match_operand 0 "" "")
7830 (call (match_operand 1 "memory_operand")
7831 (match_operand 2 "general_operand")))
7832 (use (match_operand 3 "" ""))
7833 (clobber (reg:SI LR_REGNUM))])]
7838 tree addr = MEM_EXPR (operands[1]);
7840 /* In an untyped call, we can get NULL for operand 2. */
7841 if (operands[3] == 0)
7842 operands[3] = const0_rtx;
7844 /* Decide if we should generate indirect calls by loading the
7845 32-bit address of the callee into a register before performing the
7847 callee = XEXP (operands[1], 0);
7848 if (GET_CODE (callee) == SYMBOL_REF
7849 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7851 XEXP (operands[1], 0) = force_reg (Pmode, callee);
7853 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
7854 /* Indirect call: set r9 with FDPIC value of callee. */
7855 XEXP (operands[1], 0)
7856 = arm_load_function_descriptor (XEXP (operands[1], 0));
7858 if (detect_cmse_nonsecure_call (addr))
7860 pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
7861 operands[2], operands[3]);
7862 emit_call_insn (pat);
7866 pat = gen_call_value_internal (operands[0], operands[1],
7867 operands[2], operands[3]);
7868 arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
7871 /* Restore FDPIC register (r9) after call. */
7874 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7875 rtx initial_fdpic_reg
7876 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7878 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7879 initial_fdpic_reg));
7886 (define_expand "call_value_internal"
7887 [(parallel [(set (match_operand 0 "" "")
7888 (call (match_operand 1 "memory_operand")
7889 (match_operand 2 "general_operand")))
7890 (use (match_operand 3 "" ""))
7891 (clobber (reg:SI LR_REGNUM))])])
7893 (define_expand "nonsecure_call_value_internal"
7894 [(parallel [(set (match_operand 0 "" "")
7895 (call (unspec:SI [(match_operand 1 "memory_operand")]
7896 UNSPEC_NONSECURE_MEM)
7897 (match_operand 2 "general_operand")))
7898 (use (match_operand 3 "" ""))
7899 (clobber (reg:SI LR_REGNUM))])]
7904 tmp = copy_to_suggested_reg (XEXP (operands[1], 0),
7905 gen_rtx_REG (SImode, R4_REGNUM),
7908 operands[1] = replace_equiv_address (operands[1], tmp);
7911 (define_insn "*call_value_reg_armv5"
7912 [(set (match_operand 0 "" "")
7913 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7914 (match_operand 2 "" "")))
7915 (use (match_operand 3 "" ""))
7916 (clobber (reg:SI LR_REGNUM))]
7917 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7919 [(set_attr "type" "call")]
7922 (define_insn "*call_value_reg_arm"
7923 [(set (match_operand 0 "" "")
7924 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7925 (match_operand 2 "" "")))
7926 (use (match_operand 3 "" ""))
7927 (clobber (reg:SI LR_REGNUM))]
7928 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7930 return output_call (&operands[1]);
7932 [(set_attr "length" "12")
7933 (set_attr "type" "call")]
7936 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
7937 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
7939 (define_insn "*call_symbol"
7940 [(call (mem:SI (match_operand:SI 0 "" ""))
7941 (match_operand 1 "" ""))
7942 (use (match_operand 2 "" ""))
7943 (clobber (reg:SI LR_REGNUM))]
7945 && !SIBLING_CALL_P (insn)
7946 && (GET_CODE (operands[0]) == SYMBOL_REF)
7947 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
7950 rtx op = operands[0];
7952 /* Switch mode now when possible. */
7953 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7954 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7955 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
7957 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
7959 [(set_attr "type" "call")]
7962 (define_insn "*call_value_symbol"
7963 [(set (match_operand 0 "" "")
7964 (call (mem:SI (match_operand:SI 1 "" ""))
7965 (match_operand:SI 2 "" "")))
7966 (use (match_operand 3 "" ""))
7967 (clobber (reg:SI LR_REGNUM))]
7969 && !SIBLING_CALL_P (insn)
7970 && (GET_CODE (operands[1]) == SYMBOL_REF)
7971 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
7974 rtx op = operands[1];
7976 /* Switch mode now when possible. */
7977 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7978 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7979 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
7981 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
7983 [(set_attr "type" "call")]
7986 (define_expand "sibcall_internal"
7987 [(parallel [(call (match_operand 0 "memory_operand")
7988 (match_operand 1 "general_operand"))
7990 (use (match_operand 2 "" ""))])])
7992 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
7993 (define_expand "sibcall"
7994 [(parallel [(call (match_operand 0 "memory_operand")
7995 (match_operand 1 "general_operand"))
7997 (use (match_operand 2 "" ""))])]
8003 if ((!REG_P (XEXP (operands[0], 0))
8004 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
8005 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
8006 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
8007 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
8009 if (operands[2] == NULL_RTX)
8010 operands[2] = const0_rtx;
8012 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
8013 arm_emit_call_insn (pat, operands[0], true);
8018 (define_expand "sibcall_value_internal"
8019 [(parallel [(set (match_operand 0 "" "")
8020 (call (match_operand 1 "memory_operand")
8021 (match_operand 2 "general_operand")))
8023 (use (match_operand 3 "" ""))])])
8025 (define_expand "sibcall_value"
8026 [(parallel [(set (match_operand 0 "" "")
8027 (call (match_operand 1 "memory_operand")
8028 (match_operand 2 "general_operand")))
8030 (use (match_operand 3 "" ""))])]
8036 if ((!REG_P (XEXP (operands[1], 0))
8037 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
8038 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
8039 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
8040 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
8042 if (operands[3] == NULL_RTX)
8043 operands[3] = const0_rtx;
8045 pat = gen_sibcall_value_internal (operands[0], operands[1],
8046 operands[2], operands[3]);
8047 arm_emit_call_insn (pat, operands[1], true);
8052 (define_insn "*sibcall_insn"
8053 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
8054 (match_operand 1 "" ""))
8056 (use (match_operand 2 "" ""))]
8057 "TARGET_32BIT && SIBLING_CALL_P (insn)"
8059 if (which_alternative == 1)
8060 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8063 if (arm_arch5t || arm_arch4t)
8064 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
8066 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
8069 [(set_attr "type" "call")]
8072 (define_insn "*sibcall_value_insn"
8073 [(set (match_operand 0 "" "")
8074 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
8075 (match_operand 2 "" "")))
8077 (use (match_operand 3 "" ""))]
8078 "TARGET_32BIT && SIBLING_CALL_P (insn)"
8080 if (which_alternative == 1)
8081 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8084 if (arm_arch5t || arm_arch4t)
8085 return \"bx%?\\t%1\";
8087 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
8090 [(set_attr "type" "call")]
8093 (define_expand "<return_str>return"
8095 "(TARGET_ARM || (TARGET_THUMB2
8096 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
8097 && !IS_STACKALIGN (arm_current_func_type ())))
8098 <return_cond_false>"
8103 thumb2_expand_return (<return_simple_p>);
8110 ;; Often the return insn will be the same as loading from memory, so set attr
8111 (define_insn "*arm_return"
8113 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8116 if (arm_ccfsm_state == 2)
8118 arm_ccfsm_state += 2;
8121 return output_return_instruction (const_true_rtx, true, false, false);
8123 [(set_attr "type" "load_4")
8124 (set_attr "length" "12")
8125 (set_attr "predicable" "yes")]
8128 (define_insn "*cond_<return_str>return"
8130 (if_then_else (match_operator 0 "arm_comparison_operator"
8131 [(match_operand 1 "cc_register" "") (const_int 0)])
8134 "TARGET_ARM <return_cond_true>"
8137 if (arm_ccfsm_state == 2)
8139 arm_ccfsm_state += 2;
8142 return output_return_instruction (operands[0], true, false,
8145 [(set_attr "conds" "use")
8146 (set_attr "length" "12")
8147 (set_attr "type" "load_4")]
8150 (define_insn "*cond_<return_str>return_inverted"
8152 (if_then_else (match_operator 0 "arm_comparison_operator"
8153 [(match_operand 1 "cc_register" "") (const_int 0)])
8156 "TARGET_ARM <return_cond_true>"
8159 if (arm_ccfsm_state == 2)
8161 arm_ccfsm_state += 2;
8164 return output_return_instruction (operands[0], true, true,
8167 [(set_attr "conds" "use")
8168 (set_attr "length" "12")
8169 (set_attr "type" "load_4")]
8172 (define_insn "*arm_simple_return"
8177 if (arm_ccfsm_state == 2)
8179 arm_ccfsm_state += 2;
8182 return output_return_instruction (const_true_rtx, true, false, true);
8184 [(set_attr "type" "branch")
8185 (set_attr "length" "4")
8186 (set_attr "predicable" "yes")]
8189 ;; Generate a sequence of instructions to determine if the processor is
8190 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8193 (define_expand "return_addr_mask"
8195 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8197 (set (match_operand:SI 0 "s_register_operand")
8198 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8200 (const_int 67108860)))] ; 0x03fffffc
8203 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8206 (define_insn "*check_arch2"
8207 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8208 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8211 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8212 [(set_attr "length" "8")
8213 (set_attr "conds" "set")
8214 (set_attr "type" "multiple")]
8217 ;; Call subroutine returning any type.
8219 (define_expand "untyped_call"
8220 [(parallel [(call (match_operand 0 "" "")
8222 (match_operand 1 "" "")
8223 (match_operand 2 "" "")])]
8224 "TARGET_EITHER && !TARGET_FDPIC"
8228 rtx par = gen_rtx_PARALLEL (VOIDmode,
8229 rtvec_alloc (XVECLEN (operands[2], 0)));
8230 rtx addr = gen_reg_rtx (Pmode);
8234 emit_move_insn (addr, XEXP (operands[1], 0));
8235 mem = change_address (operands[1], BLKmode, addr);
8237 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8239 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8241 /* Default code only uses r0 as a return value, but we could
8242 be using anything up to 4 registers. */
8243 if (REGNO (src) == R0_REGNUM)
8244 src = gen_rtx_REG (TImode, R0_REGNUM);
8246 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8248 size += GET_MODE_SIZE (GET_MODE (src));
8251 emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
8255 for (i = 0; i < XVECLEN (par, 0); i++)
8257 HOST_WIDE_INT offset = 0;
8258 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8261 emit_move_insn (addr, plus_constant (Pmode, addr, size));
8263 mem = change_address (mem, GET_MODE (reg), NULL);
8264 if (REGNO (reg) == R0_REGNUM)
8266 /* On thumb we have to use a write-back instruction. */
8267 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
8268 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8269 size = TARGET_ARM ? 16 : 0;
8273 emit_move_insn (mem, reg);
8274 size = GET_MODE_SIZE (GET_MODE (reg));
8278 /* The optimizer does not know that the call sets the function value
8279 registers we stored in the result block. We avoid problems by
8280 claiming that all hard registers are used and clobbered at this
8282 emit_insn (gen_blockage ());
8288 (define_expand "untyped_return"
8289 [(match_operand:BLK 0 "memory_operand")
8290 (match_operand 1 "" "")]
8291 "TARGET_EITHER && !TARGET_FDPIC"
8295 rtx addr = gen_reg_rtx (Pmode);
8299 emit_move_insn (addr, XEXP (operands[0], 0));
8300 mem = change_address (operands[0], BLKmode, addr);
8302 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8304 HOST_WIDE_INT offset = 0;
8305 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8308 emit_move_insn (addr, plus_constant (Pmode, addr, size));
8310 mem = change_address (mem, GET_MODE (reg), NULL);
8311 if (REGNO (reg) == R0_REGNUM)
8313 /* On thumb we have to use a write-back instruction. */
8314 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
8315 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8316 size = TARGET_ARM ? 16 : 0;
8320 emit_move_insn (reg, mem);
8321 size = GET_MODE_SIZE (GET_MODE (reg));
8325 /* Emit USE insns before the return. */
8326 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8327 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8329 /* Construct the return. */
8330 expand_naked_return ();
8336 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8337 ;; all of memory. This blocks insns from being moved across this point.
8339 (define_insn "blockage"
8340 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8343 [(set_attr "length" "0")
8344 (set_attr "type" "block")]
8347 ;; Since we hard code r0 here use the 'o' constraint to prevent
8348 ;; provoking undefined behaviour in the hardware with putting out
8349 ;; auto-increment operations with potentially r0 as the base register.
8350 (define_insn "probe_stack"
8351 [(set (match_operand:SI 0 "memory_operand" "=o")
8352 (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
8355 [(set_attr "type" "store_4")
8356 (set_attr "predicable" "yes")]
8359 (define_insn "probe_stack_range"
8360 [(set (match_operand:SI 0 "register_operand" "=r")
8361 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
8362 (match_operand:SI 2 "register_operand" "r")]
8363 VUNSPEC_PROBE_STACK_RANGE))]
8366 return output_probe_stack_range (operands[0], operands[2]);
8368 [(set_attr "type" "multiple")
8369 (set_attr "conds" "clob")]
8372 ;; Named patterns for stack smashing protection.
8373 (define_expand "stack_protect_combined_set"
8375 [(set (match_operand:SI 0 "memory_operand")
8376 (unspec:SI [(match_operand:SI 1 "guard_operand")]
8378 (clobber (match_scratch:SI 2 ""))
8379 (clobber (match_scratch:SI 3 ""))])]
8384 ;; Use a separate insn from the above expand to be able to have the mem outside
8385 ;; the operand #1 when register allocation comes. This is needed to avoid LRA
8386 ;; try to reload the guard since we need to control how PIC access is done in
8387 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
8388 ;; legitimize_pic_address ()).
8389 (define_insn_and_split "*stack_protect_combined_set_insn"
8390 [(set (match_operand:SI 0 "memory_operand" "=m,m")
8391 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
8393 (clobber (match_scratch:SI 2 "=&l,&r"))
8394 (clobber (match_scratch:SI 3 "=&l,&r"))]
8398 [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
8400 (clobber (match_dup 2))])]
8408 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8410 pic_reg = operands[3];
8412 /* Forces recomputing of GOT base now. */
8413 legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
8414 true /*compute_now*/);
8418 if (address_operand (operands[1], SImode))
8419 operands[2] = operands[1];
8422 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
8423 emit_move_insn (operands[2], mem);
8427 [(set_attr "arch" "t1,32")]
8430 ;; DO NOT SPLIT THIS INSN. It's important for security reasons that the
8431 ;; canary value does not live beyond the life of this sequence.
8432 (define_insn "*stack_protect_set_insn"
8433 [(set (match_operand:SI 0 "memory_operand" "=m,m")
8434 (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
8436 (clobber (match_dup 1))]
8439 ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
8440 ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
8441 [(set_attr "length" "8,12")
8442 (set_attr "conds" "clob,nocond")
8443 (set_attr "type" "multiple")
8444 (set_attr "arch" "t1,32")]
8447 (define_expand "stack_protect_combined_test"
8451 (eq (match_operand:SI 0 "memory_operand")
8452 (unspec:SI [(match_operand:SI 1 "guard_operand")]
8454 (label_ref (match_operand 2))
8456 (clobber (match_scratch:SI 3 ""))
8457 (clobber (match_scratch:SI 4 ""))
8458 (clobber (reg:CC CC_REGNUM))])]
8463 ;; Use a separate insn from the above expand to be able to have the mem outside
8464 ;; the operand #1 when register allocation comes. This is needed to avoid LRA
8465 ;; try to reload the guard since we need to control how PIC access is done in
8466 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
8467 ;; legitimize_pic_address ()).
8468 (define_insn_and_split "*stack_protect_combined_test_insn"
8471 (eq (match_operand:SI 0 "memory_operand" "m,m")
8472 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
8474 (label_ref (match_operand 2))
8476 (clobber (match_scratch:SI 3 "=&l,&r"))
8477 (clobber (match_scratch:SI 4 "=&l,&r"))
8478 (clobber (reg:CC CC_REGNUM))]
8491 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8493 pic_reg = operands[4];
8495 /* Forces recomputing of GOT base now. */
8496 legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
8497 true /*compute_now*/);
8501 if (address_operand (operands[1], SImode))
8502 operands[3] = operands[1];
8505 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
8506 emit_move_insn (operands[3], mem);
8511 emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
8513 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
8514 eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
8515 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
8519 emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
8521 eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
8522 emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
8527 [(set_attr "arch" "t1,32")]
8530 (define_insn "arm_stack_protect_test_insn"
8531 [(set (reg:CC_Z CC_REGNUM)
8532 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
8533 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
8536 (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
8537 (clobber (match_dup 2))]
8539 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0"
8540 [(set_attr "length" "8,12")
8541 (set_attr "conds" "set")
8542 (set_attr "type" "multiple")
8543 (set_attr "arch" "t,32")]
8546 (define_expand "casesi"
8547 [(match_operand:SI 0 "s_register_operand") ; index to jump on
8548 (match_operand:SI 1 "const_int_operand") ; lower bound
8549 (match_operand:SI 2 "const_int_operand") ; total range
8550 (match_operand:SI 3 "" "") ; table label
8551 (match_operand:SI 4 "" "")] ; Out of range label
8552 "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
8555 enum insn_code code;
8556 if (operands[1] != const0_rtx)
8558 rtx reg = gen_reg_rtx (SImode);
8560 emit_insn (gen_addsi3 (reg, operands[0],
8561 gen_int_mode (-INTVAL (operands[1]),
8567 code = CODE_FOR_arm_casesi_internal;
8568 else if (TARGET_THUMB1)
8569 code = CODE_FOR_thumb1_casesi_internal_pic;
8571 code = CODE_FOR_thumb2_casesi_internal_pic;
8573 code = CODE_FOR_thumb2_casesi_internal;
8575 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8576 operands[2] = force_reg (SImode, operands[2]);
8578 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8579 operands[3], operands[4]));
8584 ;; The USE in this pattern is needed to tell flow analysis that this is
8585 ;; a CASESI insn. It has no other purpose.
8586 (define_expand "arm_casesi_internal"
8587 [(parallel [(set (pc)
8589 (leu (match_operand:SI 0 "s_register_operand")
8590 (match_operand:SI 1 "arm_rhs_operand"))
8592 (label_ref:SI (match_operand 3 ""))))
8593 (clobber (reg:CC CC_REGNUM))
8594 (use (label_ref:SI (match_operand 2 "")))])]
8597 operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
8598 operands[4] = gen_rtx_PLUS (SImode, operands[4],
8599 gen_rtx_LABEL_REF (SImode, operands[2]));
8600 operands[4] = gen_rtx_MEM (SImode, operands[4]);
8601 MEM_READONLY_P (operands[4]) = 1;
8602 MEM_NOTRAP_P (operands[4]) = 1;
8605 (define_insn "*arm_casesi_internal"
8606 [(parallel [(set (pc)
8608 (leu (match_operand:SI 0 "s_register_operand" "r")
8609 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8610 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8611 (label_ref:SI (match_operand 2 "" ""))))
8612 (label_ref:SI (match_operand 3 "" ""))))
8613 (clobber (reg:CC CC_REGNUM))
8614 (use (label_ref:SI (match_dup 2)))])]
8618 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8619 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8621 [(set_attr "conds" "clob")
8622 (set_attr "length" "12")
8623 (set_attr "type" "multiple")]
8626 (define_expand "indirect_jump"
8628 (match_operand:SI 0 "s_register_operand"))]
8631 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8632 address and use bx. */
8636 tmp = gen_reg_rtx (SImode);
8637 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8643 ;; NB Never uses BX.
8644 (define_insn "*arm_indirect_jump"
8646 (match_operand:SI 0 "s_register_operand" "r"))]
8648 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8649 [(set_attr "predicable" "yes")
8650 (set_attr "type" "branch")]
8653 (define_insn "*load_indirect_jump"
8655 (match_operand:SI 0 "memory_operand" "m"))]
8657 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8658 [(set_attr "type" "load_4")
8659 (set_attr "pool_range" "4096")
8660 (set_attr "neg_pool_range" "4084")
8661 (set_attr "predicable" "yes")]
8671 [(set (attr "length")
8672 (if_then_else (eq_attr "is_thumb" "yes")
8675 (set_attr "type" "mov_reg")]
8679 [(trap_if (const_int 1) (const_int 0))]
8683 return \".inst\\t0xe7f000f0\";
8685 return \".inst\\t0xdeff\";
8687 [(set (attr "length")
8688 (if_then_else (eq_attr "is_thumb" "yes")
8691 (set_attr "type" "trap")
8692 (set_attr "conds" "unconditional")]
8696 ;; Patterns to allow combination of arithmetic, cond code and shifts
8698 (define_insn "*<arith_shift_insn>_multsi"
8699 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8701 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
8702 (match_operand:SI 3 "power_of_two_operand" ""))
8703 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
8705 "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
8706 [(set_attr "predicable" "yes")
8707 (set_attr "shift" "2")
8708 (set_attr "arch" "a,t2")
8709 (set_attr "type" "alu_shift_imm")])
8711 (define_insn "*<arith_shift_insn>_shiftsi"
8712 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8714 (match_operator:SI 2 "shift_nomul_operator"
8715 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8716 (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
8717 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
8718 "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
8719 "<arith_shift_insn>%?\\t%0, %1, %3%S2"
8720 [(set_attr "predicable" "yes")
8721 (set_attr "shift" "3")
8722 (set_attr "arch" "a,t2,a")
8723 (set_attr "type" "alu_shift_imm,alu_shift_imm,alu_shift_reg")])
8726 [(set (match_operand:SI 0 "s_register_operand" "")
8727 (match_operator:SI 1 "shiftable_operator"
8728 [(match_operator:SI 2 "shiftable_operator"
8729 [(match_operator:SI 3 "shift_operator"
8730 [(match_operand:SI 4 "s_register_operand" "")
8731 (match_operand:SI 5 "reg_or_int_operand" "")])
8732 (match_operand:SI 6 "s_register_operand" "")])
8733 (match_operand:SI 7 "arm_rhs_operand" "")]))
8734 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8737 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8740 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
8743 (define_insn "*arith_shiftsi_compare0"
8744 [(set (reg:CC_NOOV CC_REGNUM)
8746 (match_operator:SI 1 "shiftable_operator"
8747 [(match_operator:SI 3 "shift_operator"
8748 [(match_operand:SI 4 "s_register_operand" "r,r")
8749 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8750 (match_operand:SI 2 "s_register_operand" "r,r")])
8752 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8753 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8756 "%i1s%?\\t%0, %2, %4%S3"
8757 [(set_attr "conds" "set")
8758 (set_attr "shift" "4")
8759 (set_attr "arch" "32,a")
8760 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8762 (define_insn "*arith_shiftsi_compare0_scratch"
8763 [(set (reg:CC_NOOV CC_REGNUM)
8765 (match_operator:SI 1 "shiftable_operator"
8766 [(match_operator:SI 3 "shift_operator"
8767 [(match_operand:SI 4 "s_register_operand" "r,r")
8768 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8769 (match_operand:SI 2 "s_register_operand" "r,r")])
8771 (clobber (match_scratch:SI 0 "=r,r"))]
8773 "%i1s%?\\t%0, %2, %4%S3"
8774 [(set_attr "conds" "set")
8775 (set_attr "shift" "4")
8776 (set_attr "arch" "32,a")
8777 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8779 (define_insn "*sub_shiftsi"
8780 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8781 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8782 (match_operator:SI 2 "shift_operator"
8783 [(match_operand:SI 3 "s_register_operand" "r,r")
8784 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8786 "sub%?\\t%0, %1, %3%S2"
8787 [(set_attr "predicable" "yes")
8788 (set_attr "predicable_short_it" "no")
8789 (set_attr "shift" "3")
8790 (set_attr "arch" "32,a")
8791 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8793 (define_insn "*sub_shiftsi_compare0"
8794 [(set (reg:CC_NOOV CC_REGNUM)
8796 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8797 (match_operator:SI 2 "shift_operator"
8798 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8799 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8801 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8802 (minus:SI (match_dup 1)
8803 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8805 "subs%?\\t%0, %1, %3%S2"
8806 [(set_attr "conds" "set")
8807 (set_attr "shift" "3")
8808 (set_attr "arch" "32,a,a")
8809 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
8811 (define_insn "*sub_shiftsi_compare0_scratch"
8812 [(set (reg:CC_NOOV CC_REGNUM)
8814 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8815 (match_operator:SI 2 "shift_operator"
8816 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8817 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8819 (clobber (match_scratch:SI 0 "=r,r,r"))]
8821 "subs%?\\t%0, %1, %3%S2"
8822 [(set_attr "conds" "set")
8823 (set_attr "shift" "3")
8824 (set_attr "arch" "32,a,a")
8825 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
8828 (define_insn_and_split "*and_scc"
8829 [(set (match_operand:SI 0 "s_register_operand" "=r")
8830 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8831 [(match_operand 2 "cc_register" "") (const_int 0)])
8832 (match_operand:SI 3 "s_register_operand" "r")))]
8834 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
8835 "&& reload_completed"
8836 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
8837 (cond_exec (match_dup 4) (set (match_dup 0)
8838 (and:SI (match_dup 3) (const_int 1))))]
8840 machine_mode mode = GET_MODE (operands[2]);
8841 enum rtx_code rc = GET_CODE (operands[1]);
8843 /* Note that operands[4] is the same as operands[1],
8844 but with VOIDmode as the result. */
8845 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8846 if (mode == CCFPmode || mode == CCFPEmode)
8847 rc = reverse_condition_maybe_unordered (rc);
8849 rc = reverse_condition (rc);
8850 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8852 [(set_attr "conds" "use")
8853 (set_attr "type" "multiple")
8854 (set_attr "length" "8")]
8857 (define_insn_and_split "*ior_scc"
8858 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8859 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
8860 [(match_operand 2 "cc_register" "") (const_int 0)])
8861 (match_operand:SI 3 "s_register_operand" "0,?r")))]
8866 "&& reload_completed
8867 && REGNO (operands [0]) != REGNO (operands[3])"
8868 ;; && which_alternative == 1
8869 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
8870 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
8871 (cond_exec (match_dup 4) (set (match_dup 0)
8872 (ior:SI (match_dup 3) (const_int 1))))]
8874 machine_mode mode = GET_MODE (operands[2]);
8875 enum rtx_code rc = GET_CODE (operands[1]);
8877 /* Note that operands[4] is the same as operands[1],
8878 but with VOIDmode as the result. */
8879 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8880 if (mode == CCFPmode || mode == CCFPEmode)
8881 rc = reverse_condition_maybe_unordered (rc);
8883 rc = reverse_condition (rc);
8884 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8886 [(set_attr "conds" "use")
8887 (set_attr "length" "4,8")
8888 (set_attr "type" "logic_imm,multiple")]
8891 ; A series of splitters for the compare_scc pattern below. Note that
8892 ; order is important.
8894 [(set (match_operand:SI 0 "s_register_operand" "")
8895 (lt:SI (match_operand:SI 1 "s_register_operand" "")
8897 (clobber (reg:CC CC_REGNUM))]
8898 "TARGET_32BIT && reload_completed"
8899 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
8902 [(set (match_operand:SI 0 "s_register_operand" "")
8903 (ge:SI (match_operand:SI 1 "s_register_operand" "")
8905 (clobber (reg:CC CC_REGNUM))]
8906 "TARGET_32BIT && reload_completed"
8907 [(set (match_dup 0) (not:SI (match_dup 1)))
8908 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
8911 [(set (match_operand:SI 0 "s_register_operand" "")
8912 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8914 (clobber (reg:CC CC_REGNUM))]
8915 "arm_arch5t && TARGET_32BIT"
8916 [(set (match_dup 0) (clz:SI (match_dup 1)))
8917 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8921 [(set (match_operand:SI 0 "s_register_operand" "")
8922 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8924 (clobber (reg:CC CC_REGNUM))]
8925 "TARGET_32BIT && reload_completed"
8927 [(set (reg:CC CC_REGNUM)
8928 (compare:CC (const_int 1) (match_dup 1)))
8930 (minus:SI (const_int 1) (match_dup 1)))])
8931 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
8932 (set (match_dup 0) (const_int 0)))])
8935 [(set (match_operand:SI 0 "s_register_operand" "")
8936 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8937 (match_operand:SI 2 "const_int_operand" "")))
8938 (clobber (reg:CC CC_REGNUM))]
8939 "TARGET_32BIT && reload_completed"
8941 [(set (reg:CC CC_REGNUM)
8942 (compare:CC (match_dup 1) (match_dup 2)))
8943 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
8944 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
8945 (set (match_dup 0) (const_int 1)))]
8947 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
8951 [(set (match_operand:SI 0 "s_register_operand" "")
8952 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8953 (match_operand:SI 2 "arm_add_operand" "")))
8954 (clobber (reg:CC CC_REGNUM))]
8955 "TARGET_32BIT && reload_completed"
8957 [(set (reg:CC_NOOV CC_REGNUM)
8958 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
8960 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
8961 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
8962 (set (match_dup 0) (const_int 1)))])
8964 (define_insn_and_split "*compare_scc"
8965 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8966 (match_operator:SI 1 "arm_comparison_operator"
8967 [(match_operand:SI 2 "s_register_operand" "r,r")
8968 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8969 (clobber (reg:CC CC_REGNUM))]
8972 "&& reload_completed"
8973 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
8974 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
8975 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
8978 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
8979 operands[2], operands[3]);
8980 enum rtx_code rc = GET_CODE (operands[1]);
8982 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
8984 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8985 if (mode == CCFPmode || mode == CCFPEmode)
8986 rc = reverse_condition_maybe_unordered (rc);
8988 rc = reverse_condition (rc);
8989 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8991 [(set_attr "type" "multiple")]
8994 ;; Attempt to improve the sequence generated by the compare_scc splitters
8995 ;; not to use conditional execution.
8997 ;; Rd = (eq (reg1) (const_int0)) // ARMv5
9001 [(set (reg:CC CC_REGNUM)
9002 (compare:CC (match_operand:SI 1 "register_operand" "")
9004 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9005 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9006 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9007 (set (match_dup 0) (const_int 1)))]
9008 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9009 [(set (match_dup 0) (clz:SI (match_dup 1)))
9010 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9013 ;; Rd = (eq (reg1) (const_int0)) // !ARMv5
9017 [(set (reg:CC CC_REGNUM)
9018 (compare:CC (match_operand:SI 1 "register_operand" "")
9020 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9021 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9022 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9023 (set (match_dup 0) (const_int 1)))
9024 (match_scratch:SI 2 "r")]
9025 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9027 [(set (reg:CC CC_REGNUM)
9028 (compare:CC (const_int 0) (match_dup 1)))
9029 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
9031 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
9032 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
9035 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
9036 ;; sub Rd, Reg1, reg2
9040 [(set (reg:CC CC_REGNUM)
9041 (compare:CC (match_operand:SI 1 "register_operand" "")
9042 (match_operand:SI 2 "arm_rhs_operand" "")))
9043 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9044 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9045 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9046 (set (match_dup 0) (const_int 1)))]
9047 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
9048 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
9049 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
9050 (set (match_dup 0) (clz:SI (match_dup 0)))
9051 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9055 ;; Rd = (eq (reg1) (reg2)) // ! ARMv5 or optimising for size.
9056 ;; sub T1, Reg1, reg2
9060 [(set (reg:CC CC_REGNUM)
9061 (compare:CC (match_operand:SI 1 "register_operand" "")
9062 (match_operand:SI 2 "arm_rhs_operand" "")))
9063 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9064 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9065 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9066 (set (match_dup 0) (const_int 1)))
9067 (match_scratch:SI 3 "r")]
9068 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9069 [(set (match_dup 3) (match_dup 4))
9071 [(set (reg:CC CC_REGNUM)
9072 (compare:CC (const_int 0) (match_dup 3)))
9073 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
9075 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
9076 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
9078 if (CONST_INT_P (operands[2]))
9079 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
9081 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
9084 (define_insn "*cond_move"
9085 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9086 (if_then_else:SI (match_operator 3 "equality_operator"
9087 [(match_operator 4 "arm_comparison_operator"
9088 [(match_operand 5 "cc_register" "") (const_int 0)])
9090 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9091 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9094 if (GET_CODE (operands[3]) == NE)
9096 if (which_alternative != 1)
9097 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9098 if (which_alternative != 0)
9099 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9102 if (which_alternative != 0)
9103 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9104 if (which_alternative != 1)
9105 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9108 [(set_attr "conds" "use")
9109 (set_attr_alternative "type"
9110 [(if_then_else (match_operand 2 "const_int_operand" "")
9111 (const_string "mov_imm")
9112 (const_string "mov_reg"))
9113 (if_then_else (match_operand 1 "const_int_operand" "")
9114 (const_string "mov_imm")
9115 (const_string "mov_reg"))
9116 (const_string "multiple")])
9117 (set_attr "length" "4,4,8")]
9120 (define_insn "*cond_arith"
9121 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9122 (match_operator:SI 5 "shiftable_operator"
9123 [(match_operator:SI 4 "arm_comparison_operator"
9124 [(match_operand:SI 2 "s_register_operand" "r,r")
9125 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9126 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9127 (clobber (reg:CC CC_REGNUM))]
9130 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9131 return \"%i5\\t%0, %1, %2, lsr #31\";
9133 output_asm_insn (\"cmp\\t%2, %3\", operands);
9134 if (GET_CODE (operands[5]) == AND)
9135 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9136 else if (GET_CODE (operands[5]) == MINUS)
9137 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9138 else if (which_alternative != 0)
9139 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9140 return \"%i5%d4\\t%0, %1, #1\";
9142 [(set_attr "conds" "clob")
9143 (set_attr "length" "12")
9144 (set_attr "type" "multiple")]
9147 (define_insn "*cond_sub"
9148 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9149 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9150 (match_operator:SI 4 "arm_comparison_operator"
9151 [(match_operand:SI 2 "s_register_operand" "r,r")
9152 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9153 (clobber (reg:CC CC_REGNUM))]
9156 output_asm_insn (\"cmp\\t%2, %3\", operands);
9157 if (which_alternative != 0)
9158 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9159 return \"sub%d4\\t%0, %1, #1\";
9161 [(set_attr "conds" "clob")
9162 (set_attr "length" "8,12")
9163 (set_attr "type" "multiple")]
9166 (define_insn "*cmp_ite0"
9167 [(set (match_operand 6 "dominant_cc_register" "")
9170 (match_operator 4 "arm_comparison_operator"
9171 [(match_operand:SI 0 "s_register_operand"
9172 "l,l,l,r,r,r,r,r,r")
9173 (match_operand:SI 1 "arm_add_operand"
9174 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9175 (match_operator:SI 5 "arm_comparison_operator"
9176 [(match_operand:SI 2 "s_register_operand"
9177 "l,r,r,l,l,r,r,r,r")
9178 (match_operand:SI 3 "arm_add_operand"
9179 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9185 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9187 {\"cmp%d5\\t%0, %1\",
9188 \"cmp%d4\\t%2, %3\"},
9189 {\"cmn%d5\\t%0, #%n1\",
9190 \"cmp%d4\\t%2, %3\"},
9191 {\"cmp%d5\\t%0, %1\",
9192 \"cmn%d4\\t%2, #%n3\"},
9193 {\"cmn%d5\\t%0, #%n1\",
9194 \"cmn%d4\\t%2, #%n3\"}
9196 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9201 \"cmn\\t%0, #%n1\"},
9202 {\"cmn\\t%2, #%n3\",
9204 {\"cmn\\t%2, #%n3\",
9207 static const char * const ite[2] =
9212 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9213 CMP_CMP, CMN_CMP, CMP_CMP,
9214 CMN_CMP, CMP_CMN, CMN_CMN};
9216 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9218 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9219 if (TARGET_THUMB2) {
9220 output_asm_insn (ite[swap], operands);
9222 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9225 [(set_attr "conds" "set")
9226 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9227 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
9228 (set_attr "type" "multiple")
9229 (set_attr_alternative "length"
9235 (if_then_else (eq_attr "is_thumb" "no")
9238 (if_then_else (eq_attr "is_thumb" "no")
9241 (if_then_else (eq_attr "is_thumb" "no")
9244 (if_then_else (eq_attr "is_thumb" "no")
9249 (define_insn "*cmp_ite1"
9250 [(set (match_operand 6 "dominant_cc_register" "")
9253 (match_operator 4 "arm_comparison_operator"
9254 [(match_operand:SI 0 "s_register_operand"
9255 "l,l,l,r,r,r,r,r,r")
9256 (match_operand:SI 1 "arm_add_operand"
9257 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9258 (match_operator:SI 5 "arm_comparison_operator"
9259 [(match_operand:SI 2 "s_register_operand"
9260 "l,r,r,l,l,r,r,r,r")
9261 (match_operand:SI 3 "arm_add_operand"
9262 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9268 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9272 {\"cmn\\t%0, #%n1\",
9275 \"cmn\\t%2, #%n3\"},
9276 {\"cmn\\t%0, #%n1\",
9279 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9281 {\"cmp%d4\\t%2, %3\",
9282 \"cmp%D5\\t%0, %1\"},
9283 {\"cmp%d4\\t%2, %3\",
9284 \"cmn%D5\\t%0, #%n1\"},
9285 {\"cmn%d4\\t%2, #%n3\",
9286 \"cmp%D5\\t%0, %1\"},
9287 {\"cmn%d4\\t%2, #%n3\",
9288 \"cmn%D5\\t%0, #%n1\"}
9290 static const char * const ite[2] =
9295 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9296 CMP_CMP, CMN_CMP, CMP_CMP,
9297 CMN_CMP, CMP_CMN, CMN_CMN};
9299 comparison_dominates_p (GET_CODE (operands[5]),
9300 reverse_condition (GET_CODE (operands[4])));
9302 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9303 if (TARGET_THUMB2) {
9304 output_asm_insn (ite[swap], operands);
9306 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9309 [(set_attr "conds" "set")
9310 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9311 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
9312 (set_attr_alternative "length"
9318 (if_then_else (eq_attr "is_thumb" "no")
9321 (if_then_else (eq_attr "is_thumb" "no")
9324 (if_then_else (eq_attr "is_thumb" "no")
9327 (if_then_else (eq_attr "is_thumb" "no")
9330 (set_attr "type" "multiple")]
9333 (define_insn "*cmp_and"
9334 [(set (match_operand 6 "dominant_cc_register" "")
9337 (match_operator 4 "arm_comparison_operator"
9338 [(match_operand:SI 0 "s_register_operand"
9339 "l,l,l,r,r,r,r,r,r,r")
9340 (match_operand:SI 1 "arm_add_operand"
9341 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
9342 (match_operator:SI 5 "arm_comparison_operator"
9343 [(match_operand:SI 2 "s_register_operand"
9344 "l,r,r,l,l,r,r,r,r,r")
9345 (match_operand:SI 3 "arm_add_operand"
9346 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
9351 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9353 {\"cmp%d5\\t%0, %1\",
9354 \"cmp%d4\\t%2, %3\"},
9355 {\"cmn%d5\\t%0, #%n1\",
9356 \"cmp%d4\\t%2, %3\"},
9357 {\"cmp%d5\\t%0, %1\",
9358 \"cmn%d4\\t%2, #%n3\"},
9359 {\"cmn%d5\\t%0, #%n1\",
9360 \"cmn%d4\\t%2, #%n3\"}
9362 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9367 \"cmn\\t%0, #%n1\"},
9368 {\"cmn\\t%2, #%n3\",
9370 {\"cmn\\t%2, #%n3\",
9373 static const char *const ite[2] =
9378 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
9379 CMP_CMP, CMN_CMP, CMP_CMP,
9380 CMP_CMP, CMN_CMP, CMP_CMN,
9383 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9385 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9386 if (TARGET_THUMB2) {
9387 output_asm_insn (ite[swap], operands);
9389 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9392 [(set_attr "conds" "set")
9393 (set_attr "predicable" "no")
9394 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
9395 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
9396 (set_attr_alternative "length"
9403 (if_then_else (eq_attr "is_thumb" "no")
9406 (if_then_else (eq_attr "is_thumb" "no")
9409 (if_then_else (eq_attr "is_thumb" "no")
9412 (if_then_else (eq_attr "is_thumb" "no")
9415 (set_attr "type" "multiple")]
9418 (define_insn "*cmp_ior"
9419 [(set (match_operand 6 "dominant_cc_register" "")
9422 (match_operator 4 "arm_comparison_operator"
9423 [(match_operand:SI 0 "s_register_operand"
9424 "l,l,l,r,r,r,r,r,r,r")
9425 (match_operand:SI 1 "arm_add_operand"
9426 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
9427 (match_operator:SI 5 "arm_comparison_operator"
9428 [(match_operand:SI 2 "s_register_operand"
9429 "l,r,r,l,l,r,r,r,r,r")
9430 (match_operand:SI 3 "arm_add_operand"
9431 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
9436 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9440 {\"cmn\\t%0, #%n1\",
9443 \"cmn\\t%2, #%n3\"},
9444 {\"cmn\\t%0, #%n1\",
9447 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9449 {\"cmp%D4\\t%2, %3\",
9450 \"cmp%D5\\t%0, %1\"},
9451 {\"cmp%D4\\t%2, %3\",
9452 \"cmn%D5\\t%0, #%n1\"},
9453 {\"cmn%D4\\t%2, #%n3\",
9454 \"cmp%D5\\t%0, %1\"},
9455 {\"cmn%D4\\t%2, #%n3\",
9456 \"cmn%D5\\t%0, #%n1\"}
9458 static const char *const ite[2] =
9463 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
9464 CMP_CMP, CMN_CMP, CMP_CMP,
9465 CMP_CMP, CMN_CMP, CMP_CMN,
9468 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9470 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9471 if (TARGET_THUMB2) {
9472 output_asm_insn (ite[swap], operands);
9474 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9478 [(set_attr "conds" "set")
9479 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
9480 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
9481 (set_attr_alternative "length"
9488 (if_then_else (eq_attr "is_thumb" "no")
9491 (if_then_else (eq_attr "is_thumb" "no")
9494 (if_then_else (eq_attr "is_thumb" "no")
9497 (if_then_else (eq_attr "is_thumb" "no")
9500 (set_attr "type" "multiple")]
9503 (define_insn_and_split "*ior_scc_scc"
9504 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9505 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9506 [(match_operand:SI 1 "s_register_operand" "l,r")
9507 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9508 (match_operator:SI 6 "arm_comparison_operator"
9509 [(match_operand:SI 4 "s_register_operand" "l,r")
9510 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
9511 (clobber (reg:CC CC_REGNUM))]
9513 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9516 "TARGET_32BIT && reload_completed"
9520 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9521 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9523 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9525 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9528 [(set_attr "conds" "clob")
9529 (set_attr "enabled_for_short_it" "yes,no")
9530 (set_attr "length" "16")
9531 (set_attr "type" "multiple")]
9534 ; If the above pattern is followed by a CMP insn, then the compare is
9535 ; redundant, since we can rework the conditional instruction that follows.
9536 (define_insn_and_split "*ior_scc_scc_cmp"
9537 [(set (match_operand 0 "dominant_cc_register" "")
9538 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9539 [(match_operand:SI 1 "s_register_operand" "l,r")
9540 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9541 (match_operator:SI 6 "arm_comparison_operator"
9542 [(match_operand:SI 4 "s_register_operand" "l,r")
9543 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9545 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9546 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9547 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9550 "TARGET_32BIT && reload_completed"
9554 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9555 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9557 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9559 [(set_attr "conds" "set")
9560 (set_attr "enabled_for_short_it" "yes,no")
9561 (set_attr "length" "16")
9562 (set_attr "type" "multiple")]
9565 (define_insn_and_split "*and_scc_scc"
9566 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9567 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9568 [(match_operand:SI 1 "s_register_operand" "l,r")
9569 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9570 (match_operator:SI 6 "arm_comparison_operator"
9571 [(match_operand:SI 4 "s_register_operand" "l,r")
9572 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
9573 (clobber (reg:CC CC_REGNUM))]
9575 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9578 "TARGET_32BIT && reload_completed
9579 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9584 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9585 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9587 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9589 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9592 [(set_attr "conds" "clob")
9593 (set_attr "enabled_for_short_it" "yes,no")
9594 (set_attr "length" "16")
9595 (set_attr "type" "multiple")]
9598 ; If the above pattern is followed by a CMP insn, then the compare is
9599 ; redundant, since we can rework the conditional instruction that follows.
9600 (define_insn_and_split "*and_scc_scc_cmp"
9601 [(set (match_operand 0 "dominant_cc_register" "")
9602 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9603 [(match_operand:SI 1 "s_register_operand" "l,r")
9604 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9605 (match_operator:SI 6 "arm_comparison_operator"
9606 [(match_operand:SI 4 "s_register_operand" "l,r")
9607 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9609 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9610 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9611 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9614 "TARGET_32BIT && reload_completed"
9618 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9619 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9621 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9623 [(set_attr "conds" "set")
9624 (set_attr "enabled_for_short_it" "yes,no")
9625 (set_attr "length" "16")
9626 (set_attr "type" "multiple")]
9629 ;; If there is no dominance in the comparison, then we can still save an
9630 ;; instruction in the AND case, since we can know that the second compare
9631 ;; need only zero the value if false (if true, then the value is already
9633 (define_insn_and_split "*and_scc_scc_nodom"
9634 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
9635 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9636 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9637 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9638 (match_operator:SI 6 "arm_comparison_operator"
9639 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9640 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9641 (clobber (reg:CC CC_REGNUM))]
9643 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9646 "TARGET_32BIT && reload_completed"
9647 [(parallel [(set (match_dup 0)
9648 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9649 (clobber (reg:CC CC_REGNUM))])
9650 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9652 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9655 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9656 operands[4], operands[5]),
9658 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9660 [(set_attr "conds" "clob")
9661 (set_attr "length" "20")
9662 (set_attr "type" "multiple")]
9666 [(set (reg:CC_NOOV CC_REGNUM)
9667 (compare:CC_NOOV (ior:SI
9668 (and:SI (match_operand:SI 0 "s_register_operand" "")
9670 (match_operator:SI 1 "arm_comparison_operator"
9671 [(match_operand:SI 2 "s_register_operand" "")
9672 (match_operand:SI 3 "arm_add_operand" "")]))
9674 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9677 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9679 (set (reg:CC_NOOV CC_REGNUM)
9680 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9685 [(set (reg:CC_NOOV CC_REGNUM)
9686 (compare:CC_NOOV (ior:SI
9687 (match_operator:SI 1 "arm_comparison_operator"
9688 [(match_operand:SI 2 "s_register_operand" "")
9689 (match_operand:SI 3 "arm_add_operand" "")])
9690 (and:SI (match_operand:SI 0 "s_register_operand" "")
9693 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9696 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9698 (set (reg:CC_NOOV CC_REGNUM)
9699 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9702 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9704 (define_insn_and_split "*negscc"
9705 [(set (match_operand:SI 0 "s_register_operand" "=r")
9706 (neg:SI (match_operator 3 "arm_comparison_operator"
9707 [(match_operand:SI 1 "s_register_operand" "r")
9708 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9709 (clobber (reg:CC CC_REGNUM))]
9712 "&& reload_completed"
9715 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
9717 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9719 /* Emit mov\\t%0, %1, asr #31 */
9720 emit_insn (gen_rtx_SET (operands[0],
9721 gen_rtx_ASHIFTRT (SImode,
9726 else if (GET_CODE (operands[3]) == NE)
9728 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
9729 if (CONST_INT_P (operands[2]))
9730 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
9731 gen_int_mode (-INTVAL (operands[2]),
9734 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
9736 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9740 gen_rtx_SET (operands[0],
9746 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
9747 emit_insn (gen_rtx_SET (cc_reg,
9748 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
9749 enum rtx_code rc = GET_CODE (operands[3]);
9751 rc = reverse_condition (rc);
9752 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9757 gen_rtx_SET (operands[0], const0_rtx)));
9758 rc = GET_CODE (operands[3]);
9759 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9764 gen_rtx_SET (operands[0],
9770 [(set_attr "conds" "clob")
9771 (set_attr "length" "12")
9772 (set_attr "type" "multiple")]
9775 (define_insn_and_split "movcond_addsi"
9776 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
9778 (match_operator 5 "comparison_operator"
9779 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
9780 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
9782 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
9783 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
9784 (clobber (reg:CC CC_REGNUM))]
9787 "&& reload_completed"
9788 [(set (reg:CC_NOOV CC_REGNUM)
9790 (plus:SI (match_dup 3)
9793 (set (match_dup 0) (match_dup 1))
9794 (cond_exec (match_dup 6)
9795 (set (match_dup 0) (match_dup 2)))]
9798 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
9799 operands[3], operands[4]);
9800 enum rtx_code rc = GET_CODE (operands[5]);
9801 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
9802 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
9803 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
9804 rc = reverse_condition (rc);
9806 std::swap (operands[1], operands[2]);
9808 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
9811 [(set_attr "conds" "clob")
9812 (set_attr "enabled_for_short_it" "no,yes,yes")
9813 (set_attr "type" "multiple")]
9816 (define_insn "movcond"
9817 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9819 (match_operator 5 "arm_comparison_operator"
9820 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9821 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9822 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9823 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9824 (clobber (reg:CC CC_REGNUM))]
9827 if (GET_CODE (operands[5]) == LT
9828 && (operands[4] == const0_rtx))
9830 if (which_alternative != 1 && REG_P (operands[1]))
9832 if (operands[2] == const0_rtx)
9833 return \"and\\t%0, %1, %3, asr #31\";
9834 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9836 else if (which_alternative != 0 && REG_P (operands[2]))
9838 if (operands[1] == const0_rtx)
9839 return \"bic\\t%0, %2, %3, asr #31\";
9840 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9842 /* The only case that falls through to here is when both ops 1 & 2
9846 if (GET_CODE (operands[5]) == GE
9847 && (operands[4] == const0_rtx))
9849 if (which_alternative != 1 && REG_P (operands[1]))
9851 if (operands[2] == const0_rtx)
9852 return \"bic\\t%0, %1, %3, asr #31\";
9853 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9855 else if (which_alternative != 0 && REG_P (operands[2]))
9857 if (operands[1] == const0_rtx)
9858 return \"and\\t%0, %2, %3, asr #31\";
9859 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9861 /* The only case that falls through to here is when both ops 1 & 2
9864 if (CONST_INT_P (operands[4])
9865 && !const_ok_for_arm (INTVAL (operands[4])))
9866 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9868 output_asm_insn (\"cmp\\t%3, %4\", operands);
9869 if (which_alternative != 0)
9870 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9871 if (which_alternative != 1)
9872 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9875 [(set_attr "conds" "clob")
9876 (set_attr "length" "8,8,12")
9877 (set_attr "type" "multiple")]
9880 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9882 (define_insn "*ifcompare_plus_move"
9883 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9884 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9885 [(match_operand:SI 4 "s_register_operand" "r,r")
9886 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9888 (match_operand:SI 2 "s_register_operand" "r,r")
9889 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9890 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9891 (clobber (reg:CC CC_REGNUM))]
9894 [(set_attr "conds" "clob")
9895 (set_attr "length" "8,12")
9896 (set_attr "type" "multiple")]
9899 (define_insn "*if_plus_move"
9900 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9902 (match_operator 4 "arm_comparison_operator"
9903 [(match_operand 5 "cc_register" "") (const_int 0)])
9905 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9906 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9907 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9911 sub%d4\\t%0, %2, #%n3
9912 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9913 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9914 [(set_attr "conds" "use")
9915 (set_attr "length" "4,4,8,8")
9916 (set_attr_alternative "type"
9917 [(if_then_else (match_operand 3 "const_int_operand" "")
9918 (const_string "alu_imm" )
9919 (const_string "alu_sreg"))
9920 (const_string "alu_imm")
9921 (const_string "multiple")
9922 (const_string "multiple")])]
9925 (define_insn "*ifcompare_move_plus"
9926 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9927 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9928 [(match_operand:SI 4 "s_register_operand" "r,r")
9929 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9930 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9932 (match_operand:SI 2 "s_register_operand" "r,r")
9933 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9934 (clobber (reg:CC CC_REGNUM))]
9937 [(set_attr "conds" "clob")
9938 (set_attr "length" "8,12")
9939 (set_attr "type" "multiple")]
9942 (define_insn "*if_move_plus"
9943 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9945 (match_operator 4 "arm_comparison_operator"
9946 [(match_operand 5 "cc_register" "") (const_int 0)])
9947 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9949 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9950 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9954 sub%D4\\t%0, %2, #%n3
9955 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9956 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9957 [(set_attr "conds" "use")
9958 (set_attr "length" "4,4,8,8")
9959 (set_attr_alternative "type"
9960 [(if_then_else (match_operand 3 "const_int_operand" "")
9961 (const_string "alu_imm" )
9962 (const_string "alu_sreg"))
9963 (const_string "alu_imm")
9964 (const_string "multiple")
9965 (const_string "multiple")])]
9968 (define_insn "*ifcompare_arith_arith"
9969 [(set (match_operand:SI 0 "s_register_operand" "=r")
9970 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9971 [(match_operand:SI 5 "s_register_operand" "r")
9972 (match_operand:SI 6 "arm_add_operand" "rIL")])
9973 (match_operator:SI 8 "shiftable_operator"
9974 [(match_operand:SI 1 "s_register_operand" "r")
9975 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9976 (match_operator:SI 7 "shiftable_operator"
9977 [(match_operand:SI 3 "s_register_operand" "r")
9978 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9979 (clobber (reg:CC CC_REGNUM))]
9982 [(set_attr "conds" "clob")
9983 (set_attr "length" "12")
9984 (set_attr "type" "multiple")]
9987 (define_insn "*if_arith_arith"
9988 [(set (match_operand:SI 0 "s_register_operand" "=r")
9989 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9990 [(match_operand 8 "cc_register" "") (const_int 0)])
9991 (match_operator:SI 6 "shiftable_operator"
9992 [(match_operand:SI 1 "s_register_operand" "r")
9993 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9994 (match_operator:SI 7 "shiftable_operator"
9995 [(match_operand:SI 3 "s_register_operand" "r")
9996 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9998 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9999 [(set_attr "conds" "use")
10000 (set_attr "length" "8")
10001 (set_attr "type" "multiple")]
10004 (define_insn "*ifcompare_arith_move"
10005 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10006 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10007 [(match_operand:SI 2 "s_register_operand" "r,r")
10008 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
10009 (match_operator:SI 7 "shiftable_operator"
10010 [(match_operand:SI 4 "s_register_operand" "r,r")
10011 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
10012 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10013 (clobber (reg:CC CC_REGNUM))]
10016 /* If we have an operation where (op x 0) is the identity operation and
10017 the conditional operator is LT or GE and we are comparing against zero and
10018 everything is in registers then we can do this in two instructions. */
10019 if (operands[3] == const0_rtx
10020 && GET_CODE (operands[7]) != AND
10021 && REG_P (operands[5])
10022 && REG_P (operands[1])
10023 && REGNO (operands[1]) == REGNO (operands[4])
10024 && REGNO (operands[4]) != REGNO (operands[0]))
10026 if (GET_CODE (operands[6]) == LT)
10027 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10028 else if (GET_CODE (operands[6]) == GE)
10029 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10031 if (CONST_INT_P (operands[3])
10032 && !const_ok_for_arm (INTVAL (operands[3])))
10033 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
10035 output_asm_insn (\"cmp\\t%2, %3\", operands);
10036 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
10037 if (which_alternative != 0)
10038 return \"mov%D6\\t%0, %1\";
10041 [(set_attr "conds" "clob")
10042 (set_attr "length" "8,12")
10043 (set_attr "type" "multiple")]
10046 (define_insn "*if_arith_move"
10047 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10048 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10049 [(match_operand 6 "cc_register" "") (const_int 0)])
10050 (match_operator:SI 5 "shiftable_operator"
10051 [(match_operand:SI 2 "s_register_operand" "r,r")
10052 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10053 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
10056 %I5%d4\\t%0, %2, %3
10057 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
10058 [(set_attr "conds" "use")
10059 (set_attr "length" "4,8")
10060 (set_attr_alternative "type"
10061 [(if_then_else (match_operand 3 "const_int_operand" "")
10062 (const_string "alu_shift_imm" )
10063 (const_string "alu_shift_reg"))
10064 (const_string "multiple")])]
10067 (define_insn "*ifcompare_move_arith"
10068 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10069 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10070 [(match_operand:SI 4 "s_register_operand" "r,r")
10071 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10072 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10073 (match_operator:SI 7 "shiftable_operator"
10074 [(match_operand:SI 2 "s_register_operand" "r,r")
10075 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10076 (clobber (reg:CC CC_REGNUM))]
10079 /* If we have an operation where (op x 0) is the identity operation and
10080 the conditional operator is LT or GE and we are comparing against zero and
10081 everything is in registers then we can do this in two instructions */
10082 if (operands[5] == const0_rtx
10083 && GET_CODE (operands[7]) != AND
10084 && REG_P (operands[3])
10085 && REG_P (operands[1])
10086 && REGNO (operands[1]) == REGNO (operands[2])
10087 && REGNO (operands[2]) != REGNO (operands[0]))
10089 if (GET_CODE (operands[6]) == GE)
10090 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10091 else if (GET_CODE (operands[6]) == LT)
10092 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10095 if (CONST_INT_P (operands[5])
10096 && !const_ok_for_arm (INTVAL (operands[5])))
10097 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
10099 output_asm_insn (\"cmp\\t%4, %5\", operands);
10101 if (which_alternative != 0)
10102 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
10103 return \"%I7%D6\\t%0, %2, %3\";
10105 [(set_attr "conds" "clob")
10106 (set_attr "length" "8,12")
10107 (set_attr "type" "multiple")]
10110 (define_insn "*if_move_arith"
10111 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10113 (match_operator 4 "arm_comparison_operator"
10114 [(match_operand 6 "cc_register" "") (const_int 0)])
10115 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10116 (match_operator:SI 5 "shiftable_operator"
10117 [(match_operand:SI 2 "s_register_operand" "r,r")
10118 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
10121 %I5%D4\\t%0, %2, %3
10122 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
10123 [(set_attr "conds" "use")
10124 (set_attr "length" "4,8")
10125 (set_attr_alternative "type"
10126 [(if_then_else (match_operand 3 "const_int_operand" "")
10127 (const_string "alu_shift_imm" )
10128 (const_string "alu_shift_reg"))
10129 (const_string "multiple")])]
10132 (define_insn "*ifcompare_move_not"
10133 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10135 (match_operator 5 "arm_comparison_operator"
10136 [(match_operand:SI 3 "s_register_operand" "r,r")
10137 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10138 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10140 (match_operand:SI 2 "s_register_operand" "r,r"))))
10141 (clobber (reg:CC CC_REGNUM))]
10144 [(set_attr "conds" "clob")
10145 (set_attr "length" "8,12")
10146 (set_attr "type" "multiple")]
10149 (define_insn "*if_move_not"
10150 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10152 (match_operator 4 "arm_comparison_operator"
10153 [(match_operand 3 "cc_register" "") (const_int 0)])
10154 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10155 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10159 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
10160 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
10161 [(set_attr "conds" "use")
10162 (set_attr "type" "mvn_reg")
10163 (set_attr "length" "4,8,8")
10164 (set_attr "type" "mvn_reg,multiple,multiple")]
10167 (define_insn "*ifcompare_not_move"
10168 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10170 (match_operator 5 "arm_comparison_operator"
10171 [(match_operand:SI 3 "s_register_operand" "r,r")
10172 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10174 (match_operand:SI 2 "s_register_operand" "r,r"))
10175 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10176 (clobber (reg:CC CC_REGNUM))]
10179 [(set_attr "conds" "clob")
10180 (set_attr "length" "8,12")
10181 (set_attr "type" "multiple")]
10184 (define_insn "*if_not_move"
10185 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10187 (match_operator 4 "arm_comparison_operator"
10188 [(match_operand 3 "cc_register" "") (const_int 0)])
10189 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10190 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10194 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
10195 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
10196 [(set_attr "conds" "use")
10197 (set_attr "type" "mvn_reg,multiple,multiple")
10198 (set_attr "length" "4,8,8")]
10201 (define_insn "*ifcompare_shift_move"
10202 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10204 (match_operator 6 "arm_comparison_operator"
10205 [(match_operand:SI 4 "s_register_operand" "r,r")
10206 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10207 (match_operator:SI 7 "shift_operator"
10208 [(match_operand:SI 2 "s_register_operand" "r,r")
10209 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
10210 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10211 (clobber (reg:CC CC_REGNUM))]
10214 [(set_attr "conds" "clob")
10215 (set_attr "length" "8,12")
10216 (set_attr "type" "multiple")]
10219 (define_insn "*if_shift_move"
10220 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10222 (match_operator 5 "arm_comparison_operator"
10223 [(match_operand 6 "cc_register" "") (const_int 0)])
10224 (match_operator:SI 4 "shift_operator"
10225 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10226 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
10227 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10231 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
10232 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
10233 [(set_attr "conds" "use")
10234 (set_attr "shift" "2")
10235 (set_attr "length" "4,8,8")
10236 (set_attr_alternative "type"
10237 [(if_then_else (match_operand 3 "const_int_operand" "")
10238 (const_string "mov_shift" )
10239 (const_string "mov_shift_reg"))
10240 (const_string "multiple")
10241 (const_string "multiple")])]
10244 (define_insn "*ifcompare_move_shift"
10245 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10247 (match_operator 6 "arm_comparison_operator"
10248 [(match_operand:SI 4 "s_register_operand" "r,r")
10249 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10250 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10251 (match_operator:SI 7 "shift_operator"
10252 [(match_operand:SI 2 "s_register_operand" "r,r")
10253 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
10254 (clobber (reg:CC CC_REGNUM))]
10257 [(set_attr "conds" "clob")
10258 (set_attr "length" "8,12")
10259 (set_attr "type" "multiple")]
10262 (define_insn "*if_move_shift"
10263 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10265 (match_operator 5 "arm_comparison_operator"
10266 [(match_operand 6 "cc_register" "") (const_int 0)])
10267 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10268 (match_operator:SI 4 "shift_operator"
10269 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10270 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10274 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10275 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10276 [(set_attr "conds" "use")
10277 (set_attr "shift" "2")
10278 (set_attr "length" "4,8,8")
10279 (set_attr_alternative "type"
10280 [(if_then_else (match_operand 3 "const_int_operand" "")
10281 (const_string "mov_shift" )
10282 (const_string "mov_shift_reg"))
10283 (const_string "multiple")
10284 (const_string "multiple")])]
10287 (define_insn "*ifcompare_shift_shift"
10288 [(set (match_operand:SI 0 "s_register_operand" "=r")
10290 (match_operator 7 "arm_comparison_operator"
10291 [(match_operand:SI 5 "s_register_operand" "r")
10292 (match_operand:SI 6 "arm_add_operand" "rIL")])
10293 (match_operator:SI 8 "shift_operator"
10294 [(match_operand:SI 1 "s_register_operand" "r")
10295 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10296 (match_operator:SI 9 "shift_operator"
10297 [(match_operand:SI 3 "s_register_operand" "r")
10298 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10299 (clobber (reg:CC CC_REGNUM))]
10302 [(set_attr "conds" "clob")
10303 (set_attr "length" "12")
10304 (set_attr "type" "multiple")]
10307 (define_insn "*if_shift_shift"
10308 [(set (match_operand:SI 0 "s_register_operand" "=r")
10310 (match_operator 5 "arm_comparison_operator"
10311 [(match_operand 8 "cc_register" "") (const_int 0)])
10312 (match_operator:SI 6 "shift_operator"
10313 [(match_operand:SI 1 "s_register_operand" "r")
10314 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10315 (match_operator:SI 7 "shift_operator"
10316 [(match_operand:SI 3 "s_register_operand" "r")
10317 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10319 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10320 [(set_attr "conds" "use")
10321 (set_attr "shift" "1")
10322 (set_attr "length" "8")
10323 (set (attr "type") (if_then_else
10324 (and (match_operand 2 "const_int_operand" "")
10325 (match_operand 4 "const_int_operand" ""))
10326 (const_string "mov_shift")
10327 (const_string "mov_shift_reg")))]
10330 (define_insn "*ifcompare_not_arith"
10331 [(set (match_operand:SI 0 "s_register_operand" "=r")
10333 (match_operator 6 "arm_comparison_operator"
10334 [(match_operand:SI 4 "s_register_operand" "r")
10335 (match_operand:SI 5 "arm_add_operand" "rIL")])
10336 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10337 (match_operator:SI 7 "shiftable_operator"
10338 [(match_operand:SI 2 "s_register_operand" "r")
10339 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10340 (clobber (reg:CC CC_REGNUM))]
10343 [(set_attr "conds" "clob")
10344 (set_attr "length" "12")
10345 (set_attr "type" "multiple")]
10348 (define_insn "*if_not_arith"
10349 [(set (match_operand:SI 0 "s_register_operand" "=r")
10351 (match_operator 5 "arm_comparison_operator"
10352 [(match_operand 4 "cc_register" "") (const_int 0)])
10353 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10354 (match_operator:SI 6 "shiftable_operator"
10355 [(match_operand:SI 2 "s_register_operand" "r")
10356 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10358 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10359 [(set_attr "conds" "use")
10360 (set_attr "type" "mvn_reg")
10361 (set_attr "length" "8")]
10364 (define_insn "*ifcompare_arith_not"
10365 [(set (match_operand:SI 0 "s_register_operand" "=r")
10367 (match_operator 6 "arm_comparison_operator"
10368 [(match_operand:SI 4 "s_register_operand" "r")
10369 (match_operand:SI 5 "arm_add_operand" "rIL")])
10370 (match_operator:SI 7 "shiftable_operator"
10371 [(match_operand:SI 2 "s_register_operand" "r")
10372 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10373 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10374 (clobber (reg:CC CC_REGNUM))]
10377 [(set_attr "conds" "clob")
10378 (set_attr "length" "12")
10379 (set_attr "type" "multiple")]
10382 (define_insn "*if_arith_not"
10383 [(set (match_operand:SI 0 "s_register_operand" "=r")
10385 (match_operator 5 "arm_comparison_operator"
10386 [(match_operand 4 "cc_register" "") (const_int 0)])
10387 (match_operator:SI 6 "shiftable_operator"
10388 [(match_operand:SI 2 "s_register_operand" "r")
10389 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10390 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10392 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10393 [(set_attr "conds" "use")
10394 (set_attr "type" "multiple")
10395 (set_attr "length" "8")]
10398 (define_insn "*ifcompare_neg_move"
10399 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10401 (match_operator 5 "arm_comparison_operator"
10402 [(match_operand:SI 3 "s_register_operand" "r,r")
10403 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10404 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10405 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10406 (clobber (reg:CC CC_REGNUM))]
10409 [(set_attr "conds" "clob")
10410 (set_attr "length" "8,12")
10411 (set_attr "type" "multiple")]
10414 (define_insn_and_split "*if_neg_move"
10415 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
10417 (match_operator 4 "arm_comparison_operator"
10418 [(match_operand 3 "cc_register" "") (const_int 0)])
10419 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
10420 (match_operand:SI 1 "s_register_operand" "0,0")))]
10423 "&& reload_completed"
10424 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
10425 (set (match_dup 0) (neg:SI (match_dup 2))))]
10427 [(set_attr "conds" "use")
10428 (set_attr "length" "4")
10429 (set_attr "arch" "t2,32")
10430 (set_attr "enabled_for_short_it" "yes,no")
10431 (set_attr "type" "logic_shift_imm")]
10434 (define_insn "*ifcompare_move_neg"
10435 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10437 (match_operator 5 "arm_comparison_operator"
10438 [(match_operand:SI 3 "s_register_operand" "r,r")
10439 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10440 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10441 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10442 (clobber (reg:CC CC_REGNUM))]
10445 [(set_attr "conds" "clob")
10446 (set_attr "length" "8,12")
10447 (set_attr "type" "multiple")]
10450 (define_insn_and_split "*if_move_neg"
10451 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
10453 (match_operator 4 "arm_comparison_operator"
10454 [(match_operand 3 "cc_register" "") (const_int 0)])
10455 (match_operand:SI 1 "s_register_operand" "0,0")
10456 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
10459 "&& reload_completed"
10460 [(cond_exec (match_dup 5)
10461 (set (match_dup 0) (neg:SI (match_dup 2))))]
10463 machine_mode mode = GET_MODE (operands[3]);
10464 rtx_code rc = GET_CODE (operands[4]);
10466 if (mode == CCFPmode || mode == CCFPEmode)
10467 rc = reverse_condition_maybe_unordered (rc);
10469 rc = reverse_condition (rc);
10471 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
10473 [(set_attr "conds" "use")
10474 (set_attr "length" "4")
10475 (set_attr "arch" "t2,32")
10476 (set_attr "enabled_for_short_it" "yes,no")
10477 (set_attr "type" "logic_shift_imm")]
10480 (define_insn "*arith_adjacentmem"
10481 [(set (match_operand:SI 0 "s_register_operand" "=r")
10482 (match_operator:SI 1 "shiftable_operator"
10483 [(match_operand:SI 2 "memory_operand" "m")
10484 (match_operand:SI 3 "memory_operand" "m")]))
10485 (clobber (match_scratch:SI 4 "=r"))]
10486 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10492 HOST_WIDE_INT val1 = 0, val2 = 0;
10494 if (REGNO (operands[0]) > REGNO (operands[4]))
10496 ldm[1] = operands[4];
10497 ldm[2] = operands[0];
10501 ldm[1] = operands[0];
10502 ldm[2] = operands[4];
10505 base_reg = XEXP (operands[2], 0);
10507 if (!REG_P (base_reg))
10509 val1 = INTVAL (XEXP (base_reg, 1));
10510 base_reg = XEXP (base_reg, 0);
10513 if (!REG_P (XEXP (operands[3], 0)))
10514 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10516 arith[0] = operands[0];
10517 arith[3] = operands[1];
10531 if (val1 !=0 && val2 != 0)
10535 if (val1 == 4 || val2 == 4)
10536 /* Other val must be 8, since we know they are adjacent and neither
10538 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
10539 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10541 ldm[0] = ops[0] = operands[4];
10543 ops[2] = GEN_INT (val1);
10544 output_add_immediate (ops);
10546 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10548 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10552 /* Offset is out of range for a single add, so use two ldr. */
10555 ops[2] = GEN_INT (val1);
10556 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10558 ops[2] = GEN_INT (val2);
10559 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10562 else if (val1 != 0)
10565 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10567 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10572 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10574 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10576 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10579 [(set_attr "length" "12")
10580 (set_attr "predicable" "yes")
10581 (set_attr "type" "load_4")]
10584 ; This pattern is never tried by combine, so do it as a peephole
10587 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10588 (match_operand:SI 1 "arm_general_register_operand" ""))
10589 (set (reg:CC CC_REGNUM)
10590 (compare:CC (match_dup 1) (const_int 0)))]
10592 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10593 (set (match_dup 0) (match_dup 1))])]
10598 [(set (match_operand:SI 0 "s_register_operand" "")
10599 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10601 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10602 [(match_operand:SI 3 "s_register_operand" "")
10603 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10604 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10606 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10607 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10612 ;; This split can be used because CC_Z mode implies that the following
10613 ;; branch will be an equality, or an unsigned inequality, so the sign
10614 ;; extension is not needed.
10617 [(set (reg:CC_Z CC_REGNUM)
10619 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10621 (match_operand 1 "const_int_operand" "")))
10622 (clobber (match_scratch:SI 2 ""))]
10624 && ((UINTVAL (operands[1]))
10625 == ((UINTVAL (operands[1])) >> 24) << 24)"
10626 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10627 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10629 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10632 ;; ??? Check the patterns above for Thumb-2 usefulness
10634 (define_expand "prologue"
10635 [(clobber (const_int 0))]
10638 arm_expand_prologue ();
10640 thumb1_expand_prologue ();
10645 (define_expand "epilogue"
10646 [(clobber (const_int 0))]
10649 if (crtl->calls_eh_return)
10650 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
10653 thumb1_expand_epilogue ();
10654 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10655 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10657 else if (HAVE_return)
10659 /* HAVE_return is testing for USE_RETURN_INSN (FALSE). Hence,
10660 no need for explicit testing again. */
10661 emit_jump_insn (gen_return ());
10663 else if (TARGET_32BIT)
10665 arm_expand_epilogue (true);
10671 ;; Note - although unspec_volatile's USE all hard registers,
10672 ;; USEs are ignored after relaod has completed. Thus we need
10673 ;; to add an unspec of the link register to ensure that flow
10674 ;; does not think that it is unused by the sibcall branch that
10675 ;; will replace the standard function epilogue.
10676 (define_expand "sibcall_epilogue"
10677 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
10678 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10681 arm_expand_epilogue (false);
10686 (define_expand "eh_epilogue"
10687 [(use (match_operand:SI 0 "register_operand"))
10688 (use (match_operand:SI 1 "register_operand"))
10689 (use (match_operand:SI 2 "register_operand"))]
10693 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10694 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
10696 rtx ra = gen_rtx_REG (Pmode, 2);
10698 emit_move_insn (ra, operands[2]);
10701 /* This is a hack -- we may have crystalized the function type too
10703 cfun->machine->func_type = 0;
10707 ;; This split is only used during output to reduce the number of patterns
10708 ;; that need assembler instructions adding to them. We allowed the setting
10709 ;; of the conditions to be implicit during rtl generation so that
10710 ;; the conditional compare patterns would work. However this conflicts to
10711 ;; some extent with the conditional data operations, so we have to split them
10714 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10715 ;; conditional execution sufficient?
10718 [(set (match_operand:SI 0 "s_register_operand" "")
10719 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10720 [(match_operand 2 "" "") (match_operand 3 "" "")])
10722 (match_operand 4 "" "")))
10723 (clobber (reg:CC CC_REGNUM))]
10724 "TARGET_ARM && reload_completed"
10725 [(set (match_dup 5) (match_dup 6))
10726 (cond_exec (match_dup 7)
10727 (set (match_dup 0) (match_dup 4)))]
10730 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10731 operands[2], operands[3]);
10732 enum rtx_code rc = GET_CODE (operands[1]);
10734 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10735 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10736 if (mode == CCFPmode || mode == CCFPEmode)
10737 rc = reverse_condition_maybe_unordered (rc);
10739 rc = reverse_condition (rc);
10741 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10746 [(set (match_operand:SI 0 "s_register_operand" "")
10747 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10748 [(match_operand 2 "" "") (match_operand 3 "" "")])
10749 (match_operand 4 "" "")
10751 (clobber (reg:CC CC_REGNUM))]
10752 "TARGET_ARM && reload_completed"
10753 [(set (match_dup 5) (match_dup 6))
10754 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10755 (set (match_dup 0) (match_dup 4)))]
10758 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10759 operands[2], operands[3]);
10761 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10762 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10767 [(set (match_operand:SI 0 "s_register_operand" "")
10768 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10769 [(match_operand 2 "" "") (match_operand 3 "" "")])
10770 (match_operand 4 "" "")
10771 (match_operand 5 "" "")))
10772 (clobber (reg:CC CC_REGNUM))]
10773 "TARGET_ARM && reload_completed"
10774 [(set (match_dup 6) (match_dup 7))
10775 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10776 (set (match_dup 0) (match_dup 4)))
10777 (cond_exec (match_dup 8)
10778 (set (match_dup 0) (match_dup 5)))]
10781 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10782 operands[2], operands[3]);
10783 enum rtx_code rc = GET_CODE (operands[1]);
10785 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10786 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10787 if (mode == CCFPmode || mode == CCFPEmode)
10788 rc = reverse_condition_maybe_unordered (rc);
10790 rc = reverse_condition (rc);
10792 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10797 [(set (match_operand:SI 0 "s_register_operand" "")
10798 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10799 [(match_operand:SI 2 "s_register_operand" "")
10800 (match_operand:SI 3 "arm_add_operand" "")])
10801 (match_operand:SI 4 "arm_rhs_operand" "")
10803 (match_operand:SI 5 "s_register_operand" ""))))
10804 (clobber (reg:CC CC_REGNUM))]
10805 "TARGET_ARM && reload_completed"
10806 [(set (match_dup 6) (match_dup 7))
10807 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10808 (set (match_dup 0) (match_dup 4)))
10809 (cond_exec (match_dup 8)
10810 (set (match_dup 0) (not:SI (match_dup 5))))]
10813 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10814 operands[2], operands[3]);
10815 enum rtx_code rc = GET_CODE (operands[1]);
10817 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10818 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10819 if (mode == CCFPmode || mode == CCFPEmode)
10820 rc = reverse_condition_maybe_unordered (rc);
10822 rc = reverse_condition (rc);
10824 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10828 (define_insn "*cond_move_not"
10829 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10830 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10831 [(match_operand 3 "cc_register" "") (const_int 0)])
10832 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10834 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10838 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10839 [(set_attr "conds" "use")
10840 (set_attr "type" "mvn_reg,multiple")
10841 (set_attr "length" "4,8")]
10844 ;; The next two patterns occur when an AND operation is followed by a
10845 ;; scc insn sequence
10847 (define_insn "*sign_extract_onebit"
10848 [(set (match_operand:SI 0 "s_register_operand" "=r")
10849 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10851 (match_operand:SI 2 "const_int_operand" "n")))
10852 (clobber (reg:CC CC_REGNUM))]
10855 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10856 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10857 return \"mvnne\\t%0, #0\";
10859 [(set_attr "conds" "clob")
10860 (set_attr "length" "8")
10861 (set_attr "type" "multiple")]
10864 (define_insn "*not_signextract_onebit"
10865 [(set (match_operand:SI 0 "s_register_operand" "=r")
10867 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10869 (match_operand:SI 2 "const_int_operand" "n"))))
10870 (clobber (reg:CC CC_REGNUM))]
10873 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10874 output_asm_insn (\"tst\\t%1, %2\", operands);
10875 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10876 return \"movne\\t%0, #0\";
10878 [(set_attr "conds" "clob")
10879 (set_attr "length" "12")
10880 (set_attr "type" "multiple")]
10882 ;; ??? The above patterns need auditing for Thumb-2
10884 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10885 ;; expressions. For simplicity, the first register is also in the unspec
10887 ;; To avoid the usage of GNU extension, the length attribute is computed
10888 ;; in a C function arm_attr_length_push_multi.
10889 (define_insn "*push_multi"
10890 [(match_parallel 2 "multi_register_push"
10891 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
10892 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10893 UNSPEC_PUSH_MULT))])]
10897 int num_saves = XVECLEN (operands[2], 0);
10899 /* For the StrongARM at least it is faster to
10900 use STR to store only a single register.
10901 In Thumb mode always use push, and the assembler will pick
10902 something appropriate. */
10903 if (num_saves == 1 && TARGET_ARM)
10904 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
10911 strcpy (pattern, \"push%?\\t{%1\");
10913 strcpy (pattern, \"push\\t{%1\");
10915 for (i = 1; i < num_saves; i++)
10917 strcat (pattern, \", %|\");
10919 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10922 strcat (pattern, \"}\");
10923 output_asm_insn (pattern, operands);
10928 [(set_attr "type" "store_16")
10929 (set (attr "length")
10930 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
10933 (define_insn "stack_tie"
10934 [(set (mem:BLK (scratch))
10935 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10936 (match_operand:SI 1 "s_register_operand" "rk")]
10940 [(set_attr "length" "0")
10941 (set_attr "type" "block")]
10944 ;; Pop (as used in epilogue RTL)
10946 (define_insn "*load_multiple_with_writeback"
10947 [(match_parallel 0 "load_multiple_operation"
10948 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10949 (plus:SI (match_dup 1)
10950 (match_operand:SI 2 "const_int_I_operand" "I")))
10951 (set (match_operand:SI 3 "s_register_operand" "=rk")
10952 (mem:SI (match_dup 1)))
10954 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10957 arm_output_multireg_pop (operands, /*return_pc=*/false,
10958 /*cond=*/const_true_rtx,
10964 [(set_attr "type" "load_16")
10965 (set_attr "predicable" "yes")
10966 (set (attr "length")
10967 (symbol_ref "arm_attr_length_pop_multi (operands,
10968 /*return_pc=*/false,
10969 /*write_back_p=*/true)"))]
10972 ;; Pop with return (as used in epilogue RTL)
10974 ;; This instruction is generated when the registers are popped at the end of
10975 ;; epilogue. Here, instead of popping the value into LR and then generating
10976 ;; jump to LR, value is popped into PC directly. Hence, the pattern is combined
10978 (define_insn "*pop_multiple_with_writeback_and_return"
10979 [(match_parallel 0 "pop_multiple_return"
10981 (set (match_operand:SI 1 "s_register_operand" "+rk")
10982 (plus:SI (match_dup 1)
10983 (match_operand:SI 2 "const_int_I_operand" "I")))
10984 (set (match_operand:SI 3 "s_register_operand" "=rk")
10985 (mem:SI (match_dup 1)))
10987 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10990 arm_output_multireg_pop (operands, /*return_pc=*/true,
10991 /*cond=*/const_true_rtx,
10997 [(set_attr "type" "load_16")
10998 (set_attr "predicable" "yes")
10999 (set (attr "length")
11000 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11001 /*write_back_p=*/true)"))]
11004 (define_insn "*pop_multiple_with_return"
11005 [(match_parallel 0 "pop_multiple_return"
11007 (set (match_operand:SI 2 "s_register_operand" "=rk")
11008 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11010 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11013 arm_output_multireg_pop (operands, /*return_pc=*/true,
11014 /*cond=*/const_true_rtx,
11020 [(set_attr "type" "load_16")
11021 (set_attr "predicable" "yes")
11022 (set (attr "length")
11023 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11024 /*write_back_p=*/false)"))]
11027 ;; Load into PC and return
11028 (define_insn "*ldr_with_return"
11030 (set (reg:SI PC_REGNUM)
11031 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
11032 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11033 "ldr%?\t%|pc, [%0], #4"
11034 [(set_attr "type" "load_4")
11035 (set_attr "predicable" "yes")]
11037 ;; Pop for floating point registers (as used in epilogue RTL)
11038 (define_insn "*vfp_pop_multiple_with_writeback"
11039 [(match_parallel 0 "pop_multiple_fp"
11040 [(set (match_operand:SI 1 "s_register_operand" "+rk")
11041 (plus:SI (match_dup 1)
11042 (match_operand:SI 2 "const_int_I_operand" "I")))
11043 (set (match_operand:DF 3 "vfp_hard_register_operand" "")
11044 (mem:DF (match_dup 1)))])]
11045 "TARGET_32BIT && TARGET_HARD_FLOAT"
11048 int num_regs = XVECLEN (operands[0], 0);
11051 strcpy (pattern, \"vldm\\t\");
11052 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
11053 strcat (pattern, \"!, {\");
11054 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
11055 strcat (pattern, \"%P0\");
11056 if ((num_regs - 1) > 1)
11058 strcat (pattern, \"-%P1\");
11059 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
11062 strcat (pattern, \"}\");
11063 output_asm_insn (pattern, op_list);
11067 [(set_attr "type" "load_16")
11068 (set_attr "conds" "unconditional")
11069 (set_attr "predicable" "no")]
11072 ;; Special patterns for dealing with the constant pool
11074 (define_insn "align_4"
11075 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
11078 assemble_align (32);
11081 [(set_attr "type" "no_insn")]
11084 (define_insn "align_8"
11085 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
11088 assemble_align (64);
11091 [(set_attr "type" "no_insn")]
11094 (define_insn "consttable_end"
11095 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
11098 making_const_table = FALSE;
11101 [(set_attr "type" "no_insn")]
11104 (define_insn "consttable_1"
11105 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
11108 making_const_table = TRUE;
11109 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
11110 assemble_zeros (3);
11113 [(set_attr "length" "4")
11114 (set_attr "type" "no_insn")]
11117 (define_insn "consttable_2"
11118 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
11122 rtx x = operands[0];
11123 making_const_table = TRUE;
11124 switch (GET_MODE_CLASS (GET_MODE (x)))
11127 arm_emit_fp16_const (x);
11130 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
11131 assemble_zeros (2);
11136 [(set_attr "length" "4")
11137 (set_attr "type" "no_insn")]
11140 (define_insn "consttable_4"
11141 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
11145 rtx x = operands[0];
11146 making_const_table = TRUE;
11147 scalar_float_mode float_mode;
11148 if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
11149 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
11152 /* XXX: Sometimes gcc does something really dumb and ends up with
11153 a HIGH in a constant pool entry, usually because it's trying to
11154 load into a VFP register. We know this will always be used in
11155 combination with a LO_SUM which ignores the high bits, so just
11156 strip off the HIGH. */
11157 if (GET_CODE (x) == HIGH)
11159 assemble_integer (x, 4, BITS_PER_WORD, 1);
11160 mark_symbol_refs_as_used (x);
11164 [(set_attr "length" "4")
11165 (set_attr "type" "no_insn")]
11168 (define_insn "consttable_8"
11169 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
11173 making_const_table = TRUE;
11174 scalar_float_mode float_mode;
11175 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
11176 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
11177 float_mode, BITS_PER_WORD);
11179 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
11182 [(set_attr "length" "8")
11183 (set_attr "type" "no_insn")]
11186 (define_insn "consttable_16"
11187 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
11191 making_const_table = TRUE;
11192 scalar_float_mode float_mode;
11193 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
11194 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
11195 float_mode, BITS_PER_WORD);
11197 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
11200 [(set_attr "length" "16")
11201 (set_attr "type" "no_insn")]
11204 ;; V5 Instructions,
11206 (define_insn "clzsi2"
11207 [(set (match_operand:SI 0 "s_register_operand" "=r")
11208 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11209 "TARGET_32BIT && arm_arch5t"
11211 [(set_attr "predicable" "yes")
11212 (set_attr "type" "clz")])
11214 (define_insn "rbitsi2"
11215 [(set (match_operand:SI 0 "s_register_operand" "=r")
11216 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
11217 "TARGET_32BIT && arm_arch_thumb2"
11219 [(set_attr "predicable" "yes")
11220 (set_attr "type" "clz")])
11222 ;; Keep this as a CTZ expression until after reload and then split
11223 ;; into RBIT + CLZ. Since RBIT is represented as an UNSPEC it is unlikely
11224 ;; to fold with any other expression.
11226 (define_insn_and_split "ctzsi2"
11227 [(set (match_operand:SI 0 "s_register_operand" "=r")
11228 (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11229 "TARGET_32BIT && arm_arch_thumb2"
11231 "&& reload_completed"
11234 emit_insn (gen_rbitsi2 (operands[0], operands[1]));
11235 emit_insn (gen_clzsi2 (operands[0], operands[0]));
11239 ;; V5E instructions.
11241 (define_insn "prefetch"
11242 [(prefetch (match_operand:SI 0 "address_operand" "p")
11243 (match_operand:SI 1 "" "")
11244 (match_operand:SI 2 "" ""))]
11245 "TARGET_32BIT && arm_arch5te"
11247 [(set_attr "type" "load_4")]
11250 ;; General predication pattern
11253 [(match_operator 0 "arm_comparison_operator"
11254 [(match_operand 1 "cc_register" "")
11257 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
11259 [(set_attr "predicated" "yes")]
11262 (define_insn "force_register_use"
11263 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
11266 [(set_attr "length" "0")
11267 (set_attr "type" "no_insn")]
11271 ;; Patterns for exception handling
11273 (define_expand "eh_return"
11274 [(use (match_operand 0 "general_operand"))]
11279 emit_insn (gen_arm_eh_return (operands[0]));
11281 emit_insn (gen_thumb_eh_return (operands[0]));
11286 ;; We can't expand this before we know where the link register is stored.
11287 (define_insn_and_split "arm_eh_return"
11288 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
11290 (clobber (match_scratch:SI 1 "=&r"))]
11293 "&& reload_completed"
11297 arm_set_return_address (operands[0], operands[1]);
11305 (define_insn "load_tp_hard"
11306 [(set (match_operand:SI 0 "register_operand" "=r")
11307 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
11309 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
11310 [(set_attr "predicable" "yes")
11311 (set_attr "type" "mrs")]
11314 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11315 (define_insn "load_tp_soft_fdpic"
11316 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11317 (clobber (reg:SI FDPIC_REGNUM))
11318 (clobber (reg:SI LR_REGNUM))
11319 (clobber (reg:SI IP_REGNUM))
11320 (clobber (reg:CC CC_REGNUM))]
11321 "TARGET_SOFT_TP && TARGET_FDPIC"
11322 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11323 [(set_attr "conds" "clob")
11324 (set_attr "type" "branch")]
11327 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11328 (define_insn "load_tp_soft"
11329 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11330 (clobber (reg:SI LR_REGNUM))
11331 (clobber (reg:SI IP_REGNUM))
11332 (clobber (reg:CC CC_REGNUM))]
11333 "TARGET_SOFT_TP && !TARGET_FDPIC"
11334 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11335 [(set_attr "conds" "clob")
11336 (set_attr "type" "branch")]
11339 ;; tls descriptor call
11340 (define_insn "tlscall"
11341 [(set (reg:SI R0_REGNUM)
11342 (unspec:SI [(reg:SI R0_REGNUM)
11343 (match_operand:SI 0 "" "X")
11344 (match_operand 1 "" "")] UNSPEC_TLS))
11345 (clobber (reg:SI R1_REGNUM))
11346 (clobber (reg:SI LR_REGNUM))
11347 (clobber (reg:SI CC_REGNUM))]
11350 targetm.asm_out.internal_label (asm_out_file, "LPIC",
11351 INTVAL (operands[1]));
11352 return "bl\\t%c0(tlscall)";
11354 [(set_attr "conds" "clob")
11355 (set_attr "length" "4")
11356 (set_attr "type" "branch")]
11359 ;; For thread pointer builtin
11360 (define_expand "get_thread_pointersi"
11361 [(match_operand:SI 0 "s_register_operand")]
11365 arm_load_tp (operands[0]);
11371 ;; We only care about the lower 16 bits of the constant
11372 ;; being inserted into the upper 16 bits of the register.
11373 (define_insn "*arm_movtas_ze"
11374 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
11377 (match_operand:SI 1 "const_int_operand" ""))]
11382 [(set_attr "arch" "32,v8mb")
11383 (set_attr "predicable" "yes")
11384 (set_attr "length" "4")
11385 (set_attr "type" "alu_sreg")]
11388 (define_insn "*arm_rev"
11389 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
11390 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
11396 [(set_attr "arch" "t1,t2,32")
11397 (set_attr "length" "2,2,4")
11398 (set_attr "predicable" "no,yes,yes")
11399 (set_attr "type" "rev")]
11402 (define_expand "arm_legacy_rev"
11403 [(set (match_operand:SI 2 "s_register_operand")
11404 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
11408 (lshiftrt:SI (match_dup 2)
11410 (set (match_operand:SI 3 "s_register_operand")
11411 (rotatert:SI (match_dup 1)
11414 (and:SI (match_dup 2)
11415 (const_int -65281)))
11416 (set (match_operand:SI 0 "s_register_operand")
11417 (xor:SI (match_dup 3)
11423 ;; Reuse temporaries to keep register pressure down.
11424 (define_expand "thumb_legacy_rev"
11425 [(set (match_operand:SI 2 "s_register_operand")
11426 (ashift:SI (match_operand:SI 1 "s_register_operand")
11428 (set (match_operand:SI 3 "s_register_operand")
11429 (lshiftrt:SI (match_dup 1)
11432 (ior:SI (match_dup 3)
11434 (set (match_operand:SI 4 "s_register_operand")
11436 (set (match_operand:SI 5 "s_register_operand")
11437 (rotatert:SI (match_dup 1)
11440 (ashift:SI (match_dup 5)
11443 (lshiftrt:SI (match_dup 5)
11446 (ior:SI (match_dup 5)
11449 (rotatert:SI (match_dup 5)
11451 (set (match_operand:SI 0 "s_register_operand")
11452 (ior:SI (match_dup 5)
11458 ;; ARM-specific expansion of signed mod by power of 2
11459 ;; using conditional negate.
11460 ;; For r0 % n where n is a power of 2 produce:
11462 ;; and r0, r0, #(n - 1)
11463 ;; and r1, r1, #(n - 1)
11464 ;; rsbpl r0, r1, #0
11466 (define_expand "modsi3"
11467 [(match_operand:SI 0 "register_operand")
11468 (match_operand:SI 1 "register_operand")
11469 (match_operand:SI 2 "const_int_operand")]
11472 HOST_WIDE_INT val = INTVAL (operands[2]);
11475 || exact_log2 (val) <= 0)
11478 rtx mask = GEN_INT (val - 1);
11480 /* In the special case of x0 % 2 we can do the even shorter:
11483 rsblt r0, r0, #0. */
11487 rtx cc_reg = arm_gen_compare_reg (LT,
11488 operands[1], const0_rtx, NULL_RTX);
11489 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
11490 rtx masked = gen_reg_rtx (SImode);
11492 emit_insn (gen_andsi3 (masked, operands[1], mask));
11493 emit_move_insn (operands[0],
11494 gen_rtx_IF_THEN_ELSE (SImode, cond,
11495 gen_rtx_NEG (SImode,
11501 rtx neg_op = gen_reg_rtx (SImode);
11502 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
11505 /* Extract the condition register and mode. */
11506 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
11507 rtx cc_reg = SET_DEST (cmp);
11508 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
11510 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
11512 rtx masked_neg = gen_reg_rtx (SImode);
11513 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
11515 /* We want a conditional negate here, but emitting COND_EXEC rtxes
11516 during expand does not always work. Do an IF_THEN_ELSE instead. */
11517 emit_move_insn (operands[0],
11518 gen_rtx_IF_THEN_ELSE (SImode, cond,
11519 gen_rtx_NEG (SImode, masked_neg),
11527 (define_expand "bswapsi2"
11528 [(set (match_operand:SI 0 "s_register_operand")
11529 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
11530 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
11534 rtx op2 = gen_reg_rtx (SImode);
11535 rtx op3 = gen_reg_rtx (SImode);
11539 rtx op4 = gen_reg_rtx (SImode);
11540 rtx op5 = gen_reg_rtx (SImode);
11542 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
11543 op2, op3, op4, op5));
11547 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
11556 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
11557 ;; and unsigned variants, respectively. For rev16, expose
11558 ;; byte-swapping in the lower 16 bits only.
11559 (define_insn "*arm_revsh"
11560 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
11561 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
11567 [(set_attr "arch" "t1,t2,32")
11568 (set_attr "length" "2,2,4")
11569 (set_attr "type" "rev")]
11572 (define_insn "*arm_rev16"
11573 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
11574 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
11580 [(set_attr "arch" "t1,t2,32")
11581 (set_attr "length" "2,2,4")
11582 (set_attr "type" "rev")]
11585 ;; There are no canonicalisation rules for the position of the lshiftrt, ashift
11586 ;; operations within an IOR/AND RTX, therefore we have two patterns matching
11587 ;; each valid permutation.
11589 (define_insn "arm_rev16si2"
11590 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11591 (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
11593 (match_operand:SI 3 "const_int_operand" "n,n,n"))
11594 (and:SI (lshiftrt:SI (match_dup 1)
11596 (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
11598 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11599 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11601 [(set_attr "arch" "t1,t2,32")
11602 (set_attr "length" "2,2,4")
11603 (set_attr "type" "rev")]
11606 (define_insn "arm_rev16si2_alt"
11607 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11608 (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
11610 (match_operand:SI 2 "const_int_operand" "n,n,n"))
11611 (and:SI (ashift:SI (match_dup 1)
11613 (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
11615 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11616 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11618 [(set_attr "arch" "t1,t2,32")
11619 (set_attr "length" "2,2,4")
11620 (set_attr "type" "rev")]
11623 (define_expand "bswaphi2"
11624 [(set (match_operand:HI 0 "s_register_operand")
11625 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
11630 ;; Patterns for LDRD/STRD in Thumb2 mode
11632 (define_insn "*thumb2_ldrd"
11633 [(set (match_operand:SI 0 "s_register_operand" "=r")
11634 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11635 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
11636 (set (match_operand:SI 3 "s_register_operand" "=r")
11637 (mem:SI (plus:SI (match_dup 1)
11638 (match_operand:SI 4 "const_int_operand" ""))))]
11639 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11640 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
11641 && (operands_ok_ldrd_strd (operands[0], operands[3],
11642 operands[1], INTVAL (operands[2]),
11644 "ldrd%?\t%0, %3, [%1, %2]"
11645 [(set_attr "type" "load_8")
11646 (set_attr "predicable" "yes")])
11648 (define_insn "*thumb2_ldrd_base"
11649 [(set (match_operand:SI 0 "s_register_operand" "=r")
11650 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11651 (set (match_operand:SI 2 "s_register_operand" "=r")
11652 (mem:SI (plus:SI (match_dup 1)
11654 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11655 && (operands_ok_ldrd_strd (operands[0], operands[2],
11656 operands[1], 0, false, true))"
11657 "ldrd%?\t%0, %2, [%1]"
11658 [(set_attr "type" "load_8")
11659 (set_attr "predicable" "yes")])
11661 (define_insn "*thumb2_ldrd_base_neg"
11662 [(set (match_operand:SI 0 "s_register_operand" "=r")
11663 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11665 (set (match_operand:SI 2 "s_register_operand" "=r")
11666 (mem:SI (match_dup 1)))]
11667 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11668 && (operands_ok_ldrd_strd (operands[0], operands[2],
11669 operands[1], -4, false, true))"
11670 "ldrd%?\t%0, %2, [%1, #-4]"
11671 [(set_attr "type" "load_8")
11672 (set_attr "predicable" "yes")])
11674 (define_insn "*thumb2_strd"
11675 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11676 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
11677 (match_operand:SI 2 "s_register_operand" "r"))
11678 (set (mem:SI (plus:SI (match_dup 0)
11679 (match_operand:SI 3 "const_int_operand" "")))
11680 (match_operand:SI 4 "s_register_operand" "r"))]
11681 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11682 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
11683 && (operands_ok_ldrd_strd (operands[2], operands[4],
11684 operands[0], INTVAL (operands[1]),
11686 "strd%?\t%2, %4, [%0, %1]"
11687 [(set_attr "type" "store_8")
11688 (set_attr "predicable" "yes")])
11690 (define_insn "*thumb2_strd_base"
11691 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
11692 (match_operand:SI 1 "s_register_operand" "r"))
11693 (set (mem:SI (plus:SI (match_dup 0)
11695 (match_operand:SI 2 "s_register_operand" "r"))]
11696 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11697 && (operands_ok_ldrd_strd (operands[1], operands[2],
11698 operands[0], 0, false, false))"
11699 "strd%?\t%1, %2, [%0]"
11700 [(set_attr "type" "store_8")
11701 (set_attr "predicable" "yes")])
11703 (define_insn "*thumb2_strd_base_neg"
11704 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11706 (match_operand:SI 1 "s_register_operand" "r"))
11707 (set (mem:SI (match_dup 0))
11708 (match_operand:SI 2 "s_register_operand" "r"))]
11709 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11710 && (operands_ok_ldrd_strd (operands[1], operands[2],
11711 operands[0], -4, false, false))"
11712 "strd%?\t%1, %2, [%0, #-4]"
11713 [(set_attr "type" "store_8")
11714 (set_attr "predicable" "yes")])
11716 ;; ARMv8 CRC32 instructions.
11717 (define_insn "arm_<crc_variant>"
11718 [(set (match_operand:SI 0 "s_register_operand" "=r")
11719 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
11720 (match_operand:<crc_mode> 2 "s_register_operand" "r")]
11723 "<crc_variant>\\t%0, %1, %2"
11724 [(set_attr "type" "crc")
11725 (set_attr "conds" "unconditional")]
11728 ;; Load the load/store double peephole optimizations.
11729 (include "ldrdstrd.md")
11731 ;; Load the load/store multiple patterns
11732 (include "ldmstm.md")
11734 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
11735 ;; large lists without explicit writeback generated for APCS_FRAME epilogue.
11736 ;; The operands are validated through the load_multiple_operation
11737 ;; match_parallel predicate rather than through constraints so enable it only
11739 (define_insn "*load_multiple"
11740 [(match_parallel 0 "load_multiple_operation"
11741 [(set (match_operand:SI 2 "s_register_operand" "=rk")
11742 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11744 "TARGET_32BIT && reload_completed"
11747 arm_output_multireg_pop (operands, /*return_pc=*/false,
11748 /*cond=*/const_true_rtx,
11754 [(set_attr "predicable" "yes")]
11757 (define_expand "copysignsf3"
11758 [(match_operand:SF 0 "register_operand")
11759 (match_operand:SF 1 "register_operand")
11760 (match_operand:SF 2 "register_operand")]
11761 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11763 emit_move_insn (operands[0], operands[2]);
11764 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
11765 GEN_INT (31), GEN_INT (0),
11766 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
11771 (define_expand "copysigndf3"
11772 [(match_operand:DF 0 "register_operand")
11773 (match_operand:DF 1 "register_operand")
11774 (match_operand:DF 2 "register_operand")]
11775 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11777 rtx op0_low = gen_lowpart (SImode, operands[0]);
11778 rtx op0_high = gen_highpart (SImode, operands[0]);
11779 rtx op1_low = gen_lowpart (SImode, operands[1]);
11780 rtx op1_high = gen_highpart (SImode, operands[1]);
11781 rtx op2_high = gen_highpart (SImode, operands[2]);
11783 rtx scratch1 = gen_reg_rtx (SImode);
11784 rtx scratch2 = gen_reg_rtx (SImode);
11785 emit_move_insn (scratch1, op2_high);
11786 emit_move_insn (scratch2, op1_high);
11788 emit_insn(gen_rtx_SET(scratch1,
11789 gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT(31))));
11790 emit_insn(gen_insv_t2(scratch2, GEN_INT(1), GEN_INT(31), scratch1));
11791 emit_move_insn (op0_low, op1_low);
11792 emit_move_insn (op0_high, scratch2);
11798 ;; movmisalign patterns for HImode and SImode.
11799 (define_expand "movmisalign<mode>"
11800 [(match_operand:HSI 0 "general_operand")
11801 (match_operand:HSI 1 "general_operand")]
11804 /* This pattern is not permitted to fail during expansion: if both arguments
11805 are non-registers (e.g. memory := constant), force operand 1 into a
11807 rtx (* gen_unaligned_load)(rtx, rtx);
11808 rtx tmp_dest = operands[0];
11809 if (!s_register_operand (operands[0], <MODE>mode)
11810 && !s_register_operand (operands[1], <MODE>mode))
11811 operands[1] = force_reg (<MODE>mode, operands[1]);
11813 if (<MODE>mode == HImode)
11815 gen_unaligned_load = gen_unaligned_loadhiu;
11816 tmp_dest = gen_reg_rtx (SImode);
11819 gen_unaligned_load = gen_unaligned_loadsi;
11821 if (MEM_P (operands[1]))
11823 emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
11824 if (<MODE>mode == HImode)
11825 emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
11828 emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
11833 (define_insn "arm_<cdp>"
11834 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11835 (match_operand:SI 1 "immediate_operand" "n")
11836 (match_operand:SI 2 "immediate_operand" "n")
11837 (match_operand:SI 3 "immediate_operand" "n")
11838 (match_operand:SI 4 "immediate_operand" "n")
11839 (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
11840 "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
11842 arm_const_bounds (operands[0], 0, 16);
11843 arm_const_bounds (operands[1], 0, 16);
11844 arm_const_bounds (operands[2], 0, (1 << 5));
11845 arm_const_bounds (operands[3], 0, (1 << 5));
11846 arm_const_bounds (operands[4], 0, (1 << 5));
11847 arm_const_bounds (operands[5], 0, 8);
11848 return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
11850 [(set_attr "length" "4")
11851 (set_attr "type" "coproc")])
11853 (define_insn "*ldc"
11854 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11855 (match_operand:SI 1 "immediate_operand" "n")
11856 (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
11857 "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
11859 arm_const_bounds (operands[0], 0, 16);
11860 arm_const_bounds (operands[1], 0, (1 << 5));
11861 return "<ldc>\\tp%c0, CR%c1, %2";
11863 [(set_attr "length" "4")
11864 (set_attr "type" "coproc")])
11866 (define_insn "*stc"
11867 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11868 (match_operand:SI 1 "immediate_operand" "n")
11869 (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
11870 "arm_coproc_builtin_available (VUNSPEC_<STC>)"
11872 arm_const_bounds (operands[0], 0, 16);
11873 arm_const_bounds (operands[1], 0, (1 << 5));
11874 return "<stc>\\tp%c0, CR%c1, %2";
11876 [(set_attr "length" "4")
11877 (set_attr "type" "coproc")])
11879 (define_expand "arm_<ldc>"
11880 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11881 (match_operand:SI 1 "immediate_operand")
11882 (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
11883 "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
11885 (define_expand "arm_<stc>"
11886 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11887 (match_operand:SI 1 "immediate_operand")
11888 (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
11889 "arm_coproc_builtin_available (VUNSPEC_<STC>)")
11891 (define_insn "arm_<mcr>"
11892 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11893 (match_operand:SI 1 "immediate_operand" "n")
11894 (match_operand:SI 2 "s_register_operand" "r")
11895 (match_operand:SI 3 "immediate_operand" "n")
11896 (match_operand:SI 4 "immediate_operand" "n")
11897 (match_operand:SI 5 "immediate_operand" "n")] MCRI)
11898 (use (match_dup 2))]
11899 "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
11901 arm_const_bounds (operands[0], 0, 16);
11902 arm_const_bounds (operands[1], 0, 8);
11903 arm_const_bounds (operands[3], 0, (1 << 5));
11904 arm_const_bounds (operands[4], 0, (1 << 5));
11905 arm_const_bounds (operands[5], 0, 8);
11906 return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
11908 [(set_attr "length" "4")
11909 (set_attr "type" "coproc")])
11911 (define_insn "arm_<mrc>"
11912 [(set (match_operand:SI 0 "s_register_operand" "=r")
11913 (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
11914 (match_operand:SI 2 "immediate_operand" "n")
11915 (match_operand:SI 3 "immediate_operand" "n")
11916 (match_operand:SI 4 "immediate_operand" "n")
11917 (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
11918 "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
11920 arm_const_bounds (operands[1], 0, 16);
11921 arm_const_bounds (operands[2], 0, 8);
11922 arm_const_bounds (operands[3], 0, (1 << 5));
11923 arm_const_bounds (operands[4], 0, (1 << 5));
11924 arm_const_bounds (operands[5], 0, 8);
11925 return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
11927 [(set_attr "length" "4")
11928 (set_attr "type" "coproc")])
11930 (define_insn "arm_<mcrr>"
11931 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11932 (match_operand:SI 1 "immediate_operand" "n")
11933 (match_operand:DI 2 "s_register_operand" "r")
11934 (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
11935 (use (match_dup 2))]
11936 "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
11938 arm_const_bounds (operands[0], 0, 16);
11939 arm_const_bounds (operands[1], 0, 8);
11940 arm_const_bounds (operands[3], 0, (1 << 5));
11941 return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
11943 [(set_attr "length" "4")
11944 (set_attr "type" "coproc")])
11946 (define_insn "arm_<mrrc>"
11947 [(set (match_operand:DI 0 "s_register_operand" "=r")
11948 (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
11949 (match_operand:SI 2 "immediate_operand" "n")
11950 (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
11951 "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
11953 arm_const_bounds (operands[1], 0, 16);
11954 arm_const_bounds (operands[2], 0, 8);
11955 arm_const_bounds (operands[3], 0, (1 << 5));
11956 return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
11958 [(set_attr "length" "4")
11959 (set_attr "type" "coproc")])
11961 (define_expand "speculation_barrier"
11962 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11965 /* For thumb1 (except Armv8 derivatives), and for pre-Armv7 we don't
11966 have a usable barrier (and probably don't need one in practice).
11967 But to be safe if such code is run on later architectures, call a
11968 helper function in libgcc that will do the thing for the active
11970 if (!(arm_arch7 || arm_arch8))
11972 arm_emit_speculation_barrier_function ();
11978 ;; Generate a hard speculation barrier when we have not enabled speculation
11980 (define_insn "*speculation_barrier_insn"
11981 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11982 "arm_arch7 || arm_arch8"
11984 [(set_attr "type" "block")
11985 (set_attr "length" "8")]
11988 ;; Vector bits common to IWMMXT and Neon
11989 (include "vec-common.md")
11990 ;; Load the Intel Wireless Multimedia Extension patterns
11991 (include "iwmmxt.md")
11992 ;; Load the VFP co-processor patterns
11994 ;; Thumb-1 patterns
11995 (include "thumb1.md")
11996 ;; Thumb-2 patterns
11997 (include "thumb2.md")
11999 (include "neon.md")
12001 (include "crypto.md")
12002 ;; Synchronization Primitives
12003 (include "sync.md")
12004 ;; Fixed-point patterns
12005 (include "arm-fixed.md")