1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2019 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
6
7 ;; This file is part of GCC.
8
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
13
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
18
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
22
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
24
25 \f
26 ;;---------------------------------------------------------------------------
27 ;; Constants
28
29 ;; Register numbers -- All machine registers should be defined here
30 (define_constants
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (R4_REGNUM 4) ; Fifth CORE register
34 (FDPIC_REGNUM 9) ; FDPIC register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (LAST_ARM_REGNUM 15) ;
40 (CC_REGNUM 100) ; Condition code pseudo register
41 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
42 ]
43 )
44 ;; 3rd operand to select_dominance_cc_mode
45 (define_constants
46 [(DOM_CC_X_AND_Y 0)
47 (DOM_CC_NX_OR_Y 1)
48 (DOM_CC_X_OR_Y 2)
49 ]
50 )
51 ;; conditional compare combination
52 (define_constants
53 [(CMP_CMP 0)
54 (CMN_CMP 1)
55 (CMP_CMN 2)
56 (CMN_CMN 3)
57 (NUM_OF_COND_CMP 4)
58 ]
59 )
60
61 \f
62 ;;---------------------------------------------------------------------------
63 ;; Attributes
64
65 ;; Processor type. This is created automatically from arm-cores.def.
66 (include "arm-tune.md")
67
68 ;; Instruction classification types
69 (include "types.md")
70
71 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
72 ; generating ARM code. This is used to control the length of some insn
73 ; patterns that share the same RTL in both ARM and Thumb code.
74 (define_attr "is_thumb" "yes,no"
75 (const (if_then_else (symbol_ref "TARGET_THUMB")
76 (const_string "yes") (const_string "no"))))
77
78 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
79 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
80
81 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
82 (define_attr "is_thumb1" "yes,no"
83 (const (if_then_else (symbol_ref "TARGET_THUMB1")
84 (const_string "yes") (const_string "no"))))
85
86 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
87 ; The arm_restrict_it flag enables the "short IT" feature which
88 ; restricts IT blocks to a single 16-bit instruction.
89 ; This attribute should only be used on 16-bit Thumb-2 instructions
90 ; which may be predicated (the "predicable" attribute must be set).
91 (define_attr "predicable_short_it" "no,yes" (const_string "no"))
92
93 ; Mark whether an instruction may be used inside a "short IT" block in
94 ; Thumb-2.  This attribute should be set to "no" on instructions which may
95 ; emit an IT block in their expansion which is not a short IT.
96 (define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
97
98 ;; Operand number of an input operand that is shifted. Zero if the
99 ;; given instruction does not shift one of its input operands.
100 (define_attr "shift" "" (const_int 0))
101
102 ;; [For compatibility with AArch64 in pipeline models]
103 ;; Attribute that specifies whether or not the instruction touches fp
104 ;; registers.
105 (define_attr "fp" "no,yes" (const_string "no"))
106
107 ; Floating Point Unit. If we only have floating point emulation, then there
108 ; is no point in scheduling the floating point insns. (Well, for best
109 ; performance we should try to group them together).
110 (define_attr "fpu" "none,vfp"
111 (const (symbol_ref "arm_fpu_attr")))
112
113 ; Predicated means that the insn form is conditionally executed based on a
114 ; predicate. We default to 'no' because no Thumb patterns match this rule
115 ; and not all ARM insns do.
116 (define_attr "predicated" "yes,no" (const_string "no"))
117
118 ; LENGTH of an instruction (in bytes)
119 (define_attr "length" ""
120 (const_int 4))
121
122 ; The architecture which supports the instruction (or alternative).
123 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
124 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
125 ; for ARM or Thumb-2 with arm_arch6, and "nov6" for ARM without
126 ; arm_arch6. "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
127 ; Baseline.  This attribute is used to compute attribute "enabled";
128 ; use value "any" to enable an alternative in all cases.
129 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,v8mb,iwmmxt,iwmmxt2,armv6_or_vfpv3,neon"
130 (const_string "any"))
131
132 (define_attr "arch_enabled" "no,yes"
133 (cond [(eq_attr "arch" "any")
134 (const_string "yes")
135
136 (and (eq_attr "arch" "a")
137 (match_test "TARGET_ARM"))
138 (const_string "yes")
139
140 (and (eq_attr "arch" "t")
141 (match_test "TARGET_THUMB"))
142 (const_string "yes")
143
144 (and (eq_attr "arch" "t1")
145 (match_test "TARGET_THUMB1"))
146 (const_string "yes")
147
148 (and (eq_attr "arch" "t2")
149 (match_test "TARGET_THUMB2"))
150 (const_string "yes")
151
152 (and (eq_attr "arch" "32")
153 (match_test "TARGET_32BIT"))
154 (const_string "yes")
155
156 (and (eq_attr "arch" "v6")
157 (match_test "TARGET_32BIT && arm_arch6"))
158 (const_string "yes")
159
160 (and (eq_attr "arch" "nov6")
161 (match_test "TARGET_32BIT && !arm_arch6"))
162 (const_string "yes")
163
164 (and (eq_attr "arch" "v6t2")
165 (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
166 (const_string "yes")
167
168 (and (eq_attr "arch" "v8mb")
169 (match_test "TARGET_THUMB1 && arm_arch8"))
170 (const_string "yes")
171
172 (and (eq_attr "arch" "iwmmxt2")
173 (match_test "TARGET_REALLY_IWMMXT2"))
174 (const_string "yes")
175
176 (and (eq_attr "arch" "armv6_or_vfpv3")
177 (match_test "arm_arch6 || TARGET_VFP3"))
178 (const_string "yes")
179
180 (and (eq_attr "arch" "neon")
181 (match_test "TARGET_NEON"))
182 (const_string "yes")
183 ]
184
185 (const_string "no")))
186
187 (define_attr "opt" "any,speed,size"
188 (const_string "any"))
189
190 (define_attr "opt_enabled" "no,yes"
191 (cond [(eq_attr "opt" "any")
192 (const_string "yes")
193
194 (and (eq_attr "opt" "speed")
195 (match_test "optimize_function_for_speed_p (cfun)"))
196 (const_string "yes")
197
198 (and (eq_attr "opt" "size")
199 (match_test "optimize_function_for_size_p (cfun)"))
200 (const_string "yes")]
201 (const_string "no")))
202
203 (define_attr "use_literal_pool" "no,yes"
204 (cond [(and (eq_attr "type" "f_loads,f_loadd")
205 (match_test "CONSTANT_P (operands[1])"))
206 (const_string "yes")]
207 (const_string "no")))
208
209 ; Enable all alternatives that are both arch_enabled and insn_enabled.
210 ; FIXME: opt_enabled has been temporarily removed until we have
211 ; an attribute that allows the use of such alternatives.
212 ; This depends on caching of speed_p, size_p on a per
213 ; alternative basis. The problem is that the enabled attribute
214 ; cannot depend on any state that is not cached or is not constant
215 ; for a compilation unit. We probably need a generic "hot/cold"
216 ; alternative which, if implemented, can help with this.  We disable this
217 ; until this is implemented and/or the improvements or
218 ; regressions with removing this attribute are double checked.
219 ; See ashldi3_neon and <shift>di3_neon in neon.md.
220
221 (define_attr "enabled" "no,yes"
222 (cond [(and (eq_attr "predicable_short_it" "no")
223 (and (eq_attr "predicated" "yes")
224 (match_test "arm_restrict_it")))
225 (const_string "no")
226
227 (and (eq_attr "enabled_for_short_it" "no")
228 (match_test "arm_restrict_it"))
229 (const_string "no")
230
231 (eq_attr "arch_enabled" "no")
232 (const_string "no")]
233 (const_string "yes")))
234
235 ; POOL_RANGE is how far away from a constant pool entry this insn
236 ; can be placed. If the distance is zero, then this insn will never
237 ; reference the pool.
238 ; Note that for Thumb constant pools the PC value is rounded down to the
239 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
240 ; Thumb insns) should be set to <max_range> - 2.
241 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
242 ; before its address. It is set to <max_range> - (8 + <data_size>).
243 (define_attr "arm_pool_range" "" (const_int 0))
244 (define_attr "thumb2_pool_range" "" (const_int 0))
245 (define_attr "arm_neg_pool_range" "" (const_int 0))
246 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
247
248 (define_attr "pool_range" ""
249 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
250 (attr "arm_pool_range")))
251 (define_attr "neg_pool_range" ""
252 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
253 (attr "arm_neg_pool_range")))
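;; As a rough illustration (the actual values are supplied by each load
;; pattern, not here): an ARM-state word load has a 12-bit immediate offset,
;; so it can reach about +/-4095 bytes from the PC, whereas the equivalent
;; Thumb encodings reach slightly less once the rounded-down PC described
;; above is taken into account; hence the separate arm_* and thumb2_*
;; attributes.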
254
255 ; An assembler sequence may clobber the condition codes without us knowing.
256 ; If such an insn references the pool, then we have no way of knowing how,
257 ; so use the most conservative value for pool_range.
258 (define_asm_attributes
259 [(set_attr "conds" "clob")
260 (set_attr "length" "4")
261 (set_attr "pool_range" "250")])
262
263 ; Load scheduling, set from the arm_ld_sched variable
264 ; initialized by arm_option_override()
265 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
266
267 ; condition codes: this one is used by final_prescan_insn to speed up
268 ; conditionalizing instructions. It saves having to scan the rtl to see if
269 ; it uses or alters the condition codes.
270 ;
271 ; USE means that the condition codes are used by the insn in the process of
272 ; outputting code; this means (at present) that we can't use the insn in
273 ; inlined branches.
274 ;
275 ; SET means that the purpose of the insn is to set the condition codes in a
276 ; well defined manner.
277 ;
278 ; CLOB means that the condition codes are altered in an undefined manner, if
279 ; they are altered at all
280 ;
281 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
282 ; that the instruction does not use or alter the condition codes.
283 ;
284 ; NOCOND means that the instruction does not use or alter the condition
285 ; codes but can be converted into a conditionally executed instruction.
286
287 (define_attr "conds" "use,set,clob,unconditional,nocond"
288 (if_then_else
289 (ior (eq_attr "is_thumb1" "yes")
290 (eq_attr "type" "call"))
291 (const_string "clob")
292 (if_then_else (eq_attr "is_neon_type" "no")
293 (const_string "nocond")
294 (const_string "unconditional"))))
295
296 ; Predicable means that the insn can be conditionally executed based on
297 ; an automatically added predicate (additional patterns are generated by
298 ; gen...). We default to 'no' because no Thumb patterns match this rule
299 ; and not all ARM patterns do.
300 (define_attr "predicable" "no,yes" (const_string "no"))
301
302 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
303 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
304 ; suffer blockages enough to warrant modelling this (and it can adversely
305 ; affect the schedule).
306 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
307
308 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
309 ; to stall the processor. Used with model_wbuf above.
310 (define_attr "write_conflict" "no,yes"
311 (if_then_else (eq_attr "type"
312 "block,call,load_4")
313 (const_string "yes")
314 (const_string "no")))
315
316 ; Classify the insns into those that take one cycle and those that take more
317 ; than one on the main cpu execution unit.
318 (define_attr "core_cycles" "single,multi"
319 (if_then_else (eq_attr "type"
320 "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
321 alu_shift_imm, alu_shift_reg, alu_dsp_reg, alus_ext, alus_imm, alus_sreg,\
322 alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
323 logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
324 logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
325 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
326 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
327 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
328 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
329 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
330 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
331 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
332 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
333 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
334 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
335 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
336 (const_string "single")
337 (const_string "multi")))
338
339 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
340 ;; distant label. Only applicable to Thumb code.
341 (define_attr "far_jump" "yes,no" (const_string "no"))
342
343
344 ;; The number of machine instructions this pattern expands to.
345 ;; Used for Thumb-2 conditional execution.
346 (define_attr "ce_count" "" (const_int 1))
347
348 ;;---------------------------------------------------------------------------
349 ;; Unspecs
350
351 (include "unspecs.md")
352
353 ;;---------------------------------------------------------------------------
354 ;; Mode iterators
355
356 (include "iterators.md")
357
358 ;;---------------------------------------------------------------------------
359 ;; Predicates
360
361 (include "predicates.md")
362 (include "constraints.md")
363
364 ;;---------------------------------------------------------------------------
365 ;; Pipeline descriptions
366
367 (define_attr "tune_cortexr4" "yes,no"
368 (const (if_then_else
369 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
370 (const_string "yes")
371 (const_string "no"))))
372
373 ;; True if the generic scheduling description should be used.
374
375 (define_attr "generic_sched" "yes,no"
376 (const (if_then_else
377 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
378 arm926ejs,arm10e,arm1026ejs,arm1136js,\
379 arm1136jfs,cortexa5,cortexa7,cortexa8,\
380 cortexa9,cortexa12,cortexa15,cortexa17,\
381 cortexa53,cortexa57,cortexm4,cortexm7,\
382 exynosm1,marvell_pj4,xgene1")
383 (eq_attr "tune_cortexr4" "yes"))
384 (const_string "no")
385 (const_string "yes"))))
386
387 (define_attr "generic_vfp" "yes,no"
388 (const (if_then_else
389 (and (eq_attr "fpu" "vfp")
390 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
391 cortexa8,cortexa9,cortexa53,cortexm4,\
392 cortexm7,marvell_pj4,xgene1")
393 (eq_attr "tune_cortexr4" "no"))
394 (const_string "yes")
395 (const_string "no"))))
396
397 (include "marvell-f-iwmmxt.md")
398 (include "arm-generic.md")
399 (include "arm926ejs.md")
400 (include "arm1020e.md")
401 (include "arm1026ejs.md")
402 (include "arm1136jfs.md")
403 (include "fa526.md")
404 (include "fa606te.md")
405 (include "fa626te.md")
406 (include "fmp626.md")
407 (include "fa726te.md")
408 (include "cortex-a5.md")
409 (include "cortex-a7.md")
410 (include "cortex-a8.md")
411 (include "cortex-a9.md")
412 (include "cortex-a15.md")
413 (include "cortex-a17.md")
414 (include "cortex-a53.md")
415 (include "cortex-a57.md")
416 (include "cortex-r4.md")
417 (include "cortex-r4f.md")
418 (include "cortex-m7.md")
419 (include "cortex-m4.md")
420 (include "cortex-m4-fpu.md")
421 (include "exynos-m1.md")
422 (include "vfp11.md")
423 (include "marvell-pj4.md")
424 (include "xgene1.md")
425
426 \f
427 ;;---------------------------------------------------------------------------
428 ;; Insn patterns
429 ;;
430 ;; Addition insns.
431
432 ;; Note: For DImode insns, there is normally no reason why operands should
433 ;; not be in the same register; what we don't want is for something being
434 ;; written to partially overlap something that is an input.
435
436 (define_expand "adddi3"
437 [(parallel
438 [(set (match_operand:DI 0 "s_register_operand")
439 (plus:DI (match_operand:DI 1 "s_register_operand")
440 (match_operand:DI 2 "reg_or_int_operand")))
441 (clobber (reg:CC CC_REGNUM))])]
442 "TARGET_EITHER"
443 "
444 if (TARGET_THUMB1)
445 {
446 if (!REG_P (operands[2]))
447 operands[2] = force_reg (DImode, operands[2]);
448 }
449 else
450 {
451 rtx lo_result, hi_result, lo_dest, hi_dest;
452 rtx lo_op1, hi_op1, lo_op2, hi_op2;
453 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
454 &lo_op2, &hi_op2);
455 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
456 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
457
458 if (lo_op2 == const0_rtx)
459 {
460 lo_dest = lo_op1;
461 if (!arm_add_operand (hi_op2, SImode))
462 hi_op2 = force_reg (SImode, hi_op2);
463 /* Assume hi_op2 won't also be zero. */
464 emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
465 }
466 else
467 {
468 if (!arm_add_operand (lo_op2, SImode))
469 lo_op2 = force_reg (SImode, lo_op2);
470 if (!arm_not_operand (hi_op2, SImode))
471 hi_op2 = force_reg (SImode, hi_op2);
472
473 emit_insn (gen_addsi3_compareC (lo_dest, lo_op1, lo_op2));
474 rtx carry = gen_rtx_LTU (SImode, gen_rtx_REG (CC_Cmode, CC_REGNUM),
475 const0_rtx);
476 if (hi_op2 == const0_rtx)
477 emit_insn (gen_add0si3_carryin (hi_dest, hi_op1, carry));
478 else
479 emit_insn (gen_addsi3_carryin (hi_dest, hi_op1, hi_op2, carry));
480 }
481
482 if (lo_result != lo_dest)
483 emit_move_insn (lo_result, lo_dest);
484 if (hi_result != hi_dest)
485 emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
486 DONE;
487 }
488 "
489 )
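;; For example (illustrative only, with placeholder register names): adding
;; two 64-bit register pairs expands to an adds/adc pair so that the carry
;; from the low word propagates into the high word:
;;	adds	lo_dest, lo_op1, lo_op2		@ sets C
;;	adc	hi_dest, hi_op1, hi_op2		@ consumes C
;; When the low half of operand 2 is zero, only the high-word add is needed,
;; as the expander above shows.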
490
491 (define_expand "addv<mode>4"
492 [(match_operand:SIDI 0 "register_operand")
493 (match_operand:SIDI 1 "register_operand")
494 (match_operand:SIDI 2 "register_operand")
495 (match_operand 3 "")]
496 "TARGET_32BIT"
497 {
498 emit_insn (gen_add<mode>3_compareV (operands[0], operands[1], operands[2]));
499 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
500
501 DONE;
502 })
503
504 (define_expand "uaddv<mode>4"
505 [(match_operand:SIDI 0 "register_operand")
506 (match_operand:SIDI 1 "register_operand")
507 (match_operand:SIDI 2 "register_operand")
508 (match_operand 3 "")]
509 "TARGET_32BIT"
510 {
511 emit_insn (gen_add<mode>3_compareC (operands[0], operands[1], operands[2]));
512 arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
513
514 DONE;
515 })
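;; A sketch of how these expanders are typically reached (the lowering itself
;; is done by the middle end; the C names below are just for illustration):
;;
;;   int a, b, r;
;;   if (__builtin_add_overflow (a, b, &r))      /* signed: addv<mode>4 */
;;     ...
;;   unsigned int ua, ub, ur;
;;   if (__builtin_add_overflow (ua, ub, &ur))   /* unsigned: uaddv<mode>4 */
;;     ...
;;
;; The addition sets the flags and the branch tests V (signed overflow) or
;; C (unsigned carry).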
516
517 (define_expand "addsi3"
518 [(set (match_operand:SI 0 "s_register_operand")
519 (plus:SI (match_operand:SI 1 "s_register_operand")
520 (match_operand:SI 2 "reg_or_int_operand")))]
521 "TARGET_EITHER"
522 "
523 if (TARGET_32BIT && CONST_INT_P (operands[2]))
524 {
525 arm_split_constant (PLUS, SImode, NULL_RTX,
526 INTVAL (operands[2]), operands[0], operands[1],
527 optimize && can_create_pseudo_p ());
528 DONE;
529 }
530 "
531 )
532
533 ; If there is a scratch available, this will be faster than synthesizing the
534 ; addition.
535 (define_peephole2
536 [(match_scratch:SI 3 "r")
537 (set (match_operand:SI 0 "arm_general_register_operand" "")
538 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
539 (match_operand:SI 2 "const_int_operand" "")))]
540 "TARGET_32BIT &&
541 !(const_ok_for_arm (INTVAL (operands[2]))
542 || const_ok_for_arm (-INTVAL (operands[2])))
543 && const_ok_for_arm (~INTVAL (operands[2]))"
544 [(set (match_dup 3) (match_dup 2))
545 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
546 ""
547 )
548
549 ;; The r/r/k alternative is required when reloading the address
550 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
551 ;; put the duplicated register first, and not try the commutative version.
552 (define_insn_and_split "*arm_addsi3"
553 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
554 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
555 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
556 "TARGET_32BIT"
557 "@
558 add%?\\t%0, %0, %2
559 add%?\\t%0, %1, %2
560 add%?\\t%0, %1, %2
561 add%?\\t%0, %1, %2
562 add%?\\t%0, %1, %2
563 add%?\\t%0, %1, %2
564 add%?\\t%0, %2, %1
565 add%?\\t%0, %1, %2
566 addw%?\\t%0, %1, %2
567 addw%?\\t%0, %1, %2
568 sub%?\\t%0, %1, #%n2
569 sub%?\\t%0, %1, #%n2
570 sub%?\\t%0, %1, #%n2
571 subw%?\\t%0, %1, #%n2
572 subw%?\\t%0, %1, #%n2
573 #"
574 "TARGET_32BIT
575 && CONST_INT_P (operands[2])
576 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
577 && (reload_completed || !arm_eliminable_register (operands[1]))"
578 [(clobber (const_int 0))]
579 "
580 arm_split_constant (PLUS, SImode, curr_insn,
581 INTVAL (operands[2]), operands[0],
582 operands[1], 0);
583 DONE;
584 "
585 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
586 (set_attr "predicable" "yes")
587 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
588 (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
589 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
590 (const_string "alu_imm")
591 (const_string "alu_sreg")))
592 ]
593 )
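;; For example (illustrative), the final "#" alternative splits an addition
;; of a constant with no ARM immediate encoding into encodable pieces, e.g.
;; r0 = r1 + 0x10001 becoming
;;	add	r0, r1, #0x10000
;;	add	r0, r0, #1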
594
595 (define_insn "adddi3_compareV"
596 [(set (reg:CC_V CC_REGNUM)
597 (ne:CC_V
598 (plus:TI
599 (sign_extend:TI (match_operand:DI 1 "s_register_operand" "r"))
600 (sign_extend:TI (match_operand:DI 2 "s_register_operand" "r")))
601 (sign_extend:TI (plus:DI (match_dup 1) (match_dup 2)))))
602 (set (match_operand:DI 0 "s_register_operand" "=&r")
603 (plus:DI (match_dup 1) (match_dup 2)))]
604 "TARGET_32BIT"
605 "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
606 [(set_attr "conds" "set")
607 (set_attr "length" "8")
608 (set_attr "type" "multiple")]
609 )
610
611 (define_insn "addsi3_compareV"
612 [(set (reg:CC_V CC_REGNUM)
613 (ne:CC_V
614 (plus:DI
615 (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
616 (sign_extend:DI (match_operand:SI 2 "register_operand" "r")))
617 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
618 (set (match_operand:SI 0 "register_operand" "=r")
619 (plus:SI (match_dup 1) (match_dup 2)))]
620 "TARGET_32BIT"
621 "adds%?\\t%0, %1, %2"
622 [(set_attr "conds" "set")
623 (set_attr "type" "alus_sreg")]
624 )
625
626 (define_insn "adddi3_compareC"
627 [(set (reg:CC_C CC_REGNUM)
628 (compare:CC_C
629 (plus:DI
630 (match_operand:DI 1 "register_operand" "r")
631 (match_operand:DI 2 "register_operand" "r"))
632 (match_dup 1)))
633 (set (match_operand:DI 0 "register_operand" "=&r")
634 (plus:DI (match_dup 1) (match_dup 2)))]
635 "TARGET_32BIT"
636 "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
637 [(set_attr "conds" "set")
638 (set_attr "length" "8")
639 (set_attr "type" "multiple")]
640 )
641
642 (define_insn "addsi3_compareC"
643 [(set (reg:CC_C CC_REGNUM)
644 (compare:CC_C (plus:SI (match_operand:SI 1 "register_operand" "r")
645 (match_operand:SI 2 "register_operand" "r"))
646 (match_dup 1)))
647 (set (match_operand:SI 0 "register_operand" "=r")
648 (plus:SI (match_dup 1) (match_dup 2)))]
649 "TARGET_32BIT"
650 "adds%?\\t%0, %1, %2"
651 [(set_attr "conds" "set")
652 (set_attr "type" "alus_sreg")]
653 )
654
655 (define_insn "addsi3_compare0"
656 [(set (reg:CC_NOOV CC_REGNUM)
657 (compare:CC_NOOV
658 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
659 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
660 (const_int 0)))
661 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
662 (plus:SI (match_dup 1) (match_dup 2)))]
663 "TARGET_ARM"
664 "@
665 adds%?\\t%0, %1, %2
666 subs%?\\t%0, %1, #%n2
667 adds%?\\t%0, %1, %2"
668 [(set_attr "conds" "set")
669 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
670 )
671
672 (define_insn "*addsi3_compare0_scratch"
673 [(set (reg:CC_NOOV CC_REGNUM)
674 (compare:CC_NOOV
675 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
676 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
677 (const_int 0)))]
678 "TARGET_ARM"
679 "@
680 cmn%?\\t%0, %1
681 cmp%?\\t%0, #%n1
682 cmn%?\\t%0, %1"
683 [(set_attr "conds" "set")
684 (set_attr "predicable" "yes")
685 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
686 )
687
688 (define_insn "*compare_negsi_si"
689 [(set (reg:CC_Z CC_REGNUM)
690 (compare:CC_Z
691 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
692 (match_operand:SI 1 "s_register_operand" "l,r")))]
693 "TARGET_32BIT"
694 "cmn%?\\t%1, %0"
695 [(set_attr "conds" "set")
696 (set_attr "predicable" "yes")
697 (set_attr "arch" "t2,*")
698 (set_attr "length" "2,4")
699 (set_attr "predicable_short_it" "yes,no")
700 (set_attr "type" "alus_sreg")]
701 )
702
703 ;; This is the canonicalization of subsi3_compare when the
704 ;; addend is a constant.
705 (define_insn "cmpsi2_addneg"
706 [(set (reg:CC CC_REGNUM)
707 (compare:CC
708 (match_operand:SI 1 "s_register_operand" "r,r")
709 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
710 (set (match_operand:SI 0 "s_register_operand" "=r,r")
711 (plus:SI (match_dup 1)
712 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
713 "TARGET_32BIT
714 && (INTVAL (operands[2])
715 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
716 {
717 /* For 0 and INT_MIN it is essential that we use subs, as adds will result
718 in different condition codes (like cmn rather than like cmp), so that
719 alternative comes first.  Both alternatives can match for any 0x??000000
720 value, where (except for 0 and INT_MIN) it doesn't matter which we choose;
721 both also match for -1 and 1 with TARGET_THUMB2, in which case prefer the
722 instruction with #1 as it is shorter. */
723 if (which_alternative == 0 && operands[3] != const1_rtx)
724 return "subs%?\\t%0, %1, #%n3";
725 else
726 return "adds%?\\t%0, %1, %3";
727 }
728 [(set_attr "conds" "set")
729 (set_attr "type" "alus_sreg")]
730 )
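;; Worked example of the comment above (illustrative): for operand 2 == 0 the
;; two forms are "subs Rd, Rn, #0" and "adds Rd, Rn, #0".  The subtraction
;; never borrows, so subs always leaves C = 1 (as "cmp Rn, #0" would), while
;; the addition never carries, so adds always leaves C = 0; an unsigned branch
;; reading the flags afterwards would therefore see different results, which
;; is why the subs alternative must come first.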
731
732 ;; Convert the sequence
733 ;; sub rd, rn, #1
734 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
735 ;; bne dest
736 ;; into
737 ;; subs rd, rn, #1
738 ;; bcs dest ((unsigned)rn >= 1)
739 ;; similarly for the beq variant using bcc.
740 ;; This is a common looping idiom (while (n--))
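;; A source-level sketch of the idiom being targeted (hypothetical code, for
;; illustration only):
;;
;;   void zero_words (int *p, unsigned int n)
;;   {
;;     while (n--)		/* decrement and test fold into "subs" */
;;       *p++ = 0;
;;   }
;;
;; After the peephole the loop back-edge becomes
;;	subs	rN, rN, #1
;;	bcs	.Lloop
;; rather than a separate subtract, compare and branch.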
741 (define_peephole2
742 [(set (match_operand:SI 0 "arm_general_register_operand" "")
743 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
744 (const_int -1)))
745 (set (match_operand 2 "cc_register" "")
746 (compare (match_dup 0) (const_int -1)))
747 (set (pc)
748 (if_then_else (match_operator 3 "equality_operator"
749 [(match_dup 2) (const_int 0)])
750 (match_operand 4 "" "")
751 (match_operand 5 "" "")))]
752 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
753 [(parallel[
754 (set (match_dup 2)
755 (compare:CC
756 (match_dup 1) (const_int 1)))
757 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
758 (set (pc)
759 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
760 (match_dup 4)
761 (match_dup 5)))]
762 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
763 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
764 ? GEU : LTU),
765 VOIDmode,
766 operands[2], const0_rtx);"
767 )
768
769 ;; The next four insns work because they compare the result with one of
770 ;; the operands, and we know that the use of the condition code is
771 ;; either GEU or LTU, so we can use the carry flag from the addition
772 ;; instead of doing the compare a second time.
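;; For example, after "adds Rd, Rn, Rm" the carry flag is set exactly when
;; the unsigned sum wrapped, i.e. when (Rn + Rm) mod 2^32 is less than Rn
;; (and than Rm).  So a following LTU/GEU test of the result against either
;; original operand can read C directly, with no second compare instruction.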
773 (define_insn "*addsi3_compare_op1"
774 [(set (reg:CC_C CC_REGNUM)
775 (compare:CC_C
776 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
777 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
778 (match_dup 1)))
779 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
780 (plus:SI (match_dup 1) (match_dup 2)))]
781 "TARGET_32BIT"
782 "@
783 adds%?\\t%0, %1, %2
784 adds%?\\t%0, %0, %2
785 subs%?\\t%0, %1, #%n2
786 subs%?\\t%0, %0, #%n2
787 adds%?\\t%0, %1, %2
788 subs%?\\t%0, %1, #%n2
789 adds%?\\t%0, %1, %2"
790 [(set_attr "conds" "set")
791 (set_attr "arch" "t2,t2,t2,t2,*,*,*")
792 (set_attr "length" "2,2,2,2,4,4,4")
793 (set_attr "type"
794 "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
795 )
796
797 (define_insn "*addsi3_compare_op2"
798 [(set (reg:CC_C CC_REGNUM)
799 (compare:CC_C
800 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
801 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
802 (match_dup 2)))
803 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
804 (plus:SI (match_dup 1) (match_dup 2)))]
805 "TARGET_32BIT"
806 "@
807 adds%?\\t%0, %1, %2
808 adds%?\\t%0, %0, %2
809 subs%?\\t%0, %1, #%n2
810 subs%?\\t%0, %0, #%n2
811 adds%?\\t%0, %1, %2
812 subs%?\\t%0, %1, #%n2
813 adds%?\\t%0, %1, %2"
814 [(set_attr "conds" "set")
815 (set_attr "arch" "t2,t2,t2,t2,*,*,*")
816 (set_attr "length" "2,2,2,2,4,4,4")
817 (set_attr "type"
818 "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
819 )
820
821 (define_insn "*compare_addsi2_op0"
822 [(set (reg:CC_C CC_REGNUM)
823 (compare:CC_C
824 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
825 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
826 (match_dup 0)))]
827 "TARGET_32BIT"
828 "@
829 cmp%?\\t%0, #%n1
830 cmn%?\\t%0, %1
831 cmn%?\\t%0, %1
832 cmp%?\\t%0, #%n1
833 cmn%?\\t%0, %1"
834 [(set_attr "conds" "set")
835 (set_attr "predicable" "yes")
836 (set_attr "arch" "t2,t2,*,*,*")
837 (set_attr "predicable_short_it" "yes,yes,no,no,no")
838 (set_attr "length" "2,2,4,4,4")
839 (set_attr "type" "alus_imm,alus_sreg,alus_imm,alus_imm,alus_sreg")]
840 )
841
842 (define_insn "*compare_addsi2_op1"
843 [(set (reg:CC_C CC_REGNUM)
844 (compare:CC_C
845 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
846 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
847 (match_dup 1)))]
848 "TARGET_32BIT"
849 "@
850 cmp%?\\t%0, #%n1
851 cmn%?\\t%0, %1
852 cmn%?\\t%0, %1
853 cmp%?\\t%0, #%n1
854 cmn%?\\t%0, %1"
855 [(set_attr "conds" "set")
856 (set_attr "predicable" "yes")
857 (set_attr "arch" "t2,t2,*,*,*")
858 (set_attr "predicable_short_it" "yes,yes,no,no,no")
859 (set_attr "length" "2,2,4,4,4")
860 (set_attr "type" "alus_imm,alus_sreg,alus_imm,alus_imm,alus_sreg")]
861 )
862
863 (define_insn "addsi3_carryin"
864 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
865 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
866 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
867 (match_operand:SI 3 "arm_carry_operation" "")))]
868 "TARGET_32BIT"
869 "@
870 adc%?\\t%0, %1, %2
871 adc%?\\t%0, %1, %2
872 sbc%?\\t%0, %1, #%B2"
873 [(set_attr "conds" "use")
874 (set_attr "predicable" "yes")
875 (set_attr "arch" "t2,*,*")
876 (set_attr "length" "4")
877 (set_attr "predicable_short_it" "yes,no,no")
878 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
879 )
880
881 ;; Canonicalization of the above when the immediate is zero.
882 (define_insn "add0si3_carryin"
883 [(set (match_operand:SI 0 "s_register_operand" "=r")
884 (plus:SI (match_operand:SI 2 "arm_carry_operation" "")
885 (match_operand:SI 1 "arm_not_operand" "r")))]
886 "TARGET_32BIT"
887 "adc%?\\t%0, %1, #0"
888 [(set_attr "conds" "use")
889 (set_attr "predicable" "yes")
890 (set_attr "length" "4")
891 (set_attr "type" "adc_imm")]
892 )
893
894 (define_insn "*addsi3_carryin_alt2"
895 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
896 (plus:SI (plus:SI (match_operand:SI 3 "arm_carry_operation" "")
897 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
898 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
899 "TARGET_32BIT"
900 "@
901 adc%?\\t%0, %1, %2
902 adc%?\\t%0, %1, %2
903 sbc%?\\t%0, %1, #%B2"
904 [(set_attr "conds" "use")
905 (set_attr "predicable" "yes")
906 (set_attr "arch" "t2,*,*")
907 (set_attr "length" "4")
908 (set_attr "predicable_short_it" "yes,no,no")
909 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
910 )
911
912 (define_insn "*addsi3_carryin_shift"
913 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
914 (plus:SI (plus:SI
915 (match_operator:SI 2 "shift_operator"
916 [(match_operand:SI 3 "s_register_operand" "r,r")
917 (match_operand:SI 4 "shift_amount_operand" "M,r")])
918 (match_operand:SI 5 "arm_carry_operation" ""))
919 (match_operand:SI 1 "s_register_operand" "r,r")))]
920 "TARGET_32BIT"
921 "adc%?\\t%0, %1, %3%S2"
922 [(set_attr "conds" "use")
923 (set_attr "arch" "32,a")
924 (set_attr "shift" "3")
925 (set_attr "predicable" "yes")
926 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
927 (const_string "alu_shift_imm")
928 (const_string "alu_shift_reg")))]
929 )
930
931 (define_insn "*addsi3_carryin_clobercc"
932 [(set (match_operand:SI 0 "s_register_operand" "=r")
933 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
934 (match_operand:SI 2 "arm_rhs_operand" "rI"))
935 (match_operand:SI 3 "arm_carry_operation" "")))
936 (clobber (reg:CC CC_REGNUM))]
937 "TARGET_32BIT"
938 "adcs%?\\t%0, %1, %2"
939 [(set_attr "conds" "set")
940 (set_attr "type" "adcs_reg")]
941 )
942
943 (define_expand "subv<mode>4"
944 [(match_operand:SIDI 0 "register_operand")
945 (match_operand:SIDI 1 "register_operand")
946 (match_operand:SIDI 2 "register_operand")
947 (match_operand 3 "")]
948 "TARGET_32BIT"
949 {
950 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
951 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
952
953 DONE;
954 })
955
956 (define_expand "usubv<mode>4"
957 [(match_operand:SIDI 0 "register_operand")
958 (match_operand:SIDI 1 "register_operand")
959 (match_operand:SIDI 2 "register_operand")
960 (match_operand 3 "")]
961 "TARGET_32BIT"
962 {
963 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
964 arm_gen_unlikely_cbranch (LTU, CCmode, operands[3]);
965
966 DONE;
967 })
968
969 (define_insn "subdi3_compare1"
970 [(set (reg:CC CC_REGNUM)
971 (compare:CC
972 (match_operand:DI 1 "s_register_operand" "r")
973 (match_operand:DI 2 "s_register_operand" "r")))
974 (set (match_operand:DI 0 "s_register_operand" "=&r")
975 (minus:DI (match_dup 1) (match_dup 2)))]
976 "TARGET_32BIT"
977 "subs\\t%Q0, %Q1, %Q2;sbcs\\t%R0, %R1, %R2"
978 [(set_attr "conds" "set")
979 (set_attr "length" "8")
980 (set_attr "type" "multiple")]
981 )
982
983 (define_insn "subsi3_compare1"
984 [(set (reg:CC CC_REGNUM)
985 (compare:CC
986 (match_operand:SI 1 "register_operand" "r")
987 (match_operand:SI 2 "register_operand" "r")))
988 (set (match_operand:SI 0 "register_operand" "=r")
989 (minus:SI (match_dup 1) (match_dup 2)))]
990 "TARGET_32BIT"
991 "subs%?\\t%0, %1, %2"
992 [(set_attr "conds" "set")
993 (set_attr "type" "alus_sreg")]
994 )
995
996 (define_insn "subsi3_carryin"
997 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
998 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
999 (match_operand:SI 2 "s_register_operand" "r,r,r"))
1000 (match_operand:SI 3 "arm_borrow_operation" "")))]
1001 "TARGET_32BIT"
1002 "@
1003 sbc%?\\t%0, %1, %2
1004 rsc%?\\t%0, %2, %1
1005 sbc%?\\t%0, %2, %2, lsl #1"
1006 [(set_attr "conds" "use")
1007 (set_attr "arch" "*,a,t2")
1008 (set_attr "predicable" "yes")
1009 (set_attr "type" "adc_reg,adc_imm,alu_shift_imm")]
1010 )
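;; The last alternative above handles a zero first operand (constraint Pz)
;; without needing RSC, which does not exist in Thumb-2:
;;	sbc	Rd, Rm, Rm, lsl #1
;; computes Rm - 2*Rm - borrow = -Rm - borrow, i.e. 0 - Rm with the borrow
;; folded in.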
1011
1012 (define_insn "*subsi3_carryin_const"
1013 [(set (match_operand:SI 0 "s_register_operand" "=r")
1014 (minus:SI (plus:SI
1015 (match_operand:SI 1 "s_register_operand" "r")
1016 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1017 (match_operand:SI 3 "arm_borrow_operation" "")))]
1018 "TARGET_32BIT"
1019 "sbc\\t%0, %1, #%n2"
1020 [(set_attr "conds" "use")
1021 (set_attr "type" "adc_imm")]
1022 )
1023
1024 (define_insn "*subsi3_carryin_const0"
1025 [(set (match_operand:SI 0 "s_register_operand" "=r")
1026 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1027 (match_operand:SI 2 "arm_borrow_operation" "")))]
1028 "TARGET_32BIT"
1029 "sbc\\t%0, %1, #0"
1030 [(set_attr "conds" "use")
1031 (set_attr "type" "adc_imm")]
1032 )
1033
1034 (define_insn "*subsi3_carryin_shift"
1035 [(set (match_operand:SI 0 "s_register_operand" "=r")
1036 (minus:SI (minus:SI
1037 (match_operand:SI 1 "s_register_operand" "r")
1038 (match_operator:SI 2 "shift_operator"
1039 [(match_operand:SI 3 "s_register_operand" "r")
1040 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
1041 (match_operand:SI 5 "arm_borrow_operation" "")))]
1042 "TARGET_32BIT"
1043 "sbc%?\\t%0, %1, %3%S2"
1044 [(set_attr "conds" "use")
1045 (set_attr "predicable" "yes")
1046 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1047 (const_string "alu_shift_imm")
1048 (const_string "alu_shift_reg")))]
1049 )
1050
1051 (define_insn "*subsi3_carryin_shift_alt"
1052 [(set (match_operand:SI 0 "s_register_operand" "=r")
1053 (minus:SI (minus:SI
1054 (match_operand:SI 1 "s_register_operand" "r")
1055 (match_operand:SI 5 "arm_borrow_operation" ""))
1056 (match_operator:SI 2 "shift_operator"
1057 [(match_operand:SI 3 "s_register_operand" "r")
1058 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
1059 "TARGET_32BIT"
1060 "sbc%?\\t%0, %1, %3%S2"
1061 [(set_attr "conds" "use")
1062 (set_attr "predicable" "yes")
1063 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1064 (const_string "alu_shift_imm")
1065 (const_string "alu_shift_reg")))]
1066 )
1067
1068 (define_insn "*rsbsi3_carryin_shift"
1069 [(set (match_operand:SI 0 "s_register_operand" "=r")
1070 (minus:SI (minus:SI
1071 (match_operator:SI 2 "shift_operator"
1072 [(match_operand:SI 3 "s_register_operand" "r")
1073 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1074 (match_operand:SI 1 "s_register_operand" "r"))
1075 (match_operand:SI 5 "arm_borrow_operation" "")))]
1076 "TARGET_ARM"
1077 "rsc%?\\t%0, %1, %3%S2"
1078 [(set_attr "conds" "use")
1079 (set_attr "predicable" "yes")
1080 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1081 (const_string "alu_shift_imm")
1082 (const_string "alu_shift_reg")))]
1083 )
1084
1085 (define_insn "*rsbsi3_carryin_shift_alt"
1086 [(set (match_operand:SI 0 "s_register_operand" "=r")
1087 (minus:SI (minus:SI
1088 (match_operator:SI 2 "shift_operator"
1089 [(match_operand:SI 3 "s_register_operand" "r")
1090 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1091 (match_operand:SI 5 "arm_borrow_operation" ""))
1092 (match_operand:SI 1 "s_register_operand" "r")))]
1093 "TARGET_ARM"
1094 "rsc%?\\t%0, %1, %3%S2"
1095 [(set_attr "conds" "use")
1096 (set_attr "predicable" "yes")
1097 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1098 (const_string "alu_shift_imm")
1099 (const_string "alu_shift_reg")))]
1100 )
1101
1102 ; Transform ((x << y) - 1) to ~(~(x - 1) << y), where x is a constant.
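; A quick check of the identity (two's complement, where ~a = -a - 1):
;   ~(x - 1) = -x, so ~(x - 1) << y = -(x << y),
;   and ~(-(x << y)) = (x << y) - 1.
; For example x = 3, y = 1: (3 << 1) - 1 = 5 and ~(~2 << 1) = ~(-6) = 5.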
1103 (define_split
1104 [(set (match_operand:SI 0 "s_register_operand" "")
1105 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1106 (match_operand:SI 2 "s_register_operand" ""))
1107 (const_int -1)))
1108 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1109 "TARGET_32BIT"
1110 [(set (match_dup 3) (match_dup 1))
1111 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1112 "
1113 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1114 ")
1115
1116 (define_expand "addsf3"
1117 [(set (match_operand:SF 0 "s_register_operand")
1118 (plus:SF (match_operand:SF 1 "s_register_operand")
1119 (match_operand:SF 2 "s_register_operand")))]
1120 "TARGET_32BIT && TARGET_HARD_FLOAT"
1121 "
1122 ")
1123
1124 (define_expand "adddf3"
1125 [(set (match_operand:DF 0 "s_register_operand")
1126 (plus:DF (match_operand:DF 1 "s_register_operand")
1127 (match_operand:DF 2 "s_register_operand")))]
1128 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1129 "
1130 ")
1131
1132 (define_expand "subdi3"
1133 [(parallel
1134 [(set (match_operand:DI 0 "s_register_operand")
1135 (minus:DI (match_operand:DI 1 "reg_or_int_operand")
1136 (match_operand:DI 2 "s_register_operand")))
1137 (clobber (reg:CC CC_REGNUM))])]
1138 "TARGET_EITHER"
1139 "
1140 if (TARGET_THUMB1)
1141 {
1142 if (!REG_P (operands[1]))
1143 operands[1] = force_reg (DImode, operands[1]);
1144 }
1145 else
1146 {
1147 rtx lo_result, hi_result, lo_dest, hi_dest;
1148 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1149 rtx condition;
1150
1151 /* Since operands[1] may be an integer, pass it second, so that
1152 any necessary simplifications will be done on the decomposed
1153 constant. */
1154 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1155 &lo_op1, &hi_op1);
1156 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
1157 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
1158
1159 if (!arm_rhs_operand (lo_op1, SImode))
1160 lo_op1 = force_reg (SImode, lo_op1);
1161
1162 if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
1163 || !arm_rhs_operand (hi_op1, SImode))
1164 hi_op1 = force_reg (SImode, hi_op1);
1165
1166 rtx cc_reg;
1167 if (lo_op1 == const0_rtx)
1168 {
1169 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1170 emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
1171 }
1172 else if (CONST_INT_P (lo_op1))
1173 {
1174 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1175 emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
1176 GEN_INT (~UINTVAL (lo_op1))));
1177 }
1178 else
1179 {
1180 cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
1181 emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
1182 }
1183
1184 condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);
1185
1186 if (hi_op1 == const0_rtx)
1187 emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
1188 else
1189 emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));
1190
1191 if (lo_result != lo_dest)
1192 emit_move_insn (lo_result, lo_dest);
1193
1194 if (hi_result != hi_dest)
1195 emit_move_insn (hi_result, hi_dest);
1196
1197 DONE;
1198 }
1199 "
1200 )
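;; For example (illustrative, with placeholder register names): for "1 - x"
;; on a 64-bit x the constant can stay in the instruction:
;;	rsbs	lo_dest, lo_x, #1	@ 1 - lo, leaves the borrow info in C
;;	rsc	hi_dest, hi_x, #0	@ ARM state; Thumb-2 uses an SBC form
;; whereas the general register-register case is expanded via subsi3_compare
;; followed by subsi3_carryin.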
1201
1202 (define_expand "subsi3"
1203 [(set (match_operand:SI 0 "s_register_operand")
1204 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1205 (match_operand:SI 2 "s_register_operand")))]
1206 "TARGET_EITHER"
1207 "
1208 if (CONST_INT_P (operands[1]))
1209 {
1210 if (TARGET_32BIT)
1211 {
1212 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1213 operands[1] = force_reg (SImode, operands[1]);
1214 else
1215 {
1216 arm_split_constant (MINUS, SImode, NULL_RTX,
1217 INTVAL (operands[1]), operands[0],
1218 operands[2],
1219 optimize && can_create_pseudo_p ());
1220 DONE;
1221 }
1222 }
1223 else /* TARGET_THUMB1 */
1224 operands[1] = force_reg (SImode, operands[1]);
1225 }
1226 "
1227 )
1228
1229 ; ??? Check Thumb-2 split length
1230 (define_insn_and_split "*arm_subsi3_insn"
1231 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
1232 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
1233 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
1234 "TARGET_32BIT"
1235 "@
1236 sub%?\\t%0, %1, %2
1237 sub%?\\t%0, %2
1238 sub%?\\t%0, %1, %2
1239 rsb%?\\t%0, %2, %1
1240 rsb%?\\t%0, %2, %1
1241 sub%?\\t%0, %1, %2
1242 sub%?\\t%0, %1, %2
1243 sub%?\\t%0, %1, %2
1244 #"
1245 "&& (CONST_INT_P (operands[1])
1246 && !const_ok_for_arm (INTVAL (operands[1])))"
1247 [(clobber (const_int 0))]
1248 "
1249 arm_split_constant (MINUS, SImode, curr_insn,
1250 INTVAL (operands[1]), operands[0], operands[2], 0);
1251 DONE;
1252 "
1253 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
1254 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
1255 (set_attr "predicable" "yes")
1256 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
1257 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
1258 )
1259
1260 (define_peephole2
1261 [(match_scratch:SI 3 "r")
1262 (set (match_operand:SI 0 "arm_general_register_operand" "")
1263 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1264 (match_operand:SI 2 "arm_general_register_operand" "")))]
1265 "TARGET_32BIT
1266 && !const_ok_for_arm (INTVAL (operands[1]))
1267 && const_ok_for_arm (~INTVAL (operands[1]))"
1268 [(set (match_dup 3) (match_dup 1))
1269 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1270 ""
1271 )
1272
1273 (define_insn "subsi3_compare0"
1274 [(set (reg:CC_NOOV CC_REGNUM)
1275 (compare:CC_NOOV
1276 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1277 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1278 (const_int 0)))
1279 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1280 (minus:SI (match_dup 1) (match_dup 2)))]
1281 "TARGET_32BIT"
1282 "@
1283 subs%?\\t%0, %1, %2
1284 subs%?\\t%0, %1, %2
1285 rsbs%?\\t%0, %2, %1"
1286 [(set_attr "conds" "set")
1287 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
1288 )
1289
1290 (define_insn "subsi3_compare"
1291 [(set (reg:CC CC_REGNUM)
1292 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1293 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1294 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1295 (minus:SI (match_dup 1) (match_dup 2)))]
1296 "TARGET_32BIT"
1297 "@
1298 subs%?\\t%0, %1, %2
1299 subs%?\\t%0, %1, %2
1300 rsbs%?\\t%0, %2, %1"
1301 [(set_attr "conds" "set")
1302 (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
1303 )
1304
1305 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
1306 ;; rather than (0 cmp reg). This gives the same results for unsigned
1307 ;; and equality compares, which is what we mostly need here.
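;; Roughly: for the "rsbs %0, %2, %1" below, imm - reg == ~reg - ~imm
;; (mod 2^32), and imm - reg borrows exactly when ~reg - ~imm does, so the
;; C and Z flags of the instruction match a compare of ~reg against ~imm.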
1308 (define_insn "rsb_imm_compare"
1309 [(set (reg:CC_RSB CC_REGNUM)
1310 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
1311 (match_operand 3 "const_int_operand" "")))
1312 (set (match_operand:SI 0 "s_register_operand" "=r")
1313 (minus:SI (match_operand 1 "arm_immediate_operand" "I")
1314 (match_dup 2)))]
1315 "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
1316 "rsbs\\t%0, %2, %1"
1317 [(set_attr "conds" "set")
1318 (set_attr "type" "alus_imm")]
1319 )
1320
1321 (define_expand "subsf3"
1322 [(set (match_operand:SF 0 "s_register_operand")
1323 (minus:SF (match_operand:SF 1 "s_register_operand")
1324 (match_operand:SF 2 "s_register_operand")))]
1325 "TARGET_32BIT && TARGET_HARD_FLOAT"
1326 "
1327 ")
1328
1329 (define_expand "subdf3"
1330 [(set (match_operand:DF 0 "s_register_operand")
1331 (minus:DF (match_operand:DF 1 "s_register_operand")
1332 (match_operand:DF 2 "s_register_operand")))]
1333 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1334 "
1335 ")
1336
1337 \f
1338 ;; Multiplication insns
1339
1340 (define_expand "mulhi3"
1341 [(set (match_operand:HI 0 "s_register_operand")
1342 (mult:HI (match_operand:HI 1 "s_register_operand")
1343 (match_operand:HI 2 "s_register_operand")))]
1344 "TARGET_DSP_MULTIPLY"
1345 "
1346 {
1347 rtx result = gen_reg_rtx (SImode);
1348 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
1349 emit_move_insn (operands[0], gen_lowpart (HImode, result));
1350 DONE;
1351 }"
1352 )
1353
1354 (define_expand "mulsi3"
1355 [(set (match_operand:SI 0 "s_register_operand")
1356 (mult:SI (match_operand:SI 2 "s_register_operand")
1357 (match_operand:SI 1 "s_register_operand")))]
1358 "TARGET_EITHER"
1359 ""
1360 )
1361
1362 ;; Use `&' and then `0' to prevent operands 0 and 2 from being the same
1363 (define_insn "*mul"
1364 [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
1365 (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
1366 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
1367 "TARGET_32BIT"
1368 "mul%?\\t%0, %2, %1"
1369 [(set_attr "type" "mul")
1370 (set_attr "predicable" "yes")
1371 (set_attr "arch" "t2,v6,nov6,nov6")
1372 (set_attr "length" "4")
1373 (set_attr "predicable_short_it" "yes,no,*,*")]
1374 )
1375
1376 ;; MLA and MLS instructions.  Use operand 1 for the accumulator to prefer
1377 ;; reusing the same register.
1378
1379 (define_insn "*mla"
1380 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
1381 (plus:SI
1382 (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
1383 (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
1384 (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
1385 "TARGET_32BIT"
1386 "mla%?\\t%0, %3, %2, %1"
1387 [(set_attr "type" "mla")
1388 (set_attr "predicable" "yes")
1389 (set_attr "arch" "v6,nov6,nov6,nov6")]
1390 )
1391
1392 (define_insn "*mls"
1393 [(set (match_operand:SI 0 "s_register_operand" "=r")
1394 (minus:SI
1395 (match_operand:SI 1 "s_register_operand" "r")
1396 (mult:SI (match_operand:SI 3 "s_register_operand" "r")
1397 (match_operand:SI 2 "s_register_operand" "r"))))]
1398 "TARGET_32BIT && arm_arch_thumb2"
1399 "mls%?\\t%0, %3, %2, %1"
1400 [(set_attr "type" "mla")
1401 (set_attr "predicable" "yes")]
1402 )
1403
1404 (define_insn "*mulsi3_compare0"
1405 [(set (reg:CC_NOOV CC_REGNUM)
1406 (compare:CC_NOOV (mult:SI
1407 (match_operand:SI 2 "s_register_operand" "r,r")
1408 (match_operand:SI 1 "s_register_operand" "%0,r"))
1409 (const_int 0)))
1410 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1411 (mult:SI (match_dup 2) (match_dup 1)))]
1412 "TARGET_ARM && !arm_arch6"
1413 "muls%?\\t%0, %2, %1"
1414 [(set_attr "conds" "set")
1415 (set_attr "type" "muls")]
1416 )
1417
1418 (define_insn "*mulsi3_compare0_v6"
1419 [(set (reg:CC_NOOV CC_REGNUM)
1420 (compare:CC_NOOV (mult:SI
1421 (match_operand:SI 2 "s_register_operand" "r")
1422 (match_operand:SI 1 "s_register_operand" "r"))
1423 (const_int 0)))
1424 (set (match_operand:SI 0 "s_register_operand" "=r")
1425 (mult:SI (match_dup 2) (match_dup 1)))]
1426 "TARGET_ARM && arm_arch6 && optimize_size"
1427 "muls%?\\t%0, %2, %1"
1428 [(set_attr "conds" "set")
1429 (set_attr "type" "muls")]
1430 )
1431
1432 (define_insn "*mulsi_compare0_scratch"
1433 [(set (reg:CC_NOOV CC_REGNUM)
1434 (compare:CC_NOOV (mult:SI
1435 (match_operand:SI 2 "s_register_operand" "r,r")
1436 (match_operand:SI 1 "s_register_operand" "%0,r"))
1437 (const_int 0)))
1438 (clobber (match_scratch:SI 0 "=&r,&r"))]
1439 "TARGET_ARM && !arm_arch6"
1440 "muls%?\\t%0, %2, %1"
1441 [(set_attr "conds" "set")
1442 (set_attr "type" "muls")]
1443 )
1444
1445 (define_insn "*mulsi_compare0_scratch_v6"
1446 [(set (reg:CC_NOOV CC_REGNUM)
1447 (compare:CC_NOOV (mult:SI
1448 (match_operand:SI 2 "s_register_operand" "r")
1449 (match_operand:SI 1 "s_register_operand" "r"))
1450 (const_int 0)))
1451 (clobber (match_scratch:SI 0 "=r"))]
1452 "TARGET_ARM && arm_arch6 && optimize_size"
1453 "muls%?\\t%0, %2, %1"
1454 [(set_attr "conds" "set")
1455 (set_attr "type" "muls")]
1456 )
1457
1458 (define_insn "*mulsi3addsi_compare0"
1459 [(set (reg:CC_NOOV CC_REGNUM)
1460 (compare:CC_NOOV
1461 (plus:SI (mult:SI
1462 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1463 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1464 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1465 (const_int 0)))
1466 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1467 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1468 (match_dup 3)))]
1469 "TARGET_ARM && arm_arch6"
1470 "mlas%?\\t%0, %2, %1, %3"
1471 [(set_attr "conds" "set")
1472 (set_attr "type" "mlas")]
1473 )
1474
1475 (define_insn "*mulsi3addsi_compare0_v6"
1476 [(set (reg:CC_NOOV CC_REGNUM)
1477 (compare:CC_NOOV
1478 (plus:SI (mult:SI
1479 (match_operand:SI 2 "s_register_operand" "r")
1480 (match_operand:SI 1 "s_register_operand" "r"))
1481 (match_operand:SI 3 "s_register_operand" "r"))
1482 (const_int 0)))
1483 (set (match_operand:SI 0 "s_register_operand" "=r")
1484 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1485 (match_dup 3)))]
1486 "TARGET_ARM && arm_arch6 && optimize_size"
1487 "mlas%?\\t%0, %2, %1, %3"
1488 [(set_attr "conds" "set")
1489 (set_attr "type" "mlas")]
1490 )
1491
1492 (define_insn "*mulsi3addsi_compare0_scratch"
1493 [(set (reg:CC_NOOV CC_REGNUM)
1494 (compare:CC_NOOV
1495 (plus:SI (mult:SI
1496 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1497 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1498 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1499 (const_int 0)))
1500 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1501 "TARGET_ARM && !arm_arch6"
1502 "mlas%?\\t%0, %2, %1, %3"
1503 [(set_attr "conds" "set")
1504 (set_attr "type" "mlas")]
1505 )
1506
1507 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1508 [(set (reg:CC_NOOV CC_REGNUM)
1509 (compare:CC_NOOV
1510 (plus:SI (mult:SI
1511 (match_operand:SI 2 "s_register_operand" "r")
1512 (match_operand:SI 1 "s_register_operand" "r"))
1513 (match_operand:SI 3 "s_register_operand" "r"))
1514 (const_int 0)))
1515 (clobber (match_scratch:SI 0 "=r"))]
1516 "TARGET_ARM && arm_arch6 && optimize_size"
1517 "mlas%?\\t%0, %2, %1, %3"
1518 [(set_attr "conds" "set")
1519 (set_attr "type" "mlas")]
1520 )
1521
1522 ;; 32x32->64 widening multiply.
1523 ;; The only difference between the v3-5 and v6+ versions is the requirement
1524 ;; that the output does not overlap with either input.
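;; A source-level sketch (illustrative only):
;;   int a, b;  unsigned int ua, ub;
;;   long long p = (long long) a * b;                     /* smull */
;;   unsigned long long q = (unsigned long long) ua * ub; /* umull */
;; The expander below splits the DImode destination into its two SImode
;; halves.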
1525
1526 (define_expand "<Us>mulsidi3"
1527 [(set (match_operand:DI 0 "s_register_operand")
1528 (mult:DI
1529 (SE:DI (match_operand:SI 1 "s_register_operand"))
1530 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
1531 "TARGET_32BIT"
1532 {
1533 emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
1534 gen_highpart (SImode, operands[0]),
1535 operands[1], operands[2]));
1536 DONE;
1537 }
1538 )
1539
1540 (define_insn "<US>mull"
1541 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
1542 (mult:SI
1543 (match_operand:SI 2 "s_register_operand" "%r,r")
1544 (match_operand:SI 3 "s_register_operand" "r,r")))
1545 (set (match_operand:SI 1 "s_register_operand" "=r,&r")
1546 (truncate:SI
1547 (lshiftrt:DI
1548 (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
1549 (const_int 32))))]
1550 "TARGET_32BIT"
1551 "<US>mull%?\\t%0, %1, %2, %3"
1552 [(set_attr "type" "umull")
1553 (set_attr "predicable" "yes")
1554 (set_attr "arch" "v6,nov6")]
1555 )
1556
1557 (define_expand "<Us>maddsidi4"
1558 [(set (match_operand:DI 0 "s_register_operand")
1559 (plus:DI
1560 (mult:DI
1561 (SE:DI (match_operand:SI 1 "s_register_operand"))
1562 (SE:DI (match_operand:SI 2 "s_register_operand")))
1563 (match_operand:DI 3 "s_register_operand")))]
1564 "TARGET_32BIT"
1565 {
1566 emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
1567 gen_lowpart (SImode, operands[3]),
1568 gen_highpart (SImode, operands[0]),
1569 gen_highpart (SImode, operands[3]),
1570 operands[1], operands[2]));
1571 DONE;
1572 }
1573 )
1574
1575 (define_insn "<US>mlal"
1576 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
1577 (plus:SI
1578 (mult:SI
1579 (match_operand:SI 4 "s_register_operand" "%r,r")
1580 (match_operand:SI 5 "s_register_operand" "r,r"))
1581 (match_operand:SI 1 "s_register_operand" "0,0")))
1582 (set (match_operand:SI 2 "s_register_operand" "=r,&r")
1583 (plus:SI
1584 (truncate:SI
1585 (lshiftrt:DI
1586 (plus:DI
1587 (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
1588 (zero_extend:DI (match_dup 1)))
1589 (const_int 32)))
1590 (match_operand:SI 3 "s_register_operand" "2,2")))]
1591 "TARGET_32BIT"
1592 "<US>mlal%?\\t%0, %2, %4, %5"
1593 [(set_attr "type" "umlal")
1594 (set_attr "predicable" "yes")
1595 (set_attr "arch" "v6,nov6")]
1596 )
1597
1598 (define_expand "<US>mulsi3_highpart"
1599 [(parallel
1600 [(set (match_operand:SI 0 "s_register_operand")
1601 (truncate:SI
1602 (lshiftrt:DI
1603 (mult:DI
1604 (SE:DI (match_operand:SI 1 "s_register_operand"))
1605 (SE:DI (match_operand:SI 2 "s_register_operand")))
1606 (const_int 32))))
1607 (clobber (match_scratch:SI 3 ""))])]
1608 "TARGET_32BIT"
1609 ""
1610 )
1611
1612 (define_insn "*<US>mull_high"
1613 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
1614 (truncate:SI
1615 (lshiftrt:DI
1616 (mult:DI
1617 (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
1618 (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
1619 (const_int 32))))
1620 (clobber (match_scratch:SI 3 "=r,&r,&r"))]
1621 "TARGET_32BIT"
1622 "<US>mull%?\\t%3, %0, %2, %1"
1623 [(set_attr "type" "umull")
1624 (set_attr "predicable" "yes")
1625 (set_attr "arch" "v6,nov6,nov6")]
1626 )
1627
1628 (define_insn "mulhisi3"
1629 [(set (match_operand:SI 0 "s_register_operand" "=r")
1630 (mult:SI (sign_extend:SI
1631 (match_operand:HI 1 "s_register_operand" "%r"))
1632 (sign_extend:SI
1633 (match_operand:HI 2 "s_register_operand" "r"))))]
1634 "TARGET_DSP_MULTIPLY"
1635 "smulbb%?\\t%0, %1, %2"
1636 [(set_attr "type" "smulxy")
1637 (set_attr "predicable" "yes")]
1638 )
1639
1640 (define_insn "*mulhisi3tb"
1641 [(set (match_operand:SI 0 "s_register_operand" "=r")
1642 (mult:SI (ashiftrt:SI
1643 (match_operand:SI 1 "s_register_operand" "r")
1644 (const_int 16))
1645 (sign_extend:SI
1646 (match_operand:HI 2 "s_register_operand" "r"))))]
1647 "TARGET_DSP_MULTIPLY"
1648 "smultb%?\\t%0, %1, %2"
1649 [(set_attr "type" "smulxy")
1650 (set_attr "predicable" "yes")]
1651 )
1652
1653 (define_insn "*mulhisi3bt"
1654 [(set (match_operand:SI 0 "s_register_operand" "=r")
1655 (mult:SI (sign_extend:SI
1656 (match_operand:HI 1 "s_register_operand" "r"))
1657 (ashiftrt:SI
1658 (match_operand:SI 2 "s_register_operand" "r")
1659 (const_int 16))))]
1660 "TARGET_DSP_MULTIPLY"
1661 "smulbt%?\\t%0, %1, %2"
1662 [(set_attr "type" "smulxy")
1663 (set_attr "predicable" "yes")]
1664 )
1665
1666 (define_insn "*mulhisi3tt"
1667 [(set (match_operand:SI 0 "s_register_operand" "=r")
1668 (mult:SI (ashiftrt:SI
1669 (match_operand:SI 1 "s_register_operand" "r")
1670 (const_int 16))
1671 (ashiftrt:SI
1672 (match_operand:SI 2 "s_register_operand" "r")
1673 (const_int 16))))]
1674 "TARGET_DSP_MULTIPLY"
1675 "smultt%?\\t%0, %1, %2"
1676 [(set_attr "type" "smulxy")
1677 (set_attr "predicable" "yes")]
1678 )
1679
1680 (define_insn "maddhisi4"
1681 [(set (match_operand:SI 0 "s_register_operand" "=r")
1682 (plus:SI (mult:SI (sign_extend:SI
1683 (match_operand:HI 1 "s_register_operand" "r"))
1684 (sign_extend:SI
1685 (match_operand:HI 2 "s_register_operand" "r")))
1686 (match_operand:SI 3 "s_register_operand" "r")))]
1687 "TARGET_DSP_MULTIPLY"
1688 "smlabb%?\\t%0, %1, %2, %3"
1689 [(set_attr "type" "smlaxy")
1690 (set_attr "predicable" "yes")]
1691 )
1692
1693 ;; Note: there is no maddhisi4bt pattern because this one is the canonical form
1694 (define_insn "*maddhisi4tb"
1695 [(set (match_operand:SI 0 "s_register_operand" "=r")
1696 (plus:SI (mult:SI (ashiftrt:SI
1697 (match_operand:SI 1 "s_register_operand" "r")
1698 (const_int 16))
1699 (sign_extend:SI
1700 (match_operand:HI 2 "s_register_operand" "r")))
1701 (match_operand:SI 3 "s_register_operand" "r")))]
1702 "TARGET_DSP_MULTIPLY"
1703 "smlatb%?\\t%0, %1, %2, %3"
1704 [(set_attr "type" "smlaxy")
1705 (set_attr "predicable" "yes")]
1706 )
1707
1708 (define_insn "*maddhisi4tt"
1709 [(set (match_operand:SI 0 "s_register_operand" "=r")
1710 (plus:SI (mult:SI (ashiftrt:SI
1711 (match_operand:SI 1 "s_register_operand" "r")
1712 (const_int 16))
1713 (ashiftrt:SI
1714 (match_operand:SI 2 "s_register_operand" "r")
1715 (const_int 16)))
1716 (match_operand:SI 3 "s_register_operand" "r")))]
1717 "TARGET_DSP_MULTIPLY"
1718 "smlatt%?\\t%0, %1, %2, %3"
1719 [(set_attr "type" "smlaxy")
1720 (set_attr "predicable" "yes")]
1721 )
1722
1723 (define_insn "maddhidi4"
1724 [(set (match_operand:DI 0 "s_register_operand" "=r")
1725 (plus:DI
1726 (mult:DI (sign_extend:DI
1727 (match_operand:HI 1 "s_register_operand" "r"))
1728 (sign_extend:DI
1729 (match_operand:HI 2 "s_register_operand" "r")))
1730 (match_operand:DI 3 "s_register_operand" "0")))]
1731 "TARGET_DSP_MULTIPLY"
1732 "smlalbb%?\\t%Q0, %R0, %1, %2"
1733 [(set_attr "type" "smlalxy")
1734 (set_attr "predicable" "yes")])
1735
1736 ;; Note: there is no maddhidi4bt pattern because this one is the canonical form
1737 (define_insn "*maddhidi4tb"
1738 [(set (match_operand:DI 0 "s_register_operand" "=r")
1739 (plus:DI
1740 (mult:DI (sign_extend:DI
1741 (ashiftrt:SI
1742 (match_operand:SI 1 "s_register_operand" "r")
1743 (const_int 16)))
1744 (sign_extend:DI
1745 (match_operand:HI 2 "s_register_operand" "r")))
1746 (match_operand:DI 3 "s_register_operand" "0")))]
1747 "TARGET_DSP_MULTIPLY"
1748 "smlaltb%?\\t%Q0, %R0, %1, %2"
1749 [(set_attr "type" "smlalxy")
1750 (set_attr "predicable" "yes")])
1751
1752 (define_insn "*maddhidi4tt"
1753 [(set (match_operand:DI 0 "s_register_operand" "=r")
1754 (plus:DI
1755 (mult:DI (sign_extend:DI
1756 (ashiftrt:SI
1757 (match_operand:SI 1 "s_register_operand" "r")
1758 (const_int 16)))
1759 (sign_extend:DI
1760 (ashiftrt:SI
1761 (match_operand:SI 2 "s_register_operand" "r")
1762 (const_int 16))))
1763 (match_operand:DI 3 "s_register_operand" "0")))]
1764 "TARGET_DSP_MULTIPLY"
1765 "smlaltt%?\\t%Q0, %R0, %1, %2"
1766 [(set_attr "type" "smlalxy")
1767 (set_attr "predicable" "yes")])
1768
1769 (define_expand "mulsf3"
1770 [(set (match_operand:SF 0 "s_register_operand")
1771 (mult:SF (match_operand:SF 1 "s_register_operand")
1772 (match_operand:SF 2 "s_register_operand")))]
1773 "TARGET_32BIT && TARGET_HARD_FLOAT"
1774 "
1775 ")
1776
1777 (define_expand "muldf3"
1778 [(set (match_operand:DF 0 "s_register_operand")
1779 (mult:DF (match_operand:DF 1 "s_register_operand")
1780 (match_operand:DF 2 "s_register_operand")))]
1781 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1782 "
1783 ")
1784 \f
1785 ;; Division insns
1786
1787 (define_expand "divsf3"
1788 [(set (match_operand:SF 0 "s_register_operand")
1789 (div:SF (match_operand:SF 1 "s_register_operand")
1790 (match_operand:SF 2 "s_register_operand")))]
1791 "TARGET_32BIT && TARGET_HARD_FLOAT"
1792 "")
1793
1794 (define_expand "divdf3"
1795 [(set (match_operand:DF 0 "s_register_operand")
1796 (div:DF (match_operand:DF 1 "s_register_operand")
1797 (match_operand:DF 2 "s_register_operand")))]
1798 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
1799 "")
1800 \f
1801
1802 ; Expand logical operations. The mid-end expander does not split off memory
1803 ; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
1804 ; So an explicit expander is needed to generate better code.
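; As a purely illustrative example, ANDing a DImode register pair with the
; constant 0xffffffff expands here into two SImode operations;
; simplify_gen_binary reduces the low word to a plain copy and the high word
; to a move of zero, so no DImode AND survives into the later passes.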
1805
1806 (define_expand "<LOGICAL:optab>di3"
1807 [(set (match_operand:DI 0 "s_register_operand")
1808 (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
1809 (match_operand:DI 2 "arm_<optab>di_operand")))]
1810 "TARGET_32BIT"
1811 {
1812 rtx low = simplify_gen_binary (<CODE>, SImode,
1813 gen_lowpart (SImode, operands[1]),
1814 gen_lowpart (SImode, operands[2]));
1815 rtx high = simplify_gen_binary (<CODE>, SImode,
1816 gen_highpart (SImode, operands[1]),
1817 gen_highpart_mode (SImode, DImode,
1818 operands[2]));
1819
1820 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
1821 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
1822 DONE;
1823 }
1824 )
1825
1826 (define_expand "one_cmpldi2"
1827 [(set (match_operand:DI 0 "s_register_operand")
1828 (not:DI (match_operand:DI 1 "s_register_operand")))]
1829 "TARGET_32BIT"
1830 {
1831 rtx low = simplify_gen_unary (NOT, SImode,
1832 gen_lowpart (SImode, operands[1]),
1833 SImode);
1834 rtx high = simplify_gen_unary (NOT, SImode,
1835 gen_highpart_mode (SImode, DImode,
1836 operands[1]),
1837 SImode);
1838
1839 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
1840 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
1841 DONE;
1842 }
1843 )
1844
1845 ;; Split DImode and, ior, xor operations. Simply perform the logical
1846 ;; operation on the upper and lower halves of the registers.
1847 ;; This is needed for atomic operations in arm_split_atomic_op.
1848 ;; Avoid splitting IWMMXT instructions.
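;; For example (register assignment purely illustrative), after reload an
;; IOR of a DImode value in {r0, r1} with one in {r2, r3}, writing back to
;; {r0, r1}, becomes
;;	orr	r0, r0, r2
;;	orr	r1, r1, r3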
1849 (define_split
1850 [(set (match_operand:DI 0 "s_register_operand" "")
1851 (match_operator:DI 6 "logical_binary_operator"
1852 [(match_operand:DI 1 "s_register_operand" "")
1853 (match_operand:DI 2 "s_register_operand" "")]))]
1854 "TARGET_32BIT && reload_completed
1855 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1856 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1857 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1858 "
1859 {
1860 operands[3] = gen_highpart (SImode, operands[0]);
1861 operands[0] = gen_lowpart (SImode, operands[0]);
1862 operands[4] = gen_highpart (SImode, operands[1]);
1863 operands[1] = gen_lowpart (SImode, operands[1]);
1864 operands[5] = gen_highpart (SImode, operands[2]);
1865 operands[2] = gen_lowpart (SImode, operands[2]);
1866 }"
1867 )
1868
1869 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
1870 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
1871 (define_split
1872 [(set (match_operand:DI 0 "s_register_operand")
1873 (not:DI (match_operand:DI 1 "s_register_operand")))]
1874 "TARGET_32BIT"
1875 [(set (match_dup 0) (not:SI (match_dup 1)))
1876 (set (match_dup 2) (not:SI (match_dup 3)))]
1877 "
1878 {
1879 operands[2] = gen_highpart (SImode, operands[0]);
1880 operands[0] = gen_lowpart (SImode, operands[0]);
1881 operands[3] = gen_highpart (SImode, operands[1]);
1882 operands[1] = gen_lowpart (SImode, operands[1]);
1883 }"
1884 )
1885
1886 (define_expand "andsi3"
1887 [(set (match_operand:SI 0 "s_register_operand")
1888 (and:SI (match_operand:SI 1 "s_register_operand")
1889 (match_operand:SI 2 "reg_or_int_operand")))]
1890 "TARGET_EITHER"
1891 "
1892 if (TARGET_32BIT)
1893 {
1894 if (CONST_INT_P (operands[2]))
1895 {
1896 if (INTVAL (operands[2]) == 255 && arm_arch6)
1897 {
1898 operands[1] = convert_to_mode (QImode, operands[1], 1);
1899 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
1900 operands[1]));
1901 DONE;
1902 }
1903 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
1904 operands[2] = force_reg (SImode, operands[2]);
1905 else
1906 {
1907 arm_split_constant (AND, SImode, NULL_RTX,
1908 INTVAL (operands[2]), operands[0],
1909 operands[1],
1910 optimize && can_create_pseudo_p ());
1911
1912 DONE;
1913 }
1914 }
1915 }
1916 else /* TARGET_THUMB1 */
1917 {
1918 if (!CONST_INT_P (operands[2]))
1919 {
1920 rtx tmp = force_reg (SImode, operands[2]);
1921 if (rtx_equal_p (operands[0], operands[1]))
1922 operands[2] = tmp;
1923 else
1924 {
1925 operands[2] = operands[1];
1926 operands[1] = tmp;
1927 }
1928 }
1929 else
1930 {
1931 int i;
1932
1933 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1934 {
1935 operands[2] = force_reg (SImode,
1936 GEN_INT (~INTVAL (operands[2])));
1937
1938 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
1939
1940 DONE;
1941 }
1942
1943 for (i = 9; i <= 31; i++)
1944 {
1945 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
1946 {
1947 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1948 const0_rtx));
1949 DONE;
1950 }
1951 else if ((HOST_WIDE_INT_1 << i) - 1
1952 == ~INTVAL (operands[2]))
1953 {
1954 rtx shift = GEN_INT (i);
1955 rtx reg = gen_reg_rtx (SImode);
1956
1957 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1958 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1959
1960 DONE;
1961 }
1962 }
1963
1964 operands[2] = force_reg (SImode, operands[2]);
1965 }
1966 }
1967 "
1968 )
1969
1970 ; ??? Check split length for Thumb-2
1971 (define_insn_and_split "*arm_andsi3_insn"
1972 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
1973 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
1974 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
1975 "TARGET_32BIT"
1976 "@
1977 and%?\\t%0, %1, %2
1978 and%?\\t%0, %1, %2
1979 bic%?\\t%0, %1, #%B2
1980 and%?\\t%0, %1, %2
1981 #"
1982 "TARGET_32BIT
1983 && CONST_INT_P (operands[2])
1984 && !(const_ok_for_arm (INTVAL (operands[2]))
1985 || const_ok_for_arm (~INTVAL (operands[2])))"
1986 [(clobber (const_int 0))]
1987 "
1988 arm_split_constant (AND, SImode, curr_insn,
1989 INTVAL (operands[2]), operands[0], operands[1], 0);
1990 DONE;
1991 "
1992 [(set_attr "length" "4,4,4,4,16")
1993 (set_attr "predicable" "yes")
1994 (set_attr "predicable_short_it" "no,yes,no,no,no")
1995 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
1996 )
1997
1998 (define_insn "*andsi3_compare0"
1999 [(set (reg:CC_NOOV CC_REGNUM)
2000 (compare:CC_NOOV
2001 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2002 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
2003 (const_int 0)))
2004 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2005 (and:SI (match_dup 1) (match_dup 2)))]
2006 "TARGET_32BIT"
2007 "@
2008 ands%?\\t%0, %1, %2
2009 bics%?\\t%0, %1, #%B2
2010 ands%?\\t%0, %1, %2"
2011 [(set_attr "conds" "set")
2012 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
2013 )
2014
2015 (define_insn "*andsi3_compare0_scratch"
2016 [(set (reg:CC_NOOV CC_REGNUM)
2017 (compare:CC_NOOV
2018 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
2019 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
2020 (const_int 0)))
2021 (clobber (match_scratch:SI 2 "=X,r,X"))]
2022 "TARGET_32BIT"
2023 "@
2024 tst%?\\t%0, %1
2025 bics%?\\t%2, %0, #%B1
2026 tst%?\\t%0, %1"
2027 [(set_attr "conds" "set")
2028 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
2029 )
2030
2031 (define_insn "*zeroextractsi_compare0_scratch"
2032 [(set (reg:CC_NOOV CC_REGNUM)
2033 (compare:CC_NOOV (zero_extract:SI
2034 (match_operand:SI 0 "s_register_operand" "r")
2035 (match_operand 1 "const_int_operand" "n")
2036 (match_operand 2 "const_int_operand" "n"))
2037 (const_int 0)))]
2038 "TARGET_32BIT
2039 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2040 && INTVAL (operands[1]) > 0
2041 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2042 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2043 "*
2044 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2045 << INTVAL (operands[2]));
2046 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2047 return \"\";
2048 "
2049 [(set_attr "conds" "set")
2050 (set_attr "predicable" "yes")
2051 (set_attr "type" "logics_imm")]
2052 )
2053
2054 (define_insn_and_split "*ne_zeroextractsi"
2055 [(set (match_operand:SI 0 "s_register_operand" "=r")
2056 (ne:SI (zero_extract:SI
2057 (match_operand:SI 1 "s_register_operand" "r")
2058 (match_operand:SI 2 "const_int_operand" "n")
2059 (match_operand:SI 3 "const_int_operand" "n"))
2060 (const_int 0)))
2061 (clobber (reg:CC CC_REGNUM))]
2062 "TARGET_32BIT
2063 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2064 && INTVAL (operands[2]) > 0
2065 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2066 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2067 "#"
2068 "TARGET_32BIT
2069 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2070 && INTVAL (operands[2]) > 0
2071 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2072 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2073 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2074 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2075 (const_int 0)))
2076 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2077 (set (match_dup 0)
2078 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2079 (match_dup 0) (const_int 1)))]
2080 "
2081 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2082 << INTVAL (operands[3]));
2083 "
2084 [(set_attr "conds" "clob")
2085 (set (attr "length")
2086 (if_then_else (eq_attr "is_thumb" "yes")
2087 (const_int 12)
2088 (const_int 8)))
2089 (set_attr "type" "multiple")]
2090 )
2091
2092 (define_insn_and_split "*ne_zeroextractsi_shifted"
2093 [(set (match_operand:SI 0 "s_register_operand" "=r")
2094 (ne:SI (zero_extract:SI
2095 (match_operand:SI 1 "s_register_operand" "r")
2096 (match_operand:SI 2 "const_int_operand" "n")
2097 (const_int 0))
2098 (const_int 0)))
2099 (clobber (reg:CC CC_REGNUM))]
2100 "TARGET_ARM"
2101 "#"
2102 "TARGET_ARM"
2103 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2104 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2105 (const_int 0)))
2106 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2107 (set (match_dup 0)
2108 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2109 (match_dup 0) (const_int 1)))]
2110 "
2111 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2112 "
2113 [(set_attr "conds" "clob")
2114 (set_attr "length" "8")
2115 (set_attr "type" "multiple")]
2116 )
2117
2118 (define_insn_and_split "*ite_ne_zeroextractsi"
2119 [(set (match_operand:SI 0 "s_register_operand" "=r")
2120 (if_then_else:SI (ne (zero_extract:SI
2121 (match_operand:SI 1 "s_register_operand" "r")
2122 (match_operand:SI 2 "const_int_operand" "n")
2123 (match_operand:SI 3 "const_int_operand" "n"))
2124 (const_int 0))
2125 (match_operand:SI 4 "arm_not_operand" "rIK")
2126 (const_int 0)))
2127 (clobber (reg:CC CC_REGNUM))]
2128 "TARGET_ARM
2129 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2130 && INTVAL (operands[2]) > 0
2131 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2132 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2133 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2134 "#"
2135 "TARGET_ARM
2136 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2137 && INTVAL (operands[2]) > 0
2138 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2139 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2140 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2141 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2142 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2143 (const_int 0)))
2144 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2145 (set (match_dup 0)
2146 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2147 (match_dup 0) (match_dup 4)))]
2148 "
2149 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2150 << INTVAL (operands[3]));
2151 "
2152 [(set_attr "conds" "clob")
2153 (set_attr "length" "8")
2154 (set_attr "type" "multiple")]
2155 )
2156
2157 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2158 [(set (match_operand:SI 0 "s_register_operand" "=r")
2159 (if_then_else:SI (ne (zero_extract:SI
2160 (match_operand:SI 1 "s_register_operand" "r")
2161 (match_operand:SI 2 "const_int_operand" "n")
2162 (const_int 0))
2163 (const_int 0))
2164 (match_operand:SI 3 "arm_not_operand" "rIK")
2165 (const_int 0)))
2166 (clobber (reg:CC CC_REGNUM))]
2167 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2168 "#"
2169 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2170 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2171 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2172 (const_int 0)))
2173 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2174 (set (match_dup 0)
2175 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2176 (match_dup 0) (match_dup 3)))]
2177 "
2178 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2179 "
2180 [(set_attr "conds" "clob")
2181 (set_attr "length" "8")
2182 (set_attr "type" "multiple")]
2183 )
2184
2185 ;; ??? Thumb-2 has bitfield insert/extract instructions; consider using them here.
2186 (define_split
2187 [(set (match_operand:SI 0 "s_register_operand" "")
2188 (match_operator:SI 1 "shiftable_operator"
2189 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2190 (match_operand:SI 3 "const_int_operand" "")
2191 (match_operand:SI 4 "const_int_operand" ""))
2192 (match_operand:SI 5 "s_register_operand" "")]))
2193 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2194 "TARGET_ARM"
2195 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2196 (set (match_dup 0)
2197 (match_op_dup 1
2198 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2199 (match_dup 5)]))]
2200 "{
2201 HOST_WIDE_INT temp = INTVAL (operands[3]);
2202
2203 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2204 operands[4] = GEN_INT (32 - temp);
2205 }"
2206 )
2207
2208 (define_split
2209 [(set (match_operand:SI 0 "s_register_operand" "")
2210 (match_operator:SI 1 "shiftable_operator"
2211 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2212 (match_operand:SI 3 "const_int_operand" "")
2213 (match_operand:SI 4 "const_int_operand" ""))
2214 (match_operand:SI 5 "s_register_operand" "")]))
2215 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2216 "TARGET_ARM"
2217 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2218 (set (match_dup 0)
2219 (match_op_dup 1
2220 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2221 (match_dup 5)]))]
2222 "{
2223 HOST_WIDE_INT temp = INTVAL (operands[3]);
2224
2225 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2226 operands[4] = GEN_INT (32 - temp);
2227 }"
2228 )
2229
2230 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2231 ;;; represented by the bitfield, then this will produce incorrect results.
2232 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2233 ;;; which have a real bit-field insert instruction, the truncation happens
2234 ;;; in the bit-field insert instruction itself. Since arm does not have a
2235 ;;; bit-field insert instruction, we would have to emit code here to truncate
2236 ;;; the value before we insert. This loses some of the advantage of having
2237 ;;; this insv pattern, so this pattern needs to be re-evaluated.
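;;; As a concrete illustration of the concern above: inserting a register
;;; that happens to hold 0x1ff into an 8-bit field must behave as if only
;;; 0xff had been inserted; if the value were simply shifted and ORed in
;;; without masking, the stray ninth bit would corrupt the bit just above
;;; the field.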
2238
2239 (define_expand "insv"
2240 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
2241 (match_operand 1 "general_operand")
2242 (match_operand 2 "general_operand"))
2243 (match_operand 3 "reg_or_int_operand"))]
2244 "TARGET_ARM || arm_arch_thumb2"
2245 "
2246 {
2247 int start_bit = INTVAL (operands[2]);
2248 int width = INTVAL (operands[1]);
2249 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
2250 rtx target, subtarget;
2251
2252 if (arm_arch_thumb2)
2253 {
2254 if (unaligned_access && MEM_P (operands[0])
2255 && s_register_operand (operands[3], GET_MODE (operands[3]))
2256 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2257 {
2258 rtx base_addr;
2259
2260 if (BYTES_BIG_ENDIAN)
2261 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2262 - start_bit;
2263
2264 if (width == 32)
2265 {
2266 base_addr = adjust_address (operands[0], SImode,
2267 start_bit / BITS_PER_UNIT);
2268 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2269 }
2270 else
2271 {
2272 rtx tmp = gen_reg_rtx (HImode);
2273
2274 base_addr = adjust_address (operands[0], HImode,
2275 start_bit / BITS_PER_UNIT);
2276 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2277 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2278 }
2279 DONE;
2280 }
2281 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2282 {
2283 bool use_bfi = TRUE;
2284
2285 if (CONST_INT_P (operands[3]))
2286 {
2287 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2288
2289 if (val == 0)
2290 {
2291 emit_insn (gen_insv_zero (operands[0], operands[1],
2292 operands[2]));
2293 DONE;
2294 }
2295
2296 /* See if the set can be done with a single orr instruction. */
2297 if (val == mask && const_ok_for_arm (val << start_bit))
2298 use_bfi = FALSE;
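	      /* Purely illustrative case: setting an 8-bit field at bit 8 of
		 a register (r0 used here only for exposition) to all ones
		 needs only
			orr	r0, r0, #0xff00
		 so the BFI path is skipped.  */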
2299 }
2300
2301 if (use_bfi)
2302 {
2303 if (!REG_P (operands[3]))
2304 operands[3] = force_reg (SImode, operands[3]);
2305
2306 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2307 operands[3]));
2308 DONE;
2309 }
2310 }
2311 else
2312 FAIL;
2313 }
2314
2315 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
2316 FAIL;
2317
2318 target = copy_rtx (operands[0]);
2319 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2320 subreg as the final target. */
2321 if (GET_CODE (target) == SUBREG)
2322 {
2323 subtarget = gen_reg_rtx (SImode);
2324 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2325 < GET_MODE_SIZE (SImode))
2326 target = SUBREG_REG (target);
2327 }
2328 else
2329 subtarget = target;
2330
2331 if (CONST_INT_P (operands[3]))
2332 {
2333 /* Since we are inserting a known constant, we may be able to
2334 reduce the number of bits that we have to clear so that
2335 the mask becomes simple. */
2336 /* ??? This code does not check to see if the new mask is actually
2337 simpler. It may not be. */
2338 rtx op1 = gen_reg_rtx (SImode);
2339 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2340 start of this pattern. */
2341 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2342 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2343
2344 emit_insn (gen_andsi3 (op1, operands[0],
2345 gen_int_mode (~mask2, SImode)));
2346 emit_insn (gen_iorsi3 (subtarget, op1,
2347 gen_int_mode (op3_value << start_bit, SImode)));
2348 }
2349 else if (start_bit == 0
2350 && !(const_ok_for_arm (mask)
2351 || const_ok_for_arm (~mask)))
2352 {
2353 /* A trick: since we are setting the bottom bits in the word,
2354 we can shift operand[3] up, shift operand[0] down, OR them together
2355 and rotate the result back again. This takes 3 insns, and
2356 the third might be mergeable into another op. */
2357 /* The shift up copes with the possibility that operand[3] is
2358 wider than the bitfield. */
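	  /* Purely illustrative sequence for a 12-bit field, with the value
	     in r1 and the destination word in r0 (register choice arbitrary):
		lsl	ip, r1, #20		@ value into the top 12 bits
		orr	ip, ip, r0, lsr #12	@ old upper 20 bits below it
		ror	r0, ip, #20		@ rotate field back to bits 11:0
	     The shift-down and the OR are emitted separately below, but they
	     are normally combined into the single shifted-operand ORR shown,
	     giving the three instructions mentioned above.  */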
2359 rtx op0 = gen_reg_rtx (SImode);
2360 rtx op1 = gen_reg_rtx (SImode);
2361
2362 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2363 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2364 emit_insn (gen_iorsi3 (op1, op1, op0));
2365 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2366 }
2367 else if ((width + start_bit == 32)
2368 && !(const_ok_for_arm (mask)
2369 || const_ok_for_arm (~mask)))
2370 {
2371 /* Similar trick, but slightly less efficient. */
2372
2373 rtx op0 = gen_reg_rtx (SImode);
2374 rtx op1 = gen_reg_rtx (SImode);
2375
2376 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2377 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2378 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2379 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2380 }
2381 else
2382 {
2383 rtx op0 = gen_int_mode (mask, SImode);
2384 rtx op1 = gen_reg_rtx (SImode);
2385 rtx op2 = gen_reg_rtx (SImode);
2386
2387 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2388 {
2389 rtx tmp = gen_reg_rtx (SImode);
2390
2391 emit_insn (gen_movsi (tmp, op0));
2392 op0 = tmp;
2393 }
2394
2395 /* Mask out any bits in operand[3] that are not needed. */
2396 emit_insn (gen_andsi3 (op1, operands[3], op0));
2397
2398 if (CONST_INT_P (op0)
2399 && (const_ok_for_arm (mask << start_bit)
2400 || const_ok_for_arm (~(mask << start_bit))))
2401 {
2402 op0 = gen_int_mode (~(mask << start_bit), SImode);
2403 emit_insn (gen_andsi3 (op2, operands[0], op0));
2404 }
2405 else
2406 {
2407 if (CONST_INT_P (op0))
2408 {
2409 rtx tmp = gen_reg_rtx (SImode);
2410
2411 emit_insn (gen_movsi (tmp, op0));
2412 op0 = tmp;
2413 }
2414
2415 if (start_bit != 0)
2416 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2417
2418 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2419 }
2420
2421 if (start_bit != 0)
2422 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2423
2424 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2425 }
2426
2427 if (subtarget != target)
2428 {
2429 /* If TARGET is still a SUBREG, then it must be wider than a word,
2430 so we must be careful only to set the subword we were asked to. */
2431 if (GET_CODE (target) == SUBREG)
2432 emit_move_insn (target, subtarget);
2433 else
2434 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2435 }
2436
2437 DONE;
2438 }"
2439 )
2440
2441 (define_insn "insv_zero"
2442 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2443 (match_operand:SI 1 "const_int_M_operand" "M")
2444 (match_operand:SI 2 "const_int_M_operand" "M"))
2445 (const_int 0))]
2446 "arm_arch_thumb2"
2447 "bfc%?\t%0, %2, %1"
2448 [(set_attr "length" "4")
2449 (set_attr "predicable" "yes")
2450 (set_attr "type" "bfm")]
2451 )
2452
2453 (define_insn "insv_t2"
2454 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2455 (match_operand:SI 1 "const_int_M_operand" "M")
2456 (match_operand:SI 2 "const_int_M_operand" "M"))
2457 (match_operand:SI 3 "s_register_operand" "r"))]
2458 "arm_arch_thumb2"
2459 "bfi%?\t%0, %3, %2, %1"
2460 [(set_attr "length" "4")
2461 (set_attr "predicable" "yes")
2462 (set_attr "type" "bfm")]
2463 )
2464
2465 (define_insn "andsi_notsi_si"
2466 [(set (match_operand:SI 0 "s_register_operand" "=r")
2467 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2468 (match_operand:SI 1 "s_register_operand" "r")))]
2469 "TARGET_32BIT"
2470 "bic%?\\t%0, %1, %2"
2471 [(set_attr "predicable" "yes")
2472 (set_attr "type" "logic_reg")]
2473 )
2474
2475 (define_insn "andsi_not_shiftsi_si"
2476 [(set (match_operand:SI 0 "s_register_operand" "=r")
2477 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2478 [(match_operand:SI 2 "s_register_operand" "r")
2479 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2480 (match_operand:SI 1 "s_register_operand" "r")))]
2481 "TARGET_ARM"
2482 "bic%?\\t%0, %1, %2%S4"
2483 [(set_attr "predicable" "yes")
2484 (set_attr "shift" "2")
2485 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2486 (const_string "logic_shift_imm")
2487 (const_string "logic_shift_reg")))]
2488 )
2489
2490 ;; Shifted BICS pattern used to set up the condition codes without reusing
2491 ;; the BICS result. The shift amount is restricted to a constant for Thumb-2,
2492 ;; since Thumb-2 BICS does not support a shift by register.
2493 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
2494 [(set (reg:CC_NOOV CC_REGNUM)
2495 (compare:CC_NOOV
2496 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2497 [(match_operand:SI 1 "s_register_operand" "r")
2498 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2499 (match_operand:SI 3 "s_register_operand" "r"))
2500 (const_int 0)))
2501 (clobber (match_scratch:SI 4 "=r"))]
2502 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2503 "bics%?\\t%4, %3, %1%S0"
2504 [(set_attr "predicable" "yes")
2505 (set_attr "conds" "set")
2506 (set_attr "shift" "1")
2507 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2508 (const_string "logic_shift_imm")
2509 (const_string "logic_shift_reg")))]
2510 )
2511
2512 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the bics result is also
2513 ;; getting reused later.
2514 (define_insn "andsi_not_shiftsi_si_scc"
2515 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2516 (compare:CC_NOOV
2517 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2518 [(match_operand:SI 1 "s_register_operand" "r")
2519 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2520 (match_operand:SI 3 "s_register_operand" "r"))
2521 (const_int 0)))
2522 (set (match_operand:SI 4 "s_register_operand" "=r")
2523 (and:SI (not:SI (match_op_dup 0
2524 [(match_dup 1)
2525 (match_dup 2)]))
2526 (match_dup 3)))])]
2527 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2528 "bics%?\\t%4, %3, %1%S0"
2529 [(set_attr "predicable" "yes")
2530 (set_attr "conds" "set")
2531 (set_attr "shift" "1")
2532 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2533 (const_string "logic_shift_imm")
2534 (const_string "logic_shift_reg")))]
2535 )
2536
2537 (define_insn "*andsi_notsi_si_compare0"
2538 [(set (reg:CC_NOOV CC_REGNUM)
2539 (compare:CC_NOOV
2540 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2541 (match_operand:SI 1 "s_register_operand" "r"))
2542 (const_int 0)))
2543 (set (match_operand:SI 0 "s_register_operand" "=r")
2544 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2545 "TARGET_32BIT"
2546 "bics\\t%0, %1, %2"
2547 [(set_attr "conds" "set")
2548 (set_attr "type" "logics_shift_reg")]
2549 )
2550
2551 (define_insn "*andsi_notsi_si_compare0_scratch"
2552 [(set (reg:CC_NOOV CC_REGNUM)
2553 (compare:CC_NOOV
2554 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2555 (match_operand:SI 1 "s_register_operand" "r"))
2556 (const_int 0)))
2557 (clobber (match_scratch:SI 0 "=r"))]
2558 "TARGET_32BIT"
2559 "bics\\t%0, %1, %2"
2560 [(set_attr "conds" "set")
2561 (set_attr "type" "logics_shift_reg")]
2562 )
2563
2564 (define_expand "iorsi3"
2565 [(set (match_operand:SI 0 "s_register_operand")
2566 (ior:SI (match_operand:SI 1 "s_register_operand")
2567 (match_operand:SI 2 "reg_or_int_operand")))]
2568 "TARGET_EITHER"
2569 "
2570 if (CONST_INT_P (operands[2]))
2571 {
2572 if (TARGET_32BIT)
2573 {
2574 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
2575 operands[2] = force_reg (SImode, operands[2]);
2576 else
2577 {
2578 arm_split_constant (IOR, SImode, NULL_RTX,
2579 INTVAL (operands[2]), operands[0],
2580 operands[1],
2581 optimize && can_create_pseudo_p ());
2582 DONE;
2583 }
2584 }
2585 else /* TARGET_THUMB1 */
2586 {
2587 rtx tmp = force_reg (SImode, operands[2]);
2588 if (rtx_equal_p (operands[0], operands[1]))
2589 operands[2] = tmp;
2590 else
2591 {
2592 operands[2] = operands[1];
2593 operands[1] = tmp;
2594 }
2595 }
2596 }
2597 "
2598 )
2599
2600 (define_insn_and_split "*iorsi3_insn"
2601 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2602 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2603 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2604 "TARGET_32BIT"
2605 "@
2606 orr%?\\t%0, %1, %2
2607 orr%?\\t%0, %1, %2
2608 orn%?\\t%0, %1, #%B2
2609 orr%?\\t%0, %1, %2
2610 #"
2611 "TARGET_32BIT
2612 && CONST_INT_P (operands[2])
2613 && !(const_ok_for_arm (INTVAL (operands[2]))
2614 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2615 [(clobber (const_int 0))]
2616 {
2617 arm_split_constant (IOR, SImode, curr_insn,
2618 INTVAL (operands[2]), operands[0], operands[1], 0);
2619 DONE;
2620 }
2621 [(set_attr "length" "4,4,4,4,16")
2622 (set_attr "arch" "32,t2,t2,32,32")
2623 (set_attr "predicable" "yes")
2624 (set_attr "predicable_short_it" "no,yes,no,no,no")
2625 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
2626 )
2627
2628 (define_peephole2
2629 [(match_scratch:SI 3 "r")
2630 (set (match_operand:SI 0 "arm_general_register_operand" "")
2631 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2632 (match_operand:SI 2 "const_int_operand" "")))]
2633 "TARGET_ARM
2634 && !const_ok_for_arm (INTVAL (operands[2]))
2635 && const_ok_for_arm (~INTVAL (operands[2]))"
2636 [(set (match_dup 3) (match_dup 2))
2637 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2638 ""
2639 )
2640
2641 (define_insn "*iorsi3_compare0"
2642 [(set (reg:CC_NOOV CC_REGNUM)
2643 (compare:CC_NOOV
2644 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2645 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2646 (const_int 0)))
2647 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
2648 (ior:SI (match_dup 1) (match_dup 2)))]
2649 "TARGET_32BIT"
2650 "orrs%?\\t%0, %1, %2"
2651 [(set_attr "conds" "set")
2652 (set_attr "arch" "*,t2,*")
2653 (set_attr "length" "4,2,4")
2654 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
2655 )
2656
2657 (define_insn "*iorsi3_compare0_scratch"
2658 [(set (reg:CC_NOOV CC_REGNUM)
2659 (compare:CC_NOOV
2660 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2661 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2662 (const_int 0)))
2663 (clobber (match_scratch:SI 0 "=r,l,r"))]
2664 "TARGET_32BIT"
2665 "orrs%?\\t%0, %1, %2"
2666 [(set_attr "conds" "set")
2667 (set_attr "arch" "*,t2,*")
2668 (set_attr "length" "4,2,4")
2669 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
2670 )
2671
2672 (define_expand "xorsi3"
2673 [(set (match_operand:SI 0 "s_register_operand")
2674 (xor:SI (match_operand:SI 1 "s_register_operand")
2675 (match_operand:SI 2 "reg_or_int_operand")))]
2676 "TARGET_EITHER"
2677 "if (CONST_INT_P (operands[2]))
2678 {
2679 if (TARGET_32BIT)
2680 {
2681 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
2682 operands[2] = force_reg (SImode, operands[2]);
2683 else
2684 {
2685 arm_split_constant (XOR, SImode, NULL_RTX,
2686 INTVAL (operands[2]), operands[0],
2687 operands[1],
2688 optimize && can_create_pseudo_p ());
2689 DONE;
2690 }
2691 }
2692 else /* TARGET_THUMB1 */
2693 {
2694 rtx tmp = force_reg (SImode, operands[2]);
2695 if (rtx_equal_p (operands[0], operands[1]))
2696 operands[2] = tmp;
2697 else
2698 {
2699 operands[2] = operands[1];
2700 operands[1] = tmp;
2701 }
2702 }
2703 }"
2704 )
2705
2706 (define_insn_and_split "*arm_xorsi3"
2707 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
2708 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
2709 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
2710 "TARGET_32BIT"
2711 "@
2712 eor%?\\t%0, %1, %2
2713 eor%?\\t%0, %1, %2
2714 eor%?\\t%0, %1, %2
2715 #"
2716 "TARGET_32BIT
2717 && CONST_INT_P (operands[2])
2718 && !const_ok_for_arm (INTVAL (operands[2]))"
2719 [(clobber (const_int 0))]
2720 {
2721 arm_split_constant (XOR, SImode, curr_insn,
2722 INTVAL (operands[2]), operands[0], operands[1], 0);
2723 DONE;
2724 }
2725 [(set_attr "length" "4,4,4,16")
2726 (set_attr "predicable" "yes")
2727 (set_attr "predicable_short_it" "no,yes,no,no")
2728 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
2729 )
2730
2731 (define_insn "*xorsi3_compare0"
2732 [(set (reg:CC_NOOV CC_REGNUM)
2733 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
2734 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
2735 (const_int 0)))
2736 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2737 (xor:SI (match_dup 1) (match_dup 2)))]
2738 "TARGET_32BIT"
2739 "eors%?\\t%0, %1, %2"
2740 [(set_attr "conds" "set")
2741 (set_attr "type" "logics_imm,logics_reg")]
2742 )
2743
2744 (define_insn "*xorsi3_compare0_scratch"
2745 [(set (reg:CC_NOOV CC_REGNUM)
2746 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
2747 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
2748 (const_int 0)))]
2749 "TARGET_32BIT"
2750 "teq%?\\t%0, %1"
2751 [(set_attr "conds" "set")
2752 (set_attr "type" "logics_imm,logics_reg")]
2753 )
2754
2755 ; By rewriting (IOR (AND (NOT A) (NOT B)) C) as (NOT D), where
2756 ; D = (AND (IOR A B) (NOT C)), we can sometimes merge the final NOT into
2757 ; one of the following insns.
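; For example (registers purely illustrative), with A, B and C in r1, r2 and
; r3, the split below produces
;	orr	r4, r1, r2
;	bic	r4, r4, r3
; followed by a NOT of r4; if that result then feeds an AND, the final MVN
; folds away into a BIC.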
2758
2759 (define_split
2760 [(set (match_operand:SI 0 "s_register_operand" "")
2761 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2762 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2763 (match_operand:SI 3 "arm_rhs_operand" "")))
2764 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2765 "TARGET_32BIT"
2766 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2767 (not:SI (match_dup 3))))
2768 (set (match_dup 0) (not:SI (match_dup 4)))]
2769 ""
2770 )
2771
2772 (define_insn_and_split "*andsi_iorsi3_notsi"
2773 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2774 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
2775 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2776 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2777 "TARGET_32BIT"
2778 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2779 "&& reload_completed"
2780 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
2781 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
2782 {
2783 /* If operands[3] is a constant make sure to fold the NOT into it
2784 to avoid creating a NOT of a CONST_INT. */
2785 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
2786 if (CONST_INT_P (not_rtx))
2787 {
2788 operands[4] = operands[0];
2789 operands[5] = not_rtx;
2790 }
2791 else
2792 {
2793 operands[5] = operands[0];
2794 operands[4] = not_rtx;
2795 }
2796 }
2797 [(set_attr "length" "8")
2798 (set_attr "ce_count" "2")
2799 (set_attr "predicable" "yes")
2800 (set_attr "type" "multiple")]
2801 )
2802
2803 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2804 ; insns are available?
2805 (define_split
2806 [(set (match_operand:SI 0 "s_register_operand" "")
2807 (match_operator:SI 1 "logical_binary_operator"
2808 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2809 (match_operand:SI 3 "const_int_operand" "")
2810 (match_operand:SI 4 "const_int_operand" ""))
2811 (match_operator:SI 9 "logical_binary_operator"
2812 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2813 (match_operand:SI 6 "const_int_operand" ""))
2814 (match_operand:SI 7 "s_register_operand" "")])]))
2815 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2816 "TARGET_32BIT
2817 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2818 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2819 [(set (match_dup 8)
2820 (match_op_dup 1
2821 [(ashift:SI (match_dup 2) (match_dup 4))
2822 (match_dup 5)]))
2823 (set (match_dup 0)
2824 (match_op_dup 1
2825 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2826 (match_dup 7)]))]
2827 "
2828 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2829 ")
2830
2831 (define_split
2832 [(set (match_operand:SI 0 "s_register_operand" "")
2833 (match_operator:SI 1 "logical_binary_operator"
2834 [(match_operator:SI 9 "logical_binary_operator"
2835 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2836 (match_operand:SI 6 "const_int_operand" ""))
2837 (match_operand:SI 7 "s_register_operand" "")])
2838 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2839 (match_operand:SI 3 "const_int_operand" "")
2840 (match_operand:SI 4 "const_int_operand" ""))]))
2841 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2842 "TARGET_32BIT
2843 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2844 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2845 [(set (match_dup 8)
2846 (match_op_dup 1
2847 [(ashift:SI (match_dup 2) (match_dup 4))
2848 (match_dup 5)]))
2849 (set (match_dup 0)
2850 (match_op_dup 1
2851 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2852 (match_dup 7)]))]
2853 "
2854 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2855 ")
2856
2857 (define_split
2858 [(set (match_operand:SI 0 "s_register_operand" "")
2859 (match_operator:SI 1 "logical_binary_operator"
2860 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2861 (match_operand:SI 3 "const_int_operand" "")
2862 (match_operand:SI 4 "const_int_operand" ""))
2863 (match_operator:SI 9 "logical_binary_operator"
2864 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2865 (match_operand:SI 6 "const_int_operand" ""))
2866 (match_operand:SI 7 "s_register_operand" "")])]))
2867 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2868 "TARGET_32BIT
2869 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2870 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2871 [(set (match_dup 8)
2872 (match_op_dup 1
2873 [(ashift:SI (match_dup 2) (match_dup 4))
2874 (match_dup 5)]))
2875 (set (match_dup 0)
2876 (match_op_dup 1
2877 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2878 (match_dup 7)]))]
2879 "
2880 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2881 ")
2882
2883 (define_split
2884 [(set (match_operand:SI 0 "s_register_operand" "")
2885 (match_operator:SI 1 "logical_binary_operator"
2886 [(match_operator:SI 9 "logical_binary_operator"
2887 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2888 (match_operand:SI 6 "const_int_operand" ""))
2889 (match_operand:SI 7 "s_register_operand" "")])
2890 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2891 (match_operand:SI 3 "const_int_operand" "")
2892 (match_operand:SI 4 "const_int_operand" ""))]))
2893 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2894 "TARGET_32BIT
2895 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2896 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2897 [(set (match_dup 8)
2898 (match_op_dup 1
2899 [(ashift:SI (match_dup 2) (match_dup 4))
2900 (match_dup 5)]))
2901 (set (match_dup 0)
2902 (match_op_dup 1
2903 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2904 (match_dup 7)]))]
2905 "
2906 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2907 ")
2908 \f
2909
2910 ;; Minimum and maximum insns
2911
2912 (define_expand "smaxsi3"
2913 [(parallel [
2914 (set (match_operand:SI 0 "s_register_operand")
2915 (smax:SI (match_operand:SI 1 "s_register_operand")
2916 (match_operand:SI 2 "arm_rhs_operand")))
2917 (clobber (reg:CC CC_REGNUM))])]
2918 "TARGET_32BIT"
2919 "
2920 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2921 {
2922 /* No need for a clobber of the condition code register here. */
2923 emit_insn (gen_rtx_SET (operands[0],
2924 gen_rtx_SMAX (SImode, operands[1],
2925 operands[2])));
2926 DONE;
2927 }
2928 ")
2929
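;; The *smax_0, *smax_m1 and *smin_0 patterns below rely on "x, asr #31"
;; being zero when x >= 0 and all ones when x < 0: BIC with it yields
;; smax (x, 0), ORR yields smax (x, -1), and AND yields smin (x, 0).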
2930 (define_insn "*smax_0"
2931 [(set (match_operand:SI 0 "s_register_operand" "=r")
2932 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2933 (const_int 0)))]
2934 "TARGET_32BIT"
2935 "bic%?\\t%0, %1, %1, asr #31"
2936 [(set_attr "predicable" "yes")
2937 (set_attr "type" "logic_shift_reg")]
2938 )
2939
2940 (define_insn "*smax_m1"
2941 [(set (match_operand:SI 0 "s_register_operand" "=r")
2942 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2943 (const_int -1)))]
2944 "TARGET_32BIT"
2945 "orr%?\\t%0, %1, %1, asr #31"
2946 [(set_attr "predicable" "yes")
2947 (set_attr "type" "logic_shift_reg")]
2948 )
2949
2950 (define_insn_and_split "*arm_smax_insn"
2951 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2952 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2953 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2954 (clobber (reg:CC CC_REGNUM))]
2955 "TARGET_ARM"
2956 "#"
2957 ; cmp\\t%1, %2\;movlt\\t%0, %2
2958 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
2959 "TARGET_ARM"
2960 [(set (reg:CC CC_REGNUM)
2961 (compare:CC (match_dup 1) (match_dup 2)))
2962 (set (match_dup 0)
2963 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
2964 (match_dup 1)
2965 (match_dup 2)))]
2966 ""
2967 [(set_attr "conds" "clob")
2968 (set_attr "length" "8,12")
2969 (set_attr "type" "multiple")]
2970 )
2971
2972 (define_expand "sminsi3"
2973 [(parallel [
2974 (set (match_operand:SI 0 "s_register_operand")
2975 (smin:SI (match_operand:SI 1 "s_register_operand")
2976 (match_operand:SI 2 "arm_rhs_operand")))
2977 (clobber (reg:CC CC_REGNUM))])]
2978 "TARGET_32BIT"
2979 "
2980 if (operands[2] == const0_rtx)
2981 {
2982 /* No need for a clobber of the condition code register here. */
2983 emit_insn (gen_rtx_SET (operands[0],
2984 gen_rtx_SMIN (SImode, operands[1],
2985 operands[2])));
2986 DONE;
2987 }
2988 ")
2989
2990 (define_insn "*smin_0"
2991 [(set (match_operand:SI 0 "s_register_operand" "=r")
2992 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
2993 (const_int 0)))]
2994 "TARGET_32BIT"
2995 "and%?\\t%0, %1, %1, asr #31"
2996 [(set_attr "predicable" "yes")
2997 (set_attr "type" "logic_shift_reg")]
2998 )
2999
3000 (define_insn_and_split "*arm_smin_insn"
3001 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3002 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3003 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3004 (clobber (reg:CC CC_REGNUM))]
3005 "TARGET_ARM"
3006 "#"
3007 ; cmp\\t%1, %2\;movge\\t%0, %2
3008 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3009 "TARGET_ARM"
3010 [(set (reg:CC CC_REGNUM)
3011 (compare:CC (match_dup 1) (match_dup 2)))
3012 (set (match_dup 0)
3013 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
3014 (match_dup 1)
3015 (match_dup 2)))]
3016 ""
3017 [(set_attr "conds" "clob")
3018 (set_attr "length" "8,12")
3019 (set_attr "type" "multiple,multiple")]
3020 )
3021
3022 (define_expand "umaxsi3"
3023 [(parallel [
3024 (set (match_operand:SI 0 "s_register_operand")
3025 (umax:SI (match_operand:SI 1 "s_register_operand")
3026 (match_operand:SI 2 "arm_rhs_operand")))
3027 (clobber (reg:CC CC_REGNUM))])]
3028 "TARGET_32BIT"
3029 ""
3030 )
3031
3032 (define_insn_and_split "*arm_umaxsi3"
3033 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3034 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3035 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3036 (clobber (reg:CC CC_REGNUM))]
3037 "TARGET_ARM"
3038 "#"
3039 ; cmp\\t%1, %2\;movcc\\t%0, %2
3040 ; cmp\\t%1, %2\;movcs\\t%0, %1
3041 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3042 "TARGET_ARM"
3043 [(set (reg:CC CC_REGNUM)
3044 (compare:CC (match_dup 1) (match_dup 2)))
3045 (set (match_dup 0)
3046 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
3047 (match_dup 1)
3048 (match_dup 2)))]
3049 ""
3050 [(set_attr "conds" "clob")
3051 (set_attr "length" "8,8,12")
3052 (set_attr "type" "store_4")]
3053 )
3054
3055 (define_expand "uminsi3"
3056 [(parallel [
3057 (set (match_operand:SI 0 "s_register_operand")
3058 (umin:SI (match_operand:SI 1 "s_register_operand")
3059 (match_operand:SI 2 "arm_rhs_operand")))
3060 (clobber (reg:CC CC_REGNUM))])]
3061 "TARGET_32BIT"
3062 ""
3063 )
3064
3065 (define_insn_and_split "*arm_uminsi3"
3066 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3067 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3068 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3069 (clobber (reg:CC CC_REGNUM))]
3070 "TARGET_ARM"
3071 "#"
3072 ; cmp\\t%1, %2\;movcs\\t%0, %2
3073 ; cmp\\t%1, %2\;movcc\\t%0, %1
3074 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3075 "TARGET_ARM"
3076 [(set (reg:CC CC_REGNUM)
3077 (compare:CC (match_dup 1) (match_dup 2)))
3078 (set (match_dup 0)
3079 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
3080 (match_dup 1)
3081 (match_dup 2)))]
3082 ""
3083 [(set_attr "conds" "clob")
3084 (set_attr "length" "8,8,12")
3085 (set_attr "type" "store_4")]
3086 )
3087
3088 (define_insn "*store_minmaxsi"
3089 [(set (match_operand:SI 0 "memory_operand" "=m")
3090 (match_operator:SI 3 "minmax_operator"
3091 [(match_operand:SI 1 "s_register_operand" "r")
3092 (match_operand:SI 2 "s_register_operand" "r")]))
3093 (clobber (reg:CC CC_REGNUM))]
3094 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
3095 "*
3096 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3097 operands[1], operands[2]);
3098 output_asm_insn (\"cmp\\t%1, %2\", operands);
3099 if (TARGET_THUMB2)
3100 output_asm_insn (\"ite\t%d3\", operands);
3101 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3102 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3103 return \"\";
3104 "
3105 [(set_attr "conds" "clob")
3106 (set (attr "length")
3107 (if_then_else (eq_attr "is_thumb" "yes")
3108 (const_int 14)
3109 (const_int 12)))
3110 (set_attr "type" "store_4")]
3111 )
3112
3113 ; Reject the frame pointer in operand[1], since reloading this after
3114 ; it has been eliminated can cause carnage.
3115 (define_insn "*minmax_arithsi"
3116 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3117 (match_operator:SI 4 "shiftable_operator"
3118 [(match_operator:SI 5 "minmax_operator"
3119 [(match_operand:SI 2 "s_register_operand" "r,r")
3120 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3121 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3122 (clobber (reg:CC CC_REGNUM))]
3123 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
3124 "*
3125 {
3126 enum rtx_code code = GET_CODE (operands[4]);
3127 bool need_else;
3128
3129 if (which_alternative != 0 || operands[3] != const0_rtx
3130 || (code != PLUS && code != IOR && code != XOR))
3131 need_else = true;
3132 else
3133 need_else = false;
3134
3135 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3136 operands[2], operands[3]);
3137 output_asm_insn (\"cmp\\t%2, %3\", operands);
3138 if (TARGET_THUMB2)
3139 {
3140 if (need_else)
3141 output_asm_insn (\"ite\\t%d5\", operands);
3142 else
3143 output_asm_insn (\"it\\t%d5\", operands);
3144 }
3145 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3146 if (need_else)
3147 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3148 return \"\";
3149 }"
3150 [(set_attr "conds" "clob")
3151 (set (attr "length")
3152 (if_then_else (eq_attr "is_thumb" "yes")
3153 (const_int 14)
3154 (const_int 12)))
3155 (set_attr "type" "multiple")]
3156 )
3157
3158 ; Reject the frame pointer in operand[1], since reloading this after
3159 ; it has been eliminated can cause carnage.
3160 (define_insn_and_split "*minmax_arithsi_non_canon"
3161 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
3162 (minus:SI
3163 (match_operand:SI 1 "s_register_operand" "0,?Ts")
3164 (match_operator:SI 4 "minmax_operator"
3165 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
3166 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
3167 (clobber (reg:CC CC_REGNUM))]
3168 "TARGET_32BIT && !arm_eliminable_register (operands[1])
3169 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
3170 "#"
3171 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
3172 [(set (reg:CC CC_REGNUM)
3173 (compare:CC (match_dup 2) (match_dup 3)))
3174
3175 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
3176 (set (match_dup 0)
3177 (minus:SI (match_dup 1)
3178 (match_dup 2))))
3179 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
3180 (set (match_dup 0)
3181 (match_dup 6)))]
3182 {
3183 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
3184 operands[2], operands[3]);
3185 enum rtx_code rc = minmax_code (operands[4]);
3186 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
3187 operands[2], operands[3]);
3188
3189 if (mode == CCFPmode || mode == CCFPEmode)
3190 rc = reverse_condition_maybe_unordered (rc);
3191 else
3192 rc = reverse_condition (rc);
3193 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
3194 if (CONST_INT_P (operands[3]))
3195 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
3196 else
3197 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
3198 }
3199 [(set_attr "conds" "clob")
3200 (set (attr "length")
3201 (if_then_else (eq_attr "is_thumb" "yes")
3202 (const_int 14)
3203 (const_int 12)))
3204 (set_attr "type" "multiple")]
3205 )
3206
3207 (define_code_iterator SAT [smin smax])
3208 (define_code_attr SATrev [(smin "smax") (smax "smin")])
3209 (define_code_attr SATlo [(smin "1") (smax "2")])
3210 (define_code_attr SAThi [(smin "2") (smax "1")])
3211
3212 (define_insn "*satsi_<SAT:code>"
3213 [(set (match_operand:SI 0 "s_register_operand" "=r")
3214 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
3215 (match_operand:SI 1 "const_int_operand" "i"))
3216 (match_operand:SI 2 "const_int_operand" "i")))]
3217 "TARGET_32BIT && arm_arch6
3218 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3219 {
3220 int mask;
3221 bool signed_sat;
3222 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3223 &mask, &signed_sat))
3224 gcc_unreachable ();
3225
3226 operands[1] = GEN_INT (mask);
3227 if (signed_sat)
3228 return "ssat%?\t%0, %1, %3";
3229 else
3230 return "usat%?\t%0, %1, %3";
3231 }
3232 [(set_attr "predicable" "yes")
3233 (set_attr "type" "alus_imm")]
3234 )
3235
3236 (define_insn "*satsi_<SAT:code>_shift"
3237 [(set (match_operand:SI 0 "s_register_operand" "=r")
3238 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
3239 [(match_operand:SI 4 "s_register_operand" "r")
3240 (match_operand:SI 5 "const_int_operand" "i")])
3241 (match_operand:SI 1 "const_int_operand" "i"))
3242 (match_operand:SI 2 "const_int_operand" "i")))]
3243 "TARGET_32BIT && arm_arch6
3244 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3245 {
3246 int mask;
3247 bool signed_sat;
3248 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3249 &mask, &signed_sat))
3250 gcc_unreachable ();
3251
3252 operands[1] = GEN_INT (mask);
3253 if (signed_sat)
3254 return "ssat%?\t%0, %1, %4%S3";
3255 else
3256 return "usat%?\t%0, %1, %4%S3";
3257 }
3258 [(set_attr "predicable" "yes")
3259 (set_attr "shift" "3")
3260 (set_attr "type" "logic_shift_reg")])
3261 \f
3262 ;; Shift and rotation insns
3263
3264 (define_expand "ashldi3"
3265 [(set (match_operand:DI 0 "s_register_operand")
3266 (ashift:DI (match_operand:DI 1 "s_register_operand")
3267 (match_operand:SI 2 "reg_or_int_operand")))]
3268 "TARGET_32BIT"
3269 "
3270 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
3271 operands[2], gen_reg_rtx (SImode),
3272 gen_reg_rtx (SImode));
3273 DONE;
3274 ")
3275
3276 (define_expand "ashlsi3"
3277 [(set (match_operand:SI 0 "s_register_operand")
3278 (ashift:SI (match_operand:SI 1 "s_register_operand")
3279 (match_operand:SI 2 "arm_rhs_operand")))]
3280 "TARGET_EITHER"
3281 "
3282 if (CONST_INT_P (operands[2])
3283 && (UINTVAL (operands[2])) > 31)
3284 {
3285 emit_insn (gen_movsi (operands[0], const0_rtx));
3286 DONE;
3287 }
3288 "
3289 )
3290
3291 (define_expand "ashrdi3"
3292 [(set (match_operand:DI 0 "s_register_operand")
3293 (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
3294 (match_operand:SI 2 "reg_or_int_operand")))]
3295 "TARGET_32BIT"
3296 "
3297 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
3298 operands[2], gen_reg_rtx (SImode),
3299 gen_reg_rtx (SImode));
3300 DONE;
3301 ")
3302
3303 (define_expand "ashrsi3"
3304 [(set (match_operand:SI 0 "s_register_operand")
3305 (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
3306 (match_operand:SI 2 "arm_rhs_operand")))]
3307 "TARGET_EITHER"
3308 "
3309 if (CONST_INT_P (operands[2])
3310 && UINTVAL (operands[2]) > 31)
3311 operands[2] = GEN_INT (31);
3312 "
3313 )
3314
3315 (define_expand "lshrdi3"
3316 [(set (match_operand:DI 0 "s_register_operand")
3317 (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
3318 (match_operand:SI 2 "reg_or_int_operand")))]
3319 "TARGET_32BIT"
3320 "
3321 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
3322 operands[2], gen_reg_rtx (SImode),
3323 gen_reg_rtx (SImode));
3324 DONE;
3325 ")
3326
3327 (define_expand "lshrsi3"
3328 [(set (match_operand:SI 0 "s_register_operand")
3329 (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
3330 (match_operand:SI 2 "arm_rhs_operand")))]
3331 "TARGET_EITHER"
3332 "
3333 if (CONST_INT_P (operands[2])
3334 && (UINTVAL (operands[2])) > 31)
3335 {
3336 emit_insn (gen_movsi (operands[0], const0_rtx));
3337 DONE;
3338 }
3339 "
3340 )
3341
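;; ARM has no rotate-left instruction, so rotlsi3 below is implemented as a
;; rotate right by (32 - n) % 32: e.g. a constant rotate left by 8 becomes a
;; ROR by #24, and a variable count is first subtracted from 32 into a
;; scratch register.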
3342 (define_expand "rotlsi3"
3343 [(set (match_operand:SI 0 "s_register_operand")
3344 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3345 (match_operand:SI 2 "reg_or_int_operand")))]
3346 "TARGET_32BIT"
3347 "
3348 if (CONST_INT_P (operands[2]))
3349 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3350 else
3351 {
3352 rtx reg = gen_reg_rtx (SImode);
3353 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3354 operands[2] = reg;
3355 }
3356 "
3357 )
3358
3359 (define_expand "rotrsi3"
3360 [(set (match_operand:SI 0 "s_register_operand")
3361 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3362 (match_operand:SI 2 "arm_rhs_operand")))]
3363 "TARGET_EITHER"
3364 "
3365 if (TARGET_32BIT)
3366 {
3367 if (CONST_INT_P (operands[2])
3368 && UINTVAL (operands[2]) > 31)
3369 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3370 }
3371 else /* TARGET_THUMB1 */
3372 {
3373 if (CONST_INT_P (operands[2]))
3374 operands[2] = force_reg (SImode, operands[2]);
3375 }
3376 "
3377 )
3378
3379 (define_insn "*arm_shiftsi3"
3380 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
3381 (match_operator:SI 3 "shift_operator"
3382 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
3383 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
3384 "TARGET_32BIT"
3385 "* return arm_output_shift(operands, 0);"
3386 [(set_attr "predicable" "yes")
3387 (set_attr "arch" "t2,t2,*,*")
3388 (set_attr "predicable_short_it" "yes,yes,no,no")
3389 (set_attr "length" "4")
3390 (set_attr "shift" "1")
3391 (set_attr "type" "alu_shift_reg,alu_shift_imm,alu_shift_imm,alu_shift_reg")]
3392 )
3393
3394 (define_insn "*shiftsi3_compare0"
3395 [(set (reg:CC_NOOV CC_REGNUM)
3396 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3397 [(match_operand:SI 1 "s_register_operand" "r,r")
3398 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3399 (const_int 0)))
3400 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3401 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3402 "TARGET_32BIT"
3403 "* return arm_output_shift(operands, 1);"
3404 [(set_attr "conds" "set")
3405 (set_attr "shift" "1")
3406 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
3407 )
3408
3409 (define_insn "*shiftsi3_compare0_scratch"
3410 [(set (reg:CC_NOOV CC_REGNUM)
3411 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3412 [(match_operand:SI 1 "s_register_operand" "r,r")
3413 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3414 (const_int 0)))
3415 (clobber (match_scratch:SI 0 "=r,r"))]
3416 "TARGET_32BIT"
3417 "* return arm_output_shift(operands, 1);"
3418 [(set_attr "conds" "set")
3419 (set_attr "shift" "1")
3420 (set_attr "type" "shift_imm,shift_reg")]
3421 )
3422
3423 (define_insn "*not_shiftsi"
3424 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3425 (not:SI (match_operator:SI 3 "shift_operator"
3426 [(match_operand:SI 1 "s_register_operand" "r,r")
3427 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3428 "TARGET_32BIT"
3429 "mvn%?\\t%0, %1%S3"
3430 [(set_attr "predicable" "yes")
3431 (set_attr "shift" "1")
3432 (set_attr "arch" "32,a")
3433 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3434
3435 (define_insn "*not_shiftsi_compare0"
3436 [(set (reg:CC_NOOV CC_REGNUM)
3437 (compare:CC_NOOV
3438 (not:SI (match_operator:SI 3 "shift_operator"
3439 [(match_operand:SI 1 "s_register_operand" "r,r")
3440 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3441 (const_int 0)))
3442 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3443 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3444 "TARGET_32BIT"
3445 "mvns%?\\t%0, %1%S3"
3446 [(set_attr "conds" "set")
3447 (set_attr "shift" "1")
3448 (set_attr "arch" "32,a")
3449 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3450
3451 (define_insn "*not_shiftsi_compare0_scratch"
3452 [(set (reg:CC_NOOV CC_REGNUM)
3453 (compare:CC_NOOV
3454 (not:SI (match_operator:SI 3 "shift_operator"
3455 [(match_operand:SI 1 "s_register_operand" "r,r")
3456 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3457 (const_int 0)))
3458 (clobber (match_scratch:SI 0 "=r,r"))]
3459 "TARGET_32BIT"
3460 "mvns%?\\t%0, %1%S3"
3461 [(set_attr "conds" "set")
3462 (set_attr "shift" "1")
3463 (set_attr "arch" "32,a")
3464 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3465
3466 ;; We don't really have extzv, but defining this using shifts helps
3467 ;; to reduce register pressure later on.
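;; For example (illustrative): on Thumb-1, extracting a 5-bit field starting
;; at bit 3 becomes, roughly, a left shift by 24 followed by a logical right
;; shift by 27; Thumb-2 can instead use a single ubfx (see extzv_t2 below).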
3468
3469 (define_expand "extzv"
3470 [(set (match_operand 0 "s_register_operand")
3471 (zero_extract (match_operand 1 "nonimmediate_operand")
3472 (match_operand 2 "const_int_operand")
3473 (match_operand 3 "const_int_operand")))]
3474 "TARGET_THUMB1 || arm_arch_thumb2"
3475 "
3476 {
3477 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3478 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3479
3480 if (arm_arch_thumb2)
3481 {
3482 HOST_WIDE_INT width = INTVAL (operands[2]);
3483 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3484
3485 if (unaligned_access && MEM_P (operands[1])
3486 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
3487 {
3488 rtx base_addr;
3489
3490 if (BYTES_BIG_ENDIAN)
3491 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
3492 - bitpos;
3493
3494 if (width == 32)
3495 {
3496 base_addr = adjust_address (operands[1], SImode,
3497 bitpos / BITS_PER_UNIT);
3498 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3499 }
3500 else
3501 {
3502 rtx dest = operands[0];
3503 rtx tmp = gen_reg_rtx (SImode);
3504
3505 /* We may get a paradoxical subreg here. Strip it off. */
3506 if (GET_CODE (dest) == SUBREG
3507 && GET_MODE (dest) == SImode
3508 && GET_MODE (SUBREG_REG (dest)) == HImode)
3509 dest = SUBREG_REG (dest);
3510
3511 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3512 FAIL;
3513
3514 base_addr = adjust_address (operands[1], HImode,
3515 bitpos / BITS_PER_UNIT);
3516 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
3517 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3518 }
3519 DONE;
3520 }
3521 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
3522 {
3523 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3524 operands[3]));
3525 DONE;
3526 }
3527 else
3528 FAIL;
3529 }
3530
3531 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3532 FAIL;
3533
3534 operands[3] = GEN_INT (rshift);
3535
3536 if (lshift == 0)
3537 {
3538 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3539 DONE;
3540 }
3541
3542 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
3543 operands[3], gen_reg_rtx (SImode)));
3544 DONE;
3545 }"
3546 )
3547
3548 ;; Helper for extzv, for the Thumb-1 register-shifts case.
3549
3550 (define_expand "extzv_t1"
3551 [(set (match_operand:SI 4 "s_register_operand")
3552 (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
3553 (match_operand:SI 2 "const_int_operand")))
3554 (set (match_operand:SI 0 "s_register_operand")
3555 (lshiftrt:SI (match_dup 4)
3556 (match_operand:SI 3 "const_int_operand")))]
3557 "TARGET_THUMB1"
3558 "")
3559
3560 (define_expand "extv"
3561 [(set (match_operand 0 "s_register_operand")
3562 (sign_extract (match_operand 1 "nonimmediate_operand")
3563 (match_operand 2 "const_int_operand")
3564 (match_operand 3 "const_int_operand")))]
3565 "arm_arch_thumb2"
3566 {
3567 HOST_WIDE_INT width = INTVAL (operands[2]);
3568 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3569
3570 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
3571 && (bitpos % BITS_PER_UNIT) == 0)
3572 {
3573 rtx base_addr;
3574
3575 if (BYTES_BIG_ENDIAN)
3576 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
3577
3578 if (width == 32)
3579 {
3580 base_addr = adjust_address (operands[1], SImode,
3581 bitpos / BITS_PER_UNIT);
3582 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3583 }
3584 else
3585 {
3586 rtx dest = operands[0];
3587 rtx tmp = gen_reg_rtx (SImode);
3588
3589 /* We may get a paradoxical subreg here. Strip it off. */
3590 if (GET_CODE (dest) == SUBREG
3591 && GET_MODE (dest) == SImode
3592 && GET_MODE (SUBREG_REG (dest)) == HImode)
3593 dest = SUBREG_REG (dest);
3594
3595 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3596 FAIL;
3597
3598 base_addr = adjust_address (operands[1], HImode,
3599 bitpos / BITS_PER_UNIT);
3600 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
3601 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3602 }
3603
3604 DONE;
3605 }
3606 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3607 FAIL;
3608 else if (GET_MODE (operands[0]) == SImode
3609 && GET_MODE (operands[1]) == SImode)
3610 {
3611 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
3612 operands[3]));
3613 DONE;
3614 }
3615
3616 FAIL;
3617 })
3618
3619 ; Helper to expand register forms of extv with the proper modes.
3620
3621 (define_expand "extv_regsi"
3622 [(set (match_operand:SI 0 "s_register_operand")
3623 (sign_extract:SI (match_operand:SI 1 "s_register_operand")
3624 (match_operand 2 "const_int_operand")
3625 (match_operand 3 "const_int_operand")))]
3626 ""
3627 {
3628 })
3629
3630 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
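; For example (illustrative), with unaligned access enabled a load of the
; "i" field of struct __attribute__((packed)) { char c; int i; } can be done
; with a single (possibly unaligned) LDR rather than four byte loads.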
3631
3632 (define_insn "unaligned_loaddi"
3633 [(set (match_operand:DI 0 "s_register_operand" "=r")
3634 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
3635 UNSPEC_UNALIGNED_LOAD))]
3636 "TARGET_32BIT && TARGET_LDRD"
3637 "*
3638 return output_move_double (operands, true, NULL);
3639 "
3640 [(set_attr "length" "8")
3641 (set_attr "type" "load_8")])
3642
3643 (define_insn "unaligned_loadsi"
3644 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
3645 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
3646 UNSPEC_UNALIGNED_LOAD))]
3647 "unaligned_access"
3648 "@
3649 ldr\t%0, %1\t@ unaligned
3650 ldr%?\t%0, %1\t@ unaligned
3651 ldr%?\t%0, %1\t@ unaligned"
3652 [(set_attr "arch" "t1,t2,32")
3653 (set_attr "length" "2,2,4")
3654 (set_attr "predicable" "no,yes,yes")
3655 (set_attr "predicable_short_it" "no,yes,no")
3656 (set_attr "type" "load_4")])
3657
3658 ;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
3659 ;; address (there's no immediate format). That's tricky to support
3660 ;; here and we don't really need this pattern for that case, so only
3661 ;; enable for 32-bit ISAs.
3662 (define_insn "unaligned_loadhis"
3663 [(set (match_operand:SI 0 "s_register_operand" "=r")
3664 (sign_extend:SI
3665 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
3666 UNSPEC_UNALIGNED_LOAD)))]
3667 "unaligned_access && TARGET_32BIT"
3668 "ldrsh%?\t%0, %1\t@ unaligned"
3669 [(set_attr "predicable" "yes")
3670 (set_attr "type" "load_byte")])
3671
3672 (define_insn "unaligned_loadhiu"
3673 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
3674 (zero_extend:SI
3675 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
3676 UNSPEC_UNALIGNED_LOAD)))]
3677 "unaligned_access"
3678 "@
3679 ldrh\t%0, %1\t@ unaligned
3680 ldrh%?\t%0, %1\t@ unaligned
3681 ldrh%?\t%0, %1\t@ unaligned"
3682 [(set_attr "arch" "t1,t2,32")
3683 (set_attr "length" "2,2,4")
3684 (set_attr "predicable" "no,yes,yes")
3685 (set_attr "predicable_short_it" "no,yes,no")
3686 (set_attr "type" "load_byte")])
3687
3688 (define_insn "unaligned_storedi"
3689 [(set (match_operand:DI 0 "memory_operand" "=m")
3690 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
3691 UNSPEC_UNALIGNED_STORE))]
3692 "TARGET_32BIT && TARGET_LDRD"
3693 "*
3694 return output_move_double (operands, true, NULL);
3695 "
3696 [(set_attr "length" "8")
3697 (set_attr "type" "store_8")])
3698
3699 (define_insn "unaligned_storesi"
3700 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
3701 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
3702 UNSPEC_UNALIGNED_STORE))]
3703 "unaligned_access"
3704 "@
3705 str\t%1, %0\t@ unaligned
3706 str%?\t%1, %0\t@ unaligned
3707 str%?\t%1, %0\t@ unaligned"
3708 [(set_attr "arch" "t1,t2,32")
3709 (set_attr "length" "2,2,4")
3710 (set_attr "predicable" "no,yes,yes")
3711 (set_attr "predicable_short_it" "no,yes,no")
3712 (set_attr "type" "store_4")])
3713
3714 (define_insn "unaligned_storehi"
3715 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
3716 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
3717 UNSPEC_UNALIGNED_STORE))]
3718 "unaligned_access"
3719 "@
3720 strh\t%1, %0\t@ unaligned
3721 strh%?\t%1, %0\t@ unaligned
3722 strh%?\t%1, %0\t@ unaligned"
3723 [(set_attr "arch" "t1,t2,32")
3724 (set_attr "length" "2,2,4")
3725 (set_attr "predicable" "no,yes,yes")
3726 (set_attr "predicable_short_it" "no,yes,no")
3727 (set_attr "type" "store_4")])
3728
3729
3730 (define_insn "*extv_reg"
3731 [(set (match_operand:SI 0 "s_register_operand" "=r")
3732 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3733 (match_operand:SI 2 "const_int_operand" "n")
3734 (match_operand:SI 3 "const_int_operand" "n")))]
3735 "arm_arch_thumb2
3736 && IN_RANGE (INTVAL (operands[3]), 0, 31)
3737 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
3738 "sbfx%?\t%0, %1, %3, %2"
3739 [(set_attr "length" "4")
3740 (set_attr "predicable" "yes")
3741 (set_attr "type" "bfm")]
3742 )
3743
3744 (define_insn "extzv_t2"
3745 [(set (match_operand:SI 0 "s_register_operand" "=r")
3746 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3747 (match_operand:SI 2 "const_int_operand" "n")
3748 (match_operand:SI 3 "const_int_operand" "n")))]
3749 "arm_arch_thumb2
3750 && IN_RANGE (INTVAL (operands[3]), 0, 31)
3751 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
3752 "ubfx%?\t%0, %1, %3, %2"
3753 [(set_attr "length" "4")
3754 (set_attr "predicable" "yes")
3755 (set_attr "type" "bfm")]
3756 )
3757
3758
3759 ;; Division instructions
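;; Both patterns require hardware integer divide (TARGET_IDIV); without it,
;; SImode division is expanded as a library call (e.g. __aeabi_idiv /
;; __aeabi_uidiv on EABI targets).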
3760 (define_insn "divsi3"
3761 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3762 (div:SI (match_operand:SI 1 "s_register_operand" "r,r")
3763 (match_operand:SI 2 "s_register_operand" "r,r")))]
3764 "TARGET_IDIV"
3765 "@
3766 sdiv%?\t%0, %1, %2
3767 sdiv\t%0, %1, %2"
3768 [(set_attr "arch" "32,v8mb")
3769 (set_attr "predicable" "yes")
3770 (set_attr "type" "sdiv")]
3771 )
3772
3773 (define_insn "udivsi3"
3774 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3775 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
3776 (match_operand:SI 2 "s_register_operand" "r,r")))]
3777 "TARGET_IDIV"
3778 "@
3779 udiv%?\t%0, %1, %2
3780 udiv\t%0, %1, %2"
3781 [(set_attr "arch" "32,v8mb")
3782 (set_attr "predicable" "yes")
3783 (set_attr "type" "udiv")]
3784 )
3785
3786 \f
3787 ;; Unary arithmetic insns
3788
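;; negvsi3 and negvdi3 implement negation with a signed-overflow check: the
;; negation is done with a flag-setting (reverse) subtract from zero, and we
;; then branch to the label in operand 2 if the V flag is set, which can only
;; happen when the input is the most negative value.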
3789 (define_expand "negvsi3"
3790 [(match_operand:SI 0 "register_operand")
3791 (match_operand:SI 1 "register_operand")
3792 (match_operand 2 "")]
3793 "TARGET_32BIT"
3794 {
3795 emit_insn (gen_subsi3_compare (operands[0], const0_rtx, operands[1]));
3796 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
3797
3798 DONE;
3799 })
3800
3801 (define_expand "negvdi3"
3802 [(match_operand:DI 0 "s_register_operand")
3803 (match_operand:DI 1 "s_register_operand")
3804 (match_operand 2 "")]
3805 "TARGET_ARM"
3806 {
3807 emit_insn (gen_negdi2_compare (operands[0], operands[1]));
3808 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
3809
3810 DONE;
3811 })
3812
3813
3814 (define_insn "negdi2_compare"
3815 [(set (reg:CC CC_REGNUM)
3816 (compare:CC
3817 (const_int 0)
3818 (match_operand:DI 1 "register_operand" "r,r")))
3819 (set (match_operand:DI 0 "register_operand" "=&r,&r")
3820 (minus:DI (const_int 0) (match_dup 1)))]
3821 "TARGET_ARM"
3822 "@
3823 rsbs\\t%Q0, %Q1, #0;rscs\\t%R0, %R1, #0
3824 rsbs\\t%Q0, %Q1, #0;sbcs\\t%R0, %R1, %R1, lsl #1"
3825 [(set_attr "conds" "set")
3826 (set_attr "arch" "a,t2")
3827 (set_attr "length" "8")
3828 (set_attr "type" "multiple")]
3829 )
3830
3831 (define_expand "negsi2"
3832 [(set (match_operand:SI 0 "s_register_operand")
3833 (neg:SI (match_operand:SI 1 "s_register_operand")))]
3834 "TARGET_EITHER"
3835 ""
3836 )
3837
3838 (define_insn "*arm_negsi2"
3839 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
3840 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
3841 "TARGET_32BIT"
3842 "rsb%?\\t%0, %1, #0"
3843 [(set_attr "predicable" "yes")
3844 (set_attr "predicable_short_it" "yes,no")
3845 (set_attr "arch" "t2,*")
3846 (set_attr "length" "4")
3847 (set_attr "type" "alu_imm")]
3848 )
3849
3850 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
3851 ;; rather than (0 cmp reg). This gives the same results for unsigned
3852 ;; and equality compares, which is what we mostly need here.
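;; For example, ~r1 equals ~0 exactly when r1 is zero, and ~r1 is below ~0
;; (unsigned) exactly when r1 is non-zero, so the EQ/NE and unsigned
;; conditions read the same either way.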
3853 (define_insn "negsi2_0compare"
3854 [(set (reg:CC_RSB CC_REGNUM)
3855 (compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r"))
3856 (const_int -1)))
3857 (set (match_operand:SI 0 "s_register_operand" "=l,r")
3858 (neg:SI (match_dup 1)))]
3859 "TARGET_32BIT"
3860 "@
3861 negs\\t%0, %1
3862 rsbs\\t%0, %1, #0"
3863 [(set_attr "conds" "set")
3864 (set_attr "arch" "t2,*")
3865 (set_attr "length" "2,*")
3866 (set_attr "type" "alus_imm")]
3867 )
3868
3869 (define_insn "negsi2_carryin"
3870 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3871 (minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r"))
3872 (match_operand:SI 2 "arm_borrow_operation" "")))]
3873 "TARGET_32BIT"
3874 "@
3875 rsc\\t%0, %1, #0
3876 sbc\\t%0, %1, %1, lsl #1"
3877 [(set_attr "conds" "use")
3878 (set_attr "arch" "a,t2")
3879 (set_attr "type" "adc_imm,adc_reg")]
3880 )
3881
3882 (define_expand "negsf2"
3883 [(set (match_operand:SF 0 "s_register_operand")
3884 (neg:SF (match_operand:SF 1 "s_register_operand")))]
3885 "TARGET_32BIT && TARGET_HARD_FLOAT"
3886 ""
3887 )
3888
3889 (define_expand "negdf2"
3890 [(set (match_operand:DF 0 "s_register_operand")
3891 (neg:DF (match_operand:DF 1 "s_register_operand")))]
3892 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
3893 "")
3894
3895 ;; abssi2 doesn't really clobber the condition codes if a different register
3896 ;; is being set. To keep things simple, assume during rtl manipulations that
3897 ;; it does, but tell the final scan operator the truth. Similarly for
3898 ;; (neg (abs...))
3899
3900 (define_expand "abssi2"
3901 [(parallel
3902 [(set (match_operand:SI 0 "s_register_operand")
3903 (abs:SI (match_operand:SI 1 "s_register_operand")))
3904 (clobber (match_dup 2))])]
3905 "TARGET_EITHER"
3906 "
3907 if (TARGET_THUMB1)
3908 operands[2] = gen_rtx_SCRATCH (SImode);
3909 else
3910 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3911 ")
3912
3913 (define_insn_and_split "*arm_abssi2"
3914 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3915 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3916 (clobber (reg:CC CC_REGNUM))]
3917 "TARGET_ARM"
3918 "#"
3919 "&& reload_completed"
3920 [(const_int 0)]
3921 {
3922 /* if (which_alternative == 0) */
3923 if (REGNO(operands[0]) == REGNO(operands[1]))
3924 {
3925 /* Emit the pattern:
3926 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3927 [(set (reg:CC CC_REGNUM)
3928 (compare:CC (match_dup 0) (const_int 0)))
3929 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
3930 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
3931 */
3932 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
3933 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
3934 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
3935 (gen_rtx_LT (SImode,
3936 gen_rtx_REG (CCmode, CC_REGNUM),
3937 const0_rtx)),
3938 (gen_rtx_SET (operands[0],
3939 (gen_rtx_MINUS (SImode,
3940 const0_rtx,
3941 operands[1]))))));
3942 DONE;
3943 }
3944 else
3945 {
3946 /* Emit the pattern:
3947 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
3948 [(set (match_dup 0)
3949 (xor:SI (match_dup 1)
3950 (ashiftrt:SI (match_dup 1) (const_int 31))))
3951 (set (match_dup 0)
3952 (minus:SI (match_dup 0)
3953 (ashiftrt:SI (match_dup 1) (const_int 31))))]
3954 */
3955 emit_insn (gen_rtx_SET (operands[0],
3956 gen_rtx_XOR (SImode,
3957 gen_rtx_ASHIFTRT (SImode,
3958 operands[1],
3959 GEN_INT (31)),
3960 operands[1])));
3961 emit_insn (gen_rtx_SET (operands[0],
3962 gen_rtx_MINUS (SImode,
3963 operands[0],
3964 gen_rtx_ASHIFTRT (SImode,
3965 operands[1],
3966 GEN_INT (31)))));
3967 DONE;
3968 }
3969 }
3970 [(set_attr "conds" "clob,*")
3971 (set_attr "shift" "1")
3972 (set_attr "predicable" "no, yes")
3973 (set_attr "length" "8")
3974 (set_attr "type" "multiple")]
3975 )
3976
3977 (define_insn_and_split "*arm_neg_abssi2"
3978 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3979 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3980 (clobber (reg:CC CC_REGNUM))]
3981 "TARGET_ARM"
3982 "#"
3983 "&& reload_completed"
3984 [(const_int 0)]
3985 {
3986 /* if (which_alternative == 0) */
3987 if (REGNO (operands[0]) == REGNO (operands[1]))
3988 {
3989 /* Emit the pattern:
3990 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3991 */
3992 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
3993 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
3994 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
3995 gen_rtx_GT (SImode,
3996 gen_rtx_REG (CCmode, CC_REGNUM),
3997 const0_rtx),
3998 gen_rtx_SET (operands[0],
3999 (gen_rtx_MINUS (SImode,
4000 const0_rtx,
4001 operands[1])))));
4002 }
4003 else
4004 {
4005 /* Emit the pattern:
4006 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
4007 */
4008 emit_insn (gen_rtx_SET (operands[0],
4009 gen_rtx_XOR (SImode,
4010 gen_rtx_ASHIFTRT (SImode,
4011 operands[1],
4012 GEN_INT (31)),
4013 operands[1])));
4014 emit_insn (gen_rtx_SET (operands[0],
4015 gen_rtx_MINUS (SImode,
4016 gen_rtx_ASHIFTRT (SImode,
4017 operands[1],
4018 GEN_INT (31)),
4019 operands[0])));
4020 }
4021 DONE;
4022 }
4023 [(set_attr "conds" "clob,*")
4024 (set_attr "shift" "1")
4025 (set_attr "predicable" "no, yes")
4026 (set_attr "length" "8")
4027 (set_attr "type" "multiple")]
4028 )
4029
4030 (define_expand "abssf2"
4031 [(set (match_operand:SF 0 "s_register_operand")
4032 (abs:SF (match_operand:SF 1 "s_register_operand")))]
4033 "TARGET_32BIT && TARGET_HARD_FLOAT"
4034 "")
4035
4036 (define_expand "absdf2"
4037 [(set (match_operand:DF 0 "s_register_operand")
4038 (abs:DF (match_operand:DF 1 "s_register_operand")))]
4039 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4040 "")
4041
4042 (define_expand "sqrtsf2"
4043 [(set (match_operand:SF 0 "s_register_operand")
4044 (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
4045 "TARGET_32BIT && TARGET_HARD_FLOAT"
4046 "")
4047
4048 (define_expand "sqrtdf2"
4049 [(set (match_operand:DF 0 "s_register_operand")
4050 (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
4051 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4052 "")
4053
4054 (define_expand "one_cmplsi2"
4055 [(set (match_operand:SI 0 "s_register_operand")
4056 (not:SI (match_operand:SI 1 "s_register_operand")))]
4057 "TARGET_EITHER"
4058 ""
4059 )
4060
4061 (define_insn "*arm_one_cmplsi2"
4062 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4063 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4064 "TARGET_32BIT"
4065 "mvn%?\\t%0, %1"
4066 [(set_attr "predicable" "yes")
4067 (set_attr "predicable_short_it" "yes,no")
4068 (set_attr "arch" "t2,*")
4069 (set_attr "length" "4")
4070 (set_attr "type" "mvn_reg")]
4071 )
4072
4073 (define_insn "*notsi_compare0"
4074 [(set (reg:CC_NOOV CC_REGNUM)
4075 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4076 (const_int 0)))
4077 (set (match_operand:SI 0 "s_register_operand" "=r")
4078 (not:SI (match_dup 1)))]
4079 "TARGET_32BIT"
4080 "mvns%?\\t%0, %1"
4081 [(set_attr "conds" "set")
4082 (set_attr "type" "mvn_reg")]
4083 )
4084
4085 (define_insn "*notsi_compare0_scratch"
4086 [(set (reg:CC_NOOV CC_REGNUM)
4087 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4088 (const_int 0)))
4089 (clobber (match_scratch:SI 0 "=r"))]
4090 "TARGET_32BIT"
4091 "mvns%?\\t%0, %1"
4092 [(set_attr "conds" "set")
4093 (set_attr "type" "mvn_reg")]
4094 )
4095 \f
4096 ;; Fixed <--> Floating conversion insns
4097
4098 (define_expand "floatsihf2"
4099 [(set (match_operand:HF 0 "general_operand")
4100 (float:HF (match_operand:SI 1 "general_operand")))]
4101 "TARGET_EITHER"
4102 "
4103 {
4104 rtx op1 = gen_reg_rtx (SFmode);
4105 expand_float (op1, operands[1], 0);
4106 op1 = convert_to_mode (HFmode, op1, 0);
4107 emit_move_insn (operands[0], op1);
4108 DONE;
4109 }"
4110 )
4111
4112 (define_expand "floatdihf2"
4113 [(set (match_operand:HF 0 "general_operand")
4114 (float:HF (match_operand:DI 1 "general_operand")))]
4115 "TARGET_EITHER"
4116 "
4117 {
4118 rtx op1 = gen_reg_rtx (SFmode);
4119 expand_float (op1, operands[1], 0);
4120 op1 = convert_to_mode (HFmode, op1, 0);
4121 emit_move_insn (operands[0], op1);
4122 DONE;
4123 }"
4124 )
4125
4126 (define_expand "floatsisf2"
4127 [(set (match_operand:SF 0 "s_register_operand")
4128 (float:SF (match_operand:SI 1 "s_register_operand")))]
4129 "TARGET_32BIT && TARGET_HARD_FLOAT"
4130 "
4131 ")
4132
4133 (define_expand "floatsidf2"
4134 [(set (match_operand:DF 0 "s_register_operand")
4135 (float:DF (match_operand:SI 1 "s_register_operand")))]
4136 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4137 "
4138 ")
4139
4140 (define_expand "fix_trunchfsi2"
4141 [(set (match_operand:SI 0 "general_operand")
4142 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
4143 "TARGET_EITHER"
4144 "
4145 {
4146 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4147 expand_fix (operands[0], op1, 0);
4148 DONE;
4149 }"
4150 )
4151
4152 (define_expand "fix_trunchfdi2"
4153 [(set (match_operand:DI 0 "general_operand")
4154 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
4155 "TARGET_EITHER"
4156 "
4157 {
4158 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4159 expand_fix (operands[0], op1, 0);
4160 DONE;
4161 }"
4162 )
4163
4164 (define_expand "fix_truncsfsi2"
4165 [(set (match_operand:SI 0 "s_register_operand")
4166 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
4167 "TARGET_32BIT && TARGET_HARD_FLOAT"
4168 "
4169 ")
4170
4171 (define_expand "fix_truncdfsi2"
4172 [(set (match_operand:SI 0 "s_register_operand")
4173 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
4174 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4175 "
4176 ")
4177
4178 ;; Truncation insns
4179
4180 (define_expand "truncdfsf2"
4181 [(set (match_operand:SF 0 "s_register_operand")
4182 (float_truncate:SF
4183 (match_operand:DF 1 "s_register_operand")))]
4184 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4185 ""
4186 )
4187
4188 ;; DFmode to HFmode conversions on targets without a single-step hardware
4189 ;; instruction for it would have to go through SFmode. This is dangerous
4190 ;; as it introduces double rounding.
4191 ;;
4192 ;; Disable this pattern unless we are in an unsafe math mode, or we have
4193 ;; a single-step instruction.
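;; As a concrete illustration: the DFmode value 1 + 2^-11 + 2^-40 is just
;; above the midpoint between the HFmode numbers 1.0 and 1 + 2^-10, so a
;; single-step conversion rounds it up to 1 + 2^-10; rounding to SFmode first
;; gives exactly 1 + 2^-11 (the midpoint), which then rounds-to-even down to
;; 1.0, a different result.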
4194
4195 (define_expand "truncdfhf2"
4196 [(set (match_operand:HF 0 "s_register_operand")
4197 (float_truncate:HF
4198 (match_operand:DF 1 "s_register_operand")))]
4199 "(TARGET_EITHER && flag_unsafe_math_optimizations)
4200 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
4201 {
4202 /* We don't have a direct instruction for this, so we must be in
4203 an unsafe math mode, and going via SFmode. */
4204
4205 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4206 {
4207 rtx op1;
4208 op1 = convert_to_mode (SFmode, operands[1], 0);
4209 op1 = convert_to_mode (HFmode, op1, 0);
4210 emit_move_insn (operands[0], op1);
4211 DONE;
4212 }
4213 /* Otherwise, we will pick this up as a single instruction with
4214 no intermediate rounding. */
4215 }
4216 )
4217 \f
4218 ;; Zero and sign extension instructions.
4219
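;; The DImode extension expanders below work on the two SImode halves of the
;; destination: the low word receives the (possibly extended) source, and the
;; high word is set to zero for a zero extension or to the low word shifted
;; arithmetically right by 31 for a sign extension.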
4220 (define_expand "zero_extend<mode>di2"
4221 [(set (match_operand:DI 0 "s_register_operand" "")
4222 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
4223 "TARGET_32BIT <qhs_zextenddi_cond>"
4224 {
4225 rtx res_lo, res_hi, op0_lo, op0_hi;
4226 res_lo = gen_lowpart (SImode, operands[0]);
4227 res_hi = gen_highpart (SImode, operands[0]);
4228 if (can_create_pseudo_p ())
4229 {
4230 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4231 op0_hi = gen_reg_rtx (SImode);
4232 }
4233 else
4234 {
4235 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4236 op0_hi = res_hi;
4237 }
4238 if (<MODE>mode != SImode)
4239 emit_insn (gen_rtx_SET (op0_lo,
4240 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4241 emit_insn (gen_movsi (op0_hi, const0_rtx));
4242 if (res_lo != op0_lo)
4243 emit_move_insn (res_lo, op0_lo);
4244 if (res_hi != op0_hi)
4245 emit_move_insn (res_hi, op0_hi);
4246 DONE;
4247 }
4248 )
4249
4250 (define_expand "extend<mode>di2"
4251 [(set (match_operand:DI 0 "s_register_operand" "")
4252 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
4253 "TARGET_32BIT <qhs_sextenddi_cond>"
4254 {
4255 rtx res_lo, res_hi, op0_lo, op0_hi;
4256 res_lo = gen_lowpart (SImode, operands[0]);
4257 res_hi = gen_highpart (SImode, operands[0]);
4258 if (can_create_pseudo_p ())
4259 {
4260 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4261 op0_hi = gen_reg_rtx (SImode);
4262 }
4263 else
4264 {
4265 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4266 op0_hi = res_hi;
4267 }
4268 if (<MODE>mode != SImode)
4269 emit_insn (gen_rtx_SET (op0_lo,
4270 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4271 emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
4272 if (res_lo != op0_lo)
4273 emit_move_insn (res_lo, op0_lo);
4274 if (res_hi != op0_hi)
4275 emit_move_insn (res_hi, op0_hi);
4276 DONE;
4277 }
4278 )
4279
4280 ;; Splits for all extensions to DImode
4281 (define_split
4282 [(set (match_operand:DI 0 "s_register_operand" "")
4283 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4284 "TARGET_32BIT"
4285 [(set (match_dup 0) (match_dup 1))]
4286 {
4287 rtx lo_part = gen_lowpart (SImode, operands[0]);
4288 machine_mode src_mode = GET_MODE (operands[1]);
4289
4290 if (src_mode == SImode)
4291 emit_move_insn (lo_part, operands[1]);
4292 else
4293 emit_insn (gen_rtx_SET (lo_part,
4294 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4295 operands[0] = gen_highpart (SImode, operands[0]);
4296 operands[1] = const0_rtx;
4297 })
4298
4299 (define_split
4300 [(set (match_operand:DI 0 "s_register_operand" "")
4301 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4302 "TARGET_32BIT"
4303 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4304 {
4305 rtx lo_part = gen_lowpart (SImode, operands[0]);
4306 machine_mode src_mode = GET_MODE (operands[1]);
4307
4308 if (src_mode == SImode)
4309 emit_move_insn (lo_part, operands[1]);
4310 else
4311 emit_insn (gen_rtx_SET (lo_part,
4312 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4313 operands[1] = lo_part;
4314 operands[0] = gen_highpart (SImode, operands[0]);
4315 })
4316
4317 (define_expand "zero_extendhisi2"
4318 [(set (match_operand:SI 0 "s_register_operand")
4319 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4320 "TARGET_EITHER"
4321 {
4322 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4323 {
4324 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4325 DONE;
4326 }
4327 if (!arm_arch6 && !MEM_P (operands[1]))
4328 {
4329 rtx t = gen_lowpart (SImode, operands[1]);
4330 rtx tmp = gen_reg_rtx (SImode);
4331 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4332 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
4333 DONE;
4334 }
4335 })
4336
4337 (define_split
4338 [(set (match_operand:SI 0 "s_register_operand" "")
4339 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
4340 "!TARGET_THUMB2 && !arm_arch6"
4341 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4342 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4343 {
4344 operands[2] = gen_lowpart (SImode, operands[1]);
4345 })
4346
4347 (define_insn "*arm_zero_extendhisi2"
4348 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4349 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4350 "TARGET_ARM && arm_arch4 && !arm_arch6"
4351 "@
4352 #
4353 ldrh%?\\t%0, %1"
4354 [(set_attr "type" "alu_shift_reg,load_byte")
4355 (set_attr "predicable" "yes")]
4356 )
4357
4358 (define_insn "*arm_zero_extendhisi2_v6"
4359 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4360 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4361 "TARGET_ARM && arm_arch6"
4362 "@
4363 uxth%?\\t%0, %1
4364 ldrh%?\\t%0, %1"
4365 [(set_attr "predicable" "yes")
4366 (set_attr "type" "extend,load_byte")]
4367 )
4368
4369 (define_insn "*arm_zero_extendhisi2addsi"
4370 [(set (match_operand:SI 0 "s_register_operand" "=r")
4371 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4372 (match_operand:SI 2 "s_register_operand" "r")))]
4373 "TARGET_INT_SIMD"
4374 "uxtah%?\\t%0, %2, %1"
4375 [(set_attr "type" "alu_shift_reg")
4376 (set_attr "predicable" "yes")]
4377 )
4378
4379 (define_expand "zero_extendqisi2"
4380 [(set (match_operand:SI 0 "s_register_operand")
4381 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
4382 "TARGET_EITHER"
4383 {
4384 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
4385 {
4386 emit_insn (gen_andsi3 (operands[0],
4387 gen_lowpart (SImode, operands[1]),
4388 GEN_INT (255)));
4389 DONE;
4390 }
4391 if (!arm_arch6 && !MEM_P (operands[1]))
4392 {
4393 rtx t = gen_lowpart (SImode, operands[1]);
4394 rtx tmp = gen_reg_rtx (SImode);
4395 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4396 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
4397 DONE;
4398 }
4399 })
4400
4401 (define_split
4402 [(set (match_operand:SI 0 "s_register_operand" "")
4403 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
4404 "!arm_arch6"
4405 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4406 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4407 {
4408 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4409 if (TARGET_ARM)
4410 {
4411 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
4412 DONE;
4413 }
4414 })
4415
4416 (define_insn "*arm_zero_extendqisi2"
4417 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4418 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4419 "TARGET_ARM && !arm_arch6"
4420 "@
4421 #
4422 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4423 [(set_attr "length" "8,4")
4424 (set_attr "type" "alu_shift_reg,load_byte")
4425 (set_attr "predicable" "yes")]
4426 )
4427
4428 (define_insn "*arm_zero_extendqisi2_v6"
4429 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4430 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
4431 "TARGET_ARM && arm_arch6"
4432 "@
4433 uxtb%?\\t%0, %1
4434 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4435 [(set_attr "type" "extend,load_byte")
4436 (set_attr "predicable" "yes")]
4437 )
4438
4439 (define_insn "*arm_zero_extendqisi2addsi"
4440 [(set (match_operand:SI 0 "s_register_operand" "=r")
4441 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4442 (match_operand:SI 2 "s_register_operand" "r")))]
4443 "TARGET_INT_SIMD"
4444 "uxtab%?\\t%0, %2, %1"
4445 [(set_attr "predicable" "yes")
4446 (set_attr "type" "alu_shift_reg")]
4447 )
4448
4449 (define_split
4450 [(set (match_operand:SI 0 "s_register_operand" "")
4451 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4452 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4453 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
4454 [(set (match_dup 2) (match_dup 1))
4455 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4456 ""
4457 )
4458
4459 (define_split
4460 [(set (match_operand:SI 0 "s_register_operand" "")
4461 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4462 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4463 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
4464 [(set (match_dup 2) (match_dup 1))
4465 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4466 ""
4467 )
4468
4469
4470 (define_split
4471 [(set (match_operand:SI 0 "s_register_operand" "")
4472 (IOR_XOR:SI (and:SI (ashift:SI
4473 (match_operand:SI 1 "s_register_operand" "")
4474 (match_operand:SI 2 "const_int_operand" ""))
4475 (match_operand:SI 3 "const_int_operand" ""))
4476 (zero_extend:SI
4477 (match_operator 5 "subreg_lowpart_operator"
4478 [(match_operand:SI 4 "s_register_operand" "")]))))]
4479 "TARGET_32BIT
4480 && (UINTVAL (operands[3])
4481 == (GET_MODE_MASK (GET_MODE (operands[5]))
4482 & (GET_MODE_MASK (GET_MODE (operands[5]))
4483 << (INTVAL (operands[2])))))"
4484 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
4485 (match_dup 4)))
4486 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4487 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4488 )
4489
4490 (define_insn "*compareqi_eq0"
4491 [(set (reg:CC_Z CC_REGNUM)
4492 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4493 (const_int 0)))]
4494 "TARGET_32BIT"
4495 "tst%?\\t%0, #255"
4496 [(set_attr "conds" "set")
4497 (set_attr "predicable" "yes")
4498 (set_attr "type" "logic_imm")]
4499 )
4500
4501 (define_expand "extendhisi2"
4502 [(set (match_operand:SI 0 "s_register_operand")
4503 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4504 "TARGET_EITHER"
4505 {
4506 if (TARGET_THUMB1)
4507 {
4508 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4509 DONE;
4510 }
4511 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4512 {
4513 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4514 DONE;
4515 }
4516
4517 if (!arm_arch6 && !MEM_P (operands[1]))
4518 {
4519 rtx t = gen_lowpart (SImode, operands[1]);
4520 rtx tmp = gen_reg_rtx (SImode);
4521 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4522 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
4523 DONE;
4524 }
4525 })
4526
4527 (define_split
4528 [(parallel
4529 [(set (match_operand:SI 0 "register_operand" "")
4530 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4531 (clobber (match_scratch:SI 2 ""))])]
4532 "!arm_arch6"
4533 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4534 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4535 {
4536 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4537 })
4538
4539 ;; This pattern will only be used when ldrsh is not available
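;; Roughly (little-endian, register names illustrative), the expansion is:
;;   ldrb  rL, [addr]      @ low byte
;;   ldrb  rH, [addr, #1]  @ high byte
;;   mov   rT, rH, lsl #24
;;   orr   rD, rL, rT, asr #16
;; Big-endian targets swap the roles of the two bytes.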
4540 (define_expand "extendhisi2_mem"
4541 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4542 (set (match_dup 3)
4543 (zero_extend:SI (match_dup 7)))
4544 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4545 (set (match_operand:SI 0 "" "")
4546 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4547 "TARGET_ARM"
4548 "
4549 {
4550 rtx mem1, mem2;
4551 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4552
4553 mem1 = change_address (operands[1], QImode, addr);
4554 mem2 = change_address (operands[1], QImode,
4555 plus_constant (Pmode, addr, 1));
4556 operands[0] = gen_lowpart (SImode, operands[0]);
4557 operands[1] = mem1;
4558 operands[2] = gen_reg_rtx (SImode);
4559 operands[3] = gen_reg_rtx (SImode);
4560 operands[6] = gen_reg_rtx (SImode);
4561 operands[7] = mem2;
4562
4563 if (BYTES_BIG_ENDIAN)
4564 {
4565 operands[4] = operands[2];
4566 operands[5] = operands[3];
4567 }
4568 else
4569 {
4570 operands[4] = operands[3];
4571 operands[5] = operands[2];
4572 }
4573 }"
4574 )
4575
4576 (define_split
4577 [(set (match_operand:SI 0 "register_operand" "")
4578 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4579 "!arm_arch6"
4580 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4581 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4582 {
4583 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4584 })
4585
4586 (define_insn "*arm_extendhisi2"
4587 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4588 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4589 "TARGET_ARM && arm_arch4 && !arm_arch6"
4590 "@
4591 #
4592 ldrsh%?\\t%0, %1"
4593 [(set_attr "length" "8,4")
4594 (set_attr "type" "alu_shift_reg,load_byte")
4595 (set_attr "predicable" "yes")]
4596 )
4597
4598 ;; ??? Check Thumb-2 pool range
4599 (define_insn "*arm_extendhisi2_v6"
4600 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4601 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4602 "TARGET_32BIT && arm_arch6"
4603 "@
4604 sxth%?\\t%0, %1
4605 ldrsh%?\\t%0, %1"
4606 [(set_attr "type" "extend,load_byte")
4607 (set_attr "predicable" "yes")]
4608 )
4609
4610 (define_insn "*arm_extendhisi2addsi"
4611 [(set (match_operand:SI 0 "s_register_operand" "=r")
4612 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4613 (match_operand:SI 2 "s_register_operand" "r")))]
4614 "TARGET_INT_SIMD"
4615 "sxtah%?\\t%0, %2, %1"
4616 [(set_attr "type" "alu_shift_reg")]
4617 )
4618
4619 (define_expand "extendqihi2"
4620 [(set (match_dup 2)
4621 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
4622 (const_int 24)))
4623 (set (match_operand:HI 0 "s_register_operand")
4624 (ashiftrt:SI (match_dup 2)
4625 (const_int 24)))]
4626 "TARGET_ARM"
4627 "
4628 {
4629 if (arm_arch4 && MEM_P (operands[1]))
4630 {
4631 emit_insn (gen_rtx_SET (operands[0],
4632 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4633 DONE;
4634 }
4635 if (!s_register_operand (operands[1], QImode))
4636 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4637 operands[0] = gen_lowpart (SImode, operands[0]);
4638 operands[1] = gen_lowpart (SImode, operands[1]);
4639 operands[2] = gen_reg_rtx (SImode);
4640 }"
4641 )
4642
4643 (define_insn "*arm_extendqihi_insn"
4644 [(set (match_operand:HI 0 "s_register_operand" "=r")
4645 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4646 "TARGET_ARM && arm_arch4"
4647 "ldrsb%?\\t%0, %1"
4648 [(set_attr "type" "load_byte")
4649 (set_attr "predicable" "yes")]
4650 )
4651
4652 (define_expand "extendqisi2"
4653 [(set (match_operand:SI 0 "s_register_operand")
4654 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
4655 "TARGET_EITHER"
4656 {
4657 if (!arm_arch4 && MEM_P (operands[1]))
4658 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4659
4660 if (!arm_arch6 && !MEM_P (operands[1]))
4661 {
4662 rtx t = gen_lowpart (SImode, operands[1]);
4663 rtx tmp = gen_reg_rtx (SImode);
4664 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4665 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
4666 DONE;
4667 }
4668 })
4669
4670 (define_split
4671 [(set (match_operand:SI 0 "register_operand" "")
4672 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
4673 "!arm_arch6"
4674 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4675 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
4676 {
4677 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4678 })
4679
4680 (define_insn "*arm_extendqisi"
4681 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4682 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4683 "TARGET_ARM && arm_arch4 && !arm_arch6"
4684 "@
4685 #
4686 ldrsb%?\\t%0, %1"
4687 [(set_attr "length" "8,4")
4688 (set_attr "type" "alu_shift_reg,load_byte")
4689 (set_attr "predicable" "yes")]
4690 )
4691
4692 (define_insn "*arm_extendqisi_v6"
4693 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4694 (sign_extend:SI
4695 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4696 "TARGET_ARM && arm_arch6"
4697 "@
4698 sxtb%?\\t%0, %1
4699 ldrsb%?\\t%0, %1"
4700 [(set_attr "type" "extend,load_byte")
4701 (set_attr "predicable" "yes")]
4702 )
4703
4704 (define_insn "*arm_extendqisi2addsi"
4705 [(set (match_operand:SI 0 "s_register_operand" "=r")
4706 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4707 (match_operand:SI 2 "s_register_operand" "r")))]
4708 "TARGET_INT_SIMD"
4709 "sxtab%?\\t%0, %2, %1"
4710 [(set_attr "type" "alu_shift_reg")
4711 (set_attr "predicable" "yes")]
4712 )
4713
4714 (define_insn "arm_<sup>xtb16"
4715 [(set (match_operand:SI 0 "s_register_operand" "=r")
4716 (unspec:SI
4717 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
4718 "TARGET_INT_SIMD"
4719 "<sup>xtb16%?\\t%0, %1"
4720 [(set_attr "predicable" "yes")
4721 (set_attr "type" "alu_dsp_reg")])
4722
4723 (define_insn "arm_<simd32_op>"
4724 [(set (match_operand:SI 0 "s_register_operand" "=r")
4725 (unspec:SI
4726 [(match_operand:SI 1 "s_register_operand" "r")
4727 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
4728 "TARGET_INT_SIMD"
4729 "<simd32_op>%?\\t%0, %1, %2"
4730 [(set_attr "predicable" "yes")
4731 (set_attr "type" "alu_dsp_reg")])
4732
4733 (define_insn "arm_usada8"
4734 [(set (match_operand:SI 0 "s_register_operand" "=r")
4735 (unspec:SI
4736 [(match_operand:SI 1 "s_register_operand" "r")
4737 (match_operand:SI 2 "s_register_operand" "r")
4738 (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
4739 "TARGET_INT_SIMD"
4740 "usada8%?\\t%0, %1, %2, %3"
4741 [(set_attr "predicable" "yes")
4742 (set_attr "type" "alu_dsp_reg")])
4743
4744 (define_insn "arm_<simd32_op>"
4745 [(set (match_operand:DI 0 "s_register_operand" "=r")
4746 (unspec:DI
4747 [(match_operand:SI 1 "s_register_operand" "r")
4748 (match_operand:SI 2 "s_register_operand" "r")
4749 (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
4750 "TARGET_INT_SIMD"
4751 "<simd32_op>%?\\t%Q0, %R0, %1, %2"
4752 [(set_attr "predicable" "yes")
4753 (set_attr "type" "smlald")])
4754
4755 (define_expand "extendsfdf2"
4756 [(set (match_operand:DF 0 "s_register_operand")
4757 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
4758 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4759 ""
4760 )
4761
4762 ;; HFmode -> DFmode conversions where we don't have an instruction for it
4763 ;; must go through SFmode.
4764 ;;
4765 ;; This is always safe for an extend: every HFmode value is exactly
;; representable in SFmode, so the widening step cannot introduce rounding.
4766
4767 (define_expand "extendhfdf2"
4768 [(set (match_operand:DF 0 "s_register_operand")
4769 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
4770 "TARGET_EITHER"
4771 {
4772 /* We don't have a direct instruction for this, so go via SFmode. */
4773 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4774 {
4775 rtx op1;
4776 op1 = convert_to_mode (SFmode, operands[1], 0);
4777 op1 = convert_to_mode (DFmode, op1, 0);
4778 emit_insn (gen_movdf (operands[0], op1));
4779 DONE;
4780 }
4781 /* Otherwise, we're done producing RTL and will pick up the correct
4782 pattern to do this with one rounding-step in a single instruction. */
4783 }
4784 )
4785 \f
4786 ;; Move insns (including loads and stores)
4787
4788 ;; XXX Just some ideas about movti.
4789 ;; I don't think these are a good idea on the ARM; there just aren't enough
4790 ;; registers.
4791 ;;(define_expand "loadti"
4792 ;; [(set (match_operand:TI 0 "s_register_operand")
4793 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
4794 ;; "" "")
4795
4796 ;;(define_expand "storeti"
4797 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
4798 ;; (match_operand:TI 1 "s_register_operand"))]
4799 ;; "" "")
4800
4801 ;;(define_expand "movti"
4802 ;; [(set (match_operand:TI 0 "general_operand")
4803 ;; (match_operand:TI 1 "general_operand"))]
4804 ;; ""
4805 ;; "
4806 ;;{
4807 ;; rtx insn;
4808 ;;
4809 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
4810 ;; operands[1] = copy_to_reg (operands[1]);
4811 ;; if (MEM_P (operands[0]))
4812 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4813 ;; else if (MEM_P (operands[1]))
4814 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4815 ;; else
4816 ;; FAIL;
4817 ;;
4818 ;; emit_insn (insn);
4819 ;; DONE;
4820 ;;}")
4821
4822 ;; Recognize garbage generated above.
4823
4824 ;;(define_insn ""
4825 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4826 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4827 ;; ""
4828 ;; "*
4829 ;; {
4830 ;; register mem = (which_alternative < 3);
4831 ;; register const char *template;
4832 ;;
4833 ;; operands[mem] = XEXP (operands[mem], 0);
4834 ;; switch (which_alternative)
4835 ;; {
4836 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4837 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4838 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4839 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4840 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4841 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4842 ;; }
4843 ;; output_asm_insn (template, operands);
4844 ;; return \"\";
4845 ;; }")
4846
4847 (define_expand "movdi"
4848 [(set (match_operand:DI 0 "general_operand")
4849 (match_operand:DI 1 "general_operand"))]
4850 "TARGET_EITHER"
4851 "
4852 gcc_checking_assert (aligned_operand (operands[0], DImode));
4853 gcc_checking_assert (aligned_operand (operands[1], DImode));
4854 if (can_create_pseudo_p ())
4855 {
4856 if (!REG_P (operands[0]))
4857 operands[1] = force_reg (DImode, operands[1]);
4858 }
4859 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
4860 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
4861 {
4862 /* Avoid LDRD's into an odd-numbered register pair in ARM state
4863 when expanding function calls. */
4864 gcc_assert (can_create_pseudo_p ());
4865 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
4866 {
4867 /* Perform load into legal reg pair first, then move. */
4868 rtx reg = gen_reg_rtx (DImode);
4869 emit_insn (gen_movdi (reg, operands[1]));
4870 operands[1] = reg;
4871 }
4872 emit_move_insn (gen_lowpart (SImode, operands[0]),
4873 gen_lowpart (SImode, operands[1]));
4874 emit_move_insn (gen_highpart (SImode, operands[0]),
4875 gen_highpart (SImode, operands[1]));
4876 DONE;
4877 }
4878 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
4879 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
4880 {
4881 /* Avoid STRD's from an odd-numbered register pair in ARM state
4882 when expanding function prologue. */
4883 gcc_assert (can_create_pseudo_p ());
4884 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
4885 ? gen_reg_rtx (DImode)
4886 : operands[0];
4887 emit_move_insn (gen_lowpart (SImode, split_dest),
4888 gen_lowpart (SImode, operands[1]));
4889 emit_move_insn (gen_highpart (SImode, split_dest),
4890 gen_highpart (SImode, operands[1]));
4891 if (split_dest != operands[0])
4892 emit_insn (gen_movdi (operands[0], split_dest));
4893 DONE;
4894 }
4895 "
4896 )
4897
4898 (define_insn "*arm_movdi"
4899 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4900 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4901 "TARGET_32BIT
4902 && !(TARGET_HARD_FLOAT)
4903 && !TARGET_IWMMXT
4904 && ( register_operand (operands[0], DImode)
4905 || register_operand (operands[1], DImode))"
4906 "*
4907 switch (which_alternative)
4908 {
4909 case 0:
4910 case 1:
4911 case 2:
4912 return \"#\";
4913 case 3:
4914 /* Cannot load it directly; split it and load it via MOV / MOVT. */
4915 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
4916 return \"#\";
4917 /* Fall through. */
4918 default:
4919 return output_move_double (operands, true, NULL);
4920 }
4921 "
4922 [(set_attr "length" "8,12,16,8,8")
4923 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
4924 (set_attr "arm_pool_range" "*,*,*,1020,*")
4925 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
4926 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
4927 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
4928 )
4929
4930 (define_split
4931 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4932 (match_operand:ANY64 1 "immediate_operand" ""))]
4933 "TARGET_32BIT
4934 && reload_completed
4935 && (arm_disable_literal_pool
4936 || (arm_const_double_inline_cost (operands[1])
4937 <= arm_max_const_double_inline_cost ()))"
4938 [(const_int 0)]
4939 "
4940 arm_split_constant (SET, SImode, curr_insn,
4941 INTVAL (gen_lowpart (SImode, operands[1])),
4942 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4943 arm_split_constant (SET, SImode, curr_insn,
4944 INTVAL (gen_highpart_mode (SImode,
4945 GET_MODE (operands[0]),
4946 operands[1])),
4947 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4948 DONE;
4949 "
4950 )
4951
4952 ; If optimizing for size, or if we have load delay slots, then
4953 ; we want to split the constant into two separate operations.
4954 ; In both cases this may split a trivial part into a single data op,
4955 ; leaving a single complex constant to load. We can also get longer
4956 ; offsets in an LDR, which means we get a better chance of sharing the pool
4957 ; entries. Finally, we can normally do a better job of scheduling
4958 ; LDR instructions than we can with LDM.
4959 ; This pattern will only match if the one above did not.
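; For example (illustrative), the 64-bit constant 0x1234567800000001 can be
; split into a simple mov of #1 for the low word plus one SImode constant
; load (pool entry or movw/movt, as appropriate) of 0x12345678 for the high
; word.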
4960 (define_split
4961 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4962 (match_operand:ANY64 1 "const_double_operand" ""))]
4963 "TARGET_ARM && reload_completed
4964 && arm_const_double_by_parts (operands[1])"
4965 [(set (match_dup 0) (match_dup 1))
4966 (set (match_dup 2) (match_dup 3))]
4967 "
4968 operands[2] = gen_highpart (SImode, operands[0]);
4969 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4970 operands[1]);
4971 operands[0] = gen_lowpart (SImode, operands[0]);
4972 operands[1] = gen_lowpart (SImode, operands[1]);
4973 "
4974 )
4975
4976 (define_split
4977 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4978 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4979 "TARGET_EITHER && reload_completed"
4980 [(set (match_dup 0) (match_dup 1))
4981 (set (match_dup 2) (match_dup 3))]
4982 "
4983 operands[2] = gen_highpart (SImode, operands[0]);
4984 operands[3] = gen_highpart (SImode, operands[1]);
4985 operands[0] = gen_lowpart (SImode, operands[0]);
4986 operands[1] = gen_lowpart (SImode, operands[1]);
4987
4988 /* Handle a partial overlap. */
4989 if (rtx_equal_p (operands[0], operands[3]))
4990 {
4991 rtx tmp0 = operands[0];
4992 rtx tmp1 = operands[1];
4993
4994 operands[0] = operands[2];
4995 operands[1] = operands[3];
4996 operands[2] = tmp0;
4997 operands[3] = tmp1;
4998 }
4999 "
5000 )
5001
5002 ;; We can't actually do base+index doubleword loads if the index and
5003 ;; destination overlap. Split here so that we at least have a chance to
5004 ;; schedule.
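;; For example, for a load of rd/rd+1 from [rb + ri] where the destination
;; overlaps both address registers, the split first adds rb and ri into rd
;; (the low half of the destination) and then performs a plain doubleword
;; load from that register.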
5005 (define_split
5006 [(set (match_operand:DI 0 "s_register_operand" "")
5007 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
5008 (match_operand:SI 2 "s_register_operand" ""))))]
5009 "TARGET_LDRD
5010 && reg_overlap_mentioned_p (operands[0], operands[1])
5011 && reg_overlap_mentioned_p (operands[0], operands[2])"
5012 [(set (match_dup 4)
5013 (plus:SI (match_dup 1)
5014 (match_dup 2)))
5015 (set (match_dup 0)
5016 (mem:DI (match_dup 4)))]
5017 "
5018 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
5019 "
5020 )
5021
5022 (define_expand "movsi"
5023 [(set (match_operand:SI 0 "general_operand")
5024 (match_operand:SI 1 "general_operand"))]
5025 "TARGET_EITHER"
5026 "
5027 {
5028 rtx base, offset, tmp;
5029
5030 gcc_checking_assert (aligned_operand (operands[0], SImode));
5031 gcc_checking_assert (aligned_operand (operands[1], SImode));
5032 if (TARGET_32BIT || TARGET_HAVE_MOVT)
5033 {
5034 /* Everything except mem = const or mem = mem can be done easily. */
5035 if (MEM_P (operands[0]))
5036 operands[1] = force_reg (SImode, operands[1]);
5037 if (arm_general_register_operand (operands[0], SImode)
5038 && CONST_INT_P (operands[1])
5039 && !(const_ok_for_arm (INTVAL (operands[1]))
5040 || const_ok_for_arm (~INTVAL (operands[1]))))
5041 {
5042 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
5043 {
5044 emit_insn (gen_rtx_SET (operands[0], operands[1]));
5045 DONE;
5046 }
5047 else
5048 {
5049 arm_split_constant (SET, SImode, NULL_RTX,
5050 INTVAL (operands[1]), operands[0], NULL_RTX,
5051 optimize && can_create_pseudo_p ());
5052 DONE;
5053 }
5054 }
5055 }
5056 else /* Target doesn't have MOVT... */
5057 {
5058 if (can_create_pseudo_p ())
5059 {
5060 if (!REG_P (operands[0]))
5061 operands[1] = force_reg (SImode, operands[1]);
5062 }
5063 }
5064
5065 split_const (operands[1], &base, &offset);
5066 if (INTVAL (offset) != 0
5067 && targetm.cannot_force_const_mem (SImode, operands[1]))
5068 {
5069 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5070 emit_move_insn (tmp, base);
5071 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5072 DONE;
5073 }
5074
5075 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
5076
5077 /* Recognize the case where operand[1] is a reference to thread-local
5078 data and load its address to a register. Offsets have been split off
5079 already. */
5080 if (arm_tls_referenced_p (operands[1]))
5081 operands[1] = legitimize_tls_address (operands[1], tmp);
5082 else if (flag_pic
5083 && (CONSTANT_P (operands[1])
5084 || symbol_mentioned_p (operands[1])
5085 || label_mentioned_p (operands[1])))
5086 operands[1] =
5087 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
5088 }
5089 "
5090 )
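;; As an illustration of the constant handling above (values hypothetical):
;; 0x12340000 is not a valid ARM immediate, so arm_split_constant may emit a
;; two-instruction sequence such as
;;   mov r0, #0x12000000
;;   orr r0, r0, #0x340000
;; while MOVT-capable targets can instead keep the constant whole and use a
;; movw/movt pair.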
5091
5092 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5093 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5094 ;; so this does not matter.
5095 (define_insn "*arm_movt"
5096 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
5097 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
5098 (match_operand:SI 2 "general_operand" "i,i")))]
5099 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
5100 "@
5101 movt%?\t%0, #:upper16:%c2
5102 movt\t%0, #:upper16:%c2"
5103 [(set_attr "arch" "32,v8mb")
5104 (set_attr "predicable" "yes")
5105 (set_attr "length" "4")
5106 (set_attr "type" "alu_sreg")]
5107 )
5108
5109 (define_insn "*arm_movsi_insn"
5110 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5111 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5112 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
5113 && ( register_operand (operands[0], SImode)
5114 || register_operand (operands[1], SImode))"
5115 "@
5116 mov%?\\t%0, %1
5117 mov%?\\t%0, %1
5118 mvn%?\\t%0, #%B1
5119 movw%?\\t%0, %1
5120 ldr%?\\t%0, %1
5121 str%?\\t%1, %0"
5122 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
5123 (set_attr "predicable" "yes")
5124 (set_attr "arch" "*,*,*,v6t2,*,*")
5125 (set_attr "pool_range" "*,*,*,*,4096,*")
5126 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5127 )
5128
5129 (define_split
5130 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5131 (match_operand:SI 1 "const_int_operand" ""))]
5132 "(TARGET_32BIT || TARGET_HAVE_MOVT)
5133 && (!(const_ok_for_arm (INTVAL (operands[1]))
5134 || const_ok_for_arm (~INTVAL (operands[1]))))"
5135 [(clobber (const_int 0))]
5136 "
5137 arm_split_constant (SET, SImode, NULL_RTX,
5138 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5139 DONE;
5140 "
5141 )
5142
5143 ;; A normal way to do (symbol + offset) requires at least three instructions
5144 ;; (depending on how big the offset is), as below:
5145 ;; movw r0, #:lower16:g
5146 ;; movt r0, #:upper16:g
5147 ;; adds r0, #4
5148 ;;
5149 ;; A better way would be:
5150 ;; movw r0, #:lower16:g+4
5151 ;; movt r0, #:upper16:g+4
5152 ;;
5153 ;; The limitation of this approach is that the offset must fit in a 16-bit
5154 ;; signed value, because the current assembler only supports REL-type
5155 ;; relocations for such a case.  If the more powerful RELA type is supported
5156 ;; in the future, this pattern should be updated to use the better sequence.
5157 (define_split
5158 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5159 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
5160 (match_operand:SI 2 "const_int_operand" ""))))]
5161 "TARGET_THUMB
5162 && TARGET_HAVE_MOVT
5163 && arm_disable_literal_pool
5164 && reload_completed
5165 && GET_CODE (operands[1]) == SYMBOL_REF"
5166 [(clobber (const_int 0))]
5167 "
5168 int offset = INTVAL (operands[2]);
5169
5170 if (offset < -0x8000 || offset > 0x7fff)
5171 {
5172 arm_emit_movpair (operands[0], operands[1]);
5173 emit_insn (gen_rtx_SET (operands[0],
5174 gen_rtx_PLUS (SImode, operands[0], operands[2])));
5175 }
5176 else
5177 {
5178 rtx op = gen_rtx_CONST (SImode,
5179 gen_rtx_PLUS (SImode, operands[1], operands[2]));
5180 arm_emit_movpair (operands[0], op);
5181 }
5182 "
5183 )
5184
5185 ;; Split symbol_refs at a late stage (after cprop), instead of generating the
5186 ;; movt/movw pair directly at expand time.  Otherwise the corresponding high
5187 ;; and lo_sum would be merged back into a memory load by cprop.  When the
5188 ;; default is to prefer movt/movw rather than a load from the constant pool,
5189 ;; this gives better performance.
5190 (define_split
5191 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5192 (match_operand:SI 1 "general_operand" ""))]
5193 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
5194 && !target_word_relocations
5195 && !arm_tls_referenced_p (operands[1])"
5196 [(clobber (const_int 0))]
5197 {
5198 arm_emit_movpair (operands[0], operands[1]);
5199 DONE;
5200 })
5201
5202 ;; When generating pic, we need to load the symbol offset into a register.
5203 ;; So that the optimizer does not confuse this with a normal symbol load
5204 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5205 ;; since that is the only type of relocation we can use.
5206
5207 ;; Wrap calculation of the whole PIC address in a single pattern for the
5208 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5209 ;; a PIC address involves two loads from memory, so we want to CSE it
5210 ;; as often as possible.
5211 ;; This pattern will be split into one of the pic_load_addr_* patterns
5212 ;; and a move after GCSE optimizations.
5213 ;;
5214 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
5215 (define_expand "calculate_pic_address"
5216 [(set (match_operand:SI 0 "register_operand")
5217 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
5218 (unspec:SI [(match_operand:SI 2 "" "")]
5219 UNSPEC_PIC_SYM))))]
5220 "flag_pic"
5221 )
5222
5223 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5224 (define_split
5225 [(set (match_operand:SI 0 "register_operand" "")
5226 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5227 (unspec:SI [(match_operand:SI 2 "" "")]
5228 UNSPEC_PIC_SYM))))]
5229 "flag_pic"
5230 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5231 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5232 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5233 )
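;; As a rough sketch (labels and registers hypothetical), under -fPIC this
;; computes the address of a global in two loads:
;;   ldr r3, .LCP0        @ GOT offset of the symbol, from the literal pool
;;   ldr r0, [rPIC, r3]   @ load the symbol's address from its GOT slot
;; where rPIC stands for the PIC base register set up in the prologue.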
5234
5235 ;; operand1 is the memory address to go into
5236 ;; pic_load_addr_32bit.
5237 ;; operand2 is the PIC label to be emitted
5238 ;; from pic_add_dot_plus_eight.
5239 ;; We do this to allow hoisting of the entire insn.
5240 (define_insn_and_split "pic_load_addr_unified"
5241 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
5242 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
5243 (match_operand:SI 2 "" "")]
5244 UNSPEC_PIC_UNIFIED))]
5245 "flag_pic"
5246 "#"
5247 "&& reload_completed"
5248 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
5249 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
5250 (match_dup 2)] UNSPEC_PIC_BASE))]
5251 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
5252 [(set_attr "type" "load_4,load_4,load_4")
5253 (set_attr "pool_range" "4096,4094,1022")
5254 (set_attr "neg_pool_range" "4084,0,0")
5255 (set_attr "arch" "a,t2,t1")
5256 (set_attr "length" "8,6,4")]
5257 )
5258
5259 ;; The rather odd constraints on the following are to force reload to leave
5260 ;; the insn alone, and to force the minipool generation pass to then move
5261 ;; the GOT symbol to memory.
5262
5263 (define_insn "pic_load_addr_32bit"
5264 [(set (match_operand:SI 0 "s_register_operand" "=r")
5265 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5266 "TARGET_32BIT && flag_pic"
5267 "ldr%?\\t%0, %1"
5268 [(set_attr "type" "load_4")
5269 (set (attr "pool_range")
5270 (if_then_else (eq_attr "is_thumb" "no")
5271 (const_int 4096)
5272 (const_int 4094)))
5273 (set (attr "neg_pool_range")
5274 (if_then_else (eq_attr "is_thumb" "no")
5275 (const_int 4084)
5276 (const_int 0)))]
5277 )
5278
5279 (define_insn "pic_load_addr_thumb1"
5280 [(set (match_operand:SI 0 "s_register_operand" "=l")
5281 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5282 "TARGET_THUMB1 && flag_pic"
5283 "ldr\\t%0, %1"
5284 [(set_attr "type" "load_4")
5285 (set (attr "pool_range") (const_int 1018))]
5286 )
5287
5288 (define_insn "pic_add_dot_plus_four"
5289 [(set (match_operand:SI 0 "register_operand" "=r")
5290 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5291 (const_int 4)
5292 (match_operand 2 "" "")]
5293 UNSPEC_PIC_BASE))]
5294 "TARGET_THUMB"
5295 "*
5296 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5297 INTVAL (operands[2]));
5298 return \"add\\t%0, %|pc\";
5299 "
5300 [(set_attr "length" "2")
5301 (set_attr "type" "alu_sreg")]
5302 )
5303
5304 (define_insn "pic_add_dot_plus_eight"
5305 [(set (match_operand:SI 0 "register_operand" "=r")
5306 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5307 (const_int 8)
5308 (match_operand 2 "" "")]
5309 UNSPEC_PIC_BASE))]
5310 "TARGET_ARM"
5311 "*
5312 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5313 INTVAL (operands[2]));
5314 return \"add%?\\t%0, %|pc, %1\";
5315 "
5316 [(set_attr "predicable" "yes")
5317 (set_attr "type" "alu_sreg")]
5318 )
5319
5320 (define_insn "tls_load_dot_plus_eight"
5321 [(set (match_operand:SI 0 "register_operand" "=r")
5322 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5323 (const_int 8)
5324 (match_operand 2 "" "")]
5325 UNSPEC_PIC_BASE)))]
5326 "TARGET_ARM"
5327 "*
5328 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5329 INTVAL (operands[2]));
5330 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5331 "
5332 [(set_attr "predicable" "yes")
5333 (set_attr "type" "load_4")]
5334 )
5335
5336 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5337 ;; followed by a load. These sequences can be crunched down to
5338 ;; tls_load_dot_plus_eight by a peephole.
5339
5340 (define_peephole2
5341 [(set (match_operand:SI 0 "register_operand" "")
5342 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5343 (const_int 8)
5344 (match_operand 1 "" "")]
5345 UNSPEC_PIC_BASE))
5346 (set (match_operand:SI 2 "arm_general_register_operand" "")
5347 (mem:SI (match_dup 0)))]
5348 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5349 [(set (match_dup 2)
5350 (mem:SI (unspec:SI [(match_dup 3)
5351 (const_int 8)
5352 (match_dup 1)]
5353 UNSPEC_PIC_BASE)))]
5354 ""
5355 )
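;; E.g. (registers hypothetical) the pair
;;   add r3, pc, r3
;;   ldr r0, [r3]
;; is collapsed into a single "ldr r0, [pc, r3]" when r3 is dead after the
;; load.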
5356
5357 (define_insn "pic_offset_arm"
5358 [(set (match_operand:SI 0 "register_operand" "=r")
5359 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5360 (unspec:SI [(match_operand:SI 2 "" "X")]
5361 UNSPEC_PIC_OFFSET))))]
5362 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5363 "ldr%?\\t%0, [%1,%2]"
5364 [(set_attr "type" "load_4")]
5365 )
5366
5367 (define_expand "builtin_setjmp_receiver"
5368 [(label_ref (match_operand 0 "" ""))]
5369 "flag_pic"
5370 "
5371 {
5372 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5373 register. */
5374 if (arm_pic_register != INVALID_REGNUM)
5375 arm_load_pic_register (1UL << 3, NULL_RTX);
5376 DONE;
5377 }")
5378
5379 ;; If copying one reg to another we can set the condition codes according to
5380 ;; its value.  Such a move is common after a return from a subroutine when the
5381 ;; result is being tested against zero.
5382
5383 (define_insn "*movsi_compare0"
5384 [(set (reg:CC CC_REGNUM)
5385 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5386 (const_int 0)))
5387 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5388 (match_dup 1))]
5389 "TARGET_32BIT"
5390 "@
5391 cmp%?\\t%0, #0
5392 subs%?\\t%0, %1, #0"
5393 [(set_attr "conds" "set")
5394 (set_attr "type" "alus_imm,alus_imm")]
5395 )
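;; For example, "mov r0, r1" followed by "cmp r0, #0" can instead be emitted by
;; the second alternative as a single "subs r0, r1, #0", which performs the
;; copy and sets the flags in one instruction (registers hypothetical).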
5396
5397 ;; Subroutine to store a half word from a register into memory.
5398 ;; Operand 0 is the source register (HImode)
5399 ;; Operand 1 is the destination memory location (an HImode MEM)
5400
5401 ;; In both this routine and the next, we must be careful not to spill
5402 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5403 ;; can generate unrecognizable rtl.
5404
5405 (define_expand "storehi"
5406 [;; store the low byte
5407 (set (match_operand 1 "" "") (match_dup 3))
5408 ;; extract the high byte
5409 (set (match_dup 2)
5410 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5411 ;; store the high byte
5412 (set (match_dup 4) (match_dup 5))]
5413 "TARGET_ARM"
5414 "
5415 {
5416 rtx op1 = operands[1];
5417 rtx addr = XEXP (op1, 0);
5418 enum rtx_code code = GET_CODE (addr);
5419
5420 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5421 || code == MINUS)
5422 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5423
5424 operands[4] = adjust_address (op1, QImode, 1);
5425 operands[1] = adjust_address (operands[1], QImode, 0);
5426 operands[3] = gen_lowpart (QImode, operands[0]);
5427 operands[0] = gen_lowpart (SImode, operands[0]);
5428 operands[2] = gen_reg_rtx (SImode);
5429 operands[5] = gen_lowpart (QImode, operands[2]);
5430 }"
5431 )
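;; Roughly, on a little-endian target without ARMv4 halfword stores this
;; expands to something like (registers hypothetical):
;;   strb r1, [r2]            @ low byte
;;   mov  r3, r1, asr #8
;;   strb r3, [r2, #1]        @ high byte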
5432
5433 (define_expand "storehi_bigend"
5434 [(set (match_dup 4) (match_dup 3))
5435 (set (match_dup 2)
5436 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5437 (set (match_operand 1 "" "") (match_dup 5))]
5438 "TARGET_ARM"
5439 "
5440 {
5441 rtx op1 = operands[1];
5442 rtx addr = XEXP (op1, 0);
5443 enum rtx_code code = GET_CODE (addr);
5444
5445 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5446 || code == MINUS)
5447 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5448
5449 operands[4] = adjust_address (op1, QImode, 1);
5450 operands[1] = adjust_address (operands[1], QImode, 0);
5451 operands[3] = gen_lowpart (QImode, operands[0]);
5452 operands[0] = gen_lowpart (SImode, operands[0]);
5453 operands[2] = gen_reg_rtx (SImode);
5454 operands[5] = gen_lowpart (QImode, operands[2]);
5455 }"
5456 )
5457
5458 ;; Subroutine to store a half word integer constant into memory.
5459 (define_expand "storeinthi"
5460 [(set (match_operand 0 "" "")
5461 (match_operand 1 "" ""))
5462 (set (match_dup 3) (match_dup 2))]
5463 "TARGET_ARM"
5464 "
5465 {
5466 HOST_WIDE_INT value = INTVAL (operands[1]);
5467 rtx addr = XEXP (operands[0], 0);
5468 rtx op0 = operands[0];
5469 enum rtx_code code = GET_CODE (addr);
5470
5471 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5472 || code == MINUS)
5473 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5474
5475 operands[1] = gen_reg_rtx (SImode);
5476 if (BYTES_BIG_ENDIAN)
5477 {
5478 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5479 if ((value & 255) == ((value >> 8) & 255))
5480 operands[2] = operands[1];
5481 else
5482 {
5483 operands[2] = gen_reg_rtx (SImode);
5484 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5485 }
5486 }
5487 else
5488 {
5489 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5490 if ((value & 255) == ((value >> 8) & 255))
5491 operands[2] = operands[1];
5492 else
5493 {
5494 operands[2] = gen_reg_rtx (SImode);
5495 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5496 }
5497 }
5498
5499 operands[3] = adjust_address (op0, QImode, 1);
5500 operands[0] = adjust_address (operands[0], QImode, 0);
5501 operands[2] = gen_lowpart (QImode, operands[2]);
5502 operands[1] = gen_lowpart (QImode, operands[1]);
5503 }"
5504 )
5505
5506 (define_expand "storehi_single_op"
5507 [(set (match_operand:HI 0 "memory_operand")
5508 (match_operand:HI 1 "general_operand"))]
5509 "TARGET_32BIT && arm_arch4"
5510 "
5511 if (!s_register_operand (operands[1], HImode))
5512 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5513 "
5514 )
5515
5516 (define_expand "movhi"
5517 [(set (match_operand:HI 0 "general_operand")
5518 (match_operand:HI 1 "general_operand"))]
5519 "TARGET_EITHER"
5520 "
5521 gcc_checking_assert (aligned_operand (operands[0], HImode));
5522 gcc_checking_assert (aligned_operand (operands[1], HImode));
5523 if (TARGET_ARM)
5524 {
5525 if (can_create_pseudo_p ())
5526 {
5527 if (MEM_P (operands[0]))
5528 {
5529 if (arm_arch4)
5530 {
5531 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5532 DONE;
5533 }
5534 if (CONST_INT_P (operands[1]))
5535 emit_insn (gen_storeinthi (operands[0], operands[1]));
5536 else
5537 {
5538 if (MEM_P (operands[1]))
5539 operands[1] = force_reg (HImode, operands[1]);
5540 if (BYTES_BIG_ENDIAN)
5541 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5542 else
5543 emit_insn (gen_storehi (operands[1], operands[0]));
5544 }
5545 DONE;
5546 }
5547 /* Sign extend a constant, and keep it in an SImode reg. */
5548 else if (CONST_INT_P (operands[1]))
5549 {
5550 rtx reg = gen_reg_rtx (SImode);
5551 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5552
5553 /* If the constant is already valid, leave it alone. */
5554 if (!const_ok_for_arm (val))
5555 {
5556 /* If setting all the top bits will make the constant
5557 loadable in a single instruction, then set them.
5558 Otherwise, sign extend the number. */
5559
5560 if (const_ok_for_arm (~(val | ~0xffff)))
5561 val |= ~0xffff;
5562 else if (val & 0x8000)
5563 val |= ~0xffff;
5564 }
5565
5566 emit_insn (gen_movsi (reg, GEN_INT (val)));
5567 operands[1] = gen_lowpart (HImode, reg);
5568 }
5569 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5570 && MEM_P (operands[1]))
5571 {
5572 rtx reg = gen_reg_rtx (SImode);
5573
5574 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5575 operands[1] = gen_lowpart (HImode, reg);
5576 }
5577 else if (!arm_arch4)
5578 {
5579 if (MEM_P (operands[1]))
5580 {
5581 rtx base;
5582 rtx offset = const0_rtx;
5583 rtx reg = gen_reg_rtx (SImode);
5584
5585 if ((REG_P (base = XEXP (operands[1], 0))
5586 || (GET_CODE (base) == PLUS
5587 && (CONST_INT_P (offset = XEXP (base, 1)))
5588 && ((INTVAL(offset) & 1) != 1)
5589 && REG_P (base = XEXP (base, 0))))
5590 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5591 {
5592 rtx new_rtx;
5593
5594 new_rtx = widen_memory_access (operands[1], SImode,
5595 ((INTVAL (offset) & ~3)
5596 - INTVAL (offset)));
5597 emit_insn (gen_movsi (reg, new_rtx));
5598 if (((INTVAL (offset) & 2) != 0)
5599 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5600 {
5601 rtx reg2 = gen_reg_rtx (SImode);
5602
5603 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5604 reg = reg2;
5605 }
5606 }
5607 else
5608 emit_insn (gen_movhi_bytes (reg, operands[1]));
5609
5610 operands[1] = gen_lowpart (HImode, reg);
5611 }
5612 }
5613 }
5614 /* Handle loading a large integer during reload. */
5615 else if (CONST_INT_P (operands[1])
5616 && !const_ok_for_arm (INTVAL (operands[1]))
5617 && !const_ok_for_arm (~INTVAL (operands[1])))
5618 {
5619 /* Writing a constant to memory needs a scratch, which should
5620 be handled with SECONDARY_RELOADs. */
5621 gcc_assert (REG_P (operands[0]));
5622
5623 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5624 emit_insn (gen_movsi (operands[0], operands[1]));
5625 DONE;
5626 }
5627 }
5628 else if (TARGET_THUMB2)
5629 {
5630 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5631 if (can_create_pseudo_p ())
5632 {
5633 if (!REG_P (operands[0]))
5634 operands[1] = force_reg (HImode, operands[1]);
5635 /* Zero extend a constant, and keep it in an SImode reg. */
5636 else if (CONST_INT_P (operands[1]))
5637 {
5638 rtx reg = gen_reg_rtx (SImode);
5639 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5640
5641 emit_insn (gen_movsi (reg, GEN_INT (val)));
5642 operands[1] = gen_lowpart (HImode, reg);
5643 }
5644 }
5645 }
5646 else /* TARGET_THUMB1 */
5647 {
5648 if (can_create_pseudo_p ())
5649 {
5650 if (CONST_INT_P (operands[1]))
5651 {
5652 rtx reg = gen_reg_rtx (SImode);
5653
5654 emit_insn (gen_movsi (reg, operands[1]));
5655 operands[1] = gen_lowpart (HImode, reg);
5656 }
5657
5658 /* ??? We shouldn't really get invalid addresses here, but this can
5659 happen if we are passed a SP (never OK for HImode/QImode) or
5660 virtual register (also rejected as illegitimate for HImode/QImode)
5661 relative address. */
5662 /* ??? This should perhaps be fixed elsewhere, for instance, in
5663 fixup_stack_1, by checking for other kinds of invalid addresses,
5664 e.g. a bare reference to a virtual register. This may confuse the
5665 alpha though, which must handle this case differently. */
5666 if (MEM_P (operands[0])
5667 && !memory_address_p (GET_MODE (operands[0]),
5668 XEXP (operands[0], 0)))
5669 operands[0]
5670 = replace_equiv_address (operands[0],
5671 copy_to_reg (XEXP (operands[0], 0)));
5672
5673 if (MEM_P (operands[1])
5674 && !memory_address_p (GET_MODE (operands[1]),
5675 XEXP (operands[1], 0)))
5676 operands[1]
5677 = replace_equiv_address (operands[1],
5678 copy_to_reg (XEXP (operands[1], 0)));
5679
5680 if (MEM_P (operands[1]) && optimize > 0)
5681 {
5682 rtx reg = gen_reg_rtx (SImode);
5683
5684 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5685 operands[1] = gen_lowpart (HImode, reg);
5686 }
5687
5688 if (MEM_P (operands[0]))
5689 operands[1] = force_reg (HImode, operands[1]);
5690 }
5691 else if (CONST_INT_P (operands[1])
5692 && !satisfies_constraint_I (operands[1]))
5693 {
5694 /* Handle loading a large integer during reload. */
5695
5696 /* Writing a constant to memory needs a scratch, which should
5697 be handled with SECONDARY_RELOADs. */
5698 gcc_assert (REG_P (operands[0]));
5699
5700 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5701 emit_insn (gen_movsi (operands[0], operands[1]));
5702 DONE;
5703 }
5704 }
5705 "
5706 )
5707
5708 (define_expand "movhi_bytes"
5709 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5710 (set (match_dup 3)
5711 (zero_extend:SI (match_dup 6)))
5712 (set (match_operand:SI 0 "" "")
5713 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5714 "TARGET_ARM"
5715 "
5716 {
5717 rtx mem1, mem2;
5718 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5719
5720 mem1 = change_address (operands[1], QImode, addr);
5721 mem2 = change_address (operands[1], QImode,
5722 plus_constant (Pmode, addr, 1));
5723 operands[0] = gen_lowpart (SImode, operands[0]);
5724 operands[1] = mem1;
5725 operands[2] = gen_reg_rtx (SImode);
5726 operands[3] = gen_reg_rtx (SImode);
5727 operands[6] = mem2;
5728
5729 if (BYTES_BIG_ENDIAN)
5730 {
5731 operands[4] = operands[2];
5732 operands[5] = operands[3];
5733 }
5734 else
5735 {
5736 operands[4] = operands[3];
5737 operands[5] = operands[2];
5738 }
5739 }"
5740 )
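;; Sketch of the resulting little-endian sequence (registers hypothetical):
;;   ldrb r2, [r1]            @ low byte
;;   ldrb r3, [r1, #1]        @ high byte
;;   orr  r0, r2, r3, lsl #8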
5741
5742 (define_expand "movhi_bigend"
5743 [(set (match_dup 2)
5744 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
5745 (const_int 16)))
5746 (set (match_dup 3)
5747 (ashiftrt:SI (match_dup 2) (const_int 16)))
5748 (set (match_operand:HI 0 "s_register_operand")
5749 (match_dup 4))]
5750 "TARGET_ARM"
5751 "
5752 operands[2] = gen_reg_rtx (SImode);
5753 operands[3] = gen_reg_rtx (SImode);
5754 operands[4] = gen_lowpart (HImode, operands[3]);
5755 "
5756 )
5757
5758 ;; Pattern to recognize the insn generated by the default case above.
5759 (define_insn "*movhi_insn_arch4"
5760 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
5761 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))]
5762 "TARGET_ARM
5763 && arm_arch4 && !TARGET_HARD_FLOAT
5764 && (register_operand (operands[0], HImode)
5765 || register_operand (operands[1], HImode))"
5766 "@
5767 mov%?\\t%0, %1\\t%@ movhi
5768 mvn%?\\t%0, #%B1\\t%@ movhi
5769 movw%?\\t%0, %L1\\t%@ movhi
5770 strh%?\\t%1, %0\\t%@ movhi
5771 ldrh%?\\t%0, %1\\t%@ movhi"
5772 [(set_attr "predicable" "yes")
5773 (set_attr "pool_range" "*,*,*,*,256")
5774 (set_attr "neg_pool_range" "*,*,*,*,244")
5775 (set_attr "arch" "*,*,v6t2,*,*")
5776 (set_attr_alternative "type"
5777 [(if_then_else (match_operand 1 "const_int_operand" "")
5778 (const_string "mov_imm" )
5779 (const_string "mov_reg"))
5780 (const_string "mvn_imm")
5781 (const_string "mov_imm")
5782 (const_string "store_4")
5783 (const_string "load_4")])]
5784 )
5785
5786 (define_insn "*movhi_bytes"
5787 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
5788 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))]
5789 "TARGET_ARM && !TARGET_HARD_FLOAT"
5790 "@
5791 mov%?\\t%0, %1\\t%@ movhi
5792 mov%?\\t%0, %1\\t%@ movhi
5793 mvn%?\\t%0, #%B1\\t%@ movhi"
5794 [(set_attr "predicable" "yes")
5795 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
5796 )
5797
5798 ;; We use a DImode scratch because we may occasionally need an additional
5799 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5800 ;; to take any notice of the "o" constraint on the reload_memory_operand operand.
5801 ;; The reload_in<m> and reload_out<m> patterns require special constraints
5802 ;; to be correctly handled in default_secondary_reload function.
5803 (define_expand "reload_outhi"
5804 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5805 (match_operand:HI 1 "s_register_operand" "r")
5806 (match_operand:DI 2 "s_register_operand" "=&l")])]
5807 "TARGET_EITHER"
5808 "if (TARGET_ARM)
5809 arm_reload_out_hi (operands);
5810 else
5811 thumb_reload_out_hi (operands);
5812 DONE;
5813 "
5814 )
5815
5816 (define_expand "reload_inhi"
5817 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5818 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5819 (match_operand:DI 2 "s_register_operand" "=&r")])]
5820 "TARGET_EITHER"
5821 "
5822 if (TARGET_ARM)
5823 arm_reload_in_hi (operands);
5824 else
5825 thumb_reload_out_hi (operands);
5826 DONE;
5827 ")
5828
5829 (define_expand "movqi"
5830 [(set (match_operand:QI 0 "general_operand")
5831 (match_operand:QI 1 "general_operand"))]
5832 "TARGET_EITHER"
5833 "
5834 /* Everything except mem = const or mem = mem can be done easily.  */
5835
5836 if (can_create_pseudo_p ())
5837 {
5838 if (CONST_INT_P (operands[1]))
5839 {
5840 rtx reg = gen_reg_rtx (SImode);
5841
5842 /* For thumb we want an unsigned immediate, then we are more likely
5843 to be able to use a movs insn. */
5844 if (TARGET_THUMB)
5845 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
5846
5847 emit_insn (gen_movsi (reg, operands[1]));
5848 operands[1] = gen_lowpart (QImode, reg);
5849 }
5850
5851 if (TARGET_THUMB)
5852 {
5853 /* ??? We shouldn't really get invalid addresses here, but this can
5854 happen if we are passed a SP (never OK for HImode/QImode) or
5855 virtual register (also rejected as illegitimate for HImode/QImode)
5856 relative address. */
5857 /* ??? This should perhaps be fixed elsewhere, for instance, in
5858 fixup_stack_1, by checking for other kinds of invalid addresses,
5859 e.g. a bare reference to a virtual register. This may confuse the
5860 alpha though, which must handle this case differently. */
5861 if (MEM_P (operands[0])
5862 && !memory_address_p (GET_MODE (operands[0]),
5863 XEXP (operands[0], 0)))
5864 operands[0]
5865 = replace_equiv_address (operands[0],
5866 copy_to_reg (XEXP (operands[0], 0)));
5867 if (MEM_P (operands[1])
5868 && !memory_address_p (GET_MODE (operands[1]),
5869 XEXP (operands[1], 0)))
5870 operands[1]
5871 = replace_equiv_address (operands[1],
5872 copy_to_reg (XEXP (operands[1], 0)));
5873 }
5874
5875 if (MEM_P (operands[1]) && optimize > 0)
5876 {
5877 rtx reg = gen_reg_rtx (SImode);
5878
5879 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5880 operands[1] = gen_lowpart (QImode, reg);
5881 }
5882
5883 if (MEM_P (operands[0]))
5884 operands[1] = force_reg (QImode, operands[1]);
5885 }
5886 else if (TARGET_THUMB
5887 && CONST_INT_P (operands[1])
5888 && !satisfies_constraint_I (operands[1]))
5889 {
5890 /* Handle loading a large integer during reload. */
5891
5892 /* Writing a constant to memory needs a scratch, which should
5893 be handled with SECONDARY_RELOADs. */
5894 gcc_assert (REG_P (operands[0]));
5895
5896 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5897 emit_insn (gen_movsi (operands[0], operands[1]));
5898 DONE;
5899 }
5900 "
5901 )
5902
5903 (define_insn "*arm_movqi_insn"
5904 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
5905 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
5906 "TARGET_32BIT
5907 && ( register_operand (operands[0], QImode)
5908 || register_operand (operands[1], QImode))"
5909 "@
5910 mov%?\\t%0, %1
5911 mov%?\\t%0, %1
5912 mov%?\\t%0, %1
5913 mov%?\\t%0, %1
5914 mvn%?\\t%0, #%B1
5915 ldrb%?\\t%0, %1
5916 strb%?\\t%1, %0
5917 ldrb%?\\t%0, %1
5918 strb%?\\t%1, %0"
5919 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
5920 (set_attr "predicable" "yes")
5921 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
5922 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
5923 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
5924 )
5925
5926 ;; HFmode moves
5927 (define_expand "movhf"
5928 [(set (match_operand:HF 0 "general_operand")
5929 (match_operand:HF 1 "general_operand"))]
5930 "TARGET_EITHER"
5931 "
5932 gcc_checking_assert (aligned_operand (operands[0], HFmode));
5933 gcc_checking_assert (aligned_operand (operands[1], HFmode));
5934 if (TARGET_32BIT)
5935 {
5936 if (MEM_P (operands[0]))
5937 operands[1] = force_reg (HFmode, operands[1]);
5938 }
5939 else /* TARGET_THUMB1 */
5940 {
5941 if (can_create_pseudo_p ())
5942 {
5943 if (!REG_P (operands[0]))
5944 operands[1] = force_reg (HFmode, operands[1]);
5945 }
5946 }
5947 "
5948 )
5949
5950 (define_insn "*arm32_movhf"
5951 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
5952 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
5953 "TARGET_32BIT && !TARGET_HARD_FLOAT
5954 && ( s_register_operand (operands[0], HFmode)
5955 || s_register_operand (operands[1], HFmode))"
5956 "*
5957 switch (which_alternative)
5958 {
5959 case 0: /* ARM register from memory */
5960 return \"ldrh%?\\t%0, %1\\t%@ __fp16\";
5961 case 1: /* memory from ARM register */
5962 return \"strh%?\\t%1, %0\\t%@ __fp16\";
5963 case 2: /* ARM register from ARM register */
5964 return \"mov%?\\t%0, %1\\t%@ __fp16\";
5965 case 3: /* ARM register from constant */
5966 {
5967 long bits;
5968 rtx ops[4];
5969
5970 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
5971 HFmode);
5972 ops[0] = operands[0];
5973 ops[1] = GEN_INT (bits);
5974 ops[2] = GEN_INT (bits & 0xff00);
5975 ops[3] = GEN_INT (bits & 0x00ff);
5976
5977 if (arm_arch_thumb2)
5978 output_asm_insn (\"movw%?\\t%0, %1\", ops);
5979 else
5980 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
5981 return \"\";
5982 }
5983 default:
5984 gcc_unreachable ();
5985 }
5986 "
5987 [(set_attr "conds" "unconditional")
5988 (set_attr "type" "load_4,store_4,mov_reg,multiple")
5989 (set_attr "length" "4,4,4,8")
5990 (set_attr "predicable" "yes")]
5991 )
5992
5993 (define_expand "movsf"
5994 [(set (match_operand:SF 0 "general_operand")
5995 (match_operand:SF 1 "general_operand"))]
5996 "TARGET_EITHER"
5997 "
5998 gcc_checking_assert (aligned_operand (operands[0], SFmode));
5999 gcc_checking_assert (aligned_operand (operands[1], SFmode));
6000 if (TARGET_32BIT)
6001 {
6002 if (MEM_P (operands[0]))
6003 operands[1] = force_reg (SFmode, operands[1]);
6004 }
6005 else /* TARGET_THUMB1 */
6006 {
6007 if (can_create_pseudo_p ())
6008 {
6009 if (!REG_P (operands[0]))
6010 operands[1] = force_reg (SFmode, operands[1]);
6011 }
6012 }
6013
6014 /* Cannot load it directly, generate a load with clobber so that it can be
6015 loaded via GPR with MOV / MOVT. */
6016 if (arm_disable_literal_pool
6017 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6018 && CONST_DOUBLE_P (operands[1])
6019 && TARGET_HARD_FLOAT
6020 && !vfp3_const_double_rtx (operands[1]))
6021 {
6022 rtx clobreg = gen_reg_rtx (SFmode);
6023 emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
6024 clobreg));
6025 DONE;
6026 }
6027 "
6028 )
6029
6030 ;; Transform a floating-point move of a constant into a core register into
6031 ;; an SImode operation.
6032 (define_split
6033 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6034 (match_operand:SF 1 "immediate_operand" ""))]
6035 "TARGET_EITHER
6036 && reload_completed
6037 && CONST_DOUBLE_P (operands[1])"
6038 [(set (match_dup 2) (match_dup 3))]
6039 "
6040 operands[2] = gen_lowpart (SImode, operands[0]);
6041 operands[3] = gen_lowpart (SImode, operands[1]);
6042 if (operands[2] == 0 || operands[3] == 0)
6043 FAIL;
6044 "
6045 )
6046
6047 (define_insn "*arm_movsf_soft_insn"
6048 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6049 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6050 "TARGET_32BIT
6051 && TARGET_SOFT_FLOAT
6052 && (!MEM_P (operands[0])
6053 || register_operand (operands[1], SFmode))"
6054 {
6055 switch (which_alternative)
6056 {
6057 case 0: return \"mov%?\\t%0, %1\";
6058 case 1:
6059 /* Cannot load it directly, split to load it via MOV / MOVT. */
6060 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6061 return \"#\";
6062 return \"ldr%?\\t%0, %1\\t%@ float\";
6063 case 2: return \"str%?\\t%1, %0\\t%@ float\";
6064 default: gcc_unreachable ();
6065 }
6066 }
6067 [(set_attr "predicable" "yes")
6068 (set_attr "type" "mov_reg,load_4,store_4")
6069 (set_attr "arm_pool_range" "*,4096,*")
6070 (set_attr "thumb2_pool_range" "*,4094,*")
6071 (set_attr "arm_neg_pool_range" "*,4084,*")
6072 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6073 )
6074
6075 ;; Splitter for the above.
6076 (define_split
6077 [(set (match_operand:SF 0 "s_register_operand")
6078 (match_operand:SF 1 "const_double_operand"))]
6079 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6080 [(const_int 0)]
6081 {
6082 long buf;
6083 real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
6084 rtx cst = gen_int_mode (buf, SImode);
6085 emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
6086 DONE;
6087 }
6088 )
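;; For example, with literal pools disabled under a soft-float ABI, moving the
;; constant 1.0f into a core register becomes an SImode move of its bit
;; pattern 0x3f800000, which the normal movsi machinery then materialises in
;; an integer register.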
6089
6090 (define_expand "movdf"
6091 [(set (match_operand:DF 0 "general_operand")
6092 (match_operand:DF 1 "general_operand"))]
6093 "TARGET_EITHER"
6094 "
6095 gcc_checking_assert (aligned_operand (operands[0], DFmode));
6096 gcc_checking_assert (aligned_operand (operands[1], DFmode));
6097 if (TARGET_32BIT)
6098 {
6099 if (MEM_P (operands[0]))
6100 operands[1] = force_reg (DFmode, operands[1]);
6101 }
6102 else /* TARGET_THUMB */
6103 {
6104 if (can_create_pseudo_p ())
6105 {
6106 if (!REG_P (operands[0]))
6107 operands[1] = force_reg (DFmode, operands[1]);
6108 }
6109 }
6110
6111 /* Cannot load it directly, generate a load with clobber so that it can be
6112 loaded via GPR with MOV / MOVT. */
6113 if (arm_disable_literal_pool
6114 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6115 && CONSTANT_P (operands[1])
6116 && TARGET_HARD_FLOAT
6117 && !arm_const_double_rtx (operands[1])
6118 && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
6119 {
6120 rtx clobreg = gen_reg_rtx (DFmode);
6121 emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
6122 clobreg));
6123 DONE;
6124 }
6125 "
6126 )
6127
6128 ;; Reloading a df mode value stored in integer regs to memory can require a
6129 ;; scratch reg.
6130 ;; Another reload_out<m> pattern that requires special constraints.
6131 (define_expand "reload_outdf"
6132 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6133 (match_operand:DF 1 "s_register_operand" "r")
6134 (match_operand:SI 2 "s_register_operand" "=&r")]
6135 "TARGET_THUMB2"
6136 "
6137 {
6138 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6139
6140 if (code == REG)
6141 operands[2] = XEXP (operands[0], 0);
6142 else if (code == POST_INC || code == PRE_DEC)
6143 {
6144 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6145 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6146 emit_insn (gen_movdi (operands[0], operands[1]));
6147 DONE;
6148 }
6149 else if (code == PRE_INC)
6150 {
6151 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6152
6153 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6154 operands[2] = reg;
6155 }
6156 else if (code == POST_DEC)
6157 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6158 else
6159 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6160 XEXP (XEXP (operands[0], 0), 1)));
6161
6162 emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
6163 operands[1]));
6164
6165 if (code == POST_DEC)
6166 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6167
6168 DONE;
6169 }"
6170 )
6171
6172 (define_insn "*movdf_soft_insn"
6173 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6174 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6175 "TARGET_32BIT && TARGET_SOFT_FLOAT
6176 && ( register_operand (operands[0], DFmode)
6177 || register_operand (operands[1], DFmode))"
6178 "*
6179 switch (which_alternative)
6180 {
6181 case 0:
6182 case 1:
6183 case 2:
6184 return \"#\";
6185 case 3:
6186 /* Cannot load it directly, split to load it via MOV / MOVT. */
6187 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6188 return \"#\";
6189 /* Fall through. */
6190 default:
6191 return output_move_double (operands, true, NULL);
6192 }
6193 "
6194 [(set_attr "length" "8,12,16,8,8")
6195 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6196 (set_attr "arm_pool_range" "*,*,*,1020,*")
6197 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
6198 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6199 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6200 )
6201
6202 ;; Splitter for the above.
6203 (define_split
6204 [(set (match_operand:DF 0 "s_register_operand")
6205 (match_operand:DF 1 "const_double_operand"))]
6206 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6207 [(const_int 0)]
6208 {
6209 long buf[2];
6210 int order = BYTES_BIG_ENDIAN ? 1 : 0;
6211 real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
6212 unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
6213 ival |= (zext_hwi (buf[1 - order], 32) << 32);
6214 rtx cst = gen_int_mode (ival, DImode);
6215 emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
6216 DONE;
6217 }
6218 )
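;; Similarly for DFmode: 1.0 has the 64-bit pattern 0x3ff0000000000000, so the
;; split emits a single DImode constant move; which 32-bit half forms the low
;; word of that DImode value depends on BYTES_BIG_ENDIAN, hence the "order"
;; indexing above.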
6219 \f
6220
6221 ;; load- and store-multiple insns
6222 ;; The ARM can load/store any set of registers, provided that they are in
6223 ;; ascending order, but these expanders assume a contiguous set.
6224
6225 (define_expand "load_multiple"
6226 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6227 (match_operand:SI 1 "" ""))
6228 (use (match_operand:SI 2 "" ""))])]
6229 "TARGET_32BIT"
6230 {
6231 HOST_WIDE_INT offset = 0;
6232
6233 /* Support only fixed point registers. */
6234 if (!CONST_INT_P (operands[2])
6235 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6236 || INTVAL (operands[2]) < 2
6237 || !MEM_P (operands[1])
6238 || !REG_P (operands[0])
6239 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6240 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6241 FAIL;
6242
6243 operands[3]
6244 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6245 INTVAL (operands[2]),
6246 force_reg (SImode, XEXP (operands[1], 0)),
6247 FALSE, operands[1], &offset);
6248 })
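;; A successful expansion loads a contiguous block of registers in one insn,
;; e.g. (hypothetical) three words starting at the address in r0:
;;   ldmia r0, {r4, r5, r6}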
6249
6250 (define_expand "store_multiple"
6251 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6252 (match_operand:SI 1 "" ""))
6253 (use (match_operand:SI 2 "" ""))])]
6254 "TARGET_32BIT"
6255 {
6256 HOST_WIDE_INT offset = 0;
6257
6258 /* Support only fixed point registers. */
6259 if (!CONST_INT_P (operands[2])
6260 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6261 || INTVAL (operands[2]) < 2
6262 || !REG_P (operands[1])
6263 || !MEM_P (operands[0])
6264 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6265 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6266 FAIL;
6267
6268 operands[3]
6269 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6270 INTVAL (operands[2]),
6271 force_reg (SImode, XEXP (operands[0], 0)),
6272 FALSE, operands[0], &offset);
6273 })
6274
6275
6276 (define_expand "setmemsi"
6277 [(match_operand:BLK 0 "general_operand")
6278 (match_operand:SI 1 "const_int_operand")
6279 (match_operand:SI 2 "const_int_operand")
6280 (match_operand:SI 3 "const_int_operand")]
6281 "TARGET_32BIT"
6282 {
6283 if (arm_gen_setmem (operands))
6284 DONE;
6285
6286 FAIL;
6287 })
6288
6289
6290 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6291 ;; We could let this apply to smaller blocks, but it clobbers so many
6292 ;; registers that there is then probably a better way.
6293
6294 (define_expand "cpymemqi"
6295 [(match_operand:BLK 0 "general_operand")
6296 (match_operand:BLK 1 "general_operand")
6297 (match_operand:SI 2 "const_int_operand")
6298 (match_operand:SI 3 "const_int_operand")]
6299 ""
6300 "
6301 if (TARGET_32BIT)
6302 {
6303 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
6304 && !optimize_function_for_size_p (cfun))
6305 {
6306 if (gen_cpymem_ldrd_strd (operands))
6307 DONE;
6308 FAIL;
6309 }
6310
6311 if (arm_gen_cpymemqi (operands))
6312 DONE;
6313 FAIL;
6314 }
6315 else /* TARGET_THUMB1 */
6316 {
6317 if ( INTVAL (operands[3]) != 4
6318 || INTVAL (operands[2]) > 48)
6319 FAIL;
6320
6321 thumb_expand_cpymemqi (operands);
6322 DONE;
6323 }
6324 "
6325 )
6326 \f
6327
6328 ;; Compare & branch insns
6329 ;; The range calculations are done as follows:
6330 ;; For forward branches, the address calculation returns the address of
6331 ;; the next instruction. This is 2 beyond the branch instruction.
6332 ;; For backward branches, the address calculation returns the address of
6333 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6334 ;; instruction for the shortest sequence, and 4 before the branch instruction
6335 ;; if we have to jump around an unconditional branch.
6336 ;; To the basic branch range the PC offset must be added (this is +4).
6337 ;; So for forward branches we have
6338 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6339 ;; And for backward branches we have
6340 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6341 ;;
6342 ;; For a 'b'       pos_range = 2046, neg_range = -2048 giving (-2040 -> 2048).
6343 ;; For a 'b<cond>' pos_range = 254,  neg_range = -256  giving (-250 -> 256).
6344
6345 (define_expand "cbranchsi4"
6346 [(set (pc) (if_then_else
6347 (match_operator 0 "expandable_comparison_operator"
6348 [(match_operand:SI 1 "s_register_operand")
6349 (match_operand:SI 2 "nonmemory_operand")])
6350 (label_ref (match_operand 3 "" ""))
6351 (pc)))]
6352 "TARGET_EITHER"
6353 "
6354 if (!TARGET_THUMB1)
6355 {
6356 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6357 FAIL;
6358 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6359 operands[3]));
6360 DONE;
6361 }
6362 if (thumb1_cmpneg_operand (operands[2], SImode))
6363 {
6364 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6365 operands[3], operands[0]));
6366 DONE;
6367 }
6368 if (!thumb1_cmp_operand (operands[2], SImode))
6369 operands[2] = force_reg (SImode, operands[2]);
6370 ")
6371
6372 (define_expand "cbranchsf4"
6373 [(set (pc) (if_then_else
6374 (match_operator 0 "expandable_comparison_operator"
6375 [(match_operand:SF 1 "s_register_operand")
6376 (match_operand:SF 2 "vfp_compare_operand")])
6377 (label_ref (match_operand 3 "" ""))
6378 (pc)))]
6379 "TARGET_32BIT && TARGET_HARD_FLOAT"
6380 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6381 operands[3])); DONE;"
6382 )
6383
6384 (define_expand "cbranchdf4"
6385 [(set (pc) (if_then_else
6386 (match_operator 0 "expandable_comparison_operator"
6387 [(match_operand:DF 1 "s_register_operand")
6388 (match_operand:DF 2 "vfp_compare_operand")])
6389 (label_ref (match_operand 3 "" ""))
6390 (pc)))]
6391 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6392 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6393 operands[3])); DONE;"
6394 )
6395
6396 (define_expand "cbranchdi4"
6397 [(set (pc) (if_then_else
6398 (match_operator 0 "expandable_comparison_operator"
6399 [(match_operand:DI 1 "s_register_operand")
6400 (match_operand:DI 2 "reg_or_int_operand")])
6401 (label_ref (match_operand 3 "" ""))
6402 (pc)))]
6403 "TARGET_32BIT"
6404 "{
6405 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6406 FAIL;
6407 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6408 operands[3]));
6409 DONE;
6410 }"
6411 )
6412
6413 ;; Comparison and test insns
6414
6415 (define_insn "*arm_cmpsi_insn"
6416 [(set (reg:CC CC_REGNUM)
6417 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
6418 (match_operand:SI 1 "arm_add_operand" "Py,r,r,I,L")))]
6419 "TARGET_32BIT"
6420 "@
6421 cmp%?\\t%0, %1
6422 cmp%?\\t%0, %1
6423 cmp%?\\t%0, %1
6424 cmp%?\\t%0, %1
6425 cmn%?\\t%0, #%n1"
6426 [(set_attr "conds" "set")
6427 (set_attr "arch" "t2,t2,any,any,any")
6428 (set_attr "length" "2,2,4,4,4")
6429 (set_attr "predicable" "yes")
6430 (set_attr "predicable_short_it" "yes,yes,yes,no,no")
6431 (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
6432 )
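;; The final alternative covers constants whose negation is a valid immediate;
;; e.g. a comparison against -1 (register hypothetical) becomes
;;   cmn r0, #1
;; since CMN sets the flags for r0 - (-1).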
6433
6434 (define_insn "*cmpsi_shiftsi"
6435 [(set (reg:CC CC_REGNUM)
6436 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r,r")
6437 (match_operator:SI 3 "shift_operator"
6438 [(match_operand:SI 1 "s_register_operand" "r,r,r")
6439 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])))]
6440 "TARGET_32BIT"
6441 "cmp\\t%0, %1%S3"
6442 [(set_attr "conds" "set")
6443 (set_attr "shift" "1")
6444 (set_attr "arch" "32,a,a")
6445 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
6446
6447 (define_insn "*cmpsi_shiftsi_swp"
6448 [(set (reg:CC_SWP CC_REGNUM)
6449 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
6450 [(match_operand:SI 1 "s_register_operand" "r,r,r")
6451 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])
6452 (match_operand:SI 0 "s_register_operand" "r,r,r")))]
6453 "TARGET_32BIT"
6454 "cmp%?\\t%0, %1%S3"
6455 [(set_attr "conds" "set")
6456 (set_attr "shift" "1")
6457 (set_attr "arch" "32,a,a")
6458 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
6459
6460 (define_insn "*arm_cmpsi_negshiftsi_si"
6461 [(set (reg:CC_Z CC_REGNUM)
6462 (compare:CC_Z
6463 (neg:SI (match_operator:SI 1 "shift_operator"
6464 [(match_operand:SI 2 "s_register_operand" "r")
6465 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
6466 (match_operand:SI 0 "s_register_operand" "r")))]
6467 "TARGET_ARM"
6468 "cmn%?\\t%0, %2%S1"
6469 [(set_attr "conds" "set")
6470 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
6471 (const_string "alus_shift_imm")
6472 (const_string "alus_shift_reg")))
6473 (set_attr "predicable" "yes")]
6474 )
6475
6476 ;; DImode comparisons. The generic code generates branches that
6477 ;; if-conversion cannot reduce to a conditional compare, so we generate
6478 ;; the conditional-compare sequence directly.
6479
6480 (define_insn "*arm_cmpdi_insn"
6481 [(set (reg:CC_NCV CC_REGNUM)
6482 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
6483 (match_operand:DI 1 "arm_di_operand" "rDi")))
6484 (clobber (match_scratch:SI 2 "=r"))]
6485 "TARGET_32BIT"
6486 "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
6487 [(set_attr "conds" "set")
6488 (set_attr "length" "8")
6489 (set_attr "type" "multiple")]
6490 )
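;; E.g. comparing the DImode value in r0:r1 with the one in r2:r3 (registers
;; hypothetical) gives
;;   cmp  r0, r2
;;   sbcs ip, r1, r3
;; the subtract-with-carry of the high words leaves the condition flags
;; describing the full 64-bit comparison while its result is discarded.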
6491
6492 (define_insn_and_split "*arm_cmpdi_unsigned"
6493 [(set (reg:CC_CZ CC_REGNUM)
6494 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "l,r,r,r")
6495 (match_operand:DI 1 "arm_di_operand" "Py,r,Di,rDi")))]
6496
6497 "TARGET_32BIT"
6498 "#" ; "cmp\\t%R0, %R1\;it eq\;cmpeq\\t%Q0, %Q1"
6499 "&& reload_completed"
6500 [(set (reg:CC CC_REGNUM)
6501 (compare:CC (match_dup 2) (match_dup 3)))
6502 (cond_exec (eq:SI (reg:CC CC_REGNUM) (const_int 0))
6503 (set (reg:CC CC_REGNUM)
6504 (compare:CC (match_dup 0) (match_dup 1))))]
6505 {
6506 operands[2] = gen_highpart (SImode, operands[0]);
6507 operands[0] = gen_lowpart (SImode, operands[0]);
6508 if (CONST_INT_P (operands[1]))
6509 operands[3] = gen_highpart_mode (SImode, DImode, operands[1]);
6510 else
6511 operands[3] = gen_highpart (SImode, operands[1]);
6512 operands[1] = gen_lowpart (SImode, operands[1]);
6513 }
6514 [(set_attr "conds" "set")
6515 (set_attr "enabled_for_short_it" "yes,yes,no,*")
6516 (set_attr "arch" "t2,t2,t2,a")
6517 (set_attr "length" "6,6,10,8")
6518 (set_attr "type" "multiple")]
6519 )
6520
6521 ; This insn allows redundant compares to be removed by cse; nothing should
6522 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
6523 ; is deleted later on. The match_dup will match the mode here, so that
6524 ; mode changes of the condition codes aren't lost by this even though we don't
6525 ; specify what they are.
6526
6527 (define_insn "*deleted_compare"
6528 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
6529 "TARGET_32BIT"
6530 "\\t%@ deleted compare"
6531 [(set_attr "conds" "set")
6532 (set_attr "length" "0")
6533 (set_attr "type" "no_insn")]
6534 )
6535
6536 \f
6537 ;; Conditional branch insns
6538
6539 (define_expand "cbranch_cc"
6540 [(set (pc)
6541 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
6542 (match_operand 2 "" "")])
6543 (label_ref (match_operand 3 "" ""))
6544 (pc)))]
6545 "TARGET_32BIT"
6546 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
6547 operands[1], operands[2], NULL_RTX);
6548 operands[2] = const0_rtx;"
6549 )
6550
6551 ;;
6552 ;; Patterns to match conditional branch insns.
6553 ;;
6554
6555 (define_insn "arm_cond_branch"
6556 [(set (pc)
6557 (if_then_else (match_operator 1 "arm_comparison_operator"
6558 [(match_operand 2 "cc_register" "") (const_int 0)])
6559 (label_ref (match_operand 0 "" ""))
6560 (pc)))]
6561 "TARGET_32BIT"
6562 "*
6563 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
6564 {
6565 arm_ccfsm_state += 2;
6566 return \"\";
6567 }
6568 return \"b%d1\\t%l0\";
6569 "
6570 [(set_attr "conds" "use")
6571 (set_attr "type" "branch")
6572 (set (attr "length")
6573 (if_then_else
6574 (and (match_test "TARGET_THUMB2")
6575 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
6576 (le (minus (match_dup 0) (pc)) (const_int 256))))
6577 (const_int 2)
6578 (const_int 4)))]
6579 )
6580
6581 (define_insn "*arm_cond_branch_reversed"
6582 [(set (pc)
6583 (if_then_else (match_operator 1 "arm_comparison_operator"
6584 [(match_operand 2 "cc_register" "") (const_int 0)])
6585 (pc)
6586 (label_ref (match_operand 0 "" ""))))]
6587 "TARGET_32BIT"
6588 "*
6589 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
6590 {
6591 arm_ccfsm_state += 2;
6592 return \"\";
6593 }
6594 return \"b%D1\\t%l0\";
6595 "
6596 [(set_attr "conds" "use")
6597 (set_attr "type" "branch")
6598 (set (attr "length")
6599 (if_then_else
6600 (and (match_test "TARGET_THUMB2")
6601 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
6602 (le (minus (match_dup 0) (pc)) (const_int 256))))
6603 (const_int 2)
6604 (const_int 4)))]
6605 )
6606
6607 \f
6608
6609 ; scc insns
6610
6611 (define_expand "cstore_cc"
6612 [(set (match_operand:SI 0 "s_register_operand")
6613 (match_operator:SI 1 "" [(match_operand 2 "" "")
6614 (match_operand 3 "" "")]))]
6615 "TARGET_32BIT"
6616 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
6617 operands[2], operands[3], NULL_RTX);
6618 operands[3] = const0_rtx;"
6619 )
6620
6621 (define_insn_and_split "*mov_scc"
6622 [(set (match_operand:SI 0 "s_register_operand" "=r")
6623 (match_operator:SI 1 "arm_comparison_operator_mode"
6624 [(match_operand 2 "cc_register" "") (const_int 0)]))]
6625 "TARGET_ARM"
6626 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
6627 "TARGET_ARM"
6628 [(set (match_dup 0)
6629 (if_then_else:SI (match_dup 1)
6630 (const_int 1)
6631 (const_int 0)))]
6632 ""
6633 [(set_attr "conds" "use")
6634 (set_attr "length" "8")
6635 (set_attr "type" "multiple")]
6636 )
6637
6638 (define_insn "*negscc_borrow"
6639 [(set (match_operand:SI 0 "s_register_operand" "=r")
6640 (neg:SI (match_operand:SI 1 "arm_borrow_operation" "")))]
6641 "TARGET_32BIT"
6642 "sbc\\t%0, %0, %0"
6643 [(set_attr "conds" "use")
6644 (set_attr "length" "4")
6645 (set_attr "type" "adc_reg")]
6646 )
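;; SBC with identical operands computes 0 - 0 - !C, so for example
;; (registers hypothetical):
;;   cmp r1, r2        @ unsigned r1 < r2 clears the carry (borrow)
;;   sbc r0, r0, r0    @ r0 = -1 if there was a borrow, 0 otherwise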
6647
6648 (define_insn_and_split "*mov_negscc"
6649 [(set (match_operand:SI 0 "s_register_operand" "=r")
6650 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
6651 [(match_operand 2 "cc_register" "") (const_int 0)])))]
6652 "TARGET_ARM && !arm_borrow_operation (operands[1], SImode)"
6653 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
6654 "&& true"
6655 [(set (match_dup 0)
6656 (if_then_else:SI (match_dup 1)
6657 (match_dup 3)
6658 (const_int 0)))]
6659 {
6660 operands[3] = GEN_INT (~0);
6661 }
6662 [(set_attr "conds" "use")
6663 (set_attr "length" "8")
6664 (set_attr "type" "multiple")]
6665 )
6666
6667 (define_insn_and_split "*mov_notscc"
6668 [(set (match_operand:SI 0 "s_register_operand" "=r")
6669 (not:SI (match_operator:SI 1 "arm_comparison_operator"
6670 [(match_operand 2 "cc_register" "") (const_int 0)])))]
6671 "TARGET_ARM"
6672 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
6673 "TARGET_ARM"
6674 [(set (match_dup 0)
6675 (if_then_else:SI (match_dup 1)
6676 (match_dup 3)
6677 (match_dup 4)))]
6678 {
6679 operands[3] = GEN_INT (~1);
6680 operands[4] = GEN_INT (~0);
6681 }
6682 [(set_attr "conds" "use")
6683 (set_attr "length" "8")
6684 (set_attr "type" "multiple")]
6685 )
6686
6687 (define_expand "cstoresi4"
6688 [(set (match_operand:SI 0 "s_register_operand")
6689 (match_operator:SI 1 "expandable_comparison_operator"
6690 [(match_operand:SI 2 "s_register_operand")
6691 (match_operand:SI 3 "reg_or_int_operand")]))]
6692 "TARGET_32BIT || TARGET_THUMB1"
6693 "{
6694 rtx op3, scratch, scratch2;
6695
6696 if (!TARGET_THUMB1)
6697 {
6698 if (!arm_add_operand (operands[3], SImode))
6699 operands[3] = force_reg (SImode, operands[3]);
6700 emit_insn (gen_cstore_cc (operands[0], operands[1],
6701 operands[2], operands[3]));
6702 DONE;
6703 }
6704
6705 if (operands[3] == const0_rtx)
6706 {
6707 switch (GET_CODE (operands[1]))
6708 {
6709 case EQ:
6710 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
6711 break;
6712
6713 case NE:
6714 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
6715 break;
6716
6717 case LE:
6718 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
6719 NULL_RTX, 0, OPTAB_WIDEN);
6720 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
6721 NULL_RTX, 0, OPTAB_WIDEN);
6722 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
6723 operands[0], 1, OPTAB_WIDEN);
6724 break;
6725
6726 case GE:
6727 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
6728 NULL_RTX, 1);
6729 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
6730 operands[0], 1, OPTAB_WIDEN);
6731 break;
6732
6733 case GT:
6734 scratch = expand_binop (SImode, ashr_optab, operands[2],
6735 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
6736 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
6737 NULL_RTX, 0, OPTAB_WIDEN);
6738 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
6739 0, OPTAB_WIDEN);
6740 break;
6741
6742 /* LT is handled by generic code. No need for unsigned with 0. */
6743 default:
6744 FAIL;
6745 }
6746 DONE;
6747 }
6748
6749 switch (GET_CODE (operands[1]))
6750 {
6751 case EQ:
6752 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
6753 NULL_RTX, 0, OPTAB_WIDEN);
6754 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
6755 break;
6756
6757 case NE:
6758 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
6759 NULL_RTX, 0, OPTAB_WIDEN);
6760 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
6761 break;
6762
6763 case LE:
6764 op3 = force_reg (SImode, operands[3]);
6765
6766 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
6767 NULL_RTX, 1, OPTAB_WIDEN);
6768 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
6769 NULL_RTX, 0, OPTAB_WIDEN);
6770 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
6771 op3, operands[2]));
6772 break;
6773
6774 case GE:
6775 op3 = operands[3];
6776 if (!thumb1_cmp_operand (op3, SImode))
6777 op3 = force_reg (SImode, op3);
6778 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
6779 NULL_RTX, 0, OPTAB_WIDEN);
6780 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
6781 NULL_RTX, 1, OPTAB_WIDEN);
6782 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
6783 operands[2], op3));
6784 break;
6785
6786 case LEU:
6787 op3 = force_reg (SImode, operands[3]);
6788 scratch = force_reg (SImode, const0_rtx);
6789 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
6790 op3, operands[2]));
6791 break;
6792
6793 case GEU:
6794 op3 = operands[3];
6795 if (!thumb1_cmp_operand (op3, SImode))
6796 op3 = force_reg (SImode, op3);
6797 scratch = force_reg (SImode, const0_rtx);
6798 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
6799 operands[2], op3));
6800 break;
6801
6802 case LTU:
6803 op3 = operands[3];
6804 if (!thumb1_cmp_operand (op3, SImode))
6805 op3 = force_reg (SImode, op3);
6806 scratch = gen_reg_rtx (SImode);
6807 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
6808 break;
6809
6810 case GTU:
6811 op3 = force_reg (SImode, operands[3]);
6812 scratch = gen_reg_rtx (SImode);
6813 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
6814 break;
6815
6816 /* No good sequences for GT, LT. */
6817 default:
6818 FAIL;
6819 }
6820 DONE;
6821 }")
6822
6823 (define_expand "cstorehf4"
6824 [(set (match_operand:SI 0 "s_register_operand")
6825 (match_operator:SI 1 "expandable_comparison_operator"
6826 [(match_operand:HF 2 "s_register_operand")
6827 (match_operand:HF 3 "vfp_compare_operand")]))]
6828 "TARGET_VFP_FP16INST"
6829 {
6830 if (!arm_validize_comparison (&operands[1],
6831 &operands[2],
6832 &operands[3]))
6833 FAIL;
6834
6835 emit_insn (gen_cstore_cc (operands[0], operands[1],
6836 operands[2], operands[3]));
6837 DONE;
6838 }
6839 )
6840
6841 (define_expand "cstoresf4"
6842 [(set (match_operand:SI 0 "s_register_operand")
6843 (match_operator:SI 1 "expandable_comparison_operator"
6844 [(match_operand:SF 2 "s_register_operand")
6845 (match_operand:SF 3 "vfp_compare_operand")]))]
6846 "TARGET_32BIT && TARGET_HARD_FLOAT"
6847 "emit_insn (gen_cstore_cc (operands[0], operands[1],
6848 operands[2], operands[3])); DONE;"
6849 )
6850
6851 (define_expand "cstoredf4"
6852 [(set (match_operand:SI 0 "s_register_operand")
6853 (match_operator:SI 1 "expandable_comparison_operator"
6854 [(match_operand:DF 2 "s_register_operand")
6855 (match_operand:DF 3 "vfp_compare_operand")]))]
6856 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6857 "emit_insn (gen_cstore_cc (operands[0], operands[1],
6858 operands[2], operands[3])); DONE;"
6859 )
6860
6861 (define_expand "cstoredi4"
6862 [(set (match_operand:SI 0 "s_register_operand")
6863 (match_operator:SI 1 "expandable_comparison_operator"
6864 [(match_operand:DI 2 "s_register_operand")
6865 (match_operand:DI 3 "reg_or_int_operand")]))]
6866 "TARGET_32BIT"
6867 "{
6868 if (!arm_validize_comparison (&operands[1],
6869 &operands[2],
6870 &operands[3]))
6871 FAIL;
6872 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
6873 operands[3]));
6874 DONE;
6875 }"
6876 )
6877
6878 \f
6879 ;; Conditional move insns
6880
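;; All of the movMcc expanders below follow the same scheme: validate the
;; comparison, emit a compare whose result lands in the CC pseudo register,
;; and rewrite operand 1 as that CC register tested against zero, so the
;; conditional-move insns further down only ever see flag-based conditions.
;; As an illustrative example (not taken from the original comments), a C
;; statement such as
;;   x = (a < b) ? y : z;
;; can be expanded through these patterns instead of a conditional branch.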
6881 (define_expand "movsicc"
6882 [(set (match_operand:SI 0 "s_register_operand")
6883 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
6884 (match_operand:SI 2 "arm_not_operand")
6885 (match_operand:SI 3 "arm_not_operand")))]
6886 "TARGET_32BIT"
6887 "
6888 {
6889 enum rtx_code code;
6890 rtx ccreg;
6891
6892 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6893 &XEXP (operands[1], 1)))
6894 FAIL;
6895
6896 code = GET_CODE (operands[1]);
6897 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6898 XEXP (operands[1], 1), NULL_RTX);
6899 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6900 }"
6901 )
6902
6903 (define_expand "movhfcc"
6904 [(set (match_operand:HF 0 "s_register_operand")
6905 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
6906 (match_operand:HF 2 "s_register_operand")
6907 (match_operand:HF 3 "s_register_operand")))]
6908 "TARGET_VFP_FP16INST"
6909 "
6910 {
6911 enum rtx_code code = GET_CODE (operands[1]);
6912 rtx ccreg;
6913
6914 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6915 &XEXP (operands[1], 1)))
6916 FAIL;
6917
6918 code = GET_CODE (operands[1]);
6919 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6920 XEXP (operands[1], 1), NULL_RTX);
6921 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6922 }"
6923 )
6924
6925 (define_expand "movsfcc"
6926 [(set (match_operand:SF 0 "s_register_operand")
6927 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
6928 (match_operand:SF 2 "s_register_operand")
6929 (match_operand:SF 3 "s_register_operand")))]
6930 "TARGET_32BIT && TARGET_HARD_FLOAT"
6931 "
6932 {
6933 enum rtx_code code = GET_CODE (operands[1]);
6934 rtx ccreg;
6935
6936 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6937 &XEXP (operands[1], 1)))
6938 FAIL;
6939
6940 code = GET_CODE (operands[1]);
6941 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6942 XEXP (operands[1], 1), NULL_RTX);
6943 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6944 }"
6945 )
6946
6947 (define_expand "movdfcc"
6948 [(set (match_operand:DF 0 "s_register_operand")
6949 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
6950 (match_operand:DF 2 "s_register_operand")
6951 (match_operand:DF 3 "s_register_operand")))]
6952 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
6953 "
6954 {
6955 enum rtx_code code = GET_CODE (operands[1]);
6956 rtx ccreg;
6957
6958 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6959 &XEXP (operands[1], 1)))
6960 FAIL;
6961 code = GET_CODE (operands[1]);
6962 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6963 XEXP (operands[1], 1), NULL_RTX);
6964 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6965 }"
6966 )
6967
6968 (define_insn "*cmov<mode>"
6969 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
6970 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
6971 [(match_operand 2 "cc_register" "") (const_int 0)])
6972 (match_operand:SDF 3 "s_register_operand"
6973 "<F_constraint>")
6974 (match_operand:SDF 4 "s_register_operand"
6975 "<F_constraint>")))]
6976 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
6977 "*
6978 {
6979 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
6980 switch (code)
6981 {
6982 case ARM_GE:
6983 case ARM_GT:
6984 case ARM_EQ:
6985 case ARM_VS:
6986 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
6987 case ARM_LT:
6988 case ARM_LE:
6989 case ARM_NE:
6990 case ARM_VC:
6991 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
6992 default:
6993 gcc_unreachable ();
6994 }
6995 return \"\";
6996 }"
6997 [(set_attr "conds" "use")
6998 (set_attr "type" "fcsel")]
6999 )
7000
7001 (define_insn "*cmovhf"
7002 [(set (match_operand:HF 0 "s_register_operand" "=t")
7003 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
7004 [(match_operand 2 "cc_register" "") (const_int 0)])
7005 (match_operand:HF 3 "s_register_operand" "t")
7006 (match_operand:HF 4 "s_register_operand" "t")))]
7007 "TARGET_VFP_FP16INST"
7008 "*
7009 {
7010 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7011 switch (code)
7012 {
7013 case ARM_GE:
7014 case ARM_GT:
7015 case ARM_EQ:
7016 case ARM_VS:
7017 return \"vsel%d1.f16\\t%0, %3, %4\";
7018 case ARM_LT:
7019 case ARM_LE:
7020 case ARM_NE:
7021 case ARM_VC:
7022 return \"vsel%D1.f16\\t%0, %4, %3\";
7023 default:
7024 gcc_unreachable ();
7025 }
7026 return \"\";
7027 }"
7028 [(set_attr "conds" "use")
7029 (set_attr "type" "fcsel")]
7030 )
7031
7032 (define_insn_and_split "*movsicc_insn"
7033 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7034 (if_then_else:SI
7035 (match_operator 3 "arm_comparison_operator"
7036 [(match_operand 4 "cc_register" "") (const_int 0)])
7037 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7038 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7039 "TARGET_ARM"
7040 "@
7041 mov%D3\\t%0, %2
7042 mvn%D3\\t%0, #%B2
7043 mov%d3\\t%0, %1
7044 mvn%d3\\t%0, #%B1
7045 #
7046 #
7047 #
7048 #"
7049 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7050 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7051 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7052 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7053 "&& reload_completed"
7054 [(const_int 0)]
7055 {
7056 enum rtx_code rev_code;
7057 machine_mode mode;
7058 rtx rev_cond;
7059
7060 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7061 operands[3],
7062 gen_rtx_SET (operands[0], operands[1])));
7063
7064 rev_code = GET_CODE (operands[3]);
7065 mode = GET_MODE (operands[4]);
7066 if (mode == CCFPmode || mode == CCFPEmode)
7067 rev_code = reverse_condition_maybe_unordered (rev_code);
7068 else
7069 rev_code = reverse_condition (rev_code);
7070
7071 rev_cond = gen_rtx_fmt_ee (rev_code,
7072 VOIDmode,
7073 operands[4],
7074 const0_rtx);
7075 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7076 rev_cond,
7077 gen_rtx_SET (operands[0], operands[2])));
7078 DONE;
7079 }
7080 [(set_attr "length" "4,4,4,4,8,8,8,8")
7081 (set_attr "conds" "use")
7082 (set_attr_alternative "type"
7083 [(if_then_else (match_operand 2 "const_int_operand" "")
7084 (const_string "mov_imm")
7085 (const_string "mov_reg"))
7086 (const_string "mvn_imm")
7087 (if_then_else (match_operand 1 "const_int_operand" "")
7088 (const_string "mov_imm")
7089 (const_string "mov_reg"))
7090 (const_string "mvn_imm")
7091 (const_string "multiple")
7092 (const_string "multiple")
7093 (const_string "multiple")
7094 (const_string "multiple")])]
7095 )
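;; Added note: the split above lowers the if_then_else into two conditionally
;; executed moves.  The second move must use reverse_condition_maybe_unordered
;; for floating-point flag modes because the plain reverse of, say, GT is LE,
;; which is also false when the comparison is unordered; without the
;; "maybe unordered" reversal neither move would execute and the destination
;; would be left unwritten.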
7096
7097 (define_insn "*movsfcc_soft_insn"
7098 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7099 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7100 [(match_operand 4 "cc_register" "") (const_int 0)])
7101 (match_operand:SF 1 "s_register_operand" "0,r")
7102 (match_operand:SF 2 "s_register_operand" "r,0")))]
7103 "TARGET_ARM && TARGET_SOFT_FLOAT"
7104 "@
7105 mov%D3\\t%0, %2
7106 mov%d3\\t%0, %1"
7107 [(set_attr "conds" "use")
7108 (set_attr "type" "mov_reg")]
7109 )
7110
7111 \f
7112 ;; Jump and linkage insns
7113
7114 (define_expand "jump"
7115 [(set (pc)
7116 (label_ref (match_operand 0 "" "")))]
7117 "TARGET_EITHER"
7118 ""
7119 )
7120
7121 (define_insn "*arm_jump"
7122 [(set (pc)
7123 (label_ref (match_operand 0 "" "")))]
7124 "TARGET_32BIT"
7125 "*
7126 {
7127 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7128 {
7129 arm_ccfsm_state += 2;
7130 return \"\";
7131 }
7132 return \"b%?\\t%l0\";
7133 }
7134 "
7135 [(set_attr "predicable" "yes")
7136 (set (attr "length")
7137 (if_then_else
7138 (and (match_test "TARGET_THUMB2")
7139 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7140 (le (minus (match_dup 0) (pc)) (const_int 2048))))
7141 (const_int 2)
7142 (const_int 4)))
7143 (set_attr "type" "branch")]
7144 )
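;; Added note: arm_ccfsm_state is the state of the conditional-execution
;; state machine driven from the final-prescan hook; when it is 1 or 2 the
;; branch above has already been folded into conditional execution of the
;; following instructions, so the pattern emits nothing and merely advances
;; the state.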
7145
7146 (define_expand "call"
7147 [(parallel [(call (match_operand 0 "memory_operand")
7148 (match_operand 1 "general_operand"))
7149 (use (match_operand 2 "" ""))
7150 (clobber (reg:SI LR_REGNUM))])]
7151 "TARGET_EITHER"
7152 "
7153 {
7154 rtx callee, pat;
7155 tree addr = MEM_EXPR (operands[0]);
7156
7157 /* In an untyped call, we can get NULL for operand 2. */
7158 if (operands[2] == NULL_RTX)
7159 operands[2] = const0_rtx;
7160
7161 /* Decide if we should generate indirect calls by loading the
7162 32-bit address of the callee into a register before performing the
7163 branch and link. */
7164 callee = XEXP (operands[0], 0);
7165 if (GET_CODE (callee) == SYMBOL_REF
7166 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7167 : !REG_P (callee))
7168 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7169
7170 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
7171 /* Indirect call: set r9 with FDPIC value of callee. */
7172 XEXP (operands[0], 0)
7173 = arm_load_function_descriptor (XEXP (operands[0], 0));
7174
7175 if (detect_cmse_nonsecure_call (addr))
7176 {
7177 pat = gen_nonsecure_call_internal (operands[0], operands[1],
7178 operands[2]);
7179 emit_call_insn (pat);
7180 }
7181 else
7182 {
7183 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7184 arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
7185 }
7186
7187 /* Restore FDPIC register (r9) after call. */
7188 if (TARGET_FDPIC)
7189 {
7190 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7191 rtx initial_fdpic_reg
7192 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7193
7194 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7195 initial_fdpic_reg));
7196 }
7197
7198 DONE;
7199 }"
7200 )
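;; Added note: "long" calls (functions carrying the long_call attribute, or
;; all calls under -mlong-calls) are forced through a register by the
;; expander above because a direct bl has only a limited PC-relative range
;; (roughly +/-32MB in ARM state), which may not reach the callee.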
7201
7202 (define_insn "restore_pic_register_after_call"
7203 [(set (match_operand:SI 0 "s_register_operand" "+r,r")
7204 (unspec:SI [(match_dup 0)
7205 (match_operand:SI 1 "nonimmediate_operand" "r,m")]
7206 UNSPEC_PIC_RESTORE))]
7207 ""
7208 "@
7209 mov\t%0, %1
7210 ldr\t%0, %1"
7211 )
7212
7213 (define_expand "call_internal"
7214 [(parallel [(call (match_operand 0 "memory_operand")
7215 (match_operand 1 "general_operand"))
7216 (use (match_operand 2 "" ""))
7217 (clobber (reg:SI LR_REGNUM))])])
7218
7219 (define_expand "nonsecure_call_internal"
7220 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
7221 UNSPEC_NONSECURE_MEM)
7222 (match_operand 1 "general_operand"))
7223 (use (match_operand 2 "" ""))
7224 (clobber (reg:SI LR_REGNUM))])]
7225 "use_cmse"
7226 "
7227 {
7228 rtx tmp;
7229 tmp = copy_to_suggested_reg (XEXP (operands[0], 0),
7230 gen_rtx_REG (SImode, R4_REGNUM),
7231 SImode);
7232
7233 operands[0] = replace_equiv_address (operands[0], tmp);
7234 }")
7235
7236 (define_insn "*call_reg_armv5"
7237 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7238 (match_operand 1 "" ""))
7239 (use (match_operand 2 "" ""))
7240 (clobber (reg:SI LR_REGNUM))]
7241 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7242 "blx%?\\t%0"
7243 [(set_attr "type" "call")]
7244 )
7245
7246 (define_insn "*call_reg_arm"
7247 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7248 (match_operand 1 "" ""))
7249 (use (match_operand 2 "" ""))
7250 (clobber (reg:SI LR_REGNUM))]
7251 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7252 "*
7253 return output_call (operands);
7254 "
7255 ;; The length is a worst case; normally it is only two instructions (8 bytes).
7256 [(set_attr "length" "12")
7257 (set_attr "type" "call")]
7258 )
7259
7260
7261 (define_expand "call_value"
7262 [(parallel [(set (match_operand 0 "" "")
7263 (call (match_operand 1 "memory_operand")
7264 (match_operand 2 "general_operand")))
7265 (use (match_operand 3 "" ""))
7266 (clobber (reg:SI LR_REGNUM))])]
7267 "TARGET_EITHER"
7268 "
7269 {
7270 rtx pat, callee;
7271 tree addr = MEM_EXPR (operands[1]);
7272
7273 /* In an untyped call, we can get NULL for operand 3. */
7274 if (operands[3] == 0)
7275 operands[3] = const0_rtx;
7276
7277 /* Decide if we should generate indirect calls by loading the
7278 32-bit address of the callee into a register before performing the
7279 branch and link. */
7280 callee = XEXP (operands[1], 0);
7281 if (GET_CODE (callee) == SYMBOL_REF
7282 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7283 : !REG_P (callee))
7284 XEXP (operands[1], 0) = force_reg (Pmode, callee);
7285
7286 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
7287 /* Indirect call: set r9 with FDPIC value of callee. */
7288 XEXP (operands[1], 0)
7289 = arm_load_function_descriptor (XEXP (operands[1], 0));
7290
7291 if (detect_cmse_nonsecure_call (addr))
7292 {
7293 pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
7294 operands[2], operands[3]);
7295 emit_call_insn (pat);
7296 }
7297 else
7298 {
7299 pat = gen_call_value_internal (operands[0], operands[1],
7300 operands[2], operands[3]);
7301 arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
7302 }
7303
7304 /* Restore FDPIC register (r9) after call. */
7305 if (TARGET_FDPIC)
7306 {
7307 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7308 rtx initial_fdpic_reg
7309 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7310
7311 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7312 initial_fdpic_reg));
7313 }
7314
7315 DONE;
7316 }"
7317 )
7318
7319 (define_expand "call_value_internal"
7320 [(parallel [(set (match_operand 0 "" "")
7321 (call (match_operand 1 "memory_operand")
7322 (match_operand 2 "general_operand")))
7323 (use (match_operand 3 "" ""))
7324 (clobber (reg:SI LR_REGNUM))])])
7325
7326 (define_expand "nonsecure_call_value_internal"
7327 [(parallel [(set (match_operand 0 "" "")
7328 (call (unspec:SI [(match_operand 1 "memory_operand")]
7329 UNSPEC_NONSECURE_MEM)
7330 (match_operand 2 "general_operand")))
7331 (use (match_operand 3 "" ""))
7332 (clobber (reg:SI LR_REGNUM))])]
7333 "use_cmse"
7334 "
7335 {
7336 rtx tmp;
7337 tmp = copy_to_suggested_reg (XEXP (operands[1], 0),
7338 gen_rtx_REG (SImode, R4_REGNUM),
7339 SImode);
7340
7341 operands[1] = replace_equiv_address (operands[1], tmp);
7342 }")
7343
7344 (define_insn "*call_value_reg_armv5"
7345 [(set (match_operand 0 "" "")
7346 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7347 (match_operand 2 "" "")))
7348 (use (match_operand 3 "" ""))
7349 (clobber (reg:SI LR_REGNUM))]
7350 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7351 "blx%?\\t%1"
7352 [(set_attr "type" "call")]
7353 )
7354
7355 (define_insn "*call_value_reg_arm"
7356 [(set (match_operand 0 "" "")
7357 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7358 (match_operand 2 "" "")))
7359 (use (match_operand 3 "" ""))
7360 (clobber (reg:SI LR_REGNUM))]
7361 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7362 "*
7363 return output_call (&operands[1]);
7364 "
7365 [(set_attr "length" "12")
7366 (set_attr "type" "call")]
7367 )
7368
7369 ;; Allow calls to SYMBOL_REFs specially, as they are not valid general addresses.
7370 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
7371
7372 (define_insn "*call_symbol"
7373 [(call (mem:SI (match_operand:SI 0 "" ""))
7374 (match_operand 1 "" ""))
7375 (use (match_operand 2 "" ""))
7376 (clobber (reg:SI LR_REGNUM))]
7377 "TARGET_32BIT
7378 && !SIBLING_CALL_P (insn)
7379 && (GET_CODE (operands[0]) == SYMBOL_REF)
7380 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
7381 "*
7382 {
7383 rtx op = operands[0];
7384
7385 /* Switch mode now when possible. */
7386 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7387 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7388 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
7389
7390 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
7391 }"
7392 [(set_attr "type" "call")]
7393 )
7394
7395 (define_insn "*call_value_symbol"
7396 [(set (match_operand 0 "" "")
7397 (call (mem:SI (match_operand:SI 1 "" ""))
7398 (match_operand:SI 2 "" "")))
7399 (use (match_operand 3 "" ""))
7400 (clobber (reg:SI LR_REGNUM))]
7401 "TARGET_32BIT
7402 && !SIBLING_CALL_P (insn)
7403 && (GET_CODE (operands[1]) == SYMBOL_REF)
7404 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
7405 "*
7406 {
7407 rtx op = operands[1];
7408
7409 /* Switch mode now when possible. */
7410 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7411 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7412 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
7413
7414 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
7415 }"
7416 [(set_attr "type" "call")]
7417 )
7418
7419 (define_expand "sibcall_internal"
7420 [(parallel [(call (match_operand 0 "memory_operand")
7421 (match_operand 1 "general_operand"))
7422 (return)
7423 (use (match_operand 2 "" ""))])])
7424
7425 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
7426 (define_expand "sibcall"
7427 [(parallel [(call (match_operand 0 "memory_operand")
7428 (match_operand 1 "general_operand"))
7429 (return)
7430 (use (match_operand 2 "" ""))])]
7431 "TARGET_32BIT"
7432 "
7433 {
7434 rtx pat;
7435
7436 if ((!REG_P (XEXP (operands[0], 0))
7437 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
7438 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
7439 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
7440 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
7441
7442 if (operands[2] == NULL_RTX)
7443 operands[2] = const0_rtx;
7444
7445 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
7446 arm_emit_call_insn (pat, operands[0], true);
7447 DONE;
7448 }"
7449 )
7450
7451 (define_expand "sibcall_value_internal"
7452 [(parallel [(set (match_operand 0 "" "")
7453 (call (match_operand 1 "memory_operand")
7454 (match_operand 2 "general_operand")))
7455 (return)
7456 (use (match_operand 3 "" ""))])])
7457
7458 (define_expand "sibcall_value"
7459 [(parallel [(set (match_operand 0 "" "")
7460 (call (match_operand 1 "memory_operand")
7461 (match_operand 2 "general_operand")))
7462 (return)
7463 (use (match_operand 3 "" ""))])]
7464 "TARGET_32BIT"
7465 "
7466 {
7467 rtx pat;
7468
7469 if ((!REG_P (XEXP (operands[1], 0))
7470 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
7471 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
7472 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
7473 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
7474
7475 if (operands[3] == NULL_RTX)
7476 operands[3] = const0_rtx;
7477
7478 pat = gen_sibcall_value_internal (operands[0], operands[1],
7479 operands[2], operands[3]);
7480 arm_emit_call_insn (pat, operands[1], true);
7481 DONE;
7482 }"
7483 )
7484
7485 (define_insn "*sibcall_insn"
7486 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
7487 (match_operand 1 "" ""))
7488 (return)
7489 (use (match_operand 2 "" ""))]
7490 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7491 "*
7492 if (which_alternative == 1)
7493 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
7494 else
7495 {
7496 if (arm_arch5t || arm_arch4t)
7497 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
7498 else
7499 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
7500 }
7501 "
7502 [(set_attr "type" "call")]
7503 )
7504
7505 (define_insn "*sibcall_value_insn"
7506 [(set (match_operand 0 "" "")
7507 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
7508 (match_operand 2 "" "")))
7509 (return)
7510 (use (match_operand 3 "" ""))]
7511 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7512 "*
7513 if (which_alternative == 1)
7514 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
7515 else
7516 {
7517 if (arm_arch5t || arm_arch4t)
7518 return \"bx%?\\t%1\";
7519 else
7520 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
7521 }
7522 "
7523 [(set_attr "type" "call")]
7524 )
7525
7526 (define_expand "<return_str>return"
7527 [(RETURNS)]
7528 "(TARGET_ARM || (TARGET_THUMB2
7529 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
7530 && !IS_STACKALIGN (arm_current_func_type ())))
7531 <return_cond_false>"
7532 "
7533 {
7534 if (TARGET_THUMB2)
7535 {
7536 thumb2_expand_return (<return_simple_p>);
7537 DONE;
7538 }
7539 }
7540 "
7541 )
7542
7543 ;; Often the return insn will be the same as loading from memory, so set the type attribute accordingly.
7544 (define_insn "*arm_return"
7545 [(return)]
7546 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
7547 "*
7548 {
7549 if (arm_ccfsm_state == 2)
7550 {
7551 arm_ccfsm_state += 2;
7552 return \"\";
7553 }
7554 return output_return_instruction (const_true_rtx, true, false, false);
7555 }"
7556 [(set_attr "type" "load_4")
7557 (set_attr "length" "12")
7558 (set_attr "predicable" "yes")]
7559 )
7560
7561 (define_insn "*cond_<return_str>return"
7562 [(set (pc)
7563 (if_then_else (match_operator 0 "arm_comparison_operator"
7564 [(match_operand 1 "cc_register" "") (const_int 0)])
7565 (RETURNS)
7566 (pc)))]
7567 "TARGET_ARM <return_cond_true>"
7568 "*
7569 {
7570 if (arm_ccfsm_state == 2)
7571 {
7572 arm_ccfsm_state += 2;
7573 return \"\";
7574 }
7575 return output_return_instruction (operands[0], true, false,
7576 <return_simple_p>);
7577 }"
7578 [(set_attr "conds" "use")
7579 (set_attr "length" "12")
7580 (set_attr "type" "load_4")]
7581 )
7582
7583 (define_insn "*cond_<return_str>return_inverted"
7584 [(set (pc)
7585 (if_then_else (match_operator 0 "arm_comparison_operator"
7586 [(match_operand 1 "cc_register" "") (const_int 0)])
7587 (pc)
7588 (RETURNS)))]
7589 "TARGET_ARM <return_cond_true>"
7590 "*
7591 {
7592 if (arm_ccfsm_state == 2)
7593 {
7594 arm_ccfsm_state += 2;
7595 return \"\";
7596 }
7597 return output_return_instruction (operands[0], true, true,
7598 <return_simple_p>);
7599 }"
7600 [(set_attr "conds" "use")
7601 (set_attr "length" "12")
7602 (set_attr "type" "load_4")]
7603 )
7604
7605 (define_insn "*arm_simple_return"
7606 [(simple_return)]
7607 "TARGET_ARM"
7608 "*
7609 {
7610 if (arm_ccfsm_state == 2)
7611 {
7612 arm_ccfsm_state += 2;
7613 return \"\";
7614 }
7615 return output_return_instruction (const_true_rtx, true, false, true);
7616 }"
7617 [(set_attr "type" "branch")
7618 (set_attr "length" "4")
7619 (set_attr "predicable" "yes")]
7620 )
7621
7622 ;; Generate a sequence of instructions to determine if the processor is
7623 ;; in 26-bit or 32-bit mode, and return the appropriate return address
7624 ;; mask.
7625
7626 (define_expand "return_addr_mask"
7627 [(set (match_dup 1)
7628 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7629 (const_int 0)))
7630 (set (match_operand:SI 0 "s_register_operand")
7631 (if_then_else:SI (eq (match_dup 1) (const_int 0))
7632 (const_int -1)
7633 (const_int 67108860)))] ; 0x03fffffc
7634 "TARGET_ARM"
7635 "
7636 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
7637 ")
7638
7639 (define_insn "*check_arch2"
7640 [(set (match_operand:CC_NOOV 0 "cc_register" "")
7641 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7642 (const_int 0)))]
7643 "TARGET_ARM"
7644 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
7645 [(set_attr "length" "8")
7646 (set_attr "conds" "set")
7647 (set_attr "type" "multiple")]
7648 )
7649
7650 ;; Call subroutine returning any type.
7651
7652 (define_expand "untyped_call"
7653 [(parallel [(call (match_operand 0 "" "")
7654 (const_int 0))
7655 (match_operand 1 "" "")
7656 (match_operand 2 "" "")])]
7657 "TARGET_EITHER && !TARGET_FDPIC"
7658 "
7659 {
7660 int i;
7661 rtx par = gen_rtx_PARALLEL (VOIDmode,
7662 rtvec_alloc (XVECLEN (operands[2], 0)));
7663 rtx addr = gen_reg_rtx (Pmode);
7664 rtx mem;
7665 int size = 0;
7666
7667 emit_move_insn (addr, XEXP (operands[1], 0));
7668 mem = change_address (operands[1], BLKmode, addr);
7669
7670 for (i = 0; i < XVECLEN (operands[2], 0); i++)
7671 {
7672 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
7673
7674 /* Default code only uses r0 as a return value, but we could
7675 be using anything up to 4 registers. */
7676 if (REGNO (src) == R0_REGNUM)
7677 src = gen_rtx_REG (TImode, R0_REGNUM);
7678
7679 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
7680 GEN_INT (size));
7681 size += GET_MODE_SIZE (GET_MODE (src));
7682 }
7683
7684 emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
7685
7686 size = 0;
7687
7688 for (i = 0; i < XVECLEN (par, 0); i++)
7689 {
7690 HOST_WIDE_INT offset = 0;
7691 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
7692
7693 if (size != 0)
7694 emit_move_insn (addr, plus_constant (Pmode, addr, size));
7695
7696 mem = change_address (mem, GET_MODE (reg), NULL);
7697 if (REGNO (reg) == R0_REGNUM)
7698 {
7699 /* On thumb we have to use a write-back instruction. */
7700 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
7701 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7702 size = TARGET_ARM ? 16 : 0;
7703 }
7704 else
7705 {
7706 emit_move_insn (mem, reg);
7707 size = GET_MODE_SIZE (GET_MODE (reg));
7708 }
7709 }
7710
7711 /* The optimizer does not know that the call sets the function value
7712 registers we stored in the result block. We avoid problems by
7713 claiming that all hard registers are used and clobbered at this
7714 point. */
7715 emit_insn (gen_blockage ());
7716
7717 DONE;
7718 }"
7719 )
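;; Added note (an interpretation of the code above, not from the original
;; comments): untyped_call is what __builtin_apply expands to.  The expander
;; calls the target and then copies every register that might hold a return
;; value -- r0-r3 as a single TImode block, plus any other value registers
;; listed in operand 2 -- into the result block addressed by operand 1.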
7720
7721 (define_expand "untyped_return"
7722 [(match_operand:BLK 0 "memory_operand")
7723 (match_operand 1 "" "")]
7724 "TARGET_EITHER && !TARGET_FDPIC"
7725 "
7726 {
7727 int i;
7728 rtx addr = gen_reg_rtx (Pmode);
7729 rtx mem;
7730 int size = 0;
7731
7732 emit_move_insn (addr, XEXP (operands[0], 0));
7733 mem = change_address (operands[0], BLKmode, addr);
7734
7735 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7736 {
7737 HOST_WIDE_INT offset = 0;
7738 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
7739
7740 if (size != 0)
7741 emit_move_insn (addr, plus_constant (Pmode, addr, size));
7742
7743 mem = change_address (mem, GET_MODE (reg), NULL);
7744 if (REGNO (reg) == R0_REGNUM)
7745 {
7746 /* On thumb we have to use a write-back instruction. */
7747 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
7748 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7749 size = TARGET_ARM ? 16 : 0;
7750 }
7751 else
7752 {
7753 emit_move_insn (reg, mem);
7754 size = GET_MODE_SIZE (GET_MODE (reg));
7755 }
7756 }
7757
7758 /* Emit USE insns before the return. */
7759 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7760 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
7761
7762 /* Construct the return. */
7763 expand_naked_return ();
7764
7765 DONE;
7766 }"
7767 )
7768
7769 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
7770 ;; all of memory. This blocks insns from being moved across this point.
7771
7772 (define_insn "blockage"
7773 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
7774 "TARGET_EITHER"
7775 ""
7776 [(set_attr "length" "0")
7777 (set_attr "type" "block")]
7778 )
7779
7780 ;; Since we hard-code r0 here, use the 'o' constraint to prevent the output
7781 ;; of auto-increment addressing modes that could use r0 as the base register,
7782 ;; which would provoke undefined behaviour in the hardware.
7783 (define_insn "probe_stack"
7784 [(set (match_operand:SI 0 "memory_operand" "=o")
7785 (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
7786 "TARGET_32BIT"
7787 "str%?\\tr0, %0"
7788 [(set_attr "type" "store_4")
7789 (set_attr "predicable" "yes")]
7790 )
7791
7792 (define_insn "probe_stack_range"
7793 [(set (match_operand:SI 0 "register_operand" "=r")
7794 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
7795 (match_operand:SI 2 "register_operand" "r")]
7796 VUNSPEC_PROBE_STACK_RANGE))]
7797 "TARGET_32BIT"
7798 {
7799 return output_probe_stack_range (operands[0], operands[2]);
7800 }
7801 [(set_attr "type" "multiple")
7802 (set_attr "conds" "clob")]
7803 )
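;; Added note: probe_stack_range is used when the prologue must touch a
;; possibly large or variable stack region page by page (for example under
;; -fstack-clash-protection); output_probe_stack_range emits the actual
;; probing loop between the two register operands.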
7804
7805 ;; Named patterns for stack smashing protection.
7806 (define_expand "stack_protect_combined_set"
7807 [(parallel
7808 [(set (match_operand:SI 0 "memory_operand")
7809 (unspec:SI [(match_operand:SI 1 "guard_operand")]
7810 UNSPEC_SP_SET))
7811 (clobber (match_scratch:SI 2 ""))
7812 (clobber (match_scratch:SI 3 ""))])]
7813 ""
7814 ""
7815 )
7816
7817 ;; Use a separate insn from the above expand so that the mem is outside of
7818 ;; operand #1 by the time register allocation happens.  This is needed to
7819 ;; stop LRA from trying to reload the guard, since we need to control how
7820 ;; PIC access is done in the -fpic/-fPIC case (see the COMPUTE_NOW parameter
7821 ;; of legitimize_pic_address ()).
7822 (define_insn_and_split "*stack_protect_combined_set_insn"
7823 [(set (match_operand:SI 0 "memory_operand" "=m,m")
7824 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
7825 UNSPEC_SP_SET))
7826 (clobber (match_scratch:SI 2 "=&l,&r"))
7827 (clobber (match_scratch:SI 3 "=&l,&r"))]
7828 ""
7829 "#"
7830 "reload_completed"
7831 [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
7832 UNSPEC_SP_SET))
7833 (clobber (match_dup 2))])]
7834 "
7835 {
7836 if (flag_pic)
7837 {
7838 rtx pic_reg;
7839
7840 if (TARGET_FDPIC)
7841 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7842 else
7843 pic_reg = operands[3];
7844
7845 /* Forces recomputing of GOT base now. */
7846 legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
7847 true /*compute_now*/);
7848 }
7849 else
7850 {
7851 if (address_operand (operands[1], SImode))
7852 operands[2] = operands[1];
7853 else
7854 {
7855 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
7856 emit_move_insn (operands[2], mem);
7857 }
7858 }
7859 }"
7860 [(set_attr "arch" "t1,32")]
7861 )
7862
7863 ;; DO NOT SPLIT THIS INSN. It's important for security reasons that the
7864 ;; canary value does not live beyond the life of this sequence.
7865 (define_insn "*stack_protect_set_insn"
7866 [(set (match_operand:SI 0 "memory_operand" "=m,m")
7867 (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
7868 UNSPEC_SP_SET))
7869 (clobber (match_dup 1))]
7870 ""
7871 "@
7872 ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
7873 ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
7874 [(set_attr "length" "8,12")
7875 (set_attr "conds" "clob,nocond")
7876 (set_attr "type" "multiple")
7877 (set_attr "arch" "t1,32")]
7878 )
7879
7880 (define_expand "stack_protect_combined_test"
7881 [(parallel
7882 [(set (pc)
7883 (if_then_else
7884 (eq (match_operand:SI 0 "memory_operand")
7885 (unspec:SI [(match_operand:SI 1 "guard_operand")]
7886 UNSPEC_SP_TEST))
7887 (label_ref (match_operand 2))
7888 (pc)))
7889 (clobber (match_scratch:SI 3 ""))
7890 (clobber (match_scratch:SI 4 ""))
7891 (clobber (reg:CC CC_REGNUM))])]
7892 ""
7893 ""
7894 )
7895
7896 ;; Use a separate insn from the above expand so that the mem is outside of
7897 ;; operand #1 by the time register allocation happens.  This is needed to
7898 ;; stop LRA from trying to reload the guard, since we need to control how
7899 ;; PIC access is done in the -fpic/-fPIC case (see the COMPUTE_NOW parameter
7900 ;; of legitimize_pic_address ()).
7901 (define_insn_and_split "*stack_protect_combined_test_insn"
7902 [(set (pc)
7903 (if_then_else
7904 (eq (match_operand:SI 0 "memory_operand" "m,m")
7905 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
7906 UNSPEC_SP_TEST))
7907 (label_ref (match_operand 2))
7908 (pc)))
7909 (clobber (match_scratch:SI 3 "=&l,&r"))
7910 (clobber (match_scratch:SI 4 "=&l,&r"))
7911 (clobber (reg:CC CC_REGNUM))]
7912 ""
7913 "#"
7914 "reload_completed"
7915 [(const_int 0)]
7916 {
7917 rtx eq;
7918
7919 if (flag_pic)
7920 {
7921 rtx pic_reg;
7922
7923 if (TARGET_FDPIC)
7924 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7925 else
7926 pic_reg = operands[4];
7927
7928 /* Forces recomputing of GOT base now. */
7929 legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
7930 true /*compute_now*/);
7931 }
7932 else
7933 {
7934 if (address_operand (operands[1], SImode))
7935 operands[3] = operands[1];
7936 else
7937 {
7938 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
7939 emit_move_insn (operands[3], mem);
7940 }
7941 }
7942 if (TARGET_32BIT)
7943 {
7944 emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
7945 operands[3]));
7946 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
7947 eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
7948 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
7949 }
7950 else
7951 {
7952 emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
7953 operands[3]));
7954 eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
7955 emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
7956 operands[2]));
7957 }
7958 DONE;
7959 }
7960 [(set_attr "arch" "t1,32")]
7961 )
7962
7963 (define_insn "arm_stack_protect_test_insn"
7964 [(set (reg:CC_Z CC_REGNUM)
7965 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
7966 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
7967 UNSPEC_SP_TEST)
7968 (const_int 0)))
7969 (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
7970 (clobber (match_dup 2))]
7971 "TARGET_32BIT"
7972 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0"
7973 [(set_attr "length" "8,12")
7974 (set_attr "conds" "set")
7975 (set_attr "type" "multiple")
7976 (set_attr "arch" "t,32")]
7977 )
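;; Added note: the sequence above loads the live canary through the guard
;; address held in %2, reloads %2 with the copy saved on the stack (operand
;; 1), and the final eors sets the Z flag exactly when the two values match;
;; both temporaries are clobbered so the canary value is not kept live
;; afterwards.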
7978
7979 (define_expand "casesi"
7980 [(match_operand:SI 0 "s_register_operand") ; index to jump on
7981 (match_operand:SI 1 "const_int_operand") ; lower bound
7982 (match_operand:SI 2 "const_int_operand") ; total range
7983 (match_operand:SI 3 "" "") ; table label
7984 (match_operand:SI 4 "" "")] ; Out of range label
7985 "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
7986 "
7987 {
7988 enum insn_code code;
7989 if (operands[1] != const0_rtx)
7990 {
7991 rtx reg = gen_reg_rtx (SImode);
7992
7993 emit_insn (gen_addsi3 (reg, operands[0],
7994 gen_int_mode (-INTVAL (operands[1]),
7995 SImode)));
7996 operands[0] = reg;
7997 }
7998
7999 if (TARGET_ARM)
8000 code = CODE_FOR_arm_casesi_internal;
8001 else if (TARGET_THUMB1)
8002 code = CODE_FOR_thumb1_casesi_internal_pic;
8003 else if (flag_pic)
8004 code = CODE_FOR_thumb2_casesi_internal_pic;
8005 else
8006 code = CODE_FOR_thumb2_casesi_internal;
8007
8008 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8009 operands[2] = force_reg (SImode, operands[2]);
8010
8011 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8012 operands[3], operands[4]));
8013 DONE;
8014 }"
8015 )
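;; Added note: casesi implements dense switch dispatch.  The index is first
;; biased by subtracting the lower bound (operand 1), then compared unsigned
;; against the range (operand 2); in-range values index the jump table at
;; operand 3 and anything else branches to the default label in operand 4.
;; As an illustrative example (not from the original comments), a C switch
;; over a compact run of case values, e.g.
;;   switch (x) { case 10: ...; case 11: ...; case 12: ...; default: ...; }
;; is a typical source of this pattern.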
8016
8017 ;; The USE in this pattern is needed to tell flow analysis that this is
8018 ;; a CASESI insn. It has no other purpose.
8019 (define_expand "arm_casesi_internal"
8020 [(parallel [(set (pc)
8021 (if_then_else
8022 (leu (match_operand:SI 0 "s_register_operand")
8023 (match_operand:SI 1 "arm_rhs_operand"))
8024 (match_dup 4)
8025 (label_ref:SI (match_operand 3 ""))))
8026 (clobber (reg:CC CC_REGNUM))
8027 (use (label_ref:SI (match_operand 2 "")))])]
8028 "TARGET_ARM"
8029 {
8030 operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
8031 operands[4] = gen_rtx_PLUS (SImode, operands[4],
8032 gen_rtx_LABEL_REF (SImode, operands[2]));
8033 operands[4] = gen_rtx_MEM (SImode, operands[4]);
8034 MEM_READONLY_P (operands[4]) = 1;
8035 MEM_NOTRAP_P (operands[4]) = 1;
8036 })
8037
8038 (define_insn "*arm_casesi_internal"
8039 [(parallel [(set (pc)
8040 (if_then_else
8041 (leu (match_operand:SI 0 "s_register_operand" "r")
8042 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8043 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8044 (label_ref:SI (match_operand 2 "" ""))))
8045 (label_ref:SI (match_operand 3 "" ""))))
8046 (clobber (reg:CC CC_REGNUM))
8047 (use (label_ref:SI (match_dup 2)))])]
8048 "TARGET_ARM"
8049 "*
8050 if (flag_pic)
8051 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8052 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8053 "
8054 [(set_attr "conds" "clob")
8055 (set_attr "length" "12")
8056 (set_attr "type" "multiple")]
8057 )
8058
8059 (define_expand "indirect_jump"
8060 [(set (pc)
8061 (match_operand:SI 0 "s_register_operand"))]
8062 "TARGET_EITHER"
8063 "
8064 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8065 address and use bx. */
8066 if (TARGET_THUMB2)
8067 {
8068 rtx tmp;
8069 tmp = gen_reg_rtx (SImode);
8070 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8071 operands[0] = tmp;
8072 }
8073 "
8074 )
8075
8076 ;; NB Never uses BX.
8077 (define_insn "*arm_indirect_jump"
8078 [(set (pc)
8079 (match_operand:SI 0 "s_register_operand" "r"))]
8080 "TARGET_ARM"
8081 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8082 [(set_attr "predicable" "yes")
8083 (set_attr "type" "branch")]
8084 )
8085
8086 (define_insn "*load_indirect_jump"
8087 [(set (pc)
8088 (match_operand:SI 0 "memory_operand" "m"))]
8089 "TARGET_ARM"
8090 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8091 [(set_attr "type" "load_4")
8092 (set_attr "pool_range" "4096")
8093 (set_attr "neg_pool_range" "4084")
8094 (set_attr "predicable" "yes")]
8095 )
8096
8097 \f
8098 ;; Misc insns
8099
8100 (define_insn "nop"
8101 [(const_int 0)]
8102 "TARGET_EITHER"
8103 "nop"
8104 [(set (attr "length")
8105 (if_then_else (eq_attr "is_thumb" "yes")
8106 (const_int 2)
8107 (const_int 4)))
8108 (set_attr "type" "mov_reg")]
8109 )
8110
8111 (define_insn "trap"
8112 [(trap_if (const_int 1) (const_int 0))]
8113 ""
8114 "*
8115 if (TARGET_ARM)
8116 return \".inst\\t0xe7f000f0\";
8117 else
8118 return \".inst\\t0xdeff\";
8119 "
8120 [(set (attr "length")
8121 (if_then_else (eq_attr "is_thumb" "yes")
8122 (const_int 2)
8123 (const_int 4)))
8124 (set_attr "type" "trap")
8125 (set_attr "conds" "unconditional")]
8126 )
8127
8128 \f
8129 ;; Patterns to allow combination of arithmetic, cond code and shifts
8130
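;; Added note: the patterns in this section fold a shift (or a multiplication
;; by a power of two) into the second operand of an ALU instruction, so that,
;; for example, an address computation for a 4-byte array element can become
;; a single "add r0, r1, r2, lsl #2" instead of a separate shift followed by
;; an add (register names purely illustrative).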
8131 (define_insn "*<arith_shift_insn>_multsi"
8132 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8133 (SHIFTABLE_OPS:SI
8134 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
8135 (match_operand:SI 3 "power_of_two_operand" ""))
8136 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
8137 "TARGET_32BIT"
8138 "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
8139 [(set_attr "predicable" "yes")
8140 (set_attr "shift" "2")
8141 (set_attr "arch" "a,t2")
8142 (set_attr "type" "alu_shift_imm")])
8143
8144 (define_insn "*<arith_shift_insn>_shiftsi"
8145 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8146 (SHIFTABLE_OPS:SI
8147 (match_operator:SI 2 "shift_nomul_operator"
8148 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8149 (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
8150 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
8151 "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
8152 "<arith_shift_insn>%?\\t%0, %1, %3%S2"
8153 [(set_attr "predicable" "yes")
8154 (set_attr "shift" "3")
8155 (set_attr "arch" "a,t2,a")
8156 (set_attr "type" "alu_shift_imm,alu_shift_imm,alu_shift_reg")])
8157
8158 (define_split
8159 [(set (match_operand:SI 0 "s_register_operand" "")
8160 (match_operator:SI 1 "shiftable_operator"
8161 [(match_operator:SI 2 "shiftable_operator"
8162 [(match_operator:SI 3 "shift_operator"
8163 [(match_operand:SI 4 "s_register_operand" "")
8164 (match_operand:SI 5 "reg_or_int_operand" "")])
8165 (match_operand:SI 6 "s_register_operand" "")])
8166 (match_operand:SI 7 "arm_rhs_operand" "")]))
8167 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8168 "TARGET_32BIT"
8169 [(set (match_dup 8)
8170 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8171 (match_dup 6)]))
8172 (set (match_dup 0)
8173 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
8174 "")
8175
8176 (define_insn "*arith_shiftsi_compare0"
8177 [(set (reg:CC_NOOV CC_REGNUM)
8178 (compare:CC_NOOV
8179 (match_operator:SI 1 "shiftable_operator"
8180 [(match_operator:SI 3 "shift_operator"
8181 [(match_operand:SI 4 "s_register_operand" "r,r")
8182 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8183 (match_operand:SI 2 "s_register_operand" "r,r")])
8184 (const_int 0)))
8185 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8186 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8187 (match_dup 2)]))]
8188 "TARGET_32BIT"
8189 "%i1s%?\\t%0, %2, %4%S3"
8190 [(set_attr "conds" "set")
8191 (set_attr "shift" "4")
8192 (set_attr "arch" "32,a")
8193 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8194
8195 (define_insn "*arith_shiftsi_compare0_scratch"
8196 [(set (reg:CC_NOOV CC_REGNUM)
8197 (compare:CC_NOOV
8198 (match_operator:SI 1 "shiftable_operator"
8199 [(match_operator:SI 3 "shift_operator"
8200 [(match_operand:SI 4 "s_register_operand" "r,r")
8201 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8202 (match_operand:SI 2 "s_register_operand" "r,r")])
8203 (const_int 0)))
8204 (clobber (match_scratch:SI 0 "=r,r"))]
8205 "TARGET_32BIT"
8206 "%i1s%?\\t%0, %2, %4%S3"
8207 [(set_attr "conds" "set")
8208 (set_attr "shift" "4")
8209 (set_attr "arch" "32,a")
8210 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8211
8212 (define_insn "*sub_shiftsi"
8213 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8214 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8215 (match_operator:SI 2 "shift_operator"
8216 [(match_operand:SI 3 "s_register_operand" "r,r")
8217 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8218 "TARGET_32BIT"
8219 "sub%?\\t%0, %1, %3%S2"
8220 [(set_attr "predicable" "yes")
8221 (set_attr "predicable_short_it" "no")
8222 (set_attr "shift" "3")
8223 (set_attr "arch" "32,a")
8224 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8225
8226 (define_insn "*sub_shiftsi_compare0"
8227 [(set (reg:CC_NOOV CC_REGNUM)
8228 (compare:CC_NOOV
8229 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8230 (match_operator:SI 2 "shift_operator"
8231 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8232 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8233 (const_int 0)))
8234 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8235 (minus:SI (match_dup 1)
8236 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8237 "TARGET_32BIT"
8238 "subs%?\\t%0, %1, %3%S2"
8239 [(set_attr "conds" "set")
8240 (set_attr "shift" "3")
8241 (set_attr "arch" "32,a,a")
8242 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
8243
8244 (define_insn "*sub_shiftsi_compare0_scratch"
8245 [(set (reg:CC_NOOV CC_REGNUM)
8246 (compare:CC_NOOV
8247 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8248 (match_operator:SI 2 "shift_operator"
8249 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8250 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8251 (const_int 0)))
8252 (clobber (match_scratch:SI 0 "=r,r,r"))]
8253 "TARGET_32BIT"
8254 "subs%?\\t%0, %1, %3%S2"
8255 [(set_attr "conds" "set")
8256 (set_attr "shift" "3")
8257 (set_attr "arch" "32,a,a")
8258 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
8259 \f
8260
8261 (define_insn_and_split "*and_scc"
8262 [(set (match_operand:SI 0 "s_register_operand" "=r")
8263 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8264 [(match_operand 2 "cc_register" "") (const_int 0)])
8265 (match_operand:SI 3 "s_register_operand" "r")))]
8266 "TARGET_ARM"
8267 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
8268 "&& reload_completed"
8269 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
8270 (cond_exec (match_dup 4) (set (match_dup 0)
8271 (and:SI (match_dup 3) (const_int 1))))]
8272 {
8273 machine_mode mode = GET_MODE (operands[2]);
8274 enum rtx_code rc = GET_CODE (operands[1]);
8275
8276 /* Note that operands[4] is the same as operands[1],
8277 but with VOIDmode as the result. */
8278 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8279 if (mode == CCFPmode || mode == CCFPEmode)
8280 rc = reverse_condition_maybe_unordered (rc);
8281 else
8282 rc = reverse_condition (rc);
8283 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8284 }
8285 [(set_attr "conds" "use")
8286 (set_attr "type" "multiple")
8287 (set_attr "length" "8")]
8288 )
8289
8290 (define_insn_and_split "*ior_scc"
8291 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8292 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
8293 [(match_operand 2 "cc_register" "") (const_int 0)])
8294 (match_operand:SI 3 "s_register_operand" "0,?r")))]
8295 "TARGET_ARM"
8296 "@
8297 orr%d1\\t%0, %3, #1
8298 #"
8299 "&& reload_completed
8300 && REGNO (operands [0]) != REGNO (operands[3])"
8301 ;; && which_alternative == 1
8302 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
8303 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
8304 (cond_exec (match_dup 4) (set (match_dup 0)
8305 (ior:SI (match_dup 3) (const_int 1))))]
8306 {
8307 machine_mode mode = GET_MODE (operands[2]);
8308 enum rtx_code rc = GET_CODE (operands[1]);
8309
8310 /* Note that operands[4] is the same as operands[1],
8311 but with VOIDmode as the result. */
8312 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8313 if (mode == CCFPmode || mode == CCFPEmode)
8314 rc = reverse_condition_maybe_unordered (rc);
8315 else
8316 rc = reverse_condition (rc);
8317 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8318 }
8319 [(set_attr "conds" "use")
8320 (set_attr "length" "4,8")
8321 (set_attr "type" "logic_imm,multiple")]
8322 )
8323
8324 ; A series of splitters for the compare_scc pattern below. Note that
8325 ; order is important.
8326 (define_split
8327 [(set (match_operand:SI 0 "s_register_operand" "")
8328 (lt:SI (match_operand:SI 1 "s_register_operand" "")
8329 (const_int 0)))
8330 (clobber (reg:CC CC_REGNUM))]
8331 "TARGET_32BIT && reload_completed"
8332 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
8333
8334 (define_split
8335 [(set (match_operand:SI 0 "s_register_operand" "")
8336 (ge:SI (match_operand:SI 1 "s_register_operand" "")
8337 (const_int 0)))
8338 (clobber (reg:CC CC_REGNUM))]
8339 "TARGET_32BIT && reload_completed"
8340 [(set (match_dup 0) (not:SI (match_dup 1)))
8341 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
8342
8343 (define_split
8344 [(set (match_operand:SI 0 "s_register_operand" "")
8345 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8346 (const_int 0)))
8347 (clobber (reg:CC CC_REGNUM))]
8348 "arm_arch5t && TARGET_32BIT"
8349 [(set (match_dup 0) (clz:SI (match_dup 1)))
8350 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8351 )
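;; Added note: the clz-based split works because CLZ returns 32 only for a
;; zero input, so "clz Rd, Rm; lsr Rd, Rd, #5" leaves 1 in Rd exactly when
;; Rm is zero and 0 otherwise.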
8352
8353 (define_split
8354 [(set (match_operand:SI 0 "s_register_operand" "")
8355 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8356 (const_int 0)))
8357 (clobber (reg:CC CC_REGNUM))]
8358 "TARGET_32BIT && reload_completed"
8359 [(parallel
8360 [(set (reg:CC CC_REGNUM)
8361 (compare:CC (const_int 1) (match_dup 1)))
8362 (set (match_dup 0)
8363 (minus:SI (const_int 1) (match_dup 1)))])
8364 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
8365 (set (match_dup 0) (const_int 0)))])
8366
8367 (define_split
8368 [(set (match_operand:SI 0 "s_register_operand" "")
8369 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8370 (match_operand:SI 2 "const_int_operand" "")))
8371 (clobber (reg:CC CC_REGNUM))]
8372 "TARGET_32BIT && reload_completed"
8373 [(parallel
8374 [(set (reg:CC CC_REGNUM)
8375 (compare:CC (match_dup 1) (match_dup 2)))
8376 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
8377 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
8378 (set (match_dup 0) (const_int 1)))]
8379 {
8380 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
8381 })
8382
8383 (define_split
8384 [(set (match_operand:SI 0 "s_register_operand" "")
8385 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8386 (match_operand:SI 2 "arm_add_operand" "")))
8387 (clobber (reg:CC CC_REGNUM))]
8388 "TARGET_32BIT && reload_completed"
8389 [(parallel
8390 [(set (reg:CC_NOOV CC_REGNUM)
8391 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
8392 (const_int 0)))
8393 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
8394 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
8395 (set (match_dup 0) (const_int 1)))])
8396
8397 (define_insn_and_split "*compare_scc"
8398 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8399 (match_operator:SI 1 "arm_comparison_operator"
8400 [(match_operand:SI 2 "s_register_operand" "r,r")
8401 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8402 (clobber (reg:CC CC_REGNUM))]
8403 "TARGET_32BIT"
8404 "#"
8405 "&& reload_completed"
8406 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
8407 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
8408 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
8409 {
8410 rtx tmp1;
8411 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
8412 operands[2], operands[3]);
8413 enum rtx_code rc = GET_CODE (operands[1]);
8414
8415 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
8416
8417 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8418 if (mode == CCFPmode || mode == CCFPEmode)
8419 rc = reverse_condition_maybe_unordered (rc);
8420 else
8421 rc = reverse_condition (rc);
8422 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8423 }
8424 [(set_attr "type" "multiple")]
8425 )
8426
8427 ;; Attempt to improve the sequences generated by the compare_scc splitters
8428 ;; so that they do not use conditional execution.
8429
8430 ;; Rd = (eq (reg1) (const_int 0)) // ARMv5
8431 ;; clz Rd, reg1
8432 ;; lsr Rd, Rd, #5
8433 (define_peephole2
8434 [(set (reg:CC CC_REGNUM)
8435 (compare:CC (match_operand:SI 1 "register_operand" "")
8436 (const_int 0)))
8437 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8438 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8439 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8440 (set (match_dup 0) (const_int 1)))]
8441 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8442 [(set (match_dup 0) (clz:SI (match_dup 1)))
8443 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8444 )
8445
8446 ;; Rd = (eq (reg1) (const_int 0)) // !ARMv5
8447 ;; negs Rd, reg1
8448 ;; adc Rd, Rd, reg1
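;; Added note: this works because "negs Rd, reg1" sets the carry flag exactly
;; when reg1 is zero (0 - reg1 borrows for any non-zero value), so the adc
;; computes (-reg1) + reg1 + carry and leaves just that 0/1 carry in Rd.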
8449 (define_peephole2
8450 [(set (reg:CC CC_REGNUM)
8451 (compare:CC (match_operand:SI 1 "register_operand" "")
8452 (const_int 0)))
8453 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8454 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8455 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8456 (set (match_dup 0) (const_int 1)))
8457 (match_scratch:SI 2 "r")]
8458 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8459 [(parallel
8460 [(set (reg:CC CC_REGNUM)
8461 (compare:CC (const_int 0) (match_dup 1)))
8462 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
8463 (set (match_dup 0)
8464 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
8465 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
8466 )
8467
8468 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
8469 ;; sub Rd, Reg1, reg2
8470 ;; clz Rd, Rd
8471 ;; lsr Rd, Rd, #5
8472 (define_peephole2
8473 [(set (reg:CC CC_REGNUM)
8474 (compare:CC (match_operand:SI 1 "register_operand" "")
8475 (match_operand:SI 2 "arm_rhs_operand" "")))
8476 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8477 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8478 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8479 (set (match_dup 0) (const_int 1)))]
8480 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
8481 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
8482 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
8483 (set (match_dup 0) (clz:SI (match_dup 0)))
8484 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8485 )
8486
8487
8488 ;; Rd = (eq (reg1) (reg2)) // ! ARMv5 or optimising for size.
8489 ;; sub T1, Reg1, reg2
8490 ;; negs Rd, T1
8491 ;; adc Rd, Rd, T1
8492 (define_peephole2
8493 [(set (reg:CC CC_REGNUM)
8494 (compare:CC (match_operand:SI 1 "register_operand" "")
8495 (match_operand:SI 2 "arm_rhs_operand" "")))
8496 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8497 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8498 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8499 (set (match_dup 0) (const_int 1)))
8500 (match_scratch:SI 3 "r")]
8501 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8502 [(set (match_dup 3) (match_dup 4))
8503 (parallel
8504 [(set (reg:CC CC_REGNUM)
8505 (compare:CC (const_int 0) (match_dup 3)))
8506 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
8507 (set (match_dup 0)
8508 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
8509 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
8510 "
8511 if (CONST_INT_P (operands[2]))
8512 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
8513 else
8514 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
8515 ")
8516
8517 (define_insn "*cond_move"
8518 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8519 (if_then_else:SI (match_operator 3 "equality_operator"
8520 [(match_operator 4 "arm_comparison_operator"
8521 [(match_operand 5 "cc_register" "") (const_int 0)])
8522 (const_int 0)])
8523 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
8524 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
8525 "TARGET_ARM"
8526 "*
8527 if (GET_CODE (operands[3]) == NE)
8528 {
8529 if (which_alternative != 1)
8530 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
8531 if (which_alternative != 0)
8532 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
8533 return \"\";
8534 }
8535 if (which_alternative != 0)
8536 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8537 if (which_alternative != 1)
8538 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
8539 return \"\";
8540 "
8541 [(set_attr "conds" "use")
8542 (set_attr_alternative "type"
8543 [(if_then_else (match_operand 2 "const_int_operand" "")
8544 (const_string "mov_imm")
8545 (const_string "mov_reg"))
8546 (if_then_else (match_operand 1 "const_int_operand" "")
8547 (const_string "mov_imm")
8548 (const_string "mov_reg"))
8549 (const_string "multiple")])
8550 (set_attr "length" "4,4,8")]
8551 )
8552
8553 (define_insn "*cond_arith"
8554 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8555 (match_operator:SI 5 "shiftable_operator"
8556 [(match_operator:SI 4 "arm_comparison_operator"
8557 [(match_operand:SI 2 "s_register_operand" "r,r")
8558 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
8559 (match_operand:SI 1 "s_register_operand" "0,?r")]))
8560 (clobber (reg:CC CC_REGNUM))]
8561 "TARGET_ARM"
8562 "*
8563 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
8564 return \"%i5\\t%0, %1, %2, lsr #31\";
8565
8566 output_asm_insn (\"cmp\\t%2, %3\", operands);
8567 if (GET_CODE (operands[5]) == AND)
8568 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
8569 else if (GET_CODE (operands[5]) == MINUS)
8570 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
8571 else if (which_alternative != 0)
8572 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8573 return \"%i5%d4\\t%0, %1, #1\";
8574 "
8575 [(set_attr "conds" "clob")
8576 (set_attr "length" "12")
8577 (set_attr "type" "multiple")]
8578 )
8579
8580 (define_insn "*cond_sub"
8581 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8582 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
8583 (match_operator:SI 4 "arm_comparison_operator"
8584 [(match_operand:SI 2 "s_register_operand" "r,r")
8585 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
8586 (clobber (reg:CC CC_REGNUM))]
8587 "TARGET_ARM"
8588 "*
8589 output_asm_insn (\"cmp\\t%2, %3\", operands);
8590 if (which_alternative != 0)
8591 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8592 return \"sub%d4\\t%0, %1, #1\";
8593 "
8594 [(set_attr "conds" "clob")
8595 (set_attr "length" "8,12")
8596 (set_attr "type" "multiple")]
8597 )
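
;; Hedged example of *cond_sub for "x -= (a < b)" with x tied to operand 1:
;; cmp a, b
;; sublt x, x, #1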
8598
8599 (define_insn "*cmp_ite0"
8600 [(set (match_operand 6 "dominant_cc_register" "")
8601 (compare
8602 (if_then_else:SI
8603 (match_operator 4 "arm_comparison_operator"
8604 [(match_operand:SI 0 "s_register_operand"
8605 "l,l,l,r,r,r,r,r,r")
8606 (match_operand:SI 1 "arm_add_operand"
8607 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
8608 (match_operator:SI 5 "arm_comparison_operator"
8609 [(match_operand:SI 2 "s_register_operand"
8610 "l,r,r,l,l,r,r,r,r")
8611 (match_operand:SI 3 "arm_add_operand"
8612 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
8613 (const_int 0))
8614 (const_int 0)))]
8615 "TARGET_32BIT"
8616 "*
8617 {
8618 static const char * const cmp1[NUM_OF_COND_CMP][2] =
8619 {
8620 {\"cmp%d5\\t%0, %1\",
8621 \"cmp%d4\\t%2, %3\"},
8622 {\"cmn%d5\\t%0, #%n1\",
8623 \"cmp%d4\\t%2, %3\"},
8624 {\"cmp%d5\\t%0, %1\",
8625 \"cmn%d4\\t%2, #%n3\"},
8626 {\"cmn%d5\\t%0, #%n1\",
8627 \"cmn%d4\\t%2, #%n3\"}
8628 };
8629 static const char * const cmp2[NUM_OF_COND_CMP][2] =
8630 {
8631 {\"cmp\\t%2, %3\",
8632 \"cmp\\t%0, %1\"},
8633 {\"cmp\\t%2, %3\",
8634 \"cmn\\t%0, #%n1\"},
8635 {\"cmn\\t%2, #%n3\",
8636 \"cmp\\t%0, %1\"},
8637 {\"cmn\\t%2, #%n3\",
8638 \"cmn\\t%0, #%n1\"}
8639 };
8640 static const char * const ite[2] =
8641 {
8642 \"it\\t%d5\",
8643 \"it\\t%d4\"
8644 };
8645 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
8646 CMP_CMP, CMN_CMP, CMP_CMP,
8647 CMN_CMP, CMP_CMN, CMN_CMN};
8648 int swap =
8649 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8650
8651 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8652 if (TARGET_THUMB2) {
8653 output_asm_insn (ite[swap], operands);
8654 }
8655 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8656 return \"\";
8657 }"
8658 [(set_attr "conds" "set")
8659 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
8660 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
8661 (set_attr "type" "multiple")
8662 (set_attr_alternative "length"
8663 [(const_int 6)
8664 (const_int 8)
8665 (const_int 8)
8666 (const_int 8)
8667 (const_int 8)
8668 (if_then_else (eq_attr "is_thumb" "no")
8669 (const_int 8)
8670 (const_int 10))
8671 (if_then_else (eq_attr "is_thumb" "no")
8672 (const_int 8)
8673 (const_int 10))
8674 (if_then_else (eq_attr "is_thumb" "no")
8675 (const_int 8)
8676 (const_int 10))
8677 (if_then_else (eq_attr "is_thumb" "no")
8678 (const_int 8)
8679 (const_int 10))])]
8680 )
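
;; A hedged sketch of *cmp_ite0 when both comparisons are EQ: the second
;; compare is conditionalised on the first,
;; cmp %0, %1
;; it eq          (Thumb-2 only)
;; cmpeq %2, %3
;; so the final flags describe the AND of the two tests.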
8681
8682 (define_insn "*cmp_ite1"
8683 [(set (match_operand 6 "dominant_cc_register" "")
8684 (compare
8685 (if_then_else:SI
8686 (match_operator 4 "arm_comparison_operator"
8687 [(match_operand:SI 0 "s_register_operand"
8688 "l,l,l,r,r,r,r,r,r")
8689 (match_operand:SI 1 "arm_add_operand"
8690 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
8691 (match_operator:SI 5 "arm_comparison_operator"
8692 [(match_operand:SI 2 "s_register_operand"
8693 "l,r,r,l,l,r,r,r,r")
8694 (match_operand:SI 3 "arm_add_operand"
8695 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
8696 (const_int 1))
8697 (const_int 0)))]
8698 "TARGET_32BIT"
8699 "*
8700 {
8701 static const char * const cmp1[NUM_OF_COND_CMP][2] =
8702 {
8703 {\"cmp\\t%0, %1\",
8704 \"cmp\\t%2, %3\"},
8705 {\"cmn\\t%0, #%n1\",
8706 \"cmp\\t%2, %3\"},
8707 {\"cmp\\t%0, %1\",
8708 \"cmn\\t%2, #%n3\"},
8709 {\"cmn\\t%0, #%n1\",
8710 \"cmn\\t%2, #%n3\"}
8711 };
8712 static const char * const cmp2[NUM_OF_COND_CMP][2] =
8713 {
8714 {\"cmp%d4\\t%2, %3\",
8715 \"cmp%D5\\t%0, %1\"},
8716 {\"cmp%d4\\t%2, %3\",
8717 \"cmn%D5\\t%0, #%n1\"},
8718 {\"cmn%d4\\t%2, #%n3\",
8719 \"cmp%D5\\t%0, %1\"},
8720 {\"cmn%d4\\t%2, #%n3\",
8721 \"cmn%D5\\t%0, #%n1\"}
8722 };
8723 static const char * const ite[2] =
8724 {
8725 \"it\\t%d4\",
8726 \"it\\t%D5\"
8727 };
8728 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
8729 CMP_CMP, CMN_CMP, CMP_CMP,
8730 CMN_CMP, CMP_CMN, CMN_CMN};
8731 int swap =
8732 comparison_dominates_p (GET_CODE (operands[5]),
8733 reverse_condition (GET_CODE (operands[4])));
8734
8735 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8736 if (TARGET_THUMB2) {
8737 output_asm_insn (ite[swap], operands);
8738 }
8739 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8740 return \"\";
8741 }"
8742 [(set_attr "conds" "set")
8743 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
8744 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
8745 (set_attr_alternative "length"
8746 [(const_int 6)
8747 (const_int 8)
8748 (const_int 8)
8749 (const_int 8)
8750 (const_int 8)
8751 (if_then_else (eq_attr "is_thumb" "no")
8752 (const_int 8)
8753 (const_int 10))
8754 (if_then_else (eq_attr "is_thumb" "no")
8755 (const_int 8)
8756 (const_int 10))
8757 (if_then_else (eq_attr "is_thumb" "no")
8758 (const_int 8)
8759 (const_int 10))
8760 (if_then_else (eq_attr "is_thumb" "no")
8761 (const_int 8)
8762 (const_int 10))])
8763 (set_attr "type" "multiple")]
8764 )
8765
8766 (define_insn "*cmp_and"
8767 [(set (match_operand 6 "dominant_cc_register" "")
8768 (compare
8769 (and:SI
8770 (match_operator 4 "arm_comparison_operator"
8771 [(match_operand:SI 0 "s_register_operand"
8772 "l,l,l,r,r,r,r,r,r,r")
8773 (match_operand:SI 1 "arm_add_operand"
8774 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
8775 (match_operator:SI 5 "arm_comparison_operator"
8776 [(match_operand:SI 2 "s_register_operand"
8777 "l,r,r,l,l,r,r,r,r,r")
8778 (match_operand:SI 3 "arm_add_operand"
8779 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
8780 (const_int 0)))]
8781 "TARGET_32BIT"
8782 "*
8783 {
8784 static const char *const cmp1[NUM_OF_COND_CMP][2] =
8785 {
8786 {\"cmp%d5\\t%0, %1\",
8787 \"cmp%d4\\t%2, %3\"},
8788 {\"cmn%d5\\t%0, #%n1\",
8789 \"cmp%d4\\t%2, %3\"},
8790 {\"cmp%d5\\t%0, %1\",
8791 \"cmn%d4\\t%2, #%n3\"},
8792 {\"cmn%d5\\t%0, #%n1\",
8793 \"cmn%d4\\t%2, #%n3\"}
8794 };
8795 static const char *const cmp2[NUM_OF_COND_CMP][2] =
8796 {
8797 {\"cmp\\t%2, %3\",
8798 \"cmp\\t%0, %1\"},
8799 {\"cmp\\t%2, %3\",
8800 \"cmn\\t%0, #%n1\"},
8801 {\"cmn\\t%2, #%n3\",
8802 \"cmp\\t%0, %1\"},
8803 {\"cmn\\t%2, #%n3\",
8804 \"cmn\\t%0, #%n1\"}
8805 };
8806 static const char *const ite[2] =
8807 {
8808 \"it\\t%d5\",
8809 \"it\\t%d4\"
8810 };
8811 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
8812 CMP_CMP, CMN_CMP, CMP_CMP,
8813 CMP_CMP, CMN_CMP, CMP_CMN,
8814 CMN_CMN};
8815 int swap =
8816 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8817
8818 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8819 if (TARGET_THUMB2) {
8820 output_asm_insn (ite[swap], operands);
8821 }
8822 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8823 return \"\";
8824 }"
8825 [(set_attr "conds" "set")
8826 (set_attr "predicable" "no")
8827 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
8828 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
8829 (set_attr_alternative "length"
8830 [(const_int 6)
8831 (const_int 8)
8832 (const_int 8)
8833 (const_int 8)
8834 (const_int 8)
8835 (const_int 6)
8836 (if_then_else (eq_attr "is_thumb" "no")
8837 (const_int 8)
8838 (const_int 10))
8839 (if_then_else (eq_attr "is_thumb" "no")
8840 (const_int 8)
8841 (const_int 10))
8842 (if_then_else (eq_attr "is_thumb" "no")
8843 (const_int 8)
8844 (const_int 10))
8845 (if_then_else (eq_attr "is_thumb" "no")
8846 (const_int 8)
8847 (const_int 10))])
8848 (set_attr "type" "multiple")]
8849 )
8850
8851 (define_insn "*cmp_ior"
8852 [(set (match_operand 6 "dominant_cc_register" "")
8853 (compare
8854 (ior:SI
8855 (match_operator 4 "arm_comparison_operator"
8856 [(match_operand:SI 0 "s_register_operand"
8857 "l,l,l,r,r,r,r,r,r,r")
8858 (match_operand:SI 1 "arm_add_operand"
8859 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
8860 (match_operator:SI 5 "arm_comparison_operator"
8861 [(match_operand:SI 2 "s_register_operand"
8862 "l,r,r,l,l,r,r,r,r,r")
8863 (match_operand:SI 3 "arm_add_operand"
8864 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
8865 (const_int 0)))]
8866 "TARGET_32BIT"
8867 "*
8868 {
8869 static const char *const cmp1[NUM_OF_COND_CMP][2] =
8870 {
8871 {\"cmp\\t%0, %1\",
8872 \"cmp\\t%2, %3\"},
8873 {\"cmn\\t%0, #%n1\",
8874 \"cmp\\t%2, %3\"},
8875 {\"cmp\\t%0, %1\",
8876 \"cmn\\t%2, #%n3\"},
8877 {\"cmn\\t%0, #%n1\",
8878 \"cmn\\t%2, #%n3\"}
8879 };
8880 static const char *const cmp2[NUM_OF_COND_CMP][2] =
8881 {
8882 {\"cmp%D4\\t%2, %3\",
8883 \"cmp%D5\\t%0, %1\"},
8884 {\"cmp%D4\\t%2, %3\",
8885 \"cmn%D5\\t%0, #%n1\"},
8886 {\"cmn%D4\\t%2, #%n3\",
8887 \"cmp%D5\\t%0, %1\"},
8888 {\"cmn%D4\\t%2, #%n3\",
8889 \"cmn%D5\\t%0, #%n1\"}
8890 };
8891 static const char *const ite[2] =
8892 {
8893 \"it\\t%D4\",
8894 \"it\\t%D5\"
8895 };
8896 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
8897 CMP_CMP, CMN_CMP, CMP_CMP,
8898 CMP_CMP, CMN_CMP, CMP_CMN,
8899 CMN_CMN};
8900 int swap =
8901 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8902
8903 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8904 if (TARGET_THUMB2) {
8905 output_asm_insn (ite[swap], operands);
8906 }
8907 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8908 return \"\";
8909 }
8910 "
8911 [(set_attr "conds" "set")
8912 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
8913 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
8914 (set_attr_alternative "length"
8915 [(const_int 6)
8916 (const_int 8)
8917 (const_int 8)
8918 (const_int 8)
8919 (const_int 8)
8920 (const_int 6)
8921 (if_then_else (eq_attr "is_thumb" "no")
8922 (const_int 8)
8923 (const_int 10))
8924 (if_then_else (eq_attr "is_thumb" "no")
8925 (const_int 8)
8926 (const_int 10))
8927 (if_then_else (eq_attr "is_thumb" "no")
8928 (const_int 8)
8929 (const_int 10))
8930 (if_then_else (eq_attr "is_thumb" "no")
8931 (const_int 8)
8932 (const_int 10))])
8933 (set_attr "type" "multiple")]
8934 )
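
;; A hedged sketch of *cmp_ior when both comparisons are EQ:
;; cmp %2, %3
;; it ne          (Thumb-2 only)
;; cmpne %0, %1
;; i.e. the second compare only runs when the first test failed, so the
;; final flags describe the OR of the two tests.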
8935
8936 (define_insn_and_split "*ior_scc_scc"
8937 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8938 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
8939 [(match_operand:SI 1 "s_register_operand" "l,r")
8940 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
8941 (match_operator:SI 6 "arm_comparison_operator"
8942 [(match_operand:SI 4 "s_register_operand" "l,r")
8943 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
8944 (clobber (reg:CC CC_REGNUM))]
8945 "TARGET_32BIT
8946 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
8947 != CCmode)"
8948 "#"
8949 "TARGET_32BIT && reload_completed"
8950 [(set (match_dup 7)
8951 (compare
8952 (ior:SI
8953 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8954 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8955 (const_int 0)))
8956 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
8957 "operands[7]
8958 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
8959 DOM_CC_X_OR_Y),
8960 CC_REGNUM);"
8961 [(set_attr "conds" "clob")
8962 (set_attr "enabled_for_short_it" "yes,no")
8963 (set_attr "length" "16")
8964 (set_attr "type" "multiple")]
8965 )
8966
8967 ; If the above pattern is followed by a CMP insn, then the compare is
8968 ; redundant, since we can rework the conditional instruction that follows.
8969 (define_insn_and_split "*ior_scc_scc_cmp"
8970 [(set (match_operand 0 "dominant_cc_register" "")
8971 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
8972 [(match_operand:SI 1 "s_register_operand" "l,r")
8973 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
8974 (match_operator:SI 6 "arm_comparison_operator"
8975 [(match_operand:SI 4 "s_register_operand" "l,r")
8976 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
8977 (const_int 0)))
8978 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
8979 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8980 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
8981 "TARGET_32BIT"
8982 "#"
8983 "TARGET_32BIT && reload_completed"
8984 [(set (match_dup 0)
8985 (compare
8986 (ior:SI
8987 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8988 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8989 (const_int 0)))
8990 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
8991 ""
8992 [(set_attr "conds" "set")
8993 (set_attr "enabled_for_short_it" "yes,no")
8994 (set_attr "length" "16")
8995 (set_attr "type" "multiple")]
8996 )
8997
8998 (define_insn_and_split "*and_scc_scc"
8999 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9000 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9001 [(match_operand:SI 1 "s_register_operand" "l,r")
9002 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9003 (match_operator:SI 6 "arm_comparison_operator"
9004 [(match_operand:SI 4 "s_register_operand" "l,r")
9005 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
9006 (clobber (reg:CC CC_REGNUM))]
9007 "TARGET_32BIT
9008 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9009 != CCmode)"
9010 "#"
9011 "TARGET_32BIT && reload_completed
9012 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9013 != CCmode)"
9014 [(set (match_dup 7)
9015 (compare
9016 (and:SI
9017 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9018 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9019 (const_int 0)))
9020 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9021 "operands[7]
9022 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9023 DOM_CC_X_AND_Y),
9024 CC_REGNUM);"
9025 [(set_attr "conds" "clob")
9026 (set_attr "enabled_for_short_it" "yes,no")
9027 (set_attr "length" "16")
9028 (set_attr "type" "multiple")]
9029 )
9030
9031 ; If the above pattern is followed by a CMP insn, then the compare is
9032 ; redundant, since we can rework the conditional instruction that follows.
9033 (define_insn_and_split "*and_scc_scc_cmp"
9034 [(set (match_operand 0 "dominant_cc_register" "")
9035 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9036 [(match_operand:SI 1 "s_register_operand" "l,r")
9037 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9038 (match_operator:SI 6 "arm_comparison_operator"
9039 [(match_operand:SI 4 "s_register_operand" "l,r")
9040 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9041 (const_int 0)))
9042 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9043 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9044 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9045 "TARGET_32BIT"
9046 "#"
9047 "TARGET_32BIT && reload_completed"
9048 [(set (match_dup 0)
9049 (compare
9050 (and:SI
9051 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9052 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9053 (const_int 0)))
9054 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9055 ""
9056 [(set_attr "conds" "set")
9057 (set_attr "enabled_for_short_it" "yes,no")
9058 (set_attr "length" "16")
9059 (set_attr "type" "multiple")]
9060 )
9061
9062 ;; If there is no dominance in the comparison, then we can still save an
9063 ;; instruction in the AND case, since we know that the second compare
9064 ;; need only zero the value if it is false (if it is true, the value is
9065 ;; already correct).  See the sketch after the pattern below.
9066 (define_insn_and_split "*and_scc_scc_nodom"
9067 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
9068 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9069 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9070 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9071 (match_operator:SI 6 "arm_comparison_operator"
9072 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9073 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9074 (clobber (reg:CC CC_REGNUM))]
9075 "TARGET_32BIT
9076 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9077 == CCmode)"
9078 "#"
9079 "TARGET_32BIT && reload_completed"
9080 [(parallel [(set (match_dup 0)
9081 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9082 (clobber (reg:CC CC_REGNUM))])
9083 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9084 (set (match_dup 0)
9085 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9086 (match_dup 0)
9087 (const_int 0)))]
9088 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9089 operands[4], operands[5]),
9090 CC_REGNUM);
9091 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9092 operands[5]);"
9093 [(set_attr "conds" "clob")
9094 (set_attr "length" "20")
9095 (set_attr "type" "multiple")]
9096 )
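
;; A hedged sketch of the code after the split above, for
;; "Rd = (a < b) && (c > d)" where neither condition dominates the other:
;; cmp a, b
;; movge Rd, #0
;; movlt Rd, #1
;; cmp c, d
;; movle Rd, #0    @ zero Rd only when the second test fails
;; The exact ordering depends on later passes; the length of 20 assumes
;; five instructions of this shape.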
9097
9098 (define_split
9099 [(set (reg:CC_NOOV CC_REGNUM)
9100 (compare:CC_NOOV (ior:SI
9101 (and:SI (match_operand:SI 0 "s_register_operand" "")
9102 (const_int 1))
9103 (match_operator:SI 1 "arm_comparison_operator"
9104 [(match_operand:SI 2 "s_register_operand" "")
9105 (match_operand:SI 3 "arm_add_operand" "")]))
9106 (const_int 0)))
9107 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9108 "TARGET_ARM"
9109 [(set (match_dup 4)
9110 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9111 (match_dup 0)))
9112 (set (reg:CC_NOOV CC_REGNUM)
9113 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9114 (const_int 0)))]
9115 "")
9116
9117 (define_split
9118 [(set (reg:CC_NOOV CC_REGNUM)
9119 (compare:CC_NOOV (ior:SI
9120 (match_operator:SI 1 "arm_comparison_operator"
9121 [(match_operand:SI 2 "s_register_operand" "")
9122 (match_operand:SI 3 "arm_add_operand" "")])
9123 (and:SI (match_operand:SI 0 "s_register_operand" "")
9124 (const_int 1)))
9125 (const_int 0)))
9126 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9127 "TARGET_ARM"
9128 [(set (match_dup 4)
9129 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9130 (match_dup 0)))
9131 (set (reg:CC_NOOV CC_REGNUM)
9132 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9133 (const_int 0)))]
9134 "")
9135 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9136
9137 (define_insn_and_split "*negscc"
9138 [(set (match_operand:SI 0 "s_register_operand" "=r")
9139 (neg:SI (match_operator 3 "arm_comparison_operator"
9140 [(match_operand:SI 1 "s_register_operand" "r")
9141 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9142 (clobber (reg:CC CC_REGNUM))]
9143 "TARGET_ARM"
9144 "#"
9145 "&& reload_completed"
9146 [(const_int 0)]
9147 {
9148 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
9149
9150 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9151 {
9152 /* Emit mov\\t%0, %1, asr #31 */
9153 emit_insn (gen_rtx_SET (operands[0],
9154 gen_rtx_ASHIFTRT (SImode,
9155 operands[1],
9156 GEN_INT (31))));
9157 DONE;
9158 }
9159 else if (GET_CODE (operands[3]) == NE)
9160 {
9161 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
9162 if (CONST_INT_P (operands[2]))
9163 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
9164 gen_int_mode (-INTVAL (operands[2]),
9165 SImode)));
9166 else
9167 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
9168
9169 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9170 gen_rtx_NE (SImode,
9171 cc_reg,
9172 const0_rtx),
9173 gen_rtx_SET (operands[0],
9174 GEN_INT (~0))));
9175 DONE;
9176 }
9177 else
9178 {
9179 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
9180 emit_insn (gen_rtx_SET (cc_reg,
9181 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
9182 enum rtx_code rc = GET_CODE (operands[3]);
9183
9184 rc = reverse_condition (rc);
9185 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9186 gen_rtx_fmt_ee (rc,
9187 VOIDmode,
9188 cc_reg,
9189 const0_rtx),
9190 gen_rtx_SET (operands[0], const0_rtx)));
9191 rc = GET_CODE (operands[3]);
9192 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9193 gen_rtx_fmt_ee (rc,
9194 VOIDmode,
9195 cc_reg,
9196 const0_rtx),
9197 gen_rtx_SET (operands[0],
9198 GEN_INT (~0))));
9199 DONE;
9200 }
9201 FAIL;
9202 }
9203 [(set_attr "conds" "clob")
9204 (set_attr "length" "12")
9205 (set_attr "type" "multiple")]
9206 )
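
;; Hedged illustration of the three *negscc cases, for "r = -(a OP b)":
;; a < 0 (b == 0):  mov r, a, asr #31
;; a != b:          subs r, a, b
;;                  mvnne r, #0
;; generic a < b:   cmp a, b
;;                  movge r, #0
;;                  mvnlt r, #0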
9207
9208 (define_insn_and_split "movcond_addsi"
9209 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
9210 (if_then_else:SI
9211 (match_operator 5 "comparison_operator"
9212 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
9213 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
9214 (const_int 0)])
9215 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
9216 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
9217 (clobber (reg:CC CC_REGNUM))]
9218 "TARGET_32BIT"
9219 "#"
9220 "&& reload_completed"
9221 [(set (reg:CC_NOOV CC_REGNUM)
9222 (compare:CC_NOOV
9223 (plus:SI (match_dup 3)
9224 (match_dup 4))
9225 (const_int 0)))
9226 (set (match_dup 0) (match_dup 1))
9227 (cond_exec (match_dup 6)
9228 (set (match_dup 0) (match_dup 2)))]
9229 "
9230 {
9231 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
9232 operands[3], operands[4]);
9233 enum rtx_code rc = GET_CODE (operands[5]);
9234 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
9235 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
9236 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
9237 rc = reverse_condition (rc);
9238 else
9239 std::swap (operands[1], operands[2]);
9240
9241 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
9242 }
9243 "
9244 [(set_attr "conds" "clob")
9245 (set_attr "enabled_for_short_it" "no,yes,yes")
9246 (set_attr "type" "multiple")]
9247 )
9248
9249 (define_insn "movcond"
9250 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9251 (if_then_else:SI
9252 (match_operator 5 "arm_comparison_operator"
9253 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9254 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9255 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9256 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9257 (clobber (reg:CC CC_REGNUM))]
9258 "TARGET_ARM"
9259 "*
9260 if (GET_CODE (operands[5]) == LT
9261 && (operands[4] == const0_rtx))
9262 {
9263 if (which_alternative != 1 && REG_P (operands[1]))
9264 {
9265 if (operands[2] == const0_rtx)
9266 return \"and\\t%0, %1, %3, asr #31\";
9267 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9268 }
9269 else if (which_alternative != 0 && REG_P (operands[2]))
9270 {
9271 if (operands[1] == const0_rtx)
9272 return \"bic\\t%0, %2, %3, asr #31\";
9273 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9274 }
9275 /* The only case that falls through to here is when both ops 1 & 2
9276 are constants. */
9277 }
9278
9279 if (GET_CODE (operands[5]) == GE
9280 && (operands[4] == const0_rtx))
9281 {
9282 if (which_alternative != 1 && REG_P (operands[1]))
9283 {
9284 if (operands[2] == const0_rtx)
9285 return \"bic\\t%0, %1, %3, asr #31\";
9286 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9287 }
9288 else if (which_alternative != 0 && REG_P (operands[2]))
9289 {
9290 if (operands[1] == const0_rtx)
9291 return \"and\\t%0, %2, %3, asr #31\";
9292 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9293 }
9294 /* The only case that falls through to here is when both ops 1 & 2
9295 are constants. */
9296 }
9297 if (CONST_INT_P (operands[4])
9298 && !const_ok_for_arm (INTVAL (operands[4])))
9299 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9300 else
9301 output_asm_insn (\"cmp\\t%3, %4\", operands);
9302 if (which_alternative != 0)
9303 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9304 if (which_alternative != 1)
9305 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9306 return \"\";
9307 "
9308 [(set_attr "conds" "clob")
9309 (set_attr "length" "8,8,12")
9310 (set_attr "type" "multiple")]
9311 )
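
;; Hedged example of the LT-against-zero shortcut in movcond:
;; "x = (a < 0) ? b : 0" can be emitted as the single instruction
;; and x, b, a, asr #31
;; because "a, asr #31" is all ones exactly when a is negative.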
9312
9313 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9314
9315 (define_insn "*ifcompare_plus_move"
9316 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9317 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9318 [(match_operand:SI 4 "s_register_operand" "r,r")
9319 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9320 (plus:SI
9321 (match_operand:SI 2 "s_register_operand" "r,r")
9322 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9323 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9324 (clobber (reg:CC CC_REGNUM))]
9325 "TARGET_ARM"
9326 "#"
9327 [(set_attr "conds" "clob")
9328 (set_attr "length" "8,12")
9329 (set_attr "type" "multiple")]
9330 )
9331
9332 (define_insn "*if_plus_move"
9333 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9334 (if_then_else:SI
9335 (match_operator 4 "arm_comparison_operator"
9336 [(match_operand 5 "cc_register" "") (const_int 0)])
9337 (plus:SI
9338 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9339 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9340 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9341 "TARGET_ARM"
9342 "@
9343 add%d4\\t%0, %2, %3
9344 sub%d4\\t%0, %2, #%n3
9345 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9346 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9347 [(set_attr "conds" "use")
9348 (set_attr "length" "4,4,8,8")
9349 (set_attr_alternative "type"
9350 [(if_then_else (match_operand 3 "const_int_operand" "")
9351 (const_string "alu_imm" )
9352 (const_string "alu_sreg"))
9353 (const_string "alu_imm")
9354 (const_string "multiple")
9355 (const_string "multiple")])]
9356 )
9357
9358 (define_insn "*ifcompare_move_plus"
9359 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9360 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9361 [(match_operand:SI 4 "s_register_operand" "r,r")
9362 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9363 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9364 (plus:SI
9365 (match_operand:SI 2 "s_register_operand" "r,r")
9366 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9367 (clobber (reg:CC CC_REGNUM))]
9368 "TARGET_ARM"
9369 "#"
9370 [(set_attr "conds" "clob")
9371 (set_attr "length" "8,12")
9372 (set_attr "type" "multiple")]
9373 )
9374
9375 (define_insn "*if_move_plus"
9376 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9377 (if_then_else:SI
9378 (match_operator 4 "arm_comparison_operator"
9379 [(match_operand 5 "cc_register" "") (const_int 0)])
9380 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9381 (plus:SI
9382 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9383 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9384 "TARGET_ARM"
9385 "@
9386 add%D4\\t%0, %2, %3
9387 sub%D4\\t%0, %2, #%n3
9388 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9389 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9390 [(set_attr "conds" "use")
9391 (set_attr "length" "4,4,8,8")
9392 (set_attr_alternative "type"
9393 [(if_then_else (match_operand 3 "const_int_operand" "")
9394 (const_string "alu_imm" )
9395 (const_string "alu_sreg"))
9396 (const_string "alu_imm")
9397 (const_string "multiple")
9398 (const_string "multiple")])]
9399 )
9400
9401 (define_insn "*ifcompare_arith_arith"
9402 [(set (match_operand:SI 0 "s_register_operand" "=r")
9403 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9404 [(match_operand:SI 5 "s_register_operand" "r")
9405 (match_operand:SI 6 "arm_add_operand" "rIL")])
9406 (match_operator:SI 8 "shiftable_operator"
9407 [(match_operand:SI 1 "s_register_operand" "r")
9408 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9409 (match_operator:SI 7 "shiftable_operator"
9410 [(match_operand:SI 3 "s_register_operand" "r")
9411 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9412 (clobber (reg:CC CC_REGNUM))]
9413 "TARGET_ARM"
9414 "#"
9415 [(set_attr "conds" "clob")
9416 (set_attr "length" "12")
9417 (set_attr "type" "multiple")]
9418 )
9419
9420 (define_insn "*if_arith_arith"
9421 [(set (match_operand:SI 0 "s_register_operand" "=r")
9422 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9423 [(match_operand 8 "cc_register" "") (const_int 0)])
9424 (match_operator:SI 6 "shiftable_operator"
9425 [(match_operand:SI 1 "s_register_operand" "r")
9426 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9427 (match_operator:SI 7 "shiftable_operator"
9428 [(match_operand:SI 3 "s_register_operand" "r")
9429 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9430 "TARGET_ARM"
9431 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9432 [(set_attr "conds" "use")
9433 (set_attr "length" "8")
9434 (set_attr "type" "multiple")]
9435 )
9436
9437 (define_insn "*ifcompare_arith_move"
9438 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9439 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9440 [(match_operand:SI 2 "s_register_operand" "r,r")
9441 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9442 (match_operator:SI 7 "shiftable_operator"
9443 [(match_operand:SI 4 "s_register_operand" "r,r")
9444 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9445 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9446 (clobber (reg:CC CC_REGNUM))]
9447 "TARGET_ARM"
9448 "*
9449 /* If we have an operation where (op x 0) is the identity operation, the
9450 conditional operator is LT or GE, we are comparing against zero, and
9451 everything is in registers, then we can do this in two instructions.  */
9452 if (operands[3] == const0_rtx
9453 && GET_CODE (operands[7]) != AND
9454 && REG_P (operands[5])
9455 && REG_P (operands[1])
9456 && REGNO (operands[1]) == REGNO (operands[4])
9457 && REGNO (operands[4]) != REGNO (operands[0]))
9458 {
9459 if (GET_CODE (operands[6]) == LT)
9460 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9461 else if (GET_CODE (operands[6]) == GE)
9462 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9463 }
9464 if (CONST_INT_P (operands[3])
9465 && !const_ok_for_arm (INTVAL (operands[3])))
9466 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9467 else
9468 output_asm_insn (\"cmp\\t%2, %3\", operands);
9469 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9470 if (which_alternative != 0)
9471 return \"mov%D6\\t%0, %1\";
9472 return \"\";
9473 "
9474 [(set_attr "conds" "clob")
9475 (set_attr "length" "8,12")
9476 (set_attr "type" "multiple")]
9477 )
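
;; Hedged example of the two-instruction case described in the comment
;; inside the pattern above: "x = (a < 0) ? (y + z) : y", with x, y and z
;; in distinct registers, can become
;; and x, z, a, asr #31
;; add x, y, x
;; i.e. z is masked to zero when a is non-negative before the add.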
9478
9479 (define_insn "*if_arith_move"
9480 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9481 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9482 [(match_operand 6 "cc_register" "") (const_int 0)])
9483 (match_operator:SI 5 "shiftable_operator"
9484 [(match_operand:SI 2 "s_register_operand" "r,r")
9485 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9486 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9487 "TARGET_ARM"
9488 "@
9489 %I5%d4\\t%0, %2, %3
9490 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9491 [(set_attr "conds" "use")
9492 (set_attr "length" "4,8")
9493 (set_attr_alternative "type"
9494 [(if_then_else (match_operand 3 "const_int_operand" "")
9495 (const_string "alu_shift_imm" )
9496 (const_string "alu_shift_reg"))
9497 (const_string "multiple")])]
9498 )
9499
9500 (define_insn "*ifcompare_move_arith"
9501 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9502 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9503 [(match_operand:SI 4 "s_register_operand" "r,r")
9504 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9505 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9506 (match_operator:SI 7 "shiftable_operator"
9507 [(match_operand:SI 2 "s_register_operand" "r,r")
9508 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9509 (clobber (reg:CC CC_REGNUM))]
9510 "TARGET_ARM"
9511 "*
9512 /* If we have an operation where (op x 0) is the identity operation, the
9513 conditional operator is LT or GE, we are comparing against zero, and
9514 everything is in registers, then we can do this in two instructions.  */
9515 if (operands[5] == const0_rtx
9516 && GET_CODE (operands[7]) != AND
9517 && REG_P (operands[3])
9518 && REG_P (operands[1])
9519 && REGNO (operands[1]) == REGNO (operands[2])
9520 && REGNO (operands[2]) != REGNO (operands[0]))
9521 {
9522 if (GET_CODE (operands[6]) == GE)
9523 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9524 else if (GET_CODE (operands[6]) == LT)
9525 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9526 }
9527
9528 if (CONST_INT_P (operands[5])
9529 && !const_ok_for_arm (INTVAL (operands[5])))
9530 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9531 else
9532 output_asm_insn (\"cmp\\t%4, %5\", operands);
9533
9534 if (which_alternative != 0)
9535 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9536 return \"%I7%D6\\t%0, %2, %3\";
9537 "
9538 [(set_attr "conds" "clob")
9539 (set_attr "length" "8,12")
9540 (set_attr "type" "multiple")]
9541 )
9542
9543 (define_insn "*if_move_arith"
9544 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9545 (if_then_else:SI
9546 (match_operator 4 "arm_comparison_operator"
9547 [(match_operand 6 "cc_register" "") (const_int 0)])
9548 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9549 (match_operator:SI 5 "shiftable_operator"
9550 [(match_operand:SI 2 "s_register_operand" "r,r")
9551 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9552 "TARGET_ARM"
9553 "@
9554 %I5%D4\\t%0, %2, %3
9555 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9556 [(set_attr "conds" "use")
9557 (set_attr "length" "4,8")
9558 (set_attr_alternative "type"
9559 [(if_then_else (match_operand 3 "const_int_operand" "")
9560 (const_string "alu_shift_imm" )
9561 (const_string "alu_shift_reg"))
9562 (const_string "multiple")])]
9563 )
9564
9565 (define_insn "*ifcompare_move_not"
9566 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9567 (if_then_else:SI
9568 (match_operator 5 "arm_comparison_operator"
9569 [(match_operand:SI 3 "s_register_operand" "r,r")
9570 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9571 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9572 (not:SI
9573 (match_operand:SI 2 "s_register_operand" "r,r"))))
9574 (clobber (reg:CC CC_REGNUM))]
9575 "TARGET_ARM"
9576 "#"
9577 [(set_attr "conds" "clob")
9578 (set_attr "length" "8,12")
9579 (set_attr "type" "multiple")]
9580 )
9581
9582 (define_insn "*if_move_not"
9583 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9584 (if_then_else:SI
9585 (match_operator 4 "arm_comparison_operator"
9586 [(match_operand 3 "cc_register" "") (const_int 0)])
9587 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9588 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9589 "TARGET_ARM"
9590 "@
9591 mvn%D4\\t%0, %2
9592 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9593 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9594 [(set_attr "conds" "use")
9596 (set_attr "length" "4,8,8")
9597 (set_attr "type" "mvn_reg,multiple,multiple")]
9598 )
9599
9600 (define_insn "*ifcompare_not_move"
9601 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9602 (if_then_else:SI
9603 (match_operator 5 "arm_comparison_operator"
9604 [(match_operand:SI 3 "s_register_operand" "r,r")
9605 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9606 (not:SI
9607 (match_operand:SI 2 "s_register_operand" "r,r"))
9608 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9609 (clobber (reg:CC CC_REGNUM))]
9610 "TARGET_ARM"
9611 "#"
9612 [(set_attr "conds" "clob")
9613 (set_attr "length" "8,12")
9614 (set_attr "type" "multiple")]
9615 )
9616
9617 (define_insn "*if_not_move"
9618 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9619 (if_then_else:SI
9620 (match_operator 4 "arm_comparison_operator"
9621 [(match_operand 3 "cc_register" "") (const_int 0)])
9622 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9623 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9624 "TARGET_ARM"
9625 "@
9626 mvn%d4\\t%0, %2
9627 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9628 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9629 [(set_attr "conds" "use")
9630 (set_attr "type" "mvn_reg,multiple,multiple")
9631 (set_attr "length" "4,8,8")]
9632 )
9633
9634 (define_insn "*ifcompare_shift_move"
9635 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9636 (if_then_else:SI
9637 (match_operator 6 "arm_comparison_operator"
9638 [(match_operand:SI 4 "s_register_operand" "r,r")
9639 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9640 (match_operator:SI 7 "shift_operator"
9641 [(match_operand:SI 2 "s_register_operand" "r,r")
9642 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9643 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9644 (clobber (reg:CC CC_REGNUM))]
9645 "TARGET_ARM"
9646 "#"
9647 [(set_attr "conds" "clob")
9648 (set_attr "length" "8,12")
9649 (set_attr "type" "multiple")]
9650 )
9651
9652 (define_insn "*if_shift_move"
9653 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9654 (if_then_else:SI
9655 (match_operator 5 "arm_comparison_operator"
9656 [(match_operand 6 "cc_register" "") (const_int 0)])
9657 (match_operator:SI 4 "shift_operator"
9658 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9659 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9660 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9661 "TARGET_ARM"
9662 "@
9663 mov%d5\\t%0, %2%S4
9664 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9665 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9666 [(set_attr "conds" "use")
9667 (set_attr "shift" "2")
9668 (set_attr "length" "4,8,8")
9669 (set_attr_alternative "type"
9670 [(if_then_else (match_operand 3 "const_int_operand" "")
9671 (const_string "mov_shift" )
9672 (const_string "mov_shift_reg"))
9673 (const_string "multiple")
9674 (const_string "multiple")])]
9675 )
9676
9677 (define_insn "*ifcompare_move_shift"
9678 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9679 (if_then_else:SI
9680 (match_operator 6 "arm_comparison_operator"
9681 [(match_operand:SI 4 "s_register_operand" "r,r")
9682 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9683 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9684 (match_operator:SI 7 "shift_operator"
9685 [(match_operand:SI 2 "s_register_operand" "r,r")
9686 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9687 (clobber (reg:CC CC_REGNUM))]
9688 "TARGET_ARM"
9689 "#"
9690 [(set_attr "conds" "clob")
9691 (set_attr "length" "8,12")
9692 (set_attr "type" "multiple")]
9693 )
9694
9695 (define_insn "*if_move_shift"
9696 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9697 (if_then_else:SI
9698 (match_operator 5 "arm_comparison_operator"
9699 [(match_operand 6 "cc_register" "") (const_int 0)])
9700 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9701 (match_operator:SI 4 "shift_operator"
9702 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9703 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9704 "TARGET_ARM"
9705 "@
9706 mov%D5\\t%0, %2%S4
9707 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9708 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9709 [(set_attr "conds" "use")
9710 (set_attr "shift" "2")
9711 (set_attr "length" "4,8,8")
9712 (set_attr_alternative "type"
9713 [(if_then_else (match_operand 3 "const_int_operand" "")
9714 (const_string "mov_shift" )
9715 (const_string "mov_shift_reg"))
9716 (const_string "multiple")
9717 (const_string "multiple")])]
9718 )
9719
9720 (define_insn "*ifcompare_shift_shift"
9721 [(set (match_operand:SI 0 "s_register_operand" "=r")
9722 (if_then_else:SI
9723 (match_operator 7 "arm_comparison_operator"
9724 [(match_operand:SI 5 "s_register_operand" "r")
9725 (match_operand:SI 6 "arm_add_operand" "rIL")])
9726 (match_operator:SI 8 "shift_operator"
9727 [(match_operand:SI 1 "s_register_operand" "r")
9728 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9729 (match_operator:SI 9 "shift_operator"
9730 [(match_operand:SI 3 "s_register_operand" "r")
9731 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9732 (clobber (reg:CC CC_REGNUM))]
9733 "TARGET_ARM"
9734 "#"
9735 [(set_attr "conds" "clob")
9736 (set_attr "length" "12")
9737 (set_attr "type" "multiple")]
9738 )
9739
9740 (define_insn "*if_shift_shift"
9741 [(set (match_operand:SI 0 "s_register_operand" "=r")
9742 (if_then_else:SI
9743 (match_operator 5 "arm_comparison_operator"
9744 [(match_operand 8 "cc_register" "") (const_int 0)])
9745 (match_operator:SI 6 "shift_operator"
9746 [(match_operand:SI 1 "s_register_operand" "r")
9747 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9748 (match_operator:SI 7 "shift_operator"
9749 [(match_operand:SI 3 "s_register_operand" "r")
9750 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
9751 "TARGET_ARM"
9752 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
9753 [(set_attr "conds" "use")
9754 (set_attr "shift" "1")
9755 (set_attr "length" "8")
9756 (set (attr "type") (if_then_else
9757 (and (match_operand 2 "const_int_operand" "")
9758 (match_operand 4 "const_int_operand" ""))
9759 (const_string "mov_shift")
9760 (const_string "mov_shift_reg")))]
9761 )
9762
9763 (define_insn "*ifcompare_not_arith"
9764 [(set (match_operand:SI 0 "s_register_operand" "=r")
9765 (if_then_else:SI
9766 (match_operator 6 "arm_comparison_operator"
9767 [(match_operand:SI 4 "s_register_operand" "r")
9768 (match_operand:SI 5 "arm_add_operand" "rIL")])
9769 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9770 (match_operator:SI 7 "shiftable_operator"
9771 [(match_operand:SI 2 "s_register_operand" "r")
9772 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
9773 (clobber (reg:CC CC_REGNUM))]
9774 "TARGET_ARM"
9775 "#"
9776 [(set_attr "conds" "clob")
9777 (set_attr "length" "12")
9778 (set_attr "type" "multiple")]
9779 )
9780
9781 (define_insn "*if_not_arith"
9782 [(set (match_operand:SI 0 "s_register_operand" "=r")
9783 (if_then_else:SI
9784 (match_operator 5 "arm_comparison_operator"
9785 [(match_operand 4 "cc_register" "") (const_int 0)])
9786 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9787 (match_operator:SI 6 "shiftable_operator"
9788 [(match_operand:SI 2 "s_register_operand" "r")
9789 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
9790 "TARGET_ARM"
9791 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
9792 [(set_attr "conds" "use")
9793 (set_attr "type" "mvn_reg")
9794 (set_attr "length" "8")]
9795 )
9796
9797 (define_insn "*ifcompare_arith_not"
9798 [(set (match_operand:SI 0 "s_register_operand" "=r")
9799 (if_then_else:SI
9800 (match_operator 6 "arm_comparison_operator"
9801 [(match_operand:SI 4 "s_register_operand" "r")
9802 (match_operand:SI 5 "arm_add_operand" "rIL")])
9803 (match_operator:SI 7 "shiftable_operator"
9804 [(match_operand:SI 2 "s_register_operand" "r")
9805 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9806 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
9807 (clobber (reg:CC CC_REGNUM))]
9808 "TARGET_ARM"
9809 "#"
9810 [(set_attr "conds" "clob")
9811 (set_attr "length" "12")
9812 (set_attr "type" "multiple")]
9813 )
9814
9815 (define_insn "*if_arith_not"
9816 [(set (match_operand:SI 0 "s_register_operand" "=r")
9817 (if_then_else:SI
9818 (match_operator 5 "arm_comparison_operator"
9819 [(match_operand 4 "cc_register" "") (const_int 0)])
9820 (match_operator:SI 6 "shiftable_operator"
9821 [(match_operand:SI 2 "s_register_operand" "r")
9822 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9823 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
9824 "TARGET_ARM"
9825 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
9826 [(set_attr "conds" "use")
9827 (set_attr "type" "multiple")
9828 (set_attr "length" "8")]
9829 )
9830
9831 (define_insn "*ifcompare_neg_move"
9832 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9833 (if_then_else:SI
9834 (match_operator 5 "arm_comparison_operator"
9835 [(match_operand:SI 3 "s_register_operand" "r,r")
9836 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9837 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
9838 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9839 (clobber (reg:CC CC_REGNUM))]
9840 "TARGET_ARM"
9841 "#"
9842 [(set_attr "conds" "clob")
9843 (set_attr "length" "8,12")
9844 (set_attr "type" "multiple")]
9845 )
9846
9847 (define_insn_and_split "*if_neg_move"
9848 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
9849 (if_then_else:SI
9850 (match_operator 4 "arm_comparison_operator"
9851 [(match_operand 3 "cc_register" "") (const_int 0)])
9852 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
9853 (match_operand:SI 1 "s_register_operand" "0,0")))]
9854 "TARGET_32BIT"
9855 "#"
9856 "&& reload_completed"
9857 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
9858 (set (match_dup 0) (neg:SI (match_dup 2))))]
9859 ""
9860 [(set_attr "conds" "use")
9861 (set_attr "length" "4")
9862 (set_attr "arch" "t2,32")
9863 (set_attr "enabled_for_short_it" "yes,no")
9864 (set_attr "type" "logic_shift_imm")]
9865 )
9866
9867 (define_insn "*ifcompare_move_neg"
9868 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9869 (if_then_else:SI
9870 (match_operator 5 "arm_comparison_operator"
9871 [(match_operand:SI 3 "s_register_operand" "r,r")
9872 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9873 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9874 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
9875 (clobber (reg:CC CC_REGNUM))]
9876 "TARGET_ARM"
9877 "#"
9878 [(set_attr "conds" "clob")
9879 (set_attr "length" "8,12")
9880 (set_attr "type" "multiple")]
9881 )
9882
9883 (define_insn_and_split "*if_move_neg"
9884 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
9885 (if_then_else:SI
9886 (match_operator 4 "arm_comparison_operator"
9887 [(match_operand 3 "cc_register" "") (const_int 0)])
9888 (match_operand:SI 1 "s_register_operand" "0,0")
9889 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
9890 "TARGET_32BIT"
9891 "#"
9892 "&& reload_completed"
9893 [(cond_exec (match_dup 5)
9894 (set (match_dup 0) (neg:SI (match_dup 2))))]
9895 {
9896 machine_mode mode = GET_MODE (operands[3]);
9897 rtx_code rc = GET_CODE (operands[4]);
9898
9899 if (mode == CCFPmode || mode == CCFPEmode)
9900 rc = reverse_condition_maybe_unordered (rc);
9901 else
9902 rc = reverse_condition (rc);
9903
9904 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
9905 }
9906 [(set_attr "conds" "use")
9907 (set_attr "length" "4")
9908 (set_attr "arch" "t2,32")
9909 (set_attr "enabled_for_short_it" "yes,no")
9910 (set_attr "type" "logic_shift_imm")]
9911 )
9912
9913 (define_insn "*arith_adjacentmem"
9914 [(set (match_operand:SI 0 "s_register_operand" "=r")
9915 (match_operator:SI 1 "shiftable_operator"
9916 [(match_operand:SI 2 "memory_operand" "m")
9917 (match_operand:SI 3 "memory_operand" "m")]))
9918 (clobber (match_scratch:SI 4 "=r"))]
9919 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
9920 "*
9921 {
9922 rtx ldm[3];
9923 rtx arith[4];
9924 rtx base_reg;
9925 HOST_WIDE_INT val1 = 0, val2 = 0;
9926
9927 if (REGNO (operands[0]) > REGNO (operands[4]))
9928 {
9929 ldm[1] = operands[4];
9930 ldm[2] = operands[0];
9931 }
9932 else
9933 {
9934 ldm[1] = operands[0];
9935 ldm[2] = operands[4];
9936 }
9937
9938 base_reg = XEXP (operands[2], 0);
9939
9940 if (!REG_P (base_reg))
9941 {
9942 val1 = INTVAL (XEXP (base_reg, 1));
9943 base_reg = XEXP (base_reg, 0);
9944 }
9945
9946 if (!REG_P (XEXP (operands[3], 0)))
9947 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
9948
9949 arith[0] = operands[0];
9950 arith[3] = operands[1];
9951
9952 if (val1 < val2)
9953 {
9954 arith[1] = ldm[1];
9955 arith[2] = ldm[2];
9956 }
9957 else
9958 {
9959 arith[1] = ldm[2];
9960 arith[2] = ldm[1];
9961 }
9962
9963 ldm[0] = base_reg;
9964 if (val1 != 0 && val2 != 0)
9965 {
9966 rtx ops[3];
9967
9968 if (val1 == 4 || val2 == 4)
9969 /* Other val must be 8, since we know they are adjacent and neither
9970 is zero. */
9971 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
9972 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
9973 {
9974 ldm[0] = ops[0] = operands[4];
9975 ops[1] = base_reg;
9976 ops[2] = GEN_INT (val1);
9977 output_add_immediate (ops);
9978 if (val1 < val2)
9979 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
9980 else
9981 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
9982 }
9983 else
9984 {
9985 /* Offset is out of range for a single add, so use two ldr. */
9986 ops[0] = ldm[1];
9987 ops[1] = base_reg;
9988 ops[2] = GEN_INT (val1);
9989 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9990 ops[0] = ldm[2];
9991 ops[2] = GEN_INT (val2);
9992 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9993 }
9994 }
9995 else if (val1 != 0)
9996 {
9997 if (val1 < val2)
9998 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
9999 else
10000 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10001 }
10002 else
10003 {
10004 if (val1 < val2)
10005 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10006 else
10007 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10008 }
10009 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10010 return \"\";
10011 }"
10012 [(set_attr "length" "12")
10013 (set_attr "predicable" "yes")
10014 (set_attr "type" "load_4")]
10015 )
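
;; Hedged example: for "x = p[0] + p[1]" with p in a register, the two
;; adjacent loads are merged into one load-multiple,
;; ldmia rp, {rx, ry}
;; add rx, rx, ry
;; (which register receives which word depends on register numbering, as
;; the code above sorts the LDM register list).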
10016
10017 ; This pattern is never tried by combine, so do it as a peephole
10018
10019 (define_peephole2
10020 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10021 (match_operand:SI 1 "arm_general_register_operand" ""))
10022 (set (reg:CC CC_REGNUM)
10023 (compare:CC (match_dup 1) (const_int 0)))]
10024 "TARGET_ARM"
10025 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10026 (set (match_dup 0) (match_dup 1))])]
10027 ""
10028 )
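
;; Hedged example of the peephole above:
;; mov r0, r1
;; cmp r1, #0
;; collapses into a single flag-setting move, typically
;; subs r0, r1, #0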
10029
10030 (define_split
10031 [(set (match_operand:SI 0 "s_register_operand" "")
10032 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10033 (const_int 0))
10034 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10035 [(match_operand:SI 3 "s_register_operand" "")
10036 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10037 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10038 "TARGET_ARM"
10039 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10040 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10041 (match_dup 5)))]
10042 ""
10043 )
10044
10045 ;; This split can be used because CC_Z mode implies that the following
10046 ;; branch will be an equality, or an unsigned inequality, so the sign
10047 ;; extension is not needed.
10048
10049 (define_split
10050 [(set (reg:CC_Z CC_REGNUM)
10051 (compare:CC_Z
10052 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10053 (const_int 24))
10054 (match_operand 1 "const_int_operand" "")))
10055 (clobber (match_scratch:SI 2 ""))]
10056 "TARGET_ARM
10057 && ((UINTVAL (operands[1]))
10058 == ((UINTVAL (operands[1])) >> 24) << 24)"
10059 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10060 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10061 "
10062 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10063 "
10064 )
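
;; Hedged worked example for the split above: the constant must have its
;; low 24 bits clear, e.g. 0x2a000000; the byte is then compared
;; zero-extended against 0x2a000000 >> 24 = 0x2a.
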
10065 ;; ??? Check the patterns above for Thumb-2 usefulness
10066
10067 (define_expand "prologue"
10068 [(clobber (const_int 0))]
10069 "TARGET_EITHER"
10070 "if (TARGET_32BIT)
10071 arm_expand_prologue ();
10072 else
10073 thumb1_expand_prologue ();
10074 DONE;
10075 "
10076 )
10077
10078 (define_expand "epilogue"
10079 [(clobber (const_int 0))]
10080 "TARGET_EITHER"
10081 "
10082 if (crtl->calls_eh_return)
10083 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
10084 if (TARGET_THUMB1)
10085 {
10086 thumb1_expand_epilogue ();
10087 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10088 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10089 }
10090 else if (HAVE_return)
10091 {
10092 /* HAVE_return is testing for USE_RETURN_INSN (FALSE).  Hence,
10093 there is no need to test it explicitly again.  */
10094 emit_jump_insn (gen_return ());
10095 }
10096 else if (TARGET_32BIT)
10097 {
10098 arm_expand_epilogue (true);
10099 }
10100 DONE;
10101 "
10102 )
10103
10104 ;; Note - although unspec_volatiles USE all hard registers,
10105 ;; USEs are ignored after reload has completed.  Thus we need
10106 ;; to add an unspec of the link register to ensure that flow
10107 ;; does not think that it is unused by the sibcall branch that
10108 ;; will replace the standard function epilogue.
10109 (define_expand "sibcall_epilogue"
10110 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
10111 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10112 "TARGET_32BIT"
10113 "
10114 arm_expand_epilogue (false);
10115 DONE;
10116 "
10117 )
10118
10119 (define_expand "eh_epilogue"
10120 [(use (match_operand:SI 0 "register_operand"))
10121 (use (match_operand:SI 1 "register_operand"))
10122 (use (match_operand:SI 2 "register_operand"))]
10123 "TARGET_EITHER"
10124 "
10125 {
10126 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10127 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
10128 {
10129 rtx ra = gen_rtx_REG (Pmode, 2);
10130
10131 emit_move_insn (ra, operands[2]);
10132 operands[2] = ra;
10133 }
10134 /* This is a hack -- we may have crystalized the function type too
10135 early. */
10136 cfun->machine->func_type = 0;
10137 }"
10138 )
10139
10140 ;; This split is only used during output to reduce the number of patterns
10141 ;; that need assembler instructions added to them.  We allowed the setting
10142 ;; of the conditions to be implicit during rtl generation so that
10143 ;; the conditional compare patterns would work.  However, this conflicts to
10144 ;; some extent with the conditional data operations, so we have to split them
10145 ;; up again here.
10146
10147 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10148 ;; conditional execution sufficient?
10149
10150 (define_split
10151 [(set (match_operand:SI 0 "s_register_operand" "")
10152 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10153 [(match_operand 2 "" "") (match_operand 3 "" "")])
10154 (match_dup 0)
10155 (match_operand 4 "" "")))
10156 (clobber (reg:CC CC_REGNUM))]
10157 "TARGET_ARM && reload_completed"
10158 [(set (match_dup 5) (match_dup 6))
10159 (cond_exec (match_dup 7)
10160 (set (match_dup 0) (match_dup 4)))]
10161 "
10162 {
10163 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10164 operands[2], operands[3]);
10165 enum rtx_code rc = GET_CODE (operands[1]);
10166
10167 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10168 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10169 if (mode == CCFPmode || mode == CCFPEmode)
10170 rc = reverse_condition_maybe_unordered (rc);
10171 else
10172 rc = reverse_condition (rc);
10173
10174 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10175 }"
10176 )
10177
10178 (define_split
10179 [(set (match_operand:SI 0 "s_register_operand" "")
10180 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10181 [(match_operand 2 "" "") (match_operand 3 "" "")])
10182 (match_operand 4 "" "")
10183 (match_dup 0)))
10184 (clobber (reg:CC CC_REGNUM))]
10185 "TARGET_ARM && reload_completed"
10186 [(set (match_dup 5) (match_dup 6))
10187 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10188 (set (match_dup 0) (match_dup 4)))]
10189 "
10190 {
10191 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10192 operands[2], operands[3]);
10193
10194 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10195 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10196 }"
10197 )
10198
10199 (define_split
10200 [(set (match_operand:SI 0 "s_register_operand" "")
10201 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10202 [(match_operand 2 "" "") (match_operand 3 "" "")])
10203 (match_operand 4 "" "")
10204 (match_operand 5 "" "")))
10205 (clobber (reg:CC CC_REGNUM))]
10206 "TARGET_ARM && reload_completed"
10207 [(set (match_dup 6) (match_dup 7))
10208 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10209 (set (match_dup 0) (match_dup 4)))
10210 (cond_exec (match_dup 8)
10211 (set (match_dup 0) (match_dup 5)))]
10212 "
10213 {
10214 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10215 operands[2], operands[3]);
10216 enum rtx_code rc = GET_CODE (operands[1]);
10217
10218 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10219 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10220 if (mode == CCFPmode || mode == CCFPEmode)
10221 rc = reverse_condition_maybe_unordered (rc);
10222 else
10223 rc = reverse_condition (rc);
10224
10225 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10226 }"
10227 )
10228
10229 (define_split
10230 [(set (match_operand:SI 0 "s_register_operand" "")
10231 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10232 [(match_operand:SI 2 "s_register_operand" "")
10233 (match_operand:SI 3 "arm_add_operand" "")])
10234 (match_operand:SI 4 "arm_rhs_operand" "")
10235 (not:SI
10236 (match_operand:SI 5 "s_register_operand" ""))))
10237 (clobber (reg:CC CC_REGNUM))]
10238 "TARGET_ARM && reload_completed"
10239 [(set (match_dup 6) (match_dup 7))
10240 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10241 (set (match_dup 0) (match_dup 4)))
10242 (cond_exec (match_dup 8)
10243 (set (match_dup 0) (not:SI (match_dup 5))))]
10244 "
10245 {
10246 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10247 operands[2], operands[3]);
10248 enum rtx_code rc = GET_CODE (operands[1]);
10249
10250 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10251 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10252 if (mode == CCFPmode || mode == CCFPEmode)
10253 rc = reverse_condition_maybe_unordered (rc);
10254 else
10255 rc = reverse_condition (rc);
10256
10257 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10258 }"
10259 )
10260
10261 (define_insn "*cond_move_not"
10262 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10263 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10264 [(match_operand 3 "cc_register" "") (const_int 0)])
10265 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10266 (not:SI
10267 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10268 "TARGET_ARM"
10269 "@
10270 mvn%D4\\t%0, %2
10271 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10272 [(set_attr "conds" "use")
10273 (set_attr "type" "mvn_reg,multiple")
10274 (set_attr "length" "4,8")]
10275 )
10276
10277 ;; The next two patterns occur when an AND operation is followed by a
10278 ;; scc insn sequence.
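;; For illustration only (exact code generation depends on the surrounding
;; context): a source fragment such as
;;	int f (int x) { return (x & 0x10) ? -1 : 0; }
;; can be combined into a one-bit sign_extract and emitted by the first
;; pattern below as
;;	ands	r0, r0, #16
;;	mvnne	r0, #0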
10279
10280 (define_insn "*sign_extract_onebit"
10281 [(set (match_operand:SI 0 "s_register_operand" "=r")
10282 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10283 (const_int 1)
10284 (match_operand:SI 2 "const_int_operand" "n")))
10285 (clobber (reg:CC CC_REGNUM))]
10286 "TARGET_ARM"
10287 "*
10288 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10289 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10290 return \"mvnne\\t%0, #0\";
10291 "
10292 [(set_attr "conds" "clob")
10293 (set_attr "length" "8")
10294 (set_attr "type" "multiple")]
10295 )
10296
10297 (define_insn "*not_signextract_onebit"
10298 [(set (match_operand:SI 0 "s_register_operand" "=r")
10299 (not:SI
10300 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10301 (const_int 1)
10302 (match_operand:SI 2 "const_int_operand" "n"))))
10303 (clobber (reg:CC CC_REGNUM))]
10304 "TARGET_ARM"
10305 "*
10306 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10307 output_asm_insn (\"tst\\t%1, %2\", operands);
10308 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10309 return \"movne\\t%0, #0\";
10310 "
10311 [(set_attr "conds" "clob")
10312 (set_attr "length" "12")
10313 (set_attr "type" "multiple")]
10314 )
10315 ;; ??? The above patterns need auditing for Thumb-2
10316
10317 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10318 ;; expressions. For simplicity, the first register is also in the unspec
10319 ;; part.
10320 ;; To avoid using a GNU extension, the length attribute is computed
10321 ;; by the C function arm_attr_length_push_multi.
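;; For illustration only: saving several registers in ARM or Thumb-2 state
;; typically comes out as
;;	push	{r4, r5, lr}
;; whereas a single-register save in ARM state uses a pre-indexed store:
;;	str	r4, [sp, #-4]!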
10322 (define_insn "*push_multi"
10323 [(match_parallel 2 "multi_register_push"
10324 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
10325 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10326 UNSPEC_PUSH_MULT))])]
10327 ""
10328 "*
10329 {
10330 int num_saves = XVECLEN (operands[2], 0);
10331
10332 /* For the StrongARM at least it is faster to
10333 use STR to store only a single register.
10334 In Thumb mode always use push, and the assembler will pick
10335 something appropriate. */
10336 if (num_saves == 1 && TARGET_ARM)
10337 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
10338 else
10339 {
10340 int i;
10341 char pattern[100];
10342
10343 if (TARGET_32BIT)
10344 strcpy (pattern, \"push%?\\t{%1\");
10345 else
10346 strcpy (pattern, \"push\\t{%1\");
10347
10348 for (i = 1; i < num_saves; i++)
10349 {
10350 strcat (pattern, \", %|\");
10351 strcat (pattern,
10352 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10353 }
10354
10355 strcat (pattern, \"}\");
10356 output_asm_insn (pattern, operands);
10357 }
10358
10359 return \"\";
10360 }"
10361 [(set_attr "type" "store_16")
10362 (set (attr "length")
10363 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
10364 )
10365
10366 (define_insn "stack_tie"
10367 [(set (mem:BLK (scratch))
10368 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10369 (match_operand:SI 1 "s_register_operand" "rk")]
10370 UNSPEC_PRLG_STK))]
10371 ""
10372 ""
10373 [(set_attr "length" "0")
10374 (set_attr "type" "block")]
10375 )
10376
10377 ;; Pop (as used in epilogue RTL)
10378 ;;
10379 (define_insn "*load_multiple_with_writeback"
10380 [(match_parallel 0 "load_multiple_operation"
10381 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10382 (plus:SI (match_dup 1)
10383 (match_operand:SI 2 "const_int_I_operand" "I")))
10384 (set (match_operand:SI 3 "s_register_operand" "=rk")
10385 (mem:SI (match_dup 1)))
10386 ])]
10387 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10388 "*
10389 {
10390 arm_output_multireg_pop (operands, /*return_pc=*/false,
10391 /*cond=*/const_true_rtx,
10392 /*reverse=*/false,
10393 /*update=*/true);
10394 return \"\";
10395 }
10396 "
10397 [(set_attr "type" "load_16")
10398 (set_attr "predicable" "yes")
10399 (set (attr "length")
10400 (symbol_ref "arm_attr_length_pop_multi (operands,
10401 /*return_pc=*/false,
10402 /*write_back_p=*/true)"))]
10403 )
10404
10405 ;; Pop with return (as used in epilogue RTL)
10406 ;;
10407 ;; This instruction is generated when the registers are popped at the end of
10408 ;; the epilogue.  Instead of popping the value into LR and then generating a
10409 ;; jump to LR, the value is popped directly into PC.  Hence, the pattern is
10410 ;; combined with (return).
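;; For illustration only: a typical instance restores the saved registers and
;; returns in a single instruction, e.g.
;;	pop	{r4, r5, pc}
;; (the exact mnemonic chosen by arm_output_multireg_pop depends on the
;; target state and on the register list).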
10411 (define_insn "*pop_multiple_with_writeback_and_return"
10412 [(match_parallel 0 "pop_multiple_return"
10413 [(return)
10414 (set (match_operand:SI 1 "s_register_operand" "+rk")
10415 (plus:SI (match_dup 1)
10416 (match_operand:SI 2 "const_int_I_operand" "I")))
10417 (set (match_operand:SI 3 "s_register_operand" "=rk")
10418 (mem:SI (match_dup 1)))
10419 ])]
10420 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10421 "*
10422 {
10423 arm_output_multireg_pop (operands, /*return_pc=*/true,
10424 /*cond=*/const_true_rtx,
10425 /*reverse=*/false,
10426 /*update=*/true);
10427 return \"\";
10428 }
10429 "
10430 [(set_attr "type" "load_16")
10431 (set_attr "predicable" "yes")
10432 (set (attr "length")
10433 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10434 /*write_back_p=*/true)"))]
10435 )
10436
10437 (define_insn "*pop_multiple_with_return"
10438 [(match_parallel 0 "pop_multiple_return"
10439 [(return)
10440 (set (match_operand:SI 2 "s_register_operand" "=rk")
10441 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
10442 ])]
10443 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10444 "*
10445 {
10446 arm_output_multireg_pop (operands, /*return_pc=*/true,
10447 /*cond=*/const_true_rtx,
10448 /*reverse=*/false,
10449 /*update=*/false);
10450 return \"\";
10451 }
10452 "
10453 [(set_attr "type" "load_16")
10454 (set_attr "predicable" "yes")
10455 (set (attr "length")
10456 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10457 /*write_back_p=*/false)"))]
10458 )
10459
10460 ;; Load into PC and return
10461 (define_insn "*ldr_with_return"
10462 [(return)
10463 (set (reg:SI PC_REGNUM)
10464 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
10465 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10466 "ldr%?\t%|pc, [%0], #4"
10467 [(set_attr "type" "load_4")
10468 (set_attr "predicable" "yes")]
10469 )
10470 ;; Pop for floating point registers (as used in epilogue RTL)
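;; For illustration only: a typical instance reloads a block of D registers
;; with writeback of the base register, e.g.
;;	vldm	sp!, {d8-d15}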
10471 (define_insn "*vfp_pop_multiple_with_writeback"
10472 [(match_parallel 0 "pop_multiple_fp"
10473 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10474 (plus:SI (match_dup 1)
10475 (match_operand:SI 2 "const_int_I_operand" "I")))
10476 (set (match_operand:DF 3 "vfp_hard_register_operand" "")
10477 (mem:DF (match_dup 1)))])]
10478 "TARGET_32BIT && TARGET_HARD_FLOAT"
10479 "*
10480 {
10481 int num_regs = XVECLEN (operands[0], 0);
10482 char pattern[100];
10483 rtx op_list[2];
10484 strcpy (pattern, \"vldm\\t\");
10485 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
10486 strcat (pattern, \"!, {\");
10487 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
10488 strcat (pattern, \"%P0\");
10489 if ((num_regs - 1) > 1)
10490 {
10491 strcat (pattern, \"-%P1\");
10492 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
10493 }
10494
10495 strcat (pattern, \"}\");
10496 output_asm_insn (pattern, op_list);
10497 return \"\";
10498 }
10499 "
10500 [(set_attr "type" "load_16")
10501 (set_attr "conds" "unconditional")
10502 (set_attr "predicable" "no")]
10503 )
10504
10505 ;; Special patterns for dealing with the constant pool
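;; For illustration only: the patterns below emit the alignment and data
;; directives that form a literal ("mini") pool placed in the instruction
;; stream, which is then referenced with PC-relative loads such as
;;	ldr	r0, .L3
;;	...
;; .L3:
;;	.word	0x12345678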
10506
10507 (define_insn "align_4"
10508 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10509 "TARGET_EITHER"
10510 "*
10511 assemble_align (32);
10512 return \"\";
10513 "
10514 [(set_attr "type" "no_insn")]
10515 )
10516
10517 (define_insn "align_8"
10518 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10519 "TARGET_EITHER"
10520 "*
10521 assemble_align (64);
10522 return \"\";
10523 "
10524 [(set_attr "type" "no_insn")]
10525 )
10526
10527 (define_insn "consttable_end"
10528 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10529 "TARGET_EITHER"
10530 "*
10531 making_const_table = FALSE;
10532 return \"\";
10533 "
10534 [(set_attr "type" "no_insn")]
10535 )
10536
10537 (define_insn "consttable_1"
10538 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10539 "TARGET_EITHER"
10540 "*
10541 making_const_table = TRUE;
10542 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10543 assemble_zeros (3);
10544 return \"\";
10545 "
10546 [(set_attr "length" "4")
10547 (set_attr "type" "no_insn")]
10548 )
10549
10550 (define_insn "consttable_2"
10551 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10552 "TARGET_EITHER"
10553 "*
10554 {
10555 rtx x = operands[0];
10556 making_const_table = TRUE;
10557 switch (GET_MODE_CLASS (GET_MODE (x)))
10558 {
10559 case MODE_FLOAT:
10560 arm_emit_fp16_const (x);
10561 break;
10562 default:
10563 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10564 assemble_zeros (2);
10565 break;
10566 }
10567 return \"\";
10568 }"
10569 [(set_attr "length" "4")
10570 (set_attr "type" "no_insn")]
10571 )
10572
10573 (define_insn "consttable_4"
10574 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10575 "TARGET_EITHER"
10576 "*
10577 {
10578 rtx x = operands[0];
10579 making_const_table = TRUE;
10580 scalar_float_mode float_mode;
10581 if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
10582 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
10583 else
10584 {
10585 /* XXX: Sometimes gcc does something really dumb and ends up with
10586 a HIGH in a constant pool entry, usually because it's trying to
10587 load into a VFP register. We know this will always be used in
10588 combination with a LO_SUM which ignores the high bits, so just
10589 strip off the HIGH. */
10590 if (GET_CODE (x) == HIGH)
10591 x = XEXP (x, 0);
10592 assemble_integer (x, 4, BITS_PER_WORD, 1);
10593 mark_symbol_refs_as_used (x);
10594 }
10595 return \"\";
10596 }"
10597 [(set_attr "length" "4")
10598 (set_attr "type" "no_insn")]
10599 )
10600
10601 (define_insn "consttable_8"
10602 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10603 "TARGET_EITHER"
10604 "*
10605 {
10606 making_const_table = TRUE;
10607 scalar_float_mode float_mode;
10608 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
10609 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
10610 float_mode, BITS_PER_WORD);
10611 else
10612 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10613 return \"\";
10614 }"
10615 [(set_attr "length" "8")
10616 (set_attr "type" "no_insn")]
10617 )
10618
10619 (define_insn "consttable_16"
10620 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10621 "TARGET_EITHER"
10622 "*
10623 {
10624 making_const_table = TRUE;
10625 scalar_float_mode float_mode;
10626 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
10627 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
10628 float_mode, BITS_PER_WORD);
10629 else
10630 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10631 return \"\";
10632 }"
10633 [(set_attr "length" "16")
10634 (set_attr "type" "no_insn")]
10635 )
10636
10637 ;; V5 instructions.
10638
10639 (define_insn "clzsi2"
10640 [(set (match_operand:SI 0 "s_register_operand" "=r")
10641 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10642 "TARGET_32BIT && arm_arch5t"
10643 "clz%?\\t%0, %1"
10644 [(set_attr "predicable" "yes")
10645 (set_attr "type" "clz")])
10646
10647 (define_insn "rbitsi2"
10648 [(set (match_operand:SI 0 "s_register_operand" "=r")
10649 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
10650 "TARGET_32BIT && arm_arch_thumb2"
10651 "rbit%?\\t%0, %1"
10652 [(set_attr "predicable" "yes")
10653 (set_attr "type" "clz")])
10654
10655 ;; Keep this as a CTZ expression until after reload and then split
10656 ;; into RBIT + CLZ. Since RBIT is represented as an UNSPEC it is unlikely
10657 ;; to fold with any other expression.
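;; For illustration only: after the split the generated sequence is
;;	rbit	r0, r1
;;	clz	r0, r0
;; which works because ctz (x) == clz (bit-reverse (x)).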
10658
10659 (define_insn_and_split "ctzsi2"
10660 [(set (match_operand:SI 0 "s_register_operand" "=r")
10661 (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10662 "TARGET_32BIT && arm_arch_thumb2"
10663 "#"
10664 "&& reload_completed"
10665 [(const_int 0)]
10666 "
10667 emit_insn (gen_rbitsi2 (operands[0], operands[1]));
10668 emit_insn (gen_clzsi2 (operands[0], operands[0]));
10669 DONE;
10670 ")
10671
10672 ;; V5E instructions.
10673
10674 (define_insn "prefetch"
10675 [(prefetch (match_operand:SI 0 "address_operand" "p")
10676 (match_operand:SI 1 "" "")
10677 (match_operand:SI 2 "" ""))]
10678 "TARGET_32BIT && arm_arch5te"
10679 "pld\\t%a0"
10680 [(set_attr "type" "load_4")]
10681 )
10682
10683 ;; General predication pattern
10684
10685 (define_cond_exec
10686 [(match_operator 0 "arm_comparison_operator"
10687 [(match_operand 1 "cc_register" "")
10688 (const_int 0)])]
10689 "TARGET_32BIT
10690 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
10691 ""
10692 [(set_attr "predicated" "yes")]
10693 )
10694
10695 (define_insn "force_register_use"
10696 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
10697 ""
10698 "%@ %0 needed"
10699 [(set_attr "length" "0")
10700 (set_attr "type" "no_insn")]
10701 )
10702
10703
10704 ;; Patterns for exception handling
10705
10706 (define_expand "eh_return"
10707 [(use (match_operand 0 "general_operand"))]
10708 "TARGET_EITHER"
10709 "
10710 {
10711 if (TARGET_32BIT)
10712 emit_insn (gen_arm_eh_return (operands[0]));
10713 else
10714 emit_insn (gen_thumb_eh_return (operands[0]));
10715 DONE;
10716 }"
10717 )
10718
10719 ;; We can't expand this before we know where the link register is stored.
10720 (define_insn_and_split "arm_eh_return"
10721 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10722 VUNSPEC_EH_RETURN)
10723 (clobber (match_scratch:SI 1 "=&r"))]
10724 "TARGET_ARM"
10725 "#"
10726 "&& reload_completed"
10727 [(const_int 0)]
10728 "
10729 {
10730 arm_set_return_address (operands[0], operands[1]);
10731 DONE;
10732 }"
10733 )
10734
10735 \f
10736 ;; TLS support
10737
10738 (define_insn "load_tp_hard"
10739 [(set (match_operand:SI 0 "register_operand" "=r")
10740 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10741 "TARGET_HARD_TP"
10742 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10743 [(set_attr "predicable" "yes")
10744 (set_attr "type" "mrs")]
10745 )
10746
10747 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10748 (define_insn "load_tp_soft_fdpic"
10749 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10750 (clobber (reg:SI FDPIC_REGNUM))
10751 (clobber (reg:SI LR_REGNUM))
10752 (clobber (reg:SI IP_REGNUM))
10753 (clobber (reg:CC CC_REGNUM))]
10754 "TARGET_SOFT_TP && TARGET_FDPIC"
10755 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10756 [(set_attr "conds" "clob")
10757 (set_attr "type" "branch")]
10758 )
10759
10760 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10761 (define_insn "load_tp_soft"
10762 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10763 (clobber (reg:SI LR_REGNUM))
10764 (clobber (reg:SI IP_REGNUM))
10765 (clobber (reg:CC CC_REGNUM))]
10766 "TARGET_SOFT_TP && !TARGET_FDPIC"
10767 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10768 [(set_attr "conds" "clob")
10769 (set_attr "type" "branch")]
10770 )
10771
10772 ;; tls descriptor call
10773 (define_insn "tlscall"
10774 [(set (reg:SI R0_REGNUM)
10775 (unspec:SI [(reg:SI R0_REGNUM)
10776 (match_operand:SI 0 "" "X")
10777 (match_operand 1 "" "")] UNSPEC_TLS))
10778 (clobber (reg:SI R1_REGNUM))
10779 (clobber (reg:SI LR_REGNUM))
10780 (clobber (reg:SI CC_REGNUM))]
10781 "TARGET_GNU2_TLS"
10782 {
10783 targetm.asm_out.internal_label (asm_out_file, "LPIC",
10784 INTVAL (operands[1]));
10785 return "bl\\t%c0(tlscall)";
10786 }
10787 [(set_attr "conds" "clob")
10788 (set_attr "length" "4")
10789 (set_attr "type" "branch")]
10790 )
10791
10792 ;; For thread pointer builtin
10793 (define_expand "get_thread_pointersi"
10794 [(match_operand:SI 0 "s_register_operand")]
10795 ""
10796 "
10797 {
10798 arm_load_tp (operands[0]);
10799 DONE;
10800 }")
10801
10802 ;;
10803
10804 ;; We only care about the lower 16 bits of the constant
10805 ;; being inserted into the upper 16 bits of the register.
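;; For illustration only: given a (hypothetical) constant 0x56781234, only
;; its low 16 bits are encoded, i.e. the effect is
;;	movt	r0, #0x1234
;; which sets bits [31:16] of r0 to 0x1234 and leaves bits [15:0] unchanged.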
10806 (define_insn "*arm_movtas_ze"
10807 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
10808 (const_int 16)
10809 (const_int 16))
10810 (match_operand:SI 1 "const_int_operand" ""))]
10811 "TARGET_HAVE_MOVT"
10812 "@
10813 movt%?\t%0, %L1
10814 movt\t%0, %L1"
10815 [(set_attr "arch" "32,v8mb")
10816 (set_attr "predicable" "yes")
10817 (set_attr "length" "4")
10818 (set_attr "type" "alu_sreg")]
10819 )
10820
10821 (define_insn "*arm_rev"
10822 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
10823 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
10824 "arm_arch6"
10825 "@
10826 rev\t%0, %1
10827 rev%?\t%0, %1
10828 rev%?\t%0, %1"
10829 [(set_attr "arch" "t1,t2,32")
10830 (set_attr "length" "2,2,4")
10831 (set_attr "predicable" "no,yes,yes")
10832 (set_attr "type" "rev")]
10833 )
10834
10835 (define_expand "arm_legacy_rev"
10836 [(set (match_operand:SI 2 "s_register_operand")
10837 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
10838 (const_int 16))
10839 (match_dup 1)))
10840 (set (match_dup 2)
10841 (lshiftrt:SI (match_dup 2)
10842 (const_int 8)))
10843 (set (match_operand:SI 3 "s_register_operand")
10844 (rotatert:SI (match_dup 1)
10845 (const_int 8)))
10846 (set (match_dup 2)
10847 (and:SI (match_dup 2)
10848 (const_int -65281)))
10849 (set (match_operand:SI 0 "s_register_operand")
10850 (xor:SI (match_dup 3)
10851 (match_dup 2)))]
10852 "TARGET_32BIT"
10853 ""
10854 )
10855
10856 ;; Reuse temporaries to keep register pressure down.
10857 (define_expand "thumb_legacy_rev"
10858 [(set (match_operand:SI 2 "s_register_operand")
10859 (ashift:SI (match_operand:SI 1 "s_register_operand")
10860 (const_int 24)))
10861 (set (match_operand:SI 3 "s_register_operand")
10862 (lshiftrt:SI (match_dup 1)
10863 (const_int 24)))
10864 (set (match_dup 3)
10865 (ior:SI (match_dup 3)
10866 (match_dup 2)))
10867 (set (match_operand:SI 4 "s_register_operand")
10868 (const_int 16))
10869 (set (match_operand:SI 5 "s_register_operand")
10870 (rotatert:SI (match_dup 1)
10871 (match_dup 4)))
10872 (set (match_dup 2)
10873 (ashift:SI (match_dup 5)
10874 (const_int 24)))
10875 (set (match_dup 5)
10876 (lshiftrt:SI (match_dup 5)
10877 (const_int 24)))
10878 (set (match_dup 5)
10879 (ior:SI (match_dup 5)
10880 (match_dup 2)))
10881 (set (match_dup 5)
10882 (rotatert:SI (match_dup 5)
10883 (match_dup 4)))
10884 (set (match_operand:SI 0 "s_register_operand")
10885 (ior:SI (match_dup 5)
10886 (match_dup 3)))]
10887 "TARGET_THUMB"
10888 ""
10889 )
10890
10891 ;; ARM-specific expansion of signed mod by power of 2
10892 ;; using conditional negate.
10893 ;; For r0 % n where n is a power of 2 produce:
10894 ;; rsbs r1, r0, #0
10895 ;; and r0, r0, #(n - 1)
10896 ;; and r1, r1, #(n - 1)
10897 ;; rsbpl r0, r1, #0
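;; Worked example (illustrative): for r0 = -7 and n = 4 the sequence gives
;;	rsbs	r1, r0, #0	@ r1 = 7, N flag clear
;;	and	r0, r0, #3	@ r0 = 1
;;	and	r1, r1, #3	@ r1 = 3
;;	rsbpl	r0, r1, #0	@ executed (N clear), r0 = -3 = -7 % 4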
10898
10899 (define_expand "modsi3"
10900 [(match_operand:SI 0 "register_operand")
10901 (match_operand:SI 1 "register_operand")
10902 (match_operand:SI 2 "const_int_operand")]
10903 "TARGET_32BIT"
10904 {
10905 HOST_WIDE_INT val = INTVAL (operands[2]);
10906
10907 if (val <= 0
10908 || exact_log2 (val) <= 0)
10909 FAIL;
10910
10911 rtx mask = GEN_INT (val - 1);
10912
10913 /* In the special case of r0 % 2 we can do the even shorter:
10914 cmp r0, #0
10915 and r0, r0, #1
10916 rsblt r0, r0, #0. */
10917
10918 if (val == 2)
10919 {
10920 rtx cc_reg = arm_gen_compare_reg (LT,
10921 operands[1], const0_rtx, NULL_RTX);
10922 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
10923 rtx masked = gen_reg_rtx (SImode);
10924
10925 emit_insn (gen_andsi3 (masked, operands[1], mask));
10926 emit_move_insn (operands[0],
10927 gen_rtx_IF_THEN_ELSE (SImode, cond,
10928 gen_rtx_NEG (SImode,
10929 masked),
10930 masked));
10931 DONE;
10932 }
10933
10934 rtx neg_op = gen_reg_rtx (SImode);
10935 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
10936 operands[1]));
10937
10938 /* Extract the condition register and mode. */
10939 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
10940 rtx cc_reg = SET_DEST (cmp);
10941 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
10942
10943 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
10944
10945 rtx masked_neg = gen_reg_rtx (SImode);
10946 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
10947
10948 /* We want a conditional negate here, but emitting COND_EXEC rtxes
10949 during expand does not always work. Do an IF_THEN_ELSE instead. */
10950 emit_move_insn (operands[0],
10951 gen_rtx_IF_THEN_ELSE (SImode, cond,
10952 gen_rtx_NEG (SImode, masked_neg),
10953 operands[0]));
10954
10955
10956 DONE;
10957 }
10958 )
10959
10960 (define_expand "bswapsi2"
10961 [(set (match_operand:SI 0 "s_register_operand")
10962 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
10963 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
10964 "
10965 if (!arm_arch6)
10966 {
10967 rtx op2 = gen_reg_rtx (SImode);
10968 rtx op3 = gen_reg_rtx (SImode);
10969
10970 if (TARGET_THUMB)
10971 {
10972 rtx op4 = gen_reg_rtx (SImode);
10973 rtx op5 = gen_reg_rtx (SImode);
10974
10975 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
10976 op2, op3, op4, op5));
10977 }
10978 else
10979 {
10980 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
10981 op2, op3));
10982 }
10983
10984 DONE;
10985 }
10986 "
10987 )
10988
10989 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
10990 ;; and unsigned variants, respectively. For rev16, expose
10991 ;; byte-swapping in the lower 16 bits only.
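;; For illustration only:
;;	revsh	r0, r1	@ r0 = sign_extend (bswap (r1[15:0]))
;;	rev16	r0, r1	@ swaps the bytes within each halfword of r1
;; e.g. rev16 with r1 = 0x11223344 yields 0x22114433, of which the HImode
;; pattern below only uses the low halfword 0x4433.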
10992 (define_insn "*arm_revsh"
10993 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
10994 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
10995 "arm_arch6"
10996 "@
10997 revsh\t%0, %1
10998 revsh%?\t%0, %1
10999 revsh%?\t%0, %1"
11000 [(set_attr "arch" "t1,t2,32")
11001 (set_attr "length" "2,2,4")
11002 (set_attr "type" "rev")]
11003 )
11004
11005 (define_insn "*arm_rev16"
11006 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
11007 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
11008 "arm_arch6"
11009 "@
11010 rev16\t%0, %1
11011 rev16%?\t%0, %1
11012 rev16%?\t%0, %1"
11013 [(set_attr "arch" "t1,t2,32")
11014 (set_attr "length" "2,2,4")
11015 (set_attr "type" "rev")]
11016 )
11017
11018 ;; There are no canonicalisation rules for the position of the lshiftrt, ashift
11019 ;; operations within an IOR/AND RTX, therefore we have two patterns matching
11020 ;; each valid permutation.
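;; Both permutations compute the same value; roughly, for an unsigned
;; 32-bit x:
;;	((x << 8) & 0xff00ff00) | ((x >> 8) & 0x00ff00ff)
;; i.e. the bytes within each halfword are swapped, which is what REV16 does.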
11021
11022 (define_insn "arm_rev16si2"
11023 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11024 (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
11025 (const_int 8))
11026 (match_operand:SI 3 "const_int_operand" "n,n,n"))
11027 (and:SI (lshiftrt:SI (match_dup 1)
11028 (const_int 8))
11029 (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
11030 "arm_arch6
11031 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11032 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11033 "rev16\\t%0, %1"
11034 [(set_attr "arch" "t1,t2,32")
11035 (set_attr "length" "2,2,4")
11036 (set_attr "type" "rev")]
11037 )
11038
11039 (define_insn "arm_rev16si2_alt"
11040 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11041 (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
11042 (const_int 8))
11043 (match_operand:SI 2 "const_int_operand" "n,n,n"))
11044 (and:SI (ashift:SI (match_dup 1)
11045 (const_int 8))
11046 (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
11047 "arm_arch6
11048 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11049 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11050 "rev16\\t%0, %1"
11051 [(set_attr "arch" "t1,t2,32")
11052 (set_attr "length" "2,2,4")
11053 (set_attr "type" "rev")]
11054 )
11055
11056 (define_expand "bswaphi2"
11057 [(set (match_operand:HI 0 "s_register_operand")
11058 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
11059 "arm_arch6"
11060 ""
11061 )
11062
11063 ;; Patterns for LDRD/STRD in Thumb2 mode
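;; For illustration only: two adjacent word accesses such as
;;	ldr	r0, [r2, #8]
;;	ldr	r1, [r2, #12]
;; can be matched by the patterns below and emitted as
;;	ldrd	r0, r1, [r2, #8]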
11064
11065 (define_insn "*thumb2_ldrd"
11066 [(set (match_operand:SI 0 "s_register_operand" "=r")
11067 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11068 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
11069 (set (match_operand:SI 3 "s_register_operand" "=r")
11070 (mem:SI (plus:SI (match_dup 1)
11071 (match_operand:SI 4 "const_int_operand" ""))))]
11072 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11073 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
11074 && (operands_ok_ldrd_strd (operands[0], operands[3],
11075 operands[1], INTVAL (operands[2]),
11076 false, true))"
11077 "ldrd%?\t%0, %3, [%1, %2]"
11078 [(set_attr "type" "load_8")
11079 (set_attr "predicable" "yes")])
11080
11081 (define_insn "*thumb2_ldrd_base"
11082 [(set (match_operand:SI 0 "s_register_operand" "=r")
11083 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11084 (set (match_operand:SI 2 "s_register_operand" "=r")
11085 (mem:SI (plus:SI (match_dup 1)
11086 (const_int 4))))]
11087 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11088 && (operands_ok_ldrd_strd (operands[0], operands[2],
11089 operands[1], 0, false, true))"
11090 "ldrd%?\t%0, %2, [%1]"
11091 [(set_attr "type" "load_8")
11092 (set_attr "predicable" "yes")])
11093
11094 (define_insn "*thumb2_ldrd_base_neg"
11095 [(set (match_operand:SI 0 "s_register_operand" "=r")
11096 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11097 (const_int -4))))
11098 (set (match_operand:SI 2 "s_register_operand" "=r")
11099 (mem:SI (match_dup 1)))]
11100 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11101 && (operands_ok_ldrd_strd (operands[0], operands[2],
11102 operands[1], -4, false, true))"
11103 "ldrd%?\t%0, %2, [%1, #-4]"
11104 [(set_attr "type" "load_8")
11105 (set_attr "predicable" "yes")])
11106
11107 (define_insn "*thumb2_strd"
11108 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11109 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
11110 (match_operand:SI 2 "s_register_operand" "r"))
11111 (set (mem:SI (plus:SI (match_dup 0)
11112 (match_operand:SI 3 "const_int_operand" "")))
11113 (match_operand:SI 4 "s_register_operand" "r"))]
11114 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11115 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
11116 && (operands_ok_ldrd_strd (operands[2], operands[4],
11117 operands[0], INTVAL (operands[1]),
11118 false, false))"
11119 "strd%?\t%2, %4, [%0, %1]"
11120 [(set_attr "type" "store_8")
11121 (set_attr "predicable" "yes")])
11122
11123 (define_insn "*thumb2_strd_base"
11124 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
11125 (match_operand:SI 1 "s_register_operand" "r"))
11126 (set (mem:SI (plus:SI (match_dup 0)
11127 (const_int 4)))
11128 (match_operand:SI 2 "s_register_operand" "r"))]
11129 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11130 && (operands_ok_ldrd_strd (operands[1], operands[2],
11131 operands[0], 0, false, false))"
11132 "strd%?\t%1, %2, [%0]"
11133 [(set_attr "type" "store_8")
11134 (set_attr "predicable" "yes")])
11135
11136 (define_insn "*thumb2_strd_base_neg"
11137 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11138 (const_int -4)))
11139 (match_operand:SI 1 "s_register_operand" "r"))
11140 (set (mem:SI (match_dup 0))
11141 (match_operand:SI 2 "s_register_operand" "r"))]
11142 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11143 && (operands_ok_ldrd_strd (operands[1], operands[2],
11144 operands[0], -4, false, false))"
11145 "strd%?\t%1, %2, [%0, #-4]"
11146 [(set_attr "type" "store_8")
11147 (set_attr "predicable" "yes")])
11148
11149 ;; ARMv8 CRC32 instructions.
11150 (define_insn "arm_<crc_variant>"
11151 [(set (match_operand:SI 0 "s_register_operand" "=r")
11152 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
11153 (match_operand:<crc_mode> 2 "s_register_operand" "r")]
11154 CRC))]
11155 "TARGET_CRC32"
11156 "<crc_variant>\\t%0, %1, %2"
11157 [(set_attr "type" "crc")
11158 (set_attr "conds" "unconditional")]
11159 )
11160
11161 ;; Load the load/store double peephole optimizations.
11162 (include "ldrdstrd.md")
11163
11164 ;; Load the load/store multiple patterns
11165 (include "ldmstm.md")
11166
11167 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
11168 ;; large lists without explicit writeback generated for the APCS_FRAME epilogue.
11169 ;; The operands are validated through the load_multiple_operation
11170 ;; match_parallel predicate rather than through constraints, so enable it only
11171 ;; after reload.
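;; For illustration only: a large restore such as
;;	ldm	r11, {r4, r5, r6, r7, r8, r9, r10}
;; (with no writeback of the base register) is matched here.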
11172 (define_insn "*load_multiple"
11173 [(match_parallel 0 "load_multiple_operation"
11174 [(set (match_operand:SI 2 "s_register_operand" "=rk")
11175 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11176 ])]
11177 "TARGET_32BIT && reload_completed"
11178 "*
11179 {
11180 arm_output_multireg_pop (operands, /*return_pc=*/false,
11181 /*cond=*/const_true_rtx,
11182 /*reverse=*/false,
11183 /*update=*/false);
11184 return \"\";
11185 }
11186 "
11187 [(set_attr "predicable" "yes")]
11188 )
11189
11190 (define_expand "copysignsf3"
11191 [(match_operand:SF 0 "register_operand")
11192 (match_operand:SF 1 "register_operand")
11193 (match_operand:SF 2 "register_operand")]
11194 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11195 "{
11196 emit_move_insn (operands[0], operands[2]);
11197 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
11198 GEN_INT (31), GEN_INT (0),
11199 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
11200 DONE;
11201 }"
11202 )
11203
11204 (define_expand "copysigndf3"
11205 [(match_operand:DF 0 "register_operand")
11206 (match_operand:DF 1 "register_operand")
11207 (match_operand:DF 2 "register_operand")]
11208 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11209 "{
11210 rtx op0_low = gen_lowpart (SImode, operands[0]);
11211 rtx op0_high = gen_highpart (SImode, operands[0]);
11212 rtx op1_low = gen_lowpart (SImode, operands[1]);
11213 rtx op1_high = gen_highpart (SImode, operands[1]);
11214 rtx op2_high = gen_highpart (SImode, operands[2]);
11215
11216 rtx scratch1 = gen_reg_rtx (SImode);
11217 rtx scratch2 = gen_reg_rtx (SImode);
11218 emit_move_insn (scratch1, op2_high);
11219 emit_move_insn (scratch2, op1_high);
11220
11221 emit_insn(gen_rtx_SET(scratch1,
11222 gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT(31))));
11223 emit_insn(gen_insv_t2(scratch2, GEN_INT(1), GEN_INT(31), scratch1));
11224 emit_move_insn (op0_low, op1_low);
11225 emit_move_insn (op0_high, scratch2);
11226
11227 DONE;
11228 }"
11229 )
11230
11231 ;; movmisalign patterns for HImode and SImode.
11232 (define_expand "movmisalign<mode>"
11233 [(match_operand:HSI 0 "general_operand")
11234 (match_operand:HSI 1 "general_operand")]
11235 "unaligned_access"
11236 {
11237 /* This pattern is not permitted to fail during expansion: if both arguments
11238 are non-registers (e.g. memory := constant), force operand 1 into a
11239 register. */
11240 rtx (* gen_unaligned_load)(rtx, rtx);
11241 rtx tmp_dest = operands[0];
11242 if (!s_register_operand (operands[0], <MODE>mode)
11243 && !s_register_operand (operands[1], <MODE>mode))
11244 operands[1] = force_reg (<MODE>mode, operands[1]);
11245
11246 if (<MODE>mode == HImode)
11247 {
11248 gen_unaligned_load = gen_unaligned_loadhiu;
11249 tmp_dest = gen_reg_rtx (SImode);
11250 }
11251 else
11252 gen_unaligned_load = gen_unaligned_loadsi;
11253
11254 if (MEM_P (operands[1]))
11255 {
11256 emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
11257 if (<MODE>mode == HImode)
11258 emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
11259 }
11260 else
11261 emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
11262
11263 DONE;
11264 })
11265
11266 (define_insn "arm_<cdp>"
11267 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11268 (match_operand:SI 1 "immediate_operand" "n")
11269 (match_operand:SI 2 "immediate_operand" "n")
11270 (match_operand:SI 3 "immediate_operand" "n")
11271 (match_operand:SI 4 "immediate_operand" "n")
11272 (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
11273 "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
11274 {
11275 arm_const_bounds (operands[0], 0, 16);
11276 arm_const_bounds (operands[1], 0, 16);
11277 arm_const_bounds (operands[2], 0, (1 << 5));
11278 arm_const_bounds (operands[3], 0, (1 << 5));
11279 arm_const_bounds (operands[4], 0, (1 << 5));
11280 arm_const_bounds (operands[5], 0, 8);
11281 return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
11282 }
11283 [(set_attr "length" "4")
11284 (set_attr "type" "coproc")])
11285
11286 (define_insn "*ldc"
11287 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11288 (match_operand:SI 1 "immediate_operand" "n")
11289 (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
11290 "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
11291 {
11292 arm_const_bounds (operands[0], 0, 16);
11293 arm_const_bounds (operands[1], 0, (1 << 5));
11294 return "<ldc>\\tp%c0, CR%c1, %2";
11295 }
11296 [(set_attr "length" "4")
11297 (set_attr "type" "coproc")])
11298
11299 (define_insn "*stc"
11300 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11301 (match_operand:SI 1 "immediate_operand" "n")
11302 (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
11303 "arm_coproc_builtin_available (VUNSPEC_<STC>)"
11304 {
11305 arm_const_bounds (operands[0], 0, 16);
11306 arm_const_bounds (operands[1], 0, (1 << 5));
11307 return "<stc>\\tp%c0, CR%c1, %2";
11308 }
11309 [(set_attr "length" "4")
11310 (set_attr "type" "coproc")])
11311
11312 (define_expand "arm_<ldc>"
11313 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11314 (match_operand:SI 1 "immediate_operand")
11315 (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
11316 "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
11317
11318 (define_expand "arm_<stc>"
11319 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11320 (match_operand:SI 1 "immediate_operand")
11321 (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
11322 "arm_coproc_builtin_available (VUNSPEC_<STC>)")
11323
11324 (define_insn "arm_<mcr>"
11325 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11326 (match_operand:SI 1 "immediate_operand" "n")
11327 (match_operand:SI 2 "s_register_operand" "r")
11328 (match_operand:SI 3 "immediate_operand" "n")
11329 (match_operand:SI 4 "immediate_operand" "n")
11330 (match_operand:SI 5 "immediate_operand" "n")] MCRI)
11331 (use (match_dup 2))]
11332 "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
11333 {
11334 arm_const_bounds (operands[0], 0, 16);
11335 arm_const_bounds (operands[1], 0, 8);
11336 arm_const_bounds (operands[3], 0, (1 << 5));
11337 arm_const_bounds (operands[4], 0, (1 << 5));
11338 arm_const_bounds (operands[5], 0, 8);
11339 return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
11340 }
11341 [(set_attr "length" "4")
11342 (set_attr "type" "coproc")])
11343
11344 (define_insn "arm_<mrc>"
11345 [(set (match_operand:SI 0 "s_register_operand" "=r")
11346 (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
11347 (match_operand:SI 2 "immediate_operand" "n")
11348 (match_operand:SI 3 "immediate_operand" "n")
11349 (match_operand:SI 4 "immediate_operand" "n")
11350 (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
11351 "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
11352 {
11353 arm_const_bounds (operands[1], 0, 16);
11354 arm_const_bounds (operands[2], 0, 8);
11355 arm_const_bounds (operands[3], 0, (1 << 5));
11356 arm_const_bounds (operands[4], 0, (1 << 5));
11357 arm_const_bounds (operands[5], 0, 8);
11358 return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
11359 }
11360 [(set_attr "length" "4")
11361 (set_attr "type" "coproc")])
11362
11363 (define_insn "arm_<mcrr>"
11364 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11365 (match_operand:SI 1 "immediate_operand" "n")
11366 (match_operand:DI 2 "s_register_operand" "r")
11367 (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
11368 (use (match_dup 2))]
11369 "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
11370 {
11371 arm_const_bounds (operands[0], 0, 16);
11372 arm_const_bounds (operands[1], 0, 8);
11373 arm_const_bounds (operands[3], 0, (1 << 5));
11374 return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
11375 }
11376 [(set_attr "length" "4")
11377 (set_attr "type" "coproc")])
11378
11379 (define_insn "arm_<mrrc>"
11380 [(set (match_operand:DI 0 "s_register_operand" "=r")
11381 (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
11382 (match_operand:SI 2 "immediate_operand" "n")
11383 (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
11384 "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
11385 {
11386 arm_const_bounds (operands[1], 0, 16);
11387 arm_const_bounds (operands[2], 0, 8);
11388 arm_const_bounds (operands[3], 0, (1 << 5));
11389 return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
11390 }
11391 [(set_attr "length" "4")
11392 (set_attr "type" "coproc")])
11393
11394 (define_expand "speculation_barrier"
11395 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11396 "TARGET_EITHER"
11397 "
11398 /* For thumb1 (except Armv8 derivatives), and for pre-Armv7 we don't
11399 have a usable barrier (and probably don't need one in practice).
11400 But to be safe if such code is run on later architectures, call a
11401 helper function in libgcc that will do the right thing for the active
11402 system. */
11403 if (!(arm_arch7 || arm_arch8))
11404 {
11405 arm_emit_speculation_barrier_function ();
11406 DONE;
11407 }
11408 "
11409 )
11410
11411 ;; Generate a hard speculation barrier when we have not enabled speculation
11412 ;; tracking.
11413 (define_insn "*speculation_barrier_insn"
11414 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11415 "arm_arch7 || arm_arch8"
11416 "isb\;dsb\\tsy"
11417 [(set_attr "type" "block")
11418 (set_attr "length" "8")]
11419 )
11420
11421 ;; Vector bits common to IWMMXT and Neon
11422 (include "vec-common.md")
11423 ;; Load the Intel Wireless Multimedia Extension patterns
11424 (include "iwmmxt.md")
11425 ;; Load the VFP co-processor patterns
11426 (include "vfp.md")
11427 ;; Thumb-1 patterns
11428 (include "thumb1.md")
11429 ;; Thumb-2 patterns
11430 (include "thumb2.md")
11431 ;; Neon patterns
11432 (include "neon.md")
11433 ;; Crypto patterns
11434 (include "crypto.md")
11435 ;; Synchronization Primitives
11436 (include "sync.md")
11437 ;; Fixed-point patterns
11438 (include "arm-fixed.md")