1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2019 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
6
7 ;; This file is part of GCC.
8
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
13
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
18
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
22
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
24
25 \f
26 ;;---------------------------------------------------------------------------
27 ;; Constants
28
29 ;; Register numbers -- All machine registers should be defined here
30 (define_constants
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (R4_REGNUM 4) ; Fifth CORE register
34 (FDPIC_REGNUM 9) ; FDPIC register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (LAST_ARM_REGNUM 15) ;
40 (CC_REGNUM 100) ; Condition code pseudo register
41 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
42 ]
43 )
44 ;; 3rd operand to select_dominance_cc_mode
45 (define_constants
46 [(DOM_CC_X_AND_Y 0)
47 (DOM_CC_NX_OR_Y 1)
48 (DOM_CC_X_OR_Y 2)
49 ]
50 )
51 ;; conditional compare combination
52 (define_constants
53 [(CMP_CMP 0)
54 (CMN_CMP 1)
55 (CMP_CMN 2)
56 (CMN_CMN 3)
57 (NUM_OF_COND_CMP 4)
58 ]
59 )
60
61 \f
62 ;;---------------------------------------------------------------------------
63 ;; Attributes
64
65 ;; Processor type. This is created automatically from arm-cores.def.
66 (include "arm-tune.md")
67
68 ;; Instruction classification types
69 (include "types.md")
70
71 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
72 ; generating ARM code. This is used to control the length of some insn
73 ; patterns that share the same RTL in both ARM and Thumb code.
74 (define_attr "is_thumb" "yes,no"
75 (const (if_then_else (symbol_ref "TARGET_THUMB")
76 (const_string "yes") (const_string "no"))))
77
78 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
79 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
80
81 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
82 (define_attr "is_thumb1" "yes,no"
83 (const (if_then_else (symbol_ref "TARGET_THUMB1")
84 (const_string "yes") (const_string "no"))))
85
86 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
87 ; The arm_restrict_it flag enables the "short IT" feature which
88 ; restricts IT blocks to a single 16-bit instruction.
89 ; This attribute should only be used on 16-bit Thumb-2 instructions
90 ; which may be predicated (the "predicable" attribute must be set).
91 (define_attr "predicable_short_it" "no,yes" (const_string "no"))
92
93 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
94 ; This attribute should only be used on instructions which may emit
95 ; an IT block in their expansion which is not a short IT.
96 (define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
97
98 ;; Operand number of an input operand that is shifted. Zero if the
99 ;; given instruction does not shift one of its input operands.
100 (define_attr "shift" "" (const_int 0))
101
102 ;; [For compatibility with AArch64 in pipeline models]
103 ;; Attribute that specifies whether or not the instruction touches fp
104 ;; registers.
105 (define_attr "fp" "no,yes" (const_string "no"))
106
107 ; Floating Point Unit. If we only have floating point emulation, then there
108 ; is no point in scheduling the floating point insns. (Well, for best
109 ; performance we should try and group them together).
110 (define_attr "fpu" "none,vfp"
111 (const (symbol_ref "arm_fpu_attr")))
112
113 ; Predicated means that the insn form is conditionally executed based on a
114 ; predicate. We default to 'no' because no Thumb patterns match this rule
115 ; and not all ARM insns do.
116 (define_attr "predicated" "yes,no" (const_string "no"))
117
118 ; LENGTH of an instruction (in bytes)
119 (define_attr "length" ""
120 (const_int 4))
121
122 ; The architecture which supports the instruction (or alternative).
123 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
124 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
125 ; for ARM or Thumb-2 with arm_arch6, and "nov6" for ARM without
126 ; arm_arch6. "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
127 ; Baseline. This attribute is used to compute attribute "enabled";
128 ; use the value "any" to enable an alternative in all cases.
129 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,v8mb,iwmmxt,iwmmxt2,armv6_or_vfpv3,neon"
130 (const_string "any"))
131
132 (define_attr "arch_enabled" "no,yes"
133 (cond [(eq_attr "arch" "any")
134 (const_string "yes")
135
136 (and (eq_attr "arch" "a")
137 (match_test "TARGET_ARM"))
138 (const_string "yes")
139
140 (and (eq_attr "arch" "t")
141 (match_test "TARGET_THUMB"))
142 (const_string "yes")
143
144 (and (eq_attr "arch" "t1")
145 (match_test "TARGET_THUMB1"))
146 (const_string "yes")
147
148 (and (eq_attr "arch" "t2")
149 (match_test "TARGET_THUMB2"))
150 (const_string "yes")
151
152 (and (eq_attr "arch" "32")
153 (match_test "TARGET_32BIT"))
154 (const_string "yes")
155
156 (and (eq_attr "arch" "v6")
157 (match_test "TARGET_32BIT && arm_arch6"))
158 (const_string "yes")
159
160 (and (eq_attr "arch" "nov6")
161 (match_test "TARGET_32BIT && !arm_arch6"))
162 (const_string "yes")
163
164 (and (eq_attr "arch" "v6t2")
165 (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
166 (const_string "yes")
167
168 (and (eq_attr "arch" "v8mb")
169 (match_test "TARGET_THUMB1 && arm_arch8"))
170 (const_string "yes")
171
172 (and (eq_attr "arch" "iwmmxt2")
173 (match_test "TARGET_REALLY_IWMMXT2"))
174 (const_string "yes")
175
176 (and (eq_attr "arch" "armv6_or_vfpv3")
177 (match_test "arm_arch6 || TARGET_VFP3"))
178 (const_string "yes")
179
180 (and (eq_attr "arch" "neon")
181 (match_test "TARGET_NEON"))
182 (const_string "yes")
183 ]
184
185 (const_string "no")))
186
187 (define_attr "opt" "any,speed,size"
188 (const_string "any"))
189
190 (define_attr "opt_enabled" "no,yes"
191 (cond [(eq_attr "opt" "any")
192 (const_string "yes")
193
194 (and (eq_attr "opt" "speed")
195 (match_test "optimize_function_for_speed_p (cfun)"))
196 (const_string "yes")
197
198 (and (eq_attr "opt" "size")
199 (match_test "optimize_function_for_size_p (cfun)"))
200 (const_string "yes")]
201 (const_string "no")))
202
203 (define_attr "use_literal_pool" "no,yes"
204 (cond [(and (eq_attr "type" "f_loads,f_loadd")
205 (match_test "CONSTANT_P (operands[1])"))
206 (const_string "yes")]
207 (const_string "no")))
208
209 ; Enable all alternatives that are both arch_enabled and insn_enabled.
210 ; FIXME: opt_enabled has been temporarily removed until we have
211 ; an attribute that allows the use of such alternatives.
212 ; This depends on caching of speed_p, size_p on a per
213 ; alternative basis. The problem is that the enabled attribute
214 ; cannot depend on any state that is not cached or is not constant
215 ; for a compilation unit. We probably need a generic "hot/cold"
216 ; alternative which, if implemented, could help with this. We disable this
217 ; until that is implemented and/or the improvements or
218 ; regressions from removing this attribute have been double-checked.
219 ; See ashldi3_neon and <shift>di3_neon in neon.md.
220
221 (define_attr "enabled" "no,yes"
222 (cond [(and (eq_attr "predicable_short_it" "no")
223 (and (eq_attr "predicated" "yes")
224 (match_test "arm_restrict_it")))
225 (const_string "no")
226
227 (and (eq_attr "enabled_for_short_it" "no")
228 (match_test "arm_restrict_it"))
229 (const_string "no")
230
231 (eq_attr "arch_enabled" "no")
232 (const_string "no")]
233 (const_string "yes")))
234
235 ; POOL_RANGE is how far away from a constant pool entry that this insn
236 ; can be placed. If the distance is zero, then this insn will never
237 ; reference the pool.
238 ; Note that for Thumb constant pools the PC value is rounded down to the
239 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
240 ; Thumb insns) should be set to <max_range> - 2.
241 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
242 ; before its address. It is set to <max_range> - (8 + <data_size>).
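; For example (values purely illustrative), an insn whose data is 4 bytes
; and whose backward range is 255 bytes would use a NEG_POOL_RANGE of
; 255 - (8 + 4) = 243.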
243 (define_attr "arm_pool_range" "" (const_int 0))
244 (define_attr "thumb2_pool_range" "" (const_int 0))
245 (define_attr "arm_neg_pool_range" "" (const_int 0))
246 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
247
248 (define_attr "pool_range" ""
249 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
250 (attr "arm_pool_range")))
251 (define_attr "neg_pool_range" ""
252 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
253 (attr "arm_neg_pool_range")))
254
255 ; An assembler sequence may clobber the condition codes without us knowing.
256 ; If such an insn references the pool, then we have no way of knowing how,
257 ; so use the most conservative value for pool_range.
258 (define_asm_attributes
259 [(set_attr "conds" "clob")
260 (set_attr "length" "4")
261 (set_attr "pool_range" "250")])
262
263 ; Load scheduling, set from the arm_ld_sched variable
264 ; initialized by arm_option_override()
265 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
266
267 ; condition codes: this one is used by final_prescan_insn to speed up
268 ; conditionalizing instructions. It saves having to scan the rtl to see if
269 ; it uses or alters the condition codes.
270 ;
271 ; USE means that the condition codes are used by the insn in the process of
272 ; outputting code; this means (at present) that we can't use the insn in
273 ; inlined branches.
274 ;
275 ; SET means that the purpose of the insn is to set the condition codes in a
276 ; well defined manner.
277 ;
278 ; CLOB means that the condition codes are altered in an undefined manner, if
279 ; they are altered at all
280 ;
281 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
282 ; that the instruction does not use or alter the condition codes.
283 ;
284 ; NOCOND means that the instruction does not use or alter the condition
285 ; codes but can be converted into a conditionally executed instruction.
286
287 (define_attr "conds" "use,set,clob,unconditional,nocond"
288 (if_then_else
289 (ior (eq_attr "is_thumb1" "yes")
290 (eq_attr "type" "call"))
291 (const_string "clob")
292 (if_then_else (eq_attr "is_neon_type" "no")
293 (const_string "nocond")
294 (const_string "unconditional"))))
295
296 ; Predicable means that the insn can be conditionally executed based on
297 ; an automatically added predicate (additional patterns are generated by
298 ; gen...). We default to 'no' because no Thumb patterns match this rule
299 ; and not all ARM patterns do.
300 (define_attr "predicable" "no,yes" (const_string "no"))
301
302 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
303 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
304 ; suffer blockages enough to warrant modelling this (and it can adversely
305 ; affect the schedule).
306 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
307
308 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
309 ; to stall the processor. Used with model_wbuf above.
310 (define_attr "write_conflict" "no,yes"
311 (if_then_else (eq_attr "type"
312 "block,call,load_4")
313 (const_string "yes")
314 (const_string "no")))
315
316 ; Classify the insns into those that take one cycle and those that take more
317 ; than one on the main cpu execution unit.
318 (define_attr "core_cycles" "single,multi"
319 (if_then_else (eq_attr "type"
320 "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
321 alu_shift_imm, alu_shift_reg, alu_dsp_reg, alus_ext, alus_imm, alus_sreg,\
322 alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
323 logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
324 logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
325 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
326 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
327 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
328 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
329 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
330 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
331 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
332 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
333 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
334 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
335 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
336 (const_string "single")
337 (const_string "multi")))
338
339 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
340 ;; distant label. Only applicable to Thumb code.
341 (define_attr "far_jump" "yes,no" (const_string "no"))
342
343
344 ;; The number of machine instructions this pattern expands to.
345 ;; Used for Thumb-2 conditional execution.
346 (define_attr "ce_count" "" (const_int 1))
347
348 ;;---------------------------------------------------------------------------
349 ;; Unspecs
350
351 (include "unspecs.md")
352
353 ;;---------------------------------------------------------------------------
354 ;; Mode iterators
355
356 (include "iterators.md")
357
358 ;;---------------------------------------------------------------------------
359 ;; Predicates
360
361 (include "predicates.md")
362 (include "constraints.md")
363
364 ;;---------------------------------------------------------------------------
365 ;; Pipeline descriptions
366
367 (define_attr "tune_cortexr4" "yes,no"
368 (const (if_then_else
369 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
370 (const_string "yes")
371 (const_string "no"))))
372
373 ;; True if the generic scheduling description should be used.
374
375 (define_attr "generic_sched" "yes,no"
376 (const (if_then_else
377 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
378 arm926ejs,arm10e,arm1026ejs,arm1136js,\
379 arm1136jfs,cortexa5,cortexa7,cortexa8,\
380 cortexa9,cortexa12,cortexa15,cortexa17,\
381 cortexa53,cortexa57,cortexm4,cortexm7,\
382 exynosm1,marvell_pj4,xgene1")
383 (eq_attr "tune_cortexr4" "yes"))
384 (const_string "no")
385 (const_string "yes"))))
386
387 (define_attr "generic_vfp" "yes,no"
388 (const (if_then_else
389 (and (eq_attr "fpu" "vfp")
390 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
391 cortexa8,cortexa9,cortexa53,cortexm4,\
392 cortexm7,marvell_pj4,xgene1")
393 (eq_attr "tune_cortexr4" "no"))
394 (const_string "yes")
395 (const_string "no"))))
396
397 (include "marvell-f-iwmmxt.md")
398 (include "arm-generic.md")
399 (include "arm926ejs.md")
400 (include "arm1020e.md")
401 (include "arm1026ejs.md")
402 (include "arm1136jfs.md")
403 (include "fa526.md")
404 (include "fa606te.md")
405 (include "fa626te.md")
406 (include "fmp626.md")
407 (include "fa726te.md")
408 (include "cortex-a5.md")
409 (include "cortex-a7.md")
410 (include "cortex-a8.md")
411 (include "cortex-a9.md")
412 (include "cortex-a15.md")
413 (include "cortex-a17.md")
414 (include "cortex-a53.md")
415 (include "cortex-a57.md")
416 (include "cortex-r4.md")
417 (include "cortex-r4f.md")
418 (include "cortex-m7.md")
419 (include "cortex-m4.md")
420 (include "cortex-m4-fpu.md")
421 (include "exynos-m1.md")
422 (include "vfp11.md")
423 (include "marvell-pj4.md")
424 (include "xgene1.md")
425
426 \f
427 ;;---------------------------------------------------------------------------
428 ;; Insn patterns
429 ;;
430 ;; Addition insns.
431
432 ;; Note: For DImode insns, there is normally no reason why operands should
433 ;; not be in the same register; what we don't want is for something being
434 ;; written to partially overlap something that is an input.
435
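;; In the general case the ARM/Thumb-2 path of the expander below decomposes
;; the 64-bit addition into an SImode ADDS that sets the carry flag followed
;; by an ADC on the high words, e.g. (illustrative registers)
;; "adds r0, r2, r4" then "adc r1, r3, r5".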
436 (define_expand "adddi3"
437 [(parallel
438 [(set (match_operand:DI 0 "s_register_operand")
439 (plus:DI (match_operand:DI 1 "s_register_operand")
440 (match_operand:DI 2 "reg_or_int_operand")))
441 (clobber (reg:CC CC_REGNUM))])]
442 "TARGET_EITHER"
443 "
444 if (TARGET_THUMB1)
445 {
446 if (!REG_P (operands[2]))
447 operands[2] = force_reg (DImode, operands[2]);
448 }
449 else
450 {
451 rtx lo_result, hi_result, lo_dest, hi_dest;
452 rtx lo_op1, hi_op1, lo_op2, hi_op2;
453 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
454 &lo_op2, &hi_op2);
455 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
456 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
457
458 if (lo_op2 == const0_rtx)
459 {
460 lo_dest = lo_op1;
461 if (!arm_add_operand (hi_op2, SImode))
462 hi_op2 = force_reg (SImode, hi_op2);
463 /* Assume hi_op2 won't also be zero. */
464 emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
465 }
466 else
467 {
468 if (!arm_add_operand (lo_op2, SImode))
469 lo_op2 = force_reg (SImode, lo_op2);
470 if (!arm_not_operand (hi_op2, SImode))
471 hi_op2 = force_reg (SImode, hi_op2);
472
473 emit_insn (gen_addsi3_compareC (lo_dest, lo_op1, lo_op2));
474 rtx carry = gen_rtx_LTU (SImode, gen_rtx_REG (CC_Cmode, CC_REGNUM),
475 const0_rtx);
476 if (hi_op2 == const0_rtx)
477 emit_insn (gen_add0si3_carryin (hi_dest, hi_op1, carry));
478 else
479 emit_insn (gen_addsi3_carryin (hi_dest, hi_op1, hi_op2, carry));
480 }
481
482 if (lo_result != lo_dest)
483 emit_move_insn (lo_result, lo_dest);
484 if (hi_result != hi_dest)
485 emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
486 DONE;
487 }
488 "
489 )
490
491 (define_expand "addv<mode>4"
492 [(match_operand:SIDI 0 "register_operand")
493 (match_operand:SIDI 1 "register_operand")
494 (match_operand:SIDI 2 "register_operand")
495 (match_operand 3 "")]
496 "TARGET_32BIT"
497 {
498 emit_insn (gen_add<mode>3_compareV (operands[0], operands[1], operands[2]));
499 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
500
501 DONE;
502 })
503
504 (define_expand "uaddv<mode>4"
505 [(match_operand:SIDI 0 "register_operand")
506 (match_operand:SIDI 1 "register_operand")
507 (match_operand:SIDI 2 "register_operand")
508 (match_operand 3 "")]
509 "TARGET_32BIT"
510 {
511 emit_insn (gen_add<mode>3_compareC (operands[0], operands[1], operands[2]));
512 arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
513
514 DONE;
515 })
516
517 (define_expand "addsi3"
518 [(set (match_operand:SI 0 "s_register_operand")
519 (plus:SI (match_operand:SI 1 "s_register_operand")
520 (match_operand:SI 2 "reg_or_int_operand")))]
521 "TARGET_EITHER"
522 "
523 if (TARGET_32BIT && CONST_INT_P (operands[2]))
524 {
525 arm_split_constant (PLUS, SImode, NULL_RTX,
526 INTVAL (operands[2]), operands[0], operands[1],
527 optimize && can_create_pseudo_p ());
528 DONE;
529 }
530 "
531 )
532
533 ; If there is a scratch available, this will be faster than synthesizing the
534 ; addition.
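; For example, adding #0x00ffffff: neither that value nor its negation is a
; valid immediate, but its complement #0xff000000 is, so the constant can be
; loaded into the scratch with a single MVN and then added as a register.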
535 (define_peephole2
536 [(match_scratch:SI 3 "r")
537 (set (match_operand:SI 0 "arm_general_register_operand" "")
538 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
539 (match_operand:SI 2 "const_int_operand" "")))]
540 "TARGET_32BIT &&
541 !(const_ok_for_arm (INTVAL (operands[2]))
542 || const_ok_for_arm (-INTVAL (operands[2])))
543 && const_ok_for_arm (~INTVAL (operands[2]))"
544 [(set (match_dup 3) (match_dup 2))
545 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
546 ""
547 )
548
549 ;; The r/r/k alternative is required when reloading the address
550 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
551 ;; put the duplicated register first, and not try the commutative version.
552 (define_insn_and_split "*arm_addsi3"
553 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
554 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
555 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
556 "TARGET_32BIT"
557 "@
558 add%?\\t%0, %0, %2
559 add%?\\t%0, %1, %2
560 add%?\\t%0, %1, %2
561 add%?\\t%0, %1, %2
562 add%?\\t%0, %1, %2
563 add%?\\t%0, %1, %2
564 add%?\\t%0, %2, %1
565 add%?\\t%0, %1, %2
566 addw%?\\t%0, %1, %2
567 addw%?\\t%0, %1, %2
568 sub%?\\t%0, %1, #%n2
569 sub%?\\t%0, %1, #%n2
570 sub%?\\t%0, %1, #%n2
571 subw%?\\t%0, %1, #%n2
572 subw%?\\t%0, %1, #%n2
573 #"
574 "TARGET_32BIT
575 && CONST_INT_P (operands[2])
576 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
577 && (reload_completed || !arm_eliminable_register (operands[1]))"
578 [(clobber (const_int 0))]
579 "
580 arm_split_constant (PLUS, SImode, curr_insn,
581 INTVAL (operands[2]), operands[0],
582 operands[1], 0);
583 DONE;
584 "
585 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
586 (set_attr "predicable" "yes")
587 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
588 (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
589 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
590 (const_string "alu_imm")
591 (const_string "alu_sreg")))
592 ]
593 )
594
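;; The compareV patterns below detect signed overflow by adding the
;; sign-extended operands in a wider mode and comparing that sum with the
;; sign-extension of the truncated (machine-width) sum; the two differ
;; exactly when the addition overflows.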
595 (define_insn "adddi3_compareV"
596 [(set (reg:CC_V CC_REGNUM)
597 (ne:CC_V
598 (plus:TI
599 (sign_extend:TI (match_operand:DI 1 "s_register_operand" "r"))
600 (sign_extend:TI (match_operand:DI 2 "s_register_operand" "r")))
601 (sign_extend:TI (plus:DI (match_dup 1) (match_dup 2)))))
602 (set (match_operand:DI 0 "s_register_operand" "=&r")
603 (plus:DI (match_dup 1) (match_dup 2)))]
604 "TARGET_32BIT"
605 "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
606 [(set_attr "conds" "set")
607 (set_attr "length" "8")
608 (set_attr "type" "multiple")]
609 )
610
611 (define_insn "addsi3_compareV"
612 [(set (reg:CC_V CC_REGNUM)
613 (ne:CC_V
614 (plus:DI
615 (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
616 (sign_extend:DI (match_operand:SI 2 "register_operand" "r")))
617 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
618 (set (match_operand:SI 0 "register_operand" "=r")
619 (plus:SI (match_dup 1) (match_dup 2)))]
620 "TARGET_32BIT"
621 "adds%?\\t%0, %1, %2"
622 [(set_attr "conds" "set")
623 (set_attr "type" "alus_sreg")]
624 )
625
626 (define_insn "adddi3_compareC"
627 [(set (reg:CC_C CC_REGNUM)
628 (compare:CC_C
629 (plus:DI
630 (match_operand:DI 1 "register_operand" "r")
631 (match_operand:DI 2 "register_operand" "r"))
632 (match_dup 1)))
633 (set (match_operand:DI 0 "register_operand" "=&r")
634 (plus:DI (match_dup 1) (match_dup 2)))]
635 "TARGET_32BIT"
636 "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
637 [(set_attr "conds" "set")
638 (set_attr "length" "8")
639 (set_attr "type" "multiple")]
640 )
641
642 (define_insn "addsi3_compareC"
643 [(set (reg:CC_C CC_REGNUM)
644 (compare:CC_C (plus:SI (match_operand:SI 1 "register_operand" "r")
645 (match_operand:SI 2 "register_operand" "r"))
646 (match_dup 1)))
647 (set (match_operand:SI 0 "register_operand" "=r")
648 (plus:SI (match_dup 1) (match_dup 2)))]
649 "TARGET_32BIT"
650 "adds%?\\t%0, %1, %2"
651 [(set_attr "conds" "set")
652 (set_attr "type" "alus_sreg")]
653 )
654
655 (define_insn "addsi3_compare0"
656 [(set (reg:CC_NOOV CC_REGNUM)
657 (compare:CC_NOOV
658 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
659 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
660 (const_int 0)))
661 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
662 (plus:SI (match_dup 1) (match_dup 2)))]
663 "TARGET_ARM"
664 "@
665 adds%?\\t%0, %1, %2
666 subs%?\\t%0, %1, #%n2
667 adds%?\\t%0, %1, %2"
668 [(set_attr "conds" "set")
669 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
670 )
671
672 (define_insn "*addsi3_compare0_scratch"
673 [(set (reg:CC_NOOV CC_REGNUM)
674 (compare:CC_NOOV
675 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
676 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
677 (const_int 0)))]
678 "TARGET_ARM"
679 "@
680 cmn%?\\t%0, %1
681 cmp%?\\t%0, #%n1
682 cmn%?\\t%0, %1"
683 [(set_attr "conds" "set")
684 (set_attr "predicable" "yes")
685 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
686 )
687
688 (define_insn "*compare_negsi_si"
689 [(set (reg:CC_Z CC_REGNUM)
690 (compare:CC_Z
691 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
692 (match_operand:SI 1 "s_register_operand" "l,r")))]
693 "TARGET_32BIT"
694 "cmn%?\\t%1, %0"
695 [(set_attr "conds" "set")
696 (set_attr "predicable" "yes")
697 (set_attr "arch" "t2,*")
698 (set_attr "length" "2,4")
699 (set_attr "predicable_short_it" "yes,no")
700 (set_attr "type" "alus_sreg")]
701 )
702
703 ;; This is the canonicalization of subsi3_compare when the
704 ;; addend is a constant.
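;; For example, the flags from comparing %1 with #4 can be obtained with a
;; single "subs %0, %1, #4", which also leaves %1 - 4 in %0.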
705 (define_insn "cmpsi2_addneg"
706 [(set (reg:CC CC_REGNUM)
707 (compare:CC
708 (match_operand:SI 1 "s_register_operand" "r,r")
709 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
710 (set (match_operand:SI 0 "s_register_operand" "=r,r")
711 (plus:SI (match_dup 1)
712 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
713 "TARGET_32BIT
714 && (INTVAL (operands[2])
715 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
716 {
717 /* For 0 and INT_MIN it is essential that we use subs, as adds will result
718 in different condition codes (like cmn rather than like cmp), so that
719 alternative comes first. Both alternatives can match for any 0x??000000
720 where, except for 0 and INT_MIN, it doesn't matter which we choose, and also
721 for -1 and 1 with TARGET_THUMB2; in that case prefer the instruction with #1
722 as it is shorter. */
723 if (which_alternative == 0 && operands[3] != const1_rtx)
724 return "subs%?\\t%0, %1, #%n3";
725 else
726 return "adds%?\\t%0, %1, %3";
727 }
728 [(set_attr "conds" "set")
729 (set_attr "type" "alus_sreg")]
730 )
731
732 ;; Convert the sequence
733 ;; sub rd, rn, #1
734 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
735 ;; bne dest
736 ;; into
737 ;; subs rd, rn, #1
738 ;; bcs dest ((unsigned)rn >= 1)
739 ;; similarly for the beq variant using bcc.
740 ;; This is a common looping idiom (while (n--))
741 (define_peephole2
742 [(set (match_operand:SI 0 "arm_general_register_operand" "")
743 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
744 (const_int -1)))
745 (set (match_operand 2 "cc_register" "")
746 (compare (match_dup 0) (const_int -1)))
747 (set (pc)
748 (if_then_else (match_operator 3 "equality_operator"
749 [(match_dup 2) (const_int 0)])
750 (match_operand 4 "" "")
751 (match_operand 5 "" "")))]
752 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
753 [(parallel[
754 (set (match_dup 2)
755 (compare:CC
756 (match_dup 1) (const_int 1)))
757 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
758 (set (pc)
759 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
760 (match_dup 4)
761 (match_dup 5)))]
762 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
763 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
764 ? GEU : LTU),
765 VOIDmode,
766 operands[2], const0_rtx);"
767 )
768
769 ;; The next four insns work because they compare the result with one of
770 ;; the operands, and we know that the use of the condition code is
771 ;; either GEU or LTU, so we can use the carry flag from the addition
772 ;; instead of doing the compare a second time.
773 (define_insn "*addsi3_compare_op1"
774 [(set (reg:CC_C CC_REGNUM)
775 (compare:CC_C
776 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
777 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
778 (match_dup 1)))
779 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
780 (plus:SI (match_dup 1) (match_dup 2)))]
781 "TARGET_32BIT"
782 "@
783 adds%?\\t%0, %1, %2
784 adds%?\\t%0, %0, %2
785 subs%?\\t%0, %1, #%n2
786 subs%?\\t%0, %0, #%n2
787 adds%?\\t%0, %1, %2
788 subs%?\\t%0, %1, #%n2
789 adds%?\\t%0, %1, %2"
790 [(set_attr "conds" "set")
791 (set_attr "arch" "t2,t2,t2,t2,*,*,*")
792 (set_attr "length" "2,2,2,2,4,4,4")
793 (set_attr "type"
794 "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
795 )
796
797 (define_insn "*addsi3_compare_op2"
798 [(set (reg:CC_C CC_REGNUM)
799 (compare:CC_C
800 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
801 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
802 (match_dup 2)))
803 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
804 (plus:SI (match_dup 1) (match_dup 2)))]
805 "TARGET_32BIT"
806 "@
807 adds%?\\t%0, %1, %2
808 adds%?\\t%0, %0, %2
809 subs%?\\t%0, %1, #%n2
810 subs%?\\t%0, %0, #%n2
811 adds%?\\t%0, %1, %2
812 subs%?\\t%0, %1, #%n2
813 adds%?\\t%0, %1, %2"
814 [(set_attr "conds" "set")
815 (set_attr "arch" "t2,t2,t2,t2,*,*,*")
816 (set_attr "length" "2,2,2,2,4,4,4")
817 (set_attr "type"
818 "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
819 )
820
821 (define_insn "*compare_addsi2_op0"
822 [(set (reg:CC_C CC_REGNUM)
823 (compare:CC_C
824 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
825 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
826 (match_dup 0)))]
827 "TARGET_32BIT"
828 "@
829 cmp%?\\t%0, #%n1
830 cmn%?\\t%0, %1
831 cmn%?\\t%0, %1
832 cmp%?\\t%0, #%n1
833 cmn%?\\t%0, %1"
834 [(set_attr "conds" "set")
835 (set_attr "predicable" "yes")
836 (set_attr "arch" "t2,t2,*,*,*")
837 (set_attr "predicable_short_it" "yes,yes,no,no,no")
838 (set_attr "length" "2,2,4,4,4")
839 (set_attr "type" "alus_imm,alus_sreg,alus_imm,alus_imm,alus_sreg")]
840 )
841
842 (define_insn "*compare_addsi2_op1"
843 [(set (reg:CC_C CC_REGNUM)
844 (compare:CC_C
845 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
846 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
847 (match_dup 1)))]
848 "TARGET_32BIT"
849 "@
850 cmp%?\\t%0, #%n1
851 cmn%?\\t%0, %1
852 cmn%?\\t%0, %1
853 cmp%?\\t%0, #%n1
854 cmn%?\\t%0, %1"
855 [(set_attr "conds" "set")
856 (set_attr "predicable" "yes")
857 (set_attr "arch" "t2,t2,*,*,*")
858 (set_attr "predicable_short_it" "yes,yes,no,no,no")
859 (set_attr "length" "2,2,4,4,4")
860 (set_attr "type" "alus_imm,alus_sreg,alus_imm,alus_imm,alus_sreg")]
861 )
862
863 (define_insn "addsi3_carryin"
864 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
865 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
866 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
867 (match_operand:SI 3 "arm_carry_operation" "")))]
868 "TARGET_32BIT"
869 "@
870 adc%?\\t%0, %1, %2
871 adc%?\\t%0, %1, %2
872 sbc%?\\t%0, %1, #%B2"
873 [(set_attr "conds" "use")
874 (set_attr "predicable" "yes")
875 (set_attr "arch" "t2,*,*")
876 (set_attr "length" "4")
877 (set_attr "predicable_short_it" "yes,no,no")
878 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
879 )
880
881 ;; Canonicalization of the above when the immediate is zero.
882 (define_insn "add0si3_carryin"
883 [(set (match_operand:SI 0 "s_register_operand" "=r")
884 (plus:SI (match_operand:SI 2 "arm_carry_operation" "")
885 (match_operand:SI 1 "arm_not_operand" "r")))]
886 "TARGET_32BIT"
887 "adc%?\\t%0, %1, #0"
888 [(set_attr "conds" "use")
889 (set_attr "predicable" "yes")
890 (set_attr "length" "4")
891 (set_attr "type" "adc_imm")]
892 )
893
894 (define_insn "*addsi3_carryin_alt2"
895 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
896 (plus:SI (plus:SI (match_operand:SI 3 "arm_carry_operation" "")
897 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
898 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
899 "TARGET_32BIT"
900 "@
901 adc%?\\t%0, %1, %2
902 adc%?\\t%0, %1, %2
903 sbc%?\\t%0, %1, #%B2"
904 [(set_attr "conds" "use")
905 (set_attr "predicable" "yes")
906 (set_attr "arch" "t2,*,*")
907 (set_attr "length" "4")
908 (set_attr "predicable_short_it" "yes,no,no")
909 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
910 )
911
912 (define_insn "*addsi3_carryin_shift"
913 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
914 (plus:SI (plus:SI
915 (match_operator:SI 2 "shift_operator"
916 [(match_operand:SI 3 "s_register_operand" "r,r")
917 (match_operand:SI 4 "shift_amount_operand" "M,r")])
918 (match_operand:SI 5 "arm_carry_operation" ""))
919 (match_operand:SI 1 "s_register_operand" "r,r")))]
920 "TARGET_32BIT"
921 "adc%?\\t%0, %1, %3%S2"
922 [(set_attr "conds" "use")
923 (set_attr "arch" "32,a")
924 (set_attr "shift" "3")
925 (set_attr "predicable" "yes")
926 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
927 (const_string "alu_shift_imm")
928 (const_string "alu_shift_reg")))]
929 )
930
931 (define_insn "*addsi3_carryin_clobercc"
932 [(set (match_operand:SI 0 "s_register_operand" "=r")
933 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
934 (match_operand:SI 2 "arm_rhs_operand" "rI"))
935 (match_operand:SI 3 "arm_carry_operation" "")))
936 (clobber (reg:CC CC_REGNUM))]
937 "TARGET_32BIT"
938 "adcs%?\\t%0, %1, %2"
939 [(set_attr "conds" "set")
940 (set_attr "type" "adcs_reg")]
941 )
942
943 (define_expand "subv<mode>4"
944 [(match_operand:SIDI 0 "register_operand")
945 (match_operand:SIDI 1 "register_operand")
946 (match_operand:SIDI 2 "register_operand")
947 (match_operand 3 "")]
948 "TARGET_32BIT"
949 {
950 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
951 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
952
953 DONE;
954 })
955
956 (define_expand "usubv<mode>4"
957 [(match_operand:SIDI 0 "register_operand")
958 (match_operand:SIDI 1 "register_operand")
959 (match_operand:SIDI 2 "register_operand")
960 (match_operand 3 "")]
961 "TARGET_32BIT"
962 {
963 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
964 arm_gen_unlikely_cbranch (LTU, CCmode, operands[3]);
965
966 DONE;
967 })
968
969 (define_insn "subdi3_compare1"
970 [(set (reg:CC CC_REGNUM)
971 (compare:CC
972 (match_operand:DI 1 "s_register_operand" "r")
973 (match_operand:DI 2 "s_register_operand" "r")))
974 (set (match_operand:DI 0 "s_register_operand" "=&r")
975 (minus:DI (match_dup 1) (match_dup 2)))]
976 "TARGET_32BIT"
977 "subs\\t%Q0, %Q1, %Q2;sbcs\\t%R0, %R1, %R2"
978 [(set_attr "conds" "set")
979 (set_attr "length" "8")
980 (set_attr "type" "multiple")]
981 )
982
983 (define_insn "subsi3_compare1"
984 [(set (reg:CC CC_REGNUM)
985 (compare:CC
986 (match_operand:SI 1 "register_operand" "r")
987 (match_operand:SI 2 "register_operand" "r")))
988 (set (match_operand:SI 0 "register_operand" "=r")
989 (minus:SI (match_dup 1) (match_dup 2)))]
990 "TARGET_32BIT"
991 "subs%?\\t%0, %1, %2"
992 [(set_attr "conds" "set")
993 (set_attr "type" "alus_sreg")]
994 )
995
996 (define_insn "subsi3_carryin"
997 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
998 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
999 (match_operand:SI 2 "s_register_operand" "r,r,r"))
1000 (match_operand:SI 3 "arm_borrow_operation" "")))]
1001 "TARGET_32BIT"
1002 "@
1003 sbc%?\\t%0, %1, %2
1004 rsc%?\\t%0, %2, %1
1005 sbc%?\\t%0, %2, %2, lsl #1"
1006 [(set_attr "conds" "use")
1007 (set_attr "arch" "*,a,t2")
1008 (set_attr "predicable" "yes")
1009 (set_attr "type" "adc_reg,adc_imm,alu_shift_imm")]
1010 )
1011
1012 (define_insn "cmpsi3_carryin_<CC_EXTEND>out"
1013 [(set (reg:<CC_EXTEND> CC_REGNUM)
1014 (compare:<CC_EXTEND>
1015 (SE:DI (match_operand:SI 1 "s_register_operand" "0,r"))
1016 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1017 (SE:DI (match_operand:SI 2 "s_register_operand" "l,r")))))
1018 (clobber (match_scratch:SI 0 "=l,r"))]
1019 "TARGET_32BIT"
1020 "sbcs\\t%0, %1, %2"
1021 [(set_attr "conds" "set")
1022 (set_attr "arch" "t2,*")
1023 (set_attr "length" "2,4")
1024 (set_attr "type" "adc_reg")]
1025 )
1026
1027 ;; Similar to the above, but handling a constant which has a different
1028 ;; canonicalization.
1029 (define_insn "cmpsi3_imm_carryin_<CC_EXTEND>out"
1030 [(set (reg:<CC_EXTEND> CC_REGNUM)
1031 (compare:<CC_EXTEND>
1032 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1033 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1034 (match_operand:DI 2 "arm_adcimm_operand" "I,K"))))
1035 (clobber (match_scratch:SI 0 "=l,r"))]
1036 "TARGET_32BIT"
1037 "@
1038 sbcs\\t%0, %1, %2
1039 adcs\\t%0, %1, #%B2"
1040 [(set_attr "conds" "set")
1041 (set_attr "type" "adc_imm")]
1042 )
1043
1044 ;; Further canonicalization when the constant is zero.
1045 (define_insn "cmpsi3_0_carryin_<CC_EXTEND>out"
1046 [(set (reg:<CC_EXTEND> CC_REGNUM)
1047 (compare:<CC_EXTEND>
1048 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1049 (match_operand:DI 2 "arm_borrow_operation" "")))
1050 (clobber (match_scratch:SI 0 "=l,r"))]
1051 "TARGET_32BIT"
1052 "sbcs\\t%0, %1, #0"
1053 [(set_attr "conds" "set")
1054 (set_attr "type" "adc_imm")]
1055 )
1056
1057 (define_insn "*subsi3_carryin_const"
1058 [(set (match_operand:SI 0 "s_register_operand" "=r")
1059 (minus:SI (plus:SI
1060 (match_operand:SI 1 "s_register_operand" "r")
1061 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1062 (match_operand:SI 3 "arm_borrow_operation" "")))]
1063 "TARGET_32BIT"
1064 "sbc\\t%0, %1, #%n2"
1065 [(set_attr "conds" "use")
1066 (set_attr "type" "adc_imm")]
1067 )
1068
1069 (define_insn "*subsi3_carryin_const0"
1070 [(set (match_operand:SI 0 "s_register_operand" "=r")
1071 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1072 (match_operand:SI 2 "arm_borrow_operation" "")))]
1073 "TARGET_32BIT"
1074 "sbc\\t%0, %1, #0"
1075 [(set_attr "conds" "use")
1076 (set_attr "type" "adc_imm")]
1077 )
1078
1079 (define_insn "*subsi3_carryin_shift"
1080 [(set (match_operand:SI 0 "s_register_operand" "=r")
1081 (minus:SI (minus:SI
1082 (match_operand:SI 1 "s_register_operand" "r")
1083 (match_operator:SI 2 "shift_operator"
1084 [(match_operand:SI 3 "s_register_operand" "r")
1085 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
1086 (match_operand:SI 5 "arm_borrow_operation" "")))]
1087 "TARGET_32BIT"
1088 "sbc%?\\t%0, %1, %3%S2"
1089 [(set_attr "conds" "use")
1090 (set_attr "predicable" "yes")
1091 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1092 (const_string "alu_shift_imm")
1093 (const_string "alu_shift_reg")))]
1094 )
1095
1096 (define_insn "*subsi3_carryin_shift_alt"
1097 [(set (match_operand:SI 0 "s_register_operand" "=r")
1098 (minus:SI (minus:SI
1099 (match_operand:SI 1 "s_register_operand" "r")
1100 (match_operand:SI 5 "arm_borrow_operation" ""))
1101 (match_operator:SI 2 "shift_operator"
1102 [(match_operand:SI 3 "s_register_operand" "r")
1103 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
1104 "TARGET_32BIT"
1105 "sbc%?\\t%0, %1, %3%S2"
1106 [(set_attr "conds" "use")
1107 (set_attr "predicable" "yes")
1108 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1109 (const_string "alu_shift_imm")
1110 (const_string "alu_shift_reg")))]
1111 )
1112
1113 (define_insn "*rsbsi3_carryin_shift"
1114 [(set (match_operand:SI 0 "s_register_operand" "=r")
1115 (minus:SI (minus:SI
1116 (match_operator:SI 2 "shift_operator"
1117 [(match_operand:SI 3 "s_register_operand" "r")
1118 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1119 (match_operand:SI 1 "s_register_operand" "r"))
1120 (match_operand:SI 5 "arm_borrow_operation" "")))]
1121 "TARGET_ARM"
1122 "rsc%?\\t%0, %1, %3%S2"
1123 [(set_attr "conds" "use")
1124 (set_attr "predicable" "yes")
1125 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1126 (const_string "alu_shift_imm")
1127 (const_string "alu_shift_reg")))]
1128 )
1129
1130 (define_insn "*rsbsi3_carryin_shift_alt"
1131 [(set (match_operand:SI 0 "s_register_operand" "=r")
1132 (minus:SI (minus:SI
1133 (match_operator:SI 2 "shift_operator"
1134 [(match_operand:SI 3 "s_register_operand" "r")
1135 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1136 (match_operand:SI 5 "arm_borrow_operation" ""))
1137 (match_operand:SI 1 "s_register_operand" "r")))]
1138 "TARGET_ARM"
1139 "rsc%?\\t%0, %1, %3%S2"
1140 [(set_attr "conds" "use")
1141 (set_attr "predicable" "yes")
1142 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1143 (const_string "alu_shift_imm")
1144 (const_string "alu_shift_reg")))]
1145 )
1146
1147 ; Transform ((x << y) - 1) into ~(~(x - 1) << y), where x is a constant.
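; For example, with x = 4: (4 << y) - 1 == ~(~3 << y); for y = 2 both sides
; give 15.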
1148 (define_split
1149 [(set (match_operand:SI 0 "s_register_operand" "")
1150 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1151 (match_operand:SI 2 "s_register_operand" ""))
1152 (const_int -1)))
1153 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1154 "TARGET_32BIT"
1155 [(set (match_dup 3) (match_dup 1))
1156 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1157 "
1158 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1159 ")
1160
1161 (define_expand "addsf3"
1162 [(set (match_operand:SF 0 "s_register_operand")
1163 (plus:SF (match_operand:SF 1 "s_register_operand")
1164 (match_operand:SF 2 "s_register_operand")))]
1165 "TARGET_32BIT && TARGET_HARD_FLOAT"
1166 "
1167 ")
1168
1169 (define_expand "adddf3"
1170 [(set (match_operand:DF 0 "s_register_operand")
1171 (plus:DF (match_operand:DF 1 "s_register_operand")
1172 (match_operand:DF 2 "s_register_operand")))]
1173 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1174 "
1175 ")
1176
1177 (define_expand "subdi3"
1178 [(parallel
1179 [(set (match_operand:DI 0 "s_register_operand")
1180 (minus:DI (match_operand:DI 1 "reg_or_int_operand")
1181 (match_operand:DI 2 "s_register_operand")))
1182 (clobber (reg:CC CC_REGNUM))])]
1183 "TARGET_EITHER"
1184 "
1185 if (TARGET_THUMB1)
1186 {
1187 if (!REG_P (operands[1]))
1188 operands[1] = force_reg (DImode, operands[1]);
1189 }
1190 else
1191 {
1192 rtx lo_result, hi_result, lo_dest, hi_dest;
1193 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1194 rtx condition;
1195
1196 /* Since operands[1] may be an integer, pass it second, so that
1197 any necessary simplifications will be done on the decomposed
1198 constant. */
1199 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1200 &lo_op1, &hi_op1);
1201 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
1202 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
1203
1204 if (!arm_rhs_operand (lo_op1, SImode))
1205 lo_op1 = force_reg (SImode, lo_op1);
1206
1207 if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
1208 || !arm_rhs_operand (hi_op1, SImode))
1209 hi_op1 = force_reg (SImode, hi_op1);
1210
1211 rtx cc_reg;
1212 if (lo_op1 == const0_rtx)
1213 {
1214 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1215 emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
1216 }
1217 else if (CONST_INT_P (lo_op1))
1218 {
1219 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1220 emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
1221 GEN_INT (~UINTVAL (lo_op1))));
1222 }
1223 else
1224 {
1225 cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
1226 emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
1227 }
1228
1229 condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);
1230
1231 if (hi_op1 == const0_rtx)
1232 emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
1233 else
1234 emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));
1235
1236 if (lo_result != lo_dest)
1237 emit_move_insn (lo_result, lo_dest);
1238
1239 if (hi_result != hi_dest)
1240 emit_move_insn (hi_result, hi_dest);
1241
1242 DONE;
1243 }
1244 "
1245 )
1246
1247 (define_expand "subsi3"
1248 [(set (match_operand:SI 0 "s_register_operand")
1249 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1250 (match_operand:SI 2 "s_register_operand")))]
1251 "TARGET_EITHER"
1252 "
1253 if (CONST_INT_P (operands[1]))
1254 {
1255 if (TARGET_32BIT)
1256 {
1257 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1258 operands[1] = force_reg (SImode, operands[1]);
1259 else
1260 {
1261 arm_split_constant (MINUS, SImode, NULL_RTX,
1262 INTVAL (operands[1]), operands[0],
1263 operands[2],
1264 optimize && can_create_pseudo_p ());
1265 DONE;
1266 }
1267 }
1268 else /* TARGET_THUMB1 */
1269 operands[1] = force_reg (SImode, operands[1]);
1270 }
1271 "
1272 )
1273
1274 ; ??? Check Thumb-2 split length
1275 (define_insn_and_split "*arm_subsi3_insn"
1276 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
1277 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
1278 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
1279 "TARGET_32BIT"
1280 "@
1281 sub%?\\t%0, %1, %2
1282 sub%?\\t%0, %2
1283 sub%?\\t%0, %1, %2
1284 rsb%?\\t%0, %2, %1
1285 rsb%?\\t%0, %2, %1
1286 sub%?\\t%0, %1, %2
1287 sub%?\\t%0, %1, %2
1288 sub%?\\t%0, %1, %2
1289 #"
1290 "&& (CONST_INT_P (operands[1])
1291 && !const_ok_for_arm (INTVAL (operands[1])))"
1292 [(clobber (const_int 0))]
1293 "
1294 arm_split_constant (MINUS, SImode, curr_insn,
1295 INTVAL (operands[1]), operands[0], operands[2], 0);
1296 DONE;
1297 "
1298 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
1299 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
1300 (set_attr "predicable" "yes")
1301 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
1302 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
1303 )
1304
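; As with the addition peephole above, if the constant in (constant - reg)
; is not a valid immediate but its complement is, it is faster to move it
; into the scratch register (a single MVN) and use a register-register
; subtraction.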
1305 (define_peephole2
1306 [(match_scratch:SI 3 "r")
1307 (set (match_operand:SI 0 "arm_general_register_operand" "")
1308 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1309 (match_operand:SI 2 "arm_general_register_operand" "")))]
1310 "TARGET_32BIT
1311 && !const_ok_for_arm (INTVAL (operands[1]))
1312 && const_ok_for_arm (~INTVAL (operands[1]))"
1313 [(set (match_dup 3) (match_dup 1))
1314 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1315 ""
1316 )
1317
1318 (define_insn "subsi3_compare0"
1319 [(set (reg:CC_NOOV CC_REGNUM)
1320 (compare:CC_NOOV
1321 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1322 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1323 (const_int 0)))
1324 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1325 (minus:SI (match_dup 1) (match_dup 2)))]
1326 "TARGET_32BIT"
1327 "@
1328 subs%?\\t%0, %1, %2
1329 subs%?\\t%0, %1, %2
1330 rsbs%?\\t%0, %2, %1"
1331 [(set_attr "conds" "set")
1332 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
1333 )
1334
1335 (define_insn "subsi3_compare"
1336 [(set (reg:CC CC_REGNUM)
1337 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1338 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1339 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1340 (minus:SI (match_dup 1) (match_dup 2)))]
1341 "TARGET_32BIT"
1342 "@
1343 subs%?\\t%0, %1, %2
1344 subs%?\\t%0, %1, %2
1345 rsbs%?\\t%0, %2, %1"
1346 [(set_attr "conds" "set")
1347 (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
1348 )
1349
1350 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
1351 ;; rather than (0 cmp reg). This gives the same results for unsigned
1352 ;; and equality compares, which is what we mostly need here.
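;; (This works because A - B == ~B - ~A, so the carry and zero results of
;; the RSBS match those of the comparison as written.)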
1353 (define_insn "rsb_imm_compare"
1354 [(set (reg:CC_RSB CC_REGNUM)
1355 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
1356 (match_operand 3 "const_int_operand" "")))
1357 (set (match_operand:SI 0 "s_register_operand" "=r")
1358 (minus:SI (match_operand 1 "arm_immediate_operand" "I")
1359 (match_dup 2)))]
1360 "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
1361 "rsbs\\t%0, %2, %1"
1362 [(set_attr "conds" "set")
1363 (set_attr "type" "alus_imm")]
1364 )
1365
1366 ;; Similarly, but the result is unused.
1367 (define_insn "rsb_imm_compare_scratch"
1368 [(set (reg:CC_RSB CC_REGNUM)
1369 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
1370 (match_operand 1 "arm_not_immediate_operand" "K")))
1371 (clobber (match_scratch:SI 0 "=r"))]
1372 "TARGET_32BIT"
1373 "rsbs\\t%0, %2, #%B1"
1374 [(set_attr "conds" "set")
1375 (set_attr "type" "alus_imm")]
1376 )
1377
1378 ;; Compare the sum of a value plus a carry against a constant. Uses
1379 ;; RSC, so the result is swapped. Only available on Arm.
1380 (define_insn "rscsi3_<CC_EXTEND>out_scratch"
1381 [(set (reg:CC_SWP CC_REGNUM)
1382 (compare:CC_SWP
1383 (plus:DI (SE:DI (match_operand:SI 2 "s_register_operand" "r"))
1384 (match_operand:DI 3 "arm_borrow_operation" ""))
1385 (match_operand 1 "arm_immediate_operand" "I")))
1386 (clobber (match_scratch:SI 0 "=r"))]
1387 "TARGET_ARM"
1388 "rscs\\t%0, %2, %1"
1389 [(set_attr "conds" "set")
1390 (set_attr "type" "alus_imm")]
1391 )
1392
1393 (define_expand "subsf3"
1394 [(set (match_operand:SF 0 "s_register_operand")
1395 (minus:SF (match_operand:SF 1 "s_register_operand")
1396 (match_operand:SF 2 "s_register_operand")))]
1397 "TARGET_32BIT && TARGET_HARD_FLOAT"
1398 "
1399 ")
1400
1401 (define_expand "subdf3"
1402 [(set (match_operand:DF 0 "s_register_operand")
1403 (minus:DF (match_operand:DF 1 "s_register_operand")
1404 (match_operand:DF 2 "s_register_operand")))]
1405 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1406 "
1407 ")
1408
1409 \f
1410 ;; Multiplication insns
1411
1412 (define_expand "mulhi3"
1413 [(set (match_operand:HI 0 "s_register_operand")
1414 (mult:HI (match_operand:HI 1 "s_register_operand")
1415 (match_operand:HI 2 "s_register_operand")))]
1416 "TARGET_DSP_MULTIPLY"
1417 "
1418 {
1419 rtx result = gen_reg_rtx (SImode);
1420 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
1421 emit_move_insn (operands[0], gen_lowpart (HImode, result));
1422 DONE;
1423 }"
1424 )
1425
1426 (define_expand "mulsi3"
1427 [(set (match_operand:SI 0 "s_register_operand")
1428 (mult:SI (match_operand:SI 2 "s_register_operand")
1429 (match_operand:SI 1 "s_register_operand")))]
1430 "TARGET_EITHER"
1431 ""
1432 )
1433
1434 ;; Use `&' and then `0' to prevent operands 0 and 2 being the same
1435 (define_insn "*mul"
1436 [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
1437 (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
1438 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
1439 "TARGET_32BIT"
1440 "mul%?\\t%0, %2, %1"
1441 [(set_attr "type" "mul")
1442 (set_attr "predicable" "yes")
1443 (set_attr "arch" "t2,v6,nov6,nov6")
1444 (set_attr "length" "4")
1445 (set_attr "predicable_short_it" "yes,no,*,*")]
1446 )
1447
1448 ;; MLA and MLS instructions. Use operand 1 for the accumulator to prefer
1449 ;; reusing the same register.
1450
1451 (define_insn "*mla"
1452 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
1453 (plus:SI
1454 (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
1455 (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
1456 (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
1457 "TARGET_32BIT"
1458 "mla%?\\t%0, %3, %2, %1"
1459 [(set_attr "type" "mla")
1460 (set_attr "predicable" "yes")
1461 (set_attr "arch" "v6,nov6,nov6,nov6")]
1462 )
1463
1464 (define_insn "*mls"
1465 [(set (match_operand:SI 0 "s_register_operand" "=r")
1466 (minus:SI
1467 (match_operand:SI 1 "s_register_operand" "r")
1468 (mult:SI (match_operand:SI 3 "s_register_operand" "r")
1469 (match_operand:SI 2 "s_register_operand" "r"))))]
1470 "TARGET_32BIT && arm_arch_thumb2"
1471 "mls%?\\t%0, %3, %2, %1"
1472 [(set_attr "type" "mla")
1473 (set_attr "predicable" "yes")]
1474 )
1475
1476 (define_insn "*mulsi3_compare0"
1477 [(set (reg:CC_NOOV CC_REGNUM)
1478 (compare:CC_NOOV (mult:SI
1479 (match_operand:SI 2 "s_register_operand" "r,r")
1480 (match_operand:SI 1 "s_register_operand" "%0,r"))
1481 (const_int 0)))
1482 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1483 (mult:SI (match_dup 2) (match_dup 1)))]
1484 "TARGET_ARM && !arm_arch6"
1485 "muls%?\\t%0, %2, %1"
1486 [(set_attr "conds" "set")
1487 (set_attr "type" "muls")]
1488 )
1489
1490 (define_insn "*mulsi3_compare0_v6"
1491 [(set (reg:CC_NOOV CC_REGNUM)
1492 (compare:CC_NOOV (mult:SI
1493 (match_operand:SI 2 "s_register_operand" "r")
1494 (match_operand:SI 1 "s_register_operand" "r"))
1495 (const_int 0)))
1496 (set (match_operand:SI 0 "s_register_operand" "=r")
1497 (mult:SI (match_dup 2) (match_dup 1)))]
1498 "TARGET_ARM && arm_arch6 && optimize_size"
1499 "muls%?\\t%0, %2, %1"
1500 [(set_attr "conds" "set")
1501 (set_attr "type" "muls")]
1502 )
1503
1504 (define_insn "*mulsi_compare0_scratch"
1505 [(set (reg:CC_NOOV CC_REGNUM)
1506 (compare:CC_NOOV (mult:SI
1507 (match_operand:SI 2 "s_register_operand" "r,r")
1508 (match_operand:SI 1 "s_register_operand" "%0,r"))
1509 (const_int 0)))
1510 (clobber (match_scratch:SI 0 "=&r,&r"))]
1511 "TARGET_ARM && !arm_arch6"
1512 "muls%?\\t%0, %2, %1"
1513 [(set_attr "conds" "set")
1514 (set_attr "type" "muls")]
1515 )
1516
1517 (define_insn "*mulsi_compare0_scratch_v6"
1518 [(set (reg:CC_NOOV CC_REGNUM)
1519 (compare:CC_NOOV (mult:SI
1520 (match_operand:SI 2 "s_register_operand" "r")
1521 (match_operand:SI 1 "s_register_operand" "r"))
1522 (const_int 0)))
1523 (clobber (match_scratch:SI 0 "=r"))]
1524 "TARGET_ARM && arm_arch6 && optimize_size"
1525 "muls%?\\t%0, %2, %1"
1526 [(set_attr "conds" "set")
1527 (set_attr "type" "muls")]
1528 )
1529
1530 (define_insn "*mulsi3addsi_compare0"
1531 [(set (reg:CC_NOOV CC_REGNUM)
1532 (compare:CC_NOOV
1533 (plus:SI (mult:SI
1534 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1535 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1536 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1537 (const_int 0)))
1538 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1539 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1540 (match_dup 3)))]
1541 "TARGET_ARM && arm_arch6"
1542 "mlas%?\\t%0, %2, %1, %3"
1543 [(set_attr "conds" "set")
1544 (set_attr "type" "mlas")]
1545 )
1546
1547 (define_insn "*mulsi3addsi_compare0_v6"
1548 [(set (reg:CC_NOOV CC_REGNUM)
1549 (compare:CC_NOOV
1550 (plus:SI (mult:SI
1551 (match_operand:SI 2 "s_register_operand" "r")
1552 (match_operand:SI 1 "s_register_operand" "r"))
1553 (match_operand:SI 3 "s_register_operand" "r"))
1554 (const_int 0)))
1555 (set (match_operand:SI 0 "s_register_operand" "=r")
1556 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1557 (match_dup 3)))]
1558 "TARGET_ARM && arm_arch6 && optimize_size"
1559 "mlas%?\\t%0, %2, %1, %3"
1560 [(set_attr "conds" "set")
1561 (set_attr "type" "mlas")]
1562 )
1563
1564 (define_insn "*mulsi3addsi_compare0_scratch"
1565 [(set (reg:CC_NOOV CC_REGNUM)
1566 (compare:CC_NOOV
1567 (plus:SI (mult:SI
1568 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1569 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1570 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1571 (const_int 0)))
1572 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1573 "TARGET_ARM && !arm_arch6"
1574 "mlas%?\\t%0, %2, %1, %3"
1575 [(set_attr "conds" "set")
1576 (set_attr "type" "mlas")]
1577 )
1578
1579 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1580 [(set (reg:CC_NOOV CC_REGNUM)
1581 (compare:CC_NOOV
1582 (plus:SI (mult:SI
1583 (match_operand:SI 2 "s_register_operand" "r")
1584 (match_operand:SI 1 "s_register_operand" "r"))
1585 (match_operand:SI 3 "s_register_operand" "r"))
1586 (const_int 0)))
1587 (clobber (match_scratch:SI 0 "=r"))]
1588 "TARGET_ARM && arm_arch6 && optimize_size"
1589 "mlas%?\\t%0, %2, %1, %3"
1590 [(set_attr "conds" "set")
1591 (set_attr "type" "mlas")]
1592 )
1593
1594 ;; 32x32->64 widening multiply.
1595 ;; The only difference between the v3-5 and v6+ versions is the requirement
1596 ;; that the output does not overlap with either input.
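;; The expander splits the DImode destination into its SImode low and high
;; halves and hands both to the <US>mull pattern, which writes the two
;; halves with a single umull/smull instruction.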
1597
1598 (define_expand "<Us>mulsidi3"
1599 [(set (match_operand:DI 0 "s_register_operand")
1600 (mult:DI
1601 (SE:DI (match_operand:SI 1 "s_register_operand"))
1602 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
1603 "TARGET_32BIT"
1604 {
1605 emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
1606 gen_highpart (SImode, operands[0]),
1607 operands[1], operands[2]));
1608 DONE;
1609 }
1610 )
1611
1612 (define_insn "<US>mull"
1613 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
1614 (mult:SI
1615 (match_operand:SI 2 "s_register_operand" "%r,r")
1616 (match_operand:SI 3 "s_register_operand" "r,r")))
1617 (set (match_operand:SI 1 "s_register_operand" "=r,&r")
1618 (truncate:SI
1619 (lshiftrt:DI
1620 (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
1621 (const_int 32))))]
1622 "TARGET_32BIT"
1623 "<US>mull%?\\t%0, %1, %2, %3"
1624 [(set_attr "type" "umull")
1625 (set_attr "predicable" "yes")
1626 (set_attr "arch" "v6,nov6")]
1627 )
1628
1629 (define_expand "<Us>maddsidi4"
1630 [(set (match_operand:DI 0 "s_register_operand")
1631 (plus:DI
1632 (mult:DI
1633 (SE:DI (match_operand:SI 1 "s_register_operand"))
1634 (SE:DI (match_operand:SI 2 "s_register_operand")))
1635 (match_operand:DI 3 "s_register_operand")))]
1636 "TARGET_32BIT"
1637 {
1638 emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
1639 gen_lowpart (SImode, operands[3]),
1640 gen_highpart (SImode, operands[0]),
1641 gen_highpart (SImode, operands[3]),
1642 operands[1], operands[2]));
1643 DONE;
1644 }
1645 )
1646
1647 (define_insn "<US>mlal"
1648 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
1649 (plus:SI
1650 (mult:SI
1651 (match_operand:SI 4 "s_register_operand" "%r,r")
1652 (match_operand:SI 5 "s_register_operand" "r,r"))
1653 (match_operand:SI 1 "s_register_operand" "0,0")))
1654 (set (match_operand:SI 2 "s_register_operand" "=r,&r")
1655 (plus:SI
1656 (truncate:SI
1657 (lshiftrt:DI
1658 (plus:DI
1659 (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
1660 (zero_extend:DI (match_dup 1)))
1661 (const_int 32)))
1662 (match_operand:SI 3 "s_register_operand" "2,2")))]
1663 "TARGET_32BIT"
1664 "<US>mlal%?\\t%0, %2, %4, %5"
1665 [(set_attr "type" "umlal")
1666 (set_attr "predicable" "yes")
1667 (set_attr "arch" "v6,nov6")]
1668 )
1669
1670 (define_expand "<US>mulsi3_highpart"
1671 [(parallel
1672 [(set (match_operand:SI 0 "s_register_operand")
1673 (truncate:SI
1674 (lshiftrt:DI
1675 (mult:DI
1676 (SE:DI (match_operand:SI 1 "s_register_operand"))
1677 (SE:DI (match_operand:SI 2 "s_register_operand")))
1678 (const_int 32))))
1679 (clobber (match_scratch:SI 3 ""))])]
1680 "TARGET_32BIT"
1681 ""
1682 )
1683
1684 (define_insn "*<US>mull_high"
1685 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
1686 (truncate:SI
1687 (lshiftrt:DI
1688 (mult:DI
1689 (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
1690 (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
1691 (const_int 32))))
1692 (clobber (match_scratch:SI 3 "=r,&r,&r"))]
1693 "TARGET_32BIT"
1694 "<US>mull%?\\t%3, %0, %2, %1"
1695 [(set_attr "type" "umull")
1696 (set_attr "predicable" "yes")
1697 (set_attr "arch" "v6,nov6,nov6")]
1698 )
1699
1700 (define_insn "mulhisi3"
1701 [(set (match_operand:SI 0 "s_register_operand" "=r")
1702 (mult:SI (sign_extend:SI
1703 (match_operand:HI 1 "s_register_operand" "%r"))
1704 (sign_extend:SI
1705 (match_operand:HI 2 "s_register_operand" "r"))))]
1706 "TARGET_DSP_MULTIPLY"
1707 "smulbb%?\\t%0, %1, %2"
1708 [(set_attr "type" "smulxy")
1709 (set_attr "predicable" "yes")]
1710 )
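;; smulbb multiplies the bottom 16-bit halves of its operands as signed
;; values, e.g. "smulbb r0, r1, r2" computes (s16) r1 * (s16) r2 into r0
;; (registers assumed); the tb/bt/tt variants below pick the top or bottom
;; half of each input.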
1711
1712 (define_insn "*mulhisi3tb"
1713 [(set (match_operand:SI 0 "s_register_operand" "=r")
1714 (mult:SI (ashiftrt:SI
1715 (match_operand:SI 1 "s_register_operand" "r")
1716 (const_int 16))
1717 (sign_extend:SI
1718 (match_operand:HI 2 "s_register_operand" "r"))))]
1719 "TARGET_DSP_MULTIPLY"
1720 "smultb%?\\t%0, %1, %2"
1721 [(set_attr "type" "smulxy")
1722 (set_attr "predicable" "yes")]
1723 )
1724
1725 (define_insn "*mulhisi3bt"
1726 [(set (match_operand:SI 0 "s_register_operand" "=r")
1727 (mult:SI (sign_extend:SI
1728 (match_operand:HI 1 "s_register_operand" "r"))
1729 (ashiftrt:SI
1730 (match_operand:SI 2 "s_register_operand" "r")
1731 (const_int 16))))]
1732 "TARGET_DSP_MULTIPLY"
1733 "smulbt%?\\t%0, %1, %2"
1734 [(set_attr "type" "smulxy")
1735 (set_attr "predicable" "yes")]
1736 )
1737
1738 (define_insn "*mulhisi3tt"
1739 [(set (match_operand:SI 0 "s_register_operand" "=r")
1740 (mult:SI (ashiftrt:SI
1741 (match_operand:SI 1 "s_register_operand" "r")
1742 (const_int 16))
1743 (ashiftrt:SI
1744 (match_operand:SI 2 "s_register_operand" "r")
1745 (const_int 16))))]
1746 "TARGET_DSP_MULTIPLY"
1747 "smultt%?\\t%0, %1, %2"
1748 [(set_attr "type" "smulxy")
1749 (set_attr "predicable" "yes")]
1750 )
1751
1752 (define_insn "maddhisi4"
1753 [(set (match_operand:SI 0 "s_register_operand" "=r")
1754 (plus:SI (mult:SI (sign_extend:SI
1755 (match_operand:HI 1 "s_register_operand" "r"))
1756 (sign_extend:SI
1757 (match_operand:HI 2 "s_register_operand" "r")))
1758 (match_operand:SI 3 "s_register_operand" "r")))]
1759 "TARGET_DSP_MULTIPLY"
1760 "smlabb%?\\t%0, %1, %2, %3"
1761 [(set_attr "type" "smlaxy")
1762 (set_attr "predicable" "yes")]
1763 )
1764
1765 ;; Note: there is no maddhisi4bt because this one is the canonical form
1766 (define_insn "*maddhisi4tb"
1767 [(set (match_operand:SI 0 "s_register_operand" "=r")
1768 (plus:SI (mult:SI (ashiftrt:SI
1769 (match_operand:SI 1 "s_register_operand" "r")
1770 (const_int 16))
1771 (sign_extend:SI
1772 (match_operand:HI 2 "s_register_operand" "r")))
1773 (match_operand:SI 3 "s_register_operand" "r")))]
1774 "TARGET_DSP_MULTIPLY"
1775 "smlatb%?\\t%0, %1, %2, %3"
1776 [(set_attr "type" "smlaxy")
1777 (set_attr "predicable" "yes")]
1778 )
1779
1780 (define_insn "*maddhisi4tt"
1781 [(set (match_operand:SI 0 "s_register_operand" "=r")
1782 (plus:SI (mult:SI (ashiftrt:SI
1783 (match_operand:SI 1 "s_register_operand" "r")
1784 (const_int 16))
1785 (ashiftrt:SI
1786 (match_operand:SI 2 "s_register_operand" "r")
1787 (const_int 16)))
1788 (match_operand:SI 3 "s_register_operand" "r")))]
1789 "TARGET_DSP_MULTIPLY"
1790 "smlatt%?\\t%0, %1, %2, %3"
1791 [(set_attr "type" "smlaxy")
1792 (set_attr "predicable" "yes")]
1793 )
1794
1795 (define_insn "maddhidi4"
1796 [(set (match_operand:DI 0 "s_register_operand" "=r")
1797 (plus:DI
1798 (mult:DI (sign_extend:DI
1799 (match_operand:HI 1 "s_register_operand" "r"))
1800 (sign_extend:DI
1801 (match_operand:HI 2 "s_register_operand" "r")))
1802 (match_operand:DI 3 "s_register_operand" "0")))]
1803 "TARGET_DSP_MULTIPLY"
1804 "smlalbb%?\\t%Q0, %R0, %1, %2"
1805 [(set_attr "type" "smlalxy")
1806 (set_attr "predicable" "yes")])
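;; In the smlalbb template above, %Q0 and %R0 print the low and high SImode
;; halves of the DImode accumulator, so the emitted instruction has the form
;; "smlalbb rLO, rHI, r1, r2" (a sketch; register allocation assumed).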
1807
1808 ;; Note: there is no maddhidi4bt because this one is the canonical form
1809 (define_insn "*maddhidi4tb"
1810 [(set (match_operand:DI 0 "s_register_operand" "=r")
1811 (plus:DI
1812 (mult:DI (sign_extend:DI
1813 (ashiftrt:SI
1814 (match_operand:SI 1 "s_register_operand" "r")
1815 (const_int 16)))
1816 (sign_extend:DI
1817 (match_operand:HI 2 "s_register_operand" "r")))
1818 (match_operand:DI 3 "s_register_operand" "0")))]
1819 "TARGET_DSP_MULTIPLY"
1820 "smlaltb%?\\t%Q0, %R0, %1, %2"
1821 [(set_attr "type" "smlalxy")
1822 (set_attr "predicable" "yes")])
1823
1824 (define_insn "*maddhidi4tt"
1825 [(set (match_operand:DI 0 "s_register_operand" "=r")
1826 (plus:DI
1827 (mult:DI (sign_extend:DI
1828 (ashiftrt:SI
1829 (match_operand:SI 1 "s_register_operand" "r")
1830 (const_int 16)))
1831 (sign_extend:DI
1832 (ashiftrt:SI
1833 (match_operand:SI 2 "s_register_operand" "r")
1834 (const_int 16))))
1835 (match_operand:DI 3 "s_register_operand" "0")))]
1836 "TARGET_DSP_MULTIPLY"
1837 "smlaltt%?\\t%Q0, %R0, %1, %2"
1838 [(set_attr "type" "smlalxy")
1839 (set_attr "predicable" "yes")])
1840
1841 (define_expand "mulsf3"
1842 [(set (match_operand:SF 0 "s_register_operand")
1843 (mult:SF (match_operand:SF 1 "s_register_operand")
1844 (match_operand:SF 2 "s_register_operand")))]
1845 "TARGET_32BIT && TARGET_HARD_FLOAT"
1846 "
1847 ")
1848
1849 (define_expand "muldf3"
1850 [(set (match_operand:DF 0 "s_register_operand")
1851 (mult:DF (match_operand:DF 1 "s_register_operand")
1852 (match_operand:DF 2 "s_register_operand")))]
1853 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1854 "
1855 ")
1856 \f
1857 ;; Division insns
1858
1859 (define_expand "divsf3"
1860 [(set (match_operand:SF 0 "s_register_operand")
1861 (div:SF (match_operand:SF 1 "s_register_operand")
1862 (match_operand:SF 2 "s_register_operand")))]
1863 "TARGET_32BIT && TARGET_HARD_FLOAT"
1864 "")
1865
1866 (define_expand "divdf3"
1867 [(set (match_operand:DF 0 "s_register_operand")
1868 (div:DF (match_operand:DF 1 "s_register_operand")
1869 (match_operand:DF 2 "s_register_operand")))]
1870 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
1871 "")
1872 \f
1873
1874 ; Expand logical operations. The mid-end expander does not split off memory
1875 ; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
1876 ; So an explicit expander is needed to generate better code.
1877
1878 (define_expand "<LOGICAL:optab>di3"
1879 [(set (match_operand:DI 0 "s_register_operand")
1880 (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
1881 (match_operand:DI 2 "arm_<optab>di_operand")))]
1882 "TARGET_32BIT"
1883 {
1884 rtx low = simplify_gen_binary (<CODE>, SImode,
1885 gen_lowpart (SImode, operands[1]),
1886 gen_lowpart (SImode, operands[2]));
1887 rtx high = simplify_gen_binary (<CODE>, SImode,
1888 gen_highpart (SImode, operands[1]),
1889 gen_highpart_mode (SImode, DImode,
1890 operands[2]));
1891
1892 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
1893 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
1894 DONE;
1895 }
1896 )
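;; For instance, a DImode AND of two register pairs expands to two SImode
;; ANDs, roughly (register allocation assumed):
;;   and   r0, r0, r2   @ low halves
;;   and   r1, r1, r3   @ high halves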
1897
1898 (define_expand "one_cmpldi2"
1899 [(set (match_operand:DI 0 "s_register_operand")
1900 (not:DI (match_operand:DI 1 "s_register_operand")))]
1901 "TARGET_32BIT"
1902 {
1903 rtx low = simplify_gen_unary (NOT, SImode,
1904 gen_lowpart (SImode, operands[1]),
1905 SImode);
1906 rtx high = simplify_gen_unary (NOT, SImode,
1907 gen_highpart_mode (SImode, DImode,
1908 operands[1]),
1909 SImode);
1910
1911 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
1912 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
1913 DONE;
1914 }
1915 )
1916
1917 ;; Split DImode and, ior, xor operations. Simply perform the logical
1918 ;; operation on the upper and lower halves of the registers.
1919 ;; This is needed for atomic operations in arm_split_atomic_op.
1920 ;; Avoid splitting IWMMXT instructions.
1921 (define_split
1922 [(set (match_operand:DI 0 "s_register_operand" "")
1923 (match_operator:DI 6 "logical_binary_operator"
1924 [(match_operand:DI 1 "s_register_operand" "")
1925 (match_operand:DI 2 "s_register_operand" "")]))]
1926 "TARGET_32BIT && reload_completed
1927 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1928 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1929 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1930 "
1931 {
1932 operands[3] = gen_highpart (SImode, operands[0]);
1933 operands[0] = gen_lowpart (SImode, operands[0]);
1934 operands[4] = gen_highpart (SImode, operands[1]);
1935 operands[1] = gen_lowpart (SImode, operands[1]);
1936 operands[5] = gen_highpart (SImode, operands[2]);
1937 operands[2] = gen_lowpart (SImode, operands[2]);
1938 }"
1939 )
1940
1941 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
1942 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
1943 (define_split
1944 [(set (match_operand:DI 0 "s_register_operand")
1945 (not:DI (match_operand:DI 1 "s_register_operand")))]
1946 "TARGET_32BIT"
1947 [(set (match_dup 0) (not:SI (match_dup 1)))
1948 (set (match_dup 2) (not:SI (match_dup 3)))]
1949 "
1950 {
1951 operands[2] = gen_highpart (SImode, operands[0]);
1952 operands[0] = gen_lowpart (SImode, operands[0]);
1953 operands[3] = gen_highpart (SImode, operands[1]);
1954 operands[1] = gen_lowpart (SImode, operands[1]);
1955 }"
1956 )
1957
1958 (define_expand "andsi3"
1959 [(set (match_operand:SI 0 "s_register_operand")
1960 (and:SI (match_operand:SI 1 "s_register_operand")
1961 (match_operand:SI 2 "reg_or_int_operand")))]
1962 "TARGET_EITHER"
1963 "
1964 if (TARGET_32BIT)
1965 {
1966 if (CONST_INT_P (operands[2]))
1967 {
1968 if (INTVAL (operands[2]) == 255 && arm_arch6)
1969 {
1970 operands[1] = convert_to_mode (QImode, operands[1], 1);
1971 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
1972 operands[1]));
1973 DONE;
1974 }
1975 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
1976 operands[2] = force_reg (SImode, operands[2]);
1977 else
1978 {
1979 arm_split_constant (AND, SImode, NULL_RTX,
1980 INTVAL (operands[2]), operands[0],
1981 operands[1],
1982 optimize && can_create_pseudo_p ());
1983
1984 DONE;
1985 }
1986 }
1987 }
1988 else /* TARGET_THUMB1 */
1989 {
1990 if (!CONST_INT_P (operands[2]))
1991 {
1992 rtx tmp = force_reg (SImode, operands[2]);
1993 if (rtx_equal_p (operands[0], operands[1]))
1994 operands[2] = tmp;
1995 else
1996 {
1997 operands[2] = operands[1];
1998 operands[1] = tmp;
1999 }
2000 }
2001 else
2002 {
2003 int i;
2004
2005 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2006 {
2007 operands[2] = force_reg (SImode,
2008 GEN_INT (~INTVAL (operands[2])));
2009
2010 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2011
2012 DONE;
2013 }
2014
2015 for (i = 9; i <= 31; i++)
2016 {
2017 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
2018 {
2019 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2020 const0_rtx));
2021 DONE;
2022 }
2023 else if ((HOST_WIDE_INT_1 << i) - 1
2024 == ~INTVAL (operands[2]))
2025 {
2026 rtx shift = GEN_INT (i);
2027 rtx reg = gen_reg_rtx (SImode);
2028
2029 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2030 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2031
2032 DONE;
2033 }
2034 }
2035
2036 operands[2] = force_reg (SImode, operands[2]);
2037 }
2038 }
2039 "
2040 )
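;; A sketch of the Thumb-1 shift-pair shortcut above: an AND with a mask of
;; the form ~((1 << i) - 1) clears the low i bits with two shifts, e.g. for
;; i == 18 (registers assumed):
;;   lsrs  r0, r1, #18
;;   lsls  r0, r0, #18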
2041
2042 ; ??? Check split length for Thumb-2
2043 (define_insn_and_split "*arm_andsi3_insn"
2044 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2045 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2046 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2047 "TARGET_32BIT"
2048 "@
2049 and%?\\t%0, %1, %2
2050 and%?\\t%0, %1, %2
2051 bic%?\\t%0, %1, #%B2
2052 and%?\\t%0, %1, %2
2053 #"
2054 "TARGET_32BIT
2055 && CONST_INT_P (operands[2])
2056 && !(const_ok_for_arm (INTVAL (operands[2]))
2057 || const_ok_for_arm (~INTVAL (operands[2])))"
2058 [(clobber (const_int 0))]
2059 "
2060 arm_split_constant (AND, SImode, curr_insn,
2061 INTVAL (operands[2]), operands[0], operands[1], 0);
2062 DONE;
2063 "
2064 [(set_attr "length" "4,4,4,4,16")
2065 (set_attr "predicable" "yes")
2066 (set_attr "predicable_short_it" "no,yes,no,no,no")
2067 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
2068 )
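;; When the split above fires, arm_split_constant breaks the mask into pieces
;; that are valid ARM immediates.  One plausible split (an illustration, not
;; necessarily the exact sequence chosen) for "x & 0x00ff00ff" is a pair of
;; BICs of the complementary bytes:
;;   bic   r0, r1, #0xff000000
;;   bic   r0, r0, #0x0000ff00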
2069
2070 (define_insn "*andsi3_compare0"
2071 [(set (reg:CC_NOOV CC_REGNUM)
2072 (compare:CC_NOOV
2073 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2074 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
2075 (const_int 0)))
2076 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2077 (and:SI (match_dup 1) (match_dup 2)))]
2078 "TARGET_32BIT"
2079 "@
2080 ands%?\\t%0, %1, %2
2081 bics%?\\t%0, %1, #%B2
2082 ands%?\\t%0, %1, %2"
2083 [(set_attr "conds" "set")
2084 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
2085 )
2086
2087 (define_insn "*andsi3_compare0_scratch"
2088 [(set (reg:CC_NOOV CC_REGNUM)
2089 (compare:CC_NOOV
2090 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
2091 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
2092 (const_int 0)))
2093 (clobber (match_scratch:SI 2 "=X,r,X"))]
2094 "TARGET_32BIT"
2095 "@
2096 tst%?\\t%0, %1
2097 bics%?\\t%2, %0, #%B1
2098 tst%?\\t%0, %1"
2099 [(set_attr "conds" "set")
2100 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
2101 )
2102
2103 (define_insn "*zeroextractsi_compare0_scratch"
2104 [(set (reg:CC_NOOV CC_REGNUM)
2105 (compare:CC_NOOV (zero_extract:SI
2106 (match_operand:SI 0 "s_register_operand" "r")
2107 (match_operand 1 "const_int_operand" "n")
2108 (match_operand 2 "const_int_operand" "n"))
2109 (const_int 0)))]
2110 "TARGET_32BIT
2111 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2112 && INTVAL (operands[1]) > 0
2113 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2114 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2115 "*
2116 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2117 << INTVAL (operands[2]));
2118 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2119 return \"\";
2120 "
2121 [(set_attr "conds" "set")
2122 (set_attr "predicable" "yes")
2123 (set_attr "type" "logics_imm")]
2124 )
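;; For example, testing a 3-bit field starting at bit 4 against zero becomes
;; "tst r0, #0x70", since ((1 << 3) - 1) << 4 == 0x70 (a worked instance of
;; the mask computation above; register assumed).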
2125
2126 (define_insn_and_split "*ne_zeroextractsi"
2127 [(set (match_operand:SI 0 "s_register_operand" "=r")
2128 (ne:SI (zero_extract:SI
2129 (match_operand:SI 1 "s_register_operand" "r")
2130 (match_operand:SI 2 "const_int_operand" "n")
2131 (match_operand:SI 3 "const_int_operand" "n"))
2132 (const_int 0)))
2133 (clobber (reg:CC CC_REGNUM))]
2134 "TARGET_32BIT
2135 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2136 && INTVAL (operands[2]) > 0
2137 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2138 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2139 "#"
2140 "TARGET_32BIT
2141 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2142 && INTVAL (operands[2]) > 0
2143 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2144 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2145 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2146 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2147 (const_int 0)))
2148 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2149 (set (match_dup 0)
2150 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2151 (match_dup 0) (const_int 1)))]
2152 "
2153 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2154 << INTVAL (operands[3]));
2155 "
2156 [(set_attr "conds" "clob")
2157 (set (attr "length")
2158 (if_then_else (eq_attr "is_thumb" "yes")
2159 (const_int 12)
2160 (const_int 8)))
2161 (set_attr "type" "multiple")]
2162 )
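;; The split above yields a two-insn sequence of roughly this shape (mask and
;; registers assumed):
;;   ands  r0, r1, #0x70     @ extract the field and compare it with zero
;;   movne r0, #1            @ field != 0 -> 1, otherwise keep the 0 in r0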
2163
2164 (define_insn_and_split "*ne_zeroextractsi_shifted"
2165 [(set (match_operand:SI 0 "s_register_operand" "=r")
2166 (ne:SI (zero_extract:SI
2167 (match_operand:SI 1 "s_register_operand" "r")
2168 (match_operand:SI 2 "const_int_operand" "n")
2169 (const_int 0))
2170 (const_int 0)))
2171 (clobber (reg:CC CC_REGNUM))]
2172 "TARGET_ARM"
2173 "#"
2174 "TARGET_ARM"
2175 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2176 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2177 (const_int 0)))
2178 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2179 (set (match_dup 0)
2180 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2181 (match_dup 0) (const_int 1)))]
2182 "
2183 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2184 "
2185 [(set_attr "conds" "clob")
2186 (set_attr "length" "8")
2187 (set_attr "type" "multiple")]
2188 )
2189
2190 (define_insn_and_split "*ite_ne_zeroextractsi"
2191 [(set (match_operand:SI 0 "s_register_operand" "=r")
2192 (if_then_else:SI (ne (zero_extract:SI
2193 (match_operand:SI 1 "s_register_operand" "r")
2194 (match_operand:SI 2 "const_int_operand" "n")
2195 (match_operand:SI 3 "const_int_operand" "n"))
2196 (const_int 0))
2197 (match_operand:SI 4 "arm_not_operand" "rIK")
2198 (const_int 0)))
2199 (clobber (reg:CC CC_REGNUM))]
2200 "TARGET_ARM
2201 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2202 && INTVAL (operands[2]) > 0
2203 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2204 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2205 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2206 "#"
2207 "TARGET_ARM
2208 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2209 && INTVAL (operands[2]) > 0
2210 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2211 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2212 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2213 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2214 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2215 (const_int 0)))
2216 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2217 (set (match_dup 0)
2218 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2219 (match_dup 0) (match_dup 4)))]
2220 "
2221 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2222 << INTVAL (operands[3]));
2223 "
2224 [(set_attr "conds" "clob")
2225 (set_attr "length" "8")
2226 (set_attr "type" "multiple")]
2227 )
2228
2229 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2230 [(set (match_operand:SI 0 "s_register_operand" "=r")
2231 (if_then_else:SI (ne (zero_extract:SI
2232 (match_operand:SI 1 "s_register_operand" "r")
2233 (match_operand:SI 2 "const_int_operand" "n")
2234 (const_int 0))
2235 (const_int 0))
2236 (match_operand:SI 3 "arm_not_operand" "rIK")
2237 (const_int 0)))
2238 (clobber (reg:CC CC_REGNUM))]
2239 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2240 "#"
2241 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2242 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2243 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2244 (const_int 0)))
2245 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2246 (set (match_dup 0)
2247 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2248 (match_dup 0) (match_dup 3)))]
2249 "
2250 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2251 "
2252 [(set_attr "conds" "clob")
2253 (set_attr "length" "8")
2254 (set_attr "type" "multiple")]
2255 )
2256
2257 ;; ??? Thumb-2 has bitfield insert/extract instructions; consider using them here.
2258 (define_split
2259 [(set (match_operand:SI 0 "s_register_operand" "")
2260 (match_operator:SI 1 "shiftable_operator"
2261 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2262 (match_operand:SI 3 "const_int_operand" "")
2263 (match_operand:SI 4 "const_int_operand" ""))
2264 (match_operand:SI 5 "s_register_operand" "")]))
2265 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2266 "TARGET_ARM"
2267 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2268 (set (match_dup 0)
2269 (match_op_dup 1
2270 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2271 (match_dup 5)]))]
2272 "{
2273 HOST_WIDE_INT temp = INTVAL (operands[3]);
2274
2275 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2276 operands[4] = GEN_INT (32 - temp);
2277 }"
2278 )
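;; In other words, a w-bit field at bit position p feeds the operator as
;; (x << (32 - w - p)) >> (32 - w), using a logical right shift here and an
;; arithmetic right shift in the sign_extract variant that follows.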
2279
2280 (define_split
2281 [(set (match_operand:SI 0 "s_register_operand" "")
2282 (match_operator:SI 1 "shiftable_operator"
2283 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2284 (match_operand:SI 3 "const_int_operand" "")
2285 (match_operand:SI 4 "const_int_operand" ""))
2286 (match_operand:SI 5 "s_register_operand" "")]))
2287 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2288 "TARGET_ARM"
2289 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2290 (set (match_dup 0)
2291 (match_op_dup 1
2292 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2293 (match_dup 5)]))]
2294 "{
2295 HOST_WIDE_INT temp = INTVAL (operands[3]);
2296
2297 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2298 operands[4] = GEN_INT (32 - temp);
2299 }"
2300 )
2301
2302 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2303 ;;; represented by the bitfield, then this will produce incorrect results.
2304 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2305 ;;; which have a real bit-field insert instruction, the truncation happens
2306 ;;; in the bit-field insert instruction itself. Since arm does not have a
2307 ;;; bit-field insert instruction, we would have to emit code here to truncate
2308 ;;; the value before we insert. This loses some of the advantage of having
2309 ;;; this insv pattern, so this pattern needs to be reevaluated.
2310
2311 (define_expand "insv"
2312 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
2313 (match_operand 1 "general_operand")
2314 (match_operand 2 "general_operand"))
2315 (match_operand 3 "reg_or_int_operand"))]
2316 "TARGET_ARM || arm_arch_thumb2"
2317 "
2318 {
2319 int start_bit = INTVAL (operands[2]);
2320 int width = INTVAL (operands[1]);
2321 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
2322 rtx target, subtarget;
2323
2324 if (arm_arch_thumb2)
2325 {
2326 if (unaligned_access && MEM_P (operands[0])
2327 && s_register_operand (operands[3], GET_MODE (operands[3]))
2328 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2329 {
2330 rtx base_addr;
2331
2332 if (BYTES_BIG_ENDIAN)
2333 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2334 - start_bit;
2335
2336 if (width == 32)
2337 {
2338 base_addr = adjust_address (operands[0], SImode,
2339 start_bit / BITS_PER_UNIT);
2340 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2341 }
2342 else
2343 {
2344 rtx tmp = gen_reg_rtx (HImode);
2345
2346 base_addr = adjust_address (operands[0], HImode,
2347 start_bit / BITS_PER_UNIT);
2348 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2349 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2350 }
2351 DONE;
2352 }
2353 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2354 {
2355 bool use_bfi = TRUE;
2356
2357 if (CONST_INT_P (operands[3]))
2358 {
2359 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2360
2361 if (val == 0)
2362 {
2363 emit_insn (gen_insv_zero (operands[0], operands[1],
2364 operands[2]));
2365 DONE;
2366 }
2367
2368 /* See if the set can be done with a single orr instruction. */
2369 if (val == mask && const_ok_for_arm (val << start_bit))
2370 use_bfi = FALSE;
2371 }
2372
2373 if (use_bfi)
2374 {
2375 if (!REG_P (operands[3]))
2376 operands[3] = force_reg (SImode, operands[3]);
2377
2378 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2379 operands[3]));
2380 DONE;
2381 }
2382 }
2383 else
2384 FAIL;
2385 }
2386
2387 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
2388 FAIL;
2389
2390 target = copy_rtx (operands[0]);
2391 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2392 subreg as the final target. */
2393 if (GET_CODE (target) == SUBREG)
2394 {
2395 subtarget = gen_reg_rtx (SImode);
2396 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2397 < GET_MODE_SIZE (SImode))
2398 target = SUBREG_REG (target);
2399 }
2400 else
2401 subtarget = target;
2402
2403 if (CONST_INT_P (operands[3]))
2404 {
2405 /* Since we are inserting a known constant, we may be able to
2406 reduce the number of bits that we have to clear so that
2407 the mask becomes simple. */
2408 /* ??? This code does not check to see if the new mask is actually
2409 simpler. It may not be. */
2410 rtx op1 = gen_reg_rtx (SImode);
2411 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2412 start of this pattern. */
2413 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2414 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2415
2416 emit_insn (gen_andsi3 (op1, operands[0],
2417 gen_int_mode (~mask2, SImode)));
2418 emit_insn (gen_iorsi3 (subtarget, op1,
2419 gen_int_mode (op3_value << start_bit, SImode)));
2420 }
2421 else if (start_bit == 0
2422 && !(const_ok_for_arm (mask)
2423 || const_ok_for_arm (~mask)))
2424 {
2425 /* A trick: since we are setting the bottom bits in the word,
2426 we can shift operand[3] up, shift operand[0] down, OR them together
2427 and rotate the result back again.  This takes 3 insns, and
2428 the third might be mergeable into another op.  */
2429 /* The shift up copes with the possibility that operand[3] is
2430 wider than the bitfield. */
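	 /* An illustrative sketch for width == 8, start_bit == 0 (register
	    names assumed).  Once the shift-up is folded into the ORR operand,
	    the sequence is roughly:
	      lsr  rD, rDST, #8          @ operand[0] >> width
	      orr  rD, rD, rVAL, lsl #24 @ OR in operand[3] << (32 - width)
	      ror  rD, rD, #24           @ rotate left by width  */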
2431 rtx op0 = gen_reg_rtx (SImode);
2432 rtx op1 = gen_reg_rtx (SImode);
2433
2434 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2435 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2436 emit_insn (gen_iorsi3 (op1, op1, op0));
2437 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2438 }
2439 else if ((width + start_bit == 32)
2440 && !(const_ok_for_arm (mask)
2441 || const_ok_for_arm (~mask)))
2442 {
2443 /* Similar trick, but slightly less efficient. */
2444
2445 rtx op0 = gen_reg_rtx (SImode);
2446 rtx op1 = gen_reg_rtx (SImode);
2447
2448 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2449 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2450 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2451 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2452 }
2453 else
2454 {
2455 rtx op0 = gen_int_mode (mask, SImode);
2456 rtx op1 = gen_reg_rtx (SImode);
2457 rtx op2 = gen_reg_rtx (SImode);
2458
2459 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2460 {
2461 rtx tmp = gen_reg_rtx (SImode);
2462
2463 emit_insn (gen_movsi (tmp, op0));
2464 op0 = tmp;
2465 }
2466
2467 /* Mask out any bits in operand[3] that are not needed. */
2468 emit_insn (gen_andsi3 (op1, operands[3], op0));
2469
2470 if (CONST_INT_P (op0)
2471 && (const_ok_for_arm (mask << start_bit)
2472 || const_ok_for_arm (~(mask << start_bit))))
2473 {
2474 op0 = gen_int_mode (~(mask << start_bit), SImode);
2475 emit_insn (gen_andsi3 (op2, operands[0], op0));
2476 }
2477 else
2478 {
2479 if (CONST_INT_P (op0))
2480 {
2481 rtx tmp = gen_reg_rtx (SImode);
2482
2483 emit_insn (gen_movsi (tmp, op0));
2484 op0 = tmp;
2485 }
2486
2487 if (start_bit != 0)
2488 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2489
2490 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2491 }
2492
2493 if (start_bit != 0)
2494 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2495
2496 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2497 }
2498
2499 if (subtarget != target)
2500 {
2501 /* If TARGET is still a SUBREG, then it must be wider than a word,
2502 so we must be careful only to set the subword we were asked to. */
2503 if (GET_CODE (target) == SUBREG)
2504 emit_move_insn (target, subtarget);
2505 else
2506 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2507 }
2508
2509 DONE;
2510 }"
2511 )
2512
2513 (define_insn "insv_zero"
2514 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2515 (match_operand:SI 1 "const_int_M_operand" "M")
2516 (match_operand:SI 2 "const_int_M_operand" "M"))
2517 (const_int 0))]
2518 "arm_arch_thumb2"
2519 "bfc%?\t%0, %2, %1"
2520 [(set_attr "length" "4")
2521 (set_attr "predicable" "yes")
2522 (set_attr "type" "bfm")]
2523 )
2524
2525 (define_insn "insv_t2"
2526 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2527 (match_operand:SI 1 "const_int_M_operand" "M")
2528 (match_operand:SI 2 "const_int_M_operand" "M"))
2529 (match_operand:SI 3 "s_register_operand" "r"))]
2530 "arm_arch_thumb2"
2531 "bfi%?\t%0, %3, %2, %1"
2532 [(set_attr "length" "4")
2533 (set_attr "predicable" "yes")
2534 (set_attr "type" "bfm")]
2535 )
2536
2537 (define_insn "andsi_notsi_si"
2538 [(set (match_operand:SI 0 "s_register_operand" "=r")
2539 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2540 (match_operand:SI 1 "s_register_operand" "r")))]
2541 "TARGET_32BIT"
2542 "bic%?\\t%0, %1, %2"
2543 [(set_attr "predicable" "yes")
2544 (set_attr "type" "logic_reg")]
2545 )
2546
2547 (define_insn "andsi_not_shiftsi_si"
2548 [(set (match_operand:SI 0 "s_register_operand" "=r")
2549 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2550 [(match_operand:SI 2 "s_register_operand" "r")
2551 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2552 (match_operand:SI 1 "s_register_operand" "r")))]
2553 "TARGET_ARM"
2554 "bic%?\\t%0, %1, %2%S4"
2555 [(set_attr "predicable" "yes")
2556 (set_attr "shift" "2")
2557 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2558 (const_string "logic_shift_imm")
2559 (const_string "logic_shift_reg")))]
2560 )
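;; The %S4 modifier prints the shift described by operand 4, so the template
;; above produces forms such as "bic r0, r1, r2, lsl #2" (an assumed example).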
2561
2562 ;; Shifted bics pattern used to set up the CC status register without reusing
2563 ;; the bics output.  The pattern restricts the Thumb-2 shift operand to a
2564 ;; constant, since bics in Thumb-2 does not support a shift by register.
2565 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
2566 [(set (reg:CC_NOOV CC_REGNUM)
2567 (compare:CC_NOOV
2568 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2569 [(match_operand:SI 1 "s_register_operand" "r")
2570 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2571 (match_operand:SI 3 "s_register_operand" "r"))
2572 (const_int 0)))
2573 (clobber (match_scratch:SI 4 "=r"))]
2574 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2575 "bics%?\\t%4, %3, %1%S0"
2576 [(set_attr "predicable" "yes")
2577 (set_attr "conds" "set")
2578 (set_attr "shift" "1")
2579 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2580 (const_string "logic_shift_imm")
2581 (const_string "logic_shift_reg")))]
2582 )
2583
2584 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the bics result is also
2585 ;; reused later.
2586 (define_insn "andsi_not_shiftsi_si_scc"
2587 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2588 (compare:CC_NOOV
2589 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2590 [(match_operand:SI 1 "s_register_operand" "r")
2591 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2592 (match_operand:SI 3 "s_register_operand" "r"))
2593 (const_int 0)))
2594 (set (match_operand:SI 4 "s_register_operand" "=r")
2595 (and:SI (not:SI (match_op_dup 0
2596 [(match_dup 1)
2597 (match_dup 2)]))
2598 (match_dup 3)))])]
2599 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2600 "bics%?\\t%4, %3, %1%S0"
2601 [(set_attr "predicable" "yes")
2602 (set_attr "conds" "set")
2603 (set_attr "shift" "1")
2604 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2605 (const_string "logic_shift_imm")
2606 (const_string "logic_shift_reg")))]
2607 )
2608
2609 (define_insn "*andsi_notsi_si_compare0"
2610 [(set (reg:CC_NOOV CC_REGNUM)
2611 (compare:CC_NOOV
2612 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2613 (match_operand:SI 1 "s_register_operand" "r"))
2614 (const_int 0)))
2615 (set (match_operand:SI 0 "s_register_operand" "=r")
2616 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2617 "TARGET_32BIT"
2618 "bics\\t%0, %1, %2"
2619 [(set_attr "conds" "set")
2620 (set_attr "type" "logics_shift_reg")]
2621 )
2622
2623 (define_insn "*andsi_notsi_si_compare0_scratch"
2624 [(set (reg:CC_NOOV CC_REGNUM)
2625 (compare:CC_NOOV
2626 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2627 (match_operand:SI 1 "s_register_operand" "r"))
2628 (const_int 0)))
2629 (clobber (match_scratch:SI 0 "=r"))]
2630 "TARGET_32BIT"
2631 "bics\\t%0, %1, %2"
2632 [(set_attr "conds" "set")
2633 (set_attr "type" "logics_shift_reg")]
2634 )
2635
2636 (define_expand "iorsi3"
2637 [(set (match_operand:SI 0 "s_register_operand")
2638 (ior:SI (match_operand:SI 1 "s_register_operand")
2639 (match_operand:SI 2 "reg_or_int_operand")))]
2640 "TARGET_EITHER"
2641 "
2642 if (CONST_INT_P (operands[2]))
2643 {
2644 if (TARGET_32BIT)
2645 {
2646 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
2647 operands[2] = force_reg (SImode, operands[2]);
2648 else
2649 {
2650 arm_split_constant (IOR, SImode, NULL_RTX,
2651 INTVAL (operands[2]), operands[0],
2652 operands[1],
2653 optimize && can_create_pseudo_p ());
2654 DONE;
2655 }
2656 }
2657 else /* TARGET_THUMB1 */
2658 {
2659 rtx tmp = force_reg (SImode, operands[2]);
2660 if (rtx_equal_p (operands[0], operands[1]))
2661 operands[2] = tmp;
2662 else
2663 {
2664 operands[2] = operands[1];
2665 operands[1] = tmp;
2666 }
2667 }
2668 }
2669 "
2670 )
2671
2672 (define_insn_and_split "*iorsi3_insn"
2673 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2674 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2675 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2676 "TARGET_32BIT"
2677 "@
2678 orr%?\\t%0, %1, %2
2679 orr%?\\t%0, %1, %2
2680 orn%?\\t%0, %1, #%B2
2681 orr%?\\t%0, %1, %2
2682 #"
2683 "TARGET_32BIT
2684 && CONST_INT_P (operands[2])
2685 && !(const_ok_for_arm (INTVAL (operands[2]))
2686 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2687 [(clobber (const_int 0))]
2688 {
2689 arm_split_constant (IOR, SImode, curr_insn,
2690 INTVAL (operands[2]), operands[0], operands[1], 0);
2691 DONE;
2692 }
2693 [(set_attr "length" "4,4,4,4,16")
2694 (set_attr "arch" "32,t2,t2,32,32")
2695 (set_attr "predicable" "yes")
2696 (set_attr "predicable_short_it" "no,yes,no,no,no")
2697 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
2698 )
2699
2700 (define_peephole2
2701 [(match_scratch:SI 3 "r")
2702 (set (match_operand:SI 0 "arm_general_register_operand" "")
2703 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2704 (match_operand:SI 2 "const_int_operand" "")))]
2705 "TARGET_ARM
2706 && !const_ok_for_arm (INTVAL (operands[2]))
2707 && const_ok_for_arm (~INTVAL (operands[2]))"
2708 [(set (match_dup 3) (match_dup 2))
2709 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2710 ""
2711 )
2712
2713 (define_insn "*iorsi3_compare0"
2714 [(set (reg:CC_NOOV CC_REGNUM)
2715 (compare:CC_NOOV
2716 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2717 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2718 (const_int 0)))
2719 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
2720 (ior:SI (match_dup 1) (match_dup 2)))]
2721 "TARGET_32BIT"
2722 "orrs%?\\t%0, %1, %2"
2723 [(set_attr "conds" "set")
2724 (set_attr "arch" "*,t2,*")
2725 (set_attr "length" "4,2,4")
2726 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
2727 )
2728
2729 (define_insn "*iorsi3_compare0_scratch"
2730 [(set (reg:CC_NOOV CC_REGNUM)
2731 (compare:CC_NOOV
2732 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2733 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2734 (const_int 0)))
2735 (clobber (match_scratch:SI 0 "=r,l,r"))]
2736 "TARGET_32BIT"
2737 "orrs%?\\t%0, %1, %2"
2738 [(set_attr "conds" "set")
2739 (set_attr "arch" "*,t2,*")
2740 (set_attr "length" "4,2,4")
2741 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
2742 )
2743
2744 (define_expand "xorsi3"
2745 [(set (match_operand:SI 0 "s_register_operand")
2746 (xor:SI (match_operand:SI 1 "s_register_operand")
2747 (match_operand:SI 2 "reg_or_int_operand")))]
2748 "TARGET_EITHER"
2749 "if (CONST_INT_P (operands[2]))
2750 {
2751 if (TARGET_32BIT)
2752 {
2753 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
2754 operands[2] = force_reg (SImode, operands[2]);
2755 else
2756 {
2757 arm_split_constant (XOR, SImode, NULL_RTX,
2758 INTVAL (operands[2]), operands[0],
2759 operands[1],
2760 optimize && can_create_pseudo_p ());
2761 DONE;
2762 }
2763 }
2764 else /* TARGET_THUMB1 */
2765 {
2766 rtx tmp = force_reg (SImode, operands[2]);
2767 if (rtx_equal_p (operands[0], operands[1]))
2768 operands[2] = tmp;
2769 else
2770 {
2771 operands[2] = operands[1];
2772 operands[1] = tmp;
2773 }
2774 }
2775 }"
2776 )
2777
2778 (define_insn_and_split "*arm_xorsi3"
2779 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
2780 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
2781 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
2782 "TARGET_32BIT"
2783 "@
2784 eor%?\\t%0, %1, %2
2785 eor%?\\t%0, %1, %2
2786 eor%?\\t%0, %1, %2
2787 #"
2788 "TARGET_32BIT
2789 && CONST_INT_P (operands[2])
2790 && !const_ok_for_arm (INTVAL (operands[2]))"
2791 [(clobber (const_int 0))]
2792 {
2793 arm_split_constant (XOR, SImode, curr_insn,
2794 INTVAL (operands[2]), operands[0], operands[1], 0);
2795 DONE;
2796 }
2797 [(set_attr "length" "4,4,4,16")
2798 (set_attr "predicable" "yes")
2799 (set_attr "predicable_short_it" "no,yes,no,no")
2800 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
2801 )
2802
2803 (define_insn "*xorsi3_compare0"
2804 [(set (reg:CC_NOOV CC_REGNUM)
2805 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
2806 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
2807 (const_int 0)))
2808 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2809 (xor:SI (match_dup 1) (match_dup 2)))]
2810 "TARGET_32BIT"
2811 "eors%?\\t%0, %1, %2"
2812 [(set_attr "conds" "set")
2813 (set_attr "type" "logics_imm,logics_reg")]
2814 )
2815
2816 (define_insn "*xorsi3_compare0_scratch"
2817 [(set (reg:CC_NOOV CC_REGNUM)
2818 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
2819 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
2820 (const_int 0)))]
2821 "TARGET_32BIT"
2822 "teq%?\\t%0, %1"
2823 [(set_attr "conds" "set")
2824 (set_attr "type" "logics_imm,logics_reg")]
2825 )
2826
2827 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2828 ; result = (NOT D), we can sometimes merge the final NOT into one of the
2829 ; following insns.
2830
2831 (define_split
2832 [(set (match_operand:SI 0 "s_register_operand" "")
2833 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2834 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2835 (match_operand:SI 3 "arm_rhs_operand" "")))
2836 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2837 "TARGET_32BIT"
2838 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2839 (not:SI (match_dup 3))))
2840 (set (match_dup 0) (not:SI (match_dup 4)))]
2841 ""
2842 )
2843
2844 (define_insn_and_split "*andsi_iorsi3_notsi"
2845 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2846 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
2847 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2848 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2849 "TARGET_32BIT"
2850 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2851 "&& reload_completed"
2852 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
2853 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
2854 {
2855 /* If operands[3] is a constant make sure to fold the NOT into it
2856 to avoid creating a NOT of a CONST_INT. */
2857 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
2858 if (CONST_INT_P (not_rtx))
2859 {
2860 operands[4] = operands[0];
2861 operands[5] = not_rtx;
2862 }
2863 else
2864 {
2865 operands[5] = operands[0];
2866 operands[4] = not_rtx;
2867 }
2868 }
2869 [(set_attr "length" "8")
2870 (set_attr "ce_count" "2")
2871 (set_attr "predicable" "yes")
2872 (set_attr "type" "multiple")]
2873 )
2874
2875 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2876 ; insns are available?
2877 (define_split
2878 [(set (match_operand:SI 0 "s_register_operand" "")
2879 (match_operator:SI 1 "logical_binary_operator"
2880 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2881 (match_operand:SI 3 "const_int_operand" "")
2882 (match_operand:SI 4 "const_int_operand" ""))
2883 (match_operator:SI 9 "logical_binary_operator"
2884 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2885 (match_operand:SI 6 "const_int_operand" ""))
2886 (match_operand:SI 7 "s_register_operand" "")])]))
2887 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2888 "TARGET_32BIT
2889 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2890 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2891 [(set (match_dup 8)
2892 (match_op_dup 1
2893 [(ashift:SI (match_dup 2) (match_dup 4))
2894 (match_dup 5)]))
2895 (set (match_dup 0)
2896 (match_op_dup 1
2897 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2898 (match_dup 7)]))]
2899 "
2900 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2901 ")
2902
2903 (define_split
2904 [(set (match_operand:SI 0 "s_register_operand" "")
2905 (match_operator:SI 1 "logical_binary_operator"
2906 [(match_operator:SI 9 "logical_binary_operator"
2907 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2908 (match_operand:SI 6 "const_int_operand" ""))
2909 (match_operand:SI 7 "s_register_operand" "")])
2910 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2911 (match_operand:SI 3 "const_int_operand" "")
2912 (match_operand:SI 4 "const_int_operand" ""))]))
2913 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2914 "TARGET_32BIT
2915 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2916 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2917 [(set (match_dup 8)
2918 (match_op_dup 1
2919 [(ashift:SI (match_dup 2) (match_dup 4))
2920 (match_dup 5)]))
2921 (set (match_dup 0)
2922 (match_op_dup 1
2923 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2924 (match_dup 7)]))]
2925 "
2926 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2927 ")
2928
2929 (define_split
2930 [(set (match_operand:SI 0 "s_register_operand" "")
2931 (match_operator:SI 1 "logical_binary_operator"
2932 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2933 (match_operand:SI 3 "const_int_operand" "")
2934 (match_operand:SI 4 "const_int_operand" ""))
2935 (match_operator:SI 9 "logical_binary_operator"
2936 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2937 (match_operand:SI 6 "const_int_operand" ""))
2938 (match_operand:SI 7 "s_register_operand" "")])]))
2939 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2940 "TARGET_32BIT
2941 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2942 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2943 [(set (match_dup 8)
2944 (match_op_dup 1
2945 [(ashift:SI (match_dup 2) (match_dup 4))
2946 (match_dup 5)]))
2947 (set (match_dup 0)
2948 (match_op_dup 1
2949 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2950 (match_dup 7)]))]
2951 "
2952 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2953 ")
2954
2955 (define_split
2956 [(set (match_operand:SI 0 "s_register_operand" "")
2957 (match_operator:SI 1 "logical_binary_operator"
2958 [(match_operator:SI 9 "logical_binary_operator"
2959 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2960 (match_operand:SI 6 "const_int_operand" ""))
2961 (match_operand:SI 7 "s_register_operand" "")])
2962 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2963 (match_operand:SI 3 "const_int_operand" "")
2964 (match_operand:SI 4 "const_int_operand" ""))]))
2965 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2966 "TARGET_32BIT
2967 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2968 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2969 [(set (match_dup 8)
2970 (match_op_dup 1
2971 [(ashift:SI (match_dup 2) (match_dup 4))
2972 (match_dup 5)]))
2973 (set (match_dup 0)
2974 (match_op_dup 1
2975 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2976 (match_dup 7)]))]
2977 "
2978 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2979 ")
2980 \f
2981
2982 ;; Minimum and maximum insns
2983
2984 (define_expand "smaxsi3"
2985 [(parallel [
2986 (set (match_operand:SI 0 "s_register_operand")
2987 (smax:SI (match_operand:SI 1 "s_register_operand")
2988 (match_operand:SI 2 "arm_rhs_operand")))
2989 (clobber (reg:CC CC_REGNUM))])]
2990 "TARGET_32BIT"
2991 "
2992 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2993 {
2994 /* No need for a clobber of the condition code register here. */
2995 emit_insn (gen_rtx_SET (operands[0],
2996 gen_rtx_SMAX (SImode, operands[1],
2997 operands[2])));
2998 DONE;
2999 }
3000 ")
3001
3002 (define_insn "*smax_0"
3003 [(set (match_operand:SI 0 "s_register_operand" "=r")
3004 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3005 (const_int 0)))]
3006 "TARGET_32BIT"
3007 "bic%?\\t%0, %1, %1, asr #31"
3008 [(set_attr "predicable" "yes")
3009 (set_attr "type" "logic_shift_reg")]
3010 )
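;; The asr #31 trick: %1 asr #31 is all-ones when %1 is negative and zero
;; otherwise, so the BIC above gives 0 for negative inputs and %1 unchanged
;; for non-negative ones, i.e. smax (%1, 0).  The ORR and AND forms below use
;; the same sign mask for smax (%1, -1) and smin (%1, 0).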
3011
3012 (define_insn "*smax_m1"
3013 [(set (match_operand:SI 0 "s_register_operand" "=r")
3014 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3015 (const_int -1)))]
3016 "TARGET_32BIT"
3017 "orr%?\\t%0, %1, %1, asr #31"
3018 [(set_attr "predicable" "yes")
3019 (set_attr "type" "logic_shift_reg")]
3020 )
3021
3022 (define_insn_and_split "*arm_smax_insn"
3023 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3024 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3025 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3026 (clobber (reg:CC CC_REGNUM))]
3027 "TARGET_ARM"
3028 "#"
3029 ; cmp\\t%1, %2\;movlt\\t%0, %2
3030 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3031 "TARGET_ARM"
3032 [(set (reg:CC CC_REGNUM)
3033 (compare:CC (match_dup 1) (match_dup 2)))
3034 (set (match_dup 0)
3035 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
3036 (match_dup 1)
3037 (match_dup 2)))]
3038 ""
3039 [(set_attr "conds" "clob")
3040 (set_attr "length" "8,12")
3041 (set_attr "type" "multiple")]
3042 )
3043
3044 (define_expand "sminsi3"
3045 [(parallel [
3046 (set (match_operand:SI 0 "s_register_operand")
3047 (smin:SI (match_operand:SI 1 "s_register_operand")
3048 (match_operand:SI 2 "arm_rhs_operand")))
3049 (clobber (reg:CC CC_REGNUM))])]
3050 "TARGET_32BIT"
3051 "
3052 if (operands[2] == const0_rtx)
3053 {
3054 /* No need for a clobber of the condition code register here. */
3055 emit_insn (gen_rtx_SET (operands[0],
3056 gen_rtx_SMIN (SImode, operands[1],
3057 operands[2])));
3058 DONE;
3059 }
3060 ")
3061
3062 (define_insn "*smin_0"
3063 [(set (match_operand:SI 0 "s_register_operand" "=r")
3064 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3065 (const_int 0)))]
3066 "TARGET_32BIT"
3067 "and%?\\t%0, %1, %1, asr #31"
3068 [(set_attr "predicable" "yes")
3069 (set_attr "type" "logic_shift_reg")]
3070 )
3071
3072 (define_insn_and_split "*arm_smin_insn"
3073 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3074 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3075 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3076 (clobber (reg:CC CC_REGNUM))]
3077 "TARGET_ARM"
3078 "#"
3079 ; cmp\\t%1, %2\;movge\\t%0, %2
3080 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3081 "TARGET_ARM"
3082 [(set (reg:CC CC_REGNUM)
3083 (compare:CC (match_dup 1) (match_dup 2)))
3084 (set (match_dup 0)
3085 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
3086 (match_dup 1)
3087 (match_dup 2)))]
3088 ""
3089 [(set_attr "conds" "clob")
3090 (set_attr "length" "8,12")
3091 (set_attr "type" "multiple,multiple")]
3092 )
3093
3094 (define_expand "umaxsi3"
3095 [(parallel [
3096 (set (match_operand:SI 0 "s_register_operand")
3097 (umax:SI (match_operand:SI 1 "s_register_operand")
3098 (match_operand:SI 2 "arm_rhs_operand")))
3099 (clobber (reg:CC CC_REGNUM))])]
3100 "TARGET_32BIT"
3101 ""
3102 )
3103
3104 (define_insn_and_split "*arm_umaxsi3"
3105 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3106 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3107 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3108 (clobber (reg:CC CC_REGNUM))]
3109 "TARGET_ARM"
3110 "#"
3111 ; cmp\\t%1, %2\;movcc\\t%0, %2
3112 ; cmp\\t%1, %2\;movcs\\t%0, %1
3113 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3114 "TARGET_ARM"
3115 [(set (reg:CC CC_REGNUM)
3116 (compare:CC (match_dup 1) (match_dup 2)))
3117 (set (match_dup 0)
3118 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
3119 (match_dup 1)
3120 (match_dup 2)))]
3121 ""
3122 [(set_attr "conds" "clob")
3123 (set_attr "length" "8,8,12")
3124 (set_attr "type" "store_4")]
3125 )
3126
3127 (define_expand "uminsi3"
3128 [(parallel [
3129 (set (match_operand:SI 0 "s_register_operand")
3130 (umin:SI (match_operand:SI 1 "s_register_operand")
3131 (match_operand:SI 2 "arm_rhs_operand")))
3132 (clobber (reg:CC CC_REGNUM))])]
3133 "TARGET_32BIT"
3134 ""
3135 )
3136
3137 (define_insn_and_split "*arm_uminsi3"
3138 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3139 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3140 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3141 (clobber (reg:CC CC_REGNUM))]
3142 "TARGET_ARM"
3143 "#"
3144 ; cmp\\t%1, %2\;movcs\\t%0, %2
3145 ; cmp\\t%1, %2\;movcc\\t%0, %1
3146 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3147 "TARGET_ARM"
3148 [(set (reg:CC CC_REGNUM)
3149 (compare:CC (match_dup 1) (match_dup 2)))
3150 (set (match_dup 0)
3151 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
3152 (match_dup 1)
3153 (match_dup 2)))]
3154 ""
3155 [(set_attr "conds" "clob")
3156 (set_attr "length" "8,8,12")
3157 (set_attr "type" "store_4")]
3158 )
3159
3160 (define_insn "*store_minmaxsi"
3161 [(set (match_operand:SI 0 "memory_operand" "=m")
3162 (match_operator:SI 3 "minmax_operator"
3163 [(match_operand:SI 1 "s_register_operand" "r")
3164 (match_operand:SI 2 "s_register_operand" "r")]))
3165 (clobber (reg:CC CC_REGNUM))]
3166 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
3167 "*
3168 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3169 operands[1], operands[2]);
3170 output_asm_insn (\"cmp\\t%1, %2\", operands);
3171 if (TARGET_THUMB2)
3172 output_asm_insn (\"ite\t%d3\", operands);
3173 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3174 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3175 return \"\";
3176 "
3177 [(set_attr "conds" "clob")
3178 (set (attr "length")
3179 (if_then_else (eq_attr "is_thumb" "yes")
3180 (const_int 14)
3181 (const_int 12)))
3182 (set_attr "type" "store_4")]
3183 )
3184
3185 ; Reject the frame pointer in operand[1], since reloading this after
3186 ; it has been eliminated can cause carnage.
3187 (define_insn "*minmax_arithsi"
3188 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3189 (match_operator:SI 4 "shiftable_operator"
3190 [(match_operator:SI 5 "minmax_operator"
3191 [(match_operand:SI 2 "s_register_operand" "r,r")
3192 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3193 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3194 (clobber (reg:CC CC_REGNUM))]
3195 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
3196 "*
3197 {
3198 enum rtx_code code = GET_CODE (operands[4]);
3199 bool need_else;
3200
3201 if (which_alternative != 0 || operands[3] != const0_rtx
3202 || (code != PLUS && code != IOR && code != XOR))
3203 need_else = true;
3204 else
3205 need_else = false;
3206
3207 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3208 operands[2], operands[3]);
3209 output_asm_insn (\"cmp\\t%2, %3\", operands);
3210 if (TARGET_THUMB2)
3211 {
3212 if (need_else)
3213 output_asm_insn (\"ite\\t%d5\", operands);
3214 else
3215 output_asm_insn (\"it\\t%d5\", operands);
3216 }
3217 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3218 if (need_else)
3219 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3220 return \"\";
3221 }"
3222 [(set_attr "conds" "clob")
3223 (set (attr "length")
3224 (if_then_else (eq_attr "is_thumb" "yes")
3225 (const_int 14)
3226 (const_int 12)))
3227 (set_attr "type" "multiple")]
3228 )
3229
3230 ; Reject the frame pointer in operand[1], since reloading this after
3231 ; it has been eliminated can cause carnage.
3232 (define_insn_and_split "*minmax_arithsi_non_canon"
3233 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
3234 (minus:SI
3235 (match_operand:SI 1 "s_register_operand" "0,?Ts")
3236 (match_operator:SI 4 "minmax_operator"
3237 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
3238 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
3239 (clobber (reg:CC CC_REGNUM))]
3240 "TARGET_32BIT && !arm_eliminable_register (operands[1])
3241 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
3242 "#"
3243 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
3244 [(set (reg:CC CC_REGNUM)
3245 (compare:CC (match_dup 2) (match_dup 3)))
3246
3247 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
3248 (set (match_dup 0)
3249 (minus:SI (match_dup 1)
3250 (match_dup 2))))
3251 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
3252 (set (match_dup 0)
3253 (match_dup 6)))]
3254 {
3255 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
3256 operands[2], operands[3]);
3257 enum rtx_code rc = minmax_code (operands[4]);
3258 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
3259 operands[2], operands[3]);
3260
3261 if (mode == CCFPmode || mode == CCFPEmode)
3262 rc = reverse_condition_maybe_unordered (rc);
3263 else
3264 rc = reverse_condition (rc);
3265 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
3266 if (CONST_INT_P (operands[3]))
3267 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
3268 else
3269 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
3270 }
3271 [(set_attr "conds" "clob")
3272 (set (attr "length")
3273 (if_then_else (eq_attr "is_thumb" "yes")
3274 (const_int 14)
3275 (const_int 12)))
3276 (set_attr "type" "multiple")]
3277 )
3278
3279 (define_code_iterator SAT [smin smax])
3280 (define_code_attr SATrev [(smin "smax") (smax "smin")])
3281 (define_code_attr SATlo [(smin "1") (smax "2")])
3282 (define_code_attr SAThi [(smin "2") (smax "1")])
3283
3284 (define_insn "*satsi_<SAT:code>"
3285 [(set (match_operand:SI 0 "s_register_operand" "=r")
3286 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
3287 (match_operand:SI 1 "const_int_operand" "i"))
3288 (match_operand:SI 2 "const_int_operand" "i")))]
3289 "TARGET_32BIT && arm_arch6
3290 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3291 {
3292 int mask;
3293 bool signed_sat;
3294 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3295 &mask, &signed_sat))
3296 gcc_unreachable ();
3297
3298 operands[1] = GEN_INT (mask);
3299 if (signed_sat)
3300 return "ssat%?\t%0, %1, %3";
3301 else
3302 return "usat%?\t%0, %1, %3";
3303 }
3304 [(set_attr "predicable" "yes")
3305 (set_attr "type" "alus_imm")]
3306 )
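;; For example, a nested smin/smax that clamps a value to the signed range
;; -128..127 emits "ssat r0, #8, r1", and a clamp to 0..255 emits
;; "usat r0, #8, r1" (registers assumed).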
3307
3308 (define_insn "*satsi_<SAT:code>_shift"
3309 [(set (match_operand:SI 0 "s_register_operand" "=r")
3310 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
3311 [(match_operand:SI 4 "s_register_operand" "r")
3312 (match_operand:SI 5 "const_int_operand" "i")])
3313 (match_operand:SI 1 "const_int_operand" "i"))
3314 (match_operand:SI 2 "const_int_operand" "i")))]
3315 "TARGET_32BIT && arm_arch6
3316 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3317 {
3318 int mask;
3319 bool signed_sat;
3320 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3321 &mask, &signed_sat))
3322 gcc_unreachable ();
3323
3324 operands[1] = GEN_INT (mask);
3325 if (signed_sat)
3326 return "ssat%?\t%0, %1, %4%S3";
3327 else
3328 return "usat%?\t%0, %1, %4%S3";
3329 }
3330 [(set_attr "predicable" "yes")
3331 (set_attr "shift" "3")
3332 (set_attr "type" "logic_shift_reg")])
3333 \f
3334 ;; Shift and rotation insns
3335
3336 (define_expand "ashldi3"
3337 [(set (match_operand:DI 0 "s_register_operand")
3338 (ashift:DI (match_operand:DI 1 "s_register_operand")
3339 (match_operand:SI 2 "reg_or_int_operand")))]
3340 "TARGET_32BIT"
3341 "
3342 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
3343 operands[2], gen_reg_rtx (SImode),
3344 gen_reg_rtx (SImode));
3345 DONE;
3346 ")
3347
3348 (define_expand "ashlsi3"
3349 [(set (match_operand:SI 0 "s_register_operand")
3350 (ashift:SI (match_operand:SI 1 "s_register_operand")
3351 (match_operand:SI 2 "arm_rhs_operand")))]
3352 "TARGET_EITHER"
3353 "
3354 if (CONST_INT_P (operands[2])
3355 && (UINTVAL (operands[2])) > 31)
3356 {
3357 emit_insn (gen_movsi (operands[0], const0_rtx));
3358 DONE;
3359 }
3360 "
3361 )
3362
3363 (define_expand "ashrdi3"
3364 [(set (match_operand:DI 0 "s_register_operand")
3365 (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
3366 (match_operand:SI 2 "reg_or_int_operand")))]
3367 "TARGET_32BIT"
3368 "
3369 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
3370 operands[2], gen_reg_rtx (SImode),
3371 gen_reg_rtx (SImode));
3372 DONE;
3373 ")
3374
3375 (define_expand "ashrsi3"
3376 [(set (match_operand:SI 0 "s_register_operand")
3377 (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
3378 (match_operand:SI 2 "arm_rhs_operand")))]
3379 "TARGET_EITHER"
3380 "
3381 if (CONST_INT_P (operands[2])
3382 && UINTVAL (operands[2]) > 31)
3383 operands[2] = GEN_INT (31);
3384 "
3385 )
3386
3387 (define_expand "lshrdi3"
3388 [(set (match_operand:DI 0 "s_register_operand")
3389 (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
3390 (match_operand:SI 2 "reg_or_int_operand")))]
3391 "TARGET_32BIT"
3392 "
3393 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
3394 operands[2], gen_reg_rtx (SImode),
3395 gen_reg_rtx (SImode));
3396 DONE;
3397 ")
3398
3399 (define_expand "lshrsi3"
3400 [(set (match_operand:SI 0 "s_register_operand")
3401 (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
3402 (match_operand:SI 2 "arm_rhs_operand")))]
3403 "TARGET_EITHER"
3404 "
3405 if (CONST_INT_P (operands[2])
3406 && (UINTVAL (operands[2])) > 31)
3407 {
3408 emit_insn (gen_movsi (operands[0], const0_rtx));
3409 DONE;
3410 }
3411 "
3412 )
3413
3414 (define_expand "rotlsi3"
3415 [(set (match_operand:SI 0 "s_register_operand")
3416 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3417 (match_operand:SI 2 "reg_or_int_operand")))]
3418 "TARGET_32BIT"
3419 "
3420 if (CONST_INT_P (operands[2]))
3421 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3422 else
3423 {
3424 rtx reg = gen_reg_rtx (SImode);
3425 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3426 operands[2] = reg;
3427 }
3428 "
3429 )
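;; Rotate left has no direct instruction, so it is synthesised from rotate
;; right using rotl (x, n) == rotr (x, (32 - n) % 32); for a variable count
;; the expander above computes 32 - n into a scratch register first.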
3430
3431 (define_expand "rotrsi3"
3432 [(set (match_operand:SI 0 "s_register_operand")
3433 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3434 (match_operand:SI 2 "arm_rhs_operand")))]
3435 "TARGET_EITHER"
3436 "
3437 if (TARGET_32BIT)
3438 {
3439 if (CONST_INT_P (operands[2])
3440 && UINTVAL (operands[2]) > 31)
3441 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3442 }
3443 else /* TARGET_THUMB1 */
3444 {
3445 if (CONST_INT_P (operands[2]))
3446 operands[2] = force_reg (SImode, operands[2]);
3447 }
3448 "
3449 )
3450
3451 (define_insn "*arm_shiftsi3"
3452 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
3453 (match_operator:SI 3 "shift_operator"
3454 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
3455 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
3456 "TARGET_32BIT"
3457 "* return arm_output_shift(operands, 0);"
3458 [(set_attr "predicable" "yes")
3459 (set_attr "arch" "t2,t2,*,*")
3460 (set_attr "predicable_short_it" "yes,yes,no,no")
3461 (set_attr "length" "4")
3462 (set_attr "shift" "1")
3463 (set_attr "type" "alu_shift_reg,alu_shift_imm,alu_shift_imm,alu_shift_reg")]
3464 )
3465
3466 (define_insn "*shiftsi3_compare0"
3467 [(set (reg:CC_NOOV CC_REGNUM)
3468 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3469 [(match_operand:SI 1 "s_register_operand" "r,r")
3470 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3471 (const_int 0)))
3472 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3473 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3474 "TARGET_32BIT"
3475 "* return arm_output_shift(operands, 1);"
3476 [(set_attr "conds" "set")
3477 (set_attr "shift" "1")
3478 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
3479 )
3480
3481 (define_insn "*shiftsi3_compare0_scratch"
3482 [(set (reg:CC_NOOV CC_REGNUM)
3483 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3484 [(match_operand:SI 1 "s_register_operand" "r,r")
3485 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3486 (const_int 0)))
3487 (clobber (match_scratch:SI 0 "=r,r"))]
3488 "TARGET_32BIT"
3489 "* return arm_output_shift(operands, 1);"
3490 [(set_attr "conds" "set")
3491 (set_attr "shift" "1")
3492 (set_attr "type" "shift_imm,shift_reg")]
3493 )
3494
3495 (define_insn "*not_shiftsi"
3496 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3497 (not:SI (match_operator:SI 3 "shift_operator"
3498 [(match_operand:SI 1 "s_register_operand" "r,r")
3499 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3500 "TARGET_32BIT"
3501 "mvn%?\\t%0, %1%S3"
3502 [(set_attr "predicable" "yes")
3503 (set_attr "shift" "1")
3504 (set_attr "arch" "32,a")
3505 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3506
3507 (define_insn "*not_shiftsi_compare0"
3508 [(set (reg:CC_NOOV CC_REGNUM)
3509 (compare:CC_NOOV
3510 (not:SI (match_operator:SI 3 "shift_operator"
3511 [(match_operand:SI 1 "s_register_operand" "r,r")
3512 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3513 (const_int 0)))
3514 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3515 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3516 "TARGET_32BIT"
3517 "mvns%?\\t%0, %1%S3"
3518 [(set_attr "conds" "set")
3519 (set_attr "shift" "1")
3520 (set_attr "arch" "32,a")
3521 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3522
3523 (define_insn "*not_shiftsi_compare0_scratch"
3524 [(set (reg:CC_NOOV CC_REGNUM)
3525 (compare:CC_NOOV
3526 (not:SI (match_operator:SI 3 "shift_operator"
3527 [(match_operand:SI 1 "s_register_operand" "r,r")
3528 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3529 (const_int 0)))
3530 (clobber (match_scratch:SI 0 "=r,r"))]
3531 "TARGET_32BIT"
3532 "mvns%?\\t%0, %1%S3"
3533 [(set_attr "conds" "set")
3534 (set_attr "shift" "1")
3535 (set_attr "arch" "32,a")
3536 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3537
3538 ;; We don't really have extzv, but defining this using shifts helps
3539 ;; to reduce register pressure later on.
3540
3541 (define_expand "extzv"
3542 [(set (match_operand 0 "s_register_operand")
3543 (zero_extract (match_operand 1 "nonimmediate_operand")
3544 (match_operand 2 "const_int_operand")
3545 (match_operand 3 "const_int_operand")))]
3546 "TARGET_THUMB1 || arm_arch_thumb2"
3547 "
3548 {
3549 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3550 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3551
3552 if (arm_arch_thumb2)
3553 {
3554 HOST_WIDE_INT width = INTVAL (operands[2]);
3555 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3556
3557 if (unaligned_access && MEM_P (operands[1])
3558 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
3559 {
3560 rtx base_addr;
3561
3562 if (BYTES_BIG_ENDIAN)
3563 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
3564 - bitpos;
3565
3566 if (width == 32)
3567 {
3568 base_addr = adjust_address (operands[1], SImode,
3569 bitpos / BITS_PER_UNIT);
3570 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3571 }
3572 else
3573 {
3574 rtx dest = operands[0];
3575 rtx tmp = gen_reg_rtx (SImode);
3576
3577 /* We may get a paradoxical subreg here. Strip it off. */
3578 if (GET_CODE (dest) == SUBREG
3579 && GET_MODE (dest) == SImode
3580 && GET_MODE (SUBREG_REG (dest)) == HImode)
3581 dest = SUBREG_REG (dest);
3582
3583 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3584 FAIL;
3585
3586 base_addr = adjust_address (operands[1], HImode,
3587 bitpos / BITS_PER_UNIT);
3588 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
3589 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3590 }
3591 DONE;
3592 }
3593 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
3594 {
3595 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3596 operands[3]));
3597 DONE;
3598 }
3599 else
3600 FAIL;
3601 }
3602
3603 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3604 FAIL;
3605
3606 operands[3] = GEN_INT (rshift);
3607
3608 if (lshift == 0)
3609 {
3610 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3611 DONE;
3612 }
3613
3614 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
3615 operands[3], gen_reg_rtx (SImode)));
3616 DONE;
3617 }"
3618 )
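;; Schematically, the shift-based fall-back above extracts a <width>-bit
;; field starting at bit <pos> as
;;   (x << (32 - width - pos)) >> (32 - width)   /* logical right shift */
;; which is where the lshift and rshift values come from.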
3619
3620 ;; Helper for extzv, for the Thumb-1 register-shifts case.
3621
3622 (define_expand "extzv_t1"
3623 [(set (match_operand:SI 4 "s_register_operand")
3624 (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
3625 (match_operand:SI 2 "const_int_operand")))
3626 (set (match_operand:SI 0 "s_register_operand")
3627 (lshiftrt:SI (match_dup 4)
3628 (match_operand:SI 3 "const_int_operand")))]
3629 "TARGET_THUMB1"
3630 "")
3631
3632 (define_expand "extv"
3633 [(set (match_operand 0 "s_register_operand")
3634 (sign_extract (match_operand 1 "nonimmediate_operand")
3635 (match_operand 2 "const_int_operand")
3636 (match_operand 3 "const_int_operand")))]
3637 "arm_arch_thumb2"
3638 {
3639 HOST_WIDE_INT width = INTVAL (operands[2]);
3640 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3641
3642 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
3643 && (bitpos % BITS_PER_UNIT) == 0)
3644 {
3645 rtx base_addr;
3646
3647 if (BYTES_BIG_ENDIAN)
3648 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
3649
3650 if (width == 32)
3651 {
3652 base_addr = adjust_address (operands[1], SImode,
3653 bitpos / BITS_PER_UNIT);
3654 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3655 }
3656 else
3657 {
3658 rtx dest = operands[0];
3659 rtx tmp = gen_reg_rtx (SImode);
3660
3661 /* We may get a paradoxical subreg here. Strip it off. */
3662 if (GET_CODE (dest) == SUBREG
3663 && GET_MODE (dest) == SImode
3664 && GET_MODE (SUBREG_REG (dest)) == HImode)
3665 dest = SUBREG_REG (dest);
3666
3667 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3668 FAIL;
3669
3670 base_addr = adjust_address (operands[1], HImode,
3671 bitpos / BITS_PER_UNIT);
3672 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
3673 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3674 }
3675
3676 DONE;
3677 }
3678 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3679 FAIL;
3680 else if (GET_MODE (operands[0]) == SImode
3681 && GET_MODE (operands[1]) == SImode)
3682 {
3683 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
3684 operands[3]));
3685 DONE;
3686 }
3687
3688 FAIL;
3689 })
3690
3691 ; Helper to expand register forms of extv with the proper modes.
3692
3693 (define_expand "extv_regsi"
3694 [(set (match_operand:SI 0 "s_register_operand")
3695 (sign_extract:SI (match_operand:SI 1 "s_register_operand")
3696 (match_operand 2 "const_int_operand")
3697 (match_operand 3 "const_int_operand")))]
3698 ""
3699 {
3700 })
3701
3702 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
3703
3704 (define_insn "unaligned_loaddi"
3705 [(set (match_operand:DI 0 "s_register_operand" "=r")
3706 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
3707 UNSPEC_UNALIGNED_LOAD))]
3708 "TARGET_32BIT && TARGET_LDRD"
3709 "*
3710 return output_move_double (operands, true, NULL);
3711 "
3712 [(set_attr "length" "8")
3713 (set_attr "type" "load_8")])
3714
3715 (define_insn "unaligned_loadsi"
3716 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
3717 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
3718 UNSPEC_UNALIGNED_LOAD))]
3719 "unaligned_access"
3720 "@
3721 ldr\t%0, %1\t@ unaligned
3722 ldr%?\t%0, %1\t@ unaligned
3723 ldr%?\t%0, %1\t@ unaligned"
3724 [(set_attr "arch" "t1,t2,32")
3725 (set_attr "length" "2,2,4")
3726 (set_attr "predicable" "no,yes,yes")
3727 (set_attr "predicable_short_it" "no,yes,no")
3728 (set_attr "type" "load_4")])
3729
3730 ;; The 16-bit Thumb-1 variant of ldrsh requires two registers in the
3731 ;; address (there's no immediate format). That's tricky to support
3732 ;; here, and we don't really need this pattern for that case, so we
3733 ;; only enable it for 32-bit ISAs.
3734 (define_insn "unaligned_loadhis"
3735 [(set (match_operand:SI 0 "s_register_operand" "=r")
3736 (sign_extend:SI
3737 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
3738 UNSPEC_UNALIGNED_LOAD)))]
3739 "unaligned_access && TARGET_32BIT"
3740 "ldrsh%?\t%0, %1\t@ unaligned"
3741 [(set_attr "predicable" "yes")
3742 (set_attr "type" "load_byte")])
3743
3744 (define_insn "unaligned_loadhiu"
3745 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
3746 (zero_extend:SI
3747 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
3748 UNSPEC_UNALIGNED_LOAD)))]
3749 "unaligned_access"
3750 "@
3751 ldrh\t%0, %1\t@ unaligned
3752 ldrh%?\t%0, %1\t@ unaligned
3753 ldrh%?\t%0, %1\t@ unaligned"
3754 [(set_attr "arch" "t1,t2,32")
3755 (set_attr "length" "2,2,4")
3756 (set_attr "predicable" "no,yes,yes")
3757 (set_attr "predicable_short_it" "no,yes,no")
3758 (set_attr "type" "load_byte")])
3759
3760 (define_insn "unaligned_storedi"
3761 [(set (match_operand:DI 0 "memory_operand" "=m")
3762 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
3763 UNSPEC_UNALIGNED_STORE))]
3764 "TARGET_32BIT && TARGET_LDRD"
3765 "*
3766 return output_move_double (operands, true, NULL);
3767 "
3768 [(set_attr "length" "8")
3769 (set_attr "type" "store_8")])
3770
3771 (define_insn "unaligned_storesi"
3772 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
3773 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
3774 UNSPEC_UNALIGNED_STORE))]
3775 "unaligned_access"
3776 "@
3777 str\t%1, %0\t@ unaligned
3778 str%?\t%1, %0\t@ unaligned
3779 str%?\t%1, %0\t@ unaligned"
3780 [(set_attr "arch" "t1,t2,32")
3781 (set_attr "length" "2,2,4")
3782 (set_attr "predicable" "no,yes,yes")
3783 (set_attr "predicable_short_it" "no,yes,no")
3784 (set_attr "type" "store_4")])
3785
3786 (define_insn "unaligned_storehi"
3787 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
3788 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
3789 UNSPEC_UNALIGNED_STORE))]
3790 "unaligned_access"
3791 "@
3792 strh\t%1, %0\t@ unaligned
3793 strh%?\t%1, %0\t@ unaligned
3794 strh%?\t%1, %0\t@ unaligned"
3795 [(set_attr "arch" "t1,t2,32")
3796 (set_attr "length" "2,2,4")
3797 (set_attr "predicable" "no,yes,yes")
3798 (set_attr "predicable_short_it" "no,yes,no")
3799 (set_attr "type" "store_4")])
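;; These unaligned load/store patterns are what, for instance, an access to a
;; packed structure member can expand to when unaligned_access is set:
;;   struct __attribute__((packed)) s { char c; int x; };
;;   int get (struct s *p) { return p->x; }   /* single unaligned ldr */
;; rather than assembling the word byte by byte.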
3800
3801
3802 (define_insn "*extv_reg"
3803 [(set (match_operand:SI 0 "s_register_operand" "=r")
3804 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3805 (match_operand:SI 2 "const_int_operand" "n")
3806 (match_operand:SI 3 "const_int_operand" "n")))]
3807 "arm_arch_thumb2
3808 && IN_RANGE (INTVAL (operands[3]), 0, 31)
3809 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
3810 "sbfx%?\t%0, %1, %3, %2"
3811 [(set_attr "length" "4")
3812 (set_attr "predicable" "yes")
3813 (set_attr "type" "bfm")]
3814 )
3815
3816 (define_insn "extzv_t2"
3817 [(set (match_operand:SI 0 "s_register_operand" "=r")
3818 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3819 (match_operand:SI 2 "const_int_operand" "n")
3820 (match_operand:SI 3 "const_int_operand" "n")))]
3821 "arm_arch_thumb2
3822 && IN_RANGE (INTVAL (operands[3]), 0, 31)
3823 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
3824 "ubfx%?\t%0, %1, %3, %2"
3825 [(set_attr "length" "4")
3826 (set_attr "predicable" "yes")
3827 (set_attr "type" "bfm")]
3828 )
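;; For example, "ubfx r0, r1, #3, #5" places bits [7:3] of r1, zero-extended,
;; in r0; sbfx does the same with sign extension. Note the operand order in
;; the templates above: lsb (%3) first, then width (%2).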
3829
3830
3831 ;; Division instructions
3832 (define_insn "divsi3"
3833 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3834 (div:SI (match_operand:SI 1 "s_register_operand" "r,r")
3835 (match_operand:SI 2 "s_register_operand" "r,r")))]
3836 "TARGET_IDIV"
3837 "@
3838 sdiv%?\t%0, %1, %2
3839 sdiv\t%0, %1, %2"
3840 [(set_attr "arch" "32,v8mb")
3841 (set_attr "predicable" "yes")
3842 (set_attr "type" "sdiv")]
3843 )
3844
3845 (define_insn "udivsi3"
3846 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3847 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
3848 (match_operand:SI 2 "s_register_operand" "r,r")))]
3849 "TARGET_IDIV"
3850 "@
3851 udiv%?\t%0, %1, %2
3852 udiv\t%0, %1, %2"
3853 [(set_attr "arch" "32,v8mb")
3854 (set_attr "predicable" "yes")
3855 (set_attr "type" "udiv")]
3856 )
3857
3858 \f
3859 ;; Unary arithmetic insns
3860
3861 (define_expand "negvsi3"
3862 [(match_operand:SI 0 "register_operand")
3863 (match_operand:SI 1 "register_operand")
3864 (match_operand 2 "")]
3865 "TARGET_32BIT"
3866 {
3867 emit_insn (gen_subsi3_compare (operands[0], const0_rtx, operands[1]));
3868 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
3869
3870 DONE;
3871 })
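;; Signed negation overflows only for INT_MIN (e.g. -(-2147483648) is not
;; representable in 32 bits), so the expander above compares 0 - x while
;; setting the flags and branches on the V flag (CC_Vmode) to the label in
;; operands[2].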
3872
3873 (define_expand "negvdi3"
3874 [(match_operand:DI 0 "s_register_operand")
3875 (match_operand:DI 1 "s_register_operand")
3876 (match_operand 2 "")]
3877 "TARGET_ARM"
3878 {
3879 emit_insn (gen_negdi2_compare (operands[0], operands[1]));
3880 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
3881
3882 DONE;
3883 })
3884
3885
3886 (define_insn "negdi2_compare"
3887 [(set (reg:CC CC_REGNUM)
3888 (compare:CC
3889 (const_int 0)
3890 (match_operand:DI 1 "register_operand" "r,r")))
3891 (set (match_operand:DI 0 "register_operand" "=&r,&r")
3892 (minus:DI (const_int 0) (match_dup 1)))]
3893 "TARGET_ARM"
3894 "@
3895 rsbs\\t%Q0, %Q1, #0;rscs\\t%R0, %R1, #0
3896 rsbs\\t%Q0, %Q1, #0;sbcs\\t%R0, %R1, %R1, lsl #1"
3897 [(set_attr "conds" "set")
3898 (set_attr "arch" "a,t2")
3899 (set_attr "length" "8")
3900 (set_attr "type" "multiple")]
3901 )
3902
3903 (define_expand "negsi2"
3904 [(set (match_operand:SI 0 "s_register_operand")
3905 (neg:SI (match_operand:SI 1 "s_register_operand")))]
3906 "TARGET_EITHER"
3907 ""
3908 )
3909
3910 (define_insn "*arm_negsi2"
3911 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
3912 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
3913 "TARGET_32BIT"
3914 "rsb%?\\t%0, %1, #0"
3915 [(set_attr "predicable" "yes")
3916 (set_attr "predicable_short_it" "yes,no")
3917 (set_attr "arch" "t2,*")
3918 (set_attr "length" "4")
3919 (set_attr "type" "alu_imm")]
3920 )
3921
3922 ;; To keep the comparison in canonical form, we express it as (~reg cmp ~0)
3923 ;; rather than (0 cmp reg). This gives the same results for unsigned
3924 ;; and equality compares, which is what we mostly need here.
3925 (define_insn "negsi2_0compare"
3926 [(set (reg:CC_RSB CC_REGNUM)
3927 (compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r"))
3928 (const_int -1)))
3929 (set (match_operand:SI 0 "s_register_operand" "=l,r")
3930 (neg:SI (match_dup 1)))]
3931 "TARGET_32BIT"
3932 "@
3933 negs\\t%0, %1
3934 rsbs\\t%0, %1, #0"
3935 [(set_attr "conds" "set")
3936 (set_attr "arch" "t2,*")
3937 (set_attr "length" "2,*")
3938 (set_attr "type" "alus_imm")]
3939 )
3940
3941 (define_insn "negsi2_carryin"
3942 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3943 (minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r"))
3944 (match_operand:SI 2 "arm_borrow_operation" "")))]
3945 "TARGET_32BIT"
3946 "@
3947 rsc\\t%0, %1, #0
3948 sbc\\t%0, %1, %1, lsl #1"
3949 [(set_attr "conds" "use")
3950 (set_attr "arch" "a,t2")
3951 (set_attr "type" "adc_imm,adc_reg")]
3952 )
3953
3954 (define_expand "negsf2"
3955 [(set (match_operand:SF 0 "s_register_operand")
3956 (neg:SF (match_operand:SF 1 "s_register_operand")))]
3957 "TARGET_32BIT && TARGET_HARD_FLOAT"
3958 ""
3959 )
3960
3961 (define_expand "negdf2"
3962 [(set (match_operand:DF 0 "s_register_operand")
3963 (neg:DF (match_operand:DF 1 "s_register_operand")))]
3964 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
3965 "")
3966
3967 ;; abssi2 doesn't really clobber the condition codes if a different register
3968 ;; is being set. To keep things simple, assume during rtl manipulations that
3969 ;; it does, but tell the final scan operator the truth. Similarly for
3970 ;; (neg (abs...))
3971
3972 (define_expand "abssi2"
3973 [(parallel
3974 [(set (match_operand:SI 0 "s_register_operand")
3975 (abs:SI (match_operand:SI 1 "s_register_operand")))
3976 (clobber (match_dup 2))])]
3977 "TARGET_EITHER"
3978 "
3979 if (TARGET_THUMB1)
3980 operands[2] = gen_rtx_SCRATCH (SImode);
3981 else
3982 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3983 ")
3984
3985 (define_insn_and_split "*arm_abssi2"
3986 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3987 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3988 (clobber (reg:CC CC_REGNUM))]
3989 "TARGET_ARM"
3990 "#"
3991 "&& reload_completed"
3992 [(const_int 0)]
3993 {
3994 /* if (which_alternative == 0) */
3995 if (REGNO (operands[0]) == REGNO (operands[1]))
3996 {
3997 /* Emit the pattern:
3998 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3999 [(set (reg:CC CC_REGNUM)
4000 (compare:CC (match_dup 0) (const_int 0)))
4001 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
4002 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
4003 */
4004 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
4005 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4006 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4007 (gen_rtx_LT (SImode,
4008 gen_rtx_REG (CCmode, CC_REGNUM),
4009 const0_rtx)),
4010 (gen_rtx_SET (operands[0],
4011 (gen_rtx_MINUS (SImode,
4012 const0_rtx,
4013 operands[1]))))));
4014 DONE;
4015 }
4016 else
4017 {
4018 /* Emit the pattern:
4019 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
4020 [(set (match_dup 0)
4021 (xor:SI (match_dup 1)
4022 (ashiftrt:SI (match_dup 1) (const_int 31))))
4023 (set (match_dup 0)
4024 (minus:SI (match_dup 0)
4025 (ashiftrt:SI (match_dup 1) (const_int 31))))]
4026 */
4027 emit_insn (gen_rtx_SET (operands[0],
4028 gen_rtx_XOR (SImode,
4029 gen_rtx_ASHIFTRT (SImode,
4030 operands[1],
4031 GEN_INT (31)),
4032 operands[1])));
4033 emit_insn (gen_rtx_SET (operands[0],
4034 gen_rtx_MINUS (SImode,
4035 operands[0],
4036 gen_rtx_ASHIFTRT (SImode,
4037 operands[1],
4038 GEN_INT (31)))));
4039 DONE;
4040 }
4041 }
4042 [(set_attr "conds" "clob,*")
4043 (set_attr "shift" "1")
4044 (set_attr "predicable" "no, yes")
4045 (set_attr "length" "8")
4046 (set_attr "type" "multiple")]
4047 )
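;; The second alternative above is the classic branch-free absolute value:
;;   mask = x >> 31;           /* arithmetic shift: 0 or -1 */
;;   abs  = (x ^ mask) - mask;
;; which is exactly what the eor/sub pair with "asr #31" implements.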
4048
4049 (define_insn_and_split "*arm_neg_abssi2"
4050 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4051 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
4052 (clobber (reg:CC CC_REGNUM))]
4053 "TARGET_ARM"
4054 "#"
4055 "&& reload_completed"
4056 [(const_int 0)]
4057 {
4058 /* if (which_alternative == 0) */
4059 if (REGNO (operands[0]) == REGNO (operands[1]))
4060 {
4061 /* Emit the pattern:
4062 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
4063 */
4064 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
4065 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4066 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4067 gen_rtx_GT (SImode,
4068 gen_rtx_REG (CCmode, CC_REGNUM),
4069 const0_rtx),
4070 gen_rtx_SET (operands[0],
4071 (gen_rtx_MINUS (SImode,
4072 const0_rtx,
4073 operands[1])))));
4074 }
4075 else
4076 {
4077 /* Emit the pattern:
4078 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
4079 */
4080 emit_insn (gen_rtx_SET (operands[0],
4081 gen_rtx_XOR (SImode,
4082 gen_rtx_ASHIFTRT (SImode,
4083 operands[1],
4084 GEN_INT (31)),
4085 operands[1])));
4086 emit_insn (gen_rtx_SET (operands[0],
4087 gen_rtx_MINUS (SImode,
4088 gen_rtx_ASHIFTRT (SImode,
4089 operands[1],
4090 GEN_INT (31)),
4091 operands[0])));
4092 }
4093 DONE;
4094 }
4095 [(set_attr "conds" "clob,*")
4096 (set_attr "shift" "1")
4097 (set_attr "predicable" "no, yes")
4098 (set_attr "length" "8")
4099 (set_attr "type" "multiple")]
4100 )
4101
4102 (define_expand "abssf2"
4103 [(set (match_operand:SF 0 "s_register_operand")
4104 (abs:SF (match_operand:SF 1 "s_register_operand")))]
4105 "TARGET_32BIT && TARGET_HARD_FLOAT"
4106 "")
4107
4108 (define_expand "absdf2"
4109 [(set (match_operand:DF 0 "s_register_operand")
4110 (abs:DF (match_operand:DF 1 "s_register_operand")))]
4111 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4112 "")
4113
4114 (define_expand "sqrtsf2"
4115 [(set (match_operand:SF 0 "s_register_operand")
4116 (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
4117 "TARGET_32BIT && TARGET_HARD_FLOAT"
4118 "")
4119
4120 (define_expand "sqrtdf2"
4121 [(set (match_operand:DF 0 "s_register_operand")
4122 (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
4123 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4124 "")
4125
4126 (define_expand "one_cmplsi2"
4127 [(set (match_operand:SI 0 "s_register_operand")
4128 (not:SI (match_operand:SI 1 "s_register_operand")))]
4129 "TARGET_EITHER"
4130 ""
4131 )
4132
4133 (define_insn "*arm_one_cmplsi2"
4134 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4135 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4136 "TARGET_32BIT"
4137 "mvn%?\\t%0, %1"
4138 [(set_attr "predicable" "yes")
4139 (set_attr "predicable_short_it" "yes,no")
4140 (set_attr "arch" "t2,*")
4141 (set_attr "length" "4")
4142 (set_attr "type" "mvn_reg")]
4143 )
4144
4145 (define_insn "*notsi_compare0"
4146 [(set (reg:CC_NOOV CC_REGNUM)
4147 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4148 (const_int 0)))
4149 (set (match_operand:SI 0 "s_register_operand" "=r")
4150 (not:SI (match_dup 1)))]
4151 "TARGET_32BIT"
4152 "mvns%?\\t%0, %1"
4153 [(set_attr "conds" "set")
4154 (set_attr "type" "mvn_reg")]
4155 )
4156
4157 (define_insn "*notsi_compare0_scratch"
4158 [(set (reg:CC_NOOV CC_REGNUM)
4159 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4160 (const_int 0)))
4161 (clobber (match_scratch:SI 0 "=r"))]
4162 "TARGET_32BIT"
4163 "mvns%?\\t%0, %1"
4164 [(set_attr "conds" "set")
4165 (set_attr "type" "mvn_reg")]
4166 )
4167 \f
4168 ;; Fixed <--> Floating conversion insns
4169
4170 (define_expand "floatsihf2"
4171 [(set (match_operand:HF 0 "general_operand")
4172 (float:HF (match_operand:SI 1 "general_operand")))]
4173 "TARGET_EITHER"
4174 "
4175 {
4176 rtx op1 = gen_reg_rtx (SFmode);
4177 expand_float (op1, operands[1], 0);
4178 op1 = convert_to_mode (HFmode, op1, 0);
4179 emit_move_insn (operands[0], op1);
4180 DONE;
4181 }"
4182 )
4183
4184 (define_expand "floatdihf2"
4185 [(set (match_operand:HF 0 "general_operand")
4186 (float:HF (match_operand:DI 1 "general_operand")))]
4187 "TARGET_EITHER"
4188 "
4189 {
4190 rtx op1 = gen_reg_rtx (SFmode);
4191 expand_float (op1, operands[1], 0);
4192 op1 = convert_to_mode (HFmode, op1, 0);
4193 emit_move_insn (operands[0], op1);
4194 DONE;
4195 }"
4196 )
4197
4198 (define_expand "floatsisf2"
4199 [(set (match_operand:SF 0 "s_register_operand")
4200 (float:SF (match_operand:SI 1 "s_register_operand")))]
4201 "TARGET_32BIT && TARGET_HARD_FLOAT"
4202 "
4203 ")
4204
4205 (define_expand "floatsidf2"
4206 [(set (match_operand:DF 0 "s_register_operand")
4207 (float:DF (match_operand:SI 1 "s_register_operand")))]
4208 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4209 "
4210 ")
4211
4212 (define_expand "fix_trunchfsi2"
4213 [(set (match_operand:SI 0 "general_operand")
4214 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
4215 "TARGET_EITHER"
4216 "
4217 {
4218 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4219 expand_fix (operands[0], op1, 0);
4220 DONE;
4221 }"
4222 )
4223
4224 (define_expand "fix_trunchfdi2"
4225 [(set (match_operand:DI 0 "general_operand")
4226 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
4227 "TARGET_EITHER"
4228 "
4229 {
4230 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4231 expand_fix (operands[0], op1, 0);
4232 DONE;
4233 }"
4234 )
4235
4236 (define_expand "fix_truncsfsi2"
4237 [(set (match_operand:SI 0 "s_register_operand")
4238 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
4239 "TARGET_32BIT && TARGET_HARD_FLOAT"
4240 "
4241 ")
4242
4243 (define_expand "fix_truncdfsi2"
4244 [(set (match_operand:SI 0 "s_register_operand")
4245 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
4246 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4247 "
4248 ")
4249
4250 ;; Truncation insns
4251
4252 (define_expand "truncdfsf2"
4253 [(set (match_operand:SF 0 "s_register_operand")
4254 (float_truncate:SF
4255 (match_operand:DF 1 "s_register_operand")))]
4256 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4257 ""
4258 )
4259
4260 ;; DFmode to HFmode conversions on targets without a single-step hardware
4261 ;; instruction would have to go through SFmode. This is dangerous,
4262 ;; as it introduces double rounding.
4263 ;;
4264 ;; Disable this pattern unless we are in an unsafe math mode, or we have
4265 ;; a single-step instruction.
4266
4267 (define_expand "truncdfhf2"
4268 [(set (match_operand:HF 0 "s_register_operand")
4269 (float_truncate:HF
4270 (match_operand:DF 1 "s_register_operand")))]
4271 "(TARGET_EITHER && flag_unsafe_math_optimizations)
4272 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
4273 {
4274 /* We don't have a direct instruction for this, so we must be in
4275 an unsafe math mode, and going via SFmode. */
4276
4277 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4278 {
4279 rtx op1;
4280 op1 = convert_to_mode (SFmode, operands[1], 0);
4281 op1 = convert_to_mode (HFmode, op1, 0);
4282 emit_move_insn (operands[0], op1);
4283 DONE;
4284 }
4285 /* Otherwise, we will pick this up as a single instruction with
4286 no intermediary rounding. */
4287 }
4288 )
4289 \f
4290 ;; Zero and sign extension instructions.
4291
4292 (define_expand "zero_extend<mode>di2"
4293 [(set (match_operand:DI 0 "s_register_operand" "")
4294 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
4295 "TARGET_32BIT <qhs_zextenddi_cond>"
4296 {
4297 rtx res_lo, res_hi, op0_lo, op0_hi;
4298 res_lo = gen_lowpart (SImode, operands[0]);
4299 res_hi = gen_highpart (SImode, operands[0]);
4300 if (can_create_pseudo_p ())
4301 {
4302 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4303 op0_hi = gen_reg_rtx (SImode);
4304 }
4305 else
4306 {
4307 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4308 op0_hi = res_hi;
4309 }
4310 if (<MODE>mode != SImode)
4311 emit_insn (gen_rtx_SET (op0_lo,
4312 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4313 emit_insn (gen_movsi (op0_hi, const0_rtx));
4314 if (res_lo != op0_lo)
4315 emit_move_insn (res_lo, op0_lo);
4316 if (res_hi != op0_hi)
4317 emit_move_insn (res_hi, op0_hi);
4318 DONE;
4319 }
4320 )
4321
4322 (define_expand "extend<mode>di2"
4323 [(set (match_operand:DI 0 "s_register_operand" "")
4324 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
4325 "TARGET_32BIT <qhs_sextenddi_cond>"
4326 {
4327 rtx res_lo, res_hi, op0_lo, op0_hi;
4328 res_lo = gen_lowpart (SImode, operands[0]);
4329 res_hi = gen_highpart (SImode, operands[0]);
4330 if (can_create_pseudo_p ())
4331 {
4332 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4333 op0_hi = gen_reg_rtx (SImode);
4334 }
4335 else
4336 {
4337 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4338 op0_hi = res_hi;
4339 }
4340 if (<MODE>mode != SImode)
4341 emit_insn (gen_rtx_SET (op0_lo,
4342 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4343 emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
4344 if (res_lo != op0_lo)
4345 emit_move_insn (res_lo, op0_lo);
4346 if (res_hi != op0_hi)
4347 emit_move_insn (res_hi, op0_hi);
4348 DONE;
4349 }
4350 )
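;; Schematically, the two DImode extension expanders above produce, for a
;; 32-bit source r1 (narrower sources are first widened with uxt*/sxt*):
;;   zero extend:  lo = r1;  hi = 0
;;   sign extend:  lo = r1;  hi = r1 >> 31   /* arithmetic shift */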
4351
4352 ;; Splits for all extensions to DImode
4353 (define_split
4354 [(set (match_operand:DI 0 "s_register_operand" "")
4355 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4356 "TARGET_32BIT"
4357 [(set (match_dup 0) (match_dup 1))]
4358 {
4359 rtx lo_part = gen_lowpart (SImode, operands[0]);
4360 machine_mode src_mode = GET_MODE (operands[1]);
4361
4362 if (src_mode == SImode)
4363 emit_move_insn (lo_part, operands[1]);
4364 else
4365 emit_insn (gen_rtx_SET (lo_part,
4366 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4367 operands[0] = gen_highpart (SImode, operands[0]);
4368 operands[1] = const0_rtx;
4369 })
4370
4371 (define_split
4372 [(set (match_operand:DI 0 "s_register_operand" "")
4373 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4374 "TARGET_32BIT"
4375 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4376 {
4377 rtx lo_part = gen_lowpart (SImode, operands[0]);
4378 machine_mode src_mode = GET_MODE (operands[1]);
4379
4380 if (src_mode == SImode)
4381 emit_move_insn (lo_part, operands[1]);
4382 else
4383 emit_insn (gen_rtx_SET (lo_part,
4384 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4385 operands[1] = lo_part;
4386 operands[0] = gen_highpart (SImode, operands[0]);
4387 })
4388
4389 (define_expand "zero_extendhisi2"
4390 [(set (match_operand:SI 0 "s_register_operand")
4391 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4392 "TARGET_EITHER"
4393 {
4394 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4395 {
4396 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4397 DONE;
4398 }
4399 if (!arm_arch6 && !MEM_P (operands[1]))
4400 {
4401 rtx t = gen_lowpart (SImode, operands[1]);
4402 rtx tmp = gen_reg_rtx (SImode);
4403 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4404 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
4405 DONE;
4406 }
4407 })
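;; Without uxth (pre-ARMv6), zero-extending a halfword already in a register
;; is done with a shift pair, roughly:
;;   mov r0, r1, lsl #16
;;   mov r0, r0, lsr #16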
4408
4409 (define_split
4410 [(set (match_operand:SI 0 "s_register_operand" "")
4411 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
4412 "!TARGET_THUMB2 && !arm_arch6"
4413 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4414 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4415 {
4416 operands[2] = gen_lowpart (SImode, operands[1]);
4417 })
4418
4419 (define_insn "*arm_zero_extendhisi2"
4420 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4421 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4422 "TARGET_ARM && arm_arch4 && !arm_arch6"
4423 "@
4424 #
4425 ldrh%?\\t%0, %1"
4426 [(set_attr "type" "alu_shift_reg,load_byte")
4427 (set_attr "predicable" "yes")]
4428 )
4429
4430 (define_insn "*arm_zero_extendhisi2_v6"
4431 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4432 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4433 "TARGET_ARM && arm_arch6"
4434 "@
4435 uxth%?\\t%0, %1
4436 ldrh%?\\t%0, %1"
4437 [(set_attr "predicable" "yes")
4438 (set_attr "type" "extend,load_byte")]
4439 )
4440
4441 (define_insn "*arm_zero_extendhisi2addsi"
4442 [(set (match_operand:SI 0 "s_register_operand" "=r")
4443 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4444 (match_operand:SI 2 "s_register_operand" "r")))]
4445 "TARGET_INT_SIMD"
4446 "uxtah%?\\t%0, %2, %1"
4447 [(set_attr "type" "alu_shift_reg")
4448 (set_attr "predicable" "yes")]
4449 )
4450
4451 (define_expand "zero_extendqisi2"
4452 [(set (match_operand:SI 0 "s_register_operand")
4453 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
4454 "TARGET_EITHER"
4455 {
4456 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
4457 {
4458 emit_insn (gen_andsi3 (operands[0],
4459 gen_lowpart (SImode, operands[1]),
4460 GEN_INT (255)));
4461 DONE;
4462 }
4463 if (!arm_arch6 && !MEM_P (operands[1]))
4464 {
4465 rtx t = gen_lowpart (SImode, operands[1]);
4466 rtx tmp = gen_reg_rtx (SImode);
4467 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4468 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
4469 DONE;
4470 }
4471 })
4472
4473 (define_split
4474 [(set (match_operand:SI 0 "s_register_operand" "")
4475 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
4476 "!arm_arch6"
4477 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4478 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4479 {
4480 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4481 if (TARGET_ARM)
4482 {
4483 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
4484 DONE;
4485 }
4486 })
4487
4488 (define_insn "*arm_zero_extendqisi2"
4489 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4490 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4491 "TARGET_ARM && !arm_arch6"
4492 "@
4493 #
4494 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4495 [(set_attr "length" "8,4")
4496 (set_attr "type" "alu_shift_reg,load_byte")
4497 (set_attr "predicable" "yes")]
4498 )
4499
4500 (define_insn "*arm_zero_extendqisi2_v6"
4501 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4502 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
4503 "TARGET_ARM && arm_arch6"
4504 "@
4505 uxtb%?\\t%0, %1
4506 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4507 [(set_attr "type" "extend,load_byte")
4508 (set_attr "predicable" "yes")]
4509 )
4510
4511 (define_insn "*arm_zero_extendqisi2addsi"
4512 [(set (match_operand:SI 0 "s_register_operand" "=r")
4513 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4514 (match_operand:SI 2 "s_register_operand" "r")))]
4515 "TARGET_INT_SIMD"
4516 "uxtab%?\\t%0, %2, %1"
4517 [(set_attr "predicable" "yes")
4518 (set_attr "type" "alu_shift_reg")]
4519 )
4520
4521 (define_split
4522 [(set (match_operand:SI 0 "s_register_operand" "")
4523 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4524 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4525 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
4526 [(set (match_dup 2) (match_dup 1))
4527 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4528 ""
4529 )
4530
4531 (define_split
4532 [(set (match_operand:SI 0 "s_register_operand" "")
4533 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4534 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4535 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
4536 [(set (match_dup 2) (match_dup 1))
4537 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4538 ""
4539 )
4540
4541
4542 (define_split
4543 [(set (match_operand:SI 0 "s_register_operand" "")
4544 (IOR_XOR:SI (and:SI (ashift:SI
4545 (match_operand:SI 1 "s_register_operand" "")
4546 (match_operand:SI 2 "const_int_operand" ""))
4547 (match_operand:SI 3 "const_int_operand" ""))
4548 (zero_extend:SI
4549 (match_operator 5 "subreg_lowpart_operator"
4550 [(match_operand:SI 4 "s_register_operand" "")]))))]
4551 "TARGET_32BIT
4552 && (UINTVAL (operands[3])
4553 == (GET_MODE_MASK (GET_MODE (operands[5]))
4554 & (GET_MODE_MASK (GET_MODE (operands[5]))
4555 << (INTVAL (operands[2])))))"
4556 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
4557 (match_dup 4)))
4558 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4559 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4560 )
4561
4562 (define_insn "*compareqi_eq0"
4563 [(set (reg:CC_Z CC_REGNUM)
4564 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4565 (const_int 0)))]
4566 "TARGET_32BIT"
4567 "tst%?\\t%0, #255"
4568 [(set_attr "conds" "set")
4569 (set_attr "predicable" "yes")
4570 (set_attr "type" "logic_imm")]
4571 )
4572
4573 (define_expand "extendhisi2"
4574 [(set (match_operand:SI 0 "s_register_operand")
4575 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4576 "TARGET_EITHER"
4577 {
4578 if (TARGET_THUMB1)
4579 {
4580 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4581 DONE;
4582 }
4583 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4584 {
4585 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4586 DONE;
4587 }
4588
4589 if (!arm_arch6 && !MEM_P (operands[1]))
4590 {
4591 rtx t = gen_lowpart (SImode, operands[1]);
4592 rtx tmp = gen_reg_rtx (SImode);
4593 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4594 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
4595 DONE;
4596 }
4597 })
4598
4599 (define_split
4600 [(parallel
4601 [(set (match_operand:SI 0 "register_operand" "")
4602 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4603 (clobber (match_scratch:SI 2 ""))])]
4604 "!arm_arch6"
4605 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4606 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4607 {
4608 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4609 })
4610
4611 ;; This pattern will only be used when ldrsh is not available.
4612 (define_expand "extendhisi2_mem"
4613 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4614 (set (match_dup 3)
4615 (zero_extend:SI (match_dup 7)))
4616 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4617 (set (match_operand:SI 0 "" "")
4618 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4619 "TARGET_ARM"
4620 "
4621 {
4622 rtx mem1, mem2;
4623 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4624
4625 mem1 = change_address (operands[1], QImode, addr);
4626 mem2 = change_address (operands[1], QImode,
4627 plus_constant (Pmode, addr, 1));
4628 operands[0] = gen_lowpart (SImode, operands[0]);
4629 operands[1] = mem1;
4630 operands[2] = gen_reg_rtx (SImode);
4631 operands[3] = gen_reg_rtx (SImode);
4632 operands[6] = gen_reg_rtx (SImode);
4633 operands[7] = mem2;
4634
4635 if (BYTES_BIG_ENDIAN)
4636 {
4637 operands[4] = operands[2];
4638 operands[5] = operands[3];
4639 }
4640 else
4641 {
4642 operands[4] = operands[3];
4643 operands[5] = operands[2];
4644 }
4645 }"
4646 )
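;; Roughly, when ldrsh is unavailable this loads the two bytes separately and
;; recombines them; on a little-endian target something like
;;   ldrb  r2, [rA]              @ low byte
;;   ldrb  r3, [rA, #1]          @ high byte
;;   mov   r1, r3, lsl #24
;;   orr   r0, r2, r1, asr #16   @ sign-extended halfword
;; (register names purely illustrative).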
4647
4648 (define_split
4649 [(set (match_operand:SI 0 "register_operand" "")
4650 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4651 "!arm_arch6"
4652 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4653 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4654 {
4655 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4656 })
4657
4658 (define_insn "*arm_extendhisi2"
4659 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4660 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4661 "TARGET_ARM && arm_arch4 && !arm_arch6"
4662 "@
4663 #
4664 ldrsh%?\\t%0, %1"
4665 [(set_attr "length" "8,4")
4666 (set_attr "type" "alu_shift_reg,load_byte")
4667 (set_attr "predicable" "yes")]
4668 )
4669
4670 ;; ??? Check Thumb-2 pool range
4671 (define_insn "*arm_extendhisi2_v6"
4672 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4673 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4674 "TARGET_32BIT && arm_arch6"
4675 "@
4676 sxth%?\\t%0, %1
4677 ldrsh%?\\t%0, %1"
4678 [(set_attr "type" "extend,load_byte")
4679 (set_attr "predicable" "yes")]
4680 )
4681
4682 (define_insn "*arm_extendhisi2addsi"
4683 [(set (match_operand:SI 0 "s_register_operand" "=r")
4684 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4685 (match_operand:SI 2 "s_register_operand" "r")))]
4686 "TARGET_INT_SIMD"
4687 "sxtah%?\\t%0, %2, %1"
4688 [(set_attr "type" "alu_shift_reg")]
4689 )
4690
4691 (define_expand "extendqihi2"
4692 [(set (match_dup 2)
4693 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
4694 (const_int 24)))
4695 (set (match_operand:HI 0 "s_register_operand")
4696 (ashiftrt:SI (match_dup 2)
4697 (const_int 24)))]
4698 "TARGET_ARM"
4699 "
4700 {
4701 if (arm_arch4 && MEM_P (operands[1]))
4702 {
4703 emit_insn (gen_rtx_SET (operands[0],
4704 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4705 DONE;
4706 }
4707 if (!s_register_operand (operands[1], QImode))
4708 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4709 operands[0] = gen_lowpart (SImode, operands[0]);
4710 operands[1] = gen_lowpart (SImode, operands[1]);
4711 operands[2] = gen_reg_rtx (SImode);
4712 }"
4713 )
4714
4715 (define_insn "*arm_extendqihi_insn"
4716 [(set (match_operand:HI 0 "s_register_operand" "=r")
4717 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4718 "TARGET_ARM && arm_arch4"
4719 "ldrsb%?\\t%0, %1"
4720 [(set_attr "type" "load_byte")
4721 (set_attr "predicable" "yes")]
4722 )
4723
4724 (define_expand "extendqisi2"
4725 [(set (match_operand:SI 0 "s_register_operand")
4726 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
4727 "TARGET_EITHER"
4728 {
4729 if (!arm_arch4 && MEM_P (operands[1]))
4730 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4731
4732 if (!arm_arch6 && !MEM_P (operands[1]))
4733 {
4734 rtx t = gen_lowpart (SImode, operands[1]);
4735 rtx tmp = gen_reg_rtx (SImode);
4736 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4737 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
4738 DONE;
4739 }
4740 })
4741
4742 (define_split
4743 [(set (match_operand:SI 0 "register_operand" "")
4744 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
4745 "!arm_arch6"
4746 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4747 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
4748 {
4749 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4750 })
4751
4752 (define_insn "*arm_extendqisi"
4753 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4754 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4755 "TARGET_ARM && arm_arch4 && !arm_arch6"
4756 "@
4757 #
4758 ldrsb%?\\t%0, %1"
4759 [(set_attr "length" "8,4")
4760 (set_attr "type" "alu_shift_reg,load_byte")
4761 (set_attr "predicable" "yes")]
4762 )
4763
4764 (define_insn "*arm_extendqisi_v6"
4765 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4766 (sign_extend:SI
4767 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4768 "TARGET_ARM && arm_arch6"
4769 "@
4770 sxtb%?\\t%0, %1
4771 ldrsb%?\\t%0, %1"
4772 [(set_attr "type" "extend,load_byte")
4773 (set_attr "predicable" "yes")]
4774 )
4775
4776 (define_insn "*arm_extendqisi2addsi"
4777 [(set (match_operand:SI 0 "s_register_operand" "=r")
4778 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4779 (match_operand:SI 2 "s_register_operand" "r")))]
4780 "TARGET_INT_SIMD"
4781 "sxtab%?\\t%0, %2, %1"
4782 [(set_attr "type" "alu_shift_reg")
4783 (set_attr "predicable" "yes")]
4784 )
4785
4786 (define_insn "arm_<sup>xtb16"
4787 [(set (match_operand:SI 0 "s_register_operand" "=r")
4788 (unspec:SI
4789 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
4790 "TARGET_INT_SIMD"
4791 "<sup>xtb16%?\\t%0, %1"
4792 [(set_attr "predicable" "yes")
4793 (set_attr "type" "alu_dsp_reg")])
4794
4795 (define_insn "arm_<simd32_op>"
4796 [(set (match_operand:SI 0 "s_register_operand" "=r")
4797 (unspec:SI
4798 [(match_operand:SI 1 "s_register_operand" "r")
4799 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
4800 "TARGET_INT_SIMD"
4801 "<simd32_op>%?\\t%0, %1, %2"
4802 [(set_attr "predicable" "yes")
4803 (set_attr "type" "alu_dsp_reg")])
4804
4805 (define_insn "arm_usada8"
4806 [(set (match_operand:SI 0 "s_register_operand" "=r")
4807 (unspec:SI
4808 [(match_operand:SI 1 "s_register_operand" "r")
4809 (match_operand:SI 2 "s_register_operand" "r")
4810 (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
4811 "TARGET_INT_SIMD"
4812 "usada8%?\\t%0, %1, %2, %3"
4813 [(set_attr "predicable" "yes")
4814 (set_attr "type" "alu_dsp_reg")])
4815
4816 (define_insn "arm_<simd32_op>"
4817 [(set (match_operand:DI 0 "s_register_operand" "=r")
4818 (unspec:DI
4819 [(match_operand:SI 1 "s_register_operand" "r")
4820 (match_operand:SI 2 "s_register_operand" "r")
4821 (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
4822 "TARGET_INT_SIMD"
4823 "<simd32_op>%?\\t%Q0, %R0, %1, %2"
4824 [(set_attr "predicable" "yes")
4825 (set_attr "type" "smlald")])
4826
4827 (define_expand "extendsfdf2"
4828 [(set (match_operand:DF 0 "s_register_operand")
4829 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
4830 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4831 ""
4832 )
4833
4834 ;; HFmode -> DFmode conversions for which we don't have a single
4835 ;; instruction must go through SFmode.
4836 ;;
4837 ;; This is always safe for an extend.
4838
4839 (define_expand "extendhfdf2"
4840 [(set (match_operand:DF 0 "s_register_operand")
4841 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
4842 "TARGET_EITHER"
4843 {
4844 /* We don't have a direct instruction for this, so go via SFmode. */
4845 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4846 {
4847 rtx op1;
4848 op1 = convert_to_mode (SFmode, operands[1], 0);
4849 op1 = convert_to_mode (DFmode, op1, 0);
4850 emit_insn (gen_movdf (operands[0], op1));
4851 DONE;
4852 }
4853 /* Otherwise, we're done producing RTL and will pick up the correct
4854 pattern to do this with one rounding-step in a single instruction. */
4855 }
4856 )
4857 \f
4858 ;; Move insns (including loads and stores)
4859
4860 ;; XXX Just some ideas about movti.
4861 ;; I don't think these are a good idea on the ARM; there just aren't enough
4862 ;; registers.
4863 ;;(define_expand "loadti"
4864 ;; [(set (match_operand:TI 0 "s_register_operand")
4865 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
4866 ;; "" "")
4867
4868 ;;(define_expand "storeti"
4869 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
4870 ;; (match_operand:TI 1 "s_register_operand"))]
4871 ;; "" "")
4872
4873 ;;(define_expand "movti"
4874 ;; [(set (match_operand:TI 0 "general_operand")
4875 ;; (match_operand:TI 1 "general_operand"))]
4876 ;; ""
4877 ;; "
4878 ;;{
4879 ;; rtx insn;
4880 ;;
4881 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
4882 ;; operands[1] = copy_to_reg (operands[1]);
4883 ;; if (MEM_P (operands[0]))
4884 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4885 ;; else if (MEM_P (operands[1]))
4886 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4887 ;; else
4888 ;; FAIL;
4889 ;;
4890 ;; emit_insn (insn);
4891 ;; DONE;
4892 ;;}")
4893
4894 ;; Recognize garbage generated above.
4895
4896 ;;(define_insn ""
4897 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4898 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4899 ;; ""
4900 ;; "*
4901 ;; {
4902 ;; register mem = (which_alternative < 3);
4903 ;; register const char *template;
4904 ;;
4905 ;; operands[mem] = XEXP (operands[mem], 0);
4906 ;; switch (which_alternative)
4907 ;; {
4908 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4909 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4910 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4911 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4912 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4913 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4914 ;; }
4915 ;; output_asm_insn (template, operands);
4916 ;; return \"\";
4917 ;; }")
4918
4919 (define_expand "movdi"
4920 [(set (match_operand:DI 0 "general_operand")
4921 (match_operand:DI 1 "general_operand"))]
4922 "TARGET_EITHER"
4923 "
4924 gcc_checking_assert (aligned_operand (operands[0], DImode));
4925 gcc_checking_assert (aligned_operand (operands[1], DImode));
4926 if (can_create_pseudo_p ())
4927 {
4928 if (!REG_P (operands[0]))
4929 operands[1] = force_reg (DImode, operands[1]);
4930 }
4931 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
4932 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
4933 {
4934 /* Avoid LDRD's into an odd-numbered register pair in ARM state
4935 when expanding function calls. */
4936 gcc_assert (can_create_pseudo_p ());
4937 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
4938 {
4939 /* Perform load into legal reg pair first, then move. */
4940 rtx reg = gen_reg_rtx (DImode);
4941 emit_insn (gen_movdi (reg, operands[1]));
4942 operands[1] = reg;
4943 }
4944 emit_move_insn (gen_lowpart (SImode, operands[0]),
4945 gen_lowpart (SImode, operands[1]));
4946 emit_move_insn (gen_highpart (SImode, operands[0]),
4947 gen_highpart (SImode, operands[1]));
4948 DONE;
4949 }
4950 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
4951 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
4952 {
4953 /* Avoid STRD's from an odd-numbered register pair in ARM state
4954 when expanding function prologue. */
4955 gcc_assert (can_create_pseudo_p ());
4956 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
4957 ? gen_reg_rtx (DImode)
4958 : operands[0];
4959 emit_move_insn (gen_lowpart (SImode, split_dest),
4960 gen_lowpart (SImode, operands[1]));
4961 emit_move_insn (gen_highpart (SImode, split_dest),
4962 gen_highpart (SImode, operands[1]));
4963 if (split_dest != operands[0])
4964 emit_insn (gen_movdi (operands[0], split_dest));
4965 DONE;
4966 }
4967 "
4968 )
4969
4970 (define_insn "*arm_movdi"
4971 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4972 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4973 "TARGET_32BIT
4974 && !(TARGET_HARD_FLOAT)
4975 && !TARGET_IWMMXT
4976 && ( register_operand (operands[0], DImode)
4977 || register_operand (operands[1], DImode))"
4978 "*
4979 switch (which_alternative)
4980 {
4981 case 0:
4982 case 1:
4983 case 2:
4984 return \"#\";
4985 case 3:
4986 /* Cannot load it directly, split to load it via MOV / MOVT. */
4987 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
4988 return \"#\";
4989 /* Fall through. */
4990 default:
4991 return output_move_double (operands, true, NULL);
4992 }
4993 "
4994 [(set_attr "length" "8,12,16,8,8")
4995 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
4996 (set_attr "arm_pool_range" "*,*,*,1020,*")
4997 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
4998 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
4999 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
5000 )
5001
5002 (define_split
5003 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5004 (match_operand:ANY64 1 "immediate_operand" ""))]
5005 "TARGET_32BIT
5006 && reload_completed
5007 && (arm_disable_literal_pool
5008 || (arm_const_double_inline_cost (operands[1])
5009 <= arm_max_const_double_inline_cost ()))"
5010 [(const_int 0)]
5011 "
5012 arm_split_constant (SET, SImode, curr_insn,
5013 INTVAL (gen_lowpart (SImode, operands[1])),
5014 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
5015 arm_split_constant (SET, SImode, curr_insn,
5016 INTVAL (gen_highpart_mode (SImode,
5017 GET_MODE (operands[0]),
5018 operands[1])),
5019 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
5020 DONE;
5021 "
5022 )
5023
5024 ; If optimizing for size, or if we have load delay slots, then
5025 ; we want to split the constant into two separate operations.
5026 ; In both cases this may split a trivial part into a single data op
5027 ; leaving a single complex constant to load. We can also get longer
5028 ; offsets in a LDR which means we get better chances of sharing the pool
5029 ; entries. Finally, we can normally do a better job of scheduling
5030 ; LDR instructions than we can with LDM.
5031 ; This pattern will only match if the one above did not.
5032 (define_split
5033 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5034 (match_operand:ANY64 1 "const_double_operand" ""))]
5035 "TARGET_ARM && reload_completed
5036 && arm_const_double_by_parts (operands[1])"
5037 [(set (match_dup 0) (match_dup 1))
5038 (set (match_dup 2) (match_dup 3))]
5039 "
5040 operands[2] = gen_highpart (SImode, operands[0]);
5041 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
5042 operands[1]);
5043 operands[0] = gen_lowpart (SImode, operands[0]);
5044 operands[1] = gen_lowpart (SImode, operands[1]);
5045 "
5046 )
5047
5048 (define_split
5049 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5050 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
5051 "TARGET_EITHER && reload_completed"
5052 [(set (match_dup 0) (match_dup 1))
5053 (set (match_dup 2) (match_dup 3))]
5054 "
5055 operands[2] = gen_highpart (SImode, operands[0]);
5056 operands[3] = gen_highpart (SImode, operands[1]);
5057 operands[0] = gen_lowpart (SImode, operands[0]);
5058 operands[1] = gen_lowpart (SImode, operands[1]);
5059
5060 /* Handle a partial overlap. */
5061 if (rtx_equal_p (operands[0], operands[3]))
5062 {
5063 rtx tmp0 = operands[0];
5064 rtx tmp1 = operands[1];
5065
5066 operands[0] = operands[2];
5067 operands[1] = operands[3];
5068 operands[2] = tmp0;
5069 operands[3] = tmp1;
5070 }
5071 "
5072 )
5073
5074 ;; We can't actually do base+index doubleword loads if the index and
5075 ;; destination overlap. Split here so that we at least have a chance to
5076 ;; schedule.
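;; Illustratively (the register numbers are only an example), an overlapping
;;     ldrd    r0, r1, [r0, r1]
;; becomes, after this split,
;;     add     r0, r0, r1
;;     ldrd    r0, r1, [r0]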
5077 (define_split
5078 [(set (match_operand:DI 0 "s_register_operand" "")
5079 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
5080 (match_operand:SI 2 "s_register_operand" ""))))]
5081 "TARGET_LDRD
5082 && reg_overlap_mentioned_p (operands[0], operands[1])
5083 && reg_overlap_mentioned_p (operands[0], operands[2])"
5084 [(set (match_dup 4)
5085 (plus:SI (match_dup 1)
5086 (match_dup 2)))
5087 (set (match_dup 0)
5088 (mem:DI (match_dup 4)))]
5089 "
5090 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
5091 "
5092 )
5093
5094 (define_expand "movsi"
5095 [(set (match_operand:SI 0 "general_operand")
5096 (match_operand:SI 1 "general_operand"))]
5097 "TARGET_EITHER"
5098 "
5099 {
5100 rtx base, offset, tmp;
5101
5102 gcc_checking_assert (aligned_operand (operands[0], SImode));
5103 gcc_checking_assert (aligned_operand (operands[1], SImode));
5104 if (TARGET_32BIT || TARGET_HAVE_MOVT)
5105 {
5106 /* Everything except mem = const or mem = mem can be done easily. */
5107 if (MEM_P (operands[0]))
5108 operands[1] = force_reg (SImode, operands[1]);
5109 if (arm_general_register_operand (operands[0], SImode)
5110 && CONST_INT_P (operands[1])
5111 && !(const_ok_for_arm (INTVAL (operands[1]))
5112 || const_ok_for_arm (~INTVAL (operands[1]))))
5113 {
5114 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
5115 {
5116 emit_insn (gen_rtx_SET (operands[0], operands[1]));
5117 DONE;
5118 }
5119 else
5120 {
5121 arm_split_constant (SET, SImode, NULL_RTX,
5122 INTVAL (operands[1]), operands[0], NULL_RTX,
5123 optimize && can_create_pseudo_p ());
5124 DONE;
5125 }
5126 }
5127 }
5128 else /* Target doesn't have MOVT... */
5129 {
5130 if (can_create_pseudo_p ())
5131 {
5132 if (!REG_P (operands[0]))
5133 operands[1] = force_reg (SImode, operands[1]);
5134 }
5135 }
5136
5137 split_const (operands[1], &base, &offset);
5138 if (INTVAL (offset) != 0
5139 && targetm.cannot_force_const_mem (SImode, operands[1]))
5140 {
5141 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5142 emit_move_insn (tmp, base);
5143 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5144 DONE;
5145 }
5146
5147 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
5148
5149 /* Recognize the case where operand[1] is a reference to thread-local
5150 data and load its address to a register. Offsets have been split off
5151 already. */
5152 if (arm_tls_referenced_p (operands[1]))
5153 operands[1] = legitimize_tls_address (operands[1], tmp);
5154 else if (flag_pic
5155 && (CONSTANT_P (operands[1])
5156 || symbol_mentioned_p (operands[1])
5157 || label_mentioned_p (operands[1])))
5158 operands[1] =
5159 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
5160 }
5161 "
5162 )
5163
5164 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5165 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5166 ;; so this does not matter.
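;; For illustration (example register and symbol names, not from the original
;; comment), the usual pair built around this pattern to materialize a
;; symbolic address is:
;;     movw    r0, #:lower16:sym
;;     movt    r0, #:upper16:sym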
5167 (define_insn "*arm_movt"
5168 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
5169 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
5170 (match_operand:SI 2 "general_operand" "i,i")))]
5171 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
5172 "@
5173 movt%?\t%0, #:upper16:%c2
5174 movt\t%0, #:upper16:%c2"
5175 [(set_attr "arch" "32,v8mb")
5176 (set_attr "predicable" "yes")
5177 (set_attr "length" "4")
5178 (set_attr "type" "alu_sreg")]
5179 )
5180
5181 (define_insn "*arm_movsi_insn"
5182 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5183 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5184 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
5185 && ( register_operand (operands[0], SImode)
5186 || register_operand (operands[1], SImode))"
5187 "@
5188 mov%?\\t%0, %1
5189 mov%?\\t%0, %1
5190 mvn%?\\t%0, #%B1
5191 movw%?\\t%0, %1
5192 ldr%?\\t%0, %1
5193 str%?\\t%1, %0"
5194 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
5195 (set_attr "predicable" "yes")
5196 (set_attr "arch" "*,*,*,v6t2,*,*")
5197 (set_attr "pool_range" "*,*,*,*,4096,*")
5198 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5199 )
5200
5201 (define_split
5202 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5203 (match_operand:SI 1 "const_int_operand" ""))]
5204 "(TARGET_32BIT || TARGET_HAVE_MOVT)
5205 && (!(const_ok_for_arm (INTVAL (operands[1]))
5206 || const_ok_for_arm (~INTVAL (operands[1]))))"
5207 [(clobber (const_int 0))]
5208 "
5209 arm_split_constant (SET, SImode, NULL_RTX,
5210 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5211 DONE;
5212 "
5213 )
5214
5215 ;; The usual way to materialize (symbol + offset) needs at least three
5216 ;; instructions (depending on how big the offset is), for example:
5217 ;; movw r0, #:lower16:g
5218 ;; movt r0, #:upper16:g
5219 ;; adds r0, #4
5220 ;;
5221 ;; A better way is to fold the offset into the relocations:
5222 ;; movw r0, #:lower16:g+4
5223 ;; movt r0, #:upper16:g+4
5224 ;;
5225 ;; The limitation of this approach is that the offset must fit in a 16-bit
5226 ;; signed value, because the current assembler only supports REL-type
5227 ;; relocations for this case. If the more powerful RELA type is supported in
5228 ;; the future, this pattern should be updated to take advantage of it.
5229 (define_split
5230 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5231 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
5232 (match_operand:SI 2 "const_int_operand" ""))))]
5233 "TARGET_THUMB
5234 && TARGET_HAVE_MOVT
5235 && arm_disable_literal_pool
5236 && reload_completed
5237 && GET_CODE (operands[1]) == SYMBOL_REF"
5238 [(clobber (const_int 0))]
5239 "
5240 int offset = INTVAL (operands[2]);
5241
5242 if (offset < -0x8000 || offset > 0x7fff)
5243 {
5244 arm_emit_movpair (operands[0], operands[1]);
5245 emit_insn (gen_rtx_SET (operands[0],
5246 gen_rtx_PLUS (SImode, operands[0], operands[2])));
5247 }
5248 else
5249 {
5250 rtx op = gen_rtx_CONST (SImode,
5251 gen_rtx_PLUS (SImode, operands[1], operands[2]));
5252 arm_emit_movpair (operands[0], op);
5253 }
5254 "
5255 )
5256
5257 ;; Split symbol_refs at a later stage (after cprop), instead of generating
5258 ;; the movt/movw pair directly at expand time.  Otherwise the corresponding
5259 ;; HIGH and LO_SUM would be merged back into a memory load by cprop.  When
5260 ;; the default is to prefer movt/movw over a load from the constant pool,
5261 ;; splitting late gives the better-performing sequence.
5262 (define_split
5263 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5264 (match_operand:SI 1 "general_operand" ""))]
5265 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
5266 && !target_word_relocations
5267 && !arm_tls_referenced_p (operands[1])"
5268 [(clobber (const_int 0))]
5269 {
5270 arm_emit_movpair (operands[0], operands[1]);
5271 DONE;
5272 })
5273
5274 ;; When generating pic, we need to load the symbol offset into a register.
5275 ;; So that the optimizer does not confuse this with a normal symbol load
5276 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5277 ;; since that is the only type of relocation we can use.
5278
5279 ;; Wrap calculation of the whole PIC address in a single pattern for the
5280 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5281 ;; a PIC address involves two loads from memory, so we want to CSE it
5282 ;; as often as possible.
5283 ;; This pattern will be split into one of the pic_load_addr_* patterns
5284 ;; and a move after GCSE optimizations.
5285 ;;
5286 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
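;; For illustration only (register names and the pool label are examples),
;; the split below typically yields a sequence of the form:
;;     ldr     r3, .LCn            @ GOT offset of the symbol, from the pool
;;     ldr     r0, [r_pic, r3]     @ load the symbol's address from the GOT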
5287 (define_expand "calculate_pic_address"
5288 [(set (match_operand:SI 0 "register_operand")
5289 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
5290 (unspec:SI [(match_operand:SI 2 "" "")]
5291 UNSPEC_PIC_SYM))))]
5292 "flag_pic"
5293 )
5294
5295 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5296 (define_split
5297 [(set (match_operand:SI 0 "register_operand" "")
5298 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5299 (unspec:SI [(match_operand:SI 2 "" "")]
5300 UNSPEC_PIC_SYM))))]
5301 "flag_pic"
5302 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5303 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5304 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5305 )
5306
5307 ;; operand1 is the memory address to go into
5308 ;; pic_load_addr_32bit.
5309 ;; operand2 is the PIC label to be emitted
5310 ;; from pic_add_dot_plus_eight.
5311 ;; We do this to allow hoisting of the entire insn.
5312 (define_insn_and_split "pic_load_addr_unified"
5313 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
5314 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
5315 (match_operand:SI 2 "" "")]
5316 UNSPEC_PIC_UNIFIED))]
5317 "flag_pic"
5318 "#"
5319 "&& reload_completed"
5320 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
5321 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
5322 (match_dup 2)] UNSPEC_PIC_BASE))]
5323 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
5324 [(set_attr "type" "load_4,load_4,load_4")
5325 (set_attr "pool_range" "4096,4094,1022")
5326 (set_attr "neg_pool_range" "4084,0,0")
5327 (set_attr "arch" "a,t2,t1")
5328 (set_attr "length" "8,6,4")]
5329 )
5330
5331 ;; The rather odd constraints on the following are to force reload to leave
5332 ;; the insn alone, and to force the minipool generation pass to then move
5333 ;; the GOT symbol to memory.
5334
5335 (define_insn "pic_load_addr_32bit"
5336 [(set (match_operand:SI 0 "s_register_operand" "=r")
5337 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5338 "TARGET_32BIT && flag_pic"
5339 "ldr%?\\t%0, %1"
5340 [(set_attr "type" "load_4")
5341 (set (attr "pool_range")
5342 (if_then_else (eq_attr "is_thumb" "no")
5343 (const_int 4096)
5344 (const_int 4094)))
5345 (set (attr "neg_pool_range")
5346 (if_then_else (eq_attr "is_thumb" "no")
5347 (const_int 4084)
5348 (const_int 0)))]
5349 )
5350
5351 (define_insn "pic_load_addr_thumb1"
5352 [(set (match_operand:SI 0 "s_register_operand" "=l")
5353 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5354 "TARGET_THUMB1 && flag_pic"
5355 "ldr\\t%0, %1"
5356 [(set_attr "type" "load_4")
5357 (set (attr "pool_range") (const_int 1018))]
5358 )
5359
5360 (define_insn "pic_add_dot_plus_four"
5361 [(set (match_operand:SI 0 "register_operand" "=r")
5362 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5363 (const_int 4)
5364 (match_operand 2 "" "")]
5365 UNSPEC_PIC_BASE))]
5366 "TARGET_THUMB"
5367 "*
5368 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5369 INTVAL (operands[2]));
5370 return \"add\\t%0, %|pc\";
5371 "
5372 [(set_attr "length" "2")
5373 (set_attr "type" "alu_sreg")]
5374 )
5375
5376 (define_insn "pic_add_dot_plus_eight"
5377 [(set (match_operand:SI 0 "register_operand" "=r")
5378 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5379 (const_int 8)
5380 (match_operand 2 "" "")]
5381 UNSPEC_PIC_BASE))]
5382 "TARGET_ARM"
5383 "*
5384 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5385 INTVAL (operands[2]));
5386 return \"add%?\\t%0, %|pc, %1\";
5387 "
5388 [(set_attr "predicable" "yes")
5389 (set_attr "type" "alu_sreg")]
5390 )
5391
5392 (define_insn "tls_load_dot_plus_eight"
5393 [(set (match_operand:SI 0 "register_operand" "=r")
5394 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5395 (const_int 8)
5396 (match_operand 2 "" "")]
5397 UNSPEC_PIC_BASE)))]
5398 "TARGET_ARM"
5399 "*
5400 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5401 INTVAL (operands[2]));
5402 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5403 "
5404 [(set_attr "predicable" "yes")
5405 (set_attr "type" "load_4")]
5406 )
5407
5408 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5409 ;; followed by a load. These sequences can be crunched down to
5410 ;; tls_load_dot_plus_eight by a peephole.
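;; For illustration (register names are examples only), the peephole turns
;;     add     r3, pc, r2
;;     ldr     r0, [r3]
;; into the single
;;     ldr     r0, [pc, r2]
;; when r3 is dead after the load.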
5411
5412 (define_peephole2
5413 [(set (match_operand:SI 0 "register_operand" "")
5414 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5415 (const_int 8)
5416 (match_operand 1 "" "")]
5417 UNSPEC_PIC_BASE))
5418 (set (match_operand:SI 2 "arm_general_register_operand" "")
5419 (mem:SI (match_dup 0)))]
5420 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5421 [(set (match_dup 2)
5422 (mem:SI (unspec:SI [(match_dup 3)
5423 (const_int 8)
5424 (match_dup 1)]
5425 UNSPEC_PIC_BASE)))]
5426 ""
5427 )
5428
5429 (define_insn "pic_offset_arm"
5430 [(set (match_operand:SI 0 "register_operand" "=r")
5431 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5432 (unspec:SI [(match_operand:SI 2 "" "X")]
5433 UNSPEC_PIC_OFFSET))))]
5434 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5435 "ldr%?\\t%0, [%1,%2]"
5436 [(set_attr "type" "load_4")]
5437 )
5438
5439 (define_expand "builtin_setjmp_receiver"
5440 [(label_ref (match_operand 0 "" ""))]
5441 "flag_pic"
5442 "
5443 {
5444 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5445 register. */
5446 if (arm_pic_register != INVALID_REGNUM)
5447 arm_load_pic_register (1UL << 3, NULL_RTX);
5448 DONE;
5449 }")
5450
5451 ;; If copying one reg to another we can set the condition codes according to
5452 ;; its value.  Such a move is common after a return from a subroutine when the
5453 ;; result is being tested against zero.
5454
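;; For example (illustrative only), "mov r0, r1" followed by a comparison of
;; r0 against zero can be emitted as the single flag-setting instruction
;;     subs    r0, r1, #0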
5455 (define_insn "*movsi_compare0"
5456 [(set (reg:CC CC_REGNUM)
5457 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5458 (const_int 0)))
5459 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5460 (match_dup 1))]
5461 "TARGET_32BIT"
5462 "@
5463 cmp%?\\t%0, #0
5464 subs%?\\t%0, %1, #0"
5465 [(set_attr "conds" "set")
5466 (set_attr "type" "alus_imm,alus_imm")]
5467 )
5468
5469 ;; Subroutine to store a half word from a register into memory.
5470 ;; Operand 0 is the source register (HImode)
5471 ;; Operand 1 is the destination address in a register (SImode)
5472
5473 ;; In both this routine and the next, we must be careful not to spill
5474 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5475 ;; can generate unrecognizable rtl.
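;; As an illustration (not part of the original comment), the little-endian
;; expansion of storehi amounts to (register numbers are examples):
;;     strb    r1, [r0]            @ low byte
;;     mov     r2, r1, asr #8
;;     strb    r2, [r0, #1]        @ high byte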
5476
5477 (define_expand "storehi"
5478 [;; store the low byte
5479 (set (match_operand 1 "" "") (match_dup 3))
5480 ;; extract the high byte
5481 (set (match_dup 2)
5482 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5483 ;; store the high byte
5484 (set (match_dup 4) (match_dup 5))]
5485 "TARGET_ARM"
5486 "
5487 {
5488 rtx op1 = operands[1];
5489 rtx addr = XEXP (op1, 0);
5490 enum rtx_code code = GET_CODE (addr);
5491
5492 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5493 || code == MINUS)
5494 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5495
5496 operands[4] = adjust_address (op1, QImode, 1);
5497 operands[1] = adjust_address (operands[1], QImode, 0);
5498 operands[3] = gen_lowpart (QImode, operands[0]);
5499 operands[0] = gen_lowpart (SImode, operands[0]);
5500 operands[2] = gen_reg_rtx (SImode);
5501 operands[5] = gen_lowpart (QImode, operands[2]);
5502 }"
5503 )
5504
5505 (define_expand "storehi_bigend"
5506 [(set (match_dup 4) (match_dup 3))
5507 (set (match_dup 2)
5508 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5509 (set (match_operand 1 "" "") (match_dup 5))]
5510 "TARGET_ARM"
5511 "
5512 {
5513 rtx op1 = operands[1];
5514 rtx addr = XEXP (op1, 0);
5515 enum rtx_code code = GET_CODE (addr);
5516
5517 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5518 || code == MINUS)
5519 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5520
5521 operands[4] = adjust_address (op1, QImode, 1);
5522 operands[1] = adjust_address (operands[1], QImode, 0);
5523 operands[3] = gen_lowpart (QImode, operands[0]);
5524 operands[0] = gen_lowpart (SImode, operands[0]);
5525 operands[2] = gen_reg_rtx (SImode);
5526 operands[5] = gen_lowpart (QImode, operands[2]);
5527 }"
5528 )
5529
5530 ;; Subroutine to store a half word integer constant into memory.
5531 (define_expand "storeinthi"
5532 [(set (match_operand 0 "" "")
5533 (match_operand 1 "" ""))
5534 (set (match_dup 3) (match_dup 2))]
5535 "TARGET_ARM"
5536 "
5537 {
5538 HOST_WIDE_INT value = INTVAL (operands[1]);
5539 rtx addr = XEXP (operands[0], 0);
5540 rtx op0 = operands[0];
5541 enum rtx_code code = GET_CODE (addr);
5542
5543 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5544 || code == MINUS)
5545 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5546
5547 operands[1] = gen_reg_rtx (SImode);
5548 if (BYTES_BIG_ENDIAN)
5549 {
5550 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5551 if ((value & 255) == ((value >> 8) & 255))
5552 operands[2] = operands[1];
5553 else
5554 {
5555 operands[2] = gen_reg_rtx (SImode);
5556 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5557 }
5558 }
5559 else
5560 {
5561 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5562 if ((value & 255) == ((value >> 8) & 255))
5563 operands[2] = operands[1];
5564 else
5565 {
5566 operands[2] = gen_reg_rtx (SImode);
5567 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5568 }
5569 }
5570
5571 operands[3] = adjust_address (op0, QImode, 1);
5572 operands[0] = adjust_address (operands[0], QImode, 0);
5573 operands[2] = gen_lowpart (QImode, operands[2]);
5574 operands[1] = gen_lowpart (QImode, operands[1]);
5575 }"
5576 )
5577
5578 (define_expand "storehi_single_op"
5579 [(set (match_operand:HI 0 "memory_operand")
5580 (match_operand:HI 1 "general_operand"))]
5581 "TARGET_32BIT && arm_arch4"
5582 "
5583 if (!s_register_operand (operands[1], HImode))
5584 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5585 "
5586 )
5587
5588 (define_expand "movhi"
5589 [(set (match_operand:HI 0 "general_operand")
5590 (match_operand:HI 1 "general_operand"))]
5591 "TARGET_EITHER"
5592 "
5593 gcc_checking_assert (aligned_operand (operands[0], HImode));
5594 gcc_checking_assert (aligned_operand (operands[1], HImode));
5595 if (TARGET_ARM)
5596 {
5597 if (can_create_pseudo_p ())
5598 {
5599 if (MEM_P (operands[0]))
5600 {
5601 if (arm_arch4)
5602 {
5603 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5604 DONE;
5605 }
5606 if (CONST_INT_P (operands[1]))
5607 emit_insn (gen_storeinthi (operands[0], operands[1]));
5608 else
5609 {
5610 if (MEM_P (operands[1]))
5611 operands[1] = force_reg (HImode, operands[1]);
5612 if (BYTES_BIG_ENDIAN)
5613 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5614 else
5615 emit_insn (gen_storehi (operands[1], operands[0]));
5616 }
5617 DONE;
5618 }
5619 /* Sign extend a constant, and keep it in an SImode reg. */
5620 else if (CONST_INT_P (operands[1]))
5621 {
5622 rtx reg = gen_reg_rtx (SImode);
5623 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5624
5625 /* If the constant is already valid, leave it alone. */
5626 if (!const_ok_for_arm (val))
5627 {
5628 /* If setting all the top bits will make the constant
5629 loadable in a single instruction, then set them.
5630 Otherwise, sign extend the number. */
5631
5632 if (const_ok_for_arm (~(val | ~0xffff)))
5633 val |= ~0xffff;
5634 else if (val & 0x8000)
5635 val |= ~0xffff;
5636 }
5637
5638 emit_insn (gen_movsi (reg, GEN_INT (val)));
5639 operands[1] = gen_lowpart (HImode, reg);
5640 }
5641 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5642 && MEM_P (operands[1]))
5643 {
5644 rtx reg = gen_reg_rtx (SImode);
5645
5646 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5647 operands[1] = gen_lowpart (HImode, reg);
5648 }
5649 else if (!arm_arch4)
5650 {
5651 if (MEM_P (operands[1]))
5652 {
5653 rtx base;
5654 rtx offset = const0_rtx;
5655 rtx reg = gen_reg_rtx (SImode);
5656
5657 if ((REG_P (base = XEXP (operands[1], 0))
5658 || (GET_CODE (base) == PLUS
5659 && (CONST_INT_P (offset = XEXP (base, 1)))
5660 && ((INTVAL(offset) & 1) != 1)
5661 && REG_P (base = XEXP (base, 0))))
5662 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5663 {
5664 rtx new_rtx;
5665
5666 new_rtx = widen_memory_access (operands[1], SImode,
5667 ((INTVAL (offset) & ~3)
5668 - INTVAL (offset)));
5669 emit_insn (gen_movsi (reg, new_rtx));
5670 if (((INTVAL (offset) & 2) != 0)
5671 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5672 {
5673 rtx reg2 = gen_reg_rtx (SImode);
5674
5675 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5676 reg = reg2;
5677 }
5678 }
5679 else
5680 emit_insn (gen_movhi_bytes (reg, operands[1]));
5681
5682 operands[1] = gen_lowpart (HImode, reg);
5683 }
5684 }
5685 }
5686 /* Handle loading a large integer during reload. */
5687 else if (CONST_INT_P (operands[1])
5688 && !const_ok_for_arm (INTVAL (operands[1]))
5689 && !const_ok_for_arm (~INTVAL (operands[1])))
5690 {
5691 /* Writing a constant to memory needs a scratch, which should
5692 be handled with SECONDARY_RELOADs. */
5693 gcc_assert (REG_P (operands[0]));
5694
5695 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5696 emit_insn (gen_movsi (operands[0], operands[1]));
5697 DONE;
5698 }
5699 }
5700 else if (TARGET_THUMB2)
5701 {
5702 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5703 if (can_create_pseudo_p ())
5704 {
5705 if (!REG_P (operands[0]))
5706 operands[1] = force_reg (HImode, operands[1]);
5707 /* Zero extend a constant, and keep it in an SImode reg. */
5708 else if (CONST_INT_P (operands[1]))
5709 {
5710 rtx reg = gen_reg_rtx (SImode);
5711 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5712
5713 emit_insn (gen_movsi (reg, GEN_INT (val)));
5714 operands[1] = gen_lowpart (HImode, reg);
5715 }
5716 }
5717 }
5718 else /* TARGET_THUMB1 */
5719 {
5720 if (can_create_pseudo_p ())
5721 {
5722 if (CONST_INT_P (operands[1]))
5723 {
5724 rtx reg = gen_reg_rtx (SImode);
5725
5726 emit_insn (gen_movsi (reg, operands[1]));
5727 operands[1] = gen_lowpart (HImode, reg);
5728 }
5729
5730 /* ??? We shouldn't really get invalid addresses here, but this can
5731 happen if we are passed a SP (never OK for HImode/QImode) or
5732 virtual register (also rejected as illegitimate for HImode/QImode)
5733 relative address. */
5734 /* ??? This should perhaps be fixed elsewhere, for instance, in
5735 fixup_stack_1, by checking for other kinds of invalid addresses,
5736 e.g. a bare reference to a virtual register. This may confuse the
5737 alpha though, which must handle this case differently. */
5738 if (MEM_P (operands[0])
5739 && !memory_address_p (GET_MODE (operands[0]),
5740 XEXP (operands[0], 0)))
5741 operands[0]
5742 = replace_equiv_address (operands[0],
5743 copy_to_reg (XEXP (operands[0], 0)));
5744
5745 if (MEM_P (operands[1])
5746 && !memory_address_p (GET_MODE (operands[1]),
5747 XEXP (operands[1], 0)))
5748 operands[1]
5749 = replace_equiv_address (operands[1],
5750 copy_to_reg (XEXP (operands[1], 0)));
5751
5752 if (MEM_P (operands[1]) && optimize > 0)
5753 {
5754 rtx reg = gen_reg_rtx (SImode);
5755
5756 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5757 operands[1] = gen_lowpart (HImode, reg);
5758 }
5759
5760 if (MEM_P (operands[0]))
5761 operands[1] = force_reg (HImode, operands[1]);
5762 }
5763 else if (CONST_INT_P (operands[1])
5764 && !satisfies_constraint_I (operands[1]))
5765 {
5766 /* Handle loading a large integer during reload. */
5767
5768 /* Writing a constant to memory needs a scratch, which should
5769 be handled with SECONDARY_RELOADs. */
5770 gcc_assert (REG_P (operands[0]));
5771
5772 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5773 emit_insn (gen_movsi (operands[0], operands[1]));
5774 DONE;
5775 }
5776 }
5777 "
5778 )
5779
5780 (define_expand "movhi_bytes"
5781 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5782 (set (match_dup 3)
5783 (zero_extend:SI (match_dup 6)))
5784 (set (match_operand:SI 0 "" "")
5785 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5786 "TARGET_ARM"
5787 "
5788 {
5789 rtx mem1, mem2;
5790 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5791
5792 mem1 = change_address (operands[1], QImode, addr);
5793 mem2 = change_address (operands[1], QImode,
5794 plus_constant (Pmode, addr, 1));
5795 operands[0] = gen_lowpart (SImode, operands[0]);
5796 operands[1] = mem1;
5797 operands[2] = gen_reg_rtx (SImode);
5798 operands[3] = gen_reg_rtx (SImode);
5799 operands[6] = mem2;
5800
5801 if (BYTES_BIG_ENDIAN)
5802 {
5803 operands[4] = operands[2];
5804 operands[5] = operands[3];
5805 }
5806 else
5807 {
5808 operands[4] = operands[3];
5809 operands[5] = operands[2];
5810 }
5811 }"
5812 )
5813
5814 (define_expand "movhi_bigend"
5815 [(set (match_dup 2)
5816 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
5817 (const_int 16)))
5818 (set (match_dup 3)
5819 (ashiftrt:SI (match_dup 2) (const_int 16)))
5820 (set (match_operand:HI 0 "s_register_operand")
5821 (match_dup 4))]
5822 "TARGET_ARM"
5823 "
5824 operands[2] = gen_reg_rtx (SImode);
5825 operands[3] = gen_reg_rtx (SImode);
5826 operands[4] = gen_lowpart (HImode, operands[3]);
5827 "
5828 )
5829
5830 ;; Pattern to recognize the insn generated by the default case above.
5831 (define_insn "*movhi_insn_arch4"
5832 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
5833 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))]
5834 "TARGET_ARM
5835 && arm_arch4 && !TARGET_HARD_FLOAT
5836 && (register_operand (operands[0], HImode)
5837 || register_operand (operands[1], HImode))"
5838 "@
5839 mov%?\\t%0, %1\\t%@ movhi
5840 mvn%?\\t%0, #%B1\\t%@ movhi
5841 movw%?\\t%0, %L1\\t%@ movhi
5842 strh%?\\t%1, %0\\t%@ movhi
5843 ldrh%?\\t%0, %1\\t%@ movhi"
5844 [(set_attr "predicable" "yes")
5845 (set_attr "pool_range" "*,*,*,*,256")
5846 (set_attr "neg_pool_range" "*,*,*,*,244")
5847 (set_attr "arch" "*,*,v6t2,*,*")
5848 (set_attr_alternative "type"
5849 [(if_then_else (match_operand 1 "const_int_operand" "")
5850 (const_string "mov_imm" )
5851 (const_string "mov_reg"))
5852 (const_string "mvn_imm")
5853 (const_string "mov_imm")
5854 (const_string "store_4")
5855 (const_string "load_4")])]
5856 )
5857
5858 (define_insn "*movhi_bytes"
5859 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
5860 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))]
5861 "TARGET_ARM && !TARGET_HARD_FLOAT"
5862 "@
5863 mov%?\\t%0, %1\\t%@ movhi
5864 mov%?\\t%0, %1\\t%@ movhi
5865 mvn%?\\t%0, #%B1\\t%@ movhi"
5866 [(set_attr "predicable" "yes")
5867 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
5868 )
5869
5870 ;; We use a DImode scratch because we may occasionally need an additional
5871 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5872 ;; to take any notice of the "o" constraints on the reload_memory_operand operand.
5873 ;; The reload_in<m> and reload_out<m> patterns require special constraints
5874 ;; to be correctly handled in default_secondary_reload function.
5875 (define_expand "reload_outhi"
5876 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5877 (match_operand:HI 1 "s_register_operand" "r")
5878 (match_operand:DI 2 "s_register_operand" "=&l")])]
5879 "TARGET_EITHER"
5880 "if (TARGET_ARM)
5881 arm_reload_out_hi (operands);
5882 else
5883 thumb_reload_out_hi (operands);
5884 DONE;
5885 "
5886 )
5887
5888 (define_expand "reload_inhi"
5889 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5890 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5891 (match_operand:DI 2 "s_register_operand" "=&r")])]
5892 "TARGET_EITHER"
5893 "
5894 if (TARGET_ARM)
5895 arm_reload_in_hi (operands);
5896 else
5897 thumb_reload_out_hi (operands);
5898 DONE;
5899 ")
5900
5901 (define_expand "movqi"
5902 [(set (match_operand:QI 0 "general_operand")
5903 (match_operand:QI 1 "general_operand"))]
5904 "TARGET_EITHER"
5905 "
5906 /* Everything except mem = const or mem = mem can be done easily. */
5907
5908 if (can_create_pseudo_p ())
5909 {
5910 if (CONST_INT_P (operands[1]))
5911 {
5912 rtx reg = gen_reg_rtx (SImode);
5913
5914 /* For thumb we want an unsigned immediate, then we are more likely
5915 to be able to use a movs insn. */
5916 if (TARGET_THUMB)
5917 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
5918
5919 emit_insn (gen_movsi (reg, operands[1]));
5920 operands[1] = gen_lowpart (QImode, reg);
5921 }
5922
5923 if (TARGET_THUMB)
5924 {
5925 /* ??? We shouldn't really get invalid addresses here, but this can
5926 happen if we are passed a SP (never OK for HImode/QImode) or
5927 virtual register (also rejected as illegitimate for HImode/QImode)
5928 relative address. */
5929 /* ??? This should perhaps be fixed elsewhere, for instance, in
5930 fixup_stack_1, by checking for other kinds of invalid addresses,
5931 e.g. a bare reference to a virtual register. This may confuse the
5932 alpha though, which must handle this case differently. */
5933 if (MEM_P (operands[0])
5934 && !memory_address_p (GET_MODE (operands[0]),
5935 XEXP (operands[0], 0)))
5936 operands[0]
5937 = replace_equiv_address (operands[0],
5938 copy_to_reg (XEXP (operands[0], 0)));
5939 if (MEM_P (operands[1])
5940 && !memory_address_p (GET_MODE (operands[1]),
5941 XEXP (operands[1], 0)))
5942 operands[1]
5943 = replace_equiv_address (operands[1],
5944 copy_to_reg (XEXP (operands[1], 0)));
5945 }
5946
5947 if (MEM_P (operands[1]) && optimize > 0)
5948 {
5949 rtx reg = gen_reg_rtx (SImode);
5950
5951 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5952 operands[1] = gen_lowpart (QImode, reg);
5953 }
5954
5955 if (MEM_P (operands[0]))
5956 operands[1] = force_reg (QImode, operands[1]);
5957 }
5958 else if (TARGET_THUMB
5959 && CONST_INT_P (operands[1])
5960 && !satisfies_constraint_I (operands[1]))
5961 {
5962 /* Handle loading a large integer during reload. */
5963
5964 /* Writing a constant to memory needs a scratch, which should
5965 be handled with SECONDARY_RELOADs. */
5966 gcc_assert (REG_P (operands[0]));
5967
5968 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5969 emit_insn (gen_movsi (operands[0], operands[1]));
5970 DONE;
5971 }
5972 "
5973 )
5974
5975 (define_insn "*arm_movqi_insn"
5976 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
5977 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
5978 "TARGET_32BIT
5979 && ( register_operand (operands[0], QImode)
5980 || register_operand (operands[1], QImode))"
5981 "@
5982 mov%?\\t%0, %1
5983 mov%?\\t%0, %1
5984 mov%?\\t%0, %1
5985 mov%?\\t%0, %1
5986 mvn%?\\t%0, #%B1
5987 ldrb%?\\t%0, %1
5988 strb%?\\t%1, %0
5989 ldrb%?\\t%0, %1
5990 strb%?\\t%1, %0"
5991 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
5992 (set_attr "predicable" "yes")
5993 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
5994 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
5995 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
5996 )
5997
5998 ;; HFmode moves
5999 (define_expand "movhf"
6000 [(set (match_operand:HF 0 "general_operand")
6001 (match_operand:HF 1 "general_operand"))]
6002 "TARGET_EITHER"
6003 "
6004 gcc_checking_assert (aligned_operand (operands[0], HFmode));
6005 gcc_checking_assert (aligned_operand (operands[1], HFmode));
6006 if (TARGET_32BIT)
6007 {
6008 if (MEM_P (operands[0]))
6009 operands[1] = force_reg (HFmode, operands[1]);
6010 }
6011 else /* TARGET_THUMB1 */
6012 {
6013 if (can_create_pseudo_p ())
6014 {
6015 if (!REG_P (operands[0]))
6016 operands[1] = force_reg (HFmode, operands[1]);
6017 }
6018 }
6019 "
6020 )
6021
6022 (define_insn "*arm32_movhf"
6023 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
6024 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
6025 "TARGET_32BIT && !TARGET_HARD_FLOAT
6026 && ( s_register_operand (operands[0], HFmode)
6027 || s_register_operand (operands[1], HFmode))"
6028 "*
6029 switch (which_alternative)
6030 {
6031 case 0: /* ARM register from memory */
6032 return \"ldrh%?\\t%0, %1\\t%@ __fp16\";
6033 case 1: /* memory from ARM register */
6034 return \"strh%?\\t%1, %0\\t%@ __fp16\";
6035 case 2: /* ARM register from ARM register */
6036 return \"mov%?\\t%0, %1\\t%@ __fp16\";
6037 case 3: /* ARM register from constant */
6038 {
6039 long bits;
6040 rtx ops[4];
6041
6042 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
6043 HFmode);
6044 ops[0] = operands[0];
6045 ops[1] = GEN_INT (bits);
6046 ops[2] = GEN_INT (bits & 0xff00);
6047 ops[3] = GEN_INT (bits & 0x00ff);
6048
6049 if (arm_arch_thumb2)
6050 output_asm_insn (\"movw%?\\t%0, %1\", ops);
6051 else
6052 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6053 return \"\";
6054 }
6055 default:
6056 gcc_unreachable ();
6057 }
6058 "
6059 [(set_attr "conds" "unconditional")
6060 (set_attr "type" "load_4,store_4,mov_reg,multiple")
6061 (set_attr "length" "4,4,4,8")
6062 (set_attr "predicable" "yes")]
6063 )
6064
6065 (define_expand "movsf"
6066 [(set (match_operand:SF 0 "general_operand")
6067 (match_operand:SF 1 "general_operand"))]
6068 "TARGET_EITHER"
6069 "
6070 gcc_checking_assert (aligned_operand (operands[0], SFmode));
6071 gcc_checking_assert (aligned_operand (operands[1], SFmode));
6072 if (TARGET_32BIT)
6073 {
6074 if (MEM_P (operands[0]))
6075 operands[1] = force_reg (SFmode, operands[1]);
6076 }
6077 else /* TARGET_THUMB1 */
6078 {
6079 if (can_create_pseudo_p ())
6080 {
6081 if (!REG_P (operands[0]))
6082 operands[1] = force_reg (SFmode, operands[1]);
6083 }
6084 }
6085
6086 /* Cannot load it directly, generate a load with clobber so that it can be
6087 loaded via GPR with MOV / MOVT. */
6088 if (arm_disable_literal_pool
6089 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6090 && CONST_DOUBLE_P (operands[1])
6091 && TARGET_HARD_FLOAT
6092 && !vfp3_const_double_rtx (operands[1]))
6093 {
6094 rtx clobreg = gen_reg_rtx (SFmode);
6095 emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
6096 clobreg));
6097 DONE;
6098 }
6099 "
6100 )
6101
6102 ;; Transform a floating-point move of a constant to a core register into
6103 ;; an SImode operation.
6104 (define_split
6105 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6106 (match_operand:SF 1 "immediate_operand" ""))]
6107 "TARGET_EITHER
6108 && reload_completed
6109 && CONST_DOUBLE_P (operands[1])"
6110 [(set (match_dup 2) (match_dup 3))]
6111 "
6112 operands[2] = gen_lowpart (SImode, operands[0]);
6113 operands[3] = gen_lowpart (SImode, operands[1]);
6114 if (operands[2] == 0 || operands[3] == 0)
6115 FAIL;
6116 "
6117 )
6118
6119 (define_insn "*arm_movsf_soft_insn"
6120 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6121 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6122 "TARGET_32BIT
6123 && TARGET_SOFT_FLOAT
6124 && (!MEM_P (operands[0])
6125 || register_operand (operands[1], SFmode))"
6126 {
6127 switch (which_alternative)
6128 {
6129 case 0: return \"mov%?\\t%0, %1\";
6130 case 1:
6131 /* Cannot load it directly, split to load it via MOV / MOVT. */
6132 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6133 return \"#\";
6134 return \"ldr%?\\t%0, %1\\t%@ float\";
6135 case 2: return \"str%?\\t%1, %0\\t%@ float\";
6136 default: gcc_unreachable ();
6137 }
6138 }
6139 [(set_attr "predicable" "yes")
6140 (set_attr "type" "mov_reg,load_4,store_4")
6141 (set_attr "arm_pool_range" "*,4096,*")
6142 (set_attr "thumb2_pool_range" "*,4094,*")
6143 (set_attr "arm_neg_pool_range" "*,4084,*")
6144 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6145 )
6146
6147 ;; Splitter for the above.
6148 (define_split
6149 [(set (match_operand:SF 0 "s_register_operand")
6150 (match_operand:SF 1 "const_double_operand"))]
6151 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6152 [(const_int 0)]
6153 {
6154 long buf;
6155 real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
6156 rtx cst = gen_int_mode (buf, SImode);
6157 emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
6158 DONE;
6159 }
6160 )
6161
6162 (define_expand "movdf"
6163 [(set (match_operand:DF 0 "general_operand")
6164 (match_operand:DF 1 "general_operand"))]
6165 "TARGET_EITHER"
6166 "
6167 gcc_checking_assert (aligned_operand (operands[0], DFmode));
6168 gcc_checking_assert (aligned_operand (operands[1], DFmode));
6169 if (TARGET_32BIT)
6170 {
6171 if (MEM_P (operands[0]))
6172 operands[1] = force_reg (DFmode, operands[1]);
6173 }
6174 else /* TARGET_THUMB */
6175 {
6176 if (can_create_pseudo_p ())
6177 {
6178 if (!REG_P (operands[0]))
6179 operands[1] = force_reg (DFmode, operands[1]);
6180 }
6181 }
6182
6183 /* Cannot load it directly, generate a load with clobber so that it can be
6184 loaded via GPR with MOV / MOVT. */
6185 if (arm_disable_literal_pool
6186 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6187 && CONSTANT_P (operands[1])
6188 && TARGET_HARD_FLOAT
6189 && !arm_const_double_rtx (operands[1])
6190 && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
6191 {
6192 rtx clobreg = gen_reg_rtx (DFmode);
6193 emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
6194 clobreg));
6195 DONE;
6196 }
6197 "
6198 )
6199
6200 ;; Reloading a df mode value stored in integer regs to memory can require a
6201 ;; scratch reg.
6202 ;; Another reload_out<m> pattern that requires special constraints.
6203 (define_expand "reload_outdf"
6204 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6205 (match_operand:DF 1 "s_register_operand" "r")
6206 (match_operand:SI 2 "s_register_operand" "=&r")]
6207 "TARGET_THUMB2"
6208 "
6209 {
6210 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6211
6212 if (code == REG)
6213 operands[2] = XEXP (operands[0], 0);
6214 else if (code == POST_INC || code == PRE_DEC)
6215 {
6216 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6217 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6218 emit_insn (gen_movdi (operands[0], operands[1]));
6219 DONE;
6220 }
6221 else if (code == PRE_INC)
6222 {
6223 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6224
6225 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6226 operands[2] = reg;
6227 }
6228 else if (code == POST_DEC)
6229 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6230 else
6231 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6232 XEXP (XEXP (operands[0], 0), 1)));
6233
6234 emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
6235 operands[1]));
6236
6237 if (code == POST_DEC)
6238 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6239
6240 DONE;
6241 }"
6242 )
6243
6244 (define_insn "*movdf_soft_insn"
6245 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6246 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6247 "TARGET_32BIT && TARGET_SOFT_FLOAT
6248 && ( register_operand (operands[0], DFmode)
6249 || register_operand (operands[1], DFmode))"
6250 "*
6251 switch (which_alternative)
6252 {
6253 case 0:
6254 case 1:
6255 case 2:
6256 return \"#\";
6257 case 3:
6258 /* Cannot load it directly, split to load it via MOV / MOVT. */
6259 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6260 return \"#\";
6261 /* Fall through. */
6262 default:
6263 return output_move_double (operands, true, NULL);
6264 }
6265 "
6266 [(set_attr "length" "8,12,16,8,8")
6267 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6268 (set_attr "arm_pool_range" "*,*,*,1020,*")
6269 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
6270 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6271 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6272 )
6273
6274 ;; Splitter for the above.
6275 (define_split
6276 [(set (match_operand:DF 0 "s_register_operand")
6277 (match_operand:DF 1 "const_double_operand"))]
6278 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6279 [(const_int 0)]
6280 {
6281 long buf[2];
6282 int order = BYTES_BIG_ENDIAN ? 1 : 0;
6283 real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
6284 unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
6285 ival |= (zext_hwi (buf[1 - order], 32) << 32);
6286 rtx cst = gen_int_mode (ival, DImode);
6287 emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
6288 DONE;
6289 }
6290 )
6291 \f
6292
6293 ;; load- and store-multiple insns
6294 ;; The ARM can load/store any set of registers, provided that they are in
6295 ;; ascending order, but these expanders assume a contiguous set.
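;; For example (illustrative), a four-word load into r4-r7 from the address
;; held in r0 is emitted as
;;     ldmia   r0, {r4-r7}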
6296
6297 (define_expand "load_multiple"
6298 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6299 (match_operand:SI 1 "" ""))
6300 (use (match_operand:SI 2 "" ""))])]
6301 "TARGET_32BIT"
6302 {
6303 HOST_WIDE_INT offset = 0;
6304
6305 /* Support only fixed point registers. */
6306 if (!CONST_INT_P (operands[2])
6307 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6308 || INTVAL (operands[2]) < 2
6309 || !MEM_P (operands[1])
6310 || !REG_P (operands[0])
6311 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6312 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6313 FAIL;
6314
6315 operands[3]
6316 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6317 INTVAL (operands[2]),
6318 force_reg (SImode, XEXP (operands[1], 0)),
6319 FALSE, operands[1], &offset);
6320 })
6321
6322 (define_expand "store_multiple"
6323 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6324 (match_operand:SI 1 "" ""))
6325 (use (match_operand:SI 2 "" ""))])]
6326 "TARGET_32BIT"
6327 {
6328 HOST_WIDE_INT offset = 0;
6329
6330 /* Support only fixed point registers. */
6331 if (!CONST_INT_P (operands[2])
6332 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6333 || INTVAL (operands[2]) < 2
6334 || !REG_P (operands[1])
6335 || !MEM_P (operands[0])
6336 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6337 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6338 FAIL;
6339
6340 operands[3]
6341 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6342 INTVAL (operands[2]),
6343 force_reg (SImode, XEXP (operands[0], 0)),
6344 FALSE, operands[0], &offset);
6345 })
6346
6347
6348 (define_expand "setmemsi"
6349 [(match_operand:BLK 0 "general_operand")
6350 (match_operand:SI 1 "const_int_operand")
6351 (match_operand:SI 2 "const_int_operand")
6352 (match_operand:SI 3 "const_int_operand")]
6353 "TARGET_32BIT"
6354 {
6355 if (arm_gen_setmem (operands))
6356 DONE;
6357
6358 FAIL;
6359 })
6360
6361
6362 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6363 ;; We could let this apply to smaller blocks as well, but it clobbers so
6364 ;; many registers that there is probably a better way for those.
6365
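;; Illustratively (register choice is an example only), the aligned copy is
;; done in multi-register chunks such as
;;     ldmia   r1!, {r3, r4, r5, r6}
;;     stmia   r0!, {r3, r4, r5, r6}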
6366 (define_expand "cpymemqi"
6367 [(match_operand:BLK 0 "general_operand")
6368 (match_operand:BLK 1 "general_operand")
6369 (match_operand:SI 2 "const_int_operand")
6370 (match_operand:SI 3 "const_int_operand")]
6371 ""
6372 "
6373 if (TARGET_32BIT)
6374 {
6375 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
6376 && !optimize_function_for_size_p (cfun))
6377 {
6378 if (gen_cpymem_ldrd_strd (operands))
6379 DONE;
6380 FAIL;
6381 }
6382
6383 if (arm_gen_cpymemqi (operands))
6384 DONE;
6385 FAIL;
6386 }
6387 else /* TARGET_THUMB1 */
6388 {
6389 if ( INTVAL (operands[3]) != 4
6390 || INTVAL (operands[2]) > 48)
6391 FAIL;
6392
6393 thumb_expand_cpymemqi (operands);
6394 DONE;
6395 }
6396 "
6397 )
6398 \f
6399
6400 ;; Compare & branch insns
6401 ;; The range calculations work as follows:
6402 ;; For forward branches, the address calculation returns the address of
6403 ;; the next instruction. This is 2 beyond the branch instruction.
6404 ;; For backward branches, the address calculation returns the address of
6405 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6406 ;; instruction for the shortest sequence, and 4 before the branch instruction
6407 ;; if we have to jump around an unconditional branch.
6408 ;; To the basic branch range the PC offset must be added (this is +4).
6409 ;; So for forward branches we have
6410 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6411 ;; And for backward branches we have
6412 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6413 ;;
6414 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6415 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
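;; Working one case through (restating the numbers above): for a forward
;; 'b<cond>' the range is 254 - 2 + 4 = 256, and for a backward one it is
;; -256 - (-2) + 4 = -250, matching the quoted (-250 -> 256).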
6416
6417 (define_expand "cbranchsi4"
6418 [(set (pc) (if_then_else
6419 (match_operator 0 "expandable_comparison_operator"
6420 [(match_operand:SI 1 "s_register_operand")
6421 (match_operand:SI 2 "nonmemory_operand")])
6422 (label_ref (match_operand 3 "" ""))
6423 (pc)))]
6424 "TARGET_EITHER"
6425 "
6426 if (!TARGET_THUMB1)
6427 {
6428 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6429 FAIL;
6430 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6431 operands[3]));
6432 DONE;
6433 }
6434 if (thumb1_cmpneg_operand (operands[2], SImode))
6435 {
6436 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6437 operands[3], operands[0]));
6438 DONE;
6439 }
6440 if (!thumb1_cmp_operand (operands[2], SImode))
6441 operands[2] = force_reg (SImode, operands[2]);
6442 ")
6443
6444 (define_expand "cbranchsf4"
6445 [(set (pc) (if_then_else
6446 (match_operator 0 "expandable_comparison_operator"
6447 [(match_operand:SF 1 "s_register_operand")
6448 (match_operand:SF 2 "vfp_compare_operand")])
6449 (label_ref (match_operand 3 "" ""))
6450 (pc)))]
6451 "TARGET_32BIT && TARGET_HARD_FLOAT"
6452 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6453 operands[3])); DONE;"
6454 )
6455
6456 (define_expand "cbranchdf4"
6457 [(set (pc) (if_then_else
6458 (match_operator 0 "expandable_comparison_operator"
6459 [(match_operand:DF 1 "s_register_operand")
6460 (match_operand:DF 2 "vfp_compare_operand")])
6461 (label_ref (match_operand 3 "" ""))
6462 (pc)))]
6463 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6464 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6465 operands[3])); DONE;"
6466 )
6467
6468 (define_expand "cbranchdi4"
6469 [(set (pc) (if_then_else
6470 (match_operator 0 "expandable_comparison_operator"
6471 [(match_operand:DI 1 "s_register_operand")
6472 (match_operand:DI 2 "reg_or_int_operand")])
6473 (label_ref (match_operand 3 "" ""))
6474 (pc)))]
6475 "TARGET_32BIT"
6476 "{
6477 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6478 FAIL;
6479 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6480 operands[3]));
6481 DONE;
6482 }"
6483 )
6484
6485 ;; Comparison and test insns
6486
6487 (define_insn "*arm_cmpsi_insn"
6488 [(set (reg:CC CC_REGNUM)
6489 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
6490 (match_operand:SI 1 "arm_add_operand" "Py,r,r,I,L")))]
6491 "TARGET_32BIT"
6492 "@
6493 cmp%?\\t%0, %1
6494 cmp%?\\t%0, %1
6495 cmp%?\\t%0, %1
6496 cmp%?\\t%0, %1
6497 cmn%?\\t%0, #%n1"
6498 [(set_attr "conds" "set")
6499 (set_attr "arch" "t2,t2,any,any,any")
6500 (set_attr "length" "2,2,4,4,4")
6501 (set_attr "predicable" "yes")
6502 (set_attr "predicable_short_it" "yes,yes,yes,no,no")
6503 (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
6504 )
6505
6506 (define_insn "*cmpsi_shiftsi"
6507 [(set (reg:CC CC_REGNUM)
6508 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r,r")
6509 (match_operator:SI 3 "shift_operator"
6510 [(match_operand:SI 1 "s_register_operand" "r,r,r")
6511 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])))]
6512 "TARGET_32BIT"
6513 "cmp\\t%0, %1%S3"
6514 [(set_attr "conds" "set")
6515 (set_attr "shift" "1")
6516 (set_attr "arch" "32,a,a")
6517 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
6518
6519 (define_insn "*cmpsi_shiftsi_swp"
6520 [(set (reg:CC_SWP CC_REGNUM)
6521 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
6522 [(match_operand:SI 1 "s_register_operand" "r,r,r")
6523 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])
6524 (match_operand:SI 0 "s_register_operand" "r,r,r")))]
6525 "TARGET_32BIT"
6526 "cmp%?\\t%0, %1%S3"
6527 [(set_attr "conds" "set")
6528 (set_attr "shift" "1")
6529 (set_attr "arch" "32,a,a")
6530 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
6531
6532 (define_insn "*arm_cmpsi_negshiftsi_si"
6533 [(set (reg:CC_Z CC_REGNUM)
6534 (compare:CC_Z
6535 (neg:SI (match_operator:SI 1 "shift_operator"
6536 [(match_operand:SI 2 "s_register_operand" "r")
6537 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
6538 (match_operand:SI 0 "s_register_operand" "r")))]
6539 "TARGET_ARM"
6540 "cmn%?\\t%0, %2%S1"
6541 [(set_attr "conds" "set")
6542 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
6543 (const_string "alus_shift_imm")
6544 (const_string "alus_shift_reg")))
6545 (set_attr "predicable" "yes")]
6546 )
6547
6548 ;; DImode comparisons. The generic code generates branches that
6549 ;; if-conversion cannot reduce to a conditional compare, so we do
6550 ;; that directly.
6551
6552 (define_insn "*arm_cmpdi_insn"
6553 [(set (reg:CC_NCV CC_REGNUM)
6554 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
6555 (match_operand:DI 1 "arm_di_operand" "rDi")))
6556 (clobber (match_scratch:SI 2 "=r"))]
6557 "TARGET_32BIT"
6558 "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
6559 [(set_attr "conds" "set")
6560 (set_attr "length" "8")
6561 (set_attr "type" "multiple")]
6562 )
6563
6564 (define_insn_and_split "*arm_cmpdi_unsigned"
6565 [(set (reg:CC_CZ CC_REGNUM)
6566 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "l,r,r,r")
6567 (match_operand:DI 1 "arm_di_operand" "Py,r,Di,rDi")))]
6568
6569 "TARGET_32BIT"
6570 "#" ; "cmp\\t%R0, %R1\;it eq\;cmpeq\\t%Q0, %Q1"
6571 "&& reload_completed"
6572 [(set (reg:CC CC_REGNUM)
6573 (compare:CC (match_dup 2) (match_dup 3)))
6574 (cond_exec (eq:SI (reg:CC CC_REGNUM) (const_int 0))
6575 (set (reg:CC CC_REGNUM)
6576 (compare:CC (match_dup 0) (match_dup 1))))]
6577 {
6578 operands[2] = gen_highpart (SImode, operands[0]);
6579 operands[0] = gen_lowpart (SImode, operands[0]);
6580 if (CONST_INT_P (operands[1]))
6581 operands[3] = gen_highpart_mode (SImode, DImode, operands[1]);
6582 else
6583 operands[3] = gen_highpart (SImode, operands[1]);
6584 operands[1] = gen_lowpart (SImode, operands[1]);
6585 }
6586 [(set_attr "conds" "set")
6587 (set_attr "enabled_for_short_it" "yes,yes,no,*")
6588 (set_attr "arch" "t2,t2,t2,a")
6589 (set_attr "length" "6,6,10,8")
6590 (set_attr "type" "multiple")]
6591 )
6592
6593 ; This insn allows redundant compares to be removed by cse; nothing should
6594 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
6595 ; is deleted later on. The match_dup will match the mode here, so that
6596 ; mode changes of the condition codes aren't lost by this even though we don't
6597 ; specify what they are.
6598
6599 (define_insn "*deleted_compare"
6600 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
6601 "TARGET_32BIT"
6602 "\\t%@ deleted compare"
6603 [(set_attr "conds" "set")
6604 (set_attr "length" "0")
6605 (set_attr "type" "no_insn")]
6606 )
6607
6608 \f
6609 ;; Conditional branch insns
6610
6611 (define_expand "cbranch_cc"
6612 [(set (pc)
6613 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
6614 (match_operand 2 "" "")])
6615 (label_ref (match_operand 3 "" ""))
6616 (pc)))]
6617 "TARGET_32BIT"
6618 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
6619 operands[1], operands[2], NULL_RTX);
6620 operands[2] = const0_rtx;"
6621 )
6622
6623 ;;
6624 ;; Patterns to match conditional branch insns.
6625 ;;
6626
6627 (define_insn "arm_cond_branch"
6628 [(set (pc)
6629 (if_then_else (match_operator 1 "arm_comparison_operator"
6630 [(match_operand 2 "cc_register" "") (const_int 0)])
6631 (label_ref (match_operand 0 "" ""))
6632 (pc)))]
6633 "TARGET_32BIT"
6634 "*
6635 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
6636 {
6637 arm_ccfsm_state += 2;
6638 return \"\";
6639 }
6640 return \"b%d1\\t%l0\";
6641 "
6642 [(set_attr "conds" "use")
6643 (set_attr "type" "branch")
6644 (set (attr "length")
6645 (if_then_else
6646 (and (match_test "TARGET_THUMB2")
6647 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
6648 (le (minus (match_dup 0) (pc)) (const_int 256))))
6649 (const_int 2)
6650 (const_int 4)))]
6651 )
6652
6653 (define_insn "*arm_cond_branch_reversed"
6654 [(set (pc)
6655 (if_then_else (match_operator 1 "arm_comparison_operator"
6656 [(match_operand 2 "cc_register" "") (const_int 0)])
6657 (pc)
6658 (label_ref (match_operand 0 "" ""))))]
6659 "TARGET_32BIT"
6660 "*
6661 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
6662 {
6663 arm_ccfsm_state += 2;
6664 return \"\";
6665 }
6666 return \"b%D1\\t%l0\";
6667 "
6668 [(set_attr "conds" "use")
6669 (set_attr "type" "branch")
6670 (set (attr "length")
6671 (if_then_else
6672 (and (match_test "TARGET_THUMB2")
6673 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
6674 (le (minus (match_dup 0) (pc)) (const_int 256))))
6675 (const_int 2)
6676 (const_int 4)))]
6677 )
6678
6679 \f
6680
6681 ; scc insns
6682
6683 (define_expand "cstore_cc"
6684 [(set (match_operand:SI 0 "s_register_operand")
6685 (match_operator:SI 1 "" [(match_operand 2 "" "")
6686 (match_operand 3 "" "")]))]
6687 "TARGET_32BIT"
6688 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
6689 operands[2], operands[3], NULL_RTX);
6690 operands[3] = const0_rtx;"
6691 )
6692
6693 (define_insn_and_split "*mov_scc"
6694 [(set (match_operand:SI 0 "s_register_operand" "=r")
6695 (match_operator:SI 1 "arm_comparison_operator_mode"
6696 [(match_operand 2 "cc_register" "") (const_int 0)]))]
6697 "TARGET_ARM"
6698 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
6699 "TARGET_ARM"
6700 [(set (match_dup 0)
6701 (if_then_else:SI (match_dup 1)
6702 (const_int 1)
6703 (const_int 0)))]
6704 ""
6705 [(set_attr "conds" "use")
6706 (set_attr "length" "8")
6707 (set_attr "type" "multiple")]
6708 )
6709
6710 (define_insn "*negscc_borrow"
6711 [(set (match_operand:SI 0 "s_register_operand" "=r")
6712 (neg:SI (match_operand:SI 1 "arm_borrow_operation" "")))]
6713 "TARGET_32BIT"
6714 "sbc\\t%0, %0, %0"
6715 [(set_attr "conds" "use")
6716 (set_attr "length" "4")
6717 (set_attr "type" "adc_reg")]
6718 )
6719
6720 (define_insn_and_split "*mov_negscc"
6721 [(set (match_operand:SI 0 "s_register_operand" "=r")
6722 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
6723 [(match_operand 2 "cc_register" "") (const_int 0)])))]
6724 "TARGET_ARM && !arm_borrow_operation (operands[1], SImode)"
6725 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
6726 "&& true"
6727 [(set (match_dup 0)
6728 (if_then_else:SI (match_dup 1)
6729 (match_dup 3)
6730 (const_int 0)))]
6731 {
6732 operands[3] = GEN_INT (~0);
6733 }
6734 [(set_attr "conds" "use")
6735 (set_attr "length" "8")
6736 (set_attr "type" "multiple")]
6737 )
6738
6739 (define_insn_and_split "*mov_notscc"
6740 [(set (match_operand:SI 0 "s_register_operand" "=r")
6741 (not:SI (match_operator:SI 1 "arm_comparison_operator"
6742 [(match_operand 2 "cc_register" "") (const_int 0)])))]
6743 "TARGET_ARM"
6744 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
6745 "TARGET_ARM"
6746 [(set (match_dup 0)
6747 (if_then_else:SI (match_dup 1)
6748 (match_dup 3)
6749 (match_dup 4)))]
6750 {
6751 operands[3] = GEN_INT (~1);
6752 operands[4] = GEN_INT (~0);
6753 }
6754 [(set_attr "conds" "use")
6755 (set_attr "length" "8")
6756 (set_attr "type" "multiple")]
6757 )
6758
6759 (define_expand "cstoresi4"
6760 [(set (match_operand:SI 0 "s_register_operand")
6761 (match_operator:SI 1 "expandable_comparison_operator"
6762 [(match_operand:SI 2 "s_register_operand")
6763 (match_operand:SI 3 "reg_or_int_operand")]))]
6764 "TARGET_32BIT || TARGET_THUMB1"
6765 "{
6766 rtx op3, scratch, scratch2;
6767
6768 if (!TARGET_THUMB1)
6769 {
6770 if (!arm_add_operand (operands[3], SImode))
6771 operands[3] = force_reg (SImode, operands[3]);
6772 emit_insn (gen_cstore_cc (operands[0], operands[1],
6773 operands[2], operands[3]));
6774 DONE;
6775 }
6776
6777 if (operands[3] == const0_rtx)
6778 {
6779 switch (GET_CODE (operands[1]))
6780 {
6781 case EQ:
6782 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
6783 break;
6784
6785 case NE:
6786 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
6787 break;
6788
6789 case LE:
6790 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
6791 NULL_RTX, 0, OPTAB_WIDEN);
6792 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
6793 NULL_RTX, 0, OPTAB_WIDEN);
6794 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
6795 operands[0], 1, OPTAB_WIDEN);
6796 break;
6797
6798 case GE:
6799 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
6800 NULL_RTX, 1);
6801 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
6802 operands[0], 1, OPTAB_WIDEN);
6803 break;
6804
6805 case GT:
6806 scratch = expand_binop (SImode, ashr_optab, operands[2],
6807 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
6808 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
6809 NULL_RTX, 0, OPTAB_WIDEN);
6810 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
6811 0, OPTAB_WIDEN);
6812 break;
6813
6814 /* LT is handled by generic code. No need for unsigned with 0. */
6815 default:
6816 FAIL;
6817 }
6818 DONE;
6819 }
6820
6821 switch (GET_CODE (operands[1]))
6822 {
6823 case EQ:
6824 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
6825 NULL_RTX, 0, OPTAB_WIDEN);
6826 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
6827 break;
6828
6829 case NE:
6830 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
6831 NULL_RTX, 0, OPTAB_WIDEN);
6832 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
6833 break;
6834
6835 case LE:
6836 op3 = force_reg (SImode, operands[3]);
6837
6838 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
6839 NULL_RTX, 1, OPTAB_WIDEN);
6840 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
6841 NULL_RTX, 0, OPTAB_WIDEN);
6842 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
6843 op3, operands[2]));
6844 break;
6845
6846 case GE:
6847 op3 = operands[3];
6848 if (!thumb1_cmp_operand (op3, SImode))
6849 op3 = force_reg (SImode, op3);
6850 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
6851 NULL_RTX, 0, OPTAB_WIDEN);
6852 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
6853 NULL_RTX, 1, OPTAB_WIDEN);
6854 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
6855 operands[2], op3));
6856 break;
6857
6858 case LEU:
6859 op3 = force_reg (SImode, operands[3]);
6860 scratch = force_reg (SImode, const0_rtx);
6861 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
6862 op3, operands[2]));
6863 break;
6864
6865 case GEU:
6866 op3 = operands[3];
6867 if (!thumb1_cmp_operand (op3, SImode))
6868 op3 = force_reg (SImode, op3);
6869 scratch = force_reg (SImode, const0_rtx);
6870 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
6871 operands[2], op3));
6872 break;
6873
6874 case LTU:
6875 op3 = operands[3];
6876 if (!thumb1_cmp_operand (op3, SImode))
6877 op3 = force_reg (SImode, op3);
6878 scratch = gen_reg_rtx (SImode);
6879 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
6880 break;
6881
6882 case GTU:
6883 op3 = force_reg (SImode, operands[3]);
6884 scratch = gen_reg_rtx (SImode);
6885 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
6886 break;
6887
6888 /* No good sequences for GT, LT. */
6889 default:
6890 FAIL;
6891 }
6892 DONE;
6893 }")
6894
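;; The Thumb-1 paths above avoid conditional execution by building the 0/1
;; result out of sign bits.  As a rough illustration only (not compiler code,
;; and assuming two's-complement wrap-around and arithmetic right shifts,
;; which is what the generated instructions provide), the comparisons against
;; zero reduce to:
;;
;;   unsigned le0 (int x) { return (unsigned) (x | (x - 1)) >> 31; }
;;   unsigned ge0 (int x) { return (unsigned) ~x >> 31; }
;;   unsigned gt0 (int x) { return (unsigned) ((x >> 31) - x) >> 31; }
;;
;; i.e. x <= 0 iff x or x - 1 has the sign bit set, x >= 0 iff ~x has the
;; sign bit set, and x > 0 iff (x >> 31) - x is negative.
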
6895 (define_expand "cstorehf4"
6896 [(set (match_operand:SI 0 "s_register_operand")
6897 (match_operator:SI 1 "expandable_comparison_operator"
6898 [(match_operand:HF 2 "s_register_operand")
6899 (match_operand:HF 3 "vfp_compare_operand")]))]
6900 "TARGET_VFP_FP16INST"
6901 {
6902 if (!arm_validize_comparison (&operands[1],
6903 &operands[2],
6904 &operands[3]))
6905 FAIL;
6906
6907 emit_insn (gen_cstore_cc (operands[0], operands[1],
6908 operands[2], operands[3]));
6909 DONE;
6910 }
6911 )
6912
6913 (define_expand "cstoresf4"
6914 [(set (match_operand:SI 0 "s_register_operand")
6915 (match_operator:SI 1 "expandable_comparison_operator"
6916 [(match_operand:SF 2 "s_register_operand")
6917 (match_operand:SF 3 "vfp_compare_operand")]))]
6918 "TARGET_32BIT && TARGET_HARD_FLOAT"
6919 "emit_insn (gen_cstore_cc (operands[0], operands[1],
6920 operands[2], operands[3])); DONE;"
6921 )
6922
6923 (define_expand "cstoredf4"
6924 [(set (match_operand:SI 0 "s_register_operand")
6925 (match_operator:SI 1 "expandable_comparison_operator"
6926 [(match_operand:DF 2 "s_register_operand")
6927 (match_operand:DF 3 "vfp_compare_operand")]))]
6928 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6929 "emit_insn (gen_cstore_cc (operands[0], operands[1],
6930 operands[2], operands[3])); DONE;"
6931 )
6932
6933 (define_expand "cstoredi4"
6934 [(set (match_operand:SI 0 "s_register_operand")
6935 (match_operator:SI 1 "expandable_comparison_operator"
6936 [(match_operand:DI 2 "s_register_operand")
6937 (match_operand:DI 3 "reg_or_int_operand")]))]
6938 "TARGET_32BIT"
6939 "{
6940 if (!arm_validize_comparison (&operands[1],
6941 &operands[2],
6942 &operands[3]))
6943 FAIL;
6944 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
6945 operands[3]));
6946 DONE;
6947 }"
6948 )
6949
6950 \f
6951 ;; Conditional move insns
6952
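;; A source-level conditional such as "x = a < b ? c : d" reaches this
;; expander as an if_then_else on the comparison; the expander materialises
;; the compare into the CC register and the insns below then emit predicated
;; moves, roughly "cmp a, b; movlt x, c; movge x, d" (an illustrative
;; sequence, not a literal template from this file).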
6953 (define_expand "movsicc"
6954 [(set (match_operand:SI 0 "s_register_operand")
6955 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
6956 (match_operand:SI 2 "arm_not_operand")
6957 (match_operand:SI 3 "arm_not_operand")))]
6958 "TARGET_32BIT"
6959 "
6960 {
6961 enum rtx_code code;
6962 rtx ccreg;
6963
6964 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6965 &XEXP (operands[1], 1)))
6966 FAIL;
6967
6968 code = GET_CODE (operands[1]);
6969 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6970 XEXP (operands[1], 1), NULL_RTX);
6971 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6972 }"
6973 )
6974
6975 (define_expand "movhfcc"
6976 [(set (match_operand:HF 0 "s_register_operand")
6977 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
6978 (match_operand:HF 2 "s_register_operand")
6979 (match_operand:HF 3 "s_register_operand")))]
6980 "TARGET_VFP_FP16INST"
6981 "
6982 {
6983 enum rtx_code code = GET_CODE (operands[1]);
6984 rtx ccreg;
6985
6986 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6987 &XEXP (operands[1], 1)))
6988 FAIL;
6989
6990 code = GET_CODE (operands[1]);
6991 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6992 XEXP (operands[1], 1), NULL_RTX);
6993 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6994 }"
6995 )
6996
6997 (define_expand "movsfcc"
6998 [(set (match_operand:SF 0 "s_register_operand")
6999 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
7000 (match_operand:SF 2 "s_register_operand")
7001 (match_operand:SF 3 "s_register_operand")))]
7002 "TARGET_32BIT && TARGET_HARD_FLOAT"
7003 "
7004 {
7005 enum rtx_code code = GET_CODE (operands[1]);
7006 rtx ccreg;
7007
7008 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7009 &XEXP (operands[1], 1)))
7010 FAIL;
7011
7012 code = GET_CODE (operands[1]);
7013 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7014 XEXP (operands[1], 1), NULL_RTX);
7015 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7016 }"
7017 )
7018
7019 (define_expand "movdfcc"
7020 [(set (match_operand:DF 0 "s_register_operand")
7021 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
7022 (match_operand:DF 2 "s_register_operand")
7023 (match_operand:DF 3 "s_register_operand")))]
7024 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
7025 "
7026 {
7027 enum rtx_code code = GET_CODE (operands[1]);
7028 rtx ccreg;
7029
7030 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7031 &XEXP (operands[1], 1)))
7032 FAIL;
7033 code = GET_CODE (operands[1]);
7034 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7035 XEXP (operands[1], 1), NULL_RTX);
7036 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7037 }"
7038 )
7039
7040 (define_insn "*cmov<mode>"
7041 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
7042 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
7043 [(match_operand 2 "cc_register" "") (const_int 0)])
7044 (match_operand:SDF 3 "s_register_operand"
7045 "<F_constraint>")
7046 (match_operand:SDF 4 "s_register_operand"
7047 "<F_constraint>")))]
7048 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
7049 "*
7050 {
7051 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7052 switch (code)
7053 {
7054 case ARM_GE:
7055 case ARM_GT:
7056 case ARM_EQ:
7057 case ARM_VS:
7058 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
7059 case ARM_LT:
7060 case ARM_LE:
7061 case ARM_NE:
7062 case ARM_VC:
7063 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
7064 default:
7065 gcc_unreachable ();
7066 }
7067 return \"\";
7068 }"
7069 [(set_attr "conds" "use")
7070 (set_attr "type" "fcsel")]
7071 )
7072
7073 (define_insn "*cmovhf"
7074 [(set (match_operand:HF 0 "s_register_operand" "=t")
7075 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
7076 [(match_operand 2 "cc_register" "") (const_int 0)])
7077 (match_operand:HF 3 "s_register_operand" "t")
7078 (match_operand:HF 4 "s_register_operand" "t")))]
7079 "TARGET_VFP_FP16INST"
7080 "*
7081 {
7082 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7083 switch (code)
7084 {
7085 case ARM_GE:
7086 case ARM_GT:
7087 case ARM_EQ:
7088 case ARM_VS:
7089 return \"vsel%d1.f16\\t%0, %3, %4\";
7090 case ARM_LT:
7091 case ARM_LE:
7092 case ARM_NE:
7093 case ARM_VC:
7094 return \"vsel%D1.f16\\t%0, %4, %3\";
7095 default:
7096 gcc_unreachable ();
7097 }
7098 return \"\";
7099 }"
7100 [(set_attr "conds" "use")
7101 (set_attr "type" "fcsel")]
7102 )
7103
7104 (define_insn_and_split "*movsicc_insn"
7105 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7106 (if_then_else:SI
7107 (match_operator 3 "arm_comparison_operator"
7108 [(match_operand 4 "cc_register" "") (const_int 0)])
7109 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7110 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7111 "TARGET_ARM"
7112 "@
7113 mov%D3\\t%0, %2
7114 mvn%D3\\t%0, #%B2
7115 mov%d3\\t%0, %1
7116 mvn%d3\\t%0, #%B1
7117 #
7118 #
7119 #
7120 #"
7121 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7122 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7123 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7124 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7125 "&& reload_completed"
7126 [(const_int 0)]
7127 {
7128 enum rtx_code rev_code;
7129 machine_mode mode;
7130 rtx rev_cond;
7131
7132 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7133 operands[3],
7134 gen_rtx_SET (operands[0], operands[1])));
7135
7136 rev_code = GET_CODE (operands[3]);
7137 mode = GET_MODE (operands[4]);
7138 if (mode == CCFPmode || mode == CCFPEmode)
7139 rev_code = reverse_condition_maybe_unordered (rev_code);
7140 else
7141 rev_code = reverse_condition (rev_code);
7142
7143 rev_cond = gen_rtx_fmt_ee (rev_code,
7144 VOIDmode,
7145 operands[4],
7146 const0_rtx);
7147 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7148 rev_cond,
7149 gen_rtx_SET (operands[0], operands[2])));
7150 DONE;
7151 }
7152 [(set_attr "length" "4,4,4,4,8,8,8,8")
7153 (set_attr "conds" "use")
7154 (set_attr_alternative "type"
7155 [(if_then_else (match_operand 2 "const_int_operand" "")
7156 (const_string "mov_imm")
7157 (const_string "mov_reg"))
7158 (const_string "mvn_imm")
7159 (if_then_else (match_operand 1 "const_int_operand" "")
7160 (const_string "mov_imm")
7161 (const_string "mov_reg"))
7162 (const_string "mvn_imm")
7163 (const_string "multiple")
7164 (const_string "multiple")
7165 (const_string "multiple")
7166 (const_string "multiple")])]
7167 )
7168
7169 (define_insn "*movsfcc_soft_insn"
7170 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7171 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7172 [(match_operand 4 "cc_register" "") (const_int 0)])
7173 (match_operand:SF 1 "s_register_operand" "0,r")
7174 (match_operand:SF 2 "s_register_operand" "r,0")))]
7175 "TARGET_ARM && TARGET_SOFT_FLOAT"
7176 "@
7177 mov%D3\\t%0, %2
7178 mov%d3\\t%0, %1"
7179 [(set_attr "conds" "use")
7180 (set_attr "type" "mov_reg")]
7181 )
7182
7183 \f
7184 ;; Jump and linkage insns
7185
7186 (define_expand "jump"
7187 [(set (pc)
7188 (label_ref (match_operand 0 "" "")))]
7189 "TARGET_EITHER"
7190 ""
7191 )
7192
7193 (define_insn "*arm_jump"
7194 [(set (pc)
7195 (label_ref (match_operand 0 "" "")))]
7196 "TARGET_32BIT"
7197 "*
7198 {
7199 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7200 {
7201 arm_ccfsm_state += 2;
7202 return \"\";
7203 }
7204 return \"b%?\\t%l0\";
7205 }
7206 "
7207 [(set_attr "predicable" "yes")
7208 (set (attr "length")
7209 (if_then_else
7210 (and (match_test "TARGET_THUMB2")
7211 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7212 (le (minus (match_dup 0) (pc)) (const_int 2048))))
7213 (const_int 2)
7214 (const_int 4)))
7215 (set_attr "type" "branch")]
7216 )
7217
7218 (define_expand "call"
7219 [(parallel [(call (match_operand 0 "memory_operand")
7220 (match_operand 1 "general_operand"))
7221 (use (match_operand 2 "" ""))
7222 (clobber (reg:SI LR_REGNUM))])]
7223 "TARGET_EITHER"
7224 "
7225 {
7226 rtx callee, pat;
7227 tree addr = MEM_EXPR (operands[0]);
7228
7229 /* In an untyped call, we can get NULL for operand 2. */
7230 if (operands[2] == NULL_RTX)
7231 operands[2] = const0_rtx;
7232
7233 /* Decide if we should generate indirect calls by loading the
7234 32-bit address of the callee into a register before performing the
7235 branch and link. */
7236 callee = XEXP (operands[0], 0);
7237 if (GET_CODE (callee) == SYMBOL_REF
7238 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7239 : !REG_P (callee))
7240 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7241
7242 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
7243 /* Indirect call: set r9 with FDPIC value of callee. */
7244 XEXP (operands[0], 0)
7245 = arm_load_function_descriptor (XEXP (operands[0], 0));
7246
7247 if (detect_cmse_nonsecure_call (addr))
7248 {
7249 pat = gen_nonsecure_call_internal (operands[0], operands[1],
7250 operands[2]);
7251 emit_call_insn (pat);
7252 }
7253 else
7254 {
7255 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7256 arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
7257 }
7258
7259 /* Restore FDPIC register (r9) after call. */
7260 if (TARGET_FDPIC)
7261 {
7262 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7263 rtx initial_fdpic_reg
7264 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7265
7266 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7267 initial_fdpic_reg));
7268 }
7269
7270 DONE;
7271 }"
7272 )
7273
7274 (define_insn "restore_pic_register_after_call"
7275 [(set (match_operand:SI 0 "s_register_operand" "+r,r")
7276 (unspec:SI [(match_dup 0)
7277 (match_operand:SI 1 "nonimmediate_operand" "r,m")]
7278 UNSPEC_PIC_RESTORE))]
7279 ""
7280 "@
7281 mov\t%0, %1
7282 ldr\t%0, %1"
7283 )
7284
7285 (define_expand "call_internal"
7286 [(parallel [(call (match_operand 0 "memory_operand")
7287 (match_operand 1 "general_operand"))
7288 (use (match_operand 2 "" ""))
7289 (clobber (reg:SI LR_REGNUM))])])
7290
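;; For CMSE (Armv8-M Security Extensions) non-secure calls, the callee
;; address is copied into r4 first; the call is later routed through the
;; libgcc veneer __gnu_cmse_nonsecure_call, which expects the target address
;; in that register and is responsible for clearing register state before
;; the security-state transition.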
7291 (define_expand "nonsecure_call_internal"
7292 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
7293 UNSPEC_NONSECURE_MEM)
7294 (match_operand 1 "general_operand"))
7295 (use (match_operand 2 "" ""))
7296 (clobber (reg:SI LR_REGNUM))])]
7297 "use_cmse"
7298 "
7299 {
7300 rtx tmp;
7301 tmp = copy_to_suggested_reg (XEXP (operands[0], 0),
7302 gen_rtx_REG (SImode, R4_REGNUM),
7303 SImode);
7304
7305 operands[0] = replace_equiv_address (operands[0], tmp);
7306 }")
7307
7308 (define_insn "*call_reg_armv5"
7309 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7310 (match_operand 1 "" ""))
7311 (use (match_operand 2 "" ""))
7312 (clobber (reg:SI LR_REGNUM))]
7313 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7314 "blx%?\\t%0"
7315 [(set_attr "type" "call")]
7316 )
7317
7318 (define_insn "*call_reg_arm"
7319 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7320 (match_operand 1 "" ""))
7321 (use (match_operand 2 "" ""))
7322 (clobber (reg:SI LR_REGNUM))]
7323 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7324 "*
7325 return output_call (operands);
7326 "
7327 ;; Length is worst case; normally it is only two instructions (8 bytes).
7328 [(set_attr "length" "12")
7329 (set_attr "type" "call")]
7330 )
7331
7332
7333 (define_expand "call_value"
7334 [(parallel [(set (match_operand 0 "" "")
7335 (call (match_operand 1 "memory_operand")
7336 (match_operand 2 "general_operand")))
7337 (use (match_operand 3 "" ""))
7338 (clobber (reg:SI LR_REGNUM))])]
7339 "TARGET_EITHER"
7340 "
7341 {
7342 rtx pat, callee;
7343 tree addr = MEM_EXPR (operands[1]);
7344
7345 /* In an untyped call, we can get NULL for operand 3.  */
7346 if (operands[3] == 0)
7347 operands[3] = const0_rtx;
7348
7349 /* Decide if we should generate indirect calls by loading the
7350 32-bit address of the callee into a register before performing the
7351 branch and link. */
7352 callee = XEXP (operands[1], 0);
7353 if (GET_CODE (callee) == SYMBOL_REF
7354 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7355 : !REG_P (callee))
7356 XEXP (operands[1], 0) = force_reg (Pmode, callee);
7357
7358 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
7359 /* Indirect call: set r9 with FDPIC value of callee. */
7360 XEXP (operands[1], 0)
7361 = arm_load_function_descriptor (XEXP (operands[1], 0));
7362
7363 if (detect_cmse_nonsecure_call (addr))
7364 {
7365 pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
7366 operands[2], operands[3]);
7367 emit_call_insn (pat);
7368 }
7369 else
7370 {
7371 pat = gen_call_value_internal (operands[0], operands[1],
7372 operands[2], operands[3]);
7373 arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
7374 }
7375
7376 /* Restore FDPIC register (r9) after call. */
7377 if (TARGET_FDPIC)
7378 {
7379 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7380 rtx initial_fdpic_reg
7381 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7382
7383 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7384 initial_fdpic_reg));
7385 }
7386
7387 DONE;
7388 }"
7389 )
7390
7391 (define_expand "call_value_internal"
7392 [(parallel [(set (match_operand 0 "" "")
7393 (call (match_operand 1 "memory_operand")
7394 (match_operand 2 "general_operand")))
7395 (use (match_operand 3 "" ""))
7396 (clobber (reg:SI LR_REGNUM))])])
7397
7398 (define_expand "nonsecure_call_value_internal"
7399 [(parallel [(set (match_operand 0 "" "")
7400 (call (unspec:SI [(match_operand 1 "memory_operand")]
7401 UNSPEC_NONSECURE_MEM)
7402 (match_operand 2 "general_operand")))
7403 (use (match_operand 3 "" ""))
7404 (clobber (reg:SI LR_REGNUM))])]
7405 "use_cmse"
7406 "
7407 {
7408 rtx tmp;
7409 tmp = copy_to_suggested_reg (XEXP (operands[1], 0),
7410 gen_rtx_REG (SImode, R4_REGNUM),
7411 SImode);
7412
7413 operands[1] = replace_equiv_address (operands[1], tmp);
7414 }")
7415
7416 (define_insn "*call_value_reg_armv5"
7417 [(set (match_operand 0 "" "")
7418 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7419 (match_operand 2 "" "")))
7420 (use (match_operand 3 "" ""))
7421 (clobber (reg:SI LR_REGNUM))]
7422 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7423 "blx%?\\t%1"
7424 [(set_attr "type" "call")]
7425 )
7426
7427 (define_insn "*call_value_reg_arm"
7428 [(set (match_operand 0 "" "")
7429 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7430 (match_operand 2 "" "")))
7431 (use (match_operand 3 "" ""))
7432 (clobber (reg:SI LR_REGNUM))]
7433 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7434 "*
7435 return output_call (&operands[1]);
7436 "
7437 [(set_attr "length" "12")
7438 (set_attr "type" "call")]
7439 )
7440
7441 ;; Allow calls to SYMBOL_REFs specially, as they are not valid general addresses.
7442 ;; The 'a' modifier causes the operand to be treated as an address, i.e. no '#' output.
7443
7444 (define_insn "*call_symbol"
7445 [(call (mem:SI (match_operand:SI 0 "" ""))
7446 (match_operand 1 "" ""))
7447 (use (match_operand 2 "" ""))
7448 (clobber (reg:SI LR_REGNUM))]
7449 "TARGET_32BIT
7450 && !SIBLING_CALL_P (insn)
7451 && (GET_CODE (operands[0]) == SYMBOL_REF)
7452 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
7453 "*
7454 {
7455 rtx op = operands[0];
7456
7457 /* Switch mode now when possible. */
7458 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7459 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7460 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
7461
7462 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
7463 }"
7464 [(set_attr "type" "call")]
7465 )
7466
7467 (define_insn "*call_value_symbol"
7468 [(set (match_operand 0 "" "")
7469 (call (mem:SI (match_operand:SI 1 "" ""))
7470 (match_operand:SI 2 "" "")))
7471 (use (match_operand 3 "" ""))
7472 (clobber (reg:SI LR_REGNUM))]
7473 "TARGET_32BIT
7474 && !SIBLING_CALL_P (insn)
7475 && (GET_CODE (operands[1]) == SYMBOL_REF)
7476 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
7477 "*
7478 {
7479 rtx op = operands[1];
7480
7481 /* Switch mode now when possible. */
7482 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7483 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7484 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
7485
7486 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
7487 }"
7488 [(set_attr "type" "call")]
7489 )
7490
7491 (define_expand "sibcall_internal"
7492 [(parallel [(call (match_operand 0 "memory_operand")
7493 (match_operand 1 "general_operand"))
7494 (return)
7495 (use (match_operand 2 "" ""))])])
7496
7497 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
7498 (define_expand "sibcall"
7499 [(parallel [(call (match_operand 0 "memory_operand")
7500 (match_operand 1 "general_operand"))
7501 (return)
7502 (use (match_operand 2 "" ""))])]
7503 "TARGET_32BIT"
7504 "
7505 {
7506 rtx pat;
7507
7508 if ((!REG_P (XEXP (operands[0], 0))
7509 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
7510 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
7511 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
7512 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
7513
7514 if (operands[2] == NULL_RTX)
7515 operands[2] = const0_rtx;
7516
7517 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
7518 arm_emit_call_insn (pat, operands[0], true);
7519 DONE;
7520 }"
7521 )
7522
7523 (define_expand "sibcall_value_internal"
7524 [(parallel [(set (match_operand 0 "" "")
7525 (call (match_operand 1 "memory_operand")
7526 (match_operand 2 "general_operand")))
7527 (return)
7528 (use (match_operand 3 "" ""))])])
7529
7530 (define_expand "sibcall_value"
7531 [(parallel [(set (match_operand 0 "" "")
7532 (call (match_operand 1 "memory_operand")
7533 (match_operand 2 "general_operand")))
7534 (return)
7535 (use (match_operand 3 "" ""))])]
7536 "TARGET_32BIT"
7537 "
7538 {
7539 rtx pat;
7540
7541 if ((!REG_P (XEXP (operands[1], 0))
7542 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
7543 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
7544 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
7545 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
7546
7547 if (operands[3] == NULL_RTX)
7548 operands[3] = const0_rtx;
7549
7550 pat = gen_sibcall_value_internal (operands[0], operands[1],
7551 operands[2], operands[3]);
7552 arm_emit_call_insn (pat, operands[1], true);
7553 DONE;
7554 }"
7555 )
7556
7557 (define_insn "*sibcall_insn"
7558 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
7559 (match_operand 1 "" ""))
7560 (return)
7561 (use (match_operand 2 "" ""))]
7562 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7563 "*
7564 if (which_alternative == 1)
7565 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
7566 else
7567 {
7568 if (arm_arch5t || arm_arch4t)
7569 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
7570 else
7571 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
7572 }
7573 "
7574 [(set_attr "type" "call")]
7575 )
7576
7577 (define_insn "*sibcall_value_insn"
7578 [(set (match_operand 0 "" "")
7579 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
7580 (match_operand 2 "" "")))
7581 (return)
7582 (use (match_operand 3 "" ""))]
7583 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7584 "*
7585 if (which_alternative == 1)
7586 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
7587 else
7588 {
7589 if (arm_arch5t || arm_arch4t)
7590 return \"bx%?\\t%1\";
7591 else
7592 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
7593 }
7594 "
7595 [(set_attr "type" "call")]
7596 )
7597
7598 (define_expand "<return_str>return"
7599 [(RETURNS)]
7600 "(TARGET_ARM || (TARGET_THUMB2
7601 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
7602 && !IS_STACKALIGN (arm_current_func_type ())))
7603 <return_cond_false>"
7604 "
7605 {
7606 if (TARGET_THUMB2)
7607 {
7608 thumb2_expand_return (<return_simple_p>);
7609 DONE;
7610 }
7611 }
7612 "
7613 )
7614
7615 ;; Often the return insn will be the same as loading from memory, so set the type attribute accordingly.
7616 (define_insn "*arm_return"
7617 [(return)]
7618 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
7619 "*
7620 {
7621 if (arm_ccfsm_state == 2)
7622 {
7623 arm_ccfsm_state += 2;
7624 return \"\";
7625 }
7626 return output_return_instruction (const_true_rtx, true, false, false);
7627 }"
7628 [(set_attr "type" "load_4")
7629 (set_attr "length" "12")
7630 (set_attr "predicable" "yes")]
7631 )
7632
7633 (define_insn "*cond_<return_str>return"
7634 [(set (pc)
7635 (if_then_else (match_operator 0 "arm_comparison_operator"
7636 [(match_operand 1 "cc_register" "") (const_int 0)])
7637 (RETURNS)
7638 (pc)))]
7639 "TARGET_ARM <return_cond_true>"
7640 "*
7641 {
7642 if (arm_ccfsm_state == 2)
7643 {
7644 arm_ccfsm_state += 2;
7645 return \"\";
7646 }
7647 return output_return_instruction (operands[0], true, false,
7648 <return_simple_p>);
7649 }"
7650 [(set_attr "conds" "use")
7651 (set_attr "length" "12")
7652 (set_attr "type" "load_4")]
7653 )
7654
7655 (define_insn "*cond_<return_str>return_inverted"
7656 [(set (pc)
7657 (if_then_else (match_operator 0 "arm_comparison_operator"
7658 [(match_operand 1 "cc_register" "") (const_int 0)])
7659 (pc)
7660 (RETURNS)))]
7661 "TARGET_ARM <return_cond_true>"
7662 "*
7663 {
7664 if (arm_ccfsm_state == 2)
7665 {
7666 arm_ccfsm_state += 2;
7667 return \"\";
7668 }
7669 return output_return_instruction (operands[0], true, true,
7670 <return_simple_p>);
7671 }"
7672 [(set_attr "conds" "use")
7673 (set_attr "length" "12")
7674 (set_attr "type" "load_4")]
7675 )
7676
7677 (define_insn "*arm_simple_return"
7678 [(simple_return)]
7679 "TARGET_ARM"
7680 "*
7681 {
7682 if (arm_ccfsm_state == 2)
7683 {
7684 arm_ccfsm_state += 2;
7685 return \"\";
7686 }
7687 return output_return_instruction (const_true_rtx, true, false, true);
7688 }"
7689 [(set_attr "type" "branch")
7690 (set_attr "length" "4")
7691 (set_attr "predicable" "yes")]
7692 )
7693
7694 ;; Generate a sequence of instructions to determine if the processor is
7695 ;; in 26-bit or 32-bit mode, and return the appropriate return address
7696 ;; mask.
7697
7698 (define_expand "return_addr_mask"
7699 [(set (match_dup 1)
7700 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7701 (const_int 0)))
7702 (set (match_operand:SI 0 "s_register_operand")
7703 (if_then_else:SI (eq (match_dup 1) (const_int 0))
7704 (const_int -1)
7705 (const_int 67108860)))] ; 0x03fffffc
7706 "TARGET_ARM"
7707 "
7708 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
7709 ")
7710
7711 (define_insn "*check_arch2"
7712 [(set (match_operand:CC_NOOV 0 "cc_register" "")
7713 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7714 (const_int 0)))]
7715 "TARGET_ARM"
7716 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
7717 [(set_attr "length" "8")
7718 (set_attr "conds" "set")
7719 (set_attr "type" "multiple")]
7720 )
7721
7722 ;; Call subroutine returning any type.
7723
7724 (define_expand "untyped_call"
7725 [(parallel [(call (match_operand 0 "" "")
7726 (const_int 0))
7727 (match_operand 1 "" "")
7728 (match_operand 2 "" "")])]
7729 "TARGET_EITHER && !TARGET_FDPIC"
7730 "
7731 {
7732 int i;
7733 rtx par = gen_rtx_PARALLEL (VOIDmode,
7734 rtvec_alloc (XVECLEN (operands[2], 0)));
7735 rtx addr = gen_reg_rtx (Pmode);
7736 rtx mem;
7737 int size = 0;
7738
7739 emit_move_insn (addr, XEXP (operands[1], 0));
7740 mem = change_address (operands[1], BLKmode, addr);
7741
7742 for (i = 0; i < XVECLEN (operands[2], 0); i++)
7743 {
7744 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
7745
7746 /* Default code only uses r0 as a return value, but we could
7747 be using anything up to 4 registers. */
7748 if (REGNO (src) == R0_REGNUM)
7749 src = gen_rtx_REG (TImode, R0_REGNUM);
7750
7751 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
7752 GEN_INT (size));
7753 size += GET_MODE_SIZE (GET_MODE (src));
7754 }
7755
7756 emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
7757
7758 size = 0;
7759
7760 for (i = 0; i < XVECLEN (par, 0); i++)
7761 {
7762 HOST_WIDE_INT offset = 0;
7763 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
7764
7765 if (size != 0)
7766 emit_move_insn (addr, plus_constant (Pmode, addr, size));
7767
7768 mem = change_address (mem, GET_MODE (reg), NULL);
7769 if (REGNO (reg) == R0_REGNUM)
7770 {
7771 /* On Thumb we have to use a write-back instruction.  */
7772 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
7773 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7774 size = TARGET_ARM ? 16 : 0;
7775 }
7776 else
7777 {
7778 emit_move_insn (mem, reg);
7779 size = GET_MODE_SIZE (GET_MODE (reg));
7780 }
7781 }
7782
7783 /* The optimizer does not know that the call sets the function value
7784 registers we stored in the result block. We avoid problems by
7785 claiming that all hard registers are used and clobbered at this
7786 point. */
7787 emit_insn (gen_blockage ());
7788
7789 DONE;
7790 }"
7791 )
7792
7793 (define_expand "untyped_return"
7794 [(match_operand:BLK 0 "memory_operand")
7795 (match_operand 1 "" "")]
7796 "TARGET_EITHER && !TARGET_FDPIC"
7797 "
7798 {
7799 int i;
7800 rtx addr = gen_reg_rtx (Pmode);
7801 rtx mem;
7802 int size = 0;
7803
7804 emit_move_insn (addr, XEXP (operands[0], 0));
7805 mem = change_address (operands[0], BLKmode, addr);
7806
7807 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7808 {
7809 HOST_WIDE_INT offset = 0;
7810 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
7811
7812 if (size != 0)
7813 emit_move_insn (addr, plus_constant (Pmode, addr, size));
7814
7815 mem = change_address (mem, GET_MODE (reg), NULL);
7816 if (REGNO (reg) == R0_REGNUM)
7817 {
7818 /* On Thumb we have to use a write-back instruction.  */
7819 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
7820 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7821 size = TARGET_ARM ? 16 : 0;
7822 }
7823 else
7824 {
7825 emit_move_insn (reg, mem);
7826 size = GET_MODE_SIZE (GET_MODE (reg));
7827 }
7828 }
7829
7830 /* Emit USE insns before the return. */
7831 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7832 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
7833
7834 /* Construct the return. */
7835 expand_naked_return ();
7836
7837 DONE;
7838 }"
7839 )
7840
7841 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
7842 ;; all of memory. This blocks insns from being moved across this point.
7843
7844 (define_insn "blockage"
7845 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
7846 "TARGET_EITHER"
7847 ""
7848 [(set_attr "length" "0")
7849 (set_attr "type" "block")]
7850 )
7851
7852 ;; Since we hard-code r0 here, use the 'o' constraint to avoid provoking
7853 ;; undefined behaviour in the hardware by emitting auto-increment
7854 ;; addressing modes that could end up with r0 as the base register.
7855 (define_insn "probe_stack"
7856 [(set (match_operand:SI 0 "memory_operand" "=o")
7857 (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
7858 "TARGET_32BIT"
7859 "str%?\\tr0, %0"
7860 [(set_attr "type" "store_4")
7861 (set_attr "predicable" "yes")]
7862 )
7863
7864 (define_insn "probe_stack_range"
7865 [(set (match_operand:SI 0 "register_operand" "=r")
7866 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
7867 (match_operand:SI 2 "register_operand" "r")]
7868 VUNSPEC_PROBE_STACK_RANGE))]
7869 "TARGET_32BIT"
7870 {
7871 return output_probe_stack_range (operands[0], operands[2]);
7872 }
7873 [(set_attr "type" "multiple")
7874 (set_attr "conds" "clob")]
7875 )
7876
7877 ;; Named patterns for stack smashing protection.
7878 (define_expand "stack_protect_combined_set"
7879 [(parallel
7880 [(set (match_operand:SI 0 "memory_operand")
7881 (unspec:SI [(match_operand:SI 1 "guard_operand")]
7882 UNSPEC_SP_SET))
7883 (clobber (match_scratch:SI 2 ""))
7884 (clobber (match_scratch:SI 3 ""))])]
7885 ""
7886 ""
7887 )
7888
7889 ;; Use a separate insn from the above expand so that the mem stays outside
7890 ;; operand #1 when register allocation happens.  This is needed to keep LRA
7891 ;; from trying to reload the guard, since we need to control how PIC access
7892 ;; is done in the -fpic/-fPIC case (see the COMPUTE_NOW parameter when
7893 ;; calling legitimize_pic_address ()).
7894 (define_insn_and_split "*stack_protect_combined_set_insn"
7895 [(set (match_operand:SI 0 "memory_operand" "=m,m")
7896 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
7897 UNSPEC_SP_SET))
7898 (clobber (match_scratch:SI 2 "=&l,&r"))
7899 (clobber (match_scratch:SI 3 "=&l,&r"))]
7900 ""
7901 "#"
7902 "reload_completed"
7903 [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
7904 UNSPEC_SP_SET))
7905 (clobber (match_dup 2))])]
7906 "
7907 {
7908 if (flag_pic)
7909 {
7910 rtx pic_reg;
7911
7912 if (TARGET_FDPIC)
7913 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7914 else
7915 pic_reg = operands[3];
7916
7917 /* Forces recomputing of GOT base now. */
7918 legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
7919 true /*compute_now*/);
7920 }
7921 else
7922 {
7923 if (address_operand (operands[1], SImode))
7924 operands[2] = operands[1];
7925 else
7926 {
7927 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
7928 emit_move_insn (operands[2], mem);
7929 }
7930 }
7931 }"
7932 [(set_attr "arch" "t1,32")]
7933 )
7934
7935 ;; DO NOT SPLIT THIS INSN. It's important for security reasons that the
7936 ;; canary value does not live beyond the life of this sequence.
7937 (define_insn "*stack_protect_set_insn"
7938 [(set (match_operand:SI 0 "memory_operand" "=m,m")
7939 (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
7940 UNSPEC_SP_SET))
7941 (clobber (match_dup 1))]
7942 ""
7943 "@
7944 ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
7945 ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
7946 [(set_attr "length" "8,12")
7947 (set_attr "conds" "clob,nocond")
7948 (set_attr "type" "multiple")
7949 (set_attr "arch" "t1,32")]
7950 )
7951
7952 (define_expand "stack_protect_combined_test"
7953 [(parallel
7954 [(set (pc)
7955 (if_then_else
7956 (eq (match_operand:SI 0 "memory_operand")
7957 (unspec:SI [(match_operand:SI 1 "guard_operand")]
7958 UNSPEC_SP_TEST))
7959 (label_ref (match_operand 2))
7960 (pc)))
7961 (clobber (match_scratch:SI 3 ""))
7962 (clobber (match_scratch:SI 4 ""))
7963 (clobber (reg:CC CC_REGNUM))])]
7964 ""
7965 ""
7966 )
7967
7968 ;; Use a separate insn from the above expand so that the mem stays outside
7969 ;; operand #1 when register allocation happens.  This is needed to keep LRA
7970 ;; from trying to reload the guard, since we need to control how PIC access
7971 ;; is done in the -fpic/-fPIC case (see the COMPUTE_NOW parameter when
7972 ;; calling legitimize_pic_address ()).
7973 (define_insn_and_split "*stack_protect_combined_test_insn"
7974 [(set (pc)
7975 (if_then_else
7976 (eq (match_operand:SI 0 "memory_operand" "m,m")
7977 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
7978 UNSPEC_SP_TEST))
7979 (label_ref (match_operand 2))
7980 (pc)))
7981 (clobber (match_scratch:SI 3 "=&l,&r"))
7982 (clobber (match_scratch:SI 4 "=&l,&r"))
7983 (clobber (reg:CC CC_REGNUM))]
7984 ""
7985 "#"
7986 "reload_completed"
7987 [(const_int 0)]
7988 {
7989 rtx eq;
7990
7991 if (flag_pic)
7992 {
7993 rtx pic_reg;
7994
7995 if (TARGET_FDPIC)
7996 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7997 else
7998 pic_reg = operands[4];
7999
8000 /* Forces recomputing of GOT base now. */
8001 legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
8002 true /*compute_now*/);
8003 }
8004 else
8005 {
8006 if (address_operand (operands[1], SImode))
8007 operands[3] = operands[1];
8008 else
8009 {
8010 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
8011 emit_move_insn (operands[3], mem);
8012 }
8013 }
8014 if (TARGET_32BIT)
8015 {
8016 emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
8017 operands[3]));
8018 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
8019 eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
8020 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
8021 }
8022 else
8023 {
8024 emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
8025 operands[3]));
8026 eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
8027 emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
8028 operands[2]));
8029 }
8030 DONE;
8031 }
8032 [(set_attr "arch" "t1,32")]
8033 )
8034
8035 (define_insn "arm_stack_protect_test_insn"
8036 [(set (reg:CC_Z CC_REGNUM)
8037 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
8038 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
8039 UNSPEC_SP_TEST)
8040 (const_int 0)))
8041 (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
8042 (clobber (match_dup 2))]
8043 "TARGET_32BIT"
8044 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0"
8045 [(set_attr "length" "8,12")
8046 (set_attr "conds" "set")
8047 (set_attr "type" "multiple")
8048 (set_attr "arch" "t,32")]
8049 )
8050
8051 (define_expand "casesi"
8052 [(match_operand:SI 0 "s_register_operand") ; index to jump on
8053 (match_operand:SI 1 "const_int_operand") ; lower bound
8054 (match_operand:SI 2 "const_int_operand") ; total range
8055 (match_operand:SI 3 "" "") ; table label
8056 (match_operand:SI 4 "" "")] ; Out of range label
8057 "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
8058 "
8059 {
8060 enum insn_code code;
8061 if (operands[1] != const0_rtx)
8062 {
8063 rtx reg = gen_reg_rtx (SImode);
8064
8065 emit_insn (gen_addsi3 (reg, operands[0],
8066 gen_int_mode (-INTVAL (operands[1]),
8067 SImode)));
8068 operands[0] = reg;
8069 }
8070
8071 if (TARGET_ARM)
8072 code = CODE_FOR_arm_casesi_internal;
8073 else if (TARGET_THUMB1)
8074 code = CODE_FOR_thumb1_casesi_internal_pic;
8075 else if (flag_pic)
8076 code = CODE_FOR_thumb2_casesi_internal_pic;
8077 else
8078 code = CODE_FOR_thumb2_casesi_internal;
8079
8080 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8081 operands[2] = force_reg (SImode, operands[2]);
8082
8083 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8084 operands[3], operands[4]));
8085 DONE;
8086 }"
8087 )
8088
8089 ;; The USE in this pattern is needed to tell flow analysis that this is
8090 ;; a CASESI insn. It has no other purpose.
8091 (define_expand "arm_casesi_internal"
8092 [(parallel [(set (pc)
8093 (if_then_else
8094 (leu (match_operand:SI 0 "s_register_operand")
8095 (match_operand:SI 1 "arm_rhs_operand"))
8096 (match_dup 4)
8097 (label_ref:SI (match_operand 3 ""))))
8098 (clobber (reg:CC CC_REGNUM))
8099 (use (label_ref:SI (match_operand 2 "")))])]
8100 "TARGET_ARM"
8101 {
8102 operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
8103 operands[4] = gen_rtx_PLUS (SImode, operands[4],
8104 gen_rtx_LABEL_REF (SImode, operands[2]));
8105 operands[4] = gen_rtx_MEM (SImode, operands[4]);
8106 MEM_READONLY_P (operands[4]) = 1;
8107 MEM_NOTRAP_P (operands[4]) = 1;
8108 })
8109
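;; A note on the templates below: in ARM state the PC reads as the address
;; of the current instruction plus 8.  In the non-PIC form,
;; "ldrls pc, [pc, index, asl #2]" therefore indexes the table of absolute
;; addresses placed immediately after the final branch; in the PIC form the
;; scaled index is added to the PC instead, so the table entries are branch
;; instructions rather than addresses.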
8110 (define_insn "*arm_casesi_internal"
8111 [(parallel [(set (pc)
8112 (if_then_else
8113 (leu (match_operand:SI 0 "s_register_operand" "r")
8114 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8115 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8116 (label_ref:SI (match_operand 2 "" ""))))
8117 (label_ref:SI (match_operand 3 "" ""))))
8118 (clobber (reg:CC CC_REGNUM))
8119 (use (label_ref:SI (match_dup 2)))])]
8120 "TARGET_ARM"
8121 "*
8122 if (flag_pic)
8123 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8124 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8125 "
8126 [(set_attr "conds" "clob")
8127 (set_attr "length" "12")
8128 (set_attr "type" "multiple")]
8129 )
8130
8131 (define_expand "indirect_jump"
8132 [(set (pc)
8133 (match_operand:SI 0 "s_register_operand"))]
8134 "TARGET_EITHER"
8135 "
8136 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8137 address and use bx. */
8138 if (TARGET_THUMB2)
8139 {
8140 rtx tmp;
8141 tmp = gen_reg_rtx (SImode);
8142 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8143 operands[0] = tmp;
8144 }
8145 "
8146 )
8147
8148 ;; NB Never uses BX.
8149 (define_insn "*arm_indirect_jump"
8150 [(set (pc)
8151 (match_operand:SI 0 "s_register_operand" "r"))]
8152 "TARGET_ARM"
8153 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8154 [(set_attr "predicable" "yes")
8155 (set_attr "type" "branch")]
8156 )
8157
8158 (define_insn "*load_indirect_jump"
8159 [(set (pc)
8160 (match_operand:SI 0 "memory_operand" "m"))]
8161 "TARGET_ARM"
8162 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8163 [(set_attr "type" "load_4")
8164 (set_attr "pool_range" "4096")
8165 (set_attr "neg_pool_range" "4084")
8166 (set_attr "predicable" "yes")]
8167 )
8168
8169 \f
8170 ;; Misc insns
8171
8172 (define_insn "nop"
8173 [(const_int 0)]
8174 "TARGET_EITHER"
8175 "nop"
8176 [(set (attr "length")
8177 (if_then_else (eq_attr "is_thumb" "yes")
8178 (const_int 2)
8179 (const_int 4)))
8180 (set_attr "type" "mov_reg")]
8181 )
8182
8183 (define_insn "trap"
8184 [(trap_if (const_int 1) (const_int 0))]
8185 ""
8186 "*
8187 if (TARGET_ARM)
8188 return \".inst\\t0xe7f000f0\";
8189 else
8190 return \".inst\\t0xdeff\";
8191 "
8192 [(set (attr "length")
8193 (if_then_else (eq_attr "is_thumb" "yes")
8194 (const_int 2)
8195 (const_int 4)))
8196 (set_attr "type" "trap")
8197 (set_attr "conds" "unconditional")]
8198 )
8199
8200 \f
8201 ;; Patterns to allow combination of arithmetic, cond code and shifts
8202
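;; For example, (plus (mult Rm 4) Rn) matches here and comes out as a single
;; "add Rd, Rn, Rm, lsl #2": the power-of-two multiply is folded into the
;; shifter operand, with the %b3 modifier emitting the corresponding shift
;; amount.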
8203 (define_insn "*<arith_shift_insn>_multsi"
8204 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8205 (SHIFTABLE_OPS:SI
8206 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
8207 (match_operand:SI 3 "power_of_two_operand" ""))
8208 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
8209 "TARGET_32BIT"
8210 "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
8211 [(set_attr "predicable" "yes")
8212 (set_attr "shift" "2")
8213 (set_attr "arch" "a,t2")
8214 (set_attr "type" "alu_shift_imm")])
8215
8216 (define_insn "*<arith_shift_insn>_shiftsi"
8217 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8218 (SHIFTABLE_OPS:SI
8219 (match_operator:SI 2 "shift_nomul_operator"
8220 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8221 (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
8222 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
8223 "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
8224 "<arith_shift_insn>%?\\t%0, %1, %3%S2"
8225 [(set_attr "predicable" "yes")
8226 (set_attr "shift" "3")
8227 (set_attr "arch" "a,t2,a")
8228 (set_attr "type" "alu_shift_imm,alu_shift_imm,alu_shift_reg")])
8229
8230 (define_split
8231 [(set (match_operand:SI 0 "s_register_operand" "")
8232 (match_operator:SI 1 "shiftable_operator"
8233 [(match_operator:SI 2 "shiftable_operator"
8234 [(match_operator:SI 3 "shift_operator"
8235 [(match_operand:SI 4 "s_register_operand" "")
8236 (match_operand:SI 5 "reg_or_int_operand" "")])
8237 (match_operand:SI 6 "s_register_operand" "")])
8238 (match_operand:SI 7 "arm_rhs_operand" "")]))
8239 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8240 "TARGET_32BIT"
8241 [(set (match_dup 8)
8242 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8243 (match_dup 6)]))
8244 (set (match_dup 0)
8245 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
8246 "")
8247
8248 (define_insn "*arith_shiftsi_compare0"
8249 [(set (reg:CC_NOOV CC_REGNUM)
8250 (compare:CC_NOOV
8251 (match_operator:SI 1 "shiftable_operator"
8252 [(match_operator:SI 3 "shift_operator"
8253 [(match_operand:SI 4 "s_register_operand" "r,r")
8254 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8255 (match_operand:SI 2 "s_register_operand" "r,r")])
8256 (const_int 0)))
8257 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8258 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8259 (match_dup 2)]))]
8260 "TARGET_32BIT"
8261 "%i1s%?\\t%0, %2, %4%S3"
8262 [(set_attr "conds" "set")
8263 (set_attr "shift" "4")
8264 (set_attr "arch" "32,a")
8265 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8266
8267 (define_insn "*arith_shiftsi_compare0_scratch"
8268 [(set (reg:CC_NOOV CC_REGNUM)
8269 (compare:CC_NOOV
8270 (match_operator:SI 1 "shiftable_operator"
8271 [(match_operator:SI 3 "shift_operator"
8272 [(match_operand:SI 4 "s_register_operand" "r,r")
8273 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8274 (match_operand:SI 2 "s_register_operand" "r,r")])
8275 (const_int 0)))
8276 (clobber (match_scratch:SI 0 "=r,r"))]
8277 "TARGET_32BIT"
8278 "%i1s%?\\t%0, %2, %4%S3"
8279 [(set_attr "conds" "set")
8280 (set_attr "shift" "4")
8281 (set_attr "arch" "32,a")
8282 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8283
8284 (define_insn "*sub_shiftsi"
8285 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8286 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8287 (match_operator:SI 2 "shift_operator"
8288 [(match_operand:SI 3 "s_register_operand" "r,r")
8289 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8290 "TARGET_32BIT"
8291 "sub%?\\t%0, %1, %3%S2"
8292 [(set_attr "predicable" "yes")
8293 (set_attr "predicable_short_it" "no")
8294 (set_attr "shift" "3")
8295 (set_attr "arch" "32,a")
8296 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8297
8298 (define_insn "*sub_shiftsi_compare0"
8299 [(set (reg:CC_NOOV CC_REGNUM)
8300 (compare:CC_NOOV
8301 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8302 (match_operator:SI 2 "shift_operator"
8303 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8304 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8305 (const_int 0)))
8306 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8307 (minus:SI (match_dup 1)
8308 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8309 "TARGET_32BIT"
8310 "subs%?\\t%0, %1, %3%S2"
8311 [(set_attr "conds" "set")
8312 (set_attr "shift" "3")
8313 (set_attr "arch" "32,a,a")
8314 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
8315
8316 (define_insn "*sub_shiftsi_compare0_scratch"
8317 [(set (reg:CC_NOOV CC_REGNUM)
8318 (compare:CC_NOOV
8319 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8320 (match_operator:SI 2 "shift_operator"
8321 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8322 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8323 (const_int 0)))
8324 (clobber (match_scratch:SI 0 "=r,r,r"))]
8325 "TARGET_32BIT"
8326 "subs%?\\t%0, %1, %3%S2"
8327 [(set_attr "conds" "set")
8328 (set_attr "shift" "3")
8329 (set_attr "arch" "32,a,a")
8330 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
8331 \f
8332
8333 (define_insn_and_split "*and_scc"
8334 [(set (match_operand:SI 0 "s_register_operand" "=r")
8335 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8336 [(match_operand 2 "cc_register" "") (const_int 0)])
8337 (match_operand:SI 3 "s_register_operand" "r")))]
8338 "TARGET_ARM"
8339 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
8340 "&& reload_completed"
8341 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
8342 (cond_exec (match_dup 4) (set (match_dup 0)
8343 (and:SI (match_dup 3) (const_int 1))))]
8344 {
8345 machine_mode mode = GET_MODE (operands[2]);
8346 enum rtx_code rc = GET_CODE (operands[1]);
8347
8348 /* Note that operands[4] is the same as operands[1],
8349 but with VOIDmode as the result. */
8350 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8351 if (mode == CCFPmode || mode == CCFPEmode)
8352 rc = reverse_condition_maybe_unordered (rc);
8353 else
8354 rc = reverse_condition (rc);
8355 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8356 }
8357 [(set_attr "conds" "use")
8358 (set_attr "type" "multiple")
8359 (set_attr "length" "8")]
8360 )
8361
8362 (define_insn_and_split "*ior_scc"
8363 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8364 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
8365 [(match_operand 2 "cc_register" "") (const_int 0)])
8366 (match_operand:SI 3 "s_register_operand" "0,?r")))]
8367 "TARGET_ARM"
8368 "@
8369 orr%d1\\t%0, %3, #1
8370 #"
8371 "&& reload_completed
8372 && REGNO (operands [0]) != REGNO (operands[3])"
8373 ;; && which_alternative == 1
8374 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
8375 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
8376 (cond_exec (match_dup 4) (set (match_dup 0)
8377 (ior:SI (match_dup 3) (const_int 1))))]
8378 {
8379 machine_mode mode = GET_MODE (operands[2]);
8380 enum rtx_code rc = GET_CODE (operands[1]);
8381
8382 /* Note that operands[4] is the same as operands[1],
8383 but with VOIDmode as the result. */
8384 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8385 if (mode == CCFPmode || mode == CCFPEmode)
8386 rc = reverse_condition_maybe_unordered (rc);
8387 else
8388 rc = reverse_condition (rc);
8389 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8390 }
8391 [(set_attr "conds" "use")
8392 (set_attr "length" "4,8")
8393 (set_attr "type" "logic_imm,multiple")]
8394 )
8395
8396 ; A series of splitters for the compare_scc pattern below. Note that
8397 ; order is important.
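; As a quick map of the tricks used below: x < 0 is just the sign bit (a
; logical shift right by 31); x >= 0 is the sign bit of ~x; x == 0 on
; ARMv5T and later uses CLZ, since only zero has 32 leading zeros and
; 32 >> 5 == 1; the remaining forms fall back to a compare followed by
; conditionally executed moves.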
8398 (define_split
8399 [(set (match_operand:SI 0 "s_register_operand" "")
8400 (lt:SI (match_operand:SI 1 "s_register_operand" "")
8401 (const_int 0)))
8402 (clobber (reg:CC CC_REGNUM))]
8403 "TARGET_32BIT && reload_completed"
8404 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
8405
8406 (define_split
8407 [(set (match_operand:SI 0 "s_register_operand" "")
8408 (ge:SI (match_operand:SI 1 "s_register_operand" "")
8409 (const_int 0)))
8410 (clobber (reg:CC CC_REGNUM))]
8411 "TARGET_32BIT && reload_completed"
8412 [(set (match_dup 0) (not:SI (match_dup 1)))
8413 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
8414
8415 (define_split
8416 [(set (match_operand:SI 0 "s_register_operand" "")
8417 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8418 (const_int 0)))
8419 (clobber (reg:CC CC_REGNUM))]
8420 "arm_arch5t && TARGET_32BIT"
8421 [(set (match_dup 0) (clz:SI (match_dup 1)))
8422 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8423 )
8424
8425 (define_split
8426 [(set (match_operand:SI 0 "s_register_operand" "")
8427 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8428 (const_int 0)))
8429 (clobber (reg:CC CC_REGNUM))]
8430 "TARGET_32BIT && reload_completed"
8431 [(parallel
8432 [(set (reg:CC CC_REGNUM)
8433 (compare:CC (const_int 1) (match_dup 1)))
8434 (set (match_dup 0)
8435 (minus:SI (const_int 1) (match_dup 1)))])
8436 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
8437 (set (match_dup 0) (const_int 0)))])
8438
8439 (define_split
8440 [(set (match_operand:SI 0 "s_register_operand" "")
8441 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8442 (match_operand:SI 2 "const_int_operand" "")))
8443 (clobber (reg:CC CC_REGNUM))]
8444 "TARGET_32BIT && reload_completed"
8445 [(parallel
8446 [(set (reg:CC CC_REGNUM)
8447 (compare:CC (match_dup 1) (match_dup 2)))
8448 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
8449 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
8450 (set (match_dup 0) (const_int 1)))]
8451 {
8452 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
8453 })
8454
8455 (define_split
8456 [(set (match_operand:SI 0 "s_register_operand" "")
8457 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8458 (match_operand:SI 2 "arm_add_operand" "")))
8459 (clobber (reg:CC CC_REGNUM))]
8460 "TARGET_32BIT && reload_completed"
8461 [(parallel
8462 [(set (reg:CC_NOOV CC_REGNUM)
8463 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
8464 (const_int 0)))
8465 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
8466 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
8467 (set (match_dup 0) (const_int 1)))])
8468
8469 (define_insn_and_split "*compare_scc"
8470 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8471 (match_operator:SI 1 "arm_comparison_operator"
8472 [(match_operand:SI 2 "s_register_operand" "r,r")
8473 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8474 (clobber (reg:CC CC_REGNUM))]
8475 "TARGET_32BIT"
8476 "#"
8477 "&& reload_completed"
8478 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
8479 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
8480 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
8481 {
8482 rtx tmp1;
8483 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
8484 operands[2], operands[3]);
8485 enum rtx_code rc = GET_CODE (operands[1]);
8486
8487 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
8488
8489 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8490 if (mode == CCFPmode || mode == CCFPEmode)
8491 rc = reverse_condition_maybe_unordered (rc);
8492 else
8493 rc = reverse_condition (rc);
8494 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8495 }
8496 [(set_attr "type" "multiple")]
8497 )
8498
8499 ;; Attempt to improve the sequences generated by the compare_scc splitters
8500 ;; so that they do not use conditional execution.
8501
8502 ;; Rd = (eq (reg1) (const_int 0)) // ARMv5
8503 ;; clz Rd, reg1
8504 ;; lsr Rd, Rd, #5
8505 (define_peephole2
8506 [(set (reg:CC CC_REGNUM)
8507 (compare:CC (match_operand:SI 1 "register_operand" "")
8508 (const_int 0)))
8509 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8510 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8511 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8512 (set (match_dup 0) (const_int 1)))]
8513 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8514 [(set (match_dup 0) (clz:SI (match_dup 1)))
8515 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8516 )
8517
8518 ;; Rd = (eq (reg1) (const_int 0)) // !ARMv5
8519 ;; negs Rd, reg1
8520 ;; adc Rd, Rd, reg1
8521 (define_peephole2
8522 [(set (reg:CC CC_REGNUM)
8523 (compare:CC (match_operand:SI 1 "register_operand" "")
8524 (const_int 0)))
8525 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8526 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8527 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8528 (set (match_dup 0) (const_int 1)))
8529 (match_scratch:SI 2 "r")]
8530 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8531 [(parallel
8532 [(set (reg:CC CC_REGNUM)
8533 (compare:CC (const_int 0) (match_dup 1)))
8534 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
8535 (set (match_dup 0)
8536 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
8537 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
8538 )
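
;; The negs/adc idiom above computes (reg1 == 0) without conditional
;; execution: "negs" of reg1 leaves the carry flag set only when reg1 is
;; zero (no borrow out of 0 - reg1), and the following "adc" produces
;; -reg1 + reg1 + C, i.e. exactly the carry: 1 for zero, 0 otherwise.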
8539
8540 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
8541 ;; sub Rd, Reg1, reg2
8542 ;; clz Rd, Rd
8543 ;; lsr Rd, Rd, #5
8544 (define_peephole2
8545 [(set (reg:CC CC_REGNUM)
8546 (compare:CC (match_operand:SI 1 "register_operand" "")
8547 (match_operand:SI 2 "arm_rhs_operand" "")))
8548 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8549 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8550 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8551 (set (match_dup 0) (const_int 1)))]
8552 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
8553 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
8554 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
8555 (set (match_dup 0) (clz:SI (match_dup 0)))
8556 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8557 )
8558
8559
8560 ;; Rd = (eq (reg1) (reg2)) // ! ARMv5 or optimising for size.
8561 ;; sub T1, Reg1, reg2
8562 ;; negs Rd, T1
8563 ;; adc Rd, Rd, T1
8564 (define_peephole2
8565 [(set (reg:CC CC_REGNUM)
8566 (compare:CC (match_operand:SI 1 "register_operand" "")
8567 (match_operand:SI 2 "arm_rhs_operand" "")))
8568 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8569 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8570 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8571 (set (match_dup 0) (const_int 1)))
8572 (match_scratch:SI 3 "r")]
8573 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8574 [(set (match_dup 3) (match_dup 4))
8575 (parallel
8576 [(set (reg:CC CC_REGNUM)
8577 (compare:CC (const_int 0) (match_dup 3)))
8578 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
8579 (set (match_dup 0)
8580 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
8581 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
8582 "
8583 if (CONST_INT_P (operands[2]))
8584 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
8585 else
8586 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
8587 ")
8588
8589 (define_insn "*cond_move"
8590 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8591 (if_then_else:SI (match_operator 3 "equality_operator"
8592 [(match_operator 4 "arm_comparison_operator"
8593 [(match_operand 5 "cc_register" "") (const_int 0)])
8594 (const_int 0)])
8595 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
8596 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
8597 "TARGET_ARM"
8598 "*
8599 if (GET_CODE (operands[3]) == NE)
8600 {
8601 if (which_alternative != 1)
8602 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
8603 if (which_alternative != 0)
8604 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
8605 return \"\";
8606 }
8607 if (which_alternative != 0)
8608 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8609 if (which_alternative != 1)
8610 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
8611 return \"\";
8612 "
8613 [(set_attr "conds" "use")
8614 (set_attr_alternative "type"
8615 [(if_then_else (match_operand 2 "const_int_operand" "")
8616 (const_string "mov_imm")
8617 (const_string "mov_reg"))
8618 (if_then_else (match_operand 1 "const_int_operand" "")
8619 (const_string "mov_imm")
8620 (const_string "mov_reg"))
8621 (const_string "multiple")])
8622 (set_attr "length" "4,4,8")]
8623 )
8624
8625 (define_insn "*cond_arith"
8626 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8627 (match_operator:SI 5 "shiftable_operator"
8628 [(match_operator:SI 4 "arm_comparison_operator"
8629 [(match_operand:SI 2 "s_register_operand" "r,r")
8630 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
8631 (match_operand:SI 1 "s_register_operand" "0,?r")]))
8632 (clobber (reg:CC CC_REGNUM))]
8633 "TARGET_ARM"
8634 "*
8635 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
8636 return \"%i5\\t%0, %1, %2, lsr #31\";
8637
8638 output_asm_insn (\"cmp\\t%2, %3\", operands);
8639 if (GET_CODE (operands[5]) == AND)
8640 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
8641 else if (GET_CODE (operands[5]) == MINUS)
8642 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
8643 else if (which_alternative != 0)
8644 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8645 return \"%i5%d4\\t%0, %1, #1\";
8646 "
8647 [(set_attr "conds" "clob")
8648 (set_attr "length" "12")
8649 (set_attr "type" "multiple")]
8650 )
8651
8652 (define_insn "*cond_sub"
8653 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8654 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
8655 (match_operator:SI 4 "arm_comparison_operator"
8656 [(match_operand:SI 2 "s_register_operand" "r,r")
8657 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
8658 (clobber (reg:CC CC_REGNUM))]
8659 "TARGET_ARM"
8660 "*
8661 output_asm_insn (\"cmp\\t%2, %3\", operands);
8662 if (which_alternative != 0)
8663 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8664 return \"sub%d4\\t%0, %1, #1\";
8665 "
8666 [(set_attr "conds" "clob")
8667 (set_attr "length" "8,12")
8668 (set_attr "type" "multiple")]
8669 )
8670
8671 (define_insn "*cmp_ite0"
8672 [(set (match_operand 6 "dominant_cc_register" "")
8673 (compare
8674 (if_then_else:SI
8675 (match_operator 4 "arm_comparison_operator"
8676 [(match_operand:SI 0 "s_register_operand"
8677 "l,l,l,r,r,r,r,r,r")
8678 (match_operand:SI 1 "arm_add_operand"
8679 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
8680 (match_operator:SI 5 "arm_comparison_operator"
8681 [(match_operand:SI 2 "s_register_operand"
8682 "l,r,r,l,l,r,r,r,r")
8683 (match_operand:SI 3 "arm_add_operand"
8684 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
8685 (const_int 0))
8686 (const_int 0)))]
8687 "TARGET_32BIT"
8688 "*
8689 {
8690 static const char * const cmp1[NUM_OF_COND_CMP][2] =
8691 {
8692 {\"cmp%d5\\t%0, %1\",
8693 \"cmp%d4\\t%2, %3\"},
8694 {\"cmn%d5\\t%0, #%n1\",
8695 \"cmp%d4\\t%2, %3\"},
8696 {\"cmp%d5\\t%0, %1\",
8697 \"cmn%d4\\t%2, #%n3\"},
8698 {\"cmn%d5\\t%0, #%n1\",
8699 \"cmn%d4\\t%2, #%n3\"}
8700 };
8701 static const char * const cmp2[NUM_OF_COND_CMP][2] =
8702 {
8703 {\"cmp\\t%2, %3\",
8704 \"cmp\\t%0, %1\"},
8705 {\"cmp\\t%2, %3\",
8706 \"cmn\\t%0, #%n1\"},
8707 {\"cmn\\t%2, #%n3\",
8708 \"cmp\\t%0, %1\"},
8709 {\"cmn\\t%2, #%n3\",
8710 \"cmn\\t%0, #%n1\"}
8711 };
8712 static const char * const ite[2] =
8713 {
8714 \"it\\t%d5\",
8715 \"it\\t%d4\"
8716 };
8717 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
8718 CMP_CMP, CMN_CMP, CMP_CMP,
8719 CMN_CMP, CMP_CMN, CMN_CMN};
8720 int swap =
8721 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8722
8723 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8724 if (TARGET_THUMB2) {
8725 output_asm_insn (ite[swap], operands);
8726 }
8727 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8728 return \"\";
8729 }"
8730 [(set_attr "conds" "set")
8731 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
8732 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
8733 (set_attr "type" "multiple")
8734 (set_attr_alternative "length"
8735 [(const_int 6)
8736 (const_int 8)
8737 (const_int 8)
8738 (const_int 8)
8739 (const_int 8)
8740 (if_then_else (eq_attr "is_thumb" "no")
8741 (const_int 8)
8742 (const_int 10))
8743 (if_then_else (eq_attr "is_thumb" "no")
8744 (const_int 8)
8745 (const_int 10))
8746 (if_then_else (eq_attr "is_thumb" "no")
8747 (const_int 8)
8748 (const_int 10))
8749 (if_then_else (eq_attr "is_thumb" "no")
8750 (const_int 8)
8751 (const_int 10))])]
8752 )
8753
8754 (define_insn "*cmp_ite1"
8755 [(set (match_operand 6 "dominant_cc_register" "")
8756 (compare
8757 (if_then_else:SI
8758 (match_operator 4 "arm_comparison_operator"
8759 [(match_operand:SI 0 "s_register_operand"
8760 "l,l,l,r,r,r,r,r,r")
8761 (match_operand:SI 1 "arm_add_operand"
8762 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
8763 (match_operator:SI 5 "arm_comparison_operator"
8764 [(match_operand:SI 2 "s_register_operand"
8765 "l,r,r,l,l,r,r,r,r")
8766 (match_operand:SI 3 "arm_add_operand"
8767 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
8768 (const_int 1))
8769 (const_int 0)))]
8770 "TARGET_32BIT"
8771 "*
8772 {
8773 static const char * const cmp1[NUM_OF_COND_CMP][2] =
8774 {
8775 {\"cmp\\t%0, %1\",
8776 \"cmp\\t%2, %3\"},
8777 {\"cmn\\t%0, #%n1\",
8778 \"cmp\\t%2, %3\"},
8779 {\"cmp\\t%0, %1\",
8780 \"cmn\\t%2, #%n3\"},
8781 {\"cmn\\t%0, #%n1\",
8782 \"cmn\\t%2, #%n3\"}
8783 };
8784 static const char * const cmp2[NUM_OF_COND_CMP][2] =
8785 {
8786 {\"cmp%d4\\t%2, %3\",
8787 \"cmp%D5\\t%0, %1\"},
8788 {\"cmp%d4\\t%2, %3\",
8789 \"cmn%D5\\t%0, #%n1\"},
8790 {\"cmn%d4\\t%2, #%n3\",
8791 \"cmp%D5\\t%0, %1\"},
8792 {\"cmn%d4\\t%2, #%n3\",
8793 \"cmn%D5\\t%0, #%n1\"}
8794 };
8795 static const char * const ite[2] =
8796 {
8797 \"it\\t%d4\",
8798 \"it\\t%D5\"
8799 };
8800 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
8801 CMP_CMP, CMN_CMP, CMP_CMP,
8802 CMN_CMP, CMP_CMN, CMN_CMN};
8803 int swap =
8804 comparison_dominates_p (GET_CODE (operands[5]),
8805 reverse_condition (GET_CODE (operands[4])));
8806
8807 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8808 if (TARGET_THUMB2) {
8809 output_asm_insn (ite[swap], operands);
8810 }
8811 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8812 return \"\";
8813 }"
8814 [(set_attr "conds" "set")
8815 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
8816 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
8817 (set_attr_alternative "length"
8818 [(const_int 6)
8819 (const_int 8)
8820 (const_int 8)
8821 (const_int 8)
8822 (const_int 8)
8823 (if_then_else (eq_attr "is_thumb" "no")
8824 (const_int 8)
8825 (const_int 10))
8826 (if_then_else (eq_attr "is_thumb" "no")
8827 (const_int 8)
8828 (const_int 10))
8829 (if_then_else (eq_attr "is_thumb" "no")
8830 (const_int 8)
8831 (const_int 10))
8832 (if_then_else (eq_attr "is_thumb" "no")
8833 (const_int 8)
8834 (const_int 10))])
8835 (set_attr "type" "multiple")]
8836 )
8837
8838 (define_insn "*cmp_and"
8839 [(set (match_operand 6 "dominant_cc_register" "")
8840 (compare
8841 (and:SI
8842 (match_operator 4 "arm_comparison_operator"
8843 [(match_operand:SI 0 "s_register_operand"
8844 "l,l,l,r,r,r,r,r,r,r")
8845 (match_operand:SI 1 "arm_add_operand"
8846 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
8847 (match_operator:SI 5 "arm_comparison_operator"
8848 [(match_operand:SI 2 "s_register_operand"
8849 "l,r,r,l,l,r,r,r,r,r")
8850 (match_operand:SI 3 "arm_add_operand"
8851 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
8852 (const_int 0)))]
8853 "TARGET_32BIT"
8854 "*
8855 {
8856 static const char *const cmp1[NUM_OF_COND_CMP][2] =
8857 {
8858 {\"cmp%d5\\t%0, %1\",
8859 \"cmp%d4\\t%2, %3\"},
8860 {\"cmn%d5\\t%0, #%n1\",
8861 \"cmp%d4\\t%2, %3\"},
8862 {\"cmp%d5\\t%0, %1\",
8863 \"cmn%d4\\t%2, #%n3\"},
8864 {\"cmn%d5\\t%0, #%n1\",
8865 \"cmn%d4\\t%2, #%n3\"}
8866 };
8867 static const char *const cmp2[NUM_OF_COND_CMP][2] =
8868 {
8869 {\"cmp\\t%2, %3\",
8870 \"cmp\\t%0, %1\"},
8871 {\"cmp\\t%2, %3\",
8872 \"cmn\\t%0, #%n1\"},
8873 {\"cmn\\t%2, #%n3\",
8874 \"cmp\\t%0, %1\"},
8875 {\"cmn\\t%2, #%n3\",
8876 \"cmn\\t%0, #%n1\"}
8877 };
8878 static const char *const ite[2] =
8879 {
8880 \"it\\t%d5\",
8881 \"it\\t%d4\"
8882 };
8883 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
8884 CMP_CMP, CMN_CMP, CMP_CMP,
8885 CMP_CMP, CMN_CMP, CMP_CMN,
8886 CMN_CMN};
8887 int swap =
8888 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8889
8890 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8891 if (TARGET_THUMB2) {
8892 output_asm_insn (ite[swap], operands);
8893 }
8894 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8895 return \"\";
8896 }"
8897 [(set_attr "conds" "set")
8898 (set_attr "predicable" "no")
8899 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
8900 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
8901 (set_attr_alternative "length"
8902 [(const_int 6)
8903 (const_int 8)
8904 (const_int 8)
8905 (const_int 8)
8906 (const_int 8)
8907 (const_int 6)
8908 (if_then_else (eq_attr "is_thumb" "no")
8909 (const_int 8)
8910 (const_int 10))
8911 (if_then_else (eq_attr "is_thumb" "no")
8912 (const_int 8)
8913 (const_int 10))
8914 (if_then_else (eq_attr "is_thumb" "no")
8915 (const_int 8)
8916 (const_int 10))
8917 (if_then_else (eq_attr "is_thumb" "no")
8918 (const_int 8)
8919 (const_int 10))])
8920 (set_attr "type" "multiple")]
8921 )
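;; (Illustrative note, not part of the original sources: for the simple
;; CMP/CMP alternatives this pattern emits the conditional-compare idiom.
;; With example registers, (Ra == Rb) && (Rc == Rd) becomes roughly
;;   cmp   Ra, Rb
;;   it    eq           @ Thumb-2 only; omitted for ARM
;;   cmpeq Rc, Rd
;; so the Z flag ends up set only when both comparisons hold.)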
8922
8923 (define_insn "*cmp_ior"
8924 [(set (match_operand 6 "dominant_cc_register" "")
8925 (compare
8926 (ior:SI
8927 (match_operator 4 "arm_comparison_operator"
8928 [(match_operand:SI 0 "s_register_operand"
8929 "l,l,l,r,r,r,r,r,r,r")
8930 (match_operand:SI 1 "arm_add_operand"
8931 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
8932 (match_operator:SI 5 "arm_comparison_operator"
8933 [(match_operand:SI 2 "s_register_operand"
8934 "l,r,r,l,l,r,r,r,r,r")
8935 (match_operand:SI 3 "arm_add_operand"
8936 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
8937 (const_int 0)))]
8938 "TARGET_32BIT"
8939 "*
8940 {
8941 static const char *const cmp1[NUM_OF_COND_CMP][2] =
8942 {
8943 {\"cmp\\t%0, %1\",
8944 \"cmp\\t%2, %3\"},
8945 {\"cmn\\t%0, #%n1\",
8946 \"cmp\\t%2, %3\"},
8947 {\"cmp\\t%0, %1\",
8948 \"cmn\\t%2, #%n3\"},
8949 {\"cmn\\t%0, #%n1\",
8950 \"cmn\\t%2, #%n3\"}
8951 };
8952 static const char *const cmp2[NUM_OF_COND_CMP][2] =
8953 {
8954 {\"cmp%D4\\t%2, %3\",
8955 \"cmp%D5\\t%0, %1\"},
8956 {\"cmp%D4\\t%2, %3\",
8957 \"cmn%D5\\t%0, #%n1\"},
8958 {\"cmn%D4\\t%2, #%n3\",
8959 \"cmp%D5\\t%0, %1\"},
8960 {\"cmn%D4\\t%2, #%n3\",
8961 \"cmn%D5\\t%0, #%n1\"}
8962 };
8963 static const char *const ite[2] =
8964 {
8965 \"it\\t%D4\",
8966 \"it\\t%D5\"
8967 };
8968 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
8969 CMP_CMP, CMN_CMP, CMP_CMP,
8970 CMP_CMP, CMN_CMP, CMP_CMN,
8971 CMN_CMN};
8972 int swap =
8973 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8974
8975 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8976 if (TARGET_THUMB2) {
8977 output_asm_insn (ite[swap], operands);
8978 }
8979 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8980 return \"\";
8981 }
8982 "
8983 [(set_attr "conds" "set")
8984 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
8985 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
8986 (set_attr_alternative "length"
8987 [(const_int 6)
8988 (const_int 8)
8989 (const_int 8)
8990 (const_int 8)
8991 (const_int 8)
8992 (const_int 6)
8993 (if_then_else (eq_attr "is_thumb" "no")
8994 (const_int 8)
8995 (const_int 10))
8996 (if_then_else (eq_attr "is_thumb" "no")
8997 (const_int 8)
8998 (const_int 10))
8999 (if_then_else (eq_attr "is_thumb" "no")
9000 (const_int 8)
9001 (const_int 10))
9002 (if_then_else (eq_attr "is_thumb" "no")
9003 (const_int 8)
9004 (const_int 10))])
9005 (set_attr "type" "multiple")]
9006 )
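;; (Illustrative note, not part of the original sources: the IOR variant
;; runs the second compare under the inverse of the first condition.  With
;; example registers, (Ra == Rb) || (Rc == Rd) becomes roughly
;;   cmp   Rc, Rd
;;   it    ne           @ Thumb-2 only; omitted for ARM
;;   cmpne Ra, Rb
;; leaving Z set when either comparison holds.)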
9007
9008 (define_insn_and_split "*ior_scc_scc"
9009 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9010 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9011 [(match_operand:SI 1 "s_register_operand" "l,r")
9012 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9013 (match_operator:SI 6 "arm_comparison_operator"
9014 [(match_operand:SI 4 "s_register_operand" "l,r")
9015 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
9016 (clobber (reg:CC CC_REGNUM))]
9017 "TARGET_32BIT
9018 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9019 != CCmode)"
9020 "#"
9021 "TARGET_32BIT && reload_completed"
9022 [(set (match_dup 7)
9023 (compare
9024 (ior:SI
9025 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9026 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9027 (const_int 0)))
9028 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9029 "operands[7]
9030 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9031 DOM_CC_X_OR_Y),
9032 CC_REGNUM);"
9033 [(set_attr "conds" "clob")
9034 (set_attr "enabled_for_short_it" "yes,no")
9035 (set_attr "length" "16")
9036 (set_attr "type" "multiple")]
9037 )
9038
9039 ; If the above pattern is followed by a CMP insn, then the compare is
9040 ; redundant, since we can rework the conditional instruction that follows.
9041 (define_insn_and_split "*ior_scc_scc_cmp"
9042 [(set (match_operand 0 "dominant_cc_register" "")
9043 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9044 [(match_operand:SI 1 "s_register_operand" "l,r")
9045 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9046 (match_operator:SI 6 "arm_comparison_operator"
9047 [(match_operand:SI 4 "s_register_operand" "l,r")
9048 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9049 (const_int 0)))
9050 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9051 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9052 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9053 "TARGET_32BIT"
9054 "#"
9055 "TARGET_32BIT && reload_completed"
9056 [(set (match_dup 0)
9057 (compare
9058 (ior:SI
9059 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9060 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9061 (const_int 0)))
9062 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9063 ""
9064 [(set_attr "conds" "set")
9065 (set_attr "enabled_for_short_it" "yes,no")
9066 (set_attr "length" "16")
9067 (set_attr "type" "multiple")]
9068 )
9069
9070 (define_insn_and_split "*and_scc_scc"
9071 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9072 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9073 [(match_operand:SI 1 "s_register_operand" "l,r")
9074 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9075 (match_operator:SI 6 "arm_comparison_operator"
9076 [(match_operand:SI 4 "s_register_operand" "l,r")
9077 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
9078 (clobber (reg:CC CC_REGNUM))]
9079 "TARGET_32BIT
9080 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9081 != CCmode)"
9082 "#"
9083 "TARGET_32BIT && reload_completed
9084 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9085 != CCmode)"
9086 [(set (match_dup 7)
9087 (compare
9088 (and:SI
9089 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9090 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9091 (const_int 0)))
9092 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9093 "operands[7]
9094 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9095 DOM_CC_X_AND_Y),
9096 CC_REGNUM);"
9097 [(set_attr "conds" "clob")
9098 (set_attr "enabled_for_short_it" "yes,no")
9099 (set_attr "length" "16")
9100 (set_attr "type" "multiple")]
9101 )
9102
9103 ; If the above pattern is followed by a CMP insn, then the compare is
9104 ; redundant, since we can rework the conditional instruction that follows.
9105 (define_insn_and_split "*and_scc_scc_cmp"
9106 [(set (match_operand 0 "dominant_cc_register" "")
9107 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9108 [(match_operand:SI 1 "s_register_operand" "l,r")
9109 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9110 (match_operator:SI 6 "arm_comparison_operator"
9111 [(match_operand:SI 4 "s_register_operand" "l,r")
9112 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9113 (const_int 0)))
9114 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9115 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9116 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9117 "TARGET_32BIT"
9118 "#"
9119 "TARGET_32BIT && reload_completed"
9120 [(set (match_dup 0)
9121 (compare
9122 (and:SI
9123 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9124 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9125 (const_int 0)))
9126 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9127 ""
9128 [(set_attr "conds" "set")
9129 (set_attr "enabled_for_short_it" "yes,no")
9130 (set_attr "length" "16")
9131 (set_attr "type" "multiple")]
9132 )
9133
9134 ;; If there is no dominance in the comparison, then we can still save an
9135 ;; instruction in the AND case, since we can know that the second compare
9136 ;; need only zero the value if false (if true, then the value is already
9137 ;; correct).
9138 (define_insn_and_split "*and_scc_scc_nodom"
9139 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
9140 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9141 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9142 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9143 (match_operator:SI 6 "arm_comparison_operator"
9144 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9145 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9146 (clobber (reg:CC CC_REGNUM))]
9147 "TARGET_32BIT
9148 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9149 == CCmode)"
9150 "#"
9151 "TARGET_32BIT && reload_completed"
9152 [(parallel [(set (match_dup 0)
9153 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9154 (clobber (reg:CC CC_REGNUM))])
9155 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9156 (set (match_dup 0)
9157 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9158 (match_dup 0)
9159 (const_int 0)))]
9160 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9161 operands[4], operands[5]),
9162 CC_REGNUM);
9163 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9164 operands[5]);"
9165 [(set_attr "conds" "clob")
9166 (set_attr "length" "20")
9167 (set_attr "type" "multiple")]
9168 )
9169
9170 (define_split
9171 [(set (reg:CC_NOOV CC_REGNUM)
9172 (compare:CC_NOOV (ior:SI
9173 (and:SI (match_operand:SI 0 "s_register_operand" "")
9174 (const_int 1))
9175 (match_operator:SI 1 "arm_comparison_operator"
9176 [(match_operand:SI 2 "s_register_operand" "")
9177 (match_operand:SI 3 "arm_add_operand" "")]))
9178 (const_int 0)))
9179 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9180 "TARGET_ARM"
9181 [(set (match_dup 4)
9182 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9183 (match_dup 0)))
9184 (set (reg:CC_NOOV CC_REGNUM)
9185 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9186 (const_int 0)))]
9187 "")
9188
9189 (define_split
9190 [(set (reg:CC_NOOV CC_REGNUM)
9191 (compare:CC_NOOV (ior:SI
9192 (match_operator:SI 1 "arm_comparison_operator"
9193 [(match_operand:SI 2 "s_register_operand" "")
9194 (match_operand:SI 3 "arm_add_operand" "")])
9195 (and:SI (match_operand:SI 0 "s_register_operand" "")
9196 (const_int 1)))
9197 (const_int 0)))
9198 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9199 "TARGET_ARM"
9200 [(set (match_dup 4)
9201 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9202 (match_dup 0)))
9203 (set (reg:CC_NOOV CC_REGNUM)
9204 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9205 (const_int 0)))]
9206 "")
9207 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9208
9209 (define_insn_and_split "*negscc"
9210 [(set (match_operand:SI 0 "s_register_operand" "=r")
9211 (neg:SI (match_operator 3 "arm_comparison_operator"
9212 [(match_operand:SI 1 "s_register_operand" "r")
9213 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9214 (clobber (reg:CC CC_REGNUM))]
9215 "TARGET_ARM"
9216 "#"
9217 "&& reload_completed"
9218 [(const_int 0)]
9219 {
9220 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
9221
9222 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9223 {
9224 /* Emit mov\\t%0, %1, asr #31 */
9225 emit_insn (gen_rtx_SET (operands[0],
9226 gen_rtx_ASHIFTRT (SImode,
9227 operands[1],
9228 GEN_INT (31))));
9229 DONE;
9230 }
9231 else if (GET_CODE (operands[3]) == NE)
9232 {
9233 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
9234 if (CONST_INT_P (operands[2]))
9235 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
9236 gen_int_mode (-INTVAL (operands[2]),
9237 SImode)));
9238 else
9239 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
9240
9241 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9242 gen_rtx_NE (SImode,
9243 cc_reg,
9244 const0_rtx),
9245 gen_rtx_SET (operands[0],
9246 GEN_INT (~0))));
9247 DONE;
9248 }
9249 else
9250 {
9251 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
9252 emit_insn (gen_rtx_SET (cc_reg,
9253 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
9254 enum rtx_code rc = GET_CODE (operands[3]);
9255
9256 rc = reverse_condition (rc);
9257 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9258 gen_rtx_fmt_ee (rc,
9259 VOIDmode,
9260 cc_reg,
9261 const0_rtx),
9262 gen_rtx_SET (operands[0], const0_rtx)));
9263 rc = GET_CODE (operands[3]);
9264 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9265 gen_rtx_fmt_ee (rc,
9266 VOIDmode,
9267 cc_reg,
9268 const0_rtx),
9269 gen_rtx_SET (operands[0],
9270 GEN_INT (~0))));
9271 DONE;
9272 }
9273 FAIL;
9274 }
9275 [(set_attr "conds" "clob")
9276 (set_attr "length" "12")
9277 (set_attr "type" "multiple")]
9278 )
9279
9280 (define_insn_and_split "movcond_addsi"
9281 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
9282 (if_then_else:SI
9283 (match_operator 5 "comparison_operator"
9284 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
9285 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
9286 (const_int 0)])
9287 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
9288 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
9289 (clobber (reg:CC CC_REGNUM))]
9290 "TARGET_32BIT"
9291 "#"
9292 "&& reload_completed"
9293 [(set (reg:CC_NOOV CC_REGNUM)
9294 (compare:CC_NOOV
9295 (plus:SI (match_dup 3)
9296 (match_dup 4))
9297 (const_int 0)))
9298 (set (match_dup 0) (match_dup 1))
9299 (cond_exec (match_dup 6)
9300 (set (match_dup 0) (match_dup 2)))]
9301 "
9302 {
9303 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
9304 operands[3], operands[4]);
9305 enum rtx_code rc = GET_CODE (operands[5]);
9306 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
9307 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
9308 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
9309 rc = reverse_condition (rc);
9310 else
9311 std::swap (operands[1], operands[2]);
9312
9313 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
9314 }
9315 "
9316 [(set_attr "conds" "clob")
9317 (set_attr "enabled_for_short_it" "no,yes,yes")
9318 (set_attr "type" "multiple")]
9319 )
9320
9321 (define_insn "movcond"
9322 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9323 (if_then_else:SI
9324 (match_operator 5 "arm_comparison_operator"
9325 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9326 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9327 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9328 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9329 (clobber (reg:CC CC_REGNUM))]
9330 "TARGET_ARM"
9331 "*
9332 if (GET_CODE (operands[5]) == LT
9333 && (operands[4] == const0_rtx))
9334 {
9335 if (which_alternative != 1 && REG_P (operands[1]))
9336 {
9337 if (operands[2] == const0_rtx)
9338 return \"and\\t%0, %1, %3, asr #31\";
9339 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9340 }
9341 else if (which_alternative != 0 && REG_P (operands[2]))
9342 {
9343 if (operands[1] == const0_rtx)
9344 return \"bic\\t%0, %2, %3, asr #31\";
9345 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9346 }
9347 /* The only case that falls through to here is when both ops 1 & 2
9348 are constants. */
9349 }
9350
9351 if (GET_CODE (operands[5]) == GE
9352 && (operands[4] == const0_rtx))
9353 {
9354 if (which_alternative != 1 && REG_P (operands[1]))
9355 {
9356 if (operands[2] == const0_rtx)
9357 return \"bic\\t%0, %1, %3, asr #31\";
9358 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9359 }
9360 else if (which_alternative != 0 && REG_P (operands[2]))
9361 {
9362 if (operands[1] == const0_rtx)
9363 return \"and\\t%0, %2, %3, asr #31\";
9364 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9365 }
9366 /* The only case that falls through to here is when both ops 1 & 2
9367 are constants. */
9368 }
9369 if (CONST_INT_P (operands[4])
9370 && !const_ok_for_arm (INTVAL (operands[4])))
9371 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9372 else
9373 output_asm_insn (\"cmp\\t%3, %4\", operands);
9374 if (which_alternative != 0)
9375 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9376 if (which_alternative != 1)
9377 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9378 return \"\";
9379 "
9380 [(set_attr "conds" "clob")
9381 (set_attr "length" "8,8,12")
9382 (set_attr "type" "multiple")]
9383 )
9384
9385 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9386
9387 (define_insn "*ifcompare_plus_move"
9388 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9389 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9390 [(match_operand:SI 4 "s_register_operand" "r,r")
9391 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9392 (plus:SI
9393 (match_operand:SI 2 "s_register_operand" "r,r")
9394 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9395 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9396 (clobber (reg:CC CC_REGNUM))]
9397 "TARGET_ARM"
9398 "#"
9399 [(set_attr "conds" "clob")
9400 (set_attr "length" "8,12")
9401 (set_attr "type" "multiple")]
9402 )
9403
9404 (define_insn "*if_plus_move"
9405 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9406 (if_then_else:SI
9407 (match_operator 4 "arm_comparison_operator"
9408 [(match_operand 5 "cc_register" "") (const_int 0)])
9409 (plus:SI
9410 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9411 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9412 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9413 "TARGET_ARM"
9414 "@
9415 add%d4\\t%0, %2, %3
9416 sub%d4\\t%0, %2, #%n3
9417 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9418 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9419 [(set_attr "conds" "use")
9420 (set_attr "length" "4,4,8,8")
9421 (set_attr_alternative "type"
9422 [(if_then_else (match_operand 3 "const_int_operand" "")
9423 (const_string "alu_imm" )
9424 (const_string "alu_sreg"))
9425 (const_string "alu_imm")
9426 (const_string "multiple")
9427 (const_string "multiple")])]
9428 )
9429
9430 (define_insn "*ifcompare_move_plus"
9431 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9432 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9433 [(match_operand:SI 4 "s_register_operand" "r,r")
9434 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9435 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9436 (plus:SI
9437 (match_operand:SI 2 "s_register_operand" "r,r")
9438 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9439 (clobber (reg:CC CC_REGNUM))]
9440 "TARGET_ARM"
9441 "#"
9442 [(set_attr "conds" "clob")
9443 (set_attr "length" "8,12")
9444 (set_attr "type" "multiple")]
9445 )
9446
9447 (define_insn "*if_move_plus"
9448 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9449 (if_then_else:SI
9450 (match_operator 4 "arm_comparison_operator"
9451 [(match_operand 5 "cc_register" "") (const_int 0)])
9452 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9453 (plus:SI
9454 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9455 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9456 "TARGET_ARM"
9457 "@
9458 add%D4\\t%0, %2, %3
9459 sub%D4\\t%0, %2, #%n3
9460 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9461 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9462 [(set_attr "conds" "use")
9463 (set_attr "length" "4,4,8,8")
9464 (set_attr_alternative "type"
9465 [(if_then_else (match_operand 3 "const_int_operand" "")
9466 (const_string "alu_imm" )
9467 (const_string "alu_sreg"))
9468 (const_string "alu_imm")
9469 (const_string "multiple")
9470 (const_string "multiple")])]
9471 )
9472
9473 (define_insn "*ifcompare_arith_arith"
9474 [(set (match_operand:SI 0 "s_register_operand" "=r")
9475 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9476 [(match_operand:SI 5 "s_register_operand" "r")
9477 (match_operand:SI 6 "arm_add_operand" "rIL")])
9478 (match_operator:SI 8 "shiftable_operator"
9479 [(match_operand:SI 1 "s_register_operand" "r")
9480 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9481 (match_operator:SI 7 "shiftable_operator"
9482 [(match_operand:SI 3 "s_register_operand" "r")
9483 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9484 (clobber (reg:CC CC_REGNUM))]
9485 "TARGET_ARM"
9486 "#"
9487 [(set_attr "conds" "clob")
9488 (set_attr "length" "12")
9489 (set_attr "type" "multiple")]
9490 )
9491
9492 (define_insn "*if_arith_arith"
9493 [(set (match_operand:SI 0 "s_register_operand" "=r")
9494 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9495 [(match_operand 8 "cc_register" "") (const_int 0)])
9496 (match_operator:SI 6 "shiftable_operator"
9497 [(match_operand:SI 1 "s_register_operand" "r")
9498 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9499 (match_operator:SI 7 "shiftable_operator"
9500 [(match_operand:SI 3 "s_register_operand" "r")
9501 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9502 "TARGET_ARM"
9503 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9504 [(set_attr "conds" "use")
9505 (set_attr "length" "8")
9506 (set_attr "type" "multiple")]
9507 )
9508
9509 (define_insn "*ifcompare_arith_move"
9510 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9511 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9512 [(match_operand:SI 2 "s_register_operand" "r,r")
9513 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9514 (match_operator:SI 7 "shiftable_operator"
9515 [(match_operand:SI 4 "s_register_operand" "r,r")
9516 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9517 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9518 (clobber (reg:CC CC_REGNUM))]
9519 "TARGET_ARM"
9520 "*
9521 /* If we have an operation where (op x 0) is the identity operation, the
9522 conditional operator is LT or GE, we are comparing against zero, and
9523 everything is in registers, then we can do this in two instructions.  */
9524 if (operands[3] == const0_rtx
9525 && GET_CODE (operands[7]) != AND
9526 && REG_P (operands[5])
9527 && REG_P (operands[1])
9528 && REGNO (operands[1]) == REGNO (operands[4])
9529 && REGNO (operands[4]) != REGNO (operands[0]))
9530 {
9531 if (GET_CODE (operands[6]) == LT)
9532 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9533 else if (GET_CODE (operands[6]) == GE)
9534 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9535 }
9536 if (CONST_INT_P (operands[3])
9537 && !const_ok_for_arm (INTVAL (operands[3])))
9538 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9539 else
9540 output_asm_insn (\"cmp\\t%2, %3\", operands);
9541 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9542 if (which_alternative != 0)
9543 return \"mov%D6\\t%0, %1\";
9544 return \"\";
9545 "
9546 [(set_attr "conds" "clob")
9547 (set_attr "length" "8,12")
9548 (set_attr "type" "multiple")]
9549 )
9550
9551 (define_insn "*if_arith_move"
9552 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9553 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9554 [(match_operand 6 "cc_register" "") (const_int 0)])
9555 (match_operator:SI 5 "shiftable_operator"
9556 [(match_operand:SI 2 "s_register_operand" "r,r")
9557 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9558 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9559 "TARGET_ARM"
9560 "@
9561 %I5%d4\\t%0, %2, %3
9562 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9563 [(set_attr "conds" "use")
9564 (set_attr "length" "4,8")
9565 (set_attr_alternative "type"
9566 [(if_then_else (match_operand 3 "const_int_operand" "")
9567 (const_string "alu_shift_imm" )
9568 (const_string "alu_shift_reg"))
9569 (const_string "multiple")])]
9570 )
9571
9572 (define_insn "*ifcompare_move_arith"
9573 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9574 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9575 [(match_operand:SI 4 "s_register_operand" "r,r")
9576 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9577 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9578 (match_operator:SI 7 "shiftable_operator"
9579 [(match_operand:SI 2 "s_register_operand" "r,r")
9580 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9581 (clobber (reg:CC CC_REGNUM))]
9582 "TARGET_ARM"
9583 "*
9584 /* If we have an operation where (op x 0) is the identity operation, the
9585 conditional operator is LT or GE, we are comparing against zero, and
9586 everything is in registers, then we can do this in two instructions.  */
9587 if (operands[5] == const0_rtx
9588 && GET_CODE (operands[7]) != AND
9589 && REG_P (operands[3])
9590 && REG_P (operands[1])
9591 && REGNO (operands[1]) == REGNO (operands[2])
9592 && REGNO (operands[2]) != REGNO (operands[0]))
9593 {
9594 if (GET_CODE (operands[6]) == GE)
9595 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9596 else if (GET_CODE (operands[6]) == LT)
9597 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9598 }
9599
9600 if (CONST_INT_P (operands[5])
9601 && !const_ok_for_arm (INTVAL (operands[5])))
9602 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9603 else
9604 output_asm_insn (\"cmp\\t%4, %5\", operands);
9605
9606 if (which_alternative != 0)
9607 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9608 return \"%I7%D6\\t%0, %2, %3\";
9609 "
9610 [(set_attr "conds" "clob")
9611 (set_attr "length" "8,12")
9612 (set_attr "type" "multiple")]
9613 )
9614
9615 (define_insn "*if_move_arith"
9616 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9617 (if_then_else:SI
9618 (match_operator 4 "arm_comparison_operator"
9619 [(match_operand 6 "cc_register" "") (const_int 0)])
9620 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9621 (match_operator:SI 5 "shiftable_operator"
9622 [(match_operand:SI 2 "s_register_operand" "r,r")
9623 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9624 "TARGET_ARM"
9625 "@
9626 %I5%D4\\t%0, %2, %3
9627 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9628 [(set_attr "conds" "use")
9629 (set_attr "length" "4,8")
9630 (set_attr_alternative "type"
9631 [(if_then_else (match_operand 3 "const_int_operand" "")
9632 (const_string "alu_shift_imm" )
9633 (const_string "alu_shift_reg"))
9634 (const_string "multiple")])]
9635 )
9636
9637 (define_insn "*ifcompare_move_not"
9638 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9639 (if_then_else:SI
9640 (match_operator 5 "arm_comparison_operator"
9641 [(match_operand:SI 3 "s_register_operand" "r,r")
9642 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9643 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9644 (not:SI
9645 (match_operand:SI 2 "s_register_operand" "r,r"))))
9646 (clobber (reg:CC CC_REGNUM))]
9647 "TARGET_ARM"
9648 "#"
9649 [(set_attr "conds" "clob")
9650 (set_attr "length" "8,12")
9651 (set_attr "type" "multiple")]
9652 )
9653
9654 (define_insn "*if_move_not"
9655 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9656 (if_then_else:SI
9657 (match_operator 4 "arm_comparison_operator"
9658 [(match_operand 3 "cc_register" "") (const_int 0)])
9659 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9660 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9661 "TARGET_ARM"
9662 "@
9663 mvn%D4\\t%0, %2
9664 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9665 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9666 [(set_attr "conds" "use")
9668 (set_attr "length" "4,8,8")
9669 (set_attr "type" "mvn_reg,multiple,multiple")]
9670 )
9671
9672 (define_insn "*ifcompare_not_move"
9673 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9674 (if_then_else:SI
9675 (match_operator 5 "arm_comparison_operator"
9676 [(match_operand:SI 3 "s_register_operand" "r,r")
9677 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9678 (not:SI
9679 (match_operand:SI 2 "s_register_operand" "r,r"))
9680 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9681 (clobber (reg:CC CC_REGNUM))]
9682 "TARGET_ARM"
9683 "#"
9684 [(set_attr "conds" "clob")
9685 (set_attr "length" "8,12")
9686 (set_attr "type" "multiple")]
9687 )
9688
9689 (define_insn "*if_not_move"
9690 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9691 (if_then_else:SI
9692 (match_operator 4 "arm_comparison_operator"
9693 [(match_operand 3 "cc_register" "") (const_int 0)])
9694 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9695 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9696 "TARGET_ARM"
9697 "@
9698 mvn%d4\\t%0, %2
9699 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9700 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9701 [(set_attr "conds" "use")
9702 (set_attr "type" "mvn_reg,multiple,multiple")
9703 (set_attr "length" "4,8,8")]
9704 )
9705
9706 (define_insn "*ifcompare_shift_move"
9707 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9708 (if_then_else:SI
9709 (match_operator 6 "arm_comparison_operator"
9710 [(match_operand:SI 4 "s_register_operand" "r,r")
9711 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9712 (match_operator:SI 7 "shift_operator"
9713 [(match_operand:SI 2 "s_register_operand" "r,r")
9714 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9715 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9716 (clobber (reg:CC CC_REGNUM))]
9717 "TARGET_ARM"
9718 "#"
9719 [(set_attr "conds" "clob")
9720 (set_attr "length" "8,12")
9721 (set_attr "type" "multiple")]
9722 )
9723
9724 (define_insn "*if_shift_move"
9725 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9726 (if_then_else:SI
9727 (match_operator 5 "arm_comparison_operator"
9728 [(match_operand 6 "cc_register" "") (const_int 0)])
9729 (match_operator:SI 4 "shift_operator"
9730 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9731 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9732 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9733 "TARGET_ARM"
9734 "@
9735 mov%d5\\t%0, %2%S4
9736 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9737 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9738 [(set_attr "conds" "use")
9739 (set_attr "shift" "2")
9740 (set_attr "length" "4,8,8")
9741 (set_attr_alternative "type"
9742 [(if_then_else (match_operand 3 "const_int_operand" "")
9743 (const_string "mov_shift" )
9744 (const_string "mov_shift_reg"))
9745 (const_string "multiple")
9746 (const_string "multiple")])]
9747 )
9748
9749 (define_insn "*ifcompare_move_shift"
9750 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9751 (if_then_else:SI
9752 (match_operator 6 "arm_comparison_operator"
9753 [(match_operand:SI 4 "s_register_operand" "r,r")
9754 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9755 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9756 (match_operator:SI 7 "shift_operator"
9757 [(match_operand:SI 2 "s_register_operand" "r,r")
9758 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9759 (clobber (reg:CC CC_REGNUM))]
9760 "TARGET_ARM"
9761 "#"
9762 [(set_attr "conds" "clob")
9763 (set_attr "length" "8,12")
9764 (set_attr "type" "multiple")]
9765 )
9766
9767 (define_insn "*if_move_shift"
9768 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9769 (if_then_else:SI
9770 (match_operator 5 "arm_comparison_operator"
9771 [(match_operand 6 "cc_register" "") (const_int 0)])
9772 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9773 (match_operator:SI 4 "shift_operator"
9774 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9775 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9776 "TARGET_ARM"
9777 "@
9778 mov%D5\\t%0, %2%S4
9779 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9780 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9781 [(set_attr "conds" "use")
9782 (set_attr "shift" "2")
9783 (set_attr "length" "4,8,8")
9784 (set_attr_alternative "type"
9785 [(if_then_else (match_operand 3 "const_int_operand" "")
9786 (const_string "mov_shift" )
9787 (const_string "mov_shift_reg"))
9788 (const_string "multiple")
9789 (const_string "multiple")])]
9790 )
9791
9792 (define_insn "*ifcompare_shift_shift"
9793 [(set (match_operand:SI 0 "s_register_operand" "=r")
9794 (if_then_else:SI
9795 (match_operator 7 "arm_comparison_operator"
9796 [(match_operand:SI 5 "s_register_operand" "r")
9797 (match_operand:SI 6 "arm_add_operand" "rIL")])
9798 (match_operator:SI 8 "shift_operator"
9799 [(match_operand:SI 1 "s_register_operand" "r")
9800 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9801 (match_operator:SI 9 "shift_operator"
9802 [(match_operand:SI 3 "s_register_operand" "r")
9803 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9804 (clobber (reg:CC CC_REGNUM))]
9805 "TARGET_ARM"
9806 "#"
9807 [(set_attr "conds" "clob")
9808 (set_attr "length" "12")
9809 (set_attr "type" "multiple")]
9810 )
9811
9812 (define_insn "*if_shift_shift"
9813 [(set (match_operand:SI 0 "s_register_operand" "=r")
9814 (if_then_else:SI
9815 (match_operator 5 "arm_comparison_operator"
9816 [(match_operand 8 "cc_register" "") (const_int 0)])
9817 (match_operator:SI 6 "shift_operator"
9818 [(match_operand:SI 1 "s_register_operand" "r")
9819 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9820 (match_operator:SI 7 "shift_operator"
9821 [(match_operand:SI 3 "s_register_operand" "r")
9822 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
9823 "TARGET_ARM"
9824 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
9825 [(set_attr "conds" "use")
9826 (set_attr "shift" "1")
9827 (set_attr "length" "8")
9828 (set (attr "type") (if_then_else
9829 (and (match_operand 2 "const_int_operand" "")
9830 (match_operand 4 "const_int_operand" ""))
9831 (const_string "mov_shift")
9832 (const_string "mov_shift_reg")))]
9833 )
9834
9835 (define_insn "*ifcompare_not_arith"
9836 [(set (match_operand:SI 0 "s_register_operand" "=r")
9837 (if_then_else:SI
9838 (match_operator 6 "arm_comparison_operator"
9839 [(match_operand:SI 4 "s_register_operand" "r")
9840 (match_operand:SI 5 "arm_add_operand" "rIL")])
9841 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9842 (match_operator:SI 7 "shiftable_operator"
9843 [(match_operand:SI 2 "s_register_operand" "r")
9844 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
9845 (clobber (reg:CC CC_REGNUM))]
9846 "TARGET_ARM"
9847 "#"
9848 [(set_attr "conds" "clob")
9849 (set_attr "length" "12")
9850 (set_attr "type" "multiple")]
9851 )
9852
9853 (define_insn "*if_not_arith"
9854 [(set (match_operand:SI 0 "s_register_operand" "=r")
9855 (if_then_else:SI
9856 (match_operator 5 "arm_comparison_operator"
9857 [(match_operand 4 "cc_register" "") (const_int 0)])
9858 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9859 (match_operator:SI 6 "shiftable_operator"
9860 [(match_operand:SI 2 "s_register_operand" "r")
9861 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
9862 "TARGET_ARM"
9863 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
9864 [(set_attr "conds" "use")
9865 (set_attr "type" "mvn_reg")
9866 (set_attr "length" "8")]
9867 )
9868
9869 (define_insn "*ifcompare_arith_not"
9870 [(set (match_operand:SI 0 "s_register_operand" "=r")
9871 (if_then_else:SI
9872 (match_operator 6 "arm_comparison_operator"
9873 [(match_operand:SI 4 "s_register_operand" "r")
9874 (match_operand:SI 5 "arm_add_operand" "rIL")])
9875 (match_operator:SI 7 "shiftable_operator"
9876 [(match_operand:SI 2 "s_register_operand" "r")
9877 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9878 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
9879 (clobber (reg:CC CC_REGNUM))]
9880 "TARGET_ARM"
9881 "#"
9882 [(set_attr "conds" "clob")
9883 (set_attr "length" "12")
9884 (set_attr "type" "multiple")]
9885 )
9886
9887 (define_insn "*if_arith_not"
9888 [(set (match_operand:SI 0 "s_register_operand" "=r")
9889 (if_then_else:SI
9890 (match_operator 5 "arm_comparison_operator"
9891 [(match_operand 4 "cc_register" "") (const_int 0)])
9892 (match_operator:SI 6 "shiftable_operator"
9893 [(match_operand:SI 2 "s_register_operand" "r")
9894 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9895 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
9896 "TARGET_ARM"
9897 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
9898 [(set_attr "conds" "use")
9899 (set_attr "type" "multiple")
9900 (set_attr "length" "8")]
9901 )
9902
9903 (define_insn "*ifcompare_neg_move"
9904 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9905 (if_then_else:SI
9906 (match_operator 5 "arm_comparison_operator"
9907 [(match_operand:SI 3 "s_register_operand" "r,r")
9908 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9909 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
9910 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9911 (clobber (reg:CC CC_REGNUM))]
9912 "TARGET_ARM"
9913 "#"
9914 [(set_attr "conds" "clob")
9915 (set_attr "length" "8,12")
9916 (set_attr "type" "multiple")]
9917 )
9918
9919 (define_insn_and_split "*if_neg_move"
9920 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
9921 (if_then_else:SI
9922 (match_operator 4 "arm_comparison_operator"
9923 [(match_operand 3 "cc_register" "") (const_int 0)])
9924 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
9925 (match_operand:SI 1 "s_register_operand" "0,0")))]
9926 "TARGET_32BIT"
9927 "#"
9928 "&& reload_completed"
9929 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
9930 (set (match_dup 0) (neg:SI (match_dup 2))))]
9931 ""
9932 [(set_attr "conds" "use")
9933 (set_attr "length" "4")
9934 (set_attr "arch" "t2,32")
9935 (set_attr "enabled_for_short_it" "yes,no")
9936 (set_attr "type" "logic_shift_imm")]
9937 )
9938
9939 (define_insn "*ifcompare_move_neg"
9940 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9941 (if_then_else:SI
9942 (match_operator 5 "arm_comparison_operator"
9943 [(match_operand:SI 3 "s_register_operand" "r,r")
9944 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9945 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9946 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
9947 (clobber (reg:CC CC_REGNUM))]
9948 "TARGET_ARM"
9949 "#"
9950 [(set_attr "conds" "clob")
9951 (set_attr "length" "8,12")
9952 (set_attr "type" "multiple")]
9953 )
9954
9955 (define_insn_and_split "*if_move_neg"
9956 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
9957 (if_then_else:SI
9958 (match_operator 4 "arm_comparison_operator"
9959 [(match_operand 3 "cc_register" "") (const_int 0)])
9960 (match_operand:SI 1 "s_register_operand" "0,0")
9961 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
9962 "TARGET_32BIT"
9963 "#"
9964 "&& reload_completed"
9965 [(cond_exec (match_dup 5)
9966 (set (match_dup 0) (neg:SI (match_dup 2))))]
9967 {
9968 machine_mode mode = GET_MODE (operands[3]);
9969 rtx_code rc = GET_CODE (operands[4]);
9970
9971 if (mode == CCFPmode || mode == CCFPEmode)
9972 rc = reverse_condition_maybe_unordered (rc);
9973 else
9974 rc = reverse_condition (rc);
9975
9976 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
9977 }
9978 [(set_attr "conds" "use")
9979 (set_attr "length" "4")
9980 (set_attr "arch" "t2,32")
9981 (set_attr "enabled_for_short_it" "yes,no")
9982 (set_attr "type" "logic_shift_imm")]
9983 )
9984
9985 (define_insn "*arith_adjacentmem"
9986 [(set (match_operand:SI 0 "s_register_operand" "=r")
9987 (match_operator:SI 1 "shiftable_operator"
9988 [(match_operand:SI 2 "memory_operand" "m")
9989 (match_operand:SI 3 "memory_operand" "m")]))
9990 (clobber (match_scratch:SI 4 "=r"))]
9991 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
9992 "*
9993 {
9994 rtx ldm[3];
9995 rtx arith[4];
9996 rtx base_reg;
9997 HOST_WIDE_INT val1 = 0, val2 = 0;
9998
9999 if (REGNO (operands[0]) > REGNO (operands[4]))
10000 {
10001 ldm[1] = operands[4];
10002 ldm[2] = operands[0];
10003 }
10004 else
10005 {
10006 ldm[1] = operands[0];
10007 ldm[2] = operands[4];
10008 }
10009
10010 base_reg = XEXP (operands[2], 0);
10011
10012 if (!REG_P (base_reg))
10013 {
10014 val1 = INTVAL (XEXP (base_reg, 1));
10015 base_reg = XEXP (base_reg, 0);
10016 }
10017
10018 if (!REG_P (XEXP (operands[3], 0)))
10019 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10020
10021 arith[0] = operands[0];
10022 arith[3] = operands[1];
10023
10024 if (val1 < val2)
10025 {
10026 arith[1] = ldm[1];
10027 arith[2] = ldm[2];
10028 }
10029 else
10030 {
10031 arith[1] = ldm[2];
10032 arith[2] = ldm[1];
10033 }
10034
10035 ldm[0] = base_reg;
10036 if (val1 != 0 && val2 != 0)
10037 {
10038 rtx ops[3];
10039
10040 if (val1 == 4 || val2 == 4)
10041 /* Other val must be 8, since we know they are adjacent and neither
10042 is zero. */
10043 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
10044 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10045 {
10046 ldm[0] = ops[0] = operands[4];
10047 ops[1] = base_reg;
10048 ops[2] = GEN_INT (val1);
10049 output_add_immediate (ops);
10050 if (val1 < val2)
10051 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10052 else
10053 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10054 }
10055 else
10056 {
10057 /* Offset is out of range for a single add, so use two ldr. */
10058 ops[0] = ldm[1];
10059 ops[1] = base_reg;
10060 ops[2] = GEN_INT (val1);
10061 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10062 ops[0] = ldm[2];
10063 ops[2] = GEN_INT (val2);
10064 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10065 }
10066 }
10067 else if (val1 != 0)
10068 {
10069 if (val1 < val2)
10070 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10071 else
10072 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10073 }
10074 else
10075 {
10076 if (val1 < val2)
10077 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10078 else
10079 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10080 }
10081 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10082 return \"\";
10083 }"
10084 [(set_attr "length" "12")
10085 (set_attr "predicable" "yes")
10086 (set_attr "type" "load_4")]
10087 )
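;; (Illustrative note, not part of the original sources: for two loads from
;; directly adjacent words, e.g. r0 = *p + *(p + 1) with p already in a base
;; register rB, this pattern can emit roughly
;;   ldmia rB, {rLo, rHi}
;;   add   r0, rLo, rHi
;; where rLo/rHi are the destination and scratch registers in ascending
;; register-number order, instead of two separate ldr instructions.)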
10088
10089 ; This pattern is never tried by combine, so do it as a peephole
10090
10091 (define_peephole2
10092 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10093 (match_operand:SI 1 "arm_general_register_operand" ""))
10094 (set (reg:CC CC_REGNUM)
10095 (compare:CC (match_dup 1) (const_int 0)))]
10096 "TARGET_ARM"
10097 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10098 (set (match_dup 0) (match_dup 1))])]
10099 ""
10100 )
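;; (Illustrative note, not part of the original sources: this merges a copy
;; followed by a compare of the source against zero, e.g.
;;   mov  r0, r1
;;   cmp  r1, #0
;; into a single parallel so that one instruction can both perform the copy
;; and set the condition flags.)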
10101
10102 (define_split
10103 [(set (match_operand:SI 0 "s_register_operand" "")
10104 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10105 (const_int 0))
10106 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10107 [(match_operand:SI 3 "s_register_operand" "")
10108 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10109 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10110 "TARGET_ARM"
10111 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10112 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10113 (match_dup 5)))]
10114 ""
10115 )
10116
10117 ;; This split can be used because CC_Z mode implies that the following
10118 ;; branch will be an equality, or an unsigned inequality, so the sign
10119 ;; extension is not needed.
10120
10121 (define_split
10122 [(set (reg:CC_Z CC_REGNUM)
10123 (compare:CC_Z
10124 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10125 (const_int 24))
10126 (match_operand 1 "const_int_operand" "")))
10127 (clobber (match_scratch:SI 2 ""))]
10128 "TARGET_ARM
10129 && ((UINTVAL (operands[1]))
10130 == ((UINTVAL (operands[1])) >> 24) << 24)"
10131 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10132 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10133 "
10134 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10135 "
10136 )
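;; (Illustrative note, not part of the original sources: the shifted byte can
;; only affect the top eight bits, so when the constant's low 24 bits are
;; zero the test is equivalent to comparing the zero-extended byte against
;; the constant shifted right by 24; e.g. comparing (byte << 24) with
;; 0x41000000 becomes comparing the zero-extended byte with 0x41.)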
10137 ;; ??? Check the patterns above for Thumb-2 usefulness
10138
10139 (define_expand "prologue"
10140 [(clobber (const_int 0))]
10141 "TARGET_EITHER"
10142 "if (TARGET_32BIT)
10143 arm_expand_prologue ();
10144 else
10145 thumb1_expand_prologue ();
10146 DONE;
10147 "
10148 )
10149
10150 (define_expand "epilogue"
10151 [(clobber (const_int 0))]
10152 "TARGET_EITHER"
10153 "
10154 if (crtl->calls_eh_return)
10155 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
10156 if (TARGET_THUMB1)
10157 {
10158 thumb1_expand_epilogue ();
10159 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10160 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10161 }
10162 else if (HAVE_return)
10163 {
10164 /* HAVE_return is testing for USE_RETURN_INSN (FALSE). Hence,
10165 no need for explicit testing again. */
10166 emit_jump_insn (gen_return ());
10167 }
10168 else if (TARGET_32BIT)
10169 {
10170 arm_expand_epilogue (true);
10171 }
10172 DONE;
10173 "
10174 )
10175
10176 ;; Note - although unspec_volatiles USE all hard registers,
10177 ;; USEs are ignored after reload has completed. Thus we need
10178 ;; to add an unspec of the link register to ensure that flow
10179 ;; does not think that it is unused by the sibcall branch that
10180 ;; will replace the standard function epilogue.
10181 (define_expand "sibcall_epilogue"
10182 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
10183 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10184 "TARGET_32BIT"
10185 "
10186 arm_expand_epilogue (false);
10187 DONE;
10188 "
10189 )
10190
10191 (define_expand "eh_epilogue"
10192 [(use (match_operand:SI 0 "register_operand"))
10193 (use (match_operand:SI 1 "register_operand"))
10194 (use (match_operand:SI 2 "register_operand"))]
10195 "TARGET_EITHER"
10196 "
10197 {
10198 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10199 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
10200 {
10201 rtx ra = gen_rtx_REG (Pmode, 2);
10202
10203 emit_move_insn (ra, operands[2]);
10204 operands[2] = ra;
10205 }
10206 /* This is a hack -- we may have crystallized the function type too
10207 early. */
10208 cfun->machine->func_type = 0;
10209 }"
10210 )
10211
10212 ;; This split is only used during output to reduce the number of patterns
10213 ;; that need assembler instructions adding to them. We allowed the setting
10214 ;; of the conditions to be implicit during rtl generation so that
10215 ;; the conditional compare patterns would work. However, this conflicts to
10216 ;; some extent with the conditional data operations, so we have to split them
10217 ;; up again here.
10218
10219 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10220 ;; conditional execution sufficient?
10221
10222 (define_split
10223 [(set (match_operand:SI 0 "s_register_operand" "")
10224 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10225 [(match_operand 2 "" "") (match_operand 3 "" "")])
10226 (match_dup 0)
10227 (match_operand 4 "" "")))
10228 (clobber (reg:CC CC_REGNUM))]
10229 "TARGET_ARM && reload_completed"
10230 [(set (match_dup 5) (match_dup 6))
10231 (cond_exec (match_dup 7)
10232 (set (match_dup 0) (match_dup 4)))]
10233 "
10234 {
10235 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10236 operands[2], operands[3]);
10237 enum rtx_code rc = GET_CODE (operands[1]);
10238
10239 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10240 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10241 if (mode == CCFPmode || mode == CCFPEmode)
10242 rc = reverse_condition_maybe_unordered (rc);
10243 else
10244 rc = reverse_condition (rc);
10245
10246 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10247 }"
10248 )
10249
10250 (define_split
10251 [(set (match_operand:SI 0 "s_register_operand" "")
10252 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10253 [(match_operand 2 "" "") (match_operand 3 "" "")])
10254 (match_operand 4 "" "")
10255 (match_dup 0)))
10256 (clobber (reg:CC CC_REGNUM))]
10257 "TARGET_ARM && reload_completed"
10258 [(set (match_dup 5) (match_dup 6))
10259 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10260 (set (match_dup 0) (match_dup 4)))]
10261 "
10262 {
10263 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10264 operands[2], operands[3]);
10265
10266 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10267 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10268 }"
10269 )
10270
10271 (define_split
10272 [(set (match_operand:SI 0 "s_register_operand" "")
10273 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10274 [(match_operand 2 "" "") (match_operand 3 "" "")])
10275 (match_operand 4 "" "")
10276 (match_operand 5 "" "")))
10277 (clobber (reg:CC CC_REGNUM))]
10278 "TARGET_ARM && reload_completed"
10279 [(set (match_dup 6) (match_dup 7))
10280 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10281 (set (match_dup 0) (match_dup 4)))
10282 (cond_exec (match_dup 8)
10283 (set (match_dup 0) (match_dup 5)))]
10284 "
10285 {
10286 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10287 operands[2], operands[3]);
10288 enum rtx_code rc = GET_CODE (operands[1]);
10289
10290 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10291 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10292 if (mode == CCFPmode || mode == CCFPEmode)
10293 rc = reverse_condition_maybe_unordered (rc);
10294 else
10295 rc = reverse_condition (rc);
10296
10297 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10298 }"
10299 )
10300
10301 (define_split
10302 [(set (match_operand:SI 0 "s_register_operand" "")
10303 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10304 [(match_operand:SI 2 "s_register_operand" "")
10305 (match_operand:SI 3 "arm_add_operand" "")])
10306 (match_operand:SI 4 "arm_rhs_operand" "")
10307 (not:SI
10308 (match_operand:SI 5 "s_register_operand" ""))))
10309 (clobber (reg:CC CC_REGNUM))]
10310 "TARGET_ARM && reload_completed"
10311 [(set (match_dup 6) (match_dup 7))
10312 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10313 (set (match_dup 0) (match_dup 4)))
10314 (cond_exec (match_dup 8)
10315 (set (match_dup 0) (not:SI (match_dup 5))))]
10316 "
10317 {
10318 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10319 operands[2], operands[3]);
10320 enum rtx_code rc = GET_CODE (operands[1]);
10321
10322 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10323 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10324 if (mode == CCFPmode || mode == CCFPEmode)
10325 rc = reverse_condition_maybe_unordered (rc);
10326 else
10327 rc = reverse_condition (rc);
10328
10329 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10330 }"
10331 )
10332
10333 (define_insn "*cond_move_not"
10334 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10335 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10336 [(match_operand 3 "cc_register" "") (const_int 0)])
10337 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10338 (not:SI
10339 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10340 "TARGET_ARM"
10341 "@
10342 mvn%D4\\t%0, %2
10343 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10344 [(set_attr "conds" "use")
10345 (set_attr "type" "mvn_reg,multiple")
10346 (set_attr "length" "4,8")]
10347 )
10348
10349 ;; The next two patterns occur when an AND operation is followed by a
10350 ;; scc insn sequence.
10351
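;; As a rough C sketch (illustration only, not compiler code), a one-bit
;; sign_extract yields -1 when the selected bit is set and 0 otherwise,
;; which is why "ands" followed by "mvnne #0" implements it; the second
;; pattern below computes the bitwise complement of the same value.
;;
;;   #include <stdint.h>
;;   int32_t sign_extract_1bit (uint32_t x, int pos)
;;   {
;;     return (x & (1u << pos)) ? -1 : 0;
;;   }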
10352 (define_insn "*sign_extract_onebit"
10353 [(set (match_operand:SI 0 "s_register_operand" "=r")
10354 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10355 (const_int 1)
10356 (match_operand:SI 2 "const_int_operand" "n")))
10357 (clobber (reg:CC CC_REGNUM))]
10358 "TARGET_ARM"
10359 "*
10360 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10361 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10362 return \"mvnne\\t%0, #0\";
10363 "
10364 [(set_attr "conds" "clob")
10365 (set_attr "length" "8")
10366 (set_attr "type" "multiple")]
10367 )
10368
10369 (define_insn "*not_signextract_onebit"
10370 [(set (match_operand:SI 0 "s_register_operand" "=r")
10371 (not:SI
10372 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10373 (const_int 1)
10374 (match_operand:SI 2 "const_int_operand" "n"))))
10375 (clobber (reg:CC CC_REGNUM))]
10376 "TARGET_ARM"
10377 "*
10378 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10379 output_asm_insn (\"tst\\t%1, %2\", operands);
10380 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10381 return \"movne\\t%0, #0\";
10382 "
10383 [(set_attr "conds" "clob")
10384 (set_attr "length" "12")
10385 (set_attr "type" "multiple")]
10386 )
10387 ;; ??? The above patterns need auditing for Thumb-2
10388
10389 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10390 ;; expressions. For simplicity, the first register is also in the unspec
10391 ;; part.
10392 ;; To avoid the use of a GNU extension, the length attribute is computed
10393 ;; in a C function arm_attr_length_push_multi.
10394 (define_insn "*push_multi"
10395 [(match_parallel 2 "multi_register_push"
10396 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
10397 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10398 UNSPEC_PUSH_MULT))])]
10399 ""
10400 "*
10401 {
10402 int num_saves = XVECLEN (operands[2], 0);
10403
10404 /* For the StrongARM at least it is faster to
10405 use STR to store only a single register.
10406 In Thumb mode always use push, and the assembler will pick
10407 something appropriate. */
10408 if (num_saves == 1 && TARGET_ARM)
10409 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
10410 else
10411 {
10412 int i;
10413 char pattern[100];
10414
10415 if (TARGET_32BIT)
10416 strcpy (pattern, \"push%?\\t{%1\");
10417 else
10418 strcpy (pattern, \"push\\t{%1\");
10419
10420 for (i = 1; i < num_saves; i++)
10421 {
10422 strcat (pattern, \", %|\");
10423 strcat (pattern,
10424 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10425 }
10426
10427 strcat (pattern, \"}\");
10428 output_asm_insn (pattern, operands);
10429 }
10430
10431 return \"\";
10432 }"
10433 [(set_attr "type" "store_16")
10434 (set (attr "length")
10435 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
10436 )
10437
10438 (define_insn "stack_tie"
10439 [(set (mem:BLK (scratch))
10440 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10441 (match_operand:SI 1 "s_register_operand" "rk")]
10442 UNSPEC_PRLG_STK))]
10443 ""
10444 ""
10445 [(set_attr "length" "0")
10446 (set_attr "type" "block")]
10447 )
10448
10449 ;; Pop (as used in epilogue RTL)
10450 ;;
10451 (define_insn "*load_multiple_with_writeback"
10452 [(match_parallel 0 "load_multiple_operation"
10453 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10454 (plus:SI (match_dup 1)
10455 (match_operand:SI 2 "const_int_I_operand" "I")))
10456 (set (match_operand:SI 3 "s_register_operand" "=rk")
10457 (mem:SI (match_dup 1)))
10458 ])]
10459 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10460 "*
10461 {
10462 arm_output_multireg_pop (operands, /*return_pc=*/false,
10463 /*cond=*/const_true_rtx,
10464 /*reverse=*/false,
10465 /*update=*/true);
10466 return \"\";
10467 }
10468 "
10469 [(set_attr "type" "load_16")
10470 (set_attr "predicable" "yes")
10471 (set (attr "length")
10472 (symbol_ref "arm_attr_length_pop_multi (operands,
10473 /*return_pc=*/false,
10474 /*write_back_p=*/true)"))]
10475 )
10476
10477 ;; Pop with return (as used in epilogue RTL)
10478 ;;
10479 ;; This instruction is generated when the registers are popped at the end of
10480 ;; the epilogue. Here, instead of popping the value into LR and then
10481 ;; generating a jump to LR, the value is popped into PC directly. Hence, the
10482 ;; pattern is combined with (return).
10483 (define_insn "*pop_multiple_with_writeback_and_return"
10484 [(match_parallel 0 "pop_multiple_return"
10485 [(return)
10486 (set (match_operand:SI 1 "s_register_operand" "+rk")
10487 (plus:SI (match_dup 1)
10488 (match_operand:SI 2 "const_int_I_operand" "I")))
10489 (set (match_operand:SI 3 "s_register_operand" "=rk")
10490 (mem:SI (match_dup 1)))
10491 ])]
10492 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10493 "*
10494 {
10495 arm_output_multireg_pop (operands, /*return_pc=*/true,
10496 /*cond=*/const_true_rtx,
10497 /*reverse=*/false,
10498 /*update=*/true);
10499 return \"\";
10500 }
10501 "
10502 [(set_attr "type" "load_16")
10503 (set_attr "predicable" "yes")
10504 (set (attr "length")
10505 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10506 /*write_back_p=*/true)"))]
10507 )
10508
10509 (define_insn "*pop_multiple_with_return"
10510 [(match_parallel 0 "pop_multiple_return"
10511 [(return)
10512 (set (match_operand:SI 2 "s_register_operand" "=rk")
10513 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
10514 ])]
10515 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10516 "*
10517 {
10518 arm_output_multireg_pop (operands, /*return_pc=*/true,
10519 /*cond=*/const_true_rtx,
10520 /*reverse=*/false,
10521 /*update=*/false);
10522 return \"\";
10523 }
10524 "
10525 [(set_attr "type" "load_16")
10526 (set_attr "predicable" "yes")
10527 (set (attr "length")
10528 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10529 /*write_back_p=*/false)"))]
10530 )
10531
10532 ;; Load into PC and return
10533 (define_insn "*ldr_with_return"
10534 [(return)
10535 (set (reg:SI PC_REGNUM)
10536 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
10537 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10538 "ldr%?\t%|pc, [%0], #4"
10539 [(set_attr "type" "load_4")
10540 (set_attr "predicable" "yes")]
10541 )
10542 ;; Pop for floating point registers (as used in epilogue RTL)
10543 (define_insn "*vfp_pop_multiple_with_writeback"
10544 [(match_parallel 0 "pop_multiple_fp"
10545 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10546 (plus:SI (match_dup 1)
10547 (match_operand:SI 2 "const_int_I_operand" "I")))
10548 (set (match_operand:DF 3 "vfp_hard_register_operand" "")
10549 (mem:DF (match_dup 1)))])]
10550 "TARGET_32BIT && TARGET_HARD_FLOAT"
10551 "*
10552 {
10553 int num_regs = XVECLEN (operands[0], 0);
10554 char pattern[100];
10555 rtx op_list[2];
10556 strcpy (pattern, \"vldm\\t\");
10557 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
10558 strcat (pattern, \"!, {\");
10559 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
10560 strcat (pattern, \"%P0\");
10561 if ((num_regs - 1) > 1)
10562 {
10563 strcat (pattern, \"-%P1\");
10564 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
10565 }
10566
10567 strcat (pattern, \"}\");
10568 output_asm_insn (pattern, op_list);
10569 return \"\";
10570 }
10571 "
10572 [(set_attr "type" "load_16")
10573 (set_attr "conds" "unconditional")
10574 (set_attr "predicable" "no")]
10575 )
10576
10577 ;; Special patterns for dealing with the constant pool
10578
10579 (define_insn "align_4"
10580 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10581 "TARGET_EITHER"
10582 "*
10583 assemble_align (32);
10584 return \"\";
10585 "
10586 [(set_attr "type" "no_insn")]
10587 )
10588
10589 (define_insn "align_8"
10590 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10591 "TARGET_EITHER"
10592 "*
10593 assemble_align (64);
10594 return \"\";
10595 "
10596 [(set_attr "type" "no_insn")]
10597 )
10598
10599 (define_insn "consttable_end"
10600 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10601 "TARGET_EITHER"
10602 "*
10603 making_const_table = FALSE;
10604 return \"\";
10605 "
10606 [(set_attr "type" "no_insn")]
10607 )
10608
10609 (define_insn "consttable_1"
10610 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10611 "TARGET_EITHER"
10612 "*
10613 making_const_table = TRUE;
10614 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10615 assemble_zeros (3);
10616 return \"\";
10617 "
10618 [(set_attr "length" "4")
10619 (set_attr "type" "no_insn")]
10620 )
10621
10622 (define_insn "consttable_2"
10623 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10624 "TARGET_EITHER"
10625 "*
10626 {
10627 rtx x = operands[0];
10628 making_const_table = TRUE;
10629 switch (GET_MODE_CLASS (GET_MODE (x)))
10630 {
10631 case MODE_FLOAT:
10632 arm_emit_fp16_const (x);
10633 break;
10634 default:
10635 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10636 assemble_zeros (2);
10637 break;
10638 }
10639 return \"\";
10640 }"
10641 [(set_attr "length" "4")
10642 (set_attr "type" "no_insn")]
10643 )
10644
10645 (define_insn "consttable_4"
10646 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10647 "TARGET_EITHER"
10648 "*
10649 {
10650 rtx x = operands[0];
10651 making_const_table = TRUE;
10652 scalar_float_mode float_mode;
10653 if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
10654 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
10655 else
10656 {
10657 /* XXX: Sometimes gcc does something really dumb and ends up with
10658 a HIGH in a constant pool entry, usually because it's trying to
10659 load into a VFP register. We know this will always be used in
10660 combination with a LO_SUM which ignores the high bits, so just
10661 strip off the HIGH. */
10662 if (GET_CODE (x) == HIGH)
10663 x = XEXP (x, 0);
10664 assemble_integer (x, 4, BITS_PER_WORD, 1);
10665 mark_symbol_refs_as_used (x);
10666 }
10667 return \"\";
10668 }"
10669 [(set_attr "length" "4")
10670 (set_attr "type" "no_insn")]
10671 )
10672
10673 (define_insn "consttable_8"
10674 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10675 "TARGET_EITHER"
10676 "*
10677 {
10678 making_const_table = TRUE;
10679 scalar_float_mode float_mode;
10680 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
10681 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
10682 float_mode, BITS_PER_WORD);
10683 else
10684 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10685 return \"\";
10686 }"
10687 [(set_attr "length" "8")
10688 (set_attr "type" "no_insn")]
10689 )
10690
10691 (define_insn "consttable_16"
10692 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10693 "TARGET_EITHER"
10694 "*
10695 {
10696 making_const_table = TRUE;
10697 scalar_float_mode float_mode;
10698 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
10699 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
10700 float_mode, BITS_PER_WORD);
10701 else
10702 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10703 return \"\";
10704 }"
10705 [(set_attr "length" "16")
10706 (set_attr "type" "no_insn")]
10707 )
10708
10709 ;; V5 instructions.
10710
10711 (define_insn "clzsi2"
10712 [(set (match_operand:SI 0 "s_register_operand" "=r")
10713 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10714 "TARGET_32BIT && arm_arch5t"
10715 "clz%?\\t%0, %1"
10716 [(set_attr "predicable" "yes")
10717 (set_attr "type" "clz")])
10718
10719 (define_insn "rbitsi2"
10720 [(set (match_operand:SI 0 "s_register_operand" "=r")
10721 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
10722 "TARGET_32BIT && arm_arch_thumb2"
10723 "rbit%?\\t%0, %1"
10724 [(set_attr "predicable" "yes")
10725 (set_attr "type" "clz")])
10726
10727 ;; Keep this as a CTZ expression until after reload and then split
10728 ;; into RBIT + CLZ. Since RBIT is represented as an UNSPEC, it is unlikely
10729 ;; to fold with any other expression.
10730
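;; For reference, a C sketch of why RBIT followed by CLZ computes CTZ
;; (bit reversal turns trailing zeros into leading zeros); illustration only:
;;
;;   unsigned ctz32 (unsigned x)
;;   {
;;     unsigned rev = 0, n = 0;
;;     for (int i = 0; i < 32; i++)                     /* software RBIT */
;;       rev |= ((x >> i) & 1u) << (31 - i);
;;     while (n < 32 && !(rev & (0x80000000u >> n)))    /* software CLZ */
;;       n++;
;;     return n;                      /* number of trailing zeros of x */
;;   }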
10731 (define_insn_and_split "ctzsi2"
10732 [(set (match_operand:SI 0 "s_register_operand" "=r")
10733 (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10734 "TARGET_32BIT && arm_arch_thumb2"
10735 "#"
10736 "&& reload_completed"
10737 [(const_int 0)]
10738 "
10739 emit_insn (gen_rbitsi2 (operands[0], operands[1]));
10740 emit_insn (gen_clzsi2 (operands[0], operands[0]));
10741 DONE;
10742 ")
10743
10744 ;; V5E instructions.
10745
10746 (define_insn "prefetch"
10747 [(prefetch (match_operand:SI 0 "address_operand" "p")
10748 (match_operand:SI 1 "" "")
10749 (match_operand:SI 2 "" ""))]
10750 "TARGET_32BIT && arm_arch5te"
10751 "pld\\t%a0"
10752 [(set_attr "type" "load_4")]
10753 )
10754
10755 ;; General predication pattern
10756
10757 (define_cond_exec
10758 [(match_operator 0 "arm_comparison_operator"
10759 [(match_operand 1 "cc_register" "")
10760 (const_int 0)])]
10761 "TARGET_32BIT
10762 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
10763 ""
10764 [(set_attr "predicated" "yes")]
10765 )
10766
10767 (define_insn "force_register_use"
10768 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
10769 ""
10770 "%@ %0 needed"
10771 [(set_attr "length" "0")
10772 (set_attr "type" "no_insn")]
10773 )
10774
10775
10776 ;; Patterns for exception handling
10777
10778 (define_expand "eh_return"
10779 [(use (match_operand 0 "general_operand"))]
10780 "TARGET_EITHER"
10781 "
10782 {
10783 if (TARGET_32BIT)
10784 emit_insn (gen_arm_eh_return (operands[0]));
10785 else
10786 emit_insn (gen_thumb_eh_return (operands[0]));
10787 DONE;
10788 }"
10789 )
10790
10791 ;; We can't expand this before we know where the link register is stored.
10792 (define_insn_and_split "arm_eh_return"
10793 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10794 VUNSPEC_EH_RETURN)
10795 (clobber (match_scratch:SI 1 "=&r"))]
10796 "TARGET_ARM"
10797 "#"
10798 "&& reload_completed"
10799 [(const_int 0)]
10800 "
10801 {
10802 arm_set_return_address (operands[0], operands[1]);
10803 DONE;
10804 }"
10805 )
10806
10807 \f
10808 ;; TLS support
10809
10810 (define_insn "load_tp_hard"
10811 [(set (match_operand:SI 0 "register_operand" "=r")
10812 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10813 "TARGET_HARD_TP"
10814 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10815 [(set_attr "predicable" "yes")
10816 (set_attr "type" "mrs")]
10817 )
10818
10819 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10820 (define_insn "load_tp_soft_fdpic"
10821 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10822 (clobber (reg:SI FDPIC_REGNUM))
10823 (clobber (reg:SI LR_REGNUM))
10824 (clobber (reg:SI IP_REGNUM))
10825 (clobber (reg:CC CC_REGNUM))]
10826 "TARGET_SOFT_TP && TARGET_FDPIC"
10827 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10828 [(set_attr "conds" "clob")
10829 (set_attr "type" "branch")]
10830 )
10831
10832 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10833 (define_insn "load_tp_soft"
10834 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10835 (clobber (reg:SI LR_REGNUM))
10836 (clobber (reg:SI IP_REGNUM))
10837 (clobber (reg:CC CC_REGNUM))]
10838 "TARGET_SOFT_TP && !TARGET_FDPIC"
10839 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10840 [(set_attr "conds" "clob")
10841 (set_attr "type" "branch")]
10842 )
10843
10844 ;; tls descriptor call
10845 (define_insn "tlscall"
10846 [(set (reg:SI R0_REGNUM)
10847 (unspec:SI [(reg:SI R0_REGNUM)
10848 (match_operand:SI 0 "" "X")
10849 (match_operand 1 "" "")] UNSPEC_TLS))
10850 (clobber (reg:SI R1_REGNUM))
10851 (clobber (reg:SI LR_REGNUM))
10852 (clobber (reg:SI CC_REGNUM))]
10853 "TARGET_GNU2_TLS"
10854 {
10855 targetm.asm_out.internal_label (asm_out_file, "LPIC",
10856 INTVAL (operands[1]));
10857 return "bl\\t%c0(tlscall)";
10858 }
10859 [(set_attr "conds" "clob")
10860 (set_attr "length" "4")
10861 (set_attr "type" "branch")]
10862 )
10863
10864 ;; For thread pointer builtin
10865 (define_expand "get_thread_pointersi"
10866 [(match_operand:SI 0 "s_register_operand")]
10867 ""
10868 "
10869 {
10870 arm_load_tp (operands[0]);
10871 DONE;
10872 }")
10873
10874 ;;
10875
10876 ;; We only care about the lower 16 bits of the constant
10877 ;; being inserted into the upper 16 bits of the register.
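;; A minimal C sketch of the effect (illustration only; imm is the full
;; constant, of which only the low half is encoded):
;;
;;   #include <stdint.h>
;;   uint32_t movt_effect (uint32_t reg, uint32_t imm)
;;   {
;;     return (reg & 0x0000ffffu) | ((imm & 0xffffu) << 16);
;;   }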
10878 (define_insn "*arm_movtas_ze"
10879 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
10880 (const_int 16)
10881 (const_int 16))
10882 (match_operand:SI 1 "const_int_operand" ""))]
10883 "TARGET_HAVE_MOVT"
10884 "@
10885 movt%?\t%0, %L1
10886 movt\t%0, %L1"
10887 [(set_attr "arch" "32,v8mb")
10888 (set_attr "predicable" "yes")
10889 (set_attr "length" "4")
10890 (set_attr "type" "alu_sreg")]
10891 )
10892
10893 (define_insn "*arm_rev"
10894 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
10895 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
10896 "arm_arch6"
10897 "@
10898 rev\t%0, %1
10899 rev%?\t%0, %1
10900 rev%?\t%0, %1"
10901 [(set_attr "arch" "t1,t2,32")
10902 (set_attr "length" "2,2,4")
10903 (set_attr "predicable" "no,yes,yes")
10904 (set_attr "type" "rev")]
10905 )
10906
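;; Byte-reverse a word without the REV instruction (for cores before ARMv6):
;; XOR the value with itself rotated by 16, shift right by 8, mask out the
;; byte that must not be swapped, then XOR with the value rotated by 8.
;; A C sketch of the same computation (illustration only):
;;
;;   #include <stdint.h>
;;   static inline uint32_t ror32 (uint32_t x, int n)
;;   { return (x >> n) | (x << (32 - n)); }
;;
;;   uint32_t legacy_bswap (uint32_t x)
;;   {
;;     uint32_t t = (ror32 (x, 16) ^ x) >> 8;   /* mix opposite byte pairs */
;;     t &= 0xffff00ffu;                        /* keep byte 1 in place */
;;     return ror32 (x, 8) ^ t;                 /* == __builtin_bswap32 (x) */
;;   }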
10907 (define_expand "arm_legacy_rev"
10908 [(set (match_operand:SI 2 "s_register_operand")
10909 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
10910 (const_int 16))
10911 (match_dup 1)))
10912 (set (match_dup 2)
10913 (lshiftrt:SI (match_dup 2)
10914 (const_int 8)))
10915 (set (match_operand:SI 3 "s_register_operand")
10916 (rotatert:SI (match_dup 1)
10917 (const_int 8)))
10918 (set (match_dup 2)
10919 (and:SI (match_dup 2)
10920 (const_int -65281)))
10921 (set (match_operand:SI 0 "s_register_operand")
10922 (xor:SI (match_dup 3)
10923 (match_dup 2)))]
10924 "TARGET_32BIT"
10925 ""
10926 )
10927
10928 ;; Reuse temporaries to keep register pressure down.
10929 (define_expand "thumb_legacy_rev"
10930 [(set (match_operand:SI 2 "s_register_operand")
10931 (ashift:SI (match_operand:SI 1 "s_register_operand")
10932 (const_int 24)))
10933 (set (match_operand:SI 3 "s_register_operand")
10934 (lshiftrt:SI (match_dup 1)
10935 (const_int 24)))
10936 (set (match_dup 3)
10937 (ior:SI (match_dup 3)
10938 (match_dup 2)))
10939 (set (match_operand:SI 4 "s_register_operand")
10940 (const_int 16))
10941 (set (match_operand:SI 5 "s_register_operand")
10942 (rotatert:SI (match_dup 1)
10943 (match_dup 4)))
10944 (set (match_dup 2)
10945 (ashift:SI (match_dup 5)
10946 (const_int 24)))
10947 (set (match_dup 5)
10948 (lshiftrt:SI (match_dup 5)
10949 (const_int 24)))
10950 (set (match_dup 5)
10951 (ior:SI (match_dup 5)
10952 (match_dup 2)))
10953 (set (match_dup 5)
10954 (rotatert:SI (match_dup 5)
10955 (match_dup 4)))
10956 (set (match_operand:SI 0 "s_register_operand")
10957 (ior:SI (match_dup 5)
10958 (match_dup 3)))]
10959 "TARGET_THUMB"
10960 ""
10961 )
10962
10963 ;; ARM-specific expansion of signed mod by power of 2
10964 ;; using conditional negate.
10965 ;; For r0 % n where n is a power of 2 produce:
10966 ;; rsbs r1, r0, #0
10967 ;; and r0, r0, #(n - 1)
10968 ;; and r1, r1, #(n - 1)
10969 ;; rsbpl r0, r1, #0
10970
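;; A C sketch of the same computation (illustration only; assumes n is a
;; power of two and x is not INT_MIN):
;;
;;   int mod_pow2 (int x, int n)
;;   {
;;     int lo  = x & (n - 1);           /* and  r0, r0, #(n - 1) */
;;     int nlo = (-x) & (n - 1);        /* rsbs + and r1, r1, #(n - 1) */
;;     return (x > 0) ? lo : -nlo;      /* rsbpl r0, r1, #0 */
;;   }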
10971 (define_expand "modsi3"
10972 [(match_operand:SI 0 "register_operand")
10973 (match_operand:SI 1 "register_operand")
10974 (match_operand:SI 2 "const_int_operand")]
10975 "TARGET_32BIT"
10976 {
10977 HOST_WIDE_INT val = INTVAL (operands[2]);
10978
10979 if (val <= 0
10980 || exact_log2 (val) <= 0)
10981 FAIL;
10982
10983 rtx mask = GEN_INT (val - 1);
10984
10985 /* In the special case of r0 % 2 we can do the even shorter:
10986 cmp r0, #0
10987 and r0, r0, #1
10988 rsblt r0, r0, #0. */
10989
10990 if (val == 2)
10991 {
10992 rtx cc_reg = arm_gen_compare_reg (LT,
10993 operands[1], const0_rtx, NULL_RTX);
10994 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
10995 rtx masked = gen_reg_rtx (SImode);
10996
10997 emit_insn (gen_andsi3 (masked, operands[1], mask));
10998 emit_move_insn (operands[0],
10999 gen_rtx_IF_THEN_ELSE (SImode, cond,
11000 gen_rtx_NEG (SImode,
11001 masked),
11002 masked));
11003 DONE;
11004 }
11005
11006 rtx neg_op = gen_reg_rtx (SImode);
11007 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
11008 operands[1]));
11009
11010 /* Extract the condition register and mode. */
11011 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
11012 rtx cc_reg = SET_DEST (cmp);
11013 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
11014
11015 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
11016
11017 rtx masked_neg = gen_reg_rtx (SImode);
11018 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
11019
11020 /* We want a conditional negate here, but emitting COND_EXEC rtxes
11021 during expand does not always work. Do an IF_THEN_ELSE instead. */
11022 emit_move_insn (operands[0],
11023 gen_rtx_IF_THEN_ELSE (SImode, cond,
11024 gen_rtx_NEG (SImode, masked_neg),
11025 operands[0]));
11026
11027
11028 DONE;
11029 }
11030 )
11031
11032 (define_expand "bswapsi2"
11033 [(set (match_operand:SI 0 "s_register_operand")
11034 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
11035 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
11036 "
11037 if (!arm_arch6)
11038 {
11039 rtx op2 = gen_reg_rtx (SImode);
11040 rtx op3 = gen_reg_rtx (SImode);
11041
11042 if (TARGET_THUMB)
11043 {
11044 rtx op4 = gen_reg_rtx (SImode);
11045 rtx op5 = gen_reg_rtx (SImode);
11046
11047 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
11048 op2, op3, op4, op5));
11049 }
11050 else
11051 {
11052 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
11053 op2, op3));
11054 }
11055
11056 DONE;
11057 }
11058 "
11059 )
11060
11061 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
11062 ;; and unsigned variants, respectively. For rev16, expose
11063 ;; byte-swapping in the lower 16 bits only.
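;; A C sketch of revsh (illustration only): swap the two bytes of a
;; halfword, then sign-extend the result to 32 bits:
;;
;;   #include <stdint.h>
;;   int32_t revsh_sketch (uint16_t h)
;;   {
;;     uint16_t swapped = (uint16_t) ((h << 8) | (h >> 8));
;;     return (int16_t) swapped;        /* sign-extend into the full word */
;;   }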
11064 (define_insn "*arm_revsh"
11065 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
11066 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
11067 "arm_arch6"
11068 "@
11069 revsh\t%0, %1
11070 revsh%?\t%0, %1
11071 revsh%?\t%0, %1"
11072 [(set_attr "arch" "t1,t2,32")
11073 (set_attr "length" "2,2,4")
11074 (set_attr "type" "rev")]
11075 )
11076
11077 (define_insn "*arm_rev16"
11078 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
11079 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
11080 "arm_arch6"
11081 "@
11082 rev16\t%0, %1
11083 rev16%?\t%0, %1
11084 rev16%?\t%0, %1"
11085 [(set_attr "arch" "t1,t2,32")
11086 (set_attr "length" "2,2,4")
11087 (set_attr "type" "rev")]
11088 )
11089
11090 ;; There are no canonicalisation rules for the position of the lshiftrt, ashift
11091 ;; operations within an IOR/AND RTX, so we have two patterns matching
11092 ;; each valid permutation.
11093
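;; Both permutations describe the same operation: byte-swap each 16-bit
;; halfword of the word. A C sketch (illustration only):
;;
;;   #include <stdint.h>
;;   uint32_t rev16_word (uint32_t x)
;;   {
;;     return ((x << 8) & 0xff00ff00u) | ((x >> 8) & 0x00ff00ffu);
;;   }
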
11094 (define_insn "arm_rev16si2"
11095 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11096 (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
11097 (const_int 8))
11098 (match_operand:SI 3 "const_int_operand" "n,n,n"))
11099 (and:SI (lshiftrt:SI (match_dup 1)
11100 (const_int 8))
11101 (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
11102 "arm_arch6
11103 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11104 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11105 "rev16\\t%0, %1"
11106 [(set_attr "arch" "t1,t2,32")
11107 (set_attr "length" "2,2,4")
11108 (set_attr "type" "rev")]
11109 )
11110
11111 (define_insn "arm_rev16si2_alt"
11112 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11113 (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
11114 (const_int 8))
11115 (match_operand:SI 2 "const_int_operand" "n,n,n"))
11116 (and:SI (ashift:SI (match_dup 1)
11117 (const_int 8))
11118 (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
11119 "arm_arch6
11120 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11121 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11122 "rev16\\t%0, %1"
11123 [(set_attr "arch" "t1,t2,32")
11124 (set_attr "length" "2,2,4")
11125 (set_attr "type" "rev")]
11126 )
11127
11128 (define_expand "bswaphi2"
11129 [(set (match_operand:HI 0 "s_register_operand")
11130 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
11131 "arm_arch6"
11132 ""
11133 )
11134
11135 ;; Patterns for LDRD/STRD in Thumb-2 mode
11136
11137 (define_insn "*thumb2_ldrd"
11138 [(set (match_operand:SI 0 "s_register_operand" "=r")
11139 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11140 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
11141 (set (match_operand:SI 3 "s_register_operand" "=r")
11142 (mem:SI (plus:SI (match_dup 1)
11143 (match_operand:SI 4 "const_int_operand" ""))))]
11144 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11145 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
11146 && (operands_ok_ldrd_strd (operands[0], operands[3],
11147 operands[1], INTVAL (operands[2]),
11148 false, true))"
11149 "ldrd%?\t%0, %3, [%1, %2]"
11150 [(set_attr "type" "load_8")
11151 (set_attr "predicable" "yes")])
11152
11153 (define_insn "*thumb2_ldrd_base"
11154 [(set (match_operand:SI 0 "s_register_operand" "=r")
11155 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11156 (set (match_operand:SI 2 "s_register_operand" "=r")
11157 (mem:SI (plus:SI (match_dup 1)
11158 (const_int 4))))]
11159 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11160 && (operands_ok_ldrd_strd (operands[0], operands[2],
11161 operands[1], 0, false, true))"
11162 "ldrd%?\t%0, %2, [%1]"
11163 [(set_attr "type" "load_8")
11164 (set_attr "predicable" "yes")])
11165
11166 (define_insn "*thumb2_ldrd_base_neg"
11167 [(set (match_operand:SI 0 "s_register_operand" "=r")
11168 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11169 (const_int -4))))
11170 (set (match_operand:SI 2 "s_register_operand" "=r")
11171 (mem:SI (match_dup 1)))]
11172 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11173 && (operands_ok_ldrd_strd (operands[0], operands[2],
11174 operands[1], -4, false, true))"
11175 "ldrd%?\t%0, %2, [%1, #-4]"
11176 [(set_attr "type" "load_8")
11177 (set_attr "predicable" "yes")])
11178
11179 (define_insn "*thumb2_strd"
11180 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11181 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
11182 (match_operand:SI 2 "s_register_operand" "r"))
11183 (set (mem:SI (plus:SI (match_dup 0)
11184 (match_operand:SI 3 "const_int_operand" "")))
11185 (match_operand:SI 4 "s_register_operand" "r"))]
11186 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11187 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
11188 && (operands_ok_ldrd_strd (operands[2], operands[4],
11189 operands[0], INTVAL (operands[1]),
11190 false, false))"
11191 "strd%?\t%2, %4, [%0, %1]"
11192 [(set_attr "type" "store_8")
11193 (set_attr "predicable" "yes")])
11194
11195 (define_insn "*thumb2_strd_base"
11196 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
11197 (match_operand:SI 1 "s_register_operand" "r"))
11198 (set (mem:SI (plus:SI (match_dup 0)
11199 (const_int 4)))
11200 (match_operand:SI 2 "s_register_operand" "r"))]
11201 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11202 && (operands_ok_ldrd_strd (operands[1], operands[2],
11203 operands[0], 0, false, false))"
11204 "strd%?\t%1, %2, [%0]"
11205 [(set_attr "type" "store_8")
11206 (set_attr "predicable" "yes")])
11207
11208 (define_insn "*thumb2_strd_base_neg"
11209 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11210 (const_int -4)))
11211 (match_operand:SI 1 "s_register_operand" "r"))
11212 (set (mem:SI (match_dup 0))
11213 (match_operand:SI 2 "s_register_operand" "r"))]
11214 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11215 && (operands_ok_ldrd_strd (operands[1], operands[2],
11216 operands[0], -4, false, false))"
11217 "strd%?\t%1, %2, [%0, #-4]"
11218 [(set_attr "type" "store_8")
11219 (set_attr "predicable" "yes")])
11220
11221 ;; ARMv8 CRC32 instructions.
11222 (define_insn "arm_<crc_variant>"
11223 [(set (match_operand:SI 0 "s_register_operand" "=r")
11224 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
11225 (match_operand:<crc_mode> 2 "s_register_operand" "r")]
11226 CRC))]
11227 "TARGET_CRC32"
11228 "<crc_variant>\\t%0, %1, %2"
11229 [(set_attr "type" "crc")
11230 (set_attr "conds" "unconditional")]
11231 )
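
;; For reference, a bitwise C sketch of the per-byte accumulation performed
;; by the byte-sized forms, assuming the usual reflected polynomials
;; (0xEDB88320 for CRC32, 0x82F63B78 for CRC32C); illustration only:
;;
;;   #include <stdint.h>
;;   uint32_t crc32_byte (uint32_t crc, uint8_t data, uint32_t poly)
;;   {
;;     crc ^= data;
;;     for (int i = 0; i < 8; i++)
;;       crc = (crc >> 1) ^ ((crc & 1) ? poly : 0);
;;     return crc;
;;   }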
11232
11233 ;; Load the load/store double peephole optimizations.
11234 (include "ldrdstrd.md")
11235
11236 ;; Load the load/store multiple patterns
11237 (include "ldmstm.md")
11238
11239 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
11240 ;; large lists without explicit writeback generated for APCS_FRAME epilogue.
11241 ;; The operands are validated through the load_multiple_operation
11242 ;; match_parallel predicate rather than through constraints, so it is only
11243 ;; enabled after reload.
11244 (define_insn "*load_multiple"
11245 [(match_parallel 0 "load_multiple_operation"
11246 [(set (match_operand:SI 2 "s_register_operand" "=rk")
11247 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11248 ])]
11249 "TARGET_32BIT && reload_completed"
11250 "*
11251 {
11252 arm_output_multireg_pop (operands, /*return_pc=*/false,
11253 /*cond=*/const_true_rtx,
11254 /*reverse=*/false,
11255 /*update=*/false);
11256 return \"\";
11257 }
11258 "
11259 [(set_attr "predicable" "yes")]
11260 )
11261
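;; Soft-float copysign: keep the magnitude of operand 1 and take the sign
;; bit (bit 31 of the most significant word) from operand 2. A C sketch of
;; the intended semantics (illustration only, not the expander itself):
;;
;;   #include <stdint.h>
;;   #include <string.h>
;;   float copysignf_sketch (float mag, float sgn)
;;   {
;;     uint32_t m, s;
;;     memcpy (&m, &mag, sizeof m);
;;     memcpy (&s, &sgn, sizeof s);
;;     m = (m & 0x7fffffffu) | (s & 0x80000000u);
;;     memcpy (&mag, &m, sizeof m);
;;     return mag;
;;   }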
11262 (define_expand "copysignsf3"
11263 [(match_operand:SF 0 "register_operand")
11264 (match_operand:SF 1 "register_operand")
11265 (match_operand:SF 2 "register_operand")]
11266 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11267 "{
11268 emit_move_insn (operands[0], operands[2]);
11269 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
11270 GEN_INT (31), GEN_INT (0),
11271 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
11272 DONE;
11273 }"
11274 )
11275
11276 (define_expand "copysigndf3"
11277 [(match_operand:DF 0 "register_operand")
11278 (match_operand:DF 1 "register_operand")
11279 (match_operand:DF 2 "register_operand")]
11280 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11281 "{
11282 rtx op0_low = gen_lowpart (SImode, operands[0]);
11283 rtx op0_high = gen_highpart (SImode, operands[0]);
11284 rtx op1_low = gen_lowpart (SImode, operands[1]);
11285 rtx op1_high = gen_highpart (SImode, operands[1]);
11286 rtx op2_high = gen_highpart (SImode, operands[2]);
11287
11288 rtx scratch1 = gen_reg_rtx (SImode);
11289 rtx scratch2 = gen_reg_rtx (SImode);
11290 emit_move_insn (scratch1, op2_high);
11291 emit_move_insn (scratch2, op1_high);
11292
11293 emit_insn (gen_rtx_SET (scratch1,
11294 gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT (31))));
11295 emit_insn (gen_insv_t2 (scratch2, GEN_INT (1), GEN_INT (31), scratch1));
11296 emit_move_insn (op0_low, op1_low);
11297 emit_move_insn (op0_high, scratch2);
11298
11299 DONE;
11300 }"
11301 )
11302
11303 ;; movmisalign patterns for HImode and SImode.
11304 (define_expand "movmisalign<mode>"
11305 [(match_operand:HSI 0 "general_operand")
11306 (match_operand:HSI 1 "general_operand")]
11307 "unaligned_access"
11308 {
11309 /* This pattern is not permitted to fail during expansion: if both arguments
11310 are non-registers (e.g. memory := constant), force operand 1 into a
11311 register. */
11312 rtx (* gen_unaligned_load)(rtx, rtx);
11313 rtx tmp_dest = operands[0];
11314 if (!s_register_operand (operands[0], <MODE>mode)
11315 && !s_register_operand (operands[1], <MODE>mode))
11316 operands[1] = force_reg (<MODE>mode, operands[1]);
11317
11318 if (<MODE>mode == HImode)
11319 {
11320 gen_unaligned_load = gen_unaligned_loadhiu;
11321 tmp_dest = gen_reg_rtx (SImode);
11322 }
11323 else
11324 gen_unaligned_load = gen_unaligned_loadsi;
11325
11326 if (MEM_P (operands[1]))
11327 {
11328 emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
11329 if (<MODE>mode == HImode)
11330 emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
11331 }
11332 else
11333 emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
11334
11335 DONE;
11336 })
11337
11338 (define_insn "arm_<cdp>"
11339 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11340 (match_operand:SI 1 "immediate_operand" "n")
11341 (match_operand:SI 2 "immediate_operand" "n")
11342 (match_operand:SI 3 "immediate_operand" "n")
11343 (match_operand:SI 4 "immediate_operand" "n")
11344 (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
11345 "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
11346 {
11347 arm_const_bounds (operands[0], 0, 16);
11348 arm_const_bounds (operands[1], 0, 16);
11349 arm_const_bounds (operands[2], 0, (1 << 5));
11350 arm_const_bounds (operands[3], 0, (1 << 5));
11351 arm_const_bounds (operands[4], 0, (1 << 5));
11352 arm_const_bounds (operands[5], 0, 8);
11353 return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
11354 }
11355 [(set_attr "length" "4")
11356 (set_attr "type" "coproc")])
11357
11358 (define_insn "*ldc"
11359 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11360 (match_operand:SI 1 "immediate_operand" "n")
11361 (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
11362 "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
11363 {
11364 arm_const_bounds (operands[0], 0, 16);
11365 arm_const_bounds (operands[1], 0, (1 << 5));
11366 return "<ldc>\\tp%c0, CR%c1, %2";
11367 }
11368 [(set_attr "length" "4")
11369 (set_attr "type" "coproc")])
11370
11371 (define_insn "*stc"
11372 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11373 (match_operand:SI 1 "immediate_operand" "n")
11374 (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
11375 "arm_coproc_builtin_available (VUNSPEC_<STC>)"
11376 {
11377 arm_const_bounds (operands[0], 0, 16);
11378 arm_const_bounds (operands[1], 0, (1 << 5));
11379 return "<stc>\\tp%c0, CR%c1, %2";
11380 }
11381 [(set_attr "length" "4")
11382 (set_attr "type" "coproc")])
11383
11384 (define_expand "arm_<ldc>"
11385 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11386 (match_operand:SI 1 "immediate_operand")
11387 (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
11388 "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
11389
11390 (define_expand "arm_<stc>"
11391 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11392 (match_operand:SI 1 "immediate_operand")
11393 (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
11394 "arm_coproc_builtin_available (VUNSPEC_<STC>)")
11395
11396 (define_insn "arm_<mcr>"
11397 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11398 (match_operand:SI 1 "immediate_operand" "n")
11399 (match_operand:SI 2 "s_register_operand" "r")
11400 (match_operand:SI 3 "immediate_operand" "n")
11401 (match_operand:SI 4 "immediate_operand" "n")
11402 (match_operand:SI 5 "immediate_operand" "n")] MCRI)
11403 (use (match_dup 2))]
11404 "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
11405 {
11406 arm_const_bounds (operands[0], 0, 16);
11407 arm_const_bounds (operands[1], 0, 8);
11408 arm_const_bounds (operands[3], 0, (1 << 5));
11409 arm_const_bounds (operands[4], 0, (1 << 5));
11410 arm_const_bounds (operands[5], 0, 8);
11411 return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
11412 }
11413 [(set_attr "length" "4")
11414 (set_attr "type" "coproc")])
11415
11416 (define_insn "arm_<mrc>"
11417 [(set (match_operand:SI 0 "s_register_operand" "=r")
11418 (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
11419 (match_operand:SI 2 "immediate_operand" "n")
11420 (match_operand:SI 3 "immediate_operand" "n")
11421 (match_operand:SI 4 "immediate_operand" "n")
11422 (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
11423 "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
11424 {
11425 arm_const_bounds (operands[1], 0, 16);
11426 arm_const_bounds (operands[2], 0, 8);
11427 arm_const_bounds (operands[3], 0, (1 << 5));
11428 arm_const_bounds (operands[4], 0, (1 << 5));
11429 arm_const_bounds (operands[5], 0, 8);
11430 return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
11431 }
11432 [(set_attr "length" "4")
11433 (set_attr "type" "coproc")])
11434
11435 (define_insn "arm_<mcrr>"
11436 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11437 (match_operand:SI 1 "immediate_operand" "n")
11438 (match_operand:DI 2 "s_register_operand" "r")
11439 (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
11440 (use (match_dup 2))]
11441 "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
11442 {
11443 arm_const_bounds (operands[0], 0, 16);
11444 arm_const_bounds (operands[1], 0, 8);
11445 arm_const_bounds (operands[3], 0, (1 << 5));
11446 return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
11447 }
11448 [(set_attr "length" "4")
11449 (set_attr "type" "coproc")])
11450
11451 (define_insn "arm_<mrrc>"
11452 [(set (match_operand:DI 0 "s_register_operand" "=r")
11453 (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
11454 (match_operand:SI 2 "immediate_operand" "n")
11455 (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
11456 "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
11457 {
11458 arm_const_bounds (operands[1], 0, 16);
11459 arm_const_bounds (operands[2], 0, 8);
11460 arm_const_bounds (operands[3], 0, (1 << 5));
11461 return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
11462 }
11463 [(set_attr "length" "4")
11464 (set_attr "type" "coproc")])
11465
11466 (define_expand "speculation_barrier"
11467 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11468 "TARGET_EITHER"
11469 "
11470 /* For Thumb-1 (except Armv8 derivatives) and for pre-Armv7 we don't
11471 have a usable barrier (and probably don't need one in practice).
11472 But to be safe if such code is run on later architectures, call a
11473 helper function in libgcc that will do the right thing for the active
11474 system. */
11475 if (!(arm_arch7 || arm_arch8))
11476 {
11477 arm_emit_speculation_barrier_function ();
11478 DONE;
11479 }
11480 "
11481 )
11482
11483 ;; Generate a hard speculation barrier when we have not enabled speculation
11484 ;; tracking.
11485 (define_insn "*speculation_barrier_insn"
11486 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11487 "arm_arch7 || arm_arch8"
11488 "isb\;dsb\\tsy"
11489 [(set_attr "type" "block")
11490 (set_attr "length" "8")]
11491 )
11492
11493 ;; Vector bits common to IWMMXT and Neon
11494 (include "vec-common.md")
11495 ;; Load the Intel Wireless Multimedia Extension patterns
11496 (include "iwmmxt.md")
11497 ;; Load the VFP co-processor patterns
11498 (include "vfp.md")
11499 ;; Thumb-1 patterns
11500 (include "thumb1.md")
11501 ;; Thumb-2 patterns
11502 (include "thumb2.md")
11503 ;; Neon patterns
11504 (include "neon.md")
11505 ;; Crypto patterns
11506 (include "crypto.md")
11507 ;; Synchronization Primitives
11508 (include "sync.md")
11509 ;; Fixed-point patterns
11510 (include "arm-fixed.md")