1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2019 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
6
7 ;; This file is part of GCC.
8
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
13
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
18
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
22
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
24
25 \f
26 ;;---------------------------------------------------------------------------
27 ;; Constants
28
29 ;; Register numbers -- All machine registers should be defined here
30 (define_constants
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (R4_REGNUM 4) ; Fifth CORE register
34 (FDPIC_REGNUM 9) ; FDPIC register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (LAST_ARM_REGNUM 15) ;
40 (CC_REGNUM 100) ; Condition code pseudo register
41 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
42 ]
43 )
44 ;; 3rd operand to select_dominance_cc_mode
45 (define_constants
46 [(DOM_CC_X_AND_Y 0)
47 (DOM_CC_NX_OR_Y 1)
48 (DOM_CC_X_OR_Y 2)
49 ]
50 )
51 ;; conditional compare combination
52 (define_constants
53 [(CMP_CMP 0)
54 (CMN_CMP 1)
55 (CMP_CMN 2)
56 (CMN_CMN 3)
57 (NUM_OF_COND_CMP 4)
58 ]
59 )
60
61 \f
62 ;;---------------------------------------------------------------------------
63 ;; Attributes
64
65 ;; Processor type. This is created automatically from arm-cores.def.
66 (include "arm-tune.md")
67
68 ;; Instruction classification types
69 (include "types.md")
70
71 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
72 ; generating ARM code. This is used to control the length of some insn
73 ; patterns that share the same RTL in both ARM and Thumb code.
74 (define_attr "is_thumb" "yes,no"
75 (const (if_then_else (symbol_ref "TARGET_THUMB")
76 (const_string "yes") (const_string "no"))))
77
78 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
79 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
80
81 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
82 (define_attr "is_thumb1" "yes,no"
83 (const (if_then_else (symbol_ref "TARGET_THUMB1")
84 (const_string "yes") (const_string "no"))))
85
86 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
87 ; The arm_restrict_it flag enables the "short IT" feature which
88 ; restricts IT blocks to a single 16-bit instruction.
89 ; This attribute should only be used on 16-bit Thumb-2 instructions
90 ; which may be predicated (the "predicable" attribute must be set).
91 (define_attr "predicable_short_it" "no,yes" (const_string "no"))
92
93 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
94 ; This attribute should only be used on instructions which may emit
95 ; an IT block in their expansion which is not a short IT.
96 (define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
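
; As a sketch of how these two attributes feed the "enabled" attribute
; defined further down (illustrative, no new rule): when arm_restrict_it
; is in effect, an alternative that ends up predicated but is not marked
; "predicable_short_it" is disabled, and a pattern whose expansion would
; need a full-size IT block is disabled outright via
; "enabled_for_short_it" being "no", leaving only alternatives that fit
; a single 16-bit instruction inside an IT block.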
97
98 ;; Operand number of an input operand that is shifted. Zero if the
99 ;; given instruction does not shift one of its input operands.
100 (define_attr "shift" "" (const_int 0))
101
102 ;; [For compatibility with AArch64 in pipeline models]
103 ;; Attribute that specifies whether or not the instruction touches fp
104 ;; registers.
105 (define_attr "fp" "no,yes" (const_string "no"))
106
107 ; Floating Point Unit. If we only have floating point emulation, then there
108 ; is no point in scheduling the floating point insns. (Well, for best
109 ; performance we should try to group them together).
110 (define_attr "fpu" "none,vfp"
111 (const (symbol_ref "arm_fpu_attr")))
112
113 ; Predicated means that the insn form is conditionally executed based on a
114 ; predicate. We default to 'no' because no Thumb patterns match this rule
115 ; and not all ARM insns do.
116 (define_attr "predicated" "yes,no" (const_string "no"))
117
118 ; LENGTH of an instruction (in bytes)
119 (define_attr "length" ""
120 (const_int 4))
121
122 ; The architecture which supports the instruction (or alternative).
123 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
124 ; TARGET_32BIT, or "t1" or "t2" to specify a specific Thumb mode; "v6"
125 ; for ARM or Thumb-2 with arm_arch6, and "nov6" for ARM without
126 ; arm_arch6; "v6t2" for Thumb-2 with arm_arch6, and "v8mb" for ARMv8-M
127 ; Baseline. This attribute is used to compute the "enabled" attribute;
128 ; use "any" to enable an alternative in all cases.
129 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,v8mb,iwmmxt,iwmmxt2,armv6_or_vfpv3,neon"
130 (const_string "any"))
131
132 (define_attr "arch_enabled" "no,yes"
133 (cond [(eq_attr "arch" "any")
134 (const_string "yes")
135
136 (and (eq_attr "arch" "a")
137 (match_test "TARGET_ARM"))
138 (const_string "yes")
139
140 (and (eq_attr "arch" "t")
141 (match_test "TARGET_THUMB"))
142 (const_string "yes")
143
144 (and (eq_attr "arch" "t1")
145 (match_test "TARGET_THUMB1"))
146 (const_string "yes")
147
148 (and (eq_attr "arch" "t2")
149 (match_test "TARGET_THUMB2"))
150 (const_string "yes")
151
152 (and (eq_attr "arch" "32")
153 (match_test "TARGET_32BIT"))
154 (const_string "yes")
155
156 (and (eq_attr "arch" "v6")
157 (match_test "TARGET_32BIT && arm_arch6"))
158 (const_string "yes")
159
160 (and (eq_attr "arch" "nov6")
161 (match_test "TARGET_32BIT && !arm_arch6"))
162 (const_string "yes")
163
164 (and (eq_attr "arch" "v6t2")
165 (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
166 (const_string "yes")
167
168 (and (eq_attr "arch" "v8mb")
169 (match_test "TARGET_THUMB1 && arm_arch8"))
170 (const_string "yes")
171
172 (and (eq_attr "arch" "iwmmxt2")
173 (match_test "TARGET_REALLY_IWMMXT2"))
174 (const_string "yes")
175
176 (and (eq_attr "arch" "armv6_or_vfpv3")
177 (match_test "arm_arch6 || TARGET_VFP3"))
178 (const_string "yes")
179
180 (and (eq_attr "arch" "neon")
181 (match_test "TARGET_NEON"))
182 (const_string "yes")
183 ]
184
185 (const_string "no")))
186
187 (define_attr "opt" "any,speed,size"
188 (const_string "any"))
189
190 (define_attr "opt_enabled" "no,yes"
191 (cond [(eq_attr "opt" "any")
192 (const_string "yes")
193
194 (and (eq_attr "opt" "speed")
195 (match_test "optimize_function_for_speed_p (cfun)"))
196 (const_string "yes")
197
198 (and (eq_attr "opt" "size")
199 (match_test "optimize_function_for_size_p (cfun)"))
200 (const_string "yes")]
201 (const_string "no")))
202
203 (define_attr "use_literal_pool" "no,yes"
204 (cond [(and (eq_attr "type" "f_loads,f_loadd")
205 (match_test "CONSTANT_P (operands[1])"))
206 (const_string "yes")]
207 (const_string "no")))
208
209 ; Enable all alternatives that are both arch_enabled and insn_enabled.
210 ; FIXME: opt_enabled has been temporarily removed until we have
211 ; an attribute that allows the use of such alternatives.
212 ; This depends on caching speed_p and size_p on a per-alternative
213 ; basis. The problem is that the enabled attribute cannot depend on
214 ; any state that is not cached or is not constant for a compilation
215 ; unit. We probably need a generic "hot/cold" alternative which, if
216 ; implemented, could help with this. We disable this until such a
217 ; time as it is implemented and/or the improvements or regressions
218 ; from removing this attribute are double-checked.
219 ; See ashldi3_neon and <shift>di3_neon in neon.md.
220
221 (define_attr "enabled" "no,yes"
222 (cond [(and (eq_attr "predicable_short_it" "no")
223 (and (eq_attr "predicated" "yes")
224 (match_test "arm_restrict_it")))
225 (const_string "no")
226
227 (and (eq_attr "enabled_for_short_it" "no")
228 (match_test "arm_restrict_it"))
229 (const_string "no")
230
231 (eq_attr "arch_enabled" "no")
232 (const_string "no")]
233 (const_string "yes")))
234
235 ; POOL_RANGE is how far away from a constant pool entry that this insn
236 ; can be placed. If the distance is zero, then this insn will never
237 ; reference the pool.
238 ; Note that for Thumb constant pools the PC value is rounded down to the
239 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
240 ; Thumb insns) should be set to <max_range> - 2.
241 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
242 ; before its address. It is set to <max_range> - (8 + <data_size>).
243 (define_attr "arm_pool_range" "" (const_int 0))
244 (define_attr "thumb2_pool_range" "" (const_int 0))
245 (define_attr "arm_neg_pool_range" "" (const_int 0))
246 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
247
248 (define_attr "pool_range" ""
249 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
250 (attr "arm_pool_range")))
251 (define_attr "neg_pool_range" ""
252 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
253 (attr "arm_neg_pool_range")))
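
; A worked example of the formulas above (numbers purely illustrative,
; not taken from any particular pattern): for an insn that can address
; a 4-byte literal up to 1020 bytes away in either direction, the
; Thumb-2 forward range would be set to 1020 - 2 = 1018 (because the PC
; is rounded down to a multiple of four) and the negative range to
; 1020 - (8 + 4) = 1008.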
254
255 ; An assembler sequence may clobber the condition codes without us knowing.
256 ; If such an insn references the pool, then we have no way of knowing how,
257 ; so use the most conservative value for pool_range.
258 (define_asm_attributes
259 [(set_attr "conds" "clob")
260 (set_attr "length" "4")
261 (set_attr "pool_range" "250")])
262
263 ; Load scheduling, set from the arm_ld_sched variable
264 ; initialized by arm_option_override()
265 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
266
267 ; condition codes: this one is used by final_prescan_insn to speed up
268 ; conditionalizing instructions. It saves having to scan the rtl to see if
269 ; it uses or alters the condition codes.
270 ;
271 ; USE means that the condition codes are used by the insn in the process of
272 ; outputting code, this means (at present) that we can't use the insn in
273 ; inlined branches
274 ;
275 ; SET means that the purpose of the insn is to set the condition codes in a
276 ; well defined manner.
277 ;
278 ; CLOB means that the condition codes are altered in an undefined manner, if
279 ; they are altered at all
280 ;
281 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
282 ; that the instruction does not use or alter the condition codes.
283 ;
284 ; NOCOND means that the instruction does not use or alter the condition
285 ; codes but can be converted into a conditionally executed instruction.
286
287 (define_attr "conds" "use,set,clob,unconditional,nocond"
288 (if_then_else
289 (ior (eq_attr "is_thumb1" "yes")
290 (eq_attr "type" "call"))
291 (const_string "clob")
292 (if_then_else (eq_attr "is_neon_type" "no")
293 (const_string "nocond")
294 (const_string "unconditional"))))
295
296 ; Predicable means that the insn can be conditionally executed based on
297 ; an automatically added predicate (additional patterns are generated by
298 ; gen...). We default to 'no' because no Thumb patterns match this rule
299 ; and not all ARM patterns do.
300 (define_attr "predicable" "no,yes" (const_string "no"))
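
; For example (illustrative): an ARM-state pattern whose template is
; "add%?\t%0, %1, %2" and which sets "predicable" to "yes" can be
; turned into "addeq r0, r1, r2" when conditional execution conversion
; predicates it on EQ; in Thumb-2 the same instruction is instead
; placed inside an IT block.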
301
302 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
303 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
304 ; suffer blockages enough to warrant modelling this (and it can adversely
305 ; affect the schedule).
306 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
307
308 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
309 ; to stall the processor. Used with model_wbuf above.
310 (define_attr "write_conflict" "no,yes"
311 (if_then_else (eq_attr "type"
312 "block,call,load_4")
313 (const_string "yes")
314 (const_string "no")))
315
316 ; Classify the insns into those that take one cycle and those that take more
317 ; than one on the main cpu execution unit.
318 (define_attr "core_cycles" "single,multi"
319 (if_then_else (eq_attr "type"
320 "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
321 alu_shift_imm, alu_shift_reg, alu_dsp_reg, alus_ext, alus_imm, alus_sreg,\
322 alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
323 logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
324 logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
325 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
326 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
327 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
328 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
329 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
330 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
331 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
332 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
333 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
334 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
335 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
336 (const_string "single")
337 (const_string "multi")))
338
339 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
340 ;; distant label. Only applicable to Thumb code.
341 (define_attr "far_jump" "yes,no" (const_string "no"))
342
343
344 ;; The number of machine instructions this pattern expands to.
345 ;; Used for Thumb-2 conditional execution.
346 (define_attr "ce_count" "" (const_int 1))
347
348 ;;---------------------------------------------------------------------------
349 ;; Unspecs
350
351 (include "unspecs.md")
352
353 ;;---------------------------------------------------------------------------
354 ;; Mode iterators
355
356 (include "iterators.md")
357
358 ;;---------------------------------------------------------------------------
359 ;; Predicates
360
361 (include "predicates.md")
362 (include "constraints.md")
363
364 ;;---------------------------------------------------------------------------
365 ;; Pipeline descriptions
366
367 (define_attr "tune_cortexr4" "yes,no"
368 (const (if_then_else
369 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
370 (const_string "yes")
371 (const_string "no"))))
372
373 ;; True if the generic scheduling description should be used.
374
375 (define_attr "generic_sched" "yes,no"
376 (const (if_then_else
377 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
378 arm926ejs,arm10e,arm1026ejs,arm1136js,\
379 arm1136jfs,cortexa5,cortexa7,cortexa8,\
380 cortexa9,cortexa12,cortexa15,cortexa17,\
381 cortexa53,cortexa57,cortexm4,cortexm7,\
382 exynosm1,marvell_pj4,xgene1")
383 (eq_attr "tune_cortexr4" "yes"))
384 (const_string "no")
385 (const_string "yes"))))
386
387 (define_attr "generic_vfp" "yes,no"
388 (const (if_then_else
389 (and (eq_attr "fpu" "vfp")
390 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
391 cortexa8,cortexa9,cortexa53,cortexm4,\
392 cortexm7,marvell_pj4,xgene1")
393 (eq_attr "tune_cortexr4" "no"))
394 (const_string "yes")
395 (const_string "no"))))
396
397 (include "marvell-f-iwmmxt.md")
398 (include "arm-generic.md")
399 (include "arm926ejs.md")
400 (include "arm1020e.md")
401 (include "arm1026ejs.md")
402 (include "arm1136jfs.md")
403 (include "fa526.md")
404 (include "fa606te.md")
405 (include "fa626te.md")
406 (include "fmp626.md")
407 (include "fa726te.md")
408 (include "cortex-a5.md")
409 (include "cortex-a7.md")
410 (include "cortex-a8.md")
411 (include "cortex-a9.md")
412 (include "cortex-a15.md")
413 (include "cortex-a17.md")
414 (include "cortex-a53.md")
415 (include "cortex-a57.md")
416 (include "cortex-r4.md")
417 (include "cortex-r4f.md")
418 (include "cortex-m7.md")
419 (include "cortex-m4.md")
420 (include "cortex-m4-fpu.md")
421 (include "exynos-m1.md")
422 (include "vfp11.md")
423 (include "marvell-pj4.md")
424 (include "xgene1.md")
425
426 \f
427 ;;---------------------------------------------------------------------------
428 ;; Insn patterns
429 ;;
430 ;; Addition insns.
431
432 ;; Note: For DImode insns, there is normally no reason why operands should
433 ;; not be in the same register; what we don't want is for something being
434 ;; written to partially overlap something that is an input.
435
436 (define_expand "adddi3"
437 [(parallel
438 [(set (match_operand:DI 0 "s_register_operand")
439 (plus:DI (match_operand:DI 1 "s_register_operand")
440 (match_operand:DI 2 "reg_or_int_operand")))
441 (clobber (reg:CC CC_REGNUM))])]
442 "TARGET_EITHER"
443 "
444 if (TARGET_THUMB1)
445 {
446 if (!REG_P (operands[2]))
447 operands[2] = force_reg (DImode, operands[2]);
448 }
449 else
450 {
451 rtx lo_result, hi_result, lo_dest, hi_dest;
452 rtx lo_op1, hi_op1, lo_op2, hi_op2;
453 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
454 &lo_op2, &hi_op2);
455 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
456 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
457
458 if (lo_op2 == const0_rtx)
459 {
460 lo_dest = lo_op1;
461 if (!arm_add_operand (hi_op2, SImode))
462 hi_op2 = force_reg (SImode, hi_op2);
463 /* Assume hi_op2 won't also be zero. */
464 emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
465 }
466 else
467 {
468 if (!arm_add_operand (lo_op2, SImode))
469 lo_op2 = force_reg (SImode, lo_op2);
470 if (!arm_not_operand (hi_op2, SImode))
471 hi_op2 = force_reg (SImode, hi_op2);
472
473 emit_insn (gen_addsi3_compareC (lo_dest, lo_op1, lo_op2));
474 if (hi_op2 == const0_rtx)
475 emit_insn (gen_add0si3_carryin_ltu (hi_dest, hi_op1));
476 else
477 emit_insn (gen_addsi3_carryin_ltu (hi_dest, hi_op1, hi_op2));
478 }
479
480 if (lo_result != lo_dest)
481 emit_move_insn (lo_result, lo_dest);
482 if (hi_result != hi_dest)
483 emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
484 DONE;
485 }
486 "
487 )
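
;; As a sketch of what the expansion above generates in the general
;; case (all-register operands, register names illustrative):
;;      adds    lo_dest, lo_op1, lo_op2   @ sets C on unsigned carry-out
;;      adc     hi_dest, hi_op1, hi_op2   @ folds the carry into the high word
;; When the low half of operand 2 is zero the flag-setting addition is
;; skipped entirely; when only the high half is zero an "adc ..., #0" is
;; still emitted to propagate the carry.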
488
489 (define_expand "addv<mode>4"
490 [(match_operand:SIDI 0 "register_operand")
491 (match_operand:SIDI 1 "register_operand")
492 (match_operand:SIDI 2 "register_operand")
493 (match_operand 3 "")]
494 "TARGET_32BIT"
495 {
496 emit_insn (gen_add<mode>3_compareV (operands[0], operands[1], operands[2]));
497 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
498
499 DONE;
500 })
501
502 (define_expand "uaddv<mode>4"
503 [(match_operand:SIDI 0 "register_operand")
504 (match_operand:SIDI 1 "register_operand")
505 (match_operand:SIDI 2 "register_operand")
506 (match_operand 3 "")]
507 "TARGET_32BIT"
508 {
509 emit_insn (gen_add<mode>3_compareC (operands[0], operands[1], operands[2]));
510 arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
511
512 DONE;
513 })
514
515 (define_expand "addsi3"
516 [(set (match_operand:SI 0 "s_register_operand")
517 (plus:SI (match_operand:SI 1 "s_register_operand")
518 (match_operand:SI 2 "reg_or_int_operand")))]
519 "TARGET_EITHER"
520 "
521 if (TARGET_32BIT && CONST_INT_P (operands[2]))
522 {
523 arm_split_constant (PLUS, SImode, NULL_RTX,
524 INTVAL (operands[2]), operands[0], operands[1],
525 optimize && can_create_pseudo_p ());
526 DONE;
527 }
528 "
529 )
530
531 ; If there is a scratch available, this will be faster than synthesizing the
532 ; addition.
533 (define_peephole2
534 [(match_scratch:SI 3 "r")
535 (set (match_operand:SI 0 "arm_general_register_operand" "")
536 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
537 (match_operand:SI 2 "const_int_operand" "")))]
538 "TARGET_32BIT &&
539 !(const_ok_for_arm (INTVAL (operands[2]))
540 || const_ok_for_arm (-INTVAL (operands[2])))
541 && const_ok_for_arm (~INTVAL (operands[2]))"
542 [(set (match_dup 3) (match_dup 2))
543 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
544 ""
545 )
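
; For instance (hypothetical constant): with operands[2] == 0xffff00ff,
; neither the value nor its negation is a valid ARM immediate, but its
; complement 0x0000ff00 is, so the constant can be loaded into the
; scratch with a single mvn and the addition becomes something like
;      mvn     r3, #0x0000ff00
;      add     r0, r1, r3
; rather than a longer synthesized-constant sequence.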
546
547 ;; The r/r/k alternative is required when reloading the address
548 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
549 ;; put the duplicated register first, and not try the commutative version.
550 (define_insn_and_split "*arm_addsi3"
551 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
552 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
553 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
554 "TARGET_32BIT"
555 "@
556 add%?\\t%0, %0, %2
557 add%?\\t%0, %1, %2
558 add%?\\t%0, %1, %2
559 add%?\\t%0, %1, %2
560 add%?\\t%0, %1, %2
561 add%?\\t%0, %1, %2
562 add%?\\t%0, %2, %1
563 add%?\\t%0, %1, %2
564 addw%?\\t%0, %1, %2
565 addw%?\\t%0, %1, %2
566 sub%?\\t%0, %1, #%n2
567 sub%?\\t%0, %1, #%n2
568 sub%?\\t%0, %1, #%n2
569 subw%?\\t%0, %1, #%n2
570 subw%?\\t%0, %1, #%n2
571 #"
572 "TARGET_32BIT
573 && CONST_INT_P (operands[2])
574 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
575 && (reload_completed || !arm_eliminable_register (operands[1]))"
576 [(clobber (const_int 0))]
577 "
578 arm_split_constant (PLUS, SImode, curr_insn,
579 INTVAL (operands[2]), operands[0],
580 operands[1], 0);
581 DONE;
582 "
583 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
584 (set_attr "predicable" "yes")
585 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
586 (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
587 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
588 (const_string "alu_imm")
589 (const_string "alu_sreg")))
590 ]
591 )
592
593 (define_insn "adddi3_compareV"
594 [(set (reg:CC_V CC_REGNUM)
595 (ne:CC_V
596 (plus:TI
597 (sign_extend:TI (match_operand:DI 1 "s_register_operand" "r"))
598 (sign_extend:TI (match_operand:DI 2 "s_register_operand" "r")))
599 (sign_extend:TI (plus:DI (match_dup 1) (match_dup 2)))))
600 (set (match_operand:DI 0 "s_register_operand" "=&r")
601 (plus:DI (match_dup 1) (match_dup 2)))]
602 "TARGET_32BIT"
603 "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
604 [(set_attr "conds" "set")
605 (set_attr "length" "8")
606 (set_attr "type" "multiple")]
607 )
608
609 (define_insn "addsi3_compareV"
610 [(set (reg:CC_V CC_REGNUM)
611 (ne:CC_V
612 (plus:DI
613 (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
614 (sign_extend:DI (match_operand:SI 2 "register_operand" "r")))
615 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
616 (set (match_operand:SI 0 "register_operand" "=r")
617 (plus:SI (match_dup 1) (match_dup 2)))]
618 "TARGET_32BIT"
619 "adds%?\\t%0, %1, %2"
620 [(set_attr "conds" "set")
621 (set_attr "type" "alus_sreg")]
622 )
623
624 (define_insn "adddi3_compareC"
625 [(set (reg:CC_C CC_REGNUM)
626 (compare:CC_C
627 (plus:DI
628 (match_operand:DI 1 "register_operand" "r")
629 (match_operand:DI 2 "register_operand" "r"))
630 (match_dup 1)))
631 (set (match_operand:DI 0 "register_operand" "=&r")
632 (plus:DI (match_dup 1) (match_dup 2)))]
633 "TARGET_32BIT"
634 "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
635 [(set_attr "conds" "set")
636 (set_attr "length" "8")
637 (set_attr "type" "multiple")]
638 )
639
640 (define_insn "addsi3_compareC"
641 [(set (reg:CC_C CC_REGNUM)
642 (compare:CC_C (plus:SI (match_operand:SI 1 "register_operand" "r")
643 (match_operand:SI 2 "register_operand" "r"))
644 (match_dup 1)))
645 (set (match_operand:SI 0 "register_operand" "=r")
646 (plus:SI (match_dup 1) (match_dup 2)))]
647 "TARGET_32BIT"
648 "adds%?\\t%0, %1, %2"
649 [(set_attr "conds" "set")
650 (set_attr "type" "alus_sreg")]
651 )
652
653 (define_insn "addsi3_compare0"
654 [(set (reg:CC_NOOV CC_REGNUM)
655 (compare:CC_NOOV
656 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
657 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
658 (const_int 0)))
659 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
660 (plus:SI (match_dup 1) (match_dup 2)))]
661 "TARGET_ARM"
662 "@
663 adds%?\\t%0, %1, %2
664 subs%?\\t%0, %1, #%n2
665 adds%?\\t%0, %1, %2"
666 [(set_attr "conds" "set")
667 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
668 )
669
670 (define_insn "*addsi3_compare0_scratch"
671 [(set (reg:CC_NOOV CC_REGNUM)
672 (compare:CC_NOOV
673 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
674 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
675 (const_int 0)))]
676 "TARGET_ARM"
677 "@
678 cmn%?\\t%0, %1
679 cmp%?\\t%0, #%n1
680 cmn%?\\t%0, %1"
681 [(set_attr "conds" "set")
682 (set_attr "predicable" "yes")
683 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
684 )
685
686 (define_insn "*compare_negsi_si"
687 [(set (reg:CC_Z CC_REGNUM)
688 (compare:CC_Z
689 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
690 (match_operand:SI 1 "s_register_operand" "l,r")))]
691 "TARGET_32BIT"
692 "cmn%?\\t%1, %0"
693 [(set_attr "conds" "set")
694 (set_attr "predicable" "yes")
695 (set_attr "arch" "t2,*")
696 (set_attr "length" "2,4")
697 (set_attr "predicable_short_it" "yes,no")
698 (set_attr "type" "alus_sreg")]
699 )
700
701 ;; This is the canonicalization of subsi3_compare when the
702 ;; addend is a constant.
703 (define_insn "cmpsi2_addneg"
704 [(set (reg:CC CC_REGNUM)
705 (compare:CC
706 (match_operand:SI 1 "s_register_operand" "r,r")
707 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
708 (set (match_operand:SI 0 "s_register_operand" "=r,r")
709 (plus:SI (match_dup 1)
710 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
711 "TARGET_32BIT
712 && (INTVAL (operands[2])
713 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
714 {
715 /* For 0 and INT_MIN it is essential that we use subs, as adds will result
716 in different condition codes (like cmn rather than like cmp), so that
717 alternative comes first. Both alternatives can match for any 0x??000000
718 where, except for 0 and INT_MIN, it doesn't matter what we choose, and also
719 for -1 and 1 with TARGET_THUMB2; in that case prefer the instruction with #1
720 as it is shorter. */
721 if (which_alternative == 0 && operands[3] != const1_rtx)
722 return "subs%?\\t%0, %1, #%n3";
723 else
724 return "adds%?\\t%0, %1, %3";
725 }
726 [(set_attr "conds" "set")
727 (set_attr "type" "alus_sreg")]
728 )
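
;; A concrete case (register names illustrative): comparing r1 with 1
;; while also needing r1 - 1 matches this pattern with operands[2] == 1
;; and operands[3] == -1; the first alternative then emits
;;      subs    r0, r1, #1
;; which sets the flags exactly as "cmp r1, #1" would.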
729
730 ;; Convert the sequence
731 ;; sub rd, rn, #1
732 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
733 ;; bne dest
734 ;; into
735 ;; subs rd, rn, #1
736 ;; bcs dest ((unsigned)rn >= 1)
737 ;; similarly for the beq variant using bcc.
738 ;; This is a common looping idiom (while (n--))
739 (define_peephole2
740 [(set (match_operand:SI 0 "arm_general_register_operand" "")
741 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
742 (const_int -1)))
743 (set (match_operand 2 "cc_register" "")
744 (compare (match_dup 0) (const_int -1)))
745 (set (pc)
746 (if_then_else (match_operator 3 "equality_operator"
747 [(match_dup 2) (const_int 0)])
748 (match_operand 4 "" "")
749 (match_operand 5 "" "")))]
750 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
751 [(parallel[
752 (set (match_dup 2)
753 (compare:CC
754 (match_dup 1) (const_int 1)))
755 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
756 (set (pc)
757 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
758 (match_dup 4)
759 (match_dup 5)))]
760 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
761 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
762 ? GEU : LTU),
763 VOIDmode,
764 operands[2], const0_rtx);"
765 )
766
767 ;; The next four insns work because they compare the result with one of
768 ;; the operands, and we know that the use of the condition code is
769 ;; either GEU or LTU, so we can use the carry flag from the addition
770 ;; instead of doing the compare a second time.
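;; For example, "adds r0, r1, r2" leaves the carry flag set exactly when
;; the unsigned sum wrapped, i.e. when (r1 + r2) < r1 as unsigned values,
;; so a following GEU/LTU user can test C directly instead of comparing
;; the result against r1 a second time.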
771 (define_insn "*addsi3_compare_op1"
772 [(set (reg:CC_C CC_REGNUM)
773 (compare:CC_C
774 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
775 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
776 (match_dup 1)))
777 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
778 (plus:SI (match_dup 1) (match_dup 2)))]
779 "TARGET_32BIT"
780 "@
781 adds%?\\t%0, %1, %2
782 adds%?\\t%0, %0, %2
783 subs%?\\t%0, %1, #%n2
784 subs%?\\t%0, %0, #%n2
785 adds%?\\t%0, %1, %2
786 subs%?\\t%0, %1, #%n2
787 adds%?\\t%0, %1, %2"
788 [(set_attr "conds" "set")
789 (set_attr "arch" "t2,t2,t2,t2,*,*,*")
790 (set_attr "length" "2,2,2,2,4,4,4")
791 (set_attr "type"
792 "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
793 )
794
795 (define_insn "*addsi3_compare_op2"
796 [(set (reg:CC_C CC_REGNUM)
797 (compare:CC_C
798 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
799 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
800 (match_dup 2)))
801 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
802 (plus:SI (match_dup 1) (match_dup 2)))]
803 "TARGET_32BIT"
804 "@
805 adds%?\\t%0, %1, %2
806 adds%?\\t%0, %0, %2
807 subs%?\\t%0, %1, #%n2
808 subs%?\\t%0, %0, #%n2
809 adds%?\\t%0, %1, %2
810 subs%?\\t%0, %1, #%n2
811 adds%?\\t%0, %1, %2"
812 [(set_attr "conds" "set")
813 (set_attr "arch" "t2,t2,t2,t2,*,*,*")
814 (set_attr "length" "2,2,2,2,4,4,4")
815 (set_attr "type"
816 "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
817 )
818
819 (define_insn "*compare_addsi2_op0"
820 [(set (reg:CC_C CC_REGNUM)
821 (compare:CC_C
822 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
823 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
824 (match_dup 0)))]
825 "TARGET_32BIT"
826 "@
827 cmp%?\\t%0, #%n1
828 cmn%?\\t%0, %1
829 cmn%?\\t%0, %1
830 cmp%?\\t%0, #%n1
831 cmn%?\\t%0, %1"
832 [(set_attr "conds" "set")
833 (set_attr "predicable" "yes")
834 (set_attr "arch" "t2,t2,*,*,*")
835 (set_attr "predicable_short_it" "yes,yes,no,no,no")
836 (set_attr "length" "2,2,4,4,4")
837 (set_attr "type" "alus_imm,alus_sreg,alus_imm,alus_imm,alus_sreg")]
838 )
839
840 (define_insn "*compare_addsi2_op1"
841 [(set (reg:CC_C CC_REGNUM)
842 (compare:CC_C
843 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
844 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
845 (match_dup 1)))]
846 "TARGET_32BIT"
847 "@
848 cmp%?\\t%0, #%n1
849 cmn%?\\t%0, %1
850 cmn%?\\t%0, %1
851 cmp%?\\t%0, #%n1
852 cmn%?\\t%0, %1"
853 [(set_attr "conds" "set")
854 (set_attr "predicable" "yes")
855 (set_attr "arch" "t2,t2,*,*,*")
856 (set_attr "predicable_short_it" "yes,yes,no,no,no")
857 (set_attr "length" "2,2,4,4,4")
858 (set_attr "type" "alus_imm,alus_sreg,alus_imm,alus_imm,alus_sreg")]
859 )
860
861 (define_insn "addsi3_carryin_<optab>"
862 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
863 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
864 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
865 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
866 "TARGET_32BIT"
867 "@
868 adc%?\\t%0, %1, %2
869 adc%?\\t%0, %1, %2
870 sbc%?\\t%0, %1, #%B2"
871 [(set_attr "conds" "use")
872 (set_attr "predicable" "yes")
873 (set_attr "arch" "t2,*,*")
874 (set_attr "length" "4")
875 (set_attr "predicable_short_it" "yes,no,no")
876 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
877 )
878
879 ;; Canonicalization of the above when the immediate is zero.
880 (define_insn "add0si3_carryin_<optab>"
881 [(set (match_operand:SI 0 "s_register_operand" "=r")
882 (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
883 (match_operand:SI 1 "arm_not_operand" "r")))]
884 "TARGET_32BIT"
885 "adc%?\\t%0, %1, #0"
886 [(set_attr "conds" "use")
887 (set_attr "predicable" "yes")
888 (set_attr "length" "4")
889 (set_attr "type" "adc_imm")]
890 )
891
892 (define_insn "*addsi3_carryin_alt2_<optab>"
893 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
894 (plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
895 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
896 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
897 "TARGET_32BIT"
898 "@
899 adc%?\\t%0, %1, %2
900 adc%?\\t%0, %1, %2
901 sbc%?\\t%0, %1, #%B2"
902 [(set_attr "conds" "use")
903 (set_attr "predicable" "yes")
904 (set_attr "arch" "t2,*,*")
905 (set_attr "length" "4")
906 (set_attr "predicable_short_it" "yes,no,no")
907 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
908 )
909
910 (define_insn "*addsi3_carryin_shift_<optab>"
911 [(set (match_operand:SI 0 "s_register_operand" "=r")
912 (plus:SI (plus:SI
913 (match_operator:SI 2 "shift_operator"
914 [(match_operand:SI 3 "s_register_operand" "r")
915 (match_operand:SI 4 "reg_or_int_operand" "rM")])
916 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0)))
917 (match_operand:SI 1 "s_register_operand" "r")))]
918 "TARGET_32BIT"
919 "adc%?\\t%0, %1, %3%S2"
920 [(set_attr "conds" "use")
921 (set_attr "predicable" "yes")
922 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
923 (const_string "alu_shift_imm")
924 (const_string "alu_shift_reg")))]
925 )
926
927 (define_insn "*addsi3_carryin_clobercc_<optab>"
928 [(set (match_operand:SI 0 "s_register_operand" "=r")
929 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
930 (match_operand:SI 2 "arm_rhs_operand" "rI"))
931 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))
932 (clobber (reg:CC CC_REGNUM))]
933 "TARGET_32BIT"
934 "adcs%?\\t%0, %1, %2"
935 [(set_attr "conds" "set")
936 (set_attr "type" "adcs_reg")]
937 )
938
939 (define_expand "subv<mode>4"
940 [(match_operand:SIDI 0 "register_operand")
941 (match_operand:SIDI 1 "register_operand")
942 (match_operand:SIDI 2 "register_operand")
943 (match_operand 3 "")]
944 "TARGET_32BIT"
945 {
946 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
947 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
948
949 DONE;
950 })
951
952 (define_expand "usubv<mode>4"
953 [(match_operand:SIDI 0 "register_operand")
954 (match_operand:SIDI 1 "register_operand")
955 (match_operand:SIDI 2 "register_operand")
956 (match_operand 3 "")]
957 "TARGET_32BIT"
958 {
959 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
960 arm_gen_unlikely_cbranch (LTU, CCmode, operands[3]);
961
962 DONE;
963 })
964
965 (define_insn "subdi3_compare1"
966 [(set (reg:CC CC_REGNUM)
967 (compare:CC
968 (match_operand:DI 1 "s_register_operand" "r")
969 (match_operand:DI 2 "s_register_operand" "r")))
970 (set (match_operand:DI 0 "s_register_operand" "=&r")
971 (minus:DI (match_dup 1) (match_dup 2)))]
972 "TARGET_32BIT"
973 "subs\\t%Q0, %Q1, %Q2;sbcs\\t%R0, %R1, %R2"
974 [(set_attr "conds" "set")
975 (set_attr "length" "8")
976 (set_attr "type" "multiple")]
977 )
978
979 (define_insn "subsi3_compare1"
980 [(set (reg:CC CC_REGNUM)
981 (compare:CC
982 (match_operand:SI 1 "register_operand" "r")
983 (match_operand:SI 2 "register_operand" "r")))
984 (set (match_operand:SI 0 "register_operand" "=r")
985 (minus:SI (match_dup 1) (match_dup 2)))]
986 "TARGET_32BIT"
987 "subs%?\\t%0, %1, %2"
988 [(set_attr "conds" "set")
989 (set_attr "type" "alus_sreg")]
990 )
991
992 (define_insn "subsi3_carryin"
993 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
994 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
995 (match_operand:SI 2 "s_register_operand" "r,r,r"))
996 (match_operand:SI 3 "arm_borrow_operation" "")))]
997 "TARGET_32BIT"
998 "@
999 sbc%?\\t%0, %1, %2
1000 rsc%?\\t%0, %2, %1
1001 sbc%?\\t%0, %2, %2, lsl #1"
1002 [(set_attr "conds" "use")
1003 (set_attr "arch" "*,a,t2")
1004 (set_attr "predicable" "yes")
1005 (set_attr "type" "adc_reg,adc_imm,alu_shift_imm")]
1006 )
1007
1008 (define_insn "*subsi3_carryin_const"
1009 [(set (match_operand:SI 0 "s_register_operand" "=r")
1010 (minus:SI (plus:SI
1011 (match_operand:SI 1 "s_register_operand" "r")
1012 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1013 (match_operand:SI 3 "arm_borrow_operation" "")))]
1014 "TARGET_32BIT"
1015 "sbc\\t%0, %1, #%n2"
1016 [(set_attr "conds" "use")
1017 (set_attr "type" "adc_imm")]
1018 )
1019
1020 (define_insn "*subsi3_carryin_const0"
1021 [(set (match_operand:SI 0 "s_register_operand" "=r")
1022 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1023 (match_operand:SI 2 "arm_borrow_operation" "")))]
1024 "TARGET_32BIT"
1025 "sbc\\t%0, %1, #0"
1026 [(set_attr "conds" "use")
1027 (set_attr "type" "adc_imm")]
1028 )
1029
1030 (define_insn "*subsi3_carryin_shift"
1031 [(set (match_operand:SI 0 "s_register_operand" "=r")
1032 (minus:SI (minus:SI
1033 (match_operand:SI 1 "s_register_operand" "r")
1034 (match_operator:SI 2 "shift_operator"
1035 [(match_operand:SI 3 "s_register_operand" "r")
1036 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
1037 (match_operand:SI 5 "arm_borrow_operation" "")))]
1038 "TARGET_32BIT"
1039 "sbc%?\\t%0, %1, %3%S2"
1040 [(set_attr "conds" "use")
1041 (set_attr "predicable" "yes")
1042 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1043 (const_string "alu_shift_imm")
1044 (const_string "alu_shift_reg")))]
1045 )
1046
1047 (define_insn "*rsbsi3_carryin_shift"
1048 [(set (match_operand:SI 0 "s_register_operand" "=r")
1049 (minus:SI (minus:SI
1050 (match_operator:SI 2 "shift_operator"
1051 [(match_operand:SI 3 "s_register_operand" "r")
1052 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1053 (match_operand:SI 1 "s_register_operand" "r"))
1054 (match_operand:SI 5 "arm_borrow_operation" "")))]
1055 "TARGET_ARM"
1056 "rsc%?\\t%0, %1, %3%S2"
1057 [(set_attr "conds" "use")
1058 (set_attr "predicable" "yes")
1059 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1060 (const_string "alu_shift_imm")
1061 (const_string "alu_shift_reg")))]
1062 )
1063
1064 ; Transform ((x << y) - 1) to ~(~(x - 1) << y) where x is a constant.
1065 (define_split
1066 [(set (match_operand:SI 0 "s_register_operand" "")
1067 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1068 (match_operand:SI 2 "s_register_operand" ""))
1069 (const_int -1)))
1070 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1071 "TARGET_32BIT"
1072 [(set (match_dup 3) (match_dup 1))
1073 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1074 "
1075 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1076 ")
1077
1078 (define_expand "addsf3"
1079 [(set (match_operand:SF 0 "s_register_operand")
1080 (plus:SF (match_operand:SF 1 "s_register_operand")
1081 (match_operand:SF 2 "s_register_operand")))]
1082 "TARGET_32BIT && TARGET_HARD_FLOAT"
1083 "
1084 ")
1085
1086 (define_expand "adddf3"
1087 [(set (match_operand:DF 0 "s_register_operand")
1088 (plus:DF (match_operand:DF 1 "s_register_operand")
1089 (match_operand:DF 2 "s_register_operand")))]
1090 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1091 "
1092 ")
1093
1094 (define_expand "subdi3"
1095 [(parallel
1096 [(set (match_operand:DI 0 "s_register_operand")
1097 (minus:DI (match_operand:DI 1 "reg_or_int_operand")
1098 (match_operand:DI 2 "s_register_operand")))
1099 (clobber (reg:CC CC_REGNUM))])]
1100 "TARGET_EITHER"
1101 "
1102 if (TARGET_THUMB1)
1103 {
1104 if (!REG_P (operands[1]))
1105 operands[1] = force_reg (DImode, operands[1]);
1106 }
1107 else
1108 {
1109 rtx lo_result, hi_result, lo_dest, hi_dest;
1110 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1111 rtx condition;
1112
1113 /* Since operands[1] may be an integer, pass it second, so that
1114 any necessary simplifications will be done on the decomposed
1115 constant. */
1116 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1117 &lo_op1, &hi_op1);
1118 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
1119 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
1120
1121 if (!arm_rhs_operand (lo_op1, SImode))
1122 lo_op1 = force_reg (SImode, lo_op1);
1123
1124 if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
1125 || !arm_rhs_operand (hi_op1, SImode))
1126 hi_op1 = force_reg (SImode, hi_op1);
1127
1128 rtx cc_reg;
1129 if (lo_op1 == const0_rtx)
1130 {
1131 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1132 emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
1133 }
1134 else if (CONST_INT_P (lo_op1))
1135 {
1136 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1137 emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
1138 GEN_INT (~UINTVAL (lo_op1))));
1139 }
1140 else
1141 {
1142 cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
1143 emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
1144 }
1145
1146 condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);
1147
1148 if (hi_op1 == const0_rtx)
1149 emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
1150 else
1151 emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));
1152
1153 if (lo_result != lo_dest)
1154 emit_move_insn (lo_result, lo_dest);
1155
1156 if (hi_result != hi_dest)
1157 emit_move_insn (hi_result, hi_dest);
1158
1159 DONE;
1160 }
1161 "
1162 )
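
;; In the common all-register case the expansion above boils down to the
;; classic two-instruction 64-bit subtraction (register names
;; illustrative):
;;      subs    lo_dest, lo_op1, lo_op2   @ C set means no borrow
;;      sbc     hi_dest, hi_op1, hi_op2   @ subtracts the borrow from the high word
;; the constant and zero cases instead use the rsb- and negate-style
;; patterns the expander references.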
1163
1164 (define_insn "*arm_subdi3"
1165 [(set (match_operand:DI 0 "arm_general_register_operand" "=&r,&r,&r")
1166 (minus:DI (match_operand:DI 1 "arm_general_register_operand" "0,r,0")
1167 (match_operand:DI 2 "arm_general_register_operand" "r,0,0")))
1168 (clobber (reg:CC CC_REGNUM))]
1169 "TARGET_32BIT"
1170 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
1171 [(set_attr "conds" "clob")
1172 (set_attr "length" "8")
1173 (set_attr "type" "multiple")]
1174 )
1175
1176 (define_expand "subsi3"
1177 [(set (match_operand:SI 0 "s_register_operand")
1178 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1179 (match_operand:SI 2 "s_register_operand")))]
1180 "TARGET_EITHER"
1181 "
1182 if (CONST_INT_P (operands[1]))
1183 {
1184 if (TARGET_32BIT)
1185 {
1186 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1187 operands[1] = force_reg (SImode, operands[1]);
1188 else
1189 {
1190 arm_split_constant (MINUS, SImode, NULL_RTX,
1191 INTVAL (operands[1]), operands[0],
1192 operands[2],
1193 optimize && can_create_pseudo_p ());
1194 DONE;
1195 }
1196 }
1197 else /* TARGET_THUMB1 */
1198 operands[1] = force_reg (SImode, operands[1]);
1199 }
1200 "
1201 )
1202
1203 ; ??? Check Thumb-2 split length
1204 (define_insn_and_split "*arm_subsi3_insn"
1205 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
1206 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
1207 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
1208 "TARGET_32BIT"
1209 "@
1210 sub%?\\t%0, %1, %2
1211 sub%?\\t%0, %2
1212 sub%?\\t%0, %1, %2
1213 rsb%?\\t%0, %2, %1
1214 rsb%?\\t%0, %2, %1
1215 sub%?\\t%0, %1, %2
1216 sub%?\\t%0, %1, %2
1217 sub%?\\t%0, %1, %2
1218 #"
1219 "&& (CONST_INT_P (operands[1])
1220 && !const_ok_for_arm (INTVAL (operands[1])))"
1221 [(clobber (const_int 0))]
1222 "
1223 arm_split_constant (MINUS, SImode, curr_insn,
1224 INTVAL (operands[1]), operands[0], operands[2], 0);
1225 DONE;
1226 "
1227 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
1228 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
1229 (set_attr "predicable" "yes")
1230 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
1231 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
1232 )
1233
1234 (define_peephole2
1235 [(match_scratch:SI 3 "r")
1236 (set (match_operand:SI 0 "arm_general_register_operand" "")
1237 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1238 (match_operand:SI 2 "arm_general_register_operand" "")))]
1239 "TARGET_32BIT
1240 && !const_ok_for_arm (INTVAL (operands[1]))
1241 && const_ok_for_arm (~INTVAL (operands[1]))"
1242 [(set (match_dup 3) (match_dup 1))
1243 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1244 ""
1245 )
1246
1247 (define_insn "subsi3_compare0"
1248 [(set (reg:CC_NOOV CC_REGNUM)
1249 (compare:CC_NOOV
1250 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1251 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1252 (const_int 0)))
1253 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1254 (minus:SI (match_dup 1) (match_dup 2)))]
1255 "TARGET_32BIT"
1256 "@
1257 subs%?\\t%0, %1, %2
1258 subs%?\\t%0, %1, %2
1259 rsbs%?\\t%0, %2, %1"
1260 [(set_attr "conds" "set")
1261 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
1262 )
1263
1264 (define_insn "subsi3_compare"
1265 [(set (reg:CC CC_REGNUM)
1266 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1267 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1268 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1269 (minus:SI (match_dup 1) (match_dup 2)))]
1270 "TARGET_32BIT"
1271 "@
1272 subs%?\\t%0, %1, %2
1273 subs%?\\t%0, %1, %2
1274 rsbs%?\\t%0, %2, %1"
1275 [(set_attr "conds" "set")
1276 (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
1277 )
1278
1279 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
1280 ;; rather than (0 cmp reg). This gives the same results for unsigned
1281 ;; and equality compares, which is what we mostly need here.
1282 (define_insn "rsb_imm_compare"
1283 [(set (reg:CC_RSB CC_REGNUM)
1284 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
1285 (match_operand 3 "const_int_operand" "")))
1286 (set (match_operand:SI 0 "s_register_operand" "=r")
1287 (minus:SI (match_operand 1 "arm_immediate_operand" "I")
1288 (match_dup 2)))]
1289 "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
1290 "rsbs\\t%0, %2, %1"
1291 [(set_attr "conds" "set")
1292 (set_attr "type" "alus_imm")]
1293 )
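
;; Why the (~reg cmp ~imm) form used above is equivalent (sketch): in
;; two's complement ~a - ~b == (-a - 1) - (-b - 1) == b - a, so the
;; flags describe imm - reg, which is exactly what "rsbs %0, %2, %1"
;; computes; equality and the unsigned orderings are therefore
;; preserved, as the comment above notes.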
1294
1295 (define_expand "subsf3"
1296 [(set (match_operand:SF 0 "s_register_operand")
1297 (minus:SF (match_operand:SF 1 "s_register_operand")
1298 (match_operand:SF 2 "s_register_operand")))]
1299 "TARGET_32BIT && TARGET_HARD_FLOAT"
1300 "
1301 ")
1302
1303 (define_expand "subdf3"
1304 [(set (match_operand:DF 0 "s_register_operand")
1305 (minus:DF (match_operand:DF 1 "s_register_operand")
1306 (match_operand:DF 2 "s_register_operand")))]
1307 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1308 "
1309 ")
1310
1311 \f
1312 ;; Multiplication insns
1313
1314 (define_expand "mulhi3"
1315 [(set (match_operand:HI 0 "s_register_operand")
1316 (mult:HI (match_operand:HI 1 "s_register_operand")
1317 (match_operand:HI 2 "s_register_operand")))]
1318 "TARGET_DSP_MULTIPLY"
1319 "
1320 {
1321 rtx result = gen_reg_rtx (SImode);
1322 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
1323 emit_move_insn (operands[0], gen_lowpart (HImode, result));
1324 DONE;
1325 }"
1326 )
1327
1328 (define_expand "mulsi3"
1329 [(set (match_operand:SI 0 "s_register_operand")
1330 (mult:SI (match_operand:SI 2 "s_register_operand")
1331 (match_operand:SI 1 "s_register_operand")))]
1332 "TARGET_EITHER"
1333 ""
1334 )
1335
1336 ;; Use `&' and then `0' to prevent operands 0 and 2 from being the same
1337 (define_insn "*mul"
1338 [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
1339 (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
1340 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
1341 "TARGET_32BIT"
1342 "mul%?\\t%0, %2, %1"
1343 [(set_attr "type" "mul")
1344 (set_attr "predicable" "yes")
1345 (set_attr "arch" "t2,v6,nov6,nov6")
1346 (set_attr "length" "4")
1347 (set_attr "predicable_short_it" "yes,no,*,*")]
1348 )
1349
1350 ;; MLA and MLS instructions.  Use operand 1 for the accumulator to prefer
1351 ;; reusing the same register.
1352
1353 (define_insn "*mla"
1354 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
1355 (plus:SI
1356 (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
1357 (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
1358 (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
1359 "TARGET_32BIT"
1360 "mla%?\\t%0, %3, %2, %1"
1361 [(set_attr "type" "mla")
1362 (set_attr "predicable" "yes")
1363 (set_attr "arch" "v6,nov6,nov6,nov6")]
1364 )
1365
1366 (define_insn "*mls"
1367 [(set (match_operand:SI 0 "s_register_operand" "=r")
1368 (minus:SI
1369 (match_operand:SI 1 "s_register_operand" "r")
1370 (mult:SI (match_operand:SI 3 "s_register_operand" "r")
1371 (match_operand:SI 2 "s_register_operand" "r"))))]
1372 "TARGET_32BIT && arm_arch_thumb2"
1373 "mls%?\\t%0, %3, %2, %1"
1374 [(set_attr "type" "mla")
1375 (set_attr "predicable" "yes")]
1376 )
1377
1378 (define_insn "*mulsi3_compare0"
1379 [(set (reg:CC_NOOV CC_REGNUM)
1380 (compare:CC_NOOV (mult:SI
1381 (match_operand:SI 2 "s_register_operand" "r,r")
1382 (match_operand:SI 1 "s_register_operand" "%0,r"))
1383 (const_int 0)))
1384 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1385 (mult:SI (match_dup 2) (match_dup 1)))]
1386 "TARGET_ARM && !arm_arch6"
1387 "muls%?\\t%0, %2, %1"
1388 [(set_attr "conds" "set")
1389 (set_attr "type" "muls")]
1390 )
1391
1392 (define_insn "*mulsi3_compare0_v6"
1393 [(set (reg:CC_NOOV CC_REGNUM)
1394 (compare:CC_NOOV (mult:SI
1395 (match_operand:SI 2 "s_register_operand" "r")
1396 (match_operand:SI 1 "s_register_operand" "r"))
1397 (const_int 0)))
1398 (set (match_operand:SI 0 "s_register_operand" "=r")
1399 (mult:SI (match_dup 2) (match_dup 1)))]
1400 "TARGET_ARM && arm_arch6 && optimize_size"
1401 "muls%?\\t%0, %2, %1"
1402 [(set_attr "conds" "set")
1403 (set_attr "type" "muls")]
1404 )
1405
1406 (define_insn "*mulsi_compare0_scratch"
1407 [(set (reg:CC_NOOV CC_REGNUM)
1408 (compare:CC_NOOV (mult:SI
1409 (match_operand:SI 2 "s_register_operand" "r,r")
1410 (match_operand:SI 1 "s_register_operand" "%0,r"))
1411 (const_int 0)))
1412 (clobber (match_scratch:SI 0 "=&r,&r"))]
1413 "TARGET_ARM && !arm_arch6"
1414 "muls%?\\t%0, %2, %1"
1415 [(set_attr "conds" "set")
1416 (set_attr "type" "muls")]
1417 )
1418
1419 (define_insn "*mulsi_compare0_scratch_v6"
1420 [(set (reg:CC_NOOV CC_REGNUM)
1421 (compare:CC_NOOV (mult:SI
1422 (match_operand:SI 2 "s_register_operand" "r")
1423 (match_operand:SI 1 "s_register_operand" "r"))
1424 (const_int 0)))
1425 (clobber (match_scratch:SI 0 "=r"))]
1426 "TARGET_ARM && arm_arch6 && optimize_size"
1427 "muls%?\\t%0, %2, %1"
1428 [(set_attr "conds" "set")
1429 (set_attr "type" "muls")]
1430 )
1431
1432 (define_insn "*mulsi3addsi_compare0"
1433 [(set (reg:CC_NOOV CC_REGNUM)
1434 (compare:CC_NOOV
1435 (plus:SI (mult:SI
1436 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1437 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1438 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1439 (const_int 0)))
1440 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1441 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1442 (match_dup 3)))]
1443 "TARGET_ARM && arm_arch6"
1444 "mlas%?\\t%0, %2, %1, %3"
1445 [(set_attr "conds" "set")
1446 (set_attr "type" "mlas")]
1447 )
1448
1449 (define_insn "*mulsi3addsi_compare0_v6"
1450 [(set (reg:CC_NOOV CC_REGNUM)
1451 (compare:CC_NOOV
1452 (plus:SI (mult:SI
1453 (match_operand:SI 2 "s_register_operand" "r")
1454 (match_operand:SI 1 "s_register_operand" "r"))
1455 (match_operand:SI 3 "s_register_operand" "r"))
1456 (const_int 0)))
1457 (set (match_operand:SI 0 "s_register_operand" "=r")
1458 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1459 (match_dup 3)))]
1460 "TARGET_ARM && arm_arch6 && optimize_size"
1461 "mlas%?\\t%0, %2, %1, %3"
1462 [(set_attr "conds" "set")
1463 (set_attr "type" "mlas")]
1464 )
1465
1466 (define_insn "*mulsi3addsi_compare0_scratch"
1467 [(set (reg:CC_NOOV CC_REGNUM)
1468 (compare:CC_NOOV
1469 (plus:SI (mult:SI
1470 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1471 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1472 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1473 (const_int 0)))
1474 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1475 "TARGET_ARM && !arm_arch6"
1476 "mlas%?\\t%0, %2, %1, %3"
1477 [(set_attr "conds" "set")
1478 (set_attr "type" "mlas")]
1479 )
1480
1481 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1482 [(set (reg:CC_NOOV CC_REGNUM)
1483 (compare:CC_NOOV
1484 (plus:SI (mult:SI
1485 (match_operand:SI 2 "s_register_operand" "r")
1486 (match_operand:SI 1 "s_register_operand" "r"))
1487 (match_operand:SI 3 "s_register_operand" "r"))
1488 (const_int 0)))
1489 (clobber (match_scratch:SI 0 "=r"))]
1490 "TARGET_ARM && arm_arch6 && optimize_size"
1491 "mlas%?\\t%0, %2, %1, %3"
1492 [(set_attr "conds" "set")
1493 (set_attr "type" "mlas")]
1494 )
1495
1496 ;; 32x32->64 widening multiply.
1497 ;; The only difference between the v3-5 and v6+ versions is the requirement
1498 ;; that the output does not overlap with either input.
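;; For reference: "umull rdlo, rdhi, rm, rs" writes the low 32 bits of
;; the unsigned 64-bit product rm * rs to rdlo and the high 32 bits to
;; rdhi (smull is the signed counterpart); the expander below simply
;; splits the DImode destination into those two SImode halves.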
1499
1500 (define_expand "<Us>mulsidi3"
1501 [(set (match_operand:DI 0 "s_register_operand")
1502 (mult:DI
1503 (SE:DI (match_operand:SI 1 "s_register_operand"))
1504 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
1505 "TARGET_32BIT"
1506 {
1507 emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
1508 gen_highpart (SImode, operands[0]),
1509 operands[1], operands[2]));
1510 DONE;
1511 }
1512 )
1513
1514 (define_insn "<US>mull"
1515 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
1516 (mult:SI
1517 (match_operand:SI 2 "s_register_operand" "%r,r")
1518 (match_operand:SI 3 "s_register_operand" "r,r")))
1519 (set (match_operand:SI 1 "s_register_operand" "=r,&r")
1520 (truncate:SI
1521 (lshiftrt:DI
1522 (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
1523 (const_int 32))))]
1524 "TARGET_32BIT"
1525 "<US>mull%?\\t%0, %1, %2, %3"
1526 [(set_attr "type" "umull")
1527 (set_attr "predicable" "yes")
1528 (set_attr "arch" "v6,nov6")]
1529 )
1530
1531 (define_expand "<Us>maddsidi4"
1532 [(set (match_operand:DI 0 "s_register_operand")
1533 (plus:DI
1534 (mult:DI
1535 (SE:DI (match_operand:SI 1 "s_register_operand"))
1536 (SE:DI (match_operand:SI 2 "s_register_operand")))
1537 (match_operand:DI 3 "s_register_operand")))]
1538 "TARGET_32BIT"
1539 {
1540 emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
1541 gen_lowpart (SImode, operands[3]),
1542 gen_highpart (SImode, operands[0]),
1543 gen_highpart (SImode, operands[3]),
1544 operands[1], operands[2]));
1545 DONE;
1546 }
1547 )
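
;; Likewise, "umlal rdlo, rdhi, rm, rs" (smlal for the signed case) adds
;; the 64-bit product rm * rs to the 64-bit value held in rdhi:rdlo,
;; which is why the expander above passes the low and high halves of
;; operand 3 in as the accumulator inputs.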
1548
1549 (define_insn "<US>mlal"
1550 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
1551 (plus:SI
1552 (mult:SI
1553 (match_operand:SI 4 "s_register_operand" "%r,r")
1554 (match_operand:SI 5 "s_register_operand" "r,r"))
1555 (match_operand:SI 1 "s_register_operand" "0,0")))
1556 (set (match_operand:SI 2 "s_register_operand" "=r,&r")
1557 (plus:SI
1558 (truncate:SI
1559 (lshiftrt:DI
1560 (plus:DI
1561 (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
1562 (zero_extend:DI (match_dup 1)))
1563 (const_int 32)))
1564 (match_operand:SI 3 "s_register_operand" "2,2")))]
1565 "TARGET_32BIT"
1566 "<US>mlal%?\\t%0, %2, %4, %5"
1567 [(set_attr "type" "umlal")
1568 (set_attr "predicable" "yes")
1569 (set_attr "arch" "v6,nov6")]
1570 )
1571
1572 (define_expand "<US>mulsi3_highpart"
1573 [(parallel
1574 [(set (match_operand:SI 0 "s_register_operand")
1575 (truncate:SI
1576 (lshiftrt:DI
1577 (mult:DI
1578 (SE:DI (match_operand:SI 1 "s_register_operand"))
1579 (SE:DI (match_operand:SI 2 "s_register_operand")))
1580 (const_int 32))))
1581 (clobber (match_scratch:SI 3 ""))])]
1582 "TARGET_32BIT"
1583 ""
1584 )
1585
1586 (define_insn "*<US>mull_high"
1587 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
1588 (truncate:SI
1589 (lshiftrt:DI
1590 (mult:DI
1591 (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
1592 (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
1593 (const_int 32))))
1594 (clobber (match_scratch:SI 3 "=r,&r,&r"))]
1595 "TARGET_32BIT"
1596 "<US>mull%?\\t%3, %0, %2, %1"
1597 [(set_attr "type" "umull")
1598 (set_attr "predicable" "yes")
1599 (set_attr "arch" "v6,nov6,nov6")]
1600 )
1601
1602 (define_insn "mulhisi3"
1603 [(set (match_operand:SI 0 "s_register_operand" "=r")
1604 (mult:SI (sign_extend:SI
1605 (match_operand:HI 1 "s_register_operand" "%r"))
1606 (sign_extend:SI
1607 (match_operand:HI 2 "s_register_operand" "r"))))]
1608 "TARGET_DSP_MULTIPLY"
1609 "smulbb%?\\t%0, %1, %2"
1610 [(set_attr "type" "smulxy")
1611 (set_attr "predicable" "yes")]
1612 )
1613
1614 (define_insn "*mulhisi3tb"
1615 [(set (match_operand:SI 0 "s_register_operand" "=r")
1616 (mult:SI (ashiftrt:SI
1617 (match_operand:SI 1 "s_register_operand" "r")
1618 (const_int 16))
1619 (sign_extend:SI
1620 (match_operand:HI 2 "s_register_operand" "r"))))]
1621 "TARGET_DSP_MULTIPLY"
1622 "smultb%?\\t%0, %1, %2"
1623 [(set_attr "type" "smulxy")
1624 (set_attr "predicable" "yes")]
1625 )
1626
1627 (define_insn "*mulhisi3bt"
1628 [(set (match_operand:SI 0 "s_register_operand" "=r")
1629 (mult:SI (sign_extend:SI
1630 (match_operand:HI 1 "s_register_operand" "r"))
1631 (ashiftrt:SI
1632 (match_operand:SI 2 "s_register_operand" "r")
1633 (const_int 16))))]
1634 "TARGET_DSP_MULTIPLY"
1635 "smulbt%?\\t%0, %1, %2"
1636 [(set_attr "type" "smulxy")
1637 (set_attr "predicable" "yes")]
1638 )
1639
1640 (define_insn "*mulhisi3tt"
1641 [(set (match_operand:SI 0 "s_register_operand" "=r")
1642 (mult:SI (ashiftrt:SI
1643 (match_operand:SI 1 "s_register_operand" "r")
1644 (const_int 16))
1645 (ashiftrt:SI
1646 (match_operand:SI 2 "s_register_operand" "r")
1647 (const_int 16))))]
1648 "TARGET_DSP_MULTIPLY"
1649 "smultt%?\\t%0, %1, %2"
1650 [(set_attr "type" "smulxy")
1651 (set_attr "predicable" "yes")]
1652 )
1653
1654 (define_insn "maddhisi4"
1655 [(set (match_operand:SI 0 "s_register_operand" "=r")
1656 (plus:SI (mult:SI (sign_extend:SI
1657 (match_operand:HI 1 "s_register_operand" "r"))
1658 (sign_extend:SI
1659 (match_operand:HI 2 "s_register_operand" "r")))
1660 (match_operand:SI 3 "s_register_operand" "r")))]
1661 "TARGET_DSP_MULTIPLY"
1662 "smlabb%?\\t%0, %1, %2, %3"
1663 [(set_attr "type" "smlaxy")
1664 (set_attr "predicable" "yes")]
1665 )
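;; Editorial illustration (not from the original sources): maddhisi4 covers a
;; halfword multiply-accumulate such as
;;   int32_t mac16 (int32_t acc, int16_t a, int16_t b)
;;   { return acc + a * b; }
;; which becomes a single SMLABB instead of a separate multiply and add.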
1666
1667 ;; Note: there is no maddhisi4bt because this (tb) one is the canonical form
1668 (define_insn "*maddhisi4tb"
1669 [(set (match_operand:SI 0 "s_register_operand" "=r")
1670 (plus:SI (mult:SI (ashiftrt:SI
1671 (match_operand:SI 1 "s_register_operand" "r")
1672 (const_int 16))
1673 (sign_extend:SI
1674 (match_operand:HI 2 "s_register_operand" "r")))
1675 (match_operand:SI 3 "s_register_operand" "r")))]
1676 "TARGET_DSP_MULTIPLY"
1677 "smlatb%?\\t%0, %1, %2, %3"
1678 [(set_attr "type" "smlaxy")
1679 (set_attr "predicable" "yes")]
1680 )
1681
1682 (define_insn "*maddhisi4tt"
1683 [(set (match_operand:SI 0 "s_register_operand" "=r")
1684 (plus:SI (mult:SI (ashiftrt:SI
1685 (match_operand:SI 1 "s_register_operand" "r")
1686 (const_int 16))
1687 (ashiftrt:SI
1688 (match_operand:SI 2 "s_register_operand" "r")
1689 (const_int 16)))
1690 (match_operand:SI 3 "s_register_operand" "r")))]
1691 "TARGET_DSP_MULTIPLY"
1692 "smlatt%?\\t%0, %1, %2, %3"
1693 [(set_attr "type" "smlaxy")
1694 (set_attr "predicable" "yes")]
1695 )
1696
1697 (define_insn "maddhidi4"
1698 [(set (match_operand:DI 0 "s_register_operand" "=r")
1699 (plus:DI
1700 (mult:DI (sign_extend:DI
1701 (match_operand:HI 1 "s_register_operand" "r"))
1702 (sign_extend:DI
1703 (match_operand:HI 2 "s_register_operand" "r")))
1704 (match_operand:DI 3 "s_register_operand" "0")))]
1705 "TARGET_DSP_MULTIPLY"
1706 "smlalbb%?\\t%Q0, %R0, %1, %2"
1707 [(set_attr "type" "smlalxy")
1708 (set_attr "predicable" "yes")])
1709
1710 ;; Note: there is no maddhidi4bt because this (tb) one is the canonical form
1711 (define_insn "*maddhidi4tb"
1712 [(set (match_operand:DI 0 "s_register_operand" "=r")
1713 (plus:DI
1714 (mult:DI (sign_extend:DI
1715 (ashiftrt:SI
1716 (match_operand:SI 1 "s_register_operand" "r")
1717 (const_int 16)))
1718 (sign_extend:DI
1719 (match_operand:HI 2 "s_register_operand" "r")))
1720 (match_operand:DI 3 "s_register_operand" "0")))]
1721 "TARGET_DSP_MULTIPLY"
1722 "smlaltb%?\\t%Q0, %R0, %1, %2"
1723 [(set_attr "type" "smlalxy")
1724 (set_attr "predicable" "yes")])
1725
1726 (define_insn "*maddhidi4tt"
1727 [(set (match_operand:DI 0 "s_register_operand" "=r")
1728 (plus:DI
1729 (mult:DI (sign_extend:DI
1730 (ashiftrt:SI
1731 (match_operand:SI 1 "s_register_operand" "r")
1732 (const_int 16)))
1733 (sign_extend:DI
1734 (ashiftrt:SI
1735 (match_operand:SI 2 "s_register_operand" "r")
1736 (const_int 16))))
1737 (match_operand:DI 3 "s_register_operand" "0")))]
1738 "TARGET_DSP_MULTIPLY"
1739 "smlaltt%?\\t%Q0, %R0, %1, %2"
1740 [(set_attr "type" "smlalxy")
1741 (set_attr "predicable" "yes")])
1742
1743 (define_expand "mulsf3"
1744 [(set (match_operand:SF 0 "s_register_operand")
1745 (mult:SF (match_operand:SF 1 "s_register_operand")
1746 (match_operand:SF 2 "s_register_operand")))]
1747 "TARGET_32BIT && TARGET_HARD_FLOAT"
1748 "
1749 ")
1750
1751 (define_expand "muldf3"
1752 [(set (match_operand:DF 0 "s_register_operand")
1753 (mult:DF (match_operand:DF 1 "s_register_operand")
1754 (match_operand:DF 2 "s_register_operand")))]
1755 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1756 "
1757 ")
1758 \f
1759 ;; Division insns
1760
1761 (define_expand "divsf3"
1762 [(set (match_operand:SF 0 "s_register_operand")
1763 (div:SF (match_operand:SF 1 "s_register_operand")
1764 (match_operand:SF 2 "s_register_operand")))]
1765 "TARGET_32BIT && TARGET_HARD_FLOAT"
1766 "")
1767
1768 (define_expand "divdf3"
1769 [(set (match_operand:DF 0 "s_register_operand")
1770 (div:DF (match_operand:DF 1 "s_register_operand")
1771 (match_operand:DF 2 "s_register_operand")))]
1772 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
1773 "")
1774 \f
1775
1776 ; Expand logical operations. The mid-end expander does not split off memory
1777 ; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
1778 ; So an explicit expander is needed to generate better code.
1779
1780 (define_expand "<LOGICAL:optab>di3"
1781 [(set (match_operand:DI 0 "s_register_operand")
1782 (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
1783 (match_operand:DI 2 "arm_<optab>di_operand")))]
1784 "TARGET_32BIT"
1785 {
1786 rtx low = simplify_gen_binary (<CODE>, SImode,
1787 gen_lowpart (SImode, operands[1]),
1788 gen_lowpart (SImode, operands[2]));
1789 rtx high = simplify_gen_binary (<CODE>, SImode,
1790 gen_highpart (SImode, operands[1]),
1791 gen_highpart_mode (SImode, DImode,
1792 operands[2]));
1793
1794 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
1795 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
1796 DONE;
1797 }
1798 )
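;; Editorial illustration (not from the original sources): with the expander
;; above, a 64-bit logical operation such as
;;   uint64_t f (uint64_t x, uint64_t y) { return x & y; }
;; is emitted as two independent SImode ANDs, one on the low words and one on
;; the high words, so each half can be optimised or predicated separately.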
1799
1800 (define_expand "one_cmpldi2"
1801 [(set (match_operand:DI 0 "s_register_operand")
1802 (not:DI (match_operand:DI 1 "s_register_operand")))]
1803 "TARGET_32BIT"
1804 {
1805 rtx low = simplify_gen_unary (NOT, SImode,
1806 gen_lowpart (SImode, operands[1]),
1807 SImode);
1808 rtx high = simplify_gen_unary (NOT, SImode,
1809 gen_highpart_mode (SImode, DImode,
1810 operands[1]),
1811 SImode);
1812
1813 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
1814 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
1815 DONE;
1816 }
1817 )
1818
1819 ;; Split DImode and, ior, xor operations. Simply perform the logical
1820 ;; operation on the upper and lower halves of the registers.
1821 ;; This is needed for atomic operations in arm_split_atomic_op.
1822 ;; Avoid splitting IWMMXT instructions.
1823 (define_split
1824 [(set (match_operand:DI 0 "s_register_operand" "")
1825 (match_operator:DI 6 "logical_binary_operator"
1826 [(match_operand:DI 1 "s_register_operand" "")
1827 (match_operand:DI 2 "s_register_operand" "")]))]
1828 "TARGET_32BIT && reload_completed
1829 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1830 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1831 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1832 "
1833 {
1834 operands[3] = gen_highpart (SImode, operands[0]);
1835 operands[0] = gen_lowpart (SImode, operands[0]);
1836 operands[4] = gen_highpart (SImode, operands[1]);
1837 operands[1] = gen_lowpart (SImode, operands[1]);
1838 operands[5] = gen_highpart (SImode, operands[2]);
1839 operands[2] = gen_lowpart (SImode, operands[2]);
1840 }"
1841 )
1842
1843 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
1844 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
1845 (define_split
1846 [(set (match_operand:DI 0 "s_register_operand")
1847 (not:DI (match_operand:DI 1 "s_register_operand")))]
1848 "TARGET_32BIT"
1849 [(set (match_dup 0) (not:SI (match_dup 1)))
1850 (set (match_dup 2) (not:SI (match_dup 3)))]
1851 "
1852 {
1853 operands[2] = gen_highpart (SImode, operands[0]);
1854 operands[0] = gen_lowpart (SImode, operands[0]);
1855 operands[3] = gen_highpart (SImode, operands[1]);
1856 operands[1] = gen_lowpart (SImode, operands[1]);
1857 }"
1858 )
1859
1860 (define_expand "andsi3"
1861 [(set (match_operand:SI 0 "s_register_operand")
1862 (and:SI (match_operand:SI 1 "s_register_operand")
1863 (match_operand:SI 2 "reg_or_int_operand")))]
1864 "TARGET_EITHER"
1865 "
1866 if (TARGET_32BIT)
1867 {
1868 if (CONST_INT_P (operands[2]))
1869 {
1870 if (INTVAL (operands[2]) == 255 && arm_arch6)
1871 {
1872 operands[1] = convert_to_mode (QImode, operands[1], 1);
1873 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
1874 operands[1]));
1875 DONE;
1876 }
1877 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
1878 operands[2] = force_reg (SImode, operands[2]);
1879 else
1880 {
1881 arm_split_constant (AND, SImode, NULL_RTX,
1882 INTVAL (operands[2]), operands[0],
1883 operands[1],
1884 optimize && can_create_pseudo_p ());
1885
1886 DONE;
1887 }
1888 }
1889 }
1890 else /* TARGET_THUMB1 */
1891 {
1892 if (!CONST_INT_P (operands[2]))
1893 {
1894 rtx tmp = force_reg (SImode, operands[2]);
1895 if (rtx_equal_p (operands[0], operands[1]))
1896 operands[2] = tmp;
1897 else
1898 {
1899 operands[2] = operands[1];
1900 operands[1] = tmp;
1901 }
1902 }
1903 else
1904 {
1905 int i;
1906
1907 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1908 {
1909 operands[2] = force_reg (SImode,
1910 GEN_INT (~INTVAL (operands[2])));
1911
1912 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
1913
1914 DONE;
1915 }
1916
1917 for (i = 9; i <= 31; i++)
1918 {
1919 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
1920 {
1921 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1922 const0_rtx));
1923 DONE;
1924 }
1925 else if ((HOST_WIDE_INT_1 << i) - 1
1926 == ~INTVAL (operands[2]))
1927 {
1928 rtx shift = GEN_INT (i);
1929 rtx reg = gen_reg_rtx (SImode);
1930
1931 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1932 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1933
1934 DONE;
1935 }
1936 }
1937
1938 operands[2] = force_reg (SImode, operands[2]);
1939 }
1940 }
1941 "
1942 )
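;; Editorial illustration (not from the original sources) of the Thumb-1
;; constant handling above, assuming a 32-bit int:
;;   x & 0xffffff00  ->  the complement (0xff) fits in 8 bits, so it is loaded
;;                       into a register and a BIC is used
;;   x & 0x0003ffff  ->  a low mask of (1 << 18) - 1, so an 18-bit zero_extract
;;   x & 0xfffc0000  ->  the inverted low mask, so LSR #18 followed by LSL #18
;; Anything else falls through to a register-register AND.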
1943
1944 ; ??? Check split length for Thumb-2
1945 (define_insn_and_split "*arm_andsi3_insn"
1946 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
1947 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
1948 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
1949 "TARGET_32BIT"
1950 "@
1951 and%?\\t%0, %1, %2
1952 and%?\\t%0, %1, %2
1953 bic%?\\t%0, %1, #%B2
1954 and%?\\t%0, %1, %2
1955 #"
1956 "TARGET_32BIT
1957 && CONST_INT_P (operands[2])
1958 && !(const_ok_for_arm (INTVAL (operands[2]))
1959 || const_ok_for_arm (~INTVAL (operands[2])))"
1960 [(clobber (const_int 0))]
1961 "
1962 arm_split_constant (AND, SImode, curr_insn,
1963 INTVAL (operands[2]), operands[0], operands[1], 0);
1964 DONE;
1965 "
1966 [(set_attr "length" "4,4,4,4,16")
1967 (set_attr "predicable" "yes")
1968 (set_attr "predicable_short_it" "no,yes,no,no,no")
1969 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
1970 )
1971
1972 (define_insn "*andsi3_compare0"
1973 [(set (reg:CC_NOOV CC_REGNUM)
1974 (compare:CC_NOOV
1975 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1976 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
1977 (const_int 0)))
1978 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1979 (and:SI (match_dup 1) (match_dup 2)))]
1980 "TARGET_32BIT"
1981 "@
1982 ands%?\\t%0, %1, %2
1983 bics%?\\t%0, %1, #%B2
1984 ands%?\\t%0, %1, %2"
1985 [(set_attr "conds" "set")
1986 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
1987 )
1988
1989 (define_insn "*andsi3_compare0_scratch"
1990 [(set (reg:CC_NOOV CC_REGNUM)
1991 (compare:CC_NOOV
1992 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
1993 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
1994 (const_int 0)))
1995 (clobber (match_scratch:SI 2 "=X,r,X"))]
1996 "TARGET_32BIT"
1997 "@
1998 tst%?\\t%0, %1
1999 bics%?\\t%2, %0, #%B1
2000 tst%?\\t%0, %1"
2001 [(set_attr "conds" "set")
2002 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
2003 )
2004
2005 (define_insn "*zeroextractsi_compare0_scratch"
2006 [(set (reg:CC_NOOV CC_REGNUM)
2007 (compare:CC_NOOV (zero_extract:SI
2008 (match_operand:SI 0 "s_register_operand" "r")
2009 (match_operand 1 "const_int_operand" "n")
2010 (match_operand 2 "const_int_operand" "n"))
2011 (const_int 0)))]
2012 "TARGET_32BIT
2013 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2014 && INTVAL (operands[1]) > 0
2015 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2016 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2017 "*
2018 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2019 << INTVAL (operands[2]));
2020 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2021 return \"\";
2022 "
2023 [(set_attr "conds" "set")
2024 (set_attr "predicable" "yes")
2025 (set_attr "type" "logics_imm")]
2026 )
2027
2028 (define_insn_and_split "*ne_zeroextractsi"
2029 [(set (match_operand:SI 0 "s_register_operand" "=r")
2030 (ne:SI (zero_extract:SI
2031 (match_operand:SI 1 "s_register_operand" "r")
2032 (match_operand:SI 2 "const_int_operand" "n")
2033 (match_operand:SI 3 "const_int_operand" "n"))
2034 (const_int 0)))
2035 (clobber (reg:CC CC_REGNUM))]
2036 "TARGET_32BIT
2037 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2038 && INTVAL (operands[2]) > 0
2039 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2040 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2041 "#"
2042 "TARGET_32BIT
2043 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2044 && INTVAL (operands[2]) > 0
2045 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2046 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2047 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2048 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2049 (const_int 0)))
2050 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2051 (set (match_dup 0)
2052 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2053 (match_dup 0) (const_int 1)))]
2054 "
2055 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2056 << INTVAL (operands[3]));
2057 "
2058 [(set_attr "conds" "clob")
2059 (set (attr "length")
2060 (if_then_else (eq_attr "is_thumb" "yes")
2061 (const_int 12)
2062 (const_int 8)))
2063 (set_attr "type" "multiple")]
2064 )
2065
2066 (define_insn_and_split "*ne_zeroextractsi_shifted"
2067 [(set (match_operand:SI 0 "s_register_operand" "=r")
2068 (ne:SI (zero_extract:SI
2069 (match_operand:SI 1 "s_register_operand" "r")
2070 (match_operand:SI 2 "const_int_operand" "n")
2071 (const_int 0))
2072 (const_int 0)))
2073 (clobber (reg:CC CC_REGNUM))]
2074 "TARGET_ARM"
2075 "#"
2076 "TARGET_ARM"
2077 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2078 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2079 (const_int 0)))
2080 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2081 (set (match_dup 0)
2082 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2083 (match_dup 0) (const_int 1)))]
2084 "
2085 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2086 "
2087 [(set_attr "conds" "clob")
2088 (set_attr "length" "8")
2089 (set_attr "type" "multiple")]
2090 )
2091
2092 (define_insn_and_split "*ite_ne_zeroextractsi"
2093 [(set (match_operand:SI 0 "s_register_operand" "=r")
2094 (if_then_else:SI (ne (zero_extract:SI
2095 (match_operand:SI 1 "s_register_operand" "r")
2096 (match_operand:SI 2 "const_int_operand" "n")
2097 (match_operand:SI 3 "const_int_operand" "n"))
2098 (const_int 0))
2099 (match_operand:SI 4 "arm_not_operand" "rIK")
2100 (const_int 0)))
2101 (clobber (reg:CC CC_REGNUM))]
2102 "TARGET_ARM
2103 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2104 && INTVAL (operands[2]) > 0
2105 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2106 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2107 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2108 "#"
2109 "TARGET_ARM
2110 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2111 && INTVAL (operands[2]) > 0
2112 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2113 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2114 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2115 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2116 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2117 (const_int 0)))
2118 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2119 (set (match_dup 0)
2120 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2121 (match_dup 0) (match_dup 4)))]
2122 "
2123 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2124 << INTVAL (operands[3]));
2125 "
2126 [(set_attr "conds" "clob")
2127 (set_attr "length" "8")
2128 (set_attr "type" "multiple")]
2129 )
2130
2131 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2132 [(set (match_operand:SI 0 "s_register_operand" "=r")
2133 (if_then_else:SI (ne (zero_extract:SI
2134 (match_operand:SI 1 "s_register_operand" "r")
2135 (match_operand:SI 2 "const_int_operand" "n")
2136 (const_int 0))
2137 (const_int 0))
2138 (match_operand:SI 3 "arm_not_operand" "rIK")
2139 (const_int 0)))
2140 (clobber (reg:CC CC_REGNUM))]
2141 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2142 "#"
2143 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2144 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2145 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2146 (const_int 0)))
2147 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2148 (set (match_dup 0)
2149 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2150 (match_dup 0) (match_dup 3)))]
2151 "
2152 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2153 "
2154 [(set_attr "conds" "clob")
2155 (set_attr "length" "8")
2156 (set_attr "type" "multiple")]
2157 )
2158
2159 ;; ??? Use Thumb-2 bitfield insert/extract instructions.
2160 (define_split
2161 [(set (match_operand:SI 0 "s_register_operand" "")
2162 (match_operator:SI 1 "shiftable_operator"
2163 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2164 (match_operand:SI 3 "const_int_operand" "")
2165 (match_operand:SI 4 "const_int_operand" ""))
2166 (match_operand:SI 5 "s_register_operand" "")]))
2167 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2168 "TARGET_ARM"
2169 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2170 (set (match_dup 0)
2171 (match_op_dup 1
2172 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2173 (match_dup 5)]))]
2174 "{
2175 HOST_WIDE_INT temp = INTVAL (operands[3]);
2176
2177 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2178 operands[4] = GEN_INT (32 - temp);
2179 }"
2180 )
2181
2182 (define_split
2183 [(set (match_operand:SI 0 "s_register_operand" "")
2184 (match_operator:SI 1 "shiftable_operator"
2185 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2186 (match_operand:SI 3 "const_int_operand" "")
2187 (match_operand:SI 4 "const_int_operand" ""))
2188 (match_operand:SI 5 "s_register_operand" "")]))
2189 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2190 "TARGET_ARM"
2191 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2192 (set (match_dup 0)
2193 (match_op_dup 1
2194 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2195 (match_dup 5)]))]
2196 "{
2197 HOST_WIDE_INT temp = INTVAL (operands[3]);
2198
2199 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2200 operands[4] = GEN_INT (32 - temp);
2201 }"
2202 )
2203
2204 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2205 ;;; represented by the bitfield, then this will produce incorrect results.
2206 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2207 ;;; which have a real bit-field insert instruction, the truncation happens
2208 ;;; in the bit-field insert instruction itself. Since arm does not have a
2209 ;;; bit-field insert instruction, we would have to emit code here to truncate
2210 ;;; the value before we insert. This loses some of the advantage of having
2211 ;;; this insv pattern, so it needs to be reevaluated.
2212
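;; Editorial illustration (not from the original sources) of the concern
;; above: inserting an over-wide value has to act as if only the low WIDTH
;; bits were supplied.  For example, storing the constant 0x1f into a 4-bit
;; field at bit 8 must set bits [11:8] to 0xf and leave bit 12 unchanged.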
2213 (define_expand "insv"
2214 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
2215 (match_operand 1 "general_operand")
2216 (match_operand 2 "general_operand"))
2217 (match_operand 3 "reg_or_int_operand"))]
2218 "TARGET_ARM || arm_arch_thumb2"
2219 "
2220 {
2221 int start_bit = INTVAL (operands[2]);
2222 int width = INTVAL (operands[1]);
2223 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
2224 rtx target, subtarget;
2225
2226 if (arm_arch_thumb2)
2227 {
2228 if (unaligned_access && MEM_P (operands[0])
2229 && s_register_operand (operands[3], GET_MODE (operands[3]))
2230 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2231 {
2232 rtx base_addr;
2233
2234 if (BYTES_BIG_ENDIAN)
2235 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2236 - start_bit;
2237
2238 if (width == 32)
2239 {
2240 base_addr = adjust_address (operands[0], SImode,
2241 start_bit / BITS_PER_UNIT);
2242 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2243 }
2244 else
2245 {
2246 rtx tmp = gen_reg_rtx (HImode);
2247
2248 base_addr = adjust_address (operands[0], HImode,
2249 start_bit / BITS_PER_UNIT);
2250 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2251 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2252 }
2253 DONE;
2254 }
2255 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2256 {
2257 bool use_bfi = TRUE;
2258
2259 if (CONST_INT_P (operands[3]))
2260 {
2261 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2262
2263 if (val == 0)
2264 {
2265 emit_insn (gen_insv_zero (operands[0], operands[1],
2266 operands[2]));
2267 DONE;
2268 }
2269
2270 /* See if the set can be done with a single orr instruction. */
2271 if (val == mask && const_ok_for_arm (val << start_bit))
2272 use_bfi = FALSE;
2273 }
2274
2275 if (use_bfi)
2276 {
2277 if (!REG_P (operands[3]))
2278 operands[3] = force_reg (SImode, operands[3]);
2279
2280 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2281 operands[3]));
2282 DONE;
2283 }
2284 }
2285 else
2286 FAIL;
2287 }
2288
2289 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
2290 FAIL;
2291
2292 target = copy_rtx (operands[0]);
2293 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2294 subreg as the final target. */
2295 if (GET_CODE (target) == SUBREG)
2296 {
2297 subtarget = gen_reg_rtx (SImode);
2298 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2299 < GET_MODE_SIZE (SImode))
2300 target = SUBREG_REG (target);
2301 }
2302 else
2303 subtarget = target;
2304
2305 if (CONST_INT_P (operands[3]))
2306 {
2307 /* Since we are inserting a known constant, we may be able to
2308 reduce the number of bits that we have to clear so that
2309 the mask becomes simple. */
2310 /* ??? This code does not check to see if the new mask is actually
2311 simpler. It may not be. */
2312 rtx op1 = gen_reg_rtx (SImode);
2313 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2314 start of this pattern. */
2315 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2316 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2317
2318 emit_insn (gen_andsi3 (op1, operands[0],
2319 gen_int_mode (~mask2, SImode)));
2320 emit_insn (gen_iorsi3 (subtarget, op1,
2321 gen_int_mode (op3_value << start_bit, SImode)));
2322 }
2323 else if (start_bit == 0
2324 && !(const_ok_for_arm (mask)
2325 || const_ok_for_arm (~mask)))
2326 {
2327 /* A trick: since we are setting the bottom bits in the word,
2328 we can shift operand[3] up, shift operand[0] down, OR them together,
2329 and rotate the result back again. This takes 3 insns, and
2330 the third might be mergeable into another op. */
2331 /* The shift up copes with the possibility that operand[3] is
2332 wider than the bitfield. */
2333 rtx op0 = gen_reg_rtx (SImode);
2334 rtx op1 = gen_reg_rtx (SImode);
2335
2336 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2337 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2338 emit_insn (gen_iorsi3 (op1, op1, op0));
2339 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2340 }
2341 else if ((width + start_bit == 32)
2342 && !(const_ok_for_arm (mask)
2343 || const_ok_for_arm (~mask)))
2344 {
2345 /* Similar trick, but slightly less efficient. */
2346
2347 rtx op0 = gen_reg_rtx (SImode);
2348 rtx op1 = gen_reg_rtx (SImode);
2349
2350 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2351 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2352 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2353 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2354 }
2355 else
2356 {
2357 rtx op0 = gen_int_mode (mask, SImode);
2358 rtx op1 = gen_reg_rtx (SImode);
2359 rtx op2 = gen_reg_rtx (SImode);
2360
2361 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2362 {
2363 rtx tmp = gen_reg_rtx (SImode);
2364
2365 emit_insn (gen_movsi (tmp, op0));
2366 op0 = tmp;
2367 }
2368
2369 /* Mask out any bits in operand[3] that are not needed. */
2370 emit_insn (gen_andsi3 (op1, operands[3], op0));
2371
2372 if (CONST_INT_P (op0)
2373 && (const_ok_for_arm (mask << start_bit)
2374 || const_ok_for_arm (~(mask << start_bit))))
2375 {
2376 op0 = gen_int_mode (~(mask << start_bit), SImode);
2377 emit_insn (gen_andsi3 (op2, operands[0], op0));
2378 }
2379 else
2380 {
2381 if (CONST_INT_P (op0))
2382 {
2383 rtx tmp = gen_reg_rtx (SImode);
2384
2385 emit_insn (gen_movsi (tmp, op0));
2386 op0 = tmp;
2387 }
2388
2389 if (start_bit != 0)
2390 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2391
2392 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2393 }
2394
2395 if (start_bit != 0)
2396 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2397
2398 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2399 }
2400
2401 if (subtarget != target)
2402 {
2403 /* If TARGET is still a SUBREG, then it must be wider than a word,
2404 so we must be careful only to set the subword we were asked to. */
2405 if (GET_CODE (target) == SUBREG)
2406 emit_move_insn (target, subtarget);
2407 else
2408 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2409 }
2410
2411 DONE;
2412 }"
2413 )
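;; Editorial illustration (not from the original sources) of the rotate trick
;; used above for a bottom-of-word field whose mask is not a valid immediate,
;; e.g. inserting a 20-bit value V into bits [19:0] of X:
;;   t = V << 12;                    /* push the 20 useful bits to the top   */
;;   u = X >> 20;                    /* keep only the untouched high 12 bits */
;;   u |= t;
;;   result = rotate_left (u, 20);   /* put both halves back in place        */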
2414
2415 (define_insn "insv_zero"
2416 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2417 (match_operand:SI 1 "const_int_M_operand" "M")
2418 (match_operand:SI 2 "const_int_M_operand" "M"))
2419 (const_int 0))]
2420 "arm_arch_thumb2"
2421 "bfc%?\t%0, %2, %1"
2422 [(set_attr "length" "4")
2423 (set_attr "predicable" "yes")
2424 (set_attr "type" "bfm")]
2425 )
2426
2427 (define_insn "insv_t2"
2428 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2429 (match_operand:SI 1 "const_int_M_operand" "M")
2430 (match_operand:SI 2 "const_int_M_operand" "M"))
2431 (match_operand:SI 3 "s_register_operand" "r"))]
2432 "arm_arch_thumb2"
2433 "bfi%?\t%0, %3, %2, %1"
2434 [(set_attr "length" "4")
2435 (set_attr "predicable" "yes")
2436 (set_attr "type" "bfm")]
2437 )
2438
2439 (define_insn "andsi_notsi_si"
2440 [(set (match_operand:SI 0 "s_register_operand" "=r")
2441 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2442 (match_operand:SI 1 "s_register_operand" "r")))]
2443 "TARGET_32BIT"
2444 "bic%?\\t%0, %1, %2"
2445 [(set_attr "predicable" "yes")
2446 (set_attr "type" "logic_reg")]
2447 )
2448
2449 (define_insn "andsi_not_shiftsi_si"
2450 [(set (match_operand:SI 0 "s_register_operand" "=r")
2451 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2452 [(match_operand:SI 2 "s_register_operand" "r")
2453 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2454 (match_operand:SI 1 "s_register_operand" "r")))]
2455 "TARGET_ARM"
2456 "bic%?\\t%0, %1, %2%S4"
2457 [(set_attr "predicable" "yes")
2458 (set_attr "shift" "2")
2459 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2460 (const_string "logic_shift_imm")
2461 (const_string "logic_shift_reg")))]
2462 )
2463
2464 ;; Shifted bics pattern used only to set up the CC status register, without
2465 ;; reusing the bics output. The pattern restricts the Thumb-2 shift operand
2466 ;; to a constant, since bics on Thumb-2 does not support a shift by register.
2467 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
2468 [(set (reg:CC_NOOV CC_REGNUM)
2469 (compare:CC_NOOV
2470 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2471 [(match_operand:SI 1 "s_register_operand" "r")
2472 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2473 (match_operand:SI 3 "s_register_operand" "r"))
2474 (const_int 0)))
2475 (clobber (match_scratch:SI 4 "=r"))]
2476 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2477 "bics%?\\t%4, %3, %1%S0"
2478 [(set_attr "predicable" "yes")
2479 (set_attr "conds" "set")
2480 (set_attr "shift" "1")
2481 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2482 (const_string "logic_shift_imm")
2483 (const_string "logic_shift_reg")))]
2484 )
2485
2486 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the bics result is also
2487 ;; reused later.
2488 (define_insn "andsi_not_shiftsi_si_scc"
2489 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2490 (compare:CC_NOOV
2491 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2492 [(match_operand:SI 1 "s_register_operand" "r")
2493 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2494 (match_operand:SI 3 "s_register_operand" "r"))
2495 (const_int 0)))
2496 (set (match_operand:SI 4 "s_register_operand" "=r")
2497 (and:SI (not:SI (match_op_dup 0
2498 [(match_dup 1)
2499 (match_dup 2)]))
2500 (match_dup 3)))])]
2501 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2502 "bics%?\\t%4, %3, %1%S0"
2503 [(set_attr "predicable" "yes")
2504 (set_attr "conds" "set")
2505 (set_attr "shift" "1")
2506 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2507 (const_string "logic_shift_imm")
2508 (const_string "logic_shift_reg")))]
2509 )
2510
2511 (define_insn "*andsi_notsi_si_compare0"
2512 [(set (reg:CC_NOOV CC_REGNUM)
2513 (compare:CC_NOOV
2514 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2515 (match_operand:SI 1 "s_register_operand" "r"))
2516 (const_int 0)))
2517 (set (match_operand:SI 0 "s_register_operand" "=r")
2518 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2519 "TARGET_32BIT"
2520 "bics\\t%0, %1, %2"
2521 [(set_attr "conds" "set")
2522 (set_attr "type" "logics_shift_reg")]
2523 )
2524
2525 (define_insn "*andsi_notsi_si_compare0_scratch"
2526 [(set (reg:CC_NOOV CC_REGNUM)
2527 (compare:CC_NOOV
2528 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2529 (match_operand:SI 1 "s_register_operand" "r"))
2530 (const_int 0)))
2531 (clobber (match_scratch:SI 0 "=r"))]
2532 "TARGET_32BIT"
2533 "bics\\t%0, %1, %2"
2534 [(set_attr "conds" "set")
2535 (set_attr "type" "logics_shift_reg")]
2536 )
2537
2538 (define_expand "iorsi3"
2539 [(set (match_operand:SI 0 "s_register_operand")
2540 (ior:SI (match_operand:SI 1 "s_register_operand")
2541 (match_operand:SI 2 "reg_or_int_operand")))]
2542 "TARGET_EITHER"
2543 "
2544 if (CONST_INT_P (operands[2]))
2545 {
2546 if (TARGET_32BIT)
2547 {
2548 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
2549 operands[2] = force_reg (SImode, operands[2]);
2550 else
2551 {
2552 arm_split_constant (IOR, SImode, NULL_RTX,
2553 INTVAL (operands[2]), operands[0],
2554 operands[1],
2555 optimize && can_create_pseudo_p ());
2556 DONE;
2557 }
2558 }
2559 else /* TARGET_THUMB1 */
2560 {
2561 rtx tmp = force_reg (SImode, operands[2]);
2562 if (rtx_equal_p (operands[0], operands[1]))
2563 operands[2] = tmp;
2564 else
2565 {
2566 operands[2] = operands[1];
2567 operands[1] = tmp;
2568 }
2569 }
2570 }
2571 "
2572 )
2573
2574 (define_insn_and_split "*iorsi3_insn"
2575 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2576 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2577 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2578 "TARGET_32BIT"
2579 "@
2580 orr%?\\t%0, %1, %2
2581 orr%?\\t%0, %1, %2
2582 orn%?\\t%0, %1, #%B2
2583 orr%?\\t%0, %1, %2
2584 #"
2585 "TARGET_32BIT
2586 && CONST_INT_P (operands[2])
2587 && !(const_ok_for_arm (INTVAL (operands[2]))
2588 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2589 [(clobber (const_int 0))]
2590 {
2591 arm_split_constant (IOR, SImode, curr_insn,
2592 INTVAL (operands[2]), operands[0], operands[1], 0);
2593 DONE;
2594 }
2595 [(set_attr "length" "4,4,4,4,16")
2596 (set_attr "arch" "32,t2,t2,32,32")
2597 (set_attr "predicable" "yes")
2598 (set_attr "predicable_short_it" "no,yes,no,no,no")
2599 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
2600 )
2601
2602 (define_peephole2
2603 [(match_scratch:SI 3 "r")
2604 (set (match_operand:SI 0 "arm_general_register_operand" "")
2605 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2606 (match_operand:SI 2 "const_int_operand" "")))]
2607 "TARGET_ARM
2608 && !const_ok_for_arm (INTVAL (operands[2]))
2609 && const_ok_for_arm (~INTVAL (operands[2]))"
2610 [(set (match_dup 3) (match_dup 2))
2611 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2612 ""
2613 )
2614
2615 (define_insn "*iorsi3_compare0"
2616 [(set (reg:CC_NOOV CC_REGNUM)
2617 (compare:CC_NOOV
2618 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2619 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2620 (const_int 0)))
2621 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
2622 (ior:SI (match_dup 1) (match_dup 2)))]
2623 "TARGET_32BIT"
2624 "orrs%?\\t%0, %1, %2"
2625 [(set_attr "conds" "set")
2626 (set_attr "arch" "*,t2,*")
2627 (set_attr "length" "4,2,4")
2628 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
2629 )
2630
2631 (define_insn "*iorsi3_compare0_scratch"
2632 [(set (reg:CC_NOOV CC_REGNUM)
2633 (compare:CC_NOOV
2634 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2635 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2636 (const_int 0)))
2637 (clobber (match_scratch:SI 0 "=r,l,r"))]
2638 "TARGET_32BIT"
2639 "orrs%?\\t%0, %1, %2"
2640 [(set_attr "conds" "set")
2641 (set_attr "arch" "*,t2,*")
2642 (set_attr "length" "4,2,4")
2643 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
2644 )
2645
2646 (define_expand "xorsi3"
2647 [(set (match_operand:SI 0 "s_register_operand")
2648 (xor:SI (match_operand:SI 1 "s_register_operand")
2649 (match_operand:SI 2 "reg_or_int_operand")))]
2650 "TARGET_EITHER"
2651 "if (CONST_INT_P (operands[2]))
2652 {
2653 if (TARGET_32BIT)
2654 {
2655 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
2656 operands[2] = force_reg (SImode, operands[2]);
2657 else
2658 {
2659 arm_split_constant (XOR, SImode, NULL_RTX,
2660 INTVAL (operands[2]), operands[0],
2661 operands[1],
2662 optimize && can_create_pseudo_p ());
2663 DONE;
2664 }
2665 }
2666 else /* TARGET_THUMB1 */
2667 {
2668 rtx tmp = force_reg (SImode, operands[2]);
2669 if (rtx_equal_p (operands[0], operands[1]))
2670 operands[2] = tmp;
2671 else
2672 {
2673 operands[2] = operands[1];
2674 operands[1] = tmp;
2675 }
2676 }
2677 }"
2678 )
2679
2680 (define_insn_and_split "*arm_xorsi3"
2681 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
2682 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
2683 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
2684 "TARGET_32BIT"
2685 "@
2686 eor%?\\t%0, %1, %2
2687 eor%?\\t%0, %1, %2
2688 eor%?\\t%0, %1, %2
2689 #"
2690 "TARGET_32BIT
2691 && CONST_INT_P (operands[2])
2692 && !const_ok_for_arm (INTVAL (operands[2]))"
2693 [(clobber (const_int 0))]
2694 {
2695 arm_split_constant (XOR, SImode, curr_insn,
2696 INTVAL (operands[2]), operands[0], operands[1], 0);
2697 DONE;
2698 }
2699 [(set_attr "length" "4,4,4,16")
2700 (set_attr "predicable" "yes")
2701 (set_attr "predicable_short_it" "no,yes,no,no")
2702 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
2703 )
2704
2705 (define_insn "*xorsi3_compare0"
2706 [(set (reg:CC_NOOV CC_REGNUM)
2707 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
2708 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
2709 (const_int 0)))
2710 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2711 (xor:SI (match_dup 1) (match_dup 2)))]
2712 "TARGET_32BIT"
2713 "eors%?\\t%0, %1, %2"
2714 [(set_attr "conds" "set")
2715 (set_attr "type" "logics_imm,logics_reg")]
2716 )
2717
2718 (define_insn "*xorsi3_compare0_scratch"
2719 [(set (reg:CC_NOOV CC_REGNUM)
2720 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
2721 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
2722 (const_int 0)))]
2723 "TARGET_32BIT"
2724 "teq%?\\t%0, %1"
2725 [(set_attr "conds" "set")
2726 (set_attr "type" "logics_imm,logics_reg")]
2727 )
2728
2729 ; By splitting (IOR (AND (NOT A) (NOT B)) C) into D = AND (IOR A B) (NOT C)
2730 ; followed by (NOT D), we can sometimes merge the final NOT into one of the
2731 ; following insns.
2732
2733 (define_split
2734 [(set (match_operand:SI 0 "s_register_operand" "")
2735 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2736 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2737 (match_operand:SI 3 "arm_rhs_operand" "")))
2738 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2739 "TARGET_32BIT"
2740 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2741 (not:SI (match_dup 3))))
2742 (set (match_dup 0) (not:SI (match_dup 4)))]
2743 ""
2744 )
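;; Editorial illustration (not from the original sources): by De Morgan,
;;   (~a & ~b) | c  ==  ~((a | b) & ~c)
;; so the split computes d = (a | b) & ~c with an ORR and a BIC, and the final
;; MVN of d can often be absorbed by whatever consumes the result.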
2745
2746 (define_insn_and_split "*andsi_iorsi3_notsi"
2747 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2748 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
2749 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2750 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2751 "TARGET_32BIT"
2752 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2753 "&& reload_completed"
2754 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
2755 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
2756 {
2757 /* If operands[3] is a constant make sure to fold the NOT into it
2758 to avoid creating a NOT of a CONST_INT. */
2759 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
2760 if (CONST_INT_P (not_rtx))
2761 {
2762 operands[4] = operands[0];
2763 operands[5] = not_rtx;
2764 }
2765 else
2766 {
2767 operands[5] = operands[0];
2768 operands[4] = not_rtx;
2769 }
2770 }
2771 [(set_attr "length" "8")
2772 (set_attr "ce_count" "2")
2773 (set_attr "predicable" "yes")
2774 (set_attr "type" "multiple")]
2775 )
2776
2777 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2778 ; insns are available?
2779 (define_split
2780 [(set (match_operand:SI 0 "s_register_operand" "")
2781 (match_operator:SI 1 "logical_binary_operator"
2782 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2783 (match_operand:SI 3 "const_int_operand" "")
2784 (match_operand:SI 4 "const_int_operand" ""))
2785 (match_operator:SI 9 "logical_binary_operator"
2786 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2787 (match_operand:SI 6 "const_int_operand" ""))
2788 (match_operand:SI 7 "s_register_operand" "")])]))
2789 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2790 "TARGET_32BIT
2791 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2792 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2793 [(set (match_dup 8)
2794 (match_op_dup 1
2795 [(ashift:SI (match_dup 2) (match_dup 4))
2796 (match_dup 5)]))
2797 (set (match_dup 0)
2798 (match_op_dup 1
2799 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2800 (match_dup 7)]))]
2801 "
2802 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2803 ")
2804
2805 (define_split
2806 [(set (match_operand:SI 0 "s_register_operand" "")
2807 (match_operator:SI 1 "logical_binary_operator"
2808 [(match_operator:SI 9 "logical_binary_operator"
2809 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2810 (match_operand:SI 6 "const_int_operand" ""))
2811 (match_operand:SI 7 "s_register_operand" "")])
2812 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2813 (match_operand:SI 3 "const_int_operand" "")
2814 (match_operand:SI 4 "const_int_operand" ""))]))
2815 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2816 "TARGET_32BIT
2817 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2818 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2819 [(set (match_dup 8)
2820 (match_op_dup 1
2821 [(ashift:SI (match_dup 2) (match_dup 4))
2822 (match_dup 5)]))
2823 (set (match_dup 0)
2824 (match_op_dup 1
2825 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2826 (match_dup 7)]))]
2827 "
2828 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2829 ")
2830
2831 (define_split
2832 [(set (match_operand:SI 0 "s_register_operand" "")
2833 (match_operator:SI 1 "logical_binary_operator"
2834 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2835 (match_operand:SI 3 "const_int_operand" "")
2836 (match_operand:SI 4 "const_int_operand" ""))
2837 (match_operator:SI 9 "logical_binary_operator"
2838 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2839 (match_operand:SI 6 "const_int_operand" ""))
2840 (match_operand:SI 7 "s_register_operand" "")])]))
2841 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2842 "TARGET_32BIT
2843 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2844 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2845 [(set (match_dup 8)
2846 (match_op_dup 1
2847 [(ashift:SI (match_dup 2) (match_dup 4))
2848 (match_dup 5)]))
2849 (set (match_dup 0)
2850 (match_op_dup 1
2851 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2852 (match_dup 7)]))]
2853 "
2854 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2855 ")
2856
2857 (define_split
2858 [(set (match_operand:SI 0 "s_register_operand" "")
2859 (match_operator:SI 1 "logical_binary_operator"
2860 [(match_operator:SI 9 "logical_binary_operator"
2861 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2862 (match_operand:SI 6 "const_int_operand" ""))
2863 (match_operand:SI 7 "s_register_operand" "")])
2864 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2865 (match_operand:SI 3 "const_int_operand" "")
2866 (match_operand:SI 4 "const_int_operand" ""))]))
2867 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2868 "TARGET_32BIT
2869 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2870 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2871 [(set (match_dup 8)
2872 (match_op_dup 1
2873 [(ashift:SI (match_dup 2) (match_dup 4))
2874 (match_dup 5)]))
2875 (set (match_dup 0)
2876 (match_op_dup 1
2877 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2878 (match_dup 7)]))]
2879 "
2880 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2881 ")
2882 \f
2883
2884 ;; Minimum and maximum insns
2885
2886 (define_expand "smaxsi3"
2887 [(parallel [
2888 (set (match_operand:SI 0 "s_register_operand")
2889 (smax:SI (match_operand:SI 1 "s_register_operand")
2890 (match_operand:SI 2 "arm_rhs_operand")))
2891 (clobber (reg:CC CC_REGNUM))])]
2892 "TARGET_32BIT"
2893 "
2894 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2895 {
2896 /* No need for a clobber of the condition code register here. */
2897 emit_insn (gen_rtx_SET (operands[0],
2898 gen_rtx_SMAX (SImode, operands[1],
2899 operands[2])));
2900 DONE;
2901 }
2902 ")
2903
2904 (define_insn "*smax_0"
2905 [(set (match_operand:SI 0 "s_register_operand" "=r")
2906 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2907 (const_int 0)))]
2908 "TARGET_32BIT"
2909 "bic%?\\t%0, %1, %1, asr #31"
2910 [(set_attr "predicable" "yes")
2911 (set_attr "type" "logic_shift_reg")]
2912 )
2913
2914 (define_insn "*smax_m1"
2915 [(set (match_operand:SI 0 "s_register_operand" "=r")
2916 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2917 (const_int -1)))]
2918 "TARGET_32BIT"
2919 "orr%?\\t%0, %1, %1, asr #31"
2920 [(set_attr "predicable" "yes")
2921 (set_attr "type" "logic_shift_reg")]
2922 )
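;; Editorial illustration (not from the original sources): x >> 31 (arithmetic)
;; is -1 when x is negative and 0 otherwise, so
;;   BIC Rd, Rx, Rx, ASR #31   clears the result when x is negative, giving
;;                             smax (x, 0), and
;;   ORR Rd, Rx, Rx, ASR #31   forces the result to -1 when x is negative,
;;                             giving smax (x, -1).
;; The *smin_0 pattern below uses the same mask with an AND for smin (x, 0).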
2923
2924 (define_insn_and_split "*arm_smax_insn"
2925 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2926 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2927 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2928 (clobber (reg:CC CC_REGNUM))]
2929 "TARGET_ARM"
2930 "#"
2931 ; cmp\\t%1, %2\;movlt\\t%0, %2
2932 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
2933 "TARGET_ARM"
2934 [(set (reg:CC CC_REGNUM)
2935 (compare:CC (match_dup 1) (match_dup 2)))
2936 (set (match_dup 0)
2937 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
2938 (match_dup 1)
2939 (match_dup 2)))]
2940 ""
2941 [(set_attr "conds" "clob")
2942 (set_attr "length" "8,12")
2943 (set_attr "type" "multiple")]
2944 )
2945
2946 (define_expand "sminsi3"
2947 [(parallel [
2948 (set (match_operand:SI 0 "s_register_operand")
2949 (smin:SI (match_operand:SI 1 "s_register_operand")
2950 (match_operand:SI 2 "arm_rhs_operand")))
2951 (clobber (reg:CC CC_REGNUM))])]
2952 "TARGET_32BIT"
2953 "
2954 if (operands[2] == const0_rtx)
2955 {
2956 /* No need for a clobber of the condition code register here. */
2957 emit_insn (gen_rtx_SET (operands[0],
2958 gen_rtx_SMIN (SImode, operands[1],
2959 operands[2])));
2960 DONE;
2961 }
2962 ")
2963
2964 (define_insn "*smin_0"
2965 [(set (match_operand:SI 0 "s_register_operand" "=r")
2966 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
2967 (const_int 0)))]
2968 "TARGET_32BIT"
2969 "and%?\\t%0, %1, %1, asr #31"
2970 [(set_attr "predicable" "yes")
2971 (set_attr "type" "logic_shift_reg")]
2972 )
2973
2974 (define_insn_and_split "*arm_smin_insn"
2975 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2976 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2977 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2978 (clobber (reg:CC CC_REGNUM))]
2979 "TARGET_ARM"
2980 "#"
2981 ; cmp\\t%1, %2\;movge\\t%0, %2
2982 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
2983 "TARGET_ARM"
2984 [(set (reg:CC CC_REGNUM)
2985 (compare:CC (match_dup 1) (match_dup 2)))
2986 (set (match_dup 0)
2987 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
2988 (match_dup 1)
2989 (match_dup 2)))]
2990 ""
2991 [(set_attr "conds" "clob")
2992 (set_attr "length" "8,12")
2993 (set_attr "type" "multiple,multiple")]
2994 )
2995
2996 (define_expand "umaxsi3"
2997 [(parallel [
2998 (set (match_operand:SI 0 "s_register_operand")
2999 (umax:SI (match_operand:SI 1 "s_register_operand")
3000 (match_operand:SI 2 "arm_rhs_operand")))
3001 (clobber (reg:CC CC_REGNUM))])]
3002 "TARGET_32BIT"
3003 ""
3004 )
3005
3006 (define_insn_and_split "*arm_umaxsi3"
3007 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3008 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3009 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3010 (clobber (reg:CC CC_REGNUM))]
3011 "TARGET_ARM"
3012 "#"
3013 ; cmp\\t%1, %2\;movcc\\t%0, %2
3014 ; cmp\\t%1, %2\;movcs\\t%0, %1
3015 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3016 "TARGET_ARM"
3017 [(set (reg:CC CC_REGNUM)
3018 (compare:CC (match_dup 1) (match_dup 2)))
3019 (set (match_dup 0)
3020 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
3021 (match_dup 1)
3022 (match_dup 2)))]
3023 ""
3024 [(set_attr "conds" "clob")
3025 (set_attr "length" "8,8,12")
3026 (set_attr "type" "store_4")]
3027 )
3028
3029 (define_expand "uminsi3"
3030 [(parallel [
3031 (set (match_operand:SI 0 "s_register_operand")
3032 (umin:SI (match_operand:SI 1 "s_register_operand")
3033 (match_operand:SI 2 "arm_rhs_operand")))
3034 (clobber (reg:CC CC_REGNUM))])]
3035 "TARGET_32BIT"
3036 ""
3037 )
3038
3039 (define_insn_and_split "*arm_uminsi3"
3040 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3041 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3042 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3043 (clobber (reg:CC CC_REGNUM))]
3044 "TARGET_ARM"
3045 "#"
3046 ; cmp\\t%1, %2\;movcs\\t%0, %2
3047 ; cmp\\t%1, %2\;movcc\\t%0, %1
3048 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3049 "TARGET_ARM"
3050 [(set (reg:CC CC_REGNUM)
3051 (compare:CC (match_dup 1) (match_dup 2)))
3052 (set (match_dup 0)
3053 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
3054 (match_dup 1)
3055 (match_dup 2)))]
3056 ""
3057 [(set_attr "conds" "clob")
3058 (set_attr "length" "8,8,12")
3059 (set_attr "type" "store_4")]
3060 )
3061
3062 (define_insn "*store_minmaxsi"
3063 [(set (match_operand:SI 0 "memory_operand" "=m")
3064 (match_operator:SI 3 "minmax_operator"
3065 [(match_operand:SI 1 "s_register_operand" "r")
3066 (match_operand:SI 2 "s_register_operand" "r")]))
3067 (clobber (reg:CC CC_REGNUM))]
3068 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
3069 "*
3070 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3071 operands[1], operands[2]);
3072 output_asm_insn (\"cmp\\t%1, %2\", operands);
3073 if (TARGET_THUMB2)
3074 output_asm_insn (\"ite\t%d3\", operands);
3075 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3076 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3077 return \"\";
3078 "
3079 [(set_attr "conds" "clob")
3080 (set (attr "length")
3081 (if_then_else (eq_attr "is_thumb" "yes")
3082 (const_int 14)
3083 (const_int 12)))
3084 (set_attr "type" "store_4")]
3085 )
3086
3087 ; Reject the frame pointer in operand[1], since reloading this after
3088 ; it has been eliminated can cause carnage.
3089 (define_insn "*minmax_arithsi"
3090 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3091 (match_operator:SI 4 "shiftable_operator"
3092 [(match_operator:SI 5 "minmax_operator"
3093 [(match_operand:SI 2 "s_register_operand" "r,r")
3094 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3095 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3096 (clobber (reg:CC CC_REGNUM))]
3097 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
3098 "*
3099 {
3100 enum rtx_code code = GET_CODE (operands[4]);
3101 bool need_else;
3102
3103 if (which_alternative != 0 || operands[3] != const0_rtx
3104 || (code != PLUS && code != IOR && code != XOR))
3105 need_else = true;
3106 else
3107 need_else = false;
3108
3109 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3110 operands[2], operands[3]);
3111 output_asm_insn (\"cmp\\t%2, %3\", operands);
3112 if (TARGET_THUMB2)
3113 {
3114 if (need_else)
3115 output_asm_insn (\"ite\\t%d5\", operands);
3116 else
3117 output_asm_insn (\"it\\t%d5\", operands);
3118 }
3119 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3120 if (need_else)
3121 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3122 return \"\";
3123 }"
3124 [(set_attr "conds" "clob")
3125 (set (attr "length")
3126 (if_then_else (eq_attr "is_thumb" "yes")
3127 (const_int 14)
3128 (const_int 12)))
3129 (set_attr "type" "multiple")]
3130 )
3131
3132 ; Reject the frame pointer in operand[1], since reloading this after
3133 ; it has been eliminated can cause carnage.
3134 (define_insn_and_split "*minmax_arithsi_non_canon"
3135 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
3136 (minus:SI
3137 (match_operand:SI 1 "s_register_operand" "0,?Ts")
3138 (match_operator:SI 4 "minmax_operator"
3139 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
3140 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
3141 (clobber (reg:CC CC_REGNUM))]
3142 "TARGET_32BIT && !arm_eliminable_register (operands[1])
3143 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
3144 "#"
3145 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
3146 [(set (reg:CC CC_REGNUM)
3147 (compare:CC (match_dup 2) (match_dup 3)))
3148
3149 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
3150 (set (match_dup 0)
3151 (minus:SI (match_dup 1)
3152 (match_dup 2))))
3153 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
3154 (set (match_dup 0)
3155 (match_dup 6)))]
3156 {
3157 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
3158 operands[2], operands[3]);
3159 enum rtx_code rc = minmax_code (operands[4]);
3160 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
3161 operands[2], operands[3]);
3162
3163 if (mode == CCFPmode || mode == CCFPEmode)
3164 rc = reverse_condition_maybe_unordered (rc);
3165 else
3166 rc = reverse_condition (rc);
3167 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
3168 if (CONST_INT_P (operands[3]))
3169 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
3170 else
3171 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
3172 }
3173 [(set_attr "conds" "clob")
3174 (set (attr "length")
3175 (if_then_else (eq_attr "is_thumb" "yes")
3176 (const_int 14)
3177 (const_int 12)))
3178 (set_attr "type" "multiple")]
3179 )
3180
3181 (define_code_iterator SAT [smin smax])
3182 (define_code_attr SATrev [(smin "smax") (smax "smin")])
3183 (define_code_attr SATlo [(smin "1") (smax "2")])
3184 (define_code_attr SAThi [(smin "2") (smax "1")])
3185
3186 (define_insn "*satsi_<SAT:code>"
3187 [(set (match_operand:SI 0 "s_register_operand" "=r")
3188 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
3189 (match_operand:SI 1 "const_int_operand" "i"))
3190 (match_operand:SI 2 "const_int_operand" "i")))]
3191 "TARGET_32BIT && arm_arch6
3192 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3193 {
3194 int mask;
3195 bool signed_sat;
3196 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3197 &mask, &signed_sat))
3198 gcc_unreachable ();
3199
3200 operands[1] = GEN_INT (mask);
3201 if (signed_sat)
3202 return "ssat%?\t%0, %1, %3";
3203 else
3204 return "usat%?\t%0, %1, %3";
3205 }
3206 [(set_attr "predicable" "yes")
3207 (set_attr "type" "alus_imm")]
3208 )
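;; Editorial illustration (not from the original sources): a clamp such as
;;   smin (smax (x, -32768), 32767)
;; matches this pattern and is emitted as a single  ssat Rd, #16, Rn ; the
;; corresponding unsigned clamp is emitted as USAT.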
3209
3210 (define_insn "*satsi_<SAT:code>_shift"
3211 [(set (match_operand:SI 0 "s_register_operand" "=r")
3212 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
3213 [(match_operand:SI 4 "s_register_operand" "r")
3214 (match_operand:SI 5 "const_int_operand" "i")])
3215 (match_operand:SI 1 "const_int_operand" "i"))
3216 (match_operand:SI 2 "const_int_operand" "i")))]
3217 "TARGET_32BIT && arm_arch6
3218 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3219 {
3220 int mask;
3221 bool signed_sat;
3222 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3223 &mask, &signed_sat))
3224 gcc_unreachable ();
3225
3226 operands[1] = GEN_INT (mask);
3227 if (signed_sat)
3228 return "ssat%?\t%0, %1, %4%S3";
3229 else
3230 return "usat%?\t%0, %1, %4%S3";
3231 }
3232 [(set_attr "predicable" "yes")
3233 (set_attr "shift" "3")
3234 (set_attr "type" "logic_shift_reg")])
3235 \f
3236 ;; Shift and rotation insns
3237
3238 (define_expand "ashldi3"
3239 [(set (match_operand:DI 0 "s_register_operand")
3240 (ashift:DI (match_operand:DI 1 "s_register_operand")
3241 (match_operand:SI 2 "reg_or_int_operand")))]
3242 "TARGET_32BIT"
3243 "
3244 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
3245 operands[2], gen_reg_rtx (SImode),
3246 gen_reg_rtx (SImode));
3247 DONE;
3248 ")
3249
3250 (define_expand "ashlsi3"
3251 [(set (match_operand:SI 0 "s_register_operand")
3252 (ashift:SI (match_operand:SI 1 "s_register_operand")
3253 (match_operand:SI 2 "arm_rhs_operand")))]
3254 "TARGET_EITHER"
3255 "
3256 if (CONST_INT_P (operands[2])
3257 && (UINTVAL (operands[2])) > 31)
3258 {
3259 emit_insn (gen_movsi (operands[0], const0_rtx));
3260 DONE;
3261 }
3262 "
3263 )
3264
3265 (define_expand "ashrdi3"
3266 [(set (match_operand:DI 0 "s_register_operand")
3267 (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
3268 (match_operand:SI 2 "reg_or_int_operand")))]
3269 "TARGET_32BIT"
3270 "
3271 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
3272 operands[2], gen_reg_rtx (SImode),
3273 gen_reg_rtx (SImode));
3274 DONE;
3275 ")
3276
3277 (define_expand "ashrsi3"
3278 [(set (match_operand:SI 0 "s_register_operand")
3279 (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
3280 (match_operand:SI 2 "arm_rhs_operand")))]
3281 "TARGET_EITHER"
3282 "
3283 if (CONST_INT_P (operands[2])
3284 && UINTVAL (operands[2]) > 31)
3285 operands[2] = GEN_INT (31);
3286 "
3287 )
3288
3289 (define_expand "lshrdi3"
3290 [(set (match_operand:DI 0 "s_register_operand")
3291 (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
3292 (match_operand:SI 2 "reg_or_int_operand")))]
3293 "TARGET_32BIT"
3294 "
3295 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
3296 operands[2], gen_reg_rtx (SImode),
3297 gen_reg_rtx (SImode));
3298 DONE;
3299 ")
3300
3301 (define_expand "lshrsi3"
3302 [(set (match_operand:SI 0 "s_register_operand")
3303 (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
3304 (match_operand:SI 2 "arm_rhs_operand")))]
3305 "TARGET_EITHER"
3306 "
3307 if (CONST_INT_P (operands[2])
3308 && (UINTVAL (operands[2])) > 31)
3309 {
3310 emit_insn (gen_movsi (operands[0], const0_rtx));
3311 DONE;
3312 }
3313 "
3314 )
3315
3316 (define_expand "rotlsi3"
3317 [(set (match_operand:SI 0 "s_register_operand")
3318 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3319 (match_operand:SI 2 "reg_or_int_operand")))]
3320 "TARGET_32BIT"
3321 "
3322 if (CONST_INT_P (operands[2]))
3323 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3324 else
3325 {
3326 rtx reg = gen_reg_rtx (SImode);
3327 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3328 operands[2] = reg;
3329 }
3330 "
3331 )
3332
3333 (define_expand "rotrsi3"
3334 [(set (match_operand:SI 0 "s_register_operand")
3335 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3336 (match_operand:SI 2 "arm_rhs_operand")))]
3337 "TARGET_EITHER"
3338 "
3339 if (TARGET_32BIT)
3340 {
3341 if (CONST_INT_P (operands[2])
3342 && UINTVAL (operands[2]) > 31)
3343 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3344 }
3345 else /* TARGET_THUMB1 */
3346 {
3347 if (CONST_INT_P (operands[2]))
3348 operands[2] = force_reg (SImode, operands[2]);
3349 }
3350 "
3351 )
3352
3353 (define_insn "*arm_shiftsi3"
3354 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
3355 (match_operator:SI 3 "shift_operator"
3356 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
3357 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
3358 "TARGET_32BIT"
3359 "* return arm_output_shift(operands, 0);"
3360 [(set_attr "predicable" "yes")
3361 (set_attr "arch" "t2,t2,*,*")
3362 (set_attr "predicable_short_it" "yes,yes,no,no")
3363 (set_attr "length" "4")
3364 (set_attr "shift" "1")
3365 (set_attr "type" "alu_shift_reg,alu_shift_imm,alu_shift_imm,alu_shift_reg")]
3366 )
3367
3368 (define_insn "*shiftsi3_compare0"
3369 [(set (reg:CC_NOOV CC_REGNUM)
3370 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3371 [(match_operand:SI 1 "s_register_operand" "r,r")
3372 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3373 (const_int 0)))
3374 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3375 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3376 "TARGET_32BIT"
3377 "* return arm_output_shift(operands, 1);"
3378 [(set_attr "conds" "set")
3379 (set_attr "shift" "1")
3380 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
3381 )
3382
3383 (define_insn "*shiftsi3_compare0_scratch"
3384 [(set (reg:CC_NOOV CC_REGNUM)
3385 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3386 [(match_operand:SI 1 "s_register_operand" "r,r")
3387 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3388 (const_int 0)))
3389 (clobber (match_scratch:SI 0 "=r,r"))]
3390 "TARGET_32BIT"
3391 "* return arm_output_shift(operands, 1);"
3392 [(set_attr "conds" "set")
3393 (set_attr "shift" "1")
3394 (set_attr "type" "shift_imm,shift_reg")]
3395 )
3396
3397 (define_insn "*not_shiftsi"
3398 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3399 (not:SI (match_operator:SI 3 "shift_operator"
3400 [(match_operand:SI 1 "s_register_operand" "r,r")
3401 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3402 "TARGET_32BIT"
3403 "mvn%?\\t%0, %1%S3"
3404 [(set_attr "predicable" "yes")
3405 (set_attr "shift" "1")
3406 (set_attr "arch" "32,a")
3407 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3408
3409 (define_insn "*not_shiftsi_compare0"
3410 [(set (reg:CC_NOOV CC_REGNUM)
3411 (compare:CC_NOOV
3412 (not:SI (match_operator:SI 3 "shift_operator"
3413 [(match_operand:SI 1 "s_register_operand" "r,r")
3414 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3415 (const_int 0)))
3416 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3417 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3418 "TARGET_32BIT"
3419 "mvns%?\\t%0, %1%S3"
3420 [(set_attr "conds" "set")
3421 (set_attr "shift" "1")
3422 (set_attr "arch" "32,a")
3423 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3424
3425 (define_insn "*not_shiftsi_compare0_scratch"
3426 [(set (reg:CC_NOOV CC_REGNUM)
3427 (compare:CC_NOOV
3428 (not:SI (match_operator:SI 3 "shift_operator"
3429 [(match_operand:SI 1 "s_register_operand" "r,r")
3430 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3431 (const_int 0)))
3432 (clobber (match_scratch:SI 0 "=r,r"))]
3433 "TARGET_32BIT"
3434 "mvns%?\\t%0, %1%S3"
3435 [(set_attr "conds" "set")
3436 (set_attr "shift" "1")
3437 (set_attr "arch" "32,a")
3438 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3439
3440 ;; We don't really have extzv, but defining this using shifts helps
3441 ;; to reduce register pressure later on.
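;; When no bit-field extract instruction is available, extracting WIDTH
;; bits starting at BITPOS is done with a pair of shifts, roughly
;; (illustrative only; WIDTH and BITPOS stand for the constant operands):
;;	lsls	rD, rS, #(32 - WIDTH - BITPOS)
;;	lsrs	rD, rD, #(32 - WIDTH)
;; which is what the lshift/rshift values computed below set up.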
3442
3443 (define_expand "extzv"
3444 [(set (match_operand 0 "s_register_operand")
3445 (zero_extract (match_operand 1 "nonimmediate_operand")
3446 (match_operand 2 "const_int_operand")
3447 (match_operand 3 "const_int_operand")))]
3448 "TARGET_THUMB1 || arm_arch_thumb2"
3449 "
3450 {
3451 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3452 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3453
3454 if (arm_arch_thumb2)
3455 {
3456 HOST_WIDE_INT width = INTVAL (operands[2]);
3457 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3458
3459 if (unaligned_access && MEM_P (operands[1])
3460 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
3461 {
3462 rtx base_addr;
3463
3464 if (BYTES_BIG_ENDIAN)
3465 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
3466 - bitpos;
3467
3468 if (width == 32)
3469 {
3470 base_addr = adjust_address (operands[1], SImode,
3471 bitpos / BITS_PER_UNIT);
3472 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3473 }
3474 else
3475 {
3476 rtx dest = operands[0];
3477 rtx tmp = gen_reg_rtx (SImode);
3478
3479 /* We may get a paradoxical subreg here. Strip it off. */
3480 if (GET_CODE (dest) == SUBREG
3481 && GET_MODE (dest) == SImode
3482 && GET_MODE (SUBREG_REG (dest)) == HImode)
3483 dest = SUBREG_REG (dest);
3484
3485 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3486 FAIL;
3487
3488 base_addr = adjust_address (operands[1], HImode,
3489 bitpos / BITS_PER_UNIT);
3490 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
3491 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3492 }
3493 DONE;
3494 }
3495 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
3496 {
3497 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3498 operands[3]));
3499 DONE;
3500 }
3501 else
3502 FAIL;
3503 }
3504
3505 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3506 FAIL;
3507
3508 operands[3] = GEN_INT (rshift);
3509
3510 if (lshift == 0)
3511 {
3512 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3513 DONE;
3514 }
3515
3516 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
3517 operands[3], gen_reg_rtx (SImode)));
3518 DONE;
3519 }"
3520 )
3521
3522 ;; Helper for extzv, for the Thumb-1 register-shifts case.
3523
3524 (define_expand "extzv_t1"
3525 [(set (match_operand:SI 4 "s_register_operand")
3526 (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
3527 (match_operand:SI 2 "const_int_operand")))
3528 (set (match_operand:SI 0 "s_register_operand")
3529 (lshiftrt:SI (match_dup 4)
3530 (match_operand:SI 3 "const_int_operand")))]
3531 "TARGET_THUMB1"
3532 "")
3533
3534 (define_expand "extv"
3535 [(set (match_operand 0 "s_register_operand")
3536 (sign_extract (match_operand 1 "nonimmediate_operand")
3537 (match_operand 2 "const_int_operand")
3538 (match_operand 3 "const_int_operand")))]
3539 "arm_arch_thumb2"
3540 {
3541 HOST_WIDE_INT width = INTVAL (operands[2]);
3542 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3543
3544 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
3545 && (bitpos % BITS_PER_UNIT) == 0)
3546 {
3547 rtx base_addr;
3548
3549 if (BYTES_BIG_ENDIAN)
3550 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
3551
3552 if (width == 32)
3553 {
3554 base_addr = adjust_address (operands[1], SImode,
3555 bitpos / BITS_PER_UNIT);
3556 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3557 }
3558 else
3559 {
3560 rtx dest = operands[0];
3561 rtx tmp = gen_reg_rtx (SImode);
3562
3563 /* We may get a paradoxical subreg here. Strip it off. */
3564 if (GET_CODE (dest) == SUBREG
3565 && GET_MODE (dest) == SImode
3566 && GET_MODE (SUBREG_REG (dest)) == HImode)
3567 dest = SUBREG_REG (dest);
3568
3569 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3570 FAIL;
3571
3572 base_addr = adjust_address (operands[1], HImode,
3573 bitpos / BITS_PER_UNIT);
3574 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
3575 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3576 }
3577
3578 DONE;
3579 }
3580 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3581 FAIL;
3582 else if (GET_MODE (operands[0]) == SImode
3583 && GET_MODE (operands[1]) == SImode)
3584 {
3585 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
3586 operands[3]));
3587 DONE;
3588 }
3589
3590 FAIL;
3591 })
3592
3593 ; Helper to expand register forms of extv with the proper modes.
3594
3595 (define_expand "extv_regsi"
3596 [(set (match_operand:SI 0 "s_register_operand")
3597 (sign_extract:SI (match_operand:SI 1 "s_register_operand")
3598 (match_operand 2 "const_int_operand")
3599 (match_operand 3 "const_int_operand")))]
3600 ""
3601 {
3602 })
3603
3604 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
3605
3606 (define_insn "unaligned_loaddi"
3607 [(set (match_operand:DI 0 "s_register_operand" "=r")
3608 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
3609 UNSPEC_UNALIGNED_LOAD))]
3610 "TARGET_32BIT && TARGET_LDRD"
3611 "*
3612 return output_move_double (operands, true, NULL);
3613 "
3614 [(set_attr "length" "8")
3615 (set_attr "type" "load_8")])
3616
3617 (define_insn "unaligned_loadsi"
3618 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
3619 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
3620 UNSPEC_UNALIGNED_LOAD))]
3621 "unaligned_access"
3622 "@
3623 ldr\t%0, %1\t@ unaligned
3624 ldr%?\t%0, %1\t@ unaligned
3625 ldr%?\t%0, %1\t@ unaligned"
3626 [(set_attr "arch" "t1,t2,32")
3627 (set_attr "length" "2,2,4")
3628 (set_attr "predicable" "no,yes,yes")
3629 (set_attr "predicable_short_it" "no,yes,no")
3630 (set_attr "type" "load_4")])
3631
3632 ;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
3633 ;; address (there's no immediate format). That's tricky to support
3634 ;; here and we don't really need this pattern for that case, so only
3635 ;; enable for 32-bit ISAs.
3636 (define_insn "unaligned_loadhis"
3637 [(set (match_operand:SI 0 "s_register_operand" "=r")
3638 (sign_extend:SI
3639 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
3640 UNSPEC_UNALIGNED_LOAD)))]
3641 "unaligned_access && TARGET_32BIT"
3642 "ldrsh%?\t%0, %1\t@ unaligned"
3643 [(set_attr "predicable" "yes")
3644 (set_attr "type" "load_byte")])
3645
3646 (define_insn "unaligned_loadhiu"
3647 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
3648 (zero_extend:SI
3649 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
3650 UNSPEC_UNALIGNED_LOAD)))]
3651 "unaligned_access"
3652 "@
3653 ldrh\t%0, %1\t@ unaligned
3654 ldrh%?\t%0, %1\t@ unaligned
3655 ldrh%?\t%0, %1\t@ unaligned"
3656 [(set_attr "arch" "t1,t2,32")
3657 (set_attr "length" "2,2,4")
3658 (set_attr "predicable" "no,yes,yes")
3659 (set_attr "predicable_short_it" "no,yes,no")
3660 (set_attr "type" "load_byte")])
3661
3662 (define_insn "unaligned_storedi"
3663 [(set (match_operand:DI 0 "memory_operand" "=m")
3664 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
3665 UNSPEC_UNALIGNED_STORE))]
3666 "TARGET_32BIT && TARGET_LDRD"
3667 "*
3668 return output_move_double (operands, true, NULL);
3669 "
3670 [(set_attr "length" "8")
3671 (set_attr "type" "store_8")])
3672
3673 (define_insn "unaligned_storesi"
3674 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
3675 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
3676 UNSPEC_UNALIGNED_STORE))]
3677 "unaligned_access"
3678 "@
3679 str\t%1, %0\t@ unaligned
3680 str%?\t%1, %0\t@ unaligned
3681 str%?\t%1, %0\t@ unaligned"
3682 [(set_attr "arch" "t1,t2,32")
3683 (set_attr "length" "2,2,4")
3684 (set_attr "predicable" "no,yes,yes")
3685 (set_attr "predicable_short_it" "no,yes,no")
3686 (set_attr "type" "store_4")])
3687
3688 (define_insn "unaligned_storehi"
3689 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
3690 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
3691 UNSPEC_UNALIGNED_STORE))]
3692 "unaligned_access"
3693 "@
3694 strh\t%1, %0\t@ unaligned
3695 strh%?\t%1, %0\t@ unaligned
3696 strh%?\t%1, %0\t@ unaligned"
3697 [(set_attr "arch" "t1,t2,32")
3698 (set_attr "length" "2,2,4")
3699 (set_attr "predicable" "no,yes,yes")
3700 (set_attr "predicable_short_it" "no,yes,no")
3701 (set_attr "type" "store_4")])
3702
3703
3704 (define_insn "*extv_reg"
3705 [(set (match_operand:SI 0 "s_register_operand" "=r")
3706 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3707 (match_operand:SI 2 "const_int_operand" "n")
3708 (match_operand:SI 3 "const_int_operand" "n")))]
3709 "arm_arch_thumb2
3710 && IN_RANGE (INTVAL (operands[3]), 0, 31)
3711 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
3712 "sbfx%?\t%0, %1, %3, %2"
3713 [(set_attr "length" "4")
3714 (set_attr "predicable" "yes")
3715 (set_attr "type" "bfm")]
3716 )
3717
3718 (define_insn "extzv_t2"
3719 [(set (match_operand:SI 0 "s_register_operand" "=r")
3720 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3721 (match_operand:SI 2 "const_int_operand" "n")
3722 (match_operand:SI 3 "const_int_operand" "n")))]
3723 "arm_arch_thumb2
3724 && IN_RANGE (INTVAL (operands[3]), 0, 31)
3725 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
3726 "ubfx%?\t%0, %1, %3, %2"
3727 [(set_attr "length" "4")
3728 (set_attr "predicable" "yes")
3729 (set_attr "type" "bfm")]
3730 )
3731
3732
3733 ;; Division instructions
3734 (define_insn "divsi3"
3735 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3736 (div:SI (match_operand:SI 1 "s_register_operand" "r,r")
3737 (match_operand:SI 2 "s_register_operand" "r,r")))]
3738 "TARGET_IDIV"
3739 "@
3740 sdiv%?\t%0, %1, %2
3741 sdiv\t%0, %1, %2"
3742 [(set_attr "arch" "32,v8mb")
3743 (set_attr "predicable" "yes")
3744 (set_attr "type" "sdiv")]
3745 )
3746
3747 (define_insn "udivsi3"
3748 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3749 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
3750 (match_operand:SI 2 "s_register_operand" "r,r")))]
3751 "TARGET_IDIV"
3752 "@
3753 udiv%?\t%0, %1, %2
3754 udiv\t%0, %1, %2"
3755 [(set_attr "arch" "32,v8mb")
3756 (set_attr "predicable" "yes")
3757 (set_attr "type" "udiv")]
3758 )
3759
3760 \f
3761 ;; Unary arithmetic insns
3762
3763 (define_expand "negvsi3"
3764 [(match_operand:SI 0 "register_operand")
3765 (match_operand:SI 1 "register_operand")
3766 (match_operand 2 "")]
3767 "TARGET_32BIT"
3768 {
3769 emit_insn (gen_subsi3_compare (operands[0], const0_rtx, operands[1]));
3770 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
3771
3772 DONE;
3773 })
3774
3775 (define_expand "negvdi3"
3776 [(match_operand:DI 0 "s_register_operand")
3777 (match_operand:DI 1 "s_register_operand")
3778 (match_operand 2 "")]
3779 "TARGET_ARM"
3780 {
3781 emit_insn (gen_negdi2_compare (operands[0], operands[1]));
3782 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
3783
3784 DONE;
3785 })
3786
3787
3788 (define_insn "negdi2_compare"
3789 [(set (reg:CC CC_REGNUM)
3790 (compare:CC
3791 (const_int 0)
3792 (match_operand:DI 1 "register_operand" "r,r")))
3793 (set (match_operand:DI 0 "register_operand" "=&r,&r")
3794 (minus:DI (const_int 0) (match_dup 1)))]
3795 "TARGET_ARM"
3796 "@
3797 rsbs\\t%Q0, %Q1, #0;rscs\\t%R0, %R1, #0
3798 rsbs\\t%Q0, %Q1, #0;sbcs\\t%R0, %R1, %R1, lsl #1"
3799 [(set_attr "conds" "set")
3800 (set_attr "arch" "a,t2")
3801 (set_attr "length" "8")
3802 (set_attr "type" "multiple")]
3803 )
3804
3805 (define_expand "negsi2"
3806 [(set (match_operand:SI 0 "s_register_operand")
3807 (neg:SI (match_operand:SI 1 "s_register_operand")))]
3808 "TARGET_EITHER"
3809 ""
3810 )
3811
3812 (define_insn "*arm_negsi2"
3813 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
3814 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
3815 "TARGET_32BIT"
3816 "rsb%?\\t%0, %1, #0"
3817 [(set_attr "predicable" "yes")
3818 (set_attr "predicable_short_it" "yes,no")
3819 (set_attr "arch" "t2,*")
3820 (set_attr "length" "4")
3821 (set_attr "type" "alu_imm")]
3822 )
3823
3824 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
3825 ;; rather than (0 cmp reg). This gives the same results for unsigned
3826 ;; and equality compares which is what we mostly need here.
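;; (The subtraction actually performed is the same in both forms:
;; ~X - ~0 == ~X + 1 == 0 - X, so the C and Z results are unchanged.)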
3827 (define_insn "negsi2_0compare"
3828 [(set (reg:CC_RSB CC_REGNUM)
3829 (compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r"))
3830 (const_int -1)))
3831 (set (match_operand:SI 0 "s_register_operand" "=l,r")
3832 (neg:SI (match_dup 1)))]
3833 "TARGET_32BIT"
3834 "@
3835 negs\\t%0, %1
3836 rsbs\\t%0, %1, #0"
3837 [(set_attr "conds" "set")
3838 (set_attr "arch" "t2,*")
3839 (set_attr "length" "2,*")
3840 (set_attr "type" "alus_imm")]
3841 )
3842
3843 (define_insn "negsi2_carryin"
3844 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3845 (minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r"))
3846 (match_operand:SI 2 "arm_borrow_operation" "")))]
3847 "TARGET_32BIT"
3848 "@
3849 rsc\\t%0, %1, #0
3850 sbc\\t%0, %1, %1, lsl #1"
3851 [(set_attr "conds" "use")
3852 (set_attr "arch" "a,t2")
3853 (set_attr "type" "adc_imm,adc_reg")]
3854 )
3855
3856 (define_expand "negsf2"
3857 [(set (match_operand:SF 0 "s_register_operand")
3858 (neg:SF (match_operand:SF 1 "s_register_operand")))]
3859 "TARGET_32BIT && TARGET_HARD_FLOAT"
3860 ""
3861 )
3862
3863 (define_expand "negdf2"
3864 [(set (match_operand:DF 0 "s_register_operand")
3865 (neg:DF (match_operand:DF 1 "s_register_operand")))]
3866 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
3867 "")
3868
3869 (define_insn_and_split "*zextendsidi_negsi"
3870 [(set (match_operand:DI 0 "s_register_operand" "=r")
3871 (zero_extend:DI (neg:SI (match_operand:SI 1 "s_register_operand" "r"))))]
3872 "TARGET_32BIT"
3873 "#"
3874 ""
3875 [(set (match_dup 2)
3876 (neg:SI (match_dup 1)))
3877 (set (match_dup 3)
3878 (const_int 0))]
3879 {
3880 operands[2] = gen_lowpart (SImode, operands[0]);
3881 operands[3] = gen_highpart (SImode, operands[0]);
3882 }
3883 [(set_attr "length" "8")
3884 (set_attr "type" "multiple")]
3885 )
3886
3887 ;; Negate an extended 32-bit value.
3888 (define_insn_and_split "*negdi_extendsidi"
3889 [(set (match_operand:DI 0 "s_register_operand" "=l,r")
3890 (neg:DI (sign_extend:DI
3891 (match_operand:SI 1 "s_register_operand" "l,r"))))
3892 (clobber (reg:CC CC_REGNUM))]
3893 "TARGET_32BIT"
3894 "#"
3895 "&& reload_completed"
3896 [(const_int 0)]
3897 {
3898 rtx low = gen_lowpart (SImode, operands[0]);
3899 rtx high = gen_highpart (SImode, operands[0]);
3900
3901 if (reg_overlap_mentioned_p (low, operands[1]))
3902 {
3903 /* Input overlaps the low word of the output. Use:
3904 asr Rhi, Rin, #31
3905 rsbs Rlo, Rin, #0
3906 rsc Rhi, Rhi, #0 (thumb2: sbc Rhi, Rhi, Rhi, lsl #1). */
3907 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
3908
3909 emit_insn (gen_rtx_SET (high,
3910 gen_rtx_ASHIFTRT (SImode, operands[1],
3911 GEN_INT (31))));
3912
3913 emit_insn (gen_subsi3_compare (low, const0_rtx, operands[1]));
3914 if (TARGET_ARM)
3915 emit_insn (gen_rtx_SET (high,
3916 gen_rtx_MINUS (SImode,
3917 gen_rtx_MINUS (SImode,
3918 const0_rtx,
3919 high),
3920 gen_rtx_LTU (SImode,
3921 cc_reg,
3922 const0_rtx))));
3923 else
3924 {
3925 rtx two_x = gen_rtx_ASHIFT (SImode, high, GEN_INT (1));
3926 emit_insn (gen_rtx_SET (high,
3927 gen_rtx_MINUS (SImode,
3928 gen_rtx_MINUS (SImode,
3929 high,
3930 two_x),
3931 gen_rtx_LTU (SImode,
3932 cc_reg,
3933 const0_rtx))));
3934 }
3935 }
3936 else
3937 {
3938 /* No overlap, or overlap on high word. Use:
3939 rsb Rlo, Rin, #0
3940 bic Rhi, Rlo, Rin
3941 asr Rhi, Rhi, #31
3942 Flags not needed for this sequence. */
3943 emit_insn (gen_rtx_SET (low, gen_rtx_NEG (SImode, operands[1])));
3944 emit_insn (gen_rtx_SET (high,
3945 gen_rtx_AND (SImode,
3946 gen_rtx_NOT (SImode, operands[1]),
3947 low)));
3948 emit_insn (gen_rtx_SET (high,
3949 gen_rtx_ASHIFTRT (SImode, high,
3950 GEN_INT (31))));
3951 }
3952 DONE;
3953 }
3954 [(set_attr "length" "12")
3955 (set_attr "arch" "t2,*")
3956 (set_attr "type" "multiple")]
3957 )
3958
3959 ;; abssi2 doesn't really clobber the condition codes if a different register
3960 ;; is being set. To keep things simple, assume during rtl manipulations that
3961 ;; it does, but tell the final scan operator the truth. Similarly for
3962 ;; (neg (abs...))
3963
3964 (define_expand "abssi2"
3965 [(parallel
3966 [(set (match_operand:SI 0 "s_register_operand")
3967 (abs:SI (match_operand:SI 1 "s_register_operand")))
3968 (clobber (match_dup 2))])]
3969 "TARGET_EITHER"
3970 "
3971 if (TARGET_THUMB1)
3972 operands[2] = gen_rtx_SCRATCH (SImode);
3973 else
3974 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3975 ")
3976
3977 (define_insn_and_split "*arm_abssi2"
3978 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3979 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3980 (clobber (reg:CC CC_REGNUM))]
3981 "TARGET_ARM"
3982 "#"
3983 "&& reload_completed"
3984 [(const_int 0)]
3985 {
3986 /* if (which_alternative == 0) */
3987 if (REGNO (operands[0]) == REGNO (operands[1]))
3988 {
3989 /* Emit the pattern:
3990 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3991 [(set (reg:CC CC_REGNUM)
3992 (compare:CC (match_dup 0) (const_int 0)))
3993 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
3994 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
3995 */
3996 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
3997 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
3998 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
3999 (gen_rtx_LT (SImode,
4000 gen_rtx_REG (CCmode, CC_REGNUM),
4001 const0_rtx)),
4002 (gen_rtx_SET (operands[0],
4003 (gen_rtx_MINUS (SImode,
4004 const0_rtx,
4005 operands[1]))))));
4006 DONE;
4007 }
4008 else
4009 {
4010 /* Emit the pattern:
4011 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
4012 [(set (match_dup 0)
4013 (xor:SI (match_dup 1)
4014 (ashiftrt:SI (match_dup 1) (const_int 31))))
4015 (set (match_dup 0)
4016 (minus:SI (match_dup 0)
4017 (ashiftrt:SI (match_dup 1) (const_int 31))))]
4018 */
4019 emit_insn (gen_rtx_SET (operands[0],
4020 gen_rtx_XOR (SImode,
4021 gen_rtx_ASHIFTRT (SImode,
4022 operands[1],
4023 GEN_INT (31)),
4024 operands[1])));
4025 emit_insn (gen_rtx_SET (operands[0],
4026 gen_rtx_MINUS (SImode,
4027 operands[0],
4028 gen_rtx_ASHIFTRT (SImode,
4029 operands[1],
4030 GEN_INT (31)))));
4031 DONE;
4032 }
4033 }
4034 [(set_attr "conds" "clob,*")
4035 (set_attr "shift" "1")
4036 (set_attr "predicable" "no,yes")
4037 (set_attr "length" "8")
4038 (set_attr "type" "multiple")]
4039 )
4040
4041 (define_insn_and_split "*arm_neg_abssi2"
4042 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4043 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
4044 (clobber (reg:CC CC_REGNUM))]
4045 "TARGET_ARM"
4046 "#"
4047 "&& reload_completed"
4048 [(const_int 0)]
4049 {
4050 /* if (which_alternative == 0) */
4051 if (REGNO (operands[0]) == REGNO (operands[1]))
4052 {
4053 /* Emit the pattern:
4054 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
4055 */
4056 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
4057 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4058 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4059 gen_rtx_GT (SImode,
4060 gen_rtx_REG (CCmode, CC_REGNUM),
4061 const0_rtx),
4062 gen_rtx_SET (operands[0],
4063 (gen_rtx_MINUS (SImode,
4064 const0_rtx,
4065 operands[1])))));
4066 }
4067 else
4068 {
4069 /* Emit the pattern:
4070 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
4071 */
4072 emit_insn (gen_rtx_SET (operands[0],
4073 gen_rtx_XOR (SImode,
4074 gen_rtx_ASHIFTRT (SImode,
4075 operands[1],
4076 GEN_INT (31)),
4077 operands[1])));
4078 emit_insn (gen_rtx_SET (operands[0],
4079 gen_rtx_MINUS (SImode,
4080 gen_rtx_ASHIFTRT (SImode,
4081 operands[1],
4082 GEN_INT (31)),
4083 operands[0])));
4084 }
4085 DONE;
4086 }
4087 [(set_attr "conds" "clob,*")
4088 (set_attr "shift" "1")
4089 (set_attr "predicable" "no,yes")
4090 (set_attr "length" "8")
4091 (set_attr "type" "multiple")]
4092 )
4093
4094 (define_expand "abssf2"
4095 [(set (match_operand:SF 0 "s_register_operand")
4096 (abs:SF (match_operand:SF 1 "s_register_operand")))]
4097 "TARGET_32BIT && TARGET_HARD_FLOAT"
4098 "")
4099
4100 (define_expand "absdf2"
4101 [(set (match_operand:DF 0 "s_register_operand")
4102 (abs:DF (match_operand:DF 1 "s_register_operand")))]
4103 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4104 "")
4105
4106 (define_expand "sqrtsf2"
4107 [(set (match_operand:SF 0 "s_register_operand")
4108 (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
4109 "TARGET_32BIT && TARGET_HARD_FLOAT"
4110 "")
4111
4112 (define_expand "sqrtdf2"
4113 [(set (match_operand:DF 0 "s_register_operand")
4114 (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
4115 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4116 "")
4117
4118 (define_expand "one_cmplsi2"
4119 [(set (match_operand:SI 0 "s_register_operand")
4120 (not:SI (match_operand:SI 1 "s_register_operand")))]
4121 "TARGET_EITHER"
4122 ""
4123 )
4124
4125 (define_insn "*arm_one_cmplsi2"
4126 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4127 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4128 "TARGET_32BIT"
4129 "mvn%?\\t%0, %1"
4130 [(set_attr "predicable" "yes")
4131 (set_attr "predicable_short_it" "yes,no")
4132 (set_attr "arch" "t2,*")
4133 (set_attr "length" "4")
4134 (set_attr "type" "mvn_reg")]
4135 )
4136
4137 (define_insn "*notsi_compare0"
4138 [(set (reg:CC_NOOV CC_REGNUM)
4139 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4140 (const_int 0)))
4141 (set (match_operand:SI 0 "s_register_operand" "=r")
4142 (not:SI (match_dup 1)))]
4143 "TARGET_32BIT"
4144 "mvns%?\\t%0, %1"
4145 [(set_attr "conds" "set")
4146 (set_attr "type" "mvn_reg")]
4147 )
4148
4149 (define_insn "*notsi_compare0_scratch"
4150 [(set (reg:CC_NOOV CC_REGNUM)
4151 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4152 (const_int 0)))
4153 (clobber (match_scratch:SI 0 "=r"))]
4154 "TARGET_32BIT"
4155 "mvns%?\\t%0, %1"
4156 [(set_attr "conds" "set")
4157 (set_attr "type" "mvn_reg")]
4158 )
4159 \f
4160 ;; Fixed <--> Floating conversion insns
4161
4162 (define_expand "floatsihf2"
4163 [(set (match_operand:HF 0 "general_operand")
4164 (float:HF (match_operand:SI 1 "general_operand")))]
4165 "TARGET_EITHER"
4166 "
4167 {
4168 rtx op1 = gen_reg_rtx (SFmode);
4169 expand_float (op1, operands[1], 0);
4170 op1 = convert_to_mode (HFmode, op1, 0);
4171 emit_move_insn (operands[0], op1);
4172 DONE;
4173 }"
4174 )
4175
4176 (define_expand "floatdihf2"
4177 [(set (match_operand:HF 0 "general_operand")
4178 (float:HF (match_operand:DI 1 "general_operand")))]
4179 "TARGET_EITHER"
4180 "
4181 {
4182 rtx op1 = gen_reg_rtx (SFmode);
4183 expand_float (op1, operands[1], 0);
4184 op1 = convert_to_mode (HFmode, op1, 0);
4185 emit_move_insn (operands[0], op1);
4186 DONE;
4187 }"
4188 )
4189
4190 (define_expand "floatsisf2"
4191 [(set (match_operand:SF 0 "s_register_operand")
4192 (float:SF (match_operand:SI 1 "s_register_operand")))]
4193 "TARGET_32BIT && TARGET_HARD_FLOAT"
4194 "
4195 ")
4196
4197 (define_expand "floatsidf2"
4198 [(set (match_operand:DF 0 "s_register_operand")
4199 (float:DF (match_operand:SI 1 "s_register_operand")))]
4200 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4201 "
4202 ")
4203
4204 (define_expand "fix_trunchfsi2"
4205 [(set (match_operand:SI 0 "general_operand")
4206 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
4207 "TARGET_EITHER"
4208 "
4209 {
4210 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4211 expand_fix (operands[0], op1, 0);
4212 DONE;
4213 }"
4214 )
4215
4216 (define_expand "fix_trunchfdi2"
4217 [(set (match_operand:DI 0 "general_operand")
4218 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
4219 "TARGET_EITHER"
4220 "
4221 {
4222 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4223 expand_fix (operands[0], op1, 0);
4224 DONE;
4225 }"
4226 )
4227
4228 (define_expand "fix_truncsfsi2"
4229 [(set (match_operand:SI 0 "s_register_operand")
4230 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
4231 "TARGET_32BIT && TARGET_HARD_FLOAT"
4232 "
4233 ")
4234
4235 (define_expand "fix_truncdfsi2"
4236 [(set (match_operand:SI 0 "s_register_operand")
4237 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
4238 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4239 "
4240 ")
4241
4242 ;; Truncation insns
4243
4244 (define_expand "truncdfsf2"
4245 [(set (match_operand:SF 0 "s_register_operand")
4246 (float_truncate:SF
4247 (match_operand:DF 1 "s_register_operand")))]
4248 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4249 ""
4250 )
4251
4252 ;; DFmode to HFmode conversions on targets without a single-step hardware
4253 ;; instruction for it would have to go through SFmode. This is dangerous
4254 ;; as it introduces double rounding.
4255 ;;
4256 ;; Disable this pattern unless we are in an unsafe math mode, or we have
4257 ;; a single-step instruction.
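;; (Double rounding can occur because SFmode keeps more significand bits
;; than HFmode but fewer than DFmode: the DF->SF step may round to a value
;; lying exactly half-way between two HFmode numbers, and the following
;; SF->HF rounding then no longer knows which side of that point the
;; original DFmode value was on.)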
4258
4259 (define_expand "truncdfhf2"
4260 [(set (match_operand:HF 0 "s_register_operand")
4261 (float_truncate:HF
4262 (match_operand:DF 1 "s_register_operand")))]
4263 "(TARGET_EITHER && flag_unsafe_math_optimizations)
4264 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
4265 {
4266 /* We don't have a direct instruction for this, so we must be in
4267 an unsafe math mode; go via SFmode. */
4268
4269 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4270 {
4271 rtx op1;
4272 op1 = convert_to_mode (SFmode, operands[1], 0);
4273 op1 = convert_to_mode (HFmode, op1, 0);
4274 emit_move_insn (operands[0], op1);
4275 DONE;
4276 }
4277 /* Otherwise, we will pick this up as a single instruction with
4278 no intermediate rounding. */
4279 }
4280 )
4281 \f
4282 ;; Zero and sign extension instructions.
4283
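;; The DImode extension expanders below operate on the two halves of the
;; destination separately: the low word receives the (possibly extended)
;; source, and the high word is then either cleared (zero extension) or
;; filled with copies of the sign bit by an arithmetic right shift of the
;; low word by 31 (sign extension).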
4284 (define_expand "zero_extend<mode>di2"
4285 [(set (match_operand:DI 0 "s_register_operand" "")
4286 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
4287 "TARGET_32BIT <qhs_zextenddi_cond>"
4288 {
4289 rtx res_lo, res_hi, op0_lo, op0_hi;
4290 res_lo = gen_lowpart (SImode, operands[0]);
4291 res_hi = gen_highpart (SImode, operands[0]);
4292 if (can_create_pseudo_p ())
4293 {
4294 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4295 op0_hi = gen_reg_rtx (SImode);
4296 }
4297 else
4298 {
4299 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4300 op0_hi = res_hi;
4301 }
4302 if (<MODE>mode != SImode)
4303 emit_insn (gen_rtx_SET (op0_lo,
4304 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4305 emit_insn (gen_movsi (op0_hi, const0_rtx));
4306 if (res_lo != op0_lo)
4307 emit_move_insn (res_lo, op0_lo);
4308 if (res_hi != op0_hi)
4309 emit_move_insn (res_hi, op0_hi);
4310 DONE;
4311 }
4312 )
4313
4314 (define_expand "extend<mode>di2"
4315 [(set (match_operand:DI 0 "s_register_operand" "")
4316 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
4317 "TARGET_32BIT <qhs_sextenddi_cond>"
4318 {
4319 rtx res_lo, res_hi, op0_lo, op0_hi;
4320 res_lo = gen_lowpart (SImode, operands[0]);
4321 res_hi = gen_highpart (SImode, operands[0]);
4322 if (can_create_pseudo_p ())
4323 {
4324 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4325 op0_hi = gen_reg_rtx (SImode);
4326 }
4327 else
4328 {
4329 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4330 op0_hi = res_hi;
4331 }
4332 if (<MODE>mode != SImode)
4333 emit_insn (gen_rtx_SET (op0_lo,
4334 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4335 emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
4336 if (res_lo != op0_lo)
4337 emit_move_insn (res_lo, op0_lo);
4338 if (res_hi != op0_hi)
4339 emit_move_insn (res_hi, op0_hi);
4340 DONE;
4341 }
4342 )
4343
4344 ;; Splits for all extensions to DImode
4345 (define_split
4346 [(set (match_operand:DI 0 "s_register_operand" "")
4347 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4348 "TARGET_32BIT"
4349 [(set (match_dup 0) (match_dup 1))]
4350 {
4351 rtx lo_part = gen_lowpart (SImode, operands[0]);
4352 machine_mode src_mode = GET_MODE (operands[1]);
4353
4354 if (src_mode == SImode)
4355 emit_move_insn (lo_part, operands[1]);
4356 else
4357 emit_insn (gen_rtx_SET (lo_part,
4358 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4359 operands[0] = gen_highpart (SImode, operands[0]);
4360 operands[1] = const0_rtx;
4361 })
4362
4363 (define_split
4364 [(set (match_operand:DI 0 "s_register_operand" "")
4365 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4366 "TARGET_32BIT"
4367 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4368 {
4369 rtx lo_part = gen_lowpart (SImode, operands[0]);
4370 machine_mode src_mode = GET_MODE (operands[1]);
4371
4372 if (src_mode == SImode)
4373 emit_move_insn (lo_part, operands[1]);
4374 else
4375 emit_insn (gen_rtx_SET (lo_part,
4376 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4377 operands[1] = lo_part;
4378 operands[0] = gen_highpart (SImode, operands[0]);
4379 })
4380
4381 (define_expand "zero_extendhisi2"
4382 [(set (match_operand:SI 0 "s_register_operand")
4383 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4384 "TARGET_EITHER"
4385 {
4386 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4387 {
4388 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4389 DONE;
4390 }
4391 if (!arm_arch6 && !MEM_P (operands[1]))
4392 {
4393 rtx t = gen_lowpart (SImode, operands[1]);
4394 rtx tmp = gen_reg_rtx (SImode);
4395 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4396 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
4397 DONE;
4398 }
4399 })
4400
4401 (define_split
4402 [(set (match_operand:SI 0 "s_register_operand" "")
4403 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
4404 "!TARGET_THUMB2 && !arm_arch6"
4405 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4406 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4407 {
4408 operands[2] = gen_lowpart (SImode, operands[1]);
4409 })
4410
4411 (define_insn "*arm_zero_extendhisi2"
4412 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4413 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4414 "TARGET_ARM && arm_arch4 && !arm_arch6"
4415 "@
4416 #
4417 ldrh%?\\t%0, %1"
4418 [(set_attr "type" "alu_shift_reg,load_byte")
4419 (set_attr "predicable" "yes")]
4420 )
4421
4422 (define_insn "*arm_zero_extendhisi2_v6"
4423 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4424 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4425 "TARGET_ARM && arm_arch6"
4426 "@
4427 uxth%?\\t%0, %1
4428 ldrh%?\\t%0, %1"
4429 [(set_attr "predicable" "yes")
4430 (set_attr "type" "extend,load_byte")]
4431 )
4432
4433 (define_insn "*arm_zero_extendhisi2addsi"
4434 [(set (match_operand:SI 0 "s_register_operand" "=r")
4435 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4436 (match_operand:SI 2 "s_register_operand" "r")))]
4437 "TARGET_INT_SIMD"
4438 "uxtah%?\\t%0, %2, %1"
4439 [(set_attr "type" "alu_shift_reg")
4440 (set_attr "predicable" "yes")]
4441 )
4442
4443 (define_expand "zero_extendqisi2"
4444 [(set (match_operand:SI 0 "s_register_operand")
4445 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
4446 "TARGET_EITHER"
4447 {
4448 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
4449 {
4450 emit_insn (gen_andsi3 (operands[0],
4451 gen_lowpart (SImode, operands[1]),
4452 GEN_INT (255)));
4453 DONE;
4454 }
4455 if (!arm_arch6 && !MEM_P (operands[1]))
4456 {
4457 rtx t = gen_lowpart (SImode, operands[1]);
4458 rtx tmp = gen_reg_rtx (SImode);
4459 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4460 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
4461 DONE;
4462 }
4463 })
4464
4465 (define_split
4466 [(set (match_operand:SI 0 "s_register_operand" "")
4467 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
4468 "!arm_arch6"
4469 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4470 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4471 {
4472 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4473 if (TARGET_ARM)
4474 {
4475 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
4476 DONE;
4477 }
4478 })
4479
4480 (define_insn "*arm_zero_extendqisi2"
4481 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4482 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4483 "TARGET_ARM && !arm_arch6"
4484 "@
4485 #
4486 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4487 [(set_attr "length" "8,4")
4488 (set_attr "type" "alu_shift_reg,load_byte")
4489 (set_attr "predicable" "yes")]
4490 )
4491
4492 (define_insn "*arm_zero_extendqisi2_v6"
4493 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4494 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
4495 "TARGET_ARM && arm_arch6"
4496 "@
4497 uxtb%?\\t%0, %1
4498 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4499 [(set_attr "type" "extend,load_byte")
4500 (set_attr "predicable" "yes")]
4501 )
4502
4503 (define_insn "*arm_zero_extendqisi2addsi"
4504 [(set (match_operand:SI 0 "s_register_operand" "=r")
4505 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4506 (match_operand:SI 2 "s_register_operand" "r")))]
4507 "TARGET_INT_SIMD"
4508 "uxtab%?\\t%0, %2, %1"
4509 [(set_attr "predicable" "yes")
4510 (set_attr "type" "alu_shift_reg")]
4511 )
4512
4513 (define_split
4514 [(set (match_operand:SI 0 "s_register_operand" "")
4515 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4516 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4517 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
4518 [(set (match_dup 2) (match_dup 1))
4519 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4520 ""
4521 )
4522
4523 (define_split
4524 [(set (match_operand:SI 0 "s_register_operand" "")
4525 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4526 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4527 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
4528 [(set (match_dup 2) (match_dup 1))
4529 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4530 ""
4531 )
4532
4533
4534 (define_split
4535 [(set (match_operand:SI 0 "s_register_operand" "")
4536 (IOR_XOR:SI (and:SI (ashift:SI
4537 (match_operand:SI 1 "s_register_operand" "")
4538 (match_operand:SI 2 "const_int_operand" ""))
4539 (match_operand:SI 3 "const_int_operand" ""))
4540 (zero_extend:SI
4541 (match_operator 5 "subreg_lowpart_operator"
4542 [(match_operand:SI 4 "s_register_operand" "")]))))]
4543 "TARGET_32BIT
4544 && (UINTVAL (operands[3])
4545 == (GET_MODE_MASK (GET_MODE (operands[5]))
4546 & (GET_MODE_MASK (GET_MODE (operands[5]))
4547 << (INTVAL (operands[2])))))"
4548 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
4549 (match_dup 4)))
4550 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4551 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4552 )
4553
4554 (define_insn "*compareqi_eq0"
4555 [(set (reg:CC_Z CC_REGNUM)
4556 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4557 (const_int 0)))]
4558 "TARGET_32BIT"
4559 "tst%?\\t%0, #255"
4560 [(set_attr "conds" "set")
4561 (set_attr "predicable" "yes")
4562 (set_attr "type" "logic_imm")]
4563 )
4564
4565 (define_expand "extendhisi2"
4566 [(set (match_operand:SI 0 "s_register_operand")
4567 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4568 "TARGET_EITHER"
4569 {
4570 if (TARGET_THUMB1)
4571 {
4572 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4573 DONE;
4574 }
4575 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4576 {
4577 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4578 DONE;
4579 }
4580
4581 if (!arm_arch6 && !MEM_P (operands[1]))
4582 {
4583 rtx t = gen_lowpart (SImode, operands[1]);
4584 rtx tmp = gen_reg_rtx (SImode);
4585 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4586 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
4587 DONE;
4588 }
4589 })
4590
4591 (define_split
4592 [(parallel
4593 [(set (match_operand:SI 0 "register_operand" "")
4594 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4595 (clobber (match_scratch:SI 2 ""))])]
4596 "!arm_arch6"
4597 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4598 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4599 {
4600 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4601 })
4602
4603 ;; This pattern will only be used when ldrsh is not available
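;; It assembles the result from two separate byte loads; for the
;; little-endian case the generated code is roughly (illustrative only,
;; register names are placeholders):
;;	ldrb	rLO, [addr]		@ low byte, zero-extended
;;	ldrb	rHI, [addr, #1]		@ high byte, zero-extended
;;	mov	rT, rHI, lsl #24
;;	orr	rD, rLO, rT, asr #16	@ sign of the high byte propagated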
4604 (define_expand "extendhisi2_mem"
4605 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4606 (set (match_dup 3)
4607 (zero_extend:SI (match_dup 7)))
4608 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4609 (set (match_operand:SI 0 "" "")
4610 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4611 "TARGET_ARM"
4612 "
4613 {
4614 rtx mem1, mem2;
4615 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4616
4617 mem1 = change_address (operands[1], QImode, addr);
4618 mem2 = change_address (operands[1], QImode,
4619 plus_constant (Pmode, addr, 1));
4620 operands[0] = gen_lowpart (SImode, operands[0]);
4621 operands[1] = mem1;
4622 operands[2] = gen_reg_rtx (SImode);
4623 operands[3] = gen_reg_rtx (SImode);
4624 operands[6] = gen_reg_rtx (SImode);
4625 operands[7] = mem2;
4626
4627 if (BYTES_BIG_ENDIAN)
4628 {
4629 operands[4] = operands[2];
4630 operands[5] = operands[3];
4631 }
4632 else
4633 {
4634 operands[4] = operands[3];
4635 operands[5] = operands[2];
4636 }
4637 }"
4638 )
4639
4640 (define_split
4641 [(set (match_operand:SI 0 "register_operand" "")
4642 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4643 "!arm_arch6"
4644 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4645 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4646 {
4647 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4648 })
4649
4650 (define_insn "*arm_extendhisi2"
4651 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4652 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4653 "TARGET_ARM && arm_arch4 && !arm_arch6"
4654 "@
4655 #
4656 ldrsh%?\\t%0, %1"
4657 [(set_attr "length" "8,4")
4658 (set_attr "type" "alu_shift_reg,load_byte")
4659 (set_attr "predicable" "yes")]
4660 )
4661
4662 ;; ??? Check Thumb-2 pool range
4663 (define_insn "*arm_extendhisi2_v6"
4664 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4665 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4666 "TARGET_32BIT && arm_arch6"
4667 "@
4668 sxth%?\\t%0, %1
4669 ldrsh%?\\t%0, %1"
4670 [(set_attr "type" "extend,load_byte")
4671 (set_attr "predicable" "yes")]
4672 )
4673
4674 (define_insn "*arm_extendhisi2addsi"
4675 [(set (match_operand:SI 0 "s_register_operand" "=r")
4676 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4677 (match_operand:SI 2 "s_register_operand" "r")))]
4678 "TARGET_INT_SIMD"
4679 "sxtah%?\\t%0, %2, %1"
4680 [(set_attr "type" "alu_shift_reg")]
4681 )
4682
4683 (define_expand "extendqihi2"
4684 [(set (match_dup 2)
4685 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
4686 (const_int 24)))
4687 (set (match_operand:HI 0 "s_register_operand")
4688 (ashiftrt:SI (match_dup 2)
4689 (const_int 24)))]
4690 "TARGET_ARM"
4691 "
4692 {
4693 if (arm_arch4 && MEM_P (operands[1]))
4694 {
4695 emit_insn (gen_rtx_SET (operands[0],
4696 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4697 DONE;
4698 }
4699 if (!s_register_operand (operands[1], QImode))
4700 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4701 operands[0] = gen_lowpart (SImode, operands[0]);
4702 operands[1] = gen_lowpart (SImode, operands[1]);
4703 operands[2] = gen_reg_rtx (SImode);
4704 }"
4705 )
4706
4707 (define_insn "*arm_extendqihi_insn"
4708 [(set (match_operand:HI 0 "s_register_operand" "=r")
4709 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4710 "TARGET_ARM && arm_arch4"
4711 "ldrsb%?\\t%0, %1"
4712 [(set_attr "type" "load_byte")
4713 (set_attr "predicable" "yes")]
4714 )
4715
4716 (define_expand "extendqisi2"
4717 [(set (match_operand:SI 0 "s_register_operand")
4718 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
4719 "TARGET_EITHER"
4720 {
4721 if (!arm_arch4 && MEM_P (operands[1]))
4722 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4723
4724 if (!arm_arch6 && !MEM_P (operands[1]))
4725 {
4726 rtx t = gen_lowpart (SImode, operands[1]);
4727 rtx tmp = gen_reg_rtx (SImode);
4728 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4729 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
4730 DONE;
4731 }
4732 })
4733
4734 (define_split
4735 [(set (match_operand:SI 0 "register_operand" "")
4736 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
4737 "!arm_arch6"
4738 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4739 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
4740 {
4741 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4742 })
4743
4744 (define_insn "*arm_extendqisi"
4745 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4746 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4747 "TARGET_ARM && arm_arch4 && !arm_arch6"
4748 "@
4749 #
4750 ldrsb%?\\t%0, %1"
4751 [(set_attr "length" "8,4")
4752 (set_attr "type" "alu_shift_reg,load_byte")
4753 (set_attr "predicable" "yes")]
4754 )
4755
4756 (define_insn "*arm_extendqisi_v6"
4757 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4758 (sign_extend:SI
4759 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4760 "TARGET_ARM && arm_arch6"
4761 "@
4762 sxtb%?\\t%0, %1
4763 ldrsb%?\\t%0, %1"
4764 [(set_attr "type" "extend,load_byte")
4765 (set_attr "predicable" "yes")]
4766 )
4767
4768 (define_insn "*arm_extendqisi2addsi"
4769 [(set (match_operand:SI 0 "s_register_operand" "=r")
4770 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4771 (match_operand:SI 2 "s_register_operand" "r")))]
4772 "TARGET_INT_SIMD"
4773 "sxtab%?\\t%0, %2, %1"
4774 [(set_attr "type" "alu_shift_reg")
4775 (set_attr "predicable" "yes")]
4776 )
4777
4778 (define_insn "arm_<sup>xtb16"
4779 [(set (match_operand:SI 0 "s_register_operand" "=r")
4780 (unspec:SI
4781 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
4782 "TARGET_INT_SIMD"
4783 "<sup>xtb16%?\\t%0, %1"
4784 [(set_attr "predicable" "yes")
4785 (set_attr "type" "alu_dsp_reg")])
4786
4787 (define_insn "arm_<simd32_op>"
4788 [(set (match_operand:SI 0 "s_register_operand" "=r")
4789 (unspec:SI
4790 [(match_operand:SI 1 "s_register_operand" "r")
4791 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
4792 "TARGET_INT_SIMD"
4793 "<simd32_op>%?\\t%0, %1, %2"
4794 [(set_attr "predicable" "yes")
4795 (set_attr "type" "alu_dsp_reg")])
4796
4797 (define_insn "arm_usada8"
4798 [(set (match_operand:SI 0 "s_register_operand" "=r")
4799 (unspec:SI
4800 [(match_operand:SI 1 "s_register_operand" "r")
4801 (match_operand:SI 2 "s_register_operand" "r")
4802 (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
4803 "TARGET_INT_SIMD"
4804 "usada8%?\\t%0, %1, %2, %3"
4805 [(set_attr "predicable" "yes")
4806 (set_attr "type" "alu_dsp_reg")])
4807
4808 (define_insn "arm_<simd32_op>"
4809 [(set (match_operand:DI 0 "s_register_operand" "=r")
4810 (unspec:DI
4811 [(match_operand:SI 1 "s_register_operand" "r")
4812 (match_operand:SI 2 "s_register_operand" "r")
4813 (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
4814 "TARGET_INT_SIMD"
4815 "<simd32_op>%?\\t%Q0, %R0, %1, %2"
4816 [(set_attr "predicable" "yes")
4817 (set_attr "type" "smlald")])
4818
4819 (define_expand "extendsfdf2"
4820 [(set (match_operand:DF 0 "s_register_operand")
4821 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
4822 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4823 ""
4824 )
4825
4826 ;; HFmode -> DFmode conversions where we don't have an instruction for it
4827 ;; must go through SFmode.
4828 ;;
4829 ;; This is always safe for an extend.
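;; (Both steps are exact: every HFmode value is representable in SFmode
;; and every SFmode value in DFmode, so no rounding happens at all.)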
4830
4831 (define_expand "extendhfdf2"
4832 [(set (match_operand:DF 0 "s_register_operand")
4833 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
4834 "TARGET_EITHER"
4835 {
4836 /* We don't have a direct instruction for this, so go via SFmode. */
4837 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4838 {
4839 rtx op1;
4840 op1 = convert_to_mode (SFmode, operands[1], 0);
4841 op1 = convert_to_mode (DFmode, op1, 0);
4842 emit_insn (gen_movdf (operands[0], op1));
4843 DONE;
4844 }
4845 /* Otherwise, we're done producing RTL and will pick up the correct
4846 pattern to do this with one rounding-step in a single instruction. */
4847 }
4848 )
4849 \f
4850 ;; Move insns (including loads and stores)
4851
4852 ;; XXX Just some ideas about movti.
4853 ;; I don't think these are a good idea on the ARM; there just aren't enough
4854 ;; registers.
4855 ;;(define_expand "loadti"
4856 ;; [(set (match_operand:TI 0 "s_register_operand")
4857 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
4858 ;; "" "")
4859
4860 ;;(define_expand "storeti"
4861 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
4862 ;; (match_operand:TI 1 "s_register_operand"))]
4863 ;; "" "")
4864
4865 ;;(define_expand "movti"
4866 ;; [(set (match_operand:TI 0 "general_operand")
4867 ;; (match_operand:TI 1 "general_operand"))]
4868 ;; ""
4869 ;; "
4870 ;;{
4871 ;; rtx insn;
4872 ;;
4873 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
4874 ;; operands[1] = copy_to_reg (operands[1]);
4875 ;; if (MEM_P (operands[0]))
4876 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4877 ;; else if (MEM_P (operands[1]))
4878 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4879 ;; else
4880 ;; FAIL;
4881 ;;
4882 ;; emit_insn (insn);
4883 ;; DONE;
4884 ;;}")
4885
4886 ;; Recognize garbage generated above.
4887
4888 ;;(define_insn ""
4889 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4890 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4891 ;; ""
4892 ;; "*
4893 ;; {
4894 ;; register mem = (which_alternative < 3);
4895 ;; register const char *template;
4896 ;;
4897 ;; operands[mem] = XEXP (operands[mem], 0);
4898 ;; switch (which_alternative)
4899 ;; {
4900 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4901 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4902 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4903 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4904 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4905 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4906 ;; }
4907 ;; output_asm_insn (template, operands);
4908 ;; return \"\";
4909 ;; }")
4910
4911 (define_expand "movdi"
4912 [(set (match_operand:DI 0 "general_operand")
4913 (match_operand:DI 1 "general_operand"))]
4914 "TARGET_EITHER"
4915 "
4916 gcc_checking_assert (aligned_operand (operands[0], DImode));
4917 gcc_checking_assert (aligned_operand (operands[1], DImode));
4918 if (can_create_pseudo_p ())
4919 {
4920 if (!REG_P (operands[0]))
4921 operands[1] = force_reg (DImode, operands[1]);
4922 }
4923 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
4924 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
4925 {
4926 /* Avoid LDRD's into an odd-numbered register pair in ARM state
4927 when expanding function calls. */
4928 gcc_assert (can_create_pseudo_p ());
4929 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
4930 {
4931 /* Perform load into legal reg pair first, then move. */
4932 rtx reg = gen_reg_rtx (DImode);
4933 emit_insn (gen_movdi (reg, operands[1]));
4934 operands[1] = reg;
4935 }
4936 emit_move_insn (gen_lowpart (SImode, operands[0]),
4937 gen_lowpart (SImode, operands[1]));
4938 emit_move_insn (gen_highpart (SImode, operands[0]),
4939 gen_highpart (SImode, operands[1]));
4940 DONE;
4941 }
4942 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
4943 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
4944 {
4945 /* Avoid STRD's from an odd-numbered register pair in ARM state
4946 when expanding function prologue. */
4947 gcc_assert (can_create_pseudo_p ());
4948 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
4949 ? gen_reg_rtx (DImode)
4950 : operands[0];
4951 emit_move_insn (gen_lowpart (SImode, split_dest),
4952 gen_lowpart (SImode, operands[1]));
4953 emit_move_insn (gen_highpart (SImode, split_dest),
4954 gen_highpart (SImode, operands[1]));
4955 if (split_dest != operands[0])
4956 emit_insn (gen_movdi (operands[0], split_dest));
4957 DONE;
4958 }
4959 "
4960 )
4961
4962 (define_insn "*arm_movdi"
4963 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4964 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4965 "TARGET_32BIT
4966 && !(TARGET_HARD_FLOAT)
4967 && !TARGET_IWMMXT
4968 && ( register_operand (operands[0], DImode)
4969 || register_operand (operands[1], DImode))"
4970 "*
4971 switch (which_alternative)
4972 {
4973 case 0:
4974 case 1:
4975 case 2:
4976 return \"#\";
4977 case 3:
4978 /* Cannot load it directly, split to load it via MOV / MOVT. */
4979 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
4980 return \"#\";
4981 /* Fall through. */
4982 default:
4983 return output_move_double (operands, true, NULL);
4984 }
4985 "
4986 [(set_attr "length" "8,12,16,8,8")
4987 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
4988 (set_attr "arm_pool_range" "*,*,*,1020,*")
4989 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
4990 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
4991 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
4992 )
4993
4994 (define_split
4995 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4996 (match_operand:ANY64 1 "immediate_operand" ""))]
4997 "TARGET_32BIT
4998 && reload_completed
4999 && (arm_disable_literal_pool
5000 || (arm_const_double_inline_cost (operands[1])
5001 <= arm_max_const_double_inline_cost ()))"
5002 [(const_int 0)]
5003 "
5004 arm_split_constant (SET, SImode, curr_insn,
5005 INTVAL (gen_lowpart (SImode, operands[1])),
5006 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
5007 arm_split_constant (SET, SImode, curr_insn,
5008 INTVAL (gen_highpart_mode (SImode,
5009 GET_MODE (operands[0]),
5010 operands[1])),
5011 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
5012 DONE;
5013 "
5014 )
5015
5016 ; If optimizing for size, or if we have load delay slots, then
5017 ; we want to split the constant into two separate operations.
5018 ; In both cases this may split a trivial part into a single data op
5019 ; leaving a single complex constant to load. We can also get longer
5020 ; offsets in a LDR which means we get better chances of sharing the pool
5021 ; entries. Finally, we can normally do a better job of scheduling
5022 ; LDR instructions than we can with LDM.
5023 ; This pattern will only match if the one above did not.
5024 (define_split
5025 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5026 (match_operand:ANY64 1 "const_double_operand" ""))]
5027 "TARGET_ARM && reload_completed
5028 && arm_const_double_by_parts (operands[1])"
5029 [(set (match_dup 0) (match_dup 1))
5030 (set (match_dup 2) (match_dup 3))]
5031 "
5032 operands[2] = gen_highpart (SImode, operands[0]);
5033 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
5034 operands[1]);
5035 operands[0] = gen_lowpart (SImode, operands[0]);
5036 operands[1] = gen_lowpart (SImode, operands[1]);
5037 "
5038 )
5039
5040 (define_split
5041 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5042 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
5043 "TARGET_EITHER && reload_completed"
5044 [(set (match_dup 0) (match_dup 1))
5045 (set (match_dup 2) (match_dup 3))]
5046 "
5047 operands[2] = gen_highpart (SImode, operands[0]);
5048 operands[3] = gen_highpart (SImode, operands[1]);
5049 operands[0] = gen_lowpart (SImode, operands[0]);
5050 operands[1] = gen_lowpart (SImode, operands[1]);
5051
5052 /* Handle a partial overlap. */
5053 if (rtx_equal_p (operands[0], operands[3]))
5054 {
5055 rtx tmp0 = operands[0];
5056 rtx tmp1 = operands[1];
5057
5058 operands[0] = operands[2];
5059 operands[1] = operands[3];
5060 operands[2] = tmp0;
5061 operands[3] = tmp1;
5062 }
5063 "
5064 )
5065
5066 ;; We can't actually do base+index doubleword loads if the index and
5067 ;; destination overlap.  Split here so that we at least have a chance to
5068 ;; schedule.
5069 (define_split
5070 [(set (match_operand:DI 0 "s_register_operand" "")
5071 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
5072 (match_operand:SI 2 "s_register_operand" ""))))]
5073 "TARGET_LDRD
5074 && reg_overlap_mentioned_p (operands[0], operands[1])
5075 && reg_overlap_mentioned_p (operands[0], operands[2])"
5076 [(set (match_dup 4)
5077 (plus:SI (match_dup 1)
5078 (match_dup 2)))
5079 (set (match_dup 0)
5080 (mem:DI (match_dup 4)))]
5081 "
5082 operands[4] = gen_rtx_REG (SImode, REGNO (operands[0]));
5083 "
5084 )
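
;; Schematically (illustrative registers only), for a destination pair
;; r0/r1 that overlaps both the base and the index, the split above turns
;;	ldrd	r0, r1, [r0, r1]	@ the form we must avoid
;; into
;;	add	r0, r0, r1
;;	ldrd	r0, r1, [r0]
;; reusing the low half of the destination as the address register.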
5085
5086 (define_expand "movsi"
5087 [(set (match_operand:SI 0 "general_operand")
5088 (match_operand:SI 1 "general_operand"))]
5089 "TARGET_EITHER"
5090 "
5091 {
5092 rtx base, offset, tmp;
5093
5094 gcc_checking_assert (aligned_operand (operands[0], SImode));
5095 gcc_checking_assert (aligned_operand (operands[1], SImode));
5096 if (TARGET_32BIT || TARGET_HAVE_MOVT)
5097 {
5098 /* Everything except mem = const or mem = mem can be done easily. */
5099 if (MEM_P (operands[0]))
5100 operands[1] = force_reg (SImode, operands[1]);
5101 if (arm_general_register_operand (operands[0], SImode)
5102 && CONST_INT_P (operands[1])
5103 && !(const_ok_for_arm (INTVAL (operands[1]))
5104 || const_ok_for_arm (~INTVAL (operands[1]))))
5105 {
5106 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
5107 {
5108 emit_insn (gen_rtx_SET (operands[0], operands[1]));
5109 DONE;
5110 }
5111 else
5112 {
5113 arm_split_constant (SET, SImode, NULL_RTX,
5114 INTVAL (operands[1]), operands[0], NULL_RTX,
5115 optimize && can_create_pseudo_p ());
5116 DONE;
5117 }
5118 }
5119 }
5120 else /* Target doesn't have MOVT... */
5121 {
5122 if (can_create_pseudo_p ())
5123 {
5124 if (!REG_P (operands[0]))
5125 operands[1] = force_reg (SImode, operands[1]);
5126 }
5127 }
5128
5129 split_const (operands[1], &base, &offset);
5130 if (INTVAL (offset) != 0
5131 && targetm.cannot_force_const_mem (SImode, operands[1]))
5132 {
5133 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5134 emit_move_insn (tmp, base);
5135 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5136 DONE;
5137 }
5138
5139 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
5140
5141 /* Recognize the case where operand[1] is a reference to thread-local
5142 data and load its address to a register. Offsets have been split off
5143 already. */
5144 if (arm_tls_referenced_p (operands[1]))
5145 operands[1] = legitimize_tls_address (operands[1], tmp);
5146 else if (flag_pic
5147 && (CONSTANT_P (operands[1])
5148 || symbol_mentioned_p (operands[1])
5149 || label_mentioned_p (operands[1])))
5150 operands[1] =
5151 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
5152 }
5153 "
5154 )
5155
5156 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5157 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5158 ;; so this does not matter.
5159 (define_insn "*arm_movt"
5160 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
5161 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
5162 (match_operand:SI 2 "general_operand" "i,i")))]
5163 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
5164 "@
5165 movt%?\t%0, #:upper16:%c2
5166 movt\t%0, #:upper16:%c2"
5167 [(set_attr "arch" "32,v8mb")
5168 (set_attr "predicable" "yes")
5169 (set_attr "length" "4")
5170 (set_attr "type" "alu_sreg")]
5171 )
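
;; For instance (illustrative only; "sym" and r0 are arbitrary), a full
;; address is materialised by a HIGH/LO_SUM pair that assembles as
;;	movw	r0, #:lower16:sym	@ the "HIGH" part: low 16 bits
;;	movt	r0, #:upper16:sym	@ the "LO_SUM" part: high 16 bits
;; which is the backwards naming described in the comment above.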
5172
5173 (define_insn "*arm_movsi_insn"
5174 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5175 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5176 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
5177 && ( register_operand (operands[0], SImode)
5178 || register_operand (operands[1], SImode))"
5179 "@
5180 mov%?\\t%0, %1
5181 mov%?\\t%0, %1
5182 mvn%?\\t%0, #%B1
5183 movw%?\\t%0, %1
5184 ldr%?\\t%0, %1
5185 str%?\\t%1, %0"
5186 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
5187 (set_attr "predicable" "yes")
5188 (set_attr "arch" "*,*,*,v6t2,*,*")
5189 (set_attr "pool_range" "*,*,*,*,4096,*")
5190 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5191 )
5192
5193 (define_split
5194 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5195 (match_operand:SI 1 "const_int_operand" ""))]
5196 "(TARGET_32BIT || TARGET_HAVE_MOVT)
5197 && (!(const_ok_for_arm (INTVAL (operands[1]))
5198 || const_ok_for_arm (~INTVAL (operands[1]))))"
5199 [(clobber (const_int 0))]
5200 "
5201 arm_split_constant (SET, SImode, NULL_RTX,
5202 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5203 DONE;
5204 "
5205 )
5206
5207 ;; A normal way to do (symbol + offset) requires at least three instructions
5208 ;; (depending on how big the offset is), as below:
5209 ;; movw r0, #:lower16:g
5210 ;; movt r0, #:upper16:g
5211 ;; adds r0, #4
5212 ;;
5213 ;; A better way would be:
5214 ;; movw r0, #:lower16:g+4
5215 ;; movt r0, #:upper16:g+4
5216 ;;
5217 ;; The limitation of this approach is that the offset must fit in a 16-bit
5218 ;; signed value, because the current assembler only supports REL type
5219 ;; relocations for this case.  If the more powerful RELA type is supported
5220 ;; in the future, we should update this pattern to use the better sequence.
5221 (define_split
5222 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5223 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
5224 (match_operand:SI 2 "const_int_operand" ""))))]
5225 "TARGET_THUMB
5226 && TARGET_HAVE_MOVT
5227 && arm_disable_literal_pool
5228 && reload_completed
5229 && GET_CODE (operands[1]) == SYMBOL_REF"
5230 [(clobber (const_int 0))]
5231 "
5232 int offset = INTVAL (operands[2]);
5233
5234 if (offset < -0x8000 || offset > 0x7fff)
5235 {
5236 arm_emit_movpair (operands[0], operands[1]);
5237 emit_insn (gen_rtx_SET (operands[0],
5238 gen_rtx_PLUS (SImode, operands[0], operands[2])));
5239 }
5240 else
5241 {
5242 rtx op = gen_rtx_CONST (SImode,
5243 gen_rtx_PLUS (SImode, operands[1], operands[2]));
5244 arm_emit_movpair (operands[0], op);
5245 }
5246 "
5247 )
5248
5249 ;; Split symbol_refs at a later stage (after cprop), instead of generating
5250 ;; the movt/movw pair directly at expand time.  Otherwise the corresponding
5251 ;; high_sum and lo_sum would be merged back into a memory load by cprop.
5252 ;; However, when the default is to prefer movt/movw over a load from the
5253 ;; constant pool, performance is better this way.
5254 (define_split
5255 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5256 (match_operand:SI 1 "general_operand" ""))]
5257 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
5258 && !target_word_relocations
5259 && !arm_tls_referenced_p (operands[1])"
5260 [(clobber (const_int 0))]
5261 {
5262 arm_emit_movpair (operands[0], operands[1]);
5263 DONE;
5264 })
5265
5266 ;; When generating pic, we need to load the symbol offset into a register.
5267 ;; So that the optimizer does not confuse this with a normal symbol load
5268 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5269 ;; since that is the only type of relocation we can use.
5270
5271 ;; Wrap calculation of the whole PIC address in a single pattern for the
5272 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5273 ;; a PIC address involves two loads from memory, so we want to CSE it
5274 ;; as often as possible.
5275 ;; This pattern will be split into one of the pic_load_addr_* patterns
5276 ;; and a move after GCSE optimizations.
5277 ;;
5278 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
5279 (define_expand "calculate_pic_address"
5280 [(set (match_operand:SI 0 "register_operand")
5281 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
5282 (unspec:SI [(match_operand:SI 2 "" "")]
5283 UNSPEC_PIC_SYM))))]
5284 "flag_pic"
5285 )
5286
5287 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5288 (define_split
5289 [(set (match_operand:SI 0 "register_operand" "")
5290 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5291 (unspec:SI [(match_operand:SI 2 "" "")]
5292 UNSPEC_PIC_SYM))))]
5293 "flag_pic"
5294 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5295 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5296 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5297 )
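
;; Roughly, the patterns above amount to two dependent loads (registers,
;; labels and the exact relocation spelling are illustrative only):
;;	ldr	r3, .LCP0		@ .LCP0: .word sym(GOT)
;;	ldr	r0, [r9, r3]		@ r9 standing in for the PIC register
;; which is the two-load computation we want PRE/HOIST to be able to share.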
5298
5299 ;; operand1 is the memory address to go into
5300 ;; pic_load_addr_32bit.
5301 ;; operand2 is the PIC label to be emitted
5302 ;; from pic_add_dot_plus_eight.
5303 ;; We do this to allow hoisting of the entire insn.
5304 (define_insn_and_split "pic_load_addr_unified"
5305 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
5306 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
5307 (match_operand:SI 2 "" "")]
5308 UNSPEC_PIC_UNIFIED))]
5309 "flag_pic"
5310 "#"
5311 "&& reload_completed"
5312 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
5313 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
5314 (match_dup 2)] UNSPEC_PIC_BASE))]
5315 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
5316 [(set_attr "type" "load_4,load_4,load_4")
5317 (set_attr "pool_range" "4096,4094,1022")
5318 (set_attr "neg_pool_range" "4084,0,0")
5319 (set_attr "arch" "a,t2,t1")
5320 (set_attr "length" "8,6,4")]
5321 )
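
;; After the split, the unified pattern above becomes a literal-pool load
;; followed by a pc-relative add, schematically (Thumb shown; labels and
;; registers are illustrative):
;;	ldr	r0, .LCP1		@ pic_load_addr_*
;; .LPIC0:
;;	add	r0, pc			@ pic_add_dot_plus_four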
5322
5323 ;; The rather odd constraints on the following are to force reload to leave
5324 ;; the insn alone, and to force the minipool generation pass to then move
5325 ;; the GOT symbol to memory.
5326
5327 (define_insn "pic_load_addr_32bit"
5328 [(set (match_operand:SI 0 "s_register_operand" "=r")
5329 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5330 "TARGET_32BIT && flag_pic"
5331 "ldr%?\\t%0, %1"
5332 [(set_attr "type" "load_4")
5333 (set (attr "pool_range")
5334 (if_then_else (eq_attr "is_thumb" "no")
5335 (const_int 4096)
5336 (const_int 4094)))
5337 (set (attr "neg_pool_range")
5338 (if_then_else (eq_attr "is_thumb" "no")
5339 (const_int 4084)
5340 (const_int 0)))]
5341 )
5342
5343 (define_insn "pic_load_addr_thumb1"
5344 [(set (match_operand:SI 0 "s_register_operand" "=l")
5345 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5346 "TARGET_THUMB1 && flag_pic"
5347 "ldr\\t%0, %1"
5348 [(set_attr "type" "load_4")
5349 (set (attr "pool_range") (const_int 1018))]
5350 )
5351
5352 (define_insn "pic_add_dot_plus_four"
5353 [(set (match_operand:SI 0 "register_operand" "=r")
5354 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5355 (const_int 4)
5356 (match_operand 2 "" "")]
5357 UNSPEC_PIC_BASE))]
5358 "TARGET_THUMB"
5359 "*
5360 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5361 INTVAL (operands[2]));
5362 return \"add\\t%0, %|pc\";
5363 "
5364 [(set_attr "length" "2")
5365 (set_attr "type" "alu_sreg")]
5366 )
5367
5368 (define_insn "pic_add_dot_plus_eight"
5369 [(set (match_operand:SI 0 "register_operand" "=r")
5370 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5371 (const_int 8)
5372 (match_operand 2 "" "")]
5373 UNSPEC_PIC_BASE))]
5374 "TARGET_ARM"
5375 "*
5376 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5377 INTVAL (operands[2]));
5378 return \"add%?\\t%0, %|pc, %1\";
5379 "
5380 [(set_attr "predicable" "yes")
5381 (set_attr "type" "alu_sreg")]
5382 )
5383
5384 (define_insn "tls_load_dot_plus_eight"
5385 [(set (match_operand:SI 0 "register_operand" "=r")
5386 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5387 (const_int 8)
5388 (match_operand 2 "" "")]
5389 UNSPEC_PIC_BASE)))]
5390 "TARGET_ARM"
5391 "*
5392 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5393 INTVAL (operands[2]));
5394 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5395 "
5396 [(set_attr "predicable" "yes")
5397 (set_attr "type" "load_4")]
5398 )
5399
5400 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5401 ;; followed by a load. These sequences can be crunched down to
5402 ;; tls_load_dot_plus_eight by a peephole.
5403
5404 (define_peephole2
5405 [(set (match_operand:SI 0 "register_operand" "")
5406 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5407 (const_int 8)
5408 (match_operand 1 "" "")]
5409 UNSPEC_PIC_BASE))
5410 (set (match_operand:SI 2 "arm_general_register_operand" "")
5411 (mem:SI (match_dup 0)))]
5412 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5413 [(set (match_dup 2)
5414 (mem:SI (unspec:SI [(match_dup 3)
5415 (const_int 8)
5416 (match_dup 1)]
5417 UNSPEC_PIC_BASE)))]
5418 ""
5419 )
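
;; Schematically (illustrative registers; the LPIC label is omitted), the
;; peephole above rewrites
;;	add	r3, pc, r2
;;	ldr	r0, [r3]
;; as the single
;;	ldr	r0, [pc, r2]		@ tls_load_dot_plus_eight
;; when r3 is dead after the load.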
5420
5421 (define_insn "pic_offset_arm"
5422 [(set (match_operand:SI 0 "register_operand" "=r")
5423 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5424 (unspec:SI [(match_operand:SI 2 "" "X")]
5425 UNSPEC_PIC_OFFSET))))]
5426 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5427 "ldr%?\\t%0, [%1,%2]"
5428 [(set_attr "type" "load_4")]
5429 )
5430
5431 (define_expand "builtin_setjmp_receiver"
5432 [(label_ref (match_operand 0 "" ""))]
5433 "flag_pic"
5434 "
5435 {
5436 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5437 register. */
5438 if (arm_pic_register != INVALID_REGNUM)
5439 arm_load_pic_register (1UL << 3, NULL_RTX);
5440 DONE;
5441 }")
5442
5443 ;; If copying one reg to another we can set the condition codes according to
5444 ;; its value.  Such a move is common after a return from a subroutine when
5445 ;; the result is being tested against zero.
5446
5447 (define_insn "*movsi_compare0"
5448 [(set (reg:CC CC_REGNUM)
5449 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5450 (const_int 0)))
5451 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5452 (match_dup 1))]
5453 "TARGET_32BIT"
5454 "@
5455 cmp%?\\t%0, #0
5456 subs%?\\t%0, %1, #0"
5457 [(set_attr "conds" "set")
5458 (set_attr "type" "alus_imm,alus_imm")]
5459 )
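
;; For example (illustrative registers), a copy whose result is then
;; compared with zero, such as
;;	mov	r0, r1
;;	cmp	r1, #0
;; can be emitted as the single flag-setting copy
;;	subs	r0, r1, #0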
5460
5461 ;; Subroutine to store a half word from a register into memory.
5462 ;; Operand 0 is the source register (HImode)
5463 ;; Operand 1 is the destination address in a register (SImode)
5464
5465 ;; In both this routine and the next, we must be careful not to spill
5466 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5467 ;; can generate unrecognizable rtl.
5468
5469 (define_expand "storehi"
5470 [;; store the low byte
5471 (set (match_operand 1 "" "") (match_dup 3))
5472 ;; extract the high byte
5473 (set (match_dup 2)
5474 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5475 ;; store the high byte
5476 (set (match_dup 4) (match_dup 5))]
5477 "TARGET_ARM"
5478 "
5479 {
5480 rtx op1 = operands[1];
5481 rtx addr = XEXP (op1, 0);
5482 enum rtx_code code = GET_CODE (addr);
5483
5484 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5485 || code == MINUS)
5486 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5487
5488 operands[4] = adjust_address (op1, QImode, 1);
5489 operands[1] = adjust_address (operands[1], QImode, 0);
5490 operands[3] = gen_lowpart (QImode, operands[0]);
5491 operands[0] = gen_lowpart (SImode, operands[0]);
5492 operands[2] = gen_reg_rtx (SImode);
5493 operands[5] = gen_lowpart (QImode, operands[2]);
5494 }"
5495 )
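
;; The expansion above amounts to two byte stores plus a shift, roughly
;; (illustrative registers; this is the little-endian layout, and
;; storehi_bigend below handles the reverse):
;;	strb	r1, [r0]		@ low byte
;;	mov	r3, r1, asr #8
;;	strb	r3, [r0, #1]		@ high byte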
5496
5497 (define_expand "storehi_bigend"
5498 [(set (match_dup 4) (match_dup 3))
5499 (set (match_dup 2)
5500 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5501 (set (match_operand 1 "" "") (match_dup 5))]
5502 "TARGET_ARM"
5503 "
5504 {
5505 rtx op1 = operands[1];
5506 rtx addr = XEXP (op1, 0);
5507 enum rtx_code code = GET_CODE (addr);
5508
5509 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5510 || code == MINUS)
5511 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5512
5513 operands[4] = adjust_address (op1, QImode, 1);
5514 operands[1] = adjust_address (operands[1], QImode, 0);
5515 operands[3] = gen_lowpart (QImode, operands[0]);
5516 operands[0] = gen_lowpart (SImode, operands[0]);
5517 operands[2] = gen_reg_rtx (SImode);
5518 operands[5] = gen_lowpart (QImode, operands[2]);
5519 }"
5520 )
5521
5522 ;; Subroutine to store a half word integer constant into memory.
5523 (define_expand "storeinthi"
5524 [(set (match_operand 0 "" "")
5525 (match_operand 1 "" ""))
5526 (set (match_dup 3) (match_dup 2))]
5527 "TARGET_ARM"
5528 "
5529 {
5530 HOST_WIDE_INT value = INTVAL (operands[1]);
5531 rtx addr = XEXP (operands[0], 0);
5532 rtx op0 = operands[0];
5533 enum rtx_code code = GET_CODE (addr);
5534
5535 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5536 || code == MINUS)
5537 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5538
5539 operands[1] = gen_reg_rtx (SImode);
5540 if (BYTES_BIG_ENDIAN)
5541 {
5542 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5543 if ((value & 255) == ((value >> 8) & 255))
5544 operands[2] = operands[1];
5545 else
5546 {
5547 operands[2] = gen_reg_rtx (SImode);
5548 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5549 }
5550 }
5551 else
5552 {
5553 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5554 if ((value & 255) == ((value >> 8) & 255))
5555 operands[2] = operands[1];
5556 else
5557 {
5558 operands[2] = gen_reg_rtx (SImode);
5559 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5560 }
5561 }
5562
5563 operands[3] = adjust_address (op0, QImode, 1);
5564 operands[0] = adjust_address (operands[0], QImode, 0);
5565 operands[2] = gen_lowpart (QImode, operands[2]);
5566 operands[1] = gen_lowpart (QImode, operands[1]);
5567 }"
5568 )
5569
5570 (define_expand "storehi_single_op"
5571 [(set (match_operand:HI 0 "memory_operand")
5572 (match_operand:HI 1 "general_operand"))]
5573 "TARGET_32BIT && arm_arch4"
5574 "
5575 if (!s_register_operand (operands[1], HImode))
5576 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5577 "
5578 )
5579
5580 (define_expand "movhi"
5581 [(set (match_operand:HI 0 "general_operand")
5582 (match_operand:HI 1 "general_operand"))]
5583 "TARGET_EITHER"
5584 "
5585 gcc_checking_assert (aligned_operand (operands[0], HImode));
5586 gcc_checking_assert (aligned_operand (operands[1], HImode));
5587 if (TARGET_ARM)
5588 {
5589 if (can_create_pseudo_p ())
5590 {
5591 if (MEM_P (operands[0]))
5592 {
5593 if (arm_arch4)
5594 {
5595 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5596 DONE;
5597 }
5598 if (CONST_INT_P (operands[1]))
5599 emit_insn (gen_storeinthi (operands[0], operands[1]));
5600 else
5601 {
5602 if (MEM_P (operands[1]))
5603 operands[1] = force_reg (HImode, operands[1]);
5604 if (BYTES_BIG_ENDIAN)
5605 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5606 else
5607 emit_insn (gen_storehi (operands[1], operands[0]));
5608 }
5609 DONE;
5610 }
5611 /* Sign extend a constant, and keep it in an SImode reg. */
5612 else if (CONST_INT_P (operands[1]))
5613 {
5614 rtx reg = gen_reg_rtx (SImode);
5615 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5616
5617 /* If the constant is already valid, leave it alone. */
5618 if (!const_ok_for_arm (val))
5619 {
5620 /* If setting all the top bits will make the constant
5621 loadable in a single instruction, then set them.
5622 Otherwise, sign extend the number. */
5623
5624 if (const_ok_for_arm (~(val | ~0xffff)))
5625 val |= ~0xffff;
5626 else if (val & 0x8000)
5627 val |= ~0xffff;
5628 }
5629
5630 emit_insn (gen_movsi (reg, GEN_INT (val)));
5631 operands[1] = gen_lowpart (HImode, reg);
5632 }
5633 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5634 && MEM_P (operands[1]))
5635 {
5636 rtx reg = gen_reg_rtx (SImode);
5637
5638 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5639 operands[1] = gen_lowpart (HImode, reg);
5640 }
5641 else if (!arm_arch4)
5642 {
5643 if (MEM_P (operands[1]))
5644 {
5645 rtx base;
5646 rtx offset = const0_rtx;
5647 rtx reg = gen_reg_rtx (SImode);
5648
5649 if ((REG_P (base = XEXP (operands[1], 0))
5650 || (GET_CODE (base) == PLUS
5651 && (CONST_INT_P (offset = XEXP (base, 1)))
5652 && ((INTVAL(offset) & 1) != 1)
5653 && REG_P (base = XEXP (base, 0))))
5654 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5655 {
5656 rtx new_rtx;
5657
5658 new_rtx = widen_memory_access (operands[1], SImode,
5659 ((INTVAL (offset) & ~3)
5660 - INTVAL (offset)));
5661 emit_insn (gen_movsi (reg, new_rtx));
5662 if (((INTVAL (offset) & 2) != 0)
5663 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5664 {
5665 rtx reg2 = gen_reg_rtx (SImode);
5666
5667 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5668 reg = reg2;
5669 }
5670 }
5671 else
5672 emit_insn (gen_movhi_bytes (reg, operands[1]));
5673
5674 operands[1] = gen_lowpart (HImode, reg);
5675 }
5676 }
5677 }
5678 /* Handle loading a large integer during reload. */
5679 else if (CONST_INT_P (operands[1])
5680 && !const_ok_for_arm (INTVAL (operands[1]))
5681 && !const_ok_for_arm (~INTVAL (operands[1])))
5682 {
5683 /* Writing a constant to memory needs a scratch, which should
5684 be handled with SECONDARY_RELOADs. */
5685 gcc_assert (REG_P (operands[0]));
5686
5687 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5688 emit_insn (gen_movsi (operands[0], operands[1]));
5689 DONE;
5690 }
5691 }
5692 else if (TARGET_THUMB2)
5693 {
5694 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5695 if (can_create_pseudo_p ())
5696 {
5697 if (!REG_P (operands[0]))
5698 operands[1] = force_reg (HImode, operands[1]);
5699 /* Zero extend a constant, and keep it in an SImode reg. */
5700 else if (CONST_INT_P (operands[1]))
5701 {
5702 rtx reg = gen_reg_rtx (SImode);
5703 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5704
5705 emit_insn (gen_movsi (reg, GEN_INT (val)));
5706 operands[1] = gen_lowpart (HImode, reg);
5707 }
5708 }
5709 }
5710 else /* TARGET_THUMB1 */
5711 {
5712 if (can_create_pseudo_p ())
5713 {
5714 if (CONST_INT_P (operands[1]))
5715 {
5716 rtx reg = gen_reg_rtx (SImode);
5717
5718 emit_insn (gen_movsi (reg, operands[1]));
5719 operands[1] = gen_lowpart (HImode, reg);
5720 }
5721
5722 /* ??? We shouldn't really get invalid addresses here, but this can
5723 happen if we are passed an SP-relative (never OK for HImode/QImode) or
5724 virtual-register-relative address (also rejected as illegitimate for
5725 HImode/QImode). */
5726 /* ??? This should perhaps be fixed elsewhere, for instance, in
5727 fixup_stack_1, by checking for other kinds of invalid addresses,
5728 e.g. a bare reference to a virtual register. This may confuse the
5729 alpha though, which must handle this case differently. */
5730 if (MEM_P (operands[0])
5731 && !memory_address_p (GET_MODE (operands[0]),
5732 XEXP (operands[0], 0)))
5733 operands[0]
5734 = replace_equiv_address (operands[0],
5735 copy_to_reg (XEXP (operands[0], 0)));
5736
5737 if (MEM_P (operands[1])
5738 && !memory_address_p (GET_MODE (operands[1]),
5739 XEXP (operands[1], 0)))
5740 operands[1]
5741 = replace_equiv_address (operands[1],
5742 copy_to_reg (XEXP (operands[1], 0)));
5743
5744 if (MEM_P (operands[1]) && optimize > 0)
5745 {
5746 rtx reg = gen_reg_rtx (SImode);
5747
5748 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5749 operands[1] = gen_lowpart (HImode, reg);
5750 }
5751
5752 if (MEM_P (operands[0]))
5753 operands[1] = force_reg (HImode, operands[1]);
5754 }
5755 else if (CONST_INT_P (operands[1])
5756 && !satisfies_constraint_I (operands[1]))
5757 {
5758 /* Handle loading a large integer during reload. */
5759
5760 /* Writing a constant to memory needs a scratch, which should
5761 be handled with SECONDARY_RELOADs. */
5762 gcc_assert (REG_P (operands[0]));
5763
5764 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5765 emit_insn (gen_movsi (operands[0], operands[1]));
5766 DONE;
5767 }
5768 }
5769 "
5770 )
5771
5772 (define_expand "movhi_bytes"
5773 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5774 (set (match_dup 3)
5775 (zero_extend:SI (match_dup 6)))
5776 (set (match_operand:SI 0 "" "")
5777 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5778 "TARGET_ARM"
5779 "
5780 {
5781 rtx mem1, mem2;
5782 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5783
5784 mem1 = change_address (operands[1], QImode, addr);
5785 mem2 = change_address (operands[1], QImode,
5786 plus_constant (Pmode, addr, 1));
5787 operands[0] = gen_lowpart (SImode, operands[0]);
5788 operands[1] = mem1;
5789 operands[2] = gen_reg_rtx (SImode);
5790 operands[3] = gen_reg_rtx (SImode);
5791 operands[6] = mem2;
5792
5793 if (BYTES_BIG_ENDIAN)
5794 {
5795 operands[4] = operands[2];
5796 operands[5] = operands[3];
5797 }
5798 else
5799 {
5800 operands[4] = operands[3];
5801 operands[5] = operands[2];
5802 }
5803 }"
5804 )
5805
5806 (define_expand "movhi_bigend"
5807 [(set (match_dup 2)
5808 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
5809 (const_int 16)))
5810 (set (match_dup 3)
5811 (ashiftrt:SI (match_dup 2) (const_int 16)))
5812 (set (match_operand:HI 0 "s_register_operand")
5813 (match_dup 4))]
5814 "TARGET_ARM"
5815 "
5816 operands[2] = gen_reg_rtx (SImode);
5817 operands[3] = gen_reg_rtx (SImode);
5818 operands[4] = gen_lowpart (HImode, operands[3]);
5819 "
5820 )
5821
5822 ;; Pattern to recognize the insn generated by the default case above.
5823 (define_insn "*movhi_insn_arch4"
5824 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
5825 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))]
5826 "TARGET_ARM
5827 && arm_arch4 && !TARGET_HARD_FLOAT
5828 && (register_operand (operands[0], HImode)
5829 || register_operand (operands[1], HImode))"
5830 "@
5831 mov%?\\t%0, %1\\t%@ movhi
5832 mvn%?\\t%0, #%B1\\t%@ movhi
5833 movw%?\\t%0, %L1\\t%@ movhi
5834 strh%?\\t%1, %0\\t%@ movhi
5835 ldrh%?\\t%0, %1\\t%@ movhi"
5836 [(set_attr "predicable" "yes")
5837 (set_attr "pool_range" "*,*,*,*,256")
5838 (set_attr "neg_pool_range" "*,*,*,*,244")
5839 (set_attr "arch" "*,*,v6t2,*,*")
5840 (set_attr_alternative "type"
5841 [(if_then_else (match_operand 1 "const_int_operand" "")
5842 (const_string "mov_imm" )
5843 (const_string "mov_reg"))
5844 (const_string "mvn_imm")
5845 (const_string "mov_imm")
5846 (const_string "store_4")
5847 (const_string "load_4")])]
5848 )
5849
5850 (define_insn "*movhi_bytes"
5851 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
5852 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))]
5853 "TARGET_ARM && !TARGET_HARD_FLOAT"
5854 "@
5855 mov%?\\t%0, %1\\t%@ movhi
5856 mov%?\\t%0, %1\\t%@ movhi
5857 mvn%?\\t%0, #%B1\\t%@ movhi"
5858 [(set_attr "predicable" "yes")
5859 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
5860 )
5861
5862 ;; We use a DImode scratch because we may occasionally need an additional
5863 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5864 ;; to take any notice of the "o" constraint on the reload_memory_operand operand.
5865 ;; The reload_in<m> and reload_out<m> patterns require special constraints
5866 ;; to be handled correctly by the default_secondary_reload function.
5867 (define_expand "reload_outhi"
5868 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5869 (match_operand:HI 1 "s_register_operand" "r")
5870 (match_operand:DI 2 "s_register_operand" "=&l")])]
5871 "TARGET_EITHER"
5872 "if (TARGET_ARM)
5873 arm_reload_out_hi (operands);
5874 else
5875 thumb_reload_out_hi (operands);
5876 DONE;
5877 "
5878 )
5879
5880 (define_expand "reload_inhi"
5881 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5882 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5883 (match_operand:DI 2 "s_register_operand" "=&r")])]
5884 "TARGET_EITHER"
5885 "
5886 if (TARGET_ARM)
5887 arm_reload_in_hi (operands);
5888 else
5889 thumb_reload_out_hi (operands);
5890 DONE;
5891 ")
5892
5893 (define_expand "movqi"
5894 [(set (match_operand:QI 0 "general_operand")
5895 (match_operand:QI 1 "general_operand"))]
5896 "TARGET_EITHER"
5897 "
5898 /* Everything except mem = const or mem = mem can be done easily. */
5899
5900 if (can_create_pseudo_p ())
5901 {
5902 if (CONST_INT_P (operands[1]))
5903 {
5904 rtx reg = gen_reg_rtx (SImode);
5905
5906 /* For Thumb we want an unsigned immediate, as we are then more likely
5907 to be able to use a movs insn. */
5908 if (TARGET_THUMB)
5909 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
5910
5911 emit_insn (gen_movsi (reg, operands[1]));
5912 operands[1] = gen_lowpart (QImode, reg);
5913 }
5914
5915 if (TARGET_THUMB)
5916 {
5917 /* ??? We shouldn't really get invalid addresses here, but this can
5918 happen if we are passed an SP-relative (never OK for HImode/QImode) or
5919 virtual-register-relative address (also rejected as illegitimate for
5920 HImode/QImode). */
5921 /* ??? This should perhaps be fixed elsewhere, for instance, in
5922 fixup_stack_1, by checking for other kinds of invalid addresses,
5923 e.g. a bare reference to a virtual register. This may confuse the
5924 alpha though, which must handle this case differently. */
5925 if (MEM_P (operands[0])
5926 && !memory_address_p (GET_MODE (operands[0]),
5927 XEXP (operands[0], 0)))
5928 operands[0]
5929 = replace_equiv_address (operands[0],
5930 copy_to_reg (XEXP (operands[0], 0)));
5931 if (MEM_P (operands[1])
5932 && !memory_address_p (GET_MODE (operands[1]),
5933 XEXP (operands[1], 0)))
5934 operands[1]
5935 = replace_equiv_address (operands[1],
5936 copy_to_reg (XEXP (operands[1], 0)));
5937 }
5938
5939 if (MEM_P (operands[1]) && optimize > 0)
5940 {
5941 rtx reg = gen_reg_rtx (SImode);
5942
5943 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5944 operands[1] = gen_lowpart (QImode, reg);
5945 }
5946
5947 if (MEM_P (operands[0]))
5948 operands[1] = force_reg (QImode, operands[1]);
5949 }
5950 else if (TARGET_THUMB
5951 && CONST_INT_P (operands[1])
5952 && !satisfies_constraint_I (operands[1]))
5953 {
5954 /* Handle loading a large integer during reload. */
5955
5956 /* Writing a constant to memory needs a scratch, which should
5957 be handled with SECONDARY_RELOADs. */
5958 gcc_assert (REG_P (operands[0]));
5959
5960 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5961 emit_insn (gen_movsi (operands[0], operands[1]));
5962 DONE;
5963 }
5964 "
5965 )
5966
5967 (define_insn "*arm_movqi_insn"
5968 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
5969 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
5970 "TARGET_32BIT
5971 && ( register_operand (operands[0], QImode)
5972 || register_operand (operands[1], QImode))"
5973 "@
5974 mov%?\\t%0, %1
5975 mov%?\\t%0, %1
5976 mov%?\\t%0, %1
5977 mov%?\\t%0, %1
5978 mvn%?\\t%0, #%B1
5979 ldrb%?\\t%0, %1
5980 strb%?\\t%1, %0
5981 ldrb%?\\t%0, %1
5982 strb%?\\t%1, %0"
5983 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
5984 (set_attr "predicable" "yes")
5985 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
5986 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
5987 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
5988 )
5989
5990 ;; HFmode moves
5991 (define_expand "movhf"
5992 [(set (match_operand:HF 0 "general_operand")
5993 (match_operand:HF 1 "general_operand"))]
5994 "TARGET_EITHER"
5995 "
5996 gcc_checking_assert (aligned_operand (operands[0], HFmode));
5997 gcc_checking_assert (aligned_operand (operands[1], HFmode));
5998 if (TARGET_32BIT)
5999 {
6000 if (MEM_P (operands[0]))
6001 operands[1] = force_reg (HFmode, operands[1]);
6002 }
6003 else /* TARGET_THUMB1 */
6004 {
6005 if (can_create_pseudo_p ())
6006 {
6007 if (!REG_P (operands[0]))
6008 operands[1] = force_reg (HFmode, operands[1]);
6009 }
6010 }
6011 "
6012 )
6013
6014 (define_insn "*arm32_movhf"
6015 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
6016 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
6017 "TARGET_32BIT && !TARGET_HARD_FLOAT
6018 && ( s_register_operand (operands[0], HFmode)
6019 || s_register_operand (operands[1], HFmode))"
6020 "*
6021 switch (which_alternative)
6022 {
6023 case 0: /* ARM register from memory */
6024 return \"ldrh%?\\t%0, %1\\t%@ __fp16\";
6025 case 1: /* memory from ARM register */
6026 return \"strh%?\\t%1, %0\\t%@ __fp16\";
6027 case 2: /* ARM register from ARM register */
6028 return \"mov%?\\t%0, %1\\t%@ __fp16\";
6029 case 3: /* ARM register from constant */
6030 {
6031 long bits;
6032 rtx ops[4];
6033
6034 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
6035 HFmode);
6036 ops[0] = operands[0];
6037 ops[1] = GEN_INT (bits);
6038 ops[2] = GEN_INT (bits & 0xff00);
6039 ops[3] = GEN_INT (bits & 0x00ff);
6040
6041 if (arm_arch_thumb2)
6042 output_asm_insn (\"movw%?\\t%0, %1\", ops);
6043 else
6044 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6045 return \"\";
6046 }
6047 default:
6048 gcc_unreachable ();
6049 }
6050 "
6051 [(set_attr "conds" "unconditional")
6052 (set_attr "type" "load_4,store_4,mov_reg,multiple")
6053 (set_attr "length" "4,4,4,8")
6054 (set_attr "predicable" "yes")]
6055 )
6056
6057 (define_expand "movsf"
6058 [(set (match_operand:SF 0 "general_operand")
6059 (match_operand:SF 1 "general_operand"))]
6060 "TARGET_EITHER"
6061 "
6062 gcc_checking_assert (aligned_operand (operands[0], SFmode));
6063 gcc_checking_assert (aligned_operand (operands[1], SFmode));
6064 if (TARGET_32BIT)
6065 {
6066 if (MEM_P (operands[0]))
6067 operands[1] = force_reg (SFmode, operands[1]);
6068 }
6069 else /* TARGET_THUMB1 */
6070 {
6071 if (can_create_pseudo_p ())
6072 {
6073 if (!REG_P (operands[0]))
6074 operands[1] = force_reg (SFmode, operands[1]);
6075 }
6076 }
6077
6078 /* Cannot load it directly; generate a load with clobber so that it can be
6079 loaded via GPR with MOV / MOVT. */
6080 if (arm_disable_literal_pool
6081 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6082 && CONST_DOUBLE_P (operands[1])
6083 && TARGET_HARD_FLOAT
6084 && !vfp3_const_double_rtx (operands[1]))
6085 {
6086 rtx clobreg = gen_reg_rtx (SFmode);
6087 emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
6088 clobreg));
6089 DONE;
6090 }
6091 "
6092 )
6093
6094 ;; When moving a floating-point constant into a core register, transform the
6095 ;; move into an SImode operation.
6096 (define_split
6097 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6098 (match_operand:SF 1 "immediate_operand" ""))]
6099 "TARGET_EITHER
6100 && reload_completed
6101 && CONST_DOUBLE_P (operands[1])"
6102 [(set (match_dup 2) (match_dup 3))]
6103 "
6104 operands[2] = gen_lowpart (SImode, operands[0]);
6105 operands[3] = gen_lowpart (SImode, operands[1]);
6106 if (operands[2] == 0 || operands[3] == 0)
6107 FAIL;
6108 "
6109 )
6110
6111 (define_insn "*arm_movsf_soft_insn"
6112 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6113 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6114 "TARGET_32BIT
6115 && TARGET_SOFT_FLOAT
6116 && (!MEM_P (operands[0])
6117 || register_operand (operands[1], SFmode))"
6118 {
6119 switch (which_alternative)
6120 {
6121 case 0: return \"mov%?\\t%0, %1\";
6122 case 1:
6123 /* Cannot load it directly; split to load it via MOV / MOVT. */
6124 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6125 return \"#\";
6126 return \"ldr%?\\t%0, %1\\t%@ float\";
6127 case 2: return \"str%?\\t%1, %0\\t%@ float\";
6128 default: gcc_unreachable ();
6129 }
6130 }
6131 [(set_attr "predicable" "yes")
6132 (set_attr "type" "mov_reg,load_4,store_4")
6133 (set_attr "arm_pool_range" "*,4096,*")
6134 (set_attr "thumb2_pool_range" "*,4094,*")
6135 (set_attr "arm_neg_pool_range" "*,4084,*")
6136 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6137 )
6138
6139 ;; Splitter for the above.
6140 (define_split
6141 [(set (match_operand:SF 0 "s_register_operand")
6142 (match_operand:SF 1 "const_double_operand"))]
6143 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6144 [(const_int 0)]
6145 {
6146 long buf;
6147 real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
6148 rtx cst = gen_int_mode (buf, SImode);
6149 emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
6150 DONE;
6151 }
6152 )
6153
6154 (define_expand "movdf"
6155 [(set (match_operand:DF 0 "general_operand")
6156 (match_operand:DF 1 "general_operand"))]
6157 "TARGET_EITHER"
6158 "
6159 gcc_checking_assert (aligned_operand (operands[0], DFmode));
6160 gcc_checking_assert (aligned_operand (operands[1], DFmode));
6161 if (TARGET_32BIT)
6162 {
6163 if (MEM_P (operands[0]))
6164 operands[1] = force_reg (DFmode, operands[1]);
6165 }
6166 else /* TARGET_THUMB */
6167 {
6168 if (can_create_pseudo_p ())
6169 {
6170 if (!REG_P (operands[0]))
6171 operands[1] = force_reg (DFmode, operands[1]);
6172 }
6173 }
6174
6175 /* Cannot load it directly; generate a load with clobber so that it can be
6176 loaded via GPR with MOV / MOVT. */
6177 if (arm_disable_literal_pool
6178 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6179 && CONSTANT_P (operands[1])
6180 && TARGET_HARD_FLOAT
6181 && !arm_const_double_rtx (operands[1])
6182 && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
6183 {
6184 rtx clobreg = gen_reg_rtx (DFmode);
6185 emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
6186 clobreg));
6187 DONE;
6188 }
6189 "
6190 )
6191
6192 ;; Reloading a df mode value stored in integer regs to memory can require a
6193 ;; scratch reg.
6194 ;; Another reload_out<m> pattern that requires special constraints.
6195 (define_expand "reload_outdf"
6196 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6197 (match_operand:DF 1 "s_register_operand" "r")
6198 (match_operand:SI 2 "s_register_operand" "=&r")]
6199 "TARGET_THUMB2"
6200 "
6201 {
6202 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6203
6204 if (code == REG)
6205 operands[2] = XEXP (operands[0], 0);
6206 else if (code == POST_INC || code == PRE_DEC)
6207 {
6208 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6209 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6210 emit_insn (gen_movdi (operands[0], operands[1]));
6211 DONE;
6212 }
6213 else if (code == PRE_INC)
6214 {
6215 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6216
6217 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6218 operands[2] = reg;
6219 }
6220 else if (code == POST_DEC)
6221 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6222 else
6223 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6224 XEXP (XEXP (operands[0], 0), 1)));
6225
6226 emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
6227 operands[1]));
6228
6229 if (code == POST_DEC)
6230 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6231
6232 DONE;
6233 }"
6234 )
6235
6236 (define_insn "*movdf_soft_insn"
6237 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6238 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6239 "TARGET_32BIT && TARGET_SOFT_FLOAT
6240 && ( register_operand (operands[0], DFmode)
6241 || register_operand (operands[1], DFmode))"
6242 "*
6243 switch (which_alternative)
6244 {
6245 case 0:
6246 case 1:
6247 case 2:
6248 return \"#\";
6249 case 3:
6250 /* Cannot load it directly; split to load it via MOV / MOVT. */
6251 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6252 return \"#\";
6253 /* Fall through. */
6254 default:
6255 return output_move_double (operands, true, NULL);
6256 }
6257 "
6258 [(set_attr "length" "8,12,16,8,8")
6259 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6260 (set_attr "arm_pool_range" "*,*,*,1020,*")
6261 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
6262 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6263 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6264 )
6265
6266 ;; Splitter for the above.
6267 (define_split
6268 [(set (match_operand:DF 0 "s_register_operand")
6269 (match_operand:DF 1 "const_double_operand"))]
6270 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6271 [(const_int 0)]
6272 {
6273 long buf[2];
6274 int order = BYTES_BIG_ENDIAN ? 1 : 0;
6275 real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
6276 unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
6277 ival |= (zext_hwi (buf[1 - order], 32) << 32);
6278 rtx cst = gen_int_mode (ival, DImode);
6279 emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
6280 DONE;
6281 }
6282 )
6283 \f
6284
6285 ;; load- and store-multiple insns
6286 ;; The ARM can load/store any set of registers, provided that they are in
6287 ;; ascending order, but these expanders assume a contiguous set.
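;; For example (illustrative), four consecutive words can be loaded with
;;	ldmia	r0, {r4, r5, r6, r7}
;; but the destination registers must form a contiguous, ascending block
;; for these expanders to be used.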
6288
6289 (define_expand "load_multiple"
6290 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6291 (match_operand:SI 1 "" ""))
6292 (use (match_operand:SI 2 "" ""))])]
6293 "TARGET_32BIT"
6294 {
6295 HOST_WIDE_INT offset = 0;
6296
6297 /* Support only fixed point registers. */
6298 if (!CONST_INT_P (operands[2])
6299 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6300 || INTVAL (operands[2]) < 2
6301 || !MEM_P (operands[1])
6302 || !REG_P (operands[0])
6303 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6304 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6305 FAIL;
6306
6307 operands[3]
6308 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6309 INTVAL (operands[2]),
6310 force_reg (SImode, XEXP (operands[1], 0)),
6311 FALSE, operands[1], &offset);
6312 })
6313
6314 (define_expand "store_multiple"
6315 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6316 (match_operand:SI 1 "" ""))
6317 (use (match_operand:SI 2 "" ""))])]
6318 "TARGET_32BIT"
6319 {
6320 HOST_WIDE_INT offset = 0;
6321
6322 /* Support only fixed point registers. */
6323 if (!CONST_INT_P (operands[2])
6324 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6325 || INTVAL (operands[2]) < 2
6326 || !REG_P (operands[1])
6327 || !MEM_P (operands[0])
6328 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6329 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6330 FAIL;
6331
6332 operands[3]
6333 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6334 INTVAL (operands[2]),
6335 force_reg (SImode, XEXP (operands[0], 0)),
6336 FALSE, operands[0], &offset);
6337 })
6338
6339
6340 (define_expand "setmemsi"
6341 [(match_operand:BLK 0 "general_operand")
6342 (match_operand:SI 1 "const_int_operand")
6343 (match_operand:SI 2 "const_int_operand")
6344 (match_operand:SI 3 "const_int_operand")]
6345 "TARGET_32BIT"
6346 {
6347 if (arm_gen_setmem (operands))
6348 DONE;
6349
6350 FAIL;
6351 })
6352
6353
6354 ;; Move a block of memory if it is word-aligned and MORE than 2 words long.
6355 ;; We could let this apply to smaller blocks, but it clobbers so
6356 ;; many registers that there is then probably a better way.
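
;; The generated copies are built around load/store-multiple, schematically
;; (illustrative register choice and chunk size):
;;	ldmia	r1!, {r4, r5, r6}
;;	stmia	r0!, {r4, r5, r6}
;; repeated until the block is exhausted.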
6357
6358 (define_expand "cpymemqi"
6359 [(match_operand:BLK 0 "general_operand")
6360 (match_operand:BLK 1 "general_operand")
6361 (match_operand:SI 2 "const_int_operand")
6362 (match_operand:SI 3 "const_int_operand")]
6363 ""
6364 "
6365 if (TARGET_32BIT)
6366 {
6367 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
6368 && !optimize_function_for_size_p (cfun))
6369 {
6370 if (gen_cpymem_ldrd_strd (operands))
6371 DONE;
6372 FAIL;
6373 }
6374
6375 if (arm_gen_cpymemqi (operands))
6376 DONE;
6377 FAIL;
6378 }
6379 else /* TARGET_THUMB1 */
6380 {
6381 if ( INTVAL (operands[3]) != 4
6382 || INTVAL (operands[2]) > 48)
6383 FAIL;
6384
6385 thumb_expand_cpymemqi (operands);
6386 DONE;
6387 }
6388 "
6389 )
6390 \f
6391
6392 ;; Compare & branch insns
6393 ;; The range calculations are as follows:
6394 ;; For forward branches, the address calculation returns the address of
6395 ;; the next instruction. This is 2 beyond the branch instruction.
6396 ;; For backward branches, the address calculation returns the address of
6397 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6398 ;; instruction for the shortest sequence, and 4 before the branch instruction
6399 ;; if we have to jump around an unconditional branch.
6400 ;; To the basic branch range the PC offset must be added (this is +4).
6401 ;; So for forward branches we have
6402 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6403 ;; And for backward branches we have
6404 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6405 ;;
6406 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6407 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
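
;; Worked example (from the formulas above) for a short backward 'b<cond>':
;; neg_range - neg_base_offs + pc_offs = -256 - (-2) + 4 = -250, and for a
;; forward 'b<cond>': 254 - 2 + 4 = 256, matching the -250/256 bounds used
;; in the length calculations below.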
6408
6409 (define_expand "cbranchsi4"
6410 [(set (pc) (if_then_else
6411 (match_operator 0 "expandable_comparison_operator"
6412 [(match_operand:SI 1 "s_register_operand")
6413 (match_operand:SI 2 "nonmemory_operand")])
6414 (label_ref (match_operand 3 "" ""))
6415 (pc)))]
6416 "TARGET_EITHER"
6417 "
6418 if (!TARGET_THUMB1)
6419 {
6420 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6421 FAIL;
6422 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6423 operands[3]));
6424 DONE;
6425 }
6426 if (thumb1_cmpneg_operand (operands[2], SImode))
6427 {
6428 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6429 operands[3], operands[0]));
6430 DONE;
6431 }
6432 if (!thumb1_cmp_operand (operands[2], SImode))
6433 operands[2] = force_reg (SImode, operands[2]);
6434 ")
6435
6436 (define_expand "cbranchsf4"
6437 [(set (pc) (if_then_else
6438 (match_operator 0 "expandable_comparison_operator"
6439 [(match_operand:SF 1 "s_register_operand")
6440 (match_operand:SF 2 "vfp_compare_operand")])
6441 (label_ref (match_operand 3 "" ""))
6442 (pc)))]
6443 "TARGET_32BIT && TARGET_HARD_FLOAT"
6444 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6445 operands[3])); DONE;"
6446 )
6447
6448 (define_expand "cbranchdf4"
6449 [(set (pc) (if_then_else
6450 (match_operator 0 "expandable_comparison_operator"
6451 [(match_operand:DF 1 "s_register_operand")
6452 (match_operand:DF 2 "vfp_compare_operand")])
6453 (label_ref (match_operand 3 "" ""))
6454 (pc)))]
6455 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6456 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6457 operands[3])); DONE;"
6458 )
6459
6460 (define_expand "cbranchdi4"
6461 [(set (pc) (if_then_else
6462 (match_operator 0 "expandable_comparison_operator"
6463 [(match_operand:DI 1 "s_register_operand")
6464 (match_operand:DI 2 "cmpdi_operand")])
6465 (label_ref (match_operand 3 "" ""))
6466 (pc)))]
6467 "TARGET_32BIT"
6468 "{
6469 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6470 FAIL;
6471 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6472 operands[3]));
6473 DONE;
6474 }"
6475 )
6476
6477 ;; Comparison and test insns
6478
6479 (define_insn "*arm_cmpsi_insn"
6480 [(set (reg:CC CC_REGNUM)
6481 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
6482 (match_operand:SI 1 "arm_add_operand" "Py,r,r,I,L")))]
6483 "TARGET_32BIT"
6484 "@
6485 cmp%?\\t%0, %1
6486 cmp%?\\t%0, %1
6487 cmp%?\\t%0, %1
6488 cmp%?\\t%0, %1
6489 cmn%?\\t%0, #%n1"
6490 [(set_attr "conds" "set")
6491 (set_attr "arch" "t2,t2,any,any,any")
6492 (set_attr "length" "2,2,4,4,4")
6493 (set_attr "predicable" "yes")
6494 (set_attr "predicable_short_it" "yes,yes,yes,no,no")
6495 (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
6496 )
6497
6498 (define_insn "*cmpsi_shiftsi"
6499 [(set (reg:CC CC_REGNUM)
6500 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r,r")
6501 (match_operator:SI 3 "shift_operator"
6502 [(match_operand:SI 1 "s_register_operand" "r,r,r")
6503 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])))]
6504 "TARGET_32BIT"
6505 "cmp\\t%0, %1%S3"
6506 [(set_attr "conds" "set")
6507 (set_attr "shift" "1")
6508 (set_attr "arch" "32,a,a")
6509 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
6510
6511 (define_insn "*cmpsi_shiftsi_swp"
6512 [(set (reg:CC_SWP CC_REGNUM)
6513 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
6514 [(match_operand:SI 1 "s_register_operand" "r,r,r")
6515 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])
6516 (match_operand:SI 0 "s_register_operand" "r,r,r")))]
6517 "TARGET_32BIT"
6518 "cmp%?\\t%0, %1%S3"
6519 [(set_attr "conds" "set")
6520 (set_attr "shift" "1")
6521 (set_attr "arch" "32,a,a")
6522 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
6523
6524 (define_insn "*arm_cmpsi_negshiftsi_si"
6525 [(set (reg:CC_Z CC_REGNUM)
6526 (compare:CC_Z
6527 (neg:SI (match_operator:SI 1 "shift_operator"
6528 [(match_operand:SI 2 "s_register_operand" "r")
6529 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
6530 (match_operand:SI 0 "s_register_operand" "r")))]
6531 "TARGET_ARM"
6532 "cmn%?\\t%0, %2%S1"
6533 [(set_attr "conds" "set")
6534 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
6535 (const_string "alus_shift_imm")
6536 (const_string "alus_shift_reg")))
6537 (set_attr "predicable" "yes")]
6538 )
6539
6540 ;; DImode comparisons. The generic code generates branches that
6541 ;; if-conversion cannot reduce to a conditional compare, so we do
6542 ;; that directly.
6543
6544 (define_insn "*arm_cmpdi_insn"
6545 [(set (reg:CC_NCV CC_REGNUM)
6546 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
6547 (match_operand:DI 1 "arm_di_operand" "rDi")))
6548 (clobber (match_scratch:SI 2 "=r"))]
6549 "TARGET_32BIT"
6550 "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
6551 [(set_attr "conds" "set")
6552 (set_attr "length" "8")
6553 (set_attr "type" "multiple")]
6554 )
6555
6556 (define_insn_and_split "*arm_cmpdi_unsigned"
6557 [(set (reg:CC_CZ CC_REGNUM)
6558 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "l,r,r,r")
6559 (match_operand:DI 1 "arm_di_operand" "Py,r,Di,rDi")))]
6560
6561 "TARGET_32BIT"
6562 "#" ; "cmp\\t%R0, %R1\;it eq\;cmpeq\\t%Q0, %Q1"
6563 "&& reload_completed"
6564 [(set (reg:CC CC_REGNUM)
6565 (compare:CC (match_dup 2) (match_dup 3)))
6566 (cond_exec (eq:SI (reg:CC CC_REGNUM) (const_int 0))
6567 (set (reg:CC CC_REGNUM)
6568 (compare:CC (match_dup 0) (match_dup 1))))]
6569 {
6570 operands[2] = gen_highpart (SImode, operands[0]);
6571 operands[0] = gen_lowpart (SImode, operands[0]);
6572 if (CONST_INT_P (operands[1]))
6573 operands[3] = gen_highpart_mode (SImode, DImode, operands[1]);
6574 else
6575 operands[3] = gen_highpart (SImode, operands[1]);
6576 operands[1] = gen_lowpart (SImode, operands[1]);
6577 }
6578 [(set_attr "conds" "set")
6579 (set_attr "enabled_for_short_it" "yes,yes,no,*")
6580 (set_attr "arch" "t2,t2,t2,a")
6581 (set_attr "length" "6,6,10,8")
6582 (set_attr "type" "multiple")]
6583 )
6584
6585 (define_insn "*arm_cmpdi_zero"
6586 [(set (reg:CC_Z CC_REGNUM)
6587 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
6588 (const_int 0)))
6589 (clobber (match_scratch:SI 1 "=r"))]
6590 "TARGET_32BIT"
6591 "orrs%?\\t%1, %Q0, %R0"
6592 [(set_attr "conds" "set")
6593 (set_attr "type" "logics_reg")]
6594 )
6595
6596 ; This insn allows redundant compares to be removed by cse; nothing should
6597 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
6598 ; is deleted later on. The match_dup will match the mode here, so that
6599 ; mode changes of the condition codes aren't lost by this even though we don't
6600 ; specify what they are.
6601
6602 (define_insn "*deleted_compare"
6603 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
6604 "TARGET_32BIT"
6605 "\\t%@ deleted compare"
6606 [(set_attr "conds" "set")
6607 (set_attr "length" "0")
6608 (set_attr "type" "no_insn")]
6609 )
6610
6611 \f
6612 ;; Conditional branch insns
6613
6614 (define_expand "cbranch_cc"
6615 [(set (pc)
6616 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
6617 (match_operand 2 "" "")])
6618 (label_ref (match_operand 3 "" ""))
6619 (pc)))]
6620 "TARGET_32BIT"
6621 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
6622 operands[1], operands[2], NULL_RTX);
6623 operands[2] = const0_rtx;"
6624 )
6625
6626 ;;
6627 ;; Patterns to match conditional branch insns.
6628 ;;
6629
6630 (define_insn "arm_cond_branch"
6631 [(set (pc)
6632 (if_then_else (match_operator 1 "arm_comparison_operator"
6633 [(match_operand 2 "cc_register" "") (const_int 0)])
6634 (label_ref (match_operand 0 "" ""))
6635 (pc)))]
6636 "TARGET_32BIT"
6637 "*
6638 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
6639 {
6640 arm_ccfsm_state += 2;
6641 return \"\";
6642 }
6643 return \"b%d1\\t%l0\";
6644 "
6645 [(set_attr "conds" "use")
6646 (set_attr "type" "branch")
6647 (set (attr "length")
6648 (if_then_else
6649 (and (match_test "TARGET_THUMB2")
6650 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
6651 (le (minus (match_dup 0) (pc)) (const_int 256))))
6652 (const_int 2)
6653 (const_int 4)))]
6654 )
6655
6656 (define_insn "*arm_cond_branch_reversed"
6657 [(set (pc)
6658 (if_then_else (match_operator 1 "arm_comparison_operator"
6659 [(match_operand 2 "cc_register" "") (const_int 0)])
6660 (pc)
6661 (label_ref (match_operand 0 "" ""))))]
6662 "TARGET_32BIT"
6663 "*
6664 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
6665 {
6666 arm_ccfsm_state += 2;
6667 return \"\";
6668 }
6669 return \"b%D1\\t%l0\";
6670 "
6671 [(set_attr "conds" "use")
6672 (set_attr "type" "branch")
6673 (set (attr "length")
6674 (if_then_else
6675 (and (match_test "TARGET_THUMB2")
6676 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
6677 (le (minus (match_dup 0) (pc)) (const_int 256))))
6678 (const_int 2)
6679 (const_int 4)))]
6680 )
6681
6682 \f
6683
6684 ; scc insns
6685
6686 (define_expand "cstore_cc"
6687 [(set (match_operand:SI 0 "s_register_operand")
6688 (match_operator:SI 1 "" [(match_operand 2 "" "")
6689 (match_operand 3 "" "")]))]
6690 "TARGET_32BIT"
6691 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
6692 operands[2], operands[3], NULL_RTX);
6693 operands[3] = const0_rtx;"
6694 )
6695
6696 (define_insn_and_split "*mov_scc"
6697 [(set (match_operand:SI 0 "s_register_operand" "=r")
6698 (match_operator:SI 1 "arm_comparison_operator_mode"
6699 [(match_operand 2 "cc_register" "") (const_int 0)]))]
6700 "TARGET_ARM"
6701 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
6702 "TARGET_ARM"
6703 [(set (match_dup 0)
6704 (if_then_else:SI (match_dup 1)
6705 (const_int 1)
6706 (const_int 0)))]
6707 ""
6708 [(set_attr "conds" "use")
6709 (set_attr "length" "8")
6710 (set_attr "type" "multiple")]
6711 )
6712
6713 (define_insn_and_split "*mov_negscc"
6714 [(set (match_operand:SI 0 "s_register_operand" "=r")
6715 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
6716 [(match_operand 2 "cc_register" "") (const_int 0)])))]
6717 "TARGET_ARM"
6718 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
6719 "TARGET_ARM"
6720 [(set (match_dup 0)
6721 (if_then_else:SI (match_dup 1)
6722 (match_dup 3)
6723 (const_int 0)))]
6724 {
6725 operands[3] = GEN_INT (~0);
6726 }
6727 [(set_attr "conds" "use")
6728 (set_attr "length" "8")
6729 (set_attr "type" "multiple")]
6730 )
6731
6732 (define_insn_and_split "*mov_notscc"
6733 [(set (match_operand:SI 0 "s_register_operand" "=r")
6734 (not:SI (match_operator:SI 1 "arm_comparison_operator"
6735 [(match_operand 2 "cc_register" "") (const_int 0)])))]
6736 "TARGET_ARM"
6737 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
6738 "TARGET_ARM"
6739 [(set (match_dup 0)
6740 (if_then_else:SI (match_dup 1)
6741 (match_dup 3)
6742 (match_dup 4)))]
6743 {
6744 operands[3] = GEN_INT (~1);
6745 operands[4] = GEN_INT (~0);
6746 }
6747 [(set_attr "conds" "use")
6748 (set_attr "length" "8")
6749 (set_attr "type" "multiple")]
6750 )
6751
6752 (define_expand "cstoresi4"
6753 [(set (match_operand:SI 0 "s_register_operand")
6754 (match_operator:SI 1 "expandable_comparison_operator"
6755 [(match_operand:SI 2 "s_register_operand")
6756 (match_operand:SI 3 "reg_or_int_operand")]))]
6757 "TARGET_32BIT || TARGET_THUMB1"
6758 "{
6759 rtx op3, scratch, scratch2;
6760
6761 if (!TARGET_THUMB1)
6762 {
6763 if (!arm_add_operand (operands[3], SImode))
6764 operands[3] = force_reg (SImode, operands[3]);
6765 emit_insn (gen_cstore_cc (operands[0], operands[1],
6766 operands[2], operands[3]));
6767 DONE;
6768 }
6769
6770 if (operands[3] == const0_rtx)
6771 {
6772 switch (GET_CODE (operands[1]))
6773 {
6774 case EQ:
6775 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
6776 break;
6777
6778 case NE:
6779 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
6780 break;
6781
6782 case LE:
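/* x <= 0 exactly when the sign bit of (x | (x - 1)) is set: x < 0 sets
   it via x itself, x == 0 sets it via x - 1 == -1, and x > 0 leaves it
   clear, so a logical shift right by 31 gives the result.  */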
6783 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
6784 NULL_RTX, 0, OPTAB_WIDEN);
6785 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
6786 NULL_RTX, 0, OPTAB_WIDEN);
6787 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
6788 operands[0], 1, OPTAB_WIDEN);
6789 break;
6790
6791 case GE:
6792 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
6793 NULL_RTX, 1);
6794 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
6795 operands[0], 1, OPTAB_WIDEN);
6796 break;
6797
6798 case GT:
6799 scratch = expand_binop (SImode, ashr_optab, operands[2],
6800 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
6801 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
6802 NULL_RTX, 0, OPTAB_WIDEN);
6803 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
6804 0, OPTAB_WIDEN);
6805 break;
6806
6807 /* LT is handled by generic code. No need for unsigned with 0. */
6808 default:
6809 FAIL;
6810 }
6811 DONE;
6812 }
6813
6814 switch (GET_CODE (operands[1]))
6815 {
6816 case EQ:
6817 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
6818 NULL_RTX, 0, OPTAB_WIDEN);
6819 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
6820 break;
6821
6822 case NE:
6823 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
6824 NULL_RTX, 0, OPTAB_WIDEN);
6825 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
6826 break;
6827
6828 case LE:
6829 op3 = force_reg (SImode, operands[3]);
6830
6831 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
6832 NULL_RTX, 1, OPTAB_WIDEN);
6833 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
6834 NULL_RTX, 0, OPTAB_WIDEN);
6835 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
6836 op3, operands[2]));
6837 break;
6838
6839 case GE:
6840 op3 = operands[3];
6841 if (!thumb1_cmp_operand (op3, SImode))
6842 op3 = force_reg (SImode, op3);
6843 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
6844 NULL_RTX, 0, OPTAB_WIDEN);
6845 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
6846 NULL_RTX, 1, OPTAB_WIDEN);
6847 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
6848 operands[2], op3));
6849 break;
6850
6851 case LEU:
6852 op3 = force_reg (SImode, operands[3]);
6853 scratch = force_reg (SImode, const0_rtx);
6854 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
6855 op3, operands[2]));
6856 break;
6857
6858 case GEU:
6859 op3 = operands[3];
6860 if (!thumb1_cmp_operand (op3, SImode))
6861 op3 = force_reg (SImode, op3);
6862 scratch = force_reg (SImode, const0_rtx);
6863 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
6864 operands[2], op3));
6865 break;
6866
6867 case LTU:
6868 op3 = operands[3];
6869 if (!thumb1_cmp_operand (op3, SImode))
6870 op3 = force_reg (SImode, op3);
6871 scratch = gen_reg_rtx (SImode);
6872 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
6873 break;
6874
6875 case GTU:
6876 op3 = force_reg (SImode, operands[3]);
6877 scratch = gen_reg_rtx (SImode);
6878 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
6879 break;
6880
6881 /* No good sequences for GT, LT. */
6882 default:
6883 FAIL;
6884 }
6885 DONE;
6886 }")
6887
6888 (define_expand "cstorehf4"
6889 [(set (match_operand:SI 0 "s_register_operand")
6890 (match_operator:SI 1 "expandable_comparison_operator"
6891 [(match_operand:HF 2 "s_register_operand")
6892 (match_operand:HF 3 "vfp_compare_operand")]))]
6893 "TARGET_VFP_FP16INST"
6894 {
6895 if (!arm_validize_comparison (&operands[1],
6896 &operands[2],
6897 &operands[3]))
6898 FAIL;
6899
6900 emit_insn (gen_cstore_cc (operands[0], operands[1],
6901 operands[2], operands[3]));
6902 DONE;
6903 }
6904 )
6905
6906 (define_expand "cstoresf4"
6907 [(set (match_operand:SI 0 "s_register_operand")
6908 (match_operator:SI 1 "expandable_comparison_operator"
6909 [(match_operand:SF 2 "s_register_operand")
6910 (match_operand:SF 3 "vfp_compare_operand")]))]
6911 "TARGET_32BIT && TARGET_HARD_FLOAT"
6912 "emit_insn (gen_cstore_cc (operands[0], operands[1],
6913 operands[2], operands[3])); DONE;"
6914 )
6915
6916 (define_expand "cstoredf4"
6917 [(set (match_operand:SI 0 "s_register_operand")
6918 (match_operator:SI 1 "expandable_comparison_operator"
6919 [(match_operand:DF 2 "s_register_operand")
6920 (match_operand:DF 3 "vfp_compare_operand")]))]
6921 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6922 "emit_insn (gen_cstore_cc (operands[0], operands[1],
6923 operands[2], operands[3])); DONE;"
6924 )
6925
6926 (define_expand "cstoredi4"
6927 [(set (match_operand:SI 0 "s_register_operand")
6928 (match_operator:SI 1 "expandable_comparison_operator"
6929 [(match_operand:DI 2 "s_register_operand")
6930 (match_operand:DI 3 "cmpdi_operand")]))]
6931 "TARGET_32BIT"
6932 "{
6933 if (!arm_validize_comparison (&operands[1],
6934 &operands[2],
6935 &operands[3]))
6936 FAIL;
6937 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
6938 operands[3]));
6939 DONE;
6940 }"
6941 )
6942
6943 \f
6944 ;; Conditional move insns
6945
6946 (define_expand "movsicc"
6947 [(set (match_operand:SI 0 "s_register_operand")
6948 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
6949 (match_operand:SI 2 "arm_not_operand")
6950 (match_operand:SI 3 "arm_not_operand")))]
6951 "TARGET_32BIT"
6952 "
6953 {
6954 enum rtx_code code;
6955 rtx ccreg;
6956
6957 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6958 &XEXP (operands[1], 1)))
6959 FAIL;
6960
6961 code = GET_CODE (operands[1]);
6962 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6963 XEXP (operands[1], 1), NULL_RTX);
6964 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6965 }"
6966 )
6967
6968 (define_expand "movhfcc"
6969 [(set (match_operand:HF 0 "s_register_operand")
6970 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
6971 (match_operand:HF 2 "s_register_operand")
6972 (match_operand:HF 3 "s_register_operand")))]
6973 "TARGET_VFP_FP16INST"
6974 "
6975 {
6976 enum rtx_code code = GET_CODE (operands[1]);
6977 rtx ccreg;
6978
6979 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6980 &XEXP (operands[1], 1)))
6981 FAIL;
6982
6983 code = GET_CODE (operands[1]);
6984 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6985 XEXP (operands[1], 1), NULL_RTX);
6986 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6987 }"
6988 )
6989
6990 (define_expand "movsfcc"
6991 [(set (match_operand:SF 0 "s_register_operand")
6992 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
6993 (match_operand:SF 2 "s_register_operand")
6994 (match_operand:SF 3 "s_register_operand")))]
6995 "TARGET_32BIT && TARGET_HARD_FLOAT"
6996 "
6997 {
6998 enum rtx_code code = GET_CODE (operands[1]);
6999 rtx ccreg;
7000
7001 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7002 &XEXP (operands[1], 1)))
7003 FAIL;
7004
7005 code = GET_CODE (operands[1]);
7006 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7007 XEXP (operands[1], 1), NULL_RTX);
7008 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7009 }"
7010 )
7011
7012 (define_expand "movdfcc"
7013 [(set (match_operand:DF 0 "s_register_operand")
7014 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
7015 (match_operand:DF 2 "s_register_operand")
7016 (match_operand:DF 3 "s_register_operand")))]
7017 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
7018 "
7019 {
7020 enum rtx_code code = GET_CODE (operands[1]);
7021 rtx ccreg;
7022
7023 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7024 &XEXP (operands[1], 1)))
7025 FAIL;
7026 code = GET_CODE (operands[1]);
7027 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7028 XEXP (operands[1], 1), NULL_RTX);
7029 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7030 }"
7031 )
7032
7033 (define_insn "*cmov<mode>"
7034 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
7035 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
7036 [(match_operand 2 "cc_register" "") (const_int 0)])
7037 (match_operand:SDF 3 "s_register_operand"
7038 "<F_constraint>")
7039 (match_operand:SDF 4 "s_register_operand"
7040 "<F_constraint>")))]
7041 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
7042 "*
7043 {
7044 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7045 switch (code)
7046 {
7047 case ARM_GE:
7048 case ARM_GT:
7049 case ARM_EQ:
7050 case ARM_VS:
7051 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
7052 case ARM_LT:
7053 case ARM_LE:
7054 case ARM_NE:
7055 case ARM_VC:
7056 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
7057 default:
7058 gcc_unreachable ();
7059 }
7060 return \"\";
7061 }"
7062 [(set_attr "conds" "use")
7063 (set_attr "type" "fcsel")]
7064 )
7065
7066 (define_insn "*cmovhf"
7067 [(set (match_operand:HF 0 "s_register_operand" "=t")
7068 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
7069 [(match_operand 2 "cc_register" "") (const_int 0)])
7070 (match_operand:HF 3 "s_register_operand" "t")
7071 (match_operand:HF 4 "s_register_operand" "t")))]
7072 "TARGET_VFP_FP16INST"
7073 "*
7074 {
7075 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7076 switch (code)
7077 {
7078 case ARM_GE:
7079 case ARM_GT:
7080 case ARM_EQ:
7081 case ARM_VS:
7082 return \"vsel%d1.f16\\t%0, %3, %4\";
7083 case ARM_LT:
7084 case ARM_LE:
7085 case ARM_NE:
7086 case ARM_VC:
7087 return \"vsel%D1.f16\\t%0, %4, %3\";
7088 default:
7089 gcc_unreachable ();
7090 }
7091 return \"\";
7092 }"
7093 [(set_attr "conds" "use")
7094 (set_attr "type" "fcsel")]
7095 )
7096
7097 (define_insn_and_split "*movsicc_insn"
7098 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7099 (if_then_else:SI
7100 (match_operator 3 "arm_comparison_operator"
7101 [(match_operand 4 "cc_register" "") (const_int 0)])
7102 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7103 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7104 "TARGET_ARM"
7105 "@
7106 mov%D3\\t%0, %2
7107 mvn%D3\\t%0, #%B2
7108 mov%d3\\t%0, %1
7109 mvn%d3\\t%0, #%B1
7110 #
7111 #
7112 #
7113 #"
7114 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7115 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7116 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7117 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7118 "&& reload_completed"
7119 [(const_int 0)]
7120 {
7121 enum rtx_code rev_code;
7122 machine_mode mode;
7123 rtx rev_cond;
7124
7125 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7126 operands[3],
7127 gen_rtx_SET (operands[0], operands[1])));
7128
7129 rev_code = GET_CODE (operands[3]);
7130 mode = GET_MODE (operands[4]);
7131 if (mode == CCFPmode || mode == CCFPEmode)
7132 rev_code = reverse_condition_maybe_unordered (rev_code);
7133 else
7134 rev_code = reverse_condition (rev_code);
7135
7136 rev_cond = gen_rtx_fmt_ee (rev_code,
7137 VOIDmode,
7138 operands[4],
7139 const0_rtx);
7140 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7141 rev_cond,
7142 gen_rtx_SET (operands[0], operands[2])));
7143 DONE;
7144 }
7145 [(set_attr "length" "4,4,4,4,8,8,8,8")
7146 (set_attr "conds" "use")
7147 (set_attr_alternative "type"
7148 [(if_then_else (match_operand 2 "const_int_operand" "")
7149 (const_string "mov_imm")
7150 (const_string "mov_reg"))
7151 (const_string "mvn_imm")
7152 (if_then_else (match_operand 1 "const_int_operand" "")
7153 (const_string "mov_imm")
7154 (const_string "mov_reg"))
7155 (const_string "mvn_imm")
7156 (const_string "multiple")
7157 (const_string "multiple")
7158 (const_string "multiple")
7159 (const_string "multiple")])]
7160 )
7161
7162 (define_insn "*movsfcc_soft_insn"
7163 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7164 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7165 [(match_operand 4 "cc_register" "") (const_int 0)])
7166 (match_operand:SF 1 "s_register_operand" "0,r")
7167 (match_operand:SF 2 "s_register_operand" "r,0")))]
7168 "TARGET_ARM && TARGET_SOFT_FLOAT"
7169 "@
7170 mov%D3\\t%0, %2
7171 mov%d3\\t%0, %1"
7172 [(set_attr "conds" "use")
7173 (set_attr "type" "mov_reg")]
7174 )
7175
7176 \f
7177 ;; Jump and linkage insns
7178
7179 (define_expand "jump"
7180 [(set (pc)
7181 (label_ref (match_operand 0 "" "")))]
7182 "TARGET_EITHER"
7183 ""
7184 )
7185
7186 (define_insn "*arm_jump"
7187 [(set (pc)
7188 (label_ref (match_operand 0 "" "")))]
7189 "TARGET_32BIT"
7190 "*
7191 {
7192 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7193 {
7194 arm_ccfsm_state += 2;
7195 return \"\";
7196 }
7197 return \"b%?\\t%l0\";
7198 }
7199 "
7200 [(set_attr "predicable" "yes")
7201 (set (attr "length")
7202 (if_then_else
7203 (and (match_test "TARGET_THUMB2")
7204 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7205 (le (minus (match_dup 0) (pc)) (const_int 2048))))
7206 (const_int 2)
7207 (const_int 4)))
7208 (set_attr "type" "branch")]
7209 )
7210
7211 (define_expand "call"
7212 [(parallel [(call (match_operand 0 "memory_operand")
7213 (match_operand 1 "general_operand"))
7214 (use (match_operand 2 "" ""))
7215 (clobber (reg:SI LR_REGNUM))])]
7216 "TARGET_EITHER"
7217 "
7218 {
7219 rtx callee, pat;
7220 tree addr = MEM_EXPR (operands[0]);
7221
7222 /* In an untyped call, we can get NULL for operand 2. */
7223 if (operands[2] == NULL_RTX)
7224 operands[2] = const0_rtx;
7225
7226 /* Decide if we should generate indirect calls by loading the
7227 32-bit address of the callee into a register before performing the
7228 branch and link. */
7229 callee = XEXP (operands[0], 0);
7230 if (GET_CODE (callee) == SYMBOL_REF
7231 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7232 : !REG_P (callee))
7233 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7234
7235 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
7236 /* Indirect call: set r9 with FDPIC value of callee. */
7237 XEXP (operands[0], 0)
7238 = arm_load_function_descriptor (XEXP (operands[0], 0));
7239
7240 if (detect_cmse_nonsecure_call (addr))
7241 {
7242 pat = gen_nonsecure_call_internal (operands[0], operands[1],
7243 operands[2]);
7244 emit_call_insn (pat);
7245 }
7246 else
7247 {
7248 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7249 arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
7250 }
7251
7252 /* Restore FDPIC register (r9) after call. */
7253 if (TARGET_FDPIC)
7254 {
7255 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7256 rtx initial_fdpic_reg
7257 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7258
7259 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7260 initial_fdpic_reg));
7261 }
7262
7263 DONE;
7264 }"
7265 )
7266
7267 (define_insn "restore_pic_register_after_call"
7268 [(set (match_operand:SI 0 "s_register_operand" "+r,r")
7269 (unspec:SI [(match_dup 0)
7270 (match_operand:SI 1 "nonimmediate_operand" "r,m")]
7271 UNSPEC_PIC_RESTORE))]
7272 ""
7273 "@
7274 mov\t%0, %1
7275 ldr\t%0, %1"
7276 )
7277
7278 (define_expand "call_internal"
7279 [(parallel [(call (match_operand 0 "memory_operand")
7280 (match_operand 1 "general_operand"))
7281 (use (match_operand 2 "" ""))
7282 (clobber (reg:SI LR_REGNUM))])])
7283
7284 (define_expand "nonsecure_call_internal"
7285 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
7286 UNSPEC_NONSECURE_MEM)
7287 (match_operand 1 "general_operand"))
7288 (use (match_operand 2 "" ""))
7289 (clobber (reg:SI LR_REGNUM))])]
7290 "use_cmse"
7291 "
7292 {
7293 rtx tmp;
7294 tmp = copy_to_suggested_reg (XEXP (operands[0], 0),
7295 gen_rtx_REG (SImode, R4_REGNUM),
7296 SImode);
7297
7298 operands[0] = replace_equiv_address (operands[0], tmp);
7299 }")
7300
7301 (define_insn "*call_reg_armv5"
7302 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7303 (match_operand 1 "" ""))
7304 (use (match_operand 2 "" ""))
7305 (clobber (reg:SI LR_REGNUM))]
7306 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7307 "blx%?\\t%0"
7308 [(set_attr "type" "call")]
7309 )
7310
7311 (define_insn "*call_reg_arm"
7312 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7313 (match_operand 1 "" ""))
7314 (use (match_operand 2 "" ""))
7315 (clobber (reg:SI LR_REGNUM))]
7316 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7317 "*
7318 return output_call (operands);
7319 "
7320 ;; The length is the worst case; normally only two instructions are emitted.
7321 [(set_attr "length" "12")
7322 (set_attr "type" "call")]
7323 )
7324
7325
7326 (define_expand "call_value"
7327 [(parallel [(set (match_operand 0 "" "")
7328 (call (match_operand 1 "memory_operand")
7329 (match_operand 2 "general_operand")))
7330 (use (match_operand 3 "" ""))
7331 (clobber (reg:SI LR_REGNUM))])]
7332 "TARGET_EITHER"
7333 "
7334 {
7335 rtx pat, callee;
7336 tree addr = MEM_EXPR (operands[1]);
7337
7338 /* In an untyped call, we can get NULL for operand 3. */
7339 if (operands[3] == 0)
7340 operands[3] = const0_rtx;
7341
7342 /* Decide if we should generate indirect calls by loading the
7343 32-bit address of the callee into a register before performing the
7344 branch and link. */
7345 callee = XEXP (operands[1], 0);
7346 if (GET_CODE (callee) == SYMBOL_REF
7347 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7348 : !REG_P (callee))
7349 XEXP (operands[1], 0) = force_reg (Pmode, callee);
7350
7351 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
7352 /* Indirect call: set r9 with FDPIC value of callee. */
7353 XEXP (operands[1], 0)
7354 = arm_load_function_descriptor (XEXP (operands[1], 0));
7355
7356 if (detect_cmse_nonsecure_call (addr))
7357 {
7358 pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
7359 operands[2], operands[3]);
7360 emit_call_insn (pat);
7361 }
7362 else
7363 {
7364 pat = gen_call_value_internal (operands[0], operands[1],
7365 operands[2], operands[3]);
7366 arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
7367 }
7368
7369 /* Restore FDPIC register (r9) after call. */
7370 if (TARGET_FDPIC)
7371 {
7372 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7373 rtx initial_fdpic_reg
7374 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7375
7376 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7377 initial_fdpic_reg));
7378 }
7379
7380 DONE;
7381 }"
7382 )
7383
7384 (define_expand "call_value_internal"
7385 [(parallel [(set (match_operand 0 "" "")
7386 (call (match_operand 1 "memory_operand")
7387 (match_operand 2 "general_operand")))
7388 (use (match_operand 3 "" ""))
7389 (clobber (reg:SI LR_REGNUM))])])
7390
7391 (define_expand "nonsecure_call_value_internal"
7392 [(parallel [(set (match_operand 0 "" "")
7393 (call (unspec:SI [(match_operand 1 "memory_operand")]
7394 UNSPEC_NONSECURE_MEM)
7395 (match_operand 2 "general_operand")))
7396 (use (match_operand 3 "" ""))
7397 (clobber (reg:SI LR_REGNUM))])]
7398 "use_cmse"
7399 "
7400 {
7401 rtx tmp;
7402 tmp = copy_to_suggested_reg (XEXP (operands[1], 0),
7403 gen_rtx_REG (SImode, R4_REGNUM),
7404 SImode);
7405
7406 operands[1] = replace_equiv_address (operands[1], tmp);
7407 }")
7408
7409 (define_insn "*call_value_reg_armv5"
7410 [(set (match_operand 0 "" "")
7411 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7412 (match_operand 2 "" "")))
7413 (use (match_operand 3 "" ""))
7414 (clobber (reg:SI LR_REGNUM))]
7415 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7416 "blx%?\\t%1"
7417 [(set_attr "type" "call")]
7418 )
7419
7420 (define_insn "*call_value_reg_arm"
7421 [(set (match_operand 0 "" "")
7422 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7423 (match_operand 2 "" "")))
7424 (use (match_operand 3 "" ""))
7425 (clobber (reg:SI LR_REGNUM))]
7426 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7427 "*
7428 return output_call (&operands[1]);
7429 "
7430 [(set_attr "length" "12")
7431 (set_attr "type" "call")]
7432 )
7433
7434 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
7435 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
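;; (For instance, with %a0 on the symbol "foo" the output is "bl foo" rather
;; than "bl #foo".)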
7436
7437 (define_insn "*call_symbol"
7438 [(call (mem:SI (match_operand:SI 0 "" ""))
7439 (match_operand 1 "" ""))
7440 (use (match_operand 2 "" ""))
7441 (clobber (reg:SI LR_REGNUM))]
7442 "TARGET_32BIT
7443 && !SIBLING_CALL_P (insn)
7444 && (GET_CODE (operands[0]) == SYMBOL_REF)
7445 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
7446 "*
7447 {
7448 rtx op = operands[0];
7449
7450 /* Switch mode now when possible. */
7451 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7452 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7453 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
7454
7455 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
7456 }"
7457 [(set_attr "type" "call")]
7458 )
7459
7460 (define_insn "*call_value_symbol"
7461 [(set (match_operand 0 "" "")
7462 (call (mem:SI (match_operand:SI 1 "" ""))
7463 (match_operand:SI 2 "" "")))
7464 (use (match_operand 3 "" ""))
7465 (clobber (reg:SI LR_REGNUM))]
7466 "TARGET_32BIT
7467 && !SIBLING_CALL_P (insn)
7468 && (GET_CODE (operands[1]) == SYMBOL_REF)
7469 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
7470 "*
7471 {
7472 rtx op = operands[1];
7473
7474 /* Switch mode now when possible. */
7475 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7476 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7477 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
7478
7479 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
7480 }"
7481 [(set_attr "type" "call")]
7482 )
7483
7484 (define_expand "sibcall_internal"
7485 [(parallel [(call (match_operand 0 "memory_operand")
7486 (match_operand 1 "general_operand"))
7487 (return)
7488 (use (match_operand 2 "" ""))])])
7489
7490 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
7491 (define_expand "sibcall"
7492 [(parallel [(call (match_operand 0 "memory_operand")
7493 (match_operand 1 "general_operand"))
7494 (return)
7495 (use (match_operand 2 "" ""))])]
7496 "TARGET_32BIT"
7497 "
7498 {
7499 rtx pat;
7500
7501 if ((!REG_P (XEXP (operands[0], 0))
7502 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
7503 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
7504 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
7505 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
7506
7507 if (operands[2] == NULL_RTX)
7508 operands[2] = const0_rtx;
7509
7510 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
7511 arm_emit_call_insn (pat, operands[0], true);
7512 DONE;
7513 }"
7514 )
7515
7516 (define_expand "sibcall_value_internal"
7517 [(parallel [(set (match_operand 0 "" "")
7518 (call (match_operand 1 "memory_operand")
7519 (match_operand 2 "general_operand")))
7520 (return)
7521 (use (match_operand 3 "" ""))])])
7522
7523 (define_expand "sibcall_value"
7524 [(parallel [(set (match_operand 0 "" "")
7525 (call (match_operand 1 "memory_operand")
7526 (match_operand 2 "general_operand")))
7527 (return)
7528 (use (match_operand 3 "" ""))])]
7529 "TARGET_32BIT"
7530 "
7531 {
7532 rtx pat;
7533
7534 if ((!REG_P (XEXP (operands[1], 0))
7535 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
7536 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
7537 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
7538 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
7539
7540 if (operands[3] == NULL_RTX)
7541 operands[3] = const0_rtx;
7542
7543 pat = gen_sibcall_value_internal (operands[0], operands[1],
7544 operands[2], operands[3]);
7545 arm_emit_call_insn (pat, operands[1], true);
7546 DONE;
7547 }"
7548 )
7549
7550 (define_insn "*sibcall_insn"
7551 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
7552 (match_operand 1 "" ""))
7553 (return)
7554 (use (match_operand 2 "" ""))]
7555 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7556 "*
7557 if (which_alternative == 1)
7558 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
7559 else
7560 {
7561 if (arm_arch5t || arm_arch4t)
7562 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
7563 else
7564 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
7565 }
7566 "
7567 [(set_attr "type" "call")]
7568 )
7569
7570 (define_insn "*sibcall_value_insn"
7571 [(set (match_operand 0 "" "")
7572 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
7573 (match_operand 2 "" "")))
7574 (return)
7575 (use (match_operand 3 "" ""))]
7576 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7577 "*
7578 if (which_alternative == 1)
7579 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
7580 else
7581 {
7582 if (arm_arch5t || arm_arch4t)
7583 return \"bx%?\\t%1\";
7584 else
7585 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
7586 }
7587 "
7588 [(set_attr "type" "call")]
7589 )
7590
7591 (define_expand "<return_str>return"
7592 [(RETURNS)]
7593 "(TARGET_ARM || (TARGET_THUMB2
7594 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
7595 && !IS_STACKALIGN (arm_current_func_type ())))
7596 <return_cond_false>"
7597 "
7598 {
7599 if (TARGET_THUMB2)
7600 {
7601 thumb2_expand_return (<return_simple_p>);
7602 DONE;
7603 }
7604 }
7605 "
7606 )
7607
7608 ;; Often the return insn will be the same as loading from memory, so set the type attribute to load_4.
7609 (define_insn "*arm_return"
7610 [(return)]
7611 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
7612 "*
7613 {
7614 if (arm_ccfsm_state == 2)
7615 {
7616 arm_ccfsm_state += 2;
7617 return \"\";
7618 }
7619 return output_return_instruction (const_true_rtx, true, false, false);
7620 }"
7621 [(set_attr "type" "load_4")
7622 (set_attr "length" "12")
7623 (set_attr "predicable" "yes")]
7624 )
7625
7626 (define_insn "*cond_<return_str>return"
7627 [(set (pc)
7628 (if_then_else (match_operator 0 "arm_comparison_operator"
7629 [(match_operand 1 "cc_register" "") (const_int 0)])
7630 (RETURNS)
7631 (pc)))]
7632 "TARGET_ARM <return_cond_true>"
7633 "*
7634 {
7635 if (arm_ccfsm_state == 2)
7636 {
7637 arm_ccfsm_state += 2;
7638 return \"\";
7639 }
7640 return output_return_instruction (operands[0], true, false,
7641 <return_simple_p>);
7642 }"
7643 [(set_attr "conds" "use")
7644 (set_attr "length" "12")
7645 (set_attr "type" "load_4")]
7646 )
7647
7648 (define_insn "*cond_<return_str>return_inverted"
7649 [(set (pc)
7650 (if_then_else (match_operator 0 "arm_comparison_operator"
7651 [(match_operand 1 "cc_register" "") (const_int 0)])
7652 (pc)
7653 (RETURNS)))]
7654 "TARGET_ARM <return_cond_true>"
7655 "*
7656 {
7657 if (arm_ccfsm_state == 2)
7658 {
7659 arm_ccfsm_state += 2;
7660 return \"\";
7661 }
7662 return output_return_instruction (operands[0], true, true,
7663 <return_simple_p>);
7664 }"
7665 [(set_attr "conds" "use")
7666 (set_attr "length" "12")
7667 (set_attr "type" "load_4")]
7668 )
7669
7670 (define_insn "*arm_simple_return"
7671 [(simple_return)]
7672 "TARGET_ARM"
7673 "*
7674 {
7675 if (arm_ccfsm_state == 2)
7676 {
7677 arm_ccfsm_state += 2;
7678 return \"\";
7679 }
7680 return output_return_instruction (const_true_rtx, true, false, true);
7681 }"
7682 [(set_attr "type" "branch")
7683 (set_attr "length" "4")
7684 (set_attr "predicable" "yes")]
7685 )
7686
7687 ;; Generate a sequence of instructions to determine if the processor is
7688 ;; in 26-bit or 32-bit mode, and return the appropriate return address
7689 ;; mask.
7690
7691 (define_expand "return_addr_mask"
7692 [(set (match_dup 1)
7693 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7694 (const_int 0)))
7695 (set (match_operand:SI 0 "s_register_operand")
7696 (if_then_else:SI (eq (match_dup 1) (const_int 0))
7697 (const_int -1)
7698 (const_int 67108860)))] ; 0x03fffffc
7699 "TARGET_ARM"
7700 "
7701 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
7702 ")
7703
7704 (define_insn "*check_arch2"
7705 [(set (match_operand:CC_NOOV 0 "cc_register" "")
7706 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7707 (const_int 0)))]
7708 "TARGET_ARM"
7709 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
7710 [(set_attr "length" "8")
7711 (set_attr "conds" "set")
7712 (set_attr "type" "multiple")]
7713 )
7714
7715 ;; Call subroutine returning any type.
7716
7717 (define_expand "untyped_call"
7718 [(parallel [(call (match_operand 0 "" "")
7719 (const_int 0))
7720 (match_operand 1 "" "")
7721 (match_operand 2 "" "")])]
7722 "TARGET_EITHER && !TARGET_FDPIC"
7723 "
7724 {
7725 int i;
7726 rtx par = gen_rtx_PARALLEL (VOIDmode,
7727 rtvec_alloc (XVECLEN (operands[2], 0)));
7728 rtx addr = gen_reg_rtx (Pmode);
7729 rtx mem;
7730 int size = 0;
7731
7732 emit_move_insn (addr, XEXP (operands[1], 0));
7733 mem = change_address (operands[1], BLKmode, addr);
7734
7735 for (i = 0; i < XVECLEN (operands[2], 0); i++)
7736 {
7737 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
7738
7739 /* Default code only uses r0 as a return value, but we could
7740 be using anything up to 4 registers. */
7741 if (REGNO (src) == R0_REGNUM)
7742 src = gen_rtx_REG (TImode, R0_REGNUM);
7743
7744 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
7745 GEN_INT (size));
7746 size += GET_MODE_SIZE (GET_MODE (src));
7747 }
7748
7749 emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
7750
7751 size = 0;
7752
7753 for (i = 0; i < XVECLEN (par, 0); i++)
7754 {
7755 HOST_WIDE_INT offset = 0;
7756 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
7757
7758 if (size != 0)
7759 emit_move_insn (addr, plus_constant (Pmode, addr, size));
7760
7761 mem = change_address (mem, GET_MODE (reg), NULL);
7762 if (REGNO (reg) == R0_REGNUM)
7763 {
7764 /* On Thumb we have to use a write-back instruction. */
7765 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
7766 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7767 size = TARGET_ARM ? 16 : 0;
7768 }
7769 else
7770 {
7771 emit_move_insn (mem, reg);
7772 size = GET_MODE_SIZE (GET_MODE (reg));
7773 }
7774 }
7775
7776 /* The optimizer does not know that the call sets the function value
7777 registers we stored in the result block. We avoid problems by
7778 claiming that all hard registers are used and clobbered at this
7779 point. */
7780 emit_insn (gen_blockage ());
7781
7782 DONE;
7783 }"
7784 )
7785
7786 (define_expand "untyped_return"
7787 [(match_operand:BLK 0 "memory_operand")
7788 (match_operand 1 "" "")]
7789 "TARGET_EITHER && !TARGET_FDPIC"
7790 "
7791 {
7792 int i;
7793 rtx addr = gen_reg_rtx (Pmode);
7794 rtx mem;
7795 int size = 0;
7796
7797 emit_move_insn (addr, XEXP (operands[0], 0));
7798 mem = change_address (operands[0], BLKmode, addr);
7799
7800 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7801 {
7802 HOST_WIDE_INT offset = 0;
7803 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
7804
7805 if (size != 0)
7806 emit_move_insn (addr, plus_constant (Pmode, addr, size));
7807
7808 mem = change_address (mem, GET_MODE (reg), NULL);
7809 if (REGNO (reg) == R0_REGNUM)
7810 {
7811 /* On Thumb we have to use a write-back instruction. */
7812 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
7813 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7814 size = TARGET_ARM ? 16 : 0;
7815 }
7816 else
7817 {
7818 emit_move_insn (reg, mem);
7819 size = GET_MODE_SIZE (GET_MODE (reg));
7820 }
7821 }
7822
7823 /* Emit USE insns before the return. */
7824 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7825 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
7826
7827 /* Construct the return. */
7828 expand_naked_return ();
7829
7830 DONE;
7831 }"
7832 )
7833
7834 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
7835 ;; all of memory. This blocks insns from being moved across this point.
7836
7837 (define_insn "blockage"
7838 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
7839 "TARGET_EITHER"
7840 ""
7841 [(set_attr "length" "0")
7842 (set_attr "type" "block")]
7843 )
7844
7845 ;; Since we hard-code r0 here, use the 'o' constraint to prevent
7846 ;; auto-increment addressing modes from being emitted with r0 as the base
7847 ;; register, which would provoke undefined behaviour in the hardware.
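;; (For example, a write-back store such as "str r0, [r0], #4", whose base
;; register is also the register being stored, is unpredictable.)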
7848 (define_insn "probe_stack"
7849 [(set (match_operand:SI 0 "memory_operand" "=o")
7850 (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
7851 "TARGET_32BIT"
7852 "str%?\\tr0, %0"
7853 [(set_attr "type" "store_4")
7854 (set_attr "predicable" "yes")]
7855 )
7856
7857 (define_insn "probe_stack_range"
7858 [(set (match_operand:SI 0 "register_operand" "=r")
7859 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
7860 (match_operand:SI 2 "register_operand" "r")]
7861 VUNSPEC_PROBE_STACK_RANGE))]
7862 "TARGET_32BIT"
7863 {
7864 return output_probe_stack_range (operands[0], operands[2]);
7865 }
7866 [(set_attr "type" "multiple")
7867 (set_attr "conds" "clob")]
7868 )
7869
7870 ;; Named patterns for stack smashing protection.
7871 (define_expand "stack_protect_combined_set"
7872 [(parallel
7873 [(set (match_operand:SI 0 "memory_operand")
7874 (unspec:SI [(match_operand:SI 1 "guard_operand")]
7875 UNSPEC_SP_SET))
7876 (clobber (match_scratch:SI 2 ""))
7877 (clobber (match_scratch:SI 3 ""))])]
7878 ""
7879 ""
7880 )
7881
7882 ;; Use a separate insn from the expand above so that the mem can be kept
7883 ;; outside operand #1 when register allocation happens. This is needed to
7884 ;; stop LRA from trying to reload the guard, since we need to control how
7885 ;; the PIC access is done in the -fpic/-fPIC case (see the COMPUTE_NOW
7886 ;; parameter when calling legitimize_pic_address ()).
7887 (define_insn_and_split "*stack_protect_combined_set_insn"
7888 [(set (match_operand:SI 0 "memory_operand" "=m,m")
7889 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
7890 UNSPEC_SP_SET))
7891 (clobber (match_scratch:SI 2 "=&l,&r"))
7892 (clobber (match_scratch:SI 3 "=&l,&r"))]
7893 ""
7894 "#"
7895 "reload_completed"
7896 [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
7897 UNSPEC_SP_SET))
7898 (clobber (match_dup 2))])]
7899 "
7900 {
7901 if (flag_pic)
7902 {
7903 rtx pic_reg;
7904
7905 if (TARGET_FDPIC)
7906 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7907 else
7908 pic_reg = operands[3];
7909
7910 /* Forces recomputing of GOT base now. */
7911 legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
7912 true /*compute_now*/);
7913 }
7914 else
7915 {
7916 if (address_operand (operands[1], SImode))
7917 operands[2] = operands[1];
7918 else
7919 {
7920 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
7921 emit_move_insn (operands[2], mem);
7922 }
7923 }
7924 }"
7925 [(set_attr "arch" "t1,32")]
7926 )
7927
7928 ;; DO NOT SPLIT THIS INSN. It's important for security reasons that the
7929 ;; canary value does not live beyond the life of this sequence.
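;; (The trailing mov/movs of #0 in the template clears the register that
;; briefly held the canary value.)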
7930 (define_insn "*stack_protect_set_insn"
7931 [(set (match_operand:SI 0 "memory_operand" "=m,m")
7932 (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
7933 UNSPEC_SP_SET))
7934 (clobber (match_dup 1))]
7935 ""
7936 "@
7937 ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
7938 ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
7939 [(set_attr "length" "8,12")
7940 (set_attr "conds" "clob,nocond")
7941 (set_attr "type" "multiple")
7942 (set_attr "arch" "t1,32")]
7943 )
7944
7945 (define_expand "stack_protect_combined_test"
7946 [(parallel
7947 [(set (pc)
7948 (if_then_else
7949 (eq (match_operand:SI 0 "memory_operand")
7950 (unspec:SI [(match_operand:SI 1 "guard_operand")]
7951 UNSPEC_SP_TEST))
7952 (label_ref (match_operand 2))
7953 (pc)))
7954 (clobber (match_scratch:SI 3 ""))
7955 (clobber (match_scratch:SI 4 ""))
7956 (clobber (reg:CC CC_REGNUM))])]
7957 ""
7958 ""
7959 )
7960
7961 ;; Use a separate insn from the expand above so that the mem can be kept
7962 ;; outside operand #1 when register allocation happens. This is needed to
7963 ;; stop LRA from trying to reload the guard, since we need to control how
7964 ;; the PIC access is done in the -fpic/-fPIC case (see the COMPUTE_NOW
7965 ;; parameter when calling legitimize_pic_address ()).
7966 (define_insn_and_split "*stack_protect_combined_test_insn"
7967 [(set (pc)
7968 (if_then_else
7969 (eq (match_operand:SI 0 "memory_operand" "m,m")
7970 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
7971 UNSPEC_SP_TEST))
7972 (label_ref (match_operand 2))
7973 (pc)))
7974 (clobber (match_scratch:SI 3 "=&l,&r"))
7975 (clobber (match_scratch:SI 4 "=&l,&r"))
7976 (clobber (reg:CC CC_REGNUM))]
7977 ""
7978 "#"
7979 "reload_completed"
7980 [(const_int 0)]
7981 {
7982 rtx eq;
7983
7984 if (flag_pic)
7985 {
7986 rtx pic_reg;
7987
7988 if (TARGET_FDPIC)
7989 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7990 else
7991 pic_reg = operands[4];
7992
7993 /* Forces recomputing of GOT base now. */
7994 legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
7995 true /*compute_now*/);
7996 }
7997 else
7998 {
7999 if (address_operand (operands[1], SImode))
8000 operands[3] = operands[1];
8001 else
8002 {
8003 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
8004 emit_move_insn (operands[3], mem);
8005 }
8006 }
8007 if (TARGET_32BIT)
8008 {
8009 emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
8010 operands[3]));
8011 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
8012 eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
8013 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
8014 }
8015 else
8016 {
8017 emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
8018 operands[3]));
8019 eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
8020 emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
8021 operands[2]));
8022 }
8023 DONE;
8024 }
8025 [(set_attr "arch" "t1,32")]
8026 )
8027
8028 (define_insn "arm_stack_protect_test_insn"
8029 [(set (reg:CC_Z CC_REGNUM)
8030 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
8031 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
8032 UNSPEC_SP_TEST)
8033 (const_int 0)))
8034 (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
8035 (clobber (match_dup 2))]
8036 "TARGET_32BIT"
8037 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0"
8038 [(set_attr "length" "8,12")
8039 (set_attr "conds" "set")
8040 (set_attr "type" "multiple")
8041 (set_attr "arch" "t,32")]
8042 )
8043
8044 (define_expand "casesi"
8045 [(match_operand:SI 0 "s_register_operand") ; index to jump on
8046 (match_operand:SI 1 "const_int_operand") ; lower bound
8047 (match_operand:SI 2 "const_int_operand") ; total range
8048 (match_operand:SI 3 "" "") ; table label
8049 (match_operand:SI 4 "" "")] ; Out of range label
8050 "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
8051 "
8052 {
8053 enum insn_code code;
8054 if (operands[1] != const0_rtx)
8055 {
8056 rtx reg = gen_reg_rtx (SImode);
8057
8058 emit_insn (gen_addsi3 (reg, operands[0],
8059 gen_int_mode (-INTVAL (operands[1]),
8060 SImode)));
8061 operands[0] = reg;
8062 }
8063
8064 if (TARGET_ARM)
8065 code = CODE_FOR_arm_casesi_internal;
8066 else if (TARGET_THUMB1)
8067 code = CODE_FOR_thumb1_casesi_internal_pic;
8068 else if (flag_pic)
8069 code = CODE_FOR_thumb2_casesi_internal_pic;
8070 else
8071 code = CODE_FOR_thumb2_casesi_internal;
8072
8073 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8074 operands[2] = force_reg (SImode, operands[2]);
8075
8076 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8077 operands[3], operands[4]));
8078 DONE;
8079 }"
8080 )
8081
8082 ;; The USE in this pattern is needed to tell flow analysis that this is
8083 ;; a CASESI insn. It has no other purpose.
8084 (define_expand "arm_casesi_internal"
8085 [(parallel [(set (pc)
8086 (if_then_else
8087 (leu (match_operand:SI 0 "s_register_operand")
8088 (match_operand:SI 1 "arm_rhs_operand"))
8089 (match_dup 4)
8090 (label_ref:SI (match_operand 3 ""))))
8091 (clobber (reg:CC CC_REGNUM))
8092 (use (label_ref:SI (match_operand 2 "")))])]
8093 "TARGET_ARM"
8094 {
8095 operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
8096 operands[4] = gen_rtx_PLUS (SImode, operands[4],
8097 gen_rtx_LABEL_REF (SImode, operands[2]));
8098 operands[4] = gen_rtx_MEM (SImode, operands[4]);
8099 MEM_READONLY_P (operands[4]) = 1;
8100 MEM_NOTRAP_P (operands[4]) = 1;
8101 })
8102
8103 (define_insn "*arm_casesi_internal"
8104 [(parallel [(set (pc)
8105 (if_then_else
8106 (leu (match_operand:SI 0 "s_register_operand" "r")
8107 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8108 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8109 (label_ref:SI (match_operand 2 "" ""))))
8110 (label_ref:SI (match_operand 3 "" ""))))
8111 (clobber (reg:CC CC_REGNUM))
8112 (use (label_ref:SI (match_dup 2)))])]
8113 "TARGET_ARM"
8114 "*
8115 if (flag_pic)
8116 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8117 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8118 "
8119 [(set_attr "conds" "clob")
8120 (set_attr "length" "12")
8121 (set_attr "type" "multiple")]
8122 )
8123
8124 (define_expand "indirect_jump"
8125 [(set (pc)
8126 (match_operand:SI 0 "s_register_operand"))]
8127 "TARGET_EITHER"
8128 "
8129 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8130 address and use bx. */
8131 if (TARGET_THUMB2)
8132 {
8133 rtx tmp;
8134 tmp = gen_reg_rtx (SImode);
8135 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8136 operands[0] = tmp;
8137 }
8138 "
8139 )
8140
8141 ;; NB Never uses BX.
8142 (define_insn "*arm_indirect_jump"
8143 [(set (pc)
8144 (match_operand:SI 0 "s_register_operand" "r"))]
8145 "TARGET_ARM"
8146 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8147 [(set_attr "predicable" "yes")
8148 (set_attr "type" "branch")]
8149 )
8150
8151 (define_insn "*load_indirect_jump"
8152 [(set (pc)
8153 (match_operand:SI 0 "memory_operand" "m"))]
8154 "TARGET_ARM"
8155 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8156 [(set_attr "type" "load_4")
8157 (set_attr "pool_range" "4096")
8158 (set_attr "neg_pool_range" "4084")
8159 (set_attr "predicable" "yes")]
8160 )
8161
8162 \f
8163 ;; Misc insns
8164
8165 (define_insn "nop"
8166 [(const_int 0)]
8167 "TARGET_EITHER"
8168 "nop"
8169 [(set (attr "length")
8170 (if_then_else (eq_attr "is_thumb" "yes")
8171 (const_int 2)
8172 (const_int 4)))
8173 (set_attr "type" "mov_reg")]
8174 )
8175
8176 (define_insn "trap"
8177 [(trap_if (const_int 1) (const_int 0))]
8178 ""
8179 "*
8180 if (TARGET_ARM)
8181 return \".inst\\t0xe7f000f0\";
8182 else
8183 return \".inst\\t0xdeff\";
8184 "
8185 [(set (attr "length")
8186 (if_then_else (eq_attr "is_thumb" "yes")
8187 (const_int 2)
8188 (const_int 4)))
8189 (set_attr "type" "trap")
8190 (set_attr "conds" "unconditional")]
8191 )
8192
8193 \f
8194 ;; Patterns to allow combination of arithmetic, cond code and shifts
8195
8196 (define_insn "*<arith_shift_insn>_multsi"
8197 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8198 (SHIFTABLE_OPS:SI
8199 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
8200 (match_operand:SI 3 "power_of_two_operand" ""))
8201 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
8202 "TARGET_32BIT"
8203 "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
8204 [(set_attr "predicable" "yes")
8205 (set_attr "shift" "2")
8206 (set_attr "arch" "a,t2")
8207 (set_attr "type" "alu_shift_imm")])
8208
8209 (define_insn "*<arith_shift_insn>_shiftsi"
8210 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8211 (SHIFTABLE_OPS:SI
8212 (match_operator:SI 2 "shift_nomul_operator"
8213 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8214 (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
8215 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
8216 "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
8217 "<arith_shift_insn>%?\\t%0, %1, %3%S2"
8218 [(set_attr "predicable" "yes")
8219 (set_attr "shift" "3")
8220 (set_attr "arch" "a,t2,a")
8221 (set_attr "type" "alu_shift_imm,alu_shift_imm,alu_shift_reg")])
8222
8223 (define_split
8224 [(set (match_operand:SI 0 "s_register_operand" "")
8225 (match_operator:SI 1 "shiftable_operator"
8226 [(match_operator:SI 2 "shiftable_operator"
8227 [(match_operator:SI 3 "shift_operator"
8228 [(match_operand:SI 4 "s_register_operand" "")
8229 (match_operand:SI 5 "reg_or_int_operand" "")])
8230 (match_operand:SI 6 "s_register_operand" "")])
8231 (match_operand:SI 7 "arm_rhs_operand" "")]))
8232 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8233 "TARGET_32BIT"
8234 [(set (match_dup 8)
8235 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8236 (match_dup 6)]))
8237 (set (match_dup 0)
8238 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
8239 "")
8240
8241 (define_insn "*arith_shiftsi_compare0"
8242 [(set (reg:CC_NOOV CC_REGNUM)
8243 (compare:CC_NOOV
8244 (match_operator:SI 1 "shiftable_operator"
8245 [(match_operator:SI 3 "shift_operator"
8246 [(match_operand:SI 4 "s_register_operand" "r,r")
8247 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8248 (match_operand:SI 2 "s_register_operand" "r,r")])
8249 (const_int 0)))
8250 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8251 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8252 (match_dup 2)]))]
8253 "TARGET_32BIT"
8254 "%i1s%?\\t%0, %2, %4%S3"
8255 [(set_attr "conds" "set")
8256 (set_attr "shift" "4")
8257 (set_attr "arch" "32,a")
8258 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8259
8260 (define_insn "*arith_shiftsi_compare0_scratch"
8261 [(set (reg:CC_NOOV CC_REGNUM)
8262 (compare:CC_NOOV
8263 (match_operator:SI 1 "shiftable_operator"
8264 [(match_operator:SI 3 "shift_operator"
8265 [(match_operand:SI 4 "s_register_operand" "r,r")
8266 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8267 (match_operand:SI 2 "s_register_operand" "r,r")])
8268 (const_int 0)))
8269 (clobber (match_scratch:SI 0 "=r,r"))]
8270 "TARGET_32BIT"
8271 "%i1s%?\\t%0, %2, %4%S3"
8272 [(set_attr "conds" "set")
8273 (set_attr "shift" "4")
8274 (set_attr "arch" "32,a")
8275 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8276
8277 (define_insn "*sub_shiftsi"
8278 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8279 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8280 (match_operator:SI 2 "shift_operator"
8281 [(match_operand:SI 3 "s_register_operand" "r,r")
8282 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8283 "TARGET_32BIT"
8284 "sub%?\\t%0, %1, %3%S2"
8285 [(set_attr "predicable" "yes")
8286 (set_attr "predicable_short_it" "no")
8287 (set_attr "shift" "3")
8288 (set_attr "arch" "32,a")
8289 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8290
8291 (define_insn "*sub_shiftsi_compare0"
8292 [(set (reg:CC_NOOV CC_REGNUM)
8293 (compare:CC_NOOV
8294 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8295 (match_operator:SI 2 "shift_operator"
8296 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8297 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8298 (const_int 0)))
8299 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8300 (minus:SI (match_dup 1)
8301 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8302 "TARGET_32BIT"
8303 "subs%?\\t%0, %1, %3%S2"
8304 [(set_attr "conds" "set")
8305 (set_attr "shift" "3")
8306 (set_attr "arch" "32,a,a")
8307 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
8308
8309 (define_insn "*sub_shiftsi_compare0_scratch"
8310 [(set (reg:CC_NOOV CC_REGNUM)
8311 (compare:CC_NOOV
8312 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8313 (match_operator:SI 2 "shift_operator"
8314 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8315 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8316 (const_int 0)))
8317 (clobber (match_scratch:SI 0 "=r,r,r"))]
8318 "TARGET_32BIT"
8319 "subs%?\\t%0, %1, %3%S2"
8320 [(set_attr "conds" "set")
8321 (set_attr "shift" "3")
8322 (set_attr "arch" "32,a,a")
8323 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
8324 \f
8325
8326 (define_insn_and_split "*and_scc"
8327 [(set (match_operand:SI 0 "s_register_operand" "=r")
8328 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8329 [(match_operand 2 "cc_register" "") (const_int 0)])
8330 (match_operand:SI 3 "s_register_operand" "r")))]
8331 "TARGET_ARM"
8332 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
8333 "&& reload_completed"
8334 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
8335 (cond_exec (match_dup 4) (set (match_dup 0)
8336 (and:SI (match_dup 3) (const_int 1))))]
8337 {
8338 machine_mode mode = GET_MODE (operands[2]);
8339 enum rtx_code rc = GET_CODE (operands[1]);
8340
8341 /* Note that operands[4] is the same as operands[1],
8342 but with VOIDmode as the result. */
8343 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8344 if (mode == CCFPmode || mode == CCFPEmode)
8345 rc = reverse_condition_maybe_unordered (rc);
8346 else
8347 rc = reverse_condition (rc);
8348 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8349 }
8350 [(set_attr "conds" "use")
8351 (set_attr "type" "multiple")
8352 (set_attr "length" "8")]
8353 )
8354
8355 (define_insn_and_split "*ior_scc"
8356 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8357 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
8358 [(match_operand 2 "cc_register" "") (const_int 0)])
8359 (match_operand:SI 3 "s_register_operand" "0,?r")))]
8360 "TARGET_ARM"
8361 "@
8362 orr%d1\\t%0, %3, #1
8363 #"
8364 "&& reload_completed
8365 && REGNO (operands [0]) != REGNO (operands[3])"
8366 ;; && which_alternative == 1
8367 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
8368 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
8369 (cond_exec (match_dup 4) (set (match_dup 0)
8370 (ior:SI (match_dup 3) (const_int 1))))]
8371 {
8372 machine_mode mode = GET_MODE (operands[2]);
8373 enum rtx_code rc = GET_CODE (operands[1]);
8374
8375 /* Note that operands[4] is the same as operands[1],
8376 but with VOIDmode as the result. */
8377 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8378 if (mode == CCFPmode || mode == CCFPEmode)
8379 rc = reverse_condition_maybe_unordered (rc);
8380 else
8381 rc = reverse_condition (rc);
8382 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8383 }
8384 [(set_attr "conds" "use")
8385 (set_attr "length" "4,8")
8386 (set_attr "type" "logic_imm,multiple")]
8387 )
8388
8389 ; A series of splitters for the compare_scc pattern below. Note that
8390 ; order is important.
8391 (define_split
8392 [(set (match_operand:SI 0 "s_register_operand" "")
8393 (lt:SI (match_operand:SI 1 "s_register_operand" "")
8394 (const_int 0)))
8395 (clobber (reg:CC CC_REGNUM))]
8396 "TARGET_32BIT && reload_completed"
8397 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
8398
8399 (define_split
8400 [(set (match_operand:SI 0 "s_register_operand" "")
8401 (ge:SI (match_operand:SI 1 "s_register_operand" "")
8402 (const_int 0)))
8403 (clobber (reg:CC CC_REGNUM))]
8404 "TARGET_32BIT && reload_completed"
8405 [(set (match_dup 0) (not:SI (match_dup 1)))
8406 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
8407
8408 (define_split
8409 [(set (match_operand:SI 0 "s_register_operand" "")
8410 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8411 (const_int 0)))
8412 (clobber (reg:CC CC_REGNUM))]
8413 "arm_arch5t && TARGET_32BIT"
8414 [(set (match_dup 0) (clz:SI (match_dup 1)))
8415 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8416 )
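;; (clz returns 32 for a zero input and at most 31 otherwise, so
;; (clz (x)) >> 5 is 1 exactly when x == 0.)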
8417
8418 (define_split
8419 [(set (match_operand:SI 0 "s_register_operand" "")
8420 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8421 (const_int 0)))
8422 (clobber (reg:CC CC_REGNUM))]
8423 "TARGET_32BIT && reload_completed"
8424 [(parallel
8425 [(set (reg:CC CC_REGNUM)
8426 (compare:CC (const_int 1) (match_dup 1)))
8427 (set (match_dup 0)
8428 (minus:SI (const_int 1) (match_dup 1)))])
8429 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
8430 (set (match_dup 0) (const_int 0)))])
8431
8432 (define_split
8433 [(set (match_operand:SI 0 "s_register_operand" "")
8434 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8435 (match_operand:SI 2 "const_int_operand" "")))
8436 (clobber (reg:CC CC_REGNUM))]
8437 "TARGET_32BIT && reload_completed"
8438 [(parallel
8439 [(set (reg:CC CC_REGNUM)
8440 (compare:CC (match_dup 1) (match_dup 2)))
8441 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
8442 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
8443 (set (match_dup 0) (const_int 1)))]
8444 {
8445 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
8446 })
8447
8448 (define_split
8449 [(set (match_operand:SI 0 "s_register_operand" "")
8450 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8451 (match_operand:SI 2 "arm_add_operand" "")))
8452 (clobber (reg:CC CC_REGNUM))]
8453 "TARGET_32BIT && reload_completed"
8454 [(parallel
8455 [(set (reg:CC_NOOV CC_REGNUM)
8456 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
8457 (const_int 0)))
8458 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
8459 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
8460 (set (match_dup 0) (const_int 1)))])
8461
8462 (define_insn_and_split "*compare_scc"
8463 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8464 (match_operator:SI 1 "arm_comparison_operator"
8465 [(match_operand:SI 2 "s_register_operand" "r,r")
8466 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8467 (clobber (reg:CC CC_REGNUM))]
8468 "TARGET_32BIT"
8469 "#"
8470 "&& reload_completed"
8471 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
8472 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
8473 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
8474 {
8475 rtx tmp1;
8476 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
8477 operands[2], operands[3]);
8478 enum rtx_code rc = GET_CODE (operands[1]);
8479
8480 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
8481
8482 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8483 if (mode == CCFPmode || mode == CCFPEmode)
8484 rc = reverse_condition_maybe_unordered (rc);
8485 else
8486 rc = reverse_condition (rc);
8487 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8488 }
8489 [(set_attr "type" "multiple")]
8490 )
8491
8492 ;; Attempt to improve the sequences generated by the compare_scc splitters
8493 ;; so that they do not use conditional execution.
8494
8495 ;; Rd = (eq (reg1) (const_int0)) // ARMv5
8496 ;; clz Rd, reg1
8497 ;; lsr Rd, Rd, #5
8498 (define_peephole2
8499 [(set (reg:CC CC_REGNUM)
8500 (compare:CC (match_operand:SI 1 "register_operand" "")
8501 (const_int 0)))
8502 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8503 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8504 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8505 (set (match_dup 0) (const_int 1)))]
8506 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8507 [(set (match_dup 0) (clz:SI (match_dup 1)))
8508 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8509 )
8510
8511 ;; Rd = (eq (reg1) (const_int0)) // !ARMv5
8512 ;; negs Rd, reg1
8513 ;; adc Rd, Rd, reg1
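;; (negs sets the carry flag only when reg1 == 0, and adc then computes
;; (-reg1) + reg1 + C, so Rd ends up as 1 exactly when reg1 == 0.)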
8514 (define_peephole2
8515 [(set (reg:CC CC_REGNUM)
8516 (compare:CC (match_operand:SI 1 "register_operand" "")
8517 (const_int 0)))
8518 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8519 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8520 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8521 (set (match_dup 0) (const_int 1)))
8522 (match_scratch:SI 2 "r")]
8523 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8524 [(parallel
8525 [(set (reg:CC CC_REGNUM)
8526 (compare:CC (const_int 0) (match_dup 1)))
8527 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
8528 (set (match_dup 0)
8529 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
8530 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
8531 )
8532
8533 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
8534 ;; sub Rd, Reg1, reg2
8535 ;; clz Rd, Rd
8536 ;; lsr Rd, Rd, #5
8537 (define_peephole2
8538 [(set (reg:CC CC_REGNUM)
8539 (compare:CC (match_operand:SI 1 "register_operand" "")
8540 (match_operand:SI 2 "arm_rhs_operand" "")))
8541 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8542 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8543 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8544 (set (match_dup 0) (const_int 1)))]
8545 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
8546 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
8547 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
8548 (set (match_dup 0) (clz:SI (match_dup 0)))
8549 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8550 )
8551
8552
8553 ;; Rd = (eq (reg1) (reg2)) // ! ARMv5 or optimising for size.
8554 ;; sub T1, Reg1, reg2
8555 ;; negs Rd, T1
8556 ;; adc Rd, Rd, T1
8557 (define_peephole2
8558 [(set (reg:CC CC_REGNUM)
8559 (compare:CC (match_operand:SI 1 "register_operand" "")
8560 (match_operand:SI 2 "arm_rhs_operand" "")))
8561 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8562 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8563 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8564 (set (match_dup 0) (const_int 1)))
8565 (match_scratch:SI 3 "r")]
8566 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8567 [(set (match_dup 3) (match_dup 4))
8568 (parallel
8569 [(set (reg:CC CC_REGNUM)
8570 (compare:CC (const_int 0) (match_dup 3)))
8571 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
8572 (set (match_dup 0)
8573 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
8574 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
8575 "
8576 if (CONST_INT_P (operands[2]))
8577 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
8578 else
8579 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
8580 ")
8581
8582 (define_insn "*cond_move"
8583 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8584 (if_then_else:SI (match_operator 3 "equality_operator"
8585 [(match_operator 4 "arm_comparison_operator"
8586 [(match_operand 5 "cc_register" "") (const_int 0)])
8587 (const_int 0)])
8588 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
8589 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
8590 "TARGET_ARM"
8591 "*
8592 if (GET_CODE (operands[3]) == NE)
8593 {
8594 if (which_alternative != 1)
8595 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
8596 if (which_alternative != 0)
8597 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
8598 return \"\";
8599 }
8600 if (which_alternative != 0)
8601 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8602 if (which_alternative != 1)
8603 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
8604 return \"\";
8605 "
8606 [(set_attr "conds" "use")
8607 (set_attr_alternative "type"
8608 [(if_then_else (match_operand 2 "const_int_operand" "")
8609 (const_string "mov_imm")
8610 (const_string "mov_reg"))
8611 (if_then_else (match_operand 1 "const_int_operand" "")
8612 (const_string "mov_imm")
8613 (const_string "mov_reg"))
8614 (const_string "multiple")])
8615 (set_attr "length" "4,4,8")]
8616 )
8617
8618 (define_insn "*cond_arith"
8619 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8620 (match_operator:SI 5 "shiftable_operator"
8621 [(match_operator:SI 4 "arm_comparison_operator"
8622 [(match_operand:SI 2 "s_register_operand" "r,r")
8623 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
8624 (match_operand:SI 1 "s_register_operand" "0,?r")]))
8625 (clobber (reg:CC CC_REGNUM))]
8626 "TARGET_ARM"
8627 "*
8628 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
8629 return \"%i5\\t%0, %1, %2, lsr #31\";
8630
8631 output_asm_insn (\"cmp\\t%2, %3\", operands);
8632 if (GET_CODE (operands[5]) == AND)
8633 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
8634 else if (GET_CODE (operands[5]) == MINUS)
8635 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
8636 else if (which_alternative != 0)
8637 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8638 return \"%i5%d4\\t%0, %1, #1\";
8639 "
8640 [(set_attr "conds" "clob")
8641 (set_attr "length" "12")
8642 (set_attr "type" "multiple")]
8643 )
8644
8645 (define_insn "*cond_sub"
8646 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8647 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
8648 (match_operator:SI 4 "arm_comparison_operator"
8649 [(match_operand:SI 2 "s_register_operand" "r,r")
8650 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
8651 (clobber (reg:CC CC_REGNUM))]
8652 "TARGET_ARM"
8653 "*
8654 output_asm_insn (\"cmp\\t%2, %3\", operands);
8655 if (which_alternative != 0)
8656 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8657 return \"sub%d4\\t%0, %1, #1\";
8658 "
8659 [(set_attr "conds" "clob")
8660 (set_attr "length" "8,12")
8661 (set_attr "type" "multiple")]
8662 )
8663
8664 (define_insn "*cmp_ite0"
8665 [(set (match_operand 6 "dominant_cc_register" "")
8666 (compare
8667 (if_then_else:SI
8668 (match_operator 4 "arm_comparison_operator"
8669 [(match_operand:SI 0 "s_register_operand"
8670 "l,l,l,r,r,r,r,r,r")
8671 (match_operand:SI 1 "arm_add_operand"
8672 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
8673 (match_operator:SI 5 "arm_comparison_operator"
8674 [(match_operand:SI 2 "s_register_operand"
8675 "l,r,r,l,l,r,r,r,r")
8676 (match_operand:SI 3 "arm_add_operand"
8677 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
8678 (const_int 0))
8679 (const_int 0)))]
8680 "TARGET_32BIT"
8681 "*
8682 {
8683 static const char * const cmp1[NUM_OF_COND_CMP][2] =
8684 {
8685 {\"cmp%d5\\t%0, %1\",
8686 \"cmp%d4\\t%2, %3\"},
8687 {\"cmn%d5\\t%0, #%n1\",
8688 \"cmp%d4\\t%2, %3\"},
8689 {\"cmp%d5\\t%0, %1\",
8690 \"cmn%d4\\t%2, #%n3\"},
8691 {\"cmn%d5\\t%0, #%n1\",
8692 \"cmn%d4\\t%2, #%n3\"}
8693 };
8694 static const char * const cmp2[NUM_OF_COND_CMP][2] =
8695 {
8696 {\"cmp\\t%2, %3\",
8697 \"cmp\\t%0, %1\"},
8698 {\"cmp\\t%2, %3\",
8699 \"cmn\\t%0, #%n1\"},
8700 {\"cmn\\t%2, #%n3\",
8701 \"cmp\\t%0, %1\"},
8702 {\"cmn\\t%2, #%n3\",
8703 \"cmn\\t%0, #%n1\"}
8704 };
8705 static const char * const ite[2] =
8706 {
8707 \"it\\t%d5\",
8708 \"it\\t%d4\"
8709 };
8710 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
8711 CMP_CMP, CMN_CMP, CMP_CMP,
8712 CMN_CMP, CMP_CMN, CMN_CMN};
8713 int swap =
8714 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8715
8716 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8717 if (TARGET_THUMB2) {
8718 output_asm_insn (ite[swap], operands);
8719 }
8720 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8721 return \"\";
8722 }"
8723 [(set_attr "conds" "set")
8724 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
8725 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
8726 (set_attr "type" "multiple")
8727 (set_attr_alternative "length"
8728 [(const_int 6)
8729 (const_int 8)
8730 (const_int 8)
8731 (const_int 8)
8732 (const_int 8)
8733 (if_then_else (eq_attr "is_thumb" "no")
8734 (const_int 8)
8735 (const_int 10))
8736 (if_then_else (eq_attr "is_thumb" "no")
8737 (const_int 8)
8738 (const_int 10))
8739 (if_then_else (eq_attr "is_thumb" "no")
8740 (const_int 8)
8741 (const_int 10))
8742 (if_then_else (eq_attr "is_thumb" "no")
8743 (const_int 8)
8744 (const_int 10))])]
8745 )
8746
8747 (define_insn "*cmp_ite1"
8748 [(set (match_operand 6 "dominant_cc_register" "")
8749 (compare
8750 (if_then_else:SI
8751 (match_operator 4 "arm_comparison_operator"
8752 [(match_operand:SI 0 "s_register_operand"
8753 "l,l,l,r,r,r,r,r,r")
8754 (match_operand:SI 1 "arm_add_operand"
8755 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
8756 (match_operator:SI 5 "arm_comparison_operator"
8757 [(match_operand:SI 2 "s_register_operand"
8758 "l,r,r,l,l,r,r,r,r")
8759 (match_operand:SI 3 "arm_add_operand"
8760 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
8761 (const_int 1))
8762 (const_int 0)))]
8763 "TARGET_32BIT"
8764 "*
8765 {
8766 static const char * const cmp1[NUM_OF_COND_CMP][2] =
8767 {
8768 {\"cmp\\t%0, %1\",
8769 \"cmp\\t%2, %3\"},
8770 {\"cmn\\t%0, #%n1\",
8771 \"cmp\\t%2, %3\"},
8772 {\"cmp\\t%0, %1\",
8773 \"cmn\\t%2, #%n3\"},
8774 {\"cmn\\t%0, #%n1\",
8775 \"cmn\\t%2, #%n3\"}
8776 };
8777 static const char * const cmp2[NUM_OF_COND_CMP][2] =
8778 {
8779 {\"cmp%d4\\t%2, %3\",
8780 \"cmp%D5\\t%0, %1\"},
8781 {\"cmp%d4\\t%2, %3\",
8782 \"cmn%D5\\t%0, #%n1\"},
8783 {\"cmn%d4\\t%2, #%n3\",
8784 \"cmp%D5\\t%0, %1\"},
8785 {\"cmn%d4\\t%2, #%n3\",
8786 \"cmn%D5\\t%0, #%n1\"}
8787 };
8788 static const char * const ite[2] =
8789 {
8790 \"it\\t%d4\",
8791 \"it\\t%D5\"
8792 };
8793 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
8794 CMP_CMP, CMN_CMP, CMP_CMP,
8795 CMN_CMP, CMP_CMN, CMN_CMN};
8796 int swap =
8797 comparison_dominates_p (GET_CODE (operands[5]),
8798 reverse_condition (GET_CODE (operands[4])));
8799
8800 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8801 if (TARGET_THUMB2) {
8802 output_asm_insn (ite[swap], operands);
8803 }
8804 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8805 return \"\";
8806 }"
8807 [(set_attr "conds" "set")
8808 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
8809 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
8810 (set_attr_alternative "length"
8811 [(const_int 6)
8812 (const_int 8)
8813 (const_int 8)
8814 (const_int 8)
8815 (const_int 8)
8816 (if_then_else (eq_attr "is_thumb" "no")
8817 (const_int 8)
8818 (const_int 10))
8819 (if_then_else (eq_attr "is_thumb" "no")
8820 (const_int 8)
8821 (const_int 10))
8822 (if_then_else (eq_attr "is_thumb" "no")
8823 (const_int 8)
8824 (const_int 10))
8825 (if_then_else (eq_attr "is_thumb" "no")
8826 (const_int 8)
8827 (const_int 10))])
8828 (set_attr "type" "multiple")]
8829 )
8830
8831 (define_insn "*cmp_and"
8832 [(set (match_operand 6 "dominant_cc_register" "")
8833 (compare
8834 (and:SI
8835 (match_operator 4 "arm_comparison_operator"
8836 [(match_operand:SI 0 "s_register_operand"
8837 "l,l,l,r,r,r,r,r,r,r")
8838 (match_operand:SI 1 "arm_add_operand"
8839 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
8840 (match_operator:SI 5 "arm_comparison_operator"
8841 [(match_operand:SI 2 "s_register_operand"
8842 "l,r,r,l,l,r,r,r,r,r")
8843 (match_operand:SI 3 "arm_add_operand"
8844 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
8845 (const_int 0)))]
8846 "TARGET_32BIT"
8847 "*
8848 {
8849 static const char *const cmp1[NUM_OF_COND_CMP][2] =
8850 {
8851 {\"cmp%d5\\t%0, %1\",
8852 \"cmp%d4\\t%2, %3\"},
8853 {\"cmn%d5\\t%0, #%n1\",
8854 \"cmp%d4\\t%2, %3\"},
8855 {\"cmp%d5\\t%0, %1\",
8856 \"cmn%d4\\t%2, #%n3\"},
8857 {\"cmn%d5\\t%0, #%n1\",
8858 \"cmn%d4\\t%2, #%n3\"}
8859 };
8860 static const char *const cmp2[NUM_OF_COND_CMP][2] =
8861 {
8862 {\"cmp\\t%2, %3\",
8863 \"cmp\\t%0, %1\"},
8864 {\"cmp\\t%2, %3\",
8865 \"cmn\\t%0, #%n1\"},
8866 {\"cmn\\t%2, #%n3\",
8867 \"cmp\\t%0, %1\"},
8868 {\"cmn\\t%2, #%n3\",
8869 \"cmn\\t%0, #%n1\"}
8870 };
8871 static const char *const ite[2] =
8872 {
8873 \"it\\t%d5\",
8874 \"it\\t%d4\"
8875 };
8876 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
8877 CMP_CMP, CMN_CMP, CMP_CMP,
8878 CMP_CMP, CMN_CMP, CMP_CMN,
8879 CMN_CMN};
8880 int swap =
8881 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8882
8883 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8884 if (TARGET_THUMB2) {
8885 output_asm_insn (ite[swap], operands);
8886 }
8887 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8888 return \"\";
8889 }"
8890 [(set_attr "conds" "set")
8891 (set_attr "predicable" "no")
8892 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
8893 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
8894 (set_attr_alternative "length"
8895 [(const_int 6)
8896 (const_int 8)
8897 (const_int 8)
8898 (const_int 8)
8899 (const_int 8)
8900 (const_int 6)
8901 (if_then_else (eq_attr "is_thumb" "no")
8902 (const_int 8)
8903 (const_int 10))
8904 (if_then_else (eq_attr "is_thumb" "no")
8905 (const_int 8)
8906 (const_int 10))
8907 (if_then_else (eq_attr "is_thumb" "no")
8908 (const_int 8)
8909 (const_int 10))
8910 (if_then_else (eq_attr "is_thumb" "no")
8911 (const_int 8)
8912 (const_int 10))])
8913 (set_attr "type" "multiple")]
8914 )
8915
8916 (define_insn "*cmp_ior"
8917 [(set (match_operand 6 "dominant_cc_register" "")
8918 (compare
8919 (ior:SI
8920 (match_operator 4 "arm_comparison_operator"
8921 [(match_operand:SI 0 "s_register_operand"
8922 "l,l,l,r,r,r,r,r,r,r")
8923 (match_operand:SI 1 "arm_add_operand"
8924 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
8925 (match_operator:SI 5 "arm_comparison_operator"
8926 [(match_operand:SI 2 "s_register_operand"
8927 "l,r,r,l,l,r,r,r,r,r")
8928 (match_operand:SI 3 "arm_add_operand"
8929 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
8930 (const_int 0)))]
8931 "TARGET_32BIT"
8932 "*
8933 {
8934 static const char *const cmp1[NUM_OF_COND_CMP][2] =
8935 {
8936 {\"cmp\\t%0, %1\",
8937 \"cmp\\t%2, %3\"},
8938 {\"cmn\\t%0, #%n1\",
8939 \"cmp\\t%2, %3\"},
8940 {\"cmp\\t%0, %1\",
8941 \"cmn\\t%2, #%n3\"},
8942 {\"cmn\\t%0, #%n1\",
8943 \"cmn\\t%2, #%n3\"}
8944 };
8945 static const char *const cmp2[NUM_OF_COND_CMP][2] =
8946 {
8947 {\"cmp%D4\\t%2, %3\",
8948 \"cmp%D5\\t%0, %1\"},
8949 {\"cmp%D4\\t%2, %3\",
8950 \"cmn%D5\\t%0, #%n1\"},
8951 {\"cmn%D4\\t%2, #%n3\",
8952 \"cmp%D5\\t%0, %1\"},
8953 {\"cmn%D4\\t%2, #%n3\",
8954 \"cmn%D5\\t%0, #%n1\"}
8955 };
8956 static const char *const ite[2] =
8957 {
8958 \"it\\t%D4\",
8959 \"it\\t%D5\"
8960 };
8961 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
8962 CMP_CMP, CMN_CMP, CMP_CMP,
8963 CMP_CMP, CMN_CMP, CMP_CMN,
8964 CMN_CMN};
8965 int swap =
8966 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8967
8968 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8969 if (TARGET_THUMB2) {
8970 output_asm_insn (ite[swap], operands);
8971 }
8972 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8973 return \"\";
8974 }
8975 "
8976 [(set_attr "conds" "set")
8977 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
8978 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
8979 (set_attr_alternative "length"
8980 [(const_int 6)
8981 (const_int 8)
8982 (const_int 8)
8983 (const_int 8)
8984 (const_int 8)
8985 (const_int 6)
8986 (if_then_else (eq_attr "is_thumb" "no")
8987 (const_int 8)
8988 (const_int 10))
8989 (if_then_else (eq_attr "is_thumb" "no")
8990 (const_int 8)
8991 (const_int 10))
8992 (if_then_else (eq_attr "is_thumb" "no")
8993 (const_int 8)
8994 (const_int 10))
8995 (if_then_else (eq_attr "is_thumb" "no")
8996 (const_int 8)
8997 (const_int 10))])
8998 (set_attr "type" "multiple")]
8999 )
9000
9001 (define_insn_and_split "*ior_scc_scc"
9002 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9003 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9004 [(match_operand:SI 1 "s_register_operand" "l,r")
9005 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9006 (match_operator:SI 6 "arm_comparison_operator"
9007 [(match_operand:SI 4 "s_register_operand" "l,r")
9008 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
9009 (clobber (reg:CC CC_REGNUM))]
9010 "TARGET_32BIT
9011 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9012 != CCmode)"
9013 "#"
9014 "TARGET_32BIT && reload_completed"
9015 [(set (match_dup 7)
9016 (compare
9017 (ior:SI
9018 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9019 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9020 (const_int 0)))
9021 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9022 "operands[7]
9023 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9024 DOM_CC_X_OR_Y),
9025 CC_REGNUM);"
9026 [(set_attr "conds" "clob")
9027 (set_attr "enabled_for_short_it" "yes,no")
9028 (set_attr "length" "16")
9029 (set_attr "type" "multiple")]
9030 )
9031
9032 ; If the above pattern is followed by a CMP insn, then the compare is
9033 ; redundant, since we can rework the conditional instruction that follows.
9034 (define_insn_and_split "*ior_scc_scc_cmp"
9035 [(set (match_operand 0 "dominant_cc_register" "")
9036 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9037 [(match_operand:SI 1 "s_register_operand" "l,r")
9038 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9039 (match_operator:SI 6 "arm_comparison_operator"
9040 [(match_operand:SI 4 "s_register_operand" "l,r")
9041 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9042 (const_int 0)))
9043 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9044 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9045 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9046 "TARGET_32BIT"
9047 "#"
9048 "TARGET_32BIT && reload_completed"
9049 [(set (match_dup 0)
9050 (compare
9051 (ior:SI
9052 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9053 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9054 (const_int 0)))
9055 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9056 ""
9057 [(set_attr "conds" "set")
9058 (set_attr "enabled_for_short_it" "yes,no")
9059 (set_attr "length" "16")
9060 (set_attr "type" "multiple")]
9061 )
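
;; For instance (a sketch; registers depend on allocation), when the value
;;   t = (a == b) || (c == d);
;; is both stored and then tested, the extra "cmp t, #0" is unnecessary:
;; the dominance-mode compare computed for the IOR, e.g.
;;   cmp    r0, r1
;;   cmpne  r2, r3       @ Z is set iff (a == b) || (c == d)
;; already leaves the condition codes set, and t is materialised from them.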
9062
9063 (define_insn_and_split "*and_scc_scc"
9064 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9065 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9066 [(match_operand:SI 1 "s_register_operand" "l,r")
9067 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9068 (match_operator:SI 6 "arm_comparison_operator"
9069 [(match_operand:SI 4 "s_register_operand" "l,r")
9070 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
9071 (clobber (reg:CC CC_REGNUM))]
9072 "TARGET_32BIT
9073 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9074 != CCmode)"
9075 "#"
9076 "TARGET_32BIT && reload_completed
9077 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9078 != CCmode)"
9079 [(set (match_dup 7)
9080 (compare
9081 (and:SI
9082 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9083 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9084 (const_int 0)))
9085 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9086 "operands[7]
9087 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9088 DOM_CC_X_AND_Y),
9089 CC_REGNUM);"
9090 [(set_attr "conds" "clob")
9091 (set_attr "enabled_for_short_it" "yes,no")
9092 (set_attr "length" "16")
9093 (set_attr "type" "multiple")]
9094 )
9095
9096 ; If the above pattern is followed by a CMP insn, then the compare is
9097 ; redundant, since we can rework the conditional instruction that follows.
9098 (define_insn_and_split "*and_scc_scc_cmp"
9099 [(set (match_operand 0 "dominant_cc_register" "")
9100 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9101 [(match_operand:SI 1 "s_register_operand" "l,r")
9102 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9103 (match_operator:SI 6 "arm_comparison_operator"
9104 [(match_operand:SI 4 "s_register_operand" "l,r")
9105 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9106 (const_int 0)))
9107 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9108 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9109 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9110 "TARGET_32BIT"
9111 "#"
9112 "TARGET_32BIT && reload_completed"
9113 [(set (match_dup 0)
9114 (compare
9115 (and:SI
9116 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9117 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9118 (const_int 0)))
9119 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9120 ""
9121 [(set_attr "conds" "set")
9122 (set_attr "enabled_for_short_it" "yes,no")
9123 (set_attr "length" "16")
9124 (set_attr "type" "multiple")]
9125 )
9126
9127 ;; If there is no dominance in the comparison, then we can still save an
9128 ;; instruction in the AND case, since we know that the second compare
9129 ;; need only zero the value if false (if true, then the value is already
9130 ;; correct).
9131 (define_insn_and_split "*and_scc_scc_nodom"
9132 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
9133 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9134 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9135 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9136 (match_operator:SI 6 "arm_comparison_operator"
9137 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9138 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9139 (clobber (reg:CC CC_REGNUM))]
9140 "TARGET_32BIT
9141 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9142 == CCmode)"
9143 "#"
9144 "TARGET_32BIT && reload_completed"
9145 [(parallel [(set (match_dup 0)
9146 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9147 (clobber (reg:CC CC_REGNUM))])
9148 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9149 (set (match_dup 0)
9150 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9151 (match_dup 0)
9152 (const_int 0)))]
9153 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9154 operands[4], operands[5]),
9155 CC_REGNUM);
9156 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9157 operands[5]);"
9158 [(set_attr "conds" "clob")
9159 (set_attr "length" "20")
9160 (set_attr "type" "multiple")]
9161 )
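
;; Sketch of the split sequence (condition codes depend on the two
;; comparisons, which by hypothesis share no dominating CC mode):
;;   <scc>        Rd, ...   @ first comparison materialised as 0/1
;;   cmp          Rc, Rs    @ second comparison
;;   mov<!cond2>  Rd, #0    @ zero the result if the second test fails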
9162
9163 (define_split
9164 [(set (reg:CC_NOOV CC_REGNUM)
9165 (compare:CC_NOOV (ior:SI
9166 (and:SI (match_operand:SI 0 "s_register_operand" "")
9167 (const_int 1))
9168 (match_operator:SI 1 "arm_comparison_operator"
9169 [(match_operand:SI 2 "s_register_operand" "")
9170 (match_operand:SI 3 "arm_add_operand" "")]))
9171 (const_int 0)))
9172 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9173 "TARGET_ARM"
9174 [(set (match_dup 4)
9175 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9176 (match_dup 0)))
9177 (set (reg:CC_NOOV CC_REGNUM)
9178 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9179 (const_int 0)))]
9180 "")
9181
9182 (define_split
9183 [(set (reg:CC_NOOV CC_REGNUM)
9184 (compare:CC_NOOV (ior:SI
9185 (match_operator:SI 1 "arm_comparison_operator"
9186 [(match_operand:SI 2 "s_register_operand" "")
9187 (match_operand:SI 3 "arm_add_operand" "")])
9188 (and:SI (match_operand:SI 0 "s_register_operand" "")
9189 (const_int 1)))
9190 (const_int 0)))
9191 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9192 "TARGET_ARM"
9193 [(set (match_dup 4)
9194 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9195 (match_dup 0)))
9196 (set (reg:CC_NOOV CC_REGNUM)
9197 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9198 (const_int 0)))]
9199 "")
9200 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9201
9202 (define_insn_and_split "*negscc"
9203 [(set (match_operand:SI 0 "s_register_operand" "=r")
9204 (neg:SI (match_operator 3 "arm_comparison_operator"
9205 [(match_operand:SI 1 "s_register_operand" "r")
9206 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9207 (clobber (reg:CC CC_REGNUM))]
9208 "TARGET_ARM"
9209 "#"
9210 "&& reload_completed"
9211 [(const_int 0)]
9212 {
9213 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
9214
9215 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9216 {
9217 /* Emit mov\\t%0, %1, asr #31 */
9218 emit_insn (gen_rtx_SET (operands[0],
9219 gen_rtx_ASHIFTRT (SImode,
9220 operands[1],
9221 GEN_INT (31))));
9222 DONE;
9223 }
9224 else if (GET_CODE (operands[3]) == NE)
9225 {
9226 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
9227 if (CONST_INT_P (operands[2]))
9228 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
9229 gen_int_mode (-INTVAL (operands[2]),
9230 SImode)));
9231 else
9232 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
9233
9234 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9235 gen_rtx_NE (SImode,
9236 cc_reg,
9237 const0_rtx),
9238 gen_rtx_SET (operands[0],
9239 GEN_INT (~0))));
9240 DONE;
9241 }
9242 else
9243 {
9244 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
9245 emit_insn (gen_rtx_SET (cc_reg,
9246 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
9247 enum rtx_code rc = GET_CODE (operands[3]);
9248
9249 rc = reverse_condition (rc);
9250 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9251 gen_rtx_fmt_ee (rc,
9252 VOIDmode,
9253 cc_reg,
9254 const0_rtx),
9255 gen_rtx_SET (operands[0], const0_rtx)));
9256 rc = GET_CODE (operands[3]);
9257 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9258 gen_rtx_fmt_ee (rc,
9259 VOIDmode,
9260 cc_reg,
9261 const0_rtx),
9262 gen_rtx_SET (operands[0],
9263 GEN_INT (~0))));
9264 DONE;
9265 }
9266 FAIL;
9267 }
9268 [(set_attr "conds" "clob")
9269 (set_attr "length" "12")
9270 (set_attr "type" "multiple")]
9271 )
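
;; Illustrative lowerings of the split above (registers arbitrary):
;;   -(x <  0)  ->  mov   r0, r1, asr #31
;;   -(x != y)  ->  subs  r0, r1, r2
;;                  mvnne r0, #0
;;   -(x >= y)  ->  cmp   r1, r2
;;                  movlt r0, #0
;;                  mvnge r0, #0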
9272
9273 (define_insn_and_split "movcond_addsi"
9274 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
9275 (if_then_else:SI
9276 (match_operator 5 "comparison_operator"
9277 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
9278 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
9279 (const_int 0)])
9280 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
9281 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
9282 (clobber (reg:CC CC_REGNUM))]
9283 "TARGET_32BIT"
9284 "#"
9285 "&& reload_completed"
9286 [(set (reg:CC_NOOV CC_REGNUM)
9287 (compare:CC_NOOV
9288 (plus:SI (match_dup 3)
9289 (match_dup 4))
9290 (const_int 0)))
9291 (set (match_dup 0) (match_dup 1))
9292 (cond_exec (match_dup 6)
9293 (set (match_dup 0) (match_dup 2)))]
9294 "
9295 {
9296 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
9297 operands[3], operands[4]);
9298 enum rtx_code rc = GET_CODE (operands[5]);
9299 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
9300 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
9301 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
9302 rc = reverse_condition (rc);
9303 else
9304 std::swap (operands[1], operands[2]);
9305
9306 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
9307 }
9308 "
9309 [(set_attr "conds" "clob")
9310 (set_attr "enabled_for_short_it" "no,yes,yes")
9311 (set_attr "type" "multiple")]
9312 )
9313
9314 (define_insn "movcond"
9315 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9316 (if_then_else:SI
9317 (match_operator 5 "arm_comparison_operator"
9318 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9319 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9320 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9321 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9322 (clobber (reg:CC CC_REGNUM))]
9323 "TARGET_ARM"
9324 "*
9325 if (GET_CODE (operands[5]) == LT
9326 && (operands[4] == const0_rtx))
9327 {
9328 if (which_alternative != 1 && REG_P (operands[1]))
9329 {
9330 if (operands[2] == const0_rtx)
9331 return \"and\\t%0, %1, %3, asr #31\";
9332 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9333 }
9334 else if (which_alternative != 0 && REG_P (operands[2]))
9335 {
9336 if (operands[1] == const0_rtx)
9337 return \"bic\\t%0, %2, %3, asr #31\";
9338 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9339 }
9340 /* The only case that falls through to here is when both ops 1 & 2
9341 are constants. */
9342 }
9343
9344 if (GET_CODE (operands[5]) == GE
9345 && (operands[4] == const0_rtx))
9346 {
9347 if (which_alternative != 1 && REG_P (operands[1]))
9348 {
9349 if (operands[2] == const0_rtx)
9350 return \"bic\\t%0, %1, %3, asr #31\";
9351 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9352 }
9353 else if (which_alternative != 0 && REG_P (operands[2]))
9354 {
9355 if (operands[1] == const0_rtx)
9356 return \"and\\t%0, %2, %3, asr #31\";
9357 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9358 }
9359 /* The only case that falls through to here is when both ops 1 & 2
9360 are constants. */
9361 }
9362 if (CONST_INT_P (operands[4])
9363 && !const_ok_for_arm (INTVAL (operands[4])))
9364 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9365 else
9366 output_asm_insn (\"cmp\\t%3, %4\", operands);
9367 if (which_alternative != 0)
9368 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9369 if (which_alternative != 1)
9370 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9371 return \"\";
9372 "
9373 [(set_attr "conds" "clob")
9374 (set_attr "length" "8,8,12")
9375 (set_attr "type" "multiple")]
9376 )
9377
9378 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9379
9380 (define_insn "*ifcompare_plus_move"
9381 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9382 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9383 [(match_operand:SI 4 "s_register_operand" "r,r")
9384 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9385 (plus:SI
9386 (match_operand:SI 2 "s_register_operand" "r,r")
9387 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9388 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9389 (clobber (reg:CC CC_REGNUM))]
9390 "TARGET_ARM"
9391 "#"
9392 [(set_attr "conds" "clob")
9393 (set_attr "length" "8,12")
9394 (set_attr "type" "multiple")]
9395 )
9396
9397 (define_insn "*if_plus_move"
9398 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9399 (if_then_else:SI
9400 (match_operator 4 "arm_comparison_operator"
9401 [(match_operand 5 "cc_register" "") (const_int 0)])
9402 (plus:SI
9403 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9404 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9405 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9406 "TARGET_ARM"
9407 "@
9408 add%d4\\t%0, %2, %3
9409 sub%d4\\t%0, %2, #%n3
9410 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9411 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9412 [(set_attr "conds" "use")
9413 (set_attr "length" "4,4,8,8")
9414 (set_attr_alternative "type"
9415 [(if_then_else (match_operand 3 "const_int_operand" "")
9416 (const_string "alu_imm" )
9417 (const_string "alu_sreg"))
9418 (const_string "alu_imm")
9419 (const_string "multiple")
9420 (const_string "multiple")])]
9421 )
9422
9423 (define_insn "*ifcompare_move_plus"
9424 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9425 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9426 [(match_operand:SI 4 "s_register_operand" "r,r")
9427 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9428 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9429 (plus:SI
9430 (match_operand:SI 2 "s_register_operand" "r,r")
9431 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9432 (clobber (reg:CC CC_REGNUM))]
9433 "TARGET_ARM"
9434 "#"
9435 [(set_attr "conds" "clob")
9436 (set_attr "length" "8,12")
9437 (set_attr "type" "multiple")]
9438 )
9439
9440 (define_insn "*if_move_plus"
9441 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9442 (if_then_else:SI
9443 (match_operator 4 "arm_comparison_operator"
9444 [(match_operand 5 "cc_register" "") (const_int 0)])
9445 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9446 (plus:SI
9447 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9448 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9449 "TARGET_ARM"
9450 "@
9451 add%D4\\t%0, %2, %3
9452 sub%D4\\t%0, %2, #%n3
9453 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9454 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9455 [(set_attr "conds" "use")
9456 (set_attr "length" "4,4,8,8")
9457 (set_attr_alternative "type"
9458 [(if_then_else (match_operand 3 "const_int_operand" "")
9459 (const_string "alu_imm" )
9460 (const_string "alu_sreg"))
9461 (const_string "alu_imm")
9462 (const_string "multiple")
9463 (const_string "multiple")])]
9464 )
9465
9466 (define_insn "*ifcompare_arith_arith"
9467 [(set (match_operand:SI 0 "s_register_operand" "=r")
9468 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9469 [(match_operand:SI 5 "s_register_operand" "r")
9470 (match_operand:SI 6 "arm_add_operand" "rIL")])
9471 (match_operator:SI 8 "shiftable_operator"
9472 [(match_operand:SI 1 "s_register_operand" "r")
9473 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9474 (match_operator:SI 7 "shiftable_operator"
9475 [(match_operand:SI 3 "s_register_operand" "r")
9476 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9477 (clobber (reg:CC CC_REGNUM))]
9478 "TARGET_ARM"
9479 "#"
9480 [(set_attr "conds" "clob")
9481 (set_attr "length" "12")
9482 (set_attr "type" "multiple")]
9483 )
9484
9485 (define_insn "*if_arith_arith"
9486 [(set (match_operand:SI 0 "s_register_operand" "=r")
9487 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9488 [(match_operand 8 "cc_register" "") (const_int 0)])
9489 (match_operator:SI 6 "shiftable_operator"
9490 [(match_operand:SI 1 "s_register_operand" "r")
9491 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9492 (match_operator:SI 7 "shiftable_operator"
9493 [(match_operand:SI 3 "s_register_operand" "r")
9494 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9495 "TARGET_ARM"
9496 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9497 [(set_attr "conds" "use")
9498 (set_attr "length" "8")
9499 (set_attr "type" "multiple")]
9500 )
9501
9502 (define_insn "*ifcompare_arith_move"
9503 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9504 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9505 [(match_operand:SI 2 "s_register_operand" "r,r")
9506 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9507 (match_operator:SI 7 "shiftable_operator"
9508 [(match_operand:SI 4 "s_register_operand" "r,r")
9509 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9510 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9511 (clobber (reg:CC CC_REGNUM))]
9512 "TARGET_ARM"
9513 "*
9514 /* If we have an operation where (op x 0) is the identity operation, the
9515 conditional operator is LT or GE, we are comparing against zero, and
9516 everything is in registers, then we can do this in two instructions. */
9517 if (operands[3] == const0_rtx
9518 && GET_CODE (operands[7]) != AND
9519 && REG_P (operands[5])
9520 && REG_P (operands[1])
9521 && REGNO (operands[1]) == REGNO (operands[4])
9522 && REGNO (operands[4]) != REGNO (operands[0]))
9523 {
9524 if (GET_CODE (operands[6]) == LT)
9525 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9526 else if (GET_CODE (operands[6]) == GE)
9527 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9528 }
9529 if (CONST_INT_P (operands[3])
9530 && !const_ok_for_arm (INTVAL (operands[3])))
9531 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9532 else
9533 output_asm_insn (\"cmp\\t%2, %3\", operands);
9534 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9535 if (which_alternative != 0)
9536 return \"mov%D6\\t%0, %1\";
9537 return \"\";
9538 "
9539 [(set_attr "conds" "clob")
9540 (set_attr "length" "8,12")
9541 (set_attr "type" "multiple")]
9542 )
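
;; Example of the two-instruction special case (illustrative registers),
;; where the "else" value is the same register as the arithmetic input:
;;   r0 = (r2 < 0) ? (r4 + r5) : r4;
;; becomes
;;   and   r0, r5, r2, asr #31   @ r0 = (r2 < 0) ? r5 : 0
;;   add   r0, r4, r0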
9543
9544 (define_insn "*if_arith_move"
9545 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9546 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9547 [(match_operand 6 "cc_register" "") (const_int 0)])
9548 (match_operator:SI 5 "shiftable_operator"
9549 [(match_operand:SI 2 "s_register_operand" "r,r")
9550 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9551 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9552 "TARGET_ARM"
9553 "@
9554 %I5%d4\\t%0, %2, %3
9555 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9556 [(set_attr "conds" "use")
9557 (set_attr "length" "4,8")
9558 (set_attr_alternative "type"
9559 [(if_then_else (match_operand 3 "const_int_operand" "")
9560 (const_string "alu_shift_imm" )
9561 (const_string "alu_shift_reg"))
9562 (const_string "multiple")])]
9563 )
9564
9565 (define_insn "*ifcompare_move_arith"
9566 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9567 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9568 [(match_operand:SI 4 "s_register_operand" "r,r")
9569 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9570 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9571 (match_operator:SI 7 "shiftable_operator"
9572 [(match_operand:SI 2 "s_register_operand" "r,r")
9573 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9574 (clobber (reg:CC CC_REGNUM))]
9575 "TARGET_ARM"
9576 "*
9577 /* If we have an operation where (op x 0) is the identity operation, the
9578 conditional operator is LT or GE, we are comparing against zero, and
9579 everything is in registers, then we can do this in two instructions. */
9580 if (operands[5] == const0_rtx
9581 && GET_CODE (operands[7]) != AND
9582 && REG_P (operands[3])
9583 && REG_P (operands[1])
9584 && REGNO (operands[1]) == REGNO (operands[2])
9585 && REGNO (operands[2]) != REGNO (operands[0]))
9586 {
9587 if (GET_CODE (operands[6]) == GE)
9588 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9589 else if (GET_CODE (operands[6]) == LT)
9590 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9591 }
9592
9593 if (CONST_INT_P (operands[5])
9594 && !const_ok_for_arm (INTVAL (operands[5])))
9595 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9596 else
9597 output_asm_insn (\"cmp\\t%4, %5\", operands);
9598
9599 if (which_alternative != 0)
9600 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9601 return \"%I7%D6\\t%0, %2, %3\";
9602 "
9603 [(set_attr "conds" "clob")
9604 (set_attr "length" "8,12")
9605 (set_attr "type" "multiple")]
9606 )
9607
9608 (define_insn "*if_move_arith"
9609 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9610 (if_then_else:SI
9611 (match_operator 4 "arm_comparison_operator"
9612 [(match_operand 6 "cc_register" "") (const_int 0)])
9613 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9614 (match_operator:SI 5 "shiftable_operator"
9615 [(match_operand:SI 2 "s_register_operand" "r,r")
9616 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9617 "TARGET_ARM"
9618 "@
9619 %I5%D4\\t%0, %2, %3
9620 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9621 [(set_attr "conds" "use")
9622 (set_attr "length" "4,8")
9623 (set_attr_alternative "type"
9624 [(if_then_else (match_operand 3 "const_int_operand" "")
9625 (const_string "alu_shift_imm" )
9626 (const_string "alu_shift_reg"))
9627 (const_string "multiple")])]
9628 )
9629
9630 (define_insn "*ifcompare_move_not"
9631 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9632 (if_then_else:SI
9633 (match_operator 5 "arm_comparison_operator"
9634 [(match_operand:SI 3 "s_register_operand" "r,r")
9635 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9636 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9637 (not:SI
9638 (match_operand:SI 2 "s_register_operand" "r,r"))))
9639 (clobber (reg:CC CC_REGNUM))]
9640 "TARGET_ARM"
9641 "#"
9642 [(set_attr "conds" "clob")
9643 (set_attr "length" "8,12")
9644 (set_attr "type" "multiple")]
9645 )
9646
9647 (define_insn "*if_move_not"
9648 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9649 (if_then_else:SI
9650 (match_operator 4 "arm_comparison_operator"
9651 [(match_operand 3 "cc_register" "") (const_int 0)])
9652 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9653 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9654 "TARGET_ARM"
9655 "@
9656 mvn%D4\\t%0, %2
9657 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9658 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9659 [(set_attr "conds" "use")
9660 (set_attr "type" "mvn_reg")
9661 (set_attr "length" "4,8,8")
9662 (set_attr "type" "mvn_reg,multiple,multiple")]
9663 )
9664
9665 (define_insn "*ifcompare_not_move"
9666 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9667 (if_then_else:SI
9668 (match_operator 5 "arm_comparison_operator"
9669 [(match_operand:SI 3 "s_register_operand" "r,r")
9670 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9671 (not:SI
9672 (match_operand:SI 2 "s_register_operand" "r,r"))
9673 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9674 (clobber (reg:CC CC_REGNUM))]
9675 "TARGET_ARM"
9676 "#"
9677 [(set_attr "conds" "clob")
9678 (set_attr "length" "8,12")
9679 (set_attr "type" "multiple")]
9680 )
9681
9682 (define_insn "*if_not_move"
9683 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9684 (if_then_else:SI
9685 (match_operator 4 "arm_comparison_operator"
9686 [(match_operand 3 "cc_register" "") (const_int 0)])
9687 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9688 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9689 "TARGET_ARM"
9690 "@
9691 mvn%d4\\t%0, %2
9692 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9693 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9694 [(set_attr "conds" "use")
9695 (set_attr "type" "mvn_reg,multiple,multiple")
9696 (set_attr "length" "4,8,8")]
9697 )
9698
9699 (define_insn "*ifcompare_shift_move"
9700 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9701 (if_then_else:SI
9702 (match_operator 6 "arm_comparison_operator"
9703 [(match_operand:SI 4 "s_register_operand" "r,r")
9704 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9705 (match_operator:SI 7 "shift_operator"
9706 [(match_operand:SI 2 "s_register_operand" "r,r")
9707 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9708 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9709 (clobber (reg:CC CC_REGNUM))]
9710 "TARGET_ARM"
9711 "#"
9712 [(set_attr "conds" "clob")
9713 (set_attr "length" "8,12")
9714 (set_attr "type" "multiple")]
9715 )
9716
9717 (define_insn "*if_shift_move"
9718 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9719 (if_then_else:SI
9720 (match_operator 5 "arm_comparison_operator"
9721 [(match_operand 6 "cc_register" "") (const_int 0)])
9722 (match_operator:SI 4 "shift_operator"
9723 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9724 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9725 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9726 "TARGET_ARM"
9727 "@
9728 mov%d5\\t%0, %2%S4
9729 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9730 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9731 [(set_attr "conds" "use")
9732 (set_attr "shift" "2")
9733 (set_attr "length" "4,8,8")
9734 (set_attr_alternative "type"
9735 [(if_then_else (match_operand 3 "const_int_operand" "")
9736 (const_string "mov_shift" )
9737 (const_string "mov_shift_reg"))
9738 (const_string "multiple")
9739 (const_string "multiple")])]
9740 )
9741
9742 (define_insn "*ifcompare_move_shift"
9743 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9744 (if_then_else:SI
9745 (match_operator 6 "arm_comparison_operator"
9746 [(match_operand:SI 4 "s_register_operand" "r,r")
9747 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9748 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9749 (match_operator:SI 7 "shift_operator"
9750 [(match_operand:SI 2 "s_register_operand" "r,r")
9751 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9752 (clobber (reg:CC CC_REGNUM))]
9753 "TARGET_ARM"
9754 "#"
9755 [(set_attr "conds" "clob")
9756 (set_attr "length" "8,12")
9757 (set_attr "type" "multiple")]
9758 )
9759
9760 (define_insn "*if_move_shift"
9761 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9762 (if_then_else:SI
9763 (match_operator 5 "arm_comparison_operator"
9764 [(match_operand 6 "cc_register" "") (const_int 0)])
9765 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9766 (match_operator:SI 4 "shift_operator"
9767 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9768 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9769 "TARGET_ARM"
9770 "@
9771 mov%D5\\t%0, %2%S4
9772 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9773 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9774 [(set_attr "conds" "use")
9775 (set_attr "shift" "2")
9776 (set_attr "length" "4,8,8")
9777 (set_attr_alternative "type"
9778 [(if_then_else (match_operand 3 "const_int_operand" "")
9779 (const_string "mov_shift" )
9780 (const_string "mov_shift_reg"))
9781 (const_string "multiple")
9782 (const_string "multiple")])]
9783 )
9784
9785 (define_insn "*ifcompare_shift_shift"
9786 [(set (match_operand:SI 0 "s_register_operand" "=r")
9787 (if_then_else:SI
9788 (match_operator 7 "arm_comparison_operator"
9789 [(match_operand:SI 5 "s_register_operand" "r")
9790 (match_operand:SI 6 "arm_add_operand" "rIL")])
9791 (match_operator:SI 8 "shift_operator"
9792 [(match_operand:SI 1 "s_register_operand" "r")
9793 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9794 (match_operator:SI 9 "shift_operator"
9795 [(match_operand:SI 3 "s_register_operand" "r")
9796 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9797 (clobber (reg:CC CC_REGNUM))]
9798 "TARGET_ARM"
9799 "#"
9800 [(set_attr "conds" "clob")
9801 (set_attr "length" "12")
9802 (set_attr "type" "multiple")]
9803 )
9804
9805 (define_insn "*if_shift_shift"
9806 [(set (match_operand:SI 0 "s_register_operand" "=r")
9807 (if_then_else:SI
9808 (match_operator 5 "arm_comparison_operator"
9809 [(match_operand 8 "cc_register" "") (const_int 0)])
9810 (match_operator:SI 6 "shift_operator"
9811 [(match_operand:SI 1 "s_register_operand" "r")
9812 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9813 (match_operator:SI 7 "shift_operator"
9814 [(match_operand:SI 3 "s_register_operand" "r")
9815 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
9816 "TARGET_ARM"
9817 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
9818 [(set_attr "conds" "use")
9819 (set_attr "shift" "1")
9820 (set_attr "length" "8")
9821 (set (attr "type") (if_then_else
9822 (and (match_operand 2 "const_int_operand" "")
9823 (match_operand 4 "const_int_operand" ""))
9824 (const_string "mov_shift")
9825 (const_string "mov_shift_reg")))]
9826 )
9827
9828 (define_insn "*ifcompare_not_arith"
9829 [(set (match_operand:SI 0 "s_register_operand" "=r")
9830 (if_then_else:SI
9831 (match_operator 6 "arm_comparison_operator"
9832 [(match_operand:SI 4 "s_register_operand" "r")
9833 (match_operand:SI 5 "arm_add_operand" "rIL")])
9834 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9835 (match_operator:SI 7 "shiftable_operator"
9836 [(match_operand:SI 2 "s_register_operand" "r")
9837 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
9838 (clobber (reg:CC CC_REGNUM))]
9839 "TARGET_ARM"
9840 "#"
9841 [(set_attr "conds" "clob")
9842 (set_attr "length" "12")
9843 (set_attr "type" "multiple")]
9844 )
9845
9846 (define_insn "*if_not_arith"
9847 [(set (match_operand:SI 0 "s_register_operand" "=r")
9848 (if_then_else:SI
9849 (match_operator 5 "arm_comparison_operator"
9850 [(match_operand 4 "cc_register" "") (const_int 0)])
9851 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9852 (match_operator:SI 6 "shiftable_operator"
9853 [(match_operand:SI 2 "s_register_operand" "r")
9854 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
9855 "TARGET_ARM"
9856 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
9857 [(set_attr "conds" "use")
9858 (set_attr "type" "mvn_reg")
9859 (set_attr "length" "8")]
9860 )
9861
9862 (define_insn "*ifcompare_arith_not"
9863 [(set (match_operand:SI 0 "s_register_operand" "=r")
9864 (if_then_else:SI
9865 (match_operator 6 "arm_comparison_operator"
9866 [(match_operand:SI 4 "s_register_operand" "r")
9867 (match_operand:SI 5 "arm_add_operand" "rIL")])
9868 (match_operator:SI 7 "shiftable_operator"
9869 [(match_operand:SI 2 "s_register_operand" "r")
9870 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9871 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
9872 (clobber (reg:CC CC_REGNUM))]
9873 "TARGET_ARM"
9874 "#"
9875 [(set_attr "conds" "clob")
9876 (set_attr "length" "12")
9877 (set_attr "type" "multiple")]
9878 )
9879
9880 (define_insn "*if_arith_not"
9881 [(set (match_operand:SI 0 "s_register_operand" "=r")
9882 (if_then_else:SI
9883 (match_operator 5 "arm_comparison_operator"
9884 [(match_operand 4 "cc_register" "") (const_int 0)])
9885 (match_operator:SI 6 "shiftable_operator"
9886 [(match_operand:SI 2 "s_register_operand" "r")
9887 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9888 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
9889 "TARGET_ARM"
9890 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
9891 [(set_attr "conds" "use")
9892 (set_attr "type" "multiple")
9893 (set_attr "length" "8")]
9894 )
9895
9896 (define_insn "*ifcompare_neg_move"
9897 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9898 (if_then_else:SI
9899 (match_operator 5 "arm_comparison_operator"
9900 [(match_operand:SI 3 "s_register_operand" "r,r")
9901 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9902 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
9903 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9904 (clobber (reg:CC CC_REGNUM))]
9905 "TARGET_ARM"
9906 "#"
9907 [(set_attr "conds" "clob")
9908 (set_attr "length" "8,12")
9909 (set_attr "type" "multiple")]
9910 )
9911
9912 (define_insn_and_split "*if_neg_move"
9913 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
9914 (if_then_else:SI
9915 (match_operator 4 "arm_comparison_operator"
9916 [(match_operand 3 "cc_register" "") (const_int 0)])
9917 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
9918 (match_operand:SI 1 "s_register_operand" "0,0")))]
9919 "TARGET_32BIT"
9920 "#"
9921 "&& reload_completed"
9922 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
9923 (set (match_dup 0) (neg:SI (match_dup 2))))]
9924 ""
9925 [(set_attr "conds" "use")
9926 (set_attr "length" "4")
9927 (set_attr "arch" "t2,32")
9928 (set_attr "enabled_for_short_it" "yes,no")
9929 (set_attr "type" "logic_shift_imm")]
9930 )
9931
9932 (define_insn "*ifcompare_move_neg"
9933 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9934 (if_then_else:SI
9935 (match_operator 5 "arm_comparison_operator"
9936 [(match_operand:SI 3 "s_register_operand" "r,r")
9937 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9938 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9939 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
9940 (clobber (reg:CC CC_REGNUM))]
9941 "TARGET_ARM"
9942 "#"
9943 [(set_attr "conds" "clob")
9944 (set_attr "length" "8,12")
9945 (set_attr "type" "multiple")]
9946 )
9947
9948 (define_insn_and_split "*if_move_neg"
9949 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
9950 (if_then_else:SI
9951 (match_operator 4 "arm_comparison_operator"
9952 [(match_operand 3 "cc_register" "") (const_int 0)])
9953 (match_operand:SI 1 "s_register_operand" "0,0")
9954 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
9955 "TARGET_32BIT"
9956 "#"
9957 "&& reload_completed"
9958 [(cond_exec (match_dup 5)
9959 (set (match_dup 0) (neg:SI (match_dup 2))))]
9960 {
9961 machine_mode mode = GET_MODE (operands[3]);
9962 rtx_code rc = GET_CODE (operands[4]);
9963
9964 if (mode == CCFPmode || mode == CCFPEmode)
9965 rc = reverse_condition_maybe_unordered (rc);
9966 else
9967 rc = reverse_condition (rc);
9968
9969 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
9970 }
9971 [(set_attr "conds" "use")
9972 (set_attr "length" "4")
9973 (set_attr "arch" "t2,32")
9974 (set_attr "enabled_for_short_it" "yes,no")
9975 (set_attr "type" "logic_shift_imm")]
9976 )
9977
9978 (define_insn "*arith_adjacentmem"
9979 [(set (match_operand:SI 0 "s_register_operand" "=r")
9980 (match_operator:SI 1 "shiftable_operator"
9981 [(match_operand:SI 2 "memory_operand" "m")
9982 (match_operand:SI 3 "memory_operand" "m")]))
9983 (clobber (match_scratch:SI 4 "=r"))]
9984 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
9985 "*
9986 {
9987 rtx ldm[3];
9988 rtx arith[4];
9989 rtx base_reg;
9990 HOST_WIDE_INT val1 = 0, val2 = 0;
9991
9992 if (REGNO (operands[0]) > REGNO (operands[4]))
9993 {
9994 ldm[1] = operands[4];
9995 ldm[2] = operands[0];
9996 }
9997 else
9998 {
9999 ldm[1] = operands[0];
10000 ldm[2] = operands[4];
10001 }
10002
10003 base_reg = XEXP (operands[2], 0);
10004
10005 if (!REG_P (base_reg))
10006 {
10007 val1 = INTVAL (XEXP (base_reg, 1));
10008 base_reg = XEXP (base_reg, 0);
10009 }
10010
10011 if (!REG_P (XEXP (operands[3], 0)))
10012 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10013
10014 arith[0] = operands[0];
10015 arith[3] = operands[1];
10016
10017 if (val1 < val2)
10018 {
10019 arith[1] = ldm[1];
10020 arith[2] = ldm[2];
10021 }
10022 else
10023 {
10024 arith[1] = ldm[2];
10025 arith[2] = ldm[1];
10026 }
10027
10028 ldm[0] = base_reg;
10029 if (val1 != 0 && val2 != 0)
10030 {
10031 rtx ops[3];
10032
10033 if (val1 == 4 || val2 == 4)
10034 /* Other val must be 8, since we know they are adjacent and neither
10035 is zero. */
10036 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
10037 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10038 {
10039 ldm[0] = ops[0] = operands[4];
10040 ops[1] = base_reg;
10041 ops[2] = GEN_INT (val1);
10042 output_add_immediate (ops);
10043 if (val1 < val2)
10044 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10045 else
10046 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10047 }
10048 else
10049 {
10050 /* Offset is out of range for a single add, so use two ldr. */
10051 ops[0] = ldm[1];
10052 ops[1] = base_reg;
10053 ops[2] = GEN_INT (val1);
10054 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10055 ops[0] = ldm[2];
10056 ops[2] = GEN_INT (val2);
10057 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10058 }
10059 }
10060 else if (val1 != 0)
10061 {
10062 if (val1 < val2)
10063 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10064 else
10065 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10066 }
10067 else
10068 {
10069 if (val1 < val2)
10070 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10071 else
10072 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10073 }
10074 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10075 return \"\";
10076 }"
10077 [(set_attr "length" "12")
10078 (set_attr "predicable" "yes")
10079 (set_attr "type" "load_4")]
10080 )
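
;; Illustrative example (rb stands for the common base register; actual
;; register allocation will vary): for
;;   int f (int *p) { return p[0] + p[1]; }
;; the pattern can emit
;;   ldmia rb, {r0, r4}
;;   add   r0, r0, r4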
10081
10082 ; This pattern is never tried by combine, so do it as a peephole.
10083
10084 (define_peephole2
10085 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10086 (match_operand:SI 1 "arm_general_register_operand" ""))
10087 (set (reg:CC CC_REGNUM)
10088 (compare:CC (match_dup 1) (const_int 0)))]
10089 "TARGET_ARM"
10090 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10091 (set (match_dup 0) (match_dup 1))])]
10092 ""
10093 )
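
;; For example (registers illustrative), the sequence
;;   mov r0, r1
;;   cmp r1, #0
;; is rewritten into the compare-and-move parallel above, which is emitted
;; as a single flag-setting instruction such as
;;   subs r0, r1, #0
;; (or just "cmp r0, #0" when source and destination are the same register).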
10094
10095 (define_split
10096 [(set (match_operand:SI 0 "s_register_operand" "")
10097 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10098 (const_int 0))
10099 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10100 [(match_operand:SI 3 "s_register_operand" "")
10101 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10102 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10103 "TARGET_ARM"
10104 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10105 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10106 (match_dup 5)))]
10107 ""
10108 )
10109
10110 ;; This split can be used because CC_Z mode implies that the following
10111 ;; branch will be an equality, or an unsigned inequality, so the sign
10112 ;; extension is not needed.
10113
10114 (define_split
10115 [(set (reg:CC_Z CC_REGNUM)
10116 (compare:CC_Z
10117 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10118 (const_int 24))
10119 (match_operand 1 "const_int_operand" "")))
10120 (clobber (match_scratch:SI 2 ""))]
10121 "TARGET_ARM
10122 && ((UINTVAL (operands[1]))
10123 == ((UINTVAL (operands[1])) >> 24) << 24)"
10124 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10125 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10126 "
10127 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10128 "
10129 )
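
;; Illustrative effect of the split above: a byte comparison that combine
;; has expressed as a shifted word compare, e.g. testing
;;   *p == 'A'        /* unsigned char *p */
;; via (*p << 24) == ('A' << 24), is rewritten to a zero-extending load
;; plus a small-constant compare (registers and addressing illustrative):
;;   ldrb  r2, [r0]
;;   cmp   r2, #65
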
10130 ;; ??? Check the patterns above for Thumb-2 usefulness
10131
10132 (define_expand "prologue"
10133 [(clobber (const_int 0))]
10134 "TARGET_EITHER"
10135 "if (TARGET_32BIT)
10136 arm_expand_prologue ();
10137 else
10138 thumb1_expand_prologue ();
10139 DONE;
10140 "
10141 )
10142
10143 (define_expand "epilogue"
10144 [(clobber (const_int 0))]
10145 "TARGET_EITHER"
10146 "
10147 if (crtl->calls_eh_return)
10148 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
10149 if (TARGET_THUMB1)
10150 {
10151 thumb1_expand_epilogue ();
10152 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10153 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10154 }
10155 else if (HAVE_return)
10156 {
10157 /* HAVE_return is testing for USE_RETURN_INSN (FALSE). Hence,
10158 no need for explicit testing again. */
10159 emit_jump_insn (gen_return ());
10160 }
10161 else if (TARGET_32BIT)
10162 {
10163 arm_expand_epilogue (true);
10164 }
10165 DONE;
10166 "
10167 )
10168
10169 ;; Note - although unspec_volatiles USE all hard registers,
10170 ;; USEs are ignored after reload has completed. Thus we need
10171 ;; to add an unspec of the link register to ensure that flow
10172 ;; does not think that it is unused by the sibcall branch that
10173 ;; will replace the standard function epilogue.
10174 (define_expand "sibcall_epilogue"
10175 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
10176 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10177 "TARGET_32BIT"
10178 "
10179 arm_expand_epilogue (false);
10180 DONE;
10181 "
10182 )
10183
10184 (define_expand "eh_epilogue"
10185 [(use (match_operand:SI 0 "register_operand"))
10186 (use (match_operand:SI 1 "register_operand"))
10187 (use (match_operand:SI 2 "register_operand"))]
10188 "TARGET_EITHER"
10189 "
10190 {
10191 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10192 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
10193 {
10194 rtx ra = gen_rtx_REG (Pmode, 2);
10195
10196 emit_move_insn (ra, operands[2]);
10197 operands[2] = ra;
10198 }
10199 /* This is a hack -- we may have crystallized the function type too
10200 early. */
10201 cfun->machine->func_type = 0;
10202 }"
10203 )
10204
10205 ;; This split is only used during output to reduce the number of patterns
10206 ;; that need assembler instructions adding to them. We allowed the setting
10207 ;; of the conditions to be implicit during rtl generation so that
10208 ;; the conditional compare patterns would work. However this conflicts to
10209 ;; some extent with the conditional data operations, so we have to split them
10210 ;; up again here.
10211
10212 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10213 ;; conditional execution sufficient?
10214
10215 (define_split
10216 [(set (match_operand:SI 0 "s_register_operand" "")
10217 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10218 [(match_operand 2 "" "") (match_operand 3 "" "")])
10219 (match_dup 0)
10220 (match_operand 4 "" "")))
10221 (clobber (reg:CC CC_REGNUM))]
10222 "TARGET_ARM && reload_completed"
10223 [(set (match_dup 5) (match_dup 6))
10224 (cond_exec (match_dup 7)
10225 (set (match_dup 0) (match_dup 4)))]
10226 "
10227 {
10228 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10229 operands[2], operands[3]);
10230 enum rtx_code rc = GET_CODE (operands[1]);
10231
10232 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10233 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10234 if (mode == CCFPmode || mode == CCFPEmode)
10235 rc = reverse_condition_maybe_unordered (rc);
10236 else
10237 rc = reverse_condition (rc);
10238
10239 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10240 }"
10241 )
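
;; As an illustration (registers arbitrary), an insn of the form
;;   r0 = (r2 < r3) ? r0 : r4;
;; is split into a compare followed by a conditionally executed move of
;; the "else" value under the reversed condition:
;;   cmp   r2, r3
;;   movge r0, r4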
10242
10243 (define_split
10244 [(set (match_operand:SI 0 "s_register_operand" "")
10245 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10246 [(match_operand 2 "" "") (match_operand 3 "" "")])
10247 (match_operand 4 "" "")
10248 (match_dup 0)))
10249 (clobber (reg:CC CC_REGNUM))]
10250 "TARGET_ARM && reload_completed"
10251 [(set (match_dup 5) (match_dup 6))
10252 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10253 (set (match_dup 0) (match_dup 4)))]
10254 "
10255 {
10256 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10257 operands[2], operands[3]);
10258
10259 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10260 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10261 }"
10262 )
10263
10264 (define_split
10265 [(set (match_operand:SI 0 "s_register_operand" "")
10266 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10267 [(match_operand 2 "" "") (match_operand 3 "" "")])
10268 (match_operand 4 "" "")
10269 (match_operand 5 "" "")))
10270 (clobber (reg:CC CC_REGNUM))]
10271 "TARGET_ARM && reload_completed"
10272 [(set (match_dup 6) (match_dup 7))
10273 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10274 (set (match_dup 0) (match_dup 4)))
10275 (cond_exec (match_dup 8)
10276 (set (match_dup 0) (match_dup 5)))]
10277 "
10278 {
10279 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10280 operands[2], operands[3]);
10281 enum rtx_code rc = GET_CODE (operands[1]);
10282
10283 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10284 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10285 if (mode == CCFPmode || mode == CCFPEmode)
10286 rc = reverse_condition_maybe_unordered (rc);
10287 else
10288 rc = reverse_condition (rc);
10289
10290 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10291 }"
10292 )
10293
10294 (define_split
10295 [(set (match_operand:SI 0 "s_register_operand" "")
10296 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10297 [(match_operand:SI 2 "s_register_operand" "")
10298 (match_operand:SI 3 "arm_add_operand" "")])
10299 (match_operand:SI 4 "arm_rhs_operand" "")
10300 (not:SI
10301 (match_operand:SI 5 "s_register_operand" ""))))
10302 (clobber (reg:CC CC_REGNUM))]
10303 "TARGET_ARM && reload_completed"
10304 [(set (match_dup 6) (match_dup 7))
10305 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10306 (set (match_dup 0) (match_dup 4)))
10307 (cond_exec (match_dup 8)
10308 (set (match_dup 0) (not:SI (match_dup 5))))]
10309 "
10310 {
10311 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10312 operands[2], operands[3]);
10313 enum rtx_code rc = GET_CODE (operands[1]);
10314
10315 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10316 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10317 if (mode == CCFPmode || mode == CCFPEmode)
10318 rc = reverse_condition_maybe_unordered (rc);
10319 else
10320 rc = reverse_condition (rc);
10321
10322 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10323 }"
10324 )
10325
10326 (define_insn "*cond_move_not"
10327 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10328 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10329 [(match_operand 3 "cc_register" "") (const_int 0)])
10330 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10331 (not:SI
10332 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10333 "TARGET_ARM"
10334 "@
10335 mvn%D4\\t%0, %2
10336 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10337 [(set_attr "conds" "use")
10338 (set_attr "type" "mvn_reg,multiple")
10339 (set_attr "length" "4,8")]
10340 )
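
;; For example, with an EQ condition this emits "mvnne r0, r2" when the
;; destination already holds operand 1, and "moveq r0, r1" followed by
;; "mvnne r0, r2" otherwise (a sketch based on the alternatives above).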
10341
10342 ;; The next two patterns occur when an AND operation is followed by an
10343 ;; scc insn sequence.
10344
10345 (define_insn "*sign_extract_onebit"
10346 [(set (match_operand:SI 0 "s_register_operand" "=r")
10347 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10348 (const_int 1)
10349 (match_operand:SI 2 "const_int_operand" "n")))
10350 (clobber (reg:CC CC_REGNUM))]
10351 "TARGET_ARM"
10352 "*
10353 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10354 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10355 return \"mvnne\\t%0, #0\";
10356 "
10357 [(set_attr "conds" "clob")
10358 (set_attr "length" "8")
10359 (set_attr "type" "multiple")]
10360 )
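
;; For instance, extracting bit 3 of r1 as a signed one-bit field gives
;;   ands  r0, r1, #8
;;   mvnne r0, #0
;; leaving r0 equal to -1 if the bit was set and 0 otherwise.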
10361
10362 (define_insn "*not_signextract_onebit"
10363 [(set (match_operand:SI 0 "s_register_operand" "=r")
10364 (not:SI
10365 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10366 (const_int 1)
10367 (match_operand:SI 2 "const_int_operand" "n"))))
10368 (clobber (reg:CC CC_REGNUM))]
10369 "TARGET_ARM"
10370 "*
10371 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10372 output_asm_insn (\"tst\\t%1, %2\", operands);
10373 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10374 return \"movne\\t%0, #0\";
10375 "
10376 [(set_attr "conds" "clob")
10377 (set_attr "length" "12")
10378 (set_attr "type" "multiple")]
10379 )
10380 ;; ??? The above patterns need auditing for Thumb-2
10381
10382 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10383 ;; expressions. For simplicity, the first register is also in the unspec
10384 ;; part.
10385 ;; To avoid the use of a GNU extension, the length attribute is computed
10386 ;; by the C function arm_attr_length_push_multi.
10387 (define_insn "*push_multi"
10388 [(match_parallel 2 "multi_register_push"
10389 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
10390 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10391 UNSPEC_PUSH_MULT))])]
10392 ""
10393 "*
10394 {
10395 int num_saves = XVECLEN (operands[2], 0);
10396
10397 /* For the StrongARM at least it is faster to
10398 use STR to store only a single register.
10399 In Thumb mode always use push, and the assembler will pick
10400 something appropriate. */
10401 if (num_saves == 1 && TARGET_ARM)
10402 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
10403 else
10404 {
10405 int i;
10406 char pattern[100];
10407
10408 if (TARGET_32BIT)
10409 strcpy (pattern, \"push%?\\t{%1\");
10410 else
10411 strcpy (pattern, \"push\\t{%1\");
10412
10413 for (i = 1; i < num_saves; i++)
10414 {
10415 strcat (pattern, \", %|\");
10416 strcat (pattern,
10417 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10418 }
10419
10420 strcat (pattern, \"}\");
10421 output_asm_insn (pattern, operands);
10422 }
10423
10424 return \"\";
10425 }"
10426 [(set_attr "type" "store_16")
10427 (set (attr "length")
10428 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
10429 )
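
;; For example (a sketch of the output above): saving only r4 in ARM state
;; emits "str r4, [sp, #-4]!", while saving r4, r5 and lr emits
;; "push {r4, r5, lr}".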
10430
10431 (define_insn "stack_tie"
10432 [(set (mem:BLK (scratch))
10433 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10434 (match_operand:SI 1 "s_register_operand" "rk")]
10435 UNSPEC_PRLG_STK))]
10436 ""
10437 ""
10438 [(set_attr "length" "0")
10439 (set_attr "type" "block")]
10440 )
10441
10442 ;; Pop (as used in epilogue RTL)
10443 ;;
10444 (define_insn "*load_multiple_with_writeback"
10445 [(match_parallel 0 "load_multiple_operation"
10446 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10447 (plus:SI (match_dup 1)
10448 (match_operand:SI 2 "const_int_I_operand" "I")))
10449 (set (match_operand:SI 3 "s_register_operand" "=rk")
10450 (mem:SI (match_dup 1)))
10451 ])]
10452 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10453 "*
10454 {
10455 arm_output_multireg_pop (operands, /*return_pc=*/false,
10456 /*cond=*/const_true_rtx,
10457 /*reverse=*/false,
10458 /*update=*/true);
10459 return \"\";
10460 }
10461 "
10462 [(set_attr "type" "load_16")
10463 (set_attr "predicable" "yes")
10464 (set (attr "length")
10465 (symbol_ref "arm_attr_length_pop_multi (operands,
10466 /*return_pc=*/false,
10467 /*write_back_p=*/true)"))]
10468 )
10469
10470 ;; Pop with return (as used in epilogue RTL)
10471 ;;
10472 ;; This instruction is generated when the registers are popped at the end
10473 ;; of the epilogue. Instead of popping the value into LR and then
10474 ;; generating a jump to LR, the value is popped directly into PC. Hence the
10475 ;; pattern is combined with (return).
10476 (define_insn "*pop_multiple_with_writeback_and_return"
10477 [(match_parallel 0 "pop_multiple_return"
10478 [(return)
10479 (set (match_operand:SI 1 "s_register_operand" "+rk")
10480 (plus:SI (match_dup 1)
10481 (match_operand:SI 2 "const_int_I_operand" "I")))
10482 (set (match_operand:SI 3 "s_register_operand" "=rk")
10483 (mem:SI (match_dup 1)))
10484 ])]
10485 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10486 "*
10487 {
10488 arm_output_multireg_pop (operands, /*return_pc=*/true,
10489 /*cond=*/const_true_rtx,
10490 /*reverse=*/false,
10491 /*update=*/true);
10492 return \"\";
10493 }
10494 "
10495 [(set_attr "type" "load_16")
10496 (set_attr "predicable" "yes")
10497 (set (attr "length")
10498 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10499 /*write_back_p=*/true)"))]
10500 )
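
;; Typically this prints something like "pop {r4, r5, pc}", restoring the
;; saved registers and returning in a single instruction (the exact mnemonic
;; is chosen by arm_output_multireg_pop).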
10501
10502 (define_insn "*pop_multiple_with_return"
10503 [(match_parallel 0 "pop_multiple_return"
10504 [(return)
10505 (set (match_operand:SI 2 "s_register_operand" "=rk")
10506 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
10507 ])]
10508 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10509 "*
10510 {
10511 arm_output_multireg_pop (operands, /*return_pc=*/true,
10512 /*cond=*/const_true_rtx,
10513 /*reverse=*/false,
10514 /*update=*/false);
10515 return \"\";
10516 }
10517 "
10518 [(set_attr "type" "load_16")
10519 (set_attr "predicable" "yes")
10520 (set (attr "length")
10521 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10522 /*write_back_p=*/false)"))]
10523 )
10524
10525 ;; Load into PC and return
10526 (define_insn "*ldr_with_return"
10527 [(return)
10528 (set (reg:SI PC_REGNUM)
10529 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
10530 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10531 "ldr%?\t%|pc, [%0], #4"
10532 [(set_attr "type" "load_4")
10533 (set_attr "predicable" "yes")]
10534 )
10535 ;; Pop for floating point registers (as used in epilogue RTL)
10536 (define_insn "*vfp_pop_multiple_with_writeback"
10537 [(match_parallel 0 "pop_multiple_fp"
10538 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10539 (plus:SI (match_dup 1)
10540 (match_operand:SI 2 "const_int_I_operand" "I")))
10541 (set (match_operand:DF 3 "vfp_hard_register_operand" "")
10542 (mem:DF (match_dup 1)))])]
10543 "TARGET_32BIT && TARGET_HARD_FLOAT"
10544 "*
10545 {
10546 int num_regs = XVECLEN (operands[0], 0);
10547 char pattern[100];
10548 rtx op_list[2];
10549 strcpy (pattern, \"vldm\\t\");
10550 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
10551 strcat (pattern, \"!, {\");
10552 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
10553 strcat (pattern, \"%P0\");
10554 if ((num_regs - 1) > 1)
10555 {
10556 strcat (pattern, \"-%P1\");
10557 op_list[1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
10558 }
10559
10560 strcat (pattern, \"}\");
10561 output_asm_insn (pattern, op_list);
10562 return \"\";
10563 }
10564 "
10565 [(set_attr "type" "load_16")
10566 (set_attr "conds" "unconditional")
10567 (set_attr "predicable" "no")]
10568 )
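
;; For example, restoring d8-d15 from the stack prints "vldm sp!, {d8-d15}".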
10569
10570 ;; Special patterns for dealing with the constant pool
10571
10572 (define_insn "align_4"
10573 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10574 "TARGET_EITHER"
10575 "*
10576 assemble_align (32);
10577 return \"\";
10578 "
10579 [(set_attr "type" "no_insn")]
10580 )
10581
10582 (define_insn "align_8"
10583 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10584 "TARGET_EITHER"
10585 "*
10586 assemble_align (64);
10587 return \"\";
10588 "
10589 [(set_attr "type" "no_insn")]
10590 )
10591
10592 (define_insn "consttable_end"
10593 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10594 "TARGET_EITHER"
10595 "*
10596 making_const_table = FALSE;
10597 return \"\";
10598 "
10599 [(set_attr "type" "no_insn")]
10600 )
10601
10602 (define_insn "consttable_1"
10603 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10604 "TARGET_EITHER"
10605 "*
10606 making_const_table = TRUE;
10607 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10608 assemble_zeros (3);
10609 return \"\";
10610 "
10611 [(set_attr "length" "4")
10612 (set_attr "type" "no_insn")]
10613 )
10614
10615 (define_insn "consttable_2"
10616 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10617 "TARGET_EITHER"
10618 "*
10619 {
10620 rtx x = operands[0];
10621 making_const_table = TRUE;
10622 switch (GET_MODE_CLASS (GET_MODE (x)))
10623 {
10624 case MODE_FLOAT:
10625 arm_emit_fp16_const (x);
10626 break;
10627 default:
10628 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10629 assemble_zeros (2);
10630 break;
10631 }
10632 return \"\";
10633 }"
10634 [(set_attr "length" "4")
10635 (set_attr "type" "no_insn")]
10636 )
10637
10638 (define_insn "consttable_4"
10639 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10640 "TARGET_EITHER"
10641 "*
10642 {
10643 rtx x = operands[0];
10644 making_const_table = TRUE;
10645 scalar_float_mode float_mode;
10646 if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
10647 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
10648 else
10649 {
10650 /* XXX: Sometimes gcc does something really dumb and ends up with
10651 a HIGH in a constant pool entry, usually because it's trying to
10652 load into a VFP register. We know this will always be used in
10653 combination with a LO_SUM which ignores the high bits, so just
10654 strip off the HIGH. */
10655 if (GET_CODE (x) == HIGH)
10656 x = XEXP (x, 0);
10657 assemble_integer (x, 4, BITS_PER_WORD, 1);
10658 mark_symbol_refs_as_used (x);
10659 }
10660 return \"\";
10661 }"
10662 [(set_attr "length" "4")
10663 (set_attr "type" "no_insn")]
10664 )
10665
10666 (define_insn "consttable_8"
10667 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10668 "TARGET_EITHER"
10669 "*
10670 {
10671 making_const_table = TRUE;
10672 scalar_float_mode float_mode;
10673 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
10674 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
10675 float_mode, BITS_PER_WORD);
10676 else
10677 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10678 return \"\";
10679 }"
10680 [(set_attr "length" "8")
10681 (set_attr "type" "no_insn")]
10682 )
10683
10684 (define_insn "consttable_16"
10685 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10686 "TARGET_EITHER"
10687 "*
10688 {
10689 making_const_table = TRUE;
10690 scalar_float_mode float_mode;
10691 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
10692 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
10693 float_mode, BITS_PER_WORD);
10694 else
10695 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10696 return \"\";
10697 }"
10698 [(set_attr "length" "16")
10699 (set_attr "type" "no_insn")]
10700 )
10701
10702 ;; V5 instructions.
10703
10704 (define_insn "clzsi2"
10705 [(set (match_operand:SI 0 "s_register_operand" "=r")
10706 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10707 "TARGET_32BIT && arm_arch5t"
10708 "clz%?\\t%0, %1"
10709 [(set_attr "predicable" "yes")
10710 (set_attr "type" "clz")])
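
;; __builtin_clz maps directly onto this instruction; e.g. clz of 0x00010000
;; (highest set bit is bit 16) yields 15.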
10711
10712 (define_insn "rbitsi2"
10713 [(set (match_operand:SI 0 "s_register_operand" "=r")
10714 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
10715 "TARGET_32BIT && arm_arch_thumb2"
10716 "rbit%?\\t%0, %1"
10717 [(set_attr "predicable" "yes")
10718 (set_attr "type" "clz")])
10719
10720 ;; Keep this as a CTZ expression until after reload and then split
10721 ;; into RBIT + CLZ. Since RBIT is represented as an UNSPEC it is unlikely
10722 ;; to fold with any other expression.
10723
10724 (define_insn_and_split "ctzsi2"
10725 [(set (match_operand:SI 0 "s_register_operand" "=r")
10726 (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10727 "TARGET_32BIT && arm_arch_thumb2"
10728 "#"
10729 "&& reload_completed"
10730 [(const_int 0)]
10731 "
10732 emit_insn (gen_rbitsi2 (operands[0], operands[1]));
10733 emit_insn (gen_clzsi2 (operands[0], operands[0]));
10734 DONE;
10735 ")
10736
10737 ;; V5E instructions.
10738
10739 (define_insn "prefetch"
10740 [(prefetch (match_operand:SI 0 "address_operand" "p")
10741 (match_operand:SI 1 "" "")
10742 (match_operand:SI 2 "" ""))]
10743 "TARGET_32BIT && arm_arch5te"
10744 "pld\\t%a0"
10745 [(set_attr "type" "load_4")]
10746 )
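
;; e.g. __builtin_prefetch (p) emits "pld [r0]"; the rw/locality hints
;; (operands 1 and 2) are not encoded by this pattern.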
10747
10748 ;; General predication pattern
10749
10750 (define_cond_exec
10751 [(match_operator 0 "arm_comparison_operator"
10752 [(match_operand 1 "cc_register" "")
10753 (const_int 0)])]
10754 "TARGET_32BIT
10755 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
10756 ""
10757 [(set_attr "predicated" "yes")]
10758 )
10759
10760 (define_insn "force_register_use"
10761 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
10762 ""
10763 "%@ %0 needed"
10764 [(set_attr "length" "0")
10765 (set_attr "type" "no_insn")]
10766 )
10767
10768
10769 ;; Patterns for exception handling
10770
10771 (define_expand "eh_return"
10772 [(use (match_operand 0 "general_operand"))]
10773 "TARGET_EITHER"
10774 "
10775 {
10776 if (TARGET_32BIT)
10777 emit_insn (gen_arm_eh_return (operands[0]));
10778 else
10779 emit_insn (gen_thumb_eh_return (operands[0]));
10780 DONE;
10781 }"
10782 )
10783
10784 ;; We can't expand this before we know where the link register is stored.
10785 (define_insn_and_split "arm_eh_return"
10786 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10787 VUNSPEC_EH_RETURN)
10788 (clobber (match_scratch:SI 1 "=&r"))]
10789 "TARGET_ARM"
10790 "#"
10791 "&& reload_completed"
10792 [(const_int 0)]
10793 "
10794 {
10795 arm_set_return_address (operands[0], operands[1]);
10796 DONE;
10797 }"
10798 )
10799
10800 \f
10801 ;; TLS support
10802
10803 (define_insn "load_tp_hard"
10804 [(set (match_operand:SI 0 "register_operand" "=r")
10805 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10806 "TARGET_HARD_TP"
10807 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10808 [(set_attr "predicable" "yes")
10809 (set_attr "type" "mrs")]
10810 )
10811
10812 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10813 (define_insn "load_tp_soft_fdpic"
10814 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10815 (clobber (reg:SI FDPIC_REGNUM))
10816 (clobber (reg:SI LR_REGNUM))
10817 (clobber (reg:SI IP_REGNUM))
10818 (clobber (reg:CC CC_REGNUM))]
10819 "TARGET_SOFT_TP && TARGET_FDPIC"
10820 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10821 [(set_attr "conds" "clob")
10822 (set_attr "type" "branch")]
10823 )
10824
10825 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10826 (define_insn "load_tp_soft"
10827 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10828 (clobber (reg:SI LR_REGNUM))
10829 (clobber (reg:SI IP_REGNUM))
10830 (clobber (reg:CC CC_REGNUM))]
10831 "TARGET_SOFT_TP && !TARGET_FDPIC"
10832 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10833 [(set_attr "conds" "clob")
10834 (set_attr "type" "branch")]
10835 )
10836
10837 ;; tls descriptor call
10838 (define_insn "tlscall"
10839 [(set (reg:SI R0_REGNUM)
10840 (unspec:SI [(reg:SI R0_REGNUM)
10841 (match_operand:SI 0 "" "X")
10842 (match_operand 1 "" "")] UNSPEC_TLS))
10843 (clobber (reg:SI R1_REGNUM))
10844 (clobber (reg:SI LR_REGNUM))
10845 (clobber (reg:SI CC_REGNUM))]
10846 "TARGET_GNU2_TLS"
10847 {
10848 targetm.asm_out.internal_label (asm_out_file, "LPIC",
10849 INTVAL (operands[1]));
10850 return "bl\\t%c0(tlscall)";
10851 }
10852 [(set_attr "conds" "clob")
10853 (set_attr "length" "4")
10854 (set_attr "type" "branch")]
10855 )
10856
10857 ;; For thread pointer builtin
10858 (define_expand "get_thread_pointersi"
10859 [(match_operand:SI 0 "s_register_operand")]
10860 ""
10861 "
10862 {
10863 arm_load_tp (operands[0]);
10864 DONE;
10865 }")
10866
10867 ;;
10868
10869 ;; We only care about the lower 16 bits of the constant
10870 ;; being inserted into the upper 16 bits of the register.
10871 (define_insn "*arm_movtas_ze"
10872 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
10873 (const_int 16)
10874 (const_int 16))
10875 (match_operand:SI 1 "const_int_operand" ""))]
10876 "TARGET_HAVE_MOVT"
10877 "@
10878 movt%?\t%0, %L1
10879 movt\t%0, %L1"
10880 [(set_attr "arch" "32,v8mb")
10881 (set_attr "predicable" "yes")
10882 (set_attr "length" "4")
10883 (set_attr "type" "alu_sreg")]
10884 )
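
;; e.g. inserting the constant 0x87654321 into the top halfword keeps only
;; 0x4321, giving roughly "movt r0, #17185".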
10885
10886 (define_insn "*arm_rev"
10887 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
10888 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
10889 "arm_arch6"
10890 "@
10891 rev\t%0, %1
10892 rev%?\t%0, %1
10893 rev%?\t%0, %1"
10894 [(set_attr "arch" "t1,t2,32")
10895 (set_attr "length" "2,2,4")
10896 (set_attr "predicable" "no,yes,yes")
10897 (set_attr "type" "rev")]
10898 )
10899
10900 (define_expand "arm_legacy_rev"
10901 [(set (match_operand:SI 2 "s_register_operand")
10902 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
10903 (const_int 16))
10904 (match_dup 1)))
10905 (set (match_dup 2)
10906 (lshiftrt:SI (match_dup 2)
10907 (const_int 8)))
10908 (set (match_operand:SI 3 "s_register_operand")
10909 (rotatert:SI (match_dup 1)
10910 (const_int 8)))
10911 (set (match_dup 2)
10912 (and:SI (match_dup 2)
10913 (const_int -65281)))
10914 (set (match_operand:SI 0 "s_register_operand")
10915 (xor:SI (match_dup 3)
10916 (match_dup 2)))]
10917 "TARGET_32BIT"
10918 ""
10919 )
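
;; A rough trace with operand 1 = 0xAABBCCDD: XORing with the 16-bit rotation
;; gives bytes (AA^CC, BB^DD, AA^CC, BB^DD); after the shift right by 8 and
;; the mask with 0xFFFF00FF only (00, AA^CC, 00, AA^CC) remains; XORing that
;; into the 8-bit rotation (DD, AA, BB, CC) yields 0xDDCCBBAA, the
;; byte-reversed value.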
10920
10921 ;; Reuse temporaries to keep register pressure down.
10922 (define_expand "thumb_legacy_rev"
10923 [(set (match_operand:SI 2 "s_register_operand")
10924 (ashift:SI (match_operand:SI 1 "s_register_operand")
10925 (const_int 24)))
10926 (set (match_operand:SI 3 "s_register_operand")
10927 (lshiftrt:SI (match_dup 1)
10928 (const_int 24)))
10929 (set (match_dup 3)
10930 (ior:SI (match_dup 3)
10931 (match_dup 2)))
10932 (set (match_operand:SI 4 "s_register_operand")
10933 (const_int 16))
10934 (set (match_operand:SI 5 "s_register_operand")
10935 (rotatert:SI (match_dup 1)
10936 (match_dup 4)))
10937 (set (match_dup 2)
10938 (ashift:SI (match_dup 5)
10939 (const_int 24)))
10940 (set (match_dup 5)
10941 (lshiftrt:SI (match_dup 5)
10942 (const_int 24)))
10943 (set (match_dup 5)
10944 (ior:SI (match_dup 5)
10945 (match_dup 2)))
10946 (set (match_dup 5)
10947 (rotatert:SI (match_dup 5)
10948 (match_dup 4)))
10949 (set (match_operand:SI 0 "s_register_operand")
10950 (ior:SI (match_dup 5)
10951 (match_dup 3)))]
10952 "TARGET_THUMB"
10953 ""
10954 )
10955
10956 ;; ARM-specific expansion of signed mod by power of 2
10957 ;; using conditional negate.
10958 ;; For r0 % n where n is a power of 2 produce:
10959 ;; rsbs r1, r0, #0
10960 ;; and r0, r0, #(n - 1)
10961 ;; and r1, r1, #(n - 1)
10962 ;; rsbpl r0, r1, #0
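;;
;; A worked example (for illustration): with r0 = -5 and n = 4, "rsbs" sets
;; r1 = 5 (flags "pl"), the ANDs give r0 = 3 and r1 = 1, and the final
;; "rsbpl" then stores r0 = -1, matching C's -5 % 4 == -1. For positive
;; inputs the "pl" condition is false and the masked value in r0 stands.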
10963
10964 (define_expand "modsi3"
10965 [(match_operand:SI 0 "register_operand")
10966 (match_operand:SI 1 "register_operand")
10967 (match_operand:SI 2 "const_int_operand")]
10968 "TARGET_32BIT"
10969 {
10970 HOST_WIDE_INT val = INTVAL (operands[2]);
10971
10972 if (val <= 0
10973 || exact_log2 (val) <= 0)
10974 FAIL;
10975
10976 rtx mask = GEN_INT (val - 1);
10977
10978 /* In the special case of r0 % 2 we can emit the even shorter sequence:
10979 cmp r0, #0
10980 and r0, r0, #1
10981 rsblt r0, r0, #0. */
10982
10983 if (val == 2)
10984 {
10985 rtx cc_reg = arm_gen_compare_reg (LT,
10986 operands[1], const0_rtx, NULL_RTX);
10987 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
10988 rtx masked = gen_reg_rtx (SImode);
10989
10990 emit_insn (gen_andsi3 (masked, operands[1], mask));
10991 emit_move_insn (operands[0],
10992 gen_rtx_IF_THEN_ELSE (SImode, cond,
10993 gen_rtx_NEG (SImode,
10994 masked),
10995 masked));
10996 DONE;
10997 }
10998
10999 rtx neg_op = gen_reg_rtx (SImode);
11000 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
11001 operands[1]));
11002
11003 /* Extract the condition register and mode. */
11004 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
11005 rtx cc_reg = SET_DEST (cmp);
11006 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
11007
11008 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
11009
11010 rtx masked_neg = gen_reg_rtx (SImode);
11011 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
11012
11013 /* We want a conditional negate here, but emitting COND_EXEC rtxes
11014 during expand does not always work. Do an IF_THEN_ELSE instead. */
11015 emit_move_insn (operands[0],
11016 gen_rtx_IF_THEN_ELSE (SImode, cond,
11017 gen_rtx_NEG (SImode, masked_neg),
11018 operands[0]));
11019
11020
11021 DONE;
11022 }
11023 )
11024
11025 (define_expand "bswapsi2"
11026 [(set (match_operand:SI 0 "s_register_operand")
11027 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
11028 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
11029 "
11030 if (!arm_arch6)
11031 {
11032 rtx op2 = gen_reg_rtx (SImode);
11033 rtx op3 = gen_reg_rtx (SImode);
11034
11035 if (TARGET_THUMB)
11036 {
11037 rtx op4 = gen_reg_rtx (SImode);
11038 rtx op5 = gen_reg_rtx (SImode);
11039
11040 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
11041 op2, op3, op4, op5));
11042 }
11043 else
11044 {
11045 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
11046 op2, op3));
11047 }
11048
11049 DONE;
11050 }
11051 "
11052 )
11053
11054 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
11055 ;; and unsigned variants, respectively. For rev16, expose
11056 ;; byte-swapping in the lower 16 bits only.
11057 (define_insn "*arm_revsh"
11058 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
11059 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
11060 "arm_arch6"
11061 "@
11062 revsh\t%0, %1
11063 revsh%?\t%0, %1
11064 revsh%?\t%0, %1"
11065 [(set_attr "arch" "t1,t2,32")
11066 (set_attr "length" "2,2,4")
11067 (set_attr "type" "rev")]
11068 )
11069
11070 (define_insn "*arm_rev16"
11071 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
11072 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
11073 "arm_arch6"
11074 "@
11075 rev16\t%0, %1
11076 rev16%?\t%0, %1
11077 rev16%?\t%0, %1"
11078 [(set_attr "arch" "t1,t2,32")
11079 (set_attr "length" "2,2,4")
11080 (set_attr "type" "rev")]
11081 )
11082
11083 ;; There are no canonicalisation rules for the position of the lshiftrt, ashift
11084 ;; operations within an IOR/AND RTX, so we have two patterns, one matching
11085 ;; each valid permutation.
11086
11087 (define_insn "arm_rev16si2"
11088 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11089 (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
11090 (const_int 8))
11091 (match_operand:SI 3 "const_int_operand" "n,n,n"))
11092 (and:SI (lshiftrt:SI (match_dup 1)
11093 (const_int 8))
11094 (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
11095 "arm_arch6
11096 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11097 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11098 "rev16\\t%0, %1"
11099 [(set_attr "arch" "t1,t2,32")
11100 (set_attr "length" "2,2,4")
11101 (set_attr "type" "rev")]
11102 )
11103
11104 (define_insn "arm_rev16si2_alt"
11105 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11106 (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
11107 (const_int 8))
11108 (match_operand:SI 2 "const_int_operand" "n,n,n"))
11109 (and:SI (ashift:SI (match_dup 1)
11110 (const_int 8))
11111 (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
11112 "arm_arch6
11113 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11114 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11115 "rev16\\t%0, %1"
11116 [(set_attr "arch" "t1,t2,32")
11117 (set_attr "length" "2,2,4")
11118 (set_attr "type" "rev")]
11119 )
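
;; A typical source form that matches these two patterns (a sketch) is
;;   ((x << 8) & 0xFF00FF00) | ((x >> 8) & 0x00FF00FF)
;; which swaps the bytes within each halfword, e.g. 0xAABBCCDD -> 0xBBAADDCC.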
11120
11121 (define_expand "bswaphi2"
11122 [(set (match_operand:HI 0 "s_register_operand")
11123 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
11124 "arm_arch6"
11125 ""
11126 )
11127
11128 ;; Patterns for LDRD/STRD in Thumb2 mode
11129
11130 (define_insn "*thumb2_ldrd"
11131 [(set (match_operand:SI 0 "s_register_operand" "=r")
11132 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11133 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
11134 (set (match_operand:SI 3 "s_register_operand" "=r")
11135 (mem:SI (plus:SI (match_dup 1)
11136 (match_operand:SI 4 "const_int_operand" ""))))]
11137 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11138 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
11139 && (operands_ok_ldrd_strd (operands[0], operands[3],
11140 operands[1], INTVAL (operands[2]),
11141 false, true))"
11142 "ldrd%?\t%0, %3, [%1, %2]"
11143 [(set_attr "type" "load_8")
11144 (set_attr "predicable" "yes")])
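
;; For example, the two adjacent loads
;;   ldr r0, [r2, #8]
;;   ldr r1, [r2, #12]
;; are matched here and emitted as "ldrd r0, r1, [r2, #8]".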
11145
11146 (define_insn "*thumb2_ldrd_base"
11147 [(set (match_operand:SI 0 "s_register_operand" "=r")
11148 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11149 (set (match_operand:SI 2 "s_register_operand" "=r")
11150 (mem:SI (plus:SI (match_dup 1)
11151 (const_int 4))))]
11152 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11153 && (operands_ok_ldrd_strd (operands[0], operands[2],
11154 operands[1], 0, false, true))"
11155 "ldrd%?\t%0, %2, [%1]"
11156 [(set_attr "type" "load_8")
11157 (set_attr "predicable" "yes")])
11158
11159 (define_insn "*thumb2_ldrd_base_neg"
11160 [(set (match_operand:SI 0 "s_register_operand" "=r")
11161 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11162 (const_int -4))))
11163 (set (match_operand:SI 2 "s_register_operand" "=r")
11164 (mem:SI (match_dup 1)))]
11165 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11166 && (operands_ok_ldrd_strd (operands[0], operands[2],
11167 operands[1], -4, false, true))"
11168 "ldrd%?\t%0, %2, [%1, #-4]"
11169 [(set_attr "type" "load_8")
11170 (set_attr "predicable" "yes")])
11171
11172 (define_insn "*thumb2_strd"
11173 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11174 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
11175 (match_operand:SI 2 "s_register_operand" "r"))
11176 (set (mem:SI (plus:SI (match_dup 0)
11177 (match_operand:SI 3 "const_int_operand" "")))
11178 (match_operand:SI 4 "s_register_operand" "r"))]
11179 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11180 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
11181 && (operands_ok_ldrd_strd (operands[2], operands[4],
11182 operands[0], INTVAL (operands[1]),
11183 false, false))"
11184 "strd%?\t%2, %4, [%0, %1]"
11185 [(set_attr "type" "store_8")
11186 (set_attr "predicable" "yes")])
11187
11188 (define_insn "*thumb2_strd_base"
11189 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
11190 (match_operand:SI 1 "s_register_operand" "r"))
11191 (set (mem:SI (plus:SI (match_dup 0)
11192 (const_int 4)))
11193 (match_operand:SI 2 "s_register_operand" "r"))]
11194 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11195 && (operands_ok_ldrd_strd (operands[1], operands[2],
11196 operands[0], 0, false, false))"
11197 "strd%?\t%1, %2, [%0]"
11198 [(set_attr "type" "store_8")
11199 (set_attr "predicable" "yes")])
11200
11201 (define_insn "*thumb2_strd_base_neg"
11202 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11203 (const_int -4)))
11204 (match_operand:SI 1 "s_register_operand" "r"))
11205 (set (mem:SI (match_dup 0))
11206 (match_operand:SI 2 "s_register_operand" "r"))]
11207 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11208 && (operands_ok_ldrd_strd (operands[1], operands[2],
11209 operands[0], -4, false, false))"
11210 "strd%?\t%1, %2, [%0, #-4]"
11211 [(set_attr "type" "store_8")
11212 (set_attr "predicable" "yes")])
11213
11214 ;; ARMv8 CRC32 instructions.
11215 (define_insn "arm_<crc_variant>"
11216 [(set (match_operand:SI 0 "s_register_operand" "=r")
11217 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
11218 (match_operand:<crc_mode> 2 "s_register_operand" "r")]
11219 CRC))]
11220 "TARGET_CRC32"
11221 "<crc_variant>\\t%0, %1, %2"
11222 [(set_attr "type" "crc")
11223 (set_attr "conds" "unconditional")]
11224 )
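
;; For example, the ACLE intrinsic __crc32w (from arm_acle.h) expands to
;; this pattern and prints as "crc32w r0, r1, r2" (a sketch; register
;; allocation will vary).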
11225
11226 ;; Load the load/store double peephole optimizations.
11227 (include "ldrdstrd.md")
11228
11229 ;; Load the load/store multiple patterns
11230 (include "ldmstm.md")
11231
11232 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
11233 ;; large lists without explicit writeback generated for the APCS_FRAME epilogue.
11234 ;; The operands are validated through the load_multiple_operation
11235 ;; match_parallel predicate rather than through constraints, so the pattern is
11236 ;; only enabled after reload.
11237 (define_insn "*load_multiple"
11238 [(match_parallel 0 "load_multiple_operation"
11239 [(set (match_operand:SI 2 "s_register_operand" "=rk")
11240 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11241 ])]
11242 "TARGET_32BIT && reload_completed"
11243 "*
11244 {
11245 arm_output_multireg_pop (operands, /*return_pc=*/false,
11246 /*cond=*/const_true_rtx,
11247 /*reverse=*/false,
11248 /*update=*/false);
11249 return \"\";
11250 }
11251 "
11252 [(set_attr "predicable" "yes")]
11253 )
11254
11255 (define_expand "copysignsf3"
11256 [(match_operand:SF 0 "register_operand")
11257 (match_operand:SF 1 "register_operand")
11258 (match_operand:SF 2 "register_operand")]
11259 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11260 "{
11261 emit_move_insn (operands[0], operands[2]);
11262 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
11263 GEN_INT (31), GEN_INT (0),
11264 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
11265 DONE;
11266 }"
11267 )
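
;; In other words (a sketch): copysignf (x, y) copies y into the result and
;; then uses the insv_t2 bit-field insert (BFI) to overwrite its low 31 bits
;; with those of x, so the result keeps y's sign bit and x's magnitude,
;; e.g. copysignf (2.0f, -1.0f) == -2.0f.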
11268
11269 (define_expand "copysigndf3"
11270 [(match_operand:DF 0 "register_operand")
11271 (match_operand:DF 1 "register_operand")
11272 (match_operand:DF 2 "register_operand")]
11273 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11274 "{
11275 rtx op0_low = gen_lowpart (SImode, operands[0]);
11276 rtx op0_high = gen_highpart (SImode, operands[0]);
11277 rtx op1_low = gen_lowpart (SImode, operands[1]);
11278 rtx op1_high = gen_highpart (SImode, operands[1]);
11279 rtx op2_high = gen_highpart (SImode, operands[2]);
11280
11281 rtx scratch1 = gen_reg_rtx (SImode);
11282 rtx scratch2 = gen_reg_rtx (SImode);
11283 emit_move_insn (scratch1, op2_high);
11284 emit_move_insn (scratch2, op1_high);
11285
11286 emit_insn (gen_rtx_SET (scratch1,
11287 gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT (31))));
11288 emit_insn (gen_insv_t2 (scratch2, GEN_INT (1), GEN_INT (31), scratch1));
11289 emit_move_insn (op0_low, op1_low);
11290 emit_move_insn (op0_high, scratch2);
11291
11292 DONE;
11293 }"
11294 )
11295
11296 ;; movmisalign patterns for HImode and SImode.
11297 (define_expand "movmisalign<mode>"
11298 [(match_operand:HSI 0 "general_operand")
11299 (match_operand:HSI 1 "general_operand")]
11300 "unaligned_access"
11301 {
11302 /* This pattern is not permitted to fail during expansion: if both arguments
11303 are non-registers (e.g. memory := constant), force operand 1 into a
11304 register. */
11305 rtx (* gen_unaligned_load)(rtx, rtx);
11306 rtx tmp_dest = operands[0];
11307 if (!s_register_operand (operands[0], <MODE>mode)
11308 && !s_register_operand (operands[1], <MODE>mode))
11309 operands[1] = force_reg (<MODE>mode, operands[1]);
11310
11311 if (<MODE>mode == HImode)
11312 {
11313 gen_unaligned_load = gen_unaligned_loadhiu;
11314 tmp_dest = gen_reg_rtx (SImode);
11315 }
11316 else
11317 gen_unaligned_load = gen_unaligned_loadsi;
11318
11319 if (MEM_P (operands[1]))
11320 {
11321 emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
11322 if (<MODE>mode == HImode)
11323 emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
11324 }
11325 else
11326 emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
11327
11328 DONE;
11329 })
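
;; A sketch of a typical use: for
;;   struct __attribute__((packed)) s { char c; int i; };
;;   int get (struct s *p) { return p->i; }
;; the misaligned SImode read is expanded through unaligned_loadsi, which on
;; targets with unaligned access enabled is an ordinary LDR.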
11330
11331 (define_insn "arm_<cdp>"
11332 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11333 (match_operand:SI 1 "immediate_operand" "n")
11334 (match_operand:SI 2 "immediate_operand" "n")
11335 (match_operand:SI 3 "immediate_operand" "n")
11336 (match_operand:SI 4 "immediate_operand" "n")
11337 (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
11338 "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
11339 {
11340 arm_const_bounds (operands[0], 0, 16);
11341 arm_const_bounds (operands[1], 0, 16);
11342 arm_const_bounds (operands[2], 0, (1 << 5));
11343 arm_const_bounds (operands[3], 0, (1 << 5));
11344 arm_const_bounds (operands[4], 0, (1 << 5));
11345 arm_const_bounds (operands[5], 0, 8);
11346 return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
11347 }
11348 [(set_attr "length" "4")
11349 (set_attr "type" "coproc")])
11350
11351 (define_insn "*ldc"
11352 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11353 (match_operand:SI 1 "immediate_operand" "n")
11354 (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
11355 "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
11356 {
11357 arm_const_bounds (operands[0], 0, 16);
11358 arm_const_bounds (operands[1], 0, (1 << 5));
11359 return "<ldc>\\tp%c0, CR%c1, %2";
11360 }
11361 [(set_attr "length" "4")
11362 (set_attr "type" "coproc")])
11363
11364 (define_insn "*stc"
11365 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11366 (match_operand:SI 1 "immediate_operand" "n")
11367 (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
11368 "arm_coproc_builtin_available (VUNSPEC_<STC>)"
11369 {
11370 arm_const_bounds (operands[0], 0, 16);
11371 arm_const_bounds (operands[1], 0, (1 << 5));
11372 return "<stc>\\tp%c0, CR%c1, %2";
11373 }
11374 [(set_attr "length" "4")
11375 (set_attr "type" "coproc")])
11376
11377 (define_expand "arm_<ldc>"
11378 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11379 (match_operand:SI 1 "immediate_operand")
11380 (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
11381 "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
11382
11383 (define_expand "arm_<stc>"
11384 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11385 (match_operand:SI 1 "immediate_operand")
11386 (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
11387 "arm_coproc_builtin_available (VUNSPEC_<STC>)")
11388
11389 (define_insn "arm_<mcr>"
11390 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11391 (match_operand:SI 1 "immediate_operand" "n")
11392 (match_operand:SI 2 "s_register_operand" "r")
11393 (match_operand:SI 3 "immediate_operand" "n")
11394 (match_operand:SI 4 "immediate_operand" "n")
11395 (match_operand:SI 5 "immediate_operand" "n")] MCRI)
11396 (use (match_dup 2))]
11397 "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
11398 {
11399 arm_const_bounds (operands[0], 0, 16);
11400 arm_const_bounds (operands[1], 0, 8);
11401 arm_const_bounds (operands[3], 0, (1 << 5));
11402 arm_const_bounds (operands[4], 0, (1 << 5));
11403 arm_const_bounds (operands[5], 0, 8);
11404 return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
11405 }
11406 [(set_attr "length" "4")
11407 (set_attr "type" "coproc")])
11408
11409 (define_insn "arm_<mrc>"
11410 [(set (match_operand:SI 0 "s_register_operand" "=r")
11411 (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
11412 (match_operand:SI 2 "immediate_operand" "n")
11413 (match_operand:SI 3 "immediate_operand" "n")
11414 (match_operand:SI 4 "immediate_operand" "n")
11415 (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
11416 "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
11417 {
11418 arm_const_bounds (operands[1], 0, 16);
11419 arm_const_bounds (operands[2], 0, 8);
11420 arm_const_bounds (operands[3], 0, (1 << 5));
11421 arm_const_bounds (operands[4], 0, (1 << 5));
11422 arm_const_bounds (operands[5], 0, 8);
11423 return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
11424 }
11425 [(set_attr "length" "4")
11426 (set_attr "type" "coproc")])
11427
11428 (define_insn "arm_<mcrr>"
11429 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11430 (match_operand:SI 1 "immediate_operand" "n")
11431 (match_operand:DI 2 "s_register_operand" "r")
11432 (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
11433 (use (match_dup 2))]
11434 "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
11435 {
11436 arm_const_bounds (operands[0], 0, 16);
11437 arm_const_bounds (operands[1], 0, 8);
11438 arm_const_bounds (operands[3], 0, (1 << 5));
11439 return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
11440 }
11441 [(set_attr "length" "4")
11442 (set_attr "type" "coproc")])
11443
11444 (define_insn "arm_<mrrc>"
11445 [(set (match_operand:DI 0 "s_register_operand" "=r")
11446 (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
11447 (match_operand:SI 2 "immediate_operand" "n")
11448 (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
11449 "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
11450 {
11451 arm_const_bounds (operands[1], 0, 16);
11452 arm_const_bounds (operands[2], 0, 8);
11453 arm_const_bounds (operands[3], 0, (1 << 5));
11454 return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
11455 }
11456 [(set_attr "length" "4")
11457 (set_attr "type" "coproc")])
11458
11459 (define_expand "speculation_barrier"
11460 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11461 "TARGET_EITHER"
11462 "
11463 /* For Thumb-1 (except Armv8 derivatives) and for pre-Armv7, we don't
11464 have a usable barrier (and probably don't need one in practice).
11465 But to be safe if such code is run on later architectures, call a
11466 helper function in libgcc that will do the right thing for the active
11467 system. */
11468 if (!(arm_arch7 || arm_arch8))
11469 {
11470 arm_emit_speculation_barrier_function ();
11471 DONE;
11472 }
11473 "
11474 )
11475
11476 ;; Generate a hard speculation barrier when we have not enabled speculation
11477 ;; tracking.
11478 (define_insn "*speculation_barrier_insn"
11479 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11480 "arm_arch7 || arm_arch8"
11481 "isb\;dsb\\tsy"
11482 [(set_attr "type" "block")
11483 (set_attr "length" "8")]
11484 )
11485
11486 ;; Vector bits common to IWMMXT and Neon
11487 (include "vec-common.md")
11488 ;; Load the Intel Wireless Multimedia Extension patterns
11489 (include "iwmmxt.md")
11490 ;; Load the VFP co-processor patterns
11491 (include "vfp.md")
11492 ;; Thumb-1 patterns
11493 (include "thumb1.md")
11494 ;; Thumb-2 patterns
11495 (include "thumb2.md")
11496 ;; Neon patterns
11497 (include "neon.md")
11498 ;; Crypto patterns
11499 (include "crypto.md")
11500 ;; Synchronization Primitives
11501 (include "sync.md")
11502 ;; Fixed-point patterns
11503 (include "arm-fixed.md")