1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2019 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
6
7 ;; This file is part of GCC.
8
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
13
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
18
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
22
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
24
25 \f
26 ;;---------------------------------------------------------------------------
27 ;; Constants
28
29 ;; Register numbers -- All machine registers should be defined here
30 (define_constants
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (R4_REGNUM 4) ; Fifth CORE register
34 (FDPIC_REGNUM 9) ; FDPIC register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (LAST_ARM_REGNUM 15) ;
40 (CC_REGNUM 100) ; Condition code pseudo register
41 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
42 (APSRQ_REGNUM 104) ; Q bit pseudo register
43 (APSRGE_REGNUM 105) ; GE bits pseudo register
44 ]
45 )
46 ;; 3rd operand to select_dominance_cc_mode
47 (define_constants
48 [(DOM_CC_X_AND_Y 0)
49 (DOM_CC_NX_OR_Y 1)
50 (DOM_CC_X_OR_Y 2)
51 ]
52 )
53 ;; conditional compare combination
54 (define_constants
55 [(CMP_CMP 0)
56 (CMN_CMP 1)
57 (CMP_CMN 2)
58 (CMN_CMN 3)
59 (NUM_OF_COND_CMP 4)
60 ]
61 )
62
63 \f
64 ;;---------------------------------------------------------------------------
65 ;; Attributes
66
67 ;; Processor type. This is created automatically from arm-cores.def.
68 (include "arm-tune.md")
69
70 ;; Instruction classification types
71 (include "types.md")
72
73 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
74 ; generating ARM code. This is used to control the length of some insn
75 ; patterns that share the same RTL in both ARM and Thumb code.
76 (define_attr "is_thumb" "yes,no"
77 (const (if_then_else (symbol_ref "TARGET_THUMB")
78 (const_string "yes") (const_string "no"))))
79
80 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
81 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
82
83 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
84 (define_attr "is_thumb1" "yes,no"
85 (const (if_then_else (symbol_ref "TARGET_THUMB1")
86 (const_string "yes") (const_string "no"))))
87
88 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
89 ; The arm_restrict_it flag enables the "short IT" feature which
90 ; restricts IT blocks to a single 16-bit instruction.
91 ; This attribute should only be used on 16-bit Thumb-2 instructions
92 ; which may be predicated (the "predicable" attribute must be set).
93 (define_attr "predicable_short_it" "no,yes" (const_string "no"))
94
95 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
96 ; This attribute should only be used on instructions which may emit
97 ; an IT block in their expansion which is not a short IT.
98 (define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
99
100 ;; Operand number of an input operand that is shifted. Zero if the
101 ;; given instruction does not shift one of its input operands.
102 (define_attr "shift" "" (const_int 0))
103
104 ;; [For compatibility with AArch64 in pipeline models]
105 ;; Attribute that specifies whether or not the instruction touches fp
106 ;; registers.
107 (define_attr "fp" "no,yes" (const_string "no"))
108
109 ; Floating Point Unit. If we only have floating point emulation, then there
110 ; is no point in scheduling the floating point insns. (Well, for best
111 ; performance we should try and group them together).
112 (define_attr "fpu" "none,vfp"
113 (const (symbol_ref "arm_fpu_attr")))
114
115 ; Predicated means that the insn form is conditionally executed based on a
116 ; predicate. We default to 'no' because no Thumb patterns match this rule
117 ; and not all ARM insns do.
118 (define_attr "predicated" "yes,no" (const_string "no"))
119
120 ; LENGTH of an instruction (in bytes)
121 (define_attr "length" ""
122 (const_int 4))
123
124 ; The architecture which supports the instruction (or alternative).
125 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
126 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
127 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
128 ; arm_arch6. "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
129 ; Baseline. This attribute is used to compute attribute "enabled",
130 ; use type "any" to enable an alternative in all cases.
131 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,v8mb,iwmmxt,iwmmxt2,armv6_or_vfpv3,neon"
132 (const_string "any"))
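
; As an illustrative sketch of how this attribute is used (the alternatives
; here are hypothetical): a pattern whose first alternative is valid only in
; Thumb-2 and whose second is valid everywhere would mark them with
;   (set_attr "arch" "t2,*")
; and the "enabled" attribute below then disables the first alternative
; whenever the current target is not Thumb-2 (via "arch_enabled").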
133
134 (define_attr "arch_enabled" "no,yes"
135 (cond [(eq_attr "arch" "any")
136 (const_string "yes")
137
138 (and (eq_attr "arch" "a")
139 (match_test "TARGET_ARM"))
140 (const_string "yes")
141
142 (and (eq_attr "arch" "t")
143 (match_test "TARGET_THUMB"))
144 (const_string "yes")
145
146 (and (eq_attr "arch" "t1")
147 (match_test "TARGET_THUMB1"))
148 (const_string "yes")
149
150 (and (eq_attr "arch" "t2")
151 (match_test "TARGET_THUMB2"))
152 (const_string "yes")
153
154 (and (eq_attr "arch" "32")
155 (match_test "TARGET_32BIT"))
156 (const_string "yes")
157
158 (and (eq_attr "arch" "v6")
159 (match_test "TARGET_32BIT && arm_arch6"))
160 (const_string "yes")
161
162 (and (eq_attr "arch" "nov6")
163 (match_test "TARGET_32BIT && !arm_arch6"))
164 (const_string "yes")
165
166 (and (eq_attr "arch" "v6t2")
167 (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
168 (const_string "yes")
169
170 (and (eq_attr "arch" "v8mb")
171 (match_test "TARGET_THUMB1 && arm_arch8"))
172 (const_string "yes")
173
174 (and (eq_attr "arch" "iwmmxt2")
175 (match_test "TARGET_REALLY_IWMMXT2"))
176 (const_string "yes")
177
178 (and (eq_attr "arch" "armv6_or_vfpv3")
179 (match_test "arm_arch6 || TARGET_VFP3"))
180 (const_string "yes")
181
182 (and (eq_attr "arch" "neon")
183 (match_test "TARGET_NEON"))
184 (const_string "yes")
185 ]
186
187 (const_string "no")))
188
189 (define_attr "opt" "any,speed,size"
190 (const_string "any"))
191
192 (define_attr "opt_enabled" "no,yes"
193 (cond [(eq_attr "opt" "any")
194 (const_string "yes")
195
196 (and (eq_attr "opt" "speed")
197 (match_test "optimize_function_for_speed_p (cfun)"))
198 (const_string "yes")
199
200 (and (eq_attr "opt" "size")
201 (match_test "optimize_function_for_size_p (cfun)"))
202 (const_string "yes")]
203 (const_string "no")))
204
205 (define_attr "use_literal_pool" "no,yes"
206 (cond [(and (eq_attr "type" "f_loads,f_loadd")
207 (match_test "CONSTANT_P (operands[1])"))
208 (const_string "yes")]
209 (const_string "no")))
210
211 ; Enable all alternatives that are both arch_enabled and insn_enabled.
212 ; FIXME: opt_enabled has been temporarily removed until we have
213 ; an attribute that allows the use of such alternatives.
214 ; This depends on caching of speed_p, size_p on a per
215 ; alternative basis. The problem is that the enabled attribute
216 ; cannot depend on any state that is not cached or is not constant
217 ; for a compilation unit. We probably need a generic "hot/cold"
218 ; alternative which if implemented can help with this. We disable this
219 ; until such a time as this is implemented and / or the improvements or
220 ; regressions with removing this attribute are double checked.
221 ; See ashldi3_neon and <shift>di3_neon in neon.md.
222
223 (define_attr "enabled" "no,yes"
224 (cond [(and (eq_attr "predicable_short_it" "no")
225 (and (eq_attr "predicated" "yes")
226 (match_test "arm_restrict_it")))
227 (const_string "no")
228
229 (and (eq_attr "enabled_for_short_it" "no")
230 (match_test "arm_restrict_it"))
231 (const_string "no")
232
233 (eq_attr "arch_enabled" "no")
234 (const_string "no")]
235 (const_string "yes")))
236
237 ; POOL_RANGE is how far away from a constant pool entry that this insn
238 ; can be placed. If the distance is zero, then this insn will never
239 ; reference the pool.
240 ; Note that for Thumb constant pools the PC value is rounded down to the
241 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
242 ; Thumb insns) should be set to <max_range> - 2.
243 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
244 ; before its address. It is set to <max_range> - (8 + <data_size>).
245 (define_attr "arm_pool_range" "" (const_int 0))
246 (define_attr "thumb2_pool_range" "" (const_int 0))
247 (define_attr "arm_neg_pool_range" "" (const_int 0))
248 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
249
250 (define_attr "pool_range" ""
251 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
252 (attr "arm_pool_range")))
253 (define_attr "neg_pool_range" ""
254 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
255 (attr "arm_neg_pool_range")))
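
; As a worked example of the ranges described above (numbers illustrative
; only): an insn that loads a 4-byte datum and whose addressing mode can
; reach <max_range> = 4096 bytes would use a Thumb pool_range of
; 4096 - 2 = 4094 and a neg_pool_range of 4096 - (8 + 4) = 4084.  The
; individual load patterns set these attributes per-alternative with
; set_attr.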
256
257 ; An assembler sequence may clobber the condition codes without us knowing.
258 ; If such an insn references the pool, then we have no way of knowing how,
259 ; so use the most conservative value for pool_range.
260 (define_asm_attributes
261 [(set_attr "conds" "clob")
262 (set_attr "length" "4")
263 (set_attr "pool_range" "250")])
264
265 ; Load scheduling, set from the arm_ld_sched variable
266 ; initialized by arm_option_override()
267 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
268
269 ; condition codes: this one is used by final_prescan_insn to speed up
270 ; conditionalizing instructions. It saves having to scan the rtl to see if
271 ; it uses or alters the condition codes.
272 ;
273 ; USE means that the condition codes are used by the insn in the process of
274 ; outputting code; this means (at present) that we can't use the insn in
275 ; inlined branches.
276 ;
277 ; SET means that the purpose of the insn is to set the condition codes in a
278 ; well defined manner.
279 ;
280 ; CLOB means that the condition codes are altered in an undefined manner, if
281 ; they are altered at all
282 ;
283 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
284 ; that the instruction does not use or alter the condition codes.
285 ;
286 ; NOCOND means that the instruction does not use or alter the condition
287 ; codes but can be converted into a conditionally executed instruction.
288
289 (define_attr "conds" "use,set,clob,unconditional,nocond"
290 (if_then_else
291 (ior (eq_attr "is_thumb1" "yes")
292 (eq_attr "type" "call"))
293 (const_string "clob")
294 (if_then_else (eq_attr "is_neon_type" "no")
295 (const_string "nocond")
296 (const_string "unconditional"))))
297
298 ; Predicable means that the insn can be conditionally executed based on
299 ; an automatically added predicate (additional patterns are generated by
300 ; gen...). We default to 'no' because no Thumb patterns match this rule
301 ; and not all ARM patterns do.
302 (define_attr "predicable" "no,yes" (const_string "no"))
303
304 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
305 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
306 ; suffer blockages enough to warrant modelling this (and it can adversely
307 ; affect the schedule).
308 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
309
310 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
311 ; to stall the processor. Used with model_wbuf above.
312 (define_attr "write_conflict" "no,yes"
313 (if_then_else (eq_attr "type"
314 "block,call,load_4")
315 (const_string "yes")
316 (const_string "no")))
317
318 ; Classify the insns into those that take one cycle and those that take more
319 ; than one on the main cpu execution unit.
320 (define_attr "core_cycles" "single,multi"
321 (if_then_else (eq_attr "type"
322 "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
323 alu_shift_imm, alu_shift_reg, alu_dsp_reg, alus_ext, alus_imm, alus_sreg,\
324 alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
325 logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
326 logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
327 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
328 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
329 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
330 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
331 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
332 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
333 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
334 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
335 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
336 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
337 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
338 (const_string "single")
339 (const_string "multi")))
340
341 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
342 ;; distant label. Only applicable to Thumb code.
343 (define_attr "far_jump" "yes,no" (const_string "no"))
344
345
346 ;; The number of machine instructions this pattern expands to.
347 ;; Used for Thumb-2 conditional execution.
348 (define_attr "ce_count" "" (const_int 1))
349
350 ;;---------------------------------------------------------------------------
351 ;; Unspecs
352
353 (include "unspecs.md")
354
355 ;;---------------------------------------------------------------------------
356 ;; Mode iterators
357
358 (include "iterators.md")
359
360 ;;---------------------------------------------------------------------------
361 ;; Predicates
362
363 (include "predicates.md")
364 (include "constraints.md")
365
366 ;;---------------------------------------------------------------------------
367 ;; Pipeline descriptions
368
369 (define_attr "tune_cortexr4" "yes,no"
370 (const (if_then_else
371 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
372 (const_string "yes")
373 (const_string "no"))))
374
375 ;; True if the generic scheduling description should be used.
376
377 (define_attr "generic_sched" "yes,no"
378 (const (if_then_else
379 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
380 arm926ejs,arm10e,arm1026ejs,arm1136js,\
381 arm1136jfs,cortexa5,cortexa7,cortexa8,\
382 cortexa9,cortexa12,cortexa15,cortexa17,\
383 cortexa53,cortexa57,cortexm4,cortexm7,\
384 exynosm1,marvell_pj4,xgene1")
385 (eq_attr "tune_cortexr4" "yes"))
386 (const_string "no")
387 (const_string "yes"))))
388
389 (define_attr "generic_vfp" "yes,no"
390 (const (if_then_else
391 (and (eq_attr "fpu" "vfp")
392 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
393 cortexa8,cortexa9,cortexa53,cortexm4,\
394 cortexm7,marvell_pj4,xgene1")
395 (eq_attr "tune_cortexr4" "no"))
396 (const_string "yes")
397 (const_string "no"))))
398
399 (include "marvell-f-iwmmxt.md")
400 (include "arm-generic.md")
401 (include "arm926ejs.md")
402 (include "arm1020e.md")
403 (include "arm1026ejs.md")
404 (include "arm1136jfs.md")
405 (include "fa526.md")
406 (include "fa606te.md")
407 (include "fa626te.md")
408 (include "fmp626.md")
409 (include "fa726te.md")
410 (include "cortex-a5.md")
411 (include "cortex-a7.md")
412 (include "cortex-a8.md")
413 (include "cortex-a9.md")
414 (include "cortex-a15.md")
415 (include "cortex-a17.md")
416 (include "cortex-a53.md")
417 (include "cortex-a57.md")
418 (include "cortex-r4.md")
419 (include "cortex-r4f.md")
420 (include "cortex-m7.md")
421 (include "cortex-m4.md")
422 (include "cortex-m4-fpu.md")
423 (include "exynos-m1.md")
424 (include "vfp11.md")
425 (include "marvell-pj4.md")
426 (include "xgene1.md")
427
428 ;; define_subst and associated attributes
429
430 (define_subst "add_setq"
431 [(set (match_operand:SI 0 "" "")
432 (match_operand:SI 1 "" ""))]
433 ""
434 [(set (match_dup 0)
435 (match_dup 1))
436 (set (reg:CC APSRQ_REGNUM)
437 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))])
438
439 (define_subst_attr "add_clobber_q_name" "add_setq" "" "_setq")
440 (define_subst_attr "add_clobber_q_pred" "add_setq" "!ARM_Q_BIT_READ"
441 "ARM_Q_BIT_READ")
442 \f
443 ;;---------------------------------------------------------------------------
444 ;; Insn patterns
445 ;;
446 ;; Addition insns.
447
448 ;; Note: For DImode insns, there is normally no reason why operands should
449 ;; not be in the same register; what we don't want is for something being
450 ;; written to partially overlap something that is an input.
451
452 (define_expand "adddi3"
453 [(parallel
454 [(set (match_operand:DI 0 "s_register_operand")
455 (plus:DI (match_operand:DI 1 "s_register_operand")
456 (match_operand:DI 2 "reg_or_int_operand")))
457 (clobber (reg:CC CC_REGNUM))])]
458 "TARGET_EITHER"
459 "
460 if (TARGET_THUMB1)
461 {
462 if (!REG_P (operands[2]))
463 operands[2] = force_reg (DImode, operands[2]);
464 }
465 else
466 {
467 rtx lo_result, hi_result, lo_dest, hi_dest;
468 rtx lo_op1, hi_op1, lo_op2, hi_op2;
469 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
470 &lo_op2, &hi_op2);
471 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
472 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
473
474 if (lo_op2 == const0_rtx)
475 {
476 lo_dest = lo_op1;
477 if (!arm_add_operand (hi_op2, SImode))
478 hi_op2 = force_reg (SImode, hi_op2);
479 /* Assume hi_op2 won't also be zero. */
480 emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
481 }
482 else
483 {
484 if (!arm_add_operand (lo_op2, SImode))
485 lo_op2 = force_reg (SImode, lo_op2);
486 if (!arm_not_operand (hi_op2, SImode))
487 hi_op2 = force_reg (SImode, hi_op2);
488
489 emit_insn (gen_addsi3_compare_op1 (lo_dest, lo_op1, lo_op2));
490 rtx carry = gen_rtx_LTU (SImode, gen_rtx_REG (CC_Cmode, CC_REGNUM),
491 const0_rtx);
492 if (hi_op2 == const0_rtx)
493 emit_insn (gen_add0si3_carryin (hi_dest, hi_op1, carry));
494 else
495 emit_insn (gen_addsi3_carryin (hi_dest, hi_op1, hi_op2, carry));
496 }
497
498 if (lo_result != lo_dest)
499 emit_move_insn (lo_result, lo_dest);
500 if (hi_result != hi_dest)
501 emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
502 DONE;
503 }
504 "
505 )
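
;; As an illustration of the expansion above: when the low half of operand 2
;; is nonzero and both halves are in registers, the emitted DImode addition
;; is the classic two-instruction sequence (sketch)
;;   adds  lo_dest, lo_op1, lo_op2   @ sets the carry flag
;;   adc   hi_dest, hi_op1, hi_op2   @ consumes the carry flag
;; via the addsi3_compare_op1 and addsi3_carryin patterns below.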
506
507 (define_expand "addvsi4"
508 [(match_operand:SI 0 "s_register_operand")
509 (match_operand:SI 1 "s_register_operand")
510 (match_operand:SI 2 "arm_add_operand")
511 (match_operand 3 "")]
512 "TARGET_32BIT"
513 {
514 if (CONST_INT_P (operands[2]))
515 emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1], operands[2]));
516 else
517 emit_insn (gen_addsi3_compareV_reg (operands[0], operands[1], operands[2]));
518 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
519
520 DONE;
521 })
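
;; addvsi4 is the standard named pattern for signed addition with an
;; overflow check; it is used, for example, when expanding
;; __builtin_add_overflow on 32-bit signed operands.  The generated code is
;; a flag-setting addition followed by a branch on the V flag, e.g. (sketch,
;; label name illustrative):
;;   adds  r0, r1, r2
;;   bvs   .Loverflow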
522
523 (define_expand "addvdi4"
524 [(match_operand:DI 0 "s_register_operand")
525 (match_operand:DI 1 "s_register_operand")
526 (match_operand:DI 2 "reg_or_int_operand")
527 (match_operand 3 "")]
528 "TARGET_32BIT"
529 {
530 rtx lo_result, hi_result;
531 rtx lo_op1, hi_op1, lo_op2, hi_op2;
532 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
533 &lo_op2, &hi_op2);
534 lo_result = gen_lowpart (SImode, operands[0]);
535 hi_result = gen_highpart (SImode, operands[0]);
536
537 if (lo_op2 == const0_rtx)
538 {
539 emit_move_insn (lo_result, lo_op1);
540 if (!arm_add_operand (hi_op2, SImode))
541 hi_op2 = force_reg (SImode, hi_op2);
542
543 emit_insn (gen_addvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
544 }
545 else
546 {
547 if (!arm_add_operand (lo_op2, SImode))
548 lo_op2 = force_reg (SImode, lo_op2);
549 if (!arm_not_operand (hi_op2, SImode))
550 hi_op2 = force_reg (SImode, hi_op2);
551
552 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
553
554 if (hi_op2 == const0_rtx)
555 emit_insn (gen_addsi3_cin_vout_0 (hi_result, hi_op1));
556 else if (CONST_INT_P (hi_op2))
557 emit_insn (gen_addsi3_cin_vout_imm (hi_result, hi_op1, hi_op2));
558 else
559 emit_insn (gen_addsi3_cin_vout_reg (hi_result, hi_op1, hi_op2));
560
561 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
562 }
563
564 DONE;
565 })
566
567 (define_expand "addsi3_cin_vout_reg"
568 [(parallel
569 [(set (match_dup 3)
570 (compare:CC_V
571 (plus:DI
572 (plus:DI (match_dup 4)
573 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
574 (sign_extend:DI (match_operand:SI 2 "s_register_operand")))
575 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
576 (match_dup 2)))))
577 (set (match_operand:SI 0 "s_register_operand")
578 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
579 (match_dup 2)))])]
580 "TARGET_32BIT"
581 {
582 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
583 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
584 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
585 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
586 }
587 )
588
589 (define_insn "*addsi3_cin_vout_reg_insn"
590 [(set (reg:CC_V CC_REGNUM)
591 (compare:CC_V
592 (plus:DI
593 (plus:DI
594 (match_operand:DI 3 "arm_carry_operation" "")
595 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
596 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
597 (sign_extend:DI
598 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
599 (match_dup 1))
600 (match_dup 2)))))
601 (set (match_operand:SI 0 "s_register_operand" "=l,r")
602 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
603 (match_dup 2)))]
604 "TARGET_32BIT"
605 "@
606 adcs%?\\t%0, %0, %2
607 adcs%?\\t%0, %1, %2"
608 [(set_attr "type" "alus_sreg")
609 (set_attr "arch" "t2,*")
610 (set_attr "length" "2,4")]
611 )
612
613 (define_expand "addsi3_cin_vout_imm"
614 [(parallel
615 [(set (match_dup 3)
616 (compare:CC_V
617 (plus:DI
618 (plus:DI (match_dup 4)
619 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
620 (match_dup 2))
621 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
622 (match_dup 2)))))
623 (set (match_operand:SI 0 "s_register_operand")
624 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
625 (match_operand 2 "arm_adcimm_operand")))])]
626 "TARGET_32BIT"
627 {
628 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
629 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
630 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
631 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
632 }
633 )
634
635 (define_insn "*addsi3_cin_vout_imm_insn"
636 [(set (reg:CC_V CC_REGNUM)
637 (compare:CC_V
638 (plus:DI
639 (plus:DI
640 (match_operand:DI 3 "arm_carry_operation" "")
641 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
642 (match_operand 2 "arm_adcimm_operand" "I,K"))
643 (sign_extend:DI
644 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
645 (match_dup 1))
646 (match_dup 2)))))
647 (set (match_operand:SI 0 "s_register_operand" "=r,r")
648 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
649 (match_dup 2)))]
650 "TARGET_32BIT"
651 "@
652 adcs%?\\t%0, %1, %2
653 sbcs%?\\t%0, %1, #%B2"
654 [(set_attr "type" "alus_imm")]
655 )
656
657 (define_expand "addsi3_cin_vout_0"
658 [(parallel
659 [(set (match_dup 2)
660 (compare:CC_V
661 (plus:DI (match_dup 3)
662 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
663 (sign_extend:DI (plus:SI (match_dup 4) (match_dup 1)))))
664 (set (match_operand:SI 0 "s_register_operand")
665 (plus:SI (match_dup 4) (match_dup 1)))])]
666 "TARGET_32BIT"
667 {
668 operands[2] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
669 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
670 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
671 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
672 }
673 )
674
675 (define_insn "*addsi3_cin_vout_0_insn"
676 [(set (reg:CC_V CC_REGNUM)
677 (compare:CC_V
678 (plus:DI
679 (match_operand:DI 2 "arm_carry_operation" "")
680 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
681 (sign_extend:DI (plus:SI
682 (match_operand:SI 3 "arm_carry_operation" "")
683 (match_dup 1)))))
684 (set (match_operand:SI 0 "s_register_operand" "=r")
685 (plus:SI (match_dup 3) (match_dup 1)))]
686 "TARGET_32BIT"
687 "adcs%?\\t%0, %1, #0"
688 [(set_attr "type" "alus_imm")]
689 )
690
691 (define_expand "uaddvsi4"
692 [(match_operand:SI 0 "s_register_operand")
693 (match_operand:SI 1 "s_register_operand")
694 (match_operand:SI 2 "arm_add_operand")
695 (match_operand 3 "")]
696 "TARGET_32BIT"
697 {
698 emit_insn (gen_addsi3_compare_op1 (operands[0], operands[1], operands[2]));
699 arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
700
701 DONE;
702 })
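
;; uaddvsi4 is the unsigned counterpart of addvsi4 above: unsigned overflow
;; is just a carry out of the addition, so the expansion is a single
;; flag-setting add followed by a branch on the carry flag.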
703
704 (define_expand "uaddvdi4"
705 [(match_operand:DI 0 "s_register_operand")
706 (match_operand:DI 1 "s_register_operand")
707 (match_operand:DI 2 "reg_or_int_operand")
708 (match_operand 3 "")]
709 "TARGET_32BIT"
710 {
711 rtx lo_result, hi_result;
712 rtx lo_op1, hi_op1, lo_op2, hi_op2;
713 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
714 &lo_op2, &hi_op2);
715 lo_result = gen_lowpart (SImode, operands[0]);
716 hi_result = gen_highpart (SImode, operands[0]);
717
718 if (lo_op2 == const0_rtx)
719 {
720 emit_move_insn (lo_result, lo_op1);
721 if (!arm_add_operand (hi_op2, SImode))
722 hi_op2 = force_reg (SImode, hi_op2);
723
724 gen_uaddvsi4 (hi_result, hi_op1, hi_op2, operands[3]);
725 }
726 else
727 {
728 if (!arm_add_operand (lo_op2, SImode))
729 lo_op2 = force_reg (SImode, lo_op2);
730 if (!arm_not_operand (hi_op2, SImode))
731 hi_op2 = force_reg (SImode, hi_op2);
732
733 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
734
735 if (hi_op2 == const0_rtx)
736 emit_insn (gen_addsi3_cin_cout_0 (hi_result, hi_op1));
737 else if (CONST_INT_P (hi_op2))
738 emit_insn (gen_addsi3_cin_cout_imm (hi_result, hi_op1, hi_op2));
739 else
740 emit_insn (gen_addsi3_cin_cout_reg (hi_result, hi_op1, hi_op2));
741
742 arm_gen_unlikely_cbranch (GEU, CC_ADCmode, operands[3]);
743 }
744
745 DONE;
746 })
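
;; Illustrative expansion of the above for the general case where both
;; operands are in registers (label name is hypothetical):
;;   adds  lo, xlo, ylo      @ carry out of the low word
;;   adcs  hi, xhi, yhi      @ carry out of the full 64-bit sum
;;   bcs   .Loverflow        @ taken if the unsigned addition wrapped
;; with the final branch testing the carry recorded via CC_ADCmode.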
747
748 (define_expand "addsi3_cin_cout_reg"
749 [(parallel
750 [(set (match_dup 3)
751 (compare:CC_ADC
752 (plus:DI
753 (plus:DI (match_dup 4)
754 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
755 (zero_extend:DI (match_operand:SI 2 "s_register_operand")))
756 (const_int 4294967296)))
757 (set (match_operand:SI 0 "s_register_operand")
758 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
759 (match_dup 2)))])]
760 "TARGET_32BIT"
761 {
762 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
763 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
764 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
765 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
766 }
767 )
768
769 (define_insn "*addsi3_cin_cout_reg_insn"
770 [(set (reg:CC_ADC CC_REGNUM)
771 (compare:CC_ADC
772 (plus:DI
773 (plus:DI
774 (match_operand:DI 3 "arm_carry_operation" "")
775 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
776 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
777 (const_int 4294967296)))
778 (set (match_operand:SI 0 "s_register_operand" "=l,r")
779 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
780 (match_dup 1))
781 (match_dup 2)))]
782 "TARGET_32BIT"
783 "@
784 adcs%?\\t%0, %0, %2
785 adcs%?\\t%0, %1, %2"
786 [(set_attr "type" "alus_sreg")
787 (set_attr "arch" "t2,*")
788 (set_attr "length" "2,4")]
789 )
790
791 (define_expand "addsi3_cin_cout_imm"
792 [(parallel
793 [(set (match_dup 3)
794 (compare:CC_ADC
795 (plus:DI
796 (plus:DI (match_dup 4)
797 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
798 (match_dup 6))
799 (const_int 4294967296)))
800 (set (match_operand:SI 0 "s_register_operand")
801 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
802 (match_operand:SI 2 "arm_adcimm_operand")))])]
803 "TARGET_32BIT"
804 {
805 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
806 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
807 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
808 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
809 operands[6] = GEN_INT (UINTVAL (operands[2]) & 0xffffffff);
810 }
811 )
812
813 (define_insn "*addsi3_cin_cout_imm_insn"
814 [(set (reg:CC_ADC CC_REGNUM)
815 (compare:CC_ADC
816 (plus:DI
817 (plus:DI
818 (match_operand:DI 3 "arm_carry_operation" "")
819 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
820 (match_operand:DI 5 "const_int_operand" "n,n"))
821 (const_int 4294967296)))
822 (set (match_operand:SI 0 "s_register_operand" "=r,r")
823 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
824 (match_dup 1))
825 (match_operand:SI 2 "arm_adcimm_operand" "I,K")))]
826 "TARGET_32BIT
827 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[5])"
828 "@
829 adcs%?\\t%0, %1, %2
830 sbcs%?\\t%0, %1, #%B2"
831 [(set_attr "type" "alus_imm")]
832 )
833
834 (define_expand "addsi3_cin_cout_0"
835 [(parallel
836 [(set (match_dup 2)
837 (compare:CC_ADC
838 (plus:DI (match_dup 3)
839 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
840 (const_int 4294967296)))
841 (set (match_operand:SI 0 "s_register_operand")
842 (plus:SI (match_dup 4) (match_dup 1)))])]
843 "TARGET_32BIT"
844 {
845 operands[2] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
846 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
847 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
848 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
849 }
850 )
851
852 (define_insn "*addsi3_cin_cout_0_insn"
853 [(set (reg:CC_ADC CC_REGNUM)
854 (compare:CC_ADC
855 (plus:DI
856 (match_operand:DI 2 "arm_carry_operation" "")
857 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
858 (const_int 4294967296)))
859 (set (match_operand:SI 0 "s_register_operand" "=r")
860 (plus:SI (match_operand:SI 3 "arm_carry_operation" "") (match_dup 1)))]
861 "TARGET_32BIT"
862 "adcs%?\\t%0, %1, #0"
863 [(set_attr "type" "alus_imm")]
864 )
865
866 (define_expand "addsi3"
867 [(set (match_operand:SI 0 "s_register_operand")
868 (plus:SI (match_operand:SI 1 "s_register_operand")
869 (match_operand:SI 2 "reg_or_int_operand")))]
870 "TARGET_EITHER"
871 "
872 if (TARGET_32BIT && CONST_INT_P (operands[2]))
873 {
874 arm_split_constant (PLUS, SImode, NULL_RTX,
875 INTVAL (operands[2]), operands[0], operands[1],
876 optimize && can_create_pseudo_p ());
877 DONE;
878 }
879 "
880 )
881
882 ; If there is a scratch available, this will be faster than synthesizing the
883 ; addition.
884 (define_peephole2
885 [(match_scratch:SI 3 "r")
886 (set (match_operand:SI 0 "arm_general_register_operand" "")
887 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
888 (match_operand:SI 2 "const_int_operand" "")))]
889 "TARGET_32BIT &&
890 !(const_ok_for_arm (INTVAL (operands[2]))
891 || const_ok_for_arm (-INTVAL (operands[2])))
892 && const_ok_for_arm (~INTVAL (operands[2]))"
893 [(set (match_dup 3) (match_dup 2))
894 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
895 ""
896 )
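
; For instance, a constant that is not a valid ARM immediate (nor is its
; negation) but whose bitwise complement is can be loaded into the scratch
; with a single MVN, leaving a plain register-register ADD.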
897
898 ;; The r/r/k alternative is required when reloading the address
899 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
900 ;; put the duplicated register first, and not try the commutative version.
901 (define_insn_and_split "*arm_addsi3"
902 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
903 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
904 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
905 "TARGET_32BIT"
906 "@
907 add%?\\t%0, %0, %2
908 add%?\\t%0, %1, %2
909 add%?\\t%0, %1, %2
910 add%?\\t%0, %1, %2
911 add%?\\t%0, %1, %2
912 add%?\\t%0, %1, %2
913 add%?\\t%0, %2, %1
914 add%?\\t%0, %1, %2
915 addw%?\\t%0, %1, %2
916 addw%?\\t%0, %1, %2
917 sub%?\\t%0, %1, #%n2
918 sub%?\\t%0, %1, #%n2
919 sub%?\\t%0, %1, #%n2
920 subw%?\\t%0, %1, #%n2
921 subw%?\\t%0, %1, #%n2
922 #"
923 "TARGET_32BIT
924 && CONST_INT_P (operands[2])
925 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
926 && (reload_completed || !arm_eliminable_register (operands[1]))"
927 [(clobber (const_int 0))]
928 "
929 arm_split_constant (PLUS, SImode, curr_insn,
930 INTVAL (operands[2]), operands[0],
931 operands[1], 0);
932 DONE;
933 "
934 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
935 (set_attr "predicable" "yes")
936 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
937 (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
938 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
939 (const_string "alu_imm")
940 (const_string "alu_sreg")))
941 ]
942 )
943
944 (define_insn "addsi3_compareV_reg"
945 [(set (reg:CC_V CC_REGNUM)
946 (compare:CC_V
947 (plus:DI
948 (sign_extend:DI (match_operand:SI 1 "register_operand" "%l,0,r"))
949 (sign_extend:DI (match_operand:SI 2 "register_operand" "l,r,r")))
950 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
951 (set (match_operand:SI 0 "register_operand" "=l,r,r")
952 (plus:SI (match_dup 1) (match_dup 2)))]
953 "TARGET_32BIT"
954 "adds%?\\t%0, %1, %2"
955 [(set_attr "conds" "set")
956 (set_attr "arch" "t2,t2,*")
957 (set_attr "length" "2,2,4")
958 (set_attr "type" "alus_sreg")]
959 )
960
961 (define_insn "*addsi3_compareV_reg_nosum"
962 [(set (reg:CC_V CC_REGNUM)
963 (compare:CC_V
964 (plus:DI
965 (sign_extend:DI (match_operand:SI 0 "register_operand" "%l,r"))
966 (sign_extend:DI (match_operand:SI 1 "register_operand" "l,r")))
967 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
968 "TARGET_32BIT"
969 "cmn%?\\t%0, %1"
970 [(set_attr "conds" "set")
971 (set_attr "arch" "t2,*")
972 (set_attr "length" "2,4")
973 (set_attr "type" "alus_sreg")]
974 )
975
976 (define_insn "subvsi3_intmin"
977 [(set (reg:CC_V CC_REGNUM)
978 (compare:CC_V
979 (plus:DI
980 (sign_extend:DI
981 (match_operand:SI 1 "register_operand" "r"))
982 (const_int 2147483648))
983 (sign_extend:DI (plus:SI (match_dup 1) (const_int -2147483648)))))
984 (set (match_operand:SI 0 "register_operand" "=r")
985 (plus:SI (match_dup 1) (const_int -2147483648)))]
986 "TARGET_32BIT"
987 "subs%?\\t%0, %1, #-2147483648"
988 [(set_attr "conds" "set")
989 (set_attr "type" "alus_imm")]
990 )
991
992 (define_insn "addsi3_compareV_imm"
993 [(set (reg:CC_V CC_REGNUM)
994 (compare:CC_V
995 (plus:DI
996 (sign_extend:DI
997 (match_operand:SI 1 "register_operand" "l,0,l,0,r,r"))
998 (match_operand 2 "arm_addimm_operand" "Pd,Py,Px,Pw,I,L"))
999 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
1000 (set (match_operand:SI 0 "register_operand" "=l,l,l,l,r,r")
1001 (plus:SI (match_dup 1) (match_dup 2)))]
1002 "TARGET_32BIT
1003 && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
1004 "@
1005 adds%?\\t%0, %1, %2
1006 adds%?\\t%0, %0, %2
1007 subs%?\\t%0, %1, #%n2
1008 subs%?\\t%0, %0, #%n2
1009 adds%?\\t%0, %1, %2
1010 subs%?\\t%0, %1, #%n2"
1011 [(set_attr "conds" "set")
1012 (set_attr "arch" "t2,t2,t2,t2,*,*")
1013 (set_attr "length" "2,2,2,2,4,4")
1014 (set_attr "type" "alus_imm")]
1015 )
1016
1017 (define_insn "addsi3_compareV_imm_nosum"
1018 [(set (reg:CC_V CC_REGNUM)
1019 (compare:CC_V
1020 (plus:DI
1021 (sign_extend:DI
1022 (match_operand:SI 0 "register_operand" "l,r,r"))
1023 (match_operand 1 "arm_addimm_operand" "Pw,I,L"))
1024 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1025 "TARGET_32BIT
1026 && INTVAL (operands[1]) == ARM_SIGN_EXTEND (INTVAL (operands[1]))"
1027 "@
1028 cmp%?\\t%0, #%n1
1029 cmn%?\\t%0, %1
1030 cmp%?\\t%0, #%n1"
1031 [(set_attr "conds" "set")
1032 (set_attr "arch" "t2,*,*")
1033 (set_attr "length" "2,4,4")
1034 (set_attr "type" "alus_imm")]
1035 )
1036
1037 ;; We can handle more constants efficiently if we can clobber either a scratch
1038 ;; or the other source operand. We deliberately leave this late as in
1039 ;; high register pressure situations it's not worth forcing any reloads.
1040 (define_peephole2
1041 [(match_scratch:SI 2 "l")
1042 (set (reg:CC_V CC_REGNUM)
1043 (compare:CC_V
1044 (plus:DI
1045 (sign_extend:DI
1046 (match_operand:SI 0 "low_register_operand"))
1047 (match_operand 1 "const_int_operand"))
1048 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1049 "TARGET_THUMB2
1050 && satisfies_constraint_Pd (operands[1])"
1051 [(parallel[
1052 (set (reg:CC_V CC_REGNUM)
1053 (compare:CC_V
1054 (plus:DI (sign_extend:DI (match_dup 0))
1055 (sign_extend:DI (match_dup 1)))
1056 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1057 (set (match_dup 2) (plus:SI (match_dup 0) (match_dup 1)))])]
1058 )
1059
1060 (define_peephole2
1061 [(set (reg:CC_V CC_REGNUM)
1062 (compare:CC_V
1063 (plus:DI
1064 (sign_extend:DI
1065 (match_operand:SI 0 "low_register_operand"))
1066 (match_operand 1 "const_int_operand"))
1067 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1068 "TARGET_THUMB2
1069 && dead_or_set_p (peep2_next_insn (0), operands[0])
1070 && satisfies_constraint_Py (operands[1])"
1071 [(parallel[
1072 (set (reg:CC_V CC_REGNUM)
1073 (compare:CC_V
1074 (plus:DI (sign_extend:DI (match_dup 0))
1075 (sign_extend:DI (match_dup 1)))
1076 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1077 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 1)))])]
1078 )
1079
1080 (define_insn "addsi3_compare0"
1081 [(set (reg:CC_NOOV CC_REGNUM)
1082 (compare:CC_NOOV
1083 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
1084 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
1085 (const_int 0)))
1086 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1087 (plus:SI (match_dup 1) (match_dup 2)))]
1088 "TARGET_ARM"
1089 "@
1090 adds%?\\t%0, %1, %2
1091 subs%?\\t%0, %1, #%n2
1092 adds%?\\t%0, %1, %2"
1093 [(set_attr "conds" "set")
1094 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
1095 )
1096
1097 (define_insn "*addsi3_compare0_scratch"
1098 [(set (reg:CC_NOOV CC_REGNUM)
1099 (compare:CC_NOOV
1100 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
1101 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
1102 (const_int 0)))]
1103 "TARGET_ARM"
1104 "@
1105 cmn%?\\t%0, %1
1106 cmp%?\\t%0, #%n1
1107 cmn%?\\t%0, %1"
1108 [(set_attr "conds" "set")
1109 (set_attr "predicable" "yes")
1110 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
1111 )
1112
1113 (define_insn "*compare_negsi_si"
1114 [(set (reg:CC_Z CC_REGNUM)
1115 (compare:CC_Z
1116 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
1117 (match_operand:SI 1 "s_register_operand" "l,r")))]
1118 "TARGET_32BIT"
1119 "cmn%?\\t%1, %0"
1120 [(set_attr "conds" "set")
1121 (set_attr "predicable" "yes")
1122 (set_attr "arch" "t2,*")
1123 (set_attr "length" "2,4")
1124 (set_attr "predicable_short_it" "yes,no")
1125 (set_attr "type" "alus_sreg")]
1126 )
1127
1128 ;; This is the canonicalization of subsi3_compare when the
1129 ;; addend is a constant.
1130 (define_insn "cmpsi2_addneg"
1131 [(set (reg:CC CC_REGNUM)
1132 (compare:CC
1133 (match_operand:SI 1 "s_register_operand" "r,r")
1134 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
1135 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1136 (plus:SI (match_dup 1)
1137 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
1138 "TARGET_32BIT
1139 && (INTVAL (operands[2])
1140 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
1141 {
1142 /* For 0 and INT_MIN it is essential that we use subs, as adds will result
1143 in different condition codes (like cmn rather than like cmp), so that
1144 alternative comes first. Both alternatives can match for any 0x??000000
1145 where except for 0 and INT_MIN it doesn't matter what we choose, and also
1146 for -1 and 1 with TARGET_THUMB2; in that case prefer the instruction with #1
1147 as it is shorter. */
1148 if (which_alternative == 0 && operands[3] != const1_rtx)
1149 return "subs%?\\t%0, %1, #%n3";
1150 else
1151 return "adds%?\\t%0, %1, %3";
1152 }
1153 [(set_attr "conds" "set")
1154 (set_attr "type" "alus_sreg")]
1155 )
1156
1157 ;; Convert the sequence
1158 ;; sub rd, rn, #1
1159 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
1160 ;; bne dest
1161 ;; into
1162 ;; subs rd, rn, #1
1163 ;; bcs dest ((unsigned)rn >= 1)
1164 ;; similarly for the beq variant using bcc.
1165 ;; This is a common looping idiom (while (n--))
1166 (define_peephole2
1167 [(set (match_operand:SI 0 "arm_general_register_operand" "")
1168 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
1169 (const_int -1)))
1170 (set (match_operand 2 "cc_register" "")
1171 (compare (match_dup 0) (const_int -1)))
1172 (set (pc)
1173 (if_then_else (match_operator 3 "equality_operator"
1174 [(match_dup 2) (const_int 0)])
1175 (match_operand 4 "" "")
1176 (match_operand 5 "" "")))]
1177 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
1178 [(parallel[
1179 (set (match_dup 2)
1180 (compare:CC
1181 (match_dup 1) (const_int 1)))
1182 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
1183 (set (pc)
1184 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
1185 (match_dup 4)
1186 (match_dup 5)))]
1187 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
1188 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
1189 ? GEU : LTU),
1190 VOIDmode,
1191 operands[2], const0_rtx);"
1192 )
1193
1194 ;; The next four insns work because they compare the result with one of
1195 ;; the operands, and we know that the use of the condition code is
1196 ;; either GEU or LTU, so we can use the carry flag from the addition
1197 ;; instead of doing the compare a second time.
1198 (define_insn "addsi3_compare_op1"
1199 [(set (reg:CC_C CC_REGNUM)
1200 (compare:CC_C
1201 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,rk,rk")
1202 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rkI,L"))
1203 (match_dup 1)))
1204 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,rk,rk")
1205 (plus:SI (match_dup 1) (match_dup 2)))]
1206 "TARGET_32BIT"
1207 "@
1208 adds%?\\t%0, %1, %2
1209 adds%?\\t%0, %0, %2
1210 subs%?\\t%0, %1, #%n2
1211 subs%?\\t%0, %0, #%n2
1212 adds%?\\t%0, %1, %2
1213 subs%?\\t%0, %1, #%n2"
1214 [(set_attr "conds" "set")
1215 (set_attr "arch" "t2,t2,t2,t2,*,*")
1216 (set_attr "length" "2,2,2,2,4,4")
1217 (set (attr "type")
1218 (if_then_else (match_operand 2 "const_int_operand")
1219 (const_string "alu_imm")
1220 (const_string "alu_sreg")))]
1221 )
1222
1223 (define_insn "*addsi3_compare_op2"
1224 [(set (reg:CC_C CC_REGNUM)
1225 (compare:CC_C
1226 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r")
1227 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rI,L"))
1228 (match_dup 2)))
1229 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r")
1230 (plus:SI (match_dup 1) (match_dup 2)))]
1231 "TARGET_32BIT"
1232 "@
1233 adds%?\\t%0, %1, %2
1234 adds%?\\t%0, %0, %2
1235 subs%?\\t%0, %1, #%n2
1236 subs%?\\t%0, %0, #%n2
1237 adds%?\\t%0, %1, %2
1238 subs%?\\t%0, %1, #%n2"
1239 [(set_attr "conds" "set")
1240 (set_attr "arch" "t2,t2,t2,t2,*,*")
1241 (set_attr "length" "2,2,2,2,4,4")
1242 (set (attr "type")
1243 (if_then_else (match_operand 2 "const_int_operand")
1244 (const_string "alu_imm")
1245 (const_string "alu_sreg")))]
1246 )
1247
1248 (define_insn "*compare_addsi2_op0"
1249 [(set (reg:CC_C CC_REGNUM)
1250 (compare:CC_C
1251 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1252 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1253 (match_dup 0)))]
1254 "TARGET_32BIT"
1255 "@
1256 cmn%?\\t%0, %1
1257 cmp%?\\t%0, #%n1
1258 cmn%?\\t%0, %1
1259 cmp%?\\t%0, #%n1"
1260 [(set_attr "conds" "set")
1261 (set_attr "predicable" "yes")
1262 (set_attr "arch" "t2,t2,*,*")
1263 (set_attr "predicable_short_it" "yes,yes,no,no")
1264 (set_attr "length" "2,2,4,4")
1265 (set (attr "type")
1266 (if_then_else (match_operand 1 "const_int_operand")
1267 (const_string "alu_imm")
1268 (const_string "alu_sreg")))]
1269 )
1270
1271 (define_insn "*compare_addsi2_op1"
1272 [(set (reg:CC_C CC_REGNUM)
1273 (compare:CC_C
1274 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1275 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1276 (match_dup 1)))]
1277 "TARGET_32BIT"
1278 "@
1279 cmn%?\\t%0, %1
1280 cmp%?\\t%0, #%n1
1281 cmn%?\\t%0, %1
1282 cmp%?\\t%0, #%n1"
1283 [(set_attr "conds" "set")
1284 (set_attr "predicable" "yes")
1285 (set_attr "arch" "t2,t2,*,*")
1286 (set_attr "predicable_short_it" "yes,yes,no,no")
1287 (set_attr "length" "2,2,4,4")
1288 (set (attr "type")
1289 (if_then_else (match_operand 1 "const_int_operand")
1290 (const_string "alu_imm")
1291 (const_string "alu_sreg")))]
1292 )
1293
1294 (define_insn "addsi3_carryin"
1295 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1296 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
1297 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
1298 (match_operand:SI 3 "arm_carry_operation" "")))]
1299 "TARGET_32BIT"
1300 "@
1301 adc%?\\t%0, %1, %2
1302 adc%?\\t%0, %1, %2
1303 sbc%?\\t%0, %1, #%B2"
1304 [(set_attr "conds" "use")
1305 (set_attr "predicable" "yes")
1306 (set_attr "arch" "t2,*,*")
1307 (set_attr "length" "4")
1308 (set_attr "predicable_short_it" "yes,no,no")
1309 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1310 )
1311
1312 ;; Canonicalization of the above when the immediate is zero.
1313 (define_insn "add0si3_carryin"
1314 [(set (match_operand:SI 0 "s_register_operand" "=r")
1315 (plus:SI (match_operand:SI 2 "arm_carry_operation" "")
1316 (match_operand:SI 1 "arm_not_operand" "r")))]
1317 "TARGET_32BIT"
1318 "adc%?\\t%0, %1, #0"
1319 [(set_attr "conds" "use")
1320 (set_attr "predicable" "yes")
1321 (set_attr "length" "4")
1322 (set_attr "type" "adc_imm")]
1323 )
1324
1325 (define_insn "*addsi3_carryin_alt2"
1326 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1327 (plus:SI (plus:SI (match_operand:SI 3 "arm_carry_operation" "")
1328 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
1329 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
1330 "TARGET_32BIT"
1331 "@
1332 adc%?\\t%0, %1, %2
1333 adc%?\\t%0, %1, %2
1334 sbc%?\\t%0, %1, #%B2"
1335 [(set_attr "conds" "use")
1336 (set_attr "predicable" "yes")
1337 (set_attr "arch" "t2,*,*")
1338 (set_attr "length" "4")
1339 (set_attr "predicable_short_it" "yes,no,no")
1340 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1341 )
1342
1343 (define_insn "*addsi3_carryin_shift"
1344 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1345 (plus:SI (plus:SI
1346 (match_operator:SI 2 "shift_operator"
1347 [(match_operand:SI 3 "s_register_operand" "r,r")
1348 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1349 (match_operand:SI 5 "arm_carry_operation" ""))
1350 (match_operand:SI 1 "s_register_operand" "r,r")))]
1351 "TARGET_32BIT"
1352 "adc%?\\t%0, %1, %3%S2"
1353 [(set_attr "conds" "use")
1354 (set_attr "arch" "32,a")
1355 (set_attr "shift" "3")
1356 (set_attr "predicable" "yes")
1357 (set_attr "type" "alu_shift_imm,alu_shift_reg")]
1358 )
1359
1360 (define_insn "*addsi3_carryin_clobercc"
1361 [(set (match_operand:SI 0 "s_register_operand" "=r")
1362 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1363 (match_operand:SI 2 "arm_rhs_operand" "rI"))
1364 (match_operand:SI 3 "arm_carry_operation" "")))
1365 (clobber (reg:CC CC_REGNUM))]
1366 "TARGET_32BIT"
1367 "adcs%?\\t%0, %1, %2"
1368 [(set_attr "conds" "set")
1369 (set_attr "type" "adcs_reg")]
1370 )
1371
1372 (define_expand "subvsi4"
1373 [(match_operand:SI 0 "s_register_operand")
1374 (match_operand:SI 1 "arm_rhs_operand")
1375 (match_operand:SI 2 "arm_add_operand")
1376 (match_operand 3 "")]
1377 "TARGET_32BIT"
1378 {
1379 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1380 {
1381 /* If both operands are constants we can decide the result statically. */
1382 wi::overflow_type overflow;
1383 wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1384 rtx_mode_t (operands[2], SImode),
1385 SIGNED, &overflow);
1386 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1387 if (overflow != wi::OVF_NONE)
1388 emit_jump_insn (gen_jump (operands[3]));
1389 DONE;
1390 }
1391 else if (CONST_INT_P (operands[2]))
1392 {
1393 operands[2] = GEN_INT (-INTVAL (operands[2]));
1394 /* Special case for INT_MIN. */
1395 if (INTVAL (operands[2]) == 0x80000000)
1396 emit_insn (gen_subvsi3_intmin (operands[0], operands[1]));
1397 else
1398 emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1],
1399 operands[2]));
1400 }
1401 else if (CONST_INT_P (operands[1]))
1402 emit_insn (gen_subvsi3_imm1 (operands[0], operands[1], operands[2]));
1403 else
1404 emit_insn (gen_subvsi3 (operands[0], operands[1], operands[2]));
1405
1406 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
1407 DONE;
1408 })
1409
1410 (define_expand "subvdi4"
1411 [(match_operand:DI 0 "s_register_operand")
1412 (match_operand:DI 1 "reg_or_int_operand")
1413 (match_operand:DI 2 "reg_or_int_operand")
1414 (match_operand 3 "")]
1415 "TARGET_32BIT"
1416 {
1417 rtx lo_result, hi_result;
1418 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1419 lo_result = gen_lowpart (SImode, operands[0]);
1420 hi_result = gen_highpart (SImode, operands[0]);
1421 machine_mode mode = CCmode;
1422
1423 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1424 {
1425 /* If both operands are constants we can decide the result statically. */
1426 wi::overflow_type overflow;
1427 wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1428 rtx_mode_t (operands[2], DImode),
1429 SIGNED, &overflow);
1430 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1431 if (overflow != wi::OVF_NONE)
1432 emit_jump_insn (gen_jump (operands[3]));
1433 DONE;
1434 }
1435 else if (CONST_INT_P (operands[1]))
1436 {
1437 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1438 &lo_op1, &hi_op1);
1439 if (const_ok_for_arm (INTVAL (lo_op1)))
1440 {
1441 emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1442 GEN_INT (~UINTVAL (lo_op1))));
1443 /* We could potentially use RSC here in Arm state, but not
1444 in Thumb, so it's probably not worth the effort of handling
1445 this. */
1446 hi_op1 = force_reg (SImode, hi_op1);
1447 mode = CC_RSBmode;
1448 goto highpart;
1449 }
1450 operands[1] = force_reg (DImode, operands[1]);
1451 }
1452
1453 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1454 &lo_op2, &hi_op2);
1455 if (lo_op2 == const0_rtx)
1456 {
1457 emit_move_insn (lo_result, lo_op1);
1458 if (!arm_add_operand (hi_op2, SImode))
1459 hi_op2 = force_reg (SImode, hi_op2);
1460 emit_insn (gen_subvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1461 DONE;
1462 }
1463
1464 if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1465 lo_op2 = force_reg (SImode, lo_op2);
1466 if (CONST_INT_P (lo_op2))
1467 emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1468 GEN_INT (-INTVAL (lo_op2))));
1469 else
1470 emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1471
1472 highpart:
1473 if (!arm_not_operand (hi_op2, SImode))
1474 hi_op2 = force_reg (SImode, hi_op2);
1475 rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1476 if (CONST_INT_P (hi_op2))
1477 emit_insn (gen_subvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1478 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1479 gen_rtx_LTU (DImode, ccreg,
1480 const0_rtx)));
1481 else
1482 emit_insn (gen_subvsi3_borrow (hi_result, hi_op1, hi_op2,
1483 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1484 gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1485 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
1486
1487 DONE;
1488 })
1489
1490 (define_expand "usubvsi4"
1491 [(match_operand:SI 0 "s_register_operand")
1492 (match_operand:SI 1 "arm_rhs_operand")
1493 (match_operand:SI 2 "arm_add_operand")
1494 (match_operand 3 "")]
1495 "TARGET_32BIT"
1496 {
1497 machine_mode mode = CCmode;
1498 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1499 {
1500 /* If both operands are constants we can decide the result statically. */
1501 wi::overflow_type overflow;
1502 wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1503 rtx_mode_t (operands[2], SImode),
1504 UNSIGNED, &overflow);
1505 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1506 if (overflow != wi::OVF_NONE)
1507 emit_jump_insn (gen_jump (operands[3]));
1508 DONE;
1509 }
1510 else if (CONST_INT_P (operands[2]))
1511 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
1512 GEN_INT (-INTVAL (operands[2]))));
1513 else if (CONST_INT_P (operands[1]))
1514 {
1515 mode = CC_RSBmode;
1516 emit_insn (gen_rsb_imm_compare (operands[0], operands[1], operands[2],
1517 GEN_INT (~UINTVAL (operands[1]))));
1518 }
1519 else
1520 emit_insn (gen_subsi3_compare1 (operands[0], operands[1], operands[2]));
1521 arm_gen_unlikely_cbranch (LTU, mode, operands[3]);
1522
1523 DONE;
1524 })
1525
1526 (define_expand "usubvdi4"
1527 [(match_operand:DI 0 "s_register_operand")
1528 (match_operand:DI 1 "reg_or_int_operand")
1529 (match_operand:DI 2 "reg_or_int_operand")
1530 (match_operand 3 "")]
1531 "TARGET_32BIT"
1532 {
1533 rtx lo_result, hi_result;
1534 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1535 lo_result = gen_lowpart (SImode, operands[0]);
1536 hi_result = gen_highpart (SImode, operands[0]);
1537 machine_mode mode = CCmode;
1538
1539 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1540 {
1541 /* If both operands are constants we can decide the result statically. */
1542 wi::overflow_type overflow;
1543 wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1544 rtx_mode_t (operands[2], DImode),
1545 UNSIGNED, &overflow);
1546 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1547 if (overflow != wi::OVF_NONE)
1548 emit_jump_insn (gen_jump (operands[3]));
1549 DONE;
1550 }
1551 else if (CONST_INT_P (operands[1]))
1552 {
1553 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1554 &lo_op1, &hi_op1);
1555 if (const_ok_for_arm (INTVAL (lo_op1)))
1556 {
1557 emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1558 GEN_INT (~UINTVAL (lo_op1))));
1559 /* We could potentially use RSC here in Arm state, but not
1560 in Thumb, so it's probably not worth the effort of handling
1561 this. */
1562 hi_op1 = force_reg (SImode, hi_op1);
1563 mode = CC_RSBmode;
1564 goto highpart;
1565 }
1566 operands[1] = force_reg (DImode, operands[1]);
1567 }
1568
1569 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1570 &lo_op2, &hi_op2);
1571 if (lo_op2 == const0_rtx)
1572 {
1573 emit_move_insn (lo_result, lo_op1);
1574 if (!arm_add_operand (hi_op2, SImode))
1575 hi_op2 = force_reg (SImode, hi_op2);
1576 emit_insn (gen_usubvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1577 DONE;
1578 }
1579
1580 if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1581 lo_op2 = force_reg (SImode, lo_op2);
1582 if (CONST_INT_P (lo_op2))
1583 emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1584 GEN_INT (-INTVAL (lo_op2))));
1585 else
1586 emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1587
1588 highpart:
1589 if (!arm_not_operand (hi_op2, SImode))
1590 hi_op2 = force_reg (SImode, hi_op2);
1591 rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1592 if (CONST_INT_P (hi_op2))
1593 emit_insn (gen_usubvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1594 GEN_INT (UINTVAL (hi_op2) & 0xffffffff),
1595 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1596 gen_rtx_LTU (DImode, ccreg,
1597 const0_rtx)));
1598 else
1599 emit_insn (gen_usubvsi3_borrow (hi_result, hi_op1, hi_op2,
1600 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1601 gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1602 arm_gen_unlikely_cbranch (LTU, CC_Bmode, operands[3]);
1603
1604 DONE;
1605 })
1606
1607 (define_insn "subsi3_compare1"
1608 [(set (reg:CC CC_REGNUM)
1609 (compare:CC
1610 (match_operand:SI 1 "register_operand" "r")
1611 (match_operand:SI 2 "register_operand" "r")))
1612 (set (match_operand:SI 0 "register_operand" "=r")
1613 (minus:SI (match_dup 1) (match_dup 2)))]
1614 "TARGET_32BIT"
1615 "subs%?\\t%0, %1, %2"
1616 [(set_attr "conds" "set")
1617 (set_attr "type" "alus_sreg")]
1618 )
1619
1620 (define_insn "subvsi3"
1621 [(set (reg:CC_V CC_REGNUM)
1622 (compare:CC_V
1623 (minus:DI
1624 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "l,r"))
1625 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
1626 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1627 (set (match_operand:SI 0 "s_register_operand" "=l,r")
1628 (minus:SI (match_dup 1) (match_dup 2)))]
1629 "TARGET_32BIT"
1630 "subs%?\\t%0, %1, %2"
1631 [(set_attr "conds" "set")
1632 (set_attr "arch" "t2,*")
1633 (set_attr "length" "2,4")
1634 (set_attr "type" "alus_sreg")]
1635 )
1636
1637 (define_insn "subvsi3_imm1"
1638 [(set (reg:CC_V CC_REGNUM)
1639 (compare:CC_V
1640 (minus:DI
1641 (match_operand 1 "arm_immediate_operand" "I")
1642 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1643 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1644 (set (match_operand:SI 0 "s_register_operand" "=r")
1645 (minus:SI (match_dup 1) (match_dup 2)))]
1646 "TARGET_32BIT"
1647 "rsbs%?\\t%0, %2, %1"
1648 [(set_attr "conds" "set")
1649 (set_attr "type" "alus_imm")]
1650 )
1651
1652 (define_insn "subsi3_carryin"
1653 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1654 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
1655 (match_operand:SI 2 "s_register_operand" "r,r,r"))
1656 (match_operand:SI 3 "arm_borrow_operation" "")))]
1657 "TARGET_32BIT"
1658 "@
1659 sbc%?\\t%0, %1, %2
1660 rsc%?\\t%0, %2, %1
1661 sbc%?\\t%0, %2, %2, lsl #1"
1662 [(set_attr "conds" "use")
1663 (set_attr "arch" "*,a,t2")
1664 (set_attr "predicable" "yes")
1665 (set_attr "type" "adc_reg,adc_imm,alu_shift_imm")]
1666 )
1667
1668 ;; Special canonicalization of the above when operand1 == (const_int 1):
1669 ;; in this case the 'borrow' needs to be treated like subtracting from the carry.
1670 (define_insn "rsbsi_carryin_reg"
1671 [(set (match_operand:SI 0 "s_register_operand" "=r")
1672 (minus:SI (match_operand:SI 1 "arm_carry_operation" "")
1673 (match_operand:SI 2 "s_register_operand" "r")))]
1674 "TARGET_ARM"
1675 "rsc%?\\t%0, %2, #1"
1676 [(set_attr "conds" "use")
1677 (set_attr "predicable" "yes")
1678 (set_attr "type" "adc_imm")]
1679 )
1680
1681 ;; SBC performs Rn - Rm - ~C, but -Rm = ~Rm + 1 => Rn + ~Rm + 1 - ~C
1682 ;; => Rn + ~Rm + C (since ~C = 1 - C), which is essentially ADC Rd, Rn, ~Rm.
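;;
;; A minimal sketch (illustrative only; the helper name is made up) checking
;; the identity above in 32-bit modular arithmetic, with C the carry flag
;; taking the value 0 or 1:
;;
;;   #include <assert.h>
;;   #include <stdint.h>
;;
;;   static void check_sbc_identity (uint32_t rn, uint32_t rm, uint32_t c)
;;   {
;;     uint32_t sbc_form = rn - rm - (1u - c);  /* SBC: Rn - Rm - ~C.  */
;;     uint32_t adc_form = rn + ~rm + c;        /* ADC Rd, Rn, ~Rm.  */
;;     assert (sbc_form == adc_form);
;;   }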
1683 (define_insn "*add_not_cin"
1684 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1685 (plus:SI
1686 (plus:SI (not:SI (match_operand:SI 1 "s_register_operand" "r,r"))
1687 (match_operand:SI 3 "arm_carry_operation" ""))
1688 (match_operand:SI 2 "arm_rhs_operand" "r,I")))]
1689 "TARGET_ARM || (TARGET_THUMB2 && !CONST_INT_P (operands[2]))"
1690 "@
1691 sbc%?\\t%0, %2, %1
1692 rsc%?\\t%0, %1, %2"
1693 [(set_attr "conds" "use")
1694 (set_attr "predicable" "yes")
1695 (set_attr "arch" "*,a")
1696 (set_attr "type" "adc_reg,adc_imm")]
1697 )
1698
1699 ;; On Arm we can also use the same trick when the non-inverted operand is
1700 ;; shifted, using RSC.
1701 (define_insn "add_not_shift_cin"
1702 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1703 (plus:SI
1704 (plus:SI (match_operator:SI 3 "shift_operator"
1705 [(match_operand:SI 1 "s_register_operand" "r,r")
1706 (match_operand:SI 2 "shift_amount_operand" "M,r")])
1707 (not:SI (match_operand:SI 4 "s_register_operand" "r,r")))
1708 (match_operand:SI 5 "arm_carry_operation" "")))]
1709 "TARGET_ARM"
1710 "rsc%?\\t%0, %4, %1%S3"
1711 [(set_attr "conds" "use")
1712 (set_attr "predicable" "yes")
1713 (set_attr "type" "alu_shift_imm,alu_shift_reg")]
1714 )
1715
1716 (define_insn "cmpsi3_carryin_<CC_EXTEND>out"
1717 [(set (reg:<CC_EXTEND> CC_REGNUM)
1718 (compare:<CC_EXTEND>
1719 (SE:DI (match_operand:SI 1 "s_register_operand" "0,r"))
1720 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1721 (SE:DI (match_operand:SI 2 "s_register_operand" "l,r")))))
1722 (clobber (match_scratch:SI 0 "=l,r"))]
1723 "TARGET_32BIT"
1724 "sbcs\\t%0, %1, %2"
1725 [(set_attr "conds" "set")
1726 (set_attr "arch" "t2,*")
1727 (set_attr "length" "2,4")
1728 (set_attr "type" "adc_reg")]
1729 )
1730
1731 ;; Similar to the above, but handling a constant which has a different
1732 ;; canonicalization.
1733 (define_insn "cmpsi3_imm_carryin_<CC_EXTEND>out"
1734 [(set (reg:<CC_EXTEND> CC_REGNUM)
1735 (compare:<CC_EXTEND>
1736 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1737 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1738 (match_operand:DI 2 "arm_adcimm_operand" "I,K"))))
1739 (clobber (match_scratch:SI 0 "=l,r"))]
1740 "TARGET_32BIT"
1741 "@
1742 sbcs\\t%0, %1, %2
1743 adcs\\t%0, %1, #%B2"
1744 [(set_attr "conds" "set")
1745 (set_attr "type" "adc_imm")]
1746 )
1747
1748 ;; Further canonicalization when the constant is zero.
1749 (define_insn "cmpsi3_0_carryin_<CC_EXTEND>out"
1750 [(set (reg:<CC_EXTEND> CC_REGNUM)
1751 (compare:<CC_EXTEND>
1752 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1753 (match_operand:DI 2 "arm_borrow_operation" "")))
1754 (clobber (match_scratch:SI 0 "=l,r"))]
1755 "TARGET_32BIT"
1756 "sbcs\\t%0, %1, #0"
1757 [(set_attr "conds" "set")
1758 (set_attr "type" "adc_imm")]
1759 )
1760
1761 (define_insn "*subsi3_carryin_const"
1762 [(set (match_operand:SI 0 "s_register_operand" "=r")
1763 (minus:SI (plus:SI
1764 (match_operand:SI 1 "s_register_operand" "r")
1765 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1766 (match_operand:SI 3 "arm_borrow_operation" "")))]
1767 "TARGET_32BIT"
1768 "sbc\\t%0, %1, #%n2"
1769 [(set_attr "conds" "use")
1770 (set_attr "type" "adc_imm")]
1771 )
1772
1773 (define_insn "*subsi3_carryin_const0"
1774 [(set (match_operand:SI 0 "s_register_operand" "=r")
1775 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1776 (match_operand:SI 2 "arm_borrow_operation" "")))]
1777 "TARGET_32BIT"
1778 "sbc\\t%0, %1, #0"
1779 [(set_attr "conds" "use")
1780 (set_attr "type" "adc_imm")]
1781 )
1782
1783 (define_insn "*subsi3_carryin_shift"
1784 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1785 (minus:SI (minus:SI
1786 (match_operand:SI 1 "s_register_operand" "r,r")
1787 (match_operator:SI 2 "shift_operator"
1788 [(match_operand:SI 3 "s_register_operand" "r,r")
1789 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
1790 (match_operand:SI 5 "arm_borrow_operation" "")))]
1791 "TARGET_32BIT"
1792 "sbc%?\\t%0, %1, %3%S2"
1793 [(set_attr "conds" "use")
1794 (set_attr "arch" "32,a")
1795 (set_attr "shift" "3")
1796 (set_attr "predicable" "yes")
1797 (set_attr "type" "alu_shift_imm,alu_shift_reg")]
1798 )
1799
1800 (define_insn "*subsi3_carryin_shift_alt"
1801 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1802 (minus:SI (minus:SI
1803 (match_operand:SI 1 "s_register_operand" "r,r")
1804 (match_operand:SI 5 "arm_borrow_operation" ""))
1805 (match_operator:SI 2 "shift_operator"
1806 [(match_operand:SI 3 "s_register_operand" "r,r")
1807 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
1808 "TARGET_32BIT"
1809 "sbc%?\\t%0, %1, %3%S2"
1810 [(set_attr "conds" "use")
1811 (set_attr "arch" "32,a")
1812 (set_attr "shift" "3")
1813 (set_attr "predicable" "yes")
1814 (set_attr "type" "alu_shift_imm,alu_shift_reg")]
1815 )
1816
1817 ;; No RSC instruction in Thumb-2.
1818 (define_insn "*rsbsi3_carryin_shift"
1819 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1820 (minus:SI (minus:SI
1821 (match_operator:SI 2 "shift_operator"
1822 [(match_operand:SI 3 "s_register_operand" "r,r")
1823 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1824 (match_operand:SI 1 "s_register_operand" "r,r"))
1825 (match_operand:SI 5 "arm_borrow_operation" "")))]
1826 "TARGET_ARM"
1827 "rsc%?\\t%0, %1, %3%S2"
1828 [(set_attr "conds" "use")
1829 (set_attr "predicable" "yes")
1830 (set_attr "type" "alu_shift_imm,alu_shift_reg")]
1831 )
1832
1833 (define_insn "*rsbsi3_carryin_shift_alt"
1834 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1835 (minus:SI (minus:SI
1836 (match_operator:SI 2 "shift_operator"
1837 [(match_operand:SI 3 "s_register_operand" "r,r")
1838 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1839 (match_operand:SI 5 "arm_borrow_operation" ""))
1840 (match_operand:SI 1 "s_register_operand" "r,r")))]
1841 "TARGET_ARM"
1842 "rsc%?\\t%0, %1, %3%S2"
1843 [(set_attr "conds" "use")
1844 (set_attr "predicable" "yes")
1845 (set_attr "type" "alu_shift_imm,alu_shift_reg")]
1846 )
1847
1848 ; Transform ((x << y) - 1) into ~(~(x - 1) << y), where x is a constant.
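;
; A minimal sketch (illustrative only; the helper name is made up) checking
; the identity the split below relies on, in 32-bit unsigned arithmetic with
; 0 <= y <= 31:
;
;   #include <assert.h>
;   #include <stdint.h>
;
;   static void check_shift_sub1 (uint32_t x, unsigned y)
;   {
;     assert (((x << y) - 1u) == ~(~(x - 1u) << y));
;   }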
1849 (define_split
1850 [(set (match_operand:SI 0 "s_register_operand" "")
1851 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1852 (match_operand:SI 2 "s_register_operand" ""))
1853 (const_int -1)))
1854 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1855 "TARGET_32BIT"
1856 [(set (match_dup 3) (match_dup 1))
1857 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1858 "
1859 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1860 ")
1861
1862 (define_expand "addsf3"
1863 [(set (match_operand:SF 0 "s_register_operand")
1864 (plus:SF (match_operand:SF 1 "s_register_operand")
1865 (match_operand:SF 2 "s_register_operand")))]
1866 "TARGET_32BIT && TARGET_HARD_FLOAT"
1867 "
1868 ")
1869
1870 (define_expand "adddf3"
1871 [(set (match_operand:DF 0 "s_register_operand")
1872 (plus:DF (match_operand:DF 1 "s_register_operand")
1873 (match_operand:DF 2 "s_register_operand")))]
1874 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1875 "
1876 ")
1877
1878 (define_expand "subdi3"
1879 [(parallel
1880 [(set (match_operand:DI 0 "s_register_operand")
1881 (minus:DI (match_operand:DI 1 "reg_or_int_operand")
1882 (match_operand:DI 2 "s_register_operand")))
1883 (clobber (reg:CC CC_REGNUM))])]
1884 "TARGET_EITHER"
1885 "
1886 if (TARGET_THUMB1)
1887 {
1888 if (!REG_P (operands[1]))
1889 operands[1] = force_reg (DImode, operands[1]);
1890 }
1891 else
1892 {
1893 rtx lo_result, hi_result, lo_dest, hi_dest;
1894 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1895 rtx condition;
1896
1897 /* Since operands[1] may be an integer, pass it second, so that
1898 any necessary simplifications will be done on the decomposed
1899 constant. */
1900 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1901 &lo_op1, &hi_op1);
1902 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
1903 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
1904
1905 if (!arm_rhs_operand (lo_op1, SImode))
1906 lo_op1 = force_reg (SImode, lo_op1);
1907
1908 if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
1909 || !arm_rhs_operand (hi_op1, SImode))
1910 hi_op1 = force_reg (SImode, hi_op1);
1911
1912 rtx cc_reg;
1913 if (lo_op1 == const0_rtx)
1914 {
1915 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1916 emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
1917 }
1918 else if (CONST_INT_P (lo_op1))
1919 {
1920 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1921 emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
1922 GEN_INT (~UINTVAL (lo_op1))));
1923 }
1924 else
1925 {
1926 cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
1927 emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
1928 }
1929
1930 condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);
1931
1932 if (hi_op1 == const0_rtx)
1933 emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
1934 else
1935 emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));
1936
1937 if (lo_result != lo_dest)
1938 emit_move_insn (lo_result, lo_dest);
1939
1940 if (hi_result != hi_dest)
1941 emit_move_insn (hi_result, hi_dest);
1942
1943 DONE;
1944 }
1945 "
1946 )
1947
1948 (define_expand "subsi3"
1949 [(set (match_operand:SI 0 "s_register_operand")
1950 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1951 (match_operand:SI 2 "s_register_operand")))]
1952 "TARGET_EITHER"
1953 "
1954 if (CONST_INT_P (operands[1]))
1955 {
1956 if (TARGET_32BIT)
1957 {
1958 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1959 operands[1] = force_reg (SImode, operands[1]);
1960 else
1961 {
1962 arm_split_constant (MINUS, SImode, NULL_RTX,
1963 INTVAL (operands[1]), operands[0],
1964 operands[2],
1965 optimize && can_create_pseudo_p ());
1966 DONE;
1967 }
1968 }
1969 else /* TARGET_THUMB1 */
1970 operands[1] = force_reg (SImode, operands[1]);
1971 }
1972 "
1973 )
1974
1975 ; ??? Check Thumb-2 split length
1976 (define_insn_and_split "*arm_subsi3_insn"
1977 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
1978 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
1979 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
1980 "TARGET_32BIT"
1981 "@
1982 sub%?\\t%0, %1, %2
1983 sub%?\\t%0, %2
1984 sub%?\\t%0, %1, %2
1985 rsb%?\\t%0, %2, %1
1986 rsb%?\\t%0, %2, %1
1987 sub%?\\t%0, %1, %2
1988 sub%?\\t%0, %1, %2
1989 sub%?\\t%0, %1, %2
1990 #"
1991 "&& (CONST_INT_P (operands[1])
1992 && !const_ok_for_arm (INTVAL (operands[1])))"
1993 [(clobber (const_int 0))]
1994 "
1995 arm_split_constant (MINUS, SImode, curr_insn,
1996 INTVAL (operands[1]), operands[0], operands[2], 0);
1997 DONE;
1998 "
1999 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
2000 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
2001 (set_attr "predicable" "yes")
2002 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
2003 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
2004 )
2005
2006 (define_peephole2
2007 [(match_scratch:SI 3 "r")
2008 (set (match_operand:SI 0 "arm_general_register_operand" "")
2009 (minus:SI (match_operand:SI 1 "const_int_operand" "")
2010 (match_operand:SI 2 "arm_general_register_operand" "")))]
2011 "TARGET_32BIT
2012 && !const_ok_for_arm (INTVAL (operands[1]))
2013 && const_ok_for_arm (~INTVAL (operands[1]))"
2014 [(set (match_dup 3) (match_dup 1))
2015 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
2016 ""
2017 )
2018
2019 (define_insn "subsi3_compare0"
2020 [(set (reg:CC_NOOV CC_REGNUM)
2021 (compare:CC_NOOV
2022 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
2023 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
2024 (const_int 0)))
2025 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2026 (minus:SI (match_dup 1) (match_dup 2)))]
2027 "TARGET_32BIT"
2028 "@
2029 subs%?\\t%0, %1, %2
2030 subs%?\\t%0, %1, %2
2031 rsbs%?\\t%0, %2, %1"
2032 [(set_attr "conds" "set")
2033 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
2034 )
2035
2036 (define_insn "subsi3_compare"
2037 [(set (reg:CC CC_REGNUM)
2038 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
2039 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
2040 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2041 (minus:SI (match_dup 1) (match_dup 2)))]
2042 "TARGET_32BIT"
2043 "@
2044 subs%?\\t%0, %1, %2
2045 subs%?\\t%0, %1, %2
2046 rsbs%?\\t%0, %2, %1"
2047 [(set_attr "conds" "set")
2048 (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
2049 )
2050
2051 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
2052 ;; rather than (0 cmp reg). This gives the same results for unsigned
2053 ;; and equality compares, which is what we mostly need here.
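;;
;; A minimal sketch (illustrative only; the helper name is made up) of why
;; complementing both operands is safe: it preserves equality and reverses
;; the unsigned ordering, so (~reg cmp ~0) behaves like (0 cmp reg):
;;
;;   #include <assert.h>
;;   #include <stdint.h>
;;
;;   static void check_complement_compare (uint32_t a, uint32_t b)
;;   {
;;     assert ((~a == ~b) == (a == b));
;;     assert ((~a < ~b) == (b < a));  /* Unsigned ordering is reversed.  */
;;   }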
2054 (define_insn "rsb_imm_compare"
2055 [(set (reg:CC_RSB CC_REGNUM)
2056 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2057 (match_operand 3 "const_int_operand" "")))
2058 (set (match_operand:SI 0 "s_register_operand" "=r")
2059 (minus:SI (match_operand 1 "arm_immediate_operand" "I")
2060 (match_dup 2)))]
2061 "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
2062 "rsbs\\t%0, %2, %1"
2063 [(set_attr "conds" "set")
2064 (set_attr "type" "alus_imm")]
2065 )
2066
2067 ;; Similarly, but the result is unused.
2068 (define_insn "rsb_imm_compare_scratch"
2069 [(set (reg:CC_RSB CC_REGNUM)
2070 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2071 (match_operand 1 "arm_not_immediate_operand" "K")))
2072 (clobber (match_scratch:SI 0 "=r"))]
2073 "TARGET_32BIT"
2074 "rsbs\\t%0, %2, #%B1"
2075 [(set_attr "conds" "set")
2076 (set_attr "type" "alus_imm")]
2077 )
2078
2079 ;; Compare the sum of a value plus a carry against a constant. Uses
2080 ;; RSC, so the result is swapped. Only available in Arm state.
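;;
;; A minimal sketch (illustrative only; the helper name is made up) of what
;; RSCS computes here, assuming the borrow is ~C = 1 - C with C either 0 or 1:
;; imm - Rn - (1 - C) is just imm - (Rn + borrow), i.e. the desired comparison
;; with its operands swapped:
;;
;;   #include <assert.h>
;;   #include <stdint.h>
;;
;;   static void check_rsc (uint32_t imm, uint32_t rn, uint32_t c)
;;   {
;;     uint32_t rsc = imm - rn - (1u - c);
;;     uint32_t swapped = imm - (rn + (1u - c));
;;     assert (rsc == swapped);
;;   }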
2081 (define_insn "rscsi3_<CC_EXTEND>out_scratch"
2082 [(set (reg:CC_SWP CC_REGNUM)
2083 (compare:CC_SWP
2084 (plus:DI (SE:DI (match_operand:SI 2 "s_register_operand" "r"))
2085 (match_operand:DI 3 "arm_borrow_operation" ""))
2086 (match_operand 1 "arm_immediate_operand" "I")))
2087 (clobber (match_scratch:SI 0 "=r"))]
2088 "TARGET_ARM"
2089 "rscs\\t%0, %2, %1"
2090 [(set_attr "conds" "set")
2091 (set_attr "type" "alus_imm")]
2092 )
2093
2094 (define_insn "usubvsi3_borrow"
2095 [(set (reg:CC_B CC_REGNUM)
2096 (compare:CC_B
2097 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
2098 (plus:DI (match_operand:DI 4 "arm_borrow_operation" "")
2099 (zero_extend:DI
2100 (match_operand:SI 2 "s_register_operand" "l,r")))))
2101 (set (match_operand:SI 0 "s_register_operand" "=l,r")
2102 (minus:SI (match_dup 1)
2103 (plus:SI (match_operand:SI 3 "arm_borrow_operation" "")
2104 (match_dup 2))))]
2105 "TARGET_32BIT"
2106 "sbcs%?\\t%0, %1, %2"
2107 [(set_attr "conds" "set")
2108 (set_attr "arch" "t2,*")
2109 (set_attr "length" "2,4")]
2110 )
2111
2112 (define_insn "usubvsi3_borrow_imm"
2113 [(set (reg:CC_B CC_REGNUM)
2114 (compare:CC_B
2115 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
2116 (plus:DI (match_operand:DI 5 "arm_borrow_operation" "")
2117 (match_operand:DI 3 "const_int_operand" "n,n"))))
2118 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2119 (minus:SI (match_dup 1)
2120 (plus:SI (match_operand:SI 4 "arm_borrow_operation" "")
2121 (match_operand:SI 2 "arm_adcimm_operand" "I,K"))))]
2122 "TARGET_32BIT
2123 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[3])"
2124 "@
2125 sbcs%?\\t%0, %1, %2
2126 adcs%?\\t%0, %1, #%B2"
2127 [(set_attr "conds" "set")
2128 (set_attr "type" "alus_imm")]
2129 )
2130
2131 (define_insn "subvsi3_borrow"
2132 [(set (reg:CC_V CC_REGNUM)
2133 (compare:CC_V
2134 (minus:DI
2135 (minus:DI
2136 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
2137 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
2138 (match_operand:DI 4 "arm_borrow_operation" ""))
2139 (sign_extend:DI
2140 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2141 (match_operand:SI 3 "arm_borrow_operation" "")))))
2142 (set (match_operand:SI 0 "s_register_operand" "=l,r")
2143 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2144 (match_dup 3)))]
2145 "TARGET_32BIT"
2146 "sbcs%?\\t%0, %1, %2"
2147 [(set_attr "conds" "set")
2148 (set_attr "arch" "t2,*")
2149 (set_attr "length" "2,4")]
2150 )
2151
2152 (define_insn "subvsi3_borrow_imm"
2153 [(set (reg:CC_V CC_REGNUM)
2154 (compare:CC_V
2155 (minus:DI
2156 (minus:DI
2157 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
2158 (match_operand 2 "arm_adcimm_operand" "I,K"))
2159 (match_operand:DI 4 "arm_borrow_operation" ""))
2160 (sign_extend:DI
2161 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2162 (match_operand:SI 3 "arm_borrow_operation" "")))))
2163 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2164 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2165 (match_dup 3)))]
2166 "TARGET_32BIT
2167 && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
2168 "@
2169 sbcs%?\\t%0, %1, %2
2170 adcs%?\\t%0, %1, #%B2"
2171 [(set_attr "conds" "set")
2172 (set_attr "type" "alus_imm")]
2173 )
2174
2175 (define_expand "subsf3"
2176 [(set (match_operand:SF 0 "s_register_operand")
2177 (minus:SF (match_operand:SF 1 "s_register_operand")
2178 (match_operand:SF 2 "s_register_operand")))]
2179 "TARGET_32BIT && TARGET_HARD_FLOAT"
2180 "
2181 ")
2182
2183 (define_expand "subdf3"
2184 [(set (match_operand:DF 0 "s_register_operand")
2185 (minus:DF (match_operand:DF 1 "s_register_operand")
2186 (match_operand:DF 2 "s_register_operand")))]
2187 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2188 "
2189 ")
2190
2191 \f
2192 ;; Multiplication insns
2193
2194 (define_expand "mulhi3"
2195 [(set (match_operand:HI 0 "s_register_operand")
2196 (mult:HI (match_operand:HI 1 "s_register_operand")
2197 (match_operand:HI 2 "s_register_operand")))]
2198 "TARGET_DSP_MULTIPLY"
2199 "
2200 {
2201 rtx result = gen_reg_rtx (SImode);
2202 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
2203 emit_move_insn (operands[0], gen_lowpart (HImode, result));
2204 DONE;
2205 }"
2206 )
2207
2208 (define_expand "mulsi3"
2209 [(set (match_operand:SI 0 "s_register_operand")
2210 (mult:SI (match_operand:SI 2 "s_register_operand")
2211 (match_operand:SI 1 "s_register_operand")))]
2212 "TARGET_EITHER"
2213 ""
2214 )
2215
2216 ;; Use `&' and then `0' to prevent operands 0 and 2 from being the same.
2217 (define_insn "*mul"
2218 [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
2219 (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
2220 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
2221 "TARGET_32BIT"
2222 "mul%?\\t%0, %2, %1"
2223 [(set_attr "type" "mul")
2224 (set_attr "predicable" "yes")
2225 (set_attr "arch" "t2,v6,nov6,nov6")
2226 (set_attr "length" "4")
2227 (set_attr "predicable_short_it" "yes,no,*,*")]
2228 )
2229
2230 ;; MLA and MLS instructions. Use operand 1 for the accumulator to prefer
2231 ;; reusing the same register.
2232
2233 (define_insn "*mla"
2234 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
2235 (plus:SI
2236 (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
2237 (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
2238 (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
2239 "TARGET_32BIT"
2240 "mla%?\\t%0, %3, %2, %1"
2241 [(set_attr "type" "mla")
2242 (set_attr "predicable" "yes")
2243 (set_attr "arch" "v6,nov6,nov6,nov6")]
2244 )
2245
2246 (define_insn "*mls"
2247 [(set (match_operand:SI 0 "s_register_operand" "=r")
2248 (minus:SI
2249 (match_operand:SI 1 "s_register_operand" "r")
2250 (mult:SI (match_operand:SI 3 "s_register_operand" "r")
2251 (match_operand:SI 2 "s_register_operand" "r"))))]
2252 "TARGET_32BIT && arm_arch_thumb2"
2253 "mls%?\\t%0, %3, %2, %1"
2254 [(set_attr "type" "mla")
2255 (set_attr "predicable" "yes")]
2256 )
2257
2258 (define_insn "*mulsi3_compare0"
2259 [(set (reg:CC_NOOV CC_REGNUM)
2260 (compare:CC_NOOV (mult:SI
2261 (match_operand:SI 2 "s_register_operand" "r,r")
2262 (match_operand:SI 1 "s_register_operand" "%0,r"))
2263 (const_int 0)))
2264 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
2265 (mult:SI (match_dup 2) (match_dup 1)))]
2266 "TARGET_ARM && !arm_arch6"
2267 "muls%?\\t%0, %2, %1"
2268 [(set_attr "conds" "set")
2269 (set_attr "type" "muls")]
2270 )
2271
2272 (define_insn "*mulsi3_compare0_v6"
2273 [(set (reg:CC_NOOV CC_REGNUM)
2274 (compare:CC_NOOV (mult:SI
2275 (match_operand:SI 2 "s_register_operand" "r")
2276 (match_operand:SI 1 "s_register_operand" "r"))
2277 (const_int 0)))
2278 (set (match_operand:SI 0 "s_register_operand" "=r")
2279 (mult:SI (match_dup 2) (match_dup 1)))]
2280 "TARGET_ARM && arm_arch6 && optimize_size"
2281 "muls%?\\t%0, %2, %1"
2282 [(set_attr "conds" "set")
2283 (set_attr "type" "muls")]
2284 )
2285
2286 (define_insn "*mulsi_compare0_scratch"
2287 [(set (reg:CC_NOOV CC_REGNUM)
2288 (compare:CC_NOOV (mult:SI
2289 (match_operand:SI 2 "s_register_operand" "r,r")
2290 (match_operand:SI 1 "s_register_operand" "%0,r"))
2291 (const_int 0)))
2292 (clobber (match_scratch:SI 0 "=&r,&r"))]
2293 "TARGET_ARM && !arm_arch6"
2294 "muls%?\\t%0, %2, %1"
2295 [(set_attr "conds" "set")
2296 (set_attr "type" "muls")]
2297 )
2298
2299 (define_insn "*mulsi_compare0_scratch_v6"
2300 [(set (reg:CC_NOOV CC_REGNUM)
2301 (compare:CC_NOOV (mult:SI
2302 (match_operand:SI 2 "s_register_operand" "r")
2303 (match_operand:SI 1 "s_register_operand" "r"))
2304 (const_int 0)))
2305 (clobber (match_scratch:SI 0 "=r"))]
2306 "TARGET_ARM && arm_arch6 && optimize_size"
2307 "muls%?\\t%0, %2, %1"
2308 [(set_attr "conds" "set")
2309 (set_attr "type" "muls")]
2310 )
2311
2312 (define_insn "*mulsi3addsi_compare0"
2313 [(set (reg:CC_NOOV CC_REGNUM)
2314 (compare:CC_NOOV
2315 (plus:SI (mult:SI
2316 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2317 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2318 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
2319 (const_int 0)))
2320 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
2321 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2322 (match_dup 3)))]
2323 "TARGET_ARM && arm_arch6"
2324 "mlas%?\\t%0, %2, %1, %3"
2325 [(set_attr "conds" "set")
2326 (set_attr "type" "mlas")]
2327 )
2328
2329 (define_insn "*mulsi3addsi_compare0_v6"
2330 [(set (reg:CC_NOOV CC_REGNUM)
2331 (compare:CC_NOOV
2332 (plus:SI (mult:SI
2333 (match_operand:SI 2 "s_register_operand" "r")
2334 (match_operand:SI 1 "s_register_operand" "r"))
2335 (match_operand:SI 3 "s_register_operand" "r"))
2336 (const_int 0)))
2337 (set (match_operand:SI 0 "s_register_operand" "=r")
2338 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2339 (match_dup 3)))]
2340 "TARGET_ARM && arm_arch6 && optimize_size"
2341 "mlas%?\\t%0, %2, %1, %3"
2342 [(set_attr "conds" "set")
2343 (set_attr "type" "mlas")]
2344 )
2345
2346 (define_insn "*mulsi3addsi_compare0_scratch"
2347 [(set (reg:CC_NOOV CC_REGNUM)
2348 (compare:CC_NOOV
2349 (plus:SI (mult:SI
2350 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2351 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2352 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
2353 (const_int 0)))
2354 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
2355 "TARGET_ARM && !arm_arch6"
2356 "mlas%?\\t%0, %2, %1, %3"
2357 [(set_attr "conds" "set")
2358 (set_attr "type" "mlas")]
2359 )
2360
2361 (define_insn "*mulsi3addsi_compare0_scratch_v6"
2362 [(set (reg:CC_NOOV CC_REGNUM)
2363 (compare:CC_NOOV
2364 (plus:SI (mult:SI
2365 (match_operand:SI 2 "s_register_operand" "r")
2366 (match_operand:SI 1 "s_register_operand" "r"))
2367 (match_operand:SI 3 "s_register_operand" "r"))
2368 (const_int 0)))
2369 (clobber (match_scratch:SI 0 "=r"))]
2370 "TARGET_ARM && arm_arch6 && optimize_size"
2371 "mlas%?\\t%0, %2, %1, %3"
2372 [(set_attr "conds" "set")
2373 (set_attr "type" "mlas")]
2374 )
2375
2376 ;; 32x32->64 widening multiply.
2377 ;; The only difference between the v3-5 and v6+ versions is the requirement
2378 ;; that the output does not overlap with either input.
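;;
;; A minimal sketch (illustrative only; the helper name is made up) of the
;; operation implemented below: the full 64-bit product of two 32-bit values
;; returned as a low/high pair (UMULL shown; SMULL sign-extends instead):
;;
;;   #include <stdint.h>
;;
;;   static void umull_example (uint32_t a, uint32_t b,
;;                              uint32_t *lo, uint32_t *hi)
;;   {
;;     uint64_t prod = (uint64_t) a * (uint64_t) b;
;;     *lo = (uint32_t) prod;
;;     *hi = (uint32_t) (prod >> 32);
;;   }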
2379
2380 (define_expand "<Us>mulsidi3"
2381 [(set (match_operand:DI 0 "s_register_operand")
2382 (mult:DI
2383 (SE:DI (match_operand:SI 1 "s_register_operand"))
2384 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
2385 "TARGET_32BIT"
2386 {
2387 emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
2388 gen_highpart (SImode, operands[0]),
2389 operands[1], operands[2]));
2390 DONE;
2391 }
2392 )
2393
2394 (define_insn "<US>mull"
2395 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2396 (mult:SI
2397 (match_operand:SI 2 "s_register_operand" "%r,r")
2398 (match_operand:SI 3 "s_register_operand" "r,r")))
2399 (set (match_operand:SI 1 "s_register_operand" "=r,&r")
2400 (truncate:SI
2401 (lshiftrt:DI
2402 (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
2403 (const_int 32))))]
2404 "TARGET_32BIT"
2405 "<US>mull%?\\t%0, %1, %2, %3"
2406 [(set_attr "type" "umull")
2407 (set_attr "predicable" "yes")
2408 (set_attr "arch" "v6,nov6")]
2409 )
2410
2411 (define_expand "<Us>maddsidi4"
2412 [(set (match_operand:DI 0 "s_register_operand")
2413 (plus:DI
2414 (mult:DI
2415 (SE:DI (match_operand:SI 1 "s_register_operand"))
2416 (SE:DI (match_operand:SI 2 "s_register_operand")))
2417 (match_operand:DI 3 "s_register_operand")))]
2418 "TARGET_32BIT"
2419 {
2420 emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
2421 gen_lowpart (SImode, operands[3]),
2422 gen_highpart (SImode, operands[0]),
2423 gen_highpart (SImode, operands[3]),
2424 operands[1], operands[2]));
2425 DONE;
2426 }
2427 )
2428
2429 (define_insn "<US>mlal"
2430 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2431 (plus:SI
2432 (mult:SI
2433 (match_operand:SI 4 "s_register_operand" "%r,r")
2434 (match_operand:SI 5 "s_register_operand" "r,r"))
2435 (match_operand:SI 1 "s_register_operand" "0,0")))
2436 (set (match_operand:SI 2 "s_register_operand" "=r,&r")
2437 (plus:SI
2438 (truncate:SI
2439 (lshiftrt:DI
2440 (plus:DI
2441 (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
2442 (zero_extend:DI (match_dup 1)))
2443 (const_int 32)))
2444 (match_operand:SI 3 "s_register_operand" "2,2")))]
2445 "TARGET_32BIT"
2446 "<US>mlal%?\\t%0, %2, %4, %5"
2447 [(set_attr "type" "umlal")
2448 (set_attr "predicable" "yes")
2449 (set_attr "arch" "v6,nov6")]
2450 )
2451
2452 (define_expand "<US>mulsi3_highpart"
2453 [(parallel
2454 [(set (match_operand:SI 0 "s_register_operand")
2455 (truncate:SI
2456 (lshiftrt:DI
2457 (mult:DI
2458 (SE:DI (match_operand:SI 1 "s_register_operand"))
2459 (SE:DI (match_operand:SI 2 "s_register_operand")))
2460 (const_int 32))))
2461 (clobber (match_scratch:SI 3 ""))])]
2462 "TARGET_32BIT"
2463 ""
2464 )
2465
2466 (define_insn "*<US>mull_high"
2467 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
2468 (truncate:SI
2469 (lshiftrt:DI
2470 (mult:DI
2471 (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
2472 (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
2473 (const_int 32))))
2474 (clobber (match_scratch:SI 3 "=r,&r,&r"))]
2475 "TARGET_32BIT"
2476 "<US>mull%?\\t%3, %0, %2, %1"
2477 [(set_attr "type" "umull")
2478 (set_attr "predicable" "yes")
2479 (set_attr "arch" "v6,nov6,nov6")]
2480 )
2481
2482 (define_insn "mulhisi3"
2483 [(set (match_operand:SI 0 "s_register_operand" "=r")
2484 (mult:SI (sign_extend:SI
2485 (match_operand:HI 1 "s_register_operand" "%r"))
2486 (sign_extend:SI
2487 (match_operand:HI 2 "s_register_operand" "r"))))]
2488 "TARGET_DSP_MULTIPLY"
2489 "smulbb%?\\t%0, %1, %2"
2490 [(set_attr "type" "smulxy")
2491 (set_attr "predicable" "yes")]
2492 )
2493
2494 (define_insn "*mulhisi3tb"
2495 [(set (match_operand:SI 0 "s_register_operand" "=r")
2496 (mult:SI (ashiftrt:SI
2497 (match_operand:SI 1 "s_register_operand" "r")
2498 (const_int 16))
2499 (sign_extend:SI
2500 (match_operand:HI 2 "s_register_operand" "r"))))]
2501 "TARGET_DSP_MULTIPLY"
2502 "smultb%?\\t%0, %1, %2"
2503 [(set_attr "type" "smulxy")
2504 (set_attr "predicable" "yes")]
2505 )
2506
2507 (define_insn "*mulhisi3bt"
2508 [(set (match_operand:SI 0 "s_register_operand" "=r")
2509 (mult:SI (sign_extend:SI
2510 (match_operand:HI 1 "s_register_operand" "r"))
2511 (ashiftrt:SI
2512 (match_operand:SI 2 "s_register_operand" "r")
2513 (const_int 16))))]
2514 "TARGET_DSP_MULTIPLY"
2515 "smulbt%?\\t%0, %1, %2"
2516 [(set_attr "type" "smulxy")
2517 (set_attr "predicable" "yes")]
2518 )
2519
2520 (define_insn "*mulhisi3tt"
2521 [(set (match_operand:SI 0 "s_register_operand" "=r")
2522 (mult:SI (ashiftrt:SI
2523 (match_operand:SI 1 "s_register_operand" "r")
2524 (const_int 16))
2525 (ashiftrt:SI
2526 (match_operand:SI 2 "s_register_operand" "r")
2527 (const_int 16))))]
2528 "TARGET_DSP_MULTIPLY"
2529 "smultt%?\\t%0, %1, %2"
2530 [(set_attr "type" "smulxy")
2531 (set_attr "predicable" "yes")]
2532 )
2533
2534 (define_expand "maddhisi4"
2535 [(set (match_operand:SI 0 "s_register_operand")
2536 (plus:SI (mult:SI (sign_extend:SI
2537 (match_operand:HI 1 "s_register_operand"))
2538 (sign_extend:SI
2539 (match_operand:HI 2 "s_register_operand")))
2540 (match_operand:SI 3 "s_register_operand")))]
2541 "TARGET_DSP_MULTIPLY"
2542 {
2543 /* If this function reads the Q bit from ACLE intrinsics, break up the
2544 multiplication and accumulation, as an overflow during accumulation will
2545 clobber the Q flag. */
2546 if (ARM_Q_BIT_READ)
2547 {
2548 rtx tmp = gen_reg_rtx (SImode);
2549 emit_insn (gen_mulhisi3 (tmp, operands[1], operands[2]));
2550 emit_insn (gen_addsi3 (operands[0], tmp, operands[3]));
2551 DONE;
2552 }
2553 }
2554 )
2555
2556 (define_insn "*arm_maddhisi4"
2557 [(set (match_operand:SI 0 "s_register_operand" "=r")
2558 (plus:SI (mult:SI (sign_extend:SI
2559 (match_operand:HI 1 "s_register_operand" "r"))
2560 (sign_extend:SI
2561 (match_operand:HI 2 "s_register_operand" "r")))
2562 (match_operand:SI 3 "s_register_operand" "r")))]
2563 "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2564 "smlabb%?\\t%0, %1, %2, %3"
2565 [(set_attr "type" "smlaxy")
2566 (set_attr "predicable" "yes")]
2567 )
2568
2569 (define_insn "arm_smlabb_setq"
2570 [(set (match_operand:SI 0 "s_register_operand" "=r")
2571 (plus:SI (mult:SI (sign_extend:SI
2572 (match_operand:HI 1 "s_register_operand" "r"))
2573 (sign_extend:SI
2574 (match_operand:HI 2 "s_register_operand" "r")))
2575 (match_operand:SI 3 "s_register_operand" "r")))
2576 (set (reg:CC APSRQ_REGNUM)
2577 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2578 "TARGET_DSP_MULTIPLY"
2579 "smlabb%?\\t%0, %1, %2, %3"
2580 [(set_attr "type" "smlaxy")
2581 (set_attr "predicable" "yes")]
2582 )
2583
2584 (define_expand "arm_smlabb"
2585 [(match_operand:SI 0 "s_register_operand")
2586 (match_operand:SI 1 "s_register_operand")
2587 (match_operand:SI 2 "s_register_operand")
2588 (match_operand:SI 3 "s_register_operand")]
2589 "TARGET_DSP_MULTIPLY"
2590 {
2591 rtx mult1 = gen_lowpart (HImode, operands[1]);
2592 rtx mult2 = gen_lowpart (HImode, operands[2]);
2593 if (ARM_Q_BIT_READ)
2594 emit_insn (gen_arm_smlabb_setq (operands[0], mult1, mult2, operands[3]));
2595 else
2596 emit_insn (gen_maddhisi4 (operands[0], mult1, mult2, operands[3]));
2597 DONE;
2598 }
2599 )
2600
2601 ;; Note: there is no maddhisi4bt because this one is the canonical form.
2602 (define_insn "maddhisi4tb"
2603 [(set (match_operand:SI 0 "s_register_operand" "=r")
2604 (plus:SI (mult:SI (ashiftrt:SI
2605 (match_operand:SI 1 "s_register_operand" "r")
2606 (const_int 16))
2607 (sign_extend:SI
2608 (match_operand:HI 2 "s_register_operand" "r")))
2609 (match_operand:SI 3 "s_register_operand" "r")))]
2610 "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2611 "smlatb%?\\t%0, %1, %2, %3"
2612 [(set_attr "type" "smlaxy")
2613 (set_attr "predicable" "yes")]
2614 )
2615
2616 (define_insn "arm_smlatb_setq"
2617 [(set (match_operand:SI 0 "s_register_operand" "=r")
2618 (plus:SI (mult:SI (ashiftrt:SI
2619 (match_operand:SI 1 "s_register_operand" "r")
2620 (const_int 16))
2621 (sign_extend:SI
2622 (match_operand:HI 2 "s_register_operand" "r")))
2623 (match_operand:SI 3 "s_register_operand" "r")))
2624 (set (reg:CC APSRQ_REGNUM)
2625 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2626 "TARGET_DSP_MULTIPLY"
2627 "smlatb%?\\t%0, %1, %2, %3"
2628 [(set_attr "type" "smlaxy")
2629 (set_attr "predicable" "yes")]
2630 )
2631
2632 (define_expand "arm_smlatb"
2633 [(match_operand:SI 0 "s_register_operand")
2634 (match_operand:SI 1 "s_register_operand")
2635 (match_operand:SI 2 "s_register_operand")
2636 (match_operand:SI 3 "s_register_operand")]
2637 "TARGET_DSP_MULTIPLY"
2638 {
2639 rtx mult2 = gen_lowpart (HImode, operands[2]);
2640 if (ARM_Q_BIT_READ)
2641 emit_insn (gen_arm_smlatb_setq (operands[0], operands[1],
2642 mult2, operands[3]));
2643 else
2644 emit_insn (gen_maddhisi4tb (operands[0], operands[1],
2645 mult2, operands[3]));
2646 DONE;
2647 }
2648 )
2649
2650 (define_insn "maddhisi4tt"
2651 [(set (match_operand:SI 0 "s_register_operand" "=r")
2652 (plus:SI (mult:SI (ashiftrt:SI
2653 (match_operand:SI 1 "s_register_operand" "r")
2654 (const_int 16))
2655 (ashiftrt:SI
2656 (match_operand:SI 2 "s_register_operand" "r")
2657 (const_int 16)))
2658 (match_operand:SI 3 "s_register_operand" "r")))]
2659 "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2660 "smlatt%?\\t%0, %1, %2, %3"
2661 [(set_attr "type" "smlaxy")
2662 (set_attr "predicable" "yes")]
2663 )
2664
2665 (define_insn "arm_smlatt_setq"
2666 [(set (match_operand:SI 0 "s_register_operand" "=r")
2667 (plus:SI (mult:SI (ashiftrt:SI
2668 (match_operand:SI 1 "s_register_operand" "r")
2669 (const_int 16))
2670 (ashiftrt:SI
2671 (match_operand:SI 2 "s_register_operand" "r")
2672 (const_int 16)))
2673 (match_operand:SI 3 "s_register_operand" "r")))
2674 (set (reg:CC APSRQ_REGNUM)
2675 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2676 "TARGET_DSP_MULTIPLY"
2677 "smlatt%?\\t%0, %1, %2, %3"
2678 [(set_attr "type" "smlaxy")
2679 (set_attr "predicable" "yes")]
2680 )
2681
2682 (define_expand "arm_smlatt"
2683 [(match_operand:SI 0 "s_register_operand")
2684 (match_operand:SI 1 "s_register_operand")
2685 (match_operand:SI 2 "s_register_operand")
2686 (match_operand:SI 3 "s_register_operand")]
2687 "TARGET_DSP_MULTIPLY"
2688 {
2689 if (ARM_Q_BIT_READ)
2690 emit_insn (gen_arm_smlatt_setq (operands[0], operands[1],
2691 operands[2], operands[3]));
2692 else
2693 emit_insn (gen_maddhisi4tt (operands[0], operands[1],
2694 operands[2], operands[3]));
2695 DONE;
2696 }
2697 )
2698
2699 (define_insn "maddhidi4"
2700 [(set (match_operand:DI 0 "s_register_operand" "=r")
2701 (plus:DI
2702 (mult:DI (sign_extend:DI
2703 (match_operand:HI 1 "s_register_operand" "r"))
2704 (sign_extend:DI
2705 (match_operand:HI 2 "s_register_operand" "r")))
2706 (match_operand:DI 3 "s_register_operand" "0")))]
2707 "TARGET_DSP_MULTIPLY"
2708 "smlalbb%?\\t%Q0, %R0, %1, %2"
2709 [(set_attr "type" "smlalxy")
2710 (set_attr "predicable" "yes")])
2711
2712 ;; Note: there is no maddhidi4bt because this one is the canonical form.
2713 (define_insn "*maddhidi4tb"
2714 [(set (match_operand:DI 0 "s_register_operand" "=r")
2715 (plus:DI
2716 (mult:DI (sign_extend:DI
2717 (ashiftrt:SI
2718 (match_operand:SI 1 "s_register_operand" "r")
2719 (const_int 16)))
2720 (sign_extend:DI
2721 (match_operand:HI 2 "s_register_operand" "r")))
2722 (match_operand:DI 3 "s_register_operand" "0")))]
2723 "TARGET_DSP_MULTIPLY"
2724 "smlaltb%?\\t%Q0, %R0, %1, %2"
2725 [(set_attr "type" "smlalxy")
2726 (set_attr "predicable" "yes")])
2727
2728 (define_insn "*maddhidi4tt"
2729 [(set (match_operand:DI 0 "s_register_operand" "=r")
2730 (plus:DI
2731 (mult:DI (sign_extend:DI
2732 (ashiftrt:SI
2733 (match_operand:SI 1 "s_register_operand" "r")
2734 (const_int 16)))
2735 (sign_extend:DI
2736 (ashiftrt:SI
2737 (match_operand:SI 2 "s_register_operand" "r")
2738 (const_int 16))))
2739 (match_operand:DI 3 "s_register_operand" "0")))]
2740 "TARGET_DSP_MULTIPLY"
2741 "smlaltt%?\\t%Q0, %R0, %1, %2"
2742 [(set_attr "type" "smlalxy")
2743 (set_attr "predicable" "yes")])
2744
2745 (define_insn "arm_<smlaw_op><add_clobber_q_name>_insn"
2746 [(set (match_operand:SI 0 "s_register_operand" "=r")
2747 (unspec:SI
2748 [(match_operand:SI 1 "s_register_operand" "r")
2749 (match_operand:SI 2 "s_register_operand" "r")
2750 (match_operand:SI 3 "s_register_operand" "r")]
2751 SMLAWBT))]
2752 "TARGET_DSP_MULTIPLY && <add_clobber_q_pred>"
2753 "<smlaw_op>%?\\t%0, %1, %2, %3"
2754 [(set_attr "type" "smlaxy")
2755 (set_attr "predicable" "yes")]
2756 )
2757
2758 (define_expand "arm_<smlaw_op>"
2759 [(set (match_operand:SI 0 "s_register_operand")
2760 (unspec:SI
2761 [(match_operand:SI 1 "s_register_operand")
2762 (match_operand:SI 2 "s_register_operand")
2763 (match_operand:SI 3 "s_register_operand")]
2764 SMLAWBT))]
2765 "TARGET_DSP_MULTIPLY"
2766 {
2767 if (ARM_Q_BIT_READ)
2768 emit_insn (gen_arm_<smlaw_op>_setq_insn (operands[0], operands[1],
2769 operands[2], operands[3]));
2770 else
2771 emit_insn (gen_arm_<smlaw_op>_insn (operands[0], operands[1],
2772 operands[2], operands[3]));
2773 DONE;
2774 }
2775 )
2776
2777 (define_expand "mulsf3"
2778 [(set (match_operand:SF 0 "s_register_operand")
2779 (mult:SF (match_operand:SF 1 "s_register_operand")
2780 (match_operand:SF 2 "s_register_operand")))]
2781 "TARGET_32BIT && TARGET_HARD_FLOAT"
2782 "
2783 ")
2784
2785 (define_expand "muldf3"
2786 [(set (match_operand:DF 0 "s_register_operand")
2787 (mult:DF (match_operand:DF 1 "s_register_operand")
2788 (match_operand:DF 2 "s_register_operand")))]
2789 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2790 "
2791 ")
2792 \f
2793 ;; Division insns
2794
2795 (define_expand "divsf3"
2796 [(set (match_operand:SF 0 "s_register_operand")
2797 (div:SF (match_operand:SF 1 "s_register_operand")
2798 (match_operand:SF 2 "s_register_operand")))]
2799 "TARGET_32BIT && TARGET_HARD_FLOAT"
2800 "")
2801
2802 (define_expand "divdf3"
2803 [(set (match_operand:DF 0 "s_register_operand")
2804 (div:DF (match_operand:DF 1 "s_register_operand")
2805 (match_operand:DF 2 "s_register_operand")))]
2806 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
2807 "")
2808 \f
2809
2810 ; Expand logical operations. The mid-end expander does not split off memory
2811 ; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
2812 ; So an explicit expander is needed to generate better code.
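;
; A minimal sketch (illustrative only; the helper name is made up) of the
; expansion performed below: the DImode operation is the same SImode operation
; applied to the low and high halves independently (AND shown; IOR and XOR
; are analogous):
;
;   #include <stdint.h>
;
;   static uint64_t anddi_example (uint64_t a, uint64_t b)
;   {
;     uint32_t lo = (uint32_t) a & (uint32_t) b;
;     uint32_t hi = (uint32_t) (a >> 32) & (uint32_t) (b >> 32);
;     return ((uint64_t) hi << 32) | lo;
;   }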
2813
2814 (define_expand "<LOGICAL:optab>di3"
2815 [(set (match_operand:DI 0 "s_register_operand")
2816 (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
2817 (match_operand:DI 2 "arm_<optab>di_operand")))]
2818 "TARGET_32BIT"
2819 {
2820 rtx low = simplify_gen_binary (<CODE>, SImode,
2821 gen_lowpart (SImode, operands[1]),
2822 gen_lowpart (SImode, operands[2]));
2823 rtx high = simplify_gen_binary (<CODE>, SImode,
2824 gen_highpart (SImode, operands[1]),
2825 gen_highpart_mode (SImode, DImode,
2826 operands[2]));
2827
2828 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2829 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2830 DONE;
2831 }
2832 )
2833
2834 (define_expand "one_cmpldi2"
2835 [(set (match_operand:DI 0 "s_register_operand")
2836 (not:DI (match_operand:DI 1 "s_register_operand")))]
2837 "TARGET_32BIT"
2838 {
2839 rtx low = simplify_gen_unary (NOT, SImode,
2840 gen_lowpart (SImode, operands[1]),
2841 SImode);
2842 rtx high = simplify_gen_unary (NOT, SImode,
2843 gen_highpart_mode (SImode, DImode,
2844 operands[1]),
2845 SImode);
2846
2847 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2848 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2849 DONE;
2850 }
2851 )
2852
2853 ;; Split DImode and, ior, xor operations. Simply perform the logical
2854 ;; operation on the upper and lower halves of the registers.
2855 ;; This is needed for atomic operations in arm_split_atomic_op.
2856 ;; Avoid splitting IWMMXT instructions.
2857 (define_split
2858 [(set (match_operand:DI 0 "s_register_operand" "")
2859 (match_operator:DI 6 "logical_binary_operator"
2860 [(match_operand:DI 1 "s_register_operand" "")
2861 (match_operand:DI 2 "s_register_operand" "")]))]
2862 "TARGET_32BIT && reload_completed
2863 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2864 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2865 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
2866 "
2867 {
2868 operands[3] = gen_highpart (SImode, operands[0]);
2869 operands[0] = gen_lowpart (SImode, operands[0]);
2870 operands[4] = gen_highpart (SImode, operands[1]);
2871 operands[1] = gen_lowpart (SImode, operands[1]);
2872 operands[5] = gen_highpart (SImode, operands[2]);
2873 operands[2] = gen_lowpart (SImode, operands[2]);
2874 }"
2875 )
2876
2877 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
2878 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
2879 (define_split
2880 [(set (match_operand:DI 0 "s_register_operand")
2881 (not:DI (match_operand:DI 1 "s_register_operand")))]
2882 "TARGET_32BIT"
2883 [(set (match_dup 0) (not:SI (match_dup 1)))
2884 (set (match_dup 2) (not:SI (match_dup 3)))]
2885 "
2886 {
2887 operands[2] = gen_highpart (SImode, operands[0]);
2888 operands[0] = gen_lowpart (SImode, operands[0]);
2889 operands[3] = gen_highpart (SImode, operands[1]);
2890 operands[1] = gen_lowpart (SImode, operands[1]);
2891 }"
2892 )
2893
2894 (define_expand "andsi3"
2895 [(set (match_operand:SI 0 "s_register_operand")
2896 (and:SI (match_operand:SI 1 "s_register_operand")
2897 (match_operand:SI 2 "reg_or_int_operand")))]
2898 "TARGET_EITHER"
2899 "
2900 if (TARGET_32BIT)
2901 {
2902 if (CONST_INT_P (operands[2]))
2903 {
2904 if (INTVAL (operands[2]) == 255 && arm_arch6)
2905 {
2906 operands[1] = convert_to_mode (QImode, operands[1], 1);
2907 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2908 operands[1]));
2909 DONE;
2910 }
2911 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
2912 operands[2] = force_reg (SImode, operands[2]);
2913 else
2914 {
2915 arm_split_constant (AND, SImode, NULL_RTX,
2916 INTVAL (operands[2]), operands[0],
2917 operands[1],
2918 optimize && can_create_pseudo_p ());
2919
2920 DONE;
2921 }
2922 }
2923 }
2924 else /* TARGET_THUMB1 */
2925 {
2926 if (!CONST_INT_P (operands[2]))
2927 {
2928 rtx tmp = force_reg (SImode, operands[2]);
2929 if (rtx_equal_p (operands[0], operands[1]))
2930 operands[2] = tmp;
2931 else
2932 {
2933 operands[2] = operands[1];
2934 operands[1] = tmp;
2935 }
2936 }
2937 else
2938 {
2939 int i;
2940
2941 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2942 {
2943 operands[2] = force_reg (SImode,
2944 GEN_INT (~INTVAL (operands[2])));
2945
2946 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2947
2948 DONE;
2949 }
2950
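/* Masks made up of I low-order set bits (I >= 9) can be done with a
zero-extract of the low I bits; masks that instead clear the low I bits
can be done with a logical right shift followed by a left shift by the
same amount.  */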
2951 for (i = 9; i <= 31; i++)
2952 {
2953 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
2954 {
2955 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2956 const0_rtx));
2957 DONE;
2958 }
2959 else if ((HOST_WIDE_INT_1 << i) - 1
2960 == ~INTVAL (operands[2]))
2961 {
2962 rtx shift = GEN_INT (i);
2963 rtx reg = gen_reg_rtx (SImode);
2964
2965 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2966 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2967
2968 DONE;
2969 }
2970 }
2971
2972 operands[2] = force_reg (SImode, operands[2]);
2973 }
2974 }
2975 "
2976 )
2977
2978 ; ??? Check split length for Thumb-2
2979 (define_insn_and_split "*arm_andsi3_insn"
2980 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2981 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2982 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2983 "TARGET_32BIT"
2984 "@
2985 and%?\\t%0, %1, %2
2986 and%?\\t%0, %1, %2
2987 bic%?\\t%0, %1, #%B2
2988 and%?\\t%0, %1, %2
2989 #"
2990 "TARGET_32BIT
2991 && CONST_INT_P (operands[2])
2992 && !(const_ok_for_arm (INTVAL (operands[2]))
2993 || const_ok_for_arm (~INTVAL (operands[2])))"
2994 [(clobber (const_int 0))]
2995 "
2996 arm_split_constant (AND, SImode, curr_insn,
2997 INTVAL (operands[2]), operands[0], operands[1], 0);
2998 DONE;
2999 "
3000 [(set_attr "length" "4,4,4,4,16")
3001 (set_attr "predicable" "yes")
3002 (set_attr "predicable_short_it" "no,yes,no,no,no")
3003 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
3004 )
3005
3006 (define_insn "*andsi3_compare0"
3007 [(set (reg:CC_NOOV CC_REGNUM)
3008 (compare:CC_NOOV
3009 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
3010 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
3011 (const_int 0)))
3012 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3013 (and:SI (match_dup 1) (match_dup 2)))]
3014 "TARGET_32BIT"
3015 "@
3016 ands%?\\t%0, %1, %2
3017 bics%?\\t%0, %1, #%B2
3018 ands%?\\t%0, %1, %2"
3019 [(set_attr "conds" "set")
3020 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
3021 )
3022
3023 (define_insn "*andsi3_compare0_scratch"
3024 [(set (reg:CC_NOOV CC_REGNUM)
3025 (compare:CC_NOOV
3026 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
3027 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
3028 (const_int 0)))
3029 (clobber (match_scratch:SI 2 "=X,r,X"))]
3030 "TARGET_32BIT"
3031 "@
3032 tst%?\\t%0, %1
3033 bics%?\\t%2, %0, #%B1
3034 tst%?\\t%0, %1"
3035 [(set_attr "conds" "set")
3036 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
3037 )
3038
3039 (define_insn "*zeroextractsi_compare0_scratch"
3040 [(set (reg:CC_NOOV CC_REGNUM)
3041 (compare:CC_NOOV (zero_extract:SI
3042 (match_operand:SI 0 "s_register_operand" "r")
3043 (match_operand 1 "const_int_operand" "n")
3044 (match_operand 2 "const_int_operand" "n"))
3045 (const_int 0)))]
3046 "TARGET_32BIT
3047 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
3048 && INTVAL (operands[1]) > 0
3049 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
3050 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
3051 "*
3052 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
3053 << INTVAL (operands[2]));
3054 output_asm_insn (\"tst%?\\t%0, %1\", operands);
3055 return \"\";
3056 "
3057 [(set_attr "conds" "set")
3058 (set_attr "predicable" "yes")
3059 (set_attr "type" "logics_imm")]
3060 )
3061
3062 (define_insn_and_split "*ne_zeroextractsi"
3063 [(set (match_operand:SI 0 "s_register_operand" "=r")
3064 (ne:SI (zero_extract:SI
3065 (match_operand:SI 1 "s_register_operand" "r")
3066 (match_operand:SI 2 "const_int_operand" "n")
3067 (match_operand:SI 3 "const_int_operand" "n"))
3068 (const_int 0)))
3069 (clobber (reg:CC CC_REGNUM))]
3070 "TARGET_32BIT
3071 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3072 && INTVAL (operands[2]) > 0
3073 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3074 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
3075 "#"
3076 "TARGET_32BIT
3077 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3078 && INTVAL (operands[2]) > 0
3079 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3080 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
3081 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
3082 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
3083 (const_int 0)))
3084 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
3085 (set (match_dup 0)
3086 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
3087 (match_dup 0) (const_int 1)))]
3088 "
3089 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
3090 << INTVAL (operands[3]));
3091 "
3092 [(set_attr "conds" "clob")
3093 (set (attr "length")
3094 (if_then_else (eq_attr "is_thumb" "yes")
3095 (const_int 12)
3096 (const_int 8)))
3097 (set_attr "type" "multiple")]
3098 )
3099
3100 (define_insn_and_split "*ne_zeroextractsi_shifted"
3101 [(set (match_operand:SI 0 "s_register_operand" "=r")
3102 (ne:SI (zero_extract:SI
3103 (match_operand:SI 1 "s_register_operand" "r")
3104 (match_operand:SI 2 "const_int_operand" "n")
3105 (const_int 0))
3106 (const_int 0)))
3107 (clobber (reg:CC CC_REGNUM))]
3108 "TARGET_ARM"
3109 "#"
3110 "TARGET_ARM"
3111 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
3112 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
3113 (const_int 0)))
3114 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
3115 (set (match_dup 0)
3116 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
3117 (match_dup 0) (const_int 1)))]
3118 "
3119 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
3120 "
3121 [(set_attr "conds" "clob")
3122 (set_attr "length" "8")
3123 (set_attr "type" "multiple")]
3124 )
3125
3126 (define_insn_and_split "*ite_ne_zeroextractsi"
3127 [(set (match_operand:SI 0 "s_register_operand" "=r")
3128 (if_then_else:SI (ne (zero_extract:SI
3129 (match_operand:SI 1 "s_register_operand" "r")
3130 (match_operand:SI 2 "const_int_operand" "n")
3131 (match_operand:SI 3 "const_int_operand" "n"))
3132 (const_int 0))
3133 (match_operand:SI 4 "arm_not_operand" "rIK")
3134 (const_int 0)))
3135 (clobber (reg:CC CC_REGNUM))]
3136 "TARGET_ARM
3137 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3138 && INTVAL (operands[2]) > 0
3139 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3140 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
3141 && !reg_overlap_mentioned_p (operands[0], operands[4])"
3142 "#"
3143 "TARGET_ARM
3144 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3145 && INTVAL (operands[2]) > 0
3146 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3147 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
3148 && !reg_overlap_mentioned_p (operands[0], operands[4])"
3149 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
3150 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
3151 (const_int 0)))
3152 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
3153 (set (match_dup 0)
3154 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
3155 (match_dup 0) (match_dup 4)))]
3156 "
3157 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
3158 << INTVAL (operands[3]));
3159 "
3160 [(set_attr "conds" "clob")
3161 (set_attr "length" "8")
3162 (set_attr "type" "multiple")]
3163 )
3164
3165 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
3166 [(set (match_operand:SI 0 "s_register_operand" "=r")
3167 (if_then_else:SI (ne (zero_extract:SI
3168 (match_operand:SI 1 "s_register_operand" "r")
3169 (match_operand:SI 2 "const_int_operand" "n")
3170 (const_int 0))
3171 (const_int 0))
3172 (match_operand:SI 3 "arm_not_operand" "rIK")
3173 (const_int 0)))
3174 (clobber (reg:CC CC_REGNUM))]
3175 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
3176 "#"
3177 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
3178 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
3179 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
3180 (const_int 0)))
3181 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
3182 (set (match_dup 0)
3183 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
3184 (match_dup 0) (match_dup 3)))]
3185 "
3186 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
3187 "
3188 [(set_attr "conds" "clob")
3189 (set_attr "length" "8")
3190 (set_attr "type" "multiple")]
3191 )
3192
3193 ;; ??? Thumb-2 has bitfield insert/extract instructions that could be used here.
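;;
;; A minimal sketch (illustrative only; the helper name is made up) of the
;; identity the splits below use: a LEN-bit field starting at bit POS can be
;; extracted with a left shift by (32 - POS - LEN) followed by a right shift
;; by (32 - LEN) (logical for zero_extract, arithmetic for sign_extract).
;; Assuming 0 < LEN < 32 and POS + LEN <= 32:
;;
;;   #include <assert.h>
;;   #include <stdint.h>
;;
;;   static void check_zero_extract (uint32_t x, unsigned pos, unsigned len)
;;   {
;;     uint32_t expected = (x >> pos) & ((1u << len) - 1);
;;     uint32_t via_shifts = (x << (32 - pos - len)) >> (32 - len);
;;     assert (via_shifts == expected);
;;   }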
3194 (define_split
3195 [(set (match_operand:SI 0 "s_register_operand" "")
3196 (match_operator:SI 1 "shiftable_operator"
3197 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3198 (match_operand:SI 3 "const_int_operand" "")
3199 (match_operand:SI 4 "const_int_operand" ""))
3200 (match_operand:SI 5 "s_register_operand" "")]))
3201 (clobber (match_operand:SI 6 "s_register_operand" ""))]
3202 "TARGET_ARM"
3203 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
3204 (set (match_dup 0)
3205 (match_op_dup 1
3206 [(lshiftrt:SI (match_dup 6) (match_dup 4))
3207 (match_dup 5)]))]
3208 "{
3209 HOST_WIDE_INT temp = INTVAL (operands[3]);
3210
3211 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
3212 operands[4] = GEN_INT (32 - temp);
3213 }"
3214 )
3215
3216 (define_split
3217 [(set (match_operand:SI 0 "s_register_operand" "")
3218 (match_operator:SI 1 "shiftable_operator"
3219 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3220 (match_operand:SI 3 "const_int_operand" "")
3221 (match_operand:SI 4 "const_int_operand" ""))
3222 (match_operand:SI 5 "s_register_operand" "")]))
3223 (clobber (match_operand:SI 6 "s_register_operand" ""))]
3224 "TARGET_ARM"
3225 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
3226 (set (match_dup 0)
3227 (match_op_dup 1
3228 [(ashiftrt:SI (match_dup 6) (match_dup 4))
3229 (match_dup 5)]))]
3230 "{
3231 HOST_WIDE_INT temp = INTVAL (operands[3]);
3232
3233 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
3234 operands[4] = GEN_INT (32 - temp);
3235 }"
3236 )
3237
3238 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
3239 ;;; represented by the bitfield, then this will produce incorrect results.
3240 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
3241 ;;; which have a real bit-field insert instruction, the truncation happens
3242 ;;; in the bit-field insert instruction itself. Since arm does not have a
3243 ;;; bit-field insert instruction, we would have to emit code here to truncate
3244 ;;; the value before we insert. This loses some of the advantage of having
3245 ;;; this insv pattern, so this pattern needs to be reevaluated.
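;;;
;;; A minimal sketch (illustrative only; the helper name is made up) of the
;;; insertion these expansions aim for, including the truncation of the
;;; inserted value discussed above.  Assuming WIDTH >= 1 and
;;; START + WIDTH <= 32:
;;;
;;;   #include <stdint.h>
;;;
;;;   static uint32_t insv_example (uint32_t dest, uint32_t val,
;;;                                 unsigned start, unsigned width)
;;;   {
;;;     uint32_t mask = (width < 32 ? (1u << width) : 0u) - 1u;
;;;     return (dest & ~(mask << start)) | ((val & mask) << start);
;;;   }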
3246
3247 (define_expand "insv"
3248 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
3249 (match_operand 1 "general_operand")
3250 (match_operand 2 "general_operand"))
3251 (match_operand 3 "reg_or_int_operand"))]
3252 "TARGET_ARM || arm_arch_thumb2"
3253 "
3254 {
3255 int start_bit = INTVAL (operands[2]);
3256 int width = INTVAL (operands[1]);
3257 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
3258 rtx target, subtarget;
3259
3260 if (arm_arch_thumb2)
3261 {
3262 if (unaligned_access && MEM_P (operands[0])
3263 && s_register_operand (operands[3], GET_MODE (operands[3]))
3264 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
3265 {
3266 rtx base_addr;
3267
3268 if (BYTES_BIG_ENDIAN)
3269 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
3270 - start_bit;
3271
3272 if (width == 32)
3273 {
3274 base_addr = adjust_address (operands[0], SImode,
3275 start_bit / BITS_PER_UNIT);
3276 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
3277 }
3278 else
3279 {
3280 rtx tmp = gen_reg_rtx (HImode);
3281
3282 base_addr = adjust_address (operands[0], HImode,
3283 start_bit / BITS_PER_UNIT);
3284 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
3285 emit_insn (gen_unaligned_storehi (base_addr, tmp));
3286 }
3287 DONE;
3288 }
3289 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
3290 {
3291 bool use_bfi = TRUE;
3292
3293 if (CONST_INT_P (operands[3]))
3294 {
3295 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
3296
3297 if (val == 0)
3298 {
3299 emit_insn (gen_insv_zero (operands[0], operands[1],
3300 operands[2]));
3301 DONE;
3302 }
3303
3304 /* See if the set can be done with a single orr instruction. */
3305 if (val == mask && const_ok_for_arm (val << start_bit))
3306 use_bfi = FALSE;
3307 }
3308
3309 if (use_bfi)
3310 {
3311 if (!REG_P (operands[3]))
3312 operands[3] = force_reg (SImode, operands[3]);
3313
3314 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
3315 operands[3]));
3316 DONE;
3317 }
3318 }
3319 else
3320 FAIL;
3321 }
3322
3323 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
3324 FAIL;
3325
3326 target = copy_rtx (operands[0]);
3327 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
3328 subreg as the final target. */
3329 if (GET_CODE (target) == SUBREG)
3330 {
3331 subtarget = gen_reg_rtx (SImode);
3332 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
3333 < GET_MODE_SIZE (SImode))
3334 target = SUBREG_REG (target);
3335 }
3336 else
3337 subtarget = target;
3338
3339 if (CONST_INT_P (operands[3]))
3340 {
3341 /* Since we are inserting a known constant, we may be able to
3342 reduce the number of bits that we have to clear so that
3343 the mask becomes simple. */
3344 /* ??? This code does not check to see if the new mask is actually
3345 simpler. It may not be. */
3346 rtx op1 = gen_reg_rtx (SImode);
3347 /* ??? Truncate operand3 to fit in the bitfield. See comment before
3348 start of this pattern. */
3349 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
3350 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
3351
3352 emit_insn (gen_andsi3 (op1, operands[0],
3353 gen_int_mode (~mask2, SImode)));
3354 emit_insn (gen_iorsi3 (subtarget, op1,
3355 gen_int_mode (op3_value << start_bit, SImode)));
3356 }
3357 else if (start_bit == 0
3358 && !(const_ok_for_arm (mask)
3359 || const_ok_for_arm (~mask)))
3360 {
3361 /* A trick: since we are setting the bottom bits in the word,
3362 we can shift operand[3] up, operand[0] down, OR them together
3363 and rotate the result back again. This takes 3 insns, and
3364 the third might be mergeable into another op. */
3365 /* The shift up copes with the possibility that operand[3] is
3366 wider than the bitfield. */
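	 /* Illustrative C model of the trick (a sketch, assuming
	    1 <= width <= 31 so both shift counts stay in range):

	       hi = value << (32 - width);   -- left-justify value, dropping
	                                        any bits above the field
	       lo = word >> width;           -- logical shift; keeps only the
	                                        bits of word above the field
	       result = rotl (lo | hi, width);

	    After the rotate the truncated value occupies bits [0, width)
	    and the original upper bits of word are back in place.  */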
3367 rtx op0 = gen_reg_rtx (SImode);
3368 rtx op1 = gen_reg_rtx (SImode);
3369
3370 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3371 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
3372 emit_insn (gen_iorsi3 (op1, op1, op0));
3373 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
3374 }
3375 else if ((width + start_bit == 32)
3376 && !(const_ok_for_arm (mask)
3377 || const_ok_for_arm (~mask)))
3378 {
3379 /* Similar trick, but slightly less efficient. */
3380
3381 rtx op0 = gen_reg_rtx (SImode);
3382 rtx op1 = gen_reg_rtx (SImode);
3383
3384 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3385 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
3386 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
3387 emit_insn (gen_iorsi3 (subtarget, op1, op0));
3388 }
3389 else
3390 {
3391 rtx op0 = gen_int_mode (mask, SImode);
3392 rtx op1 = gen_reg_rtx (SImode);
3393 rtx op2 = gen_reg_rtx (SImode);
3394
3395 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
3396 {
3397 rtx tmp = gen_reg_rtx (SImode);
3398
3399 emit_insn (gen_movsi (tmp, op0));
3400 op0 = tmp;
3401 }
3402
3403 /* Mask out any bits in operand[3] that are not needed. */
3404 emit_insn (gen_andsi3 (op1, operands[3], op0));
3405
3406 if (CONST_INT_P (op0)
3407 && (const_ok_for_arm (mask << start_bit)
3408 || const_ok_for_arm (~(mask << start_bit))))
3409 {
3410 op0 = gen_int_mode (~(mask << start_bit), SImode);
3411 emit_insn (gen_andsi3 (op2, operands[0], op0));
3412 }
3413 else
3414 {
3415 if (CONST_INT_P (op0))
3416 {
3417 rtx tmp = gen_reg_rtx (SImode);
3418
3419 emit_insn (gen_movsi (tmp, op0));
3420 op0 = tmp;
3421 }
3422
3423 if (start_bit != 0)
3424 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
3425
3426 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
3427 }
3428
3429 if (start_bit != 0)
3430 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
3431
3432 emit_insn (gen_iorsi3 (subtarget, op1, op2));
3433 }
3434
3435 if (subtarget != target)
3436 {
3437 /* If TARGET is still a SUBREG, then it must be wider than a word,
3438 so we must be careful only to set the subword we were asked to. */
3439 if (GET_CODE (target) == SUBREG)
3440 emit_move_insn (target, subtarget);
3441 else
3442 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
3443 }
3444
3445 DONE;
3446 }"
3447 )
3448
3449 (define_insn "insv_zero"
3450 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3451 (match_operand:SI 1 "const_int_M_operand" "M")
3452 (match_operand:SI 2 "const_int_M_operand" "M"))
3453 (const_int 0))]
3454 "arm_arch_thumb2"
3455 "bfc%?\t%0, %2, %1"
3456 [(set_attr "length" "4")
3457 (set_attr "predicable" "yes")
3458 (set_attr "type" "bfm")]
3459 )
3460
3461 (define_insn "insv_t2"
3462 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3463 (match_operand:SI 1 "const_int_M_operand" "M")
3464 (match_operand:SI 2 "const_int_M_operand" "M"))
3465 (match_operand:SI 3 "s_register_operand" "r"))]
3466 "arm_arch_thumb2"
3467 "bfi%?\t%0, %3, %2, %1"
3468 [(set_attr "length" "4")
3469 (set_attr "predicable" "yes")
3470 (set_attr "type" "bfm")]
3471 )
3472
3473 (define_insn "andsi_notsi_si"
3474 [(set (match_operand:SI 0 "s_register_operand" "=r")
3475 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3476 (match_operand:SI 1 "s_register_operand" "r")))]
3477 "TARGET_32BIT"
3478 "bic%?\\t%0, %1, %2"
3479 [(set_attr "predicable" "yes")
3480 (set_attr "type" "logic_reg")]
3481 )
3482
3483 (define_insn "andsi_not_shiftsi_si"
3484 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3485 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
3486 [(match_operand:SI 2 "s_register_operand" "r,r")
3487 (match_operand:SI 3 "shift_amount_operand" "M,r")]))
3488 (match_operand:SI 1 "s_register_operand" "r,r")))]
3489 "TARGET_32BIT"
3490 "bic%?\\t%0, %1, %2%S4"
3491 [(set_attr "predicable" "yes")
3492 (set_attr "shift" "2")
3493 (set_attr "arch" "32,a")
3494 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3495 )
3496
3497 ;; Shifted bics pattern used to set up the CC status register without
3498 ;; reusing the bics output.  The pattern restricts the Thumb-2 shift operand
3499 ;; to an immediate, since bics for Thumb-2 does not support a shift by register.
3500 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
3501 [(set (reg:CC_NOOV CC_REGNUM)
3502 (compare:CC_NOOV
3503 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3504 [(match_operand:SI 1 "s_register_operand" "r,r")
3505 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
3506 (match_operand:SI 3 "s_register_operand" "r,r"))
3507 (const_int 0)))
3508 (clobber (match_scratch:SI 4 "=r,r"))]
3509 "TARGET_32BIT"
3510 "bics%?\\t%4, %3, %1%S0"
3511 [(set_attr "predicable" "yes")
3512 (set_attr "arch" "32,a")
3513 (set_attr "conds" "set")
3514 (set_attr "shift" "1")
3515 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3516 )
3517
3518 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the bics result is also
3519 ;; used later.
3520 (define_insn "andsi_not_shiftsi_si_scc"
3521 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
3522 (compare:CC_NOOV
3523 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3524 [(match_operand:SI 1 "s_register_operand" "r,r")
3525 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
3526 (match_operand:SI 3 "s_register_operand" "r,r"))
3527 (const_int 0)))
3528 (set (match_operand:SI 4 "s_register_operand" "=r,r")
3529 (and:SI (not:SI (match_op_dup 0
3530 [(match_dup 1)
3531 (match_dup 2)]))
3532 (match_dup 3)))])]
3533 "TARGET_32BIT"
3534 "bics%?\\t%4, %3, %1%S0"
3535 [(set_attr "predicable" "yes")
3536 (set_attr "arch" "32,a")
3537 (set_attr "conds" "set")
3538 (set_attr "shift" "1")
3539 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3540 )
3541
3542 (define_insn "*andsi_notsi_si_compare0"
3543 [(set (reg:CC_NOOV CC_REGNUM)
3544 (compare:CC_NOOV
3545 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3546 (match_operand:SI 1 "s_register_operand" "r"))
3547 (const_int 0)))
3548 (set (match_operand:SI 0 "s_register_operand" "=r")
3549 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
3550 "TARGET_32BIT"
3551 "bics\\t%0, %1, %2"
3552 [(set_attr "conds" "set")
3553 (set_attr "type" "logics_shift_reg")]
3554 )
3555
3556 (define_insn "*andsi_notsi_si_compare0_scratch"
3557 [(set (reg:CC_NOOV CC_REGNUM)
3558 (compare:CC_NOOV
3559 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3560 (match_operand:SI 1 "s_register_operand" "r"))
3561 (const_int 0)))
3562 (clobber (match_scratch:SI 0 "=r"))]
3563 "TARGET_32BIT"
3564 "bics\\t%0, %1, %2"
3565 [(set_attr "conds" "set")
3566 (set_attr "type" "logics_shift_reg")]
3567 )
3568
3569 (define_expand "iorsi3"
3570 [(set (match_operand:SI 0 "s_register_operand")
3571 (ior:SI (match_operand:SI 1 "s_register_operand")
3572 (match_operand:SI 2 "reg_or_int_operand")))]
3573 "TARGET_EITHER"
3574 "
3575 if (CONST_INT_P (operands[2]))
3576 {
3577 if (TARGET_32BIT)
3578 {
3579 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
3580 operands[2] = force_reg (SImode, operands[2]);
3581 else
3582 {
3583 arm_split_constant (IOR, SImode, NULL_RTX,
3584 INTVAL (operands[2]), operands[0],
3585 operands[1],
3586 optimize && can_create_pseudo_p ());
3587 DONE;
3588 }
3589 }
3590 else /* TARGET_THUMB1 */
3591 {
3592 rtx tmp = force_reg (SImode, operands[2]);
3593 if (rtx_equal_p (operands[0], operands[1]))
3594 operands[2] = tmp;
3595 else
3596 {
3597 operands[2] = operands[1];
3598 operands[1] = tmp;
3599 }
3600 }
3601 }
3602 "
3603 )
3604
3605 (define_insn_and_split "*iorsi3_insn"
3606 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
3607 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
3608 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
3609 "TARGET_32BIT"
3610 "@
3611 orr%?\\t%0, %1, %2
3612 orr%?\\t%0, %1, %2
3613 orn%?\\t%0, %1, #%B2
3614 orr%?\\t%0, %1, %2
3615 #"
3616 "TARGET_32BIT
3617 && CONST_INT_P (operands[2])
3618 && !(const_ok_for_arm (INTVAL (operands[2]))
3619 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
3620 [(clobber (const_int 0))]
3621 {
3622 arm_split_constant (IOR, SImode, curr_insn,
3623 INTVAL (operands[2]), operands[0], operands[1], 0);
3624 DONE;
3625 }
3626 [(set_attr "length" "4,4,4,4,16")
3627 (set_attr "arch" "32,t2,t2,32,32")
3628 (set_attr "predicable" "yes")
3629 (set_attr "predicable_short_it" "no,yes,no,no,no")
3630 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
3631 )
3632
3633 (define_peephole2
3634 [(match_scratch:SI 3 "r")
3635 (set (match_operand:SI 0 "arm_general_register_operand" "")
3636 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
3637 (match_operand:SI 2 "const_int_operand" "")))]
3638 "TARGET_ARM
3639 && !const_ok_for_arm (INTVAL (operands[2]))
3640 && const_ok_for_arm (~INTVAL (operands[2]))"
3641 [(set (match_dup 3) (match_dup 2))
3642 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
3643 ""
3644 )
3645
3646 (define_insn "*iorsi3_compare0"
3647 [(set (reg:CC_NOOV CC_REGNUM)
3648 (compare:CC_NOOV
3649 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3650 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3651 (const_int 0)))
3652 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
3653 (ior:SI (match_dup 1) (match_dup 2)))]
3654 "TARGET_32BIT"
3655 "orrs%?\\t%0, %1, %2"
3656 [(set_attr "conds" "set")
3657 (set_attr "arch" "*,t2,*")
3658 (set_attr "length" "4,2,4")
3659 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
3660 )
3661
3662 (define_insn "*iorsi3_compare0_scratch"
3663 [(set (reg:CC_NOOV CC_REGNUM)
3664 (compare:CC_NOOV
3665 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3666 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3667 (const_int 0)))
3668 (clobber (match_scratch:SI 0 "=r,l,r"))]
3669 "TARGET_32BIT"
3670 "orrs%?\\t%0, %1, %2"
3671 [(set_attr "conds" "set")
3672 (set_attr "arch" "*,t2,*")
3673 (set_attr "length" "4,2,4")
3674 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
3675 )
3676
3677 (define_expand "xorsi3"
3678 [(set (match_operand:SI 0 "s_register_operand")
3679 (xor:SI (match_operand:SI 1 "s_register_operand")
3680 (match_operand:SI 2 "reg_or_int_operand")))]
3681 "TARGET_EITHER"
3682 "if (CONST_INT_P (operands[2]))
3683 {
3684 if (TARGET_32BIT)
3685 {
3686 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
3687 operands[2] = force_reg (SImode, operands[2]);
3688 else
3689 {
3690 arm_split_constant (XOR, SImode, NULL_RTX,
3691 INTVAL (operands[2]), operands[0],
3692 operands[1],
3693 optimize && can_create_pseudo_p ());
3694 DONE;
3695 }
3696 }
3697 else /* TARGET_THUMB1 */
3698 {
3699 rtx tmp = force_reg (SImode, operands[2]);
3700 if (rtx_equal_p (operands[0], operands[1]))
3701 operands[2] = tmp;
3702 else
3703 {
3704 operands[2] = operands[1];
3705 operands[1] = tmp;
3706 }
3707 }
3708 }"
3709 )
3710
3711 (define_insn_and_split "*arm_xorsi3"
3712 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
3713 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
3714 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
3715 "TARGET_32BIT"
3716 "@
3717 eor%?\\t%0, %1, %2
3718 eor%?\\t%0, %1, %2
3719 eor%?\\t%0, %1, %2
3720 #"
3721 "TARGET_32BIT
3722 && CONST_INT_P (operands[2])
3723 && !const_ok_for_arm (INTVAL (operands[2]))"
3724 [(clobber (const_int 0))]
3725 {
3726 arm_split_constant (XOR, SImode, curr_insn,
3727 INTVAL (operands[2]), operands[0], operands[1], 0);
3728 DONE;
3729 }
3730 [(set_attr "length" "4,4,4,16")
3731 (set_attr "predicable" "yes")
3732 (set_attr "predicable_short_it" "no,yes,no,no")
3733 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
3734 )
3735
3736 (define_insn "*xorsi3_compare0"
3737 [(set (reg:CC_NOOV CC_REGNUM)
3738 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
3739 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3740 (const_int 0)))
3741 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3742 (xor:SI (match_dup 1) (match_dup 2)))]
3743 "TARGET_32BIT"
3744 "eors%?\\t%0, %1, %2"
3745 [(set_attr "conds" "set")
3746 (set_attr "type" "logics_imm,logics_reg")]
3747 )
3748
3749 (define_insn "*xorsi3_compare0_scratch"
3750 [(set (reg:CC_NOOV CC_REGNUM)
3751 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
3752 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
3753 (const_int 0)))]
3754 "TARGET_32BIT"
3755 "teq%?\\t%0, %1"
3756 [(set_attr "conds" "set")
3757 (set_attr "type" "logics_imm,logics_reg")]
3758 )
3759
3760 ; By splitting (IOR (AND (NOT A) (NOT B)) C) into D = AND (IOR A B) (NOT C)
3761 ; followed by (NOT D), we can sometimes merge the final NOT into one of the
3762 ; following insns.
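;
; Concretely this is De Morgan's law applied twice:
;   (~A & ~B) | C  =  ~(A | B) | C  =  ~((A | B) & ~C)
; so computing D = (A | B) & ~C and then inverting D yields the same value,
; and the trailing NOT can sometimes be absorbed by a later instruction.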
3763
3764 (define_split
3765 [(set (match_operand:SI 0 "s_register_operand" "")
3766 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3767 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3768 (match_operand:SI 3 "arm_rhs_operand" "")))
3769 (clobber (match_operand:SI 4 "s_register_operand" ""))]
3770 "TARGET_32BIT"
3771 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3772 (not:SI (match_dup 3))))
3773 (set (match_dup 0) (not:SI (match_dup 4)))]
3774 ""
3775 )
3776
3777 (define_insn_and_split "*andsi_iorsi3_notsi"
3778 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3779 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3780 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3781 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3782 "TARGET_32BIT"
3783 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3784 "&& reload_completed"
3785 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
3786 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
3787 {
3788 /* If operands[3] is a constant make sure to fold the NOT into it
3789 to avoid creating a NOT of a CONST_INT. */
3790 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
3791 if (CONST_INT_P (not_rtx))
3792 {
3793 operands[4] = operands[0];
3794 operands[5] = not_rtx;
3795 }
3796 else
3797 {
3798 operands[5] = operands[0];
3799 operands[4] = not_rtx;
3800 }
3801 }
3802 [(set_attr "length" "8")
3803 (set_attr "ce_count" "2")
3804 (set_attr "predicable" "yes")
3805 (set_attr "type" "multiple")]
3806 )
3807
3808 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3809 ; insns are available?
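;
; These splitters rely on a zero_extract or sign_extract of WIDTH bits at
; position POS being just a pair of shifts:
;   (x << (32 - WIDTH - POS)) >> (32 - WIDTH)
; (with a logical or arithmetic final shift respectively), and on AND/IOR/XOR
; commuting with that final right shift.  When operand 3 == 32 - operand 6,
; the extract and the (... >> operand 6) term can share a single final right
; shift, so the whole expression becomes two ALU operations, each taking a
; shifted operand.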
3810 (define_split
3811 [(set (match_operand:SI 0 "s_register_operand" "")
3812 (match_operator:SI 1 "logical_binary_operator"
3813 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3814 (match_operand:SI 3 "const_int_operand" "")
3815 (match_operand:SI 4 "const_int_operand" ""))
3816 (match_operator:SI 9 "logical_binary_operator"
3817 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3818 (match_operand:SI 6 "const_int_operand" ""))
3819 (match_operand:SI 7 "s_register_operand" "")])]))
3820 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3821 "TARGET_32BIT
3822 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3823 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3824 [(set (match_dup 8)
3825 (match_op_dup 1
3826 [(ashift:SI (match_dup 2) (match_dup 4))
3827 (match_dup 5)]))
3828 (set (match_dup 0)
3829 (match_op_dup 1
3830 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3831 (match_dup 7)]))]
3832 "
3833 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3834 ")
3835
3836 (define_split
3837 [(set (match_operand:SI 0 "s_register_operand" "")
3838 (match_operator:SI 1 "logical_binary_operator"
3839 [(match_operator:SI 9 "logical_binary_operator"
3840 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3841 (match_operand:SI 6 "const_int_operand" ""))
3842 (match_operand:SI 7 "s_register_operand" "")])
3843 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3844 (match_operand:SI 3 "const_int_operand" "")
3845 (match_operand:SI 4 "const_int_operand" ""))]))
3846 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3847 "TARGET_32BIT
3848 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3849 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3850 [(set (match_dup 8)
3851 (match_op_dup 1
3852 [(ashift:SI (match_dup 2) (match_dup 4))
3853 (match_dup 5)]))
3854 (set (match_dup 0)
3855 (match_op_dup 1
3856 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3857 (match_dup 7)]))]
3858 "
3859 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3860 ")
3861
3862 (define_split
3863 [(set (match_operand:SI 0 "s_register_operand" "")
3864 (match_operator:SI 1 "logical_binary_operator"
3865 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3866 (match_operand:SI 3 "const_int_operand" "")
3867 (match_operand:SI 4 "const_int_operand" ""))
3868 (match_operator:SI 9 "logical_binary_operator"
3869 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3870 (match_operand:SI 6 "const_int_operand" ""))
3871 (match_operand:SI 7 "s_register_operand" "")])]))
3872 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3873 "TARGET_32BIT
3874 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3875 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3876 [(set (match_dup 8)
3877 (match_op_dup 1
3878 [(ashift:SI (match_dup 2) (match_dup 4))
3879 (match_dup 5)]))
3880 (set (match_dup 0)
3881 (match_op_dup 1
3882 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3883 (match_dup 7)]))]
3884 "
3885 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3886 ")
3887
3888 (define_split
3889 [(set (match_operand:SI 0 "s_register_operand" "")
3890 (match_operator:SI 1 "logical_binary_operator"
3891 [(match_operator:SI 9 "logical_binary_operator"
3892 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3893 (match_operand:SI 6 "const_int_operand" ""))
3894 (match_operand:SI 7 "s_register_operand" "")])
3895 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3896 (match_operand:SI 3 "const_int_operand" "")
3897 (match_operand:SI 4 "const_int_operand" ""))]))
3898 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3899 "TARGET_32BIT
3900 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3901 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3902 [(set (match_dup 8)
3903 (match_op_dup 1
3904 [(ashift:SI (match_dup 2) (match_dup 4))
3905 (match_dup 5)]))
3906 (set (match_dup 0)
3907 (match_op_dup 1
3908 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3909 (match_dup 7)]))]
3910 "
3911 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3912 ")
3913 \f
3914
3915 ;; Minimum and maximum insns
3916
3917 (define_expand "smaxsi3"
3918 [(parallel [
3919 (set (match_operand:SI 0 "s_register_operand")
3920 (smax:SI (match_operand:SI 1 "s_register_operand")
3921 (match_operand:SI 2 "arm_rhs_operand")))
3922 (clobber (reg:CC CC_REGNUM))])]
3923 "TARGET_32BIT"
3924 "
3925 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3926 {
3927 /* No need for a clobber of the condition code register here. */
3928 emit_insn (gen_rtx_SET (operands[0],
3929 gen_rtx_SMAX (SImode, operands[1],
3930 operands[2])));
3931 DONE;
3932 }
3933 ")
3934
3935 (define_insn "*smax_0"
3936 [(set (match_operand:SI 0 "s_register_operand" "=r")
3937 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3938 (const_int 0)))]
3939 "TARGET_32BIT"
3940 "bic%?\\t%0, %1, %1, asr #31"
3941 [(set_attr "predicable" "yes")
3942 (set_attr "type" "logic_shift_reg")]
3943 )
3944
3945 (define_insn "*smax_m1"
3946 [(set (match_operand:SI 0 "s_register_operand" "=r")
3947 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3948 (const_int -1)))]
3949 "TARGET_32BIT"
3950 "orr%?\\t%0, %1, %1, asr #31"
3951 [(set_attr "predicable" "yes")
3952 (set_attr "type" "logic_shift_reg")]
3953 )
3954
3955 (define_insn_and_split "*arm_smax_insn"
3956 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3957 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3958 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3959 (clobber (reg:CC CC_REGNUM))]
3960 "TARGET_ARM"
3961 "#"
3962 ; cmp\\t%1, %2\;movlt\\t%0, %2
3963 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3964 "TARGET_ARM"
3965 [(set (reg:CC CC_REGNUM)
3966 (compare:CC (match_dup 1) (match_dup 2)))
3967 (set (match_dup 0)
3968 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
3969 (match_dup 1)
3970 (match_dup 2)))]
3971 ""
3972 [(set_attr "conds" "clob")
3973 (set_attr "length" "8,12")
3974 (set_attr "type" "multiple")]
3975 )
3976
3977 (define_expand "sminsi3"
3978 [(parallel [
3979 (set (match_operand:SI 0 "s_register_operand")
3980 (smin:SI (match_operand:SI 1 "s_register_operand")
3981 (match_operand:SI 2 "arm_rhs_operand")))
3982 (clobber (reg:CC CC_REGNUM))])]
3983 "TARGET_32BIT"
3984 "
3985 if (operands[2] == const0_rtx)
3986 {
3987 /* No need for a clobber of the condition code register here. */
3988 emit_insn (gen_rtx_SET (operands[0],
3989 gen_rtx_SMIN (SImode, operands[1],
3990 operands[2])));
3991 DONE;
3992 }
3993 ")
3994
3995 (define_insn "*smin_0"
3996 [(set (match_operand:SI 0 "s_register_operand" "=r")
3997 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3998 (const_int 0)))]
3999 "TARGET_32BIT"
4000 "and%?\\t%0, %1, %1, asr #31"
4001 [(set_attr "predicable" "yes")
4002 (set_attr "type" "logic_shift_reg")]
4003 )
4004
4005 (define_insn_and_split "*arm_smin_insn"
4006 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4007 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
4008 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
4009 (clobber (reg:CC CC_REGNUM))]
4010 "TARGET_ARM"
4011 "#"
4012 ; cmp\\t%1, %2\;movge\\t%0, %2
4013 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
4014 "TARGET_ARM"
4015 [(set (reg:CC CC_REGNUM)
4016 (compare:CC (match_dup 1) (match_dup 2)))
4017 (set (match_dup 0)
4018 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
4019 (match_dup 1)
4020 (match_dup 2)))]
4021 ""
4022 [(set_attr "conds" "clob")
4023 (set_attr "length" "8,12")
4024 (set_attr "type" "multiple,multiple")]
4025 )
4026
4027 (define_expand "umaxsi3"
4028 [(parallel [
4029 (set (match_operand:SI 0 "s_register_operand")
4030 (umax:SI (match_operand:SI 1 "s_register_operand")
4031 (match_operand:SI 2 "arm_rhs_operand")))
4032 (clobber (reg:CC CC_REGNUM))])]
4033 "TARGET_32BIT"
4034 ""
4035 )
4036
4037 (define_insn_and_split "*arm_umaxsi3"
4038 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
4039 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
4040 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
4041 (clobber (reg:CC CC_REGNUM))]
4042 "TARGET_ARM"
4043 "#"
4044 ; cmp\\t%1, %2\;movcc\\t%0, %2
4045 ; cmp\\t%1, %2\;movcs\\t%0, %1
4046 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
4047 "TARGET_ARM"
4048 [(set (reg:CC CC_REGNUM)
4049 (compare:CC (match_dup 1) (match_dup 2)))
4050 (set (match_dup 0)
4051 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
4052 (match_dup 1)
4053 (match_dup 2)))]
4054 ""
4055 [(set_attr "conds" "clob")
4056 (set_attr "length" "8,8,12")
4057 (set_attr "type" "store_4")]
4058 )
4059
4060 (define_expand "uminsi3"
4061 [(parallel [
4062 (set (match_operand:SI 0 "s_register_operand")
4063 (umin:SI (match_operand:SI 1 "s_register_operand")
4064 (match_operand:SI 2 "arm_rhs_operand")))
4065 (clobber (reg:CC CC_REGNUM))])]
4066 "TARGET_32BIT"
4067 ""
4068 )
4069
4070 (define_insn_and_split "*arm_uminsi3"
4071 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
4072 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
4073 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
4074 (clobber (reg:CC CC_REGNUM))]
4075 "TARGET_ARM"
4076 "#"
4077 ; cmp\\t%1, %2\;movcs\\t%0, %2
4078 ; cmp\\t%1, %2\;movcc\\t%0, %1
4079 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
4080 "TARGET_ARM"
4081 [(set (reg:CC CC_REGNUM)
4082 (compare:CC (match_dup 1) (match_dup 2)))
4083 (set (match_dup 0)
4084 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
4085 (match_dup 1)
4086 (match_dup 2)))]
4087 ""
4088 [(set_attr "conds" "clob")
4089 (set_attr "length" "8,8,12")
4090 (set_attr "type" "store_4")]
4091 )
4092
4093 (define_insn "*store_minmaxsi"
4094 [(set (match_operand:SI 0 "memory_operand" "=m")
4095 (match_operator:SI 3 "minmax_operator"
4096 [(match_operand:SI 1 "s_register_operand" "r")
4097 (match_operand:SI 2 "s_register_operand" "r")]))
4098 (clobber (reg:CC CC_REGNUM))]
4099 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
4100 "*
4101 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
4102 operands[1], operands[2]);
4103 output_asm_insn (\"cmp\\t%1, %2\", operands);
4104 if (TARGET_THUMB2)
4105 output_asm_insn (\"ite\t%d3\", operands);
4106 output_asm_insn (\"str%d3\\t%1, %0\", operands);
4107 output_asm_insn (\"str%D3\\t%2, %0\", operands);
4108 return \"\";
4109 "
4110 [(set_attr "conds" "clob")
4111 (set (attr "length")
4112 (if_then_else (eq_attr "is_thumb" "yes")
4113 (const_int 14)
4114 (const_int 12)))
4115 (set_attr "type" "store_4")]
4116 )
4117
4118 ; Reject the frame pointer in operand[1], since reloading this after
4119 ; it has been eliminated can cause carnage.
4120 (define_insn "*minmax_arithsi"
4121 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4122 (match_operator:SI 4 "shiftable_operator"
4123 [(match_operator:SI 5 "minmax_operator"
4124 [(match_operand:SI 2 "s_register_operand" "r,r")
4125 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
4126 (match_operand:SI 1 "s_register_operand" "0,?r")]))
4127 (clobber (reg:CC CC_REGNUM))]
4128 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
4129 "*
4130 {
4131 enum rtx_code code = GET_CODE (operands[4]);
4132 bool need_else;
4133
4134 if (which_alternative != 0 || operands[3] != const0_rtx
4135 || (code != PLUS && code != IOR && code != XOR))
4136 need_else = true;
4137 else
4138 need_else = false;
4139
4140 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
4141 operands[2], operands[3]);
4142 output_asm_insn (\"cmp\\t%2, %3\", operands);
4143 if (TARGET_THUMB2)
4144 {
4145 if (need_else)
4146 output_asm_insn (\"ite\\t%d5\", operands);
4147 else
4148 output_asm_insn (\"it\\t%d5\", operands);
4149 }
4150 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
4151 if (need_else)
4152 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
4153 return \"\";
4154 }"
4155 [(set_attr "conds" "clob")
4156 (set (attr "length")
4157 (if_then_else (eq_attr "is_thumb" "yes")
4158 (const_int 14)
4159 (const_int 12)))
4160 (set_attr "type" "multiple")]
4161 )
4162
4163 ; Reject the frame pointer in operand[1], since reloading this after
4164 ; it has been eliminated can cause carnage.
4165 (define_insn_and_split "*minmax_arithsi_non_canon"
4166 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
4167 (minus:SI
4168 (match_operand:SI 1 "s_register_operand" "0,?Ts")
4169 (match_operator:SI 4 "minmax_operator"
4170 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
4171 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
4172 (clobber (reg:CC CC_REGNUM))]
4173 "TARGET_32BIT && !arm_eliminable_register (operands[1])
4174 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
4175 "#"
4176 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
4177 [(set (reg:CC CC_REGNUM)
4178 (compare:CC (match_dup 2) (match_dup 3)))
4179
4180 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
4181 (set (match_dup 0)
4182 (minus:SI (match_dup 1)
4183 (match_dup 2))))
4184 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
4185 (set (match_dup 0)
4186 (match_dup 6)))]
4187 {
4188 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
4189 operands[2], operands[3]);
4190 enum rtx_code rc = minmax_code (operands[4]);
4191 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
4192 operands[2], operands[3]);
4193
4194 if (mode == CCFPmode || mode == CCFPEmode)
4195 rc = reverse_condition_maybe_unordered (rc);
4196 else
4197 rc = reverse_condition (rc);
4198 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
4199 if (CONST_INT_P (operands[3]))
4200 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
4201 else
4202 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
4203 }
4204 [(set_attr "conds" "clob")
4205 (set (attr "length")
4206 (if_then_else (eq_attr "is_thumb" "yes")
4207 (const_int 14)
4208 (const_int 12)))
4209 (set_attr "type" "multiple")]
4210 )
4211
4212
4213 (define_expand "arm_<ss_op>"
4214 [(set (match_operand:SI 0 "s_register_operand")
4215 (SSPLUSMINUS:SI (match_operand:SI 1 "s_register_operand")
4216 (match_operand:SI 2 "s_register_operand")))]
4217 "TARGET_DSP_MULTIPLY"
4218 {
4219 if (ARM_Q_BIT_READ)
4220 emit_insn (gen_arm_<ss_op>_setq_insn (operands[0],
4221 operands[1], operands[2]));
4222 else
4223 emit_insn (gen_arm_<ss_op>_insn (operands[0], operands[1], operands[2]));
4224 DONE;
4225 }
4226 )
4227
4228 (define_insn "arm_<ss_op><add_clobber_q_name>_insn"
4229 [(set (match_operand:SI 0 "s_register_operand" "=r")
4230 (SSPLUSMINUS:SI (match_operand:SI 1 "s_register_operand" "r")
4231 (match_operand:SI 2 "s_register_operand" "r")))]
4232 "TARGET_DSP_MULTIPLY && <add_clobber_q_pred>"
4233 "<ss_op>%?\t%0, %1, %2"
4234 [(set_attr "predicable" "yes")
4235 (set_attr "type" "alu_dsp_reg")]
4236 )
4237
4238 (define_code_iterator SAT [smin smax])
4239 (define_code_attr SATrev [(smin "smax") (smax "smin")])
4240 (define_code_attr SATlo [(smin "1") (smax "2")])
4241 (define_code_attr SAThi [(smin "2") (smax "1")])
4242
4243 (define_expand "arm_ssat"
4244 [(match_operand:SI 0 "s_register_operand")
4245 (match_operand:SI 1 "s_register_operand")
4246 (match_operand:SI 2 "const_int_operand")]
4247 "TARGET_32BIT && arm_arch6"
4248 {
4249 HOST_WIDE_INT val = INTVAL (operands[2]);
4250 /* The builtin checking code should have ensured the right
4251 range for the immediate. */
4252 gcc_assert (IN_RANGE (val, 1, 32));
4253 HOST_WIDE_INT upper_bound = (HOST_WIDE_INT_1 << (val - 1)) - 1;
4254 HOST_WIDE_INT lower_bound = -upper_bound - 1;
4255 rtx up_rtx = gen_int_mode (upper_bound, SImode);
4256 rtx lo_rtx = gen_int_mode (lower_bound, SImode);
4257 if (ARM_Q_BIT_READ)
4258 emit_insn (gen_satsi_smin_setq (operands[0], lo_rtx,
4259 up_rtx, operands[1]));
4260 else
4261 emit_insn (gen_satsi_smin (operands[0], lo_rtx, up_rtx, operands[1]));
4262 DONE;
4263 }
4264 )
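
;; Worked example: for an SSAT immediate of 8 the code above computes
;; upper_bound = (1 << 7) - 1 = 127 and lower_bound = -128, i.e. the operand
;; is clamped to the range of a signed 8-bit value.  The arm_usat expander
;; below clamps to [0, (1 << imm) - 1] in the same way.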
4265
4266 (define_expand "arm_usat"
4267 [(match_operand:SI 0 "s_register_operand")
4268 (match_operand:SI 1 "s_register_operand")
4269 (match_operand:SI 2 "const_int_operand")]
4270 "TARGET_32BIT && arm_arch6"
4271 {
4272 HOST_WIDE_INT val = INTVAL (operands[2]);
4273 /* The builtin checking code should have ensured the right
4274 range for the immediate. */
4275 gcc_assert (IN_RANGE (val, 0, 31));
4276 HOST_WIDE_INT upper_bound = (HOST_WIDE_INT_1 << val) - 1;
4277 rtx up_rtx = gen_int_mode (upper_bound, SImode);
4278 rtx lo_rtx = CONST0_RTX (SImode);
4279 if (ARM_Q_BIT_READ)
4280 emit_insn (gen_satsi_smin_setq (operands[0], lo_rtx, up_rtx,
4281 operands[1]));
4282 else
4283 emit_insn (gen_satsi_smin (operands[0], lo_rtx, up_rtx, operands[1]));
4284 DONE;
4285 }
4286 )
4287
4288 (define_insn "arm_get_apsr"
4289 [(set (match_operand:SI 0 "s_register_operand" "=r")
4290 (unspec:SI [(reg:CC APSRQ_REGNUM)] UNSPEC_APSR_READ))]
4291 "TARGET_ARM_QBIT"
4292 "mrs%?\t%0, APSR"
4293 [(set_attr "predicable" "yes")
4294 (set_attr "conds" "use")]
4295 )
4296
4297 (define_insn "arm_set_apsr"
4298 [(set (reg:CC APSRQ_REGNUM)
4299 (unspec_volatile:CC
4300 [(match_operand:SI 0 "s_register_operand" "r")] VUNSPEC_APSR_WRITE))]
4301 "TARGET_ARM_QBIT"
4302 "msr%?\tAPSR_nzcvq, %0"
4303 [(set_attr "predicable" "yes")
4304 (set_attr "conds" "set")]
4305 )
4306
4307 ;; Read the APSR and extract the Q bit (bit 27)
4308 (define_expand "arm_saturation_occurred"
4309 [(match_operand:SI 0 "s_register_operand")]
4310 "TARGET_ARM_QBIT"
4311 {
4312 rtx apsr = gen_reg_rtx (SImode);
4313 emit_insn (gen_arm_get_apsr (apsr));
4314 emit_insn (gen_extzv (operands[0], apsr, CONST1_RTX (SImode),
4315 gen_int_mode (27, SImode)));
4316 DONE;
4317 }
4318 )
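
;; Roughly equivalent C sketch of the sequence above (the Q flag lives in
;; APSR bit 27):
;;
;;   unsigned int saturation_occurred (unsigned int apsr)
;;   {
;;     return (apsr >> 27) & 1u;
;;   }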
4319
4320 ;; Read the APSR and set the Q bit (bit position 27) according to operand 0
4321 (define_expand "arm_set_saturation"
4322 [(match_operand:SI 0 "reg_or_int_operand")]
4323 "TARGET_ARM_QBIT"
4324 {
4325 rtx apsr = gen_reg_rtx (SImode);
4326 emit_insn (gen_arm_get_apsr (apsr));
4327 rtx to_insert = gen_reg_rtx (SImode);
4328 if (CONST_INT_P (operands[0]))
4329 emit_move_insn (to_insert, operands[0] == CONST0_RTX (SImode)
4330 ? CONST0_RTX (SImode) : CONST1_RTX (SImode));
4331 else
4332 {
4333 rtx cmp = gen_rtx_NE (SImode, operands[0], CONST0_RTX (SImode));
4334 emit_insn (gen_cstoresi4 (to_insert, cmp, operands[0],
4335 CONST0_RTX (SImode)));
4336 }
4337 emit_insn (gen_insv (apsr, CONST1_RTX (SImode),
4338 gen_int_mode (27, SImode), to_insert));
4339 emit_insn (gen_arm_set_apsr (apsr));
4340 DONE;
4341 }
4342 )
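
;; The insertion above amounts to a read-modify-write of APSR bit 27
;; (an illustrative C sketch; any non-zero value sets the flag):
;;
;;   unsigned int set_q_bit (unsigned int apsr, unsigned int value)
;;   {
;;     return (apsr & ~(1u << 27)) | ((value != 0 ? 1u : 0u) << 27);
;;   }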
4343
4344 (define_insn "satsi_<SAT:code><add_clobber_q_name>"
4345 [(set (match_operand:SI 0 "s_register_operand" "=r")
4346 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
4347 (match_operand:SI 1 "const_int_operand" "i"))
4348 (match_operand:SI 2 "const_int_operand" "i")))]
4349 "TARGET_32BIT && arm_arch6 && <add_clobber_q_pred>
4350 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4351 {
4352 int mask;
4353 bool signed_sat;
4354 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4355 &mask, &signed_sat))
4356 gcc_unreachable ();
4357
4358 operands[1] = GEN_INT (mask);
4359 if (signed_sat)
4360 return "ssat%?\t%0, %1, %3";
4361 else
4362 return "usat%?\t%0, %1, %3";
4363 }
4364 [(set_attr "predicable" "yes")
4365 (set_attr "type" "alus_imm")]
4366 )
4367
4368 (define_insn "*satsi_<SAT:code>_shift"
4369 [(set (match_operand:SI 0 "s_register_operand" "=r")
4370 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
4371 [(match_operand:SI 4 "s_register_operand" "r")
4372 (match_operand:SI 5 "const_int_operand" "i")])
4373 (match_operand:SI 1 "const_int_operand" "i"))
4374 (match_operand:SI 2 "const_int_operand" "i")))]
4375 "TARGET_32BIT && arm_arch6 && !ARM_Q_BIT_READ
4376 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4377 {
4378 int mask;
4379 bool signed_sat;
4380 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4381 &mask, &signed_sat))
4382 gcc_unreachable ();
4383
4384 operands[1] = GEN_INT (mask);
4385 if (signed_sat)
4386 return "ssat%?\t%0, %1, %4%S3";
4387 else
4388 return "usat%?\t%0, %1, %4%S3";
4389 }
4390 [(set_attr "predicable" "yes")
4391 (set_attr "shift" "3")
4392 (set_attr "type" "logic_shift_reg")])
4393 \f
4394 ;; Shift and rotation insns
4395
4396 (define_expand "ashldi3"
4397 [(set (match_operand:DI 0 "s_register_operand")
4398 (ashift:DI (match_operand:DI 1 "s_register_operand")
4399 (match_operand:SI 2 "reg_or_int_operand")))]
4400 "TARGET_32BIT"
4401 "
4402 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
4403 operands[2], gen_reg_rtx (SImode),
4404 gen_reg_rtx (SImode));
4405 DONE;
4406 ")
4407
4408 (define_expand "ashlsi3"
4409 [(set (match_operand:SI 0 "s_register_operand")
4410 (ashift:SI (match_operand:SI 1 "s_register_operand")
4411 (match_operand:SI 2 "arm_rhs_operand")))]
4412 "TARGET_EITHER"
4413 "
4414 if (CONST_INT_P (operands[2])
4415 && (UINTVAL (operands[2])) > 31)
4416 {
4417 emit_insn (gen_movsi (operands[0], const0_rtx));
4418 DONE;
4419 }
4420 "
4421 )
4422
4423 (define_expand "ashrdi3"
4424 [(set (match_operand:DI 0 "s_register_operand")
4425 (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
4426 (match_operand:SI 2 "reg_or_int_operand")))]
4427 "TARGET_32BIT"
4428 "
4429 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
4430 operands[2], gen_reg_rtx (SImode),
4431 gen_reg_rtx (SImode));
4432 DONE;
4433 ")
4434
4435 (define_expand "ashrsi3"
4436 [(set (match_operand:SI 0 "s_register_operand")
4437 (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
4438 (match_operand:SI 2 "arm_rhs_operand")))]
4439 "TARGET_EITHER"
4440 "
4441 if (CONST_INT_P (operands[2])
4442 && UINTVAL (operands[2]) > 31)
4443 operands[2] = GEN_INT (31);
4444 "
4445 )
4446
4447 (define_expand "lshrdi3"
4448 [(set (match_operand:DI 0 "s_register_operand")
4449 (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
4450 (match_operand:SI 2 "reg_or_int_operand")))]
4451 "TARGET_32BIT"
4452 "
4453 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
4454 operands[2], gen_reg_rtx (SImode),
4455 gen_reg_rtx (SImode));
4456 DONE;
4457 ")
4458
4459 (define_expand "lshrsi3"
4460 [(set (match_operand:SI 0 "s_register_operand")
4461 (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
4462 (match_operand:SI 2 "arm_rhs_operand")))]
4463 "TARGET_EITHER"
4464 "
4465 if (CONST_INT_P (operands[2])
4466 && (UINTVAL (operands[2])) > 31)
4467 {
4468 emit_insn (gen_movsi (operands[0], const0_rtx));
4469 DONE;
4470 }
4471 "
4472 )
4473
4474 (define_expand "rotlsi3"
4475 [(set (match_operand:SI 0 "s_register_operand")
4476 (rotatert:SI (match_operand:SI 1 "s_register_operand")
4477 (match_operand:SI 2 "reg_or_int_operand")))]
4478 "TARGET_32BIT"
4479 "
4480 if (CONST_INT_P (operands[2]))
4481 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
4482 else
4483 {
4484 rtx reg = gen_reg_rtx (SImode);
4485 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
4486 operands[2] = reg;
4487 }
4488 "
4489 )
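
;; A rotate left is expanded here as a rotate right by the complementary
;; amount, using the identity rotl (x, n) == rotr (x, (32 - n) % 32) for
;; 0 <= n <= 31; e.g. rotating 0x80000001 left by 1 and right by 31 both
;; give 0x00000003.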
4490
4491 (define_expand "rotrsi3"
4492 [(set (match_operand:SI 0 "s_register_operand")
4493 (rotatert:SI (match_operand:SI 1 "s_register_operand")
4494 (match_operand:SI 2 "arm_rhs_operand")))]
4495 "TARGET_EITHER"
4496 "
4497 if (TARGET_32BIT)
4498 {
4499 if (CONST_INT_P (operands[2])
4500 && UINTVAL (operands[2]) > 31)
4501 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
4502 }
4503 else /* TARGET_THUMB1 */
4504 {
4505 if (CONST_INT_P (operands [2]))
4506 operands [2] = force_reg (SImode, operands[2]);
4507 }
4508 "
4509 )
4510
4511 (define_insn "*arm_shiftsi3"
4512 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
4513 (match_operator:SI 3 "shift_operator"
4514 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
4515 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
4516 "TARGET_32BIT"
4517 "* return arm_output_shift(operands, 0);"
4518 [(set_attr "predicable" "yes")
4519 (set_attr "arch" "t2,t2,*,*")
4520 (set_attr "predicable_short_it" "yes,yes,no,no")
4521 (set_attr "length" "4")
4522 (set_attr "shift" "1")
4523 (set_attr "type" "alu_shift_reg,alu_shift_imm,alu_shift_imm,alu_shift_reg")]
4524 )
4525
4526 (define_insn "*shiftsi3_compare0"
4527 [(set (reg:CC_NOOV CC_REGNUM)
4528 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4529 [(match_operand:SI 1 "s_register_operand" "r,r")
4530 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4531 (const_int 0)))
4532 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4533 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
4534 "TARGET_32BIT"
4535 "* return arm_output_shift(operands, 1);"
4536 [(set_attr "conds" "set")
4537 (set_attr "shift" "1")
4538 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
4539 )
4540
4541 (define_insn "*shiftsi3_compare0_scratch"
4542 [(set (reg:CC_NOOV CC_REGNUM)
4543 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4544 [(match_operand:SI 1 "s_register_operand" "r,r")
4545 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4546 (const_int 0)))
4547 (clobber (match_scratch:SI 0 "=r,r"))]
4548 "TARGET_32BIT"
4549 "* return arm_output_shift(operands, 1);"
4550 [(set_attr "conds" "set")
4551 (set_attr "shift" "1")
4552 (set_attr "type" "shift_imm,shift_reg")]
4553 )
4554
4555 (define_insn "*not_shiftsi"
4556 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4557 (not:SI (match_operator:SI 3 "shift_operator"
4558 [(match_operand:SI 1 "s_register_operand" "r,r")
4559 (match_operand:SI 2 "shift_amount_operand" "M,r")])))]
4560 "TARGET_32BIT"
4561 "mvn%?\\t%0, %1%S3"
4562 [(set_attr "predicable" "yes")
4563 (set_attr "shift" "1")
4564 (set_attr "arch" "32,a")
4565 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4566
4567 (define_insn "*not_shiftsi_compare0"
4568 [(set (reg:CC_NOOV CC_REGNUM)
4569 (compare:CC_NOOV
4570 (not:SI (match_operator:SI 3 "shift_operator"
4571 [(match_operand:SI 1 "s_register_operand" "r,r")
4572 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
4573 (const_int 0)))
4574 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4575 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
4576 "TARGET_32BIT"
4577 "mvns%?\\t%0, %1%S3"
4578 [(set_attr "conds" "set")
4579 (set_attr "shift" "1")
4580 (set_attr "arch" "32,a")
4581 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4582
4583 (define_insn "*not_shiftsi_compare0_scratch"
4584 [(set (reg:CC_NOOV CC_REGNUM)
4585 (compare:CC_NOOV
4586 (not:SI (match_operator:SI 3 "shift_operator"
4587 [(match_operand:SI 1 "s_register_operand" "r,r")
4588 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
4589 (const_int 0)))
4590 (clobber (match_scratch:SI 0 "=r,r"))]
4591 "TARGET_32BIT"
4592 "mvns%?\\t%0, %1%S3"
4593 [(set_attr "conds" "set")
4594 (set_attr "shift" "1")
4595 (set_attr "arch" "32,a")
4596 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4597
4598 ;; We don't really have extzv, but defining this using shifts helps
4599 ;; to reduce register pressure later on.
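;;
;; The shift pair emitted below computes, in C terms (a sketch assuming a
;; 32-bit x, 1 <= width <= 31 and width + pos <= 32):
;;
;;   /* Hypothetical helper modelling extzv.  */
;;   unsigned int extract_field (unsigned int x, int width, int pos)
;;   {
;;     return (x << (32 - width - pos)) >> (32 - width);
;;   }
;;
;; i.e. the field is first left-justified and then shifted back down with a
;; logical shift so the vacated bits are zero-filled.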
4600
4601 (define_expand "extzv"
4602 [(set (match_operand 0 "s_register_operand")
4603 (zero_extract (match_operand 1 "nonimmediate_operand")
4604 (match_operand 2 "const_int_operand")
4605 (match_operand 3 "const_int_operand")))]
4606 "TARGET_THUMB1 || arm_arch_thumb2"
4607 "
4608 {
4609 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
4610 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
4611
4612 if (arm_arch_thumb2)
4613 {
4614 HOST_WIDE_INT width = INTVAL (operands[2]);
4615 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4616
4617 if (unaligned_access && MEM_P (operands[1])
4618 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
4619 {
4620 rtx base_addr;
4621
4622 if (BYTES_BIG_ENDIAN)
4623 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
4624 - bitpos;
4625
4626 if (width == 32)
4627 {
4628 base_addr = adjust_address (operands[1], SImode,
4629 bitpos / BITS_PER_UNIT);
4630 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4631 }
4632 else
4633 {
4634 rtx dest = operands[0];
4635 rtx tmp = gen_reg_rtx (SImode);
4636
4637 /* We may get a paradoxical subreg here. Strip it off. */
4638 if (GET_CODE (dest) == SUBREG
4639 && GET_MODE (dest) == SImode
4640 && GET_MODE (SUBREG_REG (dest)) == HImode)
4641 dest = SUBREG_REG (dest);
4642
4643 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4644 FAIL;
4645
4646 base_addr = adjust_address (operands[1], HImode,
4647 bitpos / BITS_PER_UNIT);
4648 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
4649 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4650 }
4651 DONE;
4652 }
4653 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
4654 {
4655 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
4656 operands[3]));
4657 DONE;
4658 }
4659 else
4660 FAIL;
4661 }
4662
4663 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4664 FAIL;
4665
4666 operands[3] = GEN_INT (rshift);
4667
4668 if (lshift == 0)
4669 {
4670 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
4671 DONE;
4672 }
4673
4674 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
4675 operands[3], gen_reg_rtx (SImode)));
4676 DONE;
4677 }"
4678 )
4679
4680 ;; Helper for extzv, for the Thumb-1 register-shifts case.
4681
4682 (define_expand "extzv_t1"
4683 [(set (match_operand:SI 4 "s_register_operand")
4684 (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
4685 (match_operand:SI 2 "const_int_operand")))
4686 (set (match_operand:SI 0 "s_register_operand")
4687 (lshiftrt:SI (match_dup 4)
4688 (match_operand:SI 3 "const_int_operand")))]
4689 "TARGET_THUMB1"
4690 "")
4691
4692 (define_expand "extv"
4693 [(set (match_operand 0 "s_register_operand")
4694 (sign_extract (match_operand 1 "nonimmediate_operand")
4695 (match_operand 2 "const_int_operand")
4696 (match_operand 3 "const_int_operand")))]
4697 "arm_arch_thumb2"
4698 {
4699 HOST_WIDE_INT width = INTVAL (operands[2]);
4700 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4701
4702 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
4703 && (bitpos % BITS_PER_UNIT) == 0)
4704 {
4705 rtx base_addr;
4706
4707 if (BYTES_BIG_ENDIAN)
4708 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
4709
4710 if (width == 32)
4711 {
4712 base_addr = adjust_address (operands[1], SImode,
4713 bitpos / BITS_PER_UNIT);
4714 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4715 }
4716 else
4717 {
4718 rtx dest = operands[0];
4719 rtx tmp = gen_reg_rtx (SImode);
4720
4721 /* We may get a paradoxical subreg here. Strip it off. */
4722 if (GET_CODE (dest) == SUBREG
4723 && GET_MODE (dest) == SImode
4724 && GET_MODE (SUBREG_REG (dest)) == HImode)
4725 dest = SUBREG_REG (dest);
4726
4727 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4728 FAIL;
4729
4730 base_addr = adjust_address (operands[1], HImode,
4731 bitpos / BITS_PER_UNIT);
4732 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
4733 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4734 }
4735
4736 DONE;
4737 }
4738 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4739 FAIL;
4740 else if (GET_MODE (operands[0]) == SImode
4741 && GET_MODE (operands[1]) == SImode)
4742 {
4743 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
4744 operands[3]));
4745 DONE;
4746 }
4747
4748 FAIL;
4749 })
4750
4751 ; Helper to expand register forms of extv with the proper modes.
4752
4753 (define_expand "extv_regsi"
4754 [(set (match_operand:SI 0 "s_register_operand")
4755 (sign_extract:SI (match_operand:SI 1 "s_register_operand")
4756 (match_operand 2 "const_int_operand")
4757 (match_operand 3 "const_int_operand")))]
4758 ""
4759 {
4760 })
4761
4762 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
4763
4764 (define_insn "unaligned_loaddi"
4765 [(set (match_operand:DI 0 "s_register_operand" "=r")
4766 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
4767 UNSPEC_UNALIGNED_LOAD))]
4768 "TARGET_32BIT && TARGET_LDRD"
4769 "*
4770 return output_move_double (operands, true, NULL);
4771 "
4772 [(set_attr "length" "8")
4773 (set_attr "type" "load_8")])
4774
4775 (define_insn "unaligned_loadsi"
4776 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4777 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
4778 UNSPEC_UNALIGNED_LOAD))]
4779 "unaligned_access"
4780 "@
4781 ldr\t%0, %1\t@ unaligned
4782 ldr%?\t%0, %1\t@ unaligned
4783 ldr%?\t%0, %1\t@ unaligned"
4784 [(set_attr "arch" "t1,t2,32")
4785 (set_attr "length" "2,2,4")
4786 (set_attr "predicable" "no,yes,yes")
4787 (set_attr "predicable_short_it" "no,yes,no")
4788 (set_attr "type" "load_4")])
4789
4790 ;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
4791 ;; address (there's no immediate format). That's tricky to support
4792 ;; here and we don't really need this pattern for that case, so only
4793 ;; enable for 32-bit ISAs.
4794 (define_insn "unaligned_loadhis"
4795 [(set (match_operand:SI 0 "s_register_operand" "=r")
4796 (sign_extend:SI
4797 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
4798 UNSPEC_UNALIGNED_LOAD)))]
4799 "unaligned_access && TARGET_32BIT"
4800 "ldrsh%?\t%0, %1\t@ unaligned"
4801 [(set_attr "predicable" "yes")
4802 (set_attr "type" "load_byte")])
4803
4804 (define_insn "unaligned_loadhiu"
4805 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4806 (zero_extend:SI
4807 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
4808 UNSPEC_UNALIGNED_LOAD)))]
4809 "unaligned_access"
4810 "@
4811 ldrh\t%0, %1\t@ unaligned
4812 ldrh%?\t%0, %1\t@ unaligned
4813 ldrh%?\t%0, %1\t@ unaligned"
4814 [(set_attr "arch" "t1,t2,32")
4815 (set_attr "length" "2,2,4")
4816 (set_attr "predicable" "no,yes,yes")
4817 (set_attr "predicable_short_it" "no,yes,no")
4818 (set_attr "type" "load_byte")])
4819
4820 (define_insn "unaligned_storedi"
4821 [(set (match_operand:DI 0 "memory_operand" "=m")
4822 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
4823 UNSPEC_UNALIGNED_STORE))]
4824 "TARGET_32BIT && TARGET_LDRD"
4825 "*
4826 return output_move_double (operands, true, NULL);
4827 "
4828 [(set_attr "length" "8")
4829 (set_attr "type" "store_8")])
4830
4831 (define_insn "unaligned_storesi"
4832 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
4833 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
4834 UNSPEC_UNALIGNED_STORE))]
4835 "unaligned_access"
4836 "@
4837 str\t%1, %0\t@ unaligned
4838 str%?\t%1, %0\t@ unaligned
4839 str%?\t%1, %0\t@ unaligned"
4840 [(set_attr "arch" "t1,t2,32")
4841 (set_attr "length" "2,2,4")
4842 (set_attr "predicable" "no,yes,yes")
4843 (set_attr "predicable_short_it" "no,yes,no")
4844 (set_attr "type" "store_4")])
4845
4846 (define_insn "unaligned_storehi"
4847 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
4848 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
4849 UNSPEC_UNALIGNED_STORE))]
4850 "unaligned_access"
4851 "@
4852 strh\t%1, %0\t@ unaligned
4853 strh%?\t%1, %0\t@ unaligned
4854 strh%?\t%1, %0\t@ unaligned"
4855 [(set_attr "arch" "t1,t2,32")
4856 (set_attr "length" "2,2,4")
4857 (set_attr "predicable" "no,yes,yes")
4858 (set_attr "predicable_short_it" "no,yes,no")
4859 (set_attr "type" "store_4")])
4860
4861
4862 (define_insn "*extv_reg"
4863 [(set (match_operand:SI 0 "s_register_operand" "=r")
4864 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4865 (match_operand:SI 2 "const_int_operand" "n")
4866 (match_operand:SI 3 "const_int_operand" "n")))]
4867 "arm_arch_thumb2
4868 && IN_RANGE (INTVAL (operands[3]), 0, 31)
4869 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
4870 "sbfx%?\t%0, %1, %3, %2"
4871 [(set_attr "length" "4")
4872 (set_attr "predicable" "yes")
4873 (set_attr "type" "bfm")]
4874 )
4875
4876 (define_insn "extzv_t2"
4877 [(set (match_operand:SI 0 "s_register_operand" "=r")
4878 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4879 (match_operand:SI 2 "const_int_operand" "n")
4880 (match_operand:SI 3 "const_int_operand" "n")))]
4881 "arm_arch_thumb2
4882 && IN_RANGE (INTVAL (operands[3]), 0, 31)
4883 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
4884 "ubfx%?\t%0, %1, %3, %2"
4885 [(set_attr "length" "4")
4886 (set_attr "predicable" "yes")
4887 (set_attr "type" "bfm")]
4888 )
4889
4890
4891 ;; Division instructions
4892 (define_insn "divsi3"
4893 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4894 (div:SI (match_operand:SI 1 "s_register_operand" "r,r")
4895 (match_operand:SI 2 "s_register_operand" "r,r")))]
4896 "TARGET_IDIV"
4897 "@
4898 sdiv%?\t%0, %1, %2
4899 sdiv\t%0, %1, %2"
4900 [(set_attr "arch" "32,v8mb")
4901 (set_attr "predicable" "yes")
4902 (set_attr "type" "sdiv")]
4903 )
4904
4905 (define_insn "udivsi3"
4906 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4907 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
4908 (match_operand:SI 2 "s_register_operand" "r,r")))]
4909 "TARGET_IDIV"
4910 "@
4911 udiv%?\t%0, %1, %2
4912 udiv\t%0, %1, %2"
4913 [(set_attr "arch" "32,v8mb")
4914 (set_attr "predicable" "yes")
4915 (set_attr "type" "udiv")]
4916 )
4917
4918 \f
4919 ;; Unary arithmetic insns
4920
4921 (define_expand "negv<SIDI:mode>3"
4922 [(match_operand:SIDI 0 "s_register_operand")
4923 (match_operand:SIDI 1 "s_register_operand")
4924 (match_operand 2 "")]
4925 "TARGET_32BIT"
4926 {
4927 emit_insn (gen_subv<mode>4 (operands[0], const0_rtx, operands[1],
4928 operands[2]));
4929 DONE;
4930 })
4931
4932 (define_expand "negsi2"
4933 [(set (match_operand:SI 0 "s_register_operand")
4934 (neg:SI (match_operand:SI 1 "s_register_operand")))]
4935 "TARGET_EITHER"
4936 ""
4937 )
4938
4939 (define_insn "*arm_negsi2"
4940 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4941 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4942 "TARGET_32BIT"
4943 "rsb%?\\t%0, %1, #0"
4944 [(set_attr "predicable" "yes")
4945 (set_attr "predicable_short_it" "yes,no")
4946 (set_attr "arch" "t2,*")
4947 (set_attr "length" "4")
4948 (set_attr "type" "alu_imm")]
4949 )
4950
4951 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
4952 ;; rather than (0 cmp reg). This gives the same results for unsigned
4953 ;; and equality compares, which is what we mostly need here.
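;; Concretely, comparing ~x with -1 computes (~x) - (-1) = ~x + 1 = -x,
;; which is the same value, carry and zero result as 0 - x, so the unsigned
;; and equality conditions are unaffected by the rewrite.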
4954 (define_insn "negsi2_0compare"
4955 [(set (reg:CC_RSB CC_REGNUM)
4956 (compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r"))
4957 (const_int -1)))
4958 (set (match_operand:SI 0 "s_register_operand" "=l,r")
4959 (neg:SI (match_dup 1)))]
4960 "TARGET_32BIT"
4961 "@
4962 negs\\t%0, %1
4963 rsbs\\t%0, %1, #0"
4964 [(set_attr "conds" "set")
4965 (set_attr "arch" "t2,*")
4966 (set_attr "length" "2,*")
4967 (set_attr "type" "alus_imm")]
4968 )
4969
4970 (define_insn "negsi2_carryin"
4971 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4972 (minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r"))
4973 (match_operand:SI 2 "arm_borrow_operation" "")))]
4974 "TARGET_32BIT"
4975 "@
4976 rsc\\t%0, %1, #0
4977 sbc\\t%0, %1, %1, lsl #1"
4978 [(set_attr "conds" "use")
4979 (set_attr "arch" "a,t2")
4980 (set_attr "type" "adc_imm,adc_reg")]
4981 )
4982
4983 (define_expand "negsf2"
4984 [(set (match_operand:SF 0 "s_register_operand")
4985 (neg:SF (match_operand:SF 1 "s_register_operand")))]
4986 "TARGET_32BIT && TARGET_HARD_FLOAT"
4987 ""
4988 )
4989
4990 (define_expand "negdf2"
4991 [(set (match_operand:DF 0 "s_register_operand")
4992 (neg:DF (match_operand:DF 1 "s_register_operand")))]
4993 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4994 "")
4995
4996 ;; abssi2 doesn't really clobber the condition codes if a different register
4997 ;; is being set. To keep things simple, assume during rtl manipulations that
4998 ;; it does, but tell the final scan operator the truth. Similarly for
4999 ;; (neg (abs...))
5000
5001 (define_expand "abssi2"
5002 [(parallel
5003 [(set (match_operand:SI 0 "s_register_operand")
5004 (abs:SI (match_operand:SI 1 "s_register_operand")))
5005 (clobber (match_dup 2))])]
5006 "TARGET_EITHER"
5007 "
5008 if (TARGET_THUMB1)
5009 operands[2] = gen_rtx_SCRATCH (SImode);
5010 else
5011 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
5012 ")
5013
5014 (define_insn_and_split "*arm_abssi2"
5015 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
5016 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
5017 (clobber (reg:CC CC_REGNUM))]
5018 "TARGET_ARM"
5019 "#"
5020 "&& reload_completed"
5021 [(const_int 0)]
5022 {
5023 /* if (which_alternative == 0) */
5024 if (REGNO(operands[0]) == REGNO(operands[1]))
5025 {
5026 /* Emit the pattern:
5027 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
5028 [(set (reg:CC CC_REGNUM)
5029 (compare:CC (match_dup 0) (const_int 0)))
5030 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
5031 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
5032 */
5033 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
5034 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
5035 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5036 (gen_rtx_LT (SImode,
5037 gen_rtx_REG (CCmode, CC_REGNUM),
5038 const0_rtx)),
5039 (gen_rtx_SET (operands[0],
5040 (gen_rtx_MINUS (SImode,
5041 const0_rtx,
5042 operands[1]))))));
5043 DONE;
5044 }
5045 else
5046 {
5047 /* Emit the pattern:
5048 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
5049 [(set (match_dup 0)
5050 (xor:SI (match_dup 1)
5051 (ashiftrt:SI (match_dup 1) (const_int 31))))
5052 (set (match_dup 0)
5053 (minus:SI (match_dup 0)
5054 (ashiftrt:SI (match_dup 1) (const_int 31))))]
5055 */
5056 emit_insn (gen_rtx_SET (operands[0],
5057 gen_rtx_XOR (SImode,
5058 gen_rtx_ASHIFTRT (SImode,
5059 operands[1],
5060 GEN_INT (31)),
5061 operands[1])));
5062 emit_insn (gen_rtx_SET (operands[0],
5063 gen_rtx_MINUS (SImode,
5064 operands[0],
5065 gen_rtx_ASHIFTRT (SImode,
5066 operands[1],
5067 GEN_INT (31)))));
5068 DONE;
5069 }
5070 }
5071 [(set_attr "conds" "clob,*")
5072 (set_attr "shift" "1")
5073 (set_attr "predicable" "no, yes")
5074 (set_attr "length" "8")
5075 (set_attr "type" "multiple")]
5076 )
5077
5078 (define_insn_and_split "*arm_neg_abssi2"
5079 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
5080 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
5081 (clobber (reg:CC CC_REGNUM))]
5082 "TARGET_ARM"
5083 "#"
5084 "&& reload_completed"
5085 [(const_int 0)]
5086 {
5087 /* if (which_alternative == 0) */
5088 if (REGNO (operands[0]) == REGNO (operands[1]))
5089 {
5090 /* Emit the pattern:
5091 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
5092 */
5093 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
5094 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
5095 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5096 gen_rtx_GT (SImode,
5097 gen_rtx_REG (CCmode, CC_REGNUM),
5098 const0_rtx),
5099 gen_rtx_SET (operands[0],
5100 (gen_rtx_MINUS (SImode,
5101 const0_rtx,
5102 operands[1])))));
5103 }
5104 else
5105 {
5106 /* Emit the pattern:
5107 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
5108 */
5109 emit_insn (gen_rtx_SET (operands[0],
5110 gen_rtx_XOR (SImode,
5111 gen_rtx_ASHIFTRT (SImode,
5112 operands[1],
5113 GEN_INT (31)),
5114 operands[1])));
5115 emit_insn (gen_rtx_SET (operands[0],
5116 gen_rtx_MINUS (SImode,
5117 gen_rtx_ASHIFTRT (SImode,
5118 operands[1],
5119 GEN_INT (31)),
5120 operands[0])));
5121 }
5122 DONE;
5123 }
5124 [(set_attr "conds" "clob,*")
5125 (set_attr "shift" "1")
5126 (set_attr "predicable" "no, yes")
5127 (set_attr "length" "8")
5128 (set_attr "type" "multiple")]
5129 )
5130
5131 (define_expand "abssf2"
5132 [(set (match_operand:SF 0 "s_register_operand")
5133 (abs:SF (match_operand:SF 1 "s_register_operand")))]
5134 "TARGET_32BIT && TARGET_HARD_FLOAT"
5135 "")
5136
5137 (define_expand "absdf2"
5138 [(set (match_operand:DF 0 "s_register_operand")
5139 (abs:DF (match_operand:DF 1 "s_register_operand")))]
5140 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5141 "")
5142
5143 (define_expand "sqrtsf2"
5144 [(set (match_operand:SF 0 "s_register_operand")
5145 (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
5146 "TARGET_32BIT && TARGET_HARD_FLOAT"
5147 "")
5148
5149 (define_expand "sqrtdf2"
5150 [(set (match_operand:DF 0 "s_register_operand")
5151 (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
5152 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
5153 "")
5154
5155 (define_expand "one_cmplsi2"
5156 [(set (match_operand:SI 0 "s_register_operand")
5157 (not:SI (match_operand:SI 1 "s_register_operand")))]
5158 "TARGET_EITHER"
5159 ""
5160 )
5161
5162 (define_insn "*arm_one_cmplsi2"
5163 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
5164 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
5165 "TARGET_32BIT"
5166 "mvn%?\\t%0, %1"
5167 [(set_attr "predicable" "yes")
5168 (set_attr "predicable_short_it" "yes,no")
5169 (set_attr "arch" "t2,*")
5170 (set_attr "length" "4")
5171 (set_attr "type" "mvn_reg")]
5172 )
5173
5174 (define_insn "*notsi_compare0"
5175 [(set (reg:CC_NOOV CC_REGNUM)
5176 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
5177 (const_int 0)))
5178 (set (match_operand:SI 0 "s_register_operand" "=r")
5179 (not:SI (match_dup 1)))]
5180 "TARGET_32BIT"
5181 "mvns%?\\t%0, %1"
5182 [(set_attr "conds" "set")
5183 (set_attr "type" "mvn_reg")]
5184 )
5185
5186 (define_insn "*notsi_compare0_scratch"
5187 [(set (reg:CC_NOOV CC_REGNUM)
5188 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
5189 (const_int 0)))
5190 (clobber (match_scratch:SI 0 "=r"))]
5191 "TARGET_32BIT"
5192 "mvns%?\\t%0, %1"
5193 [(set_attr "conds" "set")
5194 (set_attr "type" "mvn_reg")]
5195 )
5196 \f
5197 ;; Fixed <--> Floating conversion insns
5198
5199 (define_expand "floatsihf2"
5200 [(set (match_operand:HF 0 "general_operand")
5201 (float:HF (match_operand:SI 1 "general_operand")))]
5202 "TARGET_EITHER"
5203 "
5204 {
5205 rtx op1 = gen_reg_rtx (SFmode);
5206 expand_float (op1, operands[1], 0);
5207 op1 = convert_to_mode (HFmode, op1, 0);
5208 emit_move_insn (operands[0], op1);
5209 DONE;
5210 }"
5211 )
5212
5213 (define_expand "floatdihf2"
5214 [(set (match_operand:HF 0 "general_operand")
5215 (float:HF (match_operand:DI 1 "general_operand")))]
5216 "TARGET_EITHER"
5217 "
5218 {
5219 rtx op1 = gen_reg_rtx (SFmode);
5220 expand_float (op1, operands[1], 0);
5221 op1 = convert_to_mode (HFmode, op1, 0);
5222 emit_move_insn (operands[0], op1);
5223 DONE;
5224 }"
5225 )
5226
5227 (define_expand "floatsisf2"
5228 [(set (match_operand:SF 0 "s_register_operand")
5229 (float:SF (match_operand:SI 1 "s_register_operand")))]
5230 "TARGET_32BIT && TARGET_HARD_FLOAT"
5231 "
5232 ")
5233
5234 (define_expand "floatsidf2"
5235 [(set (match_operand:DF 0 "s_register_operand")
5236 (float:DF (match_operand:SI 1 "s_register_operand")))]
5237 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5238 "
5239 ")
5240
5241 (define_expand "fix_trunchfsi2"
5242 [(set (match_operand:SI 0 "general_operand")
5243 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
5244 "TARGET_EITHER"
5245 "
5246 {
5247 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5248 expand_fix (operands[0], op1, 0);
5249 DONE;
5250 }"
5251 )
5252
5253 (define_expand "fix_trunchfdi2"
5254 [(set (match_operand:DI 0 "general_operand")
5255 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
5256 "TARGET_EITHER"
5257 "
5258 {
5259 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5260 expand_fix (operands[0], op1, 0);
5261 DONE;
5262 }"
5263 )
5264
5265 (define_expand "fix_truncsfsi2"
5266 [(set (match_operand:SI 0 "s_register_operand")
5267 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
5268 "TARGET_32BIT && TARGET_HARD_FLOAT"
5269 "
5270 ")
5271
5272 (define_expand "fix_truncdfsi2"
5273 [(set (match_operand:SI 0 "s_register_operand")
5274 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
5275 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5276 "
5277 ")
5278
5279 ;; Truncation insns
5280
5281 (define_expand "truncdfsf2"
5282 [(set (match_operand:SF 0 "s_register_operand")
5283 (float_truncate:SF
5284 (match_operand:DF 1 "s_register_operand")))]
5285 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5286 ""
5287 )
5288
5289 ;; DFmode to HFmode conversions on targets without a single-step hardware
5290 ;; instruction for it would have to go through SFmode. This is dangerous
5291 ;; as it introduces double rounding.
5292 ;;
5293 ;; Disable this pattern unless we are in an unsafe math mode, or we have
5294 ;; a single-step instruction.
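;;
;; (Illustrative example of the hazard: a DFmode value lying just above the
;; midpoint of two adjacent HFmode numbers may first round down to an SFmode
;; value that sits exactly on that midpoint; the second rounding then resolves
;; the tie to even and can yield a different HFmode result than a single
;; direct DF->HF rounding would.)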
5295
5296 (define_expand "truncdfhf2"
5297 [(set (match_operand:HF 0 "s_register_operand")
5298 (float_truncate:HF
5299 (match_operand:DF 1 "s_register_operand")))]
5300 "(TARGET_EITHER && flag_unsafe_math_optimizations)
5301 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
5302 {
5303 /* We don't have a direct instruction for this, so we must be in
5304 an unsafe math mode and go via SFmode. */
5305
5306 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
5307 {
5308 rtx op1;
5309 op1 = convert_to_mode (SFmode, operands[1], 0);
5310 op1 = convert_to_mode (HFmode, op1, 0);
5311 emit_move_insn (operands[0], op1);
5312 DONE;
5313 }
5314 /* Otherwise, we will pick this up as a single instruction with
5315 no intermediate rounding. */
5316 }
5317 )
5318 \f
5319 ;; Zero and sign extension instructions.
5320
5321 (define_expand "zero_extend<mode>di2"
5322 [(set (match_operand:DI 0 "s_register_operand" "")
5323 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
5324 "TARGET_32BIT <qhs_zextenddi_cond>"
5325 {
5326 rtx res_lo, res_hi, op0_lo, op0_hi;
5327 res_lo = gen_lowpart (SImode, operands[0]);
5328 res_hi = gen_highpart (SImode, operands[0]);
5329 if (can_create_pseudo_p ())
5330 {
5331 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
5332 op0_hi = gen_reg_rtx (SImode);
5333 }
5334 else
5335 {
5336 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
5337 op0_hi = res_hi;
5338 }
5339 if (<MODE>mode != SImode)
5340 emit_insn (gen_rtx_SET (op0_lo,
5341 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5342 emit_insn (gen_movsi (op0_hi, const0_rtx));
5343 if (res_lo != op0_lo)
5344 emit_move_insn (res_lo, op0_lo);
5345 if (res_hi != op0_hi)
5346 emit_move_insn (res_hi, op0_hi);
5347 DONE;
5348 }
5349 )
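;; (Rough sketch of what the expander above produces for an SImode source:
;;   mov rLO, rSRC
;;   mov rHI, #0
;; HImode/QImode sources are first zero-extended into the low word, via uxth,
;; uxtb or the shift-pair fallback; register names are illustrative only.)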
5350
5351 (define_expand "extend<mode>di2"
5352 [(set (match_operand:DI 0 "s_register_operand" "")
5353 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
5354 "TARGET_32BIT <qhs_sextenddi_cond>"
5355 {
5356 rtx res_lo, res_hi, op0_lo, op0_hi;
5357 res_lo = gen_lowpart (SImode, operands[0]);
5358 res_hi = gen_highpart (SImode, operands[0]);
5359 if (can_create_pseudo_p ())
5360 {
5361 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
5362 op0_hi = gen_reg_rtx (SImode);
5363 }
5364 else
5365 {
5366 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
5367 op0_hi = res_hi;
5368 }
5369 if (<MODE>mode != SImode)
5370 emit_insn (gen_rtx_SET (op0_lo,
5371 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5372 emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
5373 if (res_lo != op0_lo)
5374 emit_move_insn (res_lo, op0_lo);
5375 if (res_hi != op0_hi)
5376 emit_move_insn (res_hi, op0_hi);
5377 DONE;
5378 }
5379 )
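;; (Rough sketch for the sign-extending case: the high word is filled with
;; copies of the sign bit of the low word, i.e.
;;   mov rLO, rSRC
;;   asr rHI, rLO, #31
;; with register names illustrative only.)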
5380
5381 ;; Splits for all extensions to DImode
5382 (define_split
5383 [(set (match_operand:DI 0 "s_register_operand" "")
5384 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5385 "TARGET_32BIT"
5386 [(set (match_dup 0) (match_dup 1))]
5387 {
5388 rtx lo_part = gen_lowpart (SImode, operands[0]);
5389 machine_mode src_mode = GET_MODE (operands[1]);
5390
5391 if (src_mode == SImode)
5392 emit_move_insn (lo_part, operands[1]);
5393 else
5394 emit_insn (gen_rtx_SET (lo_part,
5395 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5396 operands[0] = gen_highpart (SImode, operands[0]);
5397 operands[1] = const0_rtx;
5398 })
5399
5400 (define_split
5401 [(set (match_operand:DI 0 "s_register_operand" "")
5402 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5403 "TARGET_32BIT"
5404 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
5405 {
5406 rtx lo_part = gen_lowpart (SImode, operands[0]);
5407 machine_mode src_mode = GET_MODE (operands[1]);
5408
5409 if (src_mode == SImode)
5410 emit_move_insn (lo_part, operands[1]);
5411 else
5412 emit_insn (gen_rtx_SET (lo_part,
5413 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5414 operands[1] = lo_part;
5415 operands[0] = gen_highpart (SImode, operands[0]);
5416 })
5417
5418 (define_expand "zero_extendhisi2"
5419 [(set (match_operand:SI 0 "s_register_operand")
5420 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5421 "TARGET_EITHER"
5422 {
5423 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
5424 {
5425 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
5426 DONE;
5427 }
5428 if (!arm_arch6 && !MEM_P (operands[1]))
5429 {
5430 rtx t = gen_lowpart (SImode, operands[1]);
5431 rtx tmp = gen_reg_rtx (SImode);
5432 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5433 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
5434 DONE;
5435 }
5436 })
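;; (Sketch of the !arm_arch6 fallback above: without uxth, the 16-bit zero
;; extension is done with a shift pair, roughly
;;   lsl rT, rSRC, #16
;;   lsr rD, rT, #16
;; while ARMv6+ uses a single uxth and pre-ARMv4 ARM-state memory sources go
;; through movhi_bytes.)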
5437
5438 (define_split
5439 [(set (match_operand:SI 0 "s_register_operand" "")
5440 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
5441 "!TARGET_THUMB2 && !arm_arch6"
5442 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5443 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
5444 {
5445 operands[2] = gen_lowpart (SImode, operands[1]);
5446 })
5447
5448 (define_insn "*arm_zero_extendhisi2"
5449 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5450 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5451 "TARGET_ARM && arm_arch4 && !arm_arch6"
5452 "@
5453 #
5454 ldrh%?\\t%0, %1"
5455 [(set_attr "type" "alu_shift_reg,load_byte")
5456 (set_attr "predicable" "yes")]
5457 )
5458
5459 (define_insn "*arm_zero_extendhisi2_v6"
5460 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5461 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5462 "TARGET_ARM && arm_arch6"
5463 "@
5464 uxth%?\\t%0, %1
5465 ldrh%?\\t%0, %1"
5466 [(set_attr "predicable" "yes")
5467 (set_attr "type" "extend,load_byte")]
5468 )
5469
5470 (define_insn "*arm_zero_extendhisi2addsi"
5471 [(set (match_operand:SI 0 "s_register_operand" "=r")
5472 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5473 (match_operand:SI 2 "s_register_operand" "r")))]
5474 "TARGET_INT_SIMD"
5475 "uxtah%?\\t%0, %2, %1"
5476 [(set_attr "type" "alu_shift_reg")
5477 (set_attr "predicable" "yes")]
5478 )
5479
5480 (define_expand "zero_extendqisi2"
5481 [(set (match_operand:SI 0 "s_register_operand")
5482 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
5483 "TARGET_EITHER"
5484 {
5485 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
5486 {
5487 emit_insn (gen_andsi3 (operands[0],
5488 gen_lowpart (SImode, operands[1]),
5489 GEN_INT (255)));
5490 DONE;
5491 }
5492 if (!arm_arch6 && !MEM_P (operands[1]))
5493 {
5494 rtx t = gen_lowpart (SImode, operands[1]);
5495 rtx tmp = gen_reg_rtx (SImode);
5496 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5497 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
5498 DONE;
5499 }
5500 })
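;; (Sketch of the fallbacks above: on ARM without uxtb the byte is masked
;; directly, roughly "and rD, rSRC, #255"; the remaining !arm_arch6 cases use
;; the equivalent shift pair "lsl rT, rSRC, #24" / "lsr rD, rT, #24".)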
5501
5502 (define_split
5503 [(set (match_operand:SI 0 "s_register_operand" "")
5504 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
5505 "!arm_arch6"
5506 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5507 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
5508 {
5509 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5510 if (TARGET_ARM)
5511 {
5512 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
5513 DONE;
5514 }
5515 })
5516
5517 (define_insn "*arm_zero_extendqisi2"
5518 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5519 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
5520 "TARGET_ARM && !arm_arch6"
5521 "@
5522 #
5523 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5524 [(set_attr "length" "8,4")
5525 (set_attr "type" "alu_shift_reg,load_byte")
5526 (set_attr "predicable" "yes")]
5527 )
5528
5529 (define_insn "*arm_zero_extendqisi2_v6"
5530 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5531 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
5532 "TARGET_ARM && arm_arch6"
5533 "@
5534 uxtb%?\\t%0, %1
5535 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5536 [(set_attr "type" "extend,load_byte")
5537 (set_attr "predicable" "yes")]
5538 )
5539
5540 (define_insn "*arm_zero_extendqisi2addsi"
5541 [(set (match_operand:SI 0 "s_register_operand" "=r")
5542 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5543 (match_operand:SI 2 "s_register_operand" "r")))]
5544 "TARGET_INT_SIMD"
5545 "uxtab%?\\t%0, %2, %1"
5546 [(set_attr "predicable" "yes")
5547 (set_attr "type" "alu_shift_reg")]
5548 )
5549
5550 (define_split
5551 [(set (match_operand:SI 0 "s_register_operand" "")
5552 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
5553 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5554 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
5555 [(set (match_dup 2) (match_dup 1))
5556 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5557 ""
5558 )
5559
5560 (define_split
5561 [(set (match_operand:SI 0 "s_register_operand" "")
5562 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
5563 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5564 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
5565 [(set (match_dup 2) (match_dup 1))
5566 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5567 ""
5568 )
5569
5570
5571 (define_split
5572 [(set (match_operand:SI 0 "s_register_operand" "")
5573 (IOR_XOR:SI (and:SI (ashift:SI
5574 (match_operand:SI 1 "s_register_operand" "")
5575 (match_operand:SI 2 "const_int_operand" ""))
5576 (match_operand:SI 3 "const_int_operand" ""))
5577 (zero_extend:SI
5578 (match_operator 5 "subreg_lowpart_operator"
5579 [(match_operand:SI 4 "s_register_operand" "")]))))]
5580 "TARGET_32BIT
5581 && (UINTVAL (operands[3])
5582 == (GET_MODE_MASK (GET_MODE (operands[5]))
5583 & (GET_MODE_MASK (GET_MODE (operands[5]))
5584 << (INTVAL (operands[2])))))"
5585 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
5586 (match_dup 4)))
5587 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
5588 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
5589 )
5590
5591 (define_insn "*compareqi_eq0"
5592 [(set (reg:CC_Z CC_REGNUM)
5593 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
5594 (const_int 0)))]
5595 "TARGET_32BIT"
5596 "tst%?\\t%0, #255"
5597 [(set_attr "conds" "set")
5598 (set_attr "predicable" "yes")
5599 (set_attr "type" "logic_imm")]
5600 )
5601
5602 (define_expand "extendhisi2"
5603 [(set (match_operand:SI 0 "s_register_operand")
5604 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5605 "TARGET_EITHER"
5606 {
5607 if (TARGET_THUMB1)
5608 {
5609 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
5610 DONE;
5611 }
5612 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
5613 {
5614 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
5615 DONE;
5616 }
5617
5618 if (!arm_arch6 && !MEM_P (operands[1]))
5619 {
5620 rtx t = gen_lowpart (SImode, operands[1]);
5621 rtx tmp = gen_reg_rtx (SImode);
5622 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5623 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
5624 DONE;
5625 }
5626 })
5627
5628 (define_split
5629 [(parallel
5630 [(set (match_operand:SI 0 "register_operand" "")
5631 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
5632 (clobber (match_scratch:SI 2 ""))])]
5633 "!arm_arch6"
5634 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5635 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5636 {
5637 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5638 })
5639
5640 ;; This pattern will only be used when ldrsh is not available
5641 (define_expand "extendhisi2_mem"
5642 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5643 (set (match_dup 3)
5644 (zero_extend:SI (match_dup 7)))
5645 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
5646 (set (match_operand:SI 0 "" "")
5647 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
5648 "TARGET_ARM"
5649 "
5650 {
5651 rtx mem1, mem2;
5652 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5653
5654 mem1 = change_address (operands[1], QImode, addr);
5655 mem2 = change_address (operands[1], QImode,
5656 plus_constant (Pmode, addr, 1));
5657 operands[0] = gen_lowpart (SImode, operands[0]);
5658 operands[1] = mem1;
5659 operands[2] = gen_reg_rtx (SImode);
5660 operands[3] = gen_reg_rtx (SImode);
5661 operands[6] = gen_reg_rtx (SImode);
5662 operands[7] = mem2;
5663
5664 if (BYTES_BIG_ENDIAN)
5665 {
5666 operands[4] = operands[2];
5667 operands[5] = operands[3];
5668 }
5669 else
5670 {
5671 operands[4] = operands[3];
5672 operands[5] = operands[2];
5673 }
5674 }"
5675 )
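;; (Rough little-endian sketch of the sequence built above, with register
;; names purely illustrative:
;;   ldrb rA, [addr]        @ low byte, zero-extended
;;   ldrb rB, [addr, #1]    @ high byte, zero-extended
;;   mov  rC, rB, lsl #24
;;   orr  rD, rA, rC, asr #16
;; leaving rD holding the sign-extended halfword.)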
5676
5677 (define_split
5678 [(set (match_operand:SI 0 "register_operand" "")
5679 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
5680 "!arm_arch6"
5681 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5682 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5683 {
5684 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5685 })
5686
5687 (define_insn "*arm_extendhisi2"
5688 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5689 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5690 "TARGET_ARM && arm_arch4 && !arm_arch6"
5691 "@
5692 #
5693 ldrsh%?\\t%0, %1"
5694 [(set_attr "length" "8,4")
5695 (set_attr "type" "alu_shift_reg,load_byte")
5696 (set_attr "predicable" "yes")]
5697 )
5698
5699 ;; ??? Check Thumb-2 pool range
5700 (define_insn "*arm_extendhisi2_v6"
5701 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5702 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5703 "TARGET_32BIT && arm_arch6"
5704 "@
5705 sxth%?\\t%0, %1
5706 ldrsh%?\\t%0, %1"
5707 [(set_attr "type" "extend,load_byte")
5708 (set_attr "predicable" "yes")]
5709 )
5710
5711 (define_insn "*arm_extendhisi2addsi"
5712 [(set (match_operand:SI 0 "s_register_operand" "=r")
5713 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5714 (match_operand:SI 2 "s_register_operand" "r")))]
5715 "TARGET_INT_SIMD"
5716 "sxtah%?\\t%0, %2, %1"
5717 [(set_attr "type" "alu_shift_reg")]
5718 )
5719
5720 (define_expand "extendqihi2"
5721 [(set (match_dup 2)
5722 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
5723 (const_int 24)))
5724 (set (match_operand:HI 0 "s_register_operand")
5725 (ashiftrt:SI (match_dup 2)
5726 (const_int 24)))]
5727 "TARGET_ARM"
5728 "
5729 {
5730 if (arm_arch4 && MEM_P (operands[1]))
5731 {
5732 emit_insn (gen_rtx_SET (operands[0],
5733 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
5734 DONE;
5735 }
5736 if (!s_register_operand (operands[1], QImode))
5737 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5738 operands[0] = gen_lowpart (SImode, operands[0]);
5739 operands[1] = gen_lowpart (SImode, operands[1]);
5740 operands[2] = gen_reg_rtx (SImode);
5741 }"
5742 )
5743
5744 (define_insn "*arm_extendqihi_insn"
5745 [(set (match_operand:HI 0 "s_register_operand" "=r")
5746 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
5747 "TARGET_ARM && arm_arch4"
5748 "ldrsb%?\\t%0, %1"
5749 [(set_attr "type" "load_byte")
5750 (set_attr "predicable" "yes")]
5751 )
5752
5753 (define_expand "extendqisi2"
5754 [(set (match_operand:SI 0 "s_register_operand")
5755 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
5756 "TARGET_EITHER"
5757 {
5758 if (!arm_arch4 && MEM_P (operands[1]))
5759 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5760
5761 if (!arm_arch6 && !MEM_P (operands[1]))
5762 {
5763 rtx t = gen_lowpart (SImode, operands[1]);
5764 rtx tmp = gen_reg_rtx (SImode);
5765 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5766 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
5767 DONE;
5768 }
5769 })
5770
5771 (define_split
5772 [(set (match_operand:SI 0 "register_operand" "")
5773 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
5774 "!arm_arch6"
5775 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5776 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
5777 {
5778 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5779 })
5780
5781 (define_insn "*arm_extendqisi"
5782 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5783 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5784 "TARGET_ARM && arm_arch4 && !arm_arch6"
5785 "@
5786 #
5787 ldrsb%?\\t%0, %1"
5788 [(set_attr "length" "8,4")
5789 (set_attr "type" "alu_shift_reg,load_byte")
5790 (set_attr "predicable" "yes")]
5791 )
5792
5793 (define_insn "*arm_extendqisi_v6"
5794 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5795 (sign_extend:SI
5796 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5797 "TARGET_ARM && arm_arch6"
5798 "@
5799 sxtb%?\\t%0, %1
5800 ldrsb%?\\t%0, %1"
5801 [(set_attr "type" "extend,load_byte")
5802 (set_attr "predicable" "yes")]
5803 )
5804
5805 (define_insn "*arm_extendqisi2addsi"
5806 [(set (match_operand:SI 0 "s_register_operand" "=r")
5807 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5808 (match_operand:SI 2 "s_register_operand" "r")))]
5809 "TARGET_INT_SIMD"
5810 "sxtab%?\\t%0, %2, %1"
5811 [(set_attr "type" "alu_shift_reg")
5812 (set_attr "predicable" "yes")]
5813 )
5814
5815 (define_insn "arm_<sup>xtb16"
5816 [(set (match_operand:SI 0 "s_register_operand" "=r")
5817 (unspec:SI
5818 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
5819 "TARGET_INT_SIMD"
5820 "<sup>xtb16%?\\t%0, %1"
5821 [(set_attr "predicable" "yes")
5822 (set_attr "type" "alu_dsp_reg")])
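;; (uxtb16/sxtb16 extend the two even-numbered bytes of the source, bits [7:0]
;; and [23:16], into the two halfword lanes of the result; the ACLE
;; __uxtb16/__sxtb16 intrinsics are expected to map onto this pattern.)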
5823
5824 (define_insn "arm_<simd32_op>"
5825 [(set (match_operand:SI 0 "s_register_operand" "=r")
5826 (unspec:SI
5827 [(match_operand:SI 1 "s_register_operand" "r")
5828 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
5829 "TARGET_INT_SIMD"
5830 "<simd32_op>%?\\t%0, %1, %2"
5831 [(set_attr "predicable" "yes")
5832 (set_attr "type" "alu_dsp_reg")])
5833
5834 (define_insn "arm_usada8"
5835 [(set (match_operand:SI 0 "s_register_operand" "=r")
5836 (unspec:SI
5837 [(match_operand:SI 1 "s_register_operand" "r")
5838 (match_operand:SI 2 "s_register_operand" "r")
5839 (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
5840 "TARGET_INT_SIMD"
5841 "usada8%?\\t%0, %1, %2, %3"
5842 [(set_attr "predicable" "yes")
5843 (set_attr "type" "alu_dsp_reg")])
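;; (usada8 computes the sum of absolute differences of the four byte lanes of
;; operands 1 and 2 and accumulates it into operand 3; the ACLE __usada8
;; intrinsic is expected to expand to this pattern.)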
5844
5845 (define_insn "arm_<simd32_op>"
5846 [(set (match_operand:DI 0 "s_register_operand" "=r")
5847 (unspec:DI
5848 [(match_operand:SI 1 "s_register_operand" "r")
5849 (match_operand:SI 2 "s_register_operand" "r")
5850 (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
5851 "TARGET_INT_SIMD"
5852 "<simd32_op>%?\\t%Q0, %R0, %1, %2"
5853 [(set_attr "predicable" "yes")
5854 (set_attr "type" "smlald")])
5855
5856 (define_insn "arm_<simd32_op>"
5857 [(set (match_operand:SI 0 "s_register_operand" "=r")
5858 (unspec:SI
5859 [(match_operand:SI 1 "s_register_operand" "r")
5860 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_GE))
5861 (set (reg:CC APSRGE_REGNUM)
5862 (unspec:CC [(reg:CC APSRGE_REGNUM)] UNSPEC_GE_SET))]
5863 "TARGET_INT_SIMD"
5864 "<simd32_op>%?\\t%0, %1, %2"
5865 [(set_attr "predicable" "yes")
5866 (set_attr "type" "alu_sreg")])
5867
5868 (define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
5869 [(set (match_operand:SI 0 "s_register_operand" "=r")
5870 (unspec:SI
5871 [(match_operand:SI 1 "s_register_operand" "r")
5872 (match_operand:SI 2 "s_register_operand" "r")
5873 (match_operand:SI 3 "s_register_operand" "r")] SIMD32_TERNOP_Q))]
5874 "TARGET_INT_SIMD && <add_clobber_q_pred>"
5875 "<simd32_op>%?\\t%0, %1, %2, %3"
5876 [(set_attr "predicable" "yes")
5877 (set_attr "type" "alu_sreg")])
5878
5879 (define_expand "arm_<simd32_op>"
5880 [(set (match_operand:SI 0 "s_register_operand")
5881 (unspec:SI
5882 [(match_operand:SI 1 "s_register_operand")
5883 (match_operand:SI 2 "s_register_operand")
5884 (match_operand:SI 3 "s_register_operand")] SIMD32_TERNOP_Q))]
5885 "TARGET_INT_SIMD"
5886 {
5887 if (ARM_Q_BIT_READ)
5888 emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
5889 operands[2], operands[3]));
5890 else
5891 emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
5892 operands[2], operands[3]));
5893 DONE;
5894 }
5895 )
5896
5897 (define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
5898 [(set (match_operand:SI 0 "s_register_operand" "=r")
5899 (unspec:SI
5900 [(match_operand:SI 1 "s_register_operand" "r")
5901 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_BINOP_Q))]
5902 "TARGET_INT_SIMD && <add_clobber_q_pred>"
5903 "<simd32_op>%?\\t%0, %1, %2"
5904 [(set_attr "predicable" "yes")
5905 (set_attr "type" "alu_sreg")])
5906
5907 (define_expand "arm_<simd32_op>"
5908 [(set (match_operand:SI 0 "s_register_operand")
5909 (unspec:SI
5910 [(match_operand:SI 1 "s_register_operand")
5911 (match_operand:SI 2 "s_register_operand")] SIMD32_BINOP_Q))]
5912 "TARGET_INT_SIMD"
5913 {
5914 if (ARM_Q_BIT_READ)
5915 emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
5916 operands[2]));
5917 else
5918 emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
5919 operands[2]));
5920 DONE;
5921 }
5922 )
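;; (In both expanders above, when a later read of the Q flag is possible
;; (ARM_Q_BIT_READ) the *_setq variant of the insn is chosen, which is
;; expected to also model the update of the Q-bit pseudo register
;; (APSRQ_REGNUM); otherwise the plain variant is emitted.)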
5923
5924 (define_insn "arm_sel"
5925 [(set (match_operand:SI 0 "s_register_operand" "=r")
5926 (unspec:SI
5927 [(match_operand:SI 1 "s_register_operand" "r")
5928 (match_operand:SI 2 "s_register_operand" "r")
5929 (reg:CC APSRGE_REGNUM)] UNSPEC_SEL))]
5930 "TARGET_INT_SIMD"
5931 "sel%?\\t%0, %1, %2"
5932 [(set_attr "predicable" "yes")
5933 (set_attr "type" "alu_sreg")])
5934
5935 (define_expand "extendsfdf2"
5936 [(set (match_operand:DF 0 "s_register_operand")
5937 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
5938 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5939 ""
5940 )
5941
5942 ;; HFmode -> DFmode conversions where we don't have an instruction for it
5943 ;; must go through SFmode.
5944 ;;
5945 ;; This is always safe for an extend.
5946
5947 (define_expand "extendhfdf2"
5948 [(set (match_operand:DF 0 "s_register_operand")
5949 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
5950 "TARGET_EITHER"
5951 {
5952 /* We don't have a direct instruction for this, so go via SFmode. */
5953 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
5954 {
5955 rtx op1;
5956 op1 = convert_to_mode (SFmode, operands[1], 0);
5957 op1 = convert_to_mode (DFmode, op1, 0);
5958 emit_insn (gen_movdf (operands[0], op1));
5959 DONE;
5960 }
5961 /* Otherwise, we're done producing RTL and will pick up the correct
5962 pattern to do this with one rounding step in a single instruction. */
5963 }
5964 )
5965 \f
5966 ;; Move insns (including loads and stores)
5967
5968 ;; XXX Just some ideas about movti.
5969 ;; I don't think these are a good idea on the ARM; there just aren't enough
5970 ;; registers
5971 ;;(define_expand "loadti"
5972 ;; [(set (match_operand:TI 0 "s_register_operand")
5973 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
5974 ;; "" "")
5975
5976 ;;(define_expand "storeti"
5977 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
5978 ;; (match_operand:TI 1 "s_register_operand"))]
5979 ;; "" "")
5980
5981 ;;(define_expand "movti"
5982 ;; [(set (match_operand:TI 0 "general_operand")
5983 ;; (match_operand:TI 1 "general_operand"))]
5984 ;; ""
5985 ;; "
5986 ;;{
5987 ;; rtx insn;
5988 ;;
5989 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
5990 ;; operands[1] = copy_to_reg (operands[1]);
5991 ;; if (MEM_P (operands[0]))
5992 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
5993 ;; else if (MEM_P (operands[1]))
5994 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
5995 ;; else
5996 ;; FAIL;
5997 ;;
5998 ;; emit_insn (insn);
5999 ;; DONE;
6000 ;;}")
6001
6002 ;; Recognize garbage generated above.
6003
6004 ;;(define_insn ""
6005 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
6006 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
6007 ;; ""
6008 ;; "*
6009 ;; {
6010 ;; register mem = (which_alternative < 3);
6011 ;; register const char *template;
6012 ;;
6013 ;; operands[mem] = XEXP (operands[mem], 0);
6014 ;; switch (which_alternative)
6015 ;; {
6016 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
6017 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
6018 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
6019 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
6020 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
6021 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
6022 ;; }
6023 ;; output_asm_insn (template, operands);
6024 ;; return \"\";
6025 ;; }")
6026
6027 (define_expand "movdi"
6028 [(set (match_operand:DI 0 "general_operand")
6029 (match_operand:DI 1 "general_operand"))]
6030 "TARGET_EITHER"
6031 "
6032 gcc_checking_assert (aligned_operand (operands[0], DImode));
6033 gcc_checking_assert (aligned_operand (operands[1], DImode));
6034 if (can_create_pseudo_p ())
6035 {
6036 if (!REG_P (operands[0]))
6037 operands[1] = force_reg (DImode, operands[1]);
6038 }
6039 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
6040 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
6041 {
6042 /* Avoid LDRDs into an odd-numbered register pair in ARM state
6043 when expanding function calls. */
6044 gcc_assert (can_create_pseudo_p ());
6045 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
6046 {
6047 /* Perform load into legal reg pair first, then move. */
6048 rtx reg = gen_reg_rtx (DImode);
6049 emit_insn (gen_movdi (reg, operands[1]));
6050 operands[1] = reg;
6051 }
6052 emit_move_insn (gen_lowpart (SImode, operands[0]),
6053 gen_lowpart (SImode, operands[1]));
6054 emit_move_insn (gen_highpart (SImode, operands[0]),
6055 gen_highpart (SImode, operands[1]));
6056 DONE;
6057 }
6058 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
6059 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
6060 {
6061 /* Avoid STRDs from an odd-numbered register pair in ARM state
6062 when expanding function prologue. */
6063 gcc_assert (can_create_pseudo_p ());
6064 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
6065 ? gen_reg_rtx (DImode)
6066 : operands[0];
6067 emit_move_insn (gen_lowpart (SImode, split_dest),
6068 gen_lowpart (SImode, operands[1]));
6069 emit_move_insn (gen_highpart (SImode, split_dest),
6070 gen_highpart (SImode, operands[1]));
6071 if (split_dest != operands[0])
6072 emit_insn (gen_movdi (operands[0], split_dest));
6073 DONE;
6074 }
6075 "
6076 )
6077
6078 (define_insn "*arm_movdi"
6079 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
6080 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
6081 "TARGET_32BIT
6082 && !(TARGET_HARD_FLOAT)
6083 && !TARGET_IWMMXT
6084 && ( register_operand (operands[0], DImode)
6085 || register_operand (operands[1], DImode))"
6086 "*
6087 switch (which_alternative)
6088 {
6089 case 0:
6090 case 1:
6091 case 2:
6092 return \"#\";
6093 case 3:
6094 /* Cannot load it directly, split to load it via MOV / MOVT. */
6095 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6096 return \"#\";
6097 /* Fall through. */
6098 default:
6099 return output_move_double (operands, true, NULL);
6100 }
6101 "
6102 [(set_attr "length" "8,12,16,8,8")
6103 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6104 (set_attr "arm_pool_range" "*,*,*,1020,*")
6105 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6106 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
6107 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6108 )
6109
6110 (define_split
6111 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6112 (match_operand:ANY64 1 "immediate_operand" ""))]
6113 "TARGET_32BIT
6114 && reload_completed
6115 && (arm_disable_literal_pool
6116 || (arm_const_double_inline_cost (operands[1])
6117 <= arm_max_const_double_inline_cost ()))"
6118 [(const_int 0)]
6119 "
6120 arm_split_constant (SET, SImode, curr_insn,
6121 INTVAL (gen_lowpart (SImode, operands[1])),
6122 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
6123 arm_split_constant (SET, SImode, curr_insn,
6124 INTVAL (gen_highpart_mode (SImode,
6125 GET_MODE (operands[0]),
6126 operands[1])),
6127 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
6128 DONE;
6129 "
6130 )
6131
6132 ; If optimizing for size, or if we have load delay slots, then
6133 ; we want to split the constant into two separate operations.
6134 ; In both cases this may split a trivial part into a single data op
6135 ; leaving a single complex constant to load. We can also get longer
6136 ; offsets in an LDR, which gives us a better chance of sharing
6137 ; constant-pool entries. Finally, we can normally do a better job of
6138 ; scheduling LDR instructions than we can with LDM.
6139 ; This pattern will only match if the one above did not.
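;; (Illustrative example: a 64-bit constant such as 0x0000000400000003 can be
;; materialised as two independent SImode constants, roughly
;;   mov rLO, #3
;;   mov rHI, #4
;; rather than occupying an 8-byte constant-pool entry.)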
6140 (define_split
6141 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6142 (match_operand:ANY64 1 "const_double_operand" ""))]
6143 "TARGET_ARM && reload_completed
6144 && arm_const_double_by_parts (operands[1])"
6145 [(set (match_dup 0) (match_dup 1))
6146 (set (match_dup 2) (match_dup 3))]
6147 "
6148 operands[2] = gen_highpart (SImode, operands[0]);
6149 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
6150 operands[1]);
6151 operands[0] = gen_lowpart (SImode, operands[0]);
6152 operands[1] = gen_lowpart (SImode, operands[1]);
6153 "
6154 )
6155
6156 (define_split
6157 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6158 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
6159 "TARGET_EITHER && reload_completed"
6160 [(set (match_dup 0) (match_dup 1))
6161 (set (match_dup 2) (match_dup 3))]
6162 "
6163 operands[2] = gen_highpart (SImode, operands[0]);
6164 operands[3] = gen_highpart (SImode, operands[1]);
6165 operands[0] = gen_lowpart (SImode, operands[0]);
6166 operands[1] = gen_lowpart (SImode, operands[1]);
6167
6168 /* Handle a partial overlap. */
6169 if (rtx_equal_p (operands[0], operands[3]))
6170 {
6171 rtx tmp0 = operands[0];
6172 rtx tmp1 = operands[1];
6173
6174 operands[0] = operands[2];
6175 operands[1] = operands[3];
6176 operands[2] = tmp0;
6177 operands[3] = tmp1;
6178 }
6179 "
6180 )
6181
6182 ;; We can't actually do base+index doubleword loads if the index and
6183 ;; destination overlap. Split here so that we at least have a chance to
6184 ;; schedule.
6185 (define_split
6186 [(set (match_operand:DI 0 "s_register_operand" "")
6187 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
6188 (match_operand:SI 2 "s_register_operand" ""))))]
6189 "TARGET_LDRD
6190 && reg_overlap_mentioned_p (operands[0], operands[1])
6191 && reg_overlap_mentioned_p (operands[0], operands[2])"
6192 [(set (match_dup 4)
6193 (plus:SI (match_dup 1)
6194 (match_dup 2)))
6195 (set (match_dup 0)
6196 (mem:DI (match_dup 4)))]
6197 "
6198 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
6199 "
6200 )
6201
6202 (define_expand "movsi"
6203 [(set (match_operand:SI 0 "general_operand")
6204 (match_operand:SI 1 "general_operand"))]
6205 "TARGET_EITHER"
6206 "
6207 {
6208 rtx base, offset, tmp;
6209
6210 gcc_checking_assert (aligned_operand (operands[0], SImode));
6211 gcc_checking_assert (aligned_operand (operands[1], SImode));
6212 if (TARGET_32BIT || TARGET_HAVE_MOVT)
6213 {
6214 /* Everything except mem = const or mem = mem can be done easily. */
6215 if (MEM_P (operands[0]))
6216 operands[1] = force_reg (SImode, operands[1]);
6217 if (arm_general_register_operand (operands[0], SImode)
6218 && CONST_INT_P (operands[1])
6219 && !(const_ok_for_arm (INTVAL (operands[1]))
6220 || const_ok_for_arm (~INTVAL (operands[1]))))
6221 {
6222 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
6223 {
6224 emit_insn (gen_rtx_SET (operands[0], operands[1]));
6225 DONE;
6226 }
6227 else
6228 {
6229 arm_split_constant (SET, SImode, NULL_RTX,
6230 INTVAL (operands[1]), operands[0], NULL_RTX,
6231 optimize && can_create_pseudo_p ());
6232 DONE;
6233 }
6234 }
6235 }
6236 else /* Target doesn't have MOVT... */
6237 {
6238 if (can_create_pseudo_p ())
6239 {
6240 if (!REG_P (operands[0]))
6241 operands[1] = force_reg (SImode, operands[1]);
6242 }
6243 }
6244
6245 split_const (operands[1], &base, &offset);
6246 if (INTVAL (offset) != 0
6247 && targetm.cannot_force_const_mem (SImode, operands[1]))
6248 {
6249 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6250 emit_move_insn (tmp, base);
6251 emit_insn (gen_addsi3 (operands[0], tmp, offset));
6252 DONE;
6253 }
6254
6255 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
6256
6257 /* Recognize the case where operand[1] is a reference to thread-local
6258 data and load its address to a register. Offsets have been split off
6259 already. */
6260 if (arm_tls_referenced_p (operands[1]))
6261 operands[1] = legitimize_tls_address (operands[1], tmp);
6262 else if (flag_pic
6263 && (CONSTANT_P (operands[1])
6264 || symbol_mentioned_p (operands[1])
6265 || label_mentioned_p (operands[1])))
6266 operands[1] =
6267 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
6268 }
6269 "
6270 )
6271
6272 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
6273 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
6274 ;; so this does not matter.
6275 (define_insn "*arm_movt"
6276 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
6277 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
6278 (match_operand:SI 2 "general_operand" "i,i")))]
6279 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
6280 "@
6281 movt%?\t%0, #:upper16:%c2
6282 movt\t%0, #:upper16:%c2"
6283 [(set_attr "arch" "32,v8mb")
6284 (set_attr "predicable" "yes")
6285 (set_attr "length" "4")
6286 (set_attr "type" "alu_sreg")]
6287 )
6288
6289 (define_insn "*arm_movsi_insn"
6290 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
6291 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
6292 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
6293 && ( register_operand (operands[0], SImode)
6294 || register_operand (operands[1], SImode))"
6295 "@
6296 mov%?\\t%0, %1
6297 mov%?\\t%0, %1
6298 mvn%?\\t%0, #%B1
6299 movw%?\\t%0, %1
6300 ldr%?\\t%0, %1
6301 str%?\\t%1, %0"
6302 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
6303 (set_attr "predicable" "yes")
6304 (set_attr "arch" "*,*,*,v6t2,*,*")
6305 (set_attr "pool_range" "*,*,*,*,4096,*")
6306 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
6307 )
6308
6309 (define_split
6310 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6311 (match_operand:SI 1 "const_int_operand" ""))]
6312 "(TARGET_32BIT || TARGET_HAVE_MOVT)
6313 && (!(const_ok_for_arm (INTVAL (operands[1]))
6314 || const_ok_for_arm (~INTVAL (operands[1]))))"
6315 [(clobber (const_int 0))]
6316 "
6317 arm_split_constant (SET, SImode, NULL_RTX,
6318 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
6319 DONE;
6320 "
6321 )
6322
6323 ;; The normal way to do (symbol + offset) requires at least three instructions
6324 ;; (depending on how big the offset is), as below:
6325 ;; movw r0, #:lower16:g
6326 ;; movt r0, #:upper16:g
6327 ;; adds r0, #4
6328 ;;
6329 ;; A better way would be:
6330 ;; movw r0, #:lower16:g+4
6331 ;; movt r0, #:upper16:g+4
6332 ;;
6333 ;; The limitation of this approach is that the offset must fit in a 16-bit
6334 ;; signed value, because the current assembler only supports REL-type
6335 ;; relocations for this case. If the more powerful RELA type is supported in
6336 ;; the future, this pattern should be updated to use the better sequence.
6337 (define_split
6338 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6339 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
6340 (match_operand:SI 2 "const_int_operand" ""))))]
6341 "TARGET_THUMB
6342 && TARGET_HAVE_MOVT
6343 && arm_disable_literal_pool
6344 && reload_completed
6345 && GET_CODE (operands[1]) == SYMBOL_REF"
6346 [(clobber (const_int 0))]
6347 "
6348 int offset = INTVAL (operands[2]);
6349
6350 if (offset < -0x8000 || offset > 0x7fff)
6351 {
6352 arm_emit_movpair (operands[0], operands[1]);
6353 emit_insn (gen_rtx_SET (operands[0],
6354 gen_rtx_PLUS (SImode, operands[0], operands[2])));
6355 }
6356 else
6357 {
6358 rtx op = gen_rtx_CONST (SImode,
6359 gen_rtx_PLUS (SImode, operands[1], operands[2]));
6360 arm_emit_movpair (operands[0], op);
6361 }
6362 "
6363 )
6364
6365 ;; Split symbol_refs at a later stage (after cprop), instead of generating
6366 ;; the movt/movw pair directly at expand time. Otherwise the corresponding
6367 ;; high_sum and lo_sum would be merged back into a memory load by cprop.
6368 ;; However, when the default is to prefer a movt/movw pair over a load from
6369 ;; the constant pool, performance is better.
6370 (define_split
6371 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6372 (match_operand:SI 1 "general_operand" ""))]
6373 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
6374 && !target_word_relocations
6375 && !arm_tls_referenced_p (operands[1])"
6376 [(clobber (const_int 0))]
6377 {
6378 arm_emit_movpair (operands[0], operands[1]);
6379 DONE;
6380 })
6381
6382 ;; When generating pic, we need to load the symbol offset into a register.
6383 ;; So that the optimizer does not confuse this with a normal symbol load
6384 ;; we use an unspec. The offset will be loaded from a constant pool entry,
6385 ;; since that is the only type of relocation we can use.
6386
6387 ;; Wrap calculation of the whole PIC address in a single pattern for the
6388 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
6389 ;; a PIC address involves two loads from memory, so we want to CSE it
6390 ;; as often as possible.
6391 ;; This pattern will be split into one of the pic_load_addr_* patterns
6392 ;; and a move after GCSE optimizations.
6393 ;;
6394 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
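;;
;; (Rough sketch of the code this ultimately expands to for a global 'g',
;; with register names purely illustrative:
;;   ldr r3, .LCP          @ constant-pool entry holding the GOT offset of g
;;   ldr r0, [rPIC, r3]    @ load g's address from the GOT
;; )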
6395 (define_expand "calculate_pic_address"
6396 [(set (match_operand:SI 0 "register_operand")
6397 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
6398 (unspec:SI [(match_operand:SI 2 "" "")]
6399 UNSPEC_PIC_SYM))))]
6400 "flag_pic"
6401 )
6402
6403 ;; Split calculate_pic_address into pic_load_addr_* and a move.
6404 (define_split
6405 [(set (match_operand:SI 0 "register_operand" "")
6406 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
6407 (unspec:SI [(match_operand:SI 2 "" "")]
6408 UNSPEC_PIC_SYM))))]
6409 "flag_pic"
6410 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
6411 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
6412 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
6413 )
6414
6415 ;; operand1 is the memory address to go into
6416 ;; pic_load_addr_32bit.
6417 ;; operand2 is the PIC label to be emitted
6418 ;; from pic_add_dot_plus_eight.
6419 ;; We do this to allow hoisting of the entire insn.
6420 (define_insn_and_split "pic_load_addr_unified"
6421 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
6422 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
6423 (match_operand:SI 2 "" "")]
6424 UNSPEC_PIC_UNIFIED))]
6425 "flag_pic"
6426 "#"
6427 "&& reload_completed"
6428 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
6429 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
6430 (match_dup 2)] UNSPEC_PIC_BASE))]
6431 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
6432 [(set_attr "type" "load_4,load_4,load_4")
6433 (set_attr "pool_range" "4096,4094,1022")
6434 (set_attr "neg_pool_range" "4084,0,0")
6435 (set_attr "arch" "a,t2,t1")
6436 (set_attr "length" "8,6,4")]
6437 )
6438
6439 ;; The rather odd constraints on the following are to force reload to leave
6440 ;; the insn alone, and to force the minipool generation pass to then move
6441 ;; the GOT symbol to memory.
6442
6443 (define_insn "pic_load_addr_32bit"
6444 [(set (match_operand:SI 0 "s_register_operand" "=r")
6445 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6446 "TARGET_32BIT && flag_pic"
6447 "ldr%?\\t%0, %1"
6448 [(set_attr "type" "load_4")
6449 (set (attr "pool_range")
6450 (if_then_else (eq_attr "is_thumb" "no")
6451 (const_int 4096)
6452 (const_int 4094)))
6453 (set (attr "neg_pool_range")
6454 (if_then_else (eq_attr "is_thumb" "no")
6455 (const_int 4084)
6456 (const_int 0)))]
6457 )
6458
6459 (define_insn "pic_load_addr_thumb1"
6460 [(set (match_operand:SI 0 "s_register_operand" "=l")
6461 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6462 "TARGET_THUMB1 && flag_pic"
6463 "ldr\\t%0, %1"
6464 [(set_attr "type" "load_4")
6465 (set (attr "pool_range") (const_int 1018))]
6466 )
6467
6468 (define_insn "pic_add_dot_plus_four"
6469 [(set (match_operand:SI 0 "register_operand" "=r")
6470 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
6471 (const_int 4)
6472 (match_operand 2 "" "")]
6473 UNSPEC_PIC_BASE))]
6474 "TARGET_THUMB"
6475 "*
6476 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6477 INTVAL (operands[2]));
6478 return \"add\\t%0, %|pc\";
6479 "
6480 [(set_attr "length" "2")
6481 (set_attr "type" "alu_sreg")]
6482 )
6483
6484 (define_insn "pic_add_dot_plus_eight"
6485 [(set (match_operand:SI 0 "register_operand" "=r")
6486 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6487 (const_int 8)
6488 (match_operand 2 "" "")]
6489 UNSPEC_PIC_BASE))]
6490 "TARGET_ARM"
6491 "*
6492 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6493 INTVAL (operands[2]));
6494 return \"add%?\\t%0, %|pc, %1\";
6495 "
6496 [(set_attr "predicable" "yes")
6497 (set_attr "type" "alu_sreg")]
6498 )
6499
6500 (define_insn "tls_load_dot_plus_eight"
6501 [(set (match_operand:SI 0 "register_operand" "=r")
6502 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6503 (const_int 8)
6504 (match_operand 2 "" "")]
6505 UNSPEC_PIC_BASE)))]
6506 "TARGET_ARM"
6507 "*
6508 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6509 INTVAL (operands[2]));
6510 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
6511 "
6512 [(set_attr "predicable" "yes")
6513 (set_attr "type" "load_4")]
6514 )
6515
6516 ;; PIC references to local variables can generate pic_add_dot_plus_eight
6517 ;; followed by a load. These sequences can be crunched down to
6518 ;; tls_load_dot_plus_eight by a peephole.
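;; For example (illustrative), the pair
;;   add rT, pc, rX
;;   ldr rD, [rT]
;; becomes the single
;;   ldr rD, [pc, rX]
;; once rT is known to be dead afterwards.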
6519
6520 (define_peephole2
6521 [(set (match_operand:SI 0 "register_operand" "")
6522 (unspec:SI [(match_operand:SI 3 "register_operand" "")
6523 (const_int 8)
6524 (match_operand 1 "" "")]
6525 UNSPEC_PIC_BASE))
6526 (set (match_operand:SI 2 "arm_general_register_operand" "")
6527 (mem:SI (match_dup 0)))]
6528 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
6529 [(set (match_dup 2)
6530 (mem:SI (unspec:SI [(match_dup 3)
6531 (const_int 8)
6532 (match_dup 1)]
6533 UNSPEC_PIC_BASE)))]
6534 ""
6535 )
6536
6537 (define_insn "pic_offset_arm"
6538 [(set (match_operand:SI 0 "register_operand" "=r")
6539 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
6540 (unspec:SI [(match_operand:SI 2 "" "X")]
6541 UNSPEC_PIC_OFFSET))))]
6542 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
6543 "ldr%?\\t%0, [%1,%2]"
6544 [(set_attr "type" "load_4")]
6545 )
6546
6547 (define_expand "builtin_setjmp_receiver"
6548 [(label_ref (match_operand 0 "" ""))]
6549 "flag_pic"
6550 "
6551 {
6552 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
6553 register. */
6554 if (arm_pic_register != INVALID_REGNUM)
6555 arm_load_pic_register (1UL << 3, NULL_RTX);
6556 DONE;
6557 }")
6558
6559 ;; If copying one reg to another we can set the condition codes according to
6560 ;; its value. Such a move is common after a return from subroutine and the
6561 ;; result is being tested against zero.
6562
6563 (define_insn "*movsi_compare0"
6564 [(set (reg:CC CC_REGNUM)
6565 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
6566 (const_int 0)))
6567 (set (match_operand:SI 0 "s_register_operand" "=r,r")
6568 (match_dup 1))]
6569 "TARGET_32BIT"
6570 "@
6571 cmp%?\\t%0, #0
6572 subs%?\\t%0, %1, #0"
6573 [(set_attr "conds" "set")
6574 (set_attr "type" "alus_imm,alus_imm")]
6575 )
6576
6577 ;; Subroutine to store a half word from a register into memory.
6578 ;; Operand 0 is the source register (HImode)
6579 ;; Operand 1 is the destination address in a register (SImode)
6580
6581 ;; In both this routine and the next, we must be careful not to spill
6582 ;; a memory address of reg+large_const into a separate PLUS insn, since this
6583 ;; can generate unrecognizable rtl.
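;;
;; (Little-endian sketch of what storehi emits, with register names
;; illustrative:
;;   strb rSRC, [addr]          @ low byte
;;   mov  rT, rSRC, asr #8
;;   strb rT, [addr, #1]        @ high byte
;; )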
6584
6585 (define_expand "storehi"
6586 [;; store the low byte
6587 (set (match_operand 1 "" "") (match_dup 3))
6588 ;; extract the high byte
6589 (set (match_dup 2)
6590 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6591 ;; store the high byte
6592 (set (match_dup 4) (match_dup 5))]
6593 "TARGET_ARM"
6594 "
6595 {
6596 rtx op1 = operands[1];
6597 rtx addr = XEXP (op1, 0);
6598 enum rtx_code code = GET_CODE (addr);
6599
6600 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6601 || code == MINUS)
6602 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
6603
6604 operands[4] = adjust_address (op1, QImode, 1);
6605 operands[1] = adjust_address (operands[1], QImode, 0);
6606 operands[3] = gen_lowpart (QImode, operands[0]);
6607 operands[0] = gen_lowpart (SImode, operands[0]);
6608 operands[2] = gen_reg_rtx (SImode);
6609 operands[5] = gen_lowpart (QImode, operands[2]);
6610 }"
6611 )
6612
6613 (define_expand "storehi_bigend"
6614 [(set (match_dup 4) (match_dup 3))
6615 (set (match_dup 2)
6616 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6617 (set (match_operand 1 "" "") (match_dup 5))]
6618 "TARGET_ARM"
6619 "
6620 {
6621 rtx op1 = operands[1];
6622 rtx addr = XEXP (op1, 0);
6623 enum rtx_code code = GET_CODE (addr);
6624
6625 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6626 || code == MINUS)
6627 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
6628
6629 operands[4] = adjust_address (op1, QImode, 1);
6630 operands[1] = adjust_address (operands[1], QImode, 0);
6631 operands[3] = gen_lowpart (QImode, operands[0]);
6632 operands[0] = gen_lowpart (SImode, operands[0]);
6633 operands[2] = gen_reg_rtx (SImode);
6634 operands[5] = gen_lowpart (QImode, operands[2]);
6635 }"
6636 )
6637
6638 ;; Subroutine to store a half word integer constant into memory.
6639 (define_expand "storeinthi"
6640 [(set (match_operand 0 "" "")
6641 (match_operand 1 "" ""))
6642 (set (match_dup 3) (match_dup 2))]
6643 "TARGET_ARM"
6644 "
6645 {
6646 HOST_WIDE_INT value = INTVAL (operands[1]);
6647 rtx addr = XEXP (operands[0], 0);
6648 rtx op0 = operands[0];
6649 enum rtx_code code = GET_CODE (addr);
6650
6651 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6652 || code == MINUS)
6653 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
6654
6655 operands[1] = gen_reg_rtx (SImode);
6656 if (BYTES_BIG_ENDIAN)
6657 {
6658 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
6659 if ((value & 255) == ((value >> 8) & 255))
6660 operands[2] = operands[1];
6661 else
6662 {
6663 operands[2] = gen_reg_rtx (SImode);
6664 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
6665 }
6666 }
6667 else
6668 {
6669 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
6670 if ((value & 255) == ((value >> 8) & 255))
6671 operands[2] = operands[1];
6672 else
6673 {
6674 operands[2] = gen_reg_rtx (SImode);
6675 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
6676 }
6677 }
6678
6679 operands[3] = adjust_address (op0, QImode, 1);
6680 operands[0] = adjust_address (operands[0], QImode, 0);
6681 operands[2] = gen_lowpart (QImode, operands[2]);
6682 operands[1] = gen_lowpart (QImode, operands[1]);
6683 }"
6684 )
6685
6686 (define_expand "storehi_single_op"
6687 [(set (match_operand:HI 0 "memory_operand")
6688 (match_operand:HI 1 "general_operand"))]
6689 "TARGET_32BIT && arm_arch4"
6690 "
6691 if (!s_register_operand (operands[1], HImode))
6692 operands[1] = copy_to_mode_reg (HImode, operands[1]);
6693 "
6694 )
6695
6696 (define_expand "movhi"
6697 [(set (match_operand:HI 0 "general_operand")
6698 (match_operand:HI 1 "general_operand"))]
6699 "TARGET_EITHER"
6700 "
6701 gcc_checking_assert (aligned_operand (operands[0], HImode));
6702 gcc_checking_assert (aligned_operand (operands[1], HImode));
6703 if (TARGET_ARM)
6704 {
6705 if (can_create_pseudo_p ())
6706 {
6707 if (MEM_P (operands[0]))
6708 {
6709 if (arm_arch4)
6710 {
6711 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
6712 DONE;
6713 }
6714 if (CONST_INT_P (operands[1]))
6715 emit_insn (gen_storeinthi (operands[0], operands[1]));
6716 else
6717 {
6718 if (MEM_P (operands[1]))
6719 operands[1] = force_reg (HImode, operands[1]);
6720 if (BYTES_BIG_ENDIAN)
6721 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
6722 else
6723 emit_insn (gen_storehi (operands[1], operands[0]));
6724 }
6725 DONE;
6726 }
6727 /* Sign extend a constant, and keep it in an SImode reg. */
6728 else if (CONST_INT_P (operands[1]))
6729 {
6730 rtx reg = gen_reg_rtx (SImode);
6731 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6732
6733 /* If the constant is already valid, leave it alone. */
6734 if (!const_ok_for_arm (val))
6735 {
6736 /* If setting all the top bits will make the constant
6737 loadable in a single instruction, then set them.
6738 Otherwise, sign extend the number. */
6739
6740 if (const_ok_for_arm (~(val | ~0xffff)))
6741 val |= ~0xffff;
6742 else if (val & 0x8000)
6743 val |= ~0xffff;
6744 }
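/* Worked example (illustrative): for the HImode constant 0xff00,
   ~(val | ~0xffff) is 0xff, a valid ARM immediate, so the top bits are
   set and the resulting value 0xffffff00 then loads with a single MVN.  */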
6745
6746 emit_insn (gen_movsi (reg, GEN_INT (val)));
6747 operands[1] = gen_lowpart (HImode, reg);
6748 }
6749 else if (arm_arch4 && optimize && can_create_pseudo_p ()
6750 && MEM_P (operands[1]))
6751 {
6752 rtx reg = gen_reg_rtx (SImode);
6753
6754 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6755 operands[1] = gen_lowpart (HImode, reg);
6756 }
6757 else if (!arm_arch4)
6758 {
6759 if (MEM_P (operands[1]))
6760 {
6761 rtx base;
6762 rtx offset = const0_rtx;
6763 rtx reg = gen_reg_rtx (SImode);
6764
6765 if ((REG_P (base = XEXP (operands[1], 0))
6766 || (GET_CODE (base) == PLUS
6767 && (CONST_INT_P (offset = XEXP (base, 1)))
6768 && ((INTVAL(offset) & 1) != 1)
6769 && REG_P (base = XEXP (base, 0))))
6770 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
6771 {
6772 rtx new_rtx;
6773
6774 new_rtx = widen_memory_access (operands[1], SImode,
6775 ((INTVAL (offset) & ~3)
6776 - INTVAL (offset)));
6777 emit_insn (gen_movsi (reg, new_rtx));
6778 if (((INTVAL (offset) & 2) != 0)
6779 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
6780 {
6781 rtx reg2 = gen_reg_rtx (SImode);
6782
6783 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
6784 reg = reg2;
6785 }
6786 }
6787 else
6788 emit_insn (gen_movhi_bytes (reg, operands[1]));
6789
6790 operands[1] = gen_lowpart (HImode, reg);
6791 }
6792 }
6793 }
6794 /* Handle loading a large integer during reload. */
6795 else if (CONST_INT_P (operands[1])
6796 && !const_ok_for_arm (INTVAL (operands[1]))
6797 && !const_ok_for_arm (~INTVAL (operands[1])))
6798 {
6799 /* Writing a constant to memory needs a scratch, which should
6800 be handled with SECONDARY_RELOADs. */
6801 gcc_assert (REG_P (operands[0]));
6802
6803 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6804 emit_insn (gen_movsi (operands[0], operands[1]));
6805 DONE;
6806 }
6807 }
6808 else if (TARGET_THUMB2)
6809 {
6810 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
6811 if (can_create_pseudo_p ())
6812 {
6813 if (!REG_P (operands[0]))
6814 operands[1] = force_reg (HImode, operands[1]);
6815 /* Zero extend a constant, and keep it in an SImode reg. */
6816 else if (CONST_INT_P (operands[1]))
6817 {
6818 rtx reg = gen_reg_rtx (SImode);
6819 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6820
6821 emit_insn (gen_movsi (reg, GEN_INT (val)));
6822 operands[1] = gen_lowpart (HImode, reg);
6823 }
6824 }
6825 }
6826 else /* TARGET_THUMB1 */
6827 {
6828 if (can_create_pseudo_p ())
6829 {
6830 if (CONST_INT_P (operands[1]))
6831 {
6832 rtx reg = gen_reg_rtx (SImode);
6833
6834 emit_insn (gen_movsi (reg, operands[1]));
6835 operands[1] = gen_lowpart (HImode, reg);
6836 }
6837
6838 /* ??? We shouldn't really get invalid addresses here, but this can
6839 happen if we are passed an SP-relative address (never OK for HImode/QImode)
6840 or a virtual-register-relative address (also rejected as illegitimate for
6841 HImode/QImode). */
6842 /* ??? This should perhaps be fixed elsewhere, for instance, in
6843 fixup_stack_1, by checking for other kinds of invalid addresses,
6844 e.g. a bare reference to a virtual register. This may confuse the
6845 alpha though, which must handle this case differently. */
6846 if (MEM_P (operands[0])
6847 && !memory_address_p (GET_MODE (operands[0]),
6848 XEXP (operands[0], 0)))
6849 operands[0]
6850 = replace_equiv_address (operands[0],
6851 copy_to_reg (XEXP (operands[0], 0)));
6852
6853 if (MEM_P (operands[1])
6854 && !memory_address_p (GET_MODE (operands[1]),
6855 XEXP (operands[1], 0)))
6856 operands[1]
6857 = replace_equiv_address (operands[1],
6858 copy_to_reg (XEXP (operands[1], 0)));
6859
6860 if (MEM_P (operands[1]) && optimize > 0)
6861 {
6862 rtx reg = gen_reg_rtx (SImode);
6863
6864 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6865 operands[1] = gen_lowpart (HImode, reg);
6866 }
6867
6868 if (MEM_P (operands[0]))
6869 operands[1] = force_reg (HImode, operands[1]);
6870 }
6871 else if (CONST_INT_P (operands[1])
6872 && !satisfies_constraint_I (operands[1]))
6873 {
6874 /* Handle loading a large integer during reload. */
6875
6876 /* Writing a constant to memory needs a scratch, which should
6877 be handled with SECONDARY_RELOADs. */
6878 gcc_assert (REG_P (operands[0]));
6879
6880 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6881 emit_insn (gen_movsi (operands[0], operands[1]));
6882 DONE;
6883 }
6884 }
6885 "
6886 )
6887
6888 (define_expand "movhi_bytes"
6889 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
6890 (set (match_dup 3)
6891 (zero_extend:SI (match_dup 6)))
6892 (set (match_operand:SI 0 "" "")
6893 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
6894 "TARGET_ARM"
6895 "
6896 {
6897 rtx mem1, mem2;
6898 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
6899
6900 mem1 = change_address (operands[1], QImode, addr);
6901 mem2 = change_address (operands[1], QImode,
6902 plus_constant (Pmode, addr, 1));
6903 operands[0] = gen_lowpart (SImode, operands[0]);
6904 operands[1] = mem1;
6905 operands[2] = gen_reg_rtx (SImode);
6906 operands[3] = gen_reg_rtx (SImode);
6907 operands[6] = mem2;
6908
6909 if (BYTES_BIG_ENDIAN)
6910 {
6911 operands[4] = operands[2];
6912 operands[5] = operands[3];
6913 }
6914 else
6915 {
6916 operands[4] = operands[3];
6917 operands[5] = operands[2];
6918 }
6919 }"
6920 )
6921
6922 (define_expand "movhi_bigend"
6923 [(set (match_dup 2)
6924 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
6925 (const_int 16)))
6926 (set (match_dup 3)
6927 (ashiftrt:SI (match_dup 2) (const_int 16)))
6928 (set (match_operand:HI 0 "s_register_operand")
6929 (match_dup 4))]
6930 "TARGET_ARM"
6931 "
6932 operands[2] = gen_reg_rtx (SImode);
6933 operands[3] = gen_reg_rtx (SImode);
6934 operands[4] = gen_lowpart (HImode, operands[3]);
6935 "
6936 )
6937
6938 ;; Pattern to recognize insn generated default case above
6939 (define_insn "*movhi_insn_arch4"
6940 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
6941 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))]
6942 "TARGET_ARM
6943 && arm_arch4 && !TARGET_HARD_FLOAT
6944 && (register_operand (operands[0], HImode)
6945 || register_operand (operands[1], HImode))"
6946 "@
6947 mov%?\\t%0, %1\\t%@ movhi
6948 mvn%?\\t%0, #%B1\\t%@ movhi
6949 movw%?\\t%0, %L1\\t%@ movhi
6950 strh%?\\t%1, %0\\t%@ movhi
6951 ldrh%?\\t%0, %1\\t%@ movhi"
6952 [(set_attr "predicable" "yes")
6953 (set_attr "pool_range" "*,*,*,*,256")
6954 (set_attr "neg_pool_range" "*,*,*,*,244")
6955 (set_attr "arch" "*,*,v6t2,*,*")
6956 (set_attr_alternative "type"
6957 [(if_then_else (match_operand 1 "const_int_operand" "")
6958 (const_string "mov_imm" )
6959 (const_string "mov_reg"))
6960 (const_string "mvn_imm")
6961 (const_string "mov_imm")
6962 (const_string "store_4")
6963 (const_string "load_4")])]
6964 )
6965
6966 (define_insn "*movhi_bytes"
6967 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
6968 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))]
6969 "TARGET_ARM && !TARGET_HARD_FLOAT"
6970 "@
6971 mov%?\\t%0, %1\\t%@ movhi
6972 mov%?\\t%0, %1\\t%@ movhi
6973 mvn%?\\t%0, #%B1\\t%@ movhi"
6974 [(set_attr "predicable" "yes")
6975 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
6976 )
6977
6978 ;; We use a DImode scratch because we may occasionally need an additional
6979 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
6980 ;; to take any notice of the "o" constraint on the reload_memory_operand operand.
6981 ;; The reload_in<m> and reload_out<m> patterns require special constraints
6982 ;; to be correctly handled in default_secondary_reload function.
6983 (define_expand "reload_outhi"
6984 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
6985 (match_operand:HI 1 "s_register_operand" "r")
6986 (match_operand:DI 2 "s_register_operand" "=&l")])]
6987 "TARGET_EITHER"
6988 "if (TARGET_ARM)
6989 arm_reload_out_hi (operands);
6990 else
6991 thumb_reload_out_hi (operands);
6992 DONE;
6993 "
6994 )
6995
6996 (define_expand "reload_inhi"
6997 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
6998 (match_operand:HI 1 "arm_reload_memory_operand" "o")
6999 (match_operand:DI 2 "s_register_operand" "=&r")])]
7000 "TARGET_EITHER"
7001 "
7002 if (TARGET_ARM)
7003 arm_reload_in_hi (operands);
7004 else
7005 thumb_reload_out_hi (operands);
7006 DONE;
7007 ")
7008
7009 (define_expand "movqi"
7010 [(set (match_operand:QI 0 "general_operand")
7011 (match_operand:QI 1 "general_operand"))]
7012 "TARGET_EITHER"
7013 "
7014 /* Everything except mem = const or mem = mem can be done easily.  */
7015
7016 if (can_create_pseudo_p ())
7017 {
7018 if (CONST_INT_P (operands[1]))
7019 {
7020 rtx reg = gen_reg_rtx (SImode);
7021
7022 /* For thumb we want an unsigned immediate, then we are more likely
7023 to be able to use a movs insn. */
7024 if (TARGET_THUMB)
7025 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
7026
7027 emit_insn (gen_movsi (reg, operands[1]));
7028 operands[1] = gen_lowpart (QImode, reg);
7029 }
7030
7031 if (TARGET_THUMB)
7032 {
7033 /* ??? We shouldn't really get invalid addresses here, but this can
7034 happen if we are passed an SP-relative address (never OK for HImode/QImode)
7035 or a virtual-register-relative address (also rejected as illegitimate for
7036 HImode/QImode). */
7037 /* ??? This should perhaps be fixed elsewhere, for instance, in
7038 fixup_stack_1, by checking for other kinds of invalid addresses,
7039 e.g. a bare reference to a virtual register. This may confuse the
7040 alpha though, which must handle this case differently. */
7041 if (MEM_P (operands[0])
7042 && !memory_address_p (GET_MODE (operands[0]),
7043 XEXP (operands[0], 0)))
7044 operands[0]
7045 = replace_equiv_address (operands[0],
7046 copy_to_reg (XEXP (operands[0], 0)));
7047 if (MEM_P (operands[1])
7048 && !memory_address_p (GET_MODE (operands[1]),
7049 XEXP (operands[1], 0)))
7050 operands[1]
7051 = replace_equiv_address (operands[1],
7052 copy_to_reg (XEXP (operands[1], 0)));
7053 }
7054
7055 if (MEM_P (operands[1]) && optimize > 0)
7056 {
7057 rtx reg = gen_reg_rtx (SImode);
7058
7059 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
7060 operands[1] = gen_lowpart (QImode, reg);
7061 }
7062
7063 if (MEM_P (operands[0]))
7064 operands[1] = force_reg (QImode, operands[1]);
7065 }
7066 else if (TARGET_THUMB
7067 && CONST_INT_P (operands[1])
7068 && !satisfies_constraint_I (operands[1]))
7069 {
7070 /* Handle loading a large integer during reload. */
7071
7072 /* Writing a constant to memory needs a scratch, which should
7073 be handled with SECONDARY_RELOADs. */
7074 gcc_assert (REG_P (operands[0]));
7075
7076 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7077 emit_insn (gen_movsi (operands[0], operands[1]));
7078 DONE;
7079 }
7080 "
7081 )
7082
7083 (define_insn "*arm_movqi_insn"
7084 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
7085 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
7086 "TARGET_32BIT
7087 && ( register_operand (operands[0], QImode)
7088 || register_operand (operands[1], QImode))"
7089 "@
7090 mov%?\\t%0, %1
7091 mov%?\\t%0, %1
7092 mov%?\\t%0, %1
7093 mov%?\\t%0, %1
7094 mvn%?\\t%0, #%B1
7095 ldrb%?\\t%0, %1
7096 strb%?\\t%1, %0
7097 ldrb%?\\t%0, %1
7098 strb%?\\t%1, %0"
7099 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
7100 (set_attr "predicable" "yes")
7101 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
7102 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
7103 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
7104 )
7105
7106 ;; HFmode moves
7107 (define_expand "movhf"
7108 [(set (match_operand:HF 0 "general_operand")
7109 (match_operand:HF 1 "general_operand"))]
7110 "TARGET_EITHER"
7111 "
7112 gcc_checking_assert (aligned_operand (operands[0], HFmode));
7113 gcc_checking_assert (aligned_operand (operands[1], HFmode));
7114 if (TARGET_32BIT)
7115 {
7116 if (MEM_P (operands[0]))
7117 operands[1] = force_reg (HFmode, operands[1]);
7118 }
7119 else /* TARGET_THUMB1 */
7120 {
7121 if (can_create_pseudo_p ())
7122 {
7123 if (!REG_P (operands[0]))
7124 operands[1] = force_reg (HFmode, operands[1]);
7125 }
7126 }
7127 "
7128 )
7129
7130 (define_insn "*arm32_movhf"
7131 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
7132 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
7133 "TARGET_32BIT && !TARGET_HARD_FLOAT
7134 && ( s_register_operand (operands[0], HFmode)
7135 || s_register_operand (operands[1], HFmode))"
7136 "*
7137 switch (which_alternative)
7138 {
7139 case 0: /* ARM register from memory */
7140 return \"ldrh%?\\t%0, %1\\t%@ __fp16\";
7141 case 1: /* memory from ARM register */
7142 return \"strh%?\\t%1, %0\\t%@ __fp16\";
7143 case 2: /* ARM register from ARM register */
7144 return \"mov%?\\t%0, %1\\t%@ __fp16\";
7145 case 3: /* ARM register from constant */
7146 {
7147 long bits;
7148 rtx ops[4];
7149
7150 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
7151 HFmode);
7152 ops[0] = operands[0];
7153 ops[1] = GEN_INT (bits);
7154 ops[2] = GEN_INT (bits & 0xff00);
7155 ops[3] = GEN_INT (bits & 0x00ff);
7156
7157 if (arm_arch_thumb2)
7158 output_asm_insn (\"movw%?\\t%0, %1\", ops);
7159 else
7160 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
7161 return \"\";
7162 }
7163 default:
7164 gcc_unreachable ();
7165 }
7166 "
7167 [(set_attr "conds" "unconditional")
7168 (set_attr "type" "load_4,store_4,mov_reg,multiple")
7169 (set_attr "length" "4,4,4,8")
7170 (set_attr "predicable" "yes")]
7171 )
7172
7173 (define_expand "movsf"
7174 [(set (match_operand:SF 0 "general_operand")
7175 (match_operand:SF 1 "general_operand"))]
7176 "TARGET_EITHER"
7177 "
7178 gcc_checking_assert (aligned_operand (operands[0], SFmode));
7179 gcc_checking_assert (aligned_operand (operands[1], SFmode));
7180 if (TARGET_32BIT)
7181 {
7182 if (MEM_P (operands[0]))
7183 operands[1] = force_reg (SFmode, operands[1]);
7184 }
7185 else /* TARGET_THUMB1 */
7186 {
7187 if (can_create_pseudo_p ())
7188 {
7189 if (!REG_P (operands[0]))
7190 operands[1] = force_reg (SFmode, operands[1]);
7191 }
7192 }
7193
7194 /* Cannot load it directly, generate a load with clobber so that it can be
7195 loaded via GPR with MOV / MOVT. */
7196 if (arm_disable_literal_pool
7197 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
7198 && CONST_DOUBLE_P (operands[1])
7199 && TARGET_HARD_FLOAT
7200 && !vfp3_const_double_rtx (operands[1]))
7201 {
7202 rtx clobreg = gen_reg_rtx (SFmode);
7203 emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
7204 clobreg));
7205 DONE;
7206 }
7207 "
7208 )
7209
7210 ;; Transform a move of a floating-point constant into a core register
7211 ;; into the equivalent SImode operation.
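;; For example, a move of the SFmode constant 1.0 into a core register is
;; rewritten as an SImode move of its IEEE single-precision bit pattern,
;; 0x3f800000.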
7212 (define_split
7213 [(set (match_operand:SF 0 "arm_general_register_operand" "")
7214 (match_operand:SF 1 "immediate_operand" ""))]
7215 "TARGET_EITHER
7216 && reload_completed
7217 && CONST_DOUBLE_P (operands[1])"
7218 [(set (match_dup 2) (match_dup 3))]
7219 "
7220 operands[2] = gen_lowpart (SImode, operands[0]);
7221 operands[3] = gen_lowpart (SImode, operands[1]);
7222 if (operands[2] == 0 || operands[3] == 0)
7223 FAIL;
7224 "
7225 )
7226
7227 (define_insn "*arm_movsf_soft_insn"
7228 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
7229 (match_operand:SF 1 "general_operand" "r,mE,r"))]
7230 "TARGET_32BIT
7231 && TARGET_SOFT_FLOAT
7232 && (!MEM_P (operands[0])
7233 || register_operand (operands[1], SFmode))"
7234 {
7235 switch (which_alternative)
7236 {
7237 case 0: return \"mov%?\\t%0, %1\";
7238 case 1:
7239 /* Cannot load it directly, split to load it via MOV / MOVT. */
7240 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
7241 return \"#\";
7242 return \"ldr%?\\t%0, %1\\t%@ float\";
7243 case 2: return \"str%?\\t%1, %0\\t%@ float\";
7244 default: gcc_unreachable ();
7245 }
7246 }
7247 [(set_attr "predicable" "yes")
7248 (set_attr "type" "mov_reg,load_4,store_4")
7249 (set_attr "arm_pool_range" "*,4096,*")
7250 (set_attr "thumb2_pool_range" "*,4094,*")
7251 (set_attr "arm_neg_pool_range" "*,4084,*")
7252 (set_attr "thumb2_neg_pool_range" "*,0,*")]
7253 )
7254
7255 ;; Splitter for the above.
7256 (define_split
7257 [(set (match_operand:SF 0 "s_register_operand")
7258 (match_operand:SF 1 "const_double_operand"))]
7259 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
7260 [(const_int 0)]
7261 {
7262 long buf;
7263 real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
7264 rtx cst = gen_int_mode (buf, SImode);
7265 emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
7266 DONE;
7267 }
7268 )
7269
7270 (define_expand "movdf"
7271 [(set (match_operand:DF 0 "general_operand")
7272 (match_operand:DF 1 "general_operand"))]
7273 "TARGET_EITHER"
7274 "
7275 gcc_checking_assert (aligned_operand (operands[0], DFmode));
7276 gcc_checking_assert (aligned_operand (operands[1], DFmode));
7277 if (TARGET_32BIT)
7278 {
7279 if (MEM_P (operands[0]))
7280 operands[1] = force_reg (DFmode, operands[1]);
7281 }
7282 else /* TARGET_THUMB */
7283 {
7284 if (can_create_pseudo_p ())
7285 {
7286 if (!REG_P (operands[0]))
7287 operands[1] = force_reg (DFmode, operands[1]);
7288 }
7289 }
7290
7291 /* Cannot load it directly, generate a load with clobber so that it can be
7292 loaded via GPR with MOV / MOVT. */
7293 if (arm_disable_literal_pool
7294 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
7295 && CONSTANT_P (operands[1])
7296 && TARGET_HARD_FLOAT
7297 && !arm_const_double_rtx (operands[1])
7298 && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
7299 {
7300 rtx clobreg = gen_reg_rtx (DFmode);
7301 emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
7302 clobreg));
7303 DONE;
7304 }
7305 "
7306 )
7307
7308 ;; Reloading a df mode value stored in integer regs to memory can require a
7309 ;; scratch reg.
7310 ;; Another reload_out<m> pattern that requires special constraints.
7311 (define_expand "reload_outdf"
7312 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
7313 (match_operand:DF 1 "s_register_operand" "r")
7314 (match_operand:SI 2 "s_register_operand" "=&r")]
7315 "TARGET_THUMB2"
7316 "
7317 {
7318 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
7319
7320 if (code == REG)
7321 operands[2] = XEXP (operands[0], 0);
7322 else if (code == POST_INC || code == PRE_DEC)
7323 {
7324 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
7325 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
7326 emit_insn (gen_movdi (operands[0], operands[1]));
7327 DONE;
7328 }
7329 else if (code == PRE_INC)
7330 {
7331 rtx reg = XEXP (XEXP (operands[0], 0), 0);
7332
7333 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
7334 operands[2] = reg;
7335 }
7336 else if (code == POST_DEC)
7337 operands[2] = XEXP (XEXP (operands[0], 0), 0);
7338 else
7339 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
7340 XEXP (XEXP (operands[0], 0), 1)));
7341
7342 emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
7343 operands[1]));
7344
7345 if (code == POST_DEC)
7346 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
7347
7348 DONE;
7349 }"
7350 )
7351
7352 (define_insn "*movdf_soft_insn"
7353 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
7354 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
7355 "TARGET_32BIT && TARGET_SOFT_FLOAT
7356 && ( register_operand (operands[0], DFmode)
7357 || register_operand (operands[1], DFmode))"
7358 "*
7359 switch (which_alternative)
7360 {
7361 case 0:
7362 case 1:
7363 case 2:
7364 return \"#\";
7365 case 3:
7366 /* Cannot load it directly, split to load it via MOV / MOVT. */
7367 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
7368 return \"#\";
7369 /* Fall through. */
7370 default:
7371 return output_move_double (operands, true, NULL);
7372 }
7373 "
7374 [(set_attr "length" "8,12,16,8,8")
7375 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
7376 (set_attr "arm_pool_range" "*,*,*,1020,*")
7377 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
7378 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
7379 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
7380 )
7381
7382 ;; Splitter for the above.
7383 (define_split
7384 [(set (match_operand:DF 0 "s_register_operand")
7385 (match_operand:DF 1 "const_double_operand"))]
7386 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
7387 [(const_int 0)]
7388 {
7389 long buf[2];
7390 int order = BYTES_BIG_ENDIAN ? 1 : 0;
7391 real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
7392 unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
7393 ival |= (zext_hwi (buf[1 - order], 32) << 32);
7394 rtx cst = gen_int_mode (ival, DImode);
7395 emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
7396 DONE;
7397 }
7398 )
7399 \f
7400
7401 ;; load- and store-multiple insns
7402 ;; The ARM can load/store any set of registers, provided that they are in
7403 ;; ascending order, but these expanders assume a contiguous set.
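;; As an illustrative sketch (not a literal expansion from this file), a
;; load_multiple of four registers starting at r4, with the base address in
;; r0, would normally end up as a single block transfer such as
;;	ldm	r0, {r4, r5, r6, r7}
;; and the corresponding store_multiple as
;;	stm	r0, {r4, r5, r6, r7}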
7404
7405 (define_expand "load_multiple"
7406 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7407 (match_operand:SI 1 "" ""))
7408 (use (match_operand:SI 2 "" ""))])]
7409 "TARGET_32BIT"
7410 {
7411 HOST_WIDE_INT offset = 0;
7412
7413 /* Support only fixed-point (core) registers. */
7414 if (!CONST_INT_P (operands[2])
7415 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
7416 || INTVAL (operands[2]) < 2
7417 || !MEM_P (operands[1])
7418 || !REG_P (operands[0])
7419 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
7420 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7421 FAIL;
7422
7423 operands[3]
7424 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
7425 INTVAL (operands[2]),
7426 force_reg (SImode, XEXP (operands[1], 0)),
7427 FALSE, operands[1], &offset);
7428 })
7429
7430 (define_expand "store_multiple"
7431 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7432 (match_operand:SI 1 "" ""))
7433 (use (match_operand:SI 2 "" ""))])]
7434 "TARGET_32BIT"
7435 {
7436 HOST_WIDE_INT offset = 0;
7437
7438 /* Support only fixed-point (core) registers. */
7439 if (!CONST_INT_P (operands[2])
7440 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
7441 || INTVAL (operands[2]) < 2
7442 || !REG_P (operands[1])
7443 || !MEM_P (operands[0])
7444 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
7445 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7446 FAIL;
7447
7448 operands[3]
7449 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
7450 INTVAL (operands[2]),
7451 force_reg (SImode, XEXP (operands[0], 0)),
7452 FALSE, operands[0], &offset);
7453 })
7454
7455
7456 (define_expand "setmemsi"
7457 [(match_operand:BLK 0 "general_operand")
7458 (match_operand:SI 1 "const_int_operand")
7459 (match_operand:SI 2 "const_int_operand")
7460 (match_operand:SI 3 "const_int_operand")]
7461 "TARGET_32BIT"
7462 {
7463 if (arm_gen_setmem (operands))
7464 DONE;
7465
7466 FAIL;
7467 })
7468
7469
7470 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
7471 ;; We could let this apply for blocks of less than this, but it clobbers so
7472 ;; many registers that there is then probably a better way.
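;; As a rough illustration (register choice is left to the expander), copying
;; 16 word-aligned bytes might be emitted as a pair of block transfers along
;; the lines of
;;	ldmia	r1!, {r4, r5, r6, r7}
;;	stmia	r0!, {r4, r5, r6, r7}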
7473
7474 (define_expand "cpymemqi"
7475 [(match_operand:BLK 0 "general_operand")
7476 (match_operand:BLK 1 "general_operand")
7477 (match_operand:SI 2 "const_int_operand")
7478 (match_operand:SI 3 "const_int_operand")]
7479 ""
7480 "
7481 if (TARGET_32BIT)
7482 {
7483 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
7484 && !optimize_function_for_size_p (cfun))
7485 {
7486 if (gen_cpymem_ldrd_strd (operands))
7487 DONE;
7488 FAIL;
7489 }
7490
7491 if (arm_gen_cpymemqi (operands))
7492 DONE;
7493 FAIL;
7494 }
7495 else /* TARGET_THUMB1 */
7496 {
7497 if ( INTVAL (operands[3]) != 4
7498 || INTVAL (operands[2]) > 48)
7499 FAIL;
7500
7501 thumb_expand_cpymemqi (operands);
7502 DONE;
7503 }
7504 "
7505 )
7506 \f
7507
7508 ;; Compare & branch insns
7509 ;; The range calculations are derived as follows:
7510 ;; For forward branches, the address calculation returns the address of
7511 ;; the next instruction. This is 2 beyond the branch instruction.
7512 ;; For backward branches, the address calculation returns the address of
7513 ;; the first instruction in this pattern (cmp). This is 2 before the branch
7514 ;; instruction for the shortest sequence, and 4 before the branch instruction
7515 ;; if we have to jump around an unconditional branch.
7516 ;; To the basic branch range the PC offset must be added (this is +4).
7517 ;; So for forward branches we have
7518 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
7519 ;; And for backward branches we have
7520 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
7521 ;;
7522 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040 -> 2048).
7523 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 -> 256).
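;; Worked example for the conditional case: a backward 'b<cond>' in the short
;; sequence gives neg_range - neg_base_offs + pc_offs = -256 - (-2) + 4
;; = -250, which is the -250 bound used in the conditional-branch length
;; attributes further down in this file.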
7524
7525 (define_expand "cbranchsi4"
7526 [(set (pc) (if_then_else
7527 (match_operator 0 "expandable_comparison_operator"
7528 [(match_operand:SI 1 "s_register_operand")
7529 (match_operand:SI 2 "nonmemory_operand")])
7530 (label_ref (match_operand 3 "" ""))
7531 (pc)))]
7532 "TARGET_EITHER"
7533 "
7534 if (!TARGET_THUMB1)
7535 {
7536 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7537 FAIL;
7538 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7539 operands[3]));
7540 DONE;
7541 }
7542 if (thumb1_cmpneg_operand (operands[2], SImode))
7543 {
7544 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
7545 operands[3], operands[0]));
7546 DONE;
7547 }
7548 if (!thumb1_cmp_operand (operands[2], SImode))
7549 operands[2] = force_reg (SImode, operands[2]);
7550 ")
7551
7552 (define_expand "cbranchsf4"
7553 [(set (pc) (if_then_else
7554 (match_operator 0 "expandable_comparison_operator"
7555 [(match_operand:SF 1 "s_register_operand")
7556 (match_operand:SF 2 "vfp_compare_operand")])
7557 (label_ref (match_operand 3 "" ""))
7558 (pc)))]
7559 "TARGET_32BIT && TARGET_HARD_FLOAT"
7560 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7561 operands[3])); DONE;"
7562 )
7563
7564 (define_expand "cbranchdf4"
7565 [(set (pc) (if_then_else
7566 (match_operator 0 "expandable_comparison_operator"
7567 [(match_operand:DF 1 "s_register_operand")
7568 (match_operand:DF 2 "vfp_compare_operand")])
7569 (label_ref (match_operand 3 "" ""))
7570 (pc)))]
7571 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7572 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7573 operands[3])); DONE;"
7574 )
7575
7576 (define_expand "cbranchdi4"
7577 [(set (pc) (if_then_else
7578 (match_operator 0 "expandable_comparison_operator"
7579 [(match_operand:DI 1 "s_register_operand")
7580 (match_operand:DI 2 "reg_or_int_operand")])
7581 (label_ref (match_operand 3 "" ""))
7582 (pc)))]
7583 "TARGET_32BIT"
7584 "{
7585 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7586 FAIL;
7587 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7588 operands[3]));
7589 DONE;
7590 }"
7591 )
7592
7593 ;; Comparison and test insns
7594
7595 (define_insn "*arm_cmpsi_insn"
7596 [(set (reg:CC CC_REGNUM)
7597 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
7598 (match_operand:SI 1 "arm_add_operand" "Py,r,r,I,L")))]
7599 "TARGET_32BIT"
7600 "@
7601 cmp%?\\t%0, %1
7602 cmp%?\\t%0, %1
7603 cmp%?\\t%0, %1
7604 cmp%?\\t%0, %1
7605 cmn%?\\t%0, #%n1"
7606 [(set_attr "conds" "set")
7607 (set_attr "arch" "t2,t2,any,any,any")
7608 (set_attr "length" "2,2,4,4,4")
7609 (set_attr "predicable" "yes")
7610 (set_attr "predicable_short_it" "yes,yes,yes,no,no")
7611 (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
7612 )
7613
7614 (define_insn "*cmpsi_shiftsi"
7615 [(set (reg:CC CC_REGNUM)
7616 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7617 (match_operator:SI 3 "shift_operator"
7618 [(match_operand:SI 1 "s_register_operand" "r,r")
7619 (match_operand:SI 2 "shift_amount_operand" "M,r")])))]
7620 "TARGET_32BIT"
7621 "cmp\\t%0, %1%S3"
7622 [(set_attr "conds" "set")
7623 (set_attr "shift" "1")
7624 (set_attr "arch" "32,a")
7625 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
7626
7627 (define_insn "*cmpsi_shiftsi_swp"
7628 [(set (reg:CC_SWP CC_REGNUM)
7629 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7630 [(match_operand:SI 1 "s_register_operand" "r,r")
7631 (match_operand:SI 2 "shift_amount_operand" "M,r")])
7632 (match_operand:SI 0 "s_register_operand" "r,r")))]
7633 "TARGET_32BIT"
7634 "cmp%?\\t%0, %1%S3"
7635 [(set_attr "conds" "set")
7636 (set_attr "shift" "1")
7637 (set_attr "arch" "32,a")
7638 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
7639
7640 (define_insn "*arm_cmpsi_negshiftsi_si"
7641 [(set (reg:CC_Z CC_REGNUM)
7642 (compare:CC_Z
7643 (neg:SI (match_operator:SI 1 "shift_operator"
7644 [(match_operand:SI 2 "s_register_operand" "r,r")
7645 (match_operand:SI 3 "shift_amount_operand" "M,r")]))
7646 (match_operand:SI 0 "s_register_operand" "r,r")))]
7647 "TARGET_32BIT"
7648 "cmn%?\\t%0, %2%S1"
7649 [(set_attr "conds" "set")
7650 (set_attr "arch" "32,a")
7651 (set_attr "shift" "2")
7652 (set_attr "type" "alus_shift_imm,alus_shift_reg")
7653 (set_attr "predicable" "yes")]
7654 )
7655
7656 ; This insn allows redundant compares to be removed by cse; nothing should
7657 ; ever appear in the output file, since (set (reg x) (reg x)) is a no-op that
7658 ; is deleted later on. The match_dup will match the mode here, so that
7659 ; mode changes of the condition codes aren't lost by this even though we don't
7660 ; specify what they are.
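; For example, once cse has proved that the comparison value is already in
; the CC register, a second identical compare collapses to
;	(set (reg:CC CC_REGNUM) (reg:CC CC_REGNUM))
; which this pattern keeps recognizable until the no-op set is deleted.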
7661
7662 (define_insn "*deleted_compare"
7663 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7664 "TARGET_32BIT"
7665 "\\t%@ deleted compare"
7666 [(set_attr "conds" "set")
7667 (set_attr "length" "0")
7668 (set_attr "type" "no_insn")]
7669 )
7670
7671 \f
7672 ;; Conditional branch insns
7673
7674 (define_expand "cbranch_cc"
7675 [(set (pc)
7676 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7677 (match_operand 2 "" "")])
7678 (label_ref (match_operand 3 "" ""))
7679 (pc)))]
7680 "TARGET_32BIT"
7681 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7682 operands[1], operands[2], NULL_RTX);
7683 operands[2] = const0_rtx;"
7684 )
7685
7686 ;;
7687 ;; Patterns to match conditional branch insns.
7688 ;;
7689
7690 (define_insn "arm_cond_branch"
7691 [(set (pc)
7692 (if_then_else (match_operator 1 "arm_comparison_operator"
7693 [(match_operand 2 "cc_register" "") (const_int 0)])
7694 (label_ref (match_operand 0 "" ""))
7695 (pc)))]
7696 "TARGET_32BIT"
7697 "*
7698 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7699 {
7700 arm_ccfsm_state += 2;
7701 return \"\";
7702 }
7703 return \"b%d1\\t%l0\";
7704 "
7705 [(set_attr "conds" "use")
7706 (set_attr "type" "branch")
7707 (set (attr "length")
7708 (if_then_else
7709 (and (match_test "TARGET_THUMB2")
7710 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7711 (le (minus (match_dup 0) (pc)) (const_int 256))))
7712 (const_int 2)
7713 (const_int 4)))]
7714 )
7715
7716 (define_insn "*arm_cond_branch_reversed"
7717 [(set (pc)
7718 (if_then_else (match_operator 1 "arm_comparison_operator"
7719 [(match_operand 2 "cc_register" "") (const_int 0)])
7720 (pc)
7721 (label_ref (match_operand 0 "" ""))))]
7722 "TARGET_32BIT"
7723 "*
7724 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7725 {
7726 arm_ccfsm_state += 2;
7727 return \"\";
7728 }
7729 return \"b%D1\\t%l0\";
7730 "
7731 [(set_attr "conds" "use")
7732 (set_attr "type" "branch")
7733 (set (attr "length")
7734 (if_then_else
7735 (and (match_test "TARGET_THUMB2")
7736 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7737 (le (minus (match_dup 0) (pc)) (const_int 256))))
7738 (const_int 2)
7739 (const_int 4)))]
7740 )
7741
7742 \f
7743
7744 ; scc insns
7745
7746 (define_expand "cstore_cc"
7747 [(set (match_operand:SI 0 "s_register_operand")
7748 (match_operator:SI 1 "" [(match_operand 2 "" "")
7749 (match_operand 3 "" "")]))]
7750 "TARGET_32BIT"
7751 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7752 operands[2], operands[3], NULL_RTX);
7753 operands[3] = const0_rtx;"
7754 )
7755
7756 (define_insn_and_split "*mov_scc"
7757 [(set (match_operand:SI 0 "s_register_operand" "=r")
7758 (match_operator:SI 1 "arm_comparison_operator_mode"
7759 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7760 "TARGET_ARM"
7761 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7762 "TARGET_ARM"
7763 [(set (match_dup 0)
7764 (if_then_else:SI (match_dup 1)
7765 (const_int 1)
7766 (const_int 0)))]
7767 ""
7768 [(set_attr "conds" "use")
7769 (set_attr "length" "8")
7770 (set_attr "type" "multiple")]
7771 )
7772
7773 (define_insn "*negscc_borrow"
7774 [(set (match_operand:SI 0 "s_register_operand" "=r")
7775 (neg:SI (match_operand:SI 1 "arm_borrow_operation" "")))]
7776 "TARGET_32BIT"
7777 "sbc\\t%0, %0, %0"
7778 [(set_attr "conds" "use")
7779 (set_attr "length" "4")
7780 (set_attr "type" "adc_reg")]
7781 )
7782
7783 (define_insn_and_split "*mov_negscc"
7784 [(set (match_operand:SI 0 "s_register_operand" "=r")
7785 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
7786 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7787 "TARGET_ARM && !arm_borrow_operation (operands[1], SImode)"
7788 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7789 "&& true"
7790 [(set (match_dup 0)
7791 (if_then_else:SI (match_dup 1)
7792 (match_dup 3)
7793 (const_int 0)))]
7794 {
7795 operands[3] = GEN_INT (~0);
7796 }
7797 [(set_attr "conds" "use")
7798 (set_attr "length" "8")
7799 (set_attr "type" "multiple")]
7800 )
7801
7802 (define_insn_and_split "*mov_notscc"
7803 [(set (match_operand:SI 0 "s_register_operand" "=r")
7804 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7805 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7806 "TARGET_ARM"
7807 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7808 "TARGET_ARM"
7809 [(set (match_dup 0)
7810 (if_then_else:SI (match_dup 1)
7811 (match_dup 3)
7812 (match_dup 4)))]
7813 {
7814 operands[3] = GEN_INT (~1);
7815 operands[4] = GEN_INT (~0);
7816 }
7817 [(set_attr "conds" "use")
7818 (set_attr "length" "8")
7819 (set_attr "type" "multiple")]
7820 )
7821
7822 (define_expand "cstoresi4"
7823 [(set (match_operand:SI 0 "s_register_operand")
7824 (match_operator:SI 1 "expandable_comparison_operator"
7825 [(match_operand:SI 2 "s_register_operand")
7826 (match_operand:SI 3 "reg_or_int_operand")]))]
7827 "TARGET_32BIT || TARGET_THUMB1"
7828 "{
7829 rtx op3, scratch, scratch2;
7830
7831 if (!TARGET_THUMB1)
7832 {
7833 if (!arm_add_operand (operands[3], SImode))
7834 operands[3] = force_reg (SImode, operands[3]);
7835 emit_insn (gen_cstore_cc (operands[0], operands[1],
7836 operands[2], operands[3]));
7837 DONE;
7838 }
7839
7840 if (operands[3] == const0_rtx)
7841 {
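/* The branch-free sequences below rely on these identities (illustrative
   summary, with x = operands[2] and logical shifts unless noted):
     x <= 0  is  (x | (x - 1)) >> 31
     x >= 0  is  (~x) >> 31
     x > 0   is  ((x >> 31) - x) >> 31   (first shift arithmetic)  */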
7842 switch (GET_CODE (operands[1]))
7843 {
7844 case EQ:
7845 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7846 break;
7847
7848 case NE:
7849 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7850 break;
7851
7852 case LE:
7853 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7854 NULL_RTX, 0, OPTAB_WIDEN);
7855 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7856 NULL_RTX, 0, OPTAB_WIDEN);
7857 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7858 operands[0], 1, OPTAB_WIDEN);
7859 break;
7860
7861 case GE:
7862 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7863 NULL_RTX, 1);
7864 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7865 operands[0], 1, OPTAB_WIDEN);
7866 break;
7867
7868 case GT:
7869 scratch = expand_binop (SImode, ashr_optab, operands[2],
7870 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7871 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7872 NULL_RTX, 0, OPTAB_WIDEN);
7873 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7874 0, OPTAB_WIDEN);
7875 break;
7876
7877 /* LT is handled by generic code. Unsigned comparisons against zero need no handling here. */
7878 default:
7879 FAIL;
7880 }
7881 DONE;
7882 }
7883
7884 switch (GET_CODE (operands[1]))
7885 {
7886 case EQ:
7887 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7888 NULL_RTX, 0, OPTAB_WIDEN);
7889 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7890 break;
7891
7892 case NE:
7893 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7894 NULL_RTX, 0, OPTAB_WIDEN);
7895 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7896 break;
7897
7898 case LE:
7899 op3 = force_reg (SImode, operands[3]);
7900
7901 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7902 NULL_RTX, 1, OPTAB_WIDEN);
7903 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7904 NULL_RTX, 0, OPTAB_WIDEN);
7905 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7906 op3, operands[2]));
7907 break;
7908
7909 case GE:
7910 op3 = operands[3];
7911 if (!thumb1_cmp_operand (op3, SImode))
7912 op3 = force_reg (SImode, op3);
7913 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7914 NULL_RTX, 0, OPTAB_WIDEN);
7915 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7916 NULL_RTX, 1, OPTAB_WIDEN);
7917 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7918 operands[2], op3));
7919 break;
7920
7921 case LEU:
7922 op3 = force_reg (SImode, operands[3]);
7923 scratch = force_reg (SImode, const0_rtx);
7924 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7925 op3, operands[2]));
7926 break;
7927
7928 case GEU:
7929 op3 = operands[3];
7930 if (!thumb1_cmp_operand (op3, SImode))
7931 op3 = force_reg (SImode, op3);
7932 scratch = force_reg (SImode, const0_rtx);
7933 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7934 operands[2], op3));
7935 break;
7936
7937 case LTU:
7938 op3 = operands[3];
7939 if (!thumb1_cmp_operand (op3, SImode))
7940 op3 = force_reg (SImode, op3);
7941 scratch = gen_reg_rtx (SImode);
7942 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
7943 break;
7944
7945 case GTU:
7946 op3 = force_reg (SImode, operands[3]);
7947 scratch = gen_reg_rtx (SImode);
7948 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
7949 break;
7950
7951 /* No good sequences for GT, LT. */
7952 default:
7953 FAIL;
7954 }
7955 DONE;
7956 }")
7957
7958 (define_expand "cstorehf4"
7959 [(set (match_operand:SI 0 "s_register_operand")
7960 (match_operator:SI 1 "expandable_comparison_operator"
7961 [(match_operand:HF 2 "s_register_operand")
7962 (match_operand:HF 3 "vfp_compare_operand")]))]
7963 "TARGET_VFP_FP16INST"
7964 {
7965 if (!arm_validize_comparison (&operands[1],
7966 &operands[2],
7967 &operands[3]))
7968 FAIL;
7969
7970 emit_insn (gen_cstore_cc (operands[0], operands[1],
7971 operands[2], operands[3]));
7972 DONE;
7973 }
7974 )
7975
7976 (define_expand "cstoresf4"
7977 [(set (match_operand:SI 0 "s_register_operand")
7978 (match_operator:SI 1 "expandable_comparison_operator"
7979 [(match_operand:SF 2 "s_register_operand")
7980 (match_operand:SF 3 "vfp_compare_operand")]))]
7981 "TARGET_32BIT && TARGET_HARD_FLOAT"
7982 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7983 operands[2], operands[3])); DONE;"
7984 )
7985
7986 (define_expand "cstoredf4"
7987 [(set (match_operand:SI 0 "s_register_operand")
7988 (match_operator:SI 1 "expandable_comparison_operator"
7989 [(match_operand:DF 2 "s_register_operand")
7990 (match_operand:DF 3 "vfp_compare_operand")]))]
7991 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7992 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7993 operands[2], operands[3])); DONE;"
7994 )
7995
7996 (define_expand "cstoredi4"
7997 [(set (match_operand:SI 0 "s_register_operand")
7998 (match_operator:SI 1 "expandable_comparison_operator"
7999 [(match_operand:DI 2 "s_register_operand")
8000 (match_operand:DI 3 "reg_or_int_operand")]))]
8001 "TARGET_32BIT"
8002 "{
8003 if (!arm_validize_comparison (&operands[1],
8004 &operands[2],
8005 &operands[3]))
8006 FAIL;
8007 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
8008 operands[3]));
8009 DONE;
8010 }"
8011 )
8012
8013 \f
8014 ;; Conditional move insns
8015
8016 (define_expand "movsicc"
8017 [(set (match_operand:SI 0 "s_register_operand")
8018 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
8019 (match_operand:SI 2 "arm_not_operand")
8020 (match_operand:SI 3 "arm_not_operand")))]
8021 "TARGET_32BIT"
8022 "
8023 {
8024 enum rtx_code code;
8025 rtx ccreg;
8026
8027 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8028 &XEXP (operands[1], 1)))
8029 FAIL;
8030
8031 code = GET_CODE (operands[1]);
8032 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8033 XEXP (operands[1], 1), NULL_RTX);
8034 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8035 }"
8036 )
8037
8038 (define_expand "movhfcc"
8039 [(set (match_operand:HF 0 "s_register_operand")
8040 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
8041 (match_operand:HF 2 "s_register_operand")
8042 (match_operand:HF 3 "s_register_operand")))]
8043 "TARGET_VFP_FP16INST"
8044 "
8045 {
8046 enum rtx_code code = GET_CODE (operands[1]);
8047 rtx ccreg;
8048
8049 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8050 &XEXP (operands[1], 1)))
8051 FAIL;
8052
8053 code = GET_CODE (operands[1]);
8054 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8055 XEXP (operands[1], 1), NULL_RTX);
8056 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8057 }"
8058 )
8059
8060 (define_expand "movsfcc"
8061 [(set (match_operand:SF 0 "s_register_operand")
8062 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
8063 (match_operand:SF 2 "s_register_operand")
8064 (match_operand:SF 3 "s_register_operand")))]
8065 "TARGET_32BIT && TARGET_HARD_FLOAT"
8066 "
8067 {
8068 enum rtx_code code = GET_CODE (operands[1]);
8069 rtx ccreg;
8070
8071 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8072 &XEXP (operands[1], 1)))
8073 FAIL;
8074
8075 code = GET_CODE (operands[1]);
8076 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8077 XEXP (operands[1], 1), NULL_RTX);
8078 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8079 }"
8080 )
8081
8082 (define_expand "movdfcc"
8083 [(set (match_operand:DF 0 "s_register_operand")
8084 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
8085 (match_operand:DF 2 "s_register_operand")
8086 (match_operand:DF 3 "s_register_operand")))]
8087 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
8088 "
8089 {
8090 enum rtx_code code = GET_CODE (operands[1]);
8091 rtx ccreg;
8092
8093 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8094 &XEXP (operands[1], 1)))
8095 FAIL;
8096 code = GET_CODE (operands[1]);
8097 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8098 XEXP (operands[1], 1), NULL_RTX);
8099 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8100 }"
8101 )
8102
8103 (define_insn "*cmov<mode>"
8104 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
8105 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
8106 [(match_operand 2 "cc_register" "") (const_int 0)])
8107 (match_operand:SDF 3 "s_register_operand"
8108 "<F_constraint>")
8109 (match_operand:SDF 4 "s_register_operand"
8110 "<F_constraint>")))]
8111 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
8112 "*
8113 {
8114 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
8115 switch (code)
8116 {
8117 case ARM_GE:
8118 case ARM_GT:
8119 case ARM_EQ:
8120 case ARM_VS:
8121 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
8122 case ARM_LT:
8123 case ARM_LE:
8124 case ARM_NE:
8125 case ARM_VC:
8126 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
8127 default:
8128 gcc_unreachable ();
8129 }
8130 return \"\";
8131 }"
8132 [(set_attr "conds" "use")
8133 (set_attr "type" "fcsel")]
8134 )
8135
8136 (define_insn "*cmovhf"
8137 [(set (match_operand:HF 0 "s_register_operand" "=t")
8138 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
8139 [(match_operand 2 "cc_register" "") (const_int 0)])
8140 (match_operand:HF 3 "s_register_operand" "t")
8141 (match_operand:HF 4 "s_register_operand" "t")))]
8142 "TARGET_VFP_FP16INST"
8143 "*
8144 {
8145 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
8146 switch (code)
8147 {
8148 case ARM_GE:
8149 case ARM_GT:
8150 case ARM_EQ:
8151 case ARM_VS:
8152 return \"vsel%d1.f16\\t%0, %3, %4\";
8153 case ARM_LT:
8154 case ARM_LE:
8155 case ARM_NE:
8156 case ARM_VC:
8157 return \"vsel%D1.f16\\t%0, %4, %3\";
8158 default:
8159 gcc_unreachable ();
8160 }
8161 return \"\";
8162 }"
8163 [(set_attr "conds" "use")
8164 (set_attr "type" "fcsel")]
8165 )
8166
8167 (define_insn_and_split "*movsicc_insn"
8168 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8169 (if_then_else:SI
8170 (match_operator 3 "arm_comparison_operator"
8171 [(match_operand 4 "cc_register" "") (const_int 0)])
8172 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8173 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8174 "TARGET_ARM"
8175 "@
8176 mov%D3\\t%0, %2
8177 mvn%D3\\t%0, #%B2
8178 mov%d3\\t%0, %1
8179 mvn%d3\\t%0, #%B1
8180 #
8181 #
8182 #
8183 #"
8184 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8185 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8186 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8187 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8188 "&& reload_completed"
8189 [(const_int 0)]
8190 {
8191 enum rtx_code rev_code;
8192 machine_mode mode;
8193 rtx rev_cond;
8194
8195 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
8196 operands[3],
8197 gen_rtx_SET (operands[0], operands[1])));
8198
8199 rev_code = GET_CODE (operands[3]);
8200 mode = GET_MODE (operands[4]);
8201 if (mode == CCFPmode || mode == CCFPEmode)
8202 rev_code = reverse_condition_maybe_unordered (rev_code);
8203 else
8204 rev_code = reverse_condition (rev_code);
8205
8206 rev_cond = gen_rtx_fmt_ee (rev_code,
8207 VOIDmode,
8208 operands[4],
8209 const0_rtx);
8210 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
8211 rev_cond,
8212 gen_rtx_SET (operands[0], operands[2])));
8213 DONE;
8214 }
8215 [(set_attr "length" "4,4,4,4,8,8,8,8")
8216 (set_attr "conds" "use")
8217 (set_attr_alternative "type"
8218 [(if_then_else (match_operand 2 "const_int_operand" "")
8219 (const_string "mov_imm")
8220 (const_string "mov_reg"))
8221 (const_string "mvn_imm")
8222 (if_then_else (match_operand 1 "const_int_operand" "")
8223 (const_string "mov_imm")
8224 (const_string "mov_reg"))
8225 (const_string "mvn_imm")
8226 (const_string "multiple")
8227 (const_string "multiple")
8228 (const_string "multiple")
8229 (const_string "multiple")])]
8230 )
8231
8232 (define_insn "*movsfcc_soft_insn"
8233 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8234 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8235 [(match_operand 4 "cc_register" "") (const_int 0)])
8236 (match_operand:SF 1 "s_register_operand" "0,r")
8237 (match_operand:SF 2 "s_register_operand" "r,0")))]
8238 "TARGET_ARM && TARGET_SOFT_FLOAT"
8239 "@
8240 mov%D3\\t%0, %2
8241 mov%d3\\t%0, %1"
8242 [(set_attr "conds" "use")
8243 (set_attr "type" "mov_reg")]
8244 )
8245
8246 \f
8247 ;; Jump and linkage insns
8248
8249 (define_expand "jump"
8250 [(set (pc)
8251 (label_ref (match_operand 0 "" "")))]
8252 "TARGET_EITHER"
8253 ""
8254 )
8255
8256 (define_insn "*arm_jump"
8257 [(set (pc)
8258 (label_ref (match_operand 0 "" "")))]
8259 "TARGET_32BIT"
8260 "*
8261 {
8262 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8263 {
8264 arm_ccfsm_state += 2;
8265 return \"\";
8266 }
8267 return \"b%?\\t%l0\";
8268 }
8269 "
8270 [(set_attr "predicable" "yes")
8271 (set (attr "length")
8272 (if_then_else
8273 (and (match_test "TARGET_THUMB2")
8274 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8275 (le (minus (match_dup 0) (pc)) (const_int 2048))))
8276 (const_int 2)
8277 (const_int 4)))
8278 (set_attr "type" "branch")]
8279 )
8280
8281 (define_expand "call"
8282 [(parallel [(call (match_operand 0 "memory_operand")
8283 (match_operand 1 "general_operand"))
8284 (use (match_operand 2 "" ""))
8285 (clobber (reg:SI LR_REGNUM))])]
8286 "TARGET_EITHER"
8287 "
8288 {
8289 rtx callee, pat;
8290 tree addr = MEM_EXPR (operands[0]);
8291
8292 /* In an untyped call, we can get NULL for operand 2. */
8293 if (operands[2] == NULL_RTX)
8294 operands[2] = const0_rtx;
8295
8296 /* Decide if we should generate indirect calls by loading the
8297 32-bit address of the callee into a register before performing the
8298 branch and link. */
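  /* For example, a callee declared with __attribute__((long_call)) (or
     affected by -mlong-calls) is forced into a register here and reached
     with an indirect call rather than a plain BL.  */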
8299 callee = XEXP (operands[0], 0);
8300 if (GET_CODE (callee) == SYMBOL_REF
8301 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8302 : !REG_P (callee))
8303 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8304
8305 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
8306 /* Indirect call: set r9 with FDPIC value of callee. */
8307 XEXP (operands[0], 0)
8308 = arm_load_function_descriptor (XEXP (operands[0], 0));
8309
8310 if (detect_cmse_nonsecure_call (addr))
8311 {
8312 pat = gen_nonsecure_call_internal (operands[0], operands[1],
8313 operands[2]);
8314 emit_call_insn (pat);
8315 }
8316 else
8317 {
8318 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8319 arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
8320 }
8321
8322 /* Restore FDPIC register (r9) after call. */
8323 if (TARGET_FDPIC)
8324 {
8325 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8326 rtx initial_fdpic_reg
8327 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
8328
8329 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
8330 initial_fdpic_reg));
8331 }
8332
8333 DONE;
8334 }"
8335 )
8336
8337 (define_insn "restore_pic_register_after_call"
8338 [(set (match_operand:SI 0 "s_register_operand" "+r,r")
8339 (unspec:SI [(match_dup 0)
8340 (match_operand:SI 1 "nonimmediate_operand" "r,m")]
8341 UNSPEC_PIC_RESTORE))]
8342 ""
8343 "@
8344 mov\t%0, %1
8345 ldr\t%0, %1"
8346 )
8347
8348 (define_expand "call_internal"
8349 [(parallel [(call (match_operand 0 "memory_operand")
8350 (match_operand 1 "general_operand"))
8351 (use (match_operand 2 "" ""))
8352 (clobber (reg:SI LR_REGNUM))])])
8353
8354 (define_expand "nonsecure_call_internal"
8355 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
8356 UNSPEC_NONSECURE_MEM)
8357 (match_operand 1 "general_operand"))
8358 (use (match_operand 2 "" ""))
8359 (clobber (reg:SI LR_REGNUM))])]
8360 "use_cmse"
8361 "
8362 {
8363 rtx tmp;
8364 tmp = copy_to_suggested_reg (XEXP (operands[0], 0),
8365 gen_rtx_REG (SImode, R4_REGNUM),
8366 SImode);
8367
8368 operands[0] = replace_equiv_address (operands[0], tmp);
8369 }")
8370
8371 (define_insn "*call_reg_armv5"
8372 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8373 (match_operand 1 "" ""))
8374 (use (match_operand 2 "" ""))
8375 (clobber (reg:SI LR_REGNUM))]
8376 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
8377 "blx%?\\t%0"
8378 [(set_attr "type" "call")]
8379 )
8380
8381 (define_insn "*call_reg_arm"
8382 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8383 (match_operand 1 "" ""))
8384 (use (match_operand 2 "" ""))
8385 (clobber (reg:SI LR_REGNUM))]
8386 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
8387 "*
8388 return output_call (operands);
8389 "
8390 ;; The length is the worst case; normally only two instructions are needed.
8391 [(set_attr "length" "12")
8392 (set_attr "type" "call")]
8393 )
8394
8395
8396 (define_expand "call_value"
8397 [(parallel [(set (match_operand 0 "" "")
8398 (call (match_operand 1 "memory_operand")
8399 (match_operand 2 "general_operand")))
8400 (use (match_operand 3 "" ""))
8401 (clobber (reg:SI LR_REGNUM))])]
8402 "TARGET_EITHER"
8403 "
8404 {
8405 rtx pat, callee;
8406 tree addr = MEM_EXPR (operands[1]);
8407
8408 /* In an untyped call, we can get NULL for operand 3. */
8409 if (operands[3] == 0)
8410 operands[3] = const0_rtx;
8411
8412 /* Decide if we should generate indirect calls by loading the
8413 32-bit address of the callee into a register before performing the
8414 branch and link. */
8415 callee = XEXP (operands[1], 0);
8416 if (GET_CODE (callee) == SYMBOL_REF
8417 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8418 : !REG_P (callee))
8419 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8420
8421 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
8422 /* Indirect call: set r9 with FDPIC value of callee. */
8423 XEXP (operands[1], 0)
8424 = arm_load_function_descriptor (XEXP (operands[1], 0));
8425
8426 if (detect_cmse_nonsecure_call (addr))
8427 {
8428 pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
8429 operands[2], operands[3]);
8430 emit_call_insn (pat);
8431 }
8432 else
8433 {
8434 pat = gen_call_value_internal (operands[0], operands[1],
8435 operands[2], operands[3]);
8436 arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
8437 }
8438
8439 /* Restore FDPIC register (r9) after call. */
8440 if (TARGET_FDPIC)
8441 {
8442 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8443 rtx initial_fdpic_reg
8444 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
8445
8446 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
8447 initial_fdpic_reg));
8448 }
8449
8450 DONE;
8451 }"
8452 )
8453
8454 (define_expand "call_value_internal"
8455 [(parallel [(set (match_operand 0 "" "")
8456 (call (match_operand 1 "memory_operand")
8457 (match_operand 2 "general_operand")))
8458 (use (match_operand 3 "" ""))
8459 (clobber (reg:SI LR_REGNUM))])])
8460
8461 (define_expand "nonsecure_call_value_internal"
8462 [(parallel [(set (match_operand 0 "" "")
8463 (call (unspec:SI [(match_operand 1 "memory_operand")]
8464 UNSPEC_NONSECURE_MEM)
8465 (match_operand 2 "general_operand")))
8466 (use (match_operand 3 "" ""))
8467 (clobber (reg:SI LR_REGNUM))])]
8468 "use_cmse"
8469 "
8470 {
8471 rtx tmp;
8472 tmp = copy_to_suggested_reg (XEXP (operands[1], 0),
8473 gen_rtx_REG (SImode, R4_REGNUM),
8474 SImode);
8475
8476 operands[1] = replace_equiv_address (operands[1], tmp);
8477 }")
8478
8479 (define_insn "*call_value_reg_armv5"
8480 [(set (match_operand 0 "" "")
8481 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8482 (match_operand 2 "" "")))
8483 (use (match_operand 3 "" ""))
8484 (clobber (reg:SI LR_REGNUM))]
8485 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
8486 "blx%?\\t%1"
8487 [(set_attr "type" "call")]
8488 )
8489
8490 (define_insn "*call_value_reg_arm"
8491 [(set (match_operand 0 "" "")
8492 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8493 (match_operand 2 "" "")))
8494 (use (match_operand 3 "" ""))
8495 (clobber (reg:SI LR_REGNUM))]
8496 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
8497 "*
8498 return output_call (&operands[1]);
8499 "
8500 [(set_attr "length" "12")
8501 (set_attr "type" "call")]
8502 )
8503
8504 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8505 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
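;; For example, with operand 0 being (symbol_ref "memcpy"), the template
;; "bl%?\\t%a0" prints as "bl memcpy" rather than with a leading '#'.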
8506
8507 (define_insn "*call_symbol"
8508 [(call (mem:SI (match_operand:SI 0 "" ""))
8509 (match_operand 1 "" ""))
8510 (use (match_operand 2 "" ""))
8511 (clobber (reg:SI LR_REGNUM))]
8512 "TARGET_32BIT
8513 && !SIBLING_CALL_P (insn)
8514 && (GET_CODE (operands[0]) == SYMBOL_REF)
8515 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8516 "*
8517 {
8518 rtx op = operands[0];
8519
8520 /* Switch mode now when possible. */
8521 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8522 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8523 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
8524
8525 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8526 }"
8527 [(set_attr "type" "call")]
8528 )
8529
8530 (define_insn "*call_value_symbol"
8531 [(set (match_operand 0 "" "")
8532 (call (mem:SI (match_operand:SI 1 "" ""))
8533 (match_operand:SI 2 "" "")))
8534 (use (match_operand 3 "" ""))
8535 (clobber (reg:SI LR_REGNUM))]
8536 "TARGET_32BIT
8537 && !SIBLING_CALL_P (insn)
8538 && (GET_CODE (operands[1]) == SYMBOL_REF)
8539 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8540 "*
8541 {
8542 rtx op = operands[1];
8543
8544 /* Switch mode now when possible. */
8545 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8546 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8547 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
8548
8549 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8550 }"
8551 [(set_attr "type" "call")]
8552 )
8553
8554 (define_expand "sibcall_internal"
8555 [(parallel [(call (match_operand 0 "memory_operand")
8556 (match_operand 1 "general_operand"))
8557 (return)
8558 (use (match_operand 2 "" ""))])])
8559
8560 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8561 (define_expand "sibcall"
8562 [(parallel [(call (match_operand 0 "memory_operand")
8563 (match_operand 1 "general_operand"))
8564 (return)
8565 (use (match_operand 2 "" ""))])]
8566 "TARGET_32BIT"
8567 "
8568 {
8569 rtx pat;
8570
8571 if ((!REG_P (XEXP (operands[0], 0))
8572 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
8573 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
8574 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
8575 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
8576
8577 if (operands[2] == NULL_RTX)
8578 operands[2] = const0_rtx;
8579
8580 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
8581 arm_emit_call_insn (pat, operands[0], true);
8582 DONE;
8583 }"
8584 )
8585
8586 (define_expand "sibcall_value_internal"
8587 [(parallel [(set (match_operand 0 "" "")
8588 (call (match_operand 1 "memory_operand")
8589 (match_operand 2 "general_operand")))
8590 (return)
8591 (use (match_operand 3 "" ""))])])
8592
8593 (define_expand "sibcall_value"
8594 [(parallel [(set (match_operand 0 "" "")
8595 (call (match_operand 1 "memory_operand")
8596 (match_operand 2 "general_operand")))
8597 (return)
8598 (use (match_operand 3 "" ""))])]
8599 "TARGET_32BIT"
8600 "
8601 {
8602 rtx pat;
8603
8604 if ((!REG_P (XEXP (operands[1], 0))
8605 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
8606 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
8607 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
8608 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
8609
8610 if (operands[3] == NULL_RTX)
8611 operands[3] = const0_rtx;
8612
8613 pat = gen_sibcall_value_internal (operands[0], operands[1],
8614 operands[2], operands[3]);
8615 arm_emit_call_insn (pat, operands[1], true);
8616 DONE;
8617 }"
8618 )
8619
8620 (define_insn "*sibcall_insn"
8621 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
8622 (match_operand 1 "" ""))
8623 (return)
8624 (use (match_operand 2 "" ""))]
8625 "TARGET_32BIT && SIBLING_CALL_P (insn)"
8626 "*
8627 if (which_alternative == 1)
8628 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8629 else
8630 {
8631 if (arm_arch5t || arm_arch4t)
8632 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
8633 else
8634 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
8635 }
8636 "
8637 [(set_attr "type" "call")]
8638 )
8639
8640 (define_insn "*sibcall_value_insn"
8641 [(set (match_operand 0 "" "")
8642 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
8643 (match_operand 2 "" "")))
8644 (return)
8645 (use (match_operand 3 "" ""))]
8646 "TARGET_32BIT && SIBLING_CALL_P (insn)"
8647 "*
8648 if (which_alternative == 1)
8649 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8650 else
8651 {
8652 if (arm_arch5t || arm_arch4t)
8653 return \"bx%?\\t%1\";
8654 else
8655 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
8656 }
8657 "
8658 [(set_attr "type" "call")]
8659 )
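
;; A hedged illustration (assumed example, not from these sources) of when
;; the sibcall patterns above apply: a tail call whose result is returned
;; unchanged can, at -O2, be compiled into a direct branch instead of a
;; bl/return pair, matching the "b"/"bx" templates above.
;;
;;   int callee (int);
;;
;;   int caller (int x)
;;   {
;;     return callee (x + 1);   /* may become: add r0, r0, #1 ; b callee */
;;   }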
8660
8661 (define_expand "<return_str>return"
8662 [(RETURNS)]
8663 "(TARGET_ARM || (TARGET_THUMB2
8664 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
8665 && !IS_STACKALIGN (arm_current_func_type ())))
8666 <return_cond_false>"
8667 "
8668 {
8669 if (TARGET_THUMB2)
8670 {
8671 thumb2_expand_return (<return_simple_p>);
8672 DONE;
8673 }
8674 }
8675 "
8676 )
8677
8678 ;; Often the return insn will be the same as loading from memory, so set the type attribute accordingly.
8679 (define_insn "*arm_return"
8680 [(return)]
8681 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8682 "*
8683 {
8684 if (arm_ccfsm_state == 2)
8685 {
8686 arm_ccfsm_state += 2;
8687 return \"\";
8688 }
8689 return output_return_instruction (const_true_rtx, true, false, false);
8690 }"
8691 [(set_attr "type" "load_4")
8692 (set_attr "length" "12")
8693 (set_attr "predicable" "yes")]
8694 )
8695
8696 (define_insn "*cond_<return_str>return"
8697 [(set (pc)
8698 (if_then_else (match_operator 0 "arm_comparison_operator"
8699 [(match_operand 1 "cc_register" "") (const_int 0)])
8700 (RETURNS)
8701 (pc)))]
8702 "TARGET_ARM <return_cond_true>"
8703 "*
8704 {
8705 if (arm_ccfsm_state == 2)
8706 {
8707 arm_ccfsm_state += 2;
8708 return \"\";
8709 }
8710 return output_return_instruction (operands[0], true, false,
8711 <return_simple_p>);
8712 }"
8713 [(set_attr "conds" "use")
8714 (set_attr "length" "12")
8715 (set_attr "type" "load_4")]
8716 )
8717
8718 (define_insn "*cond_<return_str>return_inverted"
8719 [(set (pc)
8720 (if_then_else (match_operator 0 "arm_comparison_operator"
8721 [(match_operand 1 "cc_register" "") (const_int 0)])
8722 (pc)
8723 (RETURNS)))]
8724 "TARGET_ARM <return_cond_true>"
8725 "*
8726 {
8727 if (arm_ccfsm_state == 2)
8728 {
8729 arm_ccfsm_state += 2;
8730 return \"\";
8731 }
8732 return output_return_instruction (operands[0], true, true,
8733 <return_simple_p>);
8734 }"
8735 [(set_attr "conds" "use")
8736 (set_attr "length" "12")
8737 (set_attr "type" "load_4")]
8738 )
8739
8740 (define_insn "*arm_simple_return"
8741 [(simple_return)]
8742 "TARGET_ARM"
8743 "*
8744 {
8745 if (arm_ccfsm_state == 2)
8746 {
8747 arm_ccfsm_state += 2;
8748 return \"\";
8749 }
8750 return output_return_instruction (const_true_rtx, true, false, true);
8751 }"
8752 [(set_attr "type" "branch")
8753 (set_attr "length" "4")
8754 (set_attr "predicable" "yes")]
8755 )
8756
8757 ;; Generate a sequence of instructions to determine if the processor is
8758 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8759 ;; mask.
8760
8761 (define_expand "return_addr_mask"
8762 [(set (match_dup 1)
8763 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8764 (const_int 0)))
8765 (set (match_operand:SI 0 "s_register_operand")
8766 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8767 (const_int -1)
8768 (const_int 67108860)))] ; 0x03fffffc
8769 "TARGET_ARM"
8770 "
8771 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8772 ")
8773
8774 (define_insn "*check_arch2"
8775 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8776 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8777 (const_int 0)))]
8778 "TARGET_ARM"
8779 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8780 [(set_attr "length" "8")
8781 (set_attr "conds" "set")
8782 (set_attr "type" "multiple")]
8783 )
8784
8785 ;; Call subroutine returning any type.
8786
8787 (define_expand "untyped_call"
8788 [(parallel [(call (match_operand 0 "" "")
8789 (const_int 0))
8790 (match_operand 1 "" "")
8791 (match_operand 2 "" "")])]
8792 "TARGET_EITHER && !TARGET_FDPIC"
8793 "
8794 {
8795 int i;
8796 rtx par = gen_rtx_PARALLEL (VOIDmode,
8797 rtvec_alloc (XVECLEN (operands[2], 0)));
8798 rtx addr = gen_reg_rtx (Pmode);
8799 rtx mem;
8800 int size = 0;
8801
8802 emit_move_insn (addr, XEXP (operands[1], 0));
8803 mem = change_address (operands[1], BLKmode, addr);
8804
8805 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8806 {
8807 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8808
8809 /* Default code only uses r0 as a return value, but we could
8810 be using anything up to 4 registers. */
8811 if (REGNO (src) == R0_REGNUM)
8812 src = gen_rtx_REG (TImode, R0_REGNUM);
8813
8814 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8815 GEN_INT (size));
8816 size += GET_MODE_SIZE (GET_MODE (src));
8817 }
8818
8819 emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
8820
8821 size = 0;
8822
8823 for (i = 0; i < XVECLEN (par, 0); i++)
8824 {
8825 HOST_WIDE_INT offset = 0;
8826 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8827
8828 if (size != 0)
8829 emit_move_insn (addr, plus_constant (Pmode, addr, size));
8830
8831 mem = change_address (mem, GET_MODE (reg), NULL);
8832 if (REGNO (reg) == R0_REGNUM)
8833 {
8834 /* On Thumb we have to use a write-back instruction. */
8835 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
8836 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8837 size = TARGET_ARM ? 16 : 0;
8838 }
8839 else
8840 {
8841 emit_move_insn (mem, reg);
8842 size = GET_MODE_SIZE (GET_MODE (reg));
8843 }
8844 }
8845
8846 /* The optimizer does not know that the call sets the function value
8847 registers we stored in the result block. We avoid problems by
8848 claiming that all hard registers are used and clobbered at this
8849 point. */
8850 emit_insn (gen_blockage ());
8851
8852 DONE;
8853 }"
8854 )
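
;; For reference, an illustrative sketch (not taken from these sources):
;; the untyped_call pattern backs __builtin_apply, which forwards a block
;; of arguments to a callee of unknown return type and then captures every
;; possible value-return register into a result block.
;;
;;   void *forward (void (*fn) (), void *args, int size)
;;   {
;;     return __builtin_apply (fn, args, size);
;;   }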
8855
8856 (define_expand "untyped_return"
8857 [(match_operand:BLK 0 "memory_operand")
8858 (match_operand 1 "" "")]
8859 "TARGET_EITHER && !TARGET_FDPIC"
8860 "
8861 {
8862 int i;
8863 rtx addr = gen_reg_rtx (Pmode);
8864 rtx mem;
8865 int size = 0;
8866
8867 emit_move_insn (addr, XEXP (operands[0], 0));
8868 mem = change_address (operands[0], BLKmode, addr);
8869
8870 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8871 {
8872 HOST_WIDE_INT offset = 0;
8873 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8874
8875 if (size != 0)
8876 emit_move_insn (addr, plus_constant (Pmode, addr, size));
8877
8878 mem = change_address (mem, GET_MODE (reg), NULL);
8879 if (REGNO (reg) == R0_REGNUM)
8880 {
8881 /* On Thumb we have to use a write-back instruction. */
8882 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
8883 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8884 size = TARGET_ARM ? 16 : 0;
8885 }
8886 else
8887 {
8888 emit_move_insn (reg, mem);
8889 size = GET_MODE_SIZE (GET_MODE (reg));
8890 }
8891 }
8892
8893 /* Emit USE insns before the return. */
8894 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8895 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8896
8897 /* Construct the return. */
8898 expand_naked_return ();
8899
8900 DONE;
8901 }"
8902 )
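
;; A companion sketch (hedged, not from these sources) for untyped_return:
;; it backs __builtin_return, which hands back a result block previously
;; captured by __builtin_apply, reloading every possible value-return
;; register before returning.
;;
;;   void return_forwarded (void *result)
;;   {
;;     __builtin_return (result);
;;   }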
8903
8904 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8905 ;; all of memory. This blocks insns from being moved across this point.
8906
8907 (define_insn "blockage"
8908 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8909 "TARGET_EITHER"
8910 ""
8911 [(set_attr "length" "0")
8912 (set_attr "type" "block")]
8913 )
8914
8915 ;; Since r0 is hard-coded here, use the 'o' constraint to prevent emitting
8916 ;; auto-increment addressing modes with r0 as the base register, which
8917 ;; would provoke undefined behaviour in the hardware.
8918 (define_insn "probe_stack"
8919 [(set (match_operand:SI 0 "memory_operand" "=o")
8920 (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
8921 "TARGET_32BIT"
8922 "str%?\\tr0, %0"
8923 [(set_attr "type" "store_4")
8924 (set_attr "predicable" "yes")]
8925 )
8926
8927 (define_insn "probe_stack_range"
8928 [(set (match_operand:SI 0 "register_operand" "=r")
8929 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
8930 (match_operand:SI 2 "register_operand" "r")]
8931 VUNSPEC_PROBE_STACK_RANGE))]
8932 "TARGET_32BIT"
8933 {
8934 return output_probe_stack_range (operands[0], operands[2]);
8935 }
8936 [(set_attr "type" "multiple")
8937 (set_attr "conds" "clob")]
8938 )
8939
8940 ;; Named patterns for stack smashing protection.
8941 (define_expand "stack_protect_combined_set"
8942 [(parallel
8943 [(set (match_operand:SI 0 "memory_operand")
8944 (unspec:SI [(match_operand:SI 1 "guard_operand")]
8945 UNSPEC_SP_SET))
8946 (clobber (match_scratch:SI 2 ""))
8947 (clobber (match_scratch:SI 3 ""))])]
8948 ""
8949 ""
8950 )
8951
8952 ;; Use a separate insn from the above expand so that the mem lies outside
8953 ;; operand #1 by the time register allocation runs. This is needed to stop
8954 ;; LRA from trying to reload the guard, since we need to control how PIC
8955 ;; access is done in the -fpic/-fPIC case (see the COMPUTE_NOW parameter
8956 ;; passed to legitimize_pic_address ()).
8957 (define_insn_and_split "*stack_protect_combined_set_insn"
8958 [(set (match_operand:SI 0 "memory_operand" "=m,m")
8959 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
8960 UNSPEC_SP_SET))
8961 (clobber (match_scratch:SI 2 "=&l,&r"))
8962 (clobber (match_scratch:SI 3 "=&l,&r"))]
8963 ""
8964 "#"
8965 "reload_completed"
8966 [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
8967 UNSPEC_SP_SET))
8968 (clobber (match_dup 2))])]
8969 "
8970 {
8971 if (flag_pic)
8972 {
8973 rtx pic_reg;
8974
8975 if (TARGET_FDPIC)
8976 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8977 else
8978 pic_reg = operands[3];
8979
8980 /* Forces recomputing of GOT base now. */
8981 legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
8982 true /*compute_now*/);
8983 }
8984 else
8985 {
8986 if (address_operand (operands[1], SImode))
8987 operands[2] = operands[1];
8988 else
8989 {
8990 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
8991 emit_move_insn (operands[2], mem);
8992 }
8993 }
8994 }"
8995 [(set_attr "arch" "t1,32")]
8996 )
8997
8998 ;; DO NOT SPLIT THIS INSN. It's important for security reasons that the
8999 ;; canary value does not live beyond the life of this sequence.
9000 (define_insn "*stack_protect_set_insn"
9001 [(set (match_operand:SI 0 "memory_operand" "=m,m")
9002 (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
9003 UNSPEC_SP_SET))
9004 (clobber (match_dup 1))]
9005 ""
9006 "@
9007 ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
9008 ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
9009 [(set_attr "length" "8,12")
9010 (set_attr "conds" "clob,nocond")
9011 (set_attr "type" "multiple")
9012 (set_attr "arch" "t1,32")]
9013 )
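
;; A rough, hedged illustration (assumed example, not from these sources):
;; with -fstack-protector-strong, a function with a local character array
;; gets a canary store in its prologue through the pattern above, and the
;; register that held the canary is cleared straight away (the trailing
;; movs/mov #0 in the templates).
;;
;;   void fill (const char *s)
;;   {
;;     char buf[64];                /* triggers stack protection       */
;;     __builtin_strcpy (buf, s);   /* canary checked in the epilogue  */
;;   }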
9014
9015 (define_expand "stack_protect_combined_test"
9016 [(parallel
9017 [(set (pc)
9018 (if_then_else
9019 (eq (match_operand:SI 0 "memory_operand")
9020 (unspec:SI [(match_operand:SI 1 "guard_operand")]
9021 UNSPEC_SP_TEST))
9022 (label_ref (match_operand 2))
9023 (pc)))
9024 (clobber (match_scratch:SI 3 ""))
9025 (clobber (match_scratch:SI 4 ""))
9026 (clobber (reg:CC CC_REGNUM))])]
9027 ""
9028 ""
9029 )
9030
9031 ;; Use a separate insn from the above expand so that the mem lies outside
9032 ;; operand #1 by the time register allocation runs. This is needed to stop
9033 ;; LRA from trying to reload the guard, since we need to control how PIC
9034 ;; access is done in the -fpic/-fPIC case (see the COMPUTE_NOW parameter
9035 ;; passed to legitimize_pic_address ()).
9036 (define_insn_and_split "*stack_protect_combined_test_insn"
9037 [(set (pc)
9038 (if_then_else
9039 (eq (match_operand:SI 0 "memory_operand" "m,m")
9040 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
9041 UNSPEC_SP_TEST))
9042 (label_ref (match_operand 2))
9043 (pc)))
9044 (clobber (match_scratch:SI 3 "=&l,&r"))
9045 (clobber (match_scratch:SI 4 "=&l,&r"))
9046 (clobber (reg:CC CC_REGNUM))]
9047 ""
9048 "#"
9049 "reload_completed"
9050 [(const_int 0)]
9051 {
9052 rtx eq;
9053
9054 if (flag_pic)
9055 {
9056 rtx pic_reg;
9057
9058 if (TARGET_FDPIC)
9059 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
9060 else
9061 pic_reg = operands[4];
9062
9063 /* Forces recomputing of GOT base now. */
9064 legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
9065 true /*compute_now*/);
9066 }
9067 else
9068 {
9069 if (address_operand (operands[1], SImode))
9070 operands[3] = operands[1];
9071 else
9072 {
9073 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
9074 emit_move_insn (operands[3], mem);
9075 }
9076 }
9077 if (TARGET_32BIT)
9078 {
9079 emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
9080 operands[3]));
9081 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
9082 eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
9083 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
9084 }
9085 else
9086 {
9087 emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
9088 operands[3]));
9089 eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
9090 emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
9091 operands[2]));
9092 }
9093 DONE;
9094 }
9095 [(set_attr "arch" "t1,32")]
9096 )
9097
9098 (define_insn "arm_stack_protect_test_insn"
9099 [(set (reg:CC_Z CC_REGNUM)
9100 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
9101 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
9102 UNSPEC_SP_TEST)
9103 (const_int 0)))
9104 (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
9105 (clobber (match_dup 2))]
9106 "TARGET_32BIT"
9107 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0"
9108 [(set_attr "length" "8,12")
9109 (set_attr "conds" "set")
9110 (set_attr "type" "multiple")
9111 (set_attr "arch" "t,32")]
9112 )
9113
9114 (define_expand "casesi"
9115 [(match_operand:SI 0 "s_register_operand") ; index to jump on
9116 (match_operand:SI 1 "const_int_operand") ; lower bound
9117 (match_operand:SI 2 "const_int_operand") ; total range
9118 (match_operand:SI 3 "" "") ; table label
9119 (match_operand:SI 4 "" "")] ; Out of range label
9120 "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
9121 "
9122 {
9123 enum insn_code code;
9124 if (operands[1] != const0_rtx)
9125 {
9126 rtx reg = gen_reg_rtx (SImode);
9127
9128 emit_insn (gen_addsi3 (reg, operands[0],
9129 gen_int_mode (-INTVAL (operands[1]),
9130 SImode)));
9131 operands[0] = reg;
9132 }
9133
9134 if (TARGET_ARM)
9135 code = CODE_FOR_arm_casesi_internal;
9136 else if (TARGET_THUMB1)
9137 code = CODE_FOR_thumb1_casesi_internal_pic;
9138 else if (flag_pic)
9139 code = CODE_FOR_thumb2_casesi_internal_pic;
9140 else
9141 code = CODE_FOR_thumb2_casesi_internal;
9142
9143 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
9144 operands[2] = force_reg (SImode, operands[2]);
9145
9146 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
9147 operands[3], operands[4]));
9148 DONE;
9149 }"
9150 )
9151
9152 ;; The USE in this pattern is needed to tell flow analysis that this is
9153 ;; a CASESI insn. It has no other purpose.
9154 (define_expand "arm_casesi_internal"
9155 [(parallel [(set (pc)
9156 (if_then_else
9157 (leu (match_operand:SI 0 "s_register_operand")
9158 (match_operand:SI 1 "arm_rhs_operand"))
9159 (match_dup 4)
9160 (label_ref:SI (match_operand 3 ""))))
9161 (clobber (reg:CC CC_REGNUM))
9162 (use (label_ref:SI (match_operand 2 "")))])]
9163 "TARGET_ARM"
9164 {
9165 operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
9166 operands[4] = gen_rtx_PLUS (SImode, operands[4],
9167 gen_rtx_LABEL_REF (SImode, operands[2]));
9168 operands[4] = gen_rtx_MEM (SImode, operands[4]);
9169 MEM_READONLY_P (operands[4]) = 1;
9170 MEM_NOTRAP_P (operands[4]) = 1;
9171 })
9172
9173 (define_insn "*arm_casesi_internal"
9174 [(parallel [(set (pc)
9175 (if_then_else
9176 (leu (match_operand:SI 0 "s_register_operand" "r")
9177 (match_operand:SI 1 "arm_rhs_operand" "rI"))
9178 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
9179 (label_ref:SI (match_operand 2 "" ""))))
9180 (label_ref:SI (match_operand 3 "" ""))))
9181 (clobber (reg:CC CC_REGNUM))
9182 (use (label_ref:SI (match_dup 2)))])]
9183 "TARGET_ARM"
9184 "*
9185 if (flag_pic)
9186 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
9187 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
9188 "
9189 [(set_attr "conds" "clob")
9190 (set_attr "length" "12")
9191 (set_attr "type" "multiple")]
9192 )
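
;; Illustrative only (assumed source code, not from this file): a dense
;; switch statement is what can reach the casesi expander, and in ARM state
;; the *arm_casesi_internal template above emits the bounds check plus the
;; "ldrls pc, [pc, index, asl #2]" table dispatch shown in its output string.
;;
;;   int classify (int x)
;;   {
;;     switch (x)
;;       {
;;       case 0: return 10;
;;       case 1: return 20;
;;       case 2: return 30;
;;       case 3: return 40;
;;       default: return -1;
;;       }
;;   }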
9193
9194 (define_expand "indirect_jump"
9195 [(set (pc)
9196 (match_operand:SI 0 "s_register_operand"))]
9197 "TARGET_EITHER"
9198 "
9199 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
9200 address and use bx. */
9201 if (TARGET_THUMB2)
9202 {
9203 rtx tmp;
9204 tmp = gen_reg_rtx (SImode);
9205 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
9206 operands[0] = tmp;
9207 }
9208 "
9209 )
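
;; A small, hedged example (assumed, not from these sources) of what uses
;; indirect_jump: a GNU C computed goto jumps through a register, which in
;; ARM state becomes the "mov pc, reg" form below, while Thumb-2 first sets
;; the low bit of the address so that bx can be used, as the expander above
;; explains.
;;
;;   int dispatch (int op)
;;   {
;;     static void *table[] = { &&op_add, &&op_sub };
;;     goto *table[op];             /* computed goto -> indirect_jump */
;;   op_add:
;;     return 1;
;;   op_sub:
;;     return 2;
;;   }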
9210
9211 ;; NB Never uses BX.
9212 (define_insn "*arm_indirect_jump"
9213 [(set (pc)
9214 (match_operand:SI 0 "s_register_operand" "r"))]
9215 "TARGET_ARM"
9216 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
9217 [(set_attr "predicable" "yes")
9218 (set_attr "type" "branch")]
9219 )
9220
9221 (define_insn "*load_indirect_jump"
9222 [(set (pc)
9223 (match_operand:SI 0 "memory_operand" "m"))]
9224 "TARGET_ARM"
9225 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
9226 [(set_attr "type" "load_4")
9227 (set_attr "pool_range" "4096")
9228 (set_attr "neg_pool_range" "4084")
9229 (set_attr "predicable" "yes")]
9230 )
9231
9232 \f
9233 ;; Misc insns
9234
9235 (define_insn "nop"
9236 [(const_int 0)]
9237 "TARGET_EITHER"
9238 "nop"
9239 [(set (attr "length")
9240 (if_then_else (eq_attr "is_thumb" "yes")
9241 (const_int 2)
9242 (const_int 4)))
9243 (set_attr "type" "mov_reg")]
9244 )
9245
9246 (define_insn "trap"
9247 [(trap_if (const_int 1) (const_int 0))]
9248 ""
9249 "*
9250 if (TARGET_ARM)
9251 return \".inst\\t0xe7f000f0\";
9252 else
9253 return \".inst\\t0xdeff\";
9254 "
9255 [(set (attr "length")
9256 (if_then_else (eq_attr "is_thumb" "yes")
9257 (const_int 2)
9258 (const_int 4)))
9259 (set_attr "type" "trap")
9260 (set_attr "conds" "unconditional")]
9261 )
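
;; For illustration (a hedged sketch, not from these sources):
;; __builtin_trap expands through the "trap" pattern above, so an
;; unconditional trap in C becomes the permanently undefined encoding
;; .inst 0xe7f000f0 in ARM state or .inst 0xdeff in Thumb state.
;;
;;   void die (void)
;;   {
;;     __builtin_trap ();           /* emits the encodings shown above */
;;   }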
9262
9263 \f
9264 ;; Patterns to allow combination of arithmetic, cond code and shifts
9265
9266 (define_insn "*<arith_shift_insn>_multsi"
9267 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9268 (SHIFTABLE_OPS:SI
9269 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
9270 (match_operand:SI 3 "power_of_two_operand" ""))
9271 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
9272 "TARGET_32BIT"
9273 "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
9274 [(set_attr "predicable" "yes")
9275 (set_attr "shift" "2")
9276 (set_attr "arch" "a,t2")
9277 (set_attr "type" "alu_shift_imm")])
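
;; A worked example (illustrative assumption, not from these sources): the
;; pattern above folds a multiply by a power of two into the shifter operand
;; of the arithmetic instruction, so scaled index arithmetic needs a single
;; insn.
;;
;;   int scale_add (int base, int idx)
;;   {
;;     return base + idx * 4;       /* add r0, r0, r1, lsl #2 */
;;   }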
9278
9279 (define_insn "*<arith_shift_insn>_shiftsi"
9280 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9281 (SHIFTABLE_OPS:SI
9282 (match_operator:SI 2 "shift_nomul_operator"
9283 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9284 (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
9285 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
9286 "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
9287 "<arith_shift_insn>%?\\t%0, %1, %3%S2"
9288 [(set_attr "predicable" "yes")
9289 (set_attr "shift" "3")
9290 (set_attr "arch" "a,t2,a")
9291 (set_attr "type" "alu_shift_imm,alu_shift_imm,alu_shift_reg")])
9292
9293 (define_split
9294 [(set (match_operand:SI 0 "s_register_operand" "")
9295 (match_operator:SI 1 "shiftable_operator"
9296 [(match_operator:SI 2 "shiftable_operator"
9297 [(match_operator:SI 3 "shift_operator"
9298 [(match_operand:SI 4 "s_register_operand" "")
9299 (match_operand:SI 5 "reg_or_int_operand" "")])
9300 (match_operand:SI 6 "s_register_operand" "")])
9301 (match_operand:SI 7 "arm_rhs_operand" "")]))
9302 (clobber (match_operand:SI 8 "s_register_operand" ""))]
9303 "TARGET_32BIT"
9304 [(set (match_dup 8)
9305 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9306 (match_dup 6)]))
9307 (set (match_dup 0)
9308 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
9309 "")
9310
9311 (define_insn "*arith_shiftsi_compare0"
9312 [(set (reg:CC_NOOV CC_REGNUM)
9313 (compare:CC_NOOV
9314 (match_operator:SI 1 "shiftable_operator"
9315 [(match_operator:SI 3 "shift_operator"
9316 [(match_operand:SI 4 "s_register_operand" "r,r")
9317 (match_operand:SI 5 "shift_amount_operand" "M,r")])
9318 (match_operand:SI 2 "s_register_operand" "r,r")])
9319 (const_int 0)))
9320 (set (match_operand:SI 0 "s_register_operand" "=r,r")
9321 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9322 (match_dup 2)]))]
9323 "TARGET_32BIT"
9324 "%i1s%?\\t%0, %2, %4%S3"
9325 [(set_attr "conds" "set")
9326 (set_attr "shift" "4")
9327 (set_attr "arch" "32,a")
9328 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9329
9330 (define_insn "*arith_shiftsi_compare0_scratch"
9331 [(set (reg:CC_NOOV CC_REGNUM)
9332 (compare:CC_NOOV
9333 (match_operator:SI 1 "shiftable_operator"
9334 [(match_operator:SI 3 "shift_operator"
9335 [(match_operand:SI 4 "s_register_operand" "r,r")
9336 (match_operand:SI 5 "shift_amount_operand" "M,r")])
9337 (match_operand:SI 2 "s_register_operand" "r,r")])
9338 (const_int 0)))
9339 (clobber (match_scratch:SI 0 "=r,r"))]
9340 "TARGET_32BIT"
9341 "%i1s%?\\t%0, %2, %4%S3"
9342 [(set_attr "conds" "set")
9343 (set_attr "shift" "4")
9344 (set_attr "arch" "32,a")
9345 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9346
9347 (define_insn "*sub_shiftsi"
9348 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9349 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9350 (match_operator:SI 2 "shift_operator"
9351 [(match_operand:SI 3 "s_register_operand" "r,r")
9352 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
9353 "TARGET_32BIT"
9354 "sub%?\\t%0, %1, %3%S2"
9355 [(set_attr "predicable" "yes")
9356 (set_attr "predicable_short_it" "no")
9357 (set_attr "shift" "3")
9358 (set_attr "arch" "32,a")
9359 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9360
9361 (define_insn "*sub_shiftsi_compare0"
9362 [(set (reg:CC_NOOV CC_REGNUM)
9363 (compare:CC_NOOV
9364 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9365 (match_operator:SI 2 "shift_operator"
9366 [(match_operand:SI 3 "s_register_operand" "r,r")
9367 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
9368 (const_int 0)))
9369 (set (match_operand:SI 0 "s_register_operand" "=r,r")
9370 (minus:SI (match_dup 1)
9371 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
9372 "TARGET_32BIT"
9373 "subs%?\\t%0, %1, %3%S2"
9374 [(set_attr "conds" "set")
9375 (set_attr "shift" "3")
9376 (set_attr "arch" "32,a")
9377 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9378
9379 (define_insn "*sub_shiftsi_compare0_scratch"
9380 [(set (reg:CC_NOOV CC_REGNUM)
9381 (compare:CC_NOOV
9382 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9383 (match_operator:SI 2 "shift_operator"
9384 [(match_operand:SI 3 "s_register_operand" "r,r")
9385 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
9386 (const_int 0)))
9387 (clobber (match_scratch:SI 0 "=r,r"))]
9388 "TARGET_32BIT"
9389 "subs%?\\t%0, %1, %3%S2"
9390 [(set_attr "conds" "set")
9391 (set_attr "shift" "3")
9392 (set_attr "arch" "32,a")
9393 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9394 \f
9395
9396 (define_insn_and_split "*and_scc"
9397 [(set (match_operand:SI 0 "s_register_operand" "=r")
9398 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9399 [(match_operand 2 "cc_register" "") (const_int 0)])
9400 (match_operand:SI 3 "s_register_operand" "r")))]
9401 "TARGET_ARM"
9402 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
9403 "&& reload_completed"
9404 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
9405 (cond_exec (match_dup 4) (set (match_dup 0)
9406 (and:SI (match_dup 3) (const_int 1))))]
9407 {
9408 machine_mode mode = GET_MODE (operands[2]);
9409 enum rtx_code rc = GET_CODE (operands[1]);
9410
9411 /* Note that operands[4] is the same as operands[1],
9412 but with VOIDmode as the result. */
9413 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9414 if (mode == CCFPmode || mode == CCFPEmode)
9415 rc = reverse_condition_maybe_unordered (rc);
9416 else
9417 rc = reverse_condition (rc);
9418 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9419 }
9420 [(set_attr "conds" "use")
9421 (set_attr "type" "multiple")
9422 (set_attr "length" "8")]
9423 )
9424
9425 (define_insn_and_split "*ior_scc"
9426 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9427 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
9428 [(match_operand 2 "cc_register" "") (const_int 0)])
9429 (match_operand:SI 3 "s_register_operand" "0,?r")))]
9430 "TARGET_ARM"
9431 "@
9432 orr%d1\\t%0, %3, #1
9433 #"
9434 "&& reload_completed
9435 && REGNO (operands [0]) != REGNO (operands[3])"
9436 ;; && which_alternative == 1
9437 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
9438 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
9439 (cond_exec (match_dup 4) (set (match_dup 0)
9440 (ior:SI (match_dup 3) (const_int 1))))]
9441 {
9442 machine_mode mode = GET_MODE (operands[2]);
9443 enum rtx_code rc = GET_CODE (operands[1]);
9444
9445 /* Note that operands[4] is the same as operands[1],
9446 but with VOIDmode as the result. */
9447 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9448 if (mode == CCFPmode || mode == CCFPEmode)
9449 rc = reverse_condition_maybe_unordered (rc);
9450 else
9451 rc = reverse_condition (rc);
9452 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9453 }
9454 [(set_attr "conds" "use")
9455 (set_attr "length" "4,8")
9456 (set_attr "type" "logic_imm,multiple")]
9457 )
9458
9459 ; A series of splitters for the compare_scc pattern below. Note that
9460 ; order is important.
9461 (define_split
9462 [(set (match_operand:SI 0 "s_register_operand" "")
9463 (lt:SI (match_operand:SI 1 "s_register_operand" "")
9464 (const_int 0)))
9465 (clobber (reg:CC CC_REGNUM))]
9466 "TARGET_32BIT && reload_completed"
9467 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
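
;; A brief hedged illustration (assumed source code) of the splitter above:
;; a signed "less than zero" test stored as 0/1 is just the sign bit, so it
;; splits into a single logical shift right by 31 (e.g. mov r0, r0, lsr #31
;; in ARM state).
;;
;;   int is_negative (int x)
;;   {
;;     return x < 0;
;;   }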
9468
9469 (define_split
9470 [(set (match_operand:SI 0 "s_register_operand" "")
9471 (ge:SI (match_operand:SI 1 "s_register_operand" "")
9472 (const_int 0)))
9473 (clobber (reg:CC CC_REGNUM))]
9474 "TARGET_32BIT && reload_completed"
9475 [(set (match_dup 0) (not:SI (match_dup 1)))
9476 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
9477
9478 (define_split
9479 [(set (match_operand:SI 0 "s_register_operand" "")
9480 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9481 (const_int 0)))
9482 (clobber (reg:CC CC_REGNUM))]
9483 "arm_arch5t && TARGET_32BIT"
9484 [(set (match_dup 0) (clz:SI (match_dup 1)))
9485 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9486 )
9487
9488 (define_split
9489 [(set (match_operand:SI 0 "s_register_operand" "")
9490 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9491 (const_int 0)))
9492 (clobber (reg:CC CC_REGNUM))]
9493 "TARGET_32BIT && reload_completed"
9494 [(parallel
9495 [(set (reg:CC CC_REGNUM)
9496 (compare:CC (const_int 1) (match_dup 1)))
9497 (set (match_dup 0)
9498 (minus:SI (const_int 1) (match_dup 1)))])
9499 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
9500 (set (match_dup 0) (const_int 0)))])
9501
9502 (define_split
9503 [(set (match_operand:SI 0 "s_register_operand" "")
9504 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9505 (match_operand:SI 2 "const_int_operand" "")))
9506 (clobber (reg:CC CC_REGNUM))]
9507 "TARGET_32BIT && reload_completed"
9508 [(parallel
9509 [(set (reg:CC CC_REGNUM)
9510 (compare:CC (match_dup 1) (match_dup 2)))
9511 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
9512 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
9513 (set (match_dup 0) (const_int 1)))]
9514 {
9515 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
9516 })
9517
9518 (define_split
9519 [(set (match_operand:SI 0 "s_register_operand" "")
9520 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9521 (match_operand:SI 2 "arm_add_operand" "")))
9522 (clobber (reg:CC CC_REGNUM))]
9523 "TARGET_32BIT && reload_completed"
9524 [(parallel
9525 [(set (reg:CC_NOOV CC_REGNUM)
9526 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
9527 (const_int 0)))
9528 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
9529 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
9530 (set (match_dup 0) (const_int 1)))])
9531
9532 (define_insn_and_split "*compare_scc"
9533 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9534 (match_operator:SI 1 "arm_comparison_operator"
9535 [(match_operand:SI 2 "s_register_operand" "r,r")
9536 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9537 (clobber (reg:CC CC_REGNUM))]
9538 "TARGET_32BIT"
9539 "#"
9540 "&& reload_completed"
9541 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
9542 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
9543 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
9544 {
9545 rtx tmp1;
9546 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
9547 operands[2], operands[3]);
9548 enum rtx_code rc = GET_CODE (operands[1]);
9549
9550 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
9551
9552 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9553 if (mode == CCFPmode || mode == CCFPEmode)
9554 rc = reverse_condition_maybe_unordered (rc);
9555 else
9556 rc = reverse_condition (rc);
9557 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9558 }
9559 [(set_attr "type" "multiple")]
9560 )
9561
9562 ;; Attempt to improve the sequence generated by the compare_scc splitters
9563 ;; so that it does not use conditional execution.
9564
9565 ;; Rd = (eq (reg1) (const_int 0)) // ARMv5
9566 ;; clz Rd, reg1
9567 ;; lsr Rd, Rd, #5
9568 (define_peephole2
9569 [(set (reg:CC CC_REGNUM)
9570 (compare:CC (match_operand:SI 1 "register_operand" "")
9571 (const_int 0)))
9572 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9573 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9574 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9575 (set (match_dup 0) (const_int 1)))]
9576 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9577 [(set (match_dup 0) (clz:SI (match_dup 1)))
9578 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9579 )
9580
9581 ;; Rd = (eq (reg1) (const_int 0)) // !ARMv5
9582 ;; negs Rd, reg1
9583 ;; adc Rd, Rd, reg1
9584 (define_peephole2
9585 [(set (reg:CC CC_REGNUM)
9586 (compare:CC (match_operand:SI 1 "register_operand" "")
9587 (const_int 0)))
9588 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9589 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9590 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9591 (set (match_dup 0) (const_int 1)))
9592 (match_scratch:SI 2 "r")]
9593 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9594 [(parallel
9595 [(set (reg:CC CC_REGNUM)
9596 (compare:CC (const_int 0) (match_dup 1)))
9597 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
9598 (set (match_dup 0)
9599 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
9600 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
9601 )
9602
9603 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
9604 ;; sub Rd, Reg1, reg2
9605 ;; clz Rd, Rd
9606 ;; lsr Rd, Rd, #5
9607 (define_peephole2
9608 [(set (reg:CC CC_REGNUM)
9609 (compare:CC (match_operand:SI 1 "register_operand" "")
9610 (match_operand:SI 2 "arm_rhs_operand" "")))
9611 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9612 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9613 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9614 (set (match_dup 0) (const_int 1)))]
9615 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
9616 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
9617 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
9618 (set (match_dup 0) (clz:SI (match_dup 0)))
9619 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9620 )
9621
9622
9623 ;; Rd = (eq (reg1) (reg2)) // ! ARMv5 or optimising for size.
9624 ;; sub T1, Reg1, reg2
9625 ;; negs Rd, T1
9626 ;; adc Rd, Rd, T1
9627 (define_peephole2
9628 [(set (reg:CC CC_REGNUM)
9629 (compare:CC (match_operand:SI 1 "register_operand" "")
9630 (match_operand:SI 2 "arm_rhs_operand" "")))
9631 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9632 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9633 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9634 (set (match_dup 0) (const_int 1)))
9635 (match_scratch:SI 3 "r")]
9636 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9637 [(set (match_dup 3) (match_dup 4))
9638 (parallel
9639 [(set (reg:CC CC_REGNUM)
9640 (compare:CC (const_int 0) (match_dup 3)))
9641 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
9642 (set (match_dup 0)
9643 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
9644 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
9645 "
9646 if (CONST_INT_P (operands[2]))
9647 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
9648 else
9649 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
9650 ")
9651
9652 (define_insn "*cond_move"
9653 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9654 (if_then_else:SI (match_operator 3 "equality_operator"
9655 [(match_operator 4 "arm_comparison_operator"
9656 [(match_operand 5 "cc_register" "") (const_int 0)])
9657 (const_int 0)])
9658 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9659 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9660 "TARGET_ARM"
9661 "*
9662 if (GET_CODE (operands[3]) == NE)
9663 {
9664 if (which_alternative != 1)
9665 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9666 if (which_alternative != 0)
9667 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9668 return \"\";
9669 }
9670 if (which_alternative != 0)
9671 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9672 if (which_alternative != 1)
9673 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9674 return \"\";
9675 "
9676 [(set_attr "conds" "use")
9677 (set_attr_alternative "type"
9678 [(if_then_else (match_operand 2 "const_int_operand" "")
9679 (const_string "mov_imm")
9680 (const_string "mov_reg"))
9681 (if_then_else (match_operand 1 "const_int_operand" "")
9682 (const_string "mov_imm")
9683 (const_string "mov_reg"))
9684 (const_string "multiple")])
9685 (set_attr "length" "4,4,8")]
9686 )
9687
9688 (define_insn "*cond_arith"
9689 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9690 (match_operator:SI 5 "shiftable_operator"
9691 [(match_operator:SI 4 "arm_comparison_operator"
9692 [(match_operand:SI 2 "s_register_operand" "r,r")
9693 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9694 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9695 (clobber (reg:CC CC_REGNUM))]
9696 "TARGET_ARM"
9697 "*
9698 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9699 return \"%i5\\t%0, %1, %2, lsr #31\";
9700
9701 output_asm_insn (\"cmp\\t%2, %3\", operands);
9702 if (GET_CODE (operands[5]) == AND)
9703 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9704 else if (GET_CODE (operands[5]) == MINUS)
9705 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9706 else if (which_alternative != 0)
9707 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9708 return \"%i5%d4\\t%0, %1, #1\";
9709 "
9710 [(set_attr "conds" "clob")
9711 (set_attr "length" "12")
9712 (set_attr "type" "multiple")]
9713 )
9714
9715 (define_insn "*cond_sub"
9716 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9717 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9718 (match_operator:SI 4 "arm_comparison_operator"
9719 [(match_operand:SI 2 "s_register_operand" "r,r")
9720 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9721 (clobber (reg:CC CC_REGNUM))]
9722 "TARGET_ARM"
9723 "*
9724 output_asm_insn (\"cmp\\t%2, %3\", operands);
9725 if (which_alternative != 0)
9726 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9727 return \"sub%d4\\t%0, %1, #1\";
9728 "
9729 [(set_attr "conds" "clob")
9730 (set_attr "length" "8,12")
9731 (set_attr "type" "multiple")]
9732 )
9733
9734 (define_insn "*cmp_ite0"
9735 [(set (match_operand 6 "dominant_cc_register" "")
9736 (compare
9737 (if_then_else:SI
9738 (match_operator 4 "arm_comparison_operator"
9739 [(match_operand:SI 0 "s_register_operand"
9740 "l,l,l,r,r,r,r,r,r")
9741 (match_operand:SI 1 "arm_add_operand"
9742 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9743 (match_operator:SI 5 "arm_comparison_operator"
9744 [(match_operand:SI 2 "s_register_operand"
9745 "l,r,r,l,l,r,r,r,r")
9746 (match_operand:SI 3 "arm_add_operand"
9747 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9748 (const_int 0))
9749 (const_int 0)))]
9750 "TARGET_32BIT"
9751 "*
9752 {
9753 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9754 {
9755 {\"cmp%d5\\t%0, %1\",
9756 \"cmp%d4\\t%2, %3\"},
9757 {\"cmn%d5\\t%0, #%n1\",
9758 \"cmp%d4\\t%2, %3\"},
9759 {\"cmp%d5\\t%0, %1\",
9760 \"cmn%d4\\t%2, #%n3\"},
9761 {\"cmn%d5\\t%0, #%n1\",
9762 \"cmn%d4\\t%2, #%n3\"}
9763 };
9764 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9765 {
9766 {\"cmp\\t%2, %3\",
9767 \"cmp\\t%0, %1\"},
9768 {\"cmp\\t%2, %3\",
9769 \"cmn\\t%0, #%n1\"},
9770 {\"cmn\\t%2, #%n3\",
9771 \"cmp\\t%0, %1\"},
9772 {\"cmn\\t%2, #%n3\",
9773 \"cmn\\t%0, #%n1\"}
9774 };
9775 static const char * const ite[2] =
9776 {
9777 \"it\\t%d5\",
9778 \"it\\t%d4\"
9779 };
9780 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9781 CMP_CMP, CMN_CMP, CMP_CMP,
9782 CMN_CMP, CMP_CMN, CMN_CMN};
9783 int swap =
9784 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9785
9786 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9787 if (TARGET_THUMB2) {
9788 output_asm_insn (ite[swap], operands);
9789 }
9790 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9791 return \"\";
9792 }"
9793 [(set_attr "conds" "set")
9794 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9795 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
9796 (set_attr "type" "multiple")
9797 (set_attr_alternative "length"
9798 [(const_int 6)
9799 (const_int 8)
9800 (const_int 8)
9801 (const_int 8)
9802 (const_int 8)
9803 (if_then_else (eq_attr "is_thumb" "no")
9804 (const_int 8)
9805 (const_int 10))
9806 (if_then_else (eq_attr "is_thumb" "no")
9807 (const_int 8)
9808 (const_int 10))
9809 (if_then_else (eq_attr "is_thumb" "no")
9810 (const_int 8)
9811 (const_int 10))
9812 (if_then_else (eq_attr "is_thumb" "no")
9813 (const_int 8)
9814 (const_int 10))])]
9815 )
9816
9817 (define_insn "*cmp_ite1"
9818 [(set (match_operand 6 "dominant_cc_register" "")
9819 (compare
9820 (if_then_else:SI
9821 (match_operator 4 "arm_comparison_operator"
9822 [(match_operand:SI 0 "s_register_operand"
9823 "l,l,l,r,r,r,r,r,r")
9824 (match_operand:SI 1 "arm_add_operand"
9825 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9826 (match_operator:SI 5 "arm_comparison_operator"
9827 [(match_operand:SI 2 "s_register_operand"
9828 "l,r,r,l,l,r,r,r,r")
9829 (match_operand:SI 3 "arm_add_operand"
9830 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9831 (const_int 1))
9832 (const_int 0)))]
9833 "TARGET_32BIT"
9834 "*
9835 {
9836 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9837 {
9838 {\"cmp\\t%0, %1\",
9839 \"cmp\\t%2, %3\"},
9840 {\"cmn\\t%0, #%n1\",
9841 \"cmp\\t%2, %3\"},
9842 {\"cmp\\t%0, %1\",
9843 \"cmn\\t%2, #%n3\"},
9844 {\"cmn\\t%0, #%n1\",
9845 \"cmn\\t%2, #%n3\"}
9846 };
9847 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9848 {
9849 {\"cmp%d4\\t%2, %3\",
9850 \"cmp%D5\\t%0, %1\"},
9851 {\"cmp%d4\\t%2, %3\",
9852 \"cmn%D5\\t%0, #%n1\"},
9853 {\"cmn%d4\\t%2, #%n3\",
9854 \"cmp%D5\\t%0, %1\"},
9855 {\"cmn%d4\\t%2, #%n3\",
9856 \"cmn%D5\\t%0, #%n1\"}
9857 };
9858 static const char * const ite[2] =
9859 {
9860 \"it\\t%d4\",
9861 \"it\\t%D5\"
9862 };
9863 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9864 CMP_CMP, CMN_CMP, CMP_CMP,
9865 CMN_CMP, CMP_CMN, CMN_CMN};
9866 int swap =
9867 comparison_dominates_p (GET_CODE (operands[5]),
9868 reverse_condition (GET_CODE (operands[4])));
9869
9870 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9871 if (TARGET_THUMB2) {
9872 output_asm_insn (ite[swap], operands);
9873 }
9874 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9875 return \"\";
9876 }"
9877 [(set_attr "conds" "set")
9878 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9879 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
9880 (set_attr_alternative "length"
9881 [(const_int 6)
9882 (const_int 8)
9883 (const_int 8)
9884 (const_int 8)
9885 (const_int 8)
9886 (if_then_else (eq_attr "is_thumb" "no")
9887 (const_int 8)
9888 (const_int 10))
9889 (if_then_else (eq_attr "is_thumb" "no")
9890 (const_int 8)
9891 (const_int 10))
9892 (if_then_else (eq_attr "is_thumb" "no")
9893 (const_int 8)
9894 (const_int 10))
9895 (if_then_else (eq_attr "is_thumb" "no")
9896 (const_int 8)
9897 (const_int 10))])
9898 (set_attr "type" "multiple")]
9899 )
9900
9901 (define_insn "*cmp_and"
9902 [(set (match_operand 6 "dominant_cc_register" "")
9903 (compare
9904 (and:SI
9905 (match_operator 4 "arm_comparison_operator"
9906 [(match_operand:SI 0 "s_register_operand"
9907 "l,l,l,r,r,r,r,r,r,r")
9908 (match_operand:SI 1 "arm_add_operand"
9909 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
9910 (match_operator:SI 5 "arm_comparison_operator"
9911 [(match_operand:SI 2 "s_register_operand"
9912 "l,r,r,l,l,r,r,r,r,r")
9913 (match_operand:SI 3 "arm_add_operand"
9914 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
9915 (const_int 0)))]
9916 "TARGET_32BIT"
9917 "*
9918 {
9919 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9920 {
9921 {\"cmp%d5\\t%0, %1\",
9922 \"cmp%d4\\t%2, %3\"},
9923 {\"cmn%d5\\t%0, #%n1\",
9924 \"cmp%d4\\t%2, %3\"},
9925 {\"cmp%d5\\t%0, %1\",
9926 \"cmn%d4\\t%2, #%n3\"},
9927 {\"cmn%d5\\t%0, #%n1\",
9928 \"cmn%d4\\t%2, #%n3\"}
9929 };
9930 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9931 {
9932 {\"cmp\\t%2, %3\",
9933 \"cmp\\t%0, %1\"},
9934 {\"cmp\\t%2, %3\",
9935 \"cmn\\t%0, #%n1\"},
9936 {\"cmn\\t%2, #%n3\",
9937 \"cmp\\t%0, %1\"},
9938 {\"cmn\\t%2, #%n3\",
9939 \"cmn\\t%0, #%n1\"}
9940 };
9941 static const char *const ite[2] =
9942 {
9943 \"it\\t%d5\",
9944 \"it\\t%d4\"
9945 };
9946 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
9947 CMP_CMP, CMN_CMP, CMP_CMP,
9948 CMP_CMP, CMN_CMP, CMP_CMN,
9949 CMN_CMN};
9950 int swap =
9951 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9952
9953 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9954 if (TARGET_THUMB2) {
9955 output_asm_insn (ite[swap], operands);
9956 }
9957 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9958 return \"\";
9959 }"
9960 [(set_attr "conds" "set")
9961 (set_attr "predicable" "no")
9962 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
9963 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
9964 (set_attr_alternative "length"
9965 [(const_int 6)
9966 (const_int 8)
9967 (const_int 8)
9968 (const_int 8)
9969 (const_int 8)
9970 (const_int 6)
9971 (if_then_else (eq_attr "is_thumb" "no")
9972 (const_int 8)
9973 (const_int 10))
9974 (if_then_else (eq_attr "is_thumb" "no")
9975 (const_int 8)
9976 (const_int 10))
9977 (if_then_else (eq_attr "is_thumb" "no")
9978 (const_int 8)
9979 (const_int 10))
9980 (if_then_else (eq_attr "is_thumb" "no")
9981 (const_int 8)
9982 (const_int 10))])
9983 (set_attr "type" "multiple")]
9984 )
9985
9986 (define_insn "*cmp_ior"
9987 [(set (match_operand 6 "dominant_cc_register" "")
9988 (compare
9989 (ior:SI
9990 (match_operator 4 "arm_comparison_operator"
9991 [(match_operand:SI 0 "s_register_operand"
9992 "l,l,l,r,r,r,r,r,r,r")
9993 (match_operand:SI 1 "arm_add_operand"
9994 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
9995 (match_operator:SI 5 "arm_comparison_operator"
9996 [(match_operand:SI 2 "s_register_operand"
9997 "l,r,r,l,l,r,r,r,r,r")
9998 (match_operand:SI 3 "arm_add_operand"
9999 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
10000 (const_int 0)))]
10001 "TARGET_32BIT"
10002 "*
10003 {
10004 static const char *const cmp1[NUM_OF_COND_CMP][2] =
10005 {
10006 {\"cmp\\t%0, %1\",
10007 \"cmp\\t%2, %3\"},
10008 {\"cmn\\t%0, #%n1\",
10009 \"cmp\\t%2, %3\"},
10010 {\"cmp\\t%0, %1\",
10011 \"cmn\\t%2, #%n3\"},
10012 {\"cmn\\t%0, #%n1\",
10013 \"cmn\\t%2, #%n3\"}
10014 };
10015 static const char *const cmp2[NUM_OF_COND_CMP][2] =
10016 {
10017 {\"cmp%D4\\t%2, %3\",
10018 \"cmp%D5\\t%0, %1\"},
10019 {\"cmp%D4\\t%2, %3\",
10020 \"cmn%D5\\t%0, #%n1\"},
10021 {\"cmn%D4\\t%2, #%n3\",
10022 \"cmp%D5\\t%0, %1\"},
10023 {\"cmn%D4\\t%2, #%n3\",
10024 \"cmn%D5\\t%0, #%n1\"}
10025 };
10026 static const char *const ite[2] =
10027 {
10028 \"it\\t%D4\",
10029 \"it\\t%D5\"
10030 };
10031 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
10032 CMP_CMP, CMN_CMP, CMP_CMP,
10033 CMP_CMP, CMN_CMP, CMP_CMN,
10034 CMN_CMN};
10035 int swap =
10036 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10037
10038 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10039 if (TARGET_THUMB2) {
10040 output_asm_insn (ite[swap], operands);
10041 }
10042 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10043 return \"\";
10044 }
10045 "
10046 [(set_attr "conds" "set")
10047 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
10048 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
10049 (set_attr_alternative "length"
10050 [(const_int 6)
10051 (const_int 8)
10052 (const_int 8)
10053 (const_int 8)
10054 (const_int 8)
10055 (const_int 6)
10056 (if_then_else (eq_attr "is_thumb" "no")
10057 (const_int 8)
10058 (const_int 10))
10059 (if_then_else (eq_attr "is_thumb" "no")
10060 (const_int 8)
10061 (const_int 10))
10062 (if_then_else (eq_attr "is_thumb" "no")
10063 (const_int 8)
10064 (const_int 10))
10065 (if_then_else (eq_attr "is_thumb" "no")
10066 (const_int 8)
10067 (const_int 10))])
10068 (set_attr "type" "multiple")]
10069 )
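
;; An illustrative sketch (an assumption about typical output, not from
;; these sources) of what the *cmp_and/*cmp_ior patterns buy: two
;; comparisons joined by && or || in a branch condition can be evaluated
;; with one unconditional compare followed by one conditional compare,
;; instead of materialising both boolean results.
;;
;;   void maybe_abort (int a, int b)
;;   {
;;     if (a == 0 || b == 0)        /* e.g. cmp a, #0 ; cmpne b, #0 ; beq */
;;       __builtin_abort ();
;;   }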
10070
10071 (define_insn_and_split "*ior_scc_scc"
10072 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
10073 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
10074 [(match_operand:SI 1 "s_register_operand" "l,r")
10075 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10076 (match_operator:SI 6 "arm_comparison_operator"
10077 [(match_operand:SI 4 "s_register_operand" "l,r")
10078 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
10079 (clobber (reg:CC CC_REGNUM))]
10080 "TARGET_32BIT
10081 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
10082 != CCmode)"
10083 "#"
10084 "TARGET_32BIT && reload_completed"
10085 [(set (match_dup 7)
10086 (compare
10087 (ior:SI
10088 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10089 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10090 (const_int 0)))
10091 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10092 "operands[7]
10093 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10094 DOM_CC_X_OR_Y),
10095 CC_REGNUM);"
10096 [(set_attr "conds" "clob")
10097 (set_attr "enabled_for_short_it" "yes,no")
10098 (set_attr "length" "16")
10099 (set_attr "type" "multiple")]
10100 )
10101
10102 ; If the above pattern is followed by a CMP insn, then the compare is
10103 ; redundant, since we can rework the conditional instruction that follows.
10104 (define_insn_and_split "*ior_scc_scc_cmp"
10105 [(set (match_operand 0 "dominant_cc_register" "")
10106 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
10107 [(match_operand:SI 1 "s_register_operand" "l,r")
10108 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10109 (match_operator:SI 6 "arm_comparison_operator"
10110 [(match_operand:SI 4 "s_register_operand" "l,r")
10111 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
10112 (const_int 0)))
10113 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
10114 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10115 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10116 "TARGET_32BIT"
10117 "#"
10118 "TARGET_32BIT && reload_completed"
10119 [(set (match_dup 0)
10120 (compare
10121 (ior:SI
10122 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10123 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10124 (const_int 0)))
10125 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10126 ""
10127 [(set_attr "conds" "set")
10128 (set_attr "enabled_for_short_it" "yes,no")
10129 (set_attr "length" "16")
10130 (set_attr "type" "multiple")]
10131 )
10132
10133 (define_insn_and_split "*and_scc_scc"
10134 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
10135 (and:SI (match_operator:SI 3 "arm_comparison_operator"
10136 [(match_operand:SI 1 "s_register_operand" "l,r")
10137 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10138 (match_operator:SI 6 "arm_comparison_operator"
10139 [(match_operand:SI 4 "s_register_operand" "l,r")
10140 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
10141 (clobber (reg:CC CC_REGNUM))]
10142 "TARGET_32BIT
10143 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10144 != CCmode)"
10145 "#"
10146 "TARGET_32BIT && reload_completed
10147 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10148 != CCmode)"
10149 [(set (match_dup 7)
10150 (compare
10151 (and:SI
10152 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10153 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10154 (const_int 0)))
10155 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10156 "operands[7]
10157 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10158 DOM_CC_X_AND_Y),
10159 CC_REGNUM);"
10160 [(set_attr "conds" "clob")
10161 (set_attr "enabled_for_short_it" "yes,no")
10162 (set_attr "length" "16")
10163 (set_attr "type" "multiple")]
10164 )
10165
10166 ; If the above pattern is followed by a CMP insn, then the compare is
10167 ; redundant, since we can rework the conditional instruction that follows.
10168 (define_insn_and_split "*and_scc_scc_cmp"
10169 [(set (match_operand 0 "dominant_cc_register" "")
10170 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
10171 [(match_operand:SI 1 "s_register_operand" "l,r")
10172 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10173 (match_operator:SI 6 "arm_comparison_operator"
10174 [(match_operand:SI 4 "s_register_operand" "l,r")
10175 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
10176 (const_int 0)))
10177 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
10178 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10179 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10180 "TARGET_32BIT"
10181 "#"
10182 "TARGET_32BIT && reload_completed"
10183 [(set (match_dup 0)
10184 (compare
10185 (and:SI
10186 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10187 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10188 (const_int 0)))
10189 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10190 ""
10191 [(set_attr "conds" "set")
10192 (set_attr "enabled_for_short_it" "yes,no")
10193 (set_attr "length" "16")
10194 (set_attr "type" "multiple")]
10195 )
10196
10197 ;; If there is no dominance in the comparison, then we can still save an
10198 ;; instruction in the AND case, since we know that the second compare
10199 ;; need only zero the value when it is false (when it is true, the value
10200 ;; is already correct).
10201 (define_insn_and_split "*and_scc_scc_nodom"
10202 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
10203 (and:SI (match_operator:SI 3 "arm_comparison_operator"
10204 [(match_operand:SI 1 "s_register_operand" "r,r,0")
10205 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
10206 (match_operator:SI 6 "arm_comparison_operator"
10207 [(match_operand:SI 4 "s_register_operand" "r,r,r")
10208 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
10209 (clobber (reg:CC CC_REGNUM))]
10210 "TARGET_32BIT
10211 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10212 == CCmode)"
10213 "#"
10214 "TARGET_32BIT && reload_completed"
10215 [(parallel [(set (match_dup 0)
10216 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
10217 (clobber (reg:CC CC_REGNUM))])
10218 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
10219 (set (match_dup 0)
10220 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
10221 (match_dup 0)
10222 (const_int 0)))]
10223 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
10224 operands[4], operands[5]),
10225 CC_REGNUM);
10226 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
10227 operands[5]);"
10228 [(set_attr "conds" "clob")
10229 (set_attr "length" "20")
10230 (set_attr "type" "multiple")]
10231 )
10232
10233 (define_split
10234 [(set (reg:CC_NOOV CC_REGNUM)
10235 (compare:CC_NOOV (ior:SI
10236 (and:SI (match_operand:SI 0 "s_register_operand" "")
10237 (const_int 1))
10238 (match_operator:SI 1 "arm_comparison_operator"
10239 [(match_operand:SI 2 "s_register_operand" "")
10240 (match_operand:SI 3 "arm_add_operand" "")]))
10241 (const_int 0)))
10242 (clobber (match_operand:SI 4 "s_register_operand" ""))]
10243 "TARGET_ARM"
10244 [(set (match_dup 4)
10245 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10246 (match_dup 0)))
10247 (set (reg:CC_NOOV CC_REGNUM)
10248 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
10249 (const_int 0)))]
10250 "")
10251
10252 (define_split
10253 [(set (reg:CC_NOOV CC_REGNUM)
10254 (compare:CC_NOOV (ior:SI
10255 (match_operator:SI 1 "arm_comparison_operator"
10256 [(match_operand:SI 2 "s_register_operand" "")
10257 (match_operand:SI 3 "arm_add_operand" "")])
10258 (and:SI (match_operand:SI 0 "s_register_operand" "")
10259 (const_int 1)))
10260 (const_int 0)))
10261 (clobber (match_operand:SI 4 "s_register_operand" ""))]
10262 "TARGET_ARM"
10263 [(set (match_dup 4)
10264 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10265 (match_dup 0)))
10266 (set (reg:CC_NOOV CC_REGNUM)
10267 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
10268 (const_int 0)))]
10269 "")
10270 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
10271
10272 (define_insn_and_split "*negscc"
10273 [(set (match_operand:SI 0 "s_register_operand" "=r")
10274 (neg:SI (match_operator 3 "arm_comparison_operator"
10275 [(match_operand:SI 1 "s_register_operand" "r")
10276 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
10277 (clobber (reg:CC CC_REGNUM))]
10278 "TARGET_ARM"
10279 "#"
10280 "&& reload_completed"
10281 [(const_int 0)]
10282 {
10283 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
10284
10285 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
10286 {
10287 /* Emit mov\\t%0, %1, asr #31 */
10288 emit_insn (gen_rtx_SET (operands[0],
10289 gen_rtx_ASHIFTRT (SImode,
10290 operands[1],
10291 GEN_INT (31))));
10292 DONE;
10293 }
10294 else if (GET_CODE (operands[3]) == NE)
10295 {
10296 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
10297 if (CONST_INT_P (operands[2]))
10298 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
10299 gen_int_mode (-INTVAL (operands[2]),
10300 SImode)));
10301 else
10302 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
10303
10304 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10305 gen_rtx_NE (SImode,
10306 cc_reg,
10307 const0_rtx),
10308 gen_rtx_SET (operands[0],
10309 GEN_INT (~0))));
10310 DONE;
10311 }
10312 else
10313 {
10314 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
10315 emit_insn (gen_rtx_SET (cc_reg,
10316 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
10317 enum rtx_code rc = GET_CODE (operands[3]);
10318
10319 rc = reverse_condition (rc);
10320 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10321 gen_rtx_fmt_ee (rc,
10322 VOIDmode,
10323 cc_reg,
10324 const0_rtx),
10325 gen_rtx_SET (operands[0], const0_rtx)));
10326 rc = GET_CODE (operands[3]);
10327 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10328 gen_rtx_fmt_ee (rc,
10329 VOIDmode,
10330 cc_reg,
10331 const0_rtx),
10332 gen_rtx_SET (operands[0],
10333 GEN_INT (~0))));
10334 DONE;
10335 }
10336 FAIL;
10337 }
10338 [(set_attr "conds" "clob")
10339 (set_attr "length" "12")
10340 (set_attr "type" "multiple")]
10341 )
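;; The first case above relies on the identity -(x < 0) == x >> 31 for
;; 32-bit signed x.  A hedged C sketch, assuming the arithmetic right shift
;; of negative values that GCC provides on ARM (the function name is a
;; placeholder):
;;
;;   int negscc_lt0_sketch (int x)
;;   {
;;     /* 0 for non-negative x, -1 (all one bits) otherwise -- exactly what
;;        "mov %0, %1, asr #31" computes.  */
;;     return x >> 31;
;;   }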
10342
10343 (define_insn_and_split "movcond_addsi"
10344 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
10345 (if_then_else:SI
10346 (match_operator 5 "comparison_operator"
10347 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
10348 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
10349 (const_int 0)])
10350 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
10351 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
10352 (clobber (reg:CC CC_REGNUM))]
10353 "TARGET_32BIT"
10354 "#"
10355 "&& reload_completed"
10356 [(set (reg:CC_NOOV CC_REGNUM)
10357 (compare:CC_NOOV
10358 (plus:SI (match_dup 3)
10359 (match_dup 4))
10360 (const_int 0)))
10361 (set (match_dup 0) (match_dup 1))
10362 (cond_exec (match_dup 6)
10363 (set (match_dup 0) (match_dup 2)))]
10364 "
10365 {
10366 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
10367 operands[3], operands[4]);
10368 enum rtx_code rc = GET_CODE (operands[5]);
10369 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10370 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
10371 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
10372 rc = reverse_condition (rc);
10373 else
10374 std::swap (operands[1], operands[2]);
10375
10376 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10377 }
10378 "
10379 [(set_attr "conds" "clob")
10380 (set_attr "enabled_for_short_it" "no,yes,yes")
10381 (set_attr "type" "multiple")]
10382 )
10383
10384 (define_insn "movcond"
10385 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10386 (if_then_else:SI
10387 (match_operator 5 "arm_comparison_operator"
10388 [(match_operand:SI 3 "s_register_operand" "r,r,r")
10389 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
10390 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
10391 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
10392 (clobber (reg:CC CC_REGNUM))]
10393 "TARGET_ARM"
10394 "*
10395 if (GET_CODE (operands[5]) == LT
10396 && (operands[4] == const0_rtx))
10397 {
10398 if (which_alternative != 1 && REG_P (operands[1]))
10399 {
10400 if (operands[2] == const0_rtx)
10401 return \"and\\t%0, %1, %3, asr #31\";
10402 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
10403 }
10404 else if (which_alternative != 0 && REG_P (operands[2]))
10405 {
10406 if (operands[1] == const0_rtx)
10407 return \"bic\\t%0, %2, %3, asr #31\";
10408 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
10409 }
10410 /* The only case that falls through to here is when both ops 1 & 2
10411 are constants. */
10412 }
10413
10414 if (GET_CODE (operands[5]) == GE
10415 && (operands[4] == const0_rtx))
10416 {
10417 if (which_alternative != 1 && REG_P (operands[1]))
10418 {
10419 if (operands[2] == const0_rtx)
10420 return \"bic\\t%0, %1, %3, asr #31\";
10421 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
10422 }
10423 else if (which_alternative != 0 && REG_P (operands[2]))
10424 {
10425 if (operands[1] == const0_rtx)
10426 return \"and\\t%0, %2, %3, asr #31\";
10427 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
10428 }
10429 /* The only case that falls through to here is when both ops 1 & 2
10430 are constants. */
10431 }
10432 if (CONST_INT_P (operands[4])
10433 && !const_ok_for_arm (INTVAL (operands[4])))
10434 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
10435 else
10436 output_asm_insn (\"cmp\\t%3, %4\", operands);
10437 if (which_alternative != 0)
10438 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
10439 if (which_alternative != 1)
10440 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
10441 return \"\";
10442 "
10443 [(set_attr "conds" "clob")
10444 (set_attr "length" "8,8,12")
10445 (set_attr "type" "multiple")]
10446 )
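;; The and/bic alternatives above use the usual sign-mask trick: x >> 31
;; (arithmetic) is all ones when x is negative and zero otherwise.  A hedged
;; C sketch, assuming 32-bit int and arithmetic right shift (function names
;; are placeholders):
;;
;;   int sel_lt0_sketch (int x, int a) { return a & (x >> 31); }  /* x < 0  ? a : 0 */
;;   int sel_ge0_sketch (int x, int a) { return a & ~(x >> 31); } /* x >= 0 ? a : 0 */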
10447
10448 ;; ??? The patterns below need checking for Thumb-2 usefulness.
10449
10450 (define_insn "*ifcompare_plus_move"
10451 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10452 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10453 [(match_operand:SI 4 "s_register_operand" "r,r")
10454 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10455 (plus:SI
10456 (match_operand:SI 2 "s_register_operand" "r,r")
10457 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
10458 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10459 (clobber (reg:CC CC_REGNUM))]
10460 "TARGET_ARM"
10461 "#"
10462 [(set_attr "conds" "clob")
10463 (set_attr "length" "8,12")
10464 (set_attr "type" "multiple")]
10465 )
10466
10467 (define_insn "*if_plus_move"
10468 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10469 (if_then_else:SI
10470 (match_operator 4 "arm_comparison_operator"
10471 [(match_operand 5 "cc_register" "") (const_int 0)])
10472 (plus:SI
10473 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10474 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
10475 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
10476 "TARGET_ARM"
10477 "@
10478 add%d4\\t%0, %2, %3
10479 sub%d4\\t%0, %2, #%n3
10480 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
10481 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
10482 [(set_attr "conds" "use")
10483 (set_attr "length" "4,4,8,8")
10484 (set_attr_alternative "type"
10485 [(if_then_else (match_operand 3 "const_int_operand" "")
10486 (const_string "alu_imm" )
10487 (const_string "alu_sreg"))
10488 (const_string "alu_imm")
10489 (const_string "multiple")
10490 (const_string "multiple")])]
10491 )
10492
10493 (define_insn "*ifcompare_move_plus"
10494 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10495 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10496 [(match_operand:SI 4 "s_register_operand" "r,r")
10497 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10498 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10499 (plus:SI
10500 (match_operand:SI 2 "s_register_operand" "r,r")
10501 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
10502 (clobber (reg:CC CC_REGNUM))]
10503 "TARGET_ARM"
10504 "#"
10505 [(set_attr "conds" "clob")
10506 (set_attr "length" "8,12")
10507 (set_attr "type" "multiple")]
10508 )
10509
10510 (define_insn "*if_move_plus"
10511 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10512 (if_then_else:SI
10513 (match_operator 4 "arm_comparison_operator"
10514 [(match_operand 5 "cc_register" "") (const_int 0)])
10515 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
10516 (plus:SI
10517 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10518 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
10519 "TARGET_ARM"
10520 "@
10521 add%D4\\t%0, %2, %3
10522 sub%D4\\t%0, %2, #%n3
10523 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
10524 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
10525 [(set_attr "conds" "use")
10526 (set_attr "length" "4,4,8,8")
10527 (set_attr_alternative "type"
10528 [(if_then_else (match_operand 3 "const_int_operand" "")
10529 (const_string "alu_imm" )
10530 (const_string "alu_sreg"))
10531 (const_string "alu_imm")
10532 (const_string "multiple")
10533 (const_string "multiple")])]
10534 )
10535
10536 (define_insn "*ifcompare_arith_arith"
10537 [(set (match_operand:SI 0 "s_register_operand" "=r")
10538 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
10539 [(match_operand:SI 5 "s_register_operand" "r")
10540 (match_operand:SI 6 "arm_add_operand" "rIL")])
10541 (match_operator:SI 8 "shiftable_operator"
10542 [(match_operand:SI 1 "s_register_operand" "r")
10543 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10544 (match_operator:SI 7 "shiftable_operator"
10545 [(match_operand:SI 3 "s_register_operand" "r")
10546 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
10547 (clobber (reg:CC CC_REGNUM))]
10548 "TARGET_ARM"
10549 "#"
10550 [(set_attr "conds" "clob")
10551 (set_attr "length" "12")
10552 (set_attr "type" "multiple")]
10553 )
10554
10555 (define_insn "*if_arith_arith"
10556 [(set (match_operand:SI 0 "s_register_operand" "=r")
10557 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
10558 [(match_operand 8 "cc_register" "") (const_int 0)])
10559 (match_operator:SI 6 "shiftable_operator"
10560 [(match_operand:SI 1 "s_register_operand" "r")
10561 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10562 (match_operator:SI 7 "shiftable_operator"
10563 [(match_operand:SI 3 "s_register_operand" "r")
10564 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
10565 "TARGET_ARM"
10566 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
10567 [(set_attr "conds" "use")
10568 (set_attr "length" "8")
10569 (set_attr "type" "multiple")]
10570 )
10571
10572 (define_insn "*ifcompare_arith_move"
10573 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10574 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10575 [(match_operand:SI 2 "s_register_operand" "r,r")
10576 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
10577 (match_operator:SI 7 "shiftable_operator"
10578 [(match_operand:SI 4 "s_register_operand" "r,r")
10579 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
10580 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10581 (clobber (reg:CC CC_REGNUM))]
10582 "TARGET_ARM"
10583 "*
10584 /* If we have an operation where (op x 0) is the identity operation, the
10585 condition is LT or GE, we are comparing against zero, and everything
10586 is in registers, then we can do this in two instructions.  */
10587 if (operands[3] == const0_rtx
10588 && GET_CODE (operands[7]) != AND
10589 && REG_P (operands[5])
10590 && REG_P (operands[1])
10591 && REGNO (operands[1]) == REGNO (operands[4])
10592 && REGNO (operands[4]) != REGNO (operands[0]))
10593 {
10594 if (GET_CODE (operands[6]) == LT)
10595 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10596 else if (GET_CODE (operands[6]) == GE)
10597 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10598 }
10599 if (CONST_INT_P (operands[3])
10600 && !const_ok_for_arm (INTVAL (operands[3])))
10601 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
10602 else
10603 output_asm_insn (\"cmp\\t%2, %3\", operands);
10604 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
10605 if (which_alternative != 0)
10606 return \"mov%D6\\t%0, %1\";
10607 return \"\";
10608 "
10609 [(set_attr "conds" "clob")
10610 (set_attr "length" "8,12")
10611 (set_attr "type" "multiple")]
10612 )
10613
10614 (define_insn "*if_arith_move"
10615 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10616 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10617 [(match_operand 6 "cc_register" "") (const_int 0)])
10618 (match_operator:SI 5 "shiftable_operator"
10619 [(match_operand:SI 2 "s_register_operand" "r,r")
10620 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10621 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
10622 "TARGET_ARM"
10623 "@
10624 %I5%d4\\t%0, %2, %3
10625 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
10626 [(set_attr "conds" "use")
10627 (set_attr "length" "4,8")
10628 (set_attr_alternative "type"
10629 [(if_then_else (match_operand 3 "const_int_operand" "")
10630 (const_string "alu_shift_imm" )
10631 (const_string "alu_shift_reg"))
10632 (const_string "multiple")])]
10633 )
10634
10635 (define_insn "*ifcompare_move_arith"
10636 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10637 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10638 [(match_operand:SI 4 "s_register_operand" "r,r")
10639 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10640 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10641 (match_operator:SI 7 "shiftable_operator"
10642 [(match_operand:SI 2 "s_register_operand" "r,r")
10643 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10644 (clobber (reg:CC CC_REGNUM))]
10645 "TARGET_ARM"
10646 "*
10647 /* If we have an operation where (op x 0) is the identity operation, the
10648 condition is LT or GE, we are comparing against zero, and everything
10649 is in registers, then we can do this in two instructions.  */
10650 if (operands[5] == const0_rtx
10651 && GET_CODE (operands[7]) != AND
10652 && REG_P (operands[3])
10653 && REG_P (operands[1])
10654 && REGNO (operands[1]) == REGNO (operands[2])
10655 && REGNO (operands[2]) != REGNO (operands[0]))
10656 {
10657 if (GET_CODE (operands[6]) == GE)
10658 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10659 else if (GET_CODE (operands[6]) == LT)
10660 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10661 }
10662
10663 if (CONST_INT_P (operands[5])
10664 && !const_ok_for_arm (INTVAL (operands[5])))
10665 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
10666 else
10667 output_asm_insn (\"cmp\\t%4, %5\", operands);
10668
10669 if (which_alternative != 0)
10670 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
10671 return \"%I7%D6\\t%0, %2, %3\";
10672 "
10673 [(set_attr "conds" "clob")
10674 (set_attr "length" "8,12")
10675 (set_attr "type" "multiple")]
10676 )
10677
10678 (define_insn "*if_move_arith"
10679 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10680 (if_then_else:SI
10681 (match_operator 4 "arm_comparison_operator"
10682 [(match_operand 6 "cc_register" "") (const_int 0)])
10683 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10684 (match_operator:SI 5 "shiftable_operator"
10685 [(match_operand:SI 2 "s_register_operand" "r,r")
10686 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
10687 "TARGET_ARM"
10688 "@
10689 %I5%D4\\t%0, %2, %3
10690 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
10691 [(set_attr "conds" "use")
10692 (set_attr "length" "4,8")
10693 (set_attr_alternative "type"
10694 [(if_then_else (match_operand 3 "const_int_operand" "")
10695 (const_string "alu_shift_imm" )
10696 (const_string "alu_shift_reg"))
10697 (const_string "multiple")])]
10698 )
10699
10700 (define_insn "*ifcompare_move_not"
10701 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10702 (if_then_else:SI
10703 (match_operator 5 "arm_comparison_operator"
10704 [(match_operand:SI 3 "s_register_operand" "r,r")
10705 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10706 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10707 (not:SI
10708 (match_operand:SI 2 "s_register_operand" "r,r"))))
10709 (clobber (reg:CC CC_REGNUM))]
10710 "TARGET_ARM"
10711 "#"
10712 [(set_attr "conds" "clob")
10713 (set_attr "length" "8,12")
10714 (set_attr "type" "multiple")]
10715 )
10716
10717 (define_insn "*if_move_not"
10718 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10719 (if_then_else:SI
10720 (match_operator 4 "arm_comparison_operator"
10721 [(match_operand 3 "cc_register" "") (const_int 0)])
10722 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10723 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10724 "TARGET_ARM"
10725 "@
10726 mvn%D4\\t%0, %2
10727 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
10728 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
10729 [(set_attr "conds" "use")
10731 (set_attr "length" "4,8,8")
10732 (set_attr "type" "mvn_reg,multiple,multiple")]
10733 )
10734
10735 (define_insn "*ifcompare_not_move"
10736 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10737 (if_then_else:SI
10738 (match_operator 5 "arm_comparison_operator"
10739 [(match_operand:SI 3 "s_register_operand" "r,r")
10740 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10741 (not:SI
10742 (match_operand:SI 2 "s_register_operand" "r,r"))
10743 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10744 (clobber (reg:CC CC_REGNUM))]
10745 "TARGET_ARM"
10746 "#"
10747 [(set_attr "conds" "clob")
10748 (set_attr "length" "8,12")
10749 (set_attr "type" "multiple")]
10750 )
10751
10752 (define_insn "*if_not_move"
10753 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10754 (if_then_else:SI
10755 (match_operator 4 "arm_comparison_operator"
10756 [(match_operand 3 "cc_register" "") (const_int 0)])
10757 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10758 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10759 "TARGET_ARM"
10760 "@
10761 mvn%d4\\t%0, %2
10762 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
10763 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
10764 [(set_attr "conds" "use")
10765 (set_attr "type" "mvn_reg,multiple,multiple")
10766 (set_attr "length" "4,8,8")]
10767 )
10768
10769 (define_insn "*ifcompare_shift_move"
10770 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10771 (if_then_else:SI
10772 (match_operator 6 "arm_comparison_operator"
10773 [(match_operand:SI 4 "s_register_operand" "r,r")
10774 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10775 (match_operator:SI 7 "shift_operator"
10776 [(match_operand:SI 2 "s_register_operand" "r,r")
10777 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
10778 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10779 (clobber (reg:CC CC_REGNUM))]
10780 "TARGET_ARM"
10781 "#"
10782 [(set_attr "conds" "clob")
10783 (set_attr "length" "8,12")
10784 (set_attr "type" "multiple")]
10785 )
10786
10787 (define_insn "*if_shift_move"
10788 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10789 (if_then_else:SI
10790 (match_operator 5 "arm_comparison_operator"
10791 [(match_operand 6 "cc_register" "") (const_int 0)])
10792 (match_operator:SI 4 "shift_operator"
10793 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10794 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
10795 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10796 "TARGET_ARM"
10797 "@
10798 mov%d5\\t%0, %2%S4
10799 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
10800 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
10801 [(set_attr "conds" "use")
10802 (set_attr "shift" "2")
10803 (set_attr "length" "4,8,8")
10804 (set_attr_alternative "type"
10805 [(if_then_else (match_operand 3 "const_int_operand" "")
10806 (const_string "mov_shift" )
10807 (const_string "mov_shift_reg"))
10808 (const_string "multiple")
10809 (const_string "multiple")])]
10810 )
10811
10812 (define_insn "*ifcompare_move_shift"
10813 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10814 (if_then_else:SI
10815 (match_operator 6 "arm_comparison_operator"
10816 [(match_operand:SI 4 "s_register_operand" "r,r")
10817 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10818 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10819 (match_operator:SI 7 "shift_operator"
10820 [(match_operand:SI 2 "s_register_operand" "r,r")
10821 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
10822 (clobber (reg:CC CC_REGNUM))]
10823 "TARGET_ARM"
10824 "#"
10825 [(set_attr "conds" "clob")
10826 (set_attr "length" "8,12")
10827 (set_attr "type" "multiple")]
10828 )
10829
10830 (define_insn "*if_move_shift"
10831 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10832 (if_then_else:SI
10833 (match_operator 5 "arm_comparison_operator"
10834 [(match_operand 6 "cc_register" "") (const_int 0)])
10835 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10836 (match_operator:SI 4 "shift_operator"
10837 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10838 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10839 "TARGET_ARM"
10840 "@
10841 mov%D5\\t%0, %2%S4
10842 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10843 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10844 [(set_attr "conds" "use")
10845 (set_attr "shift" "2")
10846 (set_attr "length" "4,8,8")
10847 (set_attr_alternative "type"
10848 [(if_then_else (match_operand 3 "const_int_operand" "")
10849 (const_string "mov_shift" )
10850 (const_string "mov_shift_reg"))
10851 (const_string "multiple")
10852 (const_string "multiple")])]
10853 )
10854
10855 (define_insn "*ifcompare_shift_shift"
10856 [(set (match_operand:SI 0 "s_register_operand" "=r")
10857 (if_then_else:SI
10858 (match_operator 7 "arm_comparison_operator"
10859 [(match_operand:SI 5 "s_register_operand" "r")
10860 (match_operand:SI 6 "arm_add_operand" "rIL")])
10861 (match_operator:SI 8 "shift_operator"
10862 [(match_operand:SI 1 "s_register_operand" "r")
10863 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10864 (match_operator:SI 9 "shift_operator"
10865 [(match_operand:SI 3 "s_register_operand" "r")
10866 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10867 (clobber (reg:CC CC_REGNUM))]
10868 "TARGET_ARM"
10869 "#"
10870 [(set_attr "conds" "clob")
10871 (set_attr "length" "12")
10872 (set_attr "type" "multiple")]
10873 )
10874
10875 (define_insn "*if_shift_shift"
10876 [(set (match_operand:SI 0 "s_register_operand" "=r")
10877 (if_then_else:SI
10878 (match_operator 5 "arm_comparison_operator"
10879 [(match_operand 8 "cc_register" "") (const_int 0)])
10880 (match_operator:SI 6 "shift_operator"
10881 [(match_operand:SI 1 "s_register_operand" "r")
10882 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10883 (match_operator:SI 7 "shift_operator"
10884 [(match_operand:SI 3 "s_register_operand" "r")
10885 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10886 "TARGET_ARM"
10887 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10888 [(set_attr "conds" "use")
10889 (set_attr "shift" "1")
10890 (set_attr "length" "8")
10891 (set (attr "type") (if_then_else
10892 (and (match_operand 2 "const_int_operand" "")
10893 (match_operand 4 "const_int_operand" ""))
10894 (const_string "mov_shift")
10895 (const_string "mov_shift_reg")))]
10896 )
10897
10898 (define_insn "*ifcompare_not_arith"
10899 [(set (match_operand:SI 0 "s_register_operand" "=r")
10900 (if_then_else:SI
10901 (match_operator 6 "arm_comparison_operator"
10902 [(match_operand:SI 4 "s_register_operand" "r")
10903 (match_operand:SI 5 "arm_add_operand" "rIL")])
10904 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10905 (match_operator:SI 7 "shiftable_operator"
10906 [(match_operand:SI 2 "s_register_operand" "r")
10907 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10908 (clobber (reg:CC CC_REGNUM))]
10909 "TARGET_ARM"
10910 "#"
10911 [(set_attr "conds" "clob")
10912 (set_attr "length" "12")
10913 (set_attr "type" "multiple")]
10914 )
10915
10916 (define_insn "*if_not_arith"
10917 [(set (match_operand:SI 0 "s_register_operand" "=r")
10918 (if_then_else:SI
10919 (match_operator 5 "arm_comparison_operator"
10920 [(match_operand 4 "cc_register" "") (const_int 0)])
10921 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10922 (match_operator:SI 6 "shiftable_operator"
10923 [(match_operand:SI 2 "s_register_operand" "r")
10924 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10925 "TARGET_ARM"
10926 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10927 [(set_attr "conds" "use")
10928 (set_attr "type" "mvn_reg")
10929 (set_attr "length" "8")]
10930 )
10931
10932 (define_insn "*ifcompare_arith_not"
10933 [(set (match_operand:SI 0 "s_register_operand" "=r")
10934 (if_then_else:SI
10935 (match_operator 6 "arm_comparison_operator"
10936 [(match_operand:SI 4 "s_register_operand" "r")
10937 (match_operand:SI 5 "arm_add_operand" "rIL")])
10938 (match_operator:SI 7 "shiftable_operator"
10939 [(match_operand:SI 2 "s_register_operand" "r")
10940 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10941 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10942 (clobber (reg:CC CC_REGNUM))]
10943 "TARGET_ARM"
10944 "#"
10945 [(set_attr "conds" "clob")
10946 (set_attr "length" "12")
10947 (set_attr "type" "multiple")]
10948 )
10949
10950 (define_insn "*if_arith_not"
10951 [(set (match_operand:SI 0 "s_register_operand" "=r")
10952 (if_then_else:SI
10953 (match_operator 5 "arm_comparison_operator"
10954 [(match_operand 4 "cc_register" "") (const_int 0)])
10955 (match_operator:SI 6 "shiftable_operator"
10956 [(match_operand:SI 2 "s_register_operand" "r")
10957 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10958 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10959 "TARGET_ARM"
10960 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10961 [(set_attr "conds" "use")
10962 (set_attr "type" "multiple")
10963 (set_attr "length" "8")]
10964 )
10965
10966 (define_insn "*ifcompare_neg_move"
10967 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10968 (if_then_else:SI
10969 (match_operator 5 "arm_comparison_operator"
10970 [(match_operand:SI 3 "s_register_operand" "r,r")
10971 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10972 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10973 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10974 (clobber (reg:CC CC_REGNUM))]
10975 "TARGET_ARM"
10976 "#"
10977 [(set_attr "conds" "clob")
10978 (set_attr "length" "8,12")
10979 (set_attr "type" "multiple")]
10980 )
10981
10982 (define_insn_and_split "*if_neg_move"
10983 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
10984 (if_then_else:SI
10985 (match_operator 4 "arm_comparison_operator"
10986 [(match_operand 3 "cc_register" "") (const_int 0)])
10987 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
10988 (match_operand:SI 1 "s_register_operand" "0,0")))]
10989 "TARGET_32BIT"
10990 "#"
10991 "&& reload_completed"
10992 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
10993 (set (match_dup 0) (neg:SI (match_dup 2))))]
10994 ""
10995 [(set_attr "conds" "use")
10996 (set_attr "length" "4")
10997 (set_attr "arch" "t2,32")
10998 (set_attr "enabled_for_short_it" "yes,no")
10999 (set_attr "type" "logic_shift_imm")]
11000 )
11001
11002 (define_insn "*ifcompare_move_neg"
11003 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11004 (if_then_else:SI
11005 (match_operator 5 "arm_comparison_operator"
11006 [(match_operand:SI 3 "s_register_operand" "r,r")
11007 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11008 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11009 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
11010 (clobber (reg:CC CC_REGNUM))]
11011 "TARGET_ARM"
11012 "#"
11013 [(set_attr "conds" "clob")
11014 (set_attr "length" "8,12")
11015 (set_attr "type" "multiple")]
11016 )
11017
11018 (define_insn_and_split "*if_move_neg"
11019 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
11020 (if_then_else:SI
11021 (match_operator 4 "arm_comparison_operator"
11022 [(match_operand 3 "cc_register" "") (const_int 0)])
11023 (match_operand:SI 1 "s_register_operand" "0,0")
11024 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
11025 "TARGET_32BIT"
11026 "#"
11027 "&& reload_completed"
11028 [(cond_exec (match_dup 5)
11029 (set (match_dup 0) (neg:SI (match_dup 2))))]
11030 {
11031 machine_mode mode = GET_MODE (operands[3]);
11032 rtx_code rc = GET_CODE (operands[4]);
11033
11034 if (mode == CCFPmode || mode == CCFPEmode)
11035 rc = reverse_condition_maybe_unordered (rc);
11036 else
11037 rc = reverse_condition (rc);
11038
11039 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
11040 }
11041 [(set_attr "conds" "use")
11042 (set_attr "length" "4")
11043 (set_attr "arch" "t2,32")
11044 (set_attr "enabled_for_short_it" "yes,no")
11045 (set_attr "type" "logic_shift_imm")]
11046 )
11047
11048 (define_insn "*arith_adjacentmem"
11049 [(set (match_operand:SI 0 "s_register_operand" "=r")
11050 (match_operator:SI 1 "shiftable_operator"
11051 [(match_operand:SI 2 "memory_operand" "m")
11052 (match_operand:SI 3 "memory_operand" "m")]))
11053 (clobber (match_scratch:SI 4 "=r"))]
11054 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
11055 "*
11056 {
11057 rtx ldm[3];
11058 rtx arith[4];
11059 rtx base_reg;
11060 HOST_WIDE_INT val1 = 0, val2 = 0;
11061
11062 if (REGNO (operands[0]) > REGNO (operands[4]))
11063 {
11064 ldm[1] = operands[4];
11065 ldm[2] = operands[0];
11066 }
11067 else
11068 {
11069 ldm[1] = operands[0];
11070 ldm[2] = operands[4];
11071 }
11072
11073 base_reg = XEXP (operands[2], 0);
11074
11075 if (!REG_P (base_reg))
11076 {
11077 val1 = INTVAL (XEXP (base_reg, 1));
11078 base_reg = XEXP (base_reg, 0);
11079 }
11080
11081 if (!REG_P (XEXP (operands[3], 0)))
11082 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
11083
11084 arith[0] = operands[0];
11085 arith[3] = operands[1];
11086
11087 if (val1 < val2)
11088 {
11089 arith[1] = ldm[1];
11090 arith[2] = ldm[2];
11091 }
11092 else
11093 {
11094 arith[1] = ldm[2];
11095 arith[2] = ldm[1];
11096 }
11097
11098 ldm[0] = base_reg;
11099 if (val1 != 0 && val2 != 0)
11100 {
11101 rtx ops[3];
11102
11103 if (val1 == 4 || val2 == 4)
11104 /* Other val must be 8, since we know they are adjacent and neither
11105 is zero. */
11106 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
11107 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
11108 {
11109 ldm[0] = ops[0] = operands[4];
11110 ops[1] = base_reg;
11111 ops[2] = GEN_INT (val1);
11112 output_add_immediate (ops);
11113 if (val1 < val2)
11114 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11115 else
11116 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11117 }
11118 else
11119 {
11120 /* Offset is out of range for a single add, so use two ldr. */
11121 ops[0] = ldm[1];
11122 ops[1] = base_reg;
11123 ops[2] = GEN_INT (val1);
11124 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11125 ops[0] = ldm[2];
11126 ops[2] = GEN_INT (val2);
11127 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11128 }
11129 }
11130 else if (val1 != 0)
11131 {
11132 if (val1 < val2)
11133 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11134 else
11135 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11136 }
11137 else
11138 {
11139 if (val1 < val2)
11140 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11141 else
11142 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11143 }
11144 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
11145 return \"\";
11146 }"
11147 [(set_attr "length" "12")
11148 (set_attr "predicable" "yes")
11149 (set_attr "type" "load_4")]
11150 )
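;; For illustration (a sketch, not taken from the pattern's operands): for
;; something like
;;
;;   int sum2_sketch (int *p) { return p[0] + p[1]; }
;;
;; the pattern can emit a single "ldmia rB, {rX, rY}" followed by
;; "add r0, rX, rY" instead of two separate loads; rB, rX and rY here are
;; placeholder register names.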
11151
11152 ; This pattern is never tried by combine, so do it as a peephole
11153
11154 (define_peephole2
11155 [(set (match_operand:SI 0 "arm_general_register_operand" "")
11156 (match_operand:SI 1 "arm_general_register_operand" ""))
11157 (set (reg:CC CC_REGNUM)
11158 (compare:CC (match_dup 1) (const_int 0)))]
11159 "TARGET_ARM"
11160 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
11161 (set (match_dup 0) (match_dup 1))])]
11162 ""
11163 )
11164
11165 (define_split
11166 [(set (match_operand:SI 0 "s_register_operand" "")
11167 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
11168 (const_int 0))
11169 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
11170 [(match_operand:SI 3 "s_register_operand" "")
11171 (match_operand:SI 4 "arm_rhs_operand" "")]))))
11172 (clobber (match_operand:SI 5 "s_register_operand" ""))]
11173 "TARGET_ARM"
11174 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
11175 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
11176 (match_dup 5)))]
11177 ""
11178 )
11179
11180 ;; This split can be used because CC_Z mode implies that the following
11181 ;; branch will be an equality or an unsigned inequality, so the sign
11182 ;; extension is not needed.
11183
11184 (define_split
11185 [(set (reg:CC_Z CC_REGNUM)
11186 (compare:CC_Z
11187 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
11188 (const_int 24))
11189 (match_operand 1 "const_int_operand" "")))
11190 (clobber (match_scratch:SI 2 ""))]
11191 "TARGET_ARM
11192 && ((UINTVAL (operands[1]))
11193 == ((UINTVAL (operands[1])) >> 24) << 24)"
11194 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
11195 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
11196 "
11197 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
11198 "
11199 )
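;; A hedged C sketch of the equivalence the split relies on, assuming 32-bit
;; unsigned arithmetic and a constant c whose low 24 bits are zero (the
;; function name is a placeholder):
;;
;;   #include <stdint.h>
;;   int cmp_shifted_byte_sketch (uint8_t x, uint32_t c)
;;   {
;;     /* (x << 24) == c  holds exactly when  (uint32_t) x == (c >> 24), so
;;        the byte can be zero-extended and compared against c >> 24; no
;;        sign extension is needed for the equality/unsigned tests that
;;        CC_Z mode allows.  */
;;     return ((uint32_t) x << 24) == c;
;;   }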
11200 ;; ??? Check the patterns above for Thumb-2 usefulness
11201
11202 (define_expand "prologue"
11203 [(clobber (const_int 0))]
11204 "TARGET_EITHER"
11205 "if (TARGET_32BIT)
11206 arm_expand_prologue ();
11207 else
11208 thumb1_expand_prologue ();
11209 DONE;
11210 "
11211 )
11212
11213 (define_expand "epilogue"
11214 [(clobber (const_int 0))]
11215 "TARGET_EITHER"
11216 "
11217 if (crtl->calls_eh_return)
11218 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
11219 if (TARGET_THUMB1)
11220 {
11221 thumb1_expand_epilogue ();
11222 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
11223 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
11224 }
11225 else if (HAVE_return)
11226 {
11227 /* HAVE_return tests USE_RETURN_INSN (FALSE), so there is no need
11228 to test it again explicitly.  */
11229 emit_jump_insn (gen_return ());
11230 }
11231 else if (TARGET_32BIT)
11232 {
11233 arm_expand_epilogue (true);
11234 }
11235 DONE;
11236 "
11237 )
11238
11239 ;; Note - although unspec_volatiles USE all hard registers,
11240 ;; USEs are ignored after reload has completed.  Thus we need
11241 ;; to add an unspec of the link register to ensure that flow
11242 ;; does not think that it is unused by the sibcall branch that
11243 ;; will replace the standard function epilogue.
11244 (define_expand "sibcall_epilogue"
11245 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
11246 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
11247 "TARGET_32BIT"
11248 "
11249 arm_expand_epilogue (false);
11250 DONE;
11251 "
11252 )
11253
11254 (define_expand "eh_epilogue"
11255 [(use (match_operand:SI 0 "register_operand"))
11256 (use (match_operand:SI 1 "register_operand"))
11257 (use (match_operand:SI 2 "register_operand"))]
11258 "TARGET_EITHER"
11259 "
11260 {
11261 cfun->machine->eh_epilogue_sp_ofs = operands[1];
11262 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
11263 {
11264 rtx ra = gen_rtx_REG (Pmode, 2);
11265
11266 emit_move_insn (ra, operands[2]);
11267 operands[2] = ra;
11268 }
11269 /* This is a hack -- we may have crystallized the function type too
11270 early.  */
11271 cfun->machine->func_type = 0;
11272 }"
11273 )
11274
11275 ;; This split is only used during output to reduce the number of patterns
11276 ;; that need assembler instructions adding to them. We allowed the setting
11277 ;; of the conditions to be implicit during rtl generation so that
11278 ;; the conditional compare patterns would work.  However, this conflicts to
11279 ;; some extent with the conditional data operations, so we have to split them
11280 ;; up again here.
11281
11282 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
11283 ;; conditional execution sufficient?
11284
11285 (define_split
11286 [(set (match_operand:SI 0 "s_register_operand" "")
11287 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11288 [(match_operand 2 "" "") (match_operand 3 "" "")])
11289 (match_dup 0)
11290 (match_operand 4 "" "")))
11291 (clobber (reg:CC CC_REGNUM))]
11292 "TARGET_ARM && reload_completed"
11293 [(set (match_dup 5) (match_dup 6))
11294 (cond_exec (match_dup 7)
11295 (set (match_dup 0) (match_dup 4)))]
11296 "
11297 {
11298 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11299 operands[2], operands[3]);
11300 enum rtx_code rc = GET_CODE (operands[1]);
11301
11302 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11303 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11304 if (mode == CCFPmode || mode == CCFPEmode)
11305 rc = reverse_condition_maybe_unordered (rc);
11306 else
11307 rc = reverse_condition (rc);
11308
11309 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
11310 }"
11311 )
11312
11313 (define_split
11314 [(set (match_operand:SI 0 "s_register_operand" "")
11315 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11316 [(match_operand 2 "" "") (match_operand 3 "" "")])
11317 (match_operand 4 "" "")
11318 (match_dup 0)))
11319 (clobber (reg:CC CC_REGNUM))]
11320 "TARGET_ARM && reload_completed"
11321 [(set (match_dup 5) (match_dup 6))
11322 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
11323 (set (match_dup 0) (match_dup 4)))]
11324 "
11325 {
11326 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11327 operands[2], operands[3]);
11328
11329 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11330 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11331 }"
11332 )
11333
11334 (define_split
11335 [(set (match_operand:SI 0 "s_register_operand" "")
11336 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11337 [(match_operand 2 "" "") (match_operand 3 "" "")])
11338 (match_operand 4 "" "")
11339 (match_operand 5 "" "")))
11340 (clobber (reg:CC CC_REGNUM))]
11341 "TARGET_ARM && reload_completed"
11342 [(set (match_dup 6) (match_dup 7))
11343 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
11344 (set (match_dup 0) (match_dup 4)))
11345 (cond_exec (match_dup 8)
11346 (set (match_dup 0) (match_dup 5)))]
11347 "
11348 {
11349 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11350 operands[2], operands[3]);
11351 enum rtx_code rc = GET_CODE (operands[1]);
11352
11353 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11354 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11355 if (mode == CCFPmode || mode == CCFPEmode)
11356 rc = reverse_condition_maybe_unordered (rc);
11357 else
11358 rc = reverse_condition (rc);
11359
11360 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
11361 }"
11362 )
11363
11364 (define_split
11365 [(set (match_operand:SI 0 "s_register_operand" "")
11366 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11367 [(match_operand:SI 2 "s_register_operand" "")
11368 (match_operand:SI 3 "arm_add_operand" "")])
11369 (match_operand:SI 4 "arm_rhs_operand" "")
11370 (not:SI
11371 (match_operand:SI 5 "s_register_operand" ""))))
11372 (clobber (reg:CC CC_REGNUM))]
11373 "TARGET_ARM && reload_completed"
11374 [(set (match_dup 6) (match_dup 7))
11375 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
11376 (set (match_dup 0) (match_dup 4)))
11377 (cond_exec (match_dup 8)
11378 (set (match_dup 0) (not:SI (match_dup 5))))]
11379 "
11380 {
11381 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11382 operands[2], operands[3]);
11383 enum rtx_code rc = GET_CODE (operands[1]);
11384
11385 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11386 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11387 if (mode == CCFPmode || mode == CCFPEmode)
11388 rc = reverse_condition_maybe_unordered (rc);
11389 else
11390 rc = reverse_condition (rc);
11391
11392 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
11393 }"
11394 )
11395
11396 (define_insn "*cond_move_not"
11397 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11398 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
11399 [(match_operand 3 "cc_register" "") (const_int 0)])
11400 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11401 (not:SI
11402 (match_operand:SI 2 "s_register_operand" "r,r"))))]
11403 "TARGET_ARM"
11404 "@
11405 mvn%D4\\t%0, %2
11406 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
11407 [(set_attr "conds" "use")
11408 (set_attr "type" "mvn_reg,multiple")
11409 (set_attr "length" "4,8")]
11410 )
11411
11412 ;; The next two patterns occur when an AND operation is followed by an
11413 ;; scc insn sequence.
11414
11415 (define_insn "*sign_extract_onebit"
11416 [(set (match_operand:SI 0 "s_register_operand" "=r")
11417 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11418 (const_int 1)
11419 (match_operand:SI 2 "const_int_operand" "n")))
11420 (clobber (reg:CC CC_REGNUM))]
11421 "TARGET_ARM"
11422 "*
11423 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11424 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
11425 return \"mvnne\\t%0, #0\";
11426 "
11427 [(set_attr "conds" "clob")
11428 (set_attr "length" "8")
11429 (set_attr "type" "multiple")]
11430 )
11431
11432 (define_insn "*not_signextract_onebit"
11433 [(set (match_operand:SI 0 "s_register_operand" "=r")
11434 (not:SI
11435 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11436 (const_int 1)
11437 (match_operand:SI 2 "const_int_operand" "n"))))
11438 (clobber (reg:CC CC_REGNUM))]
11439 "TARGET_ARM"
11440 "*
11441 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11442 output_asm_insn (\"tst\\t%1, %2\", operands);
11443 output_asm_insn (\"mvneq\\t%0, #0\", operands);
11444 return \"movne\\t%0, #0\";
11445 "
11446 [(set_attr "conds" "clob")
11447 (set_attr "length" "12")
11448 (set_attr "type" "multiple")]
11449 )
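;; Illustrative C for the two patterns above (a sketch, assuming 32-bit int;
;; function names are placeholders): a one-bit sign_extract yields -1 when
;; the selected bit is set and 0 when it is clear, and the second pattern
;; simply inverts that.
;;
;;   int sext_bit_sketch (unsigned x, int pos)
;;   { return (x & (1u << pos)) ? -1 : 0; }
;;   int not_sext_bit_sketch (unsigned x, int pos)
;;   { return (x & (1u << pos)) ? 0 : -1; }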
11450 ;; ??? The above patterns need auditing for Thumb-2
11451
11452 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
11453 ;; expressions. For simplicity, the first register is also in the unspec
11454 ;; part.
11455 ;; To avoid using a GNU extension, the length attribute is computed in
11456 ;; the C function arm_attr_length_push_multi.
11457 (define_insn "*push_multi"
11458 [(match_parallel 2 "multi_register_push"
11459 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
11460 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
11461 UNSPEC_PUSH_MULT))])]
11462 ""
11463 "*
11464 {
11465 int num_saves = XVECLEN (operands[2], 0);
11466
11467 /* For the StrongARM at least it is faster to
11468 use STR to store only a single register.
11469 In Thumb mode always use push, and the assembler will pick
11470 something appropriate. */
11471 if (num_saves == 1 && TARGET_ARM)
11472 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
11473 else
11474 {
11475 int i;
11476 char pattern[100];
11477
11478 if (TARGET_32BIT)
11479 strcpy (pattern, \"push%?\\t{%1\");
11480 else
11481 strcpy (pattern, \"push\\t{%1\");
11482
11483 for (i = 1; i < num_saves; i++)
11484 {
11485 strcat (pattern, \", %|\");
11486 strcat (pattern,
11487 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
11488 }
11489
11490 strcat (pattern, \"}\");
11491 output_asm_insn (pattern, operands);
11492 }
11493
11494 return \"\";
11495 }"
11496 [(set_attr "type" "store_16")
11497 (set (attr "length")
11498 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
11499 )
11500
11501 (define_insn "stack_tie"
11502 [(set (mem:BLK (scratch))
11503 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
11504 (match_operand:SI 1 "s_register_operand" "rk")]
11505 UNSPEC_PRLG_STK))]
11506 ""
11507 ""
11508 [(set_attr "length" "0")
11509 (set_attr "type" "block")]
11510 )
11511
11512 ;; Pop (as used in epilogue RTL)
11513 ;;
11514 (define_insn "*load_multiple_with_writeback"
11515 [(match_parallel 0 "load_multiple_operation"
11516 [(set (match_operand:SI 1 "s_register_operand" "+rk")
11517 (plus:SI (match_dup 1)
11518 (match_operand:SI 2 "const_int_I_operand" "I")))
11519 (set (match_operand:SI 3 "s_register_operand" "=rk")
11520 (mem:SI (match_dup 1)))
11521 ])]
11522 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11523 "*
11524 {
11525 arm_output_multireg_pop (operands, /*return_pc=*/false,
11526 /*cond=*/const_true_rtx,
11527 /*reverse=*/false,
11528 /*update=*/true);
11529 return \"\";
11530 }
11531 "
11532 [(set_attr "type" "load_16")
11533 (set_attr "predicable" "yes")
11534 (set (attr "length")
11535 (symbol_ref "arm_attr_length_pop_multi (operands,
11536 /*return_pc=*/false,
11537 /*write_back_p=*/true)"))]
11538 )
11539
11540 ;; Pop with return (as used in epilogue RTL)
11541 ;;
11542 ;; This instruction is generated when the registers are popped at the end
11543 ;; of the epilogue.  Instead of popping the value into LR and then
11544 ;; generating a jump to LR, the value is popped directly into PC.  Hence,
11545 ;; the pattern is combined with (return).
11546 (define_insn "*pop_multiple_with_writeback_and_return"
11547 [(match_parallel 0 "pop_multiple_return"
11548 [(return)
11549 (set (match_operand:SI 1 "s_register_operand" "+rk")
11550 (plus:SI (match_dup 1)
11551 (match_operand:SI 2 "const_int_I_operand" "I")))
11552 (set (match_operand:SI 3 "s_register_operand" "=rk")
11553 (mem:SI (match_dup 1)))
11554 ])]
11555 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11556 "*
11557 {
11558 arm_output_multireg_pop (operands, /*return_pc=*/true,
11559 /*cond=*/const_true_rtx,
11560 /*reverse=*/false,
11561 /*update=*/true);
11562 return \"\";
11563 }
11564 "
11565 [(set_attr "type" "load_16")
11566 (set_attr "predicable" "yes")
11567 (set (attr "length")
11568 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11569 /*write_back_p=*/true)"))]
11570 )
11571
11572 (define_insn "*pop_multiple_with_return"
11573 [(match_parallel 0 "pop_multiple_return"
11574 [(return)
11575 (set (match_operand:SI 2 "s_register_operand" "=rk")
11576 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11577 ])]
11578 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11579 "*
11580 {
11581 arm_output_multireg_pop (operands, /*return_pc=*/true,
11582 /*cond=*/const_true_rtx,
11583 /*reverse=*/false,
11584 /*update=*/false);
11585 return \"\";
11586 }
11587 "
11588 [(set_attr "type" "load_16")
11589 (set_attr "predicable" "yes")
11590 (set (attr "length")
11591 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11592 /*write_back_p=*/false)"))]
11593 )
11594
11595 ;; Load into PC and return
11596 (define_insn "*ldr_with_return"
11597 [(return)
11598 (set (reg:SI PC_REGNUM)
11599 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
11600 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11601 "ldr%?\t%|pc, [%0], #4"
11602 [(set_attr "type" "load_4")
11603 (set_attr "predicable" "yes")]
11604 )
11605 ;; Pop for floating point registers (as used in epilogue RTL)
11606 (define_insn "*vfp_pop_multiple_with_writeback"
11607 [(match_parallel 0 "pop_multiple_fp"
11608 [(set (match_operand:SI 1 "s_register_operand" "+rk")
11609 (plus:SI (match_dup 1)
11610 (match_operand:SI 2 "const_int_I_operand" "I")))
11611 (set (match_operand:DF 3 "vfp_hard_register_operand" "")
11612 (mem:DF (match_dup 1)))])]
11613 "TARGET_32BIT && TARGET_HARD_FLOAT"
11614 "*
11615 {
11616 int num_regs = XVECLEN (operands[0], 0);
11617 char pattern[100];
11618 rtx op_list[2];
11619 strcpy (pattern, \"vldm\\t\");
11620 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
11621 strcat (pattern, \"!, {\");
11622 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
11623 strcat (pattern, \"%P0\");
11624 if ((num_regs - 1) > 1)
11625 {
11626 strcat (pattern, \"-%P1\");
11627 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
11628 }
11629
11630 strcat (pattern, \"}\");
11631 output_asm_insn (pattern, op_list);
11632 return \"\";
11633 }
11634 "
11635 [(set_attr "type" "load_16")
11636 (set_attr "conds" "unconditional")
11637 (set_attr "predicable" "no")]
11638 )
11639
11640 ;; Special patterns for dealing with the constant pool
11641
11642 (define_insn "align_4"
11643 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
11644 "TARGET_EITHER"
11645 "*
11646 assemble_align (32);
11647 return \"\";
11648 "
11649 [(set_attr "type" "no_insn")]
11650 )
11651
11652 (define_insn "align_8"
11653 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
11654 "TARGET_EITHER"
11655 "*
11656 assemble_align (64);
11657 return \"\";
11658 "
11659 [(set_attr "type" "no_insn")]
11660 )
11661
11662 (define_insn "consttable_end"
11663 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
11664 "TARGET_EITHER"
11665 "*
11666 making_const_table = FALSE;
11667 return \"\";
11668 "
11669 [(set_attr "type" "no_insn")]
11670 )
11671
11672 (define_insn "consttable_1"
11673 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
11674 "TARGET_EITHER"
11675 "*
11676 making_const_table = TRUE;
11677 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
11678 assemble_zeros (3);
11679 return \"\";
11680 "
11681 [(set_attr "length" "4")
11682 (set_attr "type" "no_insn")]
11683 )
11684
11685 (define_insn "consttable_2"
11686 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
11687 "TARGET_EITHER"
11688 "*
11689 {
11690 rtx x = operands[0];
11691 making_const_table = TRUE;
11692 switch (GET_MODE_CLASS (GET_MODE (x)))
11693 {
11694 case MODE_FLOAT:
11695 arm_emit_fp16_const (x);
11696 break;
11697 default:
11698 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
11699 assemble_zeros (2);
11700 break;
11701 }
11702 return \"\";
11703 }"
11704 [(set_attr "length" "4")
11705 (set_attr "type" "no_insn")]
11706 )
11707
11708 (define_insn "consttable_4"
11709 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
11710 "TARGET_EITHER"
11711 "*
11712 {
11713 rtx x = operands[0];
11714 making_const_table = TRUE;
11715 scalar_float_mode float_mode;
11716 if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
11717 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
11718 else
11719 {
11720 /* XXX: Sometimes gcc does something really dumb and ends up with
11721 a HIGH in a constant pool entry, usually because it's trying to
11722 load into a VFP register. We know this will always be used in
11723 combination with a LO_SUM which ignores the high bits, so just
11724 strip off the HIGH. */
11725 if (GET_CODE (x) == HIGH)
11726 x = XEXP (x, 0);
11727 assemble_integer (x, 4, BITS_PER_WORD, 1);
11728 mark_symbol_refs_as_used (x);
11729 }
11730 return \"\";
11731 }"
11732 [(set_attr "length" "4")
11733 (set_attr "type" "no_insn")]
11734 )
11735
11736 (define_insn "consttable_8"
11737 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
11738 "TARGET_EITHER"
11739 "*
11740 {
11741 making_const_table = TRUE;
11742 scalar_float_mode float_mode;
11743 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
11744 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
11745 float_mode, BITS_PER_WORD);
11746 else
11747 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
11748 return \"\";
11749 }"
11750 [(set_attr "length" "8")
11751 (set_attr "type" "no_insn")]
11752 )
11753
11754 (define_insn "consttable_16"
11755 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
11756 "TARGET_EITHER"
11757 "*
11758 {
11759 making_const_table = TRUE;
11760 scalar_float_mode float_mode;
11761 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
11762 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
11763 float_mode, BITS_PER_WORD);
11764 else
11765 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
11766 return \"\";
11767 }"
11768 [(set_attr "length" "16")
11769 (set_attr "type" "no_insn")]
11770 )
11771
11772 ;; V5 instructions.
11773
11774 (define_insn "clzsi2"
11775 [(set (match_operand:SI 0 "s_register_operand" "=r")
11776 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11777 "TARGET_32BIT && arm_arch5t"
11778 "clz%?\\t%0, %1"
11779 [(set_attr "predicable" "yes")
11780 (set_attr "type" "clz")])
11781
11782 (define_insn "rbitsi2"
11783 [(set (match_operand:SI 0 "s_register_operand" "=r")
11784 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
11785 "TARGET_32BIT && arm_arch_thumb2"
11786 "rbit%?\\t%0, %1"
11787 [(set_attr "predicable" "yes")
11788 (set_attr "type" "clz")])
11789
11790 ;; Keep this as a CTZ expression until after reload and then split
11791 ;; into RBIT + CLZ. Since RBIT is represented as an UNSPEC it is unlikely
11792 ;; to fold with any other expression.
11793
11794 (define_insn_and_split "ctzsi2"
11795 [(set (match_operand:SI 0 "s_register_operand" "=r")
11796 (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11797 "TARGET_32BIT && arm_arch_thumb2"
11798 "#"
11799 "&& reload_completed"
11800 [(const_int 0)]
11801 "
11802 emit_insn (gen_rbitsi2 (operands[0], operands[1]));
11803 emit_insn (gen_clzsi2 (operands[0], operands[0]));
11804 DONE;
11805 ")
11806
11807 ;; V5E instructions.
11808
11809 (define_insn "prefetch"
11810 [(prefetch (match_operand:SI 0 "address_operand" "p")
11811 (match_operand:SI 1 "" "")
11812 (match_operand:SI 2 "" ""))]
11813 "TARGET_32BIT && arm_arch5te"
11814 "pld\\t%a0"
11815 [(set_attr "type" "load_4")]
11816 )
11817
11818 ;; General predication pattern
11819
11820 (define_cond_exec
11821 [(match_operator 0 "arm_comparison_operator"
11822 [(match_operand 1 "cc_register" "")
11823 (const_int 0)])]
11824 "TARGET_32BIT
11825 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
11826 ""
11827 [(set_attr "predicated" "yes")]
11828 )
11829
11830 (define_insn "force_register_use"
11831 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
11832 ""
11833 "%@ %0 needed"
11834 [(set_attr "length" "0")
11835 (set_attr "type" "no_insn")]
11836 )
11837
11838
11839 ;; Patterns for exception handling
11840
11841 (define_expand "eh_return"
11842 [(use (match_operand 0 "general_operand"))]
11843 "TARGET_EITHER"
11844 "
11845 {
11846 if (TARGET_32BIT)
11847 emit_insn (gen_arm_eh_return (operands[0]));
11848 else
11849 emit_insn (gen_thumb_eh_return (operands[0]));
11850 DONE;
11851 }"
11852 )
11853
11854 ;; We can't expand this before we know where the link register is stored.
11855 (define_insn_and_split "arm_eh_return"
11856 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
11857 VUNSPEC_EH_RETURN)
11858 (clobber (match_scratch:SI 1 "=&r"))]
11859 "TARGET_ARM"
11860 "#"
11861 "&& reload_completed"
11862 [(const_int 0)]
11863 "
11864 {
11865 arm_set_return_address (operands[0], operands[1]);
11866 DONE;
11867 }"
11868 )
11869
11870 \f
11871 ;; TLS support
11872
11873 (define_insn "load_tp_hard"
11874 [(set (match_operand:SI 0 "register_operand" "=r")
11875 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
11876 "TARGET_HARD_TP"
11877 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
11878 [(set_attr "predicable" "yes")
11879 (set_attr "type" "mrs")]
11880 )
11881
11882 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11883 (define_insn "load_tp_soft_fdpic"
11884 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11885 (clobber (reg:SI FDPIC_REGNUM))
11886 (clobber (reg:SI LR_REGNUM))
11887 (clobber (reg:SI IP_REGNUM))
11888 (clobber (reg:CC CC_REGNUM))]
11889 "TARGET_SOFT_TP && TARGET_FDPIC"
11890 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11891 [(set_attr "conds" "clob")
11892 (set_attr "type" "branch")]
11893 )
11894
11895 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11896 (define_insn "load_tp_soft"
11897 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11898 (clobber (reg:SI LR_REGNUM))
11899 (clobber (reg:SI IP_REGNUM))
11900 (clobber (reg:CC CC_REGNUM))]
11901 "TARGET_SOFT_TP && !TARGET_FDPIC"
11902 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11903 [(set_attr "conds" "clob")
11904 (set_attr "type" "branch")]
11905 )
11906
11907 ;; tls descriptor call
11908 (define_insn "tlscall"
11909 [(set (reg:SI R0_REGNUM)
11910 (unspec:SI [(reg:SI R0_REGNUM)
11911 (match_operand:SI 0 "" "X")
11912 (match_operand 1 "" "")] UNSPEC_TLS))
11913 (clobber (reg:SI R1_REGNUM))
11914 (clobber (reg:SI LR_REGNUM))
11915 (clobber (reg:SI CC_REGNUM))]
11916 "TARGET_GNU2_TLS"
11917 {
11918 targetm.asm_out.internal_label (asm_out_file, "LPIC",
11919 INTVAL (operands[1]));
11920 return "bl\\t%c0(tlscall)";
11921 }
11922 [(set_attr "conds" "clob")
11923 (set_attr "length" "4")
11924 (set_attr "type" "branch")]
11925 )
11926
11927 ;; For thread pointer builtin
11928 (define_expand "get_thread_pointersi"
11929 [(match_operand:SI 0 "s_register_operand")]
11930 ""
11931 "
11932 {
11933 arm_load_tp (operands[0]);
11934 DONE;
11935 }")
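;; Example (sketch): __builtin_thread_pointer () expands through
;; get_thread_pointersi.  With a hardware thread register (TARGET_HARD_TP)
;; arm_load_tp emits the single instruction from load_tp_hard,
;;   mrc   p15, 0, r0, c13, c0, 3
;; otherwise it falls back to the library call
;;   bl    __aeabi_read_tp
;; as in the load_tp_soft patterns above.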
11936
11937 ;;
11938
11939 ;; We only care about the lower 16 bits of the constant
11940 ;; being inserted into the upper 16 bits of the register.
11941 (define_insn "*arm_movtas_ze"
11942 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
11943 (const_int 16)
11944 (const_int 16))
11945 (match_operand:SI 1 "const_int_operand" ""))]
11946 "TARGET_HAVE_MOVT"
11947 "@
11948 movt%?\t%0, %L1
11949 movt\t%0, %L1"
11950 [(set_attr "arch" "32,v8mb")
11951 (set_attr "predicable" "yes")
11952 (set_attr "length" "4")
11953 (set_attr "type" "alu_sreg")]
11954 )
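;; Example (sketch, register name and printed immediate illustrative): an
;; insertion into bits 16-31 such as
;;   x = (x & 0xffff) | (0x1234 << 16);
;; can be matched by this pattern and emitted as a single
;;   movt  r0, #4660
;; leaving the low halfword of the register untouched.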
11955
11956 (define_insn "*arm_rev"
11957 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
11958 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
11959 "arm_arch6"
11960 "@
11961 rev\t%0, %1
11962 rev%?\t%0, %1
11963 rev%?\t%0, %1"
11964 [(set_attr "arch" "t1,t2,32")
11965 (set_attr "length" "2,2,4")
11966 (set_attr "predicable" "no,yes,yes")
11967 (set_attr "type" "rev")]
11968 )
11969
11970 (define_expand "arm_legacy_rev"
11971 [(set (match_operand:SI 2 "s_register_operand")
11972 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
11973 (const_int 16))
11974 (match_dup 1)))
11975 (set (match_dup 2)
11976 (lshiftrt:SI (match_dup 2)
11977 (const_int 8)))
11978 (set (match_operand:SI 3 "s_register_operand")
11979 (rotatert:SI (match_dup 1)
11980 (const_int 8)))
11981 (set (match_dup 2)
11982 (and:SI (match_dup 2)
11983 (const_int -65281)))
11984 (set (match_operand:SI 0 "s_register_operand")
11985 (xor:SI (match_dup 3)
11986 (match_dup 2)))]
11987 "TARGET_32BIT"
11988 ""
11989 )
11990
11991 ;; Reuse temporaries to keep register pressure down.
11992 (define_expand "thumb_legacy_rev"
11993 [(set (match_operand:SI 2 "s_register_operand")
11994 (ashift:SI (match_operand:SI 1 "s_register_operand")
11995 (const_int 24)))
11996 (set (match_operand:SI 3 "s_register_operand")
11997 (lshiftrt:SI (match_dup 1)
11998 (const_int 24)))
11999 (set (match_dup 3)
12000 (ior:SI (match_dup 3)
12001 (match_dup 2)))
12002 (set (match_operand:SI 4 "s_register_operand")
12003 (const_int 16))
12004 (set (match_operand:SI 5 "s_register_operand")
12005 (rotatert:SI (match_dup 1)
12006 (match_dup 4)))
12007 (set (match_dup 2)
12008 (ashift:SI (match_dup 5)
12009 (const_int 24)))
12010 (set (match_dup 5)
12011 (lshiftrt:SI (match_dup 5)
12012 (const_int 24)))
12013 (set (match_dup 5)
12014 (ior:SI (match_dup 5)
12015 (match_dup 2)))
12016 (set (match_dup 5)
12017 (rotatert:SI (match_dup 5)
12018 (match_dup 4)))
12019 (set (match_operand:SI 0 "s_register_operand")
12020 (ior:SI (match_dup 5)
12021 (match_dup 3)))]
12022 "TARGET_THUMB"
12023 ""
12024 )
12025
12026 ;; ARM-specific expansion of signed mod by power of 2
12027 ;; using conditional negate.
12028 ;; For r0 % n where n is a power of 2 produce:
12029 ;; rsbs r1, r0, #0
12030 ;; and r0, r0, #(n - 1)
12031 ;; and r1, r1, #(n - 1)
12032 ;; rsbpl r0, r1, #0
12033
12034 (define_expand "modsi3"
12035 [(match_operand:SI 0 "register_operand")
12036 (match_operand:SI 1 "register_operand")
12037 (match_operand:SI 2 "const_int_operand")]
12038 "TARGET_32BIT"
12039 {
12040 HOST_WIDE_INT val = INTVAL (operands[2]);
12041
12042 if (val <= 0
12043 || exact_log2 (val) <= 0)
12044 FAIL;
12045
12046 rtx mask = GEN_INT (val - 1);
12047
12048   /* In the special case of r0 % 2 we can emit the even shorter sequence:
12049 cmp r0, #0
12050 and r0, r0, #1
12051 rsblt r0, r0, #0. */
12052
12053 if (val == 2)
12054 {
12055 rtx cc_reg = arm_gen_compare_reg (LT,
12056 operands[1], const0_rtx, NULL_RTX);
12057 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
12058 rtx masked = gen_reg_rtx (SImode);
12059
12060 emit_insn (gen_andsi3 (masked, operands[1], mask));
12061 emit_move_insn (operands[0],
12062 gen_rtx_IF_THEN_ELSE (SImode, cond,
12063 gen_rtx_NEG (SImode,
12064 masked),
12065 masked));
12066 DONE;
12067 }
12068
12069 rtx neg_op = gen_reg_rtx (SImode);
12070 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
12071 operands[1]));
12072
12073 /* Extract the condition register and mode. */
12074 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
12075 rtx cc_reg = SET_DEST (cmp);
12076 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
12077
12078 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
12079
12080 rtx masked_neg = gen_reg_rtx (SImode);
12081 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
12082
12083 /* We want a conditional negate here, but emitting COND_EXEC rtxes
12084 during expand does not always work. Do an IF_THEN_ELSE instead. */
12085 emit_move_insn (operands[0],
12086 gen_rtx_IF_THEN_ELSE (SImode, cond,
12087 gen_rtx_NEG (SImode, masked_neg),
12088 operands[0]));
12089
12090
12091 DONE;
12092 }
12093 )
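;; Example (sketch): for
;;   int f (int x) { return x % 16; }
;; this expander emits the conditional-negate idiom described above, roughly
;;   rsbs  r1, r0, #0
;;   and   r0, r0, #15
;;   and   r1, r1, #15
;;   rsbpl r0, r1, #0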
12094
12095 (define_expand "bswapsi2"
12096 [(set (match_operand:SI 0 "s_register_operand")
12097 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
12098 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
12099 "
12100 if (!arm_arch6)
12101 {
12102 rtx op2 = gen_reg_rtx (SImode);
12103 rtx op3 = gen_reg_rtx (SImode);
12104
12105 if (TARGET_THUMB)
12106 {
12107 rtx op4 = gen_reg_rtx (SImode);
12108 rtx op5 = gen_reg_rtx (SImode);
12109
12110 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
12111 op2, op3, op4, op5));
12112 }
12113 else
12114 {
12115 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
12116 op2, op3));
12117 }
12118
12119 DONE;
12120 }
12121 "
12122 )
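;; Example (sketch): __builtin_bswap32 (x) goes through this expander.  On
;; ARMv6 or later it becomes a single
;;   rev   r0, r0
;; while on older cores (when not optimising for size) the arm_legacy_rev /
;; thumb_legacy_rev sequences above are emitted instead.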
12123
12124 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
12125 ;; and unsigned variants, respectively. For rev16, expose
12126 ;; byte-swapping in the lower 16 bits only.
12127 (define_insn "*arm_revsh"
12128 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12129 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
12130 "arm_arch6"
12131 "@
12132 revsh\t%0, %1
12133 revsh%?\t%0, %1
12134 revsh%?\t%0, %1"
12135 [(set_attr "arch" "t1,t2,32")
12136 (set_attr "length" "2,2,4")
12137 (set_attr "type" "rev")]
12138 )
12139
12140 (define_insn "*arm_rev16"
12141 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
12142 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
12143 "arm_arch6"
12144 "@
12145 rev16\t%0, %1
12146 rev16%?\t%0, %1
12147 rev16%?\t%0, %1"
12148 [(set_attr "arch" "t1,t2,32")
12149 (set_attr "length" "2,2,4")
12150 (set_attr "type" "rev")]
12151 )
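;; Example (sketch, register names illustrative):
;;   unsigned short f (unsigned short x) { return __builtin_bswap16 (x); }
;; typically maps onto these patterns and emits
;;   rev16 r0, r0
;; with revsh used instead when the swapped value is immediately
;; sign-extended to int.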
12152
12153 ;; There are no canonicalisation rules for the position of the lshiftrt and
12154 ;; ashift operations within an IOR/AND RTX, so we provide two patterns, one
12155 ;; matching each valid permutation.
12156
12157 (define_insn "arm_rev16si2"
12158 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
12159 (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
12160 (const_int 8))
12161 (match_operand:SI 3 "const_int_operand" "n,n,n"))
12162 (and:SI (lshiftrt:SI (match_dup 1)
12163 (const_int 8))
12164 (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
12165 "arm_arch6
12166 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
12167 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
12168 "rev16\\t%0, %1"
12169 [(set_attr "arch" "t1,t2,32")
12170 (set_attr "length" "2,2,4")
12171 (set_attr "type" "rev")]
12172 )
12173
12174 (define_insn "arm_rev16si2_alt"
12175 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
12176 (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
12177 (const_int 8))
12178 (match_operand:SI 2 "const_int_operand" "n,n,n"))
12179 (and:SI (ashift:SI (match_dup 1)
12180 (const_int 8))
12181 (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
12182 "arm_arch6
12183 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
12184 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
12185 "rev16\\t%0, %1"
12186 [(set_attr "arch" "t1,t2,32")
12187 (set_attr "length" "2,2,4")
12188 (set_attr "type" "rev")]
12189 )
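;; Example (sketch): the classic open-coded halfword swap
;;   (((x & 0xff00ff00) >> 8) | ((x & 0x00ff00ff) << 8))
;; produces exactly this IOR/AND/shift shape and is matched here, emitting
;;   rev16 r0, r1
;; provided the two masks satisfy the aarch_rev16_*_mask_imm_p checks.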
12190
12191 (define_expand "bswaphi2"
12192 [(set (match_operand:HI 0 "s_register_operand")
12193 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
12194 "arm_arch6"
12195 ""
12196 )
12197
12198 ;; Patterns for LDRD/STRD in Thumb2 mode
12199
12200 (define_insn "*thumb2_ldrd"
12201 [(set (match_operand:SI 0 "s_register_operand" "=r")
12202 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12203 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
12204 (set (match_operand:SI 3 "s_register_operand" "=r")
12205 (mem:SI (plus:SI (match_dup 1)
12206 (match_operand:SI 4 "const_int_operand" ""))))]
12207 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12208 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
12209 && (operands_ok_ldrd_strd (operands[0], operands[3],
12210 operands[1], INTVAL (operands[2]),
12211 false, true))"
12212 "ldrd%?\t%0, %3, [%1, %2]"
12213 [(set_attr "type" "load_8")
12214 (set_attr "predicable" "yes")])
12215
12216 (define_insn "*thumb2_ldrd_base"
12217 [(set (match_operand:SI 0 "s_register_operand" "=r")
12218 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12219 (set (match_operand:SI 2 "s_register_operand" "=r")
12220 (mem:SI (plus:SI (match_dup 1)
12221 (const_int 4))))]
12222 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12223 && (operands_ok_ldrd_strd (operands[0], operands[2],
12224 operands[1], 0, false, true))"
12225 "ldrd%?\t%0, %2, [%1]"
12226 [(set_attr "type" "load_8")
12227 (set_attr "predicable" "yes")])
12228
12229 (define_insn "*thumb2_ldrd_base_neg"
12230 [(set (match_operand:SI 0 "s_register_operand" "=r")
12231 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12232 (const_int -4))))
12233 (set (match_operand:SI 2 "s_register_operand" "=r")
12234 (mem:SI (match_dup 1)))]
12235 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12236 && (operands_ok_ldrd_strd (operands[0], operands[2],
12237 operands[1], -4, false, true))"
12238 "ldrd%?\t%0, %2, [%1, #-4]"
12239 [(set_attr "type" "load_8")
12240 (set_attr "predicable" "yes")])
12241
12242 (define_insn "*thumb2_strd"
12243 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12244 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
12245 (match_operand:SI 2 "s_register_operand" "r"))
12246 (set (mem:SI (plus:SI (match_dup 0)
12247 (match_operand:SI 3 "const_int_operand" "")))
12248 (match_operand:SI 4 "s_register_operand" "r"))]
12249 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12250 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
12251 && (operands_ok_ldrd_strd (operands[2], operands[4],
12252 operands[0], INTVAL (operands[1]),
12253 false, false))"
12254 "strd%?\t%2, %4, [%0, %1]"
12255 [(set_attr "type" "store_8")
12256 (set_attr "predicable" "yes")])
12257
12258 (define_insn "*thumb2_strd_base"
12259 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
12260 (match_operand:SI 1 "s_register_operand" "r"))
12261 (set (mem:SI (plus:SI (match_dup 0)
12262 (const_int 4)))
12263 (match_operand:SI 2 "s_register_operand" "r"))]
12264 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12265 && (operands_ok_ldrd_strd (operands[1], operands[2],
12266 operands[0], 0, false, false))"
12267 "strd%?\t%1, %2, [%0]"
12268 [(set_attr "type" "store_8")
12269 (set_attr "predicable" "yes")])
12270
12271 (define_insn "*thumb2_strd_base_neg"
12272 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12273 (const_int -4)))
12274 (match_operand:SI 1 "s_register_operand" "r"))
12275 (set (mem:SI (match_dup 0))
12276 (match_operand:SI 2 "s_register_operand" "r"))]
12277 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12278 && (operands_ok_ldrd_strd (operands[1], operands[2],
12279 operands[0], -4, false, false))"
12280 "strd%?\t%1, %2, [%0, #-4]"
12281 [(set_attr "type" "store_8")
12282 (set_attr "predicable" "yes")])
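;; Example (sketch, register names illustrative): two adjacent word loads
;; such as
;;   a = p[0]; b = p[1];
;; can be combined (by the peepholes in ldrdstrd.md) into one of the forms
;; above, e.g.
;;   ldrd  r0, r1, [r2]
;; subject to the register-pairing checks in operands_ok_ldrd_strd.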
12283
12284 ;; ARMv8 CRC32 instructions.
12285 (define_insn "arm_<crc_variant>"
12286 [(set (match_operand:SI 0 "s_register_operand" "=r")
12287 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
12288 (match_operand:<crc_mode> 2 "s_register_operand" "r")]
12289 CRC))]
12290 "TARGET_CRC32"
12291 "<crc_variant>\\t%0, %1, %2"
12292 [(set_attr "type" "crc")
12293 (set_attr "conds" "unconditional")]
12294 )
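;; Example (sketch): with a CRC-enabled architecture (e.g. -march=armv8-a+crc)
;; the ACLE intrinsics from arm_acle.h are expected to map onto this pattern,
;; e.g.
;;   uint32_t f (uint32_t c, uint32_t d) { return __crc32w (c, d); }
;; emitting roughly
;;   crc32w  r0, r0, r1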
12295
12296 ;; Load the load/store double peephole optimizations.
12297 (include "ldrdstrd.md")
12298
12299 ;; Load the load/store multiple patterns
12300 (include "ldmstm.md")
12301
12302 ;; Patterns in ldmstm.md don't cover more than 4 registers.  This pattern
12303 ;; covers the large lists without explicit writeback that are generated for
12304 ;; the APCS_FRAME epilogue.  The operands are validated through the
12305 ;; load_multiple_operation match_parallel predicate rather than through
12306 ;; constraints, so the pattern is only enabled after reload.
12307 (define_insn "*load_multiple"
12308 [(match_parallel 0 "load_multiple_operation"
12309 [(set (match_operand:SI 2 "s_register_operand" "=rk")
12310 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12311 ])]
12312 "TARGET_32BIT && reload_completed"
12313 "*
12314 {
12315 arm_output_multireg_pop (operands, /*return_pc=*/false,
12316 /*cond=*/const_true_rtx,
12317 /*reverse=*/false,
12318 /*update=*/false);
12319 return \"\";
12320 }
12321 "
12322 [(set_attr "predicable" "yes")]
12323 )
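;; Example (sketch, register list illustrative): an APCS_FRAME epilogue
;; restoring more than four call-saved registers without writeback can go
;; through this pattern and be printed by arm_output_multireg_pop as a single
;; load-multiple, roughly
;;   ldm   rN, {r4, r5, r6, r7, r8, r9, r10, fp}
;; instead of a run of separate ldr instructions.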
12324
12325 (define_expand "copysignsf3"
12326 [(match_operand:SF 0 "register_operand")
12327 (match_operand:SF 1 "register_operand")
12328 (match_operand:SF 2 "register_operand")]
12329 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
12330 "{
12331 emit_move_insn (operands[0], operands[2]);
12332 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
12333 GEN_INT (31), GEN_INT (0),
12334 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
12335 DONE;
12336 }"
12337 )
12338
12339 (define_expand "copysigndf3"
12340 [(match_operand:DF 0 "register_operand")
12341 (match_operand:DF 1 "register_operand")
12342 (match_operand:DF 2 "register_operand")]
12343 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
12344 "{
12345 rtx op0_low = gen_lowpart (SImode, operands[0]);
12346 rtx op0_high = gen_highpart (SImode, operands[0]);
12347 rtx op1_low = gen_lowpart (SImode, operands[1]);
12348 rtx op1_high = gen_highpart (SImode, operands[1]);
12349 rtx op2_high = gen_highpart (SImode, operands[2]);
12350
12351 rtx scratch1 = gen_reg_rtx (SImode);
12352 rtx scratch2 = gen_reg_rtx (SImode);
12353 emit_move_insn (scratch1, op2_high);
12354 emit_move_insn (scratch2, op1_high);
12355
12356      emit_insn (gen_rtx_SET (scratch1,
12357                              gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT (31))));
12358      emit_insn (gen_insv_t2 (scratch2, GEN_INT (1), GEN_INT (31), scratch1));
12359 emit_move_insn (op0_low, op1_low);
12360 emit_move_insn (op0_high, scratch2);
12361
12362 DONE;
12363 }"
12364 )
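;; Example (sketch, register numbers depend on the soft-float word order):
;; with soft-float and Thumb-2 available,
;;   double f (double x, double y) { return __builtin_copysign (x, y); }
;; is expanded by copysigndf3 above into integer moves plus a bit-field
;; insert that copies the sign bit of y's high word into x's high word,
;; roughly
;;   lsrs  r4, r3, #31
;;   bfi   r1, r4, #31, #1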
12365
12366 ;; movmisalign patterns for HImode and SImode.
12367 (define_expand "movmisalign<mode>"
12368 [(match_operand:HSI 0 "general_operand")
12369 (match_operand:HSI 1 "general_operand")]
12370 "unaligned_access"
12371 {
12372 /* This pattern is not permitted to fail during expansion: if both arguments
12373 are non-registers (e.g. memory := constant), force operand 1 into a
12374 register. */
12375 rtx (* gen_unaligned_load)(rtx, rtx);
12376 rtx tmp_dest = operands[0];
12377 if (!s_register_operand (operands[0], <MODE>mode)
12378 && !s_register_operand (operands[1], <MODE>mode))
12379 operands[1] = force_reg (<MODE>mode, operands[1]);
12380
12381 if (<MODE>mode == HImode)
12382 {
12383 gen_unaligned_load = gen_unaligned_loadhiu;
12384 tmp_dest = gen_reg_rtx (SImode);
12385 }
12386 else
12387 gen_unaligned_load = gen_unaligned_loadsi;
12388
12389 if (MEM_P (operands[1]))
12390 {
12391 emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
12392 if (<MODE>mode == HImode)
12393 emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
12394 }
12395 else
12396 emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
12397
12398 DONE;
12399 })
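;; Example (sketch): accesses known to be potentially unaligned, such as
;; fields of a packed struct,
;;   struct __attribute__((packed)) s { char c; int i; };
;;   int f (struct s *p) { return p->i; }
;; may go through this expander and the unaligned_load/unaligned_store
;; patterns when unaligned_access is enabled, rather than byte-by-byte
;; sequences.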
12400
12401 (define_insn "arm_<cdp>"
12402 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12403 (match_operand:SI 1 "immediate_operand" "n")
12404 (match_operand:SI 2 "immediate_operand" "n")
12405 (match_operand:SI 3 "immediate_operand" "n")
12406 (match_operand:SI 4 "immediate_operand" "n")
12407 (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
12408 "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
12409 {
12410 arm_const_bounds (operands[0], 0, 16);
12411 arm_const_bounds (operands[1], 0, 16);
12412 arm_const_bounds (operands[2], 0, (1 << 5));
12413 arm_const_bounds (operands[3], 0, (1 << 5));
12414 arm_const_bounds (operands[4], 0, (1 << 5));
12415 arm_const_bounds (operands[5], 0, 8);
12416 return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
12417 }
12418 [(set_attr "length" "4")
12419 (set_attr "type" "coproc")])
12420
12421 (define_insn "*ldc"
12422 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12423 (match_operand:SI 1 "immediate_operand" "n")
12424 (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
12425 "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
12426 {
12427 arm_const_bounds (operands[0], 0, 16);
12428 arm_const_bounds (operands[1], 0, (1 << 5));
12429 return "<ldc>\\tp%c0, CR%c1, %2";
12430 }
12431 [(set_attr "length" "4")
12432 (set_attr "type" "coproc")])
12433
12434 (define_insn "*stc"
12435 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12436 (match_operand:SI 1 "immediate_operand" "n")
12437 (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
12438 "arm_coproc_builtin_available (VUNSPEC_<STC>)"
12439 {
12440 arm_const_bounds (operands[0], 0, 16);
12441 arm_const_bounds (operands[1], 0, (1 << 5));
12442 return "<stc>\\tp%c0, CR%c1, %2";
12443 }
12444 [(set_attr "length" "4")
12445 (set_attr "type" "coproc")])
12446
12447 (define_expand "arm_<ldc>"
12448 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
12449 (match_operand:SI 1 "immediate_operand")
12450 (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
12451 "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
12452
12453 (define_expand "arm_<stc>"
12454 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
12455 (match_operand:SI 1 "immediate_operand")
12456 (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
12457 "arm_coproc_builtin_available (VUNSPEC_<STC>)")
12458
12459 (define_insn "arm_<mcr>"
12460 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12461 (match_operand:SI 1 "immediate_operand" "n")
12462 (match_operand:SI 2 "s_register_operand" "r")
12463 (match_operand:SI 3 "immediate_operand" "n")
12464 (match_operand:SI 4 "immediate_operand" "n")
12465 (match_operand:SI 5 "immediate_operand" "n")] MCRI)
12466 (use (match_dup 2))]
12467 "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
12468 {
12469 arm_const_bounds (operands[0], 0, 16);
12470 arm_const_bounds (operands[1], 0, 8);
12471 arm_const_bounds (operands[3], 0, (1 << 5));
12472 arm_const_bounds (operands[4], 0, (1 << 5));
12473 arm_const_bounds (operands[5], 0, 8);
12474 return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
12475 }
12476 [(set_attr "length" "4")
12477 (set_attr "type" "coproc")])
12478
12479 (define_insn "arm_<mrc>"
12480 [(set (match_operand:SI 0 "s_register_operand" "=r")
12481 (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
12482 (match_operand:SI 2 "immediate_operand" "n")
12483 (match_operand:SI 3 "immediate_operand" "n")
12484 (match_operand:SI 4 "immediate_operand" "n")
12485 (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
12486 "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
12487 {
12488 arm_const_bounds (operands[1], 0, 16);
12489 arm_const_bounds (operands[2], 0, 8);
12490 arm_const_bounds (operands[3], 0, (1 << 5));
12491 arm_const_bounds (operands[4], 0, (1 << 5));
12492 arm_const_bounds (operands[5], 0, 8);
12493 return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
12494 }
12495 [(set_attr "length" "4")
12496 (set_attr "type" "coproc")])
12497
12498 (define_insn "arm_<mcrr>"
12499 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12500 (match_operand:SI 1 "immediate_operand" "n")
12501 (match_operand:DI 2 "s_register_operand" "r")
12502 (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
12503 (use (match_dup 2))]
12504 "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
12505 {
12506 arm_const_bounds (operands[0], 0, 16);
12507 arm_const_bounds (operands[1], 0, 8);
12508 arm_const_bounds (operands[3], 0, (1 << 5));
12509 return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
12510 }
12511 [(set_attr "length" "4")
12512 (set_attr "type" "coproc")])
12513
12514 (define_insn "arm_<mrrc>"
12515 [(set (match_operand:DI 0 "s_register_operand" "=r")
12516 (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
12517 (match_operand:SI 2 "immediate_operand" "n")
12518 (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
12519 "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
12520 {
12521 arm_const_bounds (operands[1], 0, 16);
12522 arm_const_bounds (operands[2], 0, 8);
12523 arm_const_bounds (operands[3], 0, (1 << 5));
12524 return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
12525 }
12526 [(set_attr "length" "4")
12527 (set_attr "type" "coproc")])
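;; Example (sketch): the ACLE coprocessor intrinsics (__arm_cdp, __arm_ldc,
;; __arm_mcr, __arm_mrc, ...) from arm_acle.h map onto these patterns, e.g.
;;   unsigned int tp = __arm_mrc (15, 0, 13, 0, 3);
;; is expected to emit
;;   mrc   p15, 0, r0, c13, c0, 3
;; after the arm_const_bounds checks on the constant operands.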
12528
12529 (define_expand "speculation_barrier"
12530 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
12531 "TARGET_EITHER"
12532 "
12533   /* For Thumb-1 (except Armv8 derivatives) and for pre-Armv7 we don't
12534      have a usable barrier (and probably don't need one in practice).
12535      But to be safe if such code is run on later architectures, call a
12536      helper function in libgcc that will do the right thing for the
12537      running system.  */
12538 if (!(arm_arch7 || arm_arch8))
12539 {
12540 arm_emit_speculation_barrier_function ();
12541 DONE;
12542 }
12543 "
12544 )
12545
12546 ;; Generate a hard speculation barrier when we have not enabled speculation
12547 ;; tracking.
12548 (define_insn "*speculation_barrier_insn"
12549 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
12550 "arm_arch7 || arm_arch8"
12551 "isb\;dsb\\tsy"
12552 [(set_attr "type" "block")
12553 (set_attr "length" "8")]
12554 )
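;; Example (sketch): this named pattern is what __builtin_speculation_safe_value
;; falls back on.  On Armv7-A/Armv8 targets it emits the two-instruction
;; barrier
;;   isb
;;   dsb   sy
;; and on older cores the expander above calls the libgcc helper instead.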
12555
12556 ;; Vector bits common to IWMMXT and Neon
12557 (include "vec-common.md")
12558 ;; Load the Intel Wireless Multimedia Extension patterns
12559 (include "iwmmxt.md")
12560 ;; Load the VFP co-processor patterns
12561 (include "vfp.md")
12562 ;; Thumb-1 patterns
12563 (include "thumb1.md")
12564 ;; Thumb-2 patterns
12565 (include "thumb2.md")
12566 ;; Neon patterns
12567 (include "neon.md")
12568 ;; Crypto patterns
12569 (include "crypto.md")
12570 ;; Synchronization Primitives
12571 (include "sync.md")
12572 ;; Fixed-point patterns
12573 (include "arm-fixed.md")