1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2023 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
6
7 ;; This file is part of GCC.
8
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
13
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
18
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
22
 23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
24
25 \f
26 ;;---------------------------------------------------------------------------
27 ;; Constants
28
29 ;; Register numbers -- All machine registers should be defined here
30 (define_constants
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (R4_REGNUM 4) ; Fifth CORE register
34 (FDPIC_REGNUM 9) ; FDPIC register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (LAST_ARM_REGNUM 15) ;
40 (CC_REGNUM 100) ; Condition code pseudo register
41 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
42 (APSRQ_REGNUM 104) ; Q bit pseudo register
43 (APSRGE_REGNUM 105) ; GE bits pseudo register
44 (VPR_REGNUM 106) ; Vector Predication Register - MVE register.
45 ]
46 )
47 ;; 3rd operand to select_dominance_cc_mode
48 (define_constants
49 [(DOM_CC_X_AND_Y 0)
50 (DOM_CC_NX_OR_Y 1)
51 (DOM_CC_X_OR_Y 2)
52 ]
53 )
54 ;; conditional compare combination
55 (define_constants
56 [(CMP_CMP 0)
57 (CMN_CMP 1)
58 (CMP_CMN 2)
59 (CMN_CMN 3)
60 (NUM_OF_COND_CMP 4)
61 ]
62 )
63
64 \f
65 ;;---------------------------------------------------------------------------
66 ;; Attributes
67
68 ;; Processor type. This is created automatically from arm-cores.def.
69 (include "arm-tune.md")
70
71 ;; Instruction classification types
72 (include "types.md")
73
74 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
75 ; generating ARM code. This is used to control the length of some insn
76 ; patterns that share the same RTL in both ARM and Thumb code.
77 (define_attr "is_thumb" "yes,no"
78 (const (if_then_else (symbol_ref "TARGET_THUMB")
79 (const_string "yes") (const_string "no"))))
80
 81 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
82 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
83
84 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
85 (define_attr "is_thumb1" "yes,no"
86 (const (if_then_else (symbol_ref "TARGET_THUMB1")
87 (const_string "yes") (const_string "no"))))
88
89 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
90 ; The arm_restrict_it flag enables the "short IT" feature which
91 ; restricts IT blocks to a single 16-bit instruction.
92 ; This attribute should only be used on 16-bit Thumb-2 instructions
93 ; which may be predicated (the "predicable" attribute must be set).
94 (define_attr "predicable_short_it" "no,yes" (const_string "no"))
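; As an illustrative sketch (not a pattern from this file), an insn with a
; 16-bit Thumb-2 alternative and a 32-bit alternative would typically pair
;   (set_attr "predicable" "yes")
;   (set_attr "predicable_short_it" "yes,no")
; so that, when the insn is predicated and arm_restrict_it is in effect,
; only the 16-bit alternative remains enabled (see "enabled" below).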
95
96 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
97 ; This attribute should only be used on instructions which may emit
 98 ; an IT block in their expansion that is not a short IT.
99 (define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
100
101 ; Mark an instruction sequence as the required way of loading a
102 ; constant when -mpure-code is enabled (which implies
103 ; arm_disable_literal_pool)
104 (define_attr "required_for_purecode" "no,yes" (const_string "no"))
105
106 ;; Operand number of an input operand that is shifted. Zero if the
107 ;; given instruction does not shift one of its input operands.
108 (define_attr "shift" "" (const_int 0))
109
110 ;; [For compatibility with AArch64 in pipeline models]
111 ;; Attribute that specifies whether or not the instruction touches fp
112 ;; registers.
113 (define_attr "fp" "no,yes" (const_string "no"))
114
115 ; Floating Point Unit. If we only have floating point emulation, then there
116 ; is no point in scheduling the floating point insns. (Well, for best
117 ; performance we should try and group them together).
118 (define_attr "fpu" "none,vfp"
119 (const (symbol_ref "arm_fpu_attr")))
120
121 ; Predicated means that the insn form is conditionally executed based on a
122 ; predicate. We default to 'no' because no Thumb patterns match this rule
123 ; and not all ARM insns do.
124 (define_attr "predicated" "yes,no" (const_string "no"))
125
126 ; LENGTH of an instruction (in bytes)
127 (define_attr "length" ""
128 (const_int 4))
129
130 ; The architecture which supports the instruction (or alternative).
131 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
132 ; TARGET_32BIT, or "t1" or "t2" to specify a specific Thumb mode.  "v6" is
133 ; for ARM or Thumb-2 with arm_arch6, and "nov6" for ARM without
134 ; arm_arch6.  "v6t2" is for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
135 ; Baseline.  "fix_vlldm" is for fixing the v8-m/v8.1-m VLLDM erratum.
136 ; This attribute is used to compute attribute "enabled";
137 ; use value "any" to enable an alternative in all cases.
138 (define_attr "arch" "any, a, t, 32, t1, t2, v6,nov6, v6t2, \
139 v8mb, fix_vlldm, iwmmxt, iwmmxt2, armv6_or_vfpv3, \
140 neon, mve"
141 (const_string "any"))
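; For example (an illustrative sketch rather than a pattern from this file),
; an insn with two alternatives might use
;   (set_attr "arch" "t2,*")
; so that the first alternative is only considered for Thumb-2 while the
; second is available everywhere; "arch_enabled" below turns this into a
; per-alternative enable bit.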
142
143 (define_attr "arch_enabled" "no,yes"
144 (cond [(eq_attr "arch" "any")
145 (const_string "yes")
146
147 (and (eq_attr "arch" "a")
148 (match_test "TARGET_ARM"))
149 (const_string "yes")
150
151 (and (eq_attr "arch" "t")
152 (match_test "TARGET_THUMB"))
153 (const_string "yes")
154
155 (and (eq_attr "arch" "t1")
156 (match_test "TARGET_THUMB1"))
157 (const_string "yes")
158
159 (and (eq_attr "arch" "t2")
160 (match_test "TARGET_THUMB2"))
161 (const_string "yes")
162
163 (and (eq_attr "arch" "32")
164 (match_test "TARGET_32BIT"))
165 (const_string "yes")
166
167 (and (eq_attr "arch" "v6")
168 (match_test "TARGET_32BIT && arm_arch6"))
169 (const_string "yes")
170
171 (and (eq_attr "arch" "nov6")
172 (match_test "TARGET_32BIT && !arm_arch6"))
173 (const_string "yes")
174
175 (and (eq_attr "arch" "v6t2")
176 (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
177 (const_string "yes")
178
179 (and (eq_attr "arch" "v8mb")
180 (match_test "TARGET_THUMB1 && arm_arch8"))
181 (const_string "yes")
182
183 (and (eq_attr "arch" "fix_vlldm")
184 (match_test "fix_vlldm"))
185 (const_string "yes")
186
187 (and (eq_attr "arch" "iwmmxt2")
188 (match_test "TARGET_REALLY_IWMMXT2"))
189 (const_string "yes")
190
191 (and (eq_attr "arch" "armv6_or_vfpv3")
192 (match_test "arm_arch6 || TARGET_VFP3"))
193 (const_string "yes")
194
195 (and (eq_attr "arch" "neon")
196 (match_test "TARGET_NEON"))
197 (const_string "yes")
198
199 (and (eq_attr "arch" "mve")
200 (match_test "TARGET_HAVE_MVE"))
201 (const_string "yes")
202 ]
203
204 (const_string "no")))
205
206 (define_attr "opt" "any,speed,size"
207 (const_string "any"))
208
209 (define_attr "opt_enabled" "no,yes"
210 (cond [(eq_attr "opt" "any")
211 (const_string "yes")
212
213 (and (eq_attr "opt" "speed")
214 (match_test "optimize_function_for_speed_p (cfun)"))
215 (const_string "yes")
216
217 (and (eq_attr "opt" "size")
218 (match_test "optimize_function_for_size_p (cfun)"))
219 (const_string "yes")]
220 (const_string "no")))
221
222 (define_attr "use_literal_pool" "no,yes"
223 (cond [(and (eq_attr "type" "f_loads,f_loadd")
224 (match_test "CONSTANT_P (operands[1])"))
225 (const_string "yes")]
226 (const_string "no")))
227
228 ; Enable all alternatives that are both arch_enabled and insn_enabled.
229 ; FIXME: opt_enabled has been temporarily removed until we have
230 ; an attribute that allows the use of such alternatives.
231 ; This depends on caching of speed_p, size_p on a per
232 ; alternative basis.  The problem is that the enabled attribute
233 ; cannot depend on any state that is not cached or is not constant
234 ; for a compilation unit.  We probably need a generic "hot/cold"
235 ; alternative which, if implemented, could help with this.  We disable
236 ; this until such time as it is implemented and/or the improvements or
237 ; regressions from removing this attribute are double-checked.
238 ; See ashldi3_neon and <shift>di3_neon in neon.md.
239
240 (define_attr "enabled" "no,yes"
241 (cond [(and (eq_attr "predicable_short_it" "no")
242 (and (eq_attr "predicated" "yes")
243 (match_test "arm_restrict_it")))
244 (const_string "no")
245
246 (and (eq_attr "enabled_for_short_it" "no")
247 (match_test "arm_restrict_it"))
248 (const_string "no")
249
250 (and (eq_attr "required_for_purecode" "yes")
251 (not (match_test "arm_disable_literal_pool")))
252 (const_string "no")
253
254 (eq_attr "arch_enabled" "no")
255 (const_string "no")]
256 (const_string "yes")))
257
258 ; POOL_RANGE is how far away from a constant pool entry that this insn
259 ; can be placed. If the distance is zero, then this insn will never
260 ; reference the pool.
261 ; Note that for Thumb constant pools the PC value is rounded down to the
262 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
263 ; Thumb insns) should be set to <max_range> - 2.
264 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
265 ; before its address. It is set to <max_range> - (8 + <data_size>).
266 (define_attr "arm_pool_range" "" (const_int 0))
267 (define_attr "thumb2_pool_range" "" (const_int 0))
268 (define_attr "arm_neg_pool_range" "" (const_int 0))
269 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
270
271 (define_attr "pool_range" ""
272 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
273 (attr "arm_pool_range")))
274 (define_attr "neg_pool_range" ""
275 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
276 (attr "arm_neg_pool_range")))
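; As an illustrative sketch (the values are assumptions, not taken from a
; particular pattern), a word load with a 12-bit offset encoding might use
;   (set_attr "arm_pool_range" "4096")
;   (set_attr "arm_neg_pool_range" "4084")
; with the exact values depending on the load encoding and the adjustments
; described above.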
277
278 ; An assembler sequence may clobber the condition codes without us knowing.
279 ; If such an insn references the pool, then we have no way of knowing how,
280 ; so use the most conservative value for pool_range.
281 (define_asm_attributes
282 [(set_attr "conds" "clob")
283 (set_attr "length" "4")
284 (set_attr "pool_range" "250")])
285
286 ; Load scheduling, set from the arm_ld_sched variable
287 ; initialized by arm_option_override()
288 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
289
290 ; condition codes: this one is used by final_prescan_insn to speed up
291 ; conditionalizing instructions. It saves having to scan the rtl to see if
292 ; it uses or alters the condition codes.
293 ;
294 ; USE means that the condition codes are used by the insn in the process of
295 ; outputting code; this means (at present) that we can't use the insn in
296 ; inlined branches.
297 ;
298 ; SET means that the purpose of the insn is to set the condition codes in a
299 ; well-defined manner.
300 ;
301 ; CLOB means that the condition codes are altered in an undefined manner, if
302 ; they are altered at all.
303 ;
304 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
305 ; that the instruction does not use or alter the condition codes.
306 ;
307 ; NOCOND means that the instruction does not use or alter the condition
308 ; codes but can be converted into a conditionally executed instruction.
309
310 (define_attr "conds" "use,set,clob,unconditional,nocond"
311 (if_then_else
312 (ior (eq_attr "is_thumb1" "yes")
313 (eq_attr "type" "call"))
314 (const_string "clob")
315 (if_then_else
316 (ior (eq_attr "is_neon_type" "yes")
317 (eq_attr "is_mve_type" "yes"))
318 (const_string "unconditional")
319 (const_string "nocond"))))
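; For example, "adds" patterns that define the flags use
; (set_attr "conds" "set"), while "adc" patterns that only consume the carry
; use (set_attr "conds" "use"); see addsi3_compare0 and addsi3_carryin below.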
320
321 ; Predicable means that the insn can be conditionally executed based on
322 ; an automatically added predicate (additional patterns are generated by
323 ; gen...). We default to 'no' because no Thumb patterns match this rule
324 ; and not all ARM patterns do.
325 (define_attr "predicable" "no,yes" (const_string "no"))
326
327 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
328 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
329 ; suffer blockages enough to warrant modelling this (and it can adversely
330 ; affect the schedule).
331 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
332
333 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
334 ; to stall the processor. Used with model_wbuf above.
335 (define_attr "write_conflict" "no,yes"
336 (if_then_else (eq_attr "type"
337 "block,call,load_4")
338 (const_string "yes")
339 (const_string "no")))
340
341 ; Classify the insns into those that take one cycle and those that take more
342 ; than one on the main cpu execution unit.
343 (define_attr "core_cycles" "single,multi"
344 (if_then_else (eq_attr "type"
345 "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
346 alu_shift_imm_lsl_1to4, alu_shift_imm_other, alu_shift_reg, alu_dsp_reg,\
347 alus_ext, alus_imm, alus_sreg,\
348 alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
349 logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
350 logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
351 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
352 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
353 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
354 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
355 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
356 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
357 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
358 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
359 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
360 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
361 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
362 (const_string "single")
363 (const_string "multi")))
364
365 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
366 ;; distant label. Only applicable to Thumb code.
367 (define_attr "far_jump" "yes,no" (const_string "no"))
368
369
370 ;; The number of machine instructions this pattern expands to.
371 ;; Used for Thumb-2 conditional execution.
372 (define_attr "ce_count" "" (const_int 1))
373
374 ;;---------------------------------------------------------------------------
375 ;; Unspecs
376
377 (include "unspecs.md")
378
379 ;;---------------------------------------------------------------------------
380 ;; Mode iterators
381
382 (include "iterators.md")
383
384 ;;---------------------------------------------------------------------------
385 ;; Predicates
386
387 (include "predicates.md")
388 (include "constraints.md")
389
390 ;;---------------------------------------------------------------------------
391 ;; Pipeline descriptions
392
393 (define_attr "tune_cortexr4" "yes,no"
394 (const (if_then_else
395 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
396 (const_string "yes")
397 (const_string "no"))))
398
399 ;; True if the generic scheduling description should be used.
400
401 (define_attr "generic_sched" "yes,no"
402 (const (if_then_else
403 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
404 arm926ejs,arm10e,arm1026ejs,arm1136js,\
405 arm1136jfs,cortexa5,cortexa7,cortexa8,\
406 cortexa9,cortexa12,cortexa15,cortexa17,\
407 cortexa53,cortexa57,cortexm4,cortexm7,\
408 exynosm1,marvell_pj4,xgene1")
409 (eq_attr "tune_cortexr4" "yes"))
410 (const_string "no")
411 (const_string "yes"))))
412
413 (define_attr "generic_vfp" "yes,no"
414 (const (if_then_else
415 (and (eq_attr "fpu" "vfp")
416 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
417 cortexa8,cortexa9,cortexa53,cortexm4,\
418 cortexm7,marvell_pj4,xgene1")
419 (eq_attr "tune_cortexr4" "no"))
420 (const_string "yes")
421 (const_string "no"))))
422
423 (include "marvell-f-iwmmxt.md")
424 (include "arm-generic.md")
425 (include "arm926ejs.md")
426 (include "arm1020e.md")
427 (include "arm1026ejs.md")
428 (include "arm1136jfs.md")
429 (include "fa526.md")
430 (include "fa606te.md")
431 (include "fa626te.md")
432 (include "fmp626.md")
433 (include "fa726te.md")
434 (include "cortex-a5.md")
435 (include "cortex-a7.md")
436 (include "cortex-a8.md")
437 (include "cortex-a9.md")
438 (include "cortex-a15.md")
439 (include "cortex-a17.md")
440 (include "cortex-a53.md")
441 (include "cortex-a57.md")
442 (include "cortex-r4.md")
443 (include "cortex-r4f.md")
444 (include "cortex-m7.md")
445 (include "cortex-m4.md")
446 (include "cortex-m4-fpu.md")
447 (include "exynos-m1.md")
448 (include "vfp11.md")
449 (include "marvell-pj4.md")
450 (include "xgene1.md")
451
452 ;; define_subst and associated attributes
453
454 (define_subst "add_setq"
455 [(set (match_operand:SI 0 "" "")
456 (match_operand:SI 1 "" ""))]
457 ""
458 [(set (match_dup 0)
459 (match_dup 1))
460 (set (reg:CC APSRQ_REGNUM)
461 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))])
462
463 (define_subst_attr "add_clobber_q_name" "add_setq" "" "_setq")
464 (define_subst_attr "add_clobber_q_pred" "add_setq" "!ARM_Q_BIT_READ"
465 "ARM_Q_BIT_READ")
466 \f
467 ;;---------------------------------------------------------------------------
468 ;; Insn patterns
469 ;;
470 ;; Addition insns.
471
472 ;; Note: For DImode insns, there is normally no reason why operands should
473 ;; not be in the same register; what we don't want is for something being
474 ;; written to partially overlap something that is an input.
475
476 (define_expand "adddi3"
477 [(parallel
478 [(set (match_operand:DI 0 "s_register_operand")
479 (plus:DI (match_operand:DI 1 "s_register_operand")
480 (match_operand:DI 2 "reg_or_int_operand")))
481 (clobber (reg:CC CC_REGNUM))])]
482 "TARGET_EITHER"
483 "
484 if (TARGET_THUMB1)
485 {
486 if (!REG_P (operands[2]))
487 operands[2] = force_reg (DImode, operands[2]);
488 }
489 else
490 {
491 rtx lo_result, hi_result, lo_dest, hi_dest;
492 rtx lo_op1, hi_op1, lo_op2, hi_op2;
493 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
494 &lo_op2, &hi_op2);
495 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
496 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
497
498 if (lo_op2 == const0_rtx)
499 {
500 lo_dest = lo_op1;
501 if (!arm_add_operand (hi_op2, SImode))
502 hi_op2 = force_reg (SImode, hi_op2);
503 /* Assume hi_op2 won't also be zero. */
504 emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
505 }
506 else
507 {
508 if (!arm_add_operand (lo_op2, SImode))
509 lo_op2 = force_reg (SImode, lo_op2);
510 if (!arm_not_operand (hi_op2, SImode))
511 hi_op2 = force_reg (SImode, hi_op2);
512
513 emit_insn (gen_addsi3_compare_op1 (lo_dest, lo_op1, lo_op2));
514 rtx carry = gen_rtx_LTU (SImode, gen_rtx_REG (CC_Cmode, CC_REGNUM),
515 const0_rtx);
516 if (hi_op2 == const0_rtx)
517 emit_insn (gen_add0si3_carryin (hi_dest, hi_op1, carry));
518 else
519 emit_insn (gen_addsi3_carryin (hi_dest, hi_op1, hi_op2, carry));
520 }
521
522 if (lo_result != lo_dest)
523 emit_move_insn (lo_result, lo_dest);
524 if (hi_result != hi_dest)
525 emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
526 DONE;
527 }
528 "
529 )
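;; As an illustrative sketch of the expansion above, a 64-bit add in ARM or
;; Thumb-2 state typically becomes
;;   adds  lo_dest, lo_op1, lo_op2   @ addsi3_compare_op1, sets C
;;   adc   hi_dest, hi_op1, hi_op2   @ addsi3_carryin, consumes C
;; with the special cases above avoiding a full add when either half of
;; operand 2 is zero.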
530
531 (define_expand "addvsi4"
532 [(match_operand:SI 0 "s_register_operand")
533 (match_operand:SI 1 "s_register_operand")
534 (match_operand:SI 2 "arm_add_operand")
535 (match_operand 3 "")]
536 "TARGET_32BIT"
537 {
538 if (CONST_INT_P (operands[2]))
539 emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1], operands[2]));
540 else
541 emit_insn (gen_addsi3_compareV_reg (operands[0], operands[1], operands[2]));
542 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
543
544 DONE;
545 })
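;; Illustrative sketch: a signed overflow-checking add therefore becomes an
;; ADDS (or a SUBS of the negated immediate) followed by a branch on the V
;; flag, e.g.
;;   adds  r0, r1, r2
;;   bvs   .Loverflow        @ label name for illustration only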
546
547 (define_expand "addvdi4"
548 [(match_operand:DI 0 "s_register_operand")
549 (match_operand:DI 1 "s_register_operand")
550 (match_operand:DI 2 "reg_or_int_operand")
551 (match_operand 3 "")]
552 "TARGET_32BIT"
553 {
554 rtx lo_result, hi_result;
555 rtx lo_op1, hi_op1, lo_op2, hi_op2;
556 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
557 &lo_op2, &hi_op2);
558 lo_result = gen_lowpart (SImode, operands[0]);
559 hi_result = gen_highpart (SImode, operands[0]);
560
561 if (lo_op2 == const0_rtx)
562 {
563 emit_move_insn (lo_result, lo_op1);
564 if (!arm_add_operand (hi_op2, SImode))
565 hi_op2 = force_reg (SImode, hi_op2);
566
567 emit_insn (gen_addvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
568 }
569 else
570 {
571 if (!arm_add_operand (lo_op2, SImode))
572 lo_op2 = force_reg (SImode, lo_op2);
573 if (!arm_not_operand (hi_op2, SImode))
574 hi_op2 = force_reg (SImode, hi_op2);
575
576 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
577
578 if (hi_op2 == const0_rtx)
579 emit_insn (gen_addsi3_cin_vout_0 (hi_result, hi_op1));
580 else if (CONST_INT_P (hi_op2))
581 emit_insn (gen_addsi3_cin_vout_imm (hi_result, hi_op1, hi_op2));
582 else
583 emit_insn (gen_addsi3_cin_vout_reg (hi_result, hi_op1, hi_op2));
584
585 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
586 }
587
588 DONE;
589 })
590
591 (define_expand "addsi3_cin_vout_reg"
592 [(parallel
593 [(set (match_dup 3)
594 (compare:CC_V
595 (plus:DI
596 (plus:DI (match_dup 4)
597 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
598 (sign_extend:DI (match_operand:SI 2 "s_register_operand")))
599 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
600 (match_dup 2)))))
601 (set (match_operand:SI 0 "s_register_operand")
602 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
603 (match_dup 2)))])]
604 "TARGET_32BIT"
605 {
606 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
607 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
608 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
609 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
610 }
611 )
612
613 (define_insn "*addsi3_cin_vout_reg_insn"
614 [(set (reg:CC_V CC_REGNUM)
615 (compare:CC_V
616 (plus:DI
617 (plus:DI
618 (match_operand:DI 3 "arm_carry_operation" "")
619 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
620 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
621 (sign_extend:DI
622 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
623 (match_dup 1))
624 (match_dup 2)))))
625 (set (match_operand:SI 0 "s_register_operand" "=l,r")
626 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
627 (match_dup 2)))]
628 "TARGET_32BIT"
629 "@
630 adcs%?\\t%0, %0, %2
631 adcs%?\\t%0, %1, %2"
632 [(set_attr "type" "alus_sreg")
633 (set_attr "arch" "t2,*")
634 (set_attr "length" "2,4")]
635 )
636
637 (define_expand "addsi3_cin_vout_imm"
638 [(parallel
639 [(set (match_dup 3)
640 (compare:CC_V
641 (plus:DI
642 (plus:DI (match_dup 4)
643 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
644 (match_dup 2))
645 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
646 (match_dup 2)))))
647 (set (match_operand:SI 0 "s_register_operand")
648 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
649 (match_operand 2 "arm_adcimm_operand")))])]
650 "TARGET_32BIT"
651 {
652 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
653 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
654 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
655 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
656 }
657 )
658
659 (define_insn "*addsi3_cin_vout_imm_insn"
660 [(set (reg:CC_V CC_REGNUM)
661 (compare:CC_V
662 (plus:DI
663 (plus:DI
664 (match_operand:DI 3 "arm_carry_operation" "")
665 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
666 (match_operand 2 "arm_adcimm_operand" "I,K"))
667 (sign_extend:DI
668 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
669 (match_dup 1))
670 (match_dup 2)))))
671 (set (match_operand:SI 0 "s_register_operand" "=r,r")
672 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
673 (match_dup 2)))]
674 "TARGET_32BIT"
675 "@
676 adcs%?\\t%0, %1, %2
677 sbcs%?\\t%0, %1, #%B2"
678 [(set_attr "type" "alus_imm")]
679 )
680
681 (define_expand "addsi3_cin_vout_0"
682 [(parallel
683 [(set (match_dup 2)
684 (compare:CC_V
685 (plus:DI (match_dup 3)
686 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
687 (sign_extend:DI (plus:SI (match_dup 4) (match_dup 1)))))
688 (set (match_operand:SI 0 "s_register_operand")
689 (plus:SI (match_dup 4) (match_dup 1)))])]
690 "TARGET_32BIT"
691 {
692 operands[2] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
693 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
694 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
695 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
696 }
697 )
698
699 (define_insn "*addsi3_cin_vout_0_insn"
700 [(set (reg:CC_V CC_REGNUM)
701 (compare:CC_V
702 (plus:DI
703 (match_operand:DI 2 "arm_carry_operation" "")
704 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
705 (sign_extend:DI (plus:SI
706 (match_operand:SI 3 "arm_carry_operation" "")
707 (match_dup 1)))))
708 (set (match_operand:SI 0 "s_register_operand" "=r")
709 (plus:SI (match_dup 3) (match_dup 1)))]
710 "TARGET_32BIT"
711 "adcs%?\\t%0, %1, #0"
712 [(set_attr "type" "alus_imm")]
713 )
714
715 (define_expand "uaddvsi4"
716 [(match_operand:SI 0 "s_register_operand")
717 (match_operand:SI 1 "s_register_operand")
718 (match_operand:SI 2 "arm_add_operand")
719 (match_operand 3 "")]
720 "TARGET_32BIT"
721 {
722 emit_insn (gen_addsi3_compare_op1 (operands[0], operands[1], operands[2]));
723 arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
724
725 DONE;
726 })
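;; Illustrative sketch: the unsigned overflow check is simply the carry out
;; of the addition, e.g.
;;   adds  r0, r1, r2
;;   bcs   .Loverflow        @ label name for illustration only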
727
728 (define_expand "uaddvdi4"
729 [(match_operand:DI 0 "s_register_operand")
730 (match_operand:DI 1 "s_register_operand")
731 (match_operand:DI 2 "reg_or_int_operand")
732 (match_operand 3 "")]
733 "TARGET_32BIT"
734 {
735 rtx lo_result, hi_result;
736 rtx lo_op1, hi_op1, lo_op2, hi_op2;
737 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
738 &lo_op2, &hi_op2);
739 lo_result = gen_lowpart (SImode, operands[0]);
740 hi_result = gen_highpart (SImode, operands[0]);
741
742 if (lo_op2 == const0_rtx)
743 {
744 emit_move_insn (lo_result, lo_op1);
745 if (!arm_add_operand (hi_op2, SImode))
746 hi_op2 = force_reg (SImode, hi_op2);
747
748 emit_insn (gen_uaddvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
749 }
750 else
751 {
752 if (!arm_add_operand (lo_op2, SImode))
753 lo_op2 = force_reg (SImode, lo_op2);
754 if (!arm_not_operand (hi_op2, SImode))
755 hi_op2 = force_reg (SImode, hi_op2);
756
757 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
758
759 if (hi_op2 == const0_rtx)
760 emit_insn (gen_addsi3_cin_cout_0 (hi_result, hi_op1));
761 else if (CONST_INT_P (hi_op2))
762 emit_insn (gen_addsi3_cin_cout_imm (hi_result, hi_op1, hi_op2));
763 else
764 emit_insn (gen_addsi3_cin_cout_reg (hi_result, hi_op1, hi_op2));
765
766 arm_gen_unlikely_cbranch (GEU, CC_ADCmode, operands[3]);
767 }
768
769 DONE;
770 })
771
772 (define_expand "addsi3_cin_cout_reg"
773 [(parallel
774 [(set (match_dup 3)
775 (compare:CC_ADC
776 (plus:DI
777 (plus:DI (match_dup 4)
778 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
779 (zero_extend:DI (match_operand:SI 2 "s_register_operand")))
780 (const_int 4294967296)))
781 (set (match_operand:SI 0 "s_register_operand")
782 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
783 (match_dup 2)))])]
784 "TARGET_32BIT"
785 {
786 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
787 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
788 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
789 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
790 }
791 )
792
793 (define_insn "*addsi3_cin_cout_reg_insn"
794 [(set (reg:CC_ADC CC_REGNUM)
795 (compare:CC_ADC
796 (plus:DI
797 (plus:DI
798 (match_operand:DI 3 "arm_carry_operation" "")
799 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
800 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
801 (const_int 4294967296)))
802 (set (match_operand:SI 0 "s_register_operand" "=l,r")
803 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
804 (match_dup 1))
805 (match_dup 2)))]
806 "TARGET_32BIT"
807 "@
808 adcs%?\\t%0, %0, %2
809 adcs%?\\t%0, %1, %2"
810 [(set_attr "type" "alus_sreg")
811 (set_attr "arch" "t2,*")
812 (set_attr "length" "2,4")]
813 )
814
815 (define_expand "addsi3_cin_cout_imm"
816 [(parallel
817 [(set (match_dup 3)
818 (compare:CC_ADC
819 (plus:DI
820 (plus:DI (match_dup 4)
821 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
822 (match_dup 6))
823 (const_int 4294967296)))
824 (set (match_operand:SI 0 "s_register_operand")
825 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
826 (match_operand:SI 2 "arm_adcimm_operand")))])]
827 "TARGET_32BIT"
828 {
829 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
830 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
831 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
832 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
833 operands[6] = GEN_INT (UINTVAL (operands[2]) & 0xffffffff);
834 }
835 )
836
837 (define_insn "*addsi3_cin_cout_imm_insn"
838 [(set (reg:CC_ADC CC_REGNUM)
839 (compare:CC_ADC
840 (plus:DI
841 (plus:DI
842 (match_operand:DI 3 "arm_carry_operation" "")
843 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
844 (match_operand:DI 5 "const_int_operand" "n,n"))
845 (const_int 4294967296)))
846 (set (match_operand:SI 0 "s_register_operand" "=r,r")
847 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
848 (match_dup 1))
849 (match_operand:SI 2 "arm_adcimm_operand" "I,K")))]
850 "TARGET_32BIT
851 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[5])"
852 "@
853 adcs%?\\t%0, %1, %2
854 sbcs%?\\t%0, %1, #%B2"
855 [(set_attr "type" "alus_imm")]
856 )
857
858 (define_expand "addsi3_cin_cout_0"
859 [(parallel
860 [(set (match_dup 2)
861 (compare:CC_ADC
862 (plus:DI (match_dup 3)
863 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
864 (const_int 4294967296)))
865 (set (match_operand:SI 0 "s_register_operand")
866 (plus:SI (match_dup 4) (match_dup 1)))])]
867 "TARGET_32BIT"
868 {
869 operands[2] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
870 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
871 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
872 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
873 }
874 )
875
876 (define_insn "*addsi3_cin_cout_0_insn"
877 [(set (reg:CC_ADC CC_REGNUM)
878 (compare:CC_ADC
879 (plus:DI
880 (match_operand:DI 2 "arm_carry_operation" "")
881 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
882 (const_int 4294967296)))
883 (set (match_operand:SI 0 "s_register_operand" "=r")
884 (plus:SI (match_operand:SI 3 "arm_carry_operation" "") (match_dup 1)))]
885 "TARGET_32BIT"
886 "adcs%?\\t%0, %1, #0"
887 [(set_attr "type" "alus_imm")]
888 )
889
890 (define_expand "addsi3"
891 [(set (match_operand:SI 0 "s_register_operand")
892 (plus:SI (match_operand:SI 1 "s_register_operand")
893 (match_operand:SI 2 "reg_or_int_operand")))]
894 "TARGET_EITHER"
895 "
896 if (TARGET_32BIT && CONST_INT_P (operands[2]))
897 {
898 arm_split_constant (PLUS, SImode, NULL_RTX,
899 INTVAL (operands[2]), operands[0], operands[1],
900 optimize && can_create_pseudo_p ());
901 DONE;
902 }
903 "
904 )
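;; Illustrative sketch of the constant splitting above: an addend that is
;; not a valid immediate, such as #0x10001, can be synthesised as two adds
;; of encodable pieces, e.g.
;;   add   r0, r1, #65536
;;   add   r0, r0, #1
;; (arm_split_constant chooses the actual decomposition).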
905
906 ; If there is a scratch available, this will be faster than synthesizing the
907 ; addition.
908 (define_peephole2
909 [(match_scratch:SI 3 "r")
910 (set (match_operand:SI 0 "arm_general_register_operand" "")
911 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
912 (match_operand:SI 2 "const_int_operand" "")))]
913 "TARGET_32BIT &&
914 !(const_ok_for_arm (INTVAL (operands[2]))
915 || const_ok_for_arm (-INTVAL (operands[2])))
916 && const_ok_for_arm (~INTVAL (operands[2]))"
917 [(set (match_dup 3) (match_dup 2))
918 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
919 ""
920 )
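;; Illustrative sketch: when neither the constant nor its negation is
;; encodable but its bitwise NOT is, e.g. adding #0xffff0fff, the scratch
;; copy becomes a single MVN followed by a register ADD:
;;   mvn   r3, #0xf000        @ r3 = 0xffff0fff
;;   add   r0, r1, r3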
921
922 ;; The r/r/k alternative is required when reloading the address
923 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
924 ;; put the duplicated register first, and not try the commutative version.
925 (define_insn_and_split "*arm_addsi3"
926 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
927 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
928 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
929 "TARGET_32BIT"
930 "@
931 add%?\\t%0, %0, %2
932 add%?\\t%0, %1, %2
933 add%?\\t%0, %1, %2
934 add%?\\t%0, %1, %2
935 add%?\\t%0, %1, %2
936 add%?\\t%0, %1, %2
937 add%?\\t%0, %2, %1
938 add%?\\t%0, %1, %2
939 addw%?\\t%0, %1, %2
940 addw%?\\t%0, %1, %2
941 sub%?\\t%0, %1, #%n2
942 sub%?\\t%0, %1, #%n2
943 sub%?\\t%0, %1, #%n2
944 subw%?\\t%0, %1, #%n2
945 subw%?\\t%0, %1, #%n2
946 #"
947 "TARGET_32BIT
948 && CONST_INT_P (operands[2])
949 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
950 && (reload_completed || !arm_eliminable_register (operands[1]))"
951 [(clobber (const_int 0))]
952 "
953 arm_split_constant (PLUS, SImode, curr_insn,
954 INTVAL (operands[2]), operands[0],
955 operands[1], 0);
956 DONE;
957 "
958 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
959 (set_attr "predicable" "yes")
960 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
961 (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
962 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
963 (const_string "alu_imm")
964 (const_string "alu_sreg")))
965 ]
966 )
967
968 (define_insn "addsi3_compareV_reg"
969 [(set (reg:CC_V CC_REGNUM)
970 (compare:CC_V
971 (plus:DI
972 (sign_extend:DI (match_operand:SI 1 "register_operand" "%l,0,r"))
973 (sign_extend:DI (match_operand:SI 2 "register_operand" "l,r,r")))
974 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
975 (set (match_operand:SI 0 "register_operand" "=l,r,r")
976 (plus:SI (match_dup 1) (match_dup 2)))]
977 "TARGET_32BIT"
978 "adds%?\\t%0, %1, %2"
979 [(set_attr "conds" "set")
980 (set_attr "arch" "t2,t2,*")
981 (set_attr "length" "2,2,4")
982 (set_attr "type" "alus_sreg")]
983 )
984
985 (define_insn "*addsi3_compareV_reg_nosum"
986 [(set (reg:CC_V CC_REGNUM)
987 (compare:CC_V
988 (plus:DI
989 (sign_extend:DI (match_operand:SI 0 "register_operand" "%l,r"))
990 (sign_extend:DI (match_operand:SI 1 "register_operand" "l,r")))
991 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
992 "TARGET_32BIT"
993 "cmn%?\\t%0, %1"
994 [(set_attr "conds" "set")
995 (set_attr "arch" "t2,*")
996 (set_attr "length" "2,4")
997 (set_attr "type" "alus_sreg")]
998 )
999
1000 (define_insn "subvsi3_intmin"
1001 [(set (reg:CC_V CC_REGNUM)
1002 (compare:CC_V
1003 (plus:DI
1004 (sign_extend:DI
1005 (match_operand:SI 1 "register_operand" "r"))
1006 (const_int 2147483648))
1007 (sign_extend:DI (plus:SI (match_dup 1) (const_int -2147483648)))))
1008 (set (match_operand:SI 0 "register_operand" "=r")
1009 (plus:SI (match_dup 1) (const_int -2147483648)))]
1010 "TARGET_32BIT"
1011 "subs%?\\t%0, %1, #-2147483648"
1012 [(set_attr "conds" "set")
1013 (set_attr "type" "alus_imm")]
1014 )
1015
1016 (define_insn "addsi3_compareV_imm"
1017 [(set (reg:CC_V CC_REGNUM)
1018 (compare:CC_V
1019 (plus:DI
1020 (sign_extend:DI
1021 (match_operand:SI 1 "register_operand" "l,0,l,0,r,r"))
1022 (match_operand 2 "arm_addimm_operand" "Pd,Py,Px,Pw,I,L"))
1023 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
1024 (set (match_operand:SI 0 "register_operand" "=l,l,l,l,r,r")
1025 (plus:SI (match_dup 1) (match_dup 2)))]
1026 "TARGET_32BIT
1027 && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
1028 "@
1029 adds%?\\t%0, %1, %2
1030 adds%?\\t%0, %0, %2
1031 subs%?\\t%0, %1, #%n2
1032 subs%?\\t%0, %0, #%n2
1033 adds%?\\t%0, %1, %2
1034 subs%?\\t%0, %1, #%n2"
1035 [(set_attr "conds" "set")
1036 (set_attr "arch" "t2,t2,t2,t2,*,*")
1037 (set_attr "length" "2,2,2,2,4,4")
1038 (set_attr "type" "alus_imm")]
1039 )
1040
1041 (define_insn "addsi3_compareV_imm_nosum"
1042 [(set (reg:CC_V CC_REGNUM)
1043 (compare:CC_V
1044 (plus:DI
1045 (sign_extend:DI
1046 (match_operand:SI 0 "register_operand" "l,r,r"))
1047 (match_operand 1 "arm_addimm_operand" "Pw,I,L"))
1048 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1049 "TARGET_32BIT
1050 && INTVAL (operands[1]) == ARM_SIGN_EXTEND (INTVAL (operands[1]))"
1051 "@
1052 cmp%?\\t%0, #%n1
1053 cmn%?\\t%0, %1
1054 cmp%?\\t%0, #%n1"
1055 [(set_attr "conds" "set")
1056 (set_attr "arch" "t2,*,*")
1057 (set_attr "length" "2,4,4")
1058 (set_attr "type" "alus_imm")]
1059 )
1060
1061 ;; We can handle more constants efficiently if we can clobber either a scratch
1062 ;; or the other source operand. We deliberately leave this late as in
1063 ;; high register pressure situations it's not worth forcing any reloads.
1064 (define_peephole2
1065 [(match_scratch:SI 2 "l")
1066 (set (reg:CC_V CC_REGNUM)
1067 (compare:CC_V
1068 (plus:DI
1069 (sign_extend:DI
1070 (match_operand:SI 0 "low_register_operand"))
1071 (match_operand 1 "const_int_operand"))
1072 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1073 "TARGET_THUMB2
1074 && satisfies_constraint_Pd (operands[1])"
1075 [(parallel[
1076 (set (reg:CC_V CC_REGNUM)
1077 (compare:CC_V
1078 (plus:DI (sign_extend:DI (match_dup 0))
1079 (sign_extend:DI (match_dup 1)))
1080 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1081 (set (match_dup 2) (plus:SI (match_dup 0) (match_dup 1)))])]
1082 )
1083
1084 (define_peephole2
1085 [(set (reg:CC_V CC_REGNUM)
1086 (compare:CC_V
1087 (plus:DI
1088 (sign_extend:DI
1089 (match_operand:SI 0 "low_register_operand"))
1090 (match_operand 1 "const_int_operand"))
1091 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1092 "TARGET_THUMB2
1093 && dead_or_set_p (peep2_next_insn (0), operands[0])
1094 && satisfies_constraint_Py (operands[1])"
1095 [(parallel[
1096 (set (reg:CC_V CC_REGNUM)
1097 (compare:CC_V
1098 (plus:DI (sign_extend:DI (match_dup 0))
1099 (sign_extend:DI (match_dup 1)))
1100 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1101 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 1)))])]
1102 )
1103
1104 (define_insn "addsi3_compare0"
1105 [(set (reg:CC_NZ CC_REGNUM)
1106 (compare:CC_NZ
1107 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
1108 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
1109 (const_int 0)))
1110 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1111 (plus:SI (match_dup 1) (match_dup 2)))]
1112 "TARGET_ARM"
1113 "@
1114 adds%?\\t%0, %1, %2
1115 subs%?\\t%0, %1, #%n2
1116 adds%?\\t%0, %1, %2"
1117 [(set_attr "conds" "set")
1118 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
1119 )
1120
1121 (define_insn "*addsi3_compare0_scratch"
1122 [(set (reg:CC_NZ CC_REGNUM)
1123 (compare:CC_NZ
1124 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
1125 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
1126 (const_int 0)))]
1127 "TARGET_ARM"
1128 "@
1129 cmn%?\\t%0, %1
1130 cmp%?\\t%0, #%n1
1131 cmn%?\\t%0, %1"
1132 [(set_attr "conds" "set")
1133 (set_attr "predicable" "yes")
1134 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
1135 )
1136
1137 (define_insn "*compare_negsi_si"
1138 [(set (reg:CC_Z CC_REGNUM)
1139 (compare:CC_Z
1140 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
1141 (match_operand:SI 1 "s_register_operand" "l,r")))]
1142 "TARGET_32BIT"
1143 "cmn%?\\t%1, %0"
1144 [(set_attr "conds" "set")
1145 (set_attr "predicable" "yes")
1146 (set_attr "arch" "t2,*")
1147 (set_attr "length" "2,4")
1148 (set_attr "predicable_short_it" "yes,no")
1149 (set_attr "type" "alus_sreg")]
1150 )
1151
1152 ;; This is the canonicalization of subsi3_compare when the
1153 ;; addend is a constant.
1154 (define_insn "cmpsi2_addneg"
1155 [(set (reg:CC CC_REGNUM)
1156 (compare:CC
1157 (match_operand:SI 1 "s_register_operand" "r,r")
1158 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
1159 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1160 (plus:SI (match_dup 1)
1161 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
1162 "TARGET_32BIT
1163 && (INTVAL (operands[2])
1164 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
1165 {
1166 /* For 0 and INT_MIN it is essential that we use subs, as adds will result
1167 in different condition codes (like cmn rather than like cmp), so that
1168 alternative comes first.  Both alternatives can match for any 0x??000000
1169 value; except for 0 and INT_MIN it doesn't matter which we choose.  Both
1170 also match for -1 and 1 with TARGET_THUMB2; in that case prefer the
1171 instruction with #1, as it is shorter.  */
1172 if (which_alternative == 0 && operands[3] != const1_rtx)
1173 return "subs%?\\t%0, %1, #%n3";
1174 else
1175 return "adds%?\\t%0, %1, %3";
1176 }
1177 [(set_attr "conds" "set")
1178 (set_attr "type" "alus_sreg")]
1179 )
1180
1181 ;; Convert the sequence
1182 ;; sub rd, rn, #1
1183 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
1184 ;; bne dest
1185 ;; into
1186 ;; subs rd, rn, #1
1187 ;; bcs dest ((unsigned)rn >= 1)
1188 ;; similarly for the beq variant using bcc.
1189 ;; This is a common looping idiom (while (n--))
1190 (define_peephole2
1191 [(set (match_operand:SI 0 "arm_general_register_operand" "")
1192 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
1193 (const_int -1)))
1194 (set (match_operand 2 "cc_register" "")
1195 (compare (match_dup 0) (const_int -1)))
1196 (set (pc)
1197 (if_then_else (match_operator 3 "equality_operator"
1198 [(match_dup 2) (const_int 0)])
1199 (match_operand 4 "" "")
1200 (match_operand 5 "" "")))]
1201 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
1202 [(parallel[
1203 (set (match_dup 2)
1204 (compare:CC
1205 (match_dup 1) (const_int 1)))
1206 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
1207 (set (pc)
1208 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
1209 (match_dup 4)
1210 (match_dup 5)))]
1211 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
1212 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
1213 ? GEU : LTU),
1214 VOIDmode,
1215 operands[2], const0_rtx);"
1216 )
1217
1218 ;; The next four insns work because they compare the result with one of
1219 ;; the operands, and we know that the use of the condition code is
1220 ;; either GEU or LTU, so we can use the carry flag from the addition
1221 ;; instead of doing the compare a second time.
1222 (define_insn "addsi3_compare_op1"
1223 [(set (reg:CC_C CC_REGNUM)
1224 (compare:CC_C
1225 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,rk,rk")
1226 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rkI,L"))
1227 (match_dup 1)))
1228 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,rk,rk")
1229 (plus:SI (match_dup 1) (match_dup 2)))]
1230 "TARGET_32BIT"
1231 "@
1232 adds%?\\t%0, %1, %2
1233 adds%?\\t%0, %0, %2
1234 subs%?\\t%0, %1, #%n2
1235 subs%?\\t%0, %0, #%n2
1236 adds%?\\t%0, %1, %2
1237 subs%?\\t%0, %1, #%n2"
1238 [(set_attr "conds" "set")
1239 (set_attr "arch" "t2,t2,t2,t2,*,*")
1240 (set_attr "length" "2,2,2,2,4,4")
1241 (set (attr "type")
1242 (if_then_else (match_operand 2 "const_int_operand")
1243 (const_string "alu_imm")
1244 (const_string "alu_sreg")))]
1245 )
1246
1247 (define_insn "*addsi3_compare_op2"
1248 [(set (reg:CC_C CC_REGNUM)
1249 (compare:CC_C
1250 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r")
1251 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rI,L"))
1252 (match_dup 2)))
1253 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r")
1254 (plus:SI (match_dup 1) (match_dup 2)))]
1255 "TARGET_32BIT"
1256 "@
1257 adds%?\\t%0, %1, %2
1258 adds%?\\t%0, %0, %2
1259 subs%?\\t%0, %1, #%n2
1260 subs%?\\t%0, %0, #%n2
1261 adds%?\\t%0, %1, %2
1262 subs%?\\t%0, %1, #%n2"
1263 [(set_attr "conds" "set")
1264 (set_attr "arch" "t2,t2,t2,t2,*,*")
1265 (set_attr "length" "2,2,2,2,4,4")
1266 (set (attr "type")
1267 (if_then_else (match_operand 2 "const_int_operand")
1268 (const_string "alu_imm")
1269 (const_string "alu_sreg")))]
1270 )
1271
1272 (define_insn "*compare_addsi2_op0"
1273 [(set (reg:CC_C CC_REGNUM)
1274 (compare:CC_C
1275 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1276 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1277 (match_dup 0)))]
1278 "TARGET_32BIT"
1279 "@
1280 cmn%?\\t%0, %1
1281 cmp%?\\t%0, #%n1
1282 cmn%?\\t%0, %1
1283 cmp%?\\t%0, #%n1"
1284 [(set_attr "conds" "set")
1285 (set_attr "predicable" "yes")
1286 (set_attr "arch" "t2,t2,*,*")
1287 (set_attr "predicable_short_it" "yes,yes,no,no")
1288 (set_attr "length" "2,2,4,4")
1289 (set (attr "type")
1290 (if_then_else (match_operand 1 "const_int_operand")
1291 (const_string "alu_imm")
1292 (const_string "alu_sreg")))]
1293 )
1294
1295 (define_insn "*compare_addsi2_op1"
1296 [(set (reg:CC_C CC_REGNUM)
1297 (compare:CC_C
1298 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1299 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1300 (match_dup 1)))]
1301 "TARGET_32BIT"
1302 "@
1303 cmn%?\\t%0, %1
1304 cmp%?\\t%0, #%n1
1305 cmn%?\\t%0, %1
1306 cmp%?\\t%0, #%n1"
1307 [(set_attr "conds" "set")
1308 (set_attr "predicable" "yes")
1309 (set_attr "arch" "t2,t2,*,*")
1310 (set_attr "predicable_short_it" "yes,yes,no,no")
1311 (set_attr "length" "2,2,4,4")
1312 (set (attr "type")
1313 (if_then_else (match_operand 1 "const_int_operand")
1314 (const_string "alu_imm")
1315 (const_string "alu_sreg")))]
1316 )
1317
1318 (define_insn "addsi3_carryin"
1319 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1320 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
1321 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
1322 (match_operand:SI 3 "arm_carry_operation" "")))]
1323 "TARGET_32BIT"
1324 "@
1325 adc%?\\t%0, %1, %2
1326 adc%?\\t%0, %1, %2
1327 sbc%?\\t%0, %1, #%B2"
1328 [(set_attr "conds" "use")
1329 (set_attr "predicable" "yes")
1330 (set_attr "arch" "t2,*,*")
1331 (set_attr "length" "4")
1332 (set_attr "predicable_short_it" "yes,no,no")
1333 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1334 )
1335
1336 ;; Canonicalization of the above when the immediate is zero.
1337 (define_insn "add0si3_carryin"
1338 [(set (match_operand:SI 0 "s_register_operand" "=r")
1339 (plus:SI (match_operand:SI 2 "arm_carry_operation" "")
1340 (match_operand:SI 1 "arm_not_operand" "r")))]
1341 "TARGET_32BIT"
1342 "adc%?\\t%0, %1, #0"
1343 [(set_attr "conds" "use")
1344 (set_attr "predicable" "yes")
1345 (set_attr "length" "4")
1346 (set_attr "type" "adc_imm")]
1347 )
1348
1349 (define_insn "*addsi3_carryin_alt2"
1350 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1351 (plus:SI (plus:SI (match_operand:SI 3 "arm_carry_operation" "")
1352 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
1353 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
1354 "TARGET_32BIT"
1355 "@
1356 adc%?\\t%0, %1, %2
1357 adc%?\\t%0, %1, %2
1358 sbc%?\\t%0, %1, #%B2"
1359 [(set_attr "conds" "use")
1360 (set_attr "predicable" "yes")
1361 (set_attr "arch" "t2,*,*")
1362 (set_attr "length" "4")
1363 (set_attr "predicable_short_it" "yes,no,no")
1364 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1365 )
1366
1367 (define_insn "*addsi3_carryin_shift"
1368 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1369 (plus:SI (plus:SI
1370 (match_operator:SI 2 "shift_operator"
1371 [(match_operand:SI 3 "s_register_operand" "r,r")
1372 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1373 (match_operand:SI 5 "arm_carry_operation" ""))
1374 (match_operand:SI 1 "s_register_operand" "r,r")))]
1375 "TARGET_32BIT"
1376 "adc%?\\t%0, %1, %3%S2"
1377 [(set_attr "conds" "use")
1378 (set_attr "arch" "32,a")
1379 (set_attr "shift" "3")
1380 (set_attr "predicable" "yes")
1381 (set_attr "autodetect_type" "alu_shift_operator2")]
1382 )
1383
1384 (define_insn "*addsi3_carryin_clobercc"
1385 [(set (match_operand:SI 0 "s_register_operand" "=r")
1386 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1387 (match_operand:SI 2 "arm_rhs_operand" "rI"))
1388 (match_operand:SI 3 "arm_carry_operation" "")))
1389 (clobber (reg:CC CC_REGNUM))]
1390 "TARGET_32BIT"
1391 "adcs%?\\t%0, %1, %2"
1392 [(set_attr "conds" "set")
1393 (set_attr "type" "adcs_reg")]
1394 )
1395
1396 (define_expand "subvsi4"
1397 [(match_operand:SI 0 "s_register_operand")
1398 (match_operand:SI 1 "arm_rhs_operand")
1399 (match_operand:SI 2 "arm_add_operand")
1400 (match_operand 3 "")]
1401 "TARGET_32BIT"
1402 {
1403 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1404 {
1405 /* If both operands are constants we can decide the result statically. */
1406 wi::overflow_type overflow;
1407 wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1408 rtx_mode_t (operands[2], SImode),
1409 SIGNED, &overflow);
1410 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1411 if (overflow != wi::OVF_NONE)
1412 emit_jump_insn (gen_jump (operands[3]));
1413 DONE;
1414 }
1415 else if (CONST_INT_P (operands[2]))
1416 {
1417 operands[2] = GEN_INT (-INTVAL (operands[2]));
1418 /* Special case for INT_MIN. */
1419 if (INTVAL (operands[2]) == 0x80000000)
1420 emit_insn (gen_subvsi3_intmin (operands[0], operands[1]));
1421 else
1422 emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1],
1423 operands[2]));
1424 }
1425 else if (CONST_INT_P (operands[1]))
1426 emit_insn (gen_subvsi3_imm1 (operands[0], operands[1], operands[2]));
1427 else
1428 emit_insn (gen_subvsi3 (operands[0], operands[1], operands[2]));
1429
1430 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
1431 DONE;
1432 })
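;; Illustrative sketch for register operands:
;;   subs  r0, r1, r2
;;   bvs   .Loverflow         @ label name for illustration only
;; A constant subtrahend is negated and handled as an overflow-checking
;; addition, with INT_MIN routed through subvsi3_intmin since -INT_MIN is
;; not representable in SImode.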
1433
1434 (define_expand "subvdi4"
1435 [(match_operand:DI 0 "s_register_operand")
1436 (match_operand:DI 1 "reg_or_int_operand")
1437 (match_operand:DI 2 "reg_or_int_operand")
1438 (match_operand 3 "")]
1439 "TARGET_32BIT"
1440 {
1441 rtx lo_result, hi_result;
1442 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1443 lo_result = gen_lowpart (SImode, operands[0]);
1444 hi_result = gen_highpart (SImode, operands[0]);
1445 machine_mode mode = CCmode;
1446
1447 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1448 {
1449 /* If both operands are constants we can decide the result statically. */
1450 wi::overflow_type overflow;
1451 wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1452 rtx_mode_t (operands[2], DImode),
1453 SIGNED, &overflow);
1454 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1455 if (overflow != wi::OVF_NONE)
1456 emit_jump_insn (gen_jump (operands[3]));
1457 DONE;
1458 }
1459 else if (CONST_INT_P (operands[1]))
1460 {
1461 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1462 &lo_op1, &hi_op1);
1463 if (const_ok_for_arm (INTVAL (lo_op1)))
1464 {
1465 emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1466 GEN_INT (~UINTVAL (lo_op1))));
1467 /* We could potentially use RSC here in Arm state, but not
1468 in Thumb, so it's probably not worth the effort of handling
1469 this. */
1470 hi_op1 = force_reg (SImode, hi_op1);
1471 mode = CC_RSBmode;
1472 goto highpart;
1473 }
1474 operands[1] = force_reg (DImode, operands[1]);
1475 }
1476
1477 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1478 &lo_op2, &hi_op2);
1479 if (lo_op2 == const0_rtx)
1480 {
1481 emit_move_insn (lo_result, lo_op1);
1482 if (!arm_add_operand (hi_op2, SImode))
1483 hi_op2 = force_reg (SImode, hi_op2);
1484 emit_insn (gen_subvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1485 DONE;
1486 }
1487
1488 if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1489 lo_op2 = force_reg (SImode, lo_op2);
1490 if (CONST_INT_P (lo_op2))
1491 emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1492 gen_int_mode (-INTVAL (lo_op2), SImode)));
1493 else
1494 emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1495
1496 highpart:
1497 if (!arm_not_operand (hi_op2, SImode))
1498 hi_op2 = force_reg (SImode, hi_op2);
1499 rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1500 if (CONST_INT_P (hi_op2))
1501 emit_insn (gen_subvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1502 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1503 gen_rtx_LTU (DImode, ccreg,
1504 const0_rtx)));
1505 else
1506 emit_insn (gen_subvsi3_borrow (hi_result, hi_op1, hi_op2,
1507 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1508 gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1509 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
1510
1511 DONE;
1512 })
1513
1514 (define_expand "usubvsi4"
1515 [(match_operand:SI 0 "s_register_operand")
1516 (match_operand:SI 1 "arm_rhs_operand")
1517 (match_operand:SI 2 "arm_add_operand")
1518 (match_operand 3 "")]
1519 "TARGET_32BIT"
1520 {
1521 machine_mode mode = CCmode;
1522 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1523 {
1524 /* If both operands are constants we can decide the result statically. */
1525 wi::overflow_type overflow;
1526 wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1527 rtx_mode_t (operands[2], SImode),
1528 UNSIGNED, &overflow);
1529 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1530 if (overflow != wi::OVF_NONE)
1531 emit_jump_insn (gen_jump (operands[3]));
1532 DONE;
1533 }
1534 else if (CONST_INT_P (operands[2]))
1535 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
1536 gen_int_mode (-INTVAL (operands[2]),
1537 SImode)));
1538 else if (CONST_INT_P (operands[1]))
1539 {
1540 mode = CC_RSBmode;
1541 emit_insn (gen_rsb_imm_compare (operands[0], operands[1], operands[2],
1542 GEN_INT (~UINTVAL (operands[1]))));
1543 }
1544 else
1545 emit_insn (gen_subsi3_compare1 (operands[0], operands[1], operands[2]));
1546 arm_gen_unlikely_cbranch (LTU, mode, operands[3]);
1547
1548 DONE;
1549 })
1550
1551 (define_expand "usubvdi4"
1552 [(match_operand:DI 0 "s_register_operand")
1553 (match_operand:DI 1 "reg_or_int_operand")
1554 (match_operand:DI 2 "reg_or_int_operand")
1555 (match_operand 3 "")]
1556 "TARGET_32BIT"
1557 {
1558 rtx lo_result, hi_result;
1559 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1560 lo_result = gen_lowpart (SImode, operands[0]);
1561 hi_result = gen_highpart (SImode, operands[0]);
1562 machine_mode mode = CCmode;
1563
1564 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1565 {
1566 /* If both operands are constants we can decide the result statically. */
1567 wi::overflow_type overflow;
1568 wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1569 rtx_mode_t (operands[2], DImode),
1570 UNSIGNED, &overflow);
1571 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1572 if (overflow != wi::OVF_NONE)
1573 emit_jump_insn (gen_jump (operands[3]));
1574 DONE;
1575 }
1576 else if (CONST_INT_P (operands[1]))
1577 {
1578 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1579 &lo_op1, &hi_op1);
1580 if (const_ok_for_arm (INTVAL (lo_op1)))
1581 {
1582 emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1583 GEN_INT (~UINTVAL (lo_op1))));
1584 /* We could potentially use RSC here in Arm state, but not
1585 in Thumb, so it's probably not worth the effort of handling
1586 this. */
1587 hi_op1 = force_reg (SImode, hi_op1);
1588 mode = CC_RSBmode;
1589 goto highpart;
1590 }
1591 operands[1] = force_reg (DImode, operands[1]);
1592 }
1593
1594 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1595 &lo_op2, &hi_op2);
1596 if (lo_op2 == const0_rtx)
1597 {
1598 emit_move_insn (lo_result, lo_op1);
1599 if (!arm_add_operand (hi_op2, SImode))
1600 hi_op2 = force_reg (SImode, hi_op2);
1601 emit_insn (gen_usubvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1602 DONE;
1603 }
1604
1605 if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1606 lo_op2 = force_reg (SImode, lo_op2);
1607 if (CONST_INT_P (lo_op2))
1608 emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1609 gen_int_mode (-INTVAL (lo_op2), SImode)));
1610 else
1611 emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1612
1613 highpart:
1614 if (!arm_not_operand (hi_op2, SImode))
1615 hi_op2 = force_reg (SImode, hi_op2);
1616 rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1617 if (CONST_INT_P (hi_op2))
1618 emit_insn (gen_usubvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1619 GEN_INT (UINTVAL (hi_op2) & 0xffffffff),
1620 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1621 gen_rtx_LTU (DImode, ccreg,
1622 const0_rtx)));
1623 else
1624 emit_insn (gen_usubvsi3_borrow (hi_result, hi_op1, hi_op2,
1625 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1626 gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1627 arm_gen_unlikely_cbranch (LTU, CC_Bmode, operands[3]);
1628
1629 DONE;
1630 })
1631
1632 (define_insn "subsi3_compare1"
1633 [(set (reg:CC CC_REGNUM)
1634 (compare:CC
1635 (match_operand:SI 1 "register_operand" "r")
1636 (match_operand:SI 2 "register_operand" "r")))
1637 (set (match_operand:SI 0 "register_operand" "=r")
1638 (minus:SI (match_dup 1) (match_dup 2)))]
1639 "TARGET_32BIT"
1640 "subs%?\\t%0, %1, %2"
1641 [(set_attr "conds" "set")
1642 (set_attr "type" "alus_sreg")]
1643 )
1644
1645 (define_insn "subvsi3"
1646 [(set (reg:CC_V CC_REGNUM)
1647 (compare:CC_V
1648 (minus:DI
1649 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "l,r"))
1650 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
1651 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1652 (set (match_operand:SI 0 "s_register_operand" "=l,r")
1653 (minus:SI (match_dup 1) (match_dup 2)))]
1654 "TARGET_32BIT"
1655 "subs%?\\t%0, %1, %2"
1656 [(set_attr "conds" "set")
1657 (set_attr "arch" "t2,*")
1658 (set_attr "length" "2,4")
1659 (set_attr "type" "alus_sreg")]
1660 )
1661
1662 (define_insn "subvsi3_imm1"
1663 [(set (reg:CC_V CC_REGNUM)
1664 (compare:CC_V
1665 (minus:DI
1666 (match_operand 1 "arm_immediate_operand" "I")
1667 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1668 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1669 (set (match_operand:SI 0 "s_register_operand" "=r")
1670 (minus:SI (match_dup 1) (match_dup 2)))]
1671 "TARGET_32BIT"
1672 "rsbs%?\\t%0, %2, %1"
1673 [(set_attr "conds" "set")
1674 (set_attr "type" "alus_imm")]
1675 )
1676
1677 (define_insn "subsi3_carryin"
1678 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1679 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
1680 (match_operand:SI 2 "s_register_operand" "r,r,r"))
1681 (match_operand:SI 3 "arm_borrow_operation" "")))]
1682 "TARGET_32BIT"
1683 "@
1684 sbc%?\\t%0, %1, %2
1685 rsc%?\\t%0, %2, %1
1686 sbc%?\\t%0, %2, %2, lsl #1"
1687 [(set_attr "conds" "use")
1688 (set_attr "arch" "*,a,t2")
1689 (set_attr "predicable" "yes")
1690 (set_attr "type" "adc_reg,adc_imm,alu_shift_imm_lsl_1to4")]
1691 )
1692
1693 ;; Special canonicalization of the above when operand1 == (const_int 1):
1694 ;; in this case the 'borrow' needs to be treated like subtracting from the carry.
1695 (define_insn "rsbsi_carryin_reg"
1696 [(set (match_operand:SI 0 "s_register_operand" "=r")
1697 (minus:SI (match_operand:SI 1 "arm_carry_operation" "")
1698 (match_operand:SI 2 "s_register_operand" "r")))]
1699 "TARGET_ARM"
1700 "rsc%?\\t%0, %2, #1"
1701 [(set_attr "conds" "use")
1702 (set_attr "predicable" "yes")
1703 (set_attr "type" "adc_imm")]
1704 )
1705
1706 ;; SBC performs Rn - Rm - ~C, but -Rm = ~Rm + 1 => Rn + ~Rm + 1 - ~C
1707 ;; => Rn + ~Rm + C, which is essentially ADC Rd, Rn, ~Rm
1708 (define_insn "*add_not_cin"
1709 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1710 (plus:SI
1711 (plus:SI (not:SI (match_operand:SI 1 "s_register_operand" "r,r"))
1712 (match_operand:SI 3 "arm_carry_operation" ""))
1713 (match_operand:SI 2 "arm_rhs_operand" "r,I")))]
1714 "TARGET_ARM || (TARGET_THUMB2 && !CONST_INT_P (operands[2]))"
1715 "@
1716 sbc%?\\t%0, %2, %1
1717 rsc%?\\t%0, %1, %2"
1718 [(set_attr "conds" "use")
1719 (set_attr "predicable" "yes")
1720 (set_attr "arch" "*,a")
1721 (set_attr "type" "adc_reg,adc_imm")]
1722 )
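;; Editorial worked check of the identity above (32-bit arithmetic): with
;; Rn = 5, Rm = 3 and the carry flag set (no pending borrow),
;;   SBC Rd, Rn, Rm   computes 5 - 3 - (1 - C)     = 2, and
;;   ADC Rd, Rn, ~Rm  computes 5 + 0xfffffffc + C  = 2 (mod 2^32),
;; so the (plus (plus (not ...) carry) ...) form matched here is simply
;; SBC/RSC with one operand inverted.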
1723
1724 ;; On Arm we can also use the same trick when the non-inverted operand is
1725 ;; shifted, using RSC.
1726 (define_insn "add_not_shift_cin"
1727 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1728 (plus:SI
1729 (plus:SI (match_operator:SI 3 "shift_operator"
1730 [(match_operand:SI 1 "s_register_operand" "r,r")
1731 (match_operand:SI 2 "shift_amount_operand" "M,r")])
1732 (not:SI (match_operand:SI 4 "s_register_operand" "r,r")))
1733 (match_operand:SI 5 "arm_carry_operation" "")))]
1734 "TARGET_ARM"
1735 "rsc%?\\t%0, %4, %1%S3"
1736 [(set_attr "conds" "use")
1737 (set_attr "predicable" "yes")
1738 (set_attr "autodetect_type" "alu_shift_operator3")]
1739 )
1740
1741 (define_insn "cmpsi3_carryin_<CC_EXTEND>out"
1742 [(set (reg:<CC_EXTEND> CC_REGNUM)
1743 (compare:<CC_EXTEND>
1744 (SE:DI (match_operand:SI 1 "s_register_operand" "0,r"))
1745 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1746 (SE:DI (match_operand:SI 2 "s_register_operand" "l,r")))))
1747 (clobber (match_scratch:SI 0 "=l,r"))]
1748 "TARGET_32BIT"
1749 "sbcs\\t%0, %1, %2"
1750 [(set_attr "conds" "set")
1751 (set_attr "arch" "t2,*")
1752 (set_attr "length" "2,4")
1753 (set_attr "type" "adc_reg")]
1754 )
1755
1756 ;; Similar to the above, but handling a constant which has a different
1757 ;; canonicalization.
1758 (define_insn "cmpsi3_imm_carryin_<CC_EXTEND>out"
1759 [(set (reg:<CC_EXTEND> CC_REGNUM)
1760 (compare:<CC_EXTEND>
1761 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1762 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1763 (match_operand:DI 2 "arm_adcimm_operand" "I,K"))))
1764 (clobber (match_scratch:SI 0 "=l,r"))]
1765 "TARGET_32BIT"
1766 "@
1767 sbcs\\t%0, %1, %2
1768 adcs\\t%0, %1, #%B2"
1769 [(set_attr "conds" "set")
1770 (set_attr "type" "adc_imm")]
1771 )
1772
1773 ;; Further canonicalization when the constant is zero.
1774 (define_insn "cmpsi3_0_carryin_<CC_EXTEND>out"
1775 [(set (reg:<CC_EXTEND> CC_REGNUM)
1776 (compare:<CC_EXTEND>
1777 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1778 (match_operand:DI 2 "arm_borrow_operation" "")))
1779 (clobber (match_scratch:SI 0 "=l,r"))]
1780 "TARGET_32BIT"
1781 "sbcs\\t%0, %1, #0"
1782 [(set_attr "conds" "set")
1783 (set_attr "type" "adc_imm")]
1784 )
1785
1786 (define_insn "*subsi3_carryin_const"
1787 [(set (match_operand:SI 0 "s_register_operand" "=r")
1788 (minus:SI (plus:SI
1789 (match_operand:SI 1 "s_register_operand" "r")
1790 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1791 (match_operand:SI 3 "arm_borrow_operation" "")))]
1792 "TARGET_32BIT"
1793 "sbc\\t%0, %1, #%n2"
1794 [(set_attr "conds" "use")
1795 (set_attr "type" "adc_imm")]
1796 )
1797
1798 (define_insn "*subsi3_carryin_const0"
1799 [(set (match_operand:SI 0 "s_register_operand" "=r")
1800 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1801 (match_operand:SI 2 "arm_borrow_operation" "")))]
1802 "TARGET_32BIT"
1803 "sbc\\t%0, %1, #0"
1804 [(set_attr "conds" "use")
1805 (set_attr "type" "adc_imm")]
1806 )
1807
1808 (define_insn "*subsi3_carryin_shift"
1809 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1810 (minus:SI (minus:SI
1811 (match_operand:SI 1 "s_register_operand" "r,r")
1812 (match_operator:SI 2 "shift_operator"
1813 [(match_operand:SI 3 "s_register_operand" "r,r")
1814 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
1815 (match_operand:SI 5 "arm_borrow_operation" "")))]
1816 "TARGET_32BIT"
1817 "sbc%?\\t%0, %1, %3%S2"
1818 [(set_attr "conds" "use")
1819 (set_attr "arch" "32,a")
1820 (set_attr "shift" "3")
1821 (set_attr "predicable" "yes")
1822 (set_attr "autodetect_type" "alu_shift_operator2")]
1823 )
1824
1825 (define_insn "*subsi3_carryin_shift_alt"
1826 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1827 (minus:SI (minus:SI
1828 (match_operand:SI 1 "s_register_operand" "r,r")
1829 (match_operand:SI 5 "arm_borrow_operation" ""))
1830 (match_operator:SI 2 "shift_operator"
1831 [(match_operand:SI 3 "s_register_operand" "r,r")
1832 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
1833 "TARGET_32BIT"
1834 "sbc%?\\t%0, %1, %3%S2"
1835 [(set_attr "conds" "use")
1836 (set_attr "arch" "32,a")
1837 (set_attr "shift" "3")
1838 (set_attr "predicable" "yes")
1839 (set_attr "autodetect_type" "alu_shift_operator2")]
1840 )
1841
1842 ;; No RSC in Thumb2
1843 (define_insn "*rsbsi3_carryin_shift"
1844 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1845 (minus:SI (minus:SI
1846 (match_operator:SI 2 "shift_operator"
1847 [(match_operand:SI 3 "s_register_operand" "r,r")
1848 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1849 (match_operand:SI 1 "s_register_operand" "r,r"))
1850 (match_operand:SI 5 "arm_borrow_operation" "")))]
1851 "TARGET_ARM"
1852 "rsc%?\\t%0, %1, %3%S2"
1853 [(set_attr "conds" "use")
1854 (set_attr "predicable" "yes")
1855 (set_attr "autodetect_type" "alu_shift_operator2")]
1856 )
1857
1858 (define_insn "*rsbsi3_carryin_shift_alt"
1859 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1860 (minus:SI (minus:SI
1861 (match_operator:SI 2 "shift_operator"
1862 [(match_operand:SI 3 "s_register_operand" "r,r")
1863 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1864 (match_operand:SI 5 "arm_borrow_operation" ""))
1865 (match_operand:SI 1 "s_register_operand" "r,r")))]
1866 "TARGET_ARM"
1867 "rsc%?\\t%0, %1, %3%S2"
1868 [(set_attr "conds" "use")
1869 (set_attr "predicable" "yes")
1870 (set_attr "autodetect_type" "alu_shift_operator2")]
1871 )
1872
1873 ; Transform ((x << y) - 1) to ~(~(x-1) << y), where x is a constant.
1874 (define_split
1875 [(set (match_operand:SI 0 "s_register_operand" "")
1876 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1877 (match_operand:SI 2 "s_register_operand" ""))
1878 (const_int -1)))
1879 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1880 "TARGET_32BIT"
1881 [(set (match_dup 3) (match_dup 1))
1882 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1883 "
1884 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1885 ")
1886
1887 (define_expand "addsf3"
1888 [(set (match_operand:SF 0 "s_register_operand")
1889 (plus:SF (match_operand:SF 1 "s_register_operand")
1890 (match_operand:SF 2 "s_register_operand")))]
1891 "TARGET_32BIT && TARGET_HARD_FLOAT"
1892 "
1893 ")
1894
1895 (define_expand "adddf3"
1896 [(set (match_operand:DF 0 "s_register_operand")
1897 (plus:DF (match_operand:DF 1 "s_register_operand")
1898 (match_operand:DF 2 "s_register_operand")))]
1899 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1900 "
1901 ")
1902
1903 (define_expand "subdi3"
1904 [(parallel
1905 [(set (match_operand:DI 0 "s_register_operand")
1906 (minus:DI (match_operand:DI 1 "reg_or_int_operand")
1907 (match_operand:DI 2 "s_register_operand")))
1908 (clobber (reg:CC CC_REGNUM))])]
1909 "TARGET_EITHER"
1910 "
1911 if (TARGET_THUMB1)
1912 {
1913 if (!REG_P (operands[1]))
1914 operands[1] = force_reg (DImode, operands[1]);
1915 }
1916 else
1917 {
1918 rtx lo_result, hi_result, lo_dest, hi_dest;
1919 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1920 rtx condition;
1921
1922 /* Since operands[1] may be an integer, pass it second, so that
1923 any necessary simplifications will be done on the decomposed
1924 constant. */
1925 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1926 &lo_op1, &hi_op1);
1927 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
1928 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
1929
1930 if (!arm_rhs_operand (lo_op1, SImode))
1931 lo_op1 = force_reg (SImode, lo_op1);
1932
1933 if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
1934 || !arm_rhs_operand (hi_op1, SImode))
1935 hi_op1 = force_reg (SImode, hi_op1);
1936
1937 rtx cc_reg;
1938 if (lo_op1 == const0_rtx)
1939 {
1940 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1941 emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
1942 }
1943 else if (CONST_INT_P (lo_op1))
1944 {
1945 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1946 emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
1947 GEN_INT (~UINTVAL (lo_op1))));
1948 }
1949 else
1950 {
1951 cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
1952 emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
1953 }
1954
1955 condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);
1956
1957 if (hi_op1 == const0_rtx)
1958 emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
1959 else
1960 emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));
1961
1962 if (lo_result != lo_dest)
1963 emit_move_insn (lo_result, lo_dest);
1964
1965 if (hi_result != hi_dest)
1966 emit_move_insn (hi_result, hi_dest);
1967
1968 DONE;
1969 }
1970 "
1971 )
1972
1973 (define_expand "subsi3"
1974 [(set (match_operand:SI 0 "s_register_operand")
1975 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1976 (match_operand:SI 2 "s_register_operand")))]
1977 "TARGET_EITHER"
1978 "
1979 if (CONST_INT_P (operands[1]))
1980 {
1981 if (TARGET_32BIT)
1982 {
1983 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1984 operands[1] = force_reg (SImode, operands[1]);
1985 else
1986 {
1987 arm_split_constant (MINUS, SImode, NULL_RTX,
1988 INTVAL (operands[1]), operands[0],
1989 operands[2],
1990 optimize && can_create_pseudo_p ());
1991 DONE;
1992 }
1993 }
1994 else /* TARGET_THUMB1 */
1995 operands[1] = force_reg (SImode, operands[1]);
1996 }
1997 "
1998 )
1999
2000 ; ??? Check Thumb-2 split length
2001 (define_insn_and_split "*arm_subsi3_insn"
2002 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
2003 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
2004 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
2005 "TARGET_32BIT"
2006 "@
2007 sub%?\\t%0, %1, %2
2008 sub%?\\t%0, %2
2009 sub%?\\t%0, %1, %2
2010 rsb%?\\t%0, %2, %1
2011 rsb%?\\t%0, %2, %1
2012 sub%?\\t%0, %1, %2
2013 sub%?\\t%0, %1, %2
2014 sub%?\\t%0, %1, %2
2015 #"
2016 "&& (CONST_INT_P (operands[1])
2017 && !const_ok_for_arm (INTVAL (operands[1])))"
2018 [(clobber (const_int 0))]
2019 "
2020 arm_split_constant (MINUS, SImode, curr_insn,
2021 INTVAL (operands[1]), operands[0], operands[2], 0);
2022 DONE;
2023 "
2024 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
2025 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
2026 (set_attr "predicable" "yes")
2027 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
2028 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
2029 )
2030
2031 (define_peephole2
2032 [(match_scratch:SI 3 "r")
2033 (set (match_operand:SI 0 "arm_general_register_operand" "")
2034 (minus:SI (match_operand:SI 1 "const_int_operand" "")
2035 (match_operand:SI 2 "arm_general_register_operand" "")))]
2036 "TARGET_32BIT
2037 && !const_ok_for_arm (INTVAL (operands[1]))
2038 && const_ok_for_arm (~INTVAL (operands[1]))"
2039 [(set (match_dup 3) (match_dup 1))
2040 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
2041 ""
2042 )
2043
2044 (define_insn "subsi3_compare0"
2045 [(set (reg:CC_NZ CC_REGNUM)
2046 (compare:CC_NZ
2047 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
2048 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
2049 (const_int 0)))
2050 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2051 (minus:SI (match_dup 1) (match_dup 2)))]
2052 "TARGET_32BIT"
2053 "@
2054 subs%?\\t%0, %1, %2
2055 subs%?\\t%0, %1, %2
2056 rsbs%?\\t%0, %2, %1"
2057 [(set_attr "conds" "set")
2058 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
2059 )
2060
2061 (define_insn "subsi3_compare"
2062 [(set (reg:CC CC_REGNUM)
2063 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
2064 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
2065 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2066 (minus:SI (match_dup 1) (match_dup 2)))]
2067 "TARGET_32BIT"
2068 "@
2069 subs%?\\t%0, %1, %2
2070 subs%?\\t%0, %1, %2
2071 rsbs%?\\t%0, %2, %1"
2072 [(set_attr "conds" "set")
2073 (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
2074 )
2075
2076 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
2077 ;; rather than (0 cmp reg). This gives the same results for unsigned
2078 ;; and equality compares which is what we mostly need here.
2079 (define_insn "rsb_imm_compare"
2080 [(set (reg:CC_RSB CC_REGNUM)
2081 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2082 (match_operand 3 "const_int_operand" "")))
2083 (set (match_operand:SI 0 "s_register_operand" "=r")
2084 (minus:SI (match_operand 1 "arm_immediate_operand" "I")
2085 (match_dup 2)))]
2086 "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
2087 "rsbs\\t%0, %2, %1"
2088 [(set_attr "conds" "set")
2089 (set_attr "type" "alus_imm")]
2090 )
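;; Editorial note on the canonical form above: RSBS computes imm - reg, and in
;; 32-bit two's complement imm - reg == ~reg - ~imm.  Comparing (not reg)
;; against ~imm therefore produces the same C (borrow) and Z results as
;; comparing imm against reg directly, which covers the unsigned and equality
;; uses that CC_RSB is needed for.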
2091
2092 ;; Similarly, but the result is unused.
2093 (define_insn "rsb_imm_compare_scratch"
2094 [(set (reg:CC_RSB CC_REGNUM)
2095 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2096 (match_operand 1 "arm_not_immediate_operand" "K")))
2097 (clobber (match_scratch:SI 0 "=r"))]
2098 "TARGET_32BIT"
2099 "rsbs\\t%0, %2, #%B1"
2100 [(set_attr "conds" "set")
2101 (set_attr "type" "alus_imm")]
2102 )
2103
2104 ;; Compare the sum of a value plus a carry against a constant. Uses
2105 ;; RSC, so the result is swapped. Only available on Arm.
2106 (define_insn "rscsi3_<CC_EXTEND>out_scratch"
2107 [(set (reg:CC_SWP CC_REGNUM)
2108 (compare:CC_SWP
2109 (plus:DI (SE:DI (match_operand:SI 2 "s_register_operand" "r"))
2110 (match_operand:DI 3 "arm_borrow_operation" ""))
2111 (match_operand 1 "arm_immediate_operand" "I")))
2112 (clobber (match_scratch:SI 0 "=r"))]
2113 "TARGET_ARM"
2114 "rscs\\t%0, %2, %1"
2115 [(set_attr "conds" "set")
2116 (set_attr "type" "alus_imm")]
2117 )
2118
2119 (define_insn "usubvsi3_borrow"
2120 [(set (reg:CC_B CC_REGNUM)
2121 (compare:CC_B
2122 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
2123 (plus:DI (match_operand:DI 4 "arm_borrow_operation" "")
2124 (zero_extend:DI
2125 (match_operand:SI 2 "s_register_operand" "l,r")))))
2126 (set (match_operand:SI 0 "s_register_operand" "=l,r")
2127 (minus:SI (match_dup 1)
2128 (plus:SI (match_operand:SI 3 "arm_borrow_operation" "")
2129 (match_dup 2))))]
2130 "TARGET_32BIT"
2131 "sbcs%?\\t%0, %1, %2"
2132 [(set_attr "conds" "set")
2133 (set_attr "arch" "t2,*")
2134 (set_attr "length" "2,4")]
2135 )
2136
2137 (define_insn "usubvsi3_borrow_imm"
2138 [(set (reg:CC_B CC_REGNUM)
2139 (compare:CC_B
2140 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
2141 (plus:DI (match_operand:DI 5 "arm_borrow_operation" "")
2142 (match_operand:DI 3 "const_int_operand" "n,n"))))
2143 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2144 (minus:SI (match_dup 1)
2145 (plus:SI (match_operand:SI 4 "arm_borrow_operation" "")
2146 (match_operand:SI 2 "arm_adcimm_operand" "I,K"))))]
2147 "TARGET_32BIT
2148 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[3])"
2149 "@
2150 sbcs%?\\t%0, %1, %2
2151 adcs%?\\t%0, %1, #%B2"
2152 [(set_attr "conds" "set")
2153 (set_attr "type" "alus_imm")]
2154 )
2155
2156 (define_insn "subvsi3_borrow"
2157 [(set (reg:CC_V CC_REGNUM)
2158 (compare:CC_V
2159 (minus:DI
2160 (minus:DI
2161 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
2162 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
2163 (match_operand:DI 4 "arm_borrow_operation" ""))
2164 (sign_extend:DI
2165 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2166 (match_operand:SI 3 "arm_borrow_operation" "")))))
2167 (set (match_operand:SI 0 "s_register_operand" "=l,r")
2168 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2169 (match_dup 3)))]
2170 "TARGET_32BIT"
2171 "sbcs%?\\t%0, %1, %2"
2172 [(set_attr "conds" "set")
2173 (set_attr "arch" "t2,*")
2174 (set_attr "length" "2,4")]
2175 )
2176
2177 (define_insn "subvsi3_borrow_imm"
2178 [(set (reg:CC_V CC_REGNUM)
2179 (compare:CC_V
2180 (minus:DI
2181 (minus:DI
2182 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
2183 (match_operand 2 "arm_adcimm_operand" "I,K"))
2184 (match_operand:DI 4 "arm_borrow_operation" ""))
2185 (sign_extend:DI
2186 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2187 (match_operand:SI 3 "arm_borrow_operation" "")))))
2188 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2189 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2190 (match_dup 3)))]
2191 "TARGET_32BIT
2192 && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
2193 "@
2194 sbcs%?\\t%0, %1, %2
2195 adcs%?\\t%0, %1, #%B2"
2196 [(set_attr "conds" "set")
2197 (set_attr "type" "alus_imm")]
2198 )
2199
2200 (define_expand "subsf3"
2201 [(set (match_operand:SF 0 "s_register_operand")
2202 (minus:SF (match_operand:SF 1 "s_register_operand")
2203 (match_operand:SF 2 "s_register_operand")))]
2204 "TARGET_32BIT && TARGET_HARD_FLOAT"
2205 "
2206 ")
2207
2208 (define_expand "subdf3"
2209 [(set (match_operand:DF 0 "s_register_operand")
2210 (minus:DF (match_operand:DF 1 "s_register_operand")
2211 (match_operand:DF 2 "s_register_operand")))]
2212 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2213 "
2214 ")
2215
2216 \f
2217 ;; Multiplication insns
2218
2219 (define_expand "mulhi3"
2220 [(set (match_operand:HI 0 "s_register_operand")
2221 (mult:HI (match_operand:HI 1 "s_register_operand")
2222 (match_operand:HI 2 "s_register_operand")))]
2223 "TARGET_DSP_MULTIPLY"
2224 "
2225 {
2226 rtx result = gen_reg_rtx (SImode);
2227 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
2228 emit_move_insn (operands[0], gen_lowpart (HImode, result));
2229 DONE;
2230 }"
2231 )
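;; Editorial note: the expander above performs an HImode multiply by forming
;; the full 32-bit SMULBB product and keeping its low half; this is exact
;; because the HImode result is just the 32-bit product reduced mod 2^16.
;; A hedged C-level equivalent:
;;
;;   short mulhi (short a, short b)
;;   {
;;     return (short) ((int) a * (int) b);   /* low 16 bits of the product */
;;   }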
2232
2233 (define_expand "mulsi3"
2234 [(set (match_operand:SI 0 "s_register_operand")
2235 (mult:SI (match_operand:SI 2 "s_register_operand")
2236 (match_operand:SI 1 "s_register_operand")))]
2237 "TARGET_EITHER"
2238 ""
2239 )
2240
2241 ;; Use `&' and then `0' to prevent operands 0 and 2 being the same
2242 (define_insn "*mul"
2243 [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
2244 (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
2245 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
2246 "TARGET_32BIT"
2247 "mul%?\\t%0, %2, %1"
2248 [(set_attr "type" "mul")
2249 (set_attr "predicable" "yes")
2250 (set_attr "arch" "t2,v6,nov6,nov6")
2251 (set_attr "length" "4")
2252 (set_attr "predicable_short_it" "yes,no,*,*")]
2253 )
2254
2255 ;; MLA and MLS instructions. Use operand 1 for the accumulator to prefer
2256 ;; reusing the same register.
2257
2258 (define_insn "*mla"
2259 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
2260 (plus:SI
2261 (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
2262 (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
2263 (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
2264 "TARGET_32BIT"
2265 "mla%?\\t%0, %3, %2, %1"
2266 [(set_attr "type" "mla")
2267 (set_attr "predicable" "yes")
2268 (set_attr "arch" "v6,nov6,nov6,nov6")]
2269 )
2270
2271 (define_insn "*mls"
2272 [(set (match_operand:SI 0 "s_register_operand" "=r")
2273 (minus:SI
2274 (match_operand:SI 1 "s_register_operand" "r")
2275 (mult:SI (match_operand:SI 3 "s_register_operand" "r")
2276 (match_operand:SI 2 "s_register_operand" "r"))))]
2277 "TARGET_32BIT && arm_arch_thumb2"
2278 "mls%?\\t%0, %3, %2, %1"
2279 [(set_attr "type" "mla")
2280 (set_attr "predicable" "yes")]
2281 )
2282
2283 (define_insn "*mulsi3_compare0"
2284 [(set (reg:CC_NZ CC_REGNUM)
2285 (compare:CC_NZ (mult:SI
2286 (match_operand:SI 2 "s_register_operand" "r,r")
2287 (match_operand:SI 1 "s_register_operand" "%0,r"))
2288 (const_int 0)))
2289 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
2290 (mult:SI (match_dup 2) (match_dup 1)))]
2291 "TARGET_ARM && !arm_arch6"
2292 "muls%?\\t%0, %2, %1"
2293 [(set_attr "conds" "set")
2294 (set_attr "type" "muls")]
2295 )
2296
2297 (define_insn "*mulsi3_compare0_v6"
2298 [(set (reg:CC_NZ CC_REGNUM)
2299 (compare:CC_NZ (mult:SI
2300 (match_operand:SI 2 "s_register_operand" "r")
2301 (match_operand:SI 1 "s_register_operand" "r"))
2302 (const_int 0)))
2303 (set (match_operand:SI 0 "s_register_operand" "=r")
2304 (mult:SI (match_dup 2) (match_dup 1)))]
2305 "TARGET_ARM && arm_arch6 && optimize_size"
2306 "muls%?\\t%0, %2, %1"
2307 [(set_attr "conds" "set")
2308 (set_attr "type" "muls")]
2309 )
2310
2311 (define_insn "*mulsi_compare0_scratch"
2312 [(set (reg:CC_NZ CC_REGNUM)
2313 (compare:CC_NZ (mult:SI
2314 (match_operand:SI 2 "s_register_operand" "r,r")
2315 (match_operand:SI 1 "s_register_operand" "%0,r"))
2316 (const_int 0)))
2317 (clobber (match_scratch:SI 0 "=&r,&r"))]
2318 "TARGET_ARM && !arm_arch6"
2319 "muls%?\\t%0, %2, %1"
2320 [(set_attr "conds" "set")
2321 (set_attr "type" "muls")]
2322 )
2323
2324 (define_insn "*mulsi_compare0_scratch_v6"
2325 [(set (reg:CC_NZ CC_REGNUM)
2326 (compare:CC_NZ (mult:SI
2327 (match_operand:SI 2 "s_register_operand" "r")
2328 (match_operand:SI 1 "s_register_operand" "r"))
2329 (const_int 0)))
2330 (clobber (match_scratch:SI 0 "=r"))]
2331 "TARGET_ARM && arm_arch6 && optimize_size"
2332 "muls%?\\t%0, %2, %1"
2333 [(set_attr "conds" "set")
2334 (set_attr "type" "muls")]
2335 )
2336
2337 (define_insn "*mulsi3addsi_compare0"
2338 [(set (reg:CC_NZ CC_REGNUM)
2339 (compare:CC_NZ
2340 (plus:SI (mult:SI
2341 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2342 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2343 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
2344 (const_int 0)))
2345 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
2346 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2347 (match_dup 3)))]
2348 "TARGET_ARM && arm_arch6"
2349 "mlas%?\\t%0, %2, %1, %3"
2350 [(set_attr "conds" "set")
2351 (set_attr "type" "mlas")]
2352 )
2353
2354 (define_insn "*mulsi3addsi_compare0_v6"
2355 [(set (reg:CC_NZ CC_REGNUM)
2356 (compare:CC_NZ
2357 (plus:SI (mult:SI
2358 (match_operand:SI 2 "s_register_operand" "r")
2359 (match_operand:SI 1 "s_register_operand" "r"))
2360 (match_operand:SI 3 "s_register_operand" "r"))
2361 (const_int 0)))
2362 (set (match_operand:SI 0 "s_register_operand" "=r")
2363 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2364 (match_dup 3)))]
2365 "TARGET_ARM && arm_arch6 && optimize_size"
2366 "mlas%?\\t%0, %2, %1, %3"
2367 [(set_attr "conds" "set")
2368 (set_attr "type" "mlas")]
2369 )
2370
2371 (define_insn "*mulsi3addsi_compare0_scratch"
2372 [(set (reg:CC_NZ CC_REGNUM)
2373 (compare:CC_NZ
2374 (plus:SI (mult:SI
2375 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2376 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2377 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
2378 (const_int 0)))
2379 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
2380 "TARGET_ARM && !arm_arch6"
2381 "mlas%?\\t%0, %2, %1, %3"
2382 [(set_attr "conds" "set")
2383 (set_attr "type" "mlas")]
2384 )
2385
2386 (define_insn "*mulsi3addsi_compare0_scratch_v6"
2387 [(set (reg:CC_NZ CC_REGNUM)
2388 (compare:CC_NZ
2389 (plus:SI (mult:SI
2390 (match_operand:SI 2 "s_register_operand" "r")
2391 (match_operand:SI 1 "s_register_operand" "r"))
2392 (match_operand:SI 3 "s_register_operand" "r"))
2393 (const_int 0)))
2394 (clobber (match_scratch:SI 0 "=r"))]
2395 "TARGET_ARM && arm_arch6 && optimize_size"
2396 "mlas%?\\t%0, %2, %1, %3"
2397 [(set_attr "conds" "set")
2398 (set_attr "type" "mlas")]
2399 )
2400
2401 ;; 32x32->64 widening multiply.
2402 ;; The only difference between the v3-5 and v6+ versions is the requirement
2403 ;; that the output does not overlap with either input.
2404
2405 (define_expand "<Us>mulsidi3"
2406 [(set (match_operand:DI 0 "s_register_operand")
2407 (mult:DI
2408 (SE:DI (match_operand:SI 1 "s_register_operand"))
2409 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
2410 "TARGET_32BIT"
2411 {
2412 emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
2413 gen_highpart (SImode, operands[0]),
2414 operands[1], operands[2]));
2415 DONE;
2416 }
2417 )
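;; Editorial note: <Us>mulsidi3 is the standard widening-multiply name, so a
;; hedged C-level source for the unsigned case is simply:
;;
;;   unsigned long long widen_mul (unsigned int a, unsigned int b)
;;   {
;;     return (unsigned long long) a * b;   /* typically one UMULL */
;;   }
;;
;; The expander just directs the low and high SImode halves of the DImode
;; result into the <US>mull pattern below.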
2418
2419 (define_insn "<US>mull"
2420 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2421 (mult:SI
2422 (match_operand:SI 2 "s_register_operand" "%r,r")
2423 (match_operand:SI 3 "s_register_operand" "r,r")))
2424 (set (match_operand:SI 1 "s_register_operand" "=r,&r")
2425 (truncate:SI
2426 (lshiftrt:DI
2427 (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
2428 (const_int 32))))]
2429 "TARGET_32BIT"
2430 "<US>mull%?\\t%0, %1, %2, %3"
2431 [(set_attr "type" "umull")
2432 (set_attr "predicable" "yes")
2433 (set_attr "arch" "v6,nov6")]
2434 )
2435
2436 (define_expand "<Us>maddsidi4"
2437 [(set (match_operand:DI 0 "s_register_operand")
2438 (plus:DI
2439 (mult:DI
2440 (SE:DI (match_operand:SI 1 "s_register_operand"))
2441 (SE:DI (match_operand:SI 2 "s_register_operand")))
2442 (match_operand:DI 3 "s_register_operand")))]
2443 "TARGET_32BIT"
2444 {
2445 emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
2446 gen_lowpart (SImode, operands[3]),
2447 gen_highpart (SImode, operands[0]),
2448 gen_highpart (SImode, operands[3]),
2449 operands[1], operands[2]));
2450 DONE;
2451 }
2452 )
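;; Editorial note: likewise, <Us>maddsidi4 is the widening multiply-accumulate,
;; e.g. (hedged C sketch) acc + (unsigned long long) a * b, and it is lowered
;; onto the <US>mlal pattern that follows.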
2453
2454 (define_insn "<US>mlal"
2455 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2456 (plus:SI
2457 (mult:SI
2458 (match_operand:SI 4 "s_register_operand" "%r,r")
2459 (match_operand:SI 5 "s_register_operand" "r,r"))
2460 (match_operand:SI 1 "s_register_operand" "0,0")))
2461 (set (match_operand:SI 2 "s_register_operand" "=r,&r")
2462 (plus:SI
2463 (truncate:SI
2464 (lshiftrt:DI
2465 (plus:DI
2466 (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
2467 (zero_extend:DI (match_dup 1)))
2468 (const_int 32)))
2469 (match_operand:SI 3 "s_register_operand" "2,2")))]
2470 "TARGET_32BIT"
2471 "<US>mlal%?\\t%0, %2, %4, %5"
2472 [(set_attr "type" "umlal")
2473 (set_attr "predicable" "yes")
2474 (set_attr "arch" "v6,nov6")]
2475 )
2476
2477 (define_expand "<US>mulsi3_highpart"
2478 [(parallel
2479 [(set (match_operand:SI 0 "s_register_operand")
2480 (truncate:SI
2481 (lshiftrt:DI
2482 (mult:DI
2483 (SE:DI (match_operand:SI 1 "s_register_operand"))
2484 (SE:DI (match_operand:SI 2 "s_register_operand")))
2485 (const_int 32))))
2486 (clobber (match_scratch:SI 3 ""))])]
2487 "TARGET_32BIT"
2488 ""
2489 )
2490
2491 (define_insn "*<US>mull_high"
2492 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
2493 (truncate:SI
2494 (lshiftrt:DI
2495 (mult:DI
2496 (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
2497 (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
2498 (const_int 32))))
2499 (clobber (match_scratch:SI 3 "=r,&r,&r"))]
2500 "TARGET_32BIT"
2501 "<US>mull%?\\t%3, %0, %2, %1"
2502 [(set_attr "type" "umull")
2503 (set_attr "predicable" "yes")
2504 (set_attr "arch" "v6,nov6,nov6")]
2505 )
2506
2507 (define_insn "mulhisi3"
2508 [(set (match_operand:SI 0 "s_register_operand" "=r")
2509 (mult:SI (sign_extend:SI
2510 (match_operand:HI 1 "s_register_operand" "%r"))
2511 (sign_extend:SI
2512 (match_operand:HI 2 "s_register_operand" "r"))))]
2513 "TARGET_DSP_MULTIPLY"
2514 "smulbb%?\\t%0, %1, %2"
2515 [(set_attr "type" "smulxy")
2516 (set_attr "predicable" "yes")]
2517 )
2518
2519 (define_insn "*mulhisi3tb"
2520 [(set (match_operand:SI 0 "s_register_operand" "=r")
2521 (mult:SI (ashiftrt:SI
2522 (match_operand:SI 1 "s_register_operand" "r")
2523 (const_int 16))
2524 (sign_extend:SI
2525 (match_operand:HI 2 "s_register_operand" "r"))))]
2526 "TARGET_DSP_MULTIPLY"
2527 "smultb%?\\t%0, %1, %2"
2528 [(set_attr "type" "smulxy")
2529 (set_attr "predicable" "yes")]
2530 )
2531
2532 (define_insn "*mulhisi3bt"
2533 [(set (match_operand:SI 0 "s_register_operand" "=r")
2534 (mult:SI (sign_extend:SI
2535 (match_operand:HI 1 "s_register_operand" "r"))
2536 (ashiftrt:SI
2537 (match_operand:SI 2 "s_register_operand" "r")
2538 (const_int 16))))]
2539 "TARGET_DSP_MULTIPLY"
2540 "smulbt%?\\t%0, %1, %2"
2541 [(set_attr "type" "smulxy")
2542 (set_attr "predicable" "yes")]
2543 )
2544
2545 (define_insn "*mulhisi3tt"
2546 [(set (match_operand:SI 0 "s_register_operand" "=r")
2547 (mult:SI (ashiftrt:SI
2548 (match_operand:SI 1 "s_register_operand" "r")
2549 (const_int 16))
2550 (ashiftrt:SI
2551 (match_operand:SI 2 "s_register_operand" "r")
2552 (const_int 16))))]
2553 "TARGET_DSP_MULTIPLY"
2554 "smultt%?\\t%0, %1, %2"
2555 [(set_attr "type" "smulxy")
2556 (set_attr "predicable" "yes")]
2557 )
2558
2559 (define_expand "maddhisi4"
2560 [(set (match_operand:SI 0 "s_register_operand")
2561 (plus:SI (mult:SI (sign_extend:SI
2562 (match_operand:HI 1 "s_register_operand"))
2563 (sign_extend:SI
2564 (match_operand:HI 2 "s_register_operand")))
2565 (match_operand:SI 3 "s_register_operand")))]
2566 "TARGET_DSP_MULTIPLY"
2567 {
2568 /* If this function reads the Q bit from ACLE intrinsics, break up the
2569 multiplication and accumulation, as an overflow during accumulation will
2570 clobber the Q flag. */
2571 if (ARM_Q_BIT_READ)
2572 {
2573 rtx tmp = gen_reg_rtx (SImode);
2574 emit_insn (gen_mulhisi3 (tmp, operands[1], operands[2]));
2575 emit_insn (gen_addsi3 (operands[0], tmp, operands[3]));
2576 DONE;
2577 }
2578 }
2579 )
2580
2581 (define_insn "*arm_maddhisi4"
2582 [(set (match_operand:SI 0 "s_register_operand" "=r")
2583 (plus:SI (mult:SI (sign_extend:SI
2584 (match_operand:HI 1 "s_register_operand" "r"))
2585 (sign_extend:SI
2586 (match_operand:HI 2 "s_register_operand" "r")))
2587 (match_operand:SI 3 "s_register_operand" "r")))]
2588 "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2589 "smlabb%?\\t%0, %1, %2, %3"
2590 [(set_attr "type" "smlaxy")
2591 (set_attr "predicable" "yes")]
2592 )
2593
2594 (define_insn "arm_smlabb_setq"
2595 [(set (match_operand:SI 0 "s_register_operand" "=r")
2596 (plus:SI (mult:SI (sign_extend:SI
2597 (match_operand:HI 1 "s_register_operand" "r"))
2598 (sign_extend:SI
2599 (match_operand:HI 2 "s_register_operand" "r")))
2600 (match_operand:SI 3 "s_register_operand" "r")))
2601 (set (reg:CC APSRQ_REGNUM)
2602 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2603 "TARGET_DSP_MULTIPLY"
2604 "smlabb%?\\t%0, %1, %2, %3"
2605 [(set_attr "type" "smlaxy")
2606 (set_attr "predicable" "yes")]
2607 )
2608
2609 (define_expand "arm_smlabb"
2610 [(match_operand:SI 0 "s_register_operand")
2611 (match_operand:SI 1 "s_register_operand")
2612 (match_operand:SI 2 "s_register_operand")
2613 (match_operand:SI 3 "s_register_operand")]
2614 "TARGET_DSP_MULTIPLY"
2615 {
2616 rtx mult1 = gen_lowpart (HImode, operands[1]);
2617 rtx mult2 = gen_lowpart (HImode, operands[2]);
2618 if (ARM_Q_BIT_READ)
2619 emit_insn (gen_arm_smlabb_setq (operands[0], mult1, mult2, operands[3]));
2620 else
2621 emit_insn (gen_maddhisi4 (operands[0], mult1, mult2, operands[3]));
2622 DONE;
2623 }
2624 )
2625
2626 ;; Note: there is no maddhisi4ibt because this one is the canonical form
2627 (define_insn "maddhisi4tb"
2628 [(set (match_operand:SI 0 "s_register_operand" "=r")
2629 (plus:SI (mult:SI (ashiftrt:SI
2630 (match_operand:SI 1 "s_register_operand" "r")
2631 (const_int 16))
2632 (sign_extend:SI
2633 (match_operand:HI 2 "s_register_operand" "r")))
2634 (match_operand:SI 3 "s_register_operand" "r")))]
2635 "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2636 "smlatb%?\\t%0, %1, %2, %3"
2637 [(set_attr "type" "smlaxy")
2638 (set_attr "predicable" "yes")]
2639 )
2640
2641 (define_insn "arm_smlatb_setq"
2642 [(set (match_operand:SI 0 "s_register_operand" "=r")
2643 (plus:SI (mult:SI (ashiftrt:SI
2644 (match_operand:SI 1 "s_register_operand" "r")
2645 (const_int 16))
2646 (sign_extend:SI
2647 (match_operand:HI 2 "s_register_operand" "r")))
2648 (match_operand:SI 3 "s_register_operand" "r")))
2649 (set (reg:CC APSRQ_REGNUM)
2650 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2651 "TARGET_DSP_MULTIPLY"
2652 "smlatb%?\\t%0, %1, %2, %3"
2653 [(set_attr "type" "smlaxy")
2654 (set_attr "predicable" "yes")]
2655 )
2656
2657 (define_expand "arm_smlatb"
2658 [(match_operand:SI 0 "s_register_operand")
2659 (match_operand:SI 1 "s_register_operand")
2660 (match_operand:SI 2 "s_register_operand")
2661 (match_operand:SI 3 "s_register_operand")]
2662 "TARGET_DSP_MULTIPLY"
2663 {
2664 rtx mult2 = gen_lowpart (HImode, operands[2]);
2665 if (ARM_Q_BIT_READ)
2666 emit_insn (gen_arm_smlatb_setq (operands[0], operands[1],
2667 mult2, operands[3]));
2668 else
2669 emit_insn (gen_maddhisi4tb (operands[0], operands[1],
2670 mult2, operands[3]));
2671 DONE;
2672 }
2673 )
2674
2675 (define_insn "maddhisi4tt"
2676 [(set (match_operand:SI 0 "s_register_operand" "=r")
2677 (plus:SI (mult:SI (ashiftrt:SI
2678 (match_operand:SI 1 "s_register_operand" "r")
2679 (const_int 16))
2680 (ashiftrt:SI
2681 (match_operand:SI 2 "s_register_operand" "r")
2682 (const_int 16)))
2683 (match_operand:SI 3 "s_register_operand" "r")))]
2684 "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2685 "smlatt%?\\t%0, %1, %2, %3"
2686 [(set_attr "type" "smlaxy")
2687 (set_attr "predicable" "yes")]
2688 )
2689
2690 (define_insn "arm_smlatt_setq"
2691 [(set (match_operand:SI 0 "s_register_operand" "=r")
2692 (plus:SI (mult:SI (ashiftrt:SI
2693 (match_operand:SI 1 "s_register_operand" "r")
2694 (const_int 16))
2695 (ashiftrt:SI
2696 (match_operand:SI 2 "s_register_operand" "r")
2697 (const_int 16)))
2698 (match_operand:SI 3 "s_register_operand" "r")))
2699 (set (reg:CC APSRQ_REGNUM)
2700 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2701 "TARGET_DSP_MULTIPLY"
2702 "smlatt%?\\t%0, %1, %2, %3"
2703 [(set_attr "type" "smlaxy")
2704 (set_attr "predicable" "yes")]
2705 )
2706
2707 (define_expand "arm_smlatt"
2708 [(match_operand:SI 0 "s_register_operand")
2709 (match_operand:SI 1 "s_register_operand")
2710 (match_operand:SI 2 "s_register_operand")
2711 (match_operand:SI 3 "s_register_operand")]
2712 "TARGET_DSP_MULTIPLY"
2713 {
2714 if (ARM_Q_BIT_READ)
2715 emit_insn (gen_arm_smlatt_setq (operands[0], operands[1],
2716 operands[2], operands[3]));
2717 else
2718 emit_insn (gen_maddhisi4tt (operands[0], operands[1],
2719 operands[2], operands[3]));
2720 DONE;
2721 }
2722 )
2723
2724 (define_insn "maddhidi4"
2725 [(set (match_operand:DI 0 "s_register_operand" "=r")
2726 (plus:DI
2727 (mult:DI (sign_extend:DI
2728 (match_operand:HI 1 "s_register_operand" "r"))
2729 (sign_extend:DI
2730 (match_operand:HI 2 "s_register_operand" "r")))
2731 (match_operand:DI 3 "s_register_operand" "0")))]
2732 "TARGET_DSP_MULTIPLY"
2733 "smlalbb%?\\t%Q0, %R0, %1, %2"
2734 [(set_attr "type" "smlalxy")
2735 (set_attr "predicable" "yes")])
2736
2737 ;; Note: there is no maddhidi4ibt because this one is the canonical form
2738 (define_insn "*maddhidi4tb"
2739 [(set (match_operand:DI 0 "s_register_operand" "=r")
2740 (plus:DI
2741 (mult:DI (sign_extend:DI
2742 (ashiftrt:SI
2743 (match_operand:SI 1 "s_register_operand" "r")
2744 (const_int 16)))
2745 (sign_extend:DI
2746 (match_operand:HI 2 "s_register_operand" "r")))
2747 (match_operand:DI 3 "s_register_operand" "0")))]
2748 "TARGET_DSP_MULTIPLY"
2749 "smlaltb%?\\t%Q0, %R0, %1, %2"
2750 [(set_attr "type" "smlalxy")
2751 (set_attr "predicable" "yes")])
2752
2753 (define_insn "*maddhidi4tt"
2754 [(set (match_operand:DI 0 "s_register_operand" "=r")
2755 (plus:DI
2756 (mult:DI (sign_extend:DI
2757 (ashiftrt:SI
2758 (match_operand:SI 1 "s_register_operand" "r")
2759 (const_int 16)))
2760 (sign_extend:DI
2761 (ashiftrt:SI
2762 (match_operand:SI 2 "s_register_operand" "r")
2763 (const_int 16))))
2764 (match_operand:DI 3 "s_register_operand" "0")))]
2765 "TARGET_DSP_MULTIPLY"
2766 "smlaltt%?\\t%Q0, %R0, %1, %2"
2767 [(set_attr "type" "smlalxy")
2768 (set_attr "predicable" "yes")])
2769
2770 (define_insn "arm_<smlaw_op><add_clobber_q_name>_insn"
2771 [(set (match_operand:SI 0 "s_register_operand" "=r")
2772 (unspec:SI
2773 [(match_operand:SI 1 "s_register_operand" "r")
2774 (match_operand:SI 2 "s_register_operand" "r")
2775 (match_operand:SI 3 "s_register_operand" "r")]
2776 SMLAWBT))]
2777 "TARGET_DSP_MULTIPLY && <add_clobber_q_pred>"
2778 "<smlaw_op>%?\\t%0, %1, %2, %3"
2779 [(set_attr "type" "smlaxy")
2780 (set_attr "predicable" "yes")]
2781 )
2782
2783 (define_expand "arm_<smlaw_op>"
2784 [(set (match_operand:SI 0 "s_register_operand")
2785 (unspec:SI
2786 [(match_operand:SI 1 "s_register_operand")
2787 (match_operand:SI 2 "s_register_operand")
2788 (match_operand:SI 3 "s_register_operand")]
2789 SMLAWBT))]
2790 "TARGET_DSP_MULTIPLY"
2791 {
2792 if (ARM_Q_BIT_READ)
2793 emit_insn (gen_arm_<smlaw_op>_setq_insn (operands[0], operands[1],
2794 operands[2], operands[3]));
2795 else
2796 emit_insn (gen_arm_<smlaw_op>_insn (operands[0], operands[1],
2797 operands[2], operands[3]));
2798 DONE;
2799 }
2800 )
2801
2802 (define_expand "mulsf3"
2803 [(set (match_operand:SF 0 "s_register_operand")
2804 (mult:SF (match_operand:SF 1 "s_register_operand")
2805 (match_operand:SF 2 "s_register_operand")))]
2806 "TARGET_32BIT && TARGET_HARD_FLOAT"
2807 "
2808 ")
2809
2810 (define_expand "muldf3"
2811 [(set (match_operand:DF 0 "s_register_operand")
2812 (mult:DF (match_operand:DF 1 "s_register_operand")
2813 (match_operand:DF 2 "s_register_operand")))]
2814 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2815 "
2816 ")
2817 \f
2818 ;; Division insns
2819
2820 (define_expand "divsf3"
2821 [(set (match_operand:SF 0 "s_register_operand")
2822 (div:SF (match_operand:SF 1 "s_register_operand")
2823 (match_operand:SF 2 "s_register_operand")))]
2824 "TARGET_32BIT && TARGET_HARD_FLOAT"
2825 "")
2826
2827 (define_expand "divdf3"
2828 [(set (match_operand:DF 0 "s_register_operand")
2829 (div:DF (match_operand:DF 1 "s_register_operand")
2830 (match_operand:DF 2 "s_register_operand")))]
2831 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
2832 "")
2833 \f
2834
2835 ; Expand logical operations. The mid-end expander does not split off memory
2836 ; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
2837 ; So an explicit expander is needed to generate better code.
2838
2839 (define_expand "<LOGICAL:optab>di3"
2840 [(set (match_operand:DI 0 "s_register_operand")
2841 (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
2842 (match_operand:DI 2 "arm_<optab>di_operand")))]
2843 "TARGET_32BIT"
2844 {
2845 rtx low = simplify_gen_binary (<CODE>, SImode,
2846 gen_lowpart (SImode, operands[1]),
2847 gen_lowpart (SImode, operands[2]));
2848 rtx high = simplify_gen_binary (<CODE>, SImode,
2849 gen_highpart (SImode, operands[1]),
2850 gen_highpart_mode (SImode, DImode,
2851 operands[2]));
2852
2853 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2854 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2855 DONE;
2856 }
2857 )
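;; Editorial note: the expander above splits a DImode AND/IOR/XOR into two
;; independent SImode operations, which is always exact because bitwise
;; operations have no carries between bit positions.  A hedged C-level sketch
;; of the lowering:
;;
;;   unsigned int lo = (unsigned int) x & (unsigned int) y;
;;   unsigned int hi = (unsigned int) (x >> 32) & (unsigned int) (y >> 32);
;;   unsigned long long r = ((unsigned long long) hi << 32) | lo;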
2858
2859 (define_expand "one_cmpldi2"
2860 [(set (match_operand:DI 0 "s_register_operand")
2861 (not:DI (match_operand:DI 1 "s_register_operand")))]
2862 "TARGET_32BIT"
2863 {
2864 rtx low = simplify_gen_unary (NOT, SImode,
2865 gen_lowpart (SImode, operands[1]),
2866 SImode);
2867 rtx high = simplify_gen_unary (NOT, SImode,
2868 gen_highpart_mode (SImode, DImode,
2869 operands[1]),
2870 SImode);
2871
2872 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2873 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2874 DONE;
2875 }
2876 )
2877
2878 ;; Split DImode and, ior, xor operations. Simply perform the logical
2879 ;; operation on the upper and lower halves of the registers.
2880 ;; This is needed for atomic operations in arm_split_atomic_op.
2881 ;; Avoid splitting IWMMXT instructions.
2882 (define_split
2883 [(set (match_operand:DI 0 "s_register_operand" "")
2884 (match_operator:DI 6 "logical_binary_operator"
2885 [(match_operand:DI 1 "s_register_operand" "")
2886 (match_operand:DI 2 "s_register_operand" "")]))]
2887 "TARGET_32BIT && reload_completed
2888 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2889 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2890 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
2891 "
2892 {
2893 operands[3] = gen_highpart (SImode, operands[0]);
2894 operands[0] = gen_lowpart (SImode, operands[0]);
2895 operands[4] = gen_highpart (SImode, operands[1]);
2896 operands[1] = gen_lowpart (SImode, operands[1]);
2897 operands[5] = gen_highpart (SImode, operands[2]);
2898 operands[2] = gen_lowpart (SImode, operands[2]);
2899 }"
2900 )
2901
2902 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
2903 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
2904 (define_split
2905 [(set (match_operand:DI 0 "s_register_operand")
2906 (not:DI (match_operand:DI 1 "s_register_operand")))]
2907 "TARGET_32BIT"
2908 [(set (match_dup 0) (not:SI (match_dup 1)))
2909 (set (match_dup 2) (not:SI (match_dup 3)))]
2910 "
2911 {
2912 operands[2] = gen_highpart (SImode, operands[0]);
2913 operands[0] = gen_lowpart (SImode, operands[0]);
2914 operands[3] = gen_highpart (SImode, operands[1]);
2915 operands[1] = gen_lowpart (SImode, operands[1]);
2916 }"
2917 )
2918
2919 (define_expand "andsi3"
2920 [(set (match_operand:SI 0 "s_register_operand")
2921 (and:SI (match_operand:SI 1 "s_register_operand")
2922 (match_operand:SI 2 "reg_or_int_operand")))]
2923 "TARGET_EITHER"
2924 "
2925 if (TARGET_32BIT)
2926 {
2927 if (CONST_INT_P (operands[2]))
2928 {
2929 if (INTVAL (operands[2]) == 255 && arm_arch6)
2930 {
2931 operands[1] = convert_to_mode (QImode, operands[1], 1);
2932 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2933 operands[1]));
2934 DONE;
2935 }
2936 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
2937 operands[2] = force_reg (SImode, operands[2]);
2938 else
2939 {
2940 arm_split_constant (AND, SImode, NULL_RTX,
2941 INTVAL (operands[2]), operands[0],
2942 operands[1],
2943 optimize && can_create_pseudo_p ());
2944
2945 DONE;
2946 }
2947 }
2948 }
2949 else /* TARGET_THUMB1 */
2950 {
2951 if (!CONST_INT_P (operands[2]))
2952 {
2953 rtx tmp = force_reg (SImode, operands[2]);
2954 if (rtx_equal_p (operands[0], operands[1]))
2955 operands[2] = tmp;
2956 else
2957 {
2958 operands[2] = operands[1];
2959 operands[1] = tmp;
2960 }
2961 }
2962 else
2963 {
2964 int i;
2965
2966 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2967 {
2968 operands[2] = force_reg (SImode,
2969 GEN_INT (~INTVAL (operands[2])));
2970
2971 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2972
2973 DONE;
2974 }
2975
2976 for (i = 9; i <= 31; i++)
2977 {
2978 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
2979 {
2980 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2981 const0_rtx));
2982 DONE;
2983 }
2984 else if ((HOST_WIDE_INT_1 << i) - 1
2985 == ~INTVAL (operands[2]))
2986 {
2987 rtx shift = GEN_INT (i);
2988 rtx reg = gen_reg_rtx (SImode);
2989
2990 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2991 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2992
2993 DONE;
2994 }
2995 }
2996
2997 operands[2] = force_reg (SImode, operands[2]);
2998 }
2999 }
3000 "
3001 )
3002
3003 ; ??? Check split length for Thumb-2
3004 (define_insn_and_split "*arm_andsi3_insn"
3005 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r,r")
3006 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,0,r")
3007 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,Dj,?n")))]
3008 "TARGET_32BIT"
3009 "@
3010 and%?\\t%0, %1, %2
3011 and%?\\t%0, %1, %2
3012 bic%?\\t%0, %1, #%B2
3013 and%?\\t%0, %1, %2
3014 bfc%?\\t%0, %V2
3015 #"
3016 "TARGET_32BIT
3017 && CONST_INT_P (operands[2])
3018 && !(const_ok_for_arm (INTVAL (operands[2]))
3019 || const_ok_for_arm (~INTVAL (operands[2]))
3020 || (arm_arch_thumb2
3021 && satisfies_constraint_Dj (operands[2])
3022 && (rtx_equal_p (operands[0], operands[1])
3023 || !reload_completed)))"
3024 [(clobber (const_int 0))]
3025 "
3026 arm_split_constant (AND, SImode, curr_insn,
3027 INTVAL (operands[2]), operands[0], operands[1], 0);
3028 DONE;
3029 "
3030 [(set_attr "length" "4,4,4,4,4,16")
3031 (set_attr "predicable" "yes")
3032 (set_attr "predicable_short_it" "no,yes,no,no,no,no")
3033 (set_attr "arch" "*,*,*,*,v6t2,*")
3034 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,bfm,logic_imm")]
3035 )
3036
3037 (define_insn "*andsi3_compare0"
3038 [(set (reg:CC_NZ CC_REGNUM)
3039 (compare:CC_NZ
3040 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
3041 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
3042 (const_int 0)))
3043 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3044 (and:SI (match_dup 1) (match_dup 2)))]
3045 "TARGET_32BIT"
3046 "@
3047 ands%?\\t%0, %1, %2
3048 bics%?\\t%0, %1, #%B2
3049 ands%?\\t%0, %1, %2"
3050 [(set_attr "conds" "set")
3051 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
3052 )
3053
3054 (define_insn "*andsi3_compare0_scratch"
3055 [(set (reg:CC_NZ CC_REGNUM)
3056 (compare:CC_NZ
3057 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
3058 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
3059 (const_int 0)))
3060 (clobber (match_scratch:SI 2 "=X,r,X"))]
3061 "TARGET_32BIT"
3062 "@
3063 tst%?\\t%0, %1
3064 bics%?\\t%2, %0, #%B1
3065 tst%?\\t%0, %1"
3066 [(set_attr "conds" "set")
3067 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
3068 )
3069
3070 (define_insn "*zeroextractsi_compare0_scratch"
3071 [(set (reg:CC_NZ CC_REGNUM)
3072 (compare:CC_NZ (zero_extract:SI
3073 (match_operand:SI 0 "s_register_operand" "r")
3074 (match_operand 1 "const_int_operand" "n")
3075 (match_operand 2 "const_int_operand" "n"))
3076 (const_int 0)))]
3077 "TARGET_32BIT
3078 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
3079 && INTVAL (operands[1]) > 0
3080 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
3081 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
3082 "*
3083 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
3084 << INTVAL (operands[2]));
3085 output_asm_insn (\"tst%?\\t%0, %1\", operands);
3086 return \"\";
3087 "
3088 [(set_attr "conds" "set")
3089 (set_attr "predicable" "yes")
3090 (set_attr "type" "logics_imm")]
3091 )
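;; Editorial example of the mask computed above: testing a 3-bit field that
;; starts at bit 4 gives ((1 << 3) - 1) << 4 = 0x70, so the pattern prints a
;; single "tst" of the source register against #112 instead of a separate
;; extract-and-compare sequence.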
3092
3093 (define_insn_and_split "*ne_zeroextractsi"
3094 [(set (match_operand:SI 0 "s_register_operand" "=r")
3095 (ne:SI (zero_extract:SI
3096 (match_operand:SI 1 "s_register_operand" "r")
3097 (match_operand:SI 2 "const_int_operand" "n")
3098 (match_operand:SI 3 "const_int_operand" "n"))
3099 (const_int 0)))
3100 (clobber (reg:CC CC_REGNUM))]
3101 "TARGET_32BIT
3102 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3103 && INTVAL (operands[2]) > 0
3104 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3105 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
3106 "#"
3107 "TARGET_32BIT
3108 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3109 && INTVAL (operands[2]) > 0
3110 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3111 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
3112 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3113 (compare:CC_NZ (and:SI (match_dup 1) (match_dup 2))
3114 (const_int 0)))
3115 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
3116 (set (match_dup 0)
3117 (if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3118 (match_dup 0) (const_int 1)))]
3119 "
3120 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
3121 << INTVAL (operands[3]));
3122 "
3123 [(set_attr "conds" "clob")
3124 (set (attr "length")
3125 (if_then_else (eq_attr "is_thumb" "yes")
3126 (const_int 12)
3127 (const_int 8)))
3128 (set_attr "type" "multiple")]
3129 )
3130
3131 (define_insn_and_split "*ne_zeroextractsi_shifted"
3132 [(set (match_operand:SI 0 "s_register_operand" "=r")
3133 (ne:SI (zero_extract:SI
3134 (match_operand:SI 1 "s_register_operand" "r")
3135 (match_operand:SI 2 "const_int_operand" "n")
3136 (const_int 0))
3137 (const_int 0)))
3138 (clobber (reg:CC CC_REGNUM))]
3139 "TARGET_ARM"
3140 "#"
3141 "TARGET_ARM"
3142 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3143 (compare:CC_NZ (ashift:SI (match_dup 1) (match_dup 2))
3144 (const_int 0)))
3145 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
3146 (set (match_dup 0)
3147 (if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3148 (match_dup 0) (const_int 1)))]
3149 "
3150 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
3151 "
3152 [(set_attr "conds" "clob")
3153 (set_attr "length" "8")
3154 (set_attr "type" "multiple")]
3155 )
3156
3157 (define_insn_and_split "*ite_ne_zeroextractsi"
3158 [(set (match_operand:SI 0 "s_register_operand" "=r")
3159 (if_then_else:SI (ne (zero_extract:SI
3160 (match_operand:SI 1 "s_register_operand" "r")
3161 (match_operand:SI 2 "const_int_operand" "n")
3162 (match_operand:SI 3 "const_int_operand" "n"))
3163 (const_int 0))
3164 (match_operand:SI 4 "arm_not_operand" "rIK")
3165 (const_int 0)))
3166 (clobber (reg:CC CC_REGNUM))]
3167 "TARGET_ARM
3168 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3169 && INTVAL (operands[2]) > 0
3170 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3171 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
3172 && !reg_overlap_mentioned_p (operands[0], operands[4])"
3173 "#"
3174 "TARGET_ARM
3175 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3176 && INTVAL (operands[2]) > 0
3177 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3178 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
3179 && !reg_overlap_mentioned_p (operands[0], operands[4])"
3180 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3181 (compare:CC_NZ (and:SI (match_dup 1) (match_dup 2))
3182 (const_int 0)))
3183 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
3184 (set (match_dup 0)
3185 (if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3186 (match_dup 0) (match_dup 4)))]
3187 "
3188 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
3189 << INTVAL (operands[3]));
3190 "
3191 [(set_attr "conds" "clob")
3192 (set_attr "length" "8")
3193 (set_attr "type" "multiple")]
3194 )
3195
3196 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
3197 [(set (match_operand:SI 0 "s_register_operand" "=r")
3198 (if_then_else:SI (ne (zero_extract:SI
3199 (match_operand:SI 1 "s_register_operand" "r")
3200 (match_operand:SI 2 "const_int_operand" "n")
3201 (const_int 0))
3202 (const_int 0))
3203 (match_operand:SI 3 "arm_not_operand" "rIK")
3204 (const_int 0)))
3205 (clobber (reg:CC CC_REGNUM))]
3206 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
3207 "#"
3208 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
3209 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3210 (compare:CC_NZ (ashift:SI (match_dup 1) (match_dup 2))
3211 (const_int 0)))
3212 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
3213 (set (match_dup 0)
3214 (if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3215 (match_dup 0) (match_dup 3)))]
3216 "
3217 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
3218 "
3219 [(set_attr "conds" "clob")
3220 (set_attr "length" "8")
3221 (set_attr "type" "multiple")]
3222 )
3223
3224 ;; ??? Use Thumb-2 bitfield insert/extract instructions.
3225 (define_split
3226 [(set (match_operand:SI 0 "s_register_operand" "")
3227 (match_operator:SI 1 "shiftable_operator"
3228 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3229 (match_operand:SI 3 "const_int_operand" "")
3230 (match_operand:SI 4 "const_int_operand" ""))
3231 (match_operand:SI 5 "s_register_operand" "")]))
3232 (clobber (match_operand:SI 6 "s_register_operand" ""))]
3233 "TARGET_ARM"
3234 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
3235 (set (match_dup 0)
3236 (match_op_dup 1
3237 [(lshiftrt:SI (match_dup 6) (match_dup 4))
3238 (match_dup 5)]))]
3239 "{
3240 HOST_WIDE_INT temp = INTVAL (operands[3]);
3241
3242 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
3243 operands[4] = GEN_INT (32 - temp);
3244 }"
3245 )
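;; Editorial check of the shift counts above: a WIDTH-bit field at bit POS is
;; extracted by shifting left by 32 - WIDTH - POS (moving the field to the
;; top) and then logically right by 32 - WIDTH.  E.g. WIDTH = 8, POS = 4:
;; (x << 20) >> 24 equals (x >> 4) & 0xff.  The sign_extract split below is
;; identical except that the second shift is arithmetic.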
3246
3247 (define_split
3248 [(set (match_operand:SI 0 "s_register_operand" "")
3249 (match_operator:SI 1 "shiftable_operator"
3250 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3251 (match_operand:SI 3 "const_int_operand" "")
3252 (match_operand:SI 4 "const_int_operand" ""))
3253 (match_operand:SI 5 "s_register_operand" "")]))
3254 (clobber (match_operand:SI 6 "s_register_operand" ""))]
3255 "TARGET_ARM"
3256 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
3257 (set (match_dup 0)
3258 (match_op_dup 1
3259 [(ashiftrt:SI (match_dup 6) (match_dup 4))
3260 (match_dup 5)]))]
3261 "{
3262 HOST_WIDE_INT temp = INTVAL (operands[3]);
3263
3264 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
3265 operands[4] = GEN_INT (32 - temp);
3266 }"
3267 )
3268
3269 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
3270 ;;; represented by the bitfield, then this will produce incorrect results.
3271 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
3272 ;;; which have a real bit-field insert instruction, the truncation happens
3273 ;;; in the bit-field insert instruction itself. Since arm does not have a
3274 ;;; bit-field insert instruction, we would have to emit code here to truncate
3275 ;;; the value before we insert. This loses some of the advantage of having
3276 ;;; this insv pattern, so this pattern needs to be reevaluated.
3277
3278 (define_expand "insv"
3279 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
3280 (match_operand 1 "general_operand")
3281 (match_operand 2 "general_operand"))
3282 (match_operand 3 "reg_or_int_operand"))]
3283 "TARGET_ARM || arm_arch_thumb2"
3284 "
3285 {
3286 int start_bit = INTVAL (operands[2]);
3287 int width = INTVAL (operands[1]);
3288 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
3289 rtx target, subtarget;
3290
3291 if (arm_arch_thumb2)
3292 {
3293 if (unaligned_access && MEM_P (operands[0])
3294 && s_register_operand (operands[3], GET_MODE (operands[3]))
3295 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
3296 {
3297 rtx base_addr;
3298
3299 if (BYTES_BIG_ENDIAN)
3300 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
3301 - start_bit;
3302
3303 if (width == 32)
3304 {
3305 base_addr = adjust_address (operands[0], SImode,
3306 start_bit / BITS_PER_UNIT);
3307 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
3308 }
3309 else
3310 {
3311 rtx tmp = gen_reg_rtx (HImode);
3312
3313 base_addr = adjust_address (operands[0], HImode,
3314 start_bit / BITS_PER_UNIT);
3315 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
3316 emit_insn (gen_unaligned_storehi (base_addr, tmp));
3317 }
3318 DONE;
3319 }
3320 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
3321 {
3322 bool use_bfi = TRUE;
3323
3324 if (CONST_INT_P (operands[3]))
3325 {
3326 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
3327
3328 if (val == 0)
3329 {
3330 emit_insn (gen_insv_zero (operands[0], operands[1],
3331 operands[2]));
3332 DONE;
3333 }
3334
3335 /* See if the set can be done with a single orr instruction. */
3336 if (val == mask && const_ok_for_arm (val << start_bit))
3337 use_bfi = FALSE;
3338 }
3339
3340 if (use_bfi)
3341 {
3342 if (!REG_P (operands[3]))
3343 operands[3] = force_reg (SImode, operands[3]);
3344
3345 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
3346 operands[3]));
3347 DONE;
3348 }
3349 }
3350 else
3351 FAIL;
3352 }
3353
3354 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
3355 FAIL;
3356
3357 target = copy_rtx (operands[0]);
3358 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
3359 subreg as the final target. */
3360 if (GET_CODE (target) == SUBREG)
3361 {
3362 subtarget = gen_reg_rtx (SImode);
3363 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
3364 < GET_MODE_SIZE (SImode))
3365 target = SUBREG_REG (target);
3366 }
3367 else
3368 subtarget = target;
3369
3370 if (CONST_INT_P (operands[3]))
3371 {
3372 /* Since we are inserting a known constant, we may be able to
3373 reduce the number of bits that we have to clear so that
3374 the mask becomes simple. */
3375 /* ??? This code does not check to see if the new mask is actually
3376 simpler. It may not be. */
3377 rtx op1 = gen_reg_rtx (SImode);
3378 /* ??? Truncate operand3 to fit in the bitfield. See comment before
3379 start of this pattern. */
3380 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
3381 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
3382
3383 emit_insn (gen_andsi3 (op1, operands[0],
3384 gen_int_mode (~mask2, SImode)));
3385 emit_insn (gen_iorsi3 (subtarget, op1,
3386 gen_int_mode (op3_value << start_bit, SImode)));
3387 }
3388 else if (start_bit == 0
3389 && !(const_ok_for_arm (mask)
3390 || const_ok_for_arm (~mask)))
3391 {
3392 /* A trick: since we are setting the bottom bits in the word,
3393 we can shift operand[3] up, shift operand[0] down, OR them together
3394 and rotate the result back again. This takes 3 insns, and
3395 the third might be mergeable into another op. */
3396 /* The shift up copes with the possibility that operand[3] is
3397 wider than the bitfield. */
3398 rtx op0 = gen_reg_rtx (SImode);
3399 rtx op1 = gen_reg_rtx (SImode);
3400
3401 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3402 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
3403 emit_insn (gen_iorsi3 (op1, op1, op0));
3404 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
3405 }
3406 else if ((width + start_bit == 32)
3407 && !(const_ok_for_arm (mask)
3408 || const_ok_for_arm (~mask)))
3409 {
3410 /* Similar trick, but slightly less efficient. */
3411
3412 rtx op0 = gen_reg_rtx (SImode);
3413 rtx op1 = gen_reg_rtx (SImode);
3414
3415 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3416 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
3417 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
3418 emit_insn (gen_iorsi3 (subtarget, op1, op0));
3419 }
3420 else
3421 {
3422 rtx op0 = gen_int_mode (mask, SImode);
3423 rtx op1 = gen_reg_rtx (SImode);
3424 rtx op2 = gen_reg_rtx (SImode);
3425
3426 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
3427 {
3428 rtx tmp = gen_reg_rtx (SImode);
3429
3430 emit_insn (gen_movsi (tmp, op0));
3431 op0 = tmp;
3432 }
3433
3434 /* Mask out any bits in operand[3] that are not needed. */
3435 emit_insn (gen_andsi3 (op1, operands[3], op0));
3436
3437 if (CONST_INT_P (op0)
3438 && (const_ok_for_arm (mask << start_bit)
3439 || const_ok_for_arm (~(mask << start_bit))))
3440 {
3441 op0 = gen_int_mode (~(mask << start_bit), SImode);
3442 emit_insn (gen_andsi3 (op2, operands[0], op0));
3443 }
3444 else
3445 {
3446 if (CONST_INT_P (op0))
3447 {
3448 rtx tmp = gen_reg_rtx (SImode);
3449
3450 emit_insn (gen_movsi (tmp, op0));
3451 op0 = tmp;
3452 }
3453
3454 if (start_bit != 0)
3455 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
3456
3457 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
3458 }
3459
3460 if (start_bit != 0)
3461 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
3462
3463 emit_insn (gen_iorsi3 (subtarget, op1, op2));
3464 }
3465
3466 if (subtarget != target)
3467 {
3468 /* If TARGET is still a SUBREG, then it must be wider than a word,
3469 so we must be careful only to set the subword we were asked to. */
3470 if (GET_CODE (target) == SUBREG)
3471 emit_move_insn (target, subtarget);
3472 else
3473 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
3474 }
3475
3476 DONE;
3477 }"
3478 )
3479
3480 (define_insn_and_split "insv_zero"
3481 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3482 (match_operand:SI 1 "const_int_M_operand" "M")
3483 (match_operand:SI 2 "const_int_M_operand" "M"))
3484 (const_int 0))]
3485 "arm_arch_thumb2"
3486 "bfc%?\t%0, %2, %1"
3487 ""
3488 [(set (match_dup 0) (and:SI (match_dup 0) (match_dup 1)))]
3489 {
3490 /* Convert back to a normal AND operation, so that we can take advantage
3491 of BIC and AND when appropriate; we'll still emit BFC if that's the
3492 right thing to do. */
3493 unsigned HOST_WIDE_INT width = UINTVAL (operands[1]);
3494 unsigned HOST_WIDE_INT lsb = UINTVAL (operands[2]);
3495 unsigned HOST_WIDE_INT mask = (HOST_WIDE_INT_1U << width) - 1;
3496
3497 operands[1] = gen_int_mode (~(mask << lsb), SImode);
3498 }
3499 [(set_attr "length" "4")
3500 (set_attr "predicable" "yes")
3501 (set_attr "type" "bfm")]
3502 )
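
;; Illustration for "insv_zero" above (not taken verbatim from its output):
;; clearing a 4-bit field at bit 8 of r0 can be done either way the split
;; chooses between:
;;   bfc   r0, #8, #4            @ bit-field clear
;;   bic   r0, r0, #0xf00        @ equivalent AND with ~(0xf << 8)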
3503
3504 (define_insn "insv_t2"
3505 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3506 (match_operand:SI 1 "const_int_M_operand" "M")
3507 (match_operand:SI 2 "const_int_M_operand" "M"))
3508 (match_operand:SI 3 "s_register_operand" "r"))]
3509 "arm_arch_thumb2"
3510 "bfi%?\t%0, %3, %2, %1"
3511 [(set_attr "length" "4")
3512 (set_attr "predicable" "yes")
3513 (set_attr "type" "bfm")]
3514 )
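
;; Operand-order illustration for "insv_t2" above: inserting the low 5 bits
;; of r1 into r0 starting at bit 3 is printed as
;;   bfi   r0, r1, #3, #5        @ %0=r0, %3=r1, %2=lsb, %1=width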
3515
3516 (define_insn "*bfi"
3517 [(set (match_operand:SI 0 "s_register_operand" "=r")
3518 (ior:SI (and:SI (match_operand:SI 1 "s_register_operand" "0")
3519 (match_operand 2 "const_int_operand" "Dj"))
3520 (and:SI (ashift:SI
3521 (match_operand:SI 3 "s_register_operand" "r")
3522 (match_operand 4 "const_int_operand" "i"))
3523 (match_operand 5 "const_int_operand" "i"))))]
3524 "arm_arch_thumb2
3525 && UINTVAL (operands[4]) < 32
3526 && UINTVAL (operands[2]) == ~UINTVAL (operands[5])
3527 && (exact_log2 (UINTVAL (operands[5])
3528 + (HOST_WIDE_INT_1U << UINTVAL (operands[4])))
3529 >= 0)"
3530 "bfi%?\t%0, %3, %V2"
3531 [(set_attr "length" "4")
3532 (set_attr "predicable" "yes")
3533 (set_attr "type" "bfm")]
3534 )
3535
3536 (define_insn "*bfi_alt1"
3537 [(set (match_operand:SI 0 "s_register_operand" "=r")
3538 (ior:SI (and:SI (ashift:SI
3539 (match_operand:SI 3 "s_register_operand" "r")
3540 (match_operand 4 "const_int_operand" "i"))
3541 (match_operand 5 "const_int_operand" "i"))
3542 (and:SI (match_operand:SI 1 "s_register_operand" "0")
3543 (match_operand 2 "const_int_operand" "Dj"))))]
3544 "arm_arch_thumb2
3545 && UINTVAL (operands[4]) < 32
3546 && UINTVAL (operands[2]) == ~UINTVAL (operands[5])
3547 && (exact_log2 (UINTVAL (operands[5])
3548 + (HOST_WIDE_INT_1U << UINTVAL (operands[4])))
3549 >= 0)"
3550 "bfi%?\t%0, %3, %V2"
3551 [(set_attr "length" "4")
3552 (set_attr "predicable" "yes")
3553 (set_attr "type" "bfm")]
3554 )
3555
3556 (define_insn "*bfi_alt2"
3557 [(set (match_operand:SI 0 "s_register_operand" "=r")
3558 (ior:SI (and:SI (match_operand:SI 1 "s_register_operand" "0")
3559 (match_operand 2 "const_int_operand" "i"))
3560 (and:SI (match_operand:SI 3 "s_register_operand" "r")
3561 (match_operand 4 "const_int_operand" "i"))))]
3562 "arm_arch_thumb2
3563 && UINTVAL (operands[2]) == ~UINTVAL (operands[4])
3564 && exact_log2 (UINTVAL (operands[4]) + 1) >= 0"
3565 "bfi%?\t%0, %3, %V2"
3566 [(set_attr "length" "4")
3567 (set_attr "predicable" "yes")
3568 (set_attr "type" "bfm")]
3569 )
3570
3571 (define_insn "*bfi_alt3"
3572 [(set (match_operand:SI 0 "s_register_operand" "=r")
3573 (ior:SI (and:SI (match_operand:SI 3 "s_register_operand" "r")
3574 (match_operand 4 "const_int_operand" "i"))
3575 (and:SI (match_operand:SI 1 "s_register_operand" "0")
3576 (match_operand 2 "const_int_operand" "i"))))]
3577 "arm_arch_thumb2
3578 && UINTVAL (operands[2]) == ~UINTVAL (operands[4])
3579 && exact_log2 (UINTVAL (operands[4]) + 1) >= 0"
3580 "bfi%?\t%0, %3, %V2"
3581 [(set_attr "length" "4")
3582 (set_attr "predicable" "yes")
3583 (set_attr "type" "bfm")]
3584 )
3585
3586 (define_insn "andsi_notsi_si"
3587 [(set (match_operand:SI 0 "s_register_operand" "=r")
3588 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3589 (match_operand:SI 1 "s_register_operand" "r")))]
3590 "TARGET_32BIT"
3591 "bic%?\\t%0, %1, %2"
3592 [(set_attr "predicable" "yes")
3593 (set_attr "type" "logic_reg")]
3594 )
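
;; Illustration for "andsi_notsi_si" above: BIC computes %1 AND NOT %2, so
;; clearing the low byte of r1 into r0 is simply
;;   bic   r0, r1, #255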
3595
3596 (define_insn "andsi_not_shiftsi_si"
3597 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3598 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
3599 [(match_operand:SI 2 "s_register_operand" "r,r")
3600 (match_operand:SI 3 "shift_amount_operand" "M,r")]))
3601 (match_operand:SI 1 "s_register_operand" "r,r")))]
3602 "TARGET_32BIT"
3603 "bic%?\\t%0, %1, %2%S4"
3604 [(set_attr "predicable" "yes")
3605 (set_attr "shift" "2")
3606 (set_attr "arch" "32,a")
3607 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3608 )
3609
3610 ;; Shifted BICS pattern used to set up the CC status register without
3611 ;; reusing the BICS output.  The pattern restricts the Thumb-2 shift
3612 ;; operand, because BICS in Thumb-2 does not support a shift by register.
3613 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
3614 [(set (reg:CC_NZ CC_REGNUM)
3615 (compare:CC_NZ
3616 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3617 [(match_operand:SI 1 "s_register_operand" "r,r")
3618 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
3619 (match_operand:SI 3 "s_register_operand" "r,r"))
3620 (const_int 0)))
3621 (clobber (match_scratch:SI 4 "=r,r"))]
3622 "TARGET_32BIT"
3623 "bics%?\\t%4, %3, %1%S0"
3624 [(set_attr "predicable" "yes")
3625 (set_attr "arch" "32,a")
3626 (set_attr "conds" "set")
3627 (set_attr "shift" "1")
3628 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3629 )
3630
3631 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the BICS result is also
3632 ;; reused later.
3633 (define_insn "andsi_not_shiftsi_si_scc"
3634 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3635 (compare:CC_NZ
3636 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3637 [(match_operand:SI 1 "s_register_operand" "r,r")
3638 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
3639 (match_operand:SI 3 "s_register_operand" "r,r"))
3640 (const_int 0)))
3641 (set (match_operand:SI 4 "s_register_operand" "=r,r")
3642 (and:SI (not:SI (match_op_dup 0
3643 [(match_dup 1)
3644 (match_dup 2)]))
3645 (match_dup 3)))])]
3646 "TARGET_32BIT"
3647 "bics%?\\t%4, %3, %1%S0"
3648 [(set_attr "predicable" "yes")
3649 (set_attr "arch" "32,a")
3650 (set_attr "conds" "set")
3651 (set_attr "shift" "1")
3652 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3653 )
3654
3655 (define_insn "*andsi_notsi_si_compare0"
3656 [(set (reg:CC_NZ CC_REGNUM)
3657 (compare:CC_NZ
3658 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3659 (match_operand:SI 1 "s_register_operand" "r"))
3660 (const_int 0)))
3661 (set (match_operand:SI 0 "s_register_operand" "=r")
3662 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
3663 "TARGET_32BIT"
3664 "bics\\t%0, %1, %2"
3665 [(set_attr "conds" "set")
3666 (set_attr "type" "logics_shift_reg")]
3667 )
3668
3669 (define_insn "*andsi_notsi_si_compare0_scratch"
3670 [(set (reg:CC_NZ CC_REGNUM)
3671 (compare:CC_NZ
3672 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3673 (match_operand:SI 1 "s_register_operand" "r"))
3674 (const_int 0)))
3675 (clobber (match_scratch:SI 0 "=r"))]
3676 "TARGET_32BIT"
3677 "bics\\t%0, %1, %2"
3678 [(set_attr "conds" "set")
3679 (set_attr "type" "logics_shift_reg")]
3680 )
3681
3682 (define_expand "iorsi3"
3683 [(set (match_operand:SI 0 "s_register_operand")
3684 (ior:SI (match_operand:SI 1 "s_register_operand")
3685 (match_operand:SI 2 "reg_or_int_operand")))]
3686 "TARGET_EITHER"
3687 "
3688 if (CONST_INT_P (operands[2]))
3689 {
3690 if (TARGET_32BIT)
3691 {
3692 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
3693 operands[2] = force_reg (SImode, operands[2]);
3694 else
3695 {
3696 arm_split_constant (IOR, SImode, NULL_RTX,
3697 INTVAL (operands[2]), operands[0],
3698 operands[1],
3699 optimize && can_create_pseudo_p ());
3700 DONE;
3701 }
3702 }
3703 else /* TARGET_THUMB1 */
3704 {
3705 rtx tmp = force_reg (SImode, operands[2]);
3706 if (rtx_equal_p (operands[0], operands[1]))
3707 operands[2] = tmp;
3708 else
3709 {
3710 operands[2] = operands[1];
3711 operands[1] = tmp;
3712 }
3713 }
3714 }
3715 "
3716 )
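
;; Illustration for the constant handling in "iorsi3" above (instruction
;; choice and ordering are up to arm_split_constant, so this is only a
;; sketch): an IOR with 0x00ff00ff, which is not a valid ARM immediate, can
;; be split into two ORRs of valid immediates, e.g.
;;   orr   r0, r1, #0x00ff0000
;;   orr   r0, r0, #0x000000ff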
3717
3718 (define_insn_and_split "*iorsi3_insn"
3719 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
3720 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
3721 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
3722 "TARGET_32BIT"
3723 "@
3724 orr%?\\t%0, %1, %2
3725 orr%?\\t%0, %1, %2
3726 orn%?\\t%0, %1, #%B2
3727 orr%?\\t%0, %1, %2
3728 #"
3729 "TARGET_32BIT
3730 && CONST_INT_P (operands[2])
3731 && !(const_ok_for_arm (INTVAL (operands[2]))
3732 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
3733 [(clobber (const_int 0))]
3734 {
3735 arm_split_constant (IOR, SImode, curr_insn,
3736 INTVAL (operands[2]), operands[0], operands[1], 0);
3737 DONE;
3738 }
3739 [(set_attr "length" "4,4,4,4,16")
3740 (set_attr "arch" "32,t2,t2,32,32")
3741 (set_attr "predicable" "yes")
3742 (set_attr "predicable_short_it" "no,yes,no,no,no")
3743 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
3744 )
3745
3746 (define_peephole2
3747 [(match_scratch:SI 3 "r")
3748 (set (match_operand:SI 0 "arm_general_register_operand" "")
3749 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
3750 (match_operand:SI 2 "const_int_operand" "")))]
3751 "TARGET_ARM
3752 && !const_ok_for_arm (INTVAL (operands[2]))
3753 && const_ok_for_arm (~INTVAL (operands[2]))"
3754 [(set (match_dup 3) (match_dup 2))
3755 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
3756 ""
3757 )
3758
3759 (define_insn "*iorsi3_compare0"
3760 [(set (reg:CC_NZ CC_REGNUM)
3761 (compare:CC_NZ
3762 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3763 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3764 (const_int 0)))
3765 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
3766 (ior:SI (match_dup 1) (match_dup 2)))]
3767 "TARGET_32BIT"
3768 "orrs%?\\t%0, %1, %2"
3769 [(set_attr "conds" "set")
3770 (set_attr "arch" "*,t2,*")
3771 (set_attr "length" "4,2,4")
3772 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
3773 )
3774
3775 (define_insn "*iorsi3_compare0_scratch"
3776 [(set (reg:CC_NZ CC_REGNUM)
3777 (compare:CC_NZ
3778 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3779 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3780 (const_int 0)))
3781 (clobber (match_scratch:SI 0 "=r,l,r"))]
3782 "TARGET_32BIT"
3783 "orrs%?\\t%0, %1, %2"
3784 [(set_attr "conds" "set")
3785 (set_attr "arch" "*,t2,*")
3786 (set_attr "length" "4,2,4")
3787 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
3788 )
3789
3790 (define_expand "xorsi3"
3791 [(set (match_operand:SI 0 "s_register_operand")
3792 (xor:SI (match_operand:SI 1 "s_register_operand")
3793 (match_operand:SI 2 "reg_or_int_operand")))]
3794 "TARGET_EITHER"
3795 "if (CONST_INT_P (operands[2]))
3796 {
3797 if (TARGET_32BIT)
3798 {
3799 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
3800 operands[2] = force_reg (SImode, operands[2]);
3801 else
3802 {
3803 arm_split_constant (XOR, SImode, NULL_RTX,
3804 INTVAL (operands[2]), operands[0],
3805 operands[1],
3806 optimize && can_create_pseudo_p ());
3807 DONE;
3808 }
3809 }
3810 else /* TARGET_THUMB1 */
3811 {
3812 rtx tmp = force_reg (SImode, operands[2]);
3813 if (rtx_equal_p (operands[0], operands[1]))
3814 operands[2] = tmp;
3815 else
3816 {
3817 operands[2] = operands[1];
3818 operands[1] = tmp;
3819 }
3820 }
3821 }"
3822 )
3823
3824 (define_insn_and_split "*arm_xorsi3"
3825 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
3826 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
3827 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
3828 "TARGET_32BIT"
3829 "@
3830 eor%?\\t%0, %1, %2
3831 eor%?\\t%0, %1, %2
3832 eor%?\\t%0, %1, %2
3833 #"
3834 "TARGET_32BIT
3835 && CONST_INT_P (operands[2])
3836 && !const_ok_for_arm (INTVAL (operands[2]))"
3837 [(clobber (const_int 0))]
3838 {
3839 arm_split_constant (XOR, SImode, curr_insn,
3840 INTVAL (operands[2]), operands[0], operands[1], 0);
3841 DONE;
3842 }
3843 [(set_attr "length" "4,4,4,16")
3844 (set_attr "predicable" "yes")
3845 (set_attr "predicable_short_it" "no,yes,no,no")
3846 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
3847 )
3848
3849 (define_insn "*xorsi3_compare0"
3850 [(set (reg:CC_NZ CC_REGNUM)
3851 (compare:CC_NZ (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
3852 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3853 (const_int 0)))
3854 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3855 (xor:SI (match_dup 1) (match_dup 2)))]
3856 "TARGET_32BIT"
3857 "eors%?\\t%0, %1, %2"
3858 [(set_attr "conds" "set")
3859 (set_attr "type" "logics_imm,logics_reg")]
3860 )
3861
3862 (define_insn "*xorsi3_compare0_scratch"
3863 [(set (reg:CC_NZ CC_REGNUM)
3864 (compare:CC_NZ (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
3865 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
3866 (const_int 0)))]
3867 "TARGET_32BIT"
3868 "teq%?\\t%0, %1"
3869 [(set_attr "conds" "set")
3870 (set_attr "type" "logics_imm,logics_reg")]
3871 )
3872
3873 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C)
3874 ; followed by (NOT D), we can sometimes merge the final NOT into one of
3875 ; the following insns.
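;
; The identity used is De Morgan's law:
;   (NOT A AND NOT B) OR C  =  NOT (A OR B) OR C  =  NOT ((A OR B) AND NOT C)
; so with D = (A OR B) AND (NOT C) the original value is NOT D, and that
; final NOT can often be folded into a following MVN/BIC/ORN-style insn.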
3876
3877 (define_split
3878 [(set (match_operand:SI 0 "s_register_operand" "")
3879 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3880 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3881 (match_operand:SI 3 "arm_rhs_operand" "")))
3882 (clobber (match_operand:SI 4 "s_register_operand" ""))]
3883 "TARGET_32BIT"
3884 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3885 (not:SI (match_dup 3))))
3886 (set (match_dup 0) (not:SI (match_dup 4)))]
3887 ""
3888 )
3889
3890 (define_insn_and_split "*andsi_iorsi3_notsi"
3891 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3892 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3893 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3894 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3895 "TARGET_32BIT"
3896 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3897 "&& reload_completed"
3898 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
3899 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
3900 {
3901 /* If operands[3] is a constant make sure to fold the NOT into it
3902 to avoid creating a NOT of a CONST_INT. */
3903 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
3904 if (CONST_INT_P (not_rtx))
3905 {
3906 operands[4] = operands[0];
3907 operands[5] = not_rtx;
3908 }
3909 else
3910 {
3911 operands[5] = operands[0];
3912 operands[4] = not_rtx;
3913 }
3914 }
3915 [(set_attr "length" "8")
3916 (set_attr "ce_count" "2")
3917 (set_attr "predicable" "yes")
3918 (set_attr "type" "multiple")]
3919 )
3920
3921 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3922 ; insns are available?
3923 (define_split
3924 [(set (match_operand:SI 0 "s_register_operand" "")
3925 (match_operator:SI 1 "logical_binary_operator"
3926 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3927 (match_operand:SI 3 "const_int_operand" "")
3928 (match_operand:SI 4 "const_int_operand" ""))
3929 (match_operator:SI 9 "logical_binary_operator"
3930 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3931 (match_operand:SI 6 "const_int_operand" ""))
3932 (match_operand:SI 7 "s_register_operand" "")])]))
3933 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3934 "TARGET_32BIT
3935 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3936 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3937 [(set (match_dup 8)
3938 (match_op_dup 1
3939 [(ashift:SI (match_dup 2) (match_dup 4))
3940 (match_dup 5)]))
3941 (set (match_dup 0)
3942 (match_op_dup 1
3943 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3944 (match_dup 7)]))]
3945 "
3946 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3947 ")
3948
3949 (define_split
3950 [(set (match_operand:SI 0 "s_register_operand" "")
3951 (match_operator:SI 1 "logical_binary_operator"
3952 [(match_operator:SI 9 "logical_binary_operator"
3953 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3954 (match_operand:SI 6 "const_int_operand" ""))
3955 (match_operand:SI 7 "s_register_operand" "")])
3956 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3957 (match_operand:SI 3 "const_int_operand" "")
3958 (match_operand:SI 4 "const_int_operand" ""))]))
3959 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3960 "TARGET_32BIT
3961 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3962 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3963 [(set (match_dup 8)
3964 (match_op_dup 1
3965 [(ashift:SI (match_dup 2) (match_dup 4))
3966 (match_dup 5)]))
3967 (set (match_dup 0)
3968 (match_op_dup 1
3969 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3970 (match_dup 7)]))]
3971 "
3972 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3973 ")
3974
3975 (define_split
3976 [(set (match_operand:SI 0 "s_register_operand" "")
3977 (match_operator:SI 1 "logical_binary_operator"
3978 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3979 (match_operand:SI 3 "const_int_operand" "")
3980 (match_operand:SI 4 "const_int_operand" ""))
3981 (match_operator:SI 9 "logical_binary_operator"
3982 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3983 (match_operand:SI 6 "const_int_operand" ""))
3984 (match_operand:SI 7 "s_register_operand" "")])]))
3985 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3986 "TARGET_32BIT
3987 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3988 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3989 [(set (match_dup 8)
3990 (match_op_dup 1
3991 [(ashift:SI (match_dup 2) (match_dup 4))
3992 (match_dup 5)]))
3993 (set (match_dup 0)
3994 (match_op_dup 1
3995 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3996 (match_dup 7)]))]
3997 "
3998 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3999 ")
4000
4001 (define_split
4002 [(set (match_operand:SI 0 "s_register_operand" "")
4003 (match_operator:SI 1 "logical_binary_operator"
4004 [(match_operator:SI 9 "logical_binary_operator"
4005 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
4006 (match_operand:SI 6 "const_int_operand" ""))
4007 (match_operand:SI 7 "s_register_operand" "")])
4008 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
4009 (match_operand:SI 3 "const_int_operand" "")
4010 (match_operand:SI 4 "const_int_operand" ""))]))
4011 (clobber (match_operand:SI 8 "s_register_operand" ""))]
4012 "TARGET_32BIT
4013 && GET_CODE (operands[1]) == GET_CODE (operands[9])
4014 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
4015 [(set (match_dup 8)
4016 (match_op_dup 1
4017 [(ashift:SI (match_dup 2) (match_dup 4))
4018 (match_dup 5)]))
4019 (set (match_dup 0)
4020 (match_op_dup 1
4021 [(ashiftrt:SI (match_dup 8) (match_dup 6))
4022 (match_dup 7)]))]
4023 "
4024 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
4025 ")
4026 \f
4027
4028 ;; Minimum and maximum insns
4029
4030 (define_expand "smaxsi3"
4031 [(parallel [
4032 (set (match_operand:SI 0 "s_register_operand")
4033 (smax:SI (match_operand:SI 1 "s_register_operand")
4034 (match_operand:SI 2 "arm_rhs_operand")))
4035 (clobber (reg:CC CC_REGNUM))])]
4036 "TARGET_32BIT"
4037 "
4038 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
4039 {
4040 /* No need for a clobber of the condition code register here. */
4041 emit_insn (gen_rtx_SET (operands[0],
4042 gen_rtx_SMAX (SImode, operands[1],
4043 operands[2])));
4044 DONE;
4045 }
4046 ")
4047
4048 (define_insn "*smax_0"
4049 [(set (match_operand:SI 0 "s_register_operand" "=r")
4050 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
4051 (const_int 0)))]
4052 "TARGET_32BIT"
4053 "bic%?\\t%0, %1, %1, asr #31"
4054 [(set_attr "predicable" "yes")
4055 (set_attr "type" "logic_shift_reg")]
4056 )
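
;; Why "*smax_0" above works: %1, asr #31 is 0 when %1 >= 0 and all ones when
;; %1 < 0, so BIC either leaves %1 unchanged or clears it to 0, i.e.
;; smax (%1, 0).  For example, %1 = -5 gives an all-ones mask and result 0,
;; while %1 = 7 gives a zero mask and result 7.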
4057
4058 (define_insn "*smax_m1"
4059 [(set (match_operand:SI 0 "s_register_operand" "=r")
4060 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
4061 (const_int -1)))]
4062 "TARGET_32BIT"
4063 "orr%?\\t%0, %1, %1, asr #31"
4064 [(set_attr "predicable" "yes")
4065 (set_attr "type" "logic_shift_reg")]
4066 )
4067
4068 (define_insn_and_split "*arm_smax_insn"
4069 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4070 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
4071 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
4072 (clobber (reg:CC CC_REGNUM))]
4073 "TARGET_ARM"
4074 "#"
4075 ; cmp\\t%1, %2\;movlt\\t%0, %2
4076 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
4077 "TARGET_ARM"
4078 [(set (reg:CC CC_REGNUM)
4079 (compare:CC (match_dup 1) (match_dup 2)))
4080 (set (match_dup 0)
4081 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
4082 (match_dup 1)
4083 (match_dup 2)))]
4084 ""
4085 [(set_attr "conds" "clob")
4086 (set_attr "length" "8,12")
4087 (set_attr "type" "multiple")]
4088 )
4089
4090 (define_expand "sminsi3"
4091 [(parallel [
4092 (set (match_operand:SI 0 "s_register_operand")
4093 (smin:SI (match_operand:SI 1 "s_register_operand")
4094 (match_operand:SI 2 "arm_rhs_operand")))
4095 (clobber (reg:CC CC_REGNUM))])]
4096 "TARGET_32BIT"
4097 "
4098 if (operands[2] == const0_rtx)
4099 {
4100 /* No need for a clobber of the condition code register here. */
4101 emit_insn (gen_rtx_SET (operands[0],
4102 gen_rtx_SMIN (SImode, operands[1],
4103 operands[2])));
4104 DONE;
4105 }
4106 ")
4107
4108 (define_insn "*smin_0"
4109 [(set (match_operand:SI 0 "s_register_operand" "=r")
4110 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
4111 (const_int 0)))]
4112 "TARGET_32BIT"
4113 "and%?\\t%0, %1, %1, asr #31"
4114 [(set_attr "predicable" "yes")
4115 (set_attr "type" "logic_shift_reg")]
4116 )
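
;; Why "*smin_0" above works: the same all-ones/all-zeros mask %1, asr #31,
;; ANDed with %1, yields %1 when it is negative and 0 otherwise, i.e.
;; smin (%1, 0).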
4117
4118 (define_insn_and_split "*arm_smin_insn"
4119 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4120 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
4121 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
4122 (clobber (reg:CC CC_REGNUM))]
4123 "TARGET_ARM"
4124 "#"
4125 ; cmp\\t%1, %2\;movge\\t%0, %2
4126 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
4127 "TARGET_ARM"
4128 [(set (reg:CC CC_REGNUM)
4129 (compare:CC (match_dup 1) (match_dup 2)))
4130 (set (match_dup 0)
4131 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
4132 (match_dup 1)
4133 (match_dup 2)))]
4134 ""
4135 [(set_attr "conds" "clob")
4136 (set_attr "length" "8,12")
4137 (set_attr "type" "multiple,multiple")]
4138 )
4139
4140 (define_expand "umaxsi3"
4141 [(parallel [
4142 (set (match_operand:SI 0 "s_register_operand")
4143 (umax:SI (match_operand:SI 1 "s_register_operand")
4144 (match_operand:SI 2 "arm_rhs_operand")))
4145 (clobber (reg:CC CC_REGNUM))])]
4146 "TARGET_32BIT"
4147 ""
4148 )
4149
4150 (define_insn_and_split "*arm_umaxsi3"
4151 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
4152 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
4153 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
4154 (clobber (reg:CC CC_REGNUM))]
4155 "TARGET_ARM"
4156 "#"
4157 ; cmp\\t%1, %2\;movcc\\t%0, %2
4158 ; cmp\\t%1, %2\;movcs\\t%0, %1
4159 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
4160 "TARGET_ARM"
4161 [(set (reg:CC CC_REGNUM)
4162 (compare:CC (match_dup 1) (match_dup 2)))
4163 (set (match_dup 0)
4164 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
4165 (match_dup 1)
4166 (match_dup 2)))]
4167 ""
4168 [(set_attr "conds" "clob")
4169 (set_attr "length" "8,8,12")
4170 (set_attr "type" "multiple")]
4171 )
4172
4173 (define_expand "uminsi3"
4174 [(parallel [
4175 (set (match_operand:SI 0 "s_register_operand")
4176 (umin:SI (match_operand:SI 1 "s_register_operand")
4177 (match_operand:SI 2 "arm_rhs_operand")))
4178 (clobber (reg:CC CC_REGNUM))])]
4179 "TARGET_32BIT"
4180 ""
4181 )
4182
4183 (define_insn_and_split "*arm_uminsi3"
4184 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
4185 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
4186 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
4187 (clobber (reg:CC CC_REGNUM))]
4188 "TARGET_ARM"
4189 "#"
4190 ; cmp\\t%1, %2\;movcs\\t%0, %2
4191 ; cmp\\t%1, %2\;movcc\\t%0, %1
4192 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
4193 "TARGET_ARM"
4194 [(set (reg:CC CC_REGNUM)
4195 (compare:CC (match_dup 1) (match_dup 2)))
4196 (set (match_dup 0)
4197 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
4198 (match_dup 1)
4199 (match_dup 2)))]
4200 ""
4201 [(set_attr "conds" "clob")
4202 (set_attr "length" "8,8,12")
4203 (set_attr "type" "multiple")]
4204 )
4205
4206 (define_insn "*store_minmaxsi"
4207 [(set (match_operand:SI 0 "memory_operand" "=m")
4208 (match_operator:SI 3 "minmax_operator"
4209 [(match_operand:SI 1 "s_register_operand" "r")
4210 (match_operand:SI 2 "s_register_operand" "r")]))
4211 (clobber (reg:CC CC_REGNUM))]
4212 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
4213 "*
4214 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
4215 operands[1], operands[2]);
4216 output_asm_insn (\"cmp\\t%1, %2\", operands);
4217 if (TARGET_THUMB2)
4218 output_asm_insn (\"ite\t%d3\", operands);
4219 output_asm_insn (\"str%d3\\t%1, %0\", operands);
4220 output_asm_insn (\"str%D3\\t%2, %0\", operands);
4221 return \"\";
4222 "
4223 [(set_attr "conds" "clob")
4224 (set (attr "length")
4225 (if_then_else (eq_attr "is_thumb" "yes")
4226 (const_int 14)
4227 (const_int 12)))
4228 (set_attr "type" "store_4")]
4229 )
4230
4231 ; Reject the frame pointer in operand[1], since reloading this after
4232 ; it has been eliminated can cause carnage.
4233 (define_insn "*minmax_arithsi"
4234 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4235 (match_operator:SI 4 "shiftable_operator"
4236 [(match_operator:SI 5 "minmax_operator"
4237 [(match_operand:SI 2 "s_register_operand" "r,r")
4238 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
4239 (match_operand:SI 1 "s_register_operand" "0,?r")]))
4240 (clobber (reg:CC CC_REGNUM))]
4241 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
4242 "*
4243 {
4244 enum rtx_code code = GET_CODE (operands[4]);
4245 bool need_else;
4246
4247 if (which_alternative != 0 || operands[3] != const0_rtx
4248 || (code != PLUS && code != IOR && code != XOR))
4249 need_else = true;
4250 else
4251 need_else = false;
4252
4253 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
4254 operands[2], operands[3]);
4255 output_asm_insn (\"cmp\\t%2, %3\", operands);
4256 if (TARGET_THUMB2)
4257 {
4258 if (need_else)
4259 output_asm_insn (\"ite\\t%d5\", operands);
4260 else
4261 output_asm_insn (\"it\\t%d5\", operands);
4262 }
4263 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
4264 if (need_else)
4265 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
4266 return \"\";
4267 }"
4268 [(set_attr "conds" "clob")
4269 (set (attr "length")
4270 (if_then_else (eq_attr "is_thumb" "yes")
4271 (const_int 14)
4272 (const_int 12)))
4273 (set_attr "type" "multiple")]
4274 )
4275
4276 ; Reject the frame pointer in operand[1], since reloading this after
4277 ; it has been eliminated can cause carnage.
4278 (define_insn_and_split "*minmax_arithsi_non_canon"
4279 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
4280 (minus:SI
4281 (match_operand:SI 1 "s_register_operand" "0,?Ts")
4282 (match_operator:SI 4 "minmax_operator"
4283 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
4284 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
4285 (clobber (reg:CC CC_REGNUM))]
4286 "TARGET_32BIT && !arm_eliminable_register (operands[1])
4287 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
4288 "#"
4289 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
4290 [(set (reg:CC CC_REGNUM)
4291 (compare:CC (match_dup 2) (match_dup 3)))
4292
4293 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
4294 (set (match_dup 0)
4295 (minus:SI (match_dup 1)
4296 (match_dup 2))))
4297 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
4298 (set (match_dup 0)
4299 (match_dup 6)))]
4300 {
4301 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
4302 operands[2], operands[3]);
4303 enum rtx_code rc = minmax_code (operands[4]);
4304 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
4305 operands[2], operands[3]);
4306
4307 if (mode == CCFPmode || mode == CCFPEmode)
4308 rc = reverse_condition_maybe_unordered (rc);
4309 else
4310 rc = reverse_condition (rc);
4311 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
4312 if (CONST_INT_P (operands[3]))
4313 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
4314 else
4315 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
4316 }
4317 [(set_attr "conds" "clob")
4318 (set (attr "length")
4319 (if_then_else (eq_attr "is_thumb" "yes")
4320 (const_int 14)
4321 (const_int 12)))
4322 (set_attr "type" "multiple")]
4323 )
4324
4325
4326 (define_expand "arm_<ss_op>"
4327 [(set (match_operand:SI 0 "s_register_operand")
4328 (SSPLUSMINUS:SI (match_operand:SI 1 "s_register_operand")
4329 (match_operand:SI 2 "s_register_operand")))]
4330 "TARGET_DSP_MULTIPLY"
4331 {
4332 if (ARM_Q_BIT_READ)
4333 emit_insn (gen_arm_<ss_op>_setq_insn (operands[0],
4334 operands[1], operands[2]));
4335 else
4336 emit_insn (gen_arm_<ss_op>_insn (operands[0], operands[1], operands[2]));
4337 DONE;
4338 }
4339 )
4340
4341 (define_insn "arm_<ss_op><add_clobber_q_name>_insn"
4342 [(set (match_operand:SI 0 "s_register_operand" "=r")
4343 (SSPLUSMINUS:SI (match_operand:SI 1 "s_register_operand" "r")
4344 (match_operand:SI 2 "s_register_operand" "r")))]
4345 "TARGET_DSP_MULTIPLY && <add_clobber_q_pred>"
4346 "<ss_op>%?\t%0, %1, %2"
4347 [(set_attr "predicable" "yes")
4348 (set_attr "type" "alu_dsp_reg")]
4349 )
4350
4351 (define_code_iterator SAT [smin smax])
4352 (define_code_attr SATrev [(smin "smax") (smax "smin")])
4353 (define_code_attr SATlo [(smin "1") (smax "2")])
4354 (define_code_attr SAThi [(smin "2") (smax "1")])
4355
4356 (define_expand "arm_ssat"
4357 [(match_operand:SI 0 "s_register_operand")
4358 (match_operand:SI 1 "s_register_operand")
4359 (match_operand:SI 2 "const_int_operand")]
4360 "TARGET_32BIT && arm_arch6"
4361 {
4362 HOST_WIDE_INT val = INTVAL (operands[2]);
4363 /* The builtin checking code should have ensured the right
4364 range for the immediate. */
4365 gcc_assert (IN_RANGE (val, 1, 32));
4366 HOST_WIDE_INT upper_bound = (HOST_WIDE_INT_1 << (val - 1)) - 1;
4367 HOST_WIDE_INT lower_bound = -upper_bound - 1;
4368 rtx up_rtx = gen_int_mode (upper_bound, SImode);
4369 rtx lo_rtx = gen_int_mode (lower_bound, SImode);
4370 if (ARM_Q_BIT_READ)
4371 emit_insn (gen_satsi_smin_setq (operands[0], lo_rtx,
4372 up_rtx, operands[1]));
4373 else
4374 emit_insn (gen_satsi_smin (operands[0], lo_rtx, up_rtx, operands[1]));
4375 DONE;
4376 }
4377 )
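
;; Worked example for "arm_ssat" above: with operand 2 == 8, upper_bound is
;; (1 << 7) - 1 = 127 and lower_bound is -128, so the value is clamped to
;; [-128, 127] and the emitted instruction has the form
;;   ssat  r0, #8, r1            @ register names illustrative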
4378
4379 (define_expand "arm_usat"
4380 [(match_operand:SI 0 "s_register_operand")
4381 (match_operand:SI 1 "s_register_operand")
4382 (match_operand:SI 2 "const_int_operand")]
4383 "TARGET_32BIT && arm_arch6"
4384 {
4385 HOST_WIDE_INT val = INTVAL (operands[2]);
4386 /* The builtin checking code should have ensured the right
4387 range for the immediate. */
4388 gcc_assert (IN_RANGE (val, 0, 31));
4389 HOST_WIDE_INT upper_bound = (HOST_WIDE_INT_1 << val) - 1;
4390 rtx up_rtx = gen_int_mode (upper_bound, SImode);
4391 rtx lo_rtx = CONST0_RTX (SImode);
4392 if (ARM_Q_BIT_READ)
4393 emit_insn (gen_satsi_smin_setq (operands[0], lo_rtx, up_rtx,
4394 operands[1]));
4395 else
4396 emit_insn (gen_satsi_smin (operands[0], lo_rtx, up_rtx, operands[1]));
4397 DONE;
4398 }
4399 )
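
;; Worked example for "arm_usat" above: with operand 2 == 8, upper_bound is
;; (1 << 8) - 1 = 255, so the value is clamped to [0, 255], giving an
;; instruction of the form usat r0, #8, r1 (register names illustrative).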
4400
4401 (define_insn "arm_get_apsr"
4402 [(set (match_operand:SI 0 "s_register_operand" "=r")
4403 (unspec:SI [(reg:CC APSRQ_REGNUM)] UNSPEC_APSR_READ))]
4404 "TARGET_ARM_QBIT"
4405 "mrs%?\t%0, APSR"
4406 [(set_attr "predicable" "yes")
4407 (set_attr "conds" "use")]
4408 )
4409
4410 (define_insn "arm_set_apsr"
4411 [(set (reg:CC APSRQ_REGNUM)
4412 (unspec_volatile:CC
4413 [(match_operand:SI 0 "s_register_operand" "r")] VUNSPEC_APSR_WRITE))]
4414 "TARGET_ARM_QBIT"
4415 "msr%?\tAPSR_nzcvq, %0"
4416 [(set_attr "predicable" "yes")
4417 (set_attr "conds" "set")]
4418 )
4419
4420 ;; Read the APSR and extract the Q bit (bit 27)
4421 (define_expand "arm_saturation_occurred"
4422 [(match_operand:SI 0 "s_register_operand")]
4423 "TARGET_ARM_QBIT"
4424 {
4425 rtx apsr = gen_reg_rtx (SImode);
4426 emit_insn (gen_arm_get_apsr (apsr));
4427 emit_insn (gen_extzv (operands[0], apsr, CONST1_RTX (SImode),
4428 gen_int_mode (27, SImode)));
4429 DONE;
4430 }
4431 )
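
;; A sketch of what "arm_saturation_occurred" above expands to, assuming the
;; Thumb-2 ubfx form of the bit extraction is used (register names are
;; illustrative):
;;   mrs   r1, APSR
;;   ubfx  r0, r1, #27, #1       @ r0 = Q flag, 1 if saturation occurred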
4432
4433 ;; Read the APSR and set the Q bit (bit position 27) according to operand 0
4434 (define_expand "arm_set_saturation"
4435 [(match_operand:SI 0 "reg_or_int_operand")]
4436 "TARGET_ARM_QBIT"
4437 {
4438 rtx apsr = gen_reg_rtx (SImode);
4439 emit_insn (gen_arm_get_apsr (apsr));
4440 rtx to_insert = gen_reg_rtx (SImode);
4441 if (CONST_INT_P (operands[0]))
4442 emit_move_insn (to_insert, operands[0] == CONST0_RTX (SImode)
4443 ? CONST0_RTX (SImode) : CONST1_RTX (SImode));
4444 else
4445 {
4446 rtx cmp = gen_rtx_NE (SImode, operands[0], CONST0_RTX (SImode));
4447 emit_insn (gen_cstoresi4 (to_insert, cmp, operands[0],
4448 CONST0_RTX (SImode)));
4449 }
4450 emit_insn (gen_insv (apsr, CONST1_RTX (SImode),
4451 gen_int_mode (27, SImode), to_insert));
4452 emit_insn (gen_arm_set_apsr (apsr));
4453 DONE;
4454 }
4455 )
4456
4457 (define_insn "satsi_<SAT:code><add_clobber_q_name>"
4458 [(set (match_operand:SI 0 "s_register_operand" "=r")
4459 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
4460 (match_operand:SI 1 "const_int_operand" "i"))
4461 (match_operand:SI 2 "const_int_operand" "i")))]
4462 "TARGET_32BIT && arm_arch6 && <add_clobber_q_pred>
4463 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4464 {
4465 int mask;
4466 bool signed_sat;
4467 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4468 &mask, &signed_sat))
4469 gcc_unreachable ();
4470
4471 operands[1] = GEN_INT (mask);
4472 if (signed_sat)
4473 return "ssat%?\t%0, %1, %3";
4474 else
4475 return "usat%?\t%0, %1, %3";
4476 }
4477 [(set_attr "predicable" "yes")
4478 (set_attr "type" "alus_imm")]
4479 )
4480
4481 (define_insn "*satsi_<SAT:code>_shift"
4482 [(set (match_operand:SI 0 "s_register_operand" "=r")
4483 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
4484 [(match_operand:SI 4 "s_register_operand" "r")
4485 (match_operand:SI 5 "const_int_operand" "i")])
4486 (match_operand:SI 1 "const_int_operand" "i"))
4487 (match_operand:SI 2 "const_int_operand" "i")))]
4488 "TARGET_32BIT && arm_arch6 && !ARM_Q_BIT_READ
4489 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4490 {
4491 int mask;
4492 bool signed_sat;
4493 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4494 &mask, &signed_sat))
4495 gcc_unreachable ();
4496
4497 operands[1] = GEN_INT (mask);
4498 if (signed_sat)
4499 return "ssat%?\t%0, %1, %4%S3";
4500 else
4501 return "usat%?\t%0, %1, %4%S3";
4502 }
4503 [(set_attr "predicable" "yes")
4504 (set_attr "shift" "3")
4505 (set_attr "type" "logic_shift_reg")])
4506 \f
4507 ;; Custom Datapath Extension insns.
4508 (define_insn "arm_cx1<mode>"
4509 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4510 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4511 (match_operand:SI 2 "const_int_ccde1_operand" "i")]
4512 UNSPEC_CDE))]
4513 "TARGET_CDE"
4514 "cx1<cde_suffix>\\tp%c1, <cde_dest>, %2"
4515 [(set_attr "type" "coproc")]
4516 )
4517
4518 (define_insn "arm_cx1a<mode>"
4519 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4520 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4521 (match_operand:SIDI 2 "s_register_operand" "0")
4522 (match_operand:SI 3 "const_int_ccde1_operand" "i")]
4523 UNSPEC_CDEA))]
4524 "TARGET_CDE"
4525 "cx1<cde_suffix>a\\tp%c1, <cde_dest>, %3"
4526 [(set_attr "type" "coproc")]
4527 )
4528
4529 (define_insn "arm_cx2<mode>"
4530 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4531 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4532 (match_operand:SI 2 "s_register_operand" "r")
4533 (match_operand:SI 3 "const_int_ccde2_operand" "i")]
4534 UNSPEC_CDE))]
4535 "TARGET_CDE"
4536 "cx2<cde_suffix>\\tp%c1, <cde_dest>, %2, %3"
4537 [(set_attr "type" "coproc")]
4538 )
4539
4540 (define_insn "arm_cx2a<mode>"
4541 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4542 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4543 (match_operand:SIDI 2 "s_register_operand" "0")
4544 (match_operand:SI 3 "s_register_operand" "r")
4545 (match_operand:SI 4 "const_int_ccde2_operand" "i")]
4546 UNSPEC_CDEA))]
4547 "TARGET_CDE"
4548 "cx2<cde_suffix>a\\tp%c1, <cde_dest>, %3, %4"
4549 [(set_attr "type" "coproc")]
4550 )
4551
4552 (define_insn "arm_cx3<mode>"
4553 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4554 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4555 (match_operand:SI 2 "s_register_operand" "r")
4556 (match_operand:SI 3 "s_register_operand" "r")
4557 (match_operand:SI 4 "const_int_ccde3_operand" "i")]
4558 UNSPEC_CDE))]
4559 "TARGET_CDE"
4560 "cx3<cde_suffix>\\tp%c1, <cde_dest>, %2, %3, %4"
4561 [(set_attr "type" "coproc")]
4562 )
4563
4564 (define_insn "arm_cx3a<mode>"
4565 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4566 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4567 (match_operand:SIDI 2 "s_register_operand" "0")
4568 (match_operand:SI 3 "s_register_operand" "r")
4569 (match_operand:SI 4 "s_register_operand" "r")
4570 (match_operand:SI 5 "const_int_ccde3_operand" "i")]
4571 UNSPEC_CDEA))]
4572 "TARGET_CDE"
4573 "cx3<cde_suffix>a\\tp%c1, <cde_dest>, %3, %4, %5"
4574 [(set_attr "type" "coproc")]
4575 )
4576 \f
4577 ;; Shift and rotation insns
4578
4579 (define_expand "ashldi3"
4580 [(set (match_operand:DI 0 "s_register_operand")
4581 (ashift:DI (match_operand:DI 1 "s_register_operand")
4582 (match_operand:SI 2 "reg_or_int_operand")))]
4583 "TARGET_32BIT"
4584 "
4585 if (TARGET_HAVE_MVE && !BYTES_BIG_ENDIAN)
4586 {
4587 if (!reg_or_int_operand (operands[2], SImode))
4588 operands[2] = force_reg (SImode, operands[2]);
4589
4590 /* Armv8.1-M Mainline double shifts are not expanded. */
4591 if (arm_reg_or_long_shift_imm (operands[2], GET_MODE (operands[2]))
4592 && (REG_P (operands[2]) || INTVAL (operands[2]) != 32))
4593 {
4594 if (!reg_overlap_mentioned_p (operands[0], operands[1]))
4595 emit_insn (gen_movdi (operands[0], operands[1]));
4596
4597 emit_insn (gen_thumb2_lsll (operands[0], operands[2]));
4598 DONE;
4599 }
4600 }
4601
4602 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
4603 operands[2], gen_reg_rtx (SImode),
4604 gen_reg_rtx (SImode));
4605 DONE;
4606 ")
4607
4608 (define_expand "ashlsi3"
4609 [(set (match_operand:SI 0 "s_register_operand")
4610 (ashift:SI (match_operand:SI 1 "s_register_operand")
4611 (match_operand:SI 2 "arm_rhs_operand")))]
4612 "TARGET_EITHER"
4613 "
4614 if (CONST_INT_P (operands[2])
4615 && (UINTVAL (operands[2])) > 31)
4616 {
4617 emit_insn (gen_movsi (operands[0], const0_rtx));
4618 DONE;
4619 }
4620 "
4621 )
4622
4623 (define_expand "ashrdi3"
4624 [(set (match_operand:DI 0 "s_register_operand")
4625 (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
4626 (match_operand:SI 2 "reg_or_int_operand")))]
4627 "TARGET_32BIT"
4628 "
4629 /* Armv8.1-M Mainline double shifts are not expanded. */
4630 if (TARGET_HAVE_MVE && !BYTES_BIG_ENDIAN
4631 && arm_reg_or_long_shift_imm (operands[2], GET_MODE (operands[2])))
4632 {
4633 if (!reg_overlap_mentioned_p (operands[0], operands[1]))
4634 emit_insn (gen_movdi (operands[0], operands[1]));
4635
4636 emit_insn (gen_thumb2_asrl (operands[0], operands[2]));
4637 DONE;
4638 }
4639
4640 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
4641 operands[2], gen_reg_rtx (SImode),
4642 gen_reg_rtx (SImode));
4643 DONE;
4644 ")
4645
4646 (define_expand "ashrsi3"
4647 [(set (match_operand:SI 0 "s_register_operand")
4648 (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
4649 (match_operand:SI 2 "arm_rhs_operand")))]
4650 "TARGET_EITHER"
4651 "
4652 if (CONST_INT_P (operands[2])
4653 && UINTVAL (operands[2]) > 31)
4654 operands[2] = GEN_INT (31);
4655 "
4656 )
4657
4658 (define_expand "lshrdi3"
4659 [(set (match_operand:DI 0 "s_register_operand")
4660 (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
4661 (match_operand:SI 2 "reg_or_int_operand")))]
4662 "TARGET_32BIT"
4663 "
4664 /* Armv8.1-M Mainline double shifts are not expanded. */
4665 if (TARGET_HAVE_MVE && !BYTES_BIG_ENDIAN
4666 && long_shift_imm (operands[2], GET_MODE (operands[2])))
4667 {
4668 if (!reg_overlap_mentioned_p (operands[0], operands[1]))
4669 emit_insn (gen_movdi (operands[0], operands[1]));
4670
4671 emit_insn (gen_thumb2_lsrl (operands[0], operands[2]));
4672 DONE;
4673 }
4674
4675 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
4676 operands[2], gen_reg_rtx (SImode),
4677 gen_reg_rtx (SImode));
4678 DONE;
4679 ")
4680
4681 (define_expand "lshrsi3"
4682 [(set (match_operand:SI 0 "s_register_operand")
4683 (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
4684 (match_operand:SI 2 "arm_rhs_operand")))]
4685 "TARGET_EITHER"
4686 "
4687 if (CONST_INT_P (operands[2])
4688 && (UINTVAL (operands[2])) > 31)
4689 {
4690 emit_insn (gen_movsi (operands[0], const0_rtx));
4691 DONE;
4692 }
4693 "
4694 )
4695
4696 (define_expand "rotlsi3"
4697 [(set (match_operand:SI 0 "s_register_operand")
4698 (rotatert:SI (match_operand:SI 1 "s_register_operand")
4699 (match_operand:SI 2 "reg_or_int_operand")))]
4700 "TARGET_32BIT"
4701 "
4702 if (CONST_INT_P (operands[2]))
4703 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
4704 else
4705 {
4706 rtx reg = gen_reg_rtx (SImode);
4707 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
4708 operands[2] = reg;
4709 }
4710 "
4711 )
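
;; Worked example for "rotlsi3" above: a rotate left by 5 is rewritten as a
;; rotate right by (32 - 5) % 32 = 27; for a variable count n the expander
;; computes 32 - n into a scratch register and rotates right by that.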
4712
4713 (define_expand "rotrsi3"
4714 [(set (match_operand:SI 0 "s_register_operand")
4715 (rotatert:SI (match_operand:SI 1 "s_register_operand")
4716 (match_operand:SI 2 "arm_rhs_operand")))]
4717 "TARGET_EITHER"
4718 "
4719 if (TARGET_32BIT)
4720 {
4721 if (CONST_INT_P (operands[2])
4722 && UINTVAL (operands[2]) > 31)
4723 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
4724 }
4725 else /* TARGET_THUMB1 */
4726 {
4727 if (CONST_INT_P (operands[2]))
4728 operands[2] = force_reg (SImode, operands[2]);
4729 }
4730 "
4731 )
4732
4733 (define_insn "*arm_shiftsi3"
4734 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
4735 (match_operator:SI 3 "shift_operator"
4736 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
4737 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
4738 "TARGET_32BIT"
4739 "* return arm_output_shift (operands, 0);"
4740 [(set_attr "predicable" "yes")
4741 (set_attr "arch" "t2,t2,*,*")
4742 (set_attr "predicable_short_it" "yes,yes,no,no")
4743 (set_attr "length" "4")
4744 (set_attr "shift" "1")
4745 (set_attr "autodetect_type" "alu_shift_operator3")]
4746 )
4747
4748 (define_insn "*shiftsi3_compare0"
4749 [(set (reg:CC_NZ CC_REGNUM)
4750 (compare:CC_NZ (match_operator:SI 3 "shift_operator"
4751 [(match_operand:SI 1 "s_register_operand" "r,r")
4752 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4753 (const_int 0)))
4754 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4755 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
4756 "TARGET_32BIT"
4757 "* return arm_output_shift (operands, 1);"
4758 [(set_attr "conds" "set")
4759 (set_attr "shift" "1")
4760 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
4761 )
4762
4763 (define_insn "*shiftsi3_compare0_scratch"
4764 [(set (reg:CC_NZ CC_REGNUM)
4765 (compare:CC_NZ (match_operator:SI 3 "shift_operator"
4766 [(match_operand:SI 1 "s_register_operand" "r,r")
4767 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4768 (const_int 0)))
4769 (clobber (match_scratch:SI 0 "=r,r"))]
4770 "TARGET_32BIT"
4771 "* return arm_output_shift (operands, 1);"
4772 [(set_attr "conds" "set")
4773 (set_attr "shift" "1")
4774 (set_attr "type" "shift_imm,shift_reg")]
4775 )
4776
4777 (define_insn "*not_shiftsi"
4778 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4779 (not:SI (match_operator:SI 3 "shift_operator"
4780 [(match_operand:SI 1 "s_register_operand" "r,r")
4781 (match_operand:SI 2 "shift_amount_operand" "M,r")])))]
4782 "TARGET_32BIT"
4783 "mvn%?\\t%0, %1%S3"
4784 [(set_attr "predicable" "yes")
4785 (set_attr "shift" "1")
4786 (set_attr "arch" "32,a")
4787 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4788
4789 (define_insn "*not_shiftsi_compare0"
4790 [(set (reg:CC_NZ CC_REGNUM)
4791 (compare:CC_NZ
4792 (not:SI (match_operator:SI 3 "shift_operator"
4793 [(match_operand:SI 1 "s_register_operand" "r,r")
4794 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
4795 (const_int 0)))
4796 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4797 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
4798 "TARGET_32BIT"
4799 "mvns%?\\t%0, %1%S3"
4800 [(set_attr "conds" "set")
4801 (set_attr "shift" "1")
4802 (set_attr "arch" "32,a")
4803 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4804
4805 (define_insn "*not_shiftsi_compare0_scratch"
4806 [(set (reg:CC_NZ CC_REGNUM)
4807 (compare:CC_NZ
4808 (not:SI (match_operator:SI 3 "shift_operator"
4809 [(match_operand:SI 1 "s_register_operand" "r,r")
4810 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
4811 (const_int 0)))
4812 (clobber (match_scratch:SI 0 "=r,r"))]
4813 "TARGET_32BIT"
4814 "mvns%?\\t%0, %1%S3"
4815 [(set_attr "conds" "set")
4816 (set_attr "shift" "1")
4817 (set_attr "arch" "32,a")
4818 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4819
4820 ;; We don't really have extzv, but defining this using shifts helps
4821 ;; to reduce register pressure later on.
4822
4823 (define_expand "extzv"
4824 [(set (match_operand 0 "s_register_operand")
4825 (zero_extract (match_operand 1 "nonimmediate_operand")
4826 (match_operand 2 "const_int_operand")
4827 (match_operand 3 "const_int_operand")))]
4828 "TARGET_THUMB1 || arm_arch_thumb2"
4829 "
4830 {
4831 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
4832 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
4833
4834 if (arm_arch_thumb2)
4835 {
4836 HOST_WIDE_INT width = INTVAL (operands[2]);
4837 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4838
4839 if (unaligned_access && MEM_P (operands[1])
4840 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
4841 {
4842 rtx base_addr;
4843
4844 if (BYTES_BIG_ENDIAN)
4845 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
4846 - bitpos;
4847
4848 if (width == 32)
4849 {
4850 base_addr = adjust_address (operands[1], SImode,
4851 bitpos / BITS_PER_UNIT);
4852 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4853 }
4854 else
4855 {
4856 rtx dest = operands[0];
4857 rtx tmp = gen_reg_rtx (SImode);
4858
4859 /* We may get a paradoxical subreg here. Strip it off. */
4860 if (GET_CODE (dest) == SUBREG
4861 && GET_MODE (dest) == SImode
4862 && GET_MODE (SUBREG_REG (dest)) == HImode)
4863 dest = SUBREG_REG (dest);
4864
4865 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4866 FAIL;
4867
4868 base_addr = adjust_address (operands[1], HImode,
4869 bitpos / BITS_PER_UNIT);
4870 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
4871 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4872 }
4873 DONE;
4874 }
4875 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
4876 {
4877 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
4878 operands[3]));
4879 DONE;
4880 }
4881 else
4882 FAIL;
4883 }
4884
4885 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4886 FAIL;
4887
4888 operands[3] = GEN_INT (rshift);
4889
4890 if (lshift == 0)
4891 {
4892 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
4893 DONE;
4894 }
4895
4896 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
4897 operands[3], gen_reg_rtx (SImode)));
4898 DONE;
4899 }"
4900 )
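
;; Worked example for the shift fallback in "extzv" above: extracting a
;; 5-bit field starting at bit 3 uses lshift = 32 - 5 - 3 = 24 and
;; rshift = 32 - 5 = 27, i.e. (x << 24) >> 27 with logical shifts, which
;; leaves the field zero-extended in bits [4:0].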
4901
4902 ;; Helper for extzv, for the Thumb-1 register-shifts case.
4903
4904 (define_expand "extzv_t1"
4905 [(set (match_operand:SI 4 "s_register_operand")
4906 (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
4907 (match_operand:SI 2 "const_int_operand")))
4908 (set (match_operand:SI 0 "s_register_operand")
4909 (lshiftrt:SI (match_dup 4)
4910 (match_operand:SI 3 "const_int_operand")))]
4911 "TARGET_THUMB1"
4912 "")
4913
4914 (define_expand "extv"
4915 [(set (match_operand 0 "s_register_operand")
4916 (sign_extract (match_operand 1 "nonimmediate_operand")
4917 (match_operand 2 "const_int_operand")
4918 (match_operand 3 "const_int_operand")))]
4919 "arm_arch_thumb2"
4920 {
4921 HOST_WIDE_INT width = INTVAL (operands[2]);
4922 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4923
4924 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
4925 && (bitpos % BITS_PER_UNIT) == 0)
4926 {
4927 rtx base_addr;
4928
4929 if (BYTES_BIG_ENDIAN)
4930 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
4931
4932 if (width == 32)
4933 {
4934 base_addr = adjust_address (operands[1], SImode,
4935 bitpos / BITS_PER_UNIT);
4936 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4937 }
4938 else
4939 {
4940 rtx dest = operands[0];
4941 rtx tmp = gen_reg_rtx (SImode);
4942
4943 /* We may get a paradoxical subreg here. Strip it off. */
4944 if (GET_CODE (dest) == SUBREG
4945 && GET_MODE (dest) == SImode
4946 && GET_MODE (SUBREG_REG (dest)) == HImode)
4947 dest = SUBREG_REG (dest);
4948
4949 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4950 FAIL;
4951
4952 base_addr = adjust_address (operands[1], HImode,
4953 bitpos / BITS_PER_UNIT);
4954 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
4955 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4956 }
4957
4958 DONE;
4959 }
4960 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4961 FAIL;
4962 else if (GET_MODE (operands[0]) == SImode
4963 && GET_MODE (operands[1]) == SImode)
4964 {
4965 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
4966 operands[3]));
4967 DONE;
4968 }
4969
4970 FAIL;
4971 })
4972
4973 ; Helper to expand register forms of extv with the proper modes.
4974
4975 (define_expand "extv_regsi"
4976 [(set (match_operand:SI 0 "s_register_operand")
4977 (sign_extract:SI (match_operand:SI 1 "s_register_operand")
4978 (match_operand 2 "const_int_operand")
4979 (match_operand 3 "const_int_operand")))]
4980 ""
4981 {
4982 })
4983
4984 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
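;
; A hedged C illustration of when these patterns can be used (the type and
; field names below are made up):
;
;   struct __attribute__((packed)) msg { char tag; unsigned int value; };
;   unsigned int get_value (struct msg *m) { return m->value; }
;
; Here VALUE sits at a misaligned offset, and when unaligned access is
; enabled the field may be read with a single unaligned ldr rather than
; assembled byte by byte.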
4985
4986 (define_insn "unaligned_loaddi"
4987 [(set (match_operand:DI 0 "s_register_operand" "=r")
4988 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
4989 UNSPEC_UNALIGNED_LOAD))]
4990 "TARGET_32BIT && TARGET_LDRD"
4991 "*
4992 return output_move_double (operands, true, NULL);
4993 "
4994 [(set_attr "length" "8")
4995 (set_attr "type" "load_8")])
4996
4997 (define_insn "unaligned_loadsi"
4998 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4999 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
5000 UNSPEC_UNALIGNED_LOAD))]
5001 "unaligned_access"
5002 "@
5003 ldr\t%0, %1\t@ unaligned
5004 ldr%?\t%0, %1\t@ unaligned
5005 ldr%?\t%0, %1\t@ unaligned"
5006 [(set_attr "arch" "t1,t2,32")
5007 (set_attr "length" "2,2,4")
5008 (set_attr "predicable" "no,yes,yes")
5009 (set_attr "predicable_short_it" "no,yes,no")
5010 (set_attr "type" "load_4")])
5011
5012 ;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
5013 ;; address (there's no immediate format). That's tricky to support
5014 ;; here and we don't really need this pattern for that case, so only
5015 ;; enable for 32-bit ISAs.
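;; (Illustration: Thumb-1 only has the register-offset encoding,
;; "ldrsh r0, [r1, r2]"; there is no "ldrsh r0, [r1, #imm]" form.)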
5016 (define_insn "unaligned_loadhis"
5017 [(set (match_operand:SI 0 "s_register_operand" "=r")
5018 (sign_extend:SI
5019 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
5020 UNSPEC_UNALIGNED_LOAD)))]
5021 "unaligned_access && TARGET_32BIT"
5022 "ldrsh%?\t%0, %1\t@ unaligned"
5023 [(set_attr "predicable" "yes")
5024 (set_attr "type" "load_byte")])
5025
5026 (define_insn "unaligned_loadhiu"
5027 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
5028 (zero_extend:SI
5029 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
5030 UNSPEC_UNALIGNED_LOAD)))]
5031 "unaligned_access"
5032 "@
5033 ldrh\t%0, %1\t@ unaligned
5034 ldrh%?\t%0, %1\t@ unaligned
5035 ldrh%?\t%0, %1\t@ unaligned"
5036 [(set_attr "arch" "t1,t2,32")
5037 (set_attr "length" "2,2,4")
5038 (set_attr "predicable" "no,yes,yes")
5039 (set_attr "predicable_short_it" "no,yes,no")
5040 (set_attr "type" "load_byte")])
5041
5042 (define_insn "unaligned_storedi"
5043 [(set (match_operand:DI 0 "memory_operand" "=m")
5044 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
5045 UNSPEC_UNALIGNED_STORE))]
5046 "TARGET_32BIT && TARGET_LDRD"
5047 "*
5048 return output_move_double (operands, true, NULL);
5049 "
5050 [(set_attr "length" "8")
5051 (set_attr "type" "store_8")])
5052
5053 (define_insn "unaligned_storesi"
5054 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
5055 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
5056 UNSPEC_UNALIGNED_STORE))]
5057 "unaligned_access"
5058 "@
5059 str\t%1, %0\t@ unaligned
5060 str%?\t%1, %0\t@ unaligned
5061 str%?\t%1, %0\t@ unaligned"
5062 [(set_attr "arch" "t1,t2,32")
5063 (set_attr "length" "2,2,4")
5064 (set_attr "predicable" "no,yes,yes")
5065 (set_attr "predicable_short_it" "no,yes,no")
5066 (set_attr "type" "store_4")])
5067
5068 (define_insn "unaligned_storehi"
5069 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
5070 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
5071 UNSPEC_UNALIGNED_STORE))]
5072 "unaligned_access"
5073 "@
5074 strh\t%1, %0\t@ unaligned
5075 strh%?\t%1, %0\t@ unaligned
5076 strh%?\t%1, %0\t@ unaligned"
5077 [(set_attr "arch" "t1,t2,32")
5078 (set_attr "length" "2,2,4")
5079 (set_attr "predicable" "no,yes,yes")
5080 (set_attr "predicable_short_it" "no,yes,no")
5081 (set_attr "type" "store_4")])
5082
5083
5084 (define_insn "*extv_reg"
5085 [(set (match_operand:SI 0 "s_register_operand" "=r")
5086 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
5087 (match_operand:SI 2 "const_int_operand" "n")
5088 (match_operand:SI 3 "const_int_operand" "n")))]
5089 "arm_arch_thumb2
5090 && IN_RANGE (INTVAL (operands[3]), 0, 31)
5091 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
5092 "sbfx%?\t%0, %1, %3, %2"
5093 [(set_attr "length" "4")
5094 (set_attr "predicable" "yes")
5095 (set_attr "type" "bfm")]
5096 )
5097
5098 (define_insn "extzv_t2"
5099 [(set (match_operand:SI 0 "s_register_operand" "=r")
5100 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
5101 (match_operand:SI 2 "const_int_operand" "n")
5102 (match_operand:SI 3 "const_int_operand" "n")))]
5103 "arm_arch_thumb2
5104 && IN_RANGE (INTVAL (operands[3]), 0, 31)
5105 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
5106 "ubfx%?\t%0, %1, %3, %2"
5107 [(set_attr "length" "4")
5108 (set_attr "predicable" "yes")
5109 (set_attr "type" "bfm")]
5110 )
5111
5112
5113 ;; Division instructions
5114 (define_insn "divsi3"
5115 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5116 (div:SI (match_operand:SI 1 "s_register_operand" "r,r")
5117 (match_operand:SI 2 "s_register_operand" "r,r")))]
5118 "TARGET_IDIV"
5119 "@
5120 sdiv%?\t%0, %1, %2
5121 sdiv\t%0, %1, %2"
5122 [(set_attr "arch" "32,v8mb")
5123 (set_attr "predicable" "yes")
5124 (set_attr "type" "sdiv")]
5125 )
5126
5127 (define_insn "udivsi3"
5128 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5129 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
5130 (match_operand:SI 2 "s_register_operand" "r,r")))]
5131 "TARGET_IDIV"
5132 "@
5133 udiv%?\t%0, %1, %2
5134 udiv\t%0, %1, %2"
5135 [(set_attr "arch" "32,v8mb")
5136 (set_attr "predicable" "yes")
5137 (set_attr "type" "udiv")]
5138 )
5139
5140 \f
5141 ;; Unary arithmetic insns
5142
5143 (define_expand "negv<SIDI:mode>3"
5144 [(match_operand:SIDI 0 "s_register_operand")
5145 (match_operand:SIDI 1 "s_register_operand")
5146 (match_operand 2 "")]
5147 "TARGET_32BIT"
5148 {
5149 emit_insn (gen_subv<mode>4 (operands[0], const0_rtx, operands[1],
5150 operands[2]));
5151 DONE;
5152 })
5153
5154 (define_expand "negsi2"
5155 [(set (match_operand:SI 0 "s_register_operand")
5156 (neg:SI (match_operand:SI 1 "s_register_operand")))]
5157 "TARGET_EITHER"
5158 ""
5159 )
5160
5161 (define_insn "*arm_negsi2"
5162 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
5163 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
5164 "TARGET_32BIT"
5165 "rsb%?\\t%0, %1, #0"
5166 [(set_attr "predicable" "yes")
5167 (set_attr "predicable_short_it" "yes,no")
5168 (set_attr "arch" "t2,*")
5169 (set_attr "length" "4")
5170 (set_attr "type" "alu_imm")]
5171 )
5172
5173 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
5174 ;; rather than (0 cmp reg). This gives the same results for unsigned
5175 ;; and equality compares, which is what we mostly need here.
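;; For instance, "negs r0, r1" sets Z exactly when r1 == 0, which is the
;; same condition as (~r1 == ~0), so EQ/NE tests read the flags unchanged.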
5176 (define_insn "negsi2_0compare"
5177 [(set (reg:CC_RSB CC_REGNUM)
5178 (compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r"))
5179 (const_int -1)))
5180 (set (match_operand:SI 0 "s_register_operand" "=l,r")
5181 (neg:SI (match_dup 1)))]
5182 "TARGET_32BIT"
5183 "@
5184 negs\\t%0, %1
5185 rsbs\\t%0, %1, #0"
5186 [(set_attr "conds" "set")
5187 (set_attr "arch" "t2,*")
5188 (set_attr "length" "2,*")
5189 (set_attr "type" "alus_imm")]
5190 )
5191
5192 (define_insn "negsi2_carryin"
5193 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5194 (minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r"))
5195 (match_operand:SI 2 "arm_borrow_operation" "")))]
5196 "TARGET_32BIT"
5197 "@
5198 rsc\\t%0, %1, #0
5199 sbc\\t%0, %1, %1, lsl #1"
5200 [(set_attr "conds" "use")
5201 (set_attr "arch" "a,t2")
5202 (set_attr "type" "adc_imm,adc_reg")]
5203 )
5204
5205 (define_expand "negsf2"
5206 [(set (match_operand:SF 0 "s_register_operand")
5207 (neg:SF (match_operand:SF 1 "s_register_operand")))]
5208 "TARGET_32BIT && TARGET_HARD_FLOAT"
5209 ""
5210 )
5211
5212 (define_expand "negdf2"
5213 [(set (match_operand:DF 0 "s_register_operand")
5214 (neg:DF (match_operand:DF 1 "s_register_operand")))]
5215 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
5216 "")
5217
5218 ;; abssi2 doesn't really clobber the condition codes if a different register
5219 ;; is being set. To keep things simple, assume during rtl manipulations that
5220 ;; it does, but tell the final scan operator the truth. Similarly for
5221 ;; (neg (abs...))
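;; For example, the ARM splitter below emits either
;; "cmp r0, #0; rsblt r0, r0, #0" (when source and destination match)
;; or "eor r0, r1, r1, asr #31; sub r0, r0, r1, asr #31".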
5222
5223 (define_expand "abssi2"
5224 [(parallel
5225 [(set (match_operand:SI 0 "s_register_operand")
5226 (abs:SI (match_operand:SI 1 "s_register_operand")))
5227 (clobber (match_dup 2))])]
5228 "TARGET_EITHER"
5229 "
5230 if (TARGET_THUMB1)
5231 operands[2] = gen_rtx_SCRATCH (SImode);
5232 else
5233 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
5234 ")
5235
5236 (define_insn_and_split "*arm_abssi2"
5237 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
5238 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
5239 (clobber (reg:CC CC_REGNUM))]
5240 "TARGET_ARM"
5241 "#"
5242 "&& reload_completed"
5243 [(const_int 0)]
5244 {
5245 /* if (which_alternative == 0) */
5246 if (REGNO (operands[0]) == REGNO (operands[1]))
5247 {
5248 /* Emit the pattern:
5249 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
5250 [(set (reg:CC CC_REGNUM)
5251 (compare:CC (match_dup 0) (const_int 0)))
5252 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
5253 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
5254 */
5255 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
5256 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
5257 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5258 (gen_rtx_LT (SImode,
5259 gen_rtx_REG (CCmode, CC_REGNUM),
5260 const0_rtx)),
5261 (gen_rtx_SET (operands[0],
5262 (gen_rtx_MINUS (SImode,
5263 const0_rtx,
5264 operands[1]))))));
5265 DONE;
5266 }
5267 else
5268 {
5269 /* Emit the pattern:
5270 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
5271 [(set (match_dup 0)
5272 (xor:SI (match_dup 1)
5273 (ashiftrt:SI (match_dup 1) (const_int 31))))
5274 (set (match_dup 0)
5275 (minus:SI (match_dup 0)
5276 (ashiftrt:SI (match_dup 1) (const_int 31))))]
5277 */
5278 emit_insn (gen_rtx_SET (operands[0],
5279 gen_rtx_XOR (SImode,
5280 gen_rtx_ASHIFTRT (SImode,
5281 operands[1],
5282 GEN_INT (31)),
5283 operands[1])));
5284 emit_insn (gen_rtx_SET (operands[0],
5285 gen_rtx_MINUS (SImode,
5286 operands[0],
5287 gen_rtx_ASHIFTRT (SImode,
5288 operands[1],
5289 GEN_INT (31)))));
5290 DONE;
5291 }
5292 }
5293 [(set_attr "conds" "clob,*")
5294 (set_attr "shift" "1")
5295 (set_attr "predicable" "no, yes")
5296 (set_attr "length" "8")
5297 (set_attr "type" "multiple")]
5298 )
5299
5300 (define_insn_and_split "*arm_neg_abssi2"
5301 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
5302 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
5303 (clobber (reg:CC CC_REGNUM))]
5304 "TARGET_ARM"
5305 "#"
5306 "&& reload_completed"
5307 [(const_int 0)]
5308 {
5309 /* if (which_alternative == 0) */
5310 if (REGNO (operands[0]) == REGNO (operands[1]))
5311 {
5312 /* Emit the pattern:
5313 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
5314 */
5315 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
5316 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
5317 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5318 gen_rtx_GT (SImode,
5319 gen_rtx_REG (CCmode, CC_REGNUM),
5320 const0_rtx),
5321 gen_rtx_SET (operands[0],
5322 (gen_rtx_MINUS (SImode,
5323 const0_rtx,
5324 operands[1])))));
5325 }
5326 else
5327 {
5328 /* Emit the pattern:
5329 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
5330 */
5331 emit_insn (gen_rtx_SET (operands[0],
5332 gen_rtx_XOR (SImode,
5333 gen_rtx_ASHIFTRT (SImode,
5334 operands[1],
5335 GEN_INT (31)),
5336 operands[1])));
5337 emit_insn (gen_rtx_SET (operands[0],
5338 gen_rtx_MINUS (SImode,
5339 gen_rtx_ASHIFTRT (SImode,
5340 operands[1],
5341 GEN_INT (31)),
5342 operands[0])));
5343 }
5344 DONE;
5345 }
5346 [(set_attr "conds" "clob,*")
5347 (set_attr "shift" "1")
5348 (set_attr "predicable" "no, yes")
5349 (set_attr "length" "8")
5350 (set_attr "type" "multiple")]
5351 )
5352
5353 (define_expand "abssf2"
5354 [(set (match_operand:SF 0 "s_register_operand")
5355 (abs:SF (match_operand:SF 1 "s_register_operand")))]
5356 "TARGET_32BIT && TARGET_HARD_FLOAT"
5357 "")
5358
5359 (define_expand "absdf2"
5360 [(set (match_operand:DF 0 "s_register_operand")
5361 (abs:DF (match_operand:DF 1 "s_register_operand")))]
5362 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5363 "")
5364
5365 (define_expand "sqrtsf2"
5366 [(set (match_operand:SF 0 "s_register_operand")
5367 (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
5368 "TARGET_32BIT && TARGET_HARD_FLOAT"
5369 "")
5370
5371 (define_expand "sqrtdf2"
5372 [(set (match_operand:DF 0 "s_register_operand")
5373 (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
5374 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
5375 "")
5376
5377 (define_expand "one_cmplsi2"
5378 [(set (match_operand:SI 0 "s_register_operand")
5379 (not:SI (match_operand:SI 1 "s_register_operand")))]
5380 "TARGET_EITHER"
5381 ""
5382 )
5383
5384 (define_insn "*arm_one_cmplsi2"
5385 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
5386 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
5387 "TARGET_32BIT"
5388 "mvn%?\\t%0, %1"
5389 [(set_attr "predicable" "yes")
5390 (set_attr "predicable_short_it" "yes,no")
5391 (set_attr "arch" "t2,*")
5392 (set_attr "length" "4")
5393 (set_attr "type" "mvn_reg")]
5394 )
5395
5396 (define_insn "*notsi_compare0"
5397 [(set (reg:CC_NZ CC_REGNUM)
5398 (compare:CC_NZ (not:SI (match_operand:SI 1 "s_register_operand" "r"))
5399 (const_int 0)))
5400 (set (match_operand:SI 0 "s_register_operand" "=r")
5401 (not:SI (match_dup 1)))]
5402 "TARGET_32BIT"
5403 "mvns%?\\t%0, %1"
5404 [(set_attr "conds" "set")
5405 (set_attr "type" "mvn_reg")]
5406 )
5407
5408 (define_insn "*notsi_compare0_scratch"
5409 [(set (reg:CC_NZ CC_REGNUM)
5410 (compare:CC_NZ (not:SI (match_operand:SI 1 "s_register_operand" "r"))
5411 (const_int 0)))
5412 (clobber (match_scratch:SI 0 "=r"))]
5413 "TARGET_32BIT"
5414 "mvns%?\\t%0, %1"
5415 [(set_attr "conds" "set")
5416 (set_attr "type" "mvn_reg")]
5417 )
5418 \f
5419 ;; Fixed <--> Floating conversion insns
5420
5421 (define_expand "floatsihf2"
5422 [(set (match_operand:HF 0 "general_operand")
5423 (float:HF (match_operand:SI 1 "general_operand")))]
5424 "TARGET_EITHER"
5425 "
5426 {
5427 rtx op1 = gen_reg_rtx (SFmode);
5428 expand_float (op1, operands[1], 0);
5429 op1 = convert_to_mode (HFmode, op1, 0);
5430 emit_move_insn (operands[0], op1);
5431 DONE;
5432 }"
5433 )
5434
5435 (define_expand "floatdihf2"
5436 [(set (match_operand:HF 0 "general_operand")
5437 (float:HF (match_operand:DI 1 "general_operand")))]
5438 "TARGET_EITHER"
5439 "
5440 {
5441 rtx op1 = gen_reg_rtx (SFmode);
5442 expand_float (op1, operands[1], 0);
5443 op1 = convert_to_mode (HFmode, op1, 0);
5444 emit_move_insn (operands[0], op1);
5445 DONE;
5446 }"
5447 )
5448
5449 (define_expand "floatsisf2"
5450 [(set (match_operand:SF 0 "s_register_operand")
5451 (float:SF (match_operand:SI 1 "s_register_operand")))]
5452 "TARGET_32BIT && TARGET_HARD_FLOAT"
5453 "
5454 ")
5455
5456 (define_expand "floatsidf2"
5457 [(set (match_operand:DF 0 "s_register_operand")
5458 (float:DF (match_operand:SI 1 "s_register_operand")))]
5459 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5460 "
5461 ")
5462
5463 (define_expand "fix_trunchfsi2"
5464 [(set (match_operand:SI 0 "general_operand")
5465 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
5466 "TARGET_EITHER"
5467 "
5468 {
5469 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5470 expand_fix (operands[0], op1, 0);
5471 DONE;
5472 }"
5473 )
5474
5475 (define_expand "fix_trunchfdi2"
5476 [(set (match_operand:DI 0 "general_operand")
5477 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
5478 "TARGET_EITHER"
5479 "
5480 {
5481 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5482 expand_fix (operands[0], op1, 0);
5483 DONE;
5484 }"
5485 )
5486
5487 (define_expand "fix_truncsfsi2"
5488 [(set (match_operand:SI 0 "s_register_operand")
5489 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
5490 "TARGET_32BIT && TARGET_HARD_FLOAT"
5491 "
5492 ")
5493
5494 (define_expand "fix_truncdfsi2"
5495 [(set (match_operand:SI 0 "s_register_operand")
5496 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
5497 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5498 "
5499 ")
5500
5501 ;; Truncation insns
5502
5503 (define_expand "truncdfsf2"
5504 [(set (match_operand:SF 0 "s_register_operand")
5505 (float_truncate:SF
5506 (match_operand:DF 1 "s_register_operand")))]
5507 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5508 ""
5509 )
5510
5511 ;; DFmode to HFmode conversions on targets without a single-step hardware
5512 ;; instruction for it would have to go through SFmode. This is dangerous
5513 ;; as it introduces double rounding.
5514 ;;
5515 ;; Disable this pattern unless we are in an unsafe math mode, or we have
5516 ;; a single-step instruction.
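;; A sketch of the hazard: for the double value 1 + 2^-11 + 2^-26, rounding
;; directly to HFmode gives 1 + 2^-10, but rounding to SFmode first yields
;; exactly 1 + 2^-11, which then rounds (ties-to-even) down to 1.0 in HFmode.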
5517
5518 (define_expand "truncdfhf2"
5519 [(set (match_operand:HF 0 "s_register_operand")
5520 (float_truncate:HF
5521 (match_operand:DF 1 "s_register_operand")))]
5522 "(TARGET_EITHER && flag_unsafe_math_optimizations)
5523 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
5524 {
5525 /* If we don't have a direct instruction for this, we must be in
5526 an unsafe math mode; go via SFmode. */
5527
5528 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
5529 {
5530 rtx op1;
5531 op1 = convert_to_mode (SFmode, operands[1], 0);
5532 op1 = convert_to_mode (HFmode, op1, 0);
5533 emit_move_insn (operands[0], op1);
5534 DONE;
5535 }
5536 /* Otherwise, we will pick this up as a single instruction with
5537 no intermediate rounding. */
5538 }
5539 )
5540 \f
5541 ;; Zero and sign extension instructions.
5542
5543 (define_expand "zero_extend<mode>di2"
5544 [(set (match_operand:DI 0 "s_register_operand" "")
5545 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
5546 "TARGET_32BIT <qhs_zextenddi_cond>"
5547 {
5548 rtx res_lo, res_hi, op0_lo, op0_hi;
5549 res_lo = gen_lowpart (SImode, operands[0]);
5550 res_hi = gen_highpart (SImode, operands[0]);
5551 if (can_create_pseudo_p ())
5552 {
5553 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
5554 op0_hi = gen_reg_rtx (SImode);
5555 }
5556 else
5557 {
5558 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
5559 op0_hi = res_hi;
5560 }
5561 if (<MODE>mode != SImode)
5562 emit_insn (gen_rtx_SET (op0_lo,
5563 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5564 emit_insn (gen_movsi (op0_hi, const0_rtx));
5565 if (res_lo != op0_lo)
5566 emit_move_insn (res_lo, op0_lo);
5567 if (res_hi != op0_hi)
5568 emit_move_insn (res_hi, op0_hi);
5569 DONE;
5570 }
5571 )
5572
5573 (define_expand "extend<mode>di2"
5574 [(set (match_operand:DI 0 "s_register_operand" "")
5575 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
5576 "TARGET_32BIT <qhs_sextenddi_cond>"
5577 {
5578 rtx res_lo, res_hi, op0_lo, op0_hi;
5579 res_lo = gen_lowpart (SImode, operands[0]);
5580 res_hi = gen_highpart (SImode, operands[0]);
5581 if (can_create_pseudo_p ())
5582 {
5583 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
5584 op0_hi = gen_reg_rtx (SImode);
5585 }
5586 else
5587 {
5588 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
5589 op0_hi = res_hi;
5590 }
5591 if (<MODE>mode != SImode)
5592 emit_insn (gen_rtx_SET (op0_lo,
5593 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5594 emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
5595 if (res_lo != op0_lo)
5596 emit_move_insn (res_lo, op0_lo);
5597 if (res_hi != op0_hi)
5598 emit_move_insn (res_hi, op0_hi);
5599 DONE;
5600 }
5601 )
5602
5603 ;; Splits for all extensions to DImode
5604 (define_split
5605 [(set (match_operand:DI 0 "s_register_operand" "")
5606 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5607 "TARGET_32BIT"
5608 [(set (match_dup 0) (match_dup 1))]
5609 {
5610 rtx lo_part = gen_lowpart (SImode, operands[0]);
5611 machine_mode src_mode = GET_MODE (operands[1]);
5612
5613 if (src_mode == SImode)
5614 emit_move_insn (lo_part, operands[1]);
5615 else
5616 emit_insn (gen_rtx_SET (lo_part,
5617 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5618 operands[0] = gen_highpart (SImode, operands[0]);
5619 operands[1] = const0_rtx;
5620 })
5621
5622 (define_split
5623 [(set (match_operand:DI 0 "s_register_operand" "")
5624 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5625 "TARGET_32BIT"
5626 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
5627 {
5628 rtx lo_part = gen_lowpart (SImode, operands[0]);
5629 machine_mode src_mode = GET_MODE (operands[1]);
5630
5631 if (src_mode == SImode)
5632 emit_move_insn (lo_part, operands[1]);
5633 else
5634 emit_insn (gen_rtx_SET (lo_part,
5635 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5636 operands[1] = lo_part;
5637 operands[0] = gen_highpart (SImode, operands[0]);
5638 })
5639
5640 (define_expand "zero_extendhisi2"
5641 [(set (match_operand:SI 0 "s_register_operand")
5642 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5643 "TARGET_EITHER"
5644 {
5645 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
5646 {
5647 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
5648 DONE;
5649 }
5650 if (!arm_arch6 && !MEM_P (operands[1]))
5651 {
5652 rtx t = gen_lowpart (SImode, operands[1]);
5653 rtx tmp = gen_reg_rtx (SImode);
5654 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5655 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
5656 DONE;
5657 }
5658 })
5659
5660 (define_split
5661 [(set (match_operand:SI 0 "s_register_operand" "")
5662 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
5663 "!TARGET_THUMB2 && !arm_arch6"
5664 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5665 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
5666 {
5667 operands[2] = gen_lowpart (SImode, operands[1]);
5668 })
5669
5670 (define_insn "*arm_zero_extendhisi2"
5671 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5672 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5673 "TARGET_ARM && arm_arch4 && !arm_arch6"
5674 "@
5675 #
5676 ldrh%?\\t%0, %1"
5677 [(set_attr "type" "alu_shift_reg,load_byte")
5678 (set_attr "predicable" "yes")]
5679 )
5680
5681 (define_insn "*arm_zero_extendhisi2_v6"
5682 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5683 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5684 "TARGET_ARM && arm_arch6"
5685 "@
5686 uxth%?\\t%0, %1
5687 ldrh%?\\t%0, %1"
5688 [(set_attr "predicable" "yes")
5689 (set_attr "type" "extend,load_byte")]
5690 )
5691
5692 (define_insn "*arm_zero_extendhisi2addsi"
5693 [(set (match_operand:SI 0 "s_register_operand" "=r")
5694 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5695 (match_operand:SI 2 "s_register_operand" "r")))]
5696 "TARGET_INT_SIMD"
5697 "uxtah%?\\t%0, %2, %1"
5698 [(set_attr "type" "alu_shift_reg")
5699 (set_attr "predicable" "yes")]
5700 )
5701
5702 (define_expand "zero_extendqisi2"
5703 [(set (match_operand:SI 0 "s_register_operand")
5704 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
5705 "TARGET_EITHER"
5706 {
5707 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
5708 {
5709 emit_insn (gen_andsi3 (operands[0],
5710 gen_lowpart (SImode, operands[1]),
5711 GEN_INT (255)));
5712 DONE;
5713 }
5714 if (!arm_arch6 && !MEM_P (operands[1]))
5715 {
5716 rtx t = gen_lowpart (SImode, operands[1]);
5717 rtx tmp = gen_reg_rtx (SImode);
5718 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5719 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
5720 DONE;
5721 }
5722 })
5723
5724 (define_split
5725 [(set (match_operand:SI 0 "s_register_operand" "")
5726 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
5727 "!arm_arch6"
5728 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5729 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
5730 {
5731 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5732 if (TARGET_ARM)
5733 {
5734 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
5735 DONE;
5736 }
5737 })
5738
5739 (define_insn "*arm_zero_extendqisi2"
5740 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5741 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
5742 "TARGET_ARM && !arm_arch6"
5743 "@
5744 #
5745 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5746 [(set_attr "length" "8,4")
5747 (set_attr "type" "alu_shift_reg,load_byte")
5748 (set_attr "predicable" "yes")]
5749 )
5750
5751 (define_insn "*arm_zero_extendqisi2_v6"
5752 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5753 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
5754 "TARGET_ARM && arm_arch6"
5755 "@
5756 uxtb%?\\t%0, %1
5757 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5758 [(set_attr "type" "extend,load_byte")
5759 (set_attr "predicable" "yes")]
5760 )
5761
5762 (define_insn "*arm_zero_extendqisi2addsi"
5763 [(set (match_operand:SI 0 "s_register_operand" "=r")
5764 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5765 (match_operand:SI 2 "s_register_operand" "r")))]
5766 "TARGET_INT_SIMD"
5767 "uxtab%?\\t%0, %2, %1"
5768 [(set_attr "predicable" "yes")
5769 (set_attr "type" "alu_shift_reg")]
5770 )
5771
5772 (define_split
5773 [(set (match_operand:SI 0 "s_register_operand" "")
5774 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
5775 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5776 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
5777 [(set (match_dup 2) (match_dup 1))
5778 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5779 ""
5780 )
5781
5782 (define_split
5783 [(set (match_operand:SI 0 "s_register_operand" "")
5784 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
5785 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5786 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
5787 [(set (match_dup 2) (match_dup 1))
5788 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5789 ""
5790 )
5791
5792
5793 (define_split
5794 [(set (match_operand:SI 0 "s_register_operand" "")
5795 (IOR_XOR:SI (and:SI (ashift:SI
5796 (match_operand:SI 1 "s_register_operand" "")
5797 (match_operand:SI 2 "const_int_operand" ""))
5798 (match_operand:SI 3 "const_int_operand" ""))
5799 (zero_extend:SI
5800 (match_operator 5 "subreg_lowpart_operator"
5801 [(match_operand:SI 4 "s_register_operand" "")]))))]
5802 "TARGET_32BIT
5803 && (UINTVAL (operands[3])
5804 == (GET_MODE_MASK (GET_MODE (operands[5]))
5805 & (GET_MODE_MASK (GET_MODE (operands[5]))
5806 << (INTVAL (operands[2])))))"
5807 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
5808 (match_dup 4)))
5809 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
5810 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
5811 )
5812
5813 (define_insn "*compareqi_eq0"
5814 [(set (reg:CC_Z CC_REGNUM)
5815 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
5816 (const_int 0)))]
5817 "TARGET_32BIT"
5818 "tst%?\\t%0, #255"
5819 [(set_attr "conds" "set")
5820 (set_attr "predicable" "yes")
5821 (set_attr "type" "logic_imm")]
5822 )
5823
5824 (define_expand "extendhisi2"
5825 [(set (match_operand:SI 0 "s_register_operand")
5826 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5827 "TARGET_EITHER"
5828 {
5829 if (TARGET_THUMB1)
5830 {
5831 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
5832 DONE;
5833 }
5834 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
5835 {
5836 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
5837 DONE;
5838 }
5839
5840 if (!arm_arch6 && !MEM_P (operands[1]))
5841 {
5842 rtx t = gen_lowpart (SImode, operands[1]);
5843 rtx tmp = gen_reg_rtx (SImode);
5844 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5845 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
5846 DONE;
5847 }
5848 })
5849
5850 (define_split
5851 [(parallel
5852 [(set (match_operand:SI 0 "register_operand" "")
5853 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
5854 (clobber (match_scratch:SI 2 ""))])]
5855 "!arm_arch6"
5856 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5857 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5858 {
5859 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5860 })
5861
5862 ;; This pattern will only be used when ldrsh is not available
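;; Roughly, the expansion below loads the two bytes separately (zero-
;; extended), shifts the more-significant byte left by 24, and merges it in
;; with an arithmetic shift right by 16, yielding a sign-extended halfword
;; without needing ldrsh.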
5863 (define_expand "extendhisi2_mem"
5864 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5865 (set (match_dup 3)
5866 (zero_extend:SI (match_dup 7)))
5867 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
5868 (set (match_operand:SI 0 "" "")
5869 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
5870 "TARGET_ARM"
5871 "
5872 {
5873 rtx mem1, mem2;
5874 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5875
5876 mem1 = change_address (operands[1], QImode, addr);
5877 mem2 = change_address (operands[1], QImode,
5878 plus_constant (Pmode, addr, 1));
5879 operands[0] = gen_lowpart (SImode, operands[0]);
5880 operands[1] = mem1;
5881 operands[2] = gen_reg_rtx (SImode);
5882 operands[3] = gen_reg_rtx (SImode);
5883 operands[6] = gen_reg_rtx (SImode);
5884 operands[7] = mem2;
5885
5886 if (BYTES_BIG_ENDIAN)
5887 {
5888 operands[4] = operands[2];
5889 operands[5] = operands[3];
5890 }
5891 else
5892 {
5893 operands[4] = operands[3];
5894 operands[5] = operands[2];
5895 }
5896 }"
5897 )
5898
5899 (define_split
5900 [(set (match_operand:SI 0 "register_operand" "")
5901 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
5902 "!arm_arch6"
5903 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5904 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5905 {
5906 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5907 })
5908
5909 (define_insn "*arm_extendhisi2"
5910 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5911 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5912 "TARGET_ARM && arm_arch4 && !arm_arch6"
5913 "@
5914 #
5915 ldrsh%?\\t%0, %1"
5916 [(set_attr "length" "8,4")
5917 (set_attr "type" "alu_shift_reg,load_byte")
5918 (set_attr "predicable" "yes")]
5919 )
5920
5921 ;; ??? Check Thumb-2 pool range
5922 (define_insn "*arm_extendhisi2_v6"
5923 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5924 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5925 "TARGET_32BIT && arm_arch6"
5926 "@
5927 sxth%?\\t%0, %1
5928 ldrsh%?\\t%0, %1"
5929 [(set_attr "type" "extend,load_byte")
5930 (set_attr "predicable" "yes")]
5931 )
5932
5933 (define_insn "*arm_extendhisi2addsi"
5934 [(set (match_operand:SI 0 "s_register_operand" "=r")
5935 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5936 (match_operand:SI 2 "s_register_operand" "r")))]
5937 "TARGET_INT_SIMD"
5938 "sxtah%?\\t%0, %2, %1"
5939 [(set_attr "type" "alu_shift_reg")]
5940 )
5941
5942 (define_expand "extendqihi2"
5943 [(set (match_dup 2)
5944 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
5945 (const_int 24)))
5946 (set (match_operand:HI 0 "s_register_operand")
5947 (ashiftrt:SI (match_dup 2)
5948 (const_int 24)))]
5949 "TARGET_ARM"
5950 "
5951 {
5952 if (arm_arch4 && MEM_P (operands[1]))
5953 {
5954 emit_insn (gen_rtx_SET (operands[0],
5955 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
5956 DONE;
5957 }
5958 if (!s_register_operand (operands[1], QImode))
5959 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5960 operands[0] = gen_lowpart (SImode, operands[0]);
5961 operands[1] = gen_lowpart (SImode, operands[1]);
5962 operands[2] = gen_reg_rtx (SImode);
5963 }"
5964 )
5965
5966 (define_insn "*arm_extendqihi_insn"
5967 [(set (match_operand:HI 0 "s_register_operand" "=r")
5968 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
5969 "TARGET_ARM && arm_arch4"
5970 "ldrsb%?\\t%0, %1"
5971 [(set_attr "type" "load_byte")
5972 (set_attr "predicable" "yes")]
5973 )
5974
5975 (define_expand "extendqisi2"
5976 [(set (match_operand:SI 0 "s_register_operand")
5977 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
5978 "TARGET_EITHER"
5979 {
5980 if (!arm_arch4 && MEM_P (operands[1]))
5981 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5982
5983 if (!arm_arch6 && !MEM_P (operands[1]))
5984 {
5985 rtx t = gen_lowpart (SImode, operands[1]);
5986 rtx tmp = gen_reg_rtx (SImode);
5987 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5988 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
5989 DONE;
5990 }
5991 })
5992
5993 (define_split
5994 [(set (match_operand:SI 0 "register_operand" "")
5995 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
5996 "!arm_arch6"
5997 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5998 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
5999 {
6000 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
6001 })
6002
6003 (define_insn "*arm_extendqisi"
6004 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
6005 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
6006 "TARGET_ARM && arm_arch4 && !arm_arch6"
6007 "@
6008 #
6009 ldrsb%?\\t%0, %1"
6010 [(set_attr "length" "8,4")
6011 (set_attr "type" "alu_shift_reg,load_byte")
6012 (set_attr "predicable" "yes")]
6013 )
6014
6015 (define_insn "*arm_extendqisi_v6"
6016 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
6017 (sign_extend:SI
6018 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
6019 "TARGET_ARM && arm_arch6"
6020 "@
6021 sxtb%?\\t%0, %1
6022 ldrsb%?\\t%0, %1"
6023 [(set_attr "type" "extend,load_byte")
6024 (set_attr "predicable" "yes")]
6025 )
6026
6027 (define_insn "*arm_extendqisi2addsi"
6028 [(set (match_operand:SI 0 "s_register_operand" "=r")
6029 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
6030 (match_operand:SI 2 "s_register_operand" "r")))]
6031 "TARGET_INT_SIMD"
6032 "sxtab%?\\t%0, %2, %1"
6033 [(set_attr "type" "alu_shift_reg")
6034 (set_attr "predicable" "yes")]
6035 )
6036
6037 (define_insn "arm_<sup>xtb16"
6038 [(set (match_operand:SI 0 "s_register_operand" "=r")
6039 (unspec:SI
6040 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
6041 "TARGET_INT_SIMD"
6042 "<sup>xtb16%?\\t%0, %1"
6043 [(set_attr "predicable" "yes")
6044 (set_attr "type" "alu_dsp_reg")])
6045
6046 (define_insn "arm_<simd32_op>"
6047 [(set (match_operand:SI 0 "s_register_operand" "=r")
6048 (unspec:SI
6049 [(match_operand:SI 1 "s_register_operand" "r")
6050 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
6051 "TARGET_INT_SIMD"
6052 "<simd32_op>%?\\t%0, %1, %2"
6053 [(set_attr "predicable" "yes")
6054 (set_attr "type" "alu_dsp_reg")])
6055
6056 (define_insn "arm_usada8"
6057 [(set (match_operand:SI 0 "s_register_operand" "=r")
6058 (unspec:SI
6059 [(match_operand:SI 1 "s_register_operand" "r")
6060 (match_operand:SI 2 "s_register_operand" "r")
6061 (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
6062 "TARGET_INT_SIMD"
6063 "usada8%?\\t%0, %1, %2, %3"
6064 [(set_attr "predicable" "yes")
6065 (set_attr "type" "alu_dsp_reg")])
6066
6067 (define_insn "arm_<simd32_op>"
6068 [(set (match_operand:DI 0 "s_register_operand" "=r")
6069 (unspec:DI
6070 [(match_operand:SI 1 "s_register_operand" "r")
6071 (match_operand:SI 2 "s_register_operand" "r")
6072 (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
6073 "TARGET_INT_SIMD"
6074 "<simd32_op>%?\\t%Q0, %R0, %1, %2"
6075 [(set_attr "predicable" "yes")
6076 (set_attr "type" "smlald")])
6077
6078 (define_insn "arm_<simd32_op>"
6079 [(set (match_operand:SI 0 "s_register_operand" "=r")
6080 (unspec:SI
6081 [(match_operand:SI 1 "s_register_operand" "r")
6082 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_GE))
6083 (set (reg:CC APSRGE_REGNUM)
6084 (unspec:CC [(reg:CC APSRGE_REGNUM)] UNSPEC_GE_SET))]
6085 "TARGET_INT_SIMD"
6086 "<simd32_op>%?\\t%0, %1, %2"
6087 [(set_attr "predicable" "yes")
6088 (set_attr "type" "alu_sreg")])
6089
6090 (define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
6091 [(set (match_operand:SI 0 "s_register_operand" "=r")
6092 (unspec:SI
6093 [(match_operand:SI 1 "s_register_operand" "r")
6094 (match_operand:SI 2 "s_register_operand" "r")
6095 (match_operand:SI 3 "s_register_operand" "r")] SIMD32_TERNOP_Q))]
6096 "TARGET_INT_SIMD && <add_clobber_q_pred>"
6097 "<simd32_op>%?\\t%0, %1, %2, %3"
6098 [(set_attr "predicable" "yes")
6099 (set_attr "type" "alu_sreg")])
6100
6101 (define_expand "arm_<simd32_op>"
6102 [(set (match_operand:SI 0 "s_register_operand")
6103 (unspec:SI
6104 [(match_operand:SI 1 "s_register_operand")
6105 (match_operand:SI 2 "s_register_operand")
6106 (match_operand:SI 3 "s_register_operand")] SIMD32_TERNOP_Q))]
6107 "TARGET_INT_SIMD"
6108 {
6109 if (ARM_Q_BIT_READ)
6110 emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
6111 operands[2], operands[3]));
6112 else
6113 emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
6114 operands[2], operands[3]));
6115 DONE;
6116 }
6117 )
6118
6119 (define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
6120 [(set (match_operand:SI 0 "s_register_operand" "=r")
6121 (unspec:SI
6122 [(match_operand:SI 1 "s_register_operand" "r")
6123 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_BINOP_Q))]
6124 "TARGET_INT_SIMD && <add_clobber_q_pred>"
6125 "<simd32_op>%?\\t%0, %1, %2"
6126 [(set_attr "predicable" "yes")
6127 (set_attr "type" "alu_sreg")])
6128
6129 (define_expand "arm_<simd32_op>"
6130 [(set (match_operand:SI 0 "s_register_operand")
6131 (unspec:SI
6132 [(match_operand:SI 1 "s_register_operand")
6133 (match_operand:SI 2 "s_register_operand")] SIMD32_BINOP_Q))]
6134 "TARGET_INT_SIMD"
6135 {
6136 if (ARM_Q_BIT_READ)
6137 emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
6138 operands[2]));
6139 else
6140 emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
6141 operands[2]));
6142 DONE;
6143 }
6144 )
6145
6146 (define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
6147 [(set (match_operand:SI 0 "s_register_operand" "=r")
6148 (unspec:SI
6149 [(match_operand:SI 1 "s_register_operand" "r")
6150 (match_operand:SI 2 "<sup>sat16_imm" "i")] USSAT16))]
6151 "TARGET_INT_SIMD && <add_clobber_q_pred>"
6152 "<simd32_op>%?\\t%0, %2, %1"
6153 [(set_attr "predicable" "yes")
6154 (set_attr "type" "alu_sreg")])
6155
6156 (define_expand "arm_<simd32_op>"
6157 [(set (match_operand:SI 0 "s_register_operand")
6158 (unspec:SI
6159 [(match_operand:SI 1 "s_register_operand")
6160 (match_operand:SI 2 "<sup>sat16_imm")] USSAT16))]
6161 "TARGET_INT_SIMD"
6162 {
6163 if (ARM_Q_BIT_READ)
6164 emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
6165 operands[2]));
6166 else
6167 emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
6168 operands[2]));
6169 DONE;
6170 }
6171 )
6172
6173 (define_insn "arm_sel"
6174 [(set (match_operand:SI 0 "s_register_operand" "=r")
6175 (unspec:SI
6176 [(match_operand:SI 1 "s_register_operand" "r")
6177 (match_operand:SI 2 "s_register_operand" "r")
6178 (reg:CC APSRGE_REGNUM)] UNSPEC_SEL))]
6179 "TARGET_INT_SIMD"
6180 "sel%?\\t%0, %1, %2"
6181 [(set_attr "predicable" "yes")
6182 (set_attr "type" "alu_sreg")])
6183
6184 (define_expand "extendsfdf2"
6185 [(set (match_operand:DF 0 "s_register_operand")
6186 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
6187 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6188 ""
6189 )
6190
6191 ;; HFmode -> DFmode conversions where we don't have an instruction for it
6192 ;; must go through SFmode.
6193 ;;
6194 ;; This is always safe for an extend, since each widening step is exact.
6195
6196 (define_expand "extendhfdf2"
6197 [(set (match_operand:DF 0 "s_register_operand")
6198 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
6199 "TARGET_EITHER"
6200 {
6201 /* We don't have a direct instruction for this, so go via SFmode. */
6202 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
6203 {
6204 rtx op1;
6205 op1 = convert_to_mode (SFmode, operands[1], 0);
6206 op1 = convert_to_mode (DFmode, op1, 0);
6207 emit_insn (gen_movdf (operands[0], op1));
6208 DONE;
6209 }
6210 /* Otherwise, we're done producing RTL and will pick up the correct
6211 pattern to do this with one rounding-step in a single instruction. */
6212 }
6213 )
6214 \f
6215 ;; Move insns (including loads and stores)
6216
6217 ;; XXX Just some ideas about movti.
6218 ;; I don't think these are a good idea on the ARM; there just aren't enough
6219 ;; registers.
6220 ;;(define_expand "loadti"
6221 ;; [(set (match_operand:TI 0 "s_register_operand")
6222 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
6223 ;; "" "")
6224
6225 ;;(define_expand "storeti"
6226 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
6227 ;; (match_operand:TI 1 "s_register_operand"))]
6228 ;; "" "")
6229
6230 ;;(define_expand "movti"
6231 ;; [(set (match_operand:TI 0 "general_operand")
6232 ;; (match_operand:TI 1 "general_operand"))]
6233 ;; ""
6234 ;; "
6235 ;;{
6236 ;; rtx insn;
6237 ;;
6238 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
6239 ;; operands[1] = copy_to_reg (operands[1]);
6240 ;; if (MEM_P (operands[0]))
6241 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
6242 ;; else if (MEM_P (operands[1]))
6243 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
6244 ;; else
6245 ;; FAIL;
6246 ;;
6247 ;; emit_insn (insn);
6248 ;; DONE;
6249 ;;}")
6250
6251 ;; Recognize garbage generated above.
6252
6253 ;;(define_insn ""
6254 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
6255 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
6256 ;; ""
6257 ;; "*
6258 ;; {
6259 ;; register mem = (which_alternative < 3);
6260 ;; register const char *template;
6261 ;;
6262 ;; operands[mem] = XEXP (operands[mem], 0);
6263 ;; switch (which_alternative)
6264 ;; {
6265 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
6266 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
6267 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
6268 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
6269 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
6270 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
6271 ;; }
6272 ;; output_asm_insn (template, operands);
6273 ;; return \"\";
6274 ;; }")
6275
6276 (define_expand "movdi"
6277 [(set (match_operand:DI 0 "general_operand")
6278 (match_operand:DI 1 "general_operand"))]
6279 "TARGET_EITHER"
6280 "
6281 gcc_checking_assert (aligned_operand (operands[0], DImode));
6282 gcc_checking_assert (aligned_operand (operands[1], DImode));
6283 if (can_create_pseudo_p ())
6284 {
6285 if (!REG_P (operands[0]))
6286 operands[1] = force_reg (DImode, operands[1]);
6287 }
6288 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
6289 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
6290 {
6291 /* Avoid LDRD's into an odd-numbered register pair in ARM state
6292 when expanding function calls. */
6293 gcc_assert (can_create_pseudo_p ());
6294 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
6295 {
6296 /* Perform load into legal reg pair first, then move. */
6297 rtx reg = gen_reg_rtx (DImode);
6298 emit_insn (gen_movdi (reg, operands[1]));
6299 operands[1] = reg;
6300 }
6301 emit_move_insn (gen_lowpart (SImode, operands[0]),
6302 gen_lowpart (SImode, operands[1]));
6303 emit_move_insn (gen_highpart (SImode, operands[0]),
6304 gen_highpart (SImode, operands[1]));
6305 DONE;
6306 }
6307 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
6308 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
6309 {
6310 /* Avoid STRD's from an odd-numbered register pair in ARM state
6311 when expanding function prologue. */
6312 gcc_assert (can_create_pseudo_p ());
6313 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
6314 ? gen_reg_rtx (DImode)
6315 : operands[0];
6316 emit_move_insn (gen_lowpart (SImode, split_dest),
6317 gen_lowpart (SImode, operands[1]));
6318 emit_move_insn (gen_highpart (SImode, split_dest),
6319 gen_highpart (SImode, operands[1]));
6320 if (split_dest != operands[0])
6321 emit_insn (gen_movdi (operands[0], split_dest));
6322 DONE;
6323 }
6324 "
6325 )
6326
6327 (define_insn "*arm_movdi"
6328 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
6329 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
6330 "TARGET_32BIT
6331 && !(TARGET_HARD_FLOAT)
6332 && !(TARGET_HAVE_MVE || TARGET_HAVE_MVE_FLOAT)
6333 && !TARGET_IWMMXT
6334 && ( register_operand (operands[0], DImode)
6335 || register_operand (operands[1], DImode))"
6336 "*
6337 switch (which_alternative)
6338 {
6339 case 0:
6340 case 1:
6341 case 2:
6342 return \"#\";
6343 case 3:
6344 /* Cannot load it directly, split to load it via MOV / MOVT. */
6345 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6346 return \"#\";
6347 /* Fall through. */
6348 default:
6349 return output_move_double (operands, true, NULL);
6350 }
6351 "
6352 [(set_attr "length" "8,12,16,8,8")
6353 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6354 (set_attr "arm_pool_range" "*,*,*,1020,*")
6355 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6356 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
6357 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6358 )
6359
6360 (define_split
6361 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6362 (match_operand:ANY64 1 "immediate_operand" ""))]
6363 "TARGET_32BIT
6364 && reload_completed
6365 && (arm_disable_literal_pool
6366 || (arm_const_double_inline_cost (operands[1])
6367 <= arm_max_const_double_inline_cost ()))"
6368 [(const_int 0)]
6369 "
6370 arm_split_constant (SET, SImode, curr_insn,
6371 INTVAL (gen_lowpart (SImode, operands[1])),
6372 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
6373 arm_split_constant (SET, SImode, curr_insn,
6374 INTVAL (gen_highpart_mode (SImode,
6375 GET_MODE (operands[0]),
6376 operands[1])),
6377 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
6378 DONE;
6379 "
6380 )
6381
6382 ; If optimizing for size, or if we have load delay slots, then
6383 ; we want to split the constant into two separate operations.
6384 ; In both cases this may split a trivial part into a single data op,
6385 ; leaving a single complex constant to load. We can also get longer
6386 ; offsets in an LDR, which improves the chances of sharing the pool
6387 ; entries. Finally, we can normally do a better job of scheduling
6388 ; LDR instructions than we can with LDM.
6389 ; This pattern will only match if the one above did not.
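;; For instance, with a constant such as 0x0000000112345678 the high word
;; becomes a single "mov rH, #1" data op, leaving only the low word as the
;; one complex SImode constant still to be materialized.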
6390 (define_split
6391 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6392 (match_operand:ANY64 1 "const_double_operand" ""))]
6393 "TARGET_ARM && reload_completed
6394 && arm_const_double_by_parts (operands[1])"
6395 [(set (match_dup 0) (match_dup 1))
6396 (set (match_dup 2) (match_dup 3))]
6397 "
6398 operands[2] = gen_highpart (SImode, operands[0]);
6399 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
6400 operands[1]);
6401 operands[0] = gen_lowpart (SImode, operands[0]);
6402 operands[1] = gen_lowpart (SImode, operands[1]);
6403 "
6404 )
6405
6406 (define_split
6407 [(set (match_operand:ANY64_BF 0 "arm_general_register_operand" "")
6408 (match_operand:ANY64_BF 1 "arm_general_register_operand" ""))]
6409 "TARGET_EITHER && reload_completed"
6410 [(set (match_dup 0) (match_dup 1))
6411 (set (match_dup 2) (match_dup 3))]
6412 "
6413 operands[2] = gen_highpart (SImode, operands[0]);
6414 operands[3] = gen_highpart (SImode, operands[1]);
6415 operands[0] = gen_lowpart (SImode, operands[0]);
6416 operands[1] = gen_lowpart (SImode, operands[1]);
6417
6418 /* Handle a partial overlap. */
6419 if (rtx_equal_p (operands[0], operands[3]))
6420 {
6421 rtx tmp0 = operands[0];
6422 rtx tmp1 = operands[1];
6423
6424 operands[0] = operands[2];
6425 operands[1] = operands[3];
6426 operands[2] = tmp0;
6427 operands[3] = tmp1;
6428 }
6429 "
6430 )
6431
6432 ;; We can't actually do base+index doubleword loads if the index and
6433 ;; destination overlap. Split here so that we at least have a chance to
6434 ;; schedule.
6435 (define_split
6436 [(set (match_operand:DI 0 "s_register_operand" "")
6437 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
6438 (match_operand:SI 2 "s_register_operand" ""))))]
6439 "TARGET_LDRD
6440 && reg_overlap_mentioned_p (operands[0], operands[1])
6441 && reg_overlap_mentioned_p (operands[0], operands[2])"
6442 [(set (match_dup 4)
6443 (plus:SI (match_dup 1)
6444 (match_dup 2)))
6445 (set (match_dup 0)
6446 (mem:DI (match_dup 4)))]
6447 "
6448 operands[4] = gen_rtx_REG (SImode, REGNO (operands[0]));
6449 "
6450 )
6451
6452 (define_expand "movsi"
6453 [(set (match_operand:SI 0 "general_operand")
6454 (match_operand:SI 1 "general_operand"))]
6455 "TARGET_EITHER"
6456 "
6457 {
6458 rtx base, offset, tmp;
6459
6460 gcc_checking_assert (aligned_operand (operands[0], SImode));
6461 gcc_checking_assert (aligned_operand (operands[1], SImode));
6462 if (TARGET_32BIT || TARGET_HAVE_MOVT)
6463 {
6464 /* Everything except mem = const or mem = mem can be done easily. */
6465 if (MEM_P (operands[0]))
6466 operands[1] = force_reg (SImode, operands[1]);
6467 if (arm_general_register_operand (operands[0], SImode)
6468 && CONST_INT_P (operands[1])
6469 && !(const_ok_for_arm (INTVAL (operands[1]))
6470 || const_ok_for_arm (~INTVAL (operands[1]))))
6471 {
6472 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
6473 {
6474 emit_insn (gen_rtx_SET (operands[0], operands[1]));
6475 DONE;
6476 }
6477 else
6478 {
6479 arm_split_constant (SET, SImode, NULL_RTX,
6480 INTVAL (operands[1]), operands[0], NULL_RTX,
6481 optimize && can_create_pseudo_p ());
6482 DONE;
6483 }
6484 }
6485 }
6486 else /* Target doesn't have MOVT... */
6487 {
6488 if (can_create_pseudo_p ())
6489 {
6490 if (!REG_P (operands[0]))
6491 operands[1] = force_reg (SImode, operands[1]);
6492 }
6493 }
6494
6495 split_const (operands[1], &base, &offset);
6496 if (INTVAL (offset) != 0
6497 && targetm.cannot_force_const_mem (SImode, operands[1]))
6498 {
6499 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6500 emit_move_insn (tmp, base);
6501 emit_insn (gen_addsi3 (operands[0], tmp, offset));
6502 DONE;
6503 }
6504
6505 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
6506
6507 /* Recognize the case where operand[1] is a reference to thread-local
6508 data and load its address to a register. Offsets have been split off
6509 already. */
6510 if (arm_tls_referenced_p (operands[1]))
6511 operands[1] = legitimize_tls_address (operands[1], tmp);
6512 else if (flag_pic
6513 && (CONSTANT_P (operands[1])
6514 || symbol_mentioned_p (operands[1])
6515 || label_mentioned_p (operands[1])))
6516 operands[1] =
6517 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
6518 }
6519 "
6520 )
6521
6522 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
6523 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
6524 ;; so this does not matter.
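;; Roughly: (high sym) is emitted as "movw rd, #:lower16:sym", and the
;; lo_sum pattern below supplies the matching "movt rd, #:upper16:sym".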
6525 (define_insn "*arm_movt"
6526 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
6527 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
6528 (match_operand:SI 2 "general_operand" "i,i")))]
6529 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
6530 "@
6531 movt%?\t%0, #:upper16:%c2
6532 movt\t%0, #:upper16:%c2"
6533 [(set_attr "arch" "32,v8mb")
6534 (set_attr "predicable" "yes")
6535 (set_attr "length" "4")
6536 (set_attr "type" "alu_sreg")]
6537 )
6538
6539 (define_insn "*arm_movsi_insn"
6540 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
6541 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
6542 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
6543 && ( register_operand (operands[0], SImode)
6544 || register_operand (operands[1], SImode))"
6545 "@
6546 mov%?\\t%0, %1
6547 mov%?\\t%0, %1
6548 mvn%?\\t%0, #%B1
6549 movw%?\\t%0, %1
6550 ldr%?\\t%0, %1
6551 str%?\\t%1, %0"
6552 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
6553 (set_attr "predicable" "yes")
6554 (set_attr "arch" "*,*,*,v6t2,*,*")
6555 (set_attr "pool_range" "*,*,*,*,4096,*")
6556 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
6557 )
6558
6559 (define_split
6560 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6561 (match_operand:SI 1 "const_int_operand" ""))]
6562 "(TARGET_32BIT || TARGET_HAVE_MOVT)
6563 && (!(const_ok_for_arm (INTVAL (operands[1]))
6564 || const_ok_for_arm (~INTVAL (operands[1]))))"
6565 [(clobber (const_int 0))]
6566 "
6567 arm_split_constant (SET, SImode, NULL_RTX,
6568 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
6569 DONE;
6570 "
6571 )
6572
6573 ;; A normal way to do (symbol + offset) requires at least three instructions
6574 ;; (depending on how big the offset is), as below:
6575 ;; movw r0, #:lower16:g
6576 ;; movt r0, #:upper16:g
6577 ;; adds r0, #4
6578 ;;
6579 ;; A better way would be:
6580 ;; movw r0, #:lower16:g+4
6581 ;; movt r0, #:upper16:g+4
6582 ;;
6583 ;; The limitation of this approach is that the offset must fit in a 16-bit
6584 ;; signed value, because the current assembler only supports REL-type
6585 ;; relocations for this case. If the more powerful RELA type is supported in
6586 ;; the future, we should update this pattern to use the better sequence.
6587 (define_split
6588 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6589 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
6590 (match_operand:SI 2 "const_int_operand" ""))))]
6591 "TARGET_THUMB
6592 && TARGET_HAVE_MOVT
6593 && arm_disable_literal_pool
6594 && reload_completed
6595 && GET_CODE (operands[1]) == SYMBOL_REF"
6596 [(clobber (const_int 0))]
6597 "
6598 int offset = INTVAL (operands[2]);
6599
6600 if (offset < -0x8000 || offset > 0x7fff)
6601 {
6602 arm_emit_movpair (operands[0], operands[1]);
6603 emit_insn (gen_rtx_SET (operands[0],
6604 gen_rtx_PLUS (SImode, operands[0], operands[2])));
6605 }
6606 else
6607 {
6608 rtx op = gen_rtx_CONST (SImode,
6609 gen_rtx_PLUS (SImode, operands[1], operands[2]));
6610 arm_emit_movpair (operands[0], op);
6611 }
6612 "
6613 )
6614
6615 ;; Split symbol_refs at a later stage (after cprop), instead of generating
6616 ;; the movt/movw pair directly at expand time. Otherwise the corresponding
6617 ;; high_sum and lo_sum would be merged back into a memory load by cprop.
6618 ;; When movt/movw is preferred over a load from the constant pool, this
6619 ;; gives better performance.
6620 (define_split
6621 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6622 (match_operand:SI 1 "general_operand" ""))]
6623 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
6624 && !target_word_relocations
6625 && !arm_tls_referenced_p (operands[1])"
6626 [(clobber (const_int 0))]
6627 {
6628 arm_emit_movpair (operands[0], operands[1]);
6629 DONE;
6630 })
6631
6632 ;; When generating PIC, we need to load the symbol offset into a register.
6633 ;; So that the optimizer does not confuse this with a normal symbol load,
6634 ;; we use an unspec. The offset will be loaded from a constant pool entry,
6635 ;; since that is the only type of relocation we can use.
6636
6637 ;; Wrap calculation of the whole PIC address in a single pattern for the
6638 ;; benefit of optimizers, particularly PRE and HOIST. Calculation of
6639 ;; a PIC address involves two loads from memory, so we want to CSE it
6640 ;; as often as possible.
6641 ;; This pattern will be split into one of the pic_load_addr_* patterns
6642 ;; and a move after GCSE optimizations.
6643 ;;
6644 ;; Note: Update arm.cc: legitimize_pic_address() when changing this pattern.
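;; After the split, the sequence is roughly a constant-pool load of the
;; symbol's offset ("ldr rT, .LCPn") followed by "ldr %0, [%1, rT]", which
;; (for a global symbol) reads the GOT entry holding its address.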
6645 (define_expand "calculate_pic_address"
6646 [(set (match_operand:SI 0 "register_operand")
6647 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
6648 (unspec:SI [(match_operand:SI 2 "" "")]
6649 UNSPEC_PIC_SYM))))]
6650 "flag_pic"
6651 )
6652
6653 ;; Split calculate_pic_address into pic_load_addr_* and a move.
6654 (define_split
6655 [(set (match_operand:SI 0 "register_operand" "")
6656 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
6657 (unspec:SI [(match_operand:SI 2 "" "")]
6658 UNSPEC_PIC_SYM))))]
6659 "flag_pic"
6660 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
6661 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
6662 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
6663 )
6664
6665 ;; operand1 is the memory address to go into
6666 ;; pic_load_addr_32bit.
6667 ;; operand2 is the PIC label to be emitted
6668 ;; from pic_add_dot_plus_eight.
6669 ;; We do this to allow hoisting of the entire insn.
6670 (define_insn_and_split "pic_load_addr_unified"
6671 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
6672 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
6673 (match_operand:SI 2 "" "")]
6674 UNSPEC_PIC_UNIFIED))]
6675 "flag_pic"
6676 "#"
6677 "&& reload_completed"
6678 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
6679 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
6680 (match_dup 2)] UNSPEC_PIC_BASE))]
6681 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
6682 [(set_attr "type" "load_4,load_4,load_4")
6683 (set_attr "pool_range" "4096,4094,1022")
6684 (set_attr "neg_pool_range" "4084,0,0")
6685 (set_attr "arch" "a,t2,t1")
6686 (set_attr "length" "8,6,4")]
6687 )
6688
6689 ;; The rather odd constraints on the following are to force reload to leave
6690 ;; the insn alone, and to force the minipool generation pass to then move
6691 ;; the GOT symbol to memory.
6692
6693 (define_insn "pic_load_addr_32bit"
6694 [(set (match_operand:SI 0 "s_register_operand" "=r")
6695 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6696 "TARGET_32BIT && flag_pic"
6697 "ldr%?\\t%0, %1"
6698 [(set_attr "type" "load_4")
6699 (set (attr "pool_range")
6700 (if_then_else (eq_attr "is_thumb" "no")
6701 (const_int 4096)
6702 (const_int 4094)))
6703 (set (attr "neg_pool_range")
6704 (if_then_else (eq_attr "is_thumb" "no")
6705 (const_int 4084)
6706 (const_int 0)))]
6707 )
6708
6709 (define_insn "pic_load_addr_thumb1"
6710 [(set (match_operand:SI 0 "s_register_operand" "=l")
6711 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6712 "TARGET_THUMB1 && flag_pic"
6713 "ldr\\t%0, %1"
6714 [(set_attr "type" "load_4")
6715 (set (attr "pool_range") (const_int 1018))]
6716 )
6717
6718 (define_insn "pic_add_dot_plus_four"
6719 [(set (match_operand:SI 0 "register_operand" "=r")
6720 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
6721 (const_int 4)
6722 (match_operand 2 "" "")]
6723 UNSPEC_PIC_BASE))]
6724 "TARGET_THUMB"
6725 "*
6726 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6727 INTVAL (operands[2]));
6728 return \"add\\t%0, %|pc\";
6729 "
6730 [(set_attr "length" "2")
6731 (set_attr "type" "alu_sreg")]
6732 )
6733
6734 (define_insn "pic_add_dot_plus_eight"
6735 [(set (match_operand:SI 0 "register_operand" "=r")
6736 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6737 (const_int 8)
6738 (match_operand 2 "" "")]
6739 UNSPEC_PIC_BASE))]
6740 "TARGET_ARM"
6741 "*
6742 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6743 INTVAL (operands[2]));
6744 return \"add%?\\t%0, %|pc, %1\";
6745 "
6746 [(set_attr "predicable" "yes")
6747 (set_attr "type" "alu_sreg")]
6748 )
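
;; A rough sketch of the ARM form above: the constants 8 (ARM) and 4 (Thumb)
;; reflect how far ahead of the current instruction the PC reads, so the
;; generated code looks something like
;;	ldr	r3, .LCP0	@ PIC base offset relative to .LPIC0 + 8
;; .LPIC0:
;;	add	r3, pc, r3	@ pc reads as .LPIC0 + 8 here
;; Register numbers and label names are illustrative only.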
6749
6750 (define_insn "tls_load_dot_plus_eight"
6751 [(set (match_operand:SI 0 "register_operand" "=r")
6752 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6753 (const_int 8)
6754 (match_operand 2 "" "")]
6755 UNSPEC_PIC_BASE)))]
6756 "TARGET_ARM"
6757 "*
6758 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6759 INTVAL (operands[2]));
6760 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
6761 "
6762 [(set_attr "predicable" "yes")
6763 (set_attr "type" "load_4")]
6764 )
6765
6766 ;; PIC references to local variables can generate pic_add_dot_plus_eight
6767 ;; followed by a load. These sequences can be crunched down to
6768 ;; tls_load_dot_plus_eight by a peephole.
6769
6770 (define_peephole2
6771 [(set (match_operand:SI 0 "register_operand" "")
6772 (unspec:SI [(match_operand:SI 3 "register_operand" "")
6773 (const_int 8)
6774 (match_operand 1 "" "")]
6775 UNSPEC_PIC_BASE))
6776 (set (match_operand:SI 2 "arm_general_register_operand" "")
6777 (mem:SI (match_dup 0)))]
6778 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
6779 [(set (match_dup 2)
6780 (mem:SI (unspec:SI [(match_dup 3)
6781 (const_int 8)
6782 (match_dup 1)]
6783 UNSPEC_PIC_BASE)))]
6784 ""
6785 )
6786
6787 (define_insn "pic_offset_arm"
6788 [(set (match_operand:SI 0 "register_operand" "=r")
6789 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
6790 (unspec:SI [(match_operand:SI 2 "" "X")]
6791 UNSPEC_PIC_OFFSET))))]
6792 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
6793 "ldr%?\\t%0, [%1,%2]"
6794 [(set_attr "type" "load_4")]
6795 )
6796
6797 (define_expand "builtin_setjmp_receiver"
6798 [(label_ref (match_operand 0 "" ""))]
6799 "flag_pic"
6800 "
6801 {
6802 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
6803 register. */
6804 if (arm_pic_register != INVALID_REGNUM)
6805 arm_load_pic_register (1UL << 3, NULL_RTX);
6806 DONE;
6807 }")
6808
6809 ;; If copying one reg to another we can set the condition codes according to
6810 ;; its value.  Such a move is common after a return from a subroutine when
6811 ;; the result is being tested against zero.
6812
6813 (define_insn "*movsi_compare0"
6814 [(set (reg:CC CC_REGNUM)
6815 (compare:CC (match_operand:SI 1 "s_register_operand" "0,0,l,rk,rk")
6816 (const_int 0)))
6817 (set (match_operand:SI 0 "s_register_operand" "=l,rk,l,r,rk")
6818 (match_dup 1))]
6819 "TARGET_32BIT"
6820 "@
6821 cmp%?\\t%0, #0
6822 cmp%?\\t%0, #0
6823 subs%?\\t%0, %1, #0
6824 subs%?\\t%0, %1, #0
6825 subs%?\\t%0, %1, #0"
6826 [(set_attr "conds" "set")
6827 (set_attr "arch" "t2,*,t2,t2,a")
6828 (set_attr "type" "alus_imm")
6829 (set_attr "length" "2,4,2,4,4")]
6830 )
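
;; For example (purely illustrative), a copy-and-test sequence such as
;;	mov	r0, r1
;;	cmp	r1, #0
;; can be represented by this pattern as the single instruction
;;	subs	r0, r1, #0
;; which copies the value and sets the condition codes at the same time.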
6831
6832 ;; Subroutine to store a half word from a register into memory.
6833 ;; Operand 0 is the source register (HImode)
6834 ;; Operand 1 is the destination address in a register (SImode)
6835
6836 ;; In both this routine and the next, we must be careful not to spill
6837 ;; a memory address of reg+large_const into a separate PLUS insn, since this
6838 ;; can generate unrecognizable rtl.
6839
6840 (define_expand "storehi"
6841 [;; store the low byte
6842 (set (match_operand 1 "" "") (match_dup 3))
6843 ;; extract the high byte
6844 (set (match_dup 2)
6845 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6846 ;; store the high byte
6847 (set (match_dup 4) (match_dup 5))]
6848 "TARGET_ARM"
6849 "
6850 {
6851 rtx op1 = operands[1];
6852 rtx addr = XEXP (op1, 0);
6853 enum rtx_code code = GET_CODE (addr);
6854
6855 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6856 || code == MINUS)
6857 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
6858
6859 operands[4] = adjust_address (op1, QImode, 1);
6860 operands[1] = adjust_address (operands[1], QImode, 0);
6861 operands[3] = gen_lowpart (QImode, operands[0]);
6862 operands[0] = gen_lowpart (SImode, operands[0]);
6863 operands[2] = gen_reg_rtx (SImode);
6864 operands[5] = gen_lowpart (QImode, operands[2]);
6865 }"
6866 )
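
;; A rough little-endian illustration, for a core without strh: storing the
;; halfword in r1 at the address held in r2 becomes two byte stores,
;;	strb	r1, [r2]		@ low byte
;;	mov	r3, r1, asr #8		@ r3 is an arbitrary scratch here
;;	strb	r3, [r2, #1]		@ high byte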
6867
6868 (define_expand "storehi_bigend"
6869 [(set (match_dup 4) (match_dup 3))
6870 (set (match_dup 2)
6871 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6872 (set (match_operand 1 "" "") (match_dup 5))]
6873 "TARGET_ARM"
6874 "
6875 {
6876 rtx op1 = operands[1];
6877 rtx addr = XEXP (op1, 0);
6878 enum rtx_code code = GET_CODE (addr);
6879
6880 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6881 || code == MINUS)
6882 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
6883
6884 operands[4] = adjust_address (op1, QImode, 1);
6885 operands[1] = adjust_address (operands[1], QImode, 0);
6886 operands[3] = gen_lowpart (QImode, operands[0]);
6887 operands[0] = gen_lowpart (SImode, operands[0]);
6888 operands[2] = gen_reg_rtx (SImode);
6889 operands[5] = gen_lowpart (QImode, operands[2]);
6890 }"
6891 )
6892
6893 ;; Subroutine to store a half word integer constant into memory.
6894 (define_expand "storeinthi"
6895 [(set (match_operand 0 "" "")
6896 (match_operand 1 "" ""))
6897 (set (match_dup 3) (match_dup 2))]
6898 "TARGET_ARM"
6899 "
6900 {
6901 HOST_WIDE_INT value = INTVAL (operands[1]);
6902 rtx addr = XEXP (operands[0], 0);
6903 rtx op0 = operands[0];
6904 enum rtx_code code = GET_CODE (addr);
6905
6906 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6907 || code == MINUS)
6908 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
6909
6910 operands[1] = gen_reg_rtx (SImode);
6911 if (BYTES_BIG_ENDIAN)
6912 {
6913 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
6914 if ((value & 255) == ((value >> 8) & 255))
6915 operands[2] = operands[1];
6916 else
6917 {
6918 operands[2] = gen_reg_rtx (SImode);
6919 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
6920 }
6921 }
6922 else
6923 {
6924 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
6925 if ((value & 255) == ((value >> 8) & 255))
6926 operands[2] = operands[1];
6927 else
6928 {
6929 operands[2] = gen_reg_rtx (SImode);
6930 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
6931 }
6932 }
6933
6934 operands[3] = adjust_address (op0, QImode, 1);
6935 operands[0] = adjust_address (operands[0], QImode, 0);
6936 operands[2] = gen_lowpart (QImode, operands[2]);
6937 operands[1] = gen_lowpart (QImode, operands[1]);
6938 }"
6939 )
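
;; A worked little-endian example: storing the constant 0x1234 at the
;; address in r0 becomes, roughly,
;;	mov	r2, #0x34		@ low byte
;;	mov	r3, #0x12		@ high byte
;;	strb	r2, [r0]
;;	strb	r3, [r0, #1]
;; with the second mov dropped when both bytes happen to be equal.
;; Register choice here is illustrative only.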
6940
6941 (define_expand "storehi_single_op"
6942 [(set (match_operand:HI 0 "memory_operand")
6943 (match_operand:HI 1 "general_operand"))]
6944 "TARGET_32BIT && arm_arch4"
6945 "
6946 if (!s_register_operand (operands[1], HImode))
6947 operands[1] = copy_to_mode_reg (HImode, operands[1]);
6948 "
6949 )
6950
6951 (define_expand "movhi"
6952 [(set (match_operand:HI 0 "general_operand")
6953 (match_operand:HI 1 "general_operand"))]
6954 "TARGET_EITHER"
6955 "
6956 gcc_checking_assert (aligned_operand (operands[0], HImode));
6957 gcc_checking_assert (aligned_operand (operands[1], HImode));
6958 if (TARGET_ARM)
6959 {
6960 if (can_create_pseudo_p ())
6961 {
6962 if (MEM_P (operands[0]))
6963 {
6964 if (arm_arch4)
6965 {
6966 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
6967 DONE;
6968 }
6969 if (CONST_INT_P (operands[1]))
6970 emit_insn (gen_storeinthi (operands[0], operands[1]));
6971 else
6972 {
6973 if (MEM_P (operands[1]))
6974 operands[1] = force_reg (HImode, operands[1]);
6975 if (BYTES_BIG_ENDIAN)
6976 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
6977 else
6978 emit_insn (gen_storehi (operands[1], operands[0]));
6979 }
6980 DONE;
6981 }
6982 /* Sign extend a constant, and keep it in an SImode reg. */
6983 else if (CONST_INT_P (operands[1]))
6984 {
6985 rtx reg = gen_reg_rtx (SImode);
6986 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6987
6988 /* If the constant is already valid, leave it alone. */
6989 if (!const_ok_for_arm (val))
6990 {
6991 /* If setting all the top bits will make the constant
6992 loadable in a single instruction, then set them.
6993 Otherwise, sign extend the number. */
6994
6995 if (const_ok_for_arm (~(val | ~0xffff)))
6996 val |= ~0xffff;
6997 else if (val & 0x8000)
6998 val |= ~0xffff;
6999 }
7000
7001 emit_insn (gen_movsi (reg, GEN_INT (val)));
7002 operands[1] = gen_lowpart (HImode, reg);
7003 }
7004 else if (arm_arch4 && optimize && can_create_pseudo_p ()
7005 && MEM_P (operands[1]))
7006 {
7007 rtx reg = gen_reg_rtx (SImode);
7008
7009 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
7010 operands[1] = gen_lowpart (HImode, reg);
7011 }
7012 else if (!arm_arch4)
7013 {
7014 if (MEM_P (operands[1]))
7015 {
7016 rtx base;
7017 rtx offset = const0_rtx;
7018 rtx reg = gen_reg_rtx (SImode);
7019
7020 if ((REG_P (base = XEXP (operands[1], 0))
7021 || (GET_CODE (base) == PLUS
7022 && (CONST_INT_P (offset = XEXP (base, 1)))
7023 && ((INTVAL(offset) & 1) != 1)
7024 && REG_P (base = XEXP (base, 0))))
7025 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
7026 {
7027 rtx new_rtx;
7028
7029 new_rtx = widen_memory_access (operands[1], SImode,
7030 ((INTVAL (offset) & ~3)
7031 - INTVAL (offset)));
7032 emit_insn (gen_movsi (reg, new_rtx));
7033 if (((INTVAL (offset) & 2) != 0)
7034 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
7035 {
7036 rtx reg2 = gen_reg_rtx (SImode);
7037
7038 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
7039 reg = reg2;
7040 }
7041 }
7042 else
7043 emit_insn (gen_movhi_bytes (reg, operands[1]));
7044
7045 operands[1] = gen_lowpart (HImode, reg);
7046 }
7047 }
7048 }
7049 /* Handle loading a large integer during reload. */
7050 else if (CONST_INT_P (operands[1])
7051 && !const_ok_for_arm (INTVAL (operands[1]))
7052 && !const_ok_for_arm (~INTVAL (operands[1])))
7053 {
7054 /* Writing a constant to memory needs a scratch, which should
7055 be handled with SECONDARY_RELOADs. */
7056 gcc_assert (REG_P (operands[0]));
7057
7058 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7059 emit_insn (gen_movsi (operands[0], operands[1]));
7060 DONE;
7061 }
7062 }
7063 else if (TARGET_THUMB2)
7064 {
7065 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
7066 if (can_create_pseudo_p ())
7067 {
7068 if (!REG_P (operands[0]))
7069 operands[1] = force_reg (HImode, operands[1]);
7070 /* Zero extend a constant, and keep it in an SImode reg. */
7071 else if (CONST_INT_P (operands[1]))
7072 {
7073 rtx reg = gen_reg_rtx (SImode);
7074 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
7075
7076 emit_insn (gen_movsi (reg, GEN_INT (val)));
7077 operands[1] = gen_lowpart (HImode, reg);
7078 }
7079 }
7080 }
7081 else /* TARGET_THUMB1 */
7082 {
7083 if (can_create_pseudo_p ())
7084 {
7085 if (CONST_INT_P (operands[1]))
7086 {
7087 rtx reg = gen_reg_rtx (SImode);
7088
7089 emit_insn (gen_movsi (reg, operands[1]));
7090 operands[1] = gen_lowpart (HImode, reg);
7091 }
7092
7093 /* ??? We shouldn't really get invalid addresses here, but this can
7094 happen if we are passed an SP-relative address (never OK for
7095 HImode/QImode) or a virtual-register-relative address (also
7096 rejected as illegitimate for HImode/QImode).  */
7097 /* ??? This should perhaps be fixed elsewhere, for instance, in
7098 fixup_stack_1, by checking for other kinds of invalid addresses,
7099 e.g. a bare reference to a virtual register. This may confuse the
7100 alpha though, which must handle this case differently. */
7101 if (MEM_P (operands[0])
7102 && !memory_address_p (GET_MODE (operands[0]),
7103 XEXP (operands[0], 0)))
7104 operands[0]
7105 = replace_equiv_address (operands[0],
7106 copy_to_reg (XEXP (operands[0], 0)));
7107
7108 if (MEM_P (operands[1])
7109 && !memory_address_p (GET_MODE (operands[1]),
7110 XEXP (operands[1], 0)))
7111 operands[1]
7112 = replace_equiv_address (operands[1],
7113 copy_to_reg (XEXP (operands[1], 0)));
7114
7115 if (MEM_P (operands[1]) && optimize > 0)
7116 {
7117 rtx reg = gen_reg_rtx (SImode);
7118
7119 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
7120 operands[1] = gen_lowpart (HImode, reg);
7121 }
7122
7123 if (MEM_P (operands[0]))
7124 operands[1] = force_reg (HImode, operands[1]);
7125 }
7126 else if (CONST_INT_P (operands[1])
7127 && !satisfies_constraint_I (operands[1]))
7128 {
7129 /* Handle loading a large integer during reload. */
7130
7131 /* Writing a constant to memory needs a scratch, which should
7132 be handled with SECONDARY_RELOADs. */
7133 gcc_assert (REG_P (operands[0]));
7134
7135 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7136 emit_insn (gen_movsi (operands[0], operands[1]));
7137 DONE;
7138 }
7139 }
7140 "
7141 )
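
;; A worked example of the constant handling above: 0xffff is not a valid
;; ARM immediate, but setting all the bits above bit 15 turns the value
;; into -1, which loads in a single instruction:
;;	mvn	r0, #0
;; The HImode lowpart of that register still reads as 0xffff.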
7142
7143 (define_expand "movhi_bytes"
7144 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
7145 (set (match_dup 3)
7146 (zero_extend:SI (match_dup 6)))
7147 (set (match_operand:SI 0 "" "")
7148 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
7149 "TARGET_ARM"
7150 "
7151 {
7152 rtx mem1, mem2;
7153 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
7154
7155 mem1 = change_address (operands[1], QImode, addr);
7156 mem2 = change_address (operands[1], QImode,
7157 plus_constant (Pmode, addr, 1));
7158 operands[0] = gen_lowpart (SImode, operands[0]);
7159 operands[1] = mem1;
7160 operands[2] = gen_reg_rtx (SImode);
7161 operands[3] = gen_reg_rtx (SImode);
7162 operands[6] = mem2;
7163
7164 if (BYTES_BIG_ENDIAN)
7165 {
7166 operands[4] = operands[2];
7167 operands[5] = operands[3];
7168 }
7169 else
7170 {
7171 operands[4] = operands[3];
7172 operands[5] = operands[2];
7173 }
7174 }"
7175 )
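
;; For illustration, on a little-endian pre-ARMv4 core a halfword load via
;; movhi_bytes comes out roughly as
;;	ldrb	r2, [r1]		@ low byte
;;	ldrb	r3, [r1, #1]		@ high byte
;;	orr	r0, r2, r3, lsl #8
;; with the byte at the lower address moving into the shifted position on
;; big-endian targets.  Registers shown are placeholders.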
7176
7177 (define_expand "movhi_bigend"
7178 [(set (match_dup 2)
7179 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
7180 (const_int 16)))
7181 (set (match_dup 3)
7182 (ashiftrt:SI (match_dup 2) (const_int 16)))
7183 (set (match_operand:HI 0 "s_register_operand")
7184 (match_dup 4))]
7185 "TARGET_ARM"
7186 "
7187 operands[2] = gen_reg_rtx (SImode);
7188 operands[3] = gen_reg_rtx (SImode);
7189 operands[4] = gen_lowpart (HImode, operands[3]);
7190 "
7191 )
7192
7193 ;; Pattern to recognize the insns generated by the default case above
7194 (define_insn "*movhi_insn_arch4"
7195 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
7196 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))]
7197 "TARGET_ARM
7198 && arm_arch4 && !TARGET_HARD_FLOAT
7199 && (register_operand (operands[0], HImode)
7200 || register_operand (operands[1], HImode))"
7201 "@
7202 mov%?\\t%0, %1\\t%@ movhi
7203 mvn%?\\t%0, #%B1\\t%@ movhi
7204 movw%?\\t%0, %L1\\t%@ movhi
7205 strh%?\\t%1, %0\\t%@ movhi
7206 ldrh%?\\t%0, %1\\t%@ movhi"
7207 [(set_attr "predicable" "yes")
7208 (set_attr "pool_range" "*,*,*,*,256")
7209 (set_attr "neg_pool_range" "*,*,*,*,244")
7210 (set_attr "arch" "*,*,v6t2,*,*")
7211 (set_attr_alternative "type"
7212 [(if_then_else (match_operand 1 "const_int_operand" "")
7213 (const_string "mov_imm" )
7214 (const_string "mov_reg"))
7215 (const_string "mvn_imm")
7216 (const_string "mov_imm")
7217 (const_string "store_4")
7218 (const_string "load_4")])]
7219 )
7220
7221 (define_insn "*movhi_bytes"
7222 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
7223 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))]
7224 "TARGET_ARM && !TARGET_HARD_FLOAT"
7225 "@
7226 mov%?\\t%0, %1\\t%@ movhi
7227 mov%?\\t%0, %1\\t%@ movhi
7228 mvn%?\\t%0, #%B1\\t%@ movhi"
7229 [(set_attr "predicable" "yes")
7230 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
7231 )
7232
7233 ;; We use a DImode scratch because we may occasionally need an additional
7234 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
7235 ;; to take any notice of the "o" constraint on the reload_memory_operand operand.
7236 ;; The reload_in<m> and reload_out<m> patterns require special constraints
7237 ;; to be correctly handled in default_secondary_reload function.
7238 (define_expand "reload_outhi"
7239 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
7240 (match_operand:HI 1 "s_register_operand" "r")
7241 (match_operand:DI 2 "s_register_operand" "=&l")])]
7242 "TARGET_EITHER"
7243 "if (TARGET_ARM)
7244 arm_reload_out_hi (operands);
7245 else
7246 thumb_reload_out_hi (operands);
7247 DONE;
7248 "
7249 )
7250
7251 (define_expand "reload_inhi"
7252 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
7253 (match_operand:HI 1 "arm_reload_memory_operand" "o")
7254 (match_operand:DI 2 "s_register_operand" "=&r")])]
7255 "TARGET_EITHER"
7256 "
7257 if (TARGET_ARM)
7258 arm_reload_in_hi (operands);
7259 else
7260 thumb_reload_out_hi (operands);
7261 DONE;
7262 ")
7263
7264 (define_expand "movqi"
7265 [(set (match_operand:QI 0 "general_operand")
7266 (match_operand:QI 1 "general_operand"))]
7267 "TARGET_EITHER"
7268 "
7269 /* Everything except mem = const or mem = mem can be done easily.  */
7270
7271 if (can_create_pseudo_p ())
7272 {
7273 if (CONST_INT_P (operands[1]))
7274 {
7275 rtx reg = gen_reg_rtx (SImode);
7276
7277 /* For thumb we want an unsigned immediate, then we are more likely
7278 to be able to use a movs insn. */
7279 if (TARGET_THUMB)
7280 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
7281
7282 emit_insn (gen_movsi (reg, operands[1]));
7283 operands[1] = gen_lowpart (QImode, reg);
7284 }
7285
7286 if (TARGET_THUMB)
7287 {
7288 /* ??? We shouldn't really get invalid addresses here, but this can
7289 happen if we are passed an SP-relative address (never OK for
7290 HImode/QImode) or a virtual-register-relative address (also
7291 rejected as illegitimate for HImode/QImode).  */
7292 /* ??? This should perhaps be fixed elsewhere, for instance, in
7293 fixup_stack_1, by checking for other kinds of invalid addresses,
7294 e.g. a bare reference to a virtual register. This may confuse the
7295 alpha though, which must handle this case differently. */
7296 if (MEM_P (operands[0])
7297 && !memory_address_p (GET_MODE (operands[0]),
7298 XEXP (operands[0], 0)))
7299 operands[0]
7300 = replace_equiv_address (operands[0],
7301 copy_to_reg (XEXP (operands[0], 0)));
7302 if (MEM_P (operands[1])
7303 && !memory_address_p (GET_MODE (operands[1]),
7304 XEXP (operands[1], 0)))
7305 operands[1]
7306 = replace_equiv_address (operands[1],
7307 copy_to_reg (XEXP (operands[1], 0)));
7308 }
7309
7310 if (MEM_P (operands[1]) && optimize > 0)
7311 {
7312 rtx reg = gen_reg_rtx (SImode);
7313
7314 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
7315 operands[1] = gen_lowpart (QImode, reg);
7316 }
7317
7318 if (MEM_P (operands[0]))
7319 operands[1] = force_reg (QImode, operands[1]);
7320 }
7321 else if (TARGET_THUMB
7322 && CONST_INT_P (operands[1])
7323 && !satisfies_constraint_I (operands[1]))
7324 {
7325 /* Handle loading a large integer during reload. */
7326
7327 /* Writing a constant to memory needs a scratch, which should
7328 be handled with SECONDARY_RELOADs. */
7329 gcc_assert (REG_P (operands[0]));
7330
7331 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7332 emit_insn (gen_movsi (operands[0], operands[1]));
7333 DONE;
7334 }
7335 "
7336 )
7337
7338 (define_insn "*arm_movqi_insn"
7339 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
7340 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
7341 "TARGET_32BIT
7342 && ( register_operand (operands[0], QImode)
7343 || register_operand (operands[1], QImode))"
7344 "@
7345 mov%?\\t%0, %1
7346 mov%?\\t%0, %1
7347 mov%?\\t%0, %1
7348 mov%?\\t%0, %1
7349 mvn%?\\t%0, #%B1
7350 ldrb%?\\t%0, %1
7351 strb%?\\t%1, %0
7352 ldrb%?\\t%0, %1
7353 strb%?\\t%1, %0"
7354 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
7355 (set_attr "predicable" "yes")
7356 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
7357 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
7358 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
7359 )
7360
7361 ;; HFmode and BFmode moves.
7362 (define_expand "mov<mode>"
7363 [(set (match_operand:HFBF 0 "general_operand")
7364 (match_operand:HFBF 1 "general_operand"))]
7365 "TARGET_EITHER"
7366 "
7367 gcc_checking_assert (aligned_operand (operands[0], <MODE>mode));
7368 gcc_checking_assert (aligned_operand (operands[1], <MODE>mode));
7369 if (TARGET_32BIT)
7370 {
7371 if (MEM_P (operands[0]))
7372 operands[1] = force_reg (<MODE>mode, operands[1]);
7373 }
7374 else /* TARGET_THUMB1 */
7375 {
7376 if (can_create_pseudo_p ())
7377 {
7378 if (!REG_P (operands[0]))
7379 operands[1] = force_reg (<MODE>mode, operands[1]);
7380 }
7381 }
7382 "
7383 )
7384
7385 (define_insn "*arm32_mov<mode>"
7386 [(set (match_operand:HFBF 0 "nonimmediate_operand" "=r,m,r,r")
7387 (match_operand:HFBF 1 "general_operand" " m,r,r,F"))]
7388 "TARGET_32BIT
7389 && !TARGET_HARD_FLOAT
7390 && !TARGET_HAVE_MVE
7391 && ( s_register_operand (operands[0], <MODE>mode)
7392 || s_register_operand (operands[1], <MODE>mode))"
7393 "*
7394 switch (which_alternative)
7395 {
7396 case 0: /* ARM register from memory */
7397 return \"ldrh%?\\t%0, %1\\t%@ __<fporbf>\";
7398 case 1: /* memory from ARM register */
7399 return \"strh%?\\t%1, %0\\t%@ __<fporbf>\";
7400 case 2: /* ARM register from ARM register */
7401 return \"mov%?\\t%0, %1\\t%@ __<fporbf>\";
7402 case 3: /* ARM register from constant */
7403 {
7404 long bits;
7405 rtx ops[4];
7406
7407 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
7408 <MODE>mode);
7409 ops[0] = operands[0];
7410 ops[1] = GEN_INT (bits);
7411 ops[2] = GEN_INT (bits & 0xff00);
7412 ops[3] = GEN_INT (bits & 0x00ff);
7413
7414 if (arm_arch_thumb2)
7415 output_asm_insn (\"movw%?\\t%0, %1\", ops);
7416 else
7417 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
7418 return \"\";
7419 }
7420 default:
7421 gcc_unreachable ();
7422 }
7423 "
7424 [(set_attr "conds" "unconditional")
7425 (set_attr "type" "load_4,store_4,mov_reg,multiple")
7426 (set_attr "length" "4,4,4,8")
7427 (set_attr "predicable" "yes")]
7428 )
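
;; An illustrative example of alternative 3 above: moving the HFmode
;; constant 1.0 (bit pattern 0x3c00) into a core register gives
;;	movw	r0, #0x3c00
;; when movw is available, and otherwise the two-instruction form
;;	mov	r0, #0x3c00
;;	orr	r0, r0, #0
;; where the orr is trivial only because this particular constant has a
;; zero low byte.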
7429
7430 (define_expand "movsf"
7431 [(set (match_operand:SF 0 "general_operand")
7432 (match_operand:SF 1 "general_operand"))]
7433 "TARGET_EITHER"
7434 "
7435 gcc_checking_assert (aligned_operand (operands[0], SFmode));
7436 gcc_checking_assert (aligned_operand (operands[1], SFmode));
7437 if (TARGET_32BIT)
7438 {
7439 if (MEM_P (operands[0]))
7440 operands[1] = force_reg (SFmode, operands[1]);
7441 }
7442 else /* TARGET_THUMB1 */
7443 {
7444 if (can_create_pseudo_p ())
7445 {
7446 if (!REG_P (operands[0]))
7447 operands[1] = force_reg (SFmode, operands[1]);
7448 }
7449 }
7450
7451 /* Cannot load it directly, generate a load with a clobber so that it can
7452 be loaded via a GPR with MOV / MOVT.  */
7453 if (arm_disable_literal_pool
7454 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
7455 && CONST_DOUBLE_P (operands[1])
7456 && TARGET_VFP_BASE
7457 && !vfp3_const_double_rtx (operands[1]))
7458 {
7459 rtx clobreg = gen_reg_rtx (SFmode);
7460 emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
7461 clobreg));
7462 DONE;
7463 }
7464 "
7465 )
7466
7467 ;; Transform a floating-point move of a constant into a core register into
7468 ;; an SImode operation.
7469 (define_split
7470 [(set (match_operand:SF 0 "arm_general_register_operand" "")
7471 (match_operand:SF 1 "immediate_operand" ""))]
7472 "TARGET_EITHER
7473 && reload_completed
7474 && CONST_DOUBLE_P (operands[1])"
7475 [(set (match_dup 2) (match_dup 3))]
7476 "
7477 operands[2] = gen_lowpart (SImode, operands[0]);
7478 operands[3] = gen_lowpart (SImode, operands[1]);
7479 if (operands[2] == 0 || operands[3] == 0)
7480 FAIL;
7481 "
7482 )
7483
7484 (define_insn "*arm_movsf_soft_insn"
7485 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
7486 (match_operand:SF 1 "general_operand" "r,mE,r"))]
7487 "TARGET_32BIT
7488 && TARGET_SOFT_FLOAT && !TARGET_HAVE_MVE
7489 && (!MEM_P (operands[0])
7490 || register_operand (operands[1], SFmode))"
7491 {
7492 switch (which_alternative)
7493 {
7494 case 0: return \"mov%?\\t%0, %1\";
7495 case 1:
7496 /* Cannot load it directly, split to load it via MOV / MOVT. */
7497 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
7498 return \"#\";
7499 return \"ldr%?\\t%0, %1\\t%@ float\";
7500 case 2: return \"str%?\\t%1, %0\\t%@ float\";
7501 default: gcc_unreachable ();
7502 }
7503 }
7504 [(set_attr "predicable" "yes")
7505 (set_attr "type" "mov_reg,load_4,store_4")
7506 (set_attr "arm_pool_range" "*,4096,*")
7507 (set_attr "thumb2_pool_range" "*,4094,*")
7508 (set_attr "arm_neg_pool_range" "*,4084,*")
7509 (set_attr "thumb2_neg_pool_range" "*,0,*")]
7510 )
7511
7512 ;; Splitter for the above.
7513 (define_split
7514 [(set (match_operand:SF 0 "s_register_operand")
7515 (match_operand:SF 1 "const_double_operand"))]
7516 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
7517 [(const_int 0)]
7518 {
7519 long buf;
7520 real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
7521 rtx cst = gen_int_mode (buf, SImode);
7522 emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
7523 DONE;
7524 }
7525 )
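
;; For example, with the literal pool disabled the SFmode constant 1.0f
;; (IEEE single, bit pattern 0x3f800000) is moved as a plain SImode
;; immediate, e.g.
;;	mov	r0, #0x3f800000
;; since that particular bit pattern happens to be a valid ARM immediate;
;; other constants go through whatever movsi would normally generate.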
7526
7527 (define_expand "movdf"
7528 [(set (match_operand:DF 0 "general_operand")
7529 (match_operand:DF 1 "general_operand"))]
7530 "TARGET_EITHER"
7531 "
7532 gcc_checking_assert (aligned_operand (operands[0], DFmode));
7533 gcc_checking_assert (aligned_operand (operands[1], DFmode));
7534 if (TARGET_32BIT)
7535 {
7536 if (MEM_P (operands[0]))
7537 operands[1] = force_reg (DFmode, operands[1]);
7538 }
7539 else /* TARGET_THUMB */
7540 {
7541 if (can_create_pseudo_p ())
7542 {
7543 if (!REG_P (operands[0]))
7544 operands[1] = force_reg (DFmode, operands[1]);
7545 }
7546 }
7547
7548 /* Cannot load it directly, generate a load with a clobber so that it can
7549 be loaded via a GPR with MOV / MOVT.  */
7550 if (arm_disable_literal_pool
7551 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
7552 && CONSTANT_P (operands[1])
7553 && TARGET_VFP_BASE
7554 && !arm_const_double_rtx (operands[1])
7555 && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
7556 {
7557 rtx clobreg = gen_reg_rtx (DFmode);
7558 emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
7559 clobreg));
7560 DONE;
7561 }
7562 "
7563 )
7564
7565 ;; Reloading a df mode value stored in integer regs to memory can require a
7566 ;; scratch reg.
7567 ;; Another reload_out<m> pattern that requires special constraints.
7568 (define_expand "reload_outdf"
7569 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
7570 (match_operand:DF 1 "s_register_operand" "r")
7571 (match_operand:SI 2 "s_register_operand" "=&r")]
7572 "TARGET_THUMB2"
7573 "
7574 {
7575 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
7576
7577 if (code == REG)
7578 operands[2] = XEXP (operands[0], 0);
7579 else if (code == POST_INC || code == PRE_DEC)
7580 {
7581 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
7582 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
7583 emit_insn (gen_movdi (operands[0], operands[1]));
7584 DONE;
7585 }
7586 else if (code == PRE_INC)
7587 {
7588 rtx reg = XEXP (XEXP (operands[0], 0), 0);
7589
7590 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
7591 operands[2] = reg;
7592 }
7593 else if (code == POST_DEC)
7594 operands[2] = XEXP (XEXP (operands[0], 0), 0);
7595 else
7596 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
7597 XEXP (XEXP (operands[0], 0), 1)));
7598
7599 emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
7600 operands[1]));
7601
7602 if (code == POST_DEC)
7603 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
7604
7605 DONE;
7606 }"
7607 )
7608
7609 (define_insn "*movdf_soft_insn"
7610 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
7611 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
7612 "TARGET_32BIT && TARGET_SOFT_FLOAT && !TARGET_HAVE_MVE
7613 && ( register_operand (operands[0], DFmode)
7614 || register_operand (operands[1], DFmode))"
7615 "*
7616 switch (which_alternative)
7617 {
7618 case 0:
7619 case 1:
7620 case 2:
7621 return \"#\";
7622 case 3:
7623 /* Cannot load it directly, split to load it via MOV / MOVT. */
7624 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
7625 return \"#\";
7626 /* Fall through. */
7627 default:
7628 return output_move_double (operands, true, NULL);
7629 }
7630 "
7631 [(set_attr "length" "8,12,16,8,8")
7632 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
7633 (set_attr "arm_pool_range" "*,*,*,1020,*")
7634 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
7635 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
7636 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
7637 )
7638
7639 ;; Splitter for the above.
7640 (define_split
7641 [(set (match_operand:DF 0 "s_register_operand")
7642 (match_operand:DF 1 "const_double_operand"))]
7643 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
7644 [(const_int 0)]
7645 {
7646 long buf[2];
7647 int order = BYTES_BIG_ENDIAN ? 1 : 0;
7648 real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
7649 unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
7650 ival |= (zext_hwi (buf[1 - order], 32) << 32);
7651 rtx cst = gen_int_mode (ival, DImode);
7652 emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
7653 DONE;
7654 }
7655 )
7656 \f
7657
7658 ;; load- and store-multiple insns
7659 ;; The arm can load/store any set of registers, provided that they are in
7660 ;; ascending order, but these expanders assume a contiguous set.
7661
7662 (define_expand "load_multiple"
7663 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7664 (match_operand:SI 1 "" ""))
7665 (use (match_operand:SI 2 "" ""))])]
7666 "TARGET_32BIT"
7667 {
7668 HOST_WIDE_INT offset = 0;
7669
7670 /* Support only fixed-point (core) registers.  */
7671 if (!CONST_INT_P (operands[2])
7672 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
7673 || INTVAL (operands[2]) < 2
7674 || !MEM_P (operands[1])
7675 || !REG_P (operands[0])
7676 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
7677 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7678 FAIL;
7679
7680 operands[3]
7681 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
7682 INTVAL (operands[2]),
7683 force_reg (SImode, XEXP (operands[1], 0)),
7684 FALSE, operands[1], &offset);
7685 })
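
;; For instance (register choice illustrative), a load_multiple of three
;; consecutive registers starting at r4, from the address held in r0,
;; ends up as something like
;;	ldm	r0, {r4, r5, r6}
;; The expander only accepts a contiguous, ascending block of core registers.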
7686
7687 (define_expand "store_multiple"
7688 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7689 (match_operand:SI 1 "" ""))
7690 (use (match_operand:SI 2 "" ""))])]
7691 "TARGET_32BIT"
7692 {
7693 HOST_WIDE_INT offset = 0;
7694
7695 /* Support only fixed-point (core) registers.  */
7696 if (!CONST_INT_P (operands[2])
7697 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
7698 || INTVAL (operands[2]) < 2
7699 || !REG_P (operands[1])
7700 || !MEM_P (operands[0])
7701 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
7702 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7703 FAIL;
7704
7705 operands[3]
7706 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
7707 INTVAL (operands[2]),
7708 force_reg (SImode, XEXP (operands[0], 0)),
7709 FALSE, operands[0], &offset);
7710 })
7711
7712
7713 (define_expand "setmemsi"
7714 [(match_operand:BLK 0 "general_operand")
7715 (match_operand:SI 1 "const_int_operand")
7716 (match_operand:SI 2 "const_int_operand")
7717 (match_operand:SI 3 "const_int_operand")]
7718 "TARGET_32BIT"
7719 {
7720 if (arm_gen_setmem (operands))
7721 DONE;
7722
7723 FAIL;
7724 })
7725
7726
7727 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
7728 ;; We could let this apply to smaller blocks as well, but it clobbers so
7729 ;; many registers that there is probably a better way in that case.
7730
7731 (define_expand "cpymemqi"
7732 [(match_operand:BLK 0 "general_operand")
7733 (match_operand:BLK 1 "general_operand")
7734 (match_operand:SI 2 "const_int_operand")
7735 (match_operand:SI 3 "const_int_operand")]
7736 ""
7737 "
7738 if (TARGET_32BIT)
7739 {
7740 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
7741 && !optimize_function_for_size_p (cfun))
7742 {
7743 if (gen_cpymem_ldrd_strd (operands))
7744 DONE;
7745 FAIL;
7746 }
7747
7748 if (arm_gen_cpymemqi (operands))
7749 DONE;
7750 FAIL;
7751 }
7752 else /* TARGET_THUMB1 */
7753 {
7754 if ( INTVAL (operands[3]) != 4
7755 || INTVAL (operands[2]) > 48)
7756 FAIL;
7757
7758 thumb_expand_cpymemqi (operands);
7759 DONE;
7760 }
7761 "
7762 )
7763 \f
7764
7765 ;; Compare & branch insns
7766 ;; The range calculations are as follows:
7767 ;; For forward branches, the address calculation returns the address of
7768 ;; the next instruction. This is 2 beyond the branch instruction.
7769 ;; For backward branches, the address calculation returns the address of
7770 ;; the first instruction in this pattern (cmp). This is 2 before the branch
7771 ;; instruction for the shortest sequence, and 4 before the branch instruction
7772 ;; if we have to jump around an unconditional branch.
7773 ;; To the basic branch range the PC offset must be added (this is +4).
7774 ;; So for forward branches we have
7775 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
7776 ;; And for backward branches we have
7777 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
7778 ;;
7779 ;; In 16-bit Thumb these ranges are:
7780 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040 -> 2048).
7781 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 -> 256).
7782
7783 ;; In 32-bit Thumb these ranges are:
7784 ;; For a 'b' +/- 16MB is not checked for.
7785 ;; For a 'b<cond>' pos_range = 1048574, neg_range = -1048576 giving
7786 ;; (-1048568 -> 1048576).
7787
7788 (define_expand "cbranchsi4"
7789 [(set (pc) (if_then_else
7790 (match_operator 0 "expandable_comparison_operator"
7791 [(match_operand:SI 1 "s_register_operand")
7792 (match_operand:SI 2 "nonmemory_operand")])
7793 (label_ref (match_operand 3 "" ""))
7794 (pc)))]
7795 "TARGET_EITHER"
7796 "
7797 if (!TARGET_THUMB1)
7798 {
7799 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7800 FAIL;
7801 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7802 operands[3]));
7803 DONE;
7804 }
7805 if (thumb1_cmpneg_operand (operands[2], SImode))
7806 {
7807 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
7808 operands[3], operands[0]));
7809 DONE;
7810 }
7811 if (!thumb1_cmp_operand (operands[2], SImode))
7812 operands[2] = force_reg (SImode, operands[2]);
7813 ")
7814
7815 (define_expand "cbranchsf4"
7816 [(set (pc) (if_then_else
7817 (match_operator 0 "expandable_comparison_operator"
7818 [(match_operand:SF 1 "s_register_operand")
7819 (match_operand:SF 2 "vfp_compare_operand")])
7820 (label_ref (match_operand 3 "" ""))
7821 (pc)))]
7822 "TARGET_32BIT && TARGET_HARD_FLOAT"
7823 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7824 operands[3])); DONE;"
7825 )
7826
7827 (define_expand "cbranchdf4"
7828 [(set (pc) (if_then_else
7829 (match_operator 0 "expandable_comparison_operator"
7830 [(match_operand:DF 1 "s_register_operand")
7831 (match_operand:DF 2 "vfp_compare_operand")])
7832 (label_ref (match_operand 3 "" ""))
7833 (pc)))]
7834 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7835 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7836 operands[3])); DONE;"
7837 )
7838
7839 (define_expand "cbranchdi4"
7840 [(set (pc) (if_then_else
7841 (match_operator 0 "expandable_comparison_operator"
7842 [(match_operand:DI 1 "s_register_operand")
7843 (match_operand:DI 2 "reg_or_int_operand")])
7844 (label_ref (match_operand 3 "" ""))
7845 (pc)))]
7846 "TARGET_32BIT"
7847 "{
7848 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7849 FAIL;
7850 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7851 operands[3]));
7852 DONE;
7853 }"
7854 )
7855
7856 ;; Comparison and test insns
7857
7858 (define_insn "*arm_cmpsi_insn"
7859 [(set (reg:CC CC_REGNUM)
7860 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
7861 (match_operand:SI 1 "arm_add_operand" "Py,r,r,I,L")))]
7862 "TARGET_32BIT"
7863 "@
7864 cmp%?\\t%0, %1
7865 cmp%?\\t%0, %1
7866 cmp%?\\t%0, %1
7867 cmp%?\\t%0, %1
7868 cmn%?\\t%0, #%n1"
7869 [(set_attr "conds" "set")
7870 (set_attr "arch" "t2,t2,any,any,any")
7871 (set_attr "length" "2,2,4,4,4")
7872 (set_attr "predicable" "yes")
7873 (set_attr "predicable_short_it" "yes,yes,yes,no,no")
7874 (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
7875 )
7876
7877 (define_insn "*cmpsi_shiftsi"
7878 [(set (reg:CC CC_REGNUM)
7879 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7880 (match_operator:SI 3 "shift_operator"
7881 [(match_operand:SI 1 "s_register_operand" "r,r")
7882 (match_operand:SI 2 "shift_amount_operand" "M,r")])))]
7883 "TARGET_32BIT"
7884 "cmp\\t%0, %1%S3"
7885 [(set_attr "conds" "set")
7886 (set_attr "shift" "1")
7887 (set_attr "arch" "32,a")
7888 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
7889
7890 (define_insn "*cmpsi_shiftsi_swp"
7891 [(set (reg:CC_SWP CC_REGNUM)
7892 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7893 [(match_operand:SI 1 "s_register_operand" "r,r")
7894 (match_operand:SI 2 "shift_amount_operand" "M,r")])
7895 (match_operand:SI 0 "s_register_operand" "r,r")))]
7896 "TARGET_32BIT"
7897 "cmp%?\\t%0, %1%S3"
7898 [(set_attr "conds" "set")
7899 (set_attr "shift" "1")
7900 (set_attr "arch" "32,a")
7901 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
7902
7903 (define_insn "*arm_cmpsi_negshiftsi_si"
7904 [(set (reg:CC_Z CC_REGNUM)
7905 (compare:CC_Z
7906 (neg:SI (match_operator:SI 1 "shift_operator"
7907 [(match_operand:SI 2 "s_register_operand" "r,r")
7908 (match_operand:SI 3 "shift_amount_operand" "M,r")]))
7909 (match_operand:SI 0 "s_register_operand" "r,r")))]
7910 "TARGET_32BIT"
7911 "cmn%?\\t%0, %2%S1"
7912 [(set_attr "conds" "set")
7913 (set_attr "arch" "32,a")
7914 (set_attr "shift" "2")
7915 (set_attr "type" "alus_shift_imm,alus_shift_reg")
7916 (set_attr "predicable" "yes")]
7917 )
7918
7919 ; This insn allows redundant compares to be removed by cse; nothing should
7920 ; ever appear in the output file, since (set (reg x) (reg x)) is a no-op
7921 ; that is deleted later on.  The match_dup will match the mode here, so
7922 ; that mode changes of the condition codes aren't lost by this even though
7923 ; we don't specify what they are.
7924
7925 (define_insn "*deleted_compare"
7926 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7927 "TARGET_32BIT"
7928 "\\t%@ deleted compare"
7929 [(set_attr "conds" "set")
7930 (set_attr "length" "0")
7931 (set_attr "type" "no_insn")]
7932 )
7933
7934 \f
7935 ;; Conditional branch insns
7936
7937 (define_expand "cbranch_cc"
7938 [(set (pc)
7939 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7940 (match_operand 2 "" "")])
7941 (label_ref (match_operand 3 "" ""))
7942 (pc)))]
7943 "TARGET_32BIT"
7944 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7945 operands[1], operands[2], NULL_RTX);
7946 operands[2] = const0_rtx;"
7947 )
7948
7949 ;;
7950 ;; Patterns to match conditional branch insns.
7951 ;;
7952
7953 (define_insn "arm_cond_branch"
7954 [(set (pc)
7955 (if_then_else (match_operator 1 "arm_comparison_operator"
7956 [(match_operand 2 "cc_register" "") (const_int 0)])
7957 (label_ref (match_operand 0 "" ""))
7958 (pc)))]
7959 "TARGET_32BIT"
7960 {
7961 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7962 {
7963 arm_ccfsm_state += 2;
7964 return "";
7965 }
7966 switch (get_attr_length (insn))
7967 {
7968 case 2: /* Thumb2 16-bit b{cond}. */
7969 case 4: /* Thumb2 32-bit b{cond} or A32 b{cond}. */
7970 return "b%d1\t%l0";
7971 break;
7972
7973 /* Thumb2 b{cond} out of range. Use 16-bit b{cond} and
7974 unconditional branch b. */
7975 default: return arm_gen_far_branch (operands, 0, "Lbcond", "b%D1\t");
7976 }
7977 }
7978 [(set_attr "conds" "use")
7979 (set_attr "type" "branch")
7980 (set (attr "length")
7981 (if_then_else (match_test "!TARGET_THUMB2")
7982
7983 ;; Target is not Thumb2, therefore it is A32.  Generate b{cond}.
7984 (const_int 4)
7985
7986 ;; Check if target is within 16-bit Thumb2 b{cond} range.
7987 (if_then_else (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7988 (le (minus (match_dup 0) (pc)) (const_int 256)))
7989
7990 ;; Target is Thumb2, within narrow range.
7991 ;; Generate b{cond}.
7992 (const_int 2)
7993
7994 ;; Check if target is within 32-bit Thumb2 b{cond} range.
7995 (if_then_else (and (ge (minus (match_dup 0) (pc))(const_int -1048568))
7996 (le (minus (match_dup 0) (pc)) (const_int 1048576)))
7997
7998 ;; Target is Thumb2, within wide range.
7999 ;; Generate b{cond}
8000 (const_int 4)
8001 ;; Target is Thumb2, out of range.
8002 ;; Generate narrow b{cond} and unconditional branch b.
8003 (const_int 6)))))]
8004 )
8005
8006 (define_insn "*arm_cond_branch_reversed"
8007 [(set (pc)
8008 (if_then_else (match_operator 1 "arm_comparison_operator"
8009 [(match_operand 2 "cc_register" "") (const_int 0)])
8010 (pc)
8011 (label_ref (match_operand 0 "" ""))))]
8012 "TARGET_32BIT"
8013 {
8014 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8015 {
8016 arm_ccfsm_state += 2;
8017 return "";
8018 }
8019 switch (get_attr_length (insn))
8020 {
8021 case 2: /* Thumb2 16-bit b{cond}. */
8022 case 4: /* Thumb2 32-bit b{cond} or A32 b{cond}. */
8023 return "b%D1\t%l0";
8024 break;
8025
8026 /* Thumb2 b{cond} out of range. Use 16-bit b{cond} and
8027 unconditional branch b. */
8028 default: return arm_gen_far_branch (operands, 0, "Lbcond", "b%d1\t");
8029 }
8030 }
8031 [(set_attr "conds" "use")
8032 (set_attr "type" "branch")
8033 (set (attr "length")
8034 (if_then_else (match_test "!TARGET_THUMB2")
8035
8036 ;; Target is not Thumb2, therefore it is A32.  Generate b{cond}.
8037 (const_int 4)
8038
8039 ;; Check if target is within 16-bit Thumb2 b{cond} range.
8040 (if_then_else (and (ge (minus (match_dup 0) (pc)) (const_int -250))
8041 (le (minus (match_dup 0) (pc)) (const_int 256)))
8042
8043 ;; Target is Thumb2, within narrow range.
8044 ;; Generate b{cond}.
8045 (const_int 2)
8046
8047 ;; Check if target is within 32-bit Thumb2 b{cond} range.
8048 (if_then_else (and (ge (minus (match_dup 0) (pc))(const_int -1048568))
8049 (le (minus (match_dup 0) (pc)) (const_int 1048576)))
8050
8051 ;; Target is Thumb2, within wide range.
8052 ;; Generate b{cond}.
8053 (const_int 4)
8054 ;; Target is Thumb2, out of range.
8055 ;; Generate narrow b{cond} and unconditional branch b.
8056 (const_int 6)))))]
8057 )
8058
8059 \f
8060
8061 ; scc insns
8062
8063 (define_expand "cstore_cc"
8064 [(set (match_operand:SI 0 "s_register_operand")
8065 (match_operator:SI 1 "" [(match_operand 2 "" "")
8066 (match_operand 3 "" "")]))]
8067 "TARGET_32BIT"
8068 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
8069 operands[2], operands[3], NULL_RTX);
8070 operands[3] = const0_rtx;"
8071 )
8072
8073 (define_insn_and_split "*mov_scc"
8074 [(set (match_operand:SI 0 "s_register_operand" "=r")
8075 (match_operator:SI 1 "arm_comparison_operator_mode"
8076 [(match_operand 2 "cc_register" "") (const_int 0)]))]
8077 "TARGET_ARM"
8078 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
8079 "TARGET_ARM"
8080 [(set (match_dup 0)
8081 (if_then_else:SI (match_dup 1)
8082 (const_int 1)
8083 (const_int 0)))]
8084 ""
8085 [(set_attr "conds" "use")
8086 (set_attr "length" "8")
8087 (set_attr "type" "multiple")]
8088 )
8089
8090 (define_insn "*negscc_borrow"
8091 [(set (match_operand:SI 0 "s_register_operand" "=r")
8092 (neg:SI (match_operand:SI 1 "arm_borrow_operation" "")))]
8093 "TARGET_32BIT"
8094 "sbc\\t%0, %0, %0"
8095 [(set_attr "conds" "use")
8096 (set_attr "length" "4")
8097 (set_attr "type" "adc_reg")]
8098 )
8099
8100 (define_insn_and_split "*mov_negscc"
8101 [(set (match_operand:SI 0 "s_register_operand" "=r")
8102 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
8103 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8104 "TARGET_ARM && !arm_borrow_operation (operands[1], SImode)"
8105 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
8106 "&& true"
8107 [(set (match_dup 0)
8108 (if_then_else:SI (match_dup 1)
8109 (match_dup 3)
8110 (const_int 0)))]
8111 {
8112 operands[3] = GEN_INT (~0);
8113 }
8114 [(set_attr "conds" "use")
8115 (set_attr "length" "8")
8116 (set_attr "type" "multiple")]
8117 )
8118
8119 (define_insn_and_split "*mov_notscc"
8120 [(set (match_operand:SI 0 "s_register_operand" "=r")
8121 (not:SI (match_operator:SI 1 "arm_comparison_operator"
8122 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8123 "TARGET_ARM"
8124 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
8125 "TARGET_ARM"
8126 [(set (match_dup 0)
8127 (if_then_else:SI (match_dup 1)
8128 (match_dup 3)
8129 (match_dup 4)))]
8130 {
8131 operands[3] = GEN_INT (~1);
8132 operands[4] = GEN_INT (~0);
8133 }
8134 [(set_attr "conds" "use")
8135 (set_attr "length" "8")
8136 (set_attr "type" "multiple")]
8137 )
8138
8139 (define_expand "cstoresi4"
8140 [(set (match_operand:SI 0 "s_register_operand")
8141 (match_operator:SI 1 "expandable_comparison_operator"
8142 [(match_operand:SI 2 "s_register_operand")
8143 (match_operand:SI 3 "reg_or_int_operand")]))]
8144 "TARGET_32BIT || TARGET_THUMB1"
8145 "{
8146 rtx op3, scratch, scratch2;
8147
8148 if (!TARGET_THUMB1)
8149 {
8150 if (!arm_add_operand (operands[3], SImode))
8151 operands[3] = force_reg (SImode, operands[3]);
8152 emit_insn (gen_cstore_cc (operands[0], operands[1],
8153 operands[2], operands[3]));
8154 DONE;
8155 }
8156
8157 if (operands[3] == const0_rtx)
8158 {
8159 switch (GET_CODE (operands[1]))
8160 {
8161 case EQ:
8162 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
8163 break;
8164
8165 case NE:
8166 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
8167 break;
8168
8169 case LE:
8170 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
8171 NULL_RTX, 0, OPTAB_WIDEN);
8172 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
8173 NULL_RTX, 0, OPTAB_WIDEN);
8174 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8175 operands[0], 1, OPTAB_WIDEN);
8176 break;
8177
8178 case GE:
8179 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
8180 NULL_RTX, 1);
8181 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8182 NULL_RTX, 1, OPTAB_WIDEN);
8183 break;
8184
8185 case GT:
8186 scratch = expand_binop (SImode, ashr_optab, operands[2],
8187 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
8188 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
8189 NULL_RTX, 0, OPTAB_WIDEN);
8190 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
8191 0, OPTAB_WIDEN);
8192 break;
8193
8194 /* LT is handled by generic code. No need for unsigned with 0. */
8195 default:
8196 FAIL;
8197 }
8198 DONE;
8199 }
8200
8201 switch (GET_CODE (operands[1]))
8202 {
8203 case EQ:
8204 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8205 NULL_RTX, 0, OPTAB_WIDEN);
8206 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
8207 break;
8208
8209 case NE:
8210 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8211 NULL_RTX, 0, OPTAB_WIDEN);
8212 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
8213 break;
8214
8215 case LE:
8216 op3 = force_reg (SImode, operands[3]);
8217
8218 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
8219 NULL_RTX, 1, OPTAB_WIDEN);
8220 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
8221 NULL_RTX, 0, OPTAB_WIDEN);
8222 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8223 op3, operands[2]));
8224 break;
8225
8226 case GE:
8227 op3 = operands[3];
8228 if (!thumb1_cmp_operand (op3, SImode))
8229 op3 = force_reg (SImode, op3);
8230 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8231 NULL_RTX, 0, OPTAB_WIDEN);
8232 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8233 NULL_RTX, 1, OPTAB_WIDEN);
8234 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8235 operands[2], op3));
8236 break;
8237
8238 case LEU:
8239 op3 = force_reg (SImode, operands[3]);
8240 scratch = force_reg (SImode, const0_rtx);
8241 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8242 op3, operands[2]));
8243 break;
8244
8245 case GEU:
8246 op3 = operands[3];
8247 if (!thumb1_cmp_operand (op3, SImode))
8248 op3 = force_reg (SImode, op3);
8249 scratch = force_reg (SImode, const0_rtx);
8250 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8251 operands[2], op3));
8252 break;
8253
8254 case LTU:
8255 op3 = operands[3];
8256 if (!thumb1_cmp_operand (op3, SImode))
8257 op3 = force_reg (SImode, op3);
8258 scratch = gen_reg_rtx (SImode);
8259 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
8260 break;
8261
8262 case GTU:
8263 op3 = force_reg (SImode, operands[3]);
8264 scratch = gen_reg_rtx (SImode);
8265 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
8266 break;
8267
8268 /* No good sequences for GT, LT. */
8269 default:
8270 FAIL;
8271 }
8272 DONE;
8273 }")
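
;; A worked example of the branchless Thumb-1 sequences above: for x <= 0
;; the expander computes ((x | (x - 1)) >> 31) with a logical shift.  If x
;; is zero, x - 1 is -1 and the OR is negative; if x is negative the OR is
;; negative anyway; if x is positive both terms are non-negative.  So the
;; sign bit, shifted down, is exactly the required 0/1 result with no
;; conditional branch.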
8274
8275 (define_expand "cstorehf4"
8276 [(set (match_operand:SI 0 "s_register_operand")
8277 (match_operator:SI 1 "expandable_comparison_operator"
8278 [(match_operand:HF 2 "s_register_operand")
8279 (match_operand:HF 3 "vfp_compare_operand")]))]
8280 "TARGET_VFP_FP16INST"
8281 {
8282 if (!arm_validize_comparison (&operands[1],
8283 &operands[2],
8284 &operands[3]))
8285 FAIL;
8286
8287 emit_insn (gen_cstore_cc (operands[0], operands[1],
8288 operands[2], operands[3]));
8289 DONE;
8290 }
8291 )
8292
8293 (define_expand "cstoresf4"
8294 [(set (match_operand:SI 0 "s_register_operand")
8295 (match_operator:SI 1 "expandable_comparison_operator"
8296 [(match_operand:SF 2 "s_register_operand")
8297 (match_operand:SF 3 "vfp_compare_operand")]))]
8298 "TARGET_32BIT && TARGET_HARD_FLOAT"
8299 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8300 operands[2], operands[3])); DONE;"
8301 )
8302
8303 (define_expand "cstoredf4"
8304 [(set (match_operand:SI 0 "s_register_operand")
8305 (match_operator:SI 1 "expandable_comparison_operator"
8306 [(match_operand:DF 2 "s_register_operand")
8307 (match_operand:DF 3 "vfp_compare_operand")]))]
8308 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
8309 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8310 operands[2], operands[3])); DONE;"
8311 )
8312
8313 (define_expand "cstoredi4"
8314 [(set (match_operand:SI 0 "s_register_operand")
8315 (match_operator:SI 1 "expandable_comparison_operator"
8316 [(match_operand:DI 2 "s_register_operand")
8317 (match_operand:DI 3 "reg_or_int_operand")]))]
8318 "TARGET_32BIT"
8319 "{
8320 if (!arm_validize_comparison (&operands[1],
8321 &operands[2],
8322 &operands[3]))
8323 FAIL;
8324 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
8325 operands[3]));
8326 DONE;
8327 }"
8328 )
8329
8330 \f
8331 ;; Conditional move insns
8332
8333 (define_expand "movsicc"
8334 [(set (match_operand:SI 0 "s_register_operand")
8335 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
8336 (match_operand:SI 2 "arm_not_operand")
8337 (match_operand:SI 3 "arm_not_operand")))]
8338 "TARGET_32BIT"
8339 "
8340 {
8341 enum rtx_code code;
8342 rtx ccreg;
8343
8344 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8345 &XEXP (operands[1], 1)))
8346 FAIL;
8347
8348 code = GET_CODE (operands[1]);
8349 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8350 XEXP (operands[1], 1), NULL_RTX);
8351 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8352 }"
8353 )
8354
8355 (define_expand "movhfcc"
8356 [(set (match_operand:HF 0 "s_register_operand")
8357 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
8358 (match_operand:HF 2 "s_register_operand")
8359 (match_operand:HF 3 "s_register_operand")))]
8360 "TARGET_VFP_FP16INST"
8361 "
8362 {
8363 enum rtx_code code = GET_CODE (operands[1]);
8364 rtx ccreg;
8365
8366 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8367 &XEXP (operands[1], 1)))
8368 FAIL;
8369
8370 code = GET_CODE (operands[1]);
8371 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8372 XEXP (operands[1], 1), NULL_RTX);
8373 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8374 }"
8375 )
8376
8377 (define_expand "movsfcc"
8378 [(set (match_operand:SF 0 "s_register_operand")
8379 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
8380 (match_operand:SF 2 "s_register_operand")
8381 (match_operand:SF 3 "s_register_operand")))]
8382 "TARGET_32BIT && TARGET_HARD_FLOAT"
8383 "
8384 {
8385 enum rtx_code code = GET_CODE (operands[1]);
8386 rtx ccreg;
8387
8388 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8389 &XEXP (operands[1], 1)))
8390 FAIL;
8391
8392 code = GET_CODE (operands[1]);
8393 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8394 XEXP (operands[1], 1), NULL_RTX);
8395 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8396 }"
8397 )
8398
8399 (define_expand "movdfcc"
8400 [(set (match_operand:DF 0 "s_register_operand")
8401 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
8402 (match_operand:DF 2 "s_register_operand")
8403 (match_operand:DF 3 "s_register_operand")))]
8404 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
8405 "
8406 {
8407 enum rtx_code code = GET_CODE (operands[1]);
8408 rtx ccreg;
8409
8410 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8411 &XEXP (operands[1], 1)))
8412 FAIL;
8413 code = GET_CODE (operands[1]);
8414 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8415 XEXP (operands[1], 1), NULL_RTX);
8416 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8417 }"
8418 )
8419
8420 (define_insn "*cmov<mode>"
8421 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
8422 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
8423 [(match_operand 2 "cc_register" "") (const_int 0)])
8424 (match_operand:SDF 3 "s_register_operand"
8425 "<F_constraint>")
8426 (match_operand:SDF 4 "s_register_operand"
8427 "<F_constraint>")))]
8428 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
8429 "*
8430 {
8431 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
8432 switch (code)
8433 {
8434 case ARM_GE:
8435 case ARM_GT:
8436 case ARM_EQ:
8437 case ARM_VS:
8438 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
8439 case ARM_LT:
8440 case ARM_LE:
8441 case ARM_NE:
8442 case ARM_VC:
8443 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
8444 default:
8445 gcc_unreachable ();
8446 }
8447 return \"\";
8448 }"
8449 [(set_attr "conds" "use")
8450 (set_attr "type" "fcsel")]
8451 )
8452
8453 (define_insn "*cmovhf"
8454 [(set (match_operand:HF 0 "s_register_operand" "=t")
8455 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
8456 [(match_operand 2 "cc_register" "") (const_int 0)])
8457 (match_operand:HF 3 "s_register_operand" "t")
8458 (match_operand:HF 4 "s_register_operand" "t")))]
8459 "TARGET_VFP_FP16INST"
8460 "*
8461 {
8462 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
8463 switch (code)
8464 {
8465 case ARM_GE:
8466 case ARM_GT:
8467 case ARM_EQ:
8468 case ARM_VS:
8469 return \"vsel%d1.f16\\t%0, %3, %4\";
8470 case ARM_LT:
8471 case ARM_LE:
8472 case ARM_NE:
8473 case ARM_VC:
8474 return \"vsel%D1.f16\\t%0, %4, %3\";
8475 default:
8476 gcc_unreachable ();
8477 }
8478 return \"\";
8479 }"
8480 [(set_attr "conds" "use")
8481 (set_attr "type" "fcsel")]
8482 )
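;; Illustrative note (assumption, not from the original sources): the two vsel
;; patterns above let simple floating-point selects be emitted without a
;; branch when the ARMv8 FP extension (TARGET_VFP5 / TARGET_VFP_FP16INST) is
;; available.  A rough sketch, register numbers chosen only for illustration:
;;
;;   double pick (double a, double b)
;;   {
;;     return a >= b ? a : b;   /* vcmpe.f64  d0, d1                 */
;;   }                          /* vmrs  APSR_nzcv, FPSCR            */
;;                              /* vselge.f64 d0, d0, d1             */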
8483
8484 (define_insn_and_split "*movsicc_insn"
8485 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8486 (if_then_else:SI
8487 (match_operator 3 "arm_comparison_operator"
8488 [(match_operand 4 "cc_register" "") (const_int 0)])
8489 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8490 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8491 "TARGET_ARM"
8492 "@
8493 mov%D3\\t%0, %2
8494 mvn%D3\\t%0, #%B2
8495 mov%d3\\t%0, %1
8496 mvn%d3\\t%0, #%B1
8497 #
8498 #
8499 #
8500 #"
8501 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8502 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8503 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8504 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8505 "&& reload_completed"
8506 [(const_int 0)]
8507 {
8508 enum rtx_code rev_code;
8509 machine_mode mode;
8510 rtx rev_cond;
8511
8512 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
8513 operands[3],
8514 gen_rtx_SET (operands[0], operands[1])));
8515
8516 rev_code = GET_CODE (operands[3]);
8517 mode = GET_MODE (operands[4]);
8518 if (mode == CCFPmode || mode == CCFPEmode)
8519 rev_code = reverse_condition_maybe_unordered (rev_code);
8520 else
8521 rev_code = reverse_condition (rev_code);
8522
8523 rev_cond = gen_rtx_fmt_ee (rev_code,
8524 VOIDmode,
8525 operands[4],
8526 const0_rtx);
8527 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
8528 rev_cond,
8529 gen_rtx_SET (operands[0], operands[2])));
8530 DONE;
8531 }
8532 [(set_attr "length" "4,4,4,4,8,8,8,8")
8533 (set_attr "conds" "use")
8534 (set_attr_alternative "type"
8535 [(if_then_else (match_operand 2 "const_int_operand" "")
8536 (const_string "mov_imm")
8537 (const_string "mov_reg"))
8538 (const_string "mvn_imm")
8539 (if_then_else (match_operand 1 "const_int_operand" "")
8540 (const_string "mov_imm")
8541 (const_string "mov_reg"))
8542 (const_string "mvn_imm")
8543 (const_string "multiple")
8544 (const_string "multiple")
8545 (const_string "multiple")
8546 (const_string "multiple")])]
8547 )
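;; Illustrative note (assumption, not part of the original comments): this is
;; the pattern if-conversion uses for a simple integer select in ARM state.
;; A rough sketch, with register assignments purely illustrative:
;;
;;   int select (int c, int a, int b)
;;   {
;;     return c ? a : b;   /* cmp   r0, #0                            */
;;   }                     /* movne r0, r1 ; moveq r0, r2             */
;;                         /* (the "#" alternatives above split into   */
;;                         /*  such a conditional pair after reload)   */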
8548
8549 (define_insn "*movsfcc_soft_insn"
8550 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8551 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8552 [(match_operand 4 "cc_register" "") (const_int 0)])
8553 (match_operand:SF 1 "s_register_operand" "0,r")
8554 (match_operand:SF 2 "s_register_operand" "r,0")))]
8555 "TARGET_ARM && TARGET_SOFT_FLOAT"
8556 "@
8557 mov%D3\\t%0, %2
8558 mov%d3\\t%0, %1"
8559 [(set_attr "conds" "use")
8560 (set_attr "type" "mov_reg")]
8561 )
8562
8563 \f
8564 ;; Jump and linkage insns
8565
8566 (define_expand "jump"
8567 [(set (pc)
8568 (label_ref (match_operand 0 "" "")))]
8569 "TARGET_EITHER"
8570 ""
8571 )
8572
8573 (define_insn "*arm_jump"
8574 [(set (pc)
8575 (label_ref (match_operand 0 "" "")))]
8576 "TARGET_32BIT"
8577 "*
8578 {
8579 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8580 {
8581 arm_ccfsm_state += 2;
8582 return \"\";
8583 }
8584 return \"b%?\\t%l0\";
8585 }
8586 "
8587 [(set_attr "predicable" "yes")
8588 (set (attr "length")
8589 (if_then_else
8590 (and (match_test "TARGET_THUMB2")
8591 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8592 (le (minus (match_dup 0) (pc)) (const_int 2048))))
8593 (const_int 2)
8594 (const_int 4)))
8595 (set_attr "type" "branch")]
8596 )
8597
8598 (define_expand "call"
8599 [(parallel [(call (match_operand 0 "memory_operand")
8600 (match_operand 1 "general_operand"))
8601 (use (match_operand 2 "" ""))
8602 (clobber (reg:SI LR_REGNUM))])]
8603 "TARGET_EITHER"
8604 "
8605 {
8606 rtx callee, pat;
8607 tree addr = MEM_EXPR (operands[0]);
8608
8609 /* In an untyped call, we can get NULL for operand 2. */
8610 if (operands[2] == NULL_RTX)
8611 operands[2] = const0_rtx;
8612
8613 /* Decide if we should generate indirect calls by loading the
8614 32-bit address of the callee into a register before performing the
8615 branch and link. */
8616 callee = XEXP (operands[0], 0);
8617 if (GET_CODE (callee) == SYMBOL_REF
8618 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8619 : !REG_P (callee))
8620 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8621
8622 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
8623 /* Indirect call: set r9 with FDPIC value of callee. */
8624 XEXP (operands[0], 0)
8625 = arm_load_function_descriptor (XEXP (operands[0], 0));
8626
8627 if (detect_cmse_nonsecure_call (addr))
8628 {
8629 pat = gen_nonsecure_call_internal (operands[0], operands[1],
8630 operands[2]);
8631 emit_call_insn (pat);
8632 }
8633 else
8634 {
8635 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8636 arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
8637 }
8638
8639 /* Restore FDPIC register (r9) after call. */
8640 if (TARGET_FDPIC)
8641 {
8642 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8643 rtx initial_fdpic_reg
8644 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
8645
8646 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
8647 initial_fdpic_reg));
8648 }
8649
8650 DONE;
8651 }"
8652 )
8653
8654 (define_insn "restore_pic_register_after_call"
8655 [(set (match_operand:SI 0 "s_register_operand" "+r,r")
8656 (unspec:SI [(match_dup 0)
8657 (match_operand:SI 1 "nonimmediate_operand" "r,m")]
8658 UNSPEC_PIC_RESTORE))]
8659 ""
8660 "@
8661 mov\t%0, %1
8662 ldr\t%0, %1"
8663 )
8664
8665 (define_expand "call_internal"
8666 [(parallel [(call (match_operand 0 "memory_operand")
8667 (match_operand 1 "general_operand"))
8668 (use (match_operand 2 "" ""))
8669 (clobber (reg:SI LR_REGNUM))])])
8670
8671 (define_expand "nonsecure_call_internal"
8672 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
8673 UNSPEC_NONSECURE_MEM)
8674 (match_operand 1 "general_operand"))
8675 (use (match_operand 2 "" ""))
8676 (clobber (reg:SI LR_REGNUM))])]
8677 "use_cmse"
8678 {
8679 rtx addr = XEXP (operands[0], 0);
8680 rtx tmp = REG_P (addr) ? addr : force_reg (SImode, addr);
8681
8682 if (!TARGET_HAVE_FPCXT_CMSE)
8683 {
8684 rtx r4 = gen_rtx_REG (SImode, R4_REGNUM);
8685 emit_move_insn (r4, tmp);
8686 tmp = r4;
8687 }
8688
8689 if (tmp != addr)
8690 operands[0] = replace_equiv_address (operands[0], tmp);
8691 }
8692 )
8693
8694 (define_insn "*call_reg_armv5"
8695 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8696 (match_operand 1 "" ""))
8697 (use (match_operand 2 "" ""))
8698 (clobber (reg:SI LR_REGNUM))]
8699 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
8700 "blx%?\\t%0"
8701 [(set_attr "type" "call")]
8702 )
8703
8704 (define_insn "*call_reg_arm"
8705 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8706 (match_operand 1 "" ""))
8707 (use (match_operand 2 "" ""))
8708 (clobber (reg:SI LR_REGNUM))]
8709 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
8710 "*
8711 return output_call (operands);
8712 "
8713 ;; length is worst case; normally it is only two instructions
8714 [(set_attr "length" "12")
8715 (set_attr "type" "call")]
8716 )
8717
8718
8719 (define_expand "call_value"
8720 [(parallel [(set (match_operand 0 "" "")
8721 (call (match_operand 1 "memory_operand")
8722 (match_operand 2 "general_operand")))
8723 (use (match_operand 3 "" ""))
8724 (clobber (reg:SI LR_REGNUM))])]
8725 "TARGET_EITHER"
8726 "
8727 {
8728 rtx pat, callee;
8729 tree addr = MEM_EXPR (operands[1]);
8730
8731 /* In an untyped call, we can get NULL for operand 3. */
8732 if (operands[3] == 0)
8733 operands[3] = const0_rtx;
8734
8735 /* Decide if we should generate indirect calls by loading the
8736 32-bit address of the callee into a register before performing the
8737 branch and link. */
8738 callee = XEXP (operands[1], 0);
8739 if (GET_CODE (callee) == SYMBOL_REF
8740 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8741 : !REG_P (callee))
8742 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8743
8744 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
8745 /* Indirect call: set r9 with FDPIC value of callee. */
8746 XEXP (operands[1], 0)
8747 = arm_load_function_descriptor (XEXP (operands[1], 0));
8748
8749 if (detect_cmse_nonsecure_call (addr))
8750 {
8751 pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
8752 operands[2], operands[3]);
8753 emit_call_insn (pat);
8754 }
8755 else
8756 {
8757 pat = gen_call_value_internal (operands[0], operands[1],
8758 operands[2], operands[3]);
8759 arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
8760 }
8761
8762 /* Restore FDPIC register (r9) after call. */
8763 if (TARGET_FDPIC)
8764 {
8765 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8766 rtx initial_fdpic_reg
8767 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
8768
8769 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
8770 initial_fdpic_reg));
8771 }
8772
8773 DONE;
8774 }"
8775 )
8776
8777 (define_expand "call_value_internal"
8778 [(parallel [(set (match_operand 0 "" "")
8779 (call (match_operand 1 "memory_operand")
8780 (match_operand 2 "general_operand")))
8781 (use (match_operand 3 "" ""))
8782 (clobber (reg:SI LR_REGNUM))])])
8783
8784 (define_expand "nonsecure_call_value_internal"
8785 [(parallel [(set (match_operand 0 "" "")
8786 (call (unspec:SI [(match_operand 1 "memory_operand")]
8787 UNSPEC_NONSECURE_MEM)
8788 (match_operand 2 "general_operand")))
8789 (use (match_operand 3 "" ""))
8790 (clobber (reg:SI LR_REGNUM))])]
8791 "use_cmse"
8792 "
8793 {
8794 if (!TARGET_HAVE_FPCXT_CMSE)
8795 {
8796 rtx tmp =
8797 copy_to_suggested_reg (XEXP (operands[1], 0),
8798 gen_rtx_REG (SImode, R4_REGNUM),
8799 SImode);
8800
8801 operands[1] = replace_equiv_address (operands[1], tmp);
8802 }
8803 }")
8804
8805 (define_insn "*call_value_reg_armv5"
8806 [(set (match_operand 0 "" "")
8807 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8808 (match_operand 2 "" "")))
8809 (use (match_operand 3 "" ""))
8810 (clobber (reg:SI LR_REGNUM))]
8811 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
8812 "blx%?\\t%1"
8813 [(set_attr "type" "call")]
8814 )
8815
8816 (define_insn "*call_value_reg_arm"
8817 [(set (match_operand 0 "" "")
8818 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8819 (match_operand 2 "" "")))
8820 (use (match_operand 3 "" ""))
8821 (clobber (reg:SI LR_REGNUM))]
8822 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
8823 "*
8824 return output_call (&operands[1]);
8825 "
8826 [(set_attr "length" "12")
8827 (set_attr "type" "call")]
8828 )
8829
8830 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8831 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8832
8833 (define_insn "*call_symbol"
8834 [(call (mem:SI (match_operand:SI 0 "" ""))
8835 (match_operand 1 "" ""))
8836 (use (match_operand 2 "" ""))
8837 (clobber (reg:SI LR_REGNUM))]
8838 "TARGET_32BIT
8839 && !SIBLING_CALL_P (insn)
8840 && (GET_CODE (operands[0]) == SYMBOL_REF)
8841 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8842 "*
8843 {
8844 rtx op = operands[0];
8845
8846 /* Switch mode now when possible. */
8847 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8848 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8849 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
8850
8851 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8852 }"
8853 [(set_attr "type" "call")]
8854 )
8855
8856 (define_insn "*call_value_symbol"
8857 [(set (match_operand 0 "" "")
8858 (call (mem:SI (match_operand:SI 1 "" ""))
8859 (match_operand:SI 2 "" "")))
8860 (use (match_operand 3 "" ""))
8861 (clobber (reg:SI LR_REGNUM))]
8862 "TARGET_32BIT
8863 && !SIBLING_CALL_P (insn)
8864 && (GET_CODE (operands[1]) == SYMBOL_REF)
8865 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8866 "*
8867 {
8868 rtx op = operands[1];
8869
8870 /* Switch mode now when possible. */
8871 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8872 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8873 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
8874
8875 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8876 }"
8877 [(set_attr "type" "call")]
8878 )
8879
8880 (define_expand "sibcall_internal"
8881 [(parallel [(call (match_operand 0 "memory_operand")
8882 (match_operand 1 "general_operand"))
8883 (return)
8884 (use (match_operand 2 "" ""))])])
8885
8886 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8887 (define_expand "sibcall"
8888 [(parallel [(call (match_operand 0 "memory_operand")
8889 (match_operand 1 "general_operand"))
8890 (return)
8891 (use (match_operand 2 "" ""))])]
8892 "TARGET_32BIT"
8893 "
8894 {
8895 rtx pat;
8896
8897 if ((!REG_P (XEXP (operands[0], 0))
8898 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
8899 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
8900 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
8901 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
8902
8903 if (operands[2] == NULL_RTX)
8904 operands[2] = const0_rtx;
8905
8906 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
8907 arm_emit_call_insn (pat, operands[0], true);
8908 DONE;
8909 }"
8910 )
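;; Illustrative note (assumption): the sibcall expansion above is what turns a
;; tail call into a plain branch when the target permits it, e.g.
;;
;;   int wrapper (int x) { return worker (x); }   /* worker() is a placeholder */
;;
;; can be emitted as a single "b worker" (or "bx <reg>" for an indirect
;; target, see *sibcall_insn below) instead of a "bl" plus a separate return.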
8911
8912 (define_expand "sibcall_value_internal"
8913 [(parallel [(set (match_operand 0 "" "")
8914 (call (match_operand 1 "memory_operand")
8915 (match_operand 2 "general_operand")))
8916 (return)
8917 (use (match_operand 3 "" ""))])])
8918
8919 (define_expand "sibcall_value"
8920 [(parallel [(set (match_operand 0 "" "")
8921 (call (match_operand 1 "memory_operand")
8922 (match_operand 2 "general_operand")))
8923 (return)
8924 (use (match_operand 3 "" ""))])]
8925 "TARGET_32BIT"
8926 "
8927 {
8928 rtx pat;
8929
8930 if ((!REG_P (XEXP (operands[1], 0))
8931 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
8932 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
8933 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
8934 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
8935
8936 if (operands[3] == NULL_RTX)
8937 operands[3] = const0_rtx;
8938
8939 pat = gen_sibcall_value_internal (operands[0], operands[1],
8940 operands[2], operands[3]);
8941 arm_emit_call_insn (pat, operands[1], true);
8942 DONE;
8943 }"
8944 )
8945
8946 (define_insn "*sibcall_insn"
8947 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
8948 (match_operand 1 "" ""))
8949 (return)
8950 (use (match_operand 2 "" ""))]
8951 "TARGET_32BIT && SIBLING_CALL_P (insn)"
8952 "*
8953 if (which_alternative == 1)
8954 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8955 else
8956 {
8957 if (arm_arch5t || arm_arch4t)
8958 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
8959 else
8960 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
8961 }
8962 "
8963 [(set_attr "type" "call")]
8964 )
8965
8966 (define_insn "*sibcall_value_insn"
8967 [(set (match_operand 0 "" "")
8968 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
8969 (match_operand 2 "" "")))
8970 (return)
8971 (use (match_operand 3 "" ""))]
8972 "TARGET_32BIT && SIBLING_CALL_P (insn)"
8973 "*
8974 if (which_alternative == 1)
8975 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8976 else
8977 {
8978 if (arm_arch5t || arm_arch4t)
8979 return \"bx%?\\t%1\";
8980 else
8981 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
8982 }
8983 "
8984 [(set_attr "type" "call")]
8985 )
8986
8987 (define_expand "<return_str>return"
8988 [(RETURNS)]
8989 "(TARGET_ARM || (TARGET_THUMB2
8990 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
8991 && !IS_STACKALIGN (arm_current_func_type ())))
8992 <return_cond_false>"
8993 "
8994 {
8995 if (TARGET_THUMB2)
8996 {
8997 thumb2_expand_return (<return_simple_p>);
8998 DONE;
8999 }
9000 }
9001 "
9002 )
9003
9004 ;; Often the return insn will be the same as loading from memory, so set the type attribute accordingly.
9005 (define_insn "*arm_return"
9006 [(return)]
9007 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
9008 "*
9009 {
9010 if (arm_ccfsm_state == 2)
9011 {
9012 arm_ccfsm_state += 2;
9013 return \"\";
9014 }
9015 return output_return_instruction (const_true_rtx, true, false, false);
9016 }"
9017 [(set_attr "type" "load_4")
9018 (set_attr "length" "12")
9019 (set_attr "predicable" "yes")]
9020 )
9021
9022 (define_insn "*cond_<return_str>return"
9023 [(set (pc)
9024 (if_then_else (match_operator 0 "arm_comparison_operator"
9025 [(match_operand 1 "cc_register" "") (const_int 0)])
9026 (RETURNS)
9027 (pc)))]
9028 "TARGET_ARM <return_cond_true>"
9029 "*
9030 {
9031 if (arm_ccfsm_state == 2)
9032 {
9033 arm_ccfsm_state += 2;
9034 return \"\";
9035 }
9036 return output_return_instruction (operands[0], true, false,
9037 <return_simple_p>);
9038 }"
9039 [(set_attr "conds" "use")
9040 (set_attr "length" "12")
9041 (set_attr "type" "load_4")]
9042 )
9043
9044 (define_insn "*cond_<return_str>return_inverted"
9045 [(set (pc)
9046 (if_then_else (match_operator 0 "arm_comparison_operator"
9047 [(match_operand 1 "cc_register" "") (const_int 0)])
9048 (pc)
9049 (RETURNS)))]
9050 "TARGET_ARM <return_cond_true>"
9051 "*
9052 {
9053 if (arm_ccfsm_state == 2)
9054 {
9055 arm_ccfsm_state += 2;
9056 return \"\";
9057 }
9058 return output_return_instruction (operands[0], true, true,
9059 <return_simple_p>);
9060 }"
9061 [(set_attr "conds" "use")
9062 (set_attr "length" "12")
9063 (set_attr "type" "load_4")]
9064 )
9065
9066 (define_insn "*arm_simple_return"
9067 [(simple_return)]
9068 "TARGET_ARM"
9069 "*
9070 {
9071 if (arm_ccfsm_state == 2)
9072 {
9073 arm_ccfsm_state += 2;
9074 return \"\";
9075 }
9076 return output_return_instruction (const_true_rtx, true, false, true);
9077 }"
9078 [(set_attr "type" "branch")
9079 (set_attr "length" "4")
9080 (set_attr "predicable" "yes")]
9081 )
9082
9083 ;; Generate a sequence of instructions to determine if the processor is
9084 ;; in 26-bit or 32-bit mode, and return the appropriate return address
9085 ;; mask.
9086
9087 (define_expand "return_addr_mask"
9088 [(set (match_dup 1)
9089 (compare:CC_NZ (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
9090 (const_int 0)))
9091 (set (match_operand:SI 0 "s_register_operand")
9092 (if_then_else:SI (eq (match_dup 1) (const_int 0))
9093 (const_int -1)
9094 (const_int 67108860)))] ; 0x03fffffc
9095 "TARGET_ARM"
9096 "
9097 operands[1] = gen_rtx_REG (CC_NZmode, CC_REGNUM);
9098 ")
9099
9100 (define_insn "*check_arch2"
9101 [(set (match_operand:CC_NZ 0 "cc_register" "")
9102 (compare:CC_NZ (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
9103 (const_int 0)))]
9104 "TARGET_ARM"
9105 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
9106 [(set_attr "length" "8")
9107 (set_attr "conds" "set")
9108 (set_attr "type" "multiple")]
9109 )
9110
9111 ;; Call subroutine returning any type.
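;; Illustrative note (assumption): the untyped_call and untyped_return
;; expanders below back __builtin_apply and __builtin_return, which forward a
;; call without knowing the callee's return type, e.g.
;;
;;   void *forward (void (*fn) (), void *args, __SIZE_TYPE__ size)
;;   {
;;     __builtin_return (__builtin_apply (fn, args, size));
;;   }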
9112
9113 (define_expand "untyped_call"
9114 [(parallel [(call (match_operand 0 "" "")
9115 (const_int 0))
9116 (match_operand 1 "" "")
9117 (match_operand 2 "" "")])]
9118 "TARGET_EITHER && !TARGET_FDPIC"
9119 "
9120 {
9121 int i;
9122 rtx par = gen_rtx_PARALLEL (VOIDmode,
9123 rtvec_alloc (XVECLEN (operands[2], 0)));
9124 rtx addr = gen_reg_rtx (Pmode);
9125 rtx mem;
9126 int size = 0;
9127
9128 emit_move_insn (addr, XEXP (operands[1], 0));
9129 mem = change_address (operands[1], BLKmode, addr);
9130
9131 for (i = 0; i < XVECLEN (operands[2], 0); i++)
9132 {
9133 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
9134
9135 /* Default code only uses r0 as a return value, but we could
9136 be using anything up to 4 registers. */
9137 if (REGNO (src) == R0_REGNUM)
9138 src = gen_rtx_REG (TImode, R0_REGNUM);
9139
9140 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
9141 GEN_INT (size));
9142 size += GET_MODE_SIZE (GET_MODE (src));
9143 }
9144
9145 emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
9146
9147 size = 0;
9148
9149 for (i = 0; i < XVECLEN (par, 0); i++)
9150 {
9151 HOST_WIDE_INT offset = 0;
9152 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
9153
9154 if (size != 0)
9155 emit_move_insn (addr, plus_constant (Pmode, addr, size));
9156
9157 mem = change_address (mem, GET_MODE (reg), NULL);
9158 if (REGNO (reg) == R0_REGNUM)
9159 {
9160 /* On thumb we have to use a write-back instruction. */
9161 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
9162 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
9163 size = TARGET_ARM ? 16 : 0;
9164 }
9165 else
9166 {
9167 emit_move_insn (mem, reg);
9168 size = GET_MODE_SIZE (GET_MODE (reg));
9169 }
9170 }
9171
9172 /* The optimizer does not know that the call sets the function value
9173 registers we stored in the result block. We avoid problems by
9174 claiming that all hard registers are used and clobbered at this
9175 point. */
9176 emit_insn (gen_blockage ());
9177
9178 DONE;
9179 }"
9180 )
9181
9182 (define_expand "untyped_return"
9183 [(match_operand:BLK 0 "memory_operand")
9184 (match_operand 1 "" "")]
9185 "TARGET_EITHER && !TARGET_FDPIC"
9186 "
9187 {
9188 int i;
9189 rtx addr = gen_reg_rtx (Pmode);
9190 rtx mem;
9191 int size = 0;
9192
9193 emit_move_insn (addr, XEXP (operands[0], 0));
9194 mem = change_address (operands[0], BLKmode, addr);
9195
9196 for (i = 0; i < XVECLEN (operands[1], 0); i++)
9197 {
9198 HOST_WIDE_INT offset = 0;
9199 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
9200
9201 if (size != 0)
9202 emit_move_insn (addr, plus_constant (Pmode, addr, size));
9203
9204 mem = change_address (mem, GET_MODE (reg), NULL);
9205 if (REGNO (reg) == R0_REGNUM)
9206 {
9207 /* On thumb we have to use a write-back instruction. */
9208 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
9209 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
9210 size = TARGET_ARM ? 16 : 0;
9211 }
9212 else
9213 {
9214 emit_move_insn (reg, mem);
9215 size = GET_MODE_SIZE (GET_MODE (reg));
9216 }
9217 }
9218
9219 /* Emit USE insns before the return. */
9220 for (i = 0; i < XVECLEN (operands[1], 0); i++)
9221 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
9222
9223 /* Construct the return. */
9224 expand_naked_return ();
9225
9226 DONE;
9227 }"
9228 )
9229
9230 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
9231 ;; all of memory. This blocks insns from being moved across this point.
9232
9233 (define_insn "blockage"
9234 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
9235 "TARGET_EITHER"
9236 ""
9237 [(set_attr "length" "0")
9238 (set_attr "type" "block")]
9239 )
9240
9241 ;; Since we hard code r0 here, use the 'o' constraint to prevent
9242 ;; provoking undefined behaviour in the hardware by emitting
9243 ;; auto-increment operations with potentially r0 as the base register.
9244 (define_insn "probe_stack"
9245 [(set (match_operand:SI 0 "memory_operand" "=o")
9246 (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
9247 "TARGET_32BIT"
9248 "str%?\\tr0, %0"
9249 [(set_attr "type" "store_4")
9250 (set_attr "predicable" "yes")]
9251 )
9252
9253 (define_insn "probe_stack_range"
9254 [(set (match_operand:SI 0 "register_operand" "=r")
9255 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
9256 (match_operand:SI 2 "register_operand" "r")]
9257 VUNSPEC_PROBE_STACK_RANGE))]
9258 "TARGET_32BIT"
9259 {
9260 return output_probe_stack_range (operands[0], operands[2]);
9261 }
9262 [(set_attr "type" "multiple")
9263 (set_attr "conds" "clob")]
9264 )
9265
9266 ;; Named patterns for stack smashing protection.
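;; Illustrative note (assumption): these patterns implement -fstack-protector*
;; when the canary is read through the global __stack_chk_guard symbol
;; (arm_stack_protector_guard == SSP_GLOBAL).  For example, a function such as
;;
;;   void f (void) { char buf[64]; fill (buf); }   /* fill() is a placeholder */
;;
;; compiled with -fstack-protector stores the canary in its prologue via
;; stack_protect_combined_set and re-checks it via stack_protect_combined_test
;; before returning.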
9267 (define_expand "stack_protect_combined_set"
9268 [(parallel
9269 [(set (match_operand:SI 0 "memory_operand")
9270 (unspec:SI [(match_operand:SI 1 "guard_operand")]
9271 UNSPEC_SP_SET))
9272 (clobber (match_scratch:SI 2 ""))
9273 (clobber (match_scratch:SI 3 ""))])]
9274 "arm_stack_protector_guard == SSP_GLOBAL"
9275 ""
9276 )
9277
9278 ;; Use a separate insn from the above expand so that the mem can be kept
9279 ;; outside operand #1 when register allocation happens.  This is needed to
9280 ;; stop LRA from trying to reload the guard, since we need to control how PIC
9281 ;; access is done in the -fpic/-fPIC case (see the COMPUTE_NOW parameter when
9282 ;; calling legitimize_pic_address ()).
9283 (define_insn_and_split "*stack_protect_combined_set_insn"
9284 [(set (match_operand:SI 0 "memory_operand" "=m,m")
9285 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
9286 UNSPEC_SP_SET))
9287 (clobber (match_scratch:SI 2 "=&l,&r"))
9288 (clobber (match_scratch:SI 3 "=&l,&r"))]
9289 ""
9290 "#"
9291 "reload_completed"
9292 [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
9293 UNSPEC_SP_SET))
9294 (clobber (match_dup 2))])]
9295 "
9296 {
9297 if (flag_pic)
9298 {
9299 rtx pic_reg;
9300
9301 if (TARGET_FDPIC)
9302 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
9303 else
9304 pic_reg = operands[3];
9305
9306 /* Forces recomputing of GOT base now. */
9307 legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
9308 true /*compute_now*/);
9309 }
9310 else
9311 {
9312 if (address_operand (operands[1], SImode))
9313 operands[2] = operands[1];
9314 else
9315 {
9316 rtx mem = force_const_mem (SImode, operands[1]);
9317 if (!general_operand (mem, SImode))
9318 {
9319 emit_move_insn (operands[2], XEXP (mem, 0));
9320 mem = replace_equiv_address (mem, operands[2], false);
9321 }
9322 emit_move_insn (operands[2], mem);
9323 }
9324 }
9325 }"
9326 [(set_attr "arch" "t1,32")]
9327 )
9328
9329 ;; DO NOT SPLIT THIS INSN. It's important for security reasons that the
9330 ;; canary value does not live beyond the life of this sequence.
9331 (define_insn "*stack_protect_set_insn"
9332 [(set (match_operand:SI 0 "memory_operand" "=m,m")
9333 (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
9334 UNSPEC_SP_SET))
9335 (clobber (match_dup 1))]
9336 ""
9337 "@
9338 ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
9339 ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
9340 [(set_attr "length" "8,12")
9341 (set_attr "conds" "clob,nocond")
9342 (set_attr "type" "multiple")
9343 (set_attr "arch" "t1,32")]
9344 )
9345
9346 (define_expand "stack_protect_combined_test"
9347 [(parallel
9348 [(set (pc)
9349 (if_then_else
9350 (eq (match_operand:SI 0 "memory_operand")
9351 (unspec:SI [(match_operand:SI 1 "guard_operand")]
9352 UNSPEC_SP_TEST))
9353 (label_ref (match_operand 2))
9354 (pc)))
9355 (clobber (match_scratch:SI 3 ""))
9356 (clobber (match_scratch:SI 4 ""))
9357 (clobber (reg:CC CC_REGNUM))])]
9358 "arm_stack_protector_guard == SSP_GLOBAL"
9359 ""
9360 )
9361
9362 ;; Use a separate insn from the above expand so that the mem can be kept
9363 ;; outside operand #1 when register allocation happens.  This is needed to
9364 ;; stop LRA from trying to reload the guard, since we need to control how PIC
9365 ;; access is done in the -fpic/-fPIC case (see the COMPUTE_NOW parameter when
9366 ;; calling legitimize_pic_address ()).
9367 (define_insn_and_split "*stack_protect_combined_test_insn"
9368 [(set (pc)
9369 (if_then_else
9370 (eq (match_operand:SI 0 "memory_operand" "m,m")
9371 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
9372 UNSPEC_SP_TEST))
9373 (label_ref (match_operand 2))
9374 (pc)))
9375 (clobber (match_scratch:SI 3 "=&l,&r"))
9376 (clobber (match_scratch:SI 4 "=&l,&r"))
9377 (clobber (reg:CC CC_REGNUM))]
9378 ""
9379 "#"
9380 "reload_completed"
9381 [(const_int 0)]
9382 {
9383 rtx eq;
9384
9385 if (flag_pic)
9386 {
9387 rtx pic_reg;
9388
9389 if (TARGET_FDPIC)
9390 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
9391 else
9392 pic_reg = operands[4];
9393
9394 /* Forces recomputing of GOT base now. */
9395 legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
9396 true /*compute_now*/);
9397 }
9398 else
9399 {
9400 if (address_operand (operands[1], SImode))
9401 operands[3] = operands[1];
9402 else
9403 {
9404 rtx mem = force_const_mem (SImode, operands[1]);
9405 if (!general_operand (mem, SImode))
9406 {
9407 emit_move_insn (operands[3], XEXP (mem, 0));
9408 mem = replace_equiv_address (mem, operands[3], false);
9409 }
9410 emit_move_insn (operands[3], mem);
9411 }
9412 }
9413 if (TARGET_32BIT)
9414 {
9415 emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
9416 operands[3]));
9417 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
9418 eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
9419 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
9420 }
9421 else
9422 {
9423 emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
9424 operands[3]));
9425 eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
9426 emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
9427 operands[2]));
9428 }
9429 DONE;
9430 }
9431 [(set_attr "arch" "t1,32")]
9432 )
9433
9434 ;; DO NOT SPLIT THIS PATTERN. It is important for security reasons that the
9435 ;; canary value does not live beyond the end of this sequence.
9436 (define_insn "arm_stack_protect_test_insn"
9437 [(set (reg:CC_Z CC_REGNUM)
9438 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
9439 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
9440 UNSPEC_SP_TEST)
9441 (const_int 0)))
9442 (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
9443 (clobber (match_dup 2))]
9444 "TARGET_32BIT"
9445 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0\;mov\t%2, #0"
9446 [(set_attr "length" "12,16")
9447 (set_attr "conds" "set")
9448 (set_attr "type" "multiple")
9449 (set_attr "arch" "t,32")]
9450 )
9451
9452 (define_expand "stack_protect_set"
9453 [(match_operand:SI 0 "memory_operand")
9454 (match_operand:SI 1 "memory_operand")]
9455 "arm_stack_protector_guard == SSP_TLSREG"
9456 "
9457 {
9458 operands[1] = arm_stack_protect_tls_canary_mem (false /* reload */);
9459 emit_insn (gen_stack_protect_set_tls (operands[0], operands[1]));
9460 DONE;
9461 }"
9462 )
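;; Illustrative note (assumption): the SSP_TLSREG variants here and below read
;; the canary relative to the thread pointer instead of through the global
;; __stack_chk_guard symbol; they are selected by options along the lines of
;;
;;   -fstack-protector-all -mstack-protector-guard=tls \
;;       -mstack-protector-guard-offset=<offset>
;;
;; (option spelling given only as an illustration of the intent).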
9463
9464 ;; DO NOT SPLIT THIS PATTERN. It is important for security reasons that the
9465 ;; canary value does not live beyond the life of this sequence.
9466 (define_insn "stack_protect_set_tls"
9467 [(set (match_operand:SI 0 "memory_operand" "=m")
9468 (unspec:SI [(match_operand:SI 1 "memory_operand" "m")]
9469 UNSPEC_SP_SET))
9470 (set (match_scratch:SI 2 "=&r") (const_int 0))]
9471 ""
9472 "ldr\\t%2, %1\;str\\t%2, %0\;mov\t%2, #0"
9473 [(set_attr "length" "12")
9474 (set_attr "conds" "unconditional")
9475 (set_attr "type" "multiple")]
9476 )
9477
9478 (define_expand "stack_protect_test"
9479 [(match_operand:SI 0 "memory_operand")
9480 (match_operand:SI 1 "memory_operand")
9481 (match_operand:SI 2)]
9482 "arm_stack_protector_guard == SSP_TLSREG"
9483 "
9484 {
9485 operands[1] = arm_stack_protect_tls_canary_mem (true /* reload */);
9486 emit_insn (gen_stack_protect_test_tls (operands[0], operands[1]));
9487
9488 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
9489 rtx eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
9490 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
9491 DONE;
9492 }"
9493 )
9494
9495 (define_insn "stack_protect_test_tls"
9496 [(set (reg:CC_Z CC_REGNUM)
9497 (compare:CC_Z (unspec:SI [(match_operand:SI 0 "memory_operand" "m")
9498 (match_operand:SI 1 "memory_operand" "m")]
9499 UNSPEC_SP_TEST)
9500 (const_int 0)))
9501 (clobber (match_scratch:SI 2 "=&r"))
9502 (clobber (match_scratch:SI 3 "=&r"))]
9503 ""
9504 "ldr\t%2, %0\;ldr\t%3, %1\;eors\t%2, %3, %2\;mov\t%3, #0"
9505 [(set_attr "length" "16")
9506 (set_attr "conds" "set")
9507 (set_attr "type" "multiple")]
9508 )
9509
9510 (define_expand "casesi"
9511 [(match_operand:SI 0 "s_register_operand") ; index to jump on
9512 (match_operand:SI 1 "const_int_operand") ; lower bound
9513 (match_operand:SI 2 "const_int_operand") ; total range
9514 (match_operand:SI 3 "" "") ; table label
9515 (match_operand:SI 4 "" "")] ; Out of range label
9516 "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
9517 "
9518 {
9519 enum insn_code code;
9520 if (operands[1] != const0_rtx)
9521 {
9522 rtx reg = gen_reg_rtx (SImode);
9523
9524 emit_insn (gen_addsi3 (reg, operands[0],
9525 gen_int_mode (-INTVAL (operands[1]),
9526 SImode)));
9527 operands[0] = reg;
9528 }
9529
9530 if (TARGET_ARM)
9531 code = CODE_FOR_arm_casesi_internal;
9532 else if (TARGET_THUMB1)
9533 code = CODE_FOR_thumb1_casesi_internal_pic;
9534 else if (flag_pic)
9535 code = CODE_FOR_thumb2_casesi_internal_pic;
9536 else
9537 code = CODE_FOR_thumb2_casesi_internal;
9538
9539 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
9540 operands[2] = force_reg (SImode, operands[2]);
9541
9542 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
9543 operands[3], operands[4]));
9544 DONE;
9545 }"
9546 )
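;; Illustrative note (assumption): casesi is what a reasonably dense switch
;; expands to when a dispatch table is profitable, e.g.
;;
;;   int dispatch (int i)
;;   {
;;     switch (i)
;;       {
;;       case 0: return f0 ();  case 1: return f1 ();
;;       case 2: return f2 ();  case 3: return f3 ();
;;       default: return -1;    /* f0..f3 are placeholders */
;;       }
;;   }
;;
;; The range check becomes the "leu" comparison in the internal patterns below
;; and the out-of-range label is the default case.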
9547
9548 ;; The USE in this pattern is needed to tell flow analysis that this is
9549 ;; a CASESI insn. It has no other purpose.
9550 (define_expand "arm_casesi_internal"
9551 [(parallel [(set (pc)
9552 (if_then_else
9553 (leu (match_operand:SI 0 "s_register_operand")
9554 (match_operand:SI 1 "arm_rhs_operand"))
9555 (match_dup 4)
9556 (label_ref:SI (match_operand 3 ""))))
9557 (clobber (reg:CC CC_REGNUM))
9558 (use (label_ref:SI (match_operand 2 "")))])]
9559 "TARGET_ARM"
9560 {
9561 operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
9562 operands[4] = gen_rtx_PLUS (SImode, operands[4],
9563 gen_rtx_LABEL_REF (SImode, operands[2]));
9564 operands[4] = gen_rtx_MEM (SImode, operands[4]);
9565 MEM_READONLY_P (operands[4]) = 1;
9566 MEM_NOTRAP_P (operands[4]) = 1;
9567 })
9568
9569 (define_insn "*arm_casesi_internal"
9570 [(parallel [(set (pc)
9571 (if_then_else
9572 (leu (match_operand:SI 0 "s_register_operand" "r")
9573 (match_operand:SI 1 "arm_rhs_operand" "rI"))
9574 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
9575 (label_ref:SI (match_operand 2 "" ""))))
9576 (label_ref:SI (match_operand 3 "" ""))))
9577 (clobber (reg:CC CC_REGNUM))
9578 (use (label_ref:SI (match_dup 2)))])]
9579 "TARGET_ARM"
9580 "*
9581 if (flag_pic)
9582 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
9583 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
9584 "
9585 [(set_attr "conds" "clob")
9586 (set_attr "length" "12")
9587 (set_attr "type" "multiple")]
9588 )
9589
9590 (define_expand "indirect_jump"
9591 [(set (pc)
9592 (match_operand:SI 0 "s_register_operand"))]
9593 "TARGET_EITHER"
9594 "
9595 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
9596 address and use bx. */
9597 if (TARGET_THUMB2)
9598 {
9599 rtx tmp;
9600 tmp = gen_reg_rtx (SImode);
9601 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
9602 operands[0] = tmp;
9603 }
9604 "
9605 )
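;; Illustrative note (assumption): indirect_jump is used, for instance, for a
;; GNU C computed goto:
;;
;;   void jump_to (void *target)
;;   {
;;     goto *target;   /* ARM:     mov pc, r0              */
;;   }                 /* Thumb-2: orr r0, r0, #1 ; bx r0  */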
9606
9607 ;; NB Never uses BX.
9608 (define_insn "*arm_indirect_jump"
9609 [(set (pc)
9610 (match_operand:SI 0 "s_register_operand" "r"))]
9611 "TARGET_ARM"
9612 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
9613 [(set_attr "predicable" "yes")
9614 (set_attr "type" "branch")]
9615 )
9616
9617 (define_insn "*load_indirect_jump"
9618 [(set (pc)
9619 (match_operand:SI 0 "memory_operand" "m"))]
9620 "TARGET_ARM"
9621 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
9622 [(set_attr "type" "load_4")
9623 (set_attr "pool_range" "4096")
9624 (set_attr "neg_pool_range" "4084")
9625 (set_attr "predicable" "yes")]
9626 )
9627
9628 \f
9629 ;; Misc insns
9630
9631 (define_insn "nop"
9632 [(const_int 0)]
9633 "TARGET_EITHER"
9634 "nop"
9635 [(set (attr "length")
9636 (if_then_else (eq_attr "is_thumb" "yes")
9637 (const_int 2)
9638 (const_int 4)))
9639 (set_attr "type" "mov_reg")]
9640 )
9641
9642 (define_insn "trap"
9643 [(trap_if (const_int 1) (const_int 0))]
9644 ""
9645 "*
9646 if (TARGET_ARM)
9647 return \".inst\\t0xe7f000f0\";
9648 else
9649 return \".inst\\t0xdeff\";
9650 "
9651 [(set (attr "length")
9652 (if_then_else (eq_attr "is_thumb" "yes")
9653 (const_int 2)
9654 (const_int 4)))
9655 (set_attr "type" "trap")
9656 (set_attr "conds" "unconditional")]
9657 )
9658
9659 \f
9660 ;; Patterns to allow combination of arithmetic, cond code and shifts
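;; Illustrative note (assumption): the patterns in this section fold a shift
;; (or an equivalent multiply by a power of two) into the second source
;; operand of an ALU instruction, e.g.
;;
;;   int index_word (int base, int i) { return base + (i << 2); }
;;
;; becomes a single "add r0, r0, r1, lsl #2" via the *<arith_shift_insn>
;; patterns below, instead of a separate shift followed by an add.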
9661
9662 (define_insn "*<arith_shift_insn>_multsi"
9663 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9664 (SHIFTABLE_OPS:SI
9665 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
9666 (match_operand:SI 3 "power_of_two_operand" ""))
9667 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
9668 "TARGET_32BIT"
9669 "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
9670 [(set_attr "predicable" "yes")
9671 (set_attr "shift" "2")
9672 (set_attr "arch" "a,t2")
9673 (set_attr "autodetect_type" "alu_shift_mul_op3")])
9674
9675 (define_insn "*<arith_shift_insn>_shiftsi"
9676 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9677 (SHIFTABLE_OPS:SI
9678 (match_operator:SI 2 "shift_nomul_operator"
9679 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9680 (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
9681 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
9682 "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
9683 "<arith_shift_insn>%?\\t%0, %1, %3%S2"
9684 [(set_attr "predicable" "yes")
9685 (set_attr "shift" "3")
9686 (set_attr "arch" "a,t2,a")
9687 (set_attr "autodetect_type" "alu_shift_operator2")])
9688
9689 (define_split
9690 [(set (match_operand:SI 0 "s_register_operand" "")
9691 (match_operator:SI 1 "shiftable_operator"
9692 [(match_operator:SI 2 "shiftable_operator"
9693 [(match_operator:SI 3 "shift_operator"
9694 [(match_operand:SI 4 "s_register_operand" "")
9695 (match_operand:SI 5 "reg_or_int_operand" "")])
9696 (match_operand:SI 6 "s_register_operand" "")])
9697 (match_operand:SI 7 "arm_rhs_operand" "")]))
9698 (clobber (match_operand:SI 8 "s_register_operand" ""))]
9699 "TARGET_32BIT"
9700 [(set (match_dup 8)
9701 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9702 (match_dup 6)]))
9703 (set (match_dup 0)
9704 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
9705 "")
9706
9707 (define_insn "*arith_shiftsi_compare0"
9708 [(set (reg:CC_NZ CC_REGNUM)
9709 (compare:CC_NZ
9710 (match_operator:SI 1 "shiftable_operator"
9711 [(match_operator:SI 3 "shift_operator"
9712 [(match_operand:SI 4 "s_register_operand" "r,r")
9713 (match_operand:SI 5 "shift_amount_operand" "M,r")])
9714 (match_operand:SI 2 "s_register_operand" "r,r")])
9715 (const_int 0)))
9716 (set (match_operand:SI 0 "s_register_operand" "=r,r")
9717 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9718 (match_dup 2)]))]
9719 "TARGET_32BIT"
9720 "%i1s%?\\t%0, %2, %4%S3"
9721 [(set_attr "conds" "set")
9722 (set_attr "shift" "4")
9723 (set_attr "arch" "32,a")
9724 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9725
9726 (define_insn "*arith_shiftsi_compare0_scratch"
9727 [(set (reg:CC_NZ CC_REGNUM)
9728 (compare:CC_NZ
9729 (match_operator:SI 1 "shiftable_operator"
9730 [(match_operator:SI 3 "shift_operator"
9731 [(match_operand:SI 4 "s_register_operand" "r,r")
9732 (match_operand:SI 5 "shift_amount_operand" "M,r")])
9733 (match_operand:SI 2 "s_register_operand" "r,r")])
9734 (const_int 0)))
9735 (clobber (match_scratch:SI 0 "=r,r"))]
9736 "TARGET_32BIT"
9737 "%i1s%?\\t%0, %2, %4%S3"
9738 [(set_attr "conds" "set")
9739 (set_attr "shift" "4")
9740 (set_attr "arch" "32,a")
9741 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9742
9743 (define_insn "*sub_shiftsi"
9744 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9745 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9746 (match_operator:SI 2 "shift_operator"
9747 [(match_operand:SI 3 "s_register_operand" "r,r")
9748 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
9749 "TARGET_32BIT"
9750 "sub%?\\t%0, %1, %3%S2"
9751 [(set_attr "predicable" "yes")
9752 (set_attr "predicable_short_it" "no")
9753 (set_attr "shift" "3")
9754 (set_attr "arch" "32,a")
9755 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9756
9757 (define_insn "*sub_shiftsi_compare0"
9758 [(set (reg:CC_NZ CC_REGNUM)
9759 (compare:CC_NZ
9760 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9761 (match_operator:SI 2 "shift_operator"
9762 [(match_operand:SI 3 "s_register_operand" "r,r")
9763 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
9764 (const_int 0)))
9765 (set (match_operand:SI 0 "s_register_operand" "=r,r")
9766 (minus:SI (match_dup 1)
9767 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
9768 "TARGET_32BIT"
9769 "subs%?\\t%0, %1, %3%S2"
9770 [(set_attr "conds" "set")
9771 (set_attr "shift" "3")
9772 (set_attr "arch" "32,a")
9773 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9774
9775 (define_insn "*sub_shiftsi_compare0_scratch"
9776 [(set (reg:CC_NZ CC_REGNUM)
9777 (compare:CC_NZ
9778 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9779 (match_operator:SI 2 "shift_operator"
9780 [(match_operand:SI 3 "s_register_operand" "r,r")
9781 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
9782 (const_int 0)))
9783 (clobber (match_scratch:SI 0 "=r,r"))]
9784 "TARGET_32BIT"
9785 "subs%?\\t%0, %1, %3%S2"
9786 [(set_attr "conds" "set")
9787 (set_attr "shift" "3")
9788 (set_attr "arch" "32,a")
9789 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9790 \f
9791
9792 (define_insn_and_split "*and_scc"
9793 [(set (match_operand:SI 0 "s_register_operand" "=r")
9794 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9795 [(match_operand 2 "cc_register" "") (const_int 0)])
9796 (match_operand:SI 3 "s_register_operand" "r")))]
9797 "TARGET_ARM"
9798 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
9799 "&& reload_completed"
9800 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
9801 (cond_exec (match_dup 4) (set (match_dup 0)
9802 (and:SI (match_dup 3) (const_int 1))))]
9803 {
9804 machine_mode mode = GET_MODE (operands[2]);
9805 enum rtx_code rc = GET_CODE (operands[1]);
9806
9807 /* Note that operands[4] is the same as operands[1],
9808 but with VOIDmode as the result. */
9809 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9810 if (mode == CCFPmode || mode == CCFPEmode)
9811 rc = reverse_condition_maybe_unordered (rc);
9812 else
9813 rc = reverse_condition (rc);
9814 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9815 }
9816 [(set_attr "conds" "use")
9817 (set_attr "type" "multiple")
9818 (set_attr "length" "8")]
9819 )
9820
9821 (define_insn_and_split "*ior_scc"
9822 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9823 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
9824 [(match_operand 2 "cc_register" "") (const_int 0)])
9825 (match_operand:SI 3 "s_register_operand" "0,?r")))]
9826 "TARGET_ARM"
9827 "@
9828 orr%d1\\t%0, %3, #1
9829 #"
9830 "&& reload_completed
9831 && REGNO (operands [0]) != REGNO (operands[3])"
9832 ;; && which_alternative == 1
9833 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
9834 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
9835 (cond_exec (match_dup 4) (set (match_dup 0)
9836 (ior:SI (match_dup 3) (const_int 1))))]
9837 {
9838 machine_mode mode = GET_MODE (operands[2]);
9839 enum rtx_code rc = GET_CODE (operands[1]);
9840
9841 /* Note that operands[4] is the same as operands[1],
9842 but with VOIDmode as the result. */
9843 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9844 if (mode == CCFPmode || mode == CCFPEmode)
9845 rc = reverse_condition_maybe_unordered (rc);
9846 else
9847 rc = reverse_condition (rc);
9848 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9849 }
9850 [(set_attr "conds" "use")
9851 (set_attr "length" "4,8")
9852 (set_attr "type" "logic_imm,multiple")]
9853 )
9854
9855 ; A series of splitters for the compare_scc pattern below. Note that
9856 ; order is important.
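; Illustrative note (assumption): the first splitter below is why, for example,
;
;   int is_negative (int x) { return x < 0; }
;
; can compile to a single "lsr r0, r0, #31" instead of a compare followed by
; conditional moves.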
9857 (define_split
9858 [(set (match_operand:SI 0 "s_register_operand" "")
9859 (lt:SI (match_operand:SI 1 "s_register_operand" "")
9860 (const_int 0)))
9861 (clobber (reg:CC CC_REGNUM))]
9862 "TARGET_32BIT && reload_completed"
9863 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
9864
9865 (define_split
9866 [(set (match_operand:SI 0 "s_register_operand" "")
9867 (ge:SI (match_operand:SI 1 "s_register_operand" "")
9868 (const_int 0)))
9869 (clobber (reg:CC CC_REGNUM))]
9870 "TARGET_32BIT && reload_completed"
9871 [(set (match_dup 0) (not:SI (match_dup 1)))
9872 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
9873
9874 (define_split
9875 [(set (match_operand:SI 0 "s_register_operand" "")
9876 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9877 (const_int 0)))
9878 (clobber (reg:CC CC_REGNUM))]
9879 "arm_arch5t && TARGET_32BIT"
9880 [(set (match_dup 0) (clz:SI (match_dup 1)))
9881 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9882 )
9883
9884 (define_split
9885 [(set (match_operand:SI 0 "s_register_operand" "")
9886 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9887 (const_int 0)))
9888 (clobber (reg:CC CC_REGNUM))]
9889 "TARGET_32BIT && reload_completed"
9890 [(parallel
9891 [(set (reg:CC CC_REGNUM)
9892 (compare:CC (const_int 1) (match_dup 1)))
9893 (set (match_dup 0)
9894 (minus:SI (const_int 1) (match_dup 1)))])
9895 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
9896 (set (match_dup 0) (const_int 0)))])
9897
9898 (define_split
9899 [(set (match_operand:SI 0 "s_register_operand" "")
9900 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9901 (match_operand:SI 2 "const_int_operand" "")))
9902 (clobber (reg:CC CC_REGNUM))]
9903 "TARGET_32BIT && reload_completed"
9904 [(parallel
9905 [(set (reg:CC CC_REGNUM)
9906 (compare:CC (match_dup 1) (match_dup 2)))
9907 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
9908 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
9909 (set (match_dup 0) (const_int 1)))]
9910 {
9911 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
9912 })
9913
9914 (define_split
9915 [(set (match_operand:SI 0 "s_register_operand" "")
9916 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9917 (match_operand:SI 2 "arm_add_operand" "")))
9918 (clobber (reg:CC CC_REGNUM))]
9919 "TARGET_32BIT && reload_completed"
9920 [(parallel
9921 [(set (reg:CC_NZ CC_REGNUM)
9922 (compare:CC_NZ (minus:SI (match_dup 1) (match_dup 2))
9923 (const_int 0)))
9924 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
9925 (cond_exec (ne:CC_NZ (reg:CC_NZ CC_REGNUM) (const_int 0))
9926 (set (match_dup 0) (const_int 1)))])
9927
9928 (define_insn_and_split "*compare_scc"
9929 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9930 (match_operator:SI 1 "arm_comparison_operator"
9931 [(match_operand:SI 2 "s_register_operand" "r,r")
9932 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9933 (clobber (reg:CC CC_REGNUM))]
9934 "TARGET_32BIT"
9935 "#"
9936 "&& reload_completed"
9937 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
9938 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
9939 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
9940 {
9941 rtx tmp1;
9942 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
9943 operands[2], operands[3]);
9944 enum rtx_code rc = GET_CODE (operands[1]);
9945
9946 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
9947
9948 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9949 if (mode == CCFPmode || mode == CCFPEmode)
9950 rc = reverse_condition_maybe_unordered (rc);
9951 else
9952 rc = reverse_condition (rc);
9953 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9954 }
9955 [(set_attr "type" "multiple")]
9956 )
9957
9958 ;; Attempt to improve the sequences generated by the compare_scc splitters
9959 ;; so that they do not use conditional execution.
9960
9961 ;; Rd = (eq (reg1) (const_int 0)) // ARMv5
9962 ;; clz Rd, reg1
9963 ;; lsr Rd, Rd, #5
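;; Illustrative note (assumption): at the source level this corresponds to
;; something like
;;
;;   int is_zero (unsigned int x) { return x == 0; }
;;
;; CLZ returns 32 only for a zero input, and the logical shift right by 5
;; turns 32 into 1 and every smaller count into 0.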
9964 (define_peephole2
9965 [(set (reg:CC CC_REGNUM)
9966 (compare:CC (match_operand:SI 1 "register_operand" "")
9967 (const_int 0)))
9968 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9969 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9970 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9971 (set (match_dup 0) (const_int 1)))]
9972 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9973 [(set (match_dup 0) (clz:SI (match_dup 1)))
9974 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9975 )
9976
9977 ;; Rd = (eq (reg1) (const_int 0)) // !ARMv5
9978 ;; negs Rd, reg1
9979 ;; adc Rd, Rd, reg1
9980 (define_peephole2
9981 [(set (reg:CC CC_REGNUM)
9982 (compare:CC (match_operand:SI 1 "register_operand" "")
9983 (const_int 0)))
9984 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9985 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9986 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9987 (set (match_dup 0) (const_int 1)))
9988 (match_scratch:SI 2 "r")]
9989 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9990 [(parallel
9991 [(set (reg:CC CC_REGNUM)
9992 (compare:CC (const_int 0) (match_dup 1)))
9993 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
9994 (set (match_dup 0)
9995 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
9996 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
9997 )
9998
9999 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
10000 ;; sub Rd, Reg1, reg2
10001 ;; clz Rd, Rd
10002 ;; lsr Rd, Rd, #5
10003 (define_peephole2
10004 [(set (reg:CC CC_REGNUM)
10005 (compare:CC (match_operand:SI 1 "register_operand" "")
10006 (match_operand:SI 2 "arm_rhs_operand" "")))
10007 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
10008 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
10009 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
10010 (set (match_dup 0) (const_int 1)))]
10011 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
10012 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
10013 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
10014 (set (match_dup 0) (clz:SI (match_dup 0)))
10015 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
10016 )
10017
10018
10019 ;; Rd = (eq (reg1) (reg2)) // !ARMv5 or optimising for size.
10020 ;; sub T1, Reg1, reg2
10021 ;; negs Rd, T1
10022 ;; adc Rd, Rd, T1
10023 (define_peephole2
10024 [(set (reg:CC CC_REGNUM)
10025 (compare:CC (match_operand:SI 1 "register_operand" "")
10026 (match_operand:SI 2 "arm_rhs_operand" "")))
10027 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
10028 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
10029 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
10030 (set (match_dup 0) (const_int 1)))
10031 (match_scratch:SI 3 "r")]
10032 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
10033 [(set (match_dup 3) (match_dup 4))
10034 (parallel
10035 [(set (reg:CC CC_REGNUM)
10036 (compare:CC (const_int 0) (match_dup 3)))
10037 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
10038 (set (match_dup 0)
10039 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
10040 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
10041 "
10042 if (CONST_INT_P (operands[2]))
10043 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
10044 else
10045 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
10046 ")
10047
10048 (define_insn "*cond_move"
10049 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10050 (if_then_else:SI (match_operator 3 "equality_operator"
10051 [(match_operator 4 "arm_comparison_operator"
10052 [(match_operand 5 "cc_register" "") (const_int 0)])
10053 (const_int 0)])
10054 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
10055 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
10056 "TARGET_ARM"
10057 "*
10058 if (GET_CODE (operands[3]) == NE)
10059 {
10060 if (which_alternative != 1)
10061 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
10062 if (which_alternative != 0)
10063 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
10064 return \"\";
10065 }
10066 if (which_alternative != 0)
10067 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
10068 if (which_alternative != 1)
10069 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
10070 return \"\";
10071 "
10072 [(set_attr "conds" "use")
10073 (set_attr_alternative "type"
10074 [(if_then_else (match_operand 2 "const_int_operand" "")
10075 (const_string "mov_imm")
10076 (const_string "mov_reg"))
10077 (if_then_else (match_operand 1 "const_int_operand" "")
10078 (const_string "mov_imm")
10079 (const_string "mov_reg"))
10080 (const_string "multiple")])
10081 (set_attr "length" "4,4,8")]
10082 )
10083
10084 (define_insn "*cond_arith"
10085 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10086 (match_operator:SI 5 "shiftable_operator"
10087 [(match_operator:SI 4 "arm_comparison_operator"
10088 [(match_operand:SI 2 "s_register_operand" "r,r")
10089 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10090 (match_operand:SI 1 "s_register_operand" "0,?r")]))
10091 (clobber (reg:CC CC_REGNUM))]
10092 "TARGET_ARM"
10093 "*
10094 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
10095 return \"%i5\\t%0, %1, %2, lsr #31\";
10096
10097 output_asm_insn (\"cmp\\t%2, %3\", operands);
10098 if (GET_CODE (operands[5]) == AND)
10099 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
10100 else if (GET_CODE (operands[5]) == MINUS)
10101 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
10102 else if (which_alternative != 0)
10103 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
10104 return \"%i5%d4\\t%0, %1, #1\";
10105 "
10106 [(set_attr "conds" "clob")
10107 (set_attr "length" "12")
10108 (set_attr "type" "multiple")]
10109 )
10110
10111 (define_insn "*cond_sub"
10112 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10113 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
10114 (match_operator:SI 4 "arm_comparison_operator"
10115 [(match_operand:SI 2 "s_register_operand" "r,r")
10116 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10117 (clobber (reg:CC CC_REGNUM))]
10118 "TARGET_ARM"
10119 "*
10120 output_asm_insn (\"cmp\\t%2, %3\", operands);
10121 if (which_alternative != 0)
10122 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
10123 return \"sub%d4\\t%0, %1, #1\";
10124 "
10125 [(set_attr "conds" "clob")
10126 (set_attr "length" "8,12")
10127 (set_attr "type" "multiple")]
10128 )
10129
10130 (define_insn "*cmp_ite0"
10131 [(set (match_operand 6 "dominant_cc_register" "")
10132 (compare
10133 (if_then_else:SI
10134 (match_operator 4 "arm_comparison_operator"
10135 [(match_operand:SI 0 "s_register_operand"
10136 "l,l,l,r,r,r,r,r,r")
10137 (match_operand:SI 1 "arm_add_operand"
10138 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10139 (match_operator:SI 5 "arm_comparison_operator"
10140 [(match_operand:SI 2 "s_register_operand"
10141 "l,r,r,l,l,r,r,r,r")
10142 (match_operand:SI 3 "arm_add_operand"
10143 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
10144 (const_int 0))
10145 (const_int 0)))]
10146 "TARGET_32BIT"
10147 "*
10148 {
10149 static const char * const cmp1[NUM_OF_COND_CMP][2] =
10150 {
10151 {\"cmp%d5\\t%0, %1\",
10152 \"cmp%d4\\t%2, %3\"},
10153 {\"cmn%d5\\t%0, #%n1\",
10154 \"cmp%d4\\t%2, %3\"},
10155 {\"cmp%d5\\t%0, %1\",
10156 \"cmn%d4\\t%2, #%n3\"},
10157 {\"cmn%d5\\t%0, #%n1\",
10158 \"cmn%d4\\t%2, #%n3\"}
10159 };
10160 static const char * const cmp2[NUM_OF_COND_CMP][2] =
10161 {
10162 {\"cmp\\t%2, %3\",
10163 \"cmp\\t%0, %1\"},
10164 {\"cmp\\t%2, %3\",
10165 \"cmn\\t%0, #%n1\"},
10166 {\"cmn\\t%2, #%n3\",
10167 \"cmp\\t%0, %1\"},
10168 {\"cmn\\t%2, #%n3\",
10169 \"cmn\\t%0, #%n1\"}
10170 };
10171 static const char * const ite[2] =
10172 {
10173 \"it\\t%d5\",
10174 \"it\\t%d4\"
10175 };
10176 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10177 CMP_CMP, CMN_CMP, CMP_CMP,
10178 CMN_CMP, CMP_CMN, CMN_CMN};
10179 int swap =
10180 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10181
10182 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10183 if (TARGET_THUMB2) {
10184 output_asm_insn (ite[swap], operands);
10185 }
10186 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10187 return \"\";
10188 }"
10189 [(set_attr "conds" "set")
10190 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10191 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
10192 (set_attr "type" "multiple")
10193 (set_attr_alternative "length"
10194 [(const_int 6)
10195 (const_int 8)
10196 (const_int 8)
10197 (const_int 8)
10198 (const_int 8)
10199 (if_then_else (eq_attr "is_thumb" "no")
10200 (const_int 8)
10201 (const_int 10))
10202 (if_then_else (eq_attr "is_thumb" "no")
10203 (const_int 8)
10204 (const_int 10))
10205 (if_then_else (eq_attr "is_thumb" "no")
10206 (const_int 8)
10207 (const_int 10))
10208 (if_then_else (eq_attr "is_thumb" "no")
10209 (const_int 8)
10210 (const_int 10))])]
10211 )
10212
10213 (define_insn "*cmp_ite1"
10214 [(set (match_operand 6 "dominant_cc_register" "")
10215 (compare
10216 (if_then_else:SI
10217 (match_operator 4 "arm_comparison_operator"
10218 [(match_operand:SI 0 "s_register_operand"
10219 "l,l,l,r,r,r,r,r,r")
10220 (match_operand:SI 1 "arm_add_operand"
10221 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10222 (match_operator:SI 5 "arm_comparison_operator"
10223 [(match_operand:SI 2 "s_register_operand"
10224 "l,r,r,l,l,r,r,r,r")
10225 (match_operand:SI 3 "arm_add_operand"
10226 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
10227 (const_int 1))
10228 (const_int 0)))]
10229 "TARGET_32BIT"
10230 "*
10231 {
10232 static const char * const cmp1[NUM_OF_COND_CMP][2] =
10233 {
10234 {\"cmp\\t%0, %1\",
10235 \"cmp\\t%2, %3\"},
10236 {\"cmn\\t%0, #%n1\",
10237 \"cmp\\t%2, %3\"},
10238 {\"cmp\\t%0, %1\",
10239 \"cmn\\t%2, #%n3\"},
10240 {\"cmn\\t%0, #%n1\",
10241 \"cmn\\t%2, #%n3\"}
10242 };
10243 static const char * const cmp2[NUM_OF_COND_CMP][2] =
10244 {
10245 {\"cmp%d4\\t%2, %3\",
10246 \"cmp%D5\\t%0, %1\"},
10247 {\"cmp%d4\\t%2, %3\",
10248 \"cmn%D5\\t%0, #%n1\"},
10249 {\"cmn%d4\\t%2, #%n3\",
10250 \"cmp%D5\\t%0, %1\"},
10251 {\"cmn%d4\\t%2, #%n3\",
10252 \"cmn%D5\\t%0, #%n1\"}
10253 };
10254 static const char * const ite[2] =
10255 {
10256 \"it\\t%d4\",
10257 \"it\\t%D5\"
10258 };
10259 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10260 CMP_CMP, CMN_CMP, CMP_CMP,
10261 CMN_CMP, CMP_CMN, CMN_CMN};
10262 int swap =
10263 comparison_dominates_p (GET_CODE (operands[5]),
10264 reverse_condition (GET_CODE (operands[4])));
10265
10266 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10267 if (TARGET_THUMB2) {
10268 output_asm_insn (ite[swap], operands);
10269 }
10270 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10271 return \"\";
10272 }"
10273 [(set_attr "conds" "set")
10274 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10275 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
10276 (set_attr_alternative "length"
10277 [(const_int 6)
10278 (const_int 8)
10279 (const_int 8)
10280 (const_int 8)
10281 (const_int 8)
10282 (if_then_else (eq_attr "is_thumb" "no")
10283 (const_int 8)
10284 (const_int 10))
10285 (if_then_else (eq_attr "is_thumb" "no")
10286 (const_int 8)
10287 (const_int 10))
10288 (if_then_else (eq_attr "is_thumb" "no")
10289 (const_int 8)
10290 (const_int 10))
10291 (if_then_else (eq_attr "is_thumb" "no")
10292 (const_int 8)
10293 (const_int 10))])
10294 (set_attr "type" "multiple")]
10295 )
10296
10297 (define_insn "*cmp_and"
10298 [(set (match_operand 6 "dominant_cc_register" "")
10299 (compare
10300 (and:SI
10301 (match_operator 4 "arm_comparison_operator"
10302 [(match_operand:SI 0 "s_register_operand"
10303 "l,l,l,r,r,r,r,r,r,r")
10304 (match_operand:SI 1 "arm_add_operand"
10305 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
10306 (match_operator:SI 5 "arm_comparison_operator"
10307 [(match_operand:SI 2 "s_register_operand"
10308 "l,r,r,l,l,r,r,r,r,r")
10309 (match_operand:SI 3 "arm_add_operand"
10310 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
10311 (const_int 0)))]
10312 "TARGET_32BIT"
10313 "*
10314 {
10315 static const char *const cmp1[NUM_OF_COND_CMP][2] =
10316 {
10317 {\"cmp%d5\\t%0, %1\",
10318 \"cmp%d4\\t%2, %3\"},
10319 {\"cmn%d5\\t%0, #%n1\",
10320 \"cmp%d4\\t%2, %3\"},
10321 {\"cmp%d5\\t%0, %1\",
10322 \"cmn%d4\\t%2, #%n3\"},
10323 {\"cmn%d5\\t%0, #%n1\",
10324 \"cmn%d4\\t%2, #%n3\"}
10325 };
10326 static const char *const cmp2[NUM_OF_COND_CMP][2] =
10327 {
10328 {\"cmp\\t%2, %3\",
10329 \"cmp\\t%0, %1\"},
10330 {\"cmp\\t%2, %3\",
10331 \"cmn\\t%0, #%n1\"},
10332 {\"cmn\\t%2, #%n3\",
10333 \"cmp\\t%0, %1\"},
10334 {\"cmn\\t%2, #%n3\",
10335 \"cmn\\t%0, #%n1\"}
10336 };
10337 static const char *const ite[2] =
10338 {
10339 \"it\\t%d5\",
10340 \"it\\t%d4\"
10341 };
10342 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
10343 CMP_CMP, CMN_CMP, CMP_CMP,
10344 CMP_CMP, CMN_CMP, CMP_CMN,
10345 CMN_CMN};
10346 int swap =
10347 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10348
10349 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10350 if (TARGET_THUMB2) {
10351 output_asm_insn (ite[swap], operands);
10352 }
10353 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10354 return \"\";
10355 }"
10356 [(set_attr "conds" "set")
10357 (set_attr "predicable" "no")
10358 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
10359 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
10360 (set_attr_alternative "length"
10361 [(const_int 6)
10362 (const_int 8)
10363 (const_int 8)
10364 (const_int 8)
10365 (const_int 8)
10366 (const_int 6)
10367 (if_then_else (eq_attr "is_thumb" "no")
10368 (const_int 8)
10369 (const_int 10))
10370 (if_then_else (eq_attr "is_thumb" "no")
10371 (const_int 8)
10372 (const_int 10))
10373 (if_then_else (eq_attr "is_thumb" "no")
10374 (const_int 8)
10375 (const_int 10))
10376 (if_then_else (eq_attr "is_thumb" "no")
10377 (const_int 8)
10378 (const_int 10))])
10379 (set_attr "type" "multiple")]
10380 )
10381
10382 (define_insn "*cmp_ior"
10383 [(set (match_operand 6 "dominant_cc_register" "")
10384 (compare
10385 (ior:SI
10386 (match_operator 4 "arm_comparison_operator"
10387 [(match_operand:SI 0 "s_register_operand"
10388 "l,l,l,r,r,r,r,r,r,r")
10389 (match_operand:SI 1 "arm_add_operand"
10390 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
10391 (match_operator:SI 5 "arm_comparison_operator"
10392 [(match_operand:SI 2 "s_register_operand"
10393 "l,r,r,l,l,r,r,r,r,r")
10394 (match_operand:SI 3 "arm_add_operand"
10395 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
10396 (const_int 0)))]
10397 "TARGET_32BIT"
10398 "*
10399 {
10400 static const char *const cmp1[NUM_OF_COND_CMP][2] =
10401 {
10402 {\"cmp\\t%0, %1\",
10403 \"cmp\\t%2, %3\"},
10404 {\"cmn\\t%0, #%n1\",
10405 \"cmp\\t%2, %3\"},
10406 {\"cmp\\t%0, %1\",
10407 \"cmn\\t%2, #%n3\"},
10408 {\"cmn\\t%0, #%n1\",
10409 \"cmn\\t%2, #%n3\"}
10410 };
10411 static const char *const cmp2[NUM_OF_COND_CMP][2] =
10412 {
10413 {\"cmp%D4\\t%2, %3\",
10414 \"cmp%D5\\t%0, %1\"},
10415 {\"cmp%D4\\t%2, %3\",
10416 \"cmn%D5\\t%0, #%n1\"},
10417 {\"cmn%D4\\t%2, #%n3\",
10418 \"cmp%D5\\t%0, %1\"},
10419 {\"cmn%D4\\t%2, #%n3\",
10420 \"cmn%D5\\t%0, #%n1\"}
10421 };
10422 static const char *const ite[2] =
10423 {
10424 \"it\\t%D4\",
10425 \"it\\t%D5\"
10426 };
10427 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
10428 CMP_CMP, CMN_CMP, CMP_CMP,
10429 CMP_CMP, CMN_CMP, CMP_CMN,
10430 CMN_CMN};
10431 int swap =
10432 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10433
10434 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10435 if (TARGET_THUMB2) {
10436 output_asm_insn (ite[swap], operands);
10437 }
10438 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10439 return \"\";
10440 }
10441 "
10442 [(set_attr "conds" "set")
10443 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
10444 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
10445 (set_attr_alternative "length"
10446 [(const_int 6)
10447 (const_int 8)
10448 (const_int 8)
10449 (const_int 8)
10450 (const_int 8)
10451 (const_int 6)
10452 (if_then_else (eq_attr "is_thumb" "no")
10453 (const_int 8)
10454 (const_int 10))
10455 (if_then_else (eq_attr "is_thumb" "no")
10456 (const_int 8)
10457 (const_int 10))
10458 (if_then_else (eq_attr "is_thumb" "no")
10459 (const_int 8)
10460 (const_int 10))
10461 (if_then_else (eq_attr "is_thumb" "no")
10462 (const_int 8)
10463 (const_int 10))])
10464 (set_attr "type" "multiple")]
10465 )
10466
10467 (define_insn_and_split "*ior_scc_scc"
10468 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
10469 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
10470 [(match_operand:SI 1 "s_register_operand" "l,r")
10471 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10472 (match_operator:SI 6 "arm_comparison_operator"
10473 [(match_operand:SI 4 "s_register_operand" "l,r")
10474 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
10475 (clobber (reg:CC CC_REGNUM))]
10476 "TARGET_32BIT
10477 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
10478 != CCmode)"
10479 "#"
10480 "TARGET_32BIT && reload_completed"
10481 [(set (match_dup 7)
10482 (compare
10483 (ior:SI
10484 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10485 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10486 (const_int 0)))
10487 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10488 "operands[7]
10489 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10490 DOM_CC_X_OR_Y),
10491 CC_REGNUM);"
10492 [(set_attr "conds" "clob")
10493 (set_attr "enabled_for_short_it" "yes,no")
10494 (set_attr "length" "16")
10495 (set_attr "type" "multiple")]
10496 )
10497
10498 ; If the above pattern is followed by a CMP insn, then the compare is
10499 ; redundant, since we can rework the conditional instruction that follows.
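; As an illustrative sketch (not the exact RTL handled here), for code like
;   t = (a == 0) | (b == 0); if (t) ...
; the combined comparison sequence is roughly
;   cmp   a, #0
;   cmpne b, #0
; which already leaves the IOR of the two conditions in the flags, so a
; trailing "cmp t, #0" can be dropped and the branch can use the flags
; directly.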
10500 (define_insn_and_split "*ior_scc_scc_cmp"
10501 [(set (match_operand 0 "dominant_cc_register" "")
10502 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
10503 [(match_operand:SI 1 "s_register_operand" "l,r")
10504 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10505 (match_operator:SI 6 "arm_comparison_operator"
10506 [(match_operand:SI 4 "s_register_operand" "l,r")
10507 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
10508 (const_int 0)))
10509 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
10510 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10511 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10512 "TARGET_32BIT"
10513 "#"
10514 "TARGET_32BIT && reload_completed"
10515 [(set (match_dup 0)
10516 (compare
10517 (ior:SI
10518 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10519 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10520 (const_int 0)))
10521 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10522 ""
10523 [(set_attr "conds" "set")
10524 (set_attr "enabled_for_short_it" "yes,no")
10525 (set_attr "length" "16")
10526 (set_attr "type" "multiple")]
10527 )
10528
10529 (define_insn_and_split "*and_scc_scc"
10530 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
10531 (and:SI (match_operator:SI 3 "arm_comparison_operator"
10532 [(match_operand:SI 1 "s_register_operand" "l,r")
10533 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10534 (match_operator:SI 6 "arm_comparison_operator"
10535 [(match_operand:SI 4 "s_register_operand" "l,r")
10536 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
10537 (clobber (reg:CC CC_REGNUM))]
10538 "TARGET_32BIT
10539 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10540 != CCmode)"
10541 "#"
10542 "TARGET_32BIT && reload_completed
10543 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10544 != CCmode)"
10545 [(set (match_dup 7)
10546 (compare
10547 (and:SI
10548 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10549 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10550 (const_int 0)))
10551 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10552 "operands[7]
10553 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10554 DOM_CC_X_AND_Y),
10555 CC_REGNUM);"
10556 [(set_attr "conds" "clob")
10557 (set_attr "enabled_for_short_it" "yes,no")
10558 (set_attr "length" "16")
10559 (set_attr "type" "multiple")]
10560 )
10561
10562 ; If the above pattern is followed by a CMP insn, then the compare is
10563 ; redundant, since we can rework the conditional instruction that follows.
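; For example (an illustrative sketch), for "(a >= 0) & (b >= 0)" followed by
; a test of the result, a sequence along the lines of
;   cmp   b, #0
;   cmpge a, #0
; already leaves the AND of the two conditions in the flags, so the trailing
; compare of the scc result with zero is unnecessary.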
10564 (define_insn_and_split "*and_scc_scc_cmp"
10565 [(set (match_operand 0 "dominant_cc_register" "")
10566 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
10567 [(match_operand:SI 1 "s_register_operand" "l,r")
10568 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10569 (match_operator:SI 6 "arm_comparison_operator"
10570 [(match_operand:SI 4 "s_register_operand" "l,r")
10571 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
10572 (const_int 0)))
10573 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
10574 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10575 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10576 "TARGET_32BIT"
10577 "#"
10578 "TARGET_32BIT && reload_completed"
10579 [(set (match_dup 0)
10580 (compare
10581 (and:SI
10582 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10583 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10584 (const_int 0)))
10585 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10586 ""
10587 [(set_attr "conds" "set")
10588 (set_attr "enabled_for_short_it" "yes,no")
10589 (set_attr "length" "16")
10590 (set_attr "type" "multiple")]
10591 )
10592
10593 ;; If there is no dominance in the comparison, then we can still save an
10594 ;; instruction in the AND case, since we know that the second compare
10595 ;; need only zero the value if false (if true, then the value is already
10596 ;; correct).
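;; As a rough illustration (the exact scc expansion may differ), a value such
;; as "(a == b) & (c > d)" could be computed as
;;   cmp    a, b
;;   mov    r0, #0
;;   moveq  r0, #1      @ r0 = (a == b)
;;   cmp    c, d
;;   movle  r0, #0      @ clear the result unless c > d
;; rather than materialising both scc values and ANDing them.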
10597 (define_insn_and_split "*and_scc_scc_nodom"
10598 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
10599 (and:SI (match_operator:SI 3 "arm_comparison_operator"
10600 [(match_operand:SI 1 "s_register_operand" "r,r,0")
10601 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
10602 (match_operator:SI 6 "arm_comparison_operator"
10603 [(match_operand:SI 4 "s_register_operand" "r,r,r")
10604 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
10605 (clobber (reg:CC CC_REGNUM))]
10606 "TARGET_32BIT
10607 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10608 == CCmode)"
10609 "#"
10610 "TARGET_32BIT && reload_completed"
10611 [(parallel [(set (match_dup 0)
10612 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
10613 (clobber (reg:CC CC_REGNUM))])
10614 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
10615 (set (match_dup 0)
10616 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
10617 (match_dup 0)
10618 (const_int 0)))]
10619 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
10620 operands[4], operands[5]),
10621 CC_REGNUM);
10622 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
10623 operands[5]);"
10624 [(set_attr "conds" "clob")
10625 (set_attr "length" "20")
10626 (set_attr "type" "multiple")]
10627 )
10628
10629 (define_split
10630 [(set (reg:CC_NZ CC_REGNUM)
10631 (compare:CC_NZ (ior:SI
10632 (and:SI (match_operand:SI 0 "s_register_operand" "")
10633 (const_int 1))
10634 (match_operator:SI 1 "arm_comparison_operator"
10635 [(match_operand:SI 2 "s_register_operand" "")
10636 (match_operand:SI 3 "arm_add_operand" "")]))
10637 (const_int 0)))
10638 (clobber (match_operand:SI 4 "s_register_operand" ""))]
10639 "TARGET_ARM"
10640 [(set (match_dup 4)
10641 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10642 (match_dup 0)))
10643 (set (reg:CC_NZ CC_REGNUM)
10644 (compare:CC_NZ (and:SI (match_dup 4) (const_int 1))
10645 (const_int 0)))]
10646 "")
10647
10648 (define_split
10649 [(set (reg:CC_NZ CC_REGNUM)
10650 (compare:CC_NZ (ior:SI
10651 (match_operator:SI 1 "arm_comparison_operator"
10652 [(match_operand:SI 2 "s_register_operand" "")
10653 (match_operand:SI 3 "arm_add_operand" "")])
10654 (and:SI (match_operand:SI 0 "s_register_operand" "")
10655 (const_int 1)))
10656 (const_int 0)))
10657 (clobber (match_operand:SI 4 "s_register_operand" ""))]
10658 "TARGET_ARM"
10659 [(set (match_dup 4)
10660 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10661 (match_dup 0)))
10662 (set (reg:CC_NZ CC_REGNUM)
10663 (compare:CC_NZ (and:SI (match_dup 4) (const_int 1))
10664 (const_int 0)))]
10665 "")
10666 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
10667
10668 (define_insn_and_split "*negscc"
10669 [(set (match_operand:SI 0 "s_register_operand" "=r")
10670 (neg:SI (match_operator 3 "arm_comparison_operator"
10671 [(match_operand:SI 1 "s_register_operand" "r")
10672 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
10673 (clobber (reg:CC CC_REGNUM))]
10674 "TARGET_ARM"
10675 "#"
10676 "&& reload_completed"
10677 [(const_int 0)]
10678 {
10679 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
10680
10681 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
10682 {
10683 /* Emit mov\\t%0, %1, asr #31 */
10684 emit_insn (gen_rtx_SET (operands[0],
10685 gen_rtx_ASHIFTRT (SImode,
10686 operands[1],
10687 GEN_INT (31))));
10688 DONE;
10689 }
10690 else if (GET_CODE (operands[3]) == NE)
10691 {
10692 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
10693 if (CONST_INT_P (operands[2]))
10694 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
10695 gen_int_mode (-INTVAL (operands[2]),
10696 SImode)));
10697 else
10698 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
10699
10700 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10701 gen_rtx_NE (SImode,
10702 cc_reg,
10703 const0_rtx),
10704 gen_rtx_SET (operands[0],
10705 GEN_INT (~0))));
10706 DONE;
10707 }
10708 else
10709 {
10710 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
10711 emit_insn (gen_rtx_SET (cc_reg,
10712 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
10713 enum rtx_code rc = GET_CODE (operands[3]);
10714
10715 rc = reverse_condition (rc);
10716 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10717 gen_rtx_fmt_ee (rc,
10718 VOIDmode,
10719 cc_reg,
10720 const0_rtx),
10721 gen_rtx_SET (operands[0], const0_rtx)));
10722 rc = GET_CODE (operands[3]);
10723 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10724 gen_rtx_fmt_ee (rc,
10725 VOIDmode,
10726 cc_reg,
10727 const0_rtx),
10728 gen_rtx_SET (operands[0],
10729 GEN_INT (~0))));
10730 DONE;
10731 }
10732 FAIL;
10733 }
10734 [(set_attr "conds" "clob")
10735 (set_attr "length" "12")
10736 (set_attr "type" "multiple")]
10737 )
10738
10739 (define_insn_and_split "movcond_addsi"
10740 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
10741 (if_then_else:SI
10742 (match_operator 5 "comparison_operator"
10743 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
10744 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
10745 (const_int 0)])
10746 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
10747 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
10748 (clobber (reg:CC CC_REGNUM))]
10749 "TARGET_32BIT"
10750 "#"
10751 "&& reload_completed"
10752 [(set (reg:CC_NZ CC_REGNUM)
10753 (compare:CC_NZ
10754 (plus:SI (match_dup 3)
10755 (match_dup 4))
10756 (const_int 0)))
10757 (set (match_dup 0) (match_dup 1))
10758 (cond_exec (match_dup 6)
10759 (set (match_dup 0) (match_dup 2)))]
10760 "
10761 {
10762 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
10763 operands[3], operands[4]);
10764 enum rtx_code rc = GET_CODE (operands[5]);
10765 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10766 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
10767 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
10768 rc = reverse_condition (rc);
10769 else
10770 std::swap (operands[1], operands[2]);
10771
10772 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10773 }
10774 "
10775 [(set_attr "conds" "clob")
10776 (set_attr "enabled_for_short_it" "no,yes,yes")
10777 (set_attr "type" "multiple")]
10778 )
10779
10780 (define_insn "movcond"
10781 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10782 (if_then_else:SI
10783 (match_operator 5 "arm_comparison_operator"
10784 [(match_operand:SI 3 "s_register_operand" "r,r,r")
10785 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
10786 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
10787 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
10788 (clobber (reg:CC CC_REGNUM))]
10789 "TARGET_ARM"
10790 "*
10791 if (GET_CODE (operands[5]) == LT
10792 && (operands[4] == const0_rtx))
10793 {
10794 if (which_alternative != 1 && REG_P (operands[1]))
10795 {
10796 if (operands[2] == const0_rtx)
10797 return \"and\\t%0, %1, %3, asr #31\";
10798 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
10799 }
10800 else if (which_alternative != 0 && REG_P (operands[2]))
10801 {
10802 if (operands[1] == const0_rtx)
10803 return \"bic\\t%0, %2, %3, asr #31\";
10804 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
10805 }
10806 /* The only case that falls through to here is when both ops 1 & 2
10807 are constants. */
10808 }
10809
10810 if (GET_CODE (operands[5]) == GE
10811 && (operands[4] == const0_rtx))
10812 {
10813 if (which_alternative != 1 && REG_P (operands[1]))
10814 {
10815 if (operands[2] == const0_rtx)
10816 return \"bic\\t%0, %1, %3, asr #31\";
10817 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
10818 }
10819 else if (which_alternative != 0 && REG_P (operands[2]))
10820 {
10821 if (operands[1] == const0_rtx)
10822 return \"and\\t%0, %2, %3, asr #31\";
10823 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
10824 }
10825 /* The only case that falls through to here is when both ops 1 & 2
10826 are constants. */
10827 }
10828 if (CONST_INT_P (operands[4])
10829 && !const_ok_for_arm (INTVAL (operands[4])))
10830 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
10831 else
10832 output_asm_insn (\"cmp\\t%3, %4\", operands);
10833 if (which_alternative != 0)
10834 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
10835 if (which_alternative != 1)
10836 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
10837 return \"\";
10838 "
10839 [(set_attr "conds" "clob")
10840 (set_attr "length" "8,8,12")
10841 (set_attr "type" "multiple")]
10842 )
10843
10844 ;; ??? The patterns below need checking for Thumb-2 usefulness.
10845
10846 (define_insn "*ifcompare_plus_move"
10847 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10848 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10849 [(match_operand:SI 4 "s_register_operand" "r,r")
10850 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10851 (plus:SI
10852 (match_operand:SI 2 "s_register_operand" "r,r")
10853 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
10854 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10855 (clobber (reg:CC CC_REGNUM))]
10856 "TARGET_ARM"
10857 "#"
10858 [(set_attr "conds" "clob")
10859 (set_attr "length" "8,12")
10860 (set_attr "type" "multiple")]
10861 )
10862
10863 (define_insn "*if_plus_move"
10864 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10865 (if_then_else:SI
10866 (match_operator 4 "arm_comparison_operator"
10867 [(match_operand 5 "cc_register" "") (const_int 0)])
10868 (plus:SI
10869 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10870 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
10871 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
10872 "TARGET_ARM"
10873 "@
10874 add%d4\\t%0, %2, %3
10875 sub%d4\\t%0, %2, #%n3
10876 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
10877 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
10878 [(set_attr "conds" "use")
10879 (set_attr "length" "4,4,8,8")
10880 (set_attr_alternative "type"
10881 [(if_then_else (match_operand 3 "const_int_operand" "")
10882 (const_string "alu_imm" )
10883 (const_string "alu_sreg"))
10884 (const_string "alu_imm")
10885 (const_string "multiple")
10886 (const_string "multiple")])]
10887 )
10888
10889 (define_insn "*ifcompare_move_plus"
10890 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10891 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10892 [(match_operand:SI 4 "s_register_operand" "r,r")
10893 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10894 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10895 (plus:SI
10896 (match_operand:SI 2 "s_register_operand" "r,r")
10897 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
10898 (clobber (reg:CC CC_REGNUM))]
10899 "TARGET_ARM"
10900 "#"
10901 [(set_attr "conds" "clob")
10902 (set_attr "length" "8,12")
10903 (set_attr "type" "multiple")]
10904 )
10905
10906 (define_insn "*if_move_plus"
10907 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10908 (if_then_else:SI
10909 (match_operator 4 "arm_comparison_operator"
10910 [(match_operand 5 "cc_register" "") (const_int 0)])
10911 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
10912 (plus:SI
10913 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10914 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
10915 "TARGET_ARM"
10916 "@
10917 add%D4\\t%0, %2, %3
10918 sub%D4\\t%0, %2, #%n3
10919 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
10920 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
10921 [(set_attr "conds" "use")
10922 (set_attr "length" "4,4,8,8")
10923 (set_attr_alternative "type"
10924 [(if_then_else (match_operand 3 "const_int_operand" "")
10925 (const_string "alu_imm" )
10926 (const_string "alu_sreg"))
10927 (const_string "alu_imm")
10928 (const_string "multiple")
10929 (const_string "multiple")])]
10930 )
10931
10932 (define_insn "*ifcompare_arith_arith"
10933 [(set (match_operand:SI 0 "s_register_operand" "=r")
10934 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
10935 [(match_operand:SI 5 "s_register_operand" "r")
10936 (match_operand:SI 6 "arm_add_operand" "rIL")])
10937 (match_operator:SI 8 "shiftable_operator"
10938 [(match_operand:SI 1 "s_register_operand" "r")
10939 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10940 (match_operator:SI 7 "shiftable_operator"
10941 [(match_operand:SI 3 "s_register_operand" "r")
10942 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
10943 (clobber (reg:CC CC_REGNUM))]
10944 "TARGET_ARM"
10945 "#"
10946 [(set_attr "conds" "clob")
10947 (set_attr "length" "12")
10948 (set_attr "type" "multiple")]
10949 )
10950
10951 (define_insn "*if_arith_arith"
10952 [(set (match_operand:SI 0 "s_register_operand" "=r")
10953 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
10954 [(match_operand 8 "cc_register" "") (const_int 0)])
10955 (match_operator:SI 6 "shiftable_operator"
10956 [(match_operand:SI 1 "s_register_operand" "r")
10957 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10958 (match_operator:SI 7 "shiftable_operator"
10959 [(match_operand:SI 3 "s_register_operand" "r")
10960 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
10961 "TARGET_ARM"
10962 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
10963 [(set_attr "conds" "use")
10964 (set_attr "length" "8")
10965 (set_attr "type" "multiple")]
10966 )
10967
10968 (define_insn "*ifcompare_arith_move"
10969 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10970 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10971 [(match_operand:SI 2 "s_register_operand" "r,r")
10972 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
10973 (match_operator:SI 7 "shiftable_operator"
10974 [(match_operand:SI 4 "s_register_operand" "r,r")
10975 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
10976 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10977 (clobber (reg:CC CC_REGNUM))]
10978 "TARGET_ARM"
10979 "*
10980 /* If we have an operation where (op x 0) is the identity operation, the
10981 conditional operator is LT or GE, we are comparing against zero, and
10982 everything is in registers, then we can do this in two instructions.  */
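/* For example (an illustrative sketch), "x = a < 0 ? y + z : y" with
   everything in registers can be emitted as
     and  x, z, a, asr #31
     add  x, y, x
   using the all-ones/zero mask produced by the arithmetic shift.  */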
10983 if (operands[3] == const0_rtx
10984 && GET_CODE (operands[7]) != AND
10985 && REG_P (operands[5])
10986 && REG_P (operands[1])
10987 && REGNO (operands[1]) == REGNO (operands[4])
10988 && REGNO (operands[4]) != REGNO (operands[0]))
10989 {
10990 if (GET_CODE (operands[6]) == LT)
10991 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10992 else if (GET_CODE (operands[6]) == GE)
10993 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10994 }
10995 if (CONST_INT_P (operands[3])
10996 && !const_ok_for_arm (INTVAL (operands[3])))
10997 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
10998 else
10999 output_asm_insn (\"cmp\\t%2, %3\", operands);
11000 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
11001 if (which_alternative != 0)
11002 return \"mov%D6\\t%0, %1\";
11003 return \"\";
11004 "
11005 [(set_attr "conds" "clob")
11006 (set_attr "length" "8,12")
11007 (set_attr "type" "multiple")]
11008 )
11009
11010 (define_insn "*if_arith_move"
11011 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11012 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
11013 [(match_operand 6 "cc_register" "") (const_int 0)])
11014 (match_operator:SI 5 "shiftable_operator"
11015 [(match_operand:SI 2 "s_register_operand" "r,r")
11016 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
11017 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
11018 "TARGET_ARM"
11019 "@
11020 %I5%d4\\t%0, %2, %3
11021 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
11022 [(set_attr "conds" "use")
11023 (set_attr "length" "4,8")
11024 (set_attr_alternative "type"
11025 [(if_then_else (match_operand 3 "const_int_operand" "")
11026 (if_then_else (match_operand 5 "alu_shift_operator_lsl_1_to_4")
11027 (const_string "alu_shift_imm_lsl_1to4")
11028 (const_string "alu_shift_imm_other"))
11029 (const_string "alu_shift_reg"))
11030 (const_string "multiple")])]
11031 )
11032
11033 (define_insn "*ifcompare_move_arith"
11034 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11035 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
11036 [(match_operand:SI 4 "s_register_operand" "r,r")
11037 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11038 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11039 (match_operator:SI 7 "shiftable_operator"
11040 [(match_operand:SI 2 "s_register_operand" "r,r")
11041 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
11042 (clobber (reg:CC CC_REGNUM))]
11043 "TARGET_ARM"
11044 "*
11045 /* If we have an operation where (op x 0) is the identity operation, the
11046 conditional operator is LT or GE, we are comparing against zero, and
11047 everything is in registers, then we can do this in two instructions.  */
11048 if (operands[5] == const0_rtx
11049 && GET_CODE (operands[7]) != AND
11050 && REG_P (operands[3])
11051 && REG_P (operands[1])
11052 && REGNO (operands[1]) == REGNO (operands[2])
11053 && REGNO (operands[2]) != REGNO (operands[0]))
11054 {
11055 if (GET_CODE (operands[6]) == GE)
11056 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
11057 else if (GET_CODE (operands[6]) == LT)
11058 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
11059 }
11060
11061 if (CONST_INT_P (operands[5])
11062 && !const_ok_for_arm (INTVAL (operands[5])))
11063 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
11064 else
11065 output_asm_insn (\"cmp\\t%4, %5\", operands);
11066
11067 if (which_alternative != 0)
11068 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
11069 return \"%I7%D6\\t%0, %2, %3\";
11070 "
11071 [(set_attr "conds" "clob")
11072 (set_attr "length" "8,12")
11073 (set_attr "type" "multiple")]
11074 )
11075
11076 (define_insn "*if_move_arith"
11077 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11078 (if_then_else:SI
11079 (match_operator 4 "arm_comparison_operator"
11080 [(match_operand 6 "cc_register" "") (const_int 0)])
11081 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11082 (match_operator:SI 5 "shiftable_operator"
11083 [(match_operand:SI 2 "s_register_operand" "r,r")
11084 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
11085 "TARGET_ARM"
11086 "@
11087 %I5%D4\\t%0, %2, %3
11088 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
11089 [(set_attr "conds" "use")
11090 (set_attr "length" "4,8")
11091 (set_attr_alternative "type"
11092 [(if_then_else (match_operand 3 "const_int_operand" "")
11093 (if_then_else (match_operand 5 "alu_shift_operator_lsl_1_to_4")
11094 (const_string "alu_shift_imm_lsl_1to4")
11095 (const_string "alu_shift_imm_other"))
11096 (const_string "alu_shift_reg"))
11097 (const_string "multiple")])]
11098 )
11099
11100 (define_insn "*ifcompare_move_not"
11101 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11102 (if_then_else:SI
11103 (match_operator 5 "arm_comparison_operator"
11104 [(match_operand:SI 3 "s_register_operand" "r,r")
11105 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11106 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11107 (not:SI
11108 (match_operand:SI 2 "s_register_operand" "r,r"))))
11109 (clobber (reg:CC CC_REGNUM))]
11110 "TARGET_ARM"
11111 "#"
11112 [(set_attr "conds" "clob")
11113 (set_attr "length" "8,12")
11114 (set_attr "type" "multiple")]
11115 )
11116
11117 (define_insn "*if_move_not"
11118 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11119 (if_then_else:SI
11120 (match_operator 4 "arm_comparison_operator"
11121 [(match_operand 3 "cc_register" "") (const_int 0)])
11122 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
11123 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
11124 "TARGET_ARM"
11125 "@
11126 mvn%D4\\t%0, %2
11127 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
11128 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
11129 [(set_attr "conds" "use")
11131 (set_attr "length" "4,8,8")
11132 (set_attr "type" "mvn_reg,multiple,multiple")]
11133 )
11134
11135 (define_insn "*ifcompare_not_move"
11136 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11137 (if_then_else:SI
11138 (match_operator 5 "arm_comparison_operator"
11139 [(match_operand:SI 3 "s_register_operand" "r,r")
11140 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11141 (not:SI
11142 (match_operand:SI 2 "s_register_operand" "r,r"))
11143 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11144 (clobber (reg:CC CC_REGNUM))]
11145 "TARGET_ARM"
11146 "#"
11147 [(set_attr "conds" "clob")
11148 (set_attr "length" "8,12")
11149 (set_attr "type" "multiple")]
11150 )
11151
11152 (define_insn "*if_not_move"
11153 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11154 (if_then_else:SI
11155 (match_operator 4 "arm_comparison_operator"
11156 [(match_operand 3 "cc_register" "") (const_int 0)])
11157 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
11158 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11159 "TARGET_ARM"
11160 "@
11161 mvn%d4\\t%0, %2
11162 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
11163 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
11164 [(set_attr "conds" "use")
11165 (set_attr "type" "mvn_reg,multiple,multiple")
11166 (set_attr "length" "4,8,8")]
11167 )
11168
11169 (define_insn "*ifcompare_shift_move"
11170 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11171 (if_then_else:SI
11172 (match_operator 6 "arm_comparison_operator"
11173 [(match_operand:SI 4 "s_register_operand" "r,r")
11174 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11175 (match_operator:SI 7 "shift_operator"
11176 [(match_operand:SI 2 "s_register_operand" "r,r")
11177 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
11178 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11179 (clobber (reg:CC CC_REGNUM))]
11180 "TARGET_ARM"
11181 "#"
11182 [(set_attr "conds" "clob")
11183 (set_attr "length" "8,12")
11184 (set_attr "type" "multiple")]
11185 )
11186
11187 (define_insn "*if_shift_move"
11188 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11189 (if_then_else:SI
11190 (match_operator 5 "arm_comparison_operator"
11191 [(match_operand 6 "cc_register" "") (const_int 0)])
11192 (match_operator:SI 4 "shift_operator"
11193 [(match_operand:SI 2 "s_register_operand" "r,r,r")
11194 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
11195 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11196 "TARGET_ARM"
11197 "@
11198 mov%d5\\t%0, %2%S4
11199 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
11200 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
11201 [(set_attr "conds" "use")
11202 (set_attr "shift" "2")
11203 (set_attr "length" "4,8,8")
11204 (set_attr_alternative "type"
11205 [(if_then_else (match_operand 3 "const_int_operand" "")
11206 (const_string "mov_shift" )
11207 (const_string "mov_shift_reg"))
11208 (const_string "multiple")
11209 (const_string "multiple")])]
11210 )
11211
11212 (define_insn "*ifcompare_move_shift"
11213 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11214 (if_then_else:SI
11215 (match_operator 6 "arm_comparison_operator"
11216 [(match_operand:SI 4 "s_register_operand" "r,r")
11217 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11218 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11219 (match_operator:SI 7 "shift_operator"
11220 [(match_operand:SI 2 "s_register_operand" "r,r")
11221 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
11222 (clobber (reg:CC CC_REGNUM))]
11223 "TARGET_ARM"
11224 "#"
11225 [(set_attr "conds" "clob")
11226 (set_attr "length" "8,12")
11227 (set_attr "type" "multiple")]
11228 )
11229
11230 (define_insn "*if_move_shift"
11231 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11232 (if_then_else:SI
11233 (match_operator 5 "arm_comparison_operator"
11234 [(match_operand 6 "cc_register" "") (const_int 0)])
11235 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
11236 (match_operator:SI 4 "shift_operator"
11237 [(match_operand:SI 2 "s_register_operand" "r,r,r")
11238 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
11239 "TARGET_ARM"
11240 "@
11241 mov%D5\\t%0, %2%S4
11242 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
11243 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
11244 [(set_attr "conds" "use")
11245 (set_attr "shift" "2")
11246 (set_attr "length" "4,8,8")
11247 (set_attr_alternative "type"
11248 [(if_then_else (match_operand 3 "const_int_operand" "")
11249 (const_string "mov_shift" )
11250 (const_string "mov_shift_reg"))
11251 (const_string "multiple")
11252 (const_string "multiple")])]
11253 )
11254
11255 (define_insn "*ifcompare_shift_shift"
11256 [(set (match_operand:SI 0 "s_register_operand" "=r")
11257 (if_then_else:SI
11258 (match_operator 7 "arm_comparison_operator"
11259 [(match_operand:SI 5 "s_register_operand" "r")
11260 (match_operand:SI 6 "arm_add_operand" "rIL")])
11261 (match_operator:SI 8 "shift_operator"
11262 [(match_operand:SI 1 "s_register_operand" "r")
11263 (match_operand:SI 2 "arm_rhs_operand" "rM")])
11264 (match_operator:SI 9 "shift_operator"
11265 [(match_operand:SI 3 "s_register_operand" "r")
11266 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
11267 (clobber (reg:CC CC_REGNUM))]
11268 "TARGET_ARM"
11269 "#"
11270 [(set_attr "conds" "clob")
11271 (set_attr "length" "12")
11272 (set_attr "type" "multiple")]
11273 )
11274
11275 (define_insn "*if_shift_shift"
11276 [(set (match_operand:SI 0 "s_register_operand" "=r")
11277 (if_then_else:SI
11278 (match_operator 5 "arm_comparison_operator"
11279 [(match_operand 8 "cc_register" "") (const_int 0)])
11280 (match_operator:SI 6 "shift_operator"
11281 [(match_operand:SI 1 "s_register_operand" "r")
11282 (match_operand:SI 2 "arm_rhs_operand" "rM")])
11283 (match_operator:SI 7 "shift_operator"
11284 [(match_operand:SI 3 "s_register_operand" "r")
11285 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
11286 "TARGET_ARM"
11287 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
11288 [(set_attr "conds" "use")
11289 (set_attr "shift" "1")
11290 (set_attr "length" "8")
11291 (set (attr "type") (if_then_else
11292 (and (match_operand 2 "const_int_operand" "")
11293 (match_operand 4 "const_int_operand" ""))
11294 (const_string "mov_shift")
11295 (const_string "mov_shift_reg")))]
11296 )
11297
11298 (define_insn "*ifcompare_not_arith"
11299 [(set (match_operand:SI 0 "s_register_operand" "=r")
11300 (if_then_else:SI
11301 (match_operator 6 "arm_comparison_operator"
11302 [(match_operand:SI 4 "s_register_operand" "r")
11303 (match_operand:SI 5 "arm_add_operand" "rIL")])
11304 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
11305 (match_operator:SI 7 "shiftable_operator"
11306 [(match_operand:SI 2 "s_register_operand" "r")
11307 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
11308 (clobber (reg:CC CC_REGNUM))]
11309 "TARGET_ARM"
11310 "#"
11311 [(set_attr "conds" "clob")
11312 (set_attr "length" "12")
11313 (set_attr "type" "multiple")]
11314 )
11315
11316 (define_insn "*if_not_arith"
11317 [(set (match_operand:SI 0 "s_register_operand" "=r")
11318 (if_then_else:SI
11319 (match_operator 5 "arm_comparison_operator"
11320 [(match_operand 4 "cc_register" "") (const_int 0)])
11321 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
11322 (match_operator:SI 6 "shiftable_operator"
11323 [(match_operand:SI 2 "s_register_operand" "r")
11324 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
11325 "TARGET_ARM"
11326 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
11327 [(set_attr "conds" "use")
11328 (set_attr "type" "mvn_reg")
11329 (set_attr "length" "8")]
11330 )
11331
11332 (define_insn "*ifcompare_arith_not"
11333 [(set (match_operand:SI 0 "s_register_operand" "=r")
11334 (if_then_else:SI
11335 (match_operator 6 "arm_comparison_operator"
11336 [(match_operand:SI 4 "s_register_operand" "r")
11337 (match_operand:SI 5 "arm_add_operand" "rIL")])
11338 (match_operator:SI 7 "shiftable_operator"
11339 [(match_operand:SI 2 "s_register_operand" "r")
11340 (match_operand:SI 3 "arm_rhs_operand" "rI")])
11341 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
11342 (clobber (reg:CC CC_REGNUM))]
11343 "TARGET_ARM"
11344 "#"
11345 [(set_attr "conds" "clob")
11346 (set_attr "length" "12")
11347 (set_attr "type" "multiple")]
11348 )
11349
11350 (define_insn "*if_arith_not"
11351 [(set (match_operand:SI 0 "s_register_operand" "=r")
11352 (if_then_else:SI
11353 (match_operator 5 "arm_comparison_operator"
11354 [(match_operand 4 "cc_register" "") (const_int 0)])
11355 (match_operator:SI 6 "shiftable_operator"
11356 [(match_operand:SI 2 "s_register_operand" "r")
11357 (match_operand:SI 3 "arm_rhs_operand" "rI")])
11358 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
11359 "TARGET_ARM"
11360 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
11361 [(set_attr "conds" "use")
11362 (set_attr "type" "multiple")
11363 (set_attr "length" "8")]
11364 )
11365
11366 (define_insn "*ifcompare_neg_move"
11367 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11368 (if_then_else:SI
11369 (match_operator 5 "arm_comparison_operator"
11370 [(match_operand:SI 3 "s_register_operand" "r,r")
11371 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11372 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
11373 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11374 (clobber (reg:CC CC_REGNUM))]
11375 "TARGET_ARM"
11376 "#"
11377 [(set_attr "conds" "clob")
11378 (set_attr "length" "8,12")
11379 (set_attr "type" "multiple")]
11380 )
11381
11382 (define_insn_and_split "*if_neg_move"
11383 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
11384 (if_then_else:SI
11385 (match_operator 4 "arm_comparison_operator"
11386 [(match_operand 3 "cc_register" "") (const_int 0)])
11387 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
11388 (match_operand:SI 1 "s_register_operand" "0,0")))]
11389 "TARGET_32BIT && !TARGET_COND_ARITH"
11390 "#"
11391 "&& reload_completed"
11392 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
11393 (set (match_dup 0) (neg:SI (match_dup 2))))]
11394 ""
11395 [(set_attr "conds" "use")
11396 (set_attr "length" "4")
11397 (set_attr "arch" "t2,32")
11398 (set_attr "enabled_for_short_it" "yes,no")
11399 (set_attr "type" "logic_shift_imm")]
11400 )
11401
11402 (define_insn "*ifcompare_move_neg"
11403 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11404 (if_then_else:SI
11405 (match_operator 5 "arm_comparison_operator"
11406 [(match_operand:SI 3 "s_register_operand" "r,r")
11407 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11408 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11409 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
11410 (clobber (reg:CC CC_REGNUM))]
11411 "TARGET_ARM"
11412 "#"
11413 [(set_attr "conds" "clob")
11414 (set_attr "length" "8,12")
11415 (set_attr "type" "multiple")]
11416 )
11417
11418 (define_insn_and_split "*if_move_neg"
11419 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
11420 (if_then_else:SI
11421 (match_operator 4 "arm_comparison_operator"
11422 [(match_operand 3 "cc_register" "") (const_int 0)])
11423 (match_operand:SI 1 "s_register_operand" "0,0")
11424 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
11425 "TARGET_32BIT"
11426 "#"
11427 "&& reload_completed"
11428 [(cond_exec (match_dup 5)
11429 (set (match_dup 0) (neg:SI (match_dup 2))))]
11430 {
11431 machine_mode mode = GET_MODE (operands[3]);
11432 rtx_code rc = GET_CODE (operands[4]);
11433
11434 if (mode == CCFPmode || mode == CCFPEmode)
11435 rc = reverse_condition_maybe_unordered (rc);
11436 else
11437 rc = reverse_condition (rc);
11438
11439 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
11440 }
11441 [(set_attr "conds" "use")
11442 (set_attr "length" "4")
11443 (set_attr "arch" "t2,32")
11444 (set_attr "enabled_for_short_it" "yes,no")
11445 (set_attr "type" "logic_shift_imm")]
11446 )
11447
11448 (define_insn "*arith_adjacentmem"
11449 [(set (match_operand:SI 0 "s_register_operand" "=r")
11450 (match_operator:SI 1 "shiftable_operator"
11451 [(match_operand:SI 2 "memory_operand" "m")
11452 (match_operand:SI 3 "memory_operand" "m")]))
11453 (clobber (match_scratch:SI 4 "=r"))]
11454 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
11455 "*
11456 {
11457 rtx ldm[3];
11458 rtx arith[4];
11459 rtx base_reg;
11460 HOST_WIDE_INT val1 = 0, val2 = 0;
11461
11462 if (REGNO (operands[0]) > REGNO (operands[4]))
11463 {
11464 ldm[1] = operands[4];
11465 ldm[2] = operands[0];
11466 }
11467 else
11468 {
11469 ldm[1] = operands[0];
11470 ldm[2] = operands[4];
11471 }
11472
11473 base_reg = XEXP (operands[2], 0);
11474
11475 if (!REG_P (base_reg))
11476 {
11477 val1 = INTVAL (XEXP (base_reg, 1));
11478 base_reg = XEXP (base_reg, 0);
11479 }
11480
11481 if (!REG_P (XEXP (operands[3], 0)))
11482 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
11483
11484 arith[0] = operands[0];
11485 arith[3] = operands[1];
11486
11487 if (val1 < val2)
11488 {
11489 arith[1] = ldm[1];
11490 arith[2] = ldm[2];
11491 }
11492 else
11493 {
11494 arith[1] = ldm[2];
11495 arith[2] = ldm[1];
11496 }
11497
11498 ldm[0] = base_reg;
11499 if (val1 != 0 && val2 != 0)
11500 {
11501 rtx ops[3];
11502
11503 if (val1 == 4 || val2 == 4)
11504 /* Other val must be 8, since we know they are adjacent and neither
11505 is zero. */
11506 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
11507 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
11508 {
11509 ldm[0] = ops[0] = operands[4];
11510 ops[1] = base_reg;
11511 ops[2] = GEN_INT (val1);
11512 output_add_immediate (ops);
11513 if (val1 < val2)
11514 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11515 else
11516 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11517 }
11518 else
11519 {
11520 /* Offset is out of range for a single add, so use two ldr insns.  */
11521 ops[0] = ldm[1];
11522 ops[1] = base_reg;
11523 ops[2] = GEN_INT (val1);
11524 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11525 ops[0] = ldm[2];
11526 ops[2] = GEN_INT (val2);
11527 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11528 }
11529 }
11530 else if (val1 != 0)
11531 {
11532 if (val1 < val2)
11533 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11534 else
11535 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11536 }
11537 else
11538 {
11539 if (val1 < val2)
11540 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11541 else
11542 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11543 }
11544 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
11545 return \"\";
11546 }"
11547 [(set_attr "length" "12")
11548 (set_attr "predicable" "yes")
11549 (set_attr "type" "load_4")]
11550 )
11551
11552 ; This pattern is never tried by combine, so do it as a peephole.
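; For example (an illustrative sketch), the two-instruction sequence
;   mov  r0, r1
;   cmp  r1, #0
; can be collapsed into a single flag-setting move such as "subs r0, r1, #0",
; matched by the combined compare-and-move pattern produced here.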
11553
11554 (define_peephole2
11555 [(set (match_operand:SI 0 "arm_general_register_operand" "")
11556 (match_operand:SI 1 "arm_general_register_operand" ""))
11557 (set (reg:CC CC_REGNUM)
11558 (compare:CC (match_dup 1) (const_int 0)))]
11559 "TARGET_ARM"
11560 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
11561 (set (match_dup 0) (match_dup 1))])]
11562 ""
11563 )
11564
11565 (define_split
11566 [(set (match_operand:SI 0 "s_register_operand" "")
11567 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
11568 (const_int 0))
11569 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
11570 [(match_operand:SI 3 "s_register_operand" "")
11571 (match_operand:SI 4 "arm_rhs_operand" "")]))))
11572 (clobber (match_operand:SI 5 "s_register_operand" ""))]
11573 "TARGET_ARM"
11574 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
11575 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
11576 (match_dup 5)))]
11577 ""
11578 )
11579
11580 ;; This split can be used because CC_Z mode implies that the following
11581 ;; branch will be an equality, or an unsigned inequality, so the sign
11582 ;; extension is not needed.
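;; For example (illustrative), a test such as ((x << 24) == 0x2a000000),
;; where x was loaded as a byte, can instead be checked as
;;   ldrb  r0, [...]
;;   cmp   r0, #42
;; since only the low byte needs to be compared.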
11583
11584 (define_split
11585 [(set (reg:CC_Z CC_REGNUM)
11586 (compare:CC_Z
11587 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
11588 (const_int 24))
11589 (match_operand 1 "const_int_operand" "")))
11590 (clobber (match_scratch:SI 2 ""))]
11591 "TARGET_ARM
11592 && ((UINTVAL (operands[1]))
11593 == ((UINTVAL (operands[1])) >> 24) << 24)"
11594 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
11595 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
11596 "
11597 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
11598 "
11599 )
11600 ;; ??? Check the patterns above for Thumb-2 usefulness
11601
11602 (define_expand "prologue"
11603 [(clobber (const_int 0))]
11604 "TARGET_EITHER"
11605 "if (TARGET_32BIT)
11606 arm_expand_prologue ();
11607 else
11608 thumb1_expand_prologue ();
11609 DONE;
11610 "
11611 )
11612
11613 (define_expand "epilogue"
11614 [(clobber (const_int 0))]
11615 "TARGET_EITHER"
11616 "
11617 if (crtl->calls_eh_return)
11618 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
11619 if (TARGET_THUMB1)
11620 {
11621 thumb1_expand_epilogue ();
11622 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
11623 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
11624 }
11625 else if (HAVE_return)
11626 {
11627 /* HAVE_return already tests USE_RETURN_INSN (FALSE), so there is
11628 no need to test it explicitly again.  */
11629 emit_jump_insn (gen_return ());
11630 }
11631 else if (TARGET_32BIT)
11632 {
11633 arm_expand_epilogue (true);
11634 }
11635 DONE;
11636 "
11637 )
11638
11639 ;; Note - although unspec_volatiles USE all hard registers,
11640 ;; USEs are ignored after reload has completed. Thus we need
11641 ;; to add an unspec of the link register to ensure that flow
11642 ;; does not think that it is unused by the sibcall branch that
11643 ;; will replace the standard function epilogue.
11644 (define_expand "sibcall_epilogue"
11645 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
11646 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
11647 "TARGET_32BIT"
11648 "
11649 arm_expand_epilogue (false);
11650 DONE;
11651 "
11652 )
11653
11654 (define_expand "eh_epilogue"
11655 [(use (match_operand:SI 0 "register_operand"))
11656 (use (match_operand:SI 1 "register_operand"))
11657 (use (match_operand:SI 2 "register_operand"))]
11658 "TARGET_EITHER"
11659 "
11660 {
11661 cfun->machine->eh_epilogue_sp_ofs = operands[1];
11662 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
11663 {
11664 rtx ra = gen_rtx_REG (Pmode, 2);
11665
11666 emit_move_insn (ra, operands[2]);
11667 operands[2] = ra;
11668 }
11669 /* This is a hack -- we may have crystallized the function type too
11670 early. */
11671 cfun->machine->func_type = 0;
11672 }"
11673 )
11674
11675 ;; This split is only used during output to reduce the number of patterns
11676 ;; that need assembler instructions added to them. We allowed the setting
11677 ;; of the conditions to be implicit during rtl generation so that
11678 ;; the conditional compare patterns would work. However, this conflicts to
11679 ;; some extent with the conditional data operations, so we have to split them
11680 ;; up again here.
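;; For example (a sketch), after reload "x = (a < b) ? x : y" becomes an
;; explicit comparison followed by a conditionally executed move:
;;   cmp    a, b
;;   movge  x, y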
11681
11682 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
11683 ;; conditional execution sufficient?
11684
11685 (define_split
11686 [(set (match_operand:SI 0 "s_register_operand" "")
11687 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11688 [(match_operand 2 "" "") (match_operand 3 "" "")])
11689 (match_dup 0)
11690 (match_operand 4 "" "")))
11691 (clobber (reg:CC CC_REGNUM))]
11692 "TARGET_ARM && reload_completed"
11693 [(set (match_dup 5) (match_dup 6))
11694 (cond_exec (match_dup 7)
11695 (set (match_dup 0) (match_dup 4)))]
11696 "
11697 {
11698 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11699 operands[2], operands[3]);
11700 enum rtx_code rc = GET_CODE (operands[1]);
11701
11702 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11703 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11704 if (mode == CCFPmode || mode == CCFPEmode)
11705 rc = reverse_condition_maybe_unordered (rc);
11706 else
11707 rc = reverse_condition (rc);
11708
11709 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
11710 }"
11711 )
11712
11713 (define_split
11714 [(set (match_operand:SI 0 "s_register_operand" "")
11715 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11716 [(match_operand 2 "" "") (match_operand 3 "" "")])
11717 (match_operand 4 "" "")
11718 (match_dup 0)))
11719 (clobber (reg:CC CC_REGNUM))]
11720 "TARGET_ARM && reload_completed"
11721 [(set (match_dup 5) (match_dup 6))
11722 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
11723 (set (match_dup 0) (match_dup 4)))]
11724 "
11725 {
11726 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11727 operands[2], operands[3]);
11728
11729 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11730 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11731 }"
11732 )
11733
11734 (define_split
11735 [(set (match_operand:SI 0 "s_register_operand" "")
11736 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11737 [(match_operand 2 "" "") (match_operand 3 "" "")])
11738 (match_operand 4 "" "")
11739 (match_operand 5 "" "")))
11740 (clobber (reg:CC CC_REGNUM))]
11741 "TARGET_ARM && reload_completed"
11742 [(set (match_dup 6) (match_dup 7))
11743 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
11744 (set (match_dup 0) (match_dup 4)))
11745 (cond_exec (match_dup 8)
11746 (set (match_dup 0) (match_dup 5)))]
11747 "
11748 {
11749 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11750 operands[2], operands[3]);
11751 enum rtx_code rc = GET_CODE (operands[1]);
11752
11753 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11754 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11755 if (mode == CCFPmode || mode == CCFPEmode)
11756 rc = reverse_condition_maybe_unordered (rc);
11757 else
11758 rc = reverse_condition (rc);
11759
11760 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
11761 }"
11762 )
11763
11764 (define_split
11765 [(set (match_operand:SI 0 "s_register_operand" "")
11766 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11767 [(match_operand:SI 2 "s_register_operand" "")
11768 (match_operand:SI 3 "arm_add_operand" "")])
11769 (match_operand:SI 4 "arm_rhs_operand" "")
11770 (not:SI
11771 (match_operand:SI 5 "s_register_operand" ""))))
11772 (clobber (reg:CC CC_REGNUM))]
11773 "TARGET_ARM && reload_completed"
11774 [(set (match_dup 6) (match_dup 7))
11775 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
11776 (set (match_dup 0) (match_dup 4)))
11777 (cond_exec (match_dup 8)
11778 (set (match_dup 0) (not:SI (match_dup 5))))]
11779 "
11780 {
11781 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11782 operands[2], operands[3]);
11783 enum rtx_code rc = GET_CODE (operands[1]);
11784
11785 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11786 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11787 if (mode == CCFPmode || mode == CCFPEmode)
11788 rc = reverse_condition_maybe_unordered (rc);
11789 else
11790 rc = reverse_condition (rc);
11791
11792 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
11793 }"
11794 )
11795
11796 (define_insn "*cond_move_not"
11797 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11798 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
11799 [(match_operand 3 "cc_register" "") (const_int 0)])
11800 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11801 (not:SI
11802 (match_operand:SI 2 "s_register_operand" "r,r"))))]
11803 "TARGET_ARM"
11804 "@
11805 mvn%D4\\t%0, %2
11806 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
11807 [(set_attr "conds" "use")
11808 (set_attr "type" "mvn_reg,multiple")
11809 (set_attr "length" "4,8")]
11810 )
11811
11812 ;; The next two patterns occur when an AND operation is followed by a
11813 ;; scc insn sequence
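;; For example (illustrative), a one-bit sign extract such as
;;   r = -((x >> n) & 1);
;; can be emitted by the first pattern below as
;;   ands  r0, r1, #(1 << n)
;;   mvnne r0, #0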
11814
11815 (define_insn "*sign_extract_onebit"
11816 [(set (match_operand:SI 0 "s_register_operand" "=r")
11817 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11818 (const_int 1)
11819 (match_operand:SI 2 "const_int_operand" "n")))
11820 (clobber (reg:CC CC_REGNUM))]
11821 "TARGET_ARM"
11822 "*
11823 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11824 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
11825 return \"mvnne\\t%0, #0\";
11826 "
11827 [(set_attr "conds" "clob")
11828 (set_attr "length" "8")
11829 (set_attr "type" "multiple")]
11830 )
11831
11832 (define_insn "*not_signextract_onebit"
11833 [(set (match_operand:SI 0 "s_register_operand" "=r")
11834 (not:SI
11835 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11836 (const_int 1)
11837 (match_operand:SI 2 "const_int_operand" "n"))))
11838 (clobber (reg:CC CC_REGNUM))]
11839 "TARGET_ARM"
11840 "*
11841 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11842 output_asm_insn (\"tst\\t%1, %2\", operands);
11843 output_asm_insn (\"mvneq\\t%0, #0\", operands);
11844 return \"movne\\t%0, #0\";
11845 "
11846 [(set_attr "conds" "clob")
11847 (set_attr "length" "12")
11848 (set_attr "type" "multiple")]
11849 )
11850 ;; ??? The above patterns need auditing for Thumb-2
11851
11852 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
11853 ;; expressions. For simplicity, the first register is also in the unspec
11854 ;; part.
11855 ;; To avoid relying on a GNU extension, the length attribute is computed
11856 ;; by the C function arm_attr_length_push_multi.
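;; Purely illustrative output (the register list is hypothetical): with three
;; registers in the PARALLEL this prints
;;   push    {r4, r5, lr}
;; whereas a single register in ARM mode is stored with a writeback STR:
;;   str     r4, [sp, #-4]!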
11857 (define_insn "*push_multi"
11858 [(match_parallel 2 "multi_register_push"
11859 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
11860 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
11861 UNSPEC_PUSH_MULT))])]
11862 ""
11863 "*
11864 {
11865 int num_saves = XVECLEN (operands[2], 0);
11866
11867 /* For the StrongARM at least it is faster to
11868 use STR to store only a single register.
11869 In Thumb mode always use push, and the assembler will pick
11870 something appropriate. */
11871 if (num_saves == 1 && TARGET_ARM)
11872 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
11873 else
11874 {
11875 int i;
11876 char pattern[100];
11877
11878 if (TARGET_32BIT)
11879 strcpy (pattern, \"push%?\\t{%1\");
11880 else
11881 strcpy (pattern, \"push\\t{%1\");
11882
11883 for (i = 1; i < num_saves; i++)
11884 {
11885 strcat (pattern, \", %|\");
11886 strcat (pattern,
11887 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
11888 }
11889
11890 strcat (pattern, \"}\");
11891 output_asm_insn (pattern, operands);
11892 }
11893
11894 return \"\";
11895 }"
11896 [(set_attr "type" "store_16")
11897 (set (attr "length")
11898 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
11899 )
11900
11901 (define_insn "stack_tie"
11902 [(set (mem:BLK (scratch))
11903 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
11904 (match_operand:SI 1 "s_register_operand" "rk")]
11905 UNSPEC_PRLG_STK))]
11906 ""
11907 ""
11908 [(set_attr "length" "0")
11909 (set_attr "type" "block")]
11910 )
11911
11912 ;; Pop (as used in epilogue RTL)
11913 ;;
11914 (define_insn "*load_multiple_with_writeback"
11915 [(match_parallel 0 "load_multiple_operation"
11916 [(set (match_operand:SI 1 "s_register_operand" "+rk")
11917 (plus:SI (match_dup 1)
11918 (match_operand:SI 2 "const_int_I_operand" "I")))
11919 (set (match_operand:SI 3 "s_register_operand" "=rk")
11920 (mem:SI (match_dup 1)))
11921 ])]
11922 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11923 "*
11924 {
11925 arm_output_multireg_pop (operands, /*return_pc=*/false,
11926 /*cond=*/const_true_rtx,
11927 /*reverse=*/false,
11928 /*update=*/true);
11929 return \"\";
11930 }
11931 "
11932 [(set_attr "type" "load_16")
11933 (set_attr "predicable" "yes")
11934 (set (attr "length")
11935 (symbol_ref "arm_attr_length_pop_multi (operands,
11936 /*return_pc=*/false,
11937 /*write_back_p=*/true)"))]
11938 )
11939
11940 ;; Pop with return (as used in epilogue RTL)
11941 ;;
11942 ;; This instruction is generated when the registers are popped at the end of
11943 ;; the epilogue.  Here, instead of popping the value into LR and then
11944 ;; generating a jump to LR, the value is popped directly into PC.  Hence, the
11945 ;; pattern is combined with (return).
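;; Illustrative output only (the register list depends on what the prologue
;; saved): arm_output_multireg_pop prints a single instruction such as
;;   pop     {r4, r5, pc}
;; so the load into PC performs the return.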
11946 (define_insn "*pop_multiple_with_writeback_and_return"
11947 [(match_parallel 0 "pop_multiple_return"
11948 [(return)
11949 (set (match_operand:SI 1 "s_register_operand" "+rk")
11950 (plus:SI (match_dup 1)
11951 (match_operand:SI 2 "const_int_I_operand" "I")))
11952 (set (match_operand:SI 3 "s_register_operand" "=rk")
11953 (mem:SI (match_dup 1)))
11954 ])]
11955 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11956 "*
11957 {
11958 arm_output_multireg_pop (operands, /*return_pc=*/true,
11959 /*cond=*/const_true_rtx,
11960 /*reverse=*/false,
11961 /*update=*/true);
11962 return \"\";
11963 }
11964 "
11965 [(set_attr "type" "load_16")
11966 (set_attr "predicable" "yes")
11967 (set (attr "length")
11968 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11969 /*write_back_p=*/true)"))]
11970 )
11971
11972 (define_insn "*pop_multiple_with_return"
11973 [(match_parallel 0 "pop_multiple_return"
11974 [(return)
11975 (set (match_operand:SI 2 "s_register_operand" "=rk")
11976 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11977 ])]
11978 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11979 "*
11980 {
11981 arm_output_multireg_pop (operands, /*return_pc=*/true,
11982 /*cond=*/const_true_rtx,
11983 /*reverse=*/false,
11984 /*update=*/false);
11985 return \"\";
11986 }
11987 "
11988 [(set_attr "type" "load_16")
11989 (set_attr "predicable" "yes")
11990 (set (attr "length")
11991 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11992 /*write_back_p=*/false)"))]
11993 )
11994
11995 ;; Load into PC and return
11996 (define_insn "*ldr_with_return"
11997 [(return)
11998 (set (reg:SI PC_REGNUM)
11999 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
12000 "TARGET_32BIT && (reload_in_progress || reload_completed)"
12001 "ldr%?\t%|pc, [%0], #4"
12002 [(set_attr "type" "load_4")
12003 (set_attr "predicable" "yes")]
12004 )
12005 ;; Pop for floating point registers (as used in epilogue RTL)
12006 (define_insn "*vfp_pop_multiple_with_writeback"
12007 [(match_parallel 0 "pop_multiple_fp"
12008 [(set (match_operand:SI 1 "s_register_operand" "+rk")
12009 (plus:SI (match_dup 1)
12010 (match_operand:SI 2 "const_int_I_operand" "I")))
12011 (set (match_operand:DF 3 "vfp_hard_register_operand" "")
12012 (mem:DF (match_dup 1)))])]
12013 "TARGET_32BIT && TARGET_VFP_BASE"
12014 "*
12015 {
12016 int num_regs = XVECLEN (operands[0], 0);
12017 char pattern[100];
12018 rtx op_list[2];
12019 strcpy (pattern, \"vldm\\t\");
12020 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
12021 strcat (pattern, \"!, {\");
12022 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
12023 strcat (pattern, \"%P0\");
12024 if ((num_regs - 1) > 1)
12025 {
12026 strcat (pattern, \"-%P1\");
12027 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
12028 }
12029
12030 strcat (pattern, \"}\");
12031 output_asm_insn (pattern, op_list);
12032 return \"\";
12033 }
12034 "
12035 [(set_attr "type" "load_16")
12036 (set_attr "conds" "unconditional")
12037 (set_attr "predicable" "no")]
12038 )
12039
12040 ;; Special patterns for dealing with the constant pool
12041
12042 (define_insn "align_4"
12043 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
12044 "TARGET_EITHER"
12045 "*
12046 assemble_align (32);
12047 return \"\";
12048 "
12049 [(set_attr "type" "no_insn")]
12050 )
12051
12052 (define_insn "align_8"
12053 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
12054 "TARGET_EITHER"
12055 "*
12056 assemble_align (64);
12057 return \"\";
12058 "
12059 [(set_attr "type" "no_insn")]
12060 )
12061
12062 (define_insn "consttable_end"
12063 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
12064 "TARGET_EITHER"
12065 "*
12066 making_const_table = FALSE;
12067 return \"\";
12068 "
12069 [(set_attr "type" "no_insn")]
12070 )
12071
12072 (define_insn "consttable_1"
12073 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
12074 "TARGET_EITHER"
12075 "*
12076 making_const_table = TRUE;
12077 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
12078 assemble_zeros (3);
12079 return \"\";
12080 "
12081 [(set_attr "length" "4")
12082 (set_attr "type" "no_insn")]
12083 )
12084
12085 (define_insn "consttable_2"
12086 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
12087 "TARGET_EITHER"
12088 "*
12089 {
12090 rtx x = operands[0];
12091 making_const_table = TRUE;
12092 switch (GET_MODE_CLASS (GET_MODE (x)))
12093 {
12094 case MODE_FLOAT:
12095 arm_emit_fp16_const (x);
12096 break;
12097 default:
12098 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
12099 assemble_zeros (2);
12100 break;
12101 }
12102 return \"\";
12103 }"
12104 [(set_attr "length" "4")
12105 (set_attr "type" "no_insn")]
12106 )
12107
12108 (define_insn "consttable_4"
12109 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
12110 "TARGET_EITHER"
12111 "*
12112 {
12113 rtx x = operands[0];
12114 making_const_table = TRUE;
12115 scalar_float_mode float_mode;
12116 if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
12117 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
12118 else
12119 {
12120 /* XXX: Sometimes gcc does something really dumb and ends up with
12121 a HIGH in a constant pool entry, usually because it's trying to
12122 load into a VFP register. We know this will always be used in
12123 combination with a LO_SUM which ignores the high bits, so just
12124 strip off the HIGH. */
12125 if (GET_CODE (x) == HIGH)
12126 x = XEXP (x, 0);
12127 assemble_integer (x, 4, BITS_PER_WORD, 1);
12128 mark_symbol_refs_as_used (x);
12129 }
12130 return \"\";
12131 }"
12132 [(set_attr "length" "4")
12133 (set_attr "type" "no_insn")]
12134 )
12135
12136 (define_insn "consttable_8"
12137 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
12138 "TARGET_EITHER"
12139 "*
12140 {
12141 making_const_table = TRUE;
12142 scalar_float_mode float_mode;
12143 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
12144 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
12145 float_mode, BITS_PER_WORD);
12146 else
12147 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
12148 return \"\";
12149 }"
12150 [(set_attr "length" "8")
12151 (set_attr "type" "no_insn")]
12152 )
12153
12154 (define_insn "consttable_16"
12155 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
12156 "TARGET_EITHER"
12157 "*
12158 {
12159 making_const_table = TRUE;
12160 scalar_float_mode float_mode;
12161 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
12162 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
12163 float_mode, BITS_PER_WORD);
12164 else
12165 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
12166 return \"\";
12167 }"
12168 [(set_attr "length" "16")
12169 (set_attr "type" "no_insn")]
12170 )
12171
12172 ;; V5 instructions.
12173
12174 (define_insn "clzsi2"
12175 [(set (match_operand:SI 0 "s_register_operand" "=r")
12176 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
12177 "TARGET_32BIT && arm_arch5t"
12178 "clz%?\\t%0, %1"
12179 [(set_attr "predicable" "yes")
12180 (set_attr "type" "clz")])
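;; For example, __builtin_clz normally expands through this standard-named
;; clzsi2 pattern; with the input in r1 and the result in r0 (an assumed
;; register allocation) the output is simply
;;   clz     r0, r1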
12181
12182 (define_insn "rbitsi2"
12183 [(set (match_operand:SI 0 "s_register_operand" "=r")
12184 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
12185 "TARGET_32BIT && arm_arch_thumb2"
12186 "rbit%?\\t%0, %1"
12187 [(set_attr "predicable" "yes")
12188 (set_attr "type" "clz")])
12189
12190 ;; Keep this as a CTZ expression until after reload and then split
12191 ;; into RBIT + CLZ. Since RBIT is represented as an UNSPEC it is unlikely
12192 ;; to fold with any other expression.
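;; For example (assumed register allocation, input in r1, result in r0),
;; the split below produces
;;   rbit    r0, r1
;;   clz     r0, r0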
12193
12194 (define_insn_and_split "ctzsi2"
12195 [(set (match_operand:SI 0 "s_register_operand" "=r")
12196 (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
12197 "TARGET_32BIT && arm_arch_thumb2"
12198 "#"
12199 "&& reload_completed"
12200 [(const_int 0)]
12201 "
12202 emit_insn (gen_rbitsi2 (operands[0], operands[1]));
12203 emit_insn (gen_clzsi2 (operands[0], operands[0]));
12204 DONE;
12205 ")
12206
12207 ;; V5E instructions.
12208
12209 (define_insn "prefetch"
12210 [(prefetch (match_operand:SI 0 "address_operand" "p")
12211 (match_operand:SI 1 "" "")
12212 (match_operand:SI 2 "" ""))]
12213 "TARGET_32BIT && arm_arch5te"
12214 "pld\\t%a0"
12215 [(set_attr "type" "load_4")]
12216 )
12217
12218 ;; General predication pattern
12219
12220 (define_cond_exec
12221 [(match_operator 0 "arm_comparison_operator"
12222 [(match_operand 1 "cc_register" "")
12223 (const_int 0)])]
12224 "TARGET_32BIT
12225 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
12226 ""
12227 [(set_attr "predicated" "yes")]
12228 )
12229
12230 (define_insn "force_register_use"
12231 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
12232 ""
12233 "%@ %0 needed"
12234 [(set_attr "length" "0")
12235 (set_attr "type" "no_insn")]
12236 )
12237
12238
12239 ;; Patterns for exception handling
12240
12241 (define_expand "eh_return"
12242 [(use (match_operand 0 "general_operand"))]
12243 "TARGET_EITHER"
12244 "
12245 {
12246 if (TARGET_32BIT)
12247 emit_insn (gen_arm_eh_return (operands[0]));
12248 else
12249 emit_insn (gen_thumb_eh_return (operands[0]));
12250 DONE;
12251 }"
12252 )
12253
12254 ;; We can't expand this before we know where the link register is stored.
12255 (define_insn_and_split "arm_eh_return"
12256 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
12257 VUNSPEC_EH_RETURN)
12258 (clobber (match_scratch:SI 1 "=&r"))]
12259 "TARGET_ARM"
12260 "#"
12261 "&& reload_completed"
12262 [(const_int 0)]
12263 "
12264 {
12265 arm_set_return_address (operands[0], operands[1]);
12266 DONE;
12267 }"
12268 )
12269
12270 \f
12271 ;; TLS support
12272
12273 (define_insn "load_tp_hard"
12274 [(set (match_operand:SI 0 "register_operand" "=r")
12275 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
12276 "TARGET_HARD_TP"
12277 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
12278 [(set_attr "predicable" "yes")
12279 (set_attr "type" "mrs")]
12280 )
12281
12282 ;; Used by the TLS register based stack protector
12283 (define_insn "reload_tp_hard"
12284 [(set (match_operand:SI 0 "register_operand" "=r")
12285 (unspec_volatile:SI [(const_int 0)] VUNSPEC_MRC))]
12286 "TARGET_HARD_TP"
12287 "mrc\\tp15, 0, %0, c13, c0, 3\\t@ reload_tp_hard"
12288 [(set_attr "type" "mrs")]
12289 )
12290
12291 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
12292 (define_insn "load_tp_soft_fdpic"
12293 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
12294 (clobber (reg:SI FDPIC_REGNUM))
12295 (clobber (reg:SI LR_REGNUM))
12296 (clobber (reg:SI IP_REGNUM))
12297 (clobber (reg:CC CC_REGNUM))]
12298 "TARGET_SOFT_TP && TARGET_FDPIC"
12299 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
12300 [(set_attr "conds" "clob")
12301 (set_attr "type" "branch")]
12302 )
12303
12304 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
12305 (define_insn "load_tp_soft"
12306 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
12307 (clobber (reg:SI LR_REGNUM))
12308 (clobber (reg:SI IP_REGNUM))
12309 (clobber (reg:CC CC_REGNUM))]
12310 "TARGET_SOFT_TP && !TARGET_FDPIC"
12311 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
12312 [(set_attr "conds" "clob")
12313 (set_attr "type" "branch")]
12314 )
12315
12316 ;; TLS descriptor call
12317 (define_insn "tlscall"
12318 [(set (reg:SI R0_REGNUM)
12319 (unspec:SI [(reg:SI R0_REGNUM)
12320 (match_operand:SI 0 "" "X")
12321 (match_operand 1 "" "")] UNSPEC_TLS))
12322 (clobber (reg:SI R1_REGNUM))
12323 (clobber (reg:SI LR_REGNUM))
12324 (clobber (reg:SI CC_REGNUM))]
12325 "TARGET_GNU2_TLS"
12326 {
12327 targetm.asm_out.internal_label (asm_out_file, "LPIC",
12328 INTVAL (operands[1]));
12329 return "bl\\t%c0(tlscall)";
12330 }
12331 [(set_attr "conds" "clob")
12332 (set_attr "length" "4")
12333 (set_attr "type" "branch")]
12334 )
12335
12336 ;; For thread pointer builtin
12337 (define_expand "get_thread_pointersi"
12338 [(match_operand:SI 0 "s_register_operand")]
12339 ""
12340 "
12341 {
12342 arm_load_tp (operands[0]);
12343 DONE;
12344 }")
12345
12346 ;;
12347
12348 ;; We only care about the lower 16 bits of the constant
12349 ;; being inserted into the upper 16 bits of the register.
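;; For example (operands assumed for illustration), inserting 0x1234 into the
;; top halfword of r0 gives
;;   movt    r0, #0x1234
;; leaving bits 15:0 of r0 unchanged; the exact immediate spelling comes from
;; the %L1 output modifier.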
12350 (define_insn "*arm_movtas_ze"
12351 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
12352 (const_int 16)
12353 (const_int 16))
12354 (match_operand:SI 1 "const_int_operand" ""))]
12355 "TARGET_HAVE_MOVT"
12356 "@
12357 movt%?\t%0, %L1
12358 movt\t%0, %L1"
12359 [(set_attr "arch" "32,v8mb")
12360 (set_attr "predicable" "yes")
12361 (set_attr "length" "4")
12362 (set_attr "type" "alu_sreg")]
12363 )
12364
12365 (define_insn "*arm_rev"
12366 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12367 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
12368 "arm_arch6"
12369 "@
12370 rev\t%0, %1
12371 rev%?\t%0, %1
12372 rev%?\t%0, %1"
12373 [(set_attr "arch" "t1,t2,32")
12374 (set_attr "length" "2,2,4")
12375 (set_attr "predicable" "no,yes,yes")
12376 (set_attr "type" "rev")]
12377 )
12378
12379 (define_expand "arm_legacy_rev"
12380 [(set (match_operand:SI 2 "s_register_operand")
12381 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
12382 (const_int 16))
12383 (match_dup 1)))
12384 (set (match_dup 2)
12385 (lshiftrt:SI (match_dup 2)
12386 (const_int 8)))
12387 (set (match_operand:SI 3 "s_register_operand")
12388 (rotatert:SI (match_dup 1)
12389 (const_int 8)))
12390 (set (match_dup 2)
12391 (and:SI (match_dup 2)
12392 (const_int -65281)))
12393 (set (match_operand:SI 0 "s_register_operand")
12394 (xor:SI (match_dup 3)
12395 (match_dup 2)))]
12396 "TARGET_32BIT"
12397 ""
12398 )
12399
12400 ;; Reuse temporaries to keep register pressure down.
12401 (define_expand "thumb_legacy_rev"
12402 [(set (match_operand:SI 2 "s_register_operand")
12403 (ashift:SI (match_operand:SI 1 "s_register_operand")
12404 (const_int 24)))
12405 (set (match_operand:SI 3 "s_register_operand")
12406 (lshiftrt:SI (match_dup 1)
12407 (const_int 24)))
12408 (set (match_dup 3)
12409 (ior:SI (match_dup 3)
12410 (match_dup 2)))
12411 (set (match_operand:SI 4 "s_register_operand")
12412 (const_int 16))
12413 (set (match_operand:SI 5 "s_register_operand")
12414 (rotatert:SI (match_dup 1)
12415 (match_dup 4)))
12416 (set (match_dup 2)
12417 (ashift:SI (match_dup 5)
12418 (const_int 24)))
12419 (set (match_dup 5)
12420 (lshiftrt:SI (match_dup 5)
12421 (const_int 24)))
12422 (set (match_dup 5)
12423 (ior:SI (match_dup 5)
12424 (match_dup 2)))
12425 (set (match_dup 5)
12426 (rotatert:SI (match_dup 5)
12427 (match_dup 4)))
12428 (set (match_operand:SI 0 "s_register_operand")
12429 (ior:SI (match_dup 5)
12430 (match_dup 3)))]
12431 "TARGET_THUMB"
12432 ""
12433 )
12434
12435 ;; ARM-specific expansion of signed mod by power of 2
12436 ;; using conditional negate.
12437 ;; For r0 % n where n is a power of 2 produce:
12438 ;; rsbs r1, r0, #0
12439 ;; and r0, r0, #(n - 1)
12440 ;; and r1, r1, #(n - 1)
12441 ;; rsbpl r0, r1, #0
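;; For instance, with n == 16 (register allocation as above) the sequence
;; instantiates to:
;;   rsbs  r1, r0, #0
;;   and   r0, r0, #15
;;   and   r1, r1, #15
;;   rsbpl r0, r1, #0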
12442
12443 (define_expand "modsi3"
12444 [(match_operand:SI 0 "register_operand")
12445 (match_operand:SI 1 "register_operand")
12446 (match_operand:SI 2 "const_int_operand")]
12447 "TARGET_32BIT"
12448 {
12449 HOST_WIDE_INT val = INTVAL (operands[2]);
12450
12451 if (val <= 0
12452 || exact_log2 (val) <= 0)
12453 FAIL;
12454
12455 rtx mask = GEN_INT (val - 1);
12456
12457 /* In the special case of r0 % 2 we can do the even shorter:
12458 cmp r0, #0
12459 and r0, r0, #1
12460 rsblt r0, r0, #0. */
12461
12462 if (val == 2)
12463 {
12464 rtx cc_reg = arm_gen_compare_reg (LT,
12465 operands[1], const0_rtx, NULL_RTX);
12466 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
12467 rtx masked = gen_reg_rtx (SImode);
12468
12469 emit_insn (gen_andsi3 (masked, operands[1], mask));
12470 emit_move_insn (operands[0],
12471 gen_rtx_IF_THEN_ELSE (SImode, cond,
12472 gen_rtx_NEG (SImode,
12473 masked),
12474 masked));
12475 DONE;
12476 }
12477
12478 rtx neg_op = gen_reg_rtx (SImode);
12479 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
12480 operands[1]));
12481
12482 /* Extract the condition register and mode. */
12483 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
12484 rtx cc_reg = SET_DEST (cmp);
12485 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
12486
12487 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
12488
12489 rtx masked_neg = gen_reg_rtx (SImode);
12490 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
12491
12492 /* We want a conditional negate here, but emitting COND_EXEC rtxes
12493 during expand does not always work. Do an IF_THEN_ELSE instead. */
12494 emit_move_insn (operands[0],
12495 gen_rtx_IF_THEN_ELSE (SImode, cond,
12496 gen_rtx_NEG (SImode, masked_neg),
12497 operands[0]));
12498
12499
12500 DONE;
12501 }
12502 )
12503
12504 (define_expand "bswapsi2"
12505 [(set (match_operand:SI 0 "s_register_operand")
12506 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
12507 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
12508 "
12509 if (!arm_arch6)
12510 {
12511 rtx op2 = gen_reg_rtx (SImode);
12512 rtx op3 = gen_reg_rtx (SImode);
12513
12514 if (TARGET_THUMB)
12515 {
12516 rtx op4 = gen_reg_rtx (SImode);
12517 rtx op5 = gen_reg_rtx (SImode);
12518
12519 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
12520 op2, op3, op4, op5));
12521 }
12522 else
12523 {
12524 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
12525 op2, op3));
12526 }
12527
12528 DONE;
12529 }
12530 "
12531 )
12532
12533 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
12534 ;; and unsigned variants, respectively. For rev16, expose
12535 ;; byte-swapping in the lower 16 bits only.
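;; For example (illustrative registers): revsh r0, r1 byte-swaps the low
;; halfword of r1 and sign-extends the result to 32 bits, while rev16 r0, r1
;; swaps the bytes within each halfword of r1.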
12536 (define_insn "*arm_revsh"
12537 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12538 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
12539 "arm_arch6"
12540 "@
12541 revsh\t%0, %1
12542 revsh%?\t%0, %1
12543 revsh%?\t%0, %1"
12544 [(set_attr "arch" "t1,t2,32")
12545 (set_attr "length" "2,2,4")
12546 (set_attr "type" "rev")]
12547 )
12548
12549 (define_insn "*arm_rev16"
12550 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
12551 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
12552 "arm_arch6"
12553 "@
12554 rev16\t%0, %1
12555 rev16%?\t%0, %1
12556 rev16%?\t%0, %1"
12557 [(set_attr "arch" "t1,t2,32")
12558 (set_attr "length" "2,2,4")
12559 (set_attr "type" "rev")]
12560 )
12561
12562 ;; There are no canonicalisation rules for the position of the lshiftrt and
12563 ;; ashift operations within an IOR/AND RTX, so we need two patterns, one
12564 ;; matching each valid permutation.
12565
12566 (define_insn "arm_rev16si2"
12567 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
12568 (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
12569 (const_int 8))
12570 (match_operand:SI 3 "const_int_operand" "n,n,n"))
12571 (and:SI (lshiftrt:SI (match_dup 1)
12572 (const_int 8))
12573 (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
12574 "arm_arch6
12575 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
12576 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
12577 "rev16\\t%0, %1"
12578 [(set_attr "arch" "t1,t2,32")
12579 (set_attr "length" "2,2,4")
12580 (set_attr "type" "rev")]
12581 )
12582
12583 (define_insn "arm_rev16si2_alt"
12584 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
12585 (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
12586 (const_int 8))
12587 (match_operand:SI 2 "const_int_operand" "n,n,n"))
12588 (and:SI (ashift:SI (match_dup 1)
12589 (const_int 8))
12590 (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
12591 "arm_arch6
12592 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
12593 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
12594 "rev16\\t%0, %1"
12595 [(set_attr "arch" "t1,t2,32")
12596 (set_attr "length" "2,2,4")
12597 (set_attr "type" "rev")]
12598 )
12599
12600 (define_expand "bswaphi2"
12601 [(set (match_operand:HI 0 "s_register_operand")
12602 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
12603 "arm_arch6"
12604 ""
12605 )
12606
12607 ;; Patterns for LDRD/STRD in Thumb2 mode
12608
12609 (define_insn "*thumb2_ldrd"
12610 [(set (match_operand:SI 0 "s_register_operand" "=r")
12611 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12612 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
12613 (set (match_operand:SI 3 "s_register_operand" "=r")
12614 (mem:SI (plus:SI (match_dup 1)
12615 (match_operand:SI 4 "const_int_operand" ""))))]
12616 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12617 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
12618 && (operands_ok_ldrd_strd (operands[0], operands[3],
12619 operands[1], INTVAL (operands[2]),
12620 false, true))"
12621 "ldrd%?\t%0, %3, [%1, %2]"
12622 [(set_attr "type" "load_8")
12623 (set_attr "predicable" "yes")])
12624
12625 (define_insn "*thumb2_ldrd_base"
12626 [(set (match_operand:SI 0 "s_register_operand" "=r")
12627 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12628 (set (match_operand:SI 2 "s_register_operand" "=r")
12629 (mem:SI (plus:SI (match_dup 1)
12630 (const_int 4))))]
12631 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12632 && (operands_ok_ldrd_strd (operands[0], operands[2],
12633 operands[1], 0, false, true))"
12634 "ldrd%?\t%0, %2, [%1]"
12635 [(set_attr "type" "load_8")
12636 (set_attr "predicable" "yes")])
12637
12638 (define_insn "*thumb2_ldrd_base_neg"
12639 [(set (match_operand:SI 0 "s_register_operand" "=r")
12640 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12641 (const_int -4))))
12642 (set (match_operand:SI 2 "s_register_operand" "=r")
12643 (mem:SI (match_dup 1)))]
12644 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12645 && (operands_ok_ldrd_strd (operands[0], operands[2],
12646 operands[1], -4, false, true))"
12647 "ldrd%?\t%0, %2, [%1, #-4]"
12648 [(set_attr "type" "load_8")
12649 (set_attr "predicable" "yes")])
12650
12651 (define_insn "*thumb2_strd"
12652 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12653 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
12654 (match_operand:SI 2 "s_register_operand" "r"))
12655 (set (mem:SI (plus:SI (match_dup 0)
12656 (match_operand:SI 3 "const_int_operand" "")))
12657 (match_operand:SI 4 "s_register_operand" "r"))]
12658 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12659 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
12660 && (operands_ok_ldrd_strd (operands[2], operands[4],
12661 operands[0], INTVAL (operands[1]),
12662 false, false))"
12663 "strd%?\t%2, %4, [%0, %1]"
12664 [(set_attr "type" "store_8")
12665 (set_attr "predicable" "yes")])
12666
12667 (define_insn "*thumb2_strd_base"
12668 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
12669 (match_operand:SI 1 "s_register_operand" "r"))
12670 (set (mem:SI (plus:SI (match_dup 0)
12671 (const_int 4)))
12672 (match_operand:SI 2 "s_register_operand" "r"))]
12673 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12674 && (operands_ok_ldrd_strd (operands[1], operands[2],
12675 operands[0], 0, false, false))"
12676 "strd%?\t%1, %2, [%0]"
12677 [(set_attr "type" "store_8")
12678 (set_attr "predicable" "yes")])
12679
12680 (define_insn "*thumb2_strd_base_neg"
12681 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12682 (const_int -4)))
12683 (match_operand:SI 1 "s_register_operand" "r"))
12684 (set (mem:SI (match_dup 0))
12685 (match_operand:SI 2 "s_register_operand" "r"))]
12686 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12687 && (operands_ok_ldrd_strd (operands[1], operands[2],
12688 operands[0], -4, false, false))"
12689 "strd%?\t%1, %2, [%0, #-4]"
12690 [(set_attr "type" "store_8")
12691 (set_attr "predicable" "yes")])
12692
12693 ;; ARMv8 CRC32 instructions.
12694 (define_insn "arm_<crc_variant>"
12695 [(set (match_operand:SI 0 "s_register_operand" "=r")
12696 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
12697 (match_operand:<crc_mode> 2 "s_register_operand" "r")]
12698 CRC))]
12699 "TARGET_CRC32"
12700 "<crc_variant>\\t%0, %1, %2"
12701 [(set_attr "type" "crc")
12702 (set_attr "conds" "unconditional")]
12703 )
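;; As an illustration (register choice and the ACLE route are assumed here):
;; __crc32b from arm_acle.h is expected to reach this pattern via
;; __builtin_arm_crc32b and, with the accumulator in r0 and the data in r1,
;; emit
;;   crc32b  r0, r0, r1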
12704
12705 ;; Load the load/store double peephole optimizations.
12706 (include "ldrdstrd.md")
12707
12708 ;; Load the load/store multiple patterns
12709 (include "ldmstm.md")
12710
12711 ;; The patterns in ldmstm.md don't cover more than 4 registers.  This pattern
12712 ;; covers larger lists, without explicit writeback, as generated for the
12713 ;; APCS_FRAME epilogue.  The operands are validated by the
12714 ;; load_multiple_operation match_parallel predicate rather than by constraints,
12715 ;; so the pattern is only enabled after reload.
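;; Purely illustrative output (base register and list are hypothetical, and
;; the exact mnemonic is chosen by arm_output_multireg_pop): something along
;; the lines of
;;   ldm     ip, {r4, r5, r6, r7, r8, r9, sl, fp}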
12716 (define_insn "*load_multiple"
12717 [(match_parallel 0 "load_multiple_operation"
12718 [(set (match_operand:SI 2 "s_register_operand" "=rk")
12719 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12720 ])]
12721 "TARGET_32BIT && reload_completed"
12722 "*
12723 {
12724 arm_output_multireg_pop (operands, /*return_pc=*/false,
12725 /*cond=*/const_true_rtx,
12726 /*reverse=*/false,
12727 /*update=*/false);
12728 return \"\";
12729 }
12730 "
12731 [(set_attr "predicable" "yes")]
12732 )
12733
12734 (define_expand "copysignsf3"
12735 [(match_operand:SF 0 "register_operand")
12736 (match_operand:SF 1 "register_operand")
12737 (match_operand:SF 2 "register_operand")]
12738 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
12739 "{
12740 emit_move_insn (operands[0], operands[2]);
12741 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
12742 GEN_INT (31), GEN_INT (0),
12743 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
12744 DONE;
12745 }"
12746 )
12747
12748 (define_expand "copysigndf3"
12749 [(match_operand:DF 0 "register_operand")
12750 (match_operand:DF 1 "register_operand")
12751 (match_operand:DF 2 "register_operand")]
12752 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
12753 "{
12754 rtx op0_low = gen_lowpart (SImode, operands[0]);
12755 rtx op0_high = gen_highpart (SImode, operands[0]);
12756 rtx op1_low = gen_lowpart (SImode, operands[1]);
12757 rtx op1_high = gen_highpart (SImode, operands[1]);
12758 rtx op2_high = gen_highpart (SImode, operands[2]);
12759
12760 rtx scratch1 = gen_reg_rtx (SImode);
12761 rtx scratch2 = gen_reg_rtx (SImode);
12762 emit_move_insn (scratch1, op2_high);
12763 emit_move_insn (scratch2, op1_high);
12764
12765 emit_insn (gen_rtx_SET (scratch1,
12766 gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT (31))));
12767 emit_insn (gen_insv_t2 (scratch2, GEN_INT (1), GEN_INT (31), scratch1));
12768 emit_move_insn (op0_low, op1_low);
12769 emit_move_insn (op0_high, scratch2);
12770
12771 DONE;
12772 }"
12773 )
12774
12775 ;; movmisalign for DImode
12776 (define_expand "movmisaligndi"
12777 [(match_operand:DI 0 "general_operand")
12778 (match_operand:DI 1 "general_operand")]
12779 "unaligned_access"
12780 {
12781 rtx lo_op0 = gen_lowpart (SImode, operands[0]);
12782 rtx lo_op1 = gen_lowpart (SImode, operands[1]);
12783 rtx hi_op0 = gen_highpart_mode (SImode, DImode, operands[0]);
12784 rtx hi_op1 = gen_highpart_mode (SImode, DImode, operands[1]);
12785
12786 emit_insn (gen_movmisalignsi (lo_op0, lo_op1));
12787 emit_insn (gen_movmisalignsi (hi_op0, hi_op1));
12788 DONE;
12789 })
12790
12791 ;; movmisalign patterns for HImode and SImode.
12792 (define_expand "movmisalign<mode>"
12793 [(match_operand:HSI 0 "general_operand")
12794 (match_operand:HSI 1 "general_operand")]
12795 "unaligned_access"
12796 {
12797 /* This pattern is not permitted to fail during expansion: if both arguments
12798 are non-registers (e.g. memory := constant), force operand 1 into a
12799 register. */
12800 rtx (* gen_unaligned_load)(rtx, rtx);
12801 rtx tmp_dest = operands[0];
12802 if (!s_register_operand (operands[0], <MODE>mode)
12803 && !s_register_operand (operands[1], <MODE>mode))
12804 operands[1] = force_reg (<MODE>mode, operands[1]);
12805
12806 if (<MODE>mode == HImode)
12807 {
12808 gen_unaligned_load = gen_unaligned_loadhiu;
12809 tmp_dest = gen_reg_rtx (SImode);
12810 }
12811 else
12812 gen_unaligned_load = gen_unaligned_loadsi;
12813
12814 if (MEM_P (operands[1]))
12815 {
12816 emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
12817 if (<MODE>mode == HImode)
12818 emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
12819 }
12820 else
12821 emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
12822
12823 DONE;
12824 })
12825
12826 (define_insn "arm_<cdp>"
12827 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12828 (match_operand:SI 1 "immediate_operand" "n")
12829 (match_operand:SI 2 "immediate_operand" "n")
12830 (match_operand:SI 3 "immediate_operand" "n")
12831 (match_operand:SI 4 "immediate_operand" "n")
12832 (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
12833 "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
12834 {
12835 arm_const_bounds (operands[0], 0, 16);
12836 arm_const_bounds (operands[1], 0, 16);
12837 arm_const_bounds (operands[2], 0, (1 << 5));
12838 arm_const_bounds (operands[3], 0, (1 << 5));
12839 arm_const_bounds (operands[4], 0, (1 << 5));
12840 arm_const_bounds (operands[5], 0, 8);
12841 return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
12842 }
12843 [(set_attr "length" "4")
12844 (set_attr "type" "coproc")])
12845
12846 (define_insn "*ldc"
12847 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12848 (match_operand:SI 1 "immediate_operand" "n")
12849 (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
12850 "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
12851 {
12852 arm_const_bounds (operands[0], 0, 16);
12853 arm_const_bounds (operands[1], 0, (1 << 5));
12854 return "<ldc>\\tp%c0, CR%c1, %2";
12855 }
12856 [(set_attr "length" "4")
12857 (set_attr "type" "coproc")])
12858
12859 (define_insn "*stc"
12860 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12861 (match_operand:SI 1 "immediate_operand" "n")
12862 (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
12863 "arm_coproc_builtin_available (VUNSPEC_<STC>)"
12864 {
12865 arm_const_bounds (operands[0], 0, 16);
12866 arm_const_bounds (operands[1], 0, (1 << 5));
12867 return "<stc>\\tp%c0, CR%c1, %2";
12868 }
12869 [(set_attr "length" "4")
12870 (set_attr "type" "coproc")])
12871
12872 (define_expand "arm_<ldc>"
12873 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
12874 (match_operand:SI 1 "immediate_operand")
12875 (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
12876 "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
12877
12878 (define_expand "arm_<stc>"
12879 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
12880 (match_operand:SI 1 "immediate_operand")
12881 (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
12882 "arm_coproc_builtin_available (VUNSPEC_<STC>)")
12883
12884 (define_insn "arm_<mcr>"
12885 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12886 (match_operand:SI 1 "immediate_operand" "n")
12887 (match_operand:SI 2 "s_register_operand" "r")
12888 (match_operand:SI 3 "immediate_operand" "n")
12889 (match_operand:SI 4 "immediate_operand" "n")
12890 (match_operand:SI 5 "immediate_operand" "n")] MCRI)
12891 (use (match_dup 2))]
12892 "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
12893 {
12894 arm_const_bounds (operands[0], 0, 16);
12895 arm_const_bounds (operands[1], 0, 8);
12896 arm_const_bounds (operands[3], 0, (1 << 5));
12897 arm_const_bounds (operands[4], 0, (1 << 5));
12898 arm_const_bounds (operands[5], 0, 8);
12899 return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
12900 }
12901 [(set_attr "length" "4")
12902 (set_attr "type" "coproc")])
12903
12904 (define_insn "arm_<mrc>"
12905 [(set (match_operand:SI 0 "s_register_operand" "=r")
12906 (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
12907 (match_operand:SI 2 "immediate_operand" "n")
12908 (match_operand:SI 3 "immediate_operand" "n")
12909 (match_operand:SI 4 "immediate_operand" "n")
12910 (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
12911 "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
12912 {
12913 arm_const_bounds (operands[1], 0, 16);
12914 arm_const_bounds (operands[2], 0, 8);
12915 arm_const_bounds (operands[3], 0, (1 << 5));
12916 arm_const_bounds (operands[4], 0, (1 << 5));
12917 arm_const_bounds (operands[5], 0, 8);
12918 return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
12919 }
12920 [(set_attr "length" "4")
12921 (set_attr "type" "coproc")])
12922
12923 (define_insn "arm_<mcrr>"
12924 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12925 (match_operand:SI 1 "immediate_operand" "n")
12926 (match_operand:DI 2 "s_register_operand" "r")
12927 (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
12928 (use (match_dup 2))]
12929 "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
12930 {
12931 arm_const_bounds (operands[0], 0, 16);
12932 arm_const_bounds (operands[1], 0, 8);
12933 arm_const_bounds (operands[3], 0, (1 << 5));
12934 return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
12935 }
12936 [(set_attr "length" "4")
12937 (set_attr "type" "coproc")])
12938
12939 (define_insn "arm_<mrrc>"
12940 [(set (match_operand:DI 0 "s_register_operand" "=r")
12941 (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
12942 (match_operand:SI 2 "immediate_operand" "n")
12943 (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
12944 "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
12945 {
12946 arm_const_bounds (operands[1], 0, 16);
12947 arm_const_bounds (operands[2], 0, 8);
12948 arm_const_bounds (operands[3], 0, (1 << 5));
12949 return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
12950 }
12951 [(set_attr "length" "4")
12952 (set_attr "type" "coproc")])
12953
12954 (define_expand "speculation_barrier"
12955 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
12956 "TARGET_EITHER"
12957 "
12958 /* For thumb1 (except Armv8 derivatives), and for pre-Armv7 we don't
12959 have a usable barrier (and probably don't need one in practice).
12960 But to be safe if such code is run on later architectures, call a
12961 helper function in libgcc that will do the right thing for the
12962 system the code is actually running on.  */
12963 if (!(arm_arch7 || arm_arch8))
12964 {
12965 arm_emit_speculation_barrier_function ();
12966 DONE;
12967 }
12968 "
12969 )
12970
12971 ;; Generate a hard speculation barrier when we have not enabled speculation
12972 ;; tracking.
12973 (define_insn "*speculation_barrier_insn"
12974 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
12975 "arm_arch7 || arm_arch8"
12976 "isb\;dsb\\tsy"
12977 [(set_attr "type" "block")
12978 (set_attr "length" "8")]
12979 )
12980
12981 ;; Vector bits common to IWMMXT, Neon and MVE
12982 (include "vec-common.md")
12983 ;; Load the Intel Wireless Multimedia Extension patterns
12984 (include "iwmmxt.md")
12985 ;; Load the VFP co-processor patterns
12986 (include "vfp.md")
12987 ;; Thumb-1 patterns
12988 (include "thumb1.md")
12989 ;; Thumb-2 patterns
12990 (include "thumb2.md")
12991 ;; Neon patterns
12992 (include "neon.md")
12993 ;; Crypto patterns
12994 (include "crypto.md")
12995 ;; Synchronization Primitives
12996 (include "sync.md")
12997 ;; Fixed-point patterns
12998 (include "arm-fixed.md")
12999 ;; M-profile Vector Extension
13000 (include "mve.md")