1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2019 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
6
7 ;; This file is part of GCC.
8
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
13
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
18
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
22
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
24
25 \f
26 ;;---------------------------------------------------------------------------
27 ;; Constants
28
29 ;; Register numbers -- All machine registers should be defined here
30 (define_constants
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (R4_REGNUM 4) ; Fifth CORE register
34 (FDPIC_REGNUM 9) ; FDPIC register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (LAST_ARM_REGNUM 15) ;
40 (CC_REGNUM 100) ; Condition code pseudo register
41 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
42 ]
43 )
44 ;; 3rd operand to select_dominance_cc_mode
45 (define_constants
46 [(DOM_CC_X_AND_Y 0)
47 (DOM_CC_NX_OR_Y 1)
48 (DOM_CC_X_OR_Y 2)
49 ]
50 )
51 ;; conditional compare combination
52 (define_constants
53 [(CMP_CMP 0)
54 (CMN_CMP 1)
55 (CMP_CMN 2)
56 (CMN_CMN 3)
57 (NUM_OF_COND_CMP 4)
58 ]
59 )
60
61 \f
62 ;;---------------------------------------------------------------------------
63 ;; Attributes
64
65 ;; Processor type. This is created automatically from arm-cores.def.
66 (include "arm-tune.md")
67
68 ;; Instruction classification types
69 (include "types.md")
70
71 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
72 ; generating ARM code. This is used to control the length of some insn
73 ; patterns that share the same RTL in both ARM and Thumb code.
74 (define_attr "is_thumb" "yes,no"
75 (const (if_then_else (symbol_ref "TARGET_THUMB")
76 (const_string "yes") (const_string "no"))))
77
78 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
79 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
80
81 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
82 (define_attr "is_thumb1" "yes,no"
83 (const (if_then_else (symbol_ref "TARGET_THUMB1")
84 (const_string "yes") (const_string "no"))))
85
86 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
87 ; The arm_restrict_it flag enables the "short IT" feature which
88 ; restricts IT blocks to a single 16-bit instruction.
89 ; This attribute should only be used on 16-bit Thumb-2 instructions
90 ; which may be predicated (the "predicable" attribute must be set).
91 (define_attr "predicable_short_it" "no,yes" (const_string "no"))
92
93 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
94 ; This attribute should only be used on instructions which may emit
95 ; an IT block in their expansion which is not a short IT.
96 (define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
97
98 ;; Operand number of an input operand that is shifted. Zero if the
99 ;; given instruction does not shift one of its input operands.
100 (define_attr "shift" "" (const_int 0))
101
102 ;; [For compatibility with AArch64 in pipeline models]
103 ;; Attribute that specifies whether or not the instruction touches fp
104 ;; registers.
105 (define_attr "fp" "no,yes" (const_string "no"))
106
107 ; Floating Point Unit. If we only have floating point emulation, then there
108 ; is no point in scheduling the floating point insns. (Well, for best
109 ; performance we should try and group them together).
110 (define_attr "fpu" "none,vfp"
111 (const (symbol_ref "arm_fpu_attr")))
112
113 ; Predicated means that the insn form is conditionally executed based on a
114 ; predicate. We default to 'no' because no Thumb patterns match this rule
115 ; and not all ARM insns do.
116 (define_attr "predicated" "yes,no" (const_string "no"))
117
118 ; LENGTH of an instruction (in bytes)
119 (define_attr "length" ""
120 (const_int 4))
121
122 ; The architecture which supports the instruction (or alternative).
123 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
124 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
125 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
126 ; arm_arch6. "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
127 ; Baseline. This attribute is used to compute attribute "enabled";
128 ; use value "any" to enable an alternative in all cases.
129 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,v8mb,iwmmxt,iwmmxt2,armv6_or_vfpv3,neon"
130 (const_string "any"))
131
132 (define_attr "arch_enabled" "no,yes"
133 (cond [(eq_attr "arch" "any")
134 (const_string "yes")
135
136 (and (eq_attr "arch" "a")
137 (match_test "TARGET_ARM"))
138 (const_string "yes")
139
140 (and (eq_attr "arch" "t")
141 (match_test "TARGET_THUMB"))
142 (const_string "yes")
143
144 (and (eq_attr "arch" "t1")
145 (match_test "TARGET_THUMB1"))
146 (const_string "yes")
147
148 (and (eq_attr "arch" "t2")
149 (match_test "TARGET_THUMB2"))
150 (const_string "yes")
151
152 (and (eq_attr "arch" "32")
153 (match_test "TARGET_32BIT"))
154 (const_string "yes")
155
156 (and (eq_attr "arch" "v6")
157 (match_test "TARGET_32BIT && arm_arch6"))
158 (const_string "yes")
159
160 (and (eq_attr "arch" "nov6")
161 (match_test "TARGET_32BIT && !arm_arch6"))
162 (const_string "yes")
163
164 (and (eq_attr "arch" "v6t2")
165 (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
166 (const_string "yes")
167
168 (and (eq_attr "arch" "v8mb")
169 (match_test "TARGET_THUMB1 && arm_arch8"))
170 (const_string "yes")
171
172 (and (eq_attr "arch" "iwmmxt2")
173 (match_test "TARGET_REALLY_IWMMXT2"))
174 (const_string "yes")
175
176 (and (eq_attr "arch" "armv6_or_vfpv3")
177 (match_test "arm_arch6 || TARGET_VFP3"))
178 (const_string "yes")
179
180 (and (eq_attr "arch" "neon")
181 (match_test "TARGET_NEON"))
182 (const_string "yes")
183 ]
184
185 (const_string "no")))
186
187 (define_attr "opt" "any,speed,size"
188 (const_string "any"))
189
190 (define_attr "opt_enabled" "no,yes"
191 (cond [(eq_attr "opt" "any")
192 (const_string "yes")
193
194 (and (eq_attr "opt" "speed")
195 (match_test "optimize_function_for_speed_p (cfun)"))
196 (const_string "yes")
197
198 (and (eq_attr "opt" "size")
199 (match_test "optimize_function_for_size_p (cfun)"))
200 (const_string "yes")]
201 (const_string "no")))
202
203 (define_attr "use_literal_pool" "no,yes"
204 (cond [(and (eq_attr "type" "f_loads,f_loadd")
205 (match_test "CONSTANT_P (operands[1])"))
206 (const_string "yes")]
207 (const_string "no")))
208
209 ; Enable all alternatives that are both arch_enabled and insn_enabled.
210 ; FIXME: opt_enabled has been temporarily removed until we have
211 ; an attribute that allows the use of such alternatives.
212 ; This depends on caching of speed_p, size_p on a per
213 ; alternative basis. The problem is that the enabled attribute
214 ; cannot depend on any state that is not cached or is not constant
215 ; for a compilation unit. We probably need a generic "hot/cold"
216 ; alternative which if implemented can help with this. We disable this
217 ; until such a time as this is implemented and / or the improvements or
218 ; regressions with removing this attribute are double checked.
219 ; See ashldi3_neon and <shift>di3_neon in neon.md.
220
221 (define_attr "enabled" "no,yes"
222 (cond [(and (eq_attr "predicable_short_it" "no")
223 (and (eq_attr "predicated" "yes")
224 (match_test "arm_restrict_it")))
225 (const_string "no")
226
227 (and (eq_attr "enabled_for_short_it" "no")
228 (match_test "arm_restrict_it"))
229 (const_string "no")
230
231 (eq_attr "arch_enabled" "no")
232 (const_string "no")]
233 (const_string "yes")))
234
235 ; POOL_RANGE is how far away from a constant pool entry that this insn
236 ; can be placed. If the distance is zero, then this insn will never
237 ; reference the pool.
238 ; Note that for Thumb constant pools the PC value is rounded down to the
239 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
240 ; Thumb insns) should be set to <max_range> - 2.
241 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
242 ; before its address. It is set to <max_range> - (8 + <data_size>).
243 (define_attr "arm_pool_range" "" (const_int 0))
244 (define_attr "thumb2_pool_range" "" (const_int 0))
245 (define_attr "arm_neg_pool_range" "" (const_int 0))
246 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
247
248 (define_attr "pool_range" ""
249 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
250 (attr "arm_pool_range")))
251 (define_attr "neg_pool_range" ""
252 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
253 (attr "arm_neg_pool_range")))
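;; Worked example of the formulas above: for a 4-byte load whose addressing
;; mode can reach literals up to 4096 bytes ahead of the pc, the Thumb-2
;; range would be 4096 - 2 = 4094 because of the pc rounding, and a
;; negative range, where supported, 4096 - (8 + 4) = 4084.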
254
255 ; An assembler sequence may clobber the condition codes without us knowing.
256 ; If such an insn references the pool, then we have no way of knowing how,
257 ; so use the most conservative value for pool_range.
258 (define_asm_attributes
259 [(set_attr "conds" "clob")
260 (set_attr "length" "4")
261 (set_attr "pool_range" "250")])
262
263 ; Load scheduling, set from the arm_ld_sched variable
264 ; initialized by arm_option_override()
265 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
266
267 ; condition codes: this one is used by final_prescan_insn to speed up
268 ; conditionalizing instructions. It saves having to scan the rtl to see if
269 ; it uses or alters the condition codes.
270 ;
271 ; USE means that the condition codes are used by the insn in the process of
272 ; outputting code; this means (at present) that we can't use the insn in
273 ; inlined branches
274 ;
275 ; SET means that the purpose of the insn is to set the condition codes in a
276 ; well defined manner.
277 ;
278 ; CLOB means that the condition codes are altered in an undefined manner, if
279 ; they are altered at all
280 ;
281 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
282 ; that the instruction does not use or alter the condition codes.
283 ;
284 ; NOCOND means that the instruction does not use or alter the condition
285 ; codes but can be converted into a conditionally executed instruction.
286
287 (define_attr "conds" "use,set,clob,unconditional,nocond"
288 (if_then_else
289 (ior (eq_attr "is_thumb1" "yes")
290 (eq_attr "type" "call"))
291 (const_string "clob")
292 (if_then_else (eq_attr "is_neon_type" "no")
293 (const_string "nocond")
294 (const_string "unconditional"))))
295
296 ; Predicable means that the insn can be conditionally executed based on
297 ; an automatically added predicate (additional patterns are generated by
298 ; gen...). We default to 'no' because no Thumb patterns match this rule
299 ; and not all ARM patterns do.
300 (define_attr "predicable" "no,yes" (const_string "no"))
301
302 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
303 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
304 ; suffer blockages enough to warrant modelling this (and it can adversely
305 ; affect the schedule).
306 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
307
308 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
309 ; to stall the processor. Used with model_wbuf above.
310 (define_attr "write_conflict" "no,yes"
311 (if_then_else (eq_attr "type"
312 "block,call,load_4")
313 (const_string "yes")
314 (const_string "no")))
315
316 ; Classify the insns into those that take one cycle and those that take more
317 ; than one on the main cpu execution unit.
318 (define_attr "core_cycles" "single,multi"
319 (if_then_else (eq_attr "type"
320 "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
321 alu_shift_imm, alu_shift_reg, alu_dsp_reg, alus_ext, alus_imm, alus_sreg,\
322 alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
323 logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
324 logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
325 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
326 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
327 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
328 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
329 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
330 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
331 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
332 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
333 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
334 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
335 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
336 (const_string "single")
337 (const_string "multi")))
338
339 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
340 ;; distant label. Only applicable to Thumb code.
341 (define_attr "far_jump" "yes,no" (const_string "no"))
342
343
344 ;; The number of machine instructions this pattern expands to.
345 ;; Used for Thumb-2 conditional execution.
346 (define_attr "ce_count" "" (const_int 1))
347
348 ;;---------------------------------------------------------------------------
349 ;; Unspecs
350
351 (include "unspecs.md")
352
353 ;;---------------------------------------------------------------------------
354 ;; Mode iterators
355
356 (include "iterators.md")
357
358 ;;---------------------------------------------------------------------------
359 ;; Predicates
360
361 (include "predicates.md")
362 (include "constraints.md")
363
364 ;;---------------------------------------------------------------------------
365 ;; Pipeline descriptions
366
367 (define_attr "tune_cortexr4" "yes,no"
368 (const (if_then_else
369 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
370 (const_string "yes")
371 (const_string "no"))))
372
373 ;; True if the generic scheduling description should be used.
374
375 (define_attr "generic_sched" "yes,no"
376 (const (if_then_else
377 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
378 arm926ejs,arm10e,arm1026ejs,arm1136js,\
379 arm1136jfs,cortexa5,cortexa7,cortexa8,\
380 cortexa9,cortexa12,cortexa15,cortexa17,\
381 cortexa53,cortexa57,cortexm4,cortexm7,\
382 exynosm1,marvell_pj4,xgene1")
383 (eq_attr "tune_cortexr4" "yes"))
384 (const_string "no")
385 (const_string "yes"))))
386
387 (define_attr "generic_vfp" "yes,no"
388 (const (if_then_else
389 (and (eq_attr "fpu" "vfp")
390 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
391 cortexa8,cortexa9,cortexa53,cortexm4,\
392 cortexm7,marvell_pj4,xgene1")
393 (eq_attr "tune_cortexr4" "no"))
394 (const_string "yes")
395 (const_string "no"))))
396
397 (include "marvell-f-iwmmxt.md")
398 (include "arm-generic.md")
399 (include "arm926ejs.md")
400 (include "arm1020e.md")
401 (include "arm1026ejs.md")
402 (include "arm1136jfs.md")
403 (include "fa526.md")
404 (include "fa606te.md")
405 (include "fa626te.md")
406 (include "fmp626.md")
407 (include "fa726te.md")
408 (include "cortex-a5.md")
409 (include "cortex-a7.md")
410 (include "cortex-a8.md")
411 (include "cortex-a9.md")
412 (include "cortex-a15.md")
413 (include "cortex-a17.md")
414 (include "cortex-a53.md")
415 (include "cortex-a57.md")
416 (include "cortex-r4.md")
417 (include "cortex-r4f.md")
418 (include "cortex-m7.md")
419 (include "cortex-m4.md")
420 (include "cortex-m4-fpu.md")
421 (include "exynos-m1.md")
422 (include "vfp11.md")
423 (include "marvell-pj4.md")
424 (include "xgene1.md")
425
426 \f
427 ;;---------------------------------------------------------------------------
428 ;; Insn patterns
429 ;;
430 ;; Addition insns.
431
432 ;; Note: For DImode insns, there is normally no reason why operands should
433 ;; not be in the same register; what we don't want is for something being
434 ;; written to partially overlap something that is an input.
435
436 (define_expand "adddi3"
437 [(parallel
438 [(set (match_operand:DI 0 "s_register_operand")
439 (plus:DI (match_operand:DI 1 "s_register_operand")
440 (match_operand:DI 2 "reg_or_int_operand")))
441 (clobber (reg:CC CC_REGNUM))])]
442 "TARGET_EITHER"
443 "
444 if (TARGET_THUMB1)
445 {
446 if (!REG_P (operands[2]))
447 operands[2] = force_reg (DImode, operands[2]);
448 }
449 else
450 {
451 rtx lo_result, hi_result, lo_dest, hi_dest;
452 rtx lo_op1, hi_op1, lo_op2, hi_op2;
453 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
454 &lo_op2, &hi_op2);
455 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
456 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
457
458 if (lo_op2 == const0_rtx)
459 {
460 lo_dest = lo_op1;
461 if (!arm_add_operand (hi_op2, SImode))
462 hi_op2 = force_reg (SImode, hi_op2);
463 /* Assume hi_op2 won't also be zero. */
464 emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
465 }
466 else
467 {
468 if (!arm_add_operand (lo_op2, SImode))
469 lo_op2 = force_reg (SImode, lo_op2);
470 if (!arm_not_operand (hi_op2, SImode))
471 hi_op2 = force_reg (SImode, hi_op2);
472
473 emit_insn (gen_addsi3_compareC (lo_dest, lo_op1, lo_op2));
474 if (hi_op2 == const0_rtx)
475 emit_insn (gen_add0si3_carryin_ltu (hi_dest, hi_op1));
476 else
477 emit_insn (gen_addsi3_carryin_ltu (hi_dest, hi_op1, hi_op2));
478 }
479
480 if (lo_result != lo_dest)
481 emit_move_insn (lo_result, lo_dest);
482 if (hi_result != hi_dest)
483 emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
484 DONE;
485 }
486 "
487 )
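;; Illustration (hypothetical source, not part of the port itself): a
;; 64-bit addition such as
;;
;;   long long add64 (long long a, long long b) { return a + b; }
;;
;; is decomposed by the expander above into a flag-setting low-part add
;; followed by an add-with-carry on the high part, typically something like
;;
;;   adds  r0, r0, r2
;;   adc   r1, r1, r3
;;
;; assuming the usual AAPCS lowpart/highpart register assignment.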
488
489 (define_expand "addv<mode>4"
490 [(match_operand:SIDI 0 "register_operand")
491 (match_operand:SIDI 1 "register_operand")
492 (match_operand:SIDI 2 "register_operand")
493 (match_operand 3 "")]
494 "TARGET_32BIT"
495 {
496 emit_insn (gen_add<mode>3_compareV (operands[0], operands[1], operands[2]));
497 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
498
499 DONE;
500 })
501
502 (define_expand "uaddv<mode>4"
503 [(match_operand:SIDI 0 "register_operand")
504 (match_operand:SIDI 1 "register_operand")
505 (match_operand:SIDI 2 "register_operand")
506 (match_operand 3 "")]
507 "TARGET_32BIT"
508 {
509 emit_insn (gen_add<mode>3_compareC (operands[0], operands[1], operands[2]));
510 arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
511
512 DONE;
513 })
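;; These two expanders implement the named add-with-overflow patterns that
;; back __builtin_add_overflow: the addition is performed by a flag-setting
;; add and arm_gen_unlikely_cbranch then branches on the overflow (V) flag
;; for the signed case or the carry (C) flag for the unsigned case.  For a
;; hypothetical
;;
;;   int f (int a, int b, int *r) { return __builtin_add_overflow (a, b, r); }
;;
;; this would typically come out as an "adds" followed by a branch that
;; tests the V flag.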
514
515 (define_expand "addsi3"
516 [(set (match_operand:SI 0 "s_register_operand")
517 (plus:SI (match_operand:SI 1 "s_register_operand")
518 (match_operand:SI 2 "reg_or_int_operand")))]
519 "TARGET_EITHER"
520 "
521 if (TARGET_32BIT && CONST_INT_P (operands[2]))
522 {
523 arm_split_constant (PLUS, SImode, NULL_RTX,
524 INTVAL (operands[2]), operands[0], operands[1],
525 optimize && can_create_pseudo_p ());
526 DONE;
527 }
528 "
529 )
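;; When the immediate is not encodable (an ARM immediate is an 8-bit value
;; rotated right by an even amount), arm_split_constant synthesizes the
;; addition from several encodable pieces.  For a hypothetical
;;
;;   int f (int x) { return x + 0x10003; }
;;
;; the constant might be split into two additions of valid immediates,
;; e.g. an add of #65536 followed by an add of #3.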
530
531 ; If there is a scratch available, this will be faster than synthesizing the
532 ; addition.
533 (define_peephole2
534 [(match_scratch:SI 3 "r")
535 (set (match_operand:SI 0 "arm_general_register_operand" "")
536 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
537 (match_operand:SI 2 "const_int_operand" "")))]
538 "TARGET_32BIT &&
539 !(const_ok_for_arm (INTVAL (operands[2]))
540 || const_ok_for_arm (-INTVAL (operands[2])))
541 && const_ok_for_arm (~INTVAL (operands[2]))"
542 [(set (match_dup 3) (match_dup 2))
543 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
544 ""
545 )
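;; Example: for a constant such as 0xffff00ff, neither the value nor its
;; negation is a valid immediate, but its bitwise complement (0x0000ff00)
;; is.  With a free scratch register the peephole therefore loads the
;; constant with a single "mvn" of the complement and performs a
;; register-register "add", instead of synthesizing the constant inline.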
546
547 ;; The r/r/k alternative is required when reloading the address
548 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
549 ;; put the duplicated register first, and not try the commutative version.
550 (define_insn_and_split "*arm_addsi3"
551 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
552 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
553 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
554 "TARGET_32BIT"
555 "@
556 add%?\\t%0, %0, %2
557 add%?\\t%0, %1, %2
558 add%?\\t%0, %1, %2
559 add%?\\t%0, %1, %2
560 add%?\\t%0, %1, %2
561 add%?\\t%0, %1, %2
562 add%?\\t%0, %2, %1
563 add%?\\t%0, %1, %2
564 addw%?\\t%0, %1, %2
565 addw%?\\t%0, %1, %2
566 sub%?\\t%0, %1, #%n2
567 sub%?\\t%0, %1, #%n2
568 sub%?\\t%0, %1, #%n2
569 subw%?\\t%0, %1, #%n2
570 subw%?\\t%0, %1, #%n2
571 #"
572 "TARGET_32BIT
573 && CONST_INT_P (operands[2])
574 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
575 && (reload_completed || !arm_eliminable_register (operands[1]))"
576 [(clobber (const_int 0))]
577 "
578 arm_split_constant (PLUS, SImode, curr_insn,
579 INTVAL (operands[2]), operands[0],
580 operands[1], 0);
581 DONE;
582 "
583 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
584 (set_attr "predicable" "yes")
585 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
586 (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
587 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
588 (const_string "alu_imm")
589 (const_string "alu_sreg")))
590 ]
591 )
592
593 (define_insn "adddi3_compareV"
594 [(set (reg:CC_V CC_REGNUM)
595 (ne:CC_V
596 (plus:TI
597 (sign_extend:TI (match_operand:DI 1 "s_register_operand" "r"))
598 (sign_extend:TI (match_operand:DI 2 "s_register_operand" "r")))
599 (sign_extend:TI (plus:DI (match_dup 1) (match_dup 2)))))
600 (set (match_operand:DI 0 "s_register_operand" "=&r")
601 (plus:DI (match_dup 1) (match_dup 2)))]
602 "TARGET_32BIT"
603 "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
604 [(set_attr "conds" "set")
605 (set_attr "length" "8")
606 (set_attr "type" "multiple")]
607 )
608
609 (define_insn "addsi3_compareV"
610 [(set (reg:CC_V CC_REGNUM)
611 (ne:CC_V
612 (plus:DI
613 (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
614 (sign_extend:DI (match_operand:SI 2 "register_operand" "r")))
615 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
616 (set (match_operand:SI 0 "register_operand" "=r")
617 (plus:SI (match_dup 1) (match_dup 2)))]
618 "TARGET_32BIT"
619 "adds%?\\t%0, %1, %2"
620 [(set_attr "conds" "set")
621 (set_attr "type" "alus_sreg")]
622 )
623
624 (define_insn "adddi3_compareC"
625 [(set (reg:CC_C CC_REGNUM)
626 (compare:CC_C
627 (plus:DI
628 (match_operand:DI 1 "register_operand" "r")
629 (match_operand:DI 2 "register_operand" "r"))
630 (match_dup 1)))
631 (set (match_operand:DI 0 "register_operand" "=&r")
632 (plus:DI (match_dup 1) (match_dup 2)))]
633 "TARGET_32BIT"
634 "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
635 [(set_attr "conds" "set")
636 (set_attr "length" "8")
637 (set_attr "type" "multiple")]
638 )
639
640 (define_insn "addsi3_compareC"
641 [(set (reg:CC_C CC_REGNUM)
642 (compare:CC_C (plus:SI (match_operand:SI 1 "register_operand" "r")
643 (match_operand:SI 2 "register_operand" "r"))
644 (match_dup 1)))
645 (set (match_operand:SI 0 "register_operand" "=r")
646 (plus:SI (match_dup 1) (match_dup 2)))]
647 "TARGET_32BIT"
648 "adds%?\\t%0, %1, %2"
649 [(set_attr "conds" "set")
650 (set_attr "type" "alus_sreg")]
651 )
652
653 (define_insn "addsi3_compare0"
654 [(set (reg:CC_NOOV CC_REGNUM)
655 (compare:CC_NOOV
656 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
657 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
658 (const_int 0)))
659 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
660 (plus:SI (match_dup 1) (match_dup 2)))]
661 "TARGET_ARM"
662 "@
663 adds%?\\t%0, %1, %2
664 subs%?\\t%0, %1, #%n2
665 adds%?\\t%0, %1, %2"
666 [(set_attr "conds" "set")
667 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
668 )
669
670 (define_insn "*addsi3_compare0_scratch"
671 [(set (reg:CC_NOOV CC_REGNUM)
672 (compare:CC_NOOV
673 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
674 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
675 (const_int 0)))]
676 "TARGET_ARM"
677 "@
678 cmn%?\\t%0, %1
679 cmp%?\\t%0, #%n1
680 cmn%?\\t%0, %1"
681 [(set_attr "conds" "set")
682 (set_attr "predicable" "yes")
683 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
684 )
685
686 (define_insn "*compare_negsi_si"
687 [(set (reg:CC_Z CC_REGNUM)
688 (compare:CC_Z
689 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
690 (match_operand:SI 1 "s_register_operand" "l,r")))]
691 "TARGET_32BIT"
692 "cmn%?\\t%1, %0"
693 [(set_attr "conds" "set")
694 (set_attr "predicable" "yes")
695 (set_attr "arch" "t2,*")
696 (set_attr "length" "2,4")
697 (set_attr "predicable_short_it" "yes,no")
698 (set_attr "type" "alus_sreg")]
699 )
700
701 ;; This is the canonicalization of subsi3_compare when the
702 ;; addend is a constant.
703 (define_insn "cmpsi2_addneg"
704 [(set (reg:CC CC_REGNUM)
705 (compare:CC
706 (match_operand:SI 1 "s_register_operand" "r,r")
707 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
708 (set (match_operand:SI 0 "s_register_operand" "=r,r")
709 (plus:SI (match_dup 1)
710 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
711 "TARGET_32BIT
712 && (INTVAL (operands[2])
713 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
714 {
715 /* For 0 and INT_MIN it is essential that we use subs, as adds will result
716 in different condition codes (like cmn rather than like cmp), so that
717 alternative comes first. Both alternatives can match for any 0x??000000
718 value where, except for 0 and INT_MIN, it doesn't matter which we choose,
719 and also for -1 and 1 with TARGET_THUMB2; in that case prefer the
720 instruction with #1 as it is shorter. */
721 if (which_alternative == 0 && operands[3] != const1_rtx)
722 return "subs%?\\t%0, %1, #%n3";
723 else
724 return "adds%?\\t%0, %1, %3";
725 }
726 [(set_attr "conds" "set")
727 (set_attr "type" "alus_sreg")]
728 )
729
730 ;; Convert the sequence
731 ;; sub rd, rn, #1
732 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
733 ;; bne dest
734 ;; into
735 ;; subs rd, rn, #1
736 ;; bcs dest ((unsigned)rn >= 1)
737 ;; similarly for the beq variant using bcc.
738 ;; This is a common looping idiom (while (n--))
739 (define_peephole2
740 [(set (match_operand:SI 0 "arm_general_register_operand" "")
741 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
742 (const_int -1)))
743 (set (match_operand 2 "cc_register" "")
744 (compare (match_dup 0) (const_int -1)))
745 (set (pc)
746 (if_then_else (match_operator 3 "equality_operator"
747 [(match_dup 2) (const_int 0)])
748 (match_operand 4 "" "")
749 (match_operand 5 "" "")))]
750 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
751 [(parallel[
752 (set (match_dup 2)
753 (compare:CC
754 (match_dup 1) (const_int 1)))
755 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
756 (set (pc)
757 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
758 (match_dup 4)
759 (match_dup 5)))]
760 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
761 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
762 ? GEU : LTU),
763 VOIDmode,
764 operands[2], const0_rtx);"
765 )
766
767 ;; The next four insns work because they compare the result with one of
768 ;; the operands, and we know that the use of the condition code is
769 ;; either GEU or LTU, so we can use the carry flag from the addition
770 ;; instead of doing the compare a second time.
771 (define_insn "*addsi3_compare_op1"
772 [(set (reg:CC_C CC_REGNUM)
773 (compare:CC_C
774 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
775 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
776 (match_dup 1)))
777 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
778 (plus:SI (match_dup 1) (match_dup 2)))]
779 "TARGET_32BIT"
780 "@
781 adds%?\\t%0, %1, %2
782 adds%?\\t%0, %0, %2
783 subs%?\\t%0, %1, #%n2
784 subs%?\\t%0, %0, #%n2
785 adds%?\\t%0, %1, %2
786 subs%?\\t%0, %1, #%n2
787 adds%?\\t%0, %1, %2"
788 [(set_attr "conds" "set")
789 (set_attr "arch" "t2,t2,t2,t2,*,*,*")
790 (set_attr "length" "2,2,2,2,4,4,4")
791 (set_attr "type"
792 "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
793 )
794
795 (define_insn "*addsi3_compare_op2"
796 [(set (reg:CC_C CC_REGNUM)
797 (compare:CC_C
798 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
799 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
800 (match_dup 2)))
801 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
802 (plus:SI (match_dup 1) (match_dup 2)))]
803 "TARGET_32BIT"
804 "@
805 adds%?\\t%0, %1, %2
806 adds%?\\t%0, %0, %2
807 subs%?\\t%0, %1, #%n2
808 subs%?\\t%0, %0, #%n2
809 adds%?\\t%0, %1, %2
810 subs%?\\t%0, %1, #%n2
811 adds%?\\t%0, %1, %2"
812 [(set_attr "conds" "set")
813 (set_attr "arch" "t2,t2,t2,t2,*,*,*")
814 (set_attr "length" "2,2,2,2,4,4,4")
815 (set_attr "type"
816 "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
817 )
818
819 (define_insn "*compare_addsi2_op0"
820 [(set (reg:CC_C CC_REGNUM)
821 (compare:CC_C
822 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
823 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
824 (match_dup 0)))]
825 "TARGET_32BIT"
826 "@
827 cmp%?\\t%0, #%n1
828 cmn%?\\t%0, %1
829 cmn%?\\t%0, %1
830 cmp%?\\t%0, #%n1
831 cmn%?\\t%0, %1"
832 [(set_attr "conds" "set")
833 (set_attr "predicable" "yes")
834 (set_attr "arch" "t2,t2,*,*,*")
835 (set_attr "predicable_short_it" "yes,yes,no,no,no")
836 (set_attr "length" "2,2,4,4,4")
837 (set_attr "type" "alus_imm,alus_sreg,alus_imm,alus_imm,alus_sreg")]
838 )
839
840 (define_insn "*compare_addsi2_op1"
841 [(set (reg:CC_C CC_REGNUM)
842 (compare:CC_C
843 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
844 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
845 (match_dup 1)))]
846 "TARGET_32BIT"
847 "@
848 cmp%?\\t%0, #%n1
849 cmn%?\\t%0, %1
850 cmn%?\\t%0, %1
851 cmp%?\\t%0, #%n1
852 cmn%?\\t%0, %1"
853 [(set_attr "conds" "set")
854 (set_attr "predicable" "yes")
855 (set_attr "arch" "t2,t2,*,*,*")
856 (set_attr "predicable_short_it" "yes,yes,no,no,no")
857 (set_attr "length" "2,2,4,4,4")
858 (set_attr "type" "alus_imm,alus_sreg,alus_imm,alus_imm,alus_sreg")]
859 )
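;; A typical use of these carry-reusing patterns is an unsigned overflow
;; test such as
;;
;;   unsigned f (unsigned a, unsigned b) { return a + b < a; }
;;
;; where the comparison of the sum against one operand is answered by the
;; carry flag of the addition itself (an "adds", or a "cmn" when the sum is
;; not otherwise needed), so no separate "cmp" has to be emitted.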
860
861 (define_insn "addsi3_carryin_<optab>"
862 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
863 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
864 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
865 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
866 "TARGET_32BIT"
867 "@
868 adc%?\\t%0, %1, %2
869 adc%?\\t%0, %1, %2
870 sbc%?\\t%0, %1, #%B2"
871 [(set_attr "conds" "use")
872 (set_attr "predicable" "yes")
873 (set_attr "arch" "t2,*,*")
874 (set_attr "length" "4")
875 (set_attr "predicable_short_it" "yes,no,no")
876 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
877 )
878
879 ;; Canonicalization of the above when the immediate is zero.
880 (define_insn "add0si3_carryin_<optab>"
881 [(set (match_operand:SI 0 "s_register_operand" "=r")
882 (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
883 (match_operand:SI 1 "arm_not_operand" "r")))]
884 "TARGET_32BIT"
885 "adc%?\\t%0, %1, #0"
886 [(set_attr "conds" "use")
887 (set_attr "predicable" "yes")
888 (set_attr "length" "4")
889 (set_attr "type" "adc_imm")]
890 )
891
892 (define_insn "*addsi3_carryin_alt2_<optab>"
893 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
894 (plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
895 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
896 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
897 "TARGET_32BIT"
898 "@
899 adc%?\\t%0, %1, %2
900 adc%?\\t%0, %1, %2
901 sbc%?\\t%0, %1, #%B2"
902 [(set_attr "conds" "use")
903 (set_attr "predicable" "yes")
904 (set_attr "arch" "t2,*,*")
905 (set_attr "length" "4")
906 (set_attr "predicable_short_it" "yes,no,no")
907 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
908 )
909
910 (define_insn "*addsi3_carryin_shift_<optab>"
911 [(set (match_operand:SI 0 "s_register_operand" "=r")
912 (plus:SI (plus:SI
913 (match_operator:SI 2 "shift_operator"
914 [(match_operand:SI 3 "s_register_operand" "r")
915 (match_operand:SI 4 "reg_or_int_operand" "rM")])
916 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0)))
917 (match_operand:SI 1 "s_register_operand" "r")))]
918 "TARGET_32BIT"
919 "adc%?\\t%0, %1, %3%S2"
920 [(set_attr "conds" "use")
921 (set_attr "predicable" "yes")
922 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
923 (const_string "alu_shift_imm")
924 (const_string "alu_shift_reg")))]
925 )
926
927 (define_insn "*addsi3_carryin_clobercc_<optab>"
928 [(set (match_operand:SI 0 "s_register_operand" "=r")
929 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
930 (match_operand:SI 2 "arm_rhs_operand" "rI"))
931 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))
932 (clobber (reg:CC CC_REGNUM))]
933 "TARGET_32BIT"
934 "adcs%?\\t%0, %1, %2"
935 [(set_attr "conds" "set")
936 (set_attr "type" "adcs_reg")]
937 )
938
939 (define_expand "subv<mode>4"
940 [(match_operand:SIDI 0 "register_operand")
941 (match_operand:SIDI 1 "register_operand")
942 (match_operand:SIDI 2 "register_operand")
943 (match_operand 3 "")]
944 "TARGET_32BIT"
945 {
946 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
947 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
948
949 DONE;
950 })
951
952 (define_expand "usubv<mode>4"
953 [(match_operand:SIDI 0 "register_operand")
954 (match_operand:SIDI 1 "register_operand")
955 (match_operand:SIDI 2 "register_operand")
956 (match_operand 3 "")]
957 "TARGET_32BIT"
958 {
959 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
960 arm_gen_unlikely_cbranch (LTU, CCmode, operands[3]);
961
962 DONE;
963 })
964
965 (define_insn "subdi3_compare1"
966 [(set (reg:CC CC_REGNUM)
967 (compare:CC
968 (match_operand:DI 1 "s_register_operand" "r")
969 (match_operand:DI 2 "s_register_operand" "r")))
970 (set (match_operand:DI 0 "s_register_operand" "=&r")
971 (minus:DI (match_dup 1) (match_dup 2)))]
972 "TARGET_32BIT"
973 "subs\\t%Q0, %Q1, %Q2;sbcs\\t%R0, %R1, %R2"
974 [(set_attr "conds" "set")
975 (set_attr "length" "8")
976 (set_attr "type" "multiple")]
977 )
978
979 (define_insn "subsi3_compare1"
980 [(set (reg:CC CC_REGNUM)
981 (compare:CC
982 (match_operand:SI 1 "register_operand" "r")
983 (match_operand:SI 2 "register_operand" "r")))
984 (set (match_operand:SI 0 "register_operand" "=r")
985 (minus:SI (match_dup 1) (match_dup 2)))]
986 "TARGET_32BIT"
987 "subs%?\\t%0, %1, %2"
988 [(set_attr "conds" "set")
989 (set_attr "type" "alus_sreg")]
990 )
991
992 (define_insn "*subsi3_carryin"
993 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
994 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
995 (match_operand:SI 2 "s_register_operand" "r,r,r"))
996 (match_operand:SI 3 "arm_borrow_operation" "")))]
997 "TARGET_32BIT"
998 "@
999 sbc%?\\t%0, %1, %2
1000 rsc%?\\t%0, %2, %1
1001 sbc%?\\t%0, %2, %2, lsl #1"
1002 [(set_attr "conds" "use")
1003 (set_attr "arch" "*,a,t2")
1004 (set_attr "predicable" "yes")
1005 (set_attr "type" "adc_reg,adc_imm,alu_shift_imm")]
1006 )
1007
1008 (define_insn "*subsi3_carryin_const"
1009 [(set (match_operand:SI 0 "s_register_operand" "=r")
1010 (minus:SI (plus:SI
1011 (match_operand:SI 1 "s_register_operand" "r")
1012 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1013 (match_operand:SI 3 "arm_borrow_operation" "")))]
1014 "TARGET_32BIT"
1015 "sbc\\t%0, %1, #%n2"
1016 [(set_attr "conds" "use")
1017 (set_attr "type" "adc_imm")]
1018 )
1019
1020 (define_insn "*subsi3_carryin_const0"
1021 [(set (match_operand:SI 0 "s_register_operand" "=r")
1022 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1023 (match_operand:SI 2 "arm_borrow_operation" "")))]
1024 "TARGET_32BIT"
1025 "sbc\\t%0, %1, #0"
1026 [(set_attr "conds" "use")
1027 (set_attr "type" "adc_imm")]
1028 )
1029
1030 (define_insn "*subsi3_carryin_shift"
1031 [(set (match_operand:SI 0 "s_register_operand" "=r")
1032 (minus:SI (minus:SI
1033 (match_operand:SI 1 "s_register_operand" "r")
1034 (match_operator:SI 2 "shift_operator"
1035 [(match_operand:SI 3 "s_register_operand" "r")
1036 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
1037 (match_operand:SI 5 "arm_borrow_operation" "")))]
1038 "TARGET_32BIT"
1039 "sbc%?\\t%0, %1, %3%S2"
1040 [(set_attr "conds" "use")
1041 (set_attr "predicable" "yes")
1042 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1043 (const_string "alu_shift_imm")
1044 (const_string "alu_shift_reg")))]
1045 )
1046
1047 (define_insn "*rsbsi3_carryin_shift"
1048 [(set (match_operand:SI 0 "s_register_operand" "=r")
1049 (minus:SI (minus:SI
1050 (match_operator:SI 2 "shift_operator"
1051 [(match_operand:SI 3 "s_register_operand" "r")
1052 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1053 (match_operand:SI 1 "s_register_operand" "r"))
1054 (match_operand:SI 5 "arm_borrow_operation" "")))]
1055 "TARGET_ARM"
1056 "rsc%?\\t%0, %1, %3%S2"
1057 [(set_attr "conds" "use")
1058 (set_attr "predicable" "yes")
1059 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1060 (const_string "alu_shift_imm")
1061 (const_string "alu_shift_reg")))]
1062 )
1063
1064 ; Transform ((x << y) - 1) into ~(~(x - 1) << y), where x is a constant.
1065 (define_split
1066 [(set (match_operand:SI 0 "s_register_operand" "")
1067 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1068 (match_operand:SI 2 "s_register_operand" ""))
1069 (const_int -1)))
1070 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1071 "TARGET_32BIT"
1072 [(set (match_dup 3) (match_dup 1))
1073 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1074 "
1075 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1076 ")
1077
1078 (define_expand "addsf3"
1079 [(set (match_operand:SF 0 "s_register_operand")
1080 (plus:SF (match_operand:SF 1 "s_register_operand")
1081 (match_operand:SF 2 "s_register_operand")))]
1082 "TARGET_32BIT && TARGET_HARD_FLOAT"
1083 "
1084 ")
1085
1086 (define_expand "adddf3"
1087 [(set (match_operand:DF 0 "s_register_operand")
1088 (plus:DF (match_operand:DF 1 "s_register_operand")
1089 (match_operand:DF 2 "s_register_operand")))]
1090 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1091 "
1092 ")
1093
1094 (define_expand "subdi3"
1095 [(parallel
1096 [(set (match_operand:DI 0 "s_register_operand")
1097 (minus:DI (match_operand:DI 1 "s_register_operand")
1098 (match_operand:DI 2 "s_register_operand")))
1099 (clobber (reg:CC CC_REGNUM))])]
1100 "TARGET_EITHER"
1101 "
1102 ")
1103
1104 (define_insn "*arm_subdi3"
1105 [(set (match_operand:DI 0 "arm_general_register_operand" "=&r,&r,&r")
1106 (minus:DI (match_operand:DI 1 "arm_general_register_operand" "0,r,0")
1107 (match_operand:DI 2 "arm_general_register_operand" "r,0,0")))
1108 (clobber (reg:CC CC_REGNUM))]
1109 "TARGET_32BIT"
1110 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
1111 [(set_attr "conds" "clob")
1112 (set_attr "length" "8")
1113 (set_attr "type" "multiple")]
1114 )
1115
1116 (define_expand "subsi3"
1117 [(set (match_operand:SI 0 "s_register_operand")
1118 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1119 (match_operand:SI 2 "s_register_operand")))]
1120 "TARGET_EITHER"
1121 "
1122 if (CONST_INT_P (operands[1]))
1123 {
1124 if (TARGET_32BIT)
1125 {
1126 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1127 operands[1] = force_reg (SImode, operands[1]);
1128 else
1129 {
1130 arm_split_constant (MINUS, SImode, NULL_RTX,
1131 INTVAL (operands[1]), operands[0],
1132 operands[2],
1133 optimize && can_create_pseudo_p ());
1134 DONE;
1135 }
1136 }
1137 else /* TARGET_THUMB1 */
1138 operands[1] = force_reg (SImode, operands[1]);
1139 }
1140 "
1141 )
1142
1143 ; ??? Check Thumb-2 split length
1144 (define_insn_and_split "*arm_subsi3_insn"
1145 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
1146 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
1147 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
1148 "TARGET_32BIT"
1149 "@
1150 sub%?\\t%0, %1, %2
1151 sub%?\\t%0, %2
1152 sub%?\\t%0, %1, %2
1153 rsb%?\\t%0, %2, %1
1154 rsb%?\\t%0, %2, %1
1155 sub%?\\t%0, %1, %2
1156 sub%?\\t%0, %1, %2
1157 sub%?\\t%0, %1, %2
1158 #"
1159 "&& (CONST_INT_P (operands[1])
1160 && !const_ok_for_arm (INTVAL (operands[1])))"
1161 [(clobber (const_int 0))]
1162 "
1163 arm_split_constant (MINUS, SImode, curr_insn,
1164 INTVAL (operands[1]), operands[0], operands[2], 0);
1165 DONE;
1166 "
1167 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
1168 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
1169 (set_attr "predicable" "yes")
1170 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
1171 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
1172 )
1173
1174 (define_peephole2
1175 [(match_scratch:SI 3 "r")
1176 (set (match_operand:SI 0 "arm_general_register_operand" "")
1177 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1178 (match_operand:SI 2 "arm_general_register_operand" "")))]
1179 "TARGET_32BIT
1180 && !const_ok_for_arm (INTVAL (operands[1]))
1181 && const_ok_for_arm (~INTVAL (operands[1]))"
1182 [(set (match_dup 3) (match_dup 1))
1183 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1184 ""
1185 )
1186
1187 (define_insn "subsi3_compare0"
1188 [(set (reg:CC_NOOV CC_REGNUM)
1189 (compare:CC_NOOV
1190 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1191 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1192 (const_int 0)))
1193 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1194 (minus:SI (match_dup 1) (match_dup 2)))]
1195 "TARGET_32BIT"
1196 "@
1197 subs%?\\t%0, %1, %2
1198 subs%?\\t%0, %1, %2
1199 rsbs%?\\t%0, %2, %1"
1200 [(set_attr "conds" "set")
1201 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
1202 )
1203
1204 (define_insn "subsi3_compare"
1205 [(set (reg:CC CC_REGNUM)
1206 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1207 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1208 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1209 (minus:SI (match_dup 1) (match_dup 2)))]
1210 "TARGET_32BIT"
1211 "@
1212 subs%?\\t%0, %1, %2
1213 subs%?\\t%0, %1, %2
1214 rsbs%?\\t%0, %2, %1"
1215 [(set_attr "conds" "set")
1216 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
1217 )
1218
1219 (define_expand "subsf3"
1220 [(set (match_operand:SF 0 "s_register_operand")
1221 (minus:SF (match_operand:SF 1 "s_register_operand")
1222 (match_operand:SF 2 "s_register_operand")))]
1223 "TARGET_32BIT && TARGET_HARD_FLOAT"
1224 "
1225 ")
1226
1227 (define_expand "subdf3"
1228 [(set (match_operand:DF 0 "s_register_operand")
1229 (minus:DF (match_operand:DF 1 "s_register_operand")
1230 (match_operand:DF 2 "s_register_operand")))]
1231 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1232 "
1233 ")
1234
1235 \f
1236 ;; Multiplication insns
1237
1238 (define_expand "mulhi3"
1239 [(set (match_operand:HI 0 "s_register_operand")
1240 (mult:HI (match_operand:HI 1 "s_register_operand")
1241 (match_operand:HI 2 "s_register_operand")))]
1242 "TARGET_DSP_MULTIPLY"
1243 "
1244 {
1245 rtx result = gen_reg_rtx (SImode);
1246 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
1247 emit_move_insn (operands[0], gen_lowpart (HImode, result));
1248 DONE;
1249 }"
1250 )
1251
1252 (define_expand "mulsi3"
1253 [(set (match_operand:SI 0 "s_register_operand")
1254 (mult:SI (match_operand:SI 2 "s_register_operand")
1255 (match_operand:SI 1 "s_register_operand")))]
1256 "TARGET_EITHER"
1257 ""
1258 )
1259
1260 ;; Use `&' and then `0' to prevent operands 0 and 2 being the same
1261 (define_insn "*mul"
1262 [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
1263 (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
1264 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
1265 "TARGET_32BIT"
1266 "mul%?\\t%0, %2, %1"
1267 [(set_attr "type" "mul")
1268 (set_attr "predicable" "yes")
1269 (set_attr "arch" "t2,v6,nov6,nov6")
1270 (set_attr "length" "4")
1271 (set_attr "predicable_short_it" "yes,no,*,*")]
1272 )
1273
1274 ;; MLA and MLS instructions. Use operand 1 for the accumulator to prefer
1275 ;; reusing the same register.
1276
1277 (define_insn "*mla"
1278 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
1279 (plus:SI
1280 (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
1281 (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
1282 (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
1283 "TARGET_32BIT"
1284 "mla%?\\t%0, %3, %2, %1"
1285 [(set_attr "type" "mla")
1286 (set_attr "predicable" "yes")
1287 (set_attr "arch" "v6,nov6,nov6,nov6")]
1288 )
1289
1290 (define_insn "*mls"
1291 [(set (match_operand:SI 0 "s_register_operand" "=r")
1292 (minus:SI
1293 (match_operand:SI 1 "s_register_operand" "r")
1294 (mult:SI (match_operand:SI 3 "s_register_operand" "r")
1295 (match_operand:SI 2 "s_register_operand" "r"))))]
1296 "TARGET_32BIT && arm_arch_thumb2"
1297 "mls%?\\t%0, %3, %2, %1"
1298 [(set_attr "type" "mla")
1299 (set_attr "predicable" "yes")]
1300 )
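;; Illustration: a multiply-accumulate such as
;;
;;   int f (int a, int b, int acc) { return acc + a * b; }
;;
;; maps onto a single "mla", and acc - a * b maps onto "mls" on ARMv6T2 or
;; later; keeping the accumulator in operand 1 encourages the register
;; allocator to reuse the accumulator register for the result.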
1301
1302 (define_insn "*mulsi3_compare0"
1303 [(set (reg:CC_NOOV CC_REGNUM)
1304 (compare:CC_NOOV (mult:SI
1305 (match_operand:SI 2 "s_register_operand" "r,r")
1306 (match_operand:SI 1 "s_register_operand" "%0,r"))
1307 (const_int 0)))
1308 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1309 (mult:SI (match_dup 2) (match_dup 1)))]
1310 "TARGET_ARM && !arm_arch6"
1311 "muls%?\\t%0, %2, %1"
1312 [(set_attr "conds" "set")
1313 (set_attr "type" "muls")]
1314 )
1315
1316 (define_insn "*mulsi3_compare0_v6"
1317 [(set (reg:CC_NOOV CC_REGNUM)
1318 (compare:CC_NOOV (mult:SI
1319 (match_operand:SI 2 "s_register_operand" "r")
1320 (match_operand:SI 1 "s_register_operand" "r"))
1321 (const_int 0)))
1322 (set (match_operand:SI 0 "s_register_operand" "=r")
1323 (mult:SI (match_dup 2) (match_dup 1)))]
1324 "TARGET_ARM && arm_arch6 && optimize_size"
1325 "muls%?\\t%0, %2, %1"
1326 [(set_attr "conds" "set")
1327 (set_attr "type" "muls")]
1328 )
1329
1330 (define_insn "*mulsi_compare0_scratch"
1331 [(set (reg:CC_NOOV CC_REGNUM)
1332 (compare:CC_NOOV (mult:SI
1333 (match_operand:SI 2 "s_register_operand" "r,r")
1334 (match_operand:SI 1 "s_register_operand" "%0,r"))
1335 (const_int 0)))
1336 (clobber (match_scratch:SI 0 "=&r,&r"))]
1337 "TARGET_ARM && !arm_arch6"
1338 "muls%?\\t%0, %2, %1"
1339 [(set_attr "conds" "set")
1340 (set_attr "type" "muls")]
1341 )
1342
1343 (define_insn "*mulsi_compare0_scratch_v6"
1344 [(set (reg:CC_NOOV CC_REGNUM)
1345 (compare:CC_NOOV (mult:SI
1346 (match_operand:SI 2 "s_register_operand" "r")
1347 (match_operand:SI 1 "s_register_operand" "r"))
1348 (const_int 0)))
1349 (clobber (match_scratch:SI 0 "=r"))]
1350 "TARGET_ARM && arm_arch6 && optimize_size"
1351 "muls%?\\t%0, %2, %1"
1352 [(set_attr "conds" "set")
1353 (set_attr "type" "muls")]
1354 )
1355
1356 (define_insn "*mulsi3addsi_compare0"
1357 [(set (reg:CC_NOOV CC_REGNUM)
1358 (compare:CC_NOOV
1359 (plus:SI (mult:SI
1360 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1361 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1362 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1363 (const_int 0)))
1364 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1365 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1366 (match_dup 3)))]
1367 "TARGET_ARM && arm_arch6"
1368 "mlas%?\\t%0, %2, %1, %3"
1369 [(set_attr "conds" "set")
1370 (set_attr "type" "mlas")]
1371 )
1372
1373 (define_insn "*mulsi3addsi_compare0_v6"
1374 [(set (reg:CC_NOOV CC_REGNUM)
1375 (compare:CC_NOOV
1376 (plus:SI (mult:SI
1377 (match_operand:SI 2 "s_register_operand" "r")
1378 (match_operand:SI 1 "s_register_operand" "r"))
1379 (match_operand:SI 3 "s_register_operand" "r"))
1380 (const_int 0)))
1381 (set (match_operand:SI 0 "s_register_operand" "=r")
1382 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1383 (match_dup 3)))]
1384 "TARGET_ARM && arm_arch6 && optimize_size"
1385 "mlas%?\\t%0, %2, %1, %3"
1386 [(set_attr "conds" "set")
1387 (set_attr "type" "mlas")]
1388 )
1389
1390 (define_insn "*mulsi3addsi_compare0_scratch"
1391 [(set (reg:CC_NOOV CC_REGNUM)
1392 (compare:CC_NOOV
1393 (plus:SI (mult:SI
1394 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1395 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1396 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1397 (const_int 0)))
1398 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1399 "TARGET_ARM && !arm_arch6"
1400 "mlas%?\\t%0, %2, %1, %3"
1401 [(set_attr "conds" "set")
1402 (set_attr "type" "mlas")]
1403 )
1404
1405 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1406 [(set (reg:CC_NOOV CC_REGNUM)
1407 (compare:CC_NOOV
1408 (plus:SI (mult:SI
1409 (match_operand:SI 2 "s_register_operand" "r")
1410 (match_operand:SI 1 "s_register_operand" "r"))
1411 (match_operand:SI 3 "s_register_operand" "r"))
1412 (const_int 0)))
1413 (clobber (match_scratch:SI 0 "=r"))]
1414 "TARGET_ARM && arm_arch6 && optimize_size"
1415 "mlas%?\\t%0, %2, %1, %3"
1416 [(set_attr "conds" "set")
1417 (set_attr "type" "mlas")]
1418 )
1419
1420 ;; 32x32->64 widening multiply.
1421 ;; The only difference between the v3-5 and v6+ versions is the requirement
1422 ;; that the output does not overlap with either input.
1423
1424 (define_expand "<Us>mulsidi3"
1425 [(set (match_operand:DI 0 "s_register_operand")
1426 (mult:DI
1427 (SE:DI (match_operand:SI 1 "s_register_operand"))
1428 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
1429 "TARGET_32BIT"
1430 {
1431 emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
1432 gen_highpart (SImode, operands[0]),
1433 operands[1], operands[2]));
1434 DONE;
1435 }
1436 )
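;; For example, a widening multiply written as
;;
;;   long long f (int a, int b) { return (long long) a * b; }
;;
;; expands through this pattern into a single "smull" that produces both
;; halves of the 64-bit product ("umull" for the unsigned variant), rather
;; than a full 64-bit multiplication of sign-extended operands.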
1437
1438 (define_insn "<US>mull"
1439 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
1440 (mult:SI
1441 (match_operand:SI 2 "s_register_operand" "%r,r")
1442 (match_operand:SI 3 "s_register_operand" "r,r")))
1443 (set (match_operand:SI 1 "s_register_operand" "=r,&r")
1444 (truncate:SI
1445 (lshiftrt:DI
1446 (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
1447 (const_int 32))))]
1448 "TARGET_32BIT"
1449 "<US>mull%?\\t%0, %1, %2, %3"
1450 [(set_attr "type" "umull")
1451 (set_attr "predicable" "yes")
1452 (set_attr "arch" "v6,nov6")]
1453 )
1454
1455 (define_expand "<Us>maddsidi4"
1456 [(set (match_operand:DI 0 "s_register_operand")
1457 (plus:DI
1458 (mult:DI
1459 (SE:DI (match_operand:SI 1 "s_register_operand"))
1460 (SE:DI (match_operand:SI 2 "s_register_operand")))
1461 (match_operand:DI 3 "s_register_operand")))]
1462 "TARGET_32BIT"
1463 {
1464 emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
1465 gen_lowpart (SImode, operands[3]),
1466 gen_highpart (SImode, operands[0]),
1467 gen_highpart (SImode, operands[3]),
1468 operands[1], operands[2]));
1469 DONE;
1470 }
1471 )
1472
1473 (define_insn "<US>mlal"
1474 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
1475 (plus:SI
1476 (mult:SI
1477 (match_operand:SI 4 "s_register_operand" "%r,r")
1478 (match_operand:SI 5 "s_register_operand" "r,r"))
1479 (match_operand:SI 1 "s_register_operand" "0,0")))
1480 (set (match_operand:SI 2 "s_register_operand" "=r,&r")
1481 (plus:SI
1482 (truncate:SI
1483 (lshiftrt:DI
1484 (plus:DI
1485 (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
1486 (zero_extend:DI (match_dup 1)))
1487 (const_int 32)))
1488 (match_operand:SI 3 "s_register_operand" "2,2")))]
1489 "TARGET_32BIT"
1490 "<US>mlal%?\\t%0, %2, %4, %5"
1491 [(set_attr "type" "umlal")
1492 (set_attr "predicable" "yes")
1493 (set_attr "arch" "v6,nov6")]
1494 )
1495
1496 (define_expand "<US>mulsi3_highpart"
1497 [(parallel
1498 [(set (match_operand:SI 0 "s_register_operand")
1499 (truncate:SI
1500 (lshiftrt:DI
1501 (mult:DI
1502 (SE:DI (match_operand:SI 1 "s_register_operand"))
1503 (SE:DI (match_operand:SI 2 "s_register_operand")))
1504 (const_int 32))))
1505 (clobber (match_scratch:SI 3 ""))])]
1506 "TARGET_32BIT"
1507 ""
1508 )
1509
1510 (define_insn "*<US>mull_high"
1511 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
1512 (truncate:SI
1513 (lshiftrt:DI
1514 (mult:DI
1515 (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
1516 (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
1517 (const_int 32))))
1518 (clobber (match_scratch:SI 3 "=r,&r,&r"))]
1519 "TARGET_32BIT"
1520 "<US>mull%?\\t%3, %0, %2, %1"
1521 [(set_attr "type" "umull")
1522 (set_attr "predicable" "yes")
1523 (set_attr "arch" "v6,nov6,nov6")]
1524 )
1525
1526 (define_insn "mulhisi3"
1527 [(set (match_operand:SI 0 "s_register_operand" "=r")
1528 (mult:SI (sign_extend:SI
1529 (match_operand:HI 1 "s_register_operand" "%r"))
1530 (sign_extend:SI
1531 (match_operand:HI 2 "s_register_operand" "r"))))]
1532 "TARGET_DSP_MULTIPLY"
1533 "smulbb%?\\t%0, %1, %2"
1534 [(set_attr "type" "smulxy")
1535 (set_attr "predicable" "yes")]
1536 )
1537
1538 (define_insn "*mulhisi3tb"
1539 [(set (match_operand:SI 0 "s_register_operand" "=r")
1540 (mult:SI (ashiftrt:SI
1541 (match_operand:SI 1 "s_register_operand" "r")
1542 (const_int 16))
1543 (sign_extend:SI
1544 (match_operand:HI 2 "s_register_operand" "r"))))]
1545 "TARGET_DSP_MULTIPLY"
1546 "smultb%?\\t%0, %1, %2"
1547 [(set_attr "type" "smulxy")
1548 (set_attr "predicable" "yes")]
1549 )
1550
1551 (define_insn "*mulhisi3bt"
1552 [(set (match_operand:SI 0 "s_register_operand" "=r")
1553 (mult:SI (sign_extend:SI
1554 (match_operand:HI 1 "s_register_operand" "r"))
1555 (ashiftrt:SI
1556 (match_operand:SI 2 "s_register_operand" "r")
1557 (const_int 16))))]
1558 "TARGET_DSP_MULTIPLY"
1559 "smulbt%?\\t%0, %1, %2"
1560 [(set_attr "type" "smulxy")
1561 (set_attr "predicable" "yes")]
1562 )
1563
1564 (define_insn "*mulhisi3tt"
1565 [(set (match_operand:SI 0 "s_register_operand" "=r")
1566 (mult:SI (ashiftrt:SI
1567 (match_operand:SI 1 "s_register_operand" "r")
1568 (const_int 16))
1569 (ashiftrt:SI
1570 (match_operand:SI 2 "s_register_operand" "r")
1571 (const_int 16))))]
1572 "TARGET_DSP_MULTIPLY"
1573 "smultt%?\\t%0, %1, %2"
1574 [(set_attr "type" "smulxy")
1575 (set_attr "predicable" "yes")]
1576 )
1577
1578 (define_insn "maddhisi4"
1579 [(set (match_operand:SI 0 "s_register_operand" "=r")
1580 (plus:SI (mult:SI (sign_extend:SI
1581 (match_operand:HI 1 "s_register_operand" "r"))
1582 (sign_extend:SI
1583 (match_operand:HI 2 "s_register_operand" "r")))
1584 (match_operand:SI 3 "s_register_operand" "r")))]
1585 "TARGET_DSP_MULTIPLY"
1586 "smlabb%?\\t%0, %1, %2, %3"
1587 [(set_attr "type" "smlaxy")
1588 (set_attr "predicable" "yes")]
1589 )
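;; Illustration: with the DSP multiply extensions, a 16x16+32
;; multiply-accumulate such as
;;
;;   int f (short a, short b, int acc) { return acc + a * b; }
;;
;; can be matched by this pattern and emitted as a single "smlabb".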
1590
1591 ;; Note: there is no maddhisi4bt because this one is the canonical form
1592 (define_insn "*maddhisi4tb"
1593 [(set (match_operand:SI 0 "s_register_operand" "=r")
1594 (plus:SI (mult:SI (ashiftrt:SI
1595 (match_operand:SI 1 "s_register_operand" "r")
1596 (const_int 16))
1597 (sign_extend:SI
1598 (match_operand:HI 2 "s_register_operand" "r")))
1599 (match_operand:SI 3 "s_register_operand" "r")))]
1600 "TARGET_DSP_MULTIPLY"
1601 "smlatb%?\\t%0, %1, %2, %3"
1602 [(set_attr "type" "smlaxy")
1603 (set_attr "predicable" "yes")]
1604 )
1605
1606 (define_insn "*maddhisi4tt"
1607 [(set (match_operand:SI 0 "s_register_operand" "=r")
1608 (plus:SI (mult:SI (ashiftrt:SI
1609 (match_operand:SI 1 "s_register_operand" "r")
1610 (const_int 16))
1611 (ashiftrt:SI
1612 (match_operand:SI 2 "s_register_operand" "r")
1613 (const_int 16)))
1614 (match_operand:SI 3 "s_register_operand" "r")))]
1615 "TARGET_DSP_MULTIPLY"
1616 "smlatt%?\\t%0, %1, %2, %3"
1617 [(set_attr "type" "smlaxy")
1618 (set_attr "predicable" "yes")]
1619 )
1620
1621 (define_insn "maddhidi4"
1622 [(set (match_operand:DI 0 "s_register_operand" "=r")
1623 (plus:DI
1624 (mult:DI (sign_extend:DI
1625 (match_operand:HI 1 "s_register_operand" "r"))
1626 (sign_extend:DI
1627 (match_operand:HI 2 "s_register_operand" "r")))
1628 (match_operand:DI 3 "s_register_operand" "0")))]
1629 "TARGET_DSP_MULTIPLY"
1630 "smlalbb%?\\t%Q0, %R0, %1, %2"
1631 [(set_attr "type" "smlalxy")
1632 (set_attr "predicable" "yes")])
1633
1634 ;; Note: there is no maddhidi4bt because this one is the canonical form
1635 (define_insn "*maddhidi4tb"
1636 [(set (match_operand:DI 0 "s_register_operand" "=r")
1637 (plus:DI
1638 (mult:DI (sign_extend:DI
1639 (ashiftrt:SI
1640 (match_operand:SI 1 "s_register_operand" "r")
1641 (const_int 16)))
1642 (sign_extend:DI
1643 (match_operand:HI 2 "s_register_operand" "r")))
1644 (match_operand:DI 3 "s_register_operand" "0")))]
1645 "TARGET_DSP_MULTIPLY"
1646 "smlaltb%?\\t%Q0, %R0, %1, %2"
1647 [(set_attr "type" "smlalxy")
1648 (set_attr "predicable" "yes")])
1649
1650 (define_insn "*maddhidi4tt"
1651 [(set (match_operand:DI 0 "s_register_operand" "=r")
1652 (plus:DI
1653 (mult:DI (sign_extend:DI
1654 (ashiftrt:SI
1655 (match_operand:SI 1 "s_register_operand" "r")
1656 (const_int 16)))
1657 (sign_extend:DI
1658 (ashiftrt:SI
1659 (match_operand:SI 2 "s_register_operand" "r")
1660 (const_int 16))))
1661 (match_operand:DI 3 "s_register_operand" "0")))]
1662 "TARGET_DSP_MULTIPLY"
1663 "smlaltt%?\\t%Q0, %R0, %1, %2"
1664 [(set_attr "type" "smlalxy")
1665 (set_attr "predicable" "yes")])
1666
1667 (define_expand "mulsf3"
1668 [(set (match_operand:SF 0 "s_register_operand")
1669 (mult:SF (match_operand:SF 1 "s_register_operand")
1670 (match_operand:SF 2 "s_register_operand")))]
1671 "TARGET_32BIT && TARGET_HARD_FLOAT"
1672 "
1673 ")
1674
1675 (define_expand "muldf3"
1676 [(set (match_operand:DF 0 "s_register_operand")
1677 (mult:DF (match_operand:DF 1 "s_register_operand")
1678 (match_operand:DF 2 "s_register_operand")))]
1679 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1680 "
1681 ")
1682 \f
1683 ;; Division insns
1684
1685 (define_expand "divsf3"
1686 [(set (match_operand:SF 0 "s_register_operand")
1687 (div:SF (match_operand:SF 1 "s_register_operand")
1688 (match_operand:SF 2 "s_register_operand")))]
1689 "TARGET_32BIT && TARGET_HARD_FLOAT"
1690 "")
1691
1692 (define_expand "divdf3"
1693 [(set (match_operand:DF 0 "s_register_operand")
1694 (div:DF (match_operand:DF 1 "s_register_operand")
1695 (match_operand:DF 2 "s_register_operand")))]
1696 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
1697 "")
1698 \f
1699
1700 ; Expand DImode logical operations. The mid-end expander does not split off memory
1701 ; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
1702 ; So an explicit expander is needed to generate better code.
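; For illustration, and assuming the usual AAPCS register assignment, a
; function such as
;   long long f (long long a, long long b) { return a & b; }
; is expanded by the pattern below into two SImode ANDs on the low and high
; register halves, roughly "and r0, r0, r2" followed by "and r1, r1, r3".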
1703
1704 (define_expand "<LOGICAL:optab>di3"
1705 [(set (match_operand:DI 0 "s_register_operand")
1706 (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
1707 (match_operand:DI 2 "arm_<optab>di_operand")))]
1708 "TARGET_32BIT"
1709 {
1710 rtx low = simplify_gen_binary (<CODE>, SImode,
1711 gen_lowpart (SImode, operands[1]),
1712 gen_lowpart (SImode, operands[2]));
1713 rtx high = simplify_gen_binary (<CODE>, SImode,
1714 gen_highpart (SImode, operands[1]),
1715 gen_highpart_mode (SImode, DImode,
1716 operands[2]));
1717
1718 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
1719 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
1720 DONE;
1721 }
1722 )
1723
1724 (define_expand "one_cmpldi2"
1725 [(set (match_operand:DI 0 "s_register_operand")
1726 (not:DI (match_operand:DI 1 "s_register_operand")))]
1727 "TARGET_32BIT"
1728 {
1729 rtx low = simplify_gen_unary (NOT, SImode,
1730 gen_lowpart (SImode, operands[1]),
1731 SImode);
1732 rtx high = simplify_gen_unary (NOT, SImode,
1733 gen_highpart_mode (SImode, DImode,
1734 operands[1]),
1735 SImode);
1736
1737 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
1738 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
1739 DONE;
1740 }
1741 )
1742
1743 ;; Split DImode and, ior, xor operations. Simply perform the logical
1744 ;; operation on the upper and lower halves of the registers.
1745 ;; This is needed for atomic operations in arm_split_atomic_op.
1746 ;; Avoid splitting IWMMXT instructions.
1747 (define_split
1748 [(set (match_operand:DI 0 "s_register_operand" "")
1749 (match_operator:DI 6 "logical_binary_operator"
1750 [(match_operand:DI 1 "s_register_operand" "")
1751 (match_operand:DI 2 "s_register_operand" "")]))]
1752 "TARGET_32BIT && reload_completed
1753 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1754 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1755 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1756 "
1757 {
1758 operands[3] = gen_highpart (SImode, operands[0]);
1759 operands[0] = gen_lowpart (SImode, operands[0]);
1760 operands[4] = gen_highpart (SImode, operands[1]);
1761 operands[1] = gen_lowpart (SImode, operands[1]);
1762 operands[5] = gen_highpart (SImode, operands[2]);
1763 operands[2] = gen_lowpart (SImode, operands[2]);
1764 }"
1765 )
1766
1767 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
1768 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
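;; For example, "long long f (long long a) { return ~a; }" becomes two MVN
;; instructions after this split, one per 32-bit half (roughly
;; "mvn r0, r0; mvn r1, r1" under the AAPCS register assignment).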
1769 (define_split
1770 [(set (match_operand:DI 0 "s_register_operand")
1771 (not:DI (match_operand:DI 1 "s_register_operand")))]
1772 "TARGET_32BIT"
1773 [(set (match_dup 0) (not:SI (match_dup 1)))
1774 (set (match_dup 2) (not:SI (match_dup 3)))]
1775 "
1776 {
1777 operands[2] = gen_highpart (SImode, operands[0]);
1778 operands[0] = gen_lowpart (SImode, operands[0]);
1779 operands[3] = gen_highpart (SImode, operands[1]);
1780 operands[1] = gen_lowpart (SImode, operands[1]);
1781 }"
1782 )
1783
1784 (define_expand "andsi3"
1785 [(set (match_operand:SI 0 "s_register_operand")
1786 (and:SI (match_operand:SI 1 "s_register_operand")
1787 (match_operand:SI 2 "reg_or_int_operand")))]
1788 "TARGET_EITHER"
1789 "
1790 if (TARGET_32BIT)
1791 {
1792 if (CONST_INT_P (operands[2]))
1793 {
1794 if (INTVAL (operands[2]) == 255 && arm_arch6)
1795 {
1796 operands[1] = convert_to_mode (QImode, operands[1], 1);
1797 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
1798 operands[1]));
1799 DONE;
1800 }
1801 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
1802 operands[2] = force_reg (SImode, operands[2]);
1803 else
1804 {
1805 arm_split_constant (AND, SImode, NULL_RTX,
1806 INTVAL (operands[2]), operands[0],
1807 operands[1],
1808 optimize && can_create_pseudo_p ());
1809
1810 DONE;
1811 }
1812 }
1813 }
1814 else /* TARGET_THUMB1 */
1815 {
1816 if (!CONST_INT_P (operands[2]))
1817 {
1818 rtx tmp = force_reg (SImode, operands[2]);
1819 if (rtx_equal_p (operands[0], operands[1]))
1820 operands[2] = tmp;
1821 else
1822 {
1823 operands[2] = operands[1];
1824 operands[1] = tmp;
1825 }
1826 }
1827 else
1828 {
1829 int i;
1830
1831 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1832 {
1833 operands[2] = force_reg (SImode,
1834 GEN_INT (~INTVAL (operands[2])));
1835
1836 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
1837
1838 DONE;
1839 }
1840
1841 for (i = 9; i <= 31; i++)
1842 {
1843 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
1844 {
1845 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1846 const0_rtx));
1847 DONE;
1848 }
1849 else if ((HOST_WIDE_INT_1 << i) - 1
1850 == ~INTVAL (operands[2]))
1851 {
1852 rtx shift = GEN_INT (i);
1853 rtx reg = gen_reg_rtx (SImode);
1854
1855 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1856 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1857
1858 DONE;
1859 }
1860 }
1861
1862 operands[2] = force_reg (SImode, operands[2]);
1863 }
1864 }
1865 "
1866 )
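;; Illustrative examples of the Thumb-1 masking tricks above (register
;; numbers are only an example; the actual code depends on context):
;;   x & ~0xff      -> movs r3, #255; bics r0, r3     (inverted mask < 256)
;;   x & 0x7fffffff -> lsls r0, r0, #1; lsrs r0, r0, #1   (mask is (1 << i) - 1)
;;   x & ~7         -> lsrs r0, r0, #3; lsls r0, r0, #3   (inverted mask is (1 << i) - 1)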
1867
1868 ; ??? Check split length for Thumb-2
1869 (define_insn_and_split "*arm_andsi3_insn"
1870 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
1871 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
1872 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
1873 "TARGET_32BIT"
1874 "@
1875 and%?\\t%0, %1, %2
1876 and%?\\t%0, %1, %2
1877 bic%?\\t%0, %1, #%B2
1878 and%?\\t%0, %1, %2
1879 #"
1880 "TARGET_32BIT
1881 && CONST_INT_P (operands[2])
1882 && !(const_ok_for_arm (INTVAL (operands[2]))
1883 || const_ok_for_arm (~INTVAL (operands[2])))"
1884 [(clobber (const_int 0))]
1885 "
1886 arm_split_constant (AND, SImode, curr_insn,
1887 INTVAL (operands[2]), operands[0], operands[1], 0);
1888 DONE;
1889 "
1890 [(set_attr "length" "4,4,4,4,16")
1891 (set_attr "predicable" "yes")
1892 (set_attr "predicable_short_it" "no,yes,no,no,no")
1893 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
1894 )
1895
1896 (define_insn "*andsi3_compare0"
1897 [(set (reg:CC_NOOV CC_REGNUM)
1898 (compare:CC_NOOV
1899 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1900 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
1901 (const_int 0)))
1902 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1903 (and:SI (match_dup 1) (match_dup 2)))]
1904 "TARGET_32BIT"
1905 "@
1906 ands%?\\t%0, %1, %2
1907 bics%?\\t%0, %1, #%B2
1908 ands%?\\t%0, %1, %2"
1909 [(set_attr "conds" "set")
1910 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
1911 )
1912
1913 (define_insn "*andsi3_compare0_scratch"
1914 [(set (reg:CC_NOOV CC_REGNUM)
1915 (compare:CC_NOOV
1916 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
1917 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
1918 (const_int 0)))
1919 (clobber (match_scratch:SI 2 "=X,r,X"))]
1920 "TARGET_32BIT"
1921 "@
1922 tst%?\\t%0, %1
1923 bics%?\\t%2, %0, #%B1
1924 tst%?\\t%0, %1"
1925 [(set_attr "conds" "set")
1926 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
1927 )
1928
1929 (define_insn "*zeroextractsi_compare0_scratch"
1930 [(set (reg:CC_NOOV CC_REGNUM)
1931 (compare:CC_NOOV (zero_extract:SI
1932 (match_operand:SI 0 "s_register_operand" "r")
1933 (match_operand 1 "const_int_operand" "n")
1934 (match_operand 2 "const_int_operand" "n"))
1935 (const_int 0)))]
1936 "TARGET_32BIT
1937 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
1938 && INTVAL (operands[1]) > 0
1939 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
1940 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
1941 "*
1942 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
1943 << INTVAL (operands[2]));
1944 output_asm_insn (\"tst%?\\t%0, %1\", operands);
1945 return \"\";
1946 "
1947 [(set_attr "conds" "set")
1948 (set_attr "predicable" "yes")
1949 (set_attr "type" "logics_imm")]
1950 )
1951
1952 (define_insn_and_split "*ne_zeroextractsi"
1953 [(set (match_operand:SI 0 "s_register_operand" "=r")
1954 (ne:SI (zero_extract:SI
1955 (match_operand:SI 1 "s_register_operand" "r")
1956 (match_operand:SI 2 "const_int_operand" "n")
1957 (match_operand:SI 3 "const_int_operand" "n"))
1958 (const_int 0)))
1959 (clobber (reg:CC CC_REGNUM))]
1960 "TARGET_32BIT
1961 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1962 && INTVAL (operands[2]) > 0
1963 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1964 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
1965 "#"
1966 "TARGET_32BIT
1967 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1968 && INTVAL (operands[2]) > 0
1969 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1970 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
1971 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
1972 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
1973 (const_int 0)))
1974 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
1975 (set (match_dup 0)
1976 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
1977 (match_dup 0) (const_int 1)))]
1978 "
1979 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
1980 << INTVAL (operands[3]));
1981 "
1982 [(set_attr "conds" "clob")
1983 (set (attr "length")
1984 (if_then_else (eq_attr "is_thumb" "yes")
1985 (const_int 12)
1986 (const_int 8)))
1987 (set_attr "type" "multiple")]
1988 )
1989
1990 (define_insn_and_split "*ne_zeroextractsi_shifted"
1991 [(set (match_operand:SI 0 "s_register_operand" "=r")
1992 (ne:SI (zero_extract:SI
1993 (match_operand:SI 1 "s_register_operand" "r")
1994 (match_operand:SI 2 "const_int_operand" "n")
1995 (const_int 0))
1996 (const_int 0)))
1997 (clobber (reg:CC CC_REGNUM))]
1998 "TARGET_ARM"
1999 "#"
2000 "TARGET_ARM"
2001 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2002 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2003 (const_int 0)))
2004 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2005 (set (match_dup 0)
2006 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2007 (match_dup 0) (const_int 1)))]
2008 "
2009 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2010 "
2011 [(set_attr "conds" "clob")
2012 (set_attr "length" "8")
2013 (set_attr "type" "multiple")]
2014 )
2015
2016 (define_insn_and_split "*ite_ne_zeroextractsi"
2017 [(set (match_operand:SI 0 "s_register_operand" "=r")
2018 (if_then_else:SI (ne (zero_extract:SI
2019 (match_operand:SI 1 "s_register_operand" "r")
2020 (match_operand:SI 2 "const_int_operand" "n")
2021 (match_operand:SI 3 "const_int_operand" "n"))
2022 (const_int 0))
2023 (match_operand:SI 4 "arm_not_operand" "rIK")
2024 (const_int 0)))
2025 (clobber (reg:CC CC_REGNUM))]
2026 "TARGET_ARM
2027 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2028 && INTVAL (operands[2]) > 0
2029 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2030 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2031 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2032 "#"
2033 "TARGET_ARM
2034 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2035 && INTVAL (operands[2]) > 0
2036 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2037 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2038 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2039 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2040 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2041 (const_int 0)))
2042 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2043 (set (match_dup 0)
2044 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2045 (match_dup 0) (match_dup 4)))]
2046 "
2047 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2048 << INTVAL (operands[3]));
2049 "
2050 [(set_attr "conds" "clob")
2051 (set_attr "length" "8")
2052 (set_attr "type" "multiple")]
2053 )
2054
2055 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2056 [(set (match_operand:SI 0 "s_register_operand" "=r")
2057 (if_then_else:SI (ne (zero_extract:SI
2058 (match_operand:SI 1 "s_register_operand" "r")
2059 (match_operand:SI 2 "const_int_operand" "n")
2060 (const_int 0))
2061 (const_int 0))
2062 (match_operand:SI 3 "arm_not_operand" "rIK")
2063 (const_int 0)))
2064 (clobber (reg:CC CC_REGNUM))]
2065 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2066 "#"
2067 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2068 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2069 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2070 (const_int 0)))
2071 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2072 (set (match_dup 0)
2073 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2074 (match_dup 0) (match_dup 3)))]
2075 "
2076 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2077 "
2078 [(set_attr "conds" "clob")
2079 (set_attr "length" "8")
2080 (set_attr "type" "multiple")]
2081 )
2082
2083 ;; ??? Use Thumb-2 bitfield insert/extract instructions here.
2084 (define_split
2085 [(set (match_operand:SI 0 "s_register_operand" "")
2086 (match_operator:SI 1 "shiftable_operator"
2087 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2088 (match_operand:SI 3 "const_int_operand" "")
2089 (match_operand:SI 4 "const_int_operand" ""))
2090 (match_operand:SI 5 "s_register_operand" "")]))
2091 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2092 "TARGET_ARM"
2093 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2094 (set (match_dup 0)
2095 (match_op_dup 1
2096 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2097 (match_dup 5)]))]
2098 "{
2099 HOST_WIDE_INT temp = INTVAL (operands[3]);
2100
2101 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2102 operands[4] = GEN_INT (32 - temp);
2103 }"
2104 )
2105
2106 (define_split
2107 [(set (match_operand:SI 0 "s_register_operand" "")
2108 (match_operator:SI 1 "shiftable_operator"
2109 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2110 (match_operand:SI 3 "const_int_operand" "")
2111 (match_operand:SI 4 "const_int_operand" ""))
2112 (match_operand:SI 5 "s_register_operand" "")]))
2113 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2114 "TARGET_ARM"
2115 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2116 (set (match_dup 0)
2117 (match_op_dup 1
2118 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2119 (match_dup 5)]))]
2120 "{
2121 HOST_WIDE_INT temp = INTVAL (operands[3]);
2122
2123 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2124 operands[4] = GEN_INT (32 - temp);
2125 }"
2126 )
2127
2128 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2129 ;;; represented by the bitfield, then this will produce incorrect results.
2130 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2131 ;;; which have a real bit-field insert instruction, the truncation happens
2132 ;;; in the bit-field insert instruction itself. Since arm does not have a
2133 ;;; bit-field insert instruction, we would have to emit code here to truncate
2134 ;;; the value before we insert. This loses some of the advantage of having
2135 ;;; this insv pattern, so it needs to be reevaluated.
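;;; For example, inserting the value 5 into a 2-bit field only makes sense
;;; after truncating it to 5 & ((1 << 2) - 1) = 1; the constant case in the
;;; expander below masks with "mask & INTVAL (operands[3])" for exactly this
;;; reason.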
2136
2137 (define_expand "insv"
2138 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
2139 (match_operand 1 "general_operand")
2140 (match_operand 2 "general_operand"))
2141 (match_operand 3 "reg_or_int_operand"))]
2142 "TARGET_ARM || arm_arch_thumb2"
2143 "
2144 {
2145 int start_bit = INTVAL (operands[2]);
2146 int width = INTVAL (operands[1]);
2147 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
2148 rtx target, subtarget;
2149
2150 if (arm_arch_thumb2)
2151 {
2152 if (unaligned_access && MEM_P (operands[0])
2153 && s_register_operand (operands[3], GET_MODE (operands[3]))
2154 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2155 {
2156 rtx base_addr;
2157
2158 if (BYTES_BIG_ENDIAN)
2159 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2160 - start_bit;
2161
2162 if (width == 32)
2163 {
2164 base_addr = adjust_address (operands[0], SImode,
2165 start_bit / BITS_PER_UNIT);
2166 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2167 }
2168 else
2169 {
2170 rtx tmp = gen_reg_rtx (HImode);
2171
2172 base_addr = adjust_address (operands[0], HImode,
2173 start_bit / BITS_PER_UNIT);
2174 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2175 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2176 }
2177 DONE;
2178 }
2179 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2180 {
2181 bool use_bfi = TRUE;
2182
2183 if (CONST_INT_P (operands[3]))
2184 {
2185 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2186
2187 if (val == 0)
2188 {
2189 emit_insn (gen_insv_zero (operands[0], operands[1],
2190 operands[2]));
2191 DONE;
2192 }
2193
2194 /* See if the set can be done with a single orr instruction. */
2195 if (val == mask && const_ok_for_arm (val << start_bit))
2196 use_bfi = FALSE;
2197 }
2198
2199 if (use_bfi)
2200 {
2201 if (!REG_P (operands[3]))
2202 operands[3] = force_reg (SImode, operands[3]);
2203
2204 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2205 operands[3]));
2206 DONE;
2207 }
2208 }
2209 else
2210 FAIL;
2211 }
2212
2213 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
2214 FAIL;
2215
2216 target = copy_rtx (operands[0]);
2217 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2218 subreg as the final target. */
2219 if (GET_CODE (target) == SUBREG)
2220 {
2221 subtarget = gen_reg_rtx (SImode);
2222 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2223 < GET_MODE_SIZE (SImode))
2224 target = SUBREG_REG (target);
2225 }
2226 else
2227 subtarget = target;
2228
2229 if (CONST_INT_P (operands[3]))
2230 {
2231 /* Since we are inserting a known constant, we may be able to
2232 reduce the number of bits that we have to clear so that
2233 the mask becomes simple. */
2234 /* ??? This code does not check to see if the new mask is actually
2235 simpler. It may not be. */
2236 rtx op1 = gen_reg_rtx (SImode);
2237 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2238 start of this pattern. */
2239 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2240 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2241
2242 emit_insn (gen_andsi3 (op1, operands[0],
2243 gen_int_mode (~mask2, SImode)));
2244 emit_insn (gen_iorsi3 (subtarget, op1,
2245 gen_int_mode (op3_value << start_bit, SImode)));
2246 }
2247 else if (start_bit == 0
2248 && !(const_ok_for_arm (mask)
2249 || const_ok_for_arm (~mask)))
2250 {
2251 /* A trick: since we are setting the bottom bits in the word,
2252 we can shift operand[3] up, operand[0] down, OR them together
2253 and rotate the result back again. This takes 3 insns, and
2254 the third might be mergeable into another op. */
2255 /* The shift up copes with the possibility that operand[3] is
2256 wider than the bitfield. */
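/* Worked example (illustrative only): inserting a 12-bit value V into
bits [11:0] of W. op0 = V << 20 puts V in bits [31:20]; op1 = W >> 12
puts the kept bits of W in [19:0]; after the IOR, rotating left by 12
moves V back into [11:0] and W's old high bits back into [31:12]. */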
2257 rtx op0 = gen_reg_rtx (SImode);
2258 rtx op1 = gen_reg_rtx (SImode);
2259
2260 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2261 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2262 emit_insn (gen_iorsi3 (op1, op1, op0));
2263 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2264 }
2265 else if ((width + start_bit == 32)
2266 && !(const_ok_for_arm (mask)
2267 || const_ok_for_arm (~mask)))
2268 {
2269 /* Similar trick, but slightly less efficient. */
2270
2271 rtx op0 = gen_reg_rtx (SImode);
2272 rtx op1 = gen_reg_rtx (SImode);
2273
2274 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2275 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2276 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2277 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2278 }
2279 else
2280 {
2281 rtx op0 = gen_int_mode (mask, SImode);
2282 rtx op1 = gen_reg_rtx (SImode);
2283 rtx op2 = gen_reg_rtx (SImode);
2284
2285 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2286 {
2287 rtx tmp = gen_reg_rtx (SImode);
2288
2289 emit_insn (gen_movsi (tmp, op0));
2290 op0 = tmp;
2291 }
2292
2293 /* Mask out any bits in operand[3] that are not needed. */
2294 emit_insn (gen_andsi3 (op1, operands[3], op0));
2295
2296 if (CONST_INT_P (op0)
2297 && (const_ok_for_arm (mask << start_bit)
2298 || const_ok_for_arm (~(mask << start_bit))))
2299 {
2300 op0 = gen_int_mode (~(mask << start_bit), SImode);
2301 emit_insn (gen_andsi3 (op2, operands[0], op0));
2302 }
2303 else
2304 {
2305 if (CONST_INT_P (op0))
2306 {
2307 rtx tmp = gen_reg_rtx (SImode);
2308
2309 emit_insn (gen_movsi (tmp, op0));
2310 op0 = tmp;
2311 }
2312
2313 if (start_bit != 0)
2314 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2315
2316 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2317 }
2318
2319 if (start_bit != 0)
2320 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2321
2322 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2323 }
2324
2325 if (subtarget != target)
2326 {
2327 /* If TARGET is still a SUBREG, then it must be wider than a word,
2328 so we must be careful only to set the subword we were asked to. */
2329 if (GET_CODE (target) == SUBREG)
2330 emit_move_insn (target, subtarget);
2331 else
2332 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2333 }
2334
2335 DONE;
2336 }"
2337 )
2338
2339 (define_insn "insv_zero"
2340 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2341 (match_operand:SI 1 "const_int_M_operand" "M")
2342 (match_operand:SI 2 "const_int_M_operand" "M"))
2343 (const_int 0))]
2344 "arm_arch_thumb2"
2345 "bfc%?\t%0, %2, %1"
2346 [(set_attr "length" "4")
2347 (set_attr "predicable" "yes")
2348 (set_attr "type" "bfm")]
2349 )
2350
2351 (define_insn "insv_t2"
2352 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2353 (match_operand:SI 1 "const_int_M_operand" "M")
2354 (match_operand:SI 2 "const_int_M_operand" "M"))
2355 (match_operand:SI 3 "s_register_operand" "r"))]
2356 "arm_arch_thumb2"
2357 "bfi%?\t%0, %3, %2, %1"
2358 [(set_attr "length" "4")
2359 (set_attr "predicable" "yes")
2360 (set_attr "type" "bfm")]
2361 )
2362
2363 (define_insn "andsi_notsi_si"
2364 [(set (match_operand:SI 0 "s_register_operand" "=r")
2365 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2366 (match_operand:SI 1 "s_register_operand" "r")))]
2367 "TARGET_32BIT"
2368 "bic%?\\t%0, %1, %2"
2369 [(set_attr "predicable" "yes")
2370 (set_attr "type" "logic_reg")]
2371 )
2372
2373 (define_insn "andsi_not_shiftsi_si"
2374 [(set (match_operand:SI 0 "s_register_operand" "=r")
2375 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2376 [(match_operand:SI 2 "s_register_operand" "r")
2377 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2378 (match_operand:SI 1 "s_register_operand" "r")))]
2379 "TARGET_ARM"
2380 "bic%?\\t%0, %1, %2%S4"
2381 [(set_attr "predicable" "yes")
2382 (set_attr "shift" "2")
2383 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2384 (const_string "logic_shift_imm")
2385 (const_string "logic_shift_reg")))]
2386 )
2387
2388 ;; Shifted BICS pattern used to set up the CC status register without reusing
2389 ;; the BICS output. The pattern restricts the Thumb-2 shift operand to an
2390 ;; immediate, since BICS in Thumb-2 does not support a shift by register.
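;; For illustration, a test such as "if ((a & ~(b << 2)) == 0)" where the
;; masked value itself is not needed can match this pattern; the BICS result
;; goes to a scratch register and only the flags are used.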
2391 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
2392 [(set (reg:CC_NOOV CC_REGNUM)
2393 (compare:CC_NOOV
2394 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2395 [(match_operand:SI 1 "s_register_operand" "r")
2396 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2397 (match_operand:SI 3 "s_register_operand" "r"))
2398 (const_int 0)))
2399 (clobber (match_scratch:SI 4 "=r"))]
2400 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2401 "bics%?\\t%4, %3, %1%S0"
2402 [(set_attr "predicable" "yes")
2403 (set_attr "conds" "set")
2404 (set_attr "shift" "1")
2405 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2406 (const_string "logic_shift_imm")
2407 (const_string "logic_shift_reg")))]
2408 )
2409
2410 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the BICS result is also
2411 ;; reused later.
2412 (define_insn "andsi_not_shiftsi_si_scc"
2413 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2414 (compare:CC_NOOV
2415 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2416 [(match_operand:SI 1 "s_register_operand" "r")
2417 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2418 (match_operand:SI 3 "s_register_operand" "r"))
2419 (const_int 0)))
2420 (set (match_operand:SI 4 "s_register_operand" "=r")
2421 (and:SI (not:SI (match_op_dup 0
2422 [(match_dup 1)
2423 (match_dup 2)]))
2424 (match_dup 3)))])]
2425 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2426 "bics%?\\t%4, %3, %1%S0"
2427 [(set_attr "predicable" "yes")
2428 (set_attr "conds" "set")
2429 (set_attr "shift" "1")
2430 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2431 (const_string "logic_shift_imm")
2432 (const_string "logic_shift_reg")))]
2433 )
2434
2435 (define_insn "*andsi_notsi_si_compare0"
2436 [(set (reg:CC_NOOV CC_REGNUM)
2437 (compare:CC_NOOV
2438 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2439 (match_operand:SI 1 "s_register_operand" "r"))
2440 (const_int 0)))
2441 (set (match_operand:SI 0 "s_register_operand" "=r")
2442 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2443 "TARGET_32BIT"
2444 "bics\\t%0, %1, %2"
2445 [(set_attr "conds" "set")
2446 (set_attr "type" "logics_shift_reg")]
2447 )
2448
2449 (define_insn "*andsi_notsi_si_compare0_scratch"
2450 [(set (reg:CC_NOOV CC_REGNUM)
2451 (compare:CC_NOOV
2452 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2453 (match_operand:SI 1 "s_register_operand" "r"))
2454 (const_int 0)))
2455 (clobber (match_scratch:SI 0 "=r"))]
2456 "TARGET_32BIT"
2457 "bics\\t%0, %1, %2"
2458 [(set_attr "conds" "set")
2459 (set_attr "type" "logics_shift_reg")]
2460 )
2461
2462 (define_expand "iorsi3"
2463 [(set (match_operand:SI 0 "s_register_operand")
2464 (ior:SI (match_operand:SI 1 "s_register_operand")
2465 (match_operand:SI 2 "reg_or_int_operand")))]
2466 "TARGET_EITHER"
2467 "
2468 if (CONST_INT_P (operands[2]))
2469 {
2470 if (TARGET_32BIT)
2471 {
2472 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
2473 operands[2] = force_reg (SImode, operands[2]);
2474 else
2475 {
2476 arm_split_constant (IOR, SImode, NULL_RTX,
2477 INTVAL (operands[2]), operands[0],
2478 operands[1],
2479 optimize && can_create_pseudo_p ());
2480 DONE;
2481 }
2482 }
2483 else /* TARGET_THUMB1 */
2484 {
2485 rtx tmp = force_reg (SImode, operands[2]);
2486 if (rtx_equal_p (operands[0], operands[1]))
2487 operands[2] = tmp;
2488 else
2489 {
2490 operands[2] = operands[1];
2491 operands[1] = tmp;
2492 }
2493 }
2494 }
2495 "
2496 )
2497
2498 (define_insn_and_split "*iorsi3_insn"
2499 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2500 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2501 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2502 "TARGET_32BIT"
2503 "@
2504 orr%?\\t%0, %1, %2
2505 orr%?\\t%0, %1, %2
2506 orn%?\\t%0, %1, #%B2
2507 orr%?\\t%0, %1, %2
2508 #"
2509 "TARGET_32BIT
2510 && CONST_INT_P (operands[2])
2511 && !(const_ok_for_arm (INTVAL (operands[2]))
2512 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2513 [(clobber (const_int 0))]
2514 {
2515 arm_split_constant (IOR, SImode, curr_insn,
2516 INTVAL (operands[2]), operands[0], operands[1], 0);
2517 DONE;
2518 }
2519 [(set_attr "length" "4,4,4,4,16")
2520 (set_attr "arch" "32,t2,t2,32,32")
2521 (set_attr "predicable" "yes")
2522 (set_attr "predicable_short_it" "no,yes,no,no,no")
2523 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
2524 )
2525
2526 (define_peephole2
2527 [(match_scratch:SI 3 "r")
2528 (set (match_operand:SI 0 "arm_general_register_operand" "")
2529 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2530 (match_operand:SI 2 "const_int_operand" "")))]
2531 "TARGET_ARM
2532 && !const_ok_for_arm (INTVAL (operands[2]))
2533 && const_ok_for_arm (~INTVAL (operands[2]))"
2534 [(set (match_dup 3) (match_dup 2))
2535 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2536 ""
2537 )
2538
2539 (define_insn "*iorsi3_compare0"
2540 [(set (reg:CC_NOOV CC_REGNUM)
2541 (compare:CC_NOOV
2542 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2543 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2544 (const_int 0)))
2545 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
2546 (ior:SI (match_dup 1) (match_dup 2)))]
2547 "TARGET_32BIT"
2548 "orrs%?\\t%0, %1, %2"
2549 [(set_attr "conds" "set")
2550 (set_attr "arch" "*,t2,*")
2551 (set_attr "length" "4,2,4")
2552 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
2553 )
2554
2555 (define_insn "*iorsi3_compare0_scratch"
2556 [(set (reg:CC_NOOV CC_REGNUM)
2557 (compare:CC_NOOV
2558 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2559 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2560 (const_int 0)))
2561 (clobber (match_scratch:SI 0 "=r,l,r"))]
2562 "TARGET_32BIT"
2563 "orrs%?\\t%0, %1, %2"
2564 [(set_attr "conds" "set")
2565 (set_attr "arch" "*,t2,*")
2566 (set_attr "length" "4,2,4")
2567 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
2568 )
2569
2570 (define_expand "xorsi3"
2571 [(set (match_operand:SI 0 "s_register_operand")
2572 (xor:SI (match_operand:SI 1 "s_register_operand")
2573 (match_operand:SI 2 "reg_or_int_operand")))]
2574 "TARGET_EITHER"
2575 "if (CONST_INT_P (operands[2]))
2576 {
2577 if (TARGET_32BIT)
2578 {
2579 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
2580 operands[2] = force_reg (SImode, operands[2]);
2581 else
2582 {
2583 arm_split_constant (XOR, SImode, NULL_RTX,
2584 INTVAL (operands[2]), operands[0],
2585 operands[1],
2586 optimize && can_create_pseudo_p ());
2587 DONE;
2588 }
2589 }
2590 else /* TARGET_THUMB1 */
2591 {
2592 rtx tmp = force_reg (SImode, operands[2]);
2593 if (rtx_equal_p (operands[0], operands[1]))
2594 operands[2] = tmp;
2595 else
2596 {
2597 operands[2] = operands[1];
2598 operands[1] = tmp;
2599 }
2600 }
2601 }"
2602 )
2603
2604 (define_insn_and_split "*arm_xorsi3"
2605 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
2606 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
2607 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
2608 "TARGET_32BIT"
2609 "@
2610 eor%?\\t%0, %1, %2
2611 eor%?\\t%0, %1, %2
2612 eor%?\\t%0, %1, %2
2613 #"
2614 "TARGET_32BIT
2615 && CONST_INT_P (operands[2])
2616 && !const_ok_for_arm (INTVAL (operands[2]))"
2617 [(clobber (const_int 0))]
2618 {
2619 arm_split_constant (XOR, SImode, curr_insn,
2620 INTVAL (operands[2]), operands[0], operands[1], 0);
2621 DONE;
2622 }
2623 [(set_attr "length" "4,4,4,16")
2624 (set_attr "predicable" "yes")
2625 (set_attr "predicable_short_it" "no,yes,no,no")
2626 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
2627 )
2628
2629 (define_insn "*xorsi3_compare0"
2630 [(set (reg:CC_NOOV CC_REGNUM)
2631 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
2632 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
2633 (const_int 0)))
2634 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2635 (xor:SI (match_dup 1) (match_dup 2)))]
2636 "TARGET_32BIT"
2637 "eors%?\\t%0, %1, %2"
2638 [(set_attr "conds" "set")
2639 (set_attr "type" "logics_imm,logics_reg")]
2640 )
2641
2642 (define_insn "*xorsi3_compare0_scratch"
2643 [(set (reg:CC_NOOV CC_REGNUM)
2644 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
2645 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
2646 (const_int 0)))]
2647 "TARGET_32BIT"
2648 "teq%?\\t%0, %1"
2649 [(set_attr "conds" "set")
2650 (set_attr "type" "logics_imm,logics_reg")]
2651 )
2652
2653 ; By rewriting (IOR (AND (NOT A) (NOT B)) C) as (NOT D), where
2654 ; D = AND (IOR A B) (NOT C), we can sometimes merge the final NOT into one
2655 ; of the following insns.
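; For example, with a, b and c in registers, "(~a & ~b) | c" can be computed
; as ORR tmp, a, b; BIC tmp, tmp, c; MVN dst, tmp, and the trailing MVN can
; often be folded into the instruction that consumes dst (e.g. an AND of the
; result becomes a BIC).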
2656
2657 (define_split
2658 [(set (match_operand:SI 0 "s_register_operand" "")
2659 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2660 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2661 (match_operand:SI 3 "arm_rhs_operand" "")))
2662 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2663 "TARGET_32BIT"
2664 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2665 (not:SI (match_dup 3))))
2666 (set (match_dup 0) (not:SI (match_dup 4)))]
2667 ""
2668 )
2669
2670 (define_insn_and_split "*andsi_iorsi3_notsi"
2671 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2672 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
2673 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2674 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2675 "TARGET_32BIT"
2676 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2677 "&& reload_completed"
2678 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
2679 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
2680 {
2681 /* If operands[3] is a constant make sure to fold the NOT into it
2682 to avoid creating a NOT of a CONST_INT. */
2683 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
2684 if (CONST_INT_P (not_rtx))
2685 {
2686 operands[4] = operands[0];
2687 operands[5] = not_rtx;
2688 }
2689 else
2690 {
2691 operands[5] = operands[0];
2692 operands[4] = not_rtx;
2693 }
2694 }
2695 [(set_attr "length" "8")
2696 (set_attr "ce_count" "2")
2697 (set_attr "predicable" "yes")
2698 (set_attr "type" "multiple")]
2699 )
2700
2701 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2702 ; insns are available?
2703 (define_split
2704 [(set (match_operand:SI 0 "s_register_operand" "")
2705 (match_operator:SI 1 "logical_binary_operator"
2706 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2707 (match_operand:SI 3 "const_int_operand" "")
2708 (match_operand:SI 4 "const_int_operand" ""))
2709 (match_operator:SI 9 "logical_binary_operator"
2710 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2711 (match_operand:SI 6 "const_int_operand" ""))
2712 (match_operand:SI 7 "s_register_operand" "")])]))
2713 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2714 "TARGET_32BIT
2715 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2716 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2717 [(set (match_dup 8)
2718 (match_op_dup 1
2719 [(ashift:SI (match_dup 2) (match_dup 4))
2720 (match_dup 5)]))
2721 (set (match_dup 0)
2722 (match_op_dup 1
2723 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2724 (match_dup 7)]))]
2725 "
2726 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2727 ")
2728
2729 (define_split
2730 [(set (match_operand:SI 0 "s_register_operand" "")
2731 (match_operator:SI 1 "logical_binary_operator"
2732 [(match_operator:SI 9 "logical_binary_operator"
2733 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2734 (match_operand:SI 6 "const_int_operand" ""))
2735 (match_operand:SI 7 "s_register_operand" "")])
2736 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2737 (match_operand:SI 3 "const_int_operand" "")
2738 (match_operand:SI 4 "const_int_operand" ""))]))
2739 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2740 "TARGET_32BIT
2741 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2742 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2743 [(set (match_dup 8)
2744 (match_op_dup 1
2745 [(ashift:SI (match_dup 2) (match_dup 4))
2746 (match_dup 5)]))
2747 (set (match_dup 0)
2748 (match_op_dup 1
2749 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2750 (match_dup 7)]))]
2751 "
2752 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2753 ")
2754
2755 (define_split
2756 [(set (match_operand:SI 0 "s_register_operand" "")
2757 (match_operator:SI 1 "logical_binary_operator"
2758 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2759 (match_operand:SI 3 "const_int_operand" "")
2760 (match_operand:SI 4 "const_int_operand" ""))
2761 (match_operator:SI 9 "logical_binary_operator"
2762 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2763 (match_operand:SI 6 "const_int_operand" ""))
2764 (match_operand:SI 7 "s_register_operand" "")])]))
2765 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2766 "TARGET_32BIT
2767 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2768 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2769 [(set (match_dup 8)
2770 (match_op_dup 1
2771 [(ashift:SI (match_dup 2) (match_dup 4))
2772 (match_dup 5)]))
2773 (set (match_dup 0)
2774 (match_op_dup 1
2775 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2776 (match_dup 7)]))]
2777 "
2778 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2779 ")
2780
2781 (define_split
2782 [(set (match_operand:SI 0 "s_register_operand" "")
2783 (match_operator:SI 1 "logical_binary_operator"
2784 [(match_operator:SI 9 "logical_binary_operator"
2785 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2786 (match_operand:SI 6 "const_int_operand" ""))
2787 (match_operand:SI 7 "s_register_operand" "")])
2788 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2789 (match_operand:SI 3 "const_int_operand" "")
2790 (match_operand:SI 4 "const_int_operand" ""))]))
2791 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2792 "TARGET_32BIT
2793 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2794 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2795 [(set (match_dup 8)
2796 (match_op_dup 1
2797 [(ashift:SI (match_dup 2) (match_dup 4))
2798 (match_dup 5)]))
2799 (set (match_dup 0)
2800 (match_op_dup 1
2801 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2802 (match_dup 7)]))]
2803 "
2804 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2805 ")
2806 \f
2807
2808 ;; Minimum and maximum insns
2809
2810 (define_expand "smaxsi3"
2811 [(parallel [
2812 (set (match_operand:SI 0 "s_register_operand")
2813 (smax:SI (match_operand:SI 1 "s_register_operand")
2814 (match_operand:SI 2 "arm_rhs_operand")))
2815 (clobber (reg:CC CC_REGNUM))])]
2816 "TARGET_32BIT"
2817 "
2818 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2819 {
2820 /* No need for a clobber of the condition code register here. */
2821 emit_insn (gen_rtx_SET (operands[0],
2822 gen_rtx_SMAX (SImode, operands[1],
2823 operands[2])));
2824 DONE;
2825 }
2826 ")
2827
2828 (define_insn "*smax_0"
2829 [(set (match_operand:SI 0 "s_register_operand" "=r")
2830 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2831 (const_int 0)))]
2832 "TARGET_32BIT"
2833 "bic%?\\t%0, %1, %1, asr #31"
2834 [(set_attr "predicable" "yes")
2835 (set_attr "type" "logic_shift_reg")]
2836 )
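;; The BIC form above works because "%1, asr #31" is all ones when %1 is
;; negative and zero otherwise; clearing those bits yields 0 for negative
;; inputs and %1 unchanged for non-negative inputs, i.e. smax (%1, 0).
;; *smax_m1 and *smin_0 below use the same idea with ORR and AND.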
2837
2838 (define_insn "*smax_m1"
2839 [(set (match_operand:SI 0 "s_register_operand" "=r")
2840 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2841 (const_int -1)))]
2842 "TARGET_32BIT"
2843 "orr%?\\t%0, %1, %1, asr #31"
2844 [(set_attr "predicable" "yes")
2845 (set_attr "type" "logic_shift_reg")]
2846 )
2847
2848 (define_insn_and_split "*arm_smax_insn"
2849 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2850 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2851 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2852 (clobber (reg:CC CC_REGNUM))]
2853 "TARGET_ARM"
2854 "#"
2855 ; cmp\\t%1, %2\;movlt\\t%0, %2
2856 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
2857 "TARGET_ARM"
2858 [(set (reg:CC CC_REGNUM)
2859 (compare:CC (match_dup 1) (match_dup 2)))
2860 (set (match_dup 0)
2861 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
2862 (match_dup 1)
2863 (match_dup 2)))]
2864 ""
2865 [(set_attr "conds" "clob")
2866 (set_attr "length" "8,12")
2867 (set_attr "type" "multiple")]
2868 )
2869
2870 (define_expand "sminsi3"
2871 [(parallel [
2872 (set (match_operand:SI 0 "s_register_operand")
2873 (smin:SI (match_operand:SI 1 "s_register_operand")
2874 (match_operand:SI 2 "arm_rhs_operand")))
2875 (clobber (reg:CC CC_REGNUM))])]
2876 "TARGET_32BIT"
2877 "
2878 if (operands[2] == const0_rtx)
2879 {
2880 /* No need for a clobber of the condition code register here. */
2881 emit_insn (gen_rtx_SET (operands[0],
2882 gen_rtx_SMIN (SImode, operands[1],
2883 operands[2])));
2884 DONE;
2885 }
2886 ")
2887
2888 (define_insn "*smin_0"
2889 [(set (match_operand:SI 0 "s_register_operand" "=r")
2890 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
2891 (const_int 0)))]
2892 "TARGET_32BIT"
2893 "and%?\\t%0, %1, %1, asr #31"
2894 [(set_attr "predicable" "yes")
2895 (set_attr "type" "logic_shift_reg")]
2896 )
2897
2898 (define_insn_and_split "*arm_smin_insn"
2899 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2900 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2901 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2902 (clobber (reg:CC CC_REGNUM))]
2903 "TARGET_ARM"
2904 "#"
2905 ; cmp\\t%1, %2\;movge\\t%0, %2
2906 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
2907 "TARGET_ARM"
2908 [(set (reg:CC CC_REGNUM)
2909 (compare:CC (match_dup 1) (match_dup 2)))
2910 (set (match_dup 0)
2911 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
2912 (match_dup 1)
2913 (match_dup 2)))]
2914 ""
2915 [(set_attr "conds" "clob")
2916 (set_attr "length" "8,12")
2917 (set_attr "type" "multiple,multiple")]
2918 )
2919
2920 (define_expand "umaxsi3"
2921 [(parallel [
2922 (set (match_operand:SI 0 "s_register_operand")
2923 (umax:SI (match_operand:SI 1 "s_register_operand")
2924 (match_operand:SI 2 "arm_rhs_operand")))
2925 (clobber (reg:CC CC_REGNUM))])]
2926 "TARGET_32BIT"
2927 ""
2928 )
2929
2930 (define_insn_and_split "*arm_umaxsi3"
2931 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2932 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
2933 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
2934 (clobber (reg:CC CC_REGNUM))]
2935 "TARGET_ARM"
2936 "#"
2937 ; cmp\\t%1, %2\;movcc\\t%0, %2
2938 ; cmp\\t%1, %2\;movcs\\t%0, %1
2939 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
2940 "TARGET_ARM"
2941 [(set (reg:CC CC_REGNUM)
2942 (compare:CC (match_dup 1) (match_dup 2)))
2943 (set (match_dup 0)
2944 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
2945 (match_dup 1)
2946 (match_dup 2)))]
2947 ""
2948 [(set_attr "conds" "clob")
2949 (set_attr "length" "8,8,12")
2950 (set_attr "type" "store_4")]
2951 )
2952
2953 (define_expand "uminsi3"
2954 [(parallel [
2955 (set (match_operand:SI 0 "s_register_operand")
2956 (umin:SI (match_operand:SI 1 "s_register_operand")
2957 (match_operand:SI 2 "arm_rhs_operand")))
2958 (clobber (reg:CC CC_REGNUM))])]
2959 "TARGET_32BIT"
2960 ""
2961 )
2962
2963 (define_insn_and_split "*arm_uminsi3"
2964 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2965 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
2966 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
2967 (clobber (reg:CC CC_REGNUM))]
2968 "TARGET_ARM"
2969 "#"
2970 ; cmp\\t%1, %2\;movcs\\t%0, %2
2971 ; cmp\\t%1, %2\;movcc\\t%0, %1
2972 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
2973 "TARGET_ARM"
2974 [(set (reg:CC CC_REGNUM)
2975 (compare:CC (match_dup 1) (match_dup 2)))
2976 (set (match_dup 0)
2977 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
2978 (match_dup 1)
2979 (match_dup 2)))]
2980 ""
2981 [(set_attr "conds" "clob")
2982 (set_attr "length" "8,8,12")
2983 (set_attr "type" "store_4")]
2984 )
2985
2986 (define_insn "*store_minmaxsi"
2987 [(set (match_operand:SI 0 "memory_operand" "=m")
2988 (match_operator:SI 3 "minmax_operator"
2989 [(match_operand:SI 1 "s_register_operand" "r")
2990 (match_operand:SI 2 "s_register_operand" "r")]))
2991 (clobber (reg:CC CC_REGNUM))]
2992 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
2993 "*
2994 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
2995 operands[1], operands[2]);
2996 output_asm_insn (\"cmp\\t%1, %2\", operands);
2997 if (TARGET_THUMB2)
2998 output_asm_insn (\"ite\t%d3\", operands);
2999 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3000 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3001 return \"\";
3002 "
3003 [(set_attr "conds" "clob")
3004 (set (attr "length")
3005 (if_then_else (eq_attr "is_thumb" "yes")
3006 (const_int 14)
3007 (const_int 12)))
3008 (set_attr "type" "store_4")]
3009 )
3010
3011 ; Reject the frame pointer in operand[1], since reloading this after
3012 ; it has been eliminated can cause carnage.
3013 (define_insn "*minmax_arithsi"
3014 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3015 (match_operator:SI 4 "shiftable_operator"
3016 [(match_operator:SI 5 "minmax_operator"
3017 [(match_operand:SI 2 "s_register_operand" "r,r")
3018 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3019 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3020 (clobber (reg:CC CC_REGNUM))]
3021 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
3022 "*
3023 {
3024 enum rtx_code code = GET_CODE (operands[4]);
3025 bool need_else;
3026
3027 if (which_alternative != 0 || operands[3] != const0_rtx
3028 || (code != PLUS && code != IOR && code != XOR))
3029 need_else = true;
3030 else
3031 need_else = false;
3032
3033 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3034 operands[2], operands[3]);
3035 output_asm_insn (\"cmp\\t%2, %3\", operands);
3036 if (TARGET_THUMB2)
3037 {
3038 if (need_else)
3039 output_asm_insn (\"ite\\t%d5\", operands);
3040 else
3041 output_asm_insn (\"it\\t%d5\", operands);
3042 }
3043 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3044 if (need_else)
3045 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3046 return \"\";
3047 }"
3048 [(set_attr "conds" "clob")
3049 (set (attr "length")
3050 (if_then_else (eq_attr "is_thumb" "yes")
3051 (const_int 14)
3052 (const_int 12)))
3053 (set_attr "type" "multiple")]
3054 )
3055
3056 ; Reject the frame pointer in operand[1], since reloading this after
3057 ; it has been eliminated can cause carnage.
3058 (define_insn_and_split "*minmax_arithsi_non_canon"
3059 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
3060 (minus:SI
3061 (match_operand:SI 1 "s_register_operand" "0,?Ts")
3062 (match_operator:SI 4 "minmax_operator"
3063 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
3064 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
3065 (clobber (reg:CC CC_REGNUM))]
3066 "TARGET_32BIT && !arm_eliminable_register (operands[1])
3067 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
3068 "#"
3069 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
3070 [(set (reg:CC CC_REGNUM)
3071 (compare:CC (match_dup 2) (match_dup 3)))
3072
3073 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
3074 (set (match_dup 0)
3075 (minus:SI (match_dup 1)
3076 (match_dup 2))))
3077 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
3078 (set (match_dup 0)
3079 (match_dup 6)))]
3080 {
3081 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
3082 operands[2], operands[3]);
3083 enum rtx_code rc = minmax_code (operands[4]);
3084 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
3085 operands[2], operands[3]);
3086
3087 if (mode == CCFPmode || mode == CCFPEmode)
3088 rc = reverse_condition_maybe_unordered (rc);
3089 else
3090 rc = reverse_condition (rc);
3091 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
3092 if (CONST_INT_P (operands[3]))
3093 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
3094 else
3095 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
3096 }
3097 [(set_attr "conds" "clob")
3098 (set (attr "length")
3099 (if_then_else (eq_attr "is_thumb" "yes")
3100 (const_int 14)
3101 (const_int 12)))
3102 (set_attr "type" "multiple")]
3103 )
3104
3105 (define_code_iterator SAT [smin smax])
3106 (define_code_attr SATrev [(smin "smax") (smax "smin")])
3107 (define_code_attr SATlo [(smin "1") (smax "2")])
3108 (define_code_attr SAThi [(smin "2") (smax "1")])
3109
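;; For illustration: a clamp written as smin (smax (x, -128), 127) can be
;; matched here and emitted as "ssat r0, #8, r1", and the unsigned form
;; smax (smin (x, 255), 0) as "usat r0, #8, r1" (register numbers are only
;; an example; arm_sat_operator_match derives the bit width from the bounds).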
3110 (define_insn "*satsi_<SAT:code>"
3111 [(set (match_operand:SI 0 "s_register_operand" "=r")
3112 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
3113 (match_operand:SI 1 "const_int_operand" "i"))
3114 (match_operand:SI 2 "const_int_operand" "i")))]
3115 "TARGET_32BIT && arm_arch6
3116 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3117 {
3118 int mask;
3119 bool signed_sat;
3120 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3121 &mask, &signed_sat))
3122 gcc_unreachable ();
3123
3124 operands[1] = GEN_INT (mask);
3125 if (signed_sat)
3126 return "ssat%?\t%0, %1, %3";
3127 else
3128 return "usat%?\t%0, %1, %3";
3129 }
3130 [(set_attr "predicable" "yes")
3131 (set_attr "type" "alus_imm")]
3132 )
3133
3134 (define_insn "*satsi_<SAT:code>_shift"
3135 [(set (match_operand:SI 0 "s_register_operand" "=r")
3136 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
3137 [(match_operand:SI 4 "s_register_operand" "r")
3138 (match_operand:SI 5 "const_int_operand" "i")])
3139 (match_operand:SI 1 "const_int_operand" "i"))
3140 (match_operand:SI 2 "const_int_operand" "i")))]
3141 "TARGET_32BIT && arm_arch6
3142 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3143 {
3144 int mask;
3145 bool signed_sat;
3146 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3147 &mask, &signed_sat))
3148 gcc_unreachable ();
3149
3150 operands[1] = GEN_INT (mask);
3151 if (signed_sat)
3152 return "ssat%?\t%0, %1, %4%S3";
3153 else
3154 return "usat%?\t%0, %1, %4%S3";
3155 }
3156 [(set_attr "predicable" "yes")
3157 (set_attr "shift" "3")
3158 (set_attr "type" "logic_shift_reg")])
3159 \f
3160 ;; Shift and rotation insns
3161
3162 (define_expand "ashldi3"
3163 [(set (match_operand:DI 0 "s_register_operand")
3164 (ashift:DI (match_operand:DI 1 "s_register_operand")
3165 (match_operand:SI 2 "reg_or_int_operand")))]
3166 "TARGET_32BIT"
3167 "
3168 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
3169 operands[2], gen_reg_rtx (SImode),
3170 gen_reg_rtx (SImode));
3171 DONE;
3172 ")
3173
3174 (define_expand "ashlsi3"
3175 [(set (match_operand:SI 0 "s_register_operand")
3176 (ashift:SI (match_operand:SI 1 "s_register_operand")
3177 (match_operand:SI 2 "arm_rhs_operand")))]
3178 "TARGET_EITHER"
3179 "
3180 if (CONST_INT_P (operands[2])
3181 && (UINTVAL (operands[2])) > 31)
3182 {
3183 emit_insn (gen_movsi (operands[0], const0_rtx));
3184 DONE;
3185 }
3186 "
3187 )
3188
3189 (define_expand "ashrdi3"
3190 [(set (match_operand:DI 0 "s_register_operand")
3191 (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
3192 (match_operand:SI 2 "reg_or_int_operand")))]
3193 "TARGET_32BIT"
3194 "
3195 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
3196 operands[2], gen_reg_rtx (SImode),
3197 gen_reg_rtx (SImode));
3198 DONE;
3199 ")
3200
3201 (define_expand "ashrsi3"
3202 [(set (match_operand:SI 0 "s_register_operand")
3203 (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
3204 (match_operand:SI 2 "arm_rhs_operand")))]
3205 "TARGET_EITHER"
3206 "
3207 if (CONST_INT_P (operands[2])
3208 && UINTVAL (operands[2]) > 31)
3209 operands[2] = GEN_INT (31);
3210 "
3211 )
3212
3213 (define_expand "lshrdi3"
3214 [(set (match_operand:DI 0 "s_register_operand")
3215 (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
3216 (match_operand:SI 2 "reg_or_int_operand")))]
3217 "TARGET_32BIT"
3218 "
3219 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
3220 operands[2], gen_reg_rtx (SImode),
3221 gen_reg_rtx (SImode));
3222 DONE;
3223 ")
3224
3225 (define_expand "lshrsi3"
3226 [(set (match_operand:SI 0 "s_register_operand")
3227 (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
3228 (match_operand:SI 2 "arm_rhs_operand")))]
3229 "TARGET_EITHER"
3230 "
3231 if (CONST_INT_P (operands[2])
3232 && (UINTVAL (operands[2])) > 31)
3233 {
3234 emit_insn (gen_movsi (operands[0], const0_rtx));
3235 DONE;
3236 }
3237 "
3238 )
3239
3240 (define_expand "rotlsi3"
3241 [(set (match_operand:SI 0 "s_register_operand")
3242 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3243 (match_operand:SI 2 "reg_or_int_operand")))]
3244 "TARGET_32BIT"
3245 "
3246 if (CONST_INT_P (operands[2]))
3247 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3248 else
3249 {
3250 rtx reg = gen_reg_rtx (SImode);
3251 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3252 operands[2] = reg;
3253 }
3254 "
3255 )
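;; ARM only has a rotate-right operation, so the expander above maps a rotate
;; left by N onto a rotate right by (32 - N) % 32; for example a rotate left
;; by 8 becomes a ROR by 24, and for a variable count the amount 32 - N is
;; first computed into a scratch register.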
3256
3257 (define_expand "rotrsi3"
3258 [(set (match_operand:SI 0 "s_register_operand")
3259 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3260 (match_operand:SI 2 "arm_rhs_operand")))]
3261 "TARGET_EITHER"
3262 "
3263 if (TARGET_32BIT)
3264 {
3265 if (CONST_INT_P (operands[2])
3266 && UINTVAL (operands[2]) > 31)
3267 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3268 }
3269 else /* TARGET_THUMB1 */
3270 {
3271 if (CONST_INT_P (operands [2]))
3272 operands [2] = force_reg (SImode, operands[2]);
3273 }
3274 "
3275 )
3276
3277 (define_insn "*arm_shiftsi3"
3278 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
3279 (match_operator:SI 3 "shift_operator"
3280 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
3281 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
3282 "TARGET_32BIT"
3283 "* return arm_output_shift(operands, 0);"
3284 [(set_attr "predicable" "yes")
3285 (set_attr "arch" "t2,t2,*,*")
3286 (set_attr "predicable_short_it" "yes,yes,no,no")
3287 (set_attr "length" "4")
3288 (set_attr "shift" "1")
3289 (set_attr "type" "alu_shift_reg,alu_shift_imm,alu_shift_imm,alu_shift_reg")]
3290 )
3291
3292 (define_insn "*shiftsi3_compare0"
3293 [(set (reg:CC_NOOV CC_REGNUM)
3294 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3295 [(match_operand:SI 1 "s_register_operand" "r,r")
3296 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3297 (const_int 0)))
3298 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3299 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3300 "TARGET_32BIT"
3301 "* return arm_output_shift(operands, 1);"
3302 [(set_attr "conds" "set")
3303 (set_attr "shift" "1")
3304 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
3305 )
3306
3307 (define_insn "*shiftsi3_compare0_scratch"
3308 [(set (reg:CC_NOOV CC_REGNUM)
3309 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3310 [(match_operand:SI 1 "s_register_operand" "r,r")
3311 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3312 (const_int 0)))
3313 (clobber (match_scratch:SI 0 "=r,r"))]
3314 "TARGET_32BIT"
3315 "* return arm_output_shift(operands, 1);"
3316 [(set_attr "conds" "set")
3317 (set_attr "shift" "1")
3318 (set_attr "type" "shift_imm,shift_reg")]
3319 )
3320
3321 (define_insn "*not_shiftsi"
3322 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3323 (not:SI (match_operator:SI 3 "shift_operator"
3324 [(match_operand:SI 1 "s_register_operand" "r,r")
3325 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3326 "TARGET_32BIT"
3327 "mvn%?\\t%0, %1%S3"
3328 [(set_attr "predicable" "yes")
3329 (set_attr "shift" "1")
3330 (set_attr "arch" "32,a")
3331 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3332
3333 (define_insn "*not_shiftsi_compare0"
3334 [(set (reg:CC_NOOV CC_REGNUM)
3335 (compare:CC_NOOV
3336 (not:SI (match_operator:SI 3 "shift_operator"
3337 [(match_operand:SI 1 "s_register_operand" "r,r")
3338 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3339 (const_int 0)))
3340 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3341 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3342 "TARGET_32BIT"
3343 "mvns%?\\t%0, %1%S3"
3344 [(set_attr "conds" "set")
3345 (set_attr "shift" "1")
3346 (set_attr "arch" "32,a")
3347 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3348
3349 (define_insn "*not_shiftsi_compare0_scratch"
3350 [(set (reg:CC_NOOV CC_REGNUM)
3351 (compare:CC_NOOV
3352 (not:SI (match_operator:SI 3 "shift_operator"
3353 [(match_operand:SI 1 "s_register_operand" "r,r")
3354 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3355 (const_int 0)))
3356 (clobber (match_scratch:SI 0 "=r,r"))]
3357 "TARGET_32BIT"
3358 "mvns%?\\t%0, %1%S3"
3359 [(set_attr "conds" "set")
3360 (set_attr "shift" "1")
3361 (set_attr "arch" "32,a")
3362 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3363
3364 ;; We don't really have extzv, but defining this using shifts helps
3365 ;; to reduce register pressure later on.
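;; As an illustrative sketch only (not a sequence copied from compiler output):
;; extracting an 8-bit field starting at bit 4, without ubfx, amounts to
;;      lsls    r0, r1, #20     @ 32 - width - bitpos
;;      lsrs    r0, r0, #24     @ 32 - width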
3366
3367 (define_expand "extzv"
3368 [(set (match_operand 0 "s_register_operand")
3369 (zero_extract (match_operand 1 "nonimmediate_operand")
3370 (match_operand 2 "const_int_operand")
3371 (match_operand 3 "const_int_operand")))]
3372 "TARGET_THUMB1 || arm_arch_thumb2"
3373 "
3374 {
3375 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3376 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3377
3378 if (arm_arch_thumb2)
3379 {
3380 HOST_WIDE_INT width = INTVAL (operands[2]);
3381 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3382
3383 if (unaligned_access && MEM_P (operands[1])
3384 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
3385 {
3386 rtx base_addr;
3387
3388 if (BYTES_BIG_ENDIAN)
3389 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
3390 - bitpos;
3391
3392 if (width == 32)
3393 {
3394 base_addr = adjust_address (operands[1], SImode,
3395 bitpos / BITS_PER_UNIT);
3396 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3397 }
3398 else
3399 {
3400 rtx dest = operands[0];
3401 rtx tmp = gen_reg_rtx (SImode);
3402
3403 /* We may get a paradoxical subreg here. Strip it off. */
3404 if (GET_CODE (dest) == SUBREG
3405 && GET_MODE (dest) == SImode
3406 && GET_MODE (SUBREG_REG (dest)) == HImode)
3407 dest = SUBREG_REG (dest);
3408
3409 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3410 FAIL;
3411
3412 base_addr = adjust_address (operands[1], HImode,
3413 bitpos / BITS_PER_UNIT);
3414 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
3415 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3416 }
3417 DONE;
3418 }
3419 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
3420 {
3421 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3422 operands[3]));
3423 DONE;
3424 }
3425 else
3426 FAIL;
3427 }
3428
3429 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3430 FAIL;
3431
3432 operands[3] = GEN_INT (rshift);
3433
3434 if (lshift == 0)
3435 {
3436 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3437 DONE;
3438 }
3439
3440 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
3441 operands[3], gen_reg_rtx (SImode)));
3442 DONE;
3443 }"
3444 )
3445
3446 ;; Helper for extzv, for the Thumb-1 register-shifts case.
3447
3448 (define_expand "extzv_t1"
3449 [(set (match_operand:SI 4 "s_register_operand")
3450 (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
3451 (match_operand:SI 2 "const_int_operand")))
3452 (set (match_operand:SI 0 "s_register_operand")
3453 (lshiftrt:SI (match_dup 4)
3454 (match_operand:SI 3 "const_int_operand")))]
3455 "TARGET_THUMB1"
3456 "")
3457
3458 (define_expand "extv"
3459 [(set (match_operand 0 "s_register_operand")
3460 (sign_extract (match_operand 1 "nonimmediate_operand")
3461 (match_operand 2 "const_int_operand")
3462 (match_operand 3 "const_int_operand")))]
3463 "arm_arch_thumb2"
3464 {
3465 HOST_WIDE_INT width = INTVAL (operands[2]);
3466 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3467
3468 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
3469 && (bitpos % BITS_PER_UNIT) == 0)
3470 {
3471 rtx base_addr;
3472
3473 if (BYTES_BIG_ENDIAN)
3474 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
3475
3476 if (width == 32)
3477 {
3478 base_addr = adjust_address (operands[1], SImode,
3479 bitpos / BITS_PER_UNIT);
3480 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3481 }
3482 else
3483 {
3484 rtx dest = operands[0];
3485 rtx tmp = gen_reg_rtx (SImode);
3486
3487 /* We may get a paradoxical subreg here. Strip it off. */
3488 if (GET_CODE (dest) == SUBREG
3489 && GET_MODE (dest) == SImode
3490 && GET_MODE (SUBREG_REG (dest)) == HImode)
3491 dest = SUBREG_REG (dest);
3492
3493 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3494 FAIL;
3495
3496 base_addr = adjust_address (operands[1], HImode,
3497 bitpos / BITS_PER_UNIT);
3498 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
3499 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3500 }
3501
3502 DONE;
3503 }
3504 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3505 FAIL;
3506 else if (GET_MODE (operands[0]) == SImode
3507 && GET_MODE (operands[1]) == SImode)
3508 {
3509 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
3510 operands[3]));
3511 DONE;
3512 }
3513
3514 FAIL;
3515 })
3516
3517 ; Helper to expand register forms of extv with the proper modes.
3518
3519 (define_expand "extv_regsi"
3520 [(set (match_operand:SI 0 "s_register_operand")
3521 (sign_extract:SI (match_operand:SI 1 "s_register_operand")
3522 (match_operand 2 "const_int_operand")
3523 (match_operand 3 "const_int_operand")))]
3524 ""
3525 {
3526 })
3527
3528 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
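; For illustration (hypothetical source, not taken from this file): with
; unaligned_access enabled, reading the "i" field of
;   struct __attribute__((packed)) s { char c; int i; };
; can expand to an unaligned_loadsi of the byte-aligned address instead of a
; sequence of byte loads.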
3529
3530 (define_insn "unaligned_loaddi"
3531 [(set (match_operand:DI 0 "s_register_operand" "=r")
3532 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
3533 UNSPEC_UNALIGNED_LOAD))]
3534 "TARGET_32BIT && TARGET_LDRD"
3535 "*
3536 return output_move_double (operands, true, NULL);
3537 "
3538 [(set_attr "length" "8")
3539 (set_attr "type" "load_8")])
3540
3541 (define_insn "unaligned_loadsi"
3542 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
3543 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
3544 UNSPEC_UNALIGNED_LOAD))]
3545 "unaligned_access"
3546 "@
3547 ldr\t%0, %1\t@ unaligned
3548 ldr%?\t%0, %1\t@ unaligned
3549 ldr%?\t%0, %1\t@ unaligned"
3550 [(set_attr "arch" "t1,t2,32")
3551 (set_attr "length" "2,2,4")
3552 (set_attr "predicable" "no,yes,yes")
3553 (set_attr "predicable_short_it" "no,yes,no")
3554 (set_attr "type" "load_4")])
3555
3556 ;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
3557 ;; address (there's no immediate format). That's tricky to support
3558 ;; here, and we don't really need this pattern for that case, so we only
3559 ;; enable it for 32-bit ISAs.
3560 (define_insn "unaligned_loadhis"
3561 [(set (match_operand:SI 0 "s_register_operand" "=r")
3562 (sign_extend:SI
3563 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
3564 UNSPEC_UNALIGNED_LOAD)))]
3565 "unaligned_access && TARGET_32BIT"
3566 "ldrsh%?\t%0, %1\t@ unaligned"
3567 [(set_attr "predicable" "yes")
3568 (set_attr "type" "load_byte")])
3569
3570 (define_insn "unaligned_loadhiu"
3571 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
3572 (zero_extend:SI
3573 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
3574 UNSPEC_UNALIGNED_LOAD)))]
3575 "unaligned_access"
3576 "@
3577 ldrh\t%0, %1\t@ unaligned
3578 ldrh%?\t%0, %1\t@ unaligned
3579 ldrh%?\t%0, %1\t@ unaligned"
3580 [(set_attr "arch" "t1,t2,32")
3581 (set_attr "length" "2,2,4")
3582 (set_attr "predicable" "no,yes,yes")
3583 (set_attr "predicable_short_it" "no,yes,no")
3584 (set_attr "type" "load_byte")])
3585
3586 (define_insn "unaligned_storedi"
3587 [(set (match_operand:DI 0 "memory_operand" "=m")
3588 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
3589 UNSPEC_UNALIGNED_STORE))]
3590 "TARGET_32BIT && TARGET_LDRD"
3591 "*
3592 return output_move_double (operands, true, NULL);
3593 "
3594 [(set_attr "length" "8")
3595 (set_attr "type" "store_8")])
3596
3597 (define_insn "unaligned_storesi"
3598 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
3599 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
3600 UNSPEC_UNALIGNED_STORE))]
3601 "unaligned_access"
3602 "@
3603 str\t%1, %0\t@ unaligned
3604 str%?\t%1, %0\t@ unaligned
3605 str%?\t%1, %0\t@ unaligned"
3606 [(set_attr "arch" "t1,t2,32")
3607 (set_attr "length" "2,2,4")
3608 (set_attr "predicable" "no,yes,yes")
3609 (set_attr "predicable_short_it" "no,yes,no")
3610 (set_attr "type" "store_4")])
3611
3612 (define_insn "unaligned_storehi"
3613 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
3614 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
3615 UNSPEC_UNALIGNED_STORE))]
3616 "unaligned_access"
3617 "@
3618 strh\t%1, %0\t@ unaligned
3619 strh%?\t%1, %0\t@ unaligned
3620 strh%?\t%1, %0\t@ unaligned"
3621 [(set_attr "arch" "t1,t2,32")
3622 (set_attr "length" "2,2,4")
3623 (set_attr "predicable" "no,yes,yes")
3624 (set_attr "predicable_short_it" "no,yes,no")
3625 (set_attr "type" "store_4")])
3626
3627
3628 (define_insn "*extv_reg"
3629 [(set (match_operand:SI 0 "s_register_operand" "=r")
3630 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3631 (match_operand:SI 2 "const_int_operand" "n")
3632 (match_operand:SI 3 "const_int_operand" "n")))]
3633 "arm_arch_thumb2
3634 && IN_RANGE (INTVAL (operands[3]), 0, 31)
3635 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
3636 "sbfx%?\t%0, %1, %3, %2"
3637 [(set_attr "length" "4")
3638 (set_attr "predicable" "yes")
3639 (set_attr "type" "bfm")]
3640 )
3641
3642 (define_insn "extzv_t2"
3643 [(set (match_operand:SI 0 "s_register_operand" "=r")
3644 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3645 (match_operand:SI 2 "const_int_operand" "n")
3646 (match_operand:SI 3 "const_int_operand" "n")))]
3647 "arm_arch_thumb2
3648 && IN_RANGE (INTVAL (operands[3]), 0, 31)
3649 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
3650 "ubfx%?\t%0, %1, %3, %2"
3651 [(set_attr "length" "4")
3652 (set_attr "predicable" "yes")
3653 (set_attr "type" "bfm")]
3654 )
3655
3656
3657 ;; Division instructions
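;; Usage note (an assumption about typical configurations, not stated in this
;; file): these patterns only match when TARGET_IDIV is set, i.e. when the
;; target provides hardware sdiv/udiv; otherwise integer division is expanded
;; as a run-time library call (e.g. __aeabi_idiv).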
3658 (define_insn "divsi3"
3659 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3660 (div:SI (match_operand:SI 1 "s_register_operand" "r,r")
3661 (match_operand:SI 2 "s_register_operand" "r,r")))]
3662 "TARGET_IDIV"
3663 "@
3664 sdiv%?\t%0, %1, %2
3665 sdiv\t%0, %1, %2"
3666 [(set_attr "arch" "32,v8mb")
3667 (set_attr "predicable" "yes")
3668 (set_attr "type" "sdiv")]
3669 )
3670
3671 (define_insn "udivsi3"
3672 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3673 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
3674 (match_operand:SI 2 "s_register_operand" "r,r")))]
3675 "TARGET_IDIV"
3676 "@
3677 udiv%?\t%0, %1, %2
3678 udiv\t%0, %1, %2"
3679 [(set_attr "arch" "32,v8mb")
3680 (set_attr "predicable" "yes")
3681 (set_attr "type" "udiv")]
3682 )
3683
3684 \f
3685 ;; Unary arithmetic insns
3686
3687 (define_expand "negvsi3"
3688 [(match_operand:SI 0 "register_operand")
3689 (match_operand:SI 1 "register_operand")
3690 (match_operand 2 "")]
3691 "TARGET_32BIT"
3692 {
3693 emit_insn (gen_subsi3_compare (operands[0], const0_rtx, operands[1]));
3694 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
3695
3696 DONE;
3697 })
3698
3699 (define_expand "negvdi3"
3700 [(match_operand:DI 0 "s_register_operand")
3701 (match_operand:DI 1 "s_register_operand")
3702 (match_operand 2 "")]
3703 "TARGET_ARM"
3704 {
3705 emit_insn (gen_negdi2_compare (operands[0], operands[1]));
3706 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
3707
3708 DONE;
3709 })
3710
3711
3712 (define_insn "negdi2_compare"
3713 [(set (reg:CC CC_REGNUM)
3714 (compare:CC
3715 (const_int 0)
3716 (match_operand:DI 1 "register_operand" "r,r")))
3717 (set (match_operand:DI 0 "register_operand" "=&r,&r")
3718 (minus:DI (const_int 0) (match_dup 1)))]
3719 "TARGET_ARM"
3720 "@
3721 rsbs\\t%Q0, %Q1, #0;rscs\\t%R0, %R1, #0
3722 rsbs\\t%Q0, %Q1, #0;sbcs\\t%R0, %R1, %R1, lsl #1"
3723 [(set_attr "conds" "set")
3724 (set_attr "arch" "a,t2")
3725 (set_attr "length" "8")
3726 (set_attr "type" "multiple")]
3727 )
3728
3729 (define_expand "negdi2"
3730 [(parallel
3731 [(set (match_operand:DI 0 "s_register_operand")
3732 (neg:DI (match_operand:DI 1 "s_register_operand")))
3733 (clobber (reg:CC CC_REGNUM))])]
3734 "TARGET_EITHER"
3735 )
3736
3737 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
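;; For example, if the output pair were r1:r2 and the input pair r0:r1 (so
;; %Q0 == %R1), the first instruction writing %Q0 (r1) would clobber the high
;; word of the input before the second instruction reads it; the earlyclobber
;; "&" forbids that allocation.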
3738 (define_insn "*negdi2_insn"
3739 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3740 (neg:DI (match_operand:DI 1 "s_register_operand" "r,r")))
3741 (clobber (reg:CC CC_REGNUM))]
3742 "TARGET_32BIT"
3743 "@
3744 rsbs\\t%Q0, %Q1, #0; rsc\\t%R0, %R1, #0
3745 negs\\t%Q0, %Q1; sbc\\t%R0, %R1, %R1, lsl #1"
3746 [(set_attr "conds" "clob")
3747 (set_attr "arch" "a,t2")
3748 (set_attr "length" "8")
3749 (set_attr "type" "multiple")]
3750 )
3751
3752 (define_expand "negsi2"
3753 [(set (match_operand:SI 0 "s_register_operand")
3754 (neg:SI (match_operand:SI 1 "s_register_operand")))]
3755 "TARGET_EITHER"
3756 ""
3757 )
3758
3759 (define_insn "*arm_negsi2"
3760 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
3761 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
3762 "TARGET_32BIT"
3763 "rsb%?\\t%0, %1, #0"
3764 [(set_attr "predicable" "yes")
3765 (set_attr "predicable_short_it" "yes,no")
3766 (set_attr "arch" "t2,*")
3767 (set_attr "length" "4")
3768 (set_attr "type" "alu_sreg")]
3769 )
3770
3771 (define_expand "negsf2"
3772 [(set (match_operand:SF 0 "s_register_operand")
3773 (neg:SF (match_operand:SF 1 "s_register_operand")))]
3774 "TARGET_32BIT && TARGET_HARD_FLOAT"
3775 ""
3776 )
3777
3778 (define_expand "negdf2"
3779 [(set (match_operand:DF 0 "s_register_operand")
3780 (neg:DF (match_operand:DF 1 "s_register_operand")))]
3781 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
3782 "")
3783
3784 (define_insn_and_split "*zextendsidi_negsi"
3785 [(set (match_operand:DI 0 "s_register_operand" "=r")
3786 (zero_extend:DI (neg:SI (match_operand:SI 1 "s_register_operand" "r"))))]
3787 "TARGET_32BIT"
3788 "#"
3789 ""
3790 [(set (match_dup 2)
3791 (neg:SI (match_dup 1)))
3792 (set (match_dup 3)
3793 (const_int 0))]
3794 {
3795 operands[2] = gen_lowpart (SImode, operands[0]);
3796 operands[3] = gen_highpart (SImode, operands[0]);
3797 }
3798 [(set_attr "length" "8")
3799 (set_attr "type" "multiple")]
3800 )
3801
3802 ;; Negate an extended 32-bit value.
3803 (define_insn_and_split "*negdi_extendsidi"
3804 [(set (match_operand:DI 0 "s_register_operand" "=l,r")
3805 (neg:DI (sign_extend:DI
3806 (match_operand:SI 1 "s_register_operand" "l,r"))))
3807 (clobber (reg:CC CC_REGNUM))]
3808 "TARGET_32BIT"
3809 "#"
3810 "&& reload_completed"
3811 [(const_int 0)]
3812 {
3813 rtx low = gen_lowpart (SImode, operands[0]);
3814 rtx high = gen_highpart (SImode, operands[0]);
3815
3816 if (reg_overlap_mentioned_p (low, operands[1]))
3817 {
3818 /* Input overlaps the low word of the output. Use:
3819 asr Rhi, Rin, #31
3820 rsbs Rlo, Rin, #0
3821 rsc Rhi, Rhi, #0 (thumb2: sbc Rhi, Rhi, Rhi, lsl #1). */
3822 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
3823
3824 emit_insn (gen_rtx_SET (high,
3825 gen_rtx_ASHIFTRT (SImode, operands[1],
3826 GEN_INT (31))));
3827
3828 emit_insn (gen_subsi3_compare (low, const0_rtx, operands[1]));
3829 if (TARGET_ARM)
3830 emit_insn (gen_rtx_SET (high,
3831 gen_rtx_MINUS (SImode,
3832 gen_rtx_MINUS (SImode,
3833 const0_rtx,
3834 high),
3835 gen_rtx_LTU (SImode,
3836 cc_reg,
3837 const0_rtx))));
3838 else
3839 {
3840 rtx two_x = gen_rtx_ASHIFT (SImode, high, GEN_INT (1));
3841 emit_insn (gen_rtx_SET (high,
3842 gen_rtx_MINUS (SImode,
3843 gen_rtx_MINUS (SImode,
3844 high,
3845 two_x),
3846 gen_rtx_LTU (SImode,
3847 cc_reg,
3848 const0_rtx))));
3849 }
3850 }
3851 else
3852 {
3853 /* No overlap, or overlap on high word. Use:
3854 rsb Rlo, Rin, #0
3855 bic Rhi, Rlo, Rin
3856 asr Rhi, Rhi, #31
3857 Flags not needed for this sequence. */
3858 emit_insn (gen_rtx_SET (low, gen_rtx_NEG (SImode, operands[1])));
3859 emit_insn (gen_rtx_SET (high,
3860 gen_rtx_AND (SImode,
3861 gen_rtx_NOT (SImode, operands[1]),
3862 low)));
3863 emit_insn (gen_rtx_SET (high,
3864 gen_rtx_ASHIFTRT (SImode, high,
3865 GEN_INT (31))));
3866 }
3867 DONE;
3868 }
3869 [(set_attr "length" "12")
3870 (set_attr "arch" "t2,*")
3871 (set_attr "type" "multiple")]
3872 )
3873
3874 ;; abssi2 doesn't really clobber the condition codes if a different register
3875 ;; is being set. To keep things simple, assume during rtl manipulations that
3876 ;; it does, but tell the final scan operator the truth. Similarly for
3877 ;; (neg (abs...))
3878
3879 (define_expand "abssi2"
3880 [(parallel
3881 [(set (match_operand:SI 0 "s_register_operand")
3882 (abs:SI (match_operand:SI 1 "s_register_operand")))
3883 (clobber (match_dup 2))])]
3884 "TARGET_EITHER"
3885 "
3886 if (TARGET_THUMB1)
3887 operands[2] = gen_rtx_SCRATCH (SImode);
3888 else
3889 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3890 ")
3891
3892 (define_insn_and_split "*arm_abssi2"
3893 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3894 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3895 (clobber (reg:CC CC_REGNUM))]
3896 "TARGET_ARM"
3897 "#"
3898 "&& reload_completed"
3899 [(const_int 0)]
3900 {
3901 /* if (which_alternative == 0) */
3902 if (REGNO(operands[0]) == REGNO(operands[1]))
3903 {
3904 /* Emit the pattern:
3905 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3906 [(set (reg:CC CC_REGNUM)
3907 (compare:CC (match_dup 0) (const_int 0)))
3908 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
3909 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
3910 */
3911 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
3912 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
3913 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
3914 (gen_rtx_LT (SImode,
3915 gen_rtx_REG (CCmode, CC_REGNUM),
3916 const0_rtx)),
3917 (gen_rtx_SET (operands[0],
3918 (gen_rtx_MINUS (SImode,
3919 const0_rtx,
3920 operands[1]))))));
3921 DONE;
3922 }
3923 else
3924 {
3925 /* Emit the pattern:
3926 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
3927 [(set (match_dup 0)
3928 (xor:SI (match_dup 1)
3929 (ashiftrt:SI (match_dup 1) (const_int 31))))
3930 (set (match_dup 0)
3931 (minus:SI (match_dup 0)
3932 (ashiftrt:SI (match_dup 1) (const_int 31))))]
3933 */
3934 emit_insn (gen_rtx_SET (operands[0],
3935 gen_rtx_XOR (SImode,
3936 gen_rtx_ASHIFTRT (SImode,
3937 operands[1],
3938 GEN_INT (31)),
3939 operands[1])));
3940 emit_insn (gen_rtx_SET (operands[0],
3941 gen_rtx_MINUS (SImode,
3942 operands[0],
3943 gen_rtx_ASHIFTRT (SImode,
3944 operands[1],
3945 GEN_INT (31)))));
3946 DONE;
3947 }
3948 }
3949 [(set_attr "conds" "clob,*")
3950 (set_attr "shift" "1")
3951 (set_attr "predicable" "no, yes")
3952 (set_attr "length" "8")
3953 (set_attr "type" "multiple")]
3954 )
3955
3956 (define_insn_and_split "*arm_neg_abssi2"
3957 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3958 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3959 (clobber (reg:CC CC_REGNUM))]
3960 "TARGET_ARM"
3961 "#"
3962 "&& reload_completed"
3963 [(const_int 0)]
3964 {
3965 /* if (which_alternative == 0) */
3966 if (REGNO (operands[0]) == REGNO (operands[1]))
3967 {
3968 /* Emit the pattern:
3969 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3970 */
3971 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
3972 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
3973 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
3974 gen_rtx_GT (SImode,
3975 gen_rtx_REG (CCmode, CC_REGNUM),
3976 const0_rtx),
3977 gen_rtx_SET (operands[0],
3978 (gen_rtx_MINUS (SImode,
3979 const0_rtx,
3980 operands[1])))));
3981 }
3982 else
3983 {
3984 /* Emit the pattern:
3985 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
3986 */
3987 emit_insn (gen_rtx_SET (operands[0],
3988 gen_rtx_XOR (SImode,
3989 gen_rtx_ASHIFTRT (SImode,
3990 operands[1],
3991 GEN_INT (31)),
3992 operands[1])));
3993 emit_insn (gen_rtx_SET (operands[0],
3994 gen_rtx_MINUS (SImode,
3995 gen_rtx_ASHIFTRT (SImode,
3996 operands[1],
3997 GEN_INT (31)),
3998 operands[0])));
3999 }
4000 DONE;
4001 }
4002 [(set_attr "conds" "clob,*")
4003 (set_attr "shift" "1")
4004 (set_attr "predicable" "no, yes")
4005 (set_attr "length" "8")
4006 (set_attr "type" "multiple")]
4007 )
4008
4009 (define_expand "abssf2"
4010 [(set (match_operand:SF 0 "s_register_operand")
4011 (abs:SF (match_operand:SF 1 "s_register_operand")))]
4012 "TARGET_32BIT && TARGET_HARD_FLOAT"
4013 "")
4014
4015 (define_expand "absdf2"
4016 [(set (match_operand:DF 0 "s_register_operand")
4017 (abs:DF (match_operand:DF 1 "s_register_operand")))]
4018 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4019 "")
4020
4021 (define_expand "sqrtsf2"
4022 [(set (match_operand:SF 0 "s_register_operand")
4023 (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
4024 "TARGET_32BIT && TARGET_HARD_FLOAT"
4025 "")
4026
4027 (define_expand "sqrtdf2"
4028 [(set (match_operand:DF 0 "s_register_operand")
4029 (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
4030 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4031 "")
4032
4033 (define_expand "one_cmplsi2"
4034 [(set (match_operand:SI 0 "s_register_operand")
4035 (not:SI (match_operand:SI 1 "s_register_operand")))]
4036 "TARGET_EITHER"
4037 ""
4038 )
4039
4040 (define_insn "*arm_one_cmplsi2"
4041 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4042 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4043 "TARGET_32BIT"
4044 "mvn%?\\t%0, %1"
4045 [(set_attr "predicable" "yes")
4046 (set_attr "predicable_short_it" "yes,no")
4047 (set_attr "arch" "t2,*")
4048 (set_attr "length" "4")
4049 (set_attr "type" "mvn_reg")]
4050 )
4051
4052 (define_insn "*notsi_compare0"
4053 [(set (reg:CC_NOOV CC_REGNUM)
4054 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4055 (const_int 0)))
4056 (set (match_operand:SI 0 "s_register_operand" "=r")
4057 (not:SI (match_dup 1)))]
4058 "TARGET_32BIT"
4059 "mvns%?\\t%0, %1"
4060 [(set_attr "conds" "set")
4061 (set_attr "type" "mvn_reg")]
4062 )
4063
4064 (define_insn "*notsi_compare0_scratch"
4065 [(set (reg:CC_NOOV CC_REGNUM)
4066 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4067 (const_int 0)))
4068 (clobber (match_scratch:SI 0 "=r"))]
4069 "TARGET_32BIT"
4070 "mvns%?\\t%0, %1"
4071 [(set_attr "conds" "set")
4072 (set_attr "type" "mvn_reg")]
4073 )
4074 \f
4075 ;; Fixed <--> Floating conversion insns
4076
4077 (define_expand "floatsihf2"
4078 [(set (match_operand:HF 0 "general_operand")
4079 (float:HF (match_operand:SI 1 "general_operand")))]
4080 "TARGET_EITHER"
4081 "
4082 {
4083 rtx op1 = gen_reg_rtx (SFmode);
4084 expand_float (op1, operands[1], 0);
4085 op1 = convert_to_mode (HFmode, op1, 0);
4086 emit_move_insn (operands[0], op1);
4087 DONE;
4088 }"
4089 )
4090
4091 (define_expand "floatdihf2"
4092 [(set (match_operand:HF 0 "general_operand")
4093 (float:HF (match_operand:DI 1 "general_operand")))]
4094 "TARGET_EITHER"
4095 "
4096 {
4097 rtx op1 = gen_reg_rtx (SFmode);
4098 expand_float (op1, operands[1], 0);
4099 op1 = convert_to_mode (HFmode, op1, 0);
4100 emit_move_insn (operands[0], op1);
4101 DONE;
4102 }"
4103 )
4104
4105 (define_expand "floatsisf2"
4106 [(set (match_operand:SF 0 "s_register_operand")
4107 (float:SF (match_operand:SI 1 "s_register_operand")))]
4108 "TARGET_32BIT && TARGET_HARD_FLOAT"
4109 "
4110 ")
4111
4112 (define_expand "floatsidf2"
4113 [(set (match_operand:DF 0 "s_register_operand")
4114 (float:DF (match_operand:SI 1 "s_register_operand")))]
4115 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4116 "
4117 ")
4118
4119 (define_expand "fix_trunchfsi2"
4120 [(set (match_operand:SI 0 "general_operand")
4121 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
4122 "TARGET_EITHER"
4123 "
4124 {
4125 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4126 expand_fix (operands[0], op1, 0);
4127 DONE;
4128 }"
4129 )
4130
4131 (define_expand "fix_trunchfdi2"
4132 [(set (match_operand:DI 0 "general_operand")
4133 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
4134 "TARGET_EITHER"
4135 "
4136 {
4137 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4138 expand_fix (operands[0], op1, 0);
4139 DONE;
4140 }"
4141 )
4142
4143 (define_expand "fix_truncsfsi2"
4144 [(set (match_operand:SI 0 "s_register_operand")
4145 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
4146 "TARGET_32BIT && TARGET_HARD_FLOAT"
4147 "
4148 ")
4149
4150 (define_expand "fix_truncdfsi2"
4151 [(set (match_operand:SI 0 "s_register_operand")
4152 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
4153 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4154 "
4155 ")
4156
4157 ;; Truncation insns
4158
4159 (define_expand "truncdfsf2"
4160 [(set (match_operand:SF 0 "s_register_operand")
4161 (float_truncate:SF
4162 (match_operand:DF 1 "s_register_operand")))]
4163 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4164 ""
4165 )
4166
4167 ;; DFmode to HFmode conversions on targets without a single-step hardware
4168 ;; instruction for them have to go through SFmode. This is dangerous,
4169 ;; as it introduces double rounding.
4170 ;;
4171 ;; Disable this pattern unless we are in an unsafe math mode, or we have
4172 ;; a single-step instruction.
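;; An illustrative case of the hazard (values chosen by hand, not from any
;; testcase): the DFmode value 1 + 2^-11 + 2^-25 rounds directly to the HFmode
;; value 1 + 2^-10, but rounding it to SFmode first gives exactly 1 + 2^-11,
;; which then rounds (ties-to-even) down to 1.0 in HFmode.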
4173
4174 (define_expand "truncdfhf2"
4175 [(set (match_operand:HF 0 "s_register_operand")
4176 (float_truncate:HF
4177 (match_operand:DF 1 "s_register_operand")))]
4178 "(TARGET_EITHER && flag_unsafe_math_optimizations)
4179 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
4180 {
4181 /* We don't have a direct instruction for this, so we must be in
4182 an unsafe math mode, and going via SFmode. */
4183
4184 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4185 {
4186 rtx op1;
4187 op1 = convert_to_mode (SFmode, operands[1], 0);
4188 op1 = convert_to_mode (HFmode, op1, 0);
4189 emit_move_insn (operands[0], op1);
4190 DONE;
4191 }
4192 /* Otherwise, we will pick this up as a single instruction with
4193 no intermediate rounding. */
4194 }
4195 )
4196 \f
4197 ;; Zero and sign extension instructions.
4198
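;; As a rough illustration of the DImode expanders below (register choices are
;; arbitrary): zero extension copies the source into the low word and clears
;; the high word, e.g.
;;      mov     r0, r2
;;      mov     r1, #0
;; while sign extension fills the high word with "asr r1, r0, #31" instead.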
4199 (define_expand "zero_extend<mode>di2"
4200 [(set (match_operand:DI 0 "s_register_operand" "")
4201 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
4202 "TARGET_32BIT <qhs_zextenddi_cond>"
4203 {
4204 rtx res_lo, res_hi, op0_lo, op0_hi;
4205 res_lo = gen_lowpart (SImode, operands[0]);
4206 res_hi = gen_highpart (SImode, operands[0]);
4207 if (can_create_pseudo_p ())
4208 {
4209 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4210 op0_hi = gen_reg_rtx (SImode);
4211 }
4212 else
4213 {
4214 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4215 op0_hi = res_hi;
4216 }
4217 if (<MODE>mode != SImode)
4218 emit_insn (gen_rtx_SET (op0_lo,
4219 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4220 emit_insn (gen_movsi (op0_hi, const0_rtx));
4221 if (res_lo != op0_lo)
4222 emit_move_insn (res_lo, op0_lo);
4223 if (res_hi != op0_hi)
4224 emit_move_insn (res_hi, op0_hi);
4225 DONE;
4226 }
4227 )
4228
4229 (define_expand "extend<mode>di2"
4230 [(set (match_operand:DI 0 "s_register_operand" "")
4231 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
4232 "TARGET_32BIT <qhs_sextenddi_cond>"
4233 {
4234 rtx res_lo, res_hi, op0_lo, op0_hi;
4235 res_lo = gen_lowpart (SImode, operands[0]);
4236 res_hi = gen_highpart (SImode, operands[0]);
4237 if (can_create_pseudo_p ())
4238 {
4239 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4240 op0_hi = gen_reg_rtx (SImode);
4241 }
4242 else
4243 {
4244 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4245 op0_hi = res_hi;
4246 }
4247 if (<MODE>mode != SImode)
4248 emit_insn (gen_rtx_SET (op0_lo,
4249 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4250 emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
4251 if (res_lo != op0_lo)
4252 emit_move_insn (res_lo, op0_lo);
4253 if (res_hi != op0_hi)
4254 emit_move_insn (res_hi, op0_hi);
4255 DONE;
4256 }
4257 )
4258
4259 ;; Splits for all extensions to DImode
4260 (define_split
4261 [(set (match_operand:DI 0 "s_register_operand" "")
4262 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4263 "TARGET_32BIT"
4264 [(set (match_dup 0) (match_dup 1))]
4265 {
4266 rtx lo_part = gen_lowpart (SImode, operands[0]);
4267 machine_mode src_mode = GET_MODE (operands[1]);
4268
4269 if (src_mode == SImode)
4270 emit_move_insn (lo_part, operands[1]);
4271 else
4272 emit_insn (gen_rtx_SET (lo_part,
4273 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4274 operands[0] = gen_highpart (SImode, operands[0]);
4275 operands[1] = const0_rtx;
4276 })
4277
4278 (define_split
4279 [(set (match_operand:DI 0 "s_register_operand" "")
4280 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4281 "TARGET_32BIT"
4282 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4283 {
4284 rtx lo_part = gen_lowpart (SImode, operands[0]);
4285 machine_mode src_mode = GET_MODE (operands[1]);
4286
4287 if (src_mode == SImode)
4288 emit_move_insn (lo_part, operands[1]);
4289 else
4290 emit_insn (gen_rtx_SET (lo_part,
4291 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4292 operands[1] = lo_part;
4293 operands[0] = gen_highpart (SImode, operands[0]);
4294 })
4295
4296 (define_expand "zero_extendhisi2"
4297 [(set (match_operand:SI 0 "s_register_operand")
4298 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4299 "TARGET_EITHER"
4300 {
4301 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4302 {
4303 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4304 DONE;
4305 }
4306 if (!arm_arch6 && !MEM_P (operands[1]))
4307 {
4308 rtx t = gen_lowpart (SImode, operands[1]);
4309 rtx tmp = gen_reg_rtx (SImode);
4310 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4311 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
4312 DONE;
4313 }
4314 })
4315
4316 (define_split
4317 [(set (match_operand:SI 0 "s_register_operand" "")
4318 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
4319 "!TARGET_THUMB2 && !arm_arch6"
4320 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4321 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4322 {
4323 operands[2] = gen_lowpart (SImode, operands[1]);
4324 })
4325
4326 (define_insn "*arm_zero_extendhisi2"
4327 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4328 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4329 "TARGET_ARM && arm_arch4 && !arm_arch6"
4330 "@
4331 #
4332 ldrh%?\\t%0, %1"
4333 [(set_attr "type" "alu_shift_reg,load_byte")
4334 (set_attr "predicable" "yes")]
4335 )
4336
4337 (define_insn "*arm_zero_extendhisi2_v6"
4338 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4339 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4340 "TARGET_ARM && arm_arch6"
4341 "@
4342 uxth%?\\t%0, %1
4343 ldrh%?\\t%0, %1"
4344 [(set_attr "predicable" "yes")
4345 (set_attr "type" "extend,load_byte")]
4346 )
4347
4348 (define_insn "*arm_zero_extendhisi2addsi"
4349 [(set (match_operand:SI 0 "s_register_operand" "=r")
4350 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4351 (match_operand:SI 2 "s_register_operand" "r")))]
4352 "TARGET_INT_SIMD"
4353 "uxtah%?\\t%0, %2, %1"
4354 [(set_attr "type" "alu_shift_reg")
4355 (set_attr "predicable" "yes")]
4356 )
4357
4358 (define_expand "zero_extendqisi2"
4359 [(set (match_operand:SI 0 "s_register_operand")
4360 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
4361 "TARGET_EITHER"
4362 {
4363 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
4364 {
4365 emit_insn (gen_andsi3 (operands[0],
4366 gen_lowpart (SImode, operands[1]),
4367 GEN_INT (255)));
4368 DONE;
4369 }
4370 if (!arm_arch6 && !MEM_P (operands[1]))
4371 {
4372 rtx t = gen_lowpart (SImode, operands[1]);
4373 rtx tmp = gen_reg_rtx (SImode);
4374 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4375 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
4376 DONE;
4377 }
4378 })
4379
4380 (define_split
4381 [(set (match_operand:SI 0 "s_register_operand" "")
4382 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
4383 "!arm_arch6"
4384 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4385 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4386 {
4387 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4388 if (TARGET_ARM)
4389 {
4390 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
4391 DONE;
4392 }
4393 })
4394
4395 (define_insn "*arm_zero_extendqisi2"
4396 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4397 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4398 "TARGET_ARM && !arm_arch6"
4399 "@
4400 #
4401 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4402 [(set_attr "length" "8,4")
4403 (set_attr "type" "alu_shift_reg,load_byte")
4404 (set_attr "predicable" "yes")]
4405 )
4406
4407 (define_insn "*arm_zero_extendqisi2_v6"
4408 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4409 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
4410 "TARGET_ARM && arm_arch6"
4411 "@
4412 uxtb%?\\t%0, %1
4413 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4414 [(set_attr "type" "extend,load_byte")
4415 (set_attr "predicable" "yes")]
4416 )
4417
4418 (define_insn "*arm_zero_extendqisi2addsi"
4419 [(set (match_operand:SI 0 "s_register_operand" "=r")
4420 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4421 (match_operand:SI 2 "s_register_operand" "r")))]
4422 "TARGET_INT_SIMD"
4423 "uxtab%?\\t%0, %2, %1"
4424 [(set_attr "predicable" "yes")
4425 (set_attr "type" "alu_shift_reg")]
4426 )
4427
4428 (define_split
4429 [(set (match_operand:SI 0 "s_register_operand" "")
4430 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4431 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4432 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
4433 [(set (match_dup 2) (match_dup 1))
4434 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4435 ""
4436 )
4437
4438 (define_split
4439 [(set (match_operand:SI 0 "s_register_operand" "")
4440 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4441 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4442 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
4443 [(set (match_dup 2) (match_dup 1))
4444 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4445 ""
4446 )
4447
4448
4449 (define_split
4450 [(set (match_operand:SI 0 "s_register_operand" "")
4451 (IOR_XOR:SI (and:SI (ashift:SI
4452 (match_operand:SI 1 "s_register_operand" "")
4453 (match_operand:SI 2 "const_int_operand" ""))
4454 (match_operand:SI 3 "const_int_operand" ""))
4455 (zero_extend:SI
4456 (match_operator 5 "subreg_lowpart_operator"
4457 [(match_operand:SI 4 "s_register_operand" "")]))))]
4458 "TARGET_32BIT
4459 && (UINTVAL (operands[3])
4460 == (GET_MODE_MASK (GET_MODE (operands[5]))
4461 & (GET_MODE_MASK (GET_MODE (operands[5]))
4462 << (INTVAL (operands[2])))))"
4463 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
4464 (match_dup 4)))
4465 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4466 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4467 )
4468
4469 (define_insn "*compareqi_eq0"
4470 [(set (reg:CC_Z CC_REGNUM)
4471 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4472 (const_int 0)))]
4473 "TARGET_32BIT"
4474 "tst%?\\t%0, #255"
4475 [(set_attr "conds" "set")
4476 (set_attr "predicable" "yes")
4477 (set_attr "type" "logic_imm")]
4478 )
4479
4480 (define_expand "extendhisi2"
4481 [(set (match_operand:SI 0 "s_register_operand")
4482 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4483 "TARGET_EITHER"
4484 {
4485 if (TARGET_THUMB1)
4486 {
4487 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4488 DONE;
4489 }
4490 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4491 {
4492 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4493 DONE;
4494 }
4495
4496 if (!arm_arch6 && !MEM_P (operands[1]))
4497 {
4498 rtx t = gen_lowpart (SImode, operands[1]);
4499 rtx tmp = gen_reg_rtx (SImode);
4500 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4501 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
4502 DONE;
4503 }
4504 })
4505
4506 (define_split
4507 [(parallel
4508 [(set (match_operand:SI 0 "register_operand" "")
4509 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4510 (clobber (match_scratch:SI 2 ""))])]
4511 "!arm_arch6"
4512 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4513 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4514 {
4515 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4516 })
4517
4518 ;; This pattern will only be used when ldrsh is not available.
4519 (define_expand "extendhisi2_mem"
4520 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4521 (set (match_dup 3)
4522 (zero_extend:SI (match_dup 7)))
4523 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4524 (set (match_operand:SI 0 "" "")
4525 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4526 "TARGET_ARM"
4527 "
4528 {
4529 rtx mem1, mem2;
4530 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4531
4532 mem1 = change_address (operands[1], QImode, addr);
4533 mem2 = change_address (operands[1], QImode,
4534 plus_constant (Pmode, addr, 1));
4535 operands[0] = gen_lowpart (SImode, operands[0]);
4536 operands[1] = mem1;
4537 operands[2] = gen_reg_rtx (SImode);
4538 operands[3] = gen_reg_rtx (SImode);
4539 operands[6] = gen_reg_rtx (SImode);
4540 operands[7] = mem2;
4541
4542 if (BYTES_BIG_ENDIAN)
4543 {
4544 operands[4] = operands[2];
4545 operands[5] = operands[3];
4546 }
4547 else
4548 {
4549 operands[4] = operands[3];
4550 operands[5] = operands[2];
4551 }
4552 }"
4553 )
4554
4555 (define_split
4556 [(set (match_operand:SI 0 "register_operand" "")
4557 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4558 "!arm_arch6"
4559 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4560 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4561 {
4562 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4563 })
4564
4565 (define_insn "*arm_extendhisi2"
4566 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4567 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4568 "TARGET_ARM && arm_arch4 && !arm_arch6"
4569 "@
4570 #
4571 ldrsh%?\\t%0, %1"
4572 [(set_attr "length" "8,4")
4573 (set_attr "type" "alu_shift_reg,load_byte")
4574 (set_attr "predicable" "yes")]
4575 )
4576
4577 ;; ??? Check Thumb-2 pool range
4578 (define_insn "*arm_extendhisi2_v6"
4579 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4580 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4581 "TARGET_32BIT && arm_arch6"
4582 "@
4583 sxth%?\\t%0, %1
4584 ldrsh%?\\t%0, %1"
4585 [(set_attr "type" "extend,load_byte")
4586 (set_attr "predicable" "yes")]
4587 )
4588
4589 (define_insn "*arm_extendhisi2addsi"
4590 [(set (match_operand:SI 0 "s_register_operand" "=r")
4591 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4592 (match_operand:SI 2 "s_register_operand" "r")))]
4593 "TARGET_INT_SIMD"
4594 "sxtah%?\\t%0, %2, %1"
4595 [(set_attr "type" "alu_shift_reg")]
4596 )
4597
4598 (define_expand "extendqihi2"
4599 [(set (match_dup 2)
4600 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
4601 (const_int 24)))
4602 (set (match_operand:HI 0 "s_register_operand")
4603 (ashiftrt:SI (match_dup 2)
4604 (const_int 24)))]
4605 "TARGET_ARM"
4606 "
4607 {
4608 if (arm_arch4 && MEM_P (operands[1]))
4609 {
4610 emit_insn (gen_rtx_SET (operands[0],
4611 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4612 DONE;
4613 }
4614 if (!s_register_operand (operands[1], QImode))
4615 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4616 operands[0] = gen_lowpart (SImode, operands[0]);
4617 operands[1] = gen_lowpart (SImode, operands[1]);
4618 operands[2] = gen_reg_rtx (SImode);
4619 }"
4620 )
4621
4622 (define_insn "*arm_extendqihi_insn"
4623 [(set (match_operand:HI 0 "s_register_operand" "=r")
4624 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4625 "TARGET_ARM && arm_arch4"
4626 "ldrsb%?\\t%0, %1"
4627 [(set_attr "type" "load_byte")
4628 (set_attr "predicable" "yes")]
4629 )
4630
4631 (define_expand "extendqisi2"
4632 [(set (match_operand:SI 0 "s_register_operand")
4633 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
4634 "TARGET_EITHER"
4635 {
4636 if (!arm_arch4 && MEM_P (operands[1]))
4637 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4638
4639 if (!arm_arch6 && !MEM_P (operands[1]))
4640 {
4641 rtx t = gen_lowpart (SImode, operands[1]);
4642 rtx tmp = gen_reg_rtx (SImode);
4643 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4644 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
4645 DONE;
4646 }
4647 })
4648
4649 (define_split
4650 [(set (match_operand:SI 0 "register_operand" "")
4651 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
4652 "!arm_arch6"
4653 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4654 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
4655 {
4656 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4657 })
4658
4659 (define_insn "*arm_extendqisi"
4660 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4661 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4662 "TARGET_ARM && arm_arch4 && !arm_arch6"
4663 "@
4664 #
4665 ldrsb%?\\t%0, %1"
4666 [(set_attr "length" "8,4")
4667 (set_attr "type" "alu_shift_reg,load_byte")
4668 (set_attr "predicable" "yes")]
4669 )
4670
4671 (define_insn "*arm_extendqisi_v6"
4672 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4673 (sign_extend:SI
4674 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4675 "TARGET_ARM && arm_arch6"
4676 "@
4677 sxtb%?\\t%0, %1
4678 ldrsb%?\\t%0, %1"
4679 [(set_attr "type" "extend,load_byte")
4680 (set_attr "predicable" "yes")]
4681 )
4682
4683 (define_insn "*arm_extendqisi2addsi"
4684 [(set (match_operand:SI 0 "s_register_operand" "=r")
4685 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4686 (match_operand:SI 2 "s_register_operand" "r")))]
4687 "TARGET_INT_SIMD"
4688 "sxtab%?\\t%0, %2, %1"
4689 [(set_attr "type" "alu_shift_reg")
4690 (set_attr "predicable" "yes")]
4691 )
4692
4693 (define_insn "arm_<sup>xtb16"
4694 [(set (match_operand:SI 0 "s_register_operand" "=r")
4695 (unspec:SI
4696 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
4697 "TARGET_INT_SIMD"
4698 "<sup>xtb16%?\\t%0, %1"
4699 [(set_attr "predicable" "yes")
4700 (set_attr "type" "alu_dsp_reg")])
4701
4702 (define_insn "arm_<simd32_op>"
4703 [(set (match_operand:SI 0 "s_register_operand" "=r")
4704 (unspec:SI
4705 [(match_operand:SI 1 "s_register_operand" "r")
4706 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
4707 "TARGET_INT_SIMD"
4708 "<simd32_op>%?\\t%0, %1, %2"
4709 [(set_attr "predicable" "yes")
4710 (set_attr "type" "alu_dsp_reg")])
4711
4712 (define_insn "arm_usada8"
4713 [(set (match_operand:SI 0 "s_register_operand" "=r")
4714 (unspec:SI
4715 [(match_operand:SI 1 "s_register_operand" "r")
4716 (match_operand:SI 2 "s_register_operand" "r")
4717 (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
4718 "TARGET_INT_SIMD"
4719 "usada8%?\\t%0, %1, %2, %3"
4720 [(set_attr "predicable" "yes")
4721 (set_attr "type" "alu_dsp_reg")])
4722
4723 (define_insn "arm_<simd32_op>"
4724 [(set (match_operand:DI 0 "s_register_operand" "=r")
4725 (unspec:DI
4726 [(match_operand:SI 1 "s_register_operand" "r")
4727 (match_operand:SI 2 "s_register_operand" "r")
4728 (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
4729 "TARGET_INT_SIMD"
4730 "<simd32_op>%?\\t%Q0, %R0, %1, %2"
4731 [(set_attr "predicable" "yes")
4732 (set_attr "type" "smlald")])
4733
4734 (define_expand "extendsfdf2"
4735 [(set (match_operand:DF 0 "s_register_operand")
4736 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
4737 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4738 ""
4739 )
4740
4741 ;; HFmode -> DFmode conversions where we don't have an instruction for them
4742 ;; must go through SFmode.
4743 ;;
4744 ;; This is always safe for an extend: every HFmode value is exactly representable in SFmode.
4745
4746 (define_expand "extendhfdf2"
4747 [(set (match_operand:DF 0 "s_register_operand")
4748 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
4749 "TARGET_EITHER"
4750 {
4751 /* We don't have a direct instruction for this, so go via SFmode. */
4752 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4753 {
4754 rtx op1;
4755 op1 = convert_to_mode (SFmode, operands[1], 0);
4756 op1 = convert_to_mode (DFmode, op1, 0);
4757 emit_insn (gen_movdf (operands[0], op1));
4758 DONE;
4759 }
4760 /* Otherwise, we're done producing RTL and will pick up the correct
4761 pattern to do this with one rounding-step in a single instruction. */
4762 }
4763 )
4764 \f
4765 ;; Move insns (including loads and stores)
4766
4767 ;; XXX Just some ideas about movti.
4768 ;; I don't think these are a good idea on the ARM; there just aren't enough
4769 ;; registers.
4770 ;;(define_expand "loadti"
4771 ;; [(set (match_operand:TI 0 "s_register_operand")
4772 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
4773 ;; "" "")
4774
4775 ;;(define_expand "storeti"
4776 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
4777 ;; (match_operand:TI 1 "s_register_operand"))]
4778 ;; "" "")
4779
4780 ;;(define_expand "movti"
4781 ;; [(set (match_operand:TI 0 "general_operand")
4782 ;; (match_operand:TI 1 "general_operand"))]
4783 ;; ""
4784 ;; "
4785 ;;{
4786 ;; rtx insn;
4787 ;;
4788 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
4789 ;; operands[1] = copy_to_reg (operands[1]);
4790 ;; if (MEM_P (operands[0]))
4791 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4792 ;; else if (MEM_P (operands[1]))
4793 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4794 ;; else
4795 ;; FAIL;
4796 ;;
4797 ;; emit_insn (insn);
4798 ;; DONE;
4799 ;;}")
4800
4801 ;; Recognize garbage generated above.
4802
4803 ;;(define_insn ""
4804 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4805 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4806 ;; ""
4807 ;; "*
4808 ;; {
4809 ;; register mem = (which_alternative < 3);
4810 ;; register const char *template;
4811 ;;
4812 ;; operands[mem] = XEXP (operands[mem], 0);
4813 ;; switch (which_alternative)
4814 ;; {
4815 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4816 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4817 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4818 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4819 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4820 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4821 ;; }
4822 ;; output_asm_insn (template, operands);
4823 ;; return \"\";
4824 ;; }")
4825
4826 (define_expand "movdi"
4827 [(set (match_operand:DI 0 "general_operand")
4828 (match_operand:DI 1 "general_operand"))]
4829 "TARGET_EITHER"
4830 "
4831 gcc_checking_assert (aligned_operand (operands[0], DImode));
4832 gcc_checking_assert (aligned_operand (operands[1], DImode));
4833 if (can_create_pseudo_p ())
4834 {
4835 if (!REG_P (operands[0]))
4836 operands[1] = force_reg (DImode, operands[1]);
4837 }
4838 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
4839 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
4840 {
4841 /* Avoid LDRD's into an odd-numbered register pair in ARM state
4842 when expanding function calls. */
4843 gcc_assert (can_create_pseudo_p ());
4844 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
4845 {
4846 /* Perform load into legal reg pair first, then move. */
4847 rtx reg = gen_reg_rtx (DImode);
4848 emit_insn (gen_movdi (reg, operands[1]));
4849 operands[1] = reg;
4850 }
4851 emit_move_insn (gen_lowpart (SImode, operands[0]),
4852 gen_lowpart (SImode, operands[1]));
4853 emit_move_insn (gen_highpart (SImode, operands[0]),
4854 gen_highpart (SImode, operands[1]));
4855 DONE;
4856 }
4857 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
4858 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
4859 {
4860 /* Avoid STRD's from an odd-numbered register pair in ARM state
4861 when expanding function prologue. */
4862 gcc_assert (can_create_pseudo_p ());
4863 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
4864 ? gen_reg_rtx (DImode)
4865 : operands[0];
4866 emit_move_insn (gen_lowpart (SImode, split_dest),
4867 gen_lowpart (SImode, operands[1]));
4868 emit_move_insn (gen_highpart (SImode, split_dest),
4869 gen_highpart (SImode, operands[1]));
4870 if (split_dest != operands[0])
4871 emit_insn (gen_movdi (operands[0], split_dest));
4872 DONE;
4873 }
4874 "
4875 )
4876
4877 (define_insn "*arm_movdi"
4878 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4879 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4880 "TARGET_32BIT
4881 && !(TARGET_HARD_FLOAT)
4882 && !TARGET_IWMMXT
4883 && ( register_operand (operands[0], DImode)
4884 || register_operand (operands[1], DImode))"
4885 "*
4886 switch (which_alternative)
4887 {
4888 case 0:
4889 case 1:
4890 case 2:
4891 return \"#\";
4892 case 3:
4893 /* Cannot load it directly, split to load it via MOV / MOVT. */
4894 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
4895 return \"#\";
4896 /* Fall through. */
4897 default:
4898 return output_move_double (operands, true, NULL);
4899 }
4900 "
4901 [(set_attr "length" "8,12,16,8,8")
4902 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
4903 (set_attr "arm_pool_range" "*,*,*,1020,*")
4904 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
4905 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
4906 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
4907 )
4908
4909 (define_split
4910 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4911 (match_operand:ANY64 1 "immediate_operand" ""))]
4912 "TARGET_32BIT
4913 && reload_completed
4914 && (arm_disable_literal_pool
4915 || (arm_const_double_inline_cost (operands[1])
4916 <= arm_max_const_double_inline_cost ()))"
4917 [(const_int 0)]
4918 "
4919 arm_split_constant (SET, SImode, curr_insn,
4920 INTVAL (gen_lowpart (SImode, operands[1])),
4921 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4922 arm_split_constant (SET, SImode, curr_insn,
4923 INTVAL (gen_highpart_mode (SImode,
4924 GET_MODE (operands[0]),
4925 operands[1])),
4926 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4927 DONE;
4928 "
4929 )
4930
4931 ; If optimizing for size, or if we have load delay slots, then
4932 ; we want to split the constant into two separate operations.
4933 ; In both cases this may split a trivial part into a single data op
4934 ; leaving a single complex constant to load. We can also get longer
4935 ; offsets in an LDR, which means we get better chances of sharing the pool
4936 ; entries. Finally, we can normally do a better job of scheduling
4937 ; LDR instructions than we can with LDM.
4938 ; This pattern will only match if the one above did not.
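; For example (purely illustrative), a DImode constant such as
; 0x0000000100000005 ends up as two independent SImode sets: 5 into the low
; word and 1 into the high word, each a trivial immediate move.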
4939 (define_split
4940 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4941 (match_operand:ANY64 1 "const_double_operand" ""))]
4942 "TARGET_ARM && reload_completed
4943 && arm_const_double_by_parts (operands[1])"
4944 [(set (match_dup 0) (match_dup 1))
4945 (set (match_dup 2) (match_dup 3))]
4946 "
4947 operands[2] = gen_highpart (SImode, operands[0]);
4948 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4949 operands[1]);
4950 operands[0] = gen_lowpart (SImode, operands[0]);
4951 operands[1] = gen_lowpart (SImode, operands[1]);
4952 "
4953 )
4954
4955 (define_split
4956 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4957 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4958 "TARGET_EITHER && reload_completed"
4959 [(set (match_dup 0) (match_dup 1))
4960 (set (match_dup 2) (match_dup 3))]
4961 "
4962 operands[2] = gen_highpart (SImode, operands[0]);
4963 operands[3] = gen_highpart (SImode, operands[1]);
4964 operands[0] = gen_lowpart (SImode, operands[0]);
4965 operands[1] = gen_lowpart (SImode, operands[1]);
4966
4967 /* Handle a partial overlap. */
4968 if (rtx_equal_p (operands[0], operands[3]))
4969 {
4970 rtx tmp0 = operands[0];
4971 rtx tmp1 = operands[1];
4972
4973 operands[0] = operands[2];
4974 operands[1] = operands[3];
4975 operands[2] = tmp0;
4976 operands[3] = tmp1;
4977 }
4978 "
4979 )
4980
4981 ;; We can't actually do base+index doubleword loads if the index and
4982 ;; destination overlap. Split here so that we at least have a chance to
4983 ;; schedule.
4984 (define_split
4985 [(set (match_operand:DI 0 "s_register_operand" "")
4986 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4987 (match_operand:SI 2 "s_register_operand" ""))))]
4988 "TARGET_LDRD
4989 && reg_overlap_mentioned_p (operands[0], operands[1])
4990 && reg_overlap_mentioned_p (operands[0], operands[2])"
4991 [(set (match_dup 4)
4992 (plus:SI (match_dup 1)
4993 (match_dup 2)))
4994 (set (match_dup 0)
4995 (mem:DI (match_dup 4)))]
4996 "
4997 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
4998 "
4999 )
5000
5001 (define_expand "movsi"
5002 [(set (match_operand:SI 0 "general_operand")
5003 (match_operand:SI 1 "general_operand"))]
5004 "TARGET_EITHER"
5005 "
5006 {
5007 rtx base, offset, tmp;
5008
5009 gcc_checking_assert (aligned_operand (operands[0], SImode));
5010 gcc_checking_assert (aligned_operand (operands[1], SImode));
5011 if (TARGET_32BIT || TARGET_HAVE_MOVT)
5012 {
5013 /* Everything except mem = const or mem = mem can be done easily. */
5014 if (MEM_P (operands[0]))
5015 operands[1] = force_reg (SImode, operands[1]);
5016 if (arm_general_register_operand (operands[0], SImode)
5017 && CONST_INT_P (operands[1])
5018 && !(const_ok_for_arm (INTVAL (operands[1]))
5019 || const_ok_for_arm (~INTVAL (operands[1]))))
5020 {
5021 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
5022 {
5023 emit_insn (gen_rtx_SET (operands[0], operands[1]));
5024 DONE;
5025 }
5026 else
5027 {
5028 arm_split_constant (SET, SImode, NULL_RTX,
5029 INTVAL (operands[1]), operands[0], NULL_RTX,
5030 optimize && can_create_pseudo_p ());
5031 DONE;
5032 }
5033 }
5034 }
5035 else /* Target doesn't have MOVT... */
5036 {
5037 if (can_create_pseudo_p ())
5038 {
5039 if (!REG_P (operands[0]))
5040 operands[1] = force_reg (SImode, operands[1]);
5041 }
5042 }
5043
5044 split_const (operands[1], &base, &offset);
5045 if (INTVAL (offset) != 0
5046 && targetm.cannot_force_const_mem (SImode, operands[1]))
5047 {
5048 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5049 emit_move_insn (tmp, base);
5050 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5051 DONE;
5052 }
5053
5054 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
5055
5056 /* Recognize the case where operand[1] is a reference to thread-local
5057 data and load its address to a register. Offsets have been split off
5058 already. */
5059 if (arm_tls_referenced_p (operands[1]))
5060 operands[1] = legitimize_tls_address (operands[1], tmp);
5061 else if (flag_pic
5062 && (CONSTANT_P (operands[1])
5063 || symbol_mentioned_p (operands[1])
5064 || label_mentioned_p (operands[1])))
5065 operands[1] =
5066 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
5067 }
5068 "
5069 )
5070
5071 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5072 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5073 ;; so this does not matter.
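;; To illustrate: for a symbol "g", (set (reg) (high (symbol_ref "g"))) is
;; printed as "movw rN, #:lower16:g", and the (lo_sum ...) handled by the
;; pattern below is printed as "movt rN, #:upper16:g".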
5074 (define_insn "*arm_movt"
5075 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
5076 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
5077 (match_operand:SI 2 "general_operand" "i,i")))]
5078 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
5079 "@
5080 movt%?\t%0, #:upper16:%c2
5081 movt\t%0, #:upper16:%c2"
5082 [(set_attr "arch" "32,v8mb")
5083 (set_attr "predicable" "yes")
5084 (set_attr "length" "4")
5085 (set_attr "type" "alu_sreg")]
5086 )
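;; Typical use (illustrative, register number arbitrary): arm_emit_movpair
;; emits a HIGH set followed by this LO_SUM insn, so a symbolic address
;; materializes as
;; movw r0, #:lower16:sym
;; movt r0, #:upper16:sym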
5087
5088 (define_insn "*arm_movsi_insn"
5089 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5090 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5091 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
5092 && ( register_operand (operands[0], SImode)
5093 || register_operand (operands[1], SImode))"
5094 "@
5095 mov%?\\t%0, %1
5096 mov%?\\t%0, %1
5097 mvn%?\\t%0, #%B1
5098 movw%?\\t%0, %1
5099 ldr%?\\t%0, %1
5100 str%?\\t%1, %0"
5101 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
5102 (set_attr "predicable" "yes")
5103 (set_attr "arch" "*,*,*,v6t2,*,*")
5104 (set_attr "pool_range" "*,*,*,*,4096,*")
5105 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5106 )
5107
5108 (define_split
5109 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5110 (match_operand:SI 1 "const_int_operand" ""))]
5111 "(TARGET_32BIT || TARGET_HAVE_MOVT)
5112 && (!(const_ok_for_arm (INTVAL (operands[1]))
5113 || const_ok_for_arm (~INTVAL (operands[1]))))"
5114 [(clobber (const_int 0))]
5115 "
5116 arm_split_constant (SET, SImode, NULL_RTX,
5117 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5118 DONE;
5119 "
5120 )
5121
5122 ;; The usual way to materialize (symbol + offset) needs at least three
5123 ;; instructions (depending on how big the offset is), as below:
5124 ;; movw r0, #:lower16:g
5125 ;; movt r0, #:upper16:g
5126 ;; adds r0, #4
5127 ;;
5128 ;; A better way would be:
5129 ;; movw r0, #:lower16:g+4
5130 ;; movt r0, #:upper16:g+4
5131 ;;
5132 ;; The limitation of this approach is that the offset must fit in a 16-bit
5133 ;; signed value, because the current assembler only supports REL-type
5134 ;; relocations for this case. If the more powerful RELA type is supported in
5135 ;; the future, we should update this pattern to use the shorter sequence.
5136 (define_split
5137 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5138 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
5139 (match_operand:SI 2 "const_int_operand" ""))))]
5140 "TARGET_THUMB
5141 && TARGET_HAVE_MOVT
5142 && arm_disable_literal_pool
5143 && reload_completed
5144 && GET_CODE (operands[1]) == SYMBOL_REF"
5145 [(clobber (const_int 0))]
5146 "
5147 int offset = INTVAL (operands[2]);
5148
5149 if (offset < -0x8000 || offset > 0x7fff)
5150 {
5151 arm_emit_movpair (operands[0], operands[1]);
5152 emit_insn (gen_rtx_SET (operands[0],
5153 gen_rtx_PLUS (SImode, operands[0], operands[2])));
5154 }
5155 else
5156 {
5157 rtx op = gen_rtx_CONST (SImode,
5158 gen_rtx_PLUS (SImode, operands[1], operands[2]));
5159 arm_emit_movpair (operands[0], op);
5160 }
5161 "
5162 )
5163
5164 ;; Split symbol_refs at a later stage (after cprop), instead of generating
5165 ;; the movt/movw pair directly at expand time. Otherwise the corresponding
5166 ;; high_sum and lo_sum would be merged back into a memory load at cprop.
5167 ;; When the default is to prefer movt/movw over a load from the constant
5168 ;; pool, this gives better performance.
5169 (define_split
5170 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5171 (match_operand:SI 1 "general_operand" ""))]
5172 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
5173 && !target_word_relocations
5174 && !arm_tls_referenced_p (operands[1])"
5175 [(clobber (const_int 0))]
5176 {
5177 arm_emit_movpair (operands[0], operands[1]);
5178 DONE;
5179 })
5180
5181 ;; When generating PIC, we need to load the symbol offset into a register.
5182 ;; So that the optimizer does not confuse this with a normal symbol load,
5183 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5184 ;; since that is the only type of relocation we can use.
5185
5186 ;; Wrap calculation of the whole PIC address in a single pattern for the
5187 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5188 ;; a PIC address involves two loads from memory, so we want to CSE it
5189 ;; as often as possible.
5190 ;; This pattern will be split into one of the pic_load_addr_* patterns
5191 ;; and a move after GCSE optimizations.
5192 ;;
5193 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
5194 (define_expand "calculate_pic_address"
5195 [(set (match_operand:SI 0 "register_operand")
5196 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
5197 (unspec:SI [(match_operand:SI 2 "" "")]
5198 UNSPEC_PIC_SYM))))]
5199 "flag_pic"
5200 )
5201
5202 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5203 (define_split
5204 [(set (match_operand:SI 0 "register_operand" "")
5205 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5206 (unspec:SI [(match_operand:SI 2 "" "")]
5207 UNSPEC_PIC_SYM))))]
5208 "flag_pic"
5209 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5210 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5211 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5212 )
5213
5214 ;; operand1 is the memory address to go into
5215 ;; pic_load_addr_32bit.
5216 ;; operand2 is the PIC label to be emitted
5217 ;; from pic_add_dot_plus_eight.
5218 ;; We do this to allow hoisting of the entire insn.
5219 (define_insn_and_split "pic_load_addr_unified"
5220 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
5221 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
5222 (match_operand:SI 2 "" "")]
5223 UNSPEC_PIC_UNIFIED))]
5224 "flag_pic"
5225 "#"
5226 "&& reload_completed"
5227 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
5228 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
5229 (match_dup 2)] UNSPEC_PIC_BASE))]
5230 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
5231 [(set_attr "type" "load_4,load_4,load_4")
5232 (set_attr "pool_range" "4096,4094,1022")
5233 (set_attr "neg_pool_range" "4084,0,0")
5234 (set_attr "arch" "a,t2,t1")
5235 (set_attr "length" "8,6,4")]
5236 )
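;; A rough sketch of what the ARM-state split above ends up as (register
;; number and label names arbitrary):
;; ldr r3, .LCPn @ offset loaded from the literal pool
;; .LPICn:
;; add r3, pc, r3
;; where the pool entry is biased so that the addition yields the wanted
;; PIC address.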
5237
5238 ;; The rather odd constraints on the following are to force reload to leave
5239 ;; the insn alone, and to force the minipool generation pass to then move
5240 ;; the GOT symbol to memory.
5241
5242 (define_insn "pic_load_addr_32bit"
5243 [(set (match_operand:SI 0 "s_register_operand" "=r")
5244 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5245 "TARGET_32BIT && flag_pic"
5246 "ldr%?\\t%0, %1"
5247 [(set_attr "type" "load_4")
5248 (set (attr "pool_range")
5249 (if_then_else (eq_attr "is_thumb" "no")
5250 (const_int 4096)
5251 (const_int 4094)))
5252 (set (attr "neg_pool_range")
5253 (if_then_else (eq_attr "is_thumb" "no")
5254 (const_int 4084)
5255 (const_int 0)))]
5256 )
5257
5258 (define_insn "pic_load_addr_thumb1"
5259 [(set (match_operand:SI 0 "s_register_operand" "=l")
5260 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5261 "TARGET_THUMB1 && flag_pic"
5262 "ldr\\t%0, %1"
5263 [(set_attr "type" "load_4")
5264 (set (attr "pool_range") (const_int 1018))]
5265 )
5266
5267 (define_insn "pic_add_dot_plus_four"
5268 [(set (match_operand:SI 0 "register_operand" "=r")
5269 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5270 (const_int 4)
5271 (match_operand 2 "" "")]
5272 UNSPEC_PIC_BASE))]
5273 "TARGET_THUMB"
5274 "*
5275 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5276 INTVAL (operands[2]));
5277 return \"add\\t%0, %|pc\";
5278 "
5279 [(set_attr "length" "2")
5280 (set_attr "type" "alu_sreg")]
5281 )
5282
5283 (define_insn "pic_add_dot_plus_eight"
5284 [(set (match_operand:SI 0 "register_operand" "=r")
5285 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5286 (const_int 8)
5287 (match_operand 2 "" "")]
5288 UNSPEC_PIC_BASE))]
5289 "TARGET_ARM"
5290 "*
5291 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5292 INTVAL (operands[2]));
5293 return \"add%?\\t%0, %|pc, %1\";
5294 "
5295 [(set_attr "predicable" "yes")
5296 (set_attr "type" "alu_sreg")]
5297 )
5298
5299 (define_insn "tls_load_dot_plus_eight"
5300 [(set (match_operand:SI 0 "register_operand" "=r")
5301 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5302 (const_int 8)
5303 (match_operand 2 "" "")]
5304 UNSPEC_PIC_BASE)))]
5305 "TARGET_ARM"
5306 "*
5307 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5308 INTVAL (operands[2]));
5309 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5310 "
5311 [(set_attr "predicable" "yes")
5312 (set_attr "type" "load_4")]
5313 )
5314
5315 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5316 ;; followed by a load. These sequences can be crunched down to
5317 ;; tls_load_dot_plus_eight by a peephole.
5318
5319 (define_peephole2
5320 [(set (match_operand:SI 0 "register_operand" "")
5321 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5322 (const_int 8)
5323 (match_operand 1 "" "")]
5324 UNSPEC_PIC_BASE))
5325 (set (match_operand:SI 2 "arm_general_register_operand" "")
5326 (mem:SI (match_dup 0)))]
5327 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5328 [(set (match_dup 2)
5329 (mem:SI (unspec:SI [(match_dup 3)
5330 (const_int 8)
5331 (match_dup 1)]
5332 UNSPEC_PIC_BASE)))]
5333 ""
5334 )
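;; Illustrative effect of the peephole above (register numbers arbitrary):
;; add r0, pc, r3
;; ldr r2, [r0]
;; collapses into the single insn
;; ldr r2, [pc, r3] @ tls_load_dot_plus_eight
;; when r0 is dead after the load.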
5335
5336 (define_insn "pic_offset_arm"
5337 [(set (match_operand:SI 0 "register_operand" "=r")
5338 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5339 (unspec:SI [(match_operand:SI 2 "" "X")]
5340 UNSPEC_PIC_OFFSET))))]
5341 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5342 "ldr%?\\t%0, [%1,%2]"
5343 [(set_attr "type" "load_4")]
5344 )
5345
5346 (define_expand "builtin_setjmp_receiver"
5347 [(label_ref (match_operand 0 "" ""))]
5348 "flag_pic"
5349 "
5350 {
5351 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5352 register. */
5353 if (arm_pic_register != INVALID_REGNUM)
5354 arm_load_pic_register (1UL << 3, NULL_RTX);
5355 DONE;
5356 }")
5357
5358 ;; If copying one reg to another we can set the condition codes according to
5359 ;; its value. Such a move is common after a return from a subroutine when
5360 ;; the result is being tested against zero.
5361
5362 (define_insn "*movsi_compare0"
5363 [(set (reg:CC CC_REGNUM)
5364 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5365 (const_int 0)))
5366 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5367 (match_dup 1))]
5368 "TARGET_32BIT"
5369 "@
5370 cmp%?\\t%0, #0
5371 subs%?\\t%0, %1, #0"
5372 [(set_attr "conds" "set")
5373 (set_attr "type" "alus_imm,alus_imm")]
5374 )
5375
5376 ;; Subroutine to store a half word from a register into memory.
5377 ;; Operand 0 is the source register (HImode)
5378 ;; Operand 1 is the destination address in a register (SImode)
5379
5380 ;; In both this routine and the next, we must be careful not to spill
5381 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5382 ;; can generate unrecognizable rtl.
5383
5384 (define_expand "storehi"
5385 [;; store the low byte
5386 (set (match_operand 1 "" "") (match_dup 3))
5387 ;; extract the high byte
5388 (set (match_dup 2)
5389 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5390 ;; store the high byte
5391 (set (match_dup 4) (match_dup 5))]
5392 "TARGET_ARM"
5393 "
5394 {
5395 rtx op1 = operands[1];
5396 rtx addr = XEXP (op1, 0);
5397 enum rtx_code code = GET_CODE (addr);
5398
5399 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5400 || code == MINUS)
5401 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5402
5403 operands[4] = adjust_address (op1, QImode, 1);
5404 operands[1] = adjust_address (operands[1], QImode, 0);
5405 operands[3] = gen_lowpart (QImode, operands[0]);
5406 operands[0] = gen_lowpart (SImode, operands[0]);
5407 operands[2] = gen_reg_rtx (SImode);
5408 operands[5] = gen_lowpart (QImode, operands[2]);
5409 }"
5410 )
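;; A little-endian sketch of what this expands to (register numbers
;; arbitrary; storehi_bigend below handles the big-endian byte order):
;; strb r1, [r2] @ low byte of the halfword
;; mov r3, r1, asr #8 @ extract the high byte
;; strb r3, [r2, #1] @ high byte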
5411
5412 (define_expand "storehi_bigend"
5413 [(set (match_dup 4) (match_dup 3))
5414 (set (match_dup 2)
5415 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5416 (set (match_operand 1 "" "") (match_dup 5))]
5417 "TARGET_ARM"
5418 "
5419 {
5420 rtx op1 = operands[1];
5421 rtx addr = XEXP (op1, 0);
5422 enum rtx_code code = GET_CODE (addr);
5423
5424 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5425 || code == MINUS)
5426 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5427
5428 operands[4] = adjust_address (op1, QImode, 1);
5429 operands[1] = adjust_address (operands[1], QImode, 0);
5430 operands[3] = gen_lowpart (QImode, operands[0]);
5431 operands[0] = gen_lowpart (SImode, operands[0]);
5432 operands[2] = gen_reg_rtx (SImode);
5433 operands[5] = gen_lowpart (QImode, operands[2]);
5434 }"
5435 )
5436
5437 ;; Subroutine to store a half word integer constant into memory.
5438 (define_expand "storeinthi"
5439 [(set (match_operand 0 "" "")
5440 (match_operand 1 "" ""))
5441 (set (match_dup 3) (match_dup 2))]
5442 "TARGET_ARM"
5443 "
5444 {
5445 HOST_WIDE_INT value = INTVAL (operands[1]);
5446 rtx addr = XEXP (operands[0], 0);
5447 rtx op0 = operands[0];
5448 enum rtx_code code = GET_CODE (addr);
5449
5450 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5451 || code == MINUS)
5452 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5453
5454 operands[1] = gen_reg_rtx (SImode);
5455 if (BYTES_BIG_ENDIAN)
5456 {
5457 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5458 if ((value & 255) == ((value >> 8) & 255))
5459 operands[2] = operands[1];
5460 else
5461 {
5462 operands[2] = gen_reg_rtx (SImode);
5463 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5464 }
5465 }
5466 else
5467 {
5468 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5469 if ((value & 255) == ((value >> 8) & 255))
5470 operands[2] = operands[1];
5471 else
5472 {
5473 operands[2] = gen_reg_rtx (SImode);
5474 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5475 }
5476 }
5477
5478 operands[3] = adjust_address (op0, QImode, 1);
5479 operands[0] = adjust_address (operands[0], QImode, 0);
5480 operands[2] = gen_lowpart (QImode, operands[2]);
5481 operands[1] = gen_lowpart (QImode, operands[1]);
5482 }"
5483 )
5484
5485 (define_expand "storehi_single_op"
5486 [(set (match_operand:HI 0 "memory_operand")
5487 (match_operand:HI 1 "general_operand"))]
5488 "TARGET_32BIT && arm_arch4"
5489 "
5490 if (!s_register_operand (operands[1], HImode))
5491 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5492 "
5493 )
5494
5495 (define_expand "movhi"
5496 [(set (match_operand:HI 0 "general_operand")
5497 (match_operand:HI 1 "general_operand"))]
5498 "TARGET_EITHER"
5499 "
5500 gcc_checking_assert (aligned_operand (operands[0], HImode));
5501 gcc_checking_assert (aligned_operand (operands[1], HImode));
5502 if (TARGET_ARM)
5503 {
5504 if (can_create_pseudo_p ())
5505 {
5506 if (MEM_P (operands[0]))
5507 {
5508 if (arm_arch4)
5509 {
5510 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5511 DONE;
5512 }
5513 if (CONST_INT_P (operands[1]))
5514 emit_insn (gen_storeinthi (operands[0], operands[1]));
5515 else
5516 {
5517 if (MEM_P (operands[1]))
5518 operands[1] = force_reg (HImode, operands[1]);
5519 if (BYTES_BIG_ENDIAN)
5520 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5521 else
5522 emit_insn (gen_storehi (operands[1], operands[0]));
5523 }
5524 DONE;
5525 }
5526 /* Sign extend a constant, and keep it in an SImode reg. */
5527 else if (CONST_INT_P (operands[1]))
5528 {
5529 rtx reg = gen_reg_rtx (SImode);
5530 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5531
5532 /* If the constant is already valid, leave it alone. */
5533 if (!const_ok_for_arm (val))
5534 {
5535 /* If setting all the top bits will make the constant
5536 loadable in a single instruction, then set them.
5537 Otherwise, sign extend the number. */
5538
5539 if (const_ok_for_arm (~(val | ~0xffff)))
5540 val |= ~0xffff;
5541 else if (val & 0x8000)
5542 val |= ~0xffff;
5543 }
5544
5545 emit_insn (gen_movsi (reg, GEN_INT (val)));
5546 operands[1] = gen_lowpart (HImode, reg);
5547 }
5548 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5549 && MEM_P (operands[1]))
5550 {
5551 rtx reg = gen_reg_rtx (SImode);
5552
5553 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5554 operands[1] = gen_lowpart (HImode, reg);
5555 }
5556 else if (!arm_arch4)
5557 {
5558 if (MEM_P (operands[1]))
5559 {
5560 rtx base;
5561 rtx offset = const0_rtx;
5562 rtx reg = gen_reg_rtx (SImode);
5563
5564 if ((REG_P (base = XEXP (operands[1], 0))
5565 || (GET_CODE (base) == PLUS
5566 && (CONST_INT_P (offset = XEXP (base, 1)))
5567 && ((INTVAL(offset) & 1) != 1)
5568 && REG_P (base = XEXP (base, 0))))
5569 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5570 {
5571 rtx new_rtx;
5572
5573 new_rtx = widen_memory_access (operands[1], SImode,
5574 ((INTVAL (offset) & ~3)
5575 - INTVAL (offset)));
5576 emit_insn (gen_movsi (reg, new_rtx));
5577 if (((INTVAL (offset) & 2) != 0)
5578 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5579 {
5580 rtx reg2 = gen_reg_rtx (SImode);
5581
5582 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5583 reg = reg2;
5584 }
5585 }
5586 else
5587 emit_insn (gen_movhi_bytes (reg, operands[1]));
5588
5589 operands[1] = gen_lowpart (HImode, reg);
5590 }
5591 }
5592 }
5593 /* Handle loading a large integer during reload. */
5594 else if (CONST_INT_P (operands[1])
5595 && !const_ok_for_arm (INTVAL (operands[1]))
5596 && !const_ok_for_arm (~INTVAL (operands[1])))
5597 {
5598 /* Writing a constant to memory needs a scratch, which should
5599 be handled with SECONDARY_RELOADs. */
5600 gcc_assert (REG_P (operands[0]));
5601
5602 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5603 emit_insn (gen_movsi (operands[0], operands[1]));
5604 DONE;
5605 }
5606 }
5607 else if (TARGET_THUMB2)
5608 {
5609 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5610 if (can_create_pseudo_p ())
5611 {
5612 if (!REG_P (operands[0]))
5613 operands[1] = force_reg (HImode, operands[1]);
5614 /* Zero extend a constant, and keep it in an SImode reg. */
5615 else if (CONST_INT_P (operands[1]))
5616 {
5617 rtx reg = gen_reg_rtx (SImode);
5618 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5619
5620 emit_insn (gen_movsi (reg, GEN_INT (val)));
5621 operands[1] = gen_lowpart (HImode, reg);
5622 }
5623 }
5624 }
5625 else /* TARGET_THUMB1 */
5626 {
5627 if (can_create_pseudo_p ())
5628 {
5629 if (CONST_INT_P (operands[1]))
5630 {
5631 rtx reg = gen_reg_rtx (SImode);
5632
5633 emit_insn (gen_movsi (reg, operands[1]));
5634 operands[1] = gen_lowpart (HImode, reg);
5635 }
5636
5637 /* ??? We shouldn't really get invalid addresses here, but this can
5638 happen if we are passed an SP (never OK for HImode/QImode) or
5639 virtual register (also rejected as illegitimate for HImode/QImode)
5640 relative address. */
5641 /* ??? This should perhaps be fixed elsewhere, for instance, in
5642 fixup_stack_1, by checking for other kinds of invalid addresses,
5643 e.g. a bare reference to a virtual register. This may confuse the
5644 alpha though, which must handle this case differently. */
5645 if (MEM_P (operands[0])
5646 && !memory_address_p (GET_MODE (operands[0]),
5647 XEXP (operands[0], 0)))
5648 operands[0]
5649 = replace_equiv_address (operands[0],
5650 copy_to_reg (XEXP (operands[0], 0)));
5651
5652 if (MEM_P (operands[1])
5653 && !memory_address_p (GET_MODE (operands[1]),
5654 XEXP (operands[1], 0)))
5655 operands[1]
5656 = replace_equiv_address (operands[1],
5657 copy_to_reg (XEXP (operands[1], 0)));
5658
5659 if (MEM_P (operands[1]) && optimize > 0)
5660 {
5661 rtx reg = gen_reg_rtx (SImode);
5662
5663 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5664 operands[1] = gen_lowpart (HImode, reg);
5665 }
5666
5667 if (MEM_P (operands[0]))
5668 operands[1] = force_reg (HImode, operands[1]);
5669 }
5670 else if (CONST_INT_P (operands[1])
5671 && !satisfies_constraint_I (operands[1]))
5672 {
5673 /* Handle loading a large integer during reload. */
5674
5675 /* Writing a constant to memory needs a scratch, which should
5676 be handled with SECONDARY_RELOADs. */
5677 gcc_assert (REG_P (operands[0]));
5678
5679 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5680 emit_insn (gen_movsi (operands[0], operands[1]));
5681 DONE;
5682 }
5683 }
5684 "
5685 )
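;; Worked example for the constant handling above: (const_int 0xfffe) is not
;; a valid ARM immediate, but setting all the bits above bit 15 gives
;; 0xfffffffe, which loads in a single instruction:
;; mvn r0, #1
;; The low 16 bits are unchanged, which is all that matters for HImode.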
5686
5687 (define_expand "movhi_bytes"
5688 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5689 (set (match_dup 3)
5690 (zero_extend:SI (match_dup 6)))
5691 (set (match_operand:SI 0 "" "")
5692 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5693 "TARGET_ARM"
5694 "
5695 {
5696 rtx mem1, mem2;
5697 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5698
5699 mem1 = change_address (operands[1], QImode, addr);
5700 mem2 = change_address (operands[1], QImode,
5701 plus_constant (Pmode, addr, 1));
5702 operands[0] = gen_lowpart (SImode, operands[0]);
5703 operands[1] = mem1;
5704 operands[2] = gen_reg_rtx (SImode);
5705 operands[3] = gen_reg_rtx (SImode);
5706 operands[6] = mem2;
5707
5708 if (BYTES_BIG_ENDIAN)
5709 {
5710 operands[4] = operands[2];
5711 operands[5] = operands[3];
5712 }
5713 else
5714 {
5715 operands[4] = operands[3];
5716 operands[5] = operands[2];
5717 }
5718 }"
5719 )
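;; A little-endian sketch of the expansion above (register numbers
;; arbitrary):
;; ldrb r2, [r3] @ low byte
;; ldrb r1, [r3, #1] @ high byte
;; orr r0, r2, r1, lsl #8
;; On big-endian targets the roles of the two bytes are swapped.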
5720
5721 (define_expand "movhi_bigend"
5722 [(set (match_dup 2)
5723 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
5724 (const_int 16)))
5725 (set (match_dup 3)
5726 (ashiftrt:SI (match_dup 2) (const_int 16)))
5727 (set (match_operand:HI 0 "s_register_operand")
5728 (match_dup 4))]
5729 "TARGET_ARM"
5730 "
5731 operands[2] = gen_reg_rtx (SImode);
5732 operands[3] = gen_reg_rtx (SImode);
5733 operands[4] = gen_lowpart (HImode, operands[3]);
5734 "
5735 )
5736
5737 ;; Pattern to recognize the insn generated by the default case above.
5738 (define_insn "*movhi_insn_arch4"
5739 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
5740 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))]
5741 "TARGET_ARM
5742 && arm_arch4 && !TARGET_HARD_FLOAT
5743 && (register_operand (operands[0], HImode)
5744 || register_operand (operands[1], HImode))"
5745 "@
5746 mov%?\\t%0, %1\\t%@ movhi
5747 mvn%?\\t%0, #%B1\\t%@ movhi
5748 movw%?\\t%0, %L1\\t%@ movhi
5749 strh%?\\t%1, %0\\t%@ movhi
5750 ldrh%?\\t%0, %1\\t%@ movhi"
5751 [(set_attr "predicable" "yes")
5752 (set_attr "pool_range" "*,*,*,*,256")
5753 (set_attr "neg_pool_range" "*,*,*,*,244")
5754 (set_attr "arch" "*,*,v6t2,*,*")
5755 (set_attr_alternative "type"
5756 [(if_then_else (match_operand 1 "const_int_operand" "")
5757 (const_string "mov_imm" )
5758 (const_string "mov_reg"))
5759 (const_string "mvn_imm")
5760 (const_string "mov_imm")
5761 (const_string "store_4")
5762 (const_string "load_4")])]
5763 )
5764
5765 (define_insn "*movhi_bytes"
5766 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
5767 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))]
5768 "TARGET_ARM && !TARGET_HARD_FLOAT"
5769 "@
5770 mov%?\\t%0, %1\\t%@ movhi
5771 mov%?\\t%0, %1\\t%@ movhi
5772 mvn%?\\t%0, #%B1\\t%@ movhi"
5773 [(set_attr "predicable" "yes")
5774 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
5775 )
5776
5777 ;; We use a DImode scratch because we may occasionally need an additional
5778 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5779 ;; to take any notice of the "o" constraint on the reload_memory_operand operand.
5780 ;; The reload_in<m> and reload_out<m> patterns require special constraints
5781 ;; to be handled correctly by the default_secondary_reload function.
5782 (define_expand "reload_outhi"
5783 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5784 (match_operand:HI 1 "s_register_operand" "r")
5785 (match_operand:DI 2 "s_register_operand" "=&l")])]
5786 "TARGET_EITHER"
5787 "if (TARGET_ARM)
5788 arm_reload_out_hi (operands);
5789 else
5790 thumb_reload_out_hi (operands);
5791 DONE;
5792 "
5793 )
5794
5795 (define_expand "reload_inhi"
5796 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5797 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5798 (match_operand:DI 2 "s_register_operand" "=&r")])]
5799 "TARGET_EITHER"
5800 "
5801 if (TARGET_ARM)
5802 arm_reload_in_hi (operands);
5803 else
5804 thumb_reload_out_hi (operands);
5805 DONE;
5806 ")
5807
5808 (define_expand "movqi"
5809 [(set (match_operand:QI 0 "general_operand")
5810 (match_operand:QI 1 "general_operand"))]
5811 "TARGET_EITHER"
5812 "
5813 /* Everything except mem = const or mem = mem can be done easily */
5814
5815 if (can_create_pseudo_p ())
5816 {
5817 if (CONST_INT_P (operands[1]))
5818 {
5819 rtx reg = gen_reg_rtx (SImode);
5820
5821 /* For thumb we want an unsigned immediate, then we are more likely
5822 to be able to use a movs insn. */
5823 if (TARGET_THUMB)
5824 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
5825
5826 emit_insn (gen_movsi (reg, operands[1]));
5827 operands[1] = gen_lowpart (QImode, reg);
5828 }
5829
5830 if (TARGET_THUMB)
5831 {
5832 /* ??? We shouldn't really get invalid addresses here, but this can
5833 happen if we are passed an SP (never OK for HImode/QImode) or
5834 virtual register (also rejected as illegitimate for HImode/QImode)
5835 relative address. */
5836 /* ??? This should perhaps be fixed elsewhere, for instance, in
5837 fixup_stack_1, by checking for other kinds of invalid addresses,
5838 e.g. a bare reference to a virtual register. This may confuse the
5839 alpha though, which must handle this case differently. */
5840 if (MEM_P (operands[0])
5841 && !memory_address_p (GET_MODE (operands[0]),
5842 XEXP (operands[0], 0)))
5843 operands[0]
5844 = replace_equiv_address (operands[0],
5845 copy_to_reg (XEXP (operands[0], 0)));
5846 if (MEM_P (operands[1])
5847 && !memory_address_p (GET_MODE (operands[1]),
5848 XEXP (operands[1], 0)))
5849 operands[1]
5850 = replace_equiv_address (operands[1],
5851 copy_to_reg (XEXP (operands[1], 0)));
5852 }
5853
5854 if (MEM_P (operands[1]) && optimize > 0)
5855 {
5856 rtx reg = gen_reg_rtx (SImode);
5857
5858 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5859 operands[1] = gen_lowpart (QImode, reg);
5860 }
5861
5862 if (MEM_P (operands[0]))
5863 operands[1] = force_reg (QImode, operands[1]);
5864 }
5865 else if (TARGET_THUMB
5866 && CONST_INT_P (operands[1])
5867 && !satisfies_constraint_I (operands[1]))
5868 {
5869 /* Handle loading a large integer during reload. */
5870
5871 /* Writing a constant to memory needs a scratch, which should
5872 be handled with SECONDARY_RELOADs. */
5873 gcc_assert (REG_P (operands[0]));
5874
5875 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5876 emit_insn (gen_movsi (operands[0], operands[1]));
5877 DONE;
5878 }
5879 "
5880 )
5881
5882 (define_insn "*arm_movqi_insn"
5883 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
5884 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
5885 "TARGET_32BIT
5886 && ( register_operand (operands[0], QImode)
5887 || register_operand (operands[1], QImode))"
5888 "@
5889 mov%?\\t%0, %1
5890 mov%?\\t%0, %1
5891 mov%?\\t%0, %1
5892 mov%?\\t%0, %1
5893 mvn%?\\t%0, #%B1
5894 ldrb%?\\t%0, %1
5895 strb%?\\t%1, %0
5896 ldrb%?\\t%0, %1
5897 strb%?\\t%1, %0"
5898 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
5899 (set_attr "predicable" "yes")
5900 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
5901 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
5902 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
5903 )
5904
5905 ;; HFmode moves
5906 (define_expand "movhf"
5907 [(set (match_operand:HF 0 "general_operand")
5908 (match_operand:HF 1 "general_operand"))]
5909 "TARGET_EITHER"
5910 "
5911 gcc_checking_assert (aligned_operand (operands[0], HFmode));
5912 gcc_checking_assert (aligned_operand (operands[1], HFmode));
5913 if (TARGET_32BIT)
5914 {
5915 if (MEM_P (operands[0]))
5916 operands[1] = force_reg (HFmode, operands[1]);
5917 }
5918 else /* TARGET_THUMB1 */
5919 {
5920 if (can_create_pseudo_p ())
5921 {
5922 if (!REG_P (operands[0]))
5923 operands[1] = force_reg (HFmode, operands[1]);
5924 }
5925 }
5926 "
5927 )
5928
5929 (define_insn "*arm32_movhf"
5930 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
5931 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
5932 "TARGET_32BIT && !TARGET_HARD_FLOAT
5933 && ( s_register_operand (operands[0], HFmode)
5934 || s_register_operand (operands[1], HFmode))"
5935 "*
5936 switch (which_alternative)
5937 {
5938 case 0: /* ARM register from memory */
5939 return \"ldrh%?\\t%0, %1\\t%@ __fp16\";
5940 case 1: /* memory from ARM register */
5941 return \"strh%?\\t%1, %0\\t%@ __fp16\";
5942 case 2: /* ARM register from ARM register */
5943 return \"mov%?\\t%0, %1\\t%@ __fp16\";
5944 case 3: /* ARM register from constant */
5945 {
5946 long bits;
5947 rtx ops[4];
5948
5949 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
5950 HFmode);
5951 ops[0] = operands[0];
5952 ops[1] = GEN_INT (bits);
5953 ops[2] = GEN_INT (bits & 0xff00);
5954 ops[3] = GEN_INT (bits & 0x00ff);
5955
5956 if (arm_arch_thumb2)
5957 output_asm_insn (\"movw%?\\t%0, %1\", ops);
5958 else
5959 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
5960 return \"\";
5961 }
5962 default:
5963 gcc_unreachable ();
5964 }
5965 "
5966 [(set_attr "conds" "unconditional")
5967 (set_attr "type" "load_4,store_4,mov_reg,multiple")
5968 (set_attr "length" "4,4,4,8")
5969 (set_attr "predicable" "yes")]
5970 )
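;; Constant example for alternative 3 above (register number arbitrary):
;; __fp16 1.0 has the bit image 0x3c00, so the output is either
;; movw r0, #15360 @ 0x3c00
;; on v6t2-capable cores, or
;; mov r0, #15360
;; orr r0, r0, #0
;; on older ones.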
5971
5972 (define_expand "movsf"
5973 [(set (match_operand:SF 0 "general_operand")
5974 (match_operand:SF 1 "general_operand"))]
5975 "TARGET_EITHER"
5976 "
5977 gcc_checking_assert (aligned_operand (operands[0], SFmode));
5978 gcc_checking_assert (aligned_operand (operands[1], SFmode));
5979 if (TARGET_32BIT)
5980 {
5981 if (MEM_P (operands[0]))
5982 operands[1] = force_reg (SFmode, operands[1]);
5983 }
5984 else /* TARGET_THUMB1 */
5985 {
5986 if (can_create_pseudo_p ())
5987 {
5988 if (!REG_P (operands[0]))
5989 operands[1] = force_reg (SFmode, operands[1]);
5990 }
5991 }
5992
5993 /* Cannot load it directly, generate a load with clobber so that it can be
5994 loaded via GPR with MOV / MOVT. */
5995 if (arm_disable_literal_pool
5996 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
5997 && CONST_DOUBLE_P (operands[1])
5998 && TARGET_HARD_FLOAT
5999 && !vfp3_const_double_rtx (operands[1]))
6000 {
6001 rtx clobreg = gen_reg_rtx (SFmode);
6002 emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
6003 clobreg));
6004 DONE;
6005 }
6006 "
6007 )
6008
6009 ;; When a floating-point constant is being moved into a core register,
6010 ;; transform the move into an SImode operation.
6011 (define_split
6012 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6013 (match_operand:SF 1 "immediate_operand" ""))]
6014 "TARGET_EITHER
6015 && reload_completed
6016 && CONST_DOUBLE_P (operands[1])"
6017 [(set (match_dup 2) (match_dup 3))]
6018 "
6019 operands[2] = gen_lowpart (SImode, operands[0]);
6020 operands[3] = gen_lowpart (SImode, operands[1]);
6021 if (operands[2] == 0 || operands[3] == 0)
6022 FAIL;
6023 "
6024 )
6025
6026 (define_insn "*arm_movsf_soft_insn"
6027 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6028 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6029 "TARGET_32BIT
6030 && TARGET_SOFT_FLOAT
6031 && (!MEM_P (operands[0])
6032 || register_operand (operands[1], SFmode))"
6033 {
6034 switch (which_alternative)
6035 {
6036 case 0: return \"mov%?\\t%0, %1\";
6037 case 1:
6038 /* Cannot load it directly, split to load it via MOV / MOVT. */
6039 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6040 return \"#\";
6041 return \"ldr%?\\t%0, %1\\t%@ float\";
6042 case 2: return \"str%?\\t%1, %0\\t%@ float\";
6043 default: gcc_unreachable ();
6044 }
6045 }
6046 [(set_attr "predicable" "yes")
6047 (set_attr "type" "mov_reg,load_4,store_4")
6048 (set_attr "arm_pool_range" "*,4096,*")
6049 (set_attr "thumb2_pool_range" "*,4094,*")
6050 (set_attr "arm_neg_pool_range" "*,4084,*")
6051 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6052 )
6053
6054 ;; Splitter for the above.
6055 (define_split
6056 [(set (match_operand:SF 0 "s_register_operand")
6057 (match_operand:SF 1 "const_double_operand"))]
6058 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6059 [(const_int 0)]
6060 {
6061 long buf;
6062 real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
6063 rtx cst = gen_int_mode (buf, SImode);
6064 emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
6065 DONE;
6066 }
6067 )
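;; E.g. the SFmode constant 1.0f has the SImode image 0x3f800000; with the
;; literal pool disabled the splitter above just emits an SImode constant
;; move into the low subreg, which then uses whatever mov/movw/movt sequence
;; a plain SImode constant load would use.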
6068
6069 (define_expand "movdf"
6070 [(set (match_operand:DF 0 "general_operand")
6071 (match_operand:DF 1 "general_operand"))]
6072 "TARGET_EITHER"
6073 "
6074 gcc_checking_assert (aligned_operand (operands[0], DFmode));
6075 gcc_checking_assert (aligned_operand (operands[1], DFmode));
6076 if (TARGET_32BIT)
6077 {
6078 if (MEM_P (operands[0]))
6079 operands[1] = force_reg (DFmode, operands[1]);
6080 }
6081 else /* TARGET_THUMB */
6082 {
6083 if (can_create_pseudo_p ())
6084 {
6085 if (!REG_P (operands[0]))
6086 operands[1] = force_reg (DFmode, operands[1]);
6087 }
6088 }
6089
6090 /* Cannot load it directly, generate a load with clobber so that it can be
6091 loaded via GPR with MOV / MOVT. */
6092 if (arm_disable_literal_pool
6093 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6094 && CONSTANT_P (operands[1])
6095 && TARGET_HARD_FLOAT
6096 && !arm_const_double_rtx (operands[1])
6097 && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
6098 {
6099 rtx clobreg = gen_reg_rtx (DFmode);
6100 emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
6101 clobreg));
6102 DONE;
6103 }
6104 "
6105 )
6106
6107 ;; Reloading a DFmode value stored in integer regs to memory can require a
6108 ;; scratch reg.
6109 ;; Another reload_out<m> pattern that requires special constraints.
6110 (define_expand "reload_outdf"
6111 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6112 (match_operand:DF 1 "s_register_operand" "r")
6113 (match_operand:SI 2 "s_register_operand" "=&r")]
6114 "TARGET_THUMB2"
6115 "
6116 {
6117 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6118
6119 if (code == REG)
6120 operands[2] = XEXP (operands[0], 0);
6121 else if (code == POST_INC || code == PRE_DEC)
6122 {
6123 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6124 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6125 emit_insn (gen_movdi (operands[0], operands[1]));
6126 DONE;
6127 }
6128 else if (code == PRE_INC)
6129 {
6130 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6131
6132 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6133 operands[2] = reg;
6134 }
6135 else if (code == POST_DEC)
6136 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6137 else
6138 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6139 XEXP (XEXP (operands[0], 0), 1)));
6140
6141 emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
6142 operands[1]));
6143
6144 if (code == POST_DEC)
6145 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6146
6147 DONE;
6148 }"
6149 )
6150
6151 (define_insn "*movdf_soft_insn"
6152 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6153 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6154 "TARGET_32BIT && TARGET_SOFT_FLOAT
6155 && ( register_operand (operands[0], DFmode)
6156 || register_operand (operands[1], DFmode))"
6157 "*
6158 switch (which_alternative)
6159 {
6160 case 0:
6161 case 1:
6162 case 2:
6163 return \"#\";
6164 case 3:
6165 /* Cannot load it directly, split to load it via MOV / MOVT. */
6166 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6167 return \"#\";
6168 /* Fall through. */
6169 default:
6170 return output_move_double (operands, true, NULL);
6171 }
6172 "
6173 [(set_attr "length" "8,12,16,8,8")
6174 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6175 (set_attr "arm_pool_range" "*,*,*,1020,*")
6176 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
6177 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6178 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6179 )
6180
6181 ;; Splitter for the above.
6182 (define_split
6183 [(set (match_operand:DF 0 "s_register_operand")
6184 (match_operand:DF 1 "const_double_operand"))]
6185 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6186 [(const_int 0)]
6187 {
6188 long buf[2];
6189 int order = BYTES_BIG_ENDIAN ? 1 : 0;
6190 real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
6191 unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
6192 ival |= (zext_hwi (buf[1 - order], 32) << 32);
6193 rtx cst = gen_int_mode (ival, DImode);
6194 emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
6195 DONE;
6196 }
6197 )
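;; E.g. the DFmode constant 1.0 has the target image 0x3ff0000000000000; the
;; splitter above packs the two 32-bit words into one DImode constant
;; (respecting the target word order) and emits a single DImode move for the
;; register pair.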
6198 \f
6199
6200 ;; load- and store-multiple insns
6201 ;; The ARM can load/store any set of registers, provided that they are in
6202 ;; ascending order, but these expanders assume a contiguous set.
6203
6204 (define_expand "load_multiple"
6205 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6206 (match_operand:SI 1 "" ""))
6207 (use (match_operand:SI 2 "" ""))])]
6208 "TARGET_32BIT"
6209 {
6210 HOST_WIDE_INT offset = 0;
6211
6212 /* Support only fixed point registers. */
6213 if (!CONST_INT_P (operands[2])
6214 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6215 || INTVAL (operands[2]) < 2
6216 || !MEM_P (operands[1])
6217 || !REG_P (operands[0])
6218 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6219 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6220 FAIL;
6221
6222 operands[3]
6223 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6224 INTVAL (operands[2]),
6225 force_reg (SImode, XEXP (operands[1], 0)),
6226 FALSE, operands[1], &offset);
6227 })
6228
6229 (define_expand "store_multiple"
6230 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6231 (match_operand:SI 1 "" ""))
6232 (use (match_operand:SI 2 "" ""))])]
6233 "TARGET_32BIT"
6234 {
6235 HOST_WIDE_INT offset = 0;
6236
6237 /* Support only fixed point registers. */
6238 if (!CONST_INT_P (operands[2])
6239 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6240 || INTVAL (operands[2]) < 2
6241 || !REG_P (operands[1])
6242 || !MEM_P (operands[0])
6243 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6244 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6245 FAIL;
6246
6247 operands[3]
6248 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6249 INTVAL (operands[2]),
6250 force_reg (SImode, XEXP (operands[0], 0)),
6251 FALSE, operands[0], &offset);
6252 })
6253
6254
6255 (define_expand "setmemsi"
6256 [(match_operand:BLK 0 "general_operand")
6257 (match_operand:SI 1 "const_int_operand")
6258 (match_operand:SI 2 "const_int_operand")
6259 (match_operand:SI 3 "const_int_operand")]
6260 "TARGET_32BIT"
6261 {
6262 if (arm_gen_setmem (operands))
6263 DONE;
6264
6265 FAIL;
6266 })
6267
6268
6269 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6270 ;; We could let this apply to smaller blocks as well, but it clobbers so
6271 ;; many registers that there is probably a better way for those.
6272
6273 (define_expand "cpymemqi"
6274 [(match_operand:BLK 0 "general_operand")
6275 (match_operand:BLK 1 "general_operand")
6276 (match_operand:SI 2 "const_int_operand")
6277 (match_operand:SI 3 "const_int_operand")]
6278 ""
6279 "
6280 if (TARGET_32BIT)
6281 {
6282 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
6283 && !optimize_function_for_size_p (cfun))
6284 {
6285 if (gen_cpymem_ldrd_strd (operands))
6286 DONE;
6287 FAIL;
6288 }
6289
6290 if (arm_gen_cpymemqi (operands))
6291 DONE;
6292 FAIL;
6293 }
6294 else /* TARGET_THUMB1 */
6295 {
6296 if ( INTVAL (operands[3]) != 4
6297 || INTVAL (operands[2]) > 48)
6298 FAIL;
6299
6300 thumb_expand_cpymemqi (operands);
6301 DONE;
6302 }
6303 "
6304 )
6305 \f
6306
6307 ;; Compare & branch insns
6308 ;; The range calculations are done as follows:
6309 ;; For forward branches, the address calculation returns the address of
6310 ;; the next instruction. This is 2 beyond the branch instruction.
6311 ;; For backward branches, the address calculation returns the address of
6312 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6313 ;; instruction for the shortest sequence, and 4 before the branch instruction
6314 ;; if we have to jump around an unconditional branch.
6315 ;; To the basic branch range the PC offset must be added (this is +4).
6316 ;; So for forward branches we have
6317 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6318 ;; And for backward branches we have
6319 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6320 ;;
6321 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6322 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
6323
6324 (define_expand "cbranchsi4"
6325 [(set (pc) (if_then_else
6326 (match_operator 0 "expandable_comparison_operator"
6327 [(match_operand:SI 1 "s_register_operand")
6328 (match_operand:SI 2 "nonmemory_operand")])
6329 (label_ref (match_operand 3 "" ""))
6330 (pc)))]
6331 "TARGET_EITHER"
6332 "
6333 if (!TARGET_THUMB1)
6334 {
6335 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6336 FAIL;
6337 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6338 operands[3]));
6339 DONE;
6340 }
6341 if (thumb1_cmpneg_operand (operands[2], SImode))
6342 {
6343 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6344 operands[3], operands[0]));
6345 DONE;
6346 }
6347 if (!thumb1_cmp_operand (operands[2], SImode))
6348 operands[2] = force_reg (SImode, operands[2]);
6349 ")
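;; Rough example of the Thumb-1 negative-constant path above (register
;; numbers arbitrary): a branch on (x == -1) goes through
;; cbranchsi4_scratch and comes out as something like
;; adds r3, r0, #1
;; beq .Ltarget
;; i.e. the negated constant is added into a scratch and the flags are used.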
6350
6351 (define_expand "cbranchsf4"
6352 [(set (pc) (if_then_else
6353 (match_operator 0 "expandable_comparison_operator"
6354 [(match_operand:SF 1 "s_register_operand")
6355 (match_operand:SF 2 "vfp_compare_operand")])
6356 (label_ref (match_operand 3 "" ""))
6357 (pc)))]
6358 "TARGET_32BIT && TARGET_HARD_FLOAT"
6359 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6360 operands[3])); DONE;"
6361 )
6362
6363 (define_expand "cbranchdf4"
6364 [(set (pc) (if_then_else
6365 (match_operator 0 "expandable_comparison_operator"
6366 [(match_operand:DF 1 "s_register_operand")
6367 (match_operand:DF 2 "vfp_compare_operand")])
6368 (label_ref (match_operand 3 "" ""))
6369 (pc)))]
6370 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6371 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6372 operands[3])); DONE;"
6373 )
6374
6375 (define_expand "cbranchdi4"
6376 [(set (pc) (if_then_else
6377 (match_operator 0 "expandable_comparison_operator"
6378 [(match_operand:DI 1 "s_register_operand")
6379 (match_operand:DI 2 "cmpdi_operand")])
6380 (label_ref (match_operand 3 "" ""))
6381 (pc)))]
6382 "TARGET_32BIT"
6383 "{
6384 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6385 FAIL;
6386 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6387 operands[3]));
6388 DONE;
6389 }"
6390 )
6391
6392 ;; Comparison and test insns
6393
6394 (define_insn "*arm_cmpsi_insn"
6395 [(set (reg:CC CC_REGNUM)
6396 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
6397 (match_operand:SI 1 "arm_add_operand" "Py,r,r,I,L")))]
6398 "TARGET_32BIT"
6399 "@
6400 cmp%?\\t%0, %1
6401 cmp%?\\t%0, %1
6402 cmp%?\\t%0, %1
6403 cmp%?\\t%0, %1
6404 cmn%?\\t%0, #%n1"
6405 [(set_attr "conds" "set")
6406 (set_attr "arch" "t2,t2,any,any,any")
6407 (set_attr "length" "2,2,4,4,4")
6408 (set_attr "predicable" "yes")
6409 (set_attr "predicable_short_it" "yes,yes,yes,no,no")
6410 (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
6411 )
6412
6413 (define_insn "*cmpsi_shiftsi"
6414 [(set (reg:CC CC_REGNUM)
6415 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r,r")
6416 (match_operator:SI 3 "shift_operator"
6417 [(match_operand:SI 1 "s_register_operand" "r,r,r")
6418 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])))]
6419 "TARGET_32BIT"
6420 "cmp\\t%0, %1%S3"
6421 [(set_attr "conds" "set")
6422 (set_attr "shift" "1")
6423 (set_attr "arch" "32,a,a")
6424 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
6425
6426 (define_insn "*cmpsi_shiftsi_swp"
6427 [(set (reg:CC_SWP CC_REGNUM)
6428 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
6429 [(match_operand:SI 1 "s_register_operand" "r,r,r")
6430 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])
6431 (match_operand:SI 0 "s_register_operand" "r,r,r")))]
6432 "TARGET_32BIT"
6433 "cmp%?\\t%0, %1%S3"
6434 [(set_attr "conds" "set")
6435 (set_attr "shift" "1")
6436 (set_attr "arch" "32,a,a")
6437 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
6438
6439 (define_insn "*arm_cmpsi_negshiftsi_si"
6440 [(set (reg:CC_Z CC_REGNUM)
6441 (compare:CC_Z
6442 (neg:SI (match_operator:SI 1 "shift_operator"
6443 [(match_operand:SI 2 "s_register_operand" "r")
6444 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
6445 (match_operand:SI 0 "s_register_operand" "r")))]
6446 "TARGET_ARM"
6447 "cmn%?\\t%0, %2%S1"
6448 [(set_attr "conds" "set")
6449 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
6450 (const_string "alus_shift_imm")
6451 (const_string "alus_shift_reg")))
6452 (set_attr "predicable" "yes")]
6453 )
6454
6455 ;; DImode comparisons. The generic code generates branches that
6456 ;; if-conversion cannot reduce to a conditional compare, so we do
6457 ;; that directly.
6458
6459 (define_insn "*arm_cmpdi_insn"
6460 [(set (reg:CC_NCV CC_REGNUM)
6461 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
6462 (match_operand:DI 1 "arm_di_operand" "rDi")))
6463 (clobber (match_scratch:SI 2 "=r"))]
6464 "TARGET_32BIT"
6465 "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
6466 [(set_attr "conds" "set")
6467 (set_attr "length" "8")
6468 (set_attr "type" "multiple")]
6469 )
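;; E.g. (register numbers arbitrary) comparing r0:r1 against r2:r3 becomes
;; cmp r0, r2
;; sbcs ip, r1, r3
;; a full 64-bit subtraction whose numeric result is discarded; only the
;; N, C and V flags of the final subtract-with-carry are used.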
6470
6471 (define_insn_and_split "*arm_cmpdi_unsigned"
6472 [(set (reg:CC_CZ CC_REGNUM)
6473 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "l,r,r,r")
6474 (match_operand:DI 1 "arm_di_operand" "Py,r,Di,rDi")))]
6475
6476 "TARGET_32BIT"
6477 "#" ; "cmp\\t%R0, %R1\;it eq\;cmpeq\\t%Q0, %Q1"
6478 "&& reload_completed"
6479 [(set (reg:CC CC_REGNUM)
6480 (compare:CC (match_dup 2) (match_dup 3)))
6481 (cond_exec (eq:SI (reg:CC CC_REGNUM) (const_int 0))
6482 (set (reg:CC CC_REGNUM)
6483 (compare:CC (match_dup 0) (match_dup 1))))]
6484 {
6485 operands[2] = gen_highpart (SImode, operands[0]);
6486 operands[0] = gen_lowpart (SImode, operands[0]);
6487 if (CONST_INT_P (operands[1]))
6488 operands[3] = gen_highpart_mode (SImode, DImode, operands[1]);
6489 else
6490 operands[3] = gen_highpart (SImode, operands[1]);
6491 operands[1] = gen_lowpart (SImode, operands[1]);
6492 }
6493 [(set_attr "conds" "set")
6494 (set_attr "enabled_for_short_it" "yes,yes,no,*")
6495 (set_attr "arch" "t2,t2,t2,a")
6496 (set_attr "length" "6,6,10,8")
6497 (set_attr "type" "multiple")]
6498 )
6499
6500 (define_insn "*arm_cmpdi_zero"
6501 [(set (reg:CC_Z CC_REGNUM)
6502 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
6503 (const_int 0)))
6504 (clobber (match_scratch:SI 1 "=r"))]
6505 "TARGET_32BIT"
6506 "orrs%?\\t%1, %Q0, %R0"
6507 [(set_attr "conds" "set")
6508 (set_attr "type" "logics_reg")]
6509 )
6510
6511 ; This insn allows redundant compares to be removed by cse; nothing should
6512 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
6513 ; is deleted later on. The match_dup will match the mode here, so that
6514 ; mode changes of the condition codes aren't lost by this even though we don't
6515 ; specify what they are.
6516
6517 (define_insn "*deleted_compare"
6518 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
6519 "TARGET_32BIT"
6520 "\\t%@ deleted compare"
6521 [(set_attr "conds" "set")
6522 (set_attr "length" "0")
6523 (set_attr "type" "no_insn")]
6524 )
6525
6526 \f
6527 ;; Conditional branch insns
6528
6529 (define_expand "cbranch_cc"
6530 [(set (pc)
6531 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
6532 (match_operand 2 "" "")])
6533 (label_ref (match_operand 3 "" ""))
6534 (pc)))]
6535 "TARGET_32BIT"
6536 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
6537 operands[1], operands[2], NULL_RTX);
6538 operands[2] = const0_rtx;"
6539 )
6540
6541 ;;
6542 ;; Patterns to match conditional branch insns.
6543 ;;
6544
6545 (define_insn "arm_cond_branch"
6546 [(set (pc)
6547 (if_then_else (match_operator 1 "arm_comparison_operator"
6548 [(match_operand 2 "cc_register" "") (const_int 0)])
6549 (label_ref (match_operand 0 "" ""))
6550 (pc)))]
6551 "TARGET_32BIT"
6552 "*
6553 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
6554 {
6555 arm_ccfsm_state += 2;
6556 return \"\";
6557 }
6558 return \"b%d1\\t%l0\";
6559 "
6560 [(set_attr "conds" "use")
6561 (set_attr "type" "branch")
6562 (set (attr "length")
6563 (if_then_else
6564 (and (match_test "TARGET_THUMB2")
6565 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
6566 (le (minus (match_dup 0) (pc)) (const_int 256))))
6567 (const_int 2)
6568 (const_int 4)))]
6569 )
6570
6571 (define_insn "*arm_cond_branch_reversed"
6572 [(set (pc)
6573 (if_then_else (match_operator 1 "arm_comparison_operator"
6574 [(match_operand 2 "cc_register" "") (const_int 0)])
6575 (pc)
6576 (label_ref (match_operand 0 "" ""))))]
6577 "TARGET_32BIT"
6578 "*
6579 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
6580 {
6581 arm_ccfsm_state += 2;
6582 return \"\";
6583 }
6584 return \"b%D1\\t%l0\";
6585 "
6586 [(set_attr "conds" "use")
6587 (set_attr "type" "branch")
6588 (set (attr "length")
6589 (if_then_else
6590 (and (match_test "TARGET_THUMB2")
6591 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
6592 (le (minus (match_dup 0) (pc)) (const_int 256))))
6593 (const_int 2)
6594 (const_int 4)))]
6595 )
6596
6597 \f
6598
6599 ; scc insns
6600
6601 (define_expand "cstore_cc"
6602 [(set (match_operand:SI 0 "s_register_operand")
6603 (match_operator:SI 1 "" [(match_operand 2 "" "")
6604 (match_operand 3 "" "")]))]
6605 "TARGET_32BIT"
6606 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
6607 operands[2], operands[3], NULL_RTX);
6608 operands[3] = const0_rtx;"
6609 )
6610
6611 (define_insn_and_split "*mov_scc"
6612 [(set (match_operand:SI 0 "s_register_operand" "=r")
6613 (match_operator:SI 1 "arm_comparison_operator_mode"
6614 [(match_operand 2 "cc_register" "") (const_int 0)]))]
6615 "TARGET_ARM"
6616 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
6617 "TARGET_ARM"
6618 [(set (match_dup 0)
6619 (if_then_else:SI (match_dup 1)
6620 (const_int 1)
6621 (const_int 0)))]
6622 ""
6623 [(set_attr "conds" "use")
6624 (set_attr "length" "8")
6625 (set_attr "type" "multiple")]
6626 )
6627
6628 (define_insn_and_split "*mov_negscc"
6629 [(set (match_operand:SI 0 "s_register_operand" "=r")
6630 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
6631 [(match_operand 2 "cc_register" "") (const_int 0)])))]
6632 "TARGET_ARM"
6633 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
6634 "TARGET_ARM"
6635 [(set (match_dup 0)
6636 (if_then_else:SI (match_dup 1)
6637 (match_dup 3)
6638 (const_int 0)))]
6639 {
6640 operands[3] = GEN_INT (~0);
6641 }
6642 [(set_attr "conds" "use")
6643 (set_attr "length" "8")
6644 (set_attr "type" "multiple")]
6645 )
6646
6647 (define_insn_and_split "*mov_notscc"
6648 [(set (match_operand:SI 0 "s_register_operand" "=r")
6649 (not:SI (match_operator:SI 1 "arm_comparison_operator"
6650 [(match_operand 2 "cc_register" "") (const_int 0)])))]
6651 "TARGET_ARM"
6652 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
6653 "TARGET_ARM"
6654 [(set (match_dup 0)
6655 (if_then_else:SI (match_dup 1)
6656 (match_dup 3)
6657 (match_dup 4)))]
6658 {
6659 operands[3] = GEN_INT (~1);
6660 operands[4] = GEN_INT (~0);
6661 }
6662 [(set_attr "conds" "use")
6663 (set_attr "length" "8")
6664 (set_attr "type" "multiple")]
6665 )
6666
6667 (define_expand "cstoresi4"
6668 [(set (match_operand:SI 0 "s_register_operand")
6669 (match_operator:SI 1 "expandable_comparison_operator"
6670 [(match_operand:SI 2 "s_register_operand")
6671 (match_operand:SI 3 "reg_or_int_operand")]))]
6672 "TARGET_32BIT || TARGET_THUMB1"
6673 "{
6674 rtx op3, scratch, scratch2;
6675
6676 if (!TARGET_THUMB1)
6677 {
6678 if (!arm_add_operand (operands[3], SImode))
6679 operands[3] = force_reg (SImode, operands[3]);
6680 emit_insn (gen_cstore_cc (operands[0], operands[1],
6681 operands[2], operands[3]));
6682 DONE;
6683 }
6684
6685 if (operands[3] == const0_rtx)
6686 {
6687 switch (GET_CODE (operands[1]))
6688 {
6689 case EQ:
6690 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
6691 break;
6692
6693 case NE:
6694 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
6695 break;
6696
6697 case LE:
6698 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
6699 NULL_RTX, 0, OPTAB_WIDEN);
6700 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
6701 NULL_RTX, 0, OPTAB_WIDEN);
6702 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
6703 operands[0], 1, OPTAB_WIDEN);
6704 break;
6705
6706 case GE:
6707 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
6708 NULL_RTX, 1);
6709 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
6710 operands[0], 1, OPTAB_WIDEN);
6711 break;
6712
6713 case GT:
6714 scratch = expand_binop (SImode, ashr_optab, operands[2],
6715 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
6716 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
6717 NULL_RTX, 0, OPTAB_WIDEN);
6718 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
6719 0, OPTAB_WIDEN);
6720 break;
6721
6722 /* LT is handled by generic code. No need for unsigned with 0. */
6723 default:
6724 FAIL;
6725 }
6726 DONE;
6727 }
6728
6729 switch (GET_CODE (operands[1]))
6730 {
6731 case EQ:
6732 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
6733 NULL_RTX, 0, OPTAB_WIDEN);
6734 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
6735 break;
6736
6737 case NE:
6738 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
6739 NULL_RTX, 0, OPTAB_WIDEN);
6740 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
6741 break;
6742
6743 case LE:
6744 op3 = force_reg (SImode, operands[3]);
6745
6746 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
6747 NULL_RTX, 1, OPTAB_WIDEN);
6748 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
6749 NULL_RTX, 0, OPTAB_WIDEN);
6750 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
6751 op3, operands[2]));
6752 break;
6753
6754 case GE:
6755 op3 = operands[3];
6756 if (!thumb1_cmp_operand (op3, SImode))
6757 op3 = force_reg (SImode, op3);
6758 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
6759 NULL_RTX, 0, OPTAB_WIDEN);
6760 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
6761 NULL_RTX, 1, OPTAB_WIDEN);
6762 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
6763 operands[2], op3));
6764 break;
6765
6766 case LEU:
6767 op3 = force_reg (SImode, operands[3]);
6768 scratch = force_reg (SImode, const0_rtx);
6769 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
6770 op3, operands[2]));
6771 break;
6772
6773 case GEU:
6774 op3 = operands[3];
6775 if (!thumb1_cmp_operand (op3, SImode))
6776 op3 = force_reg (SImode, op3);
6777 scratch = force_reg (SImode, const0_rtx);
6778 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
6779 operands[2], op3));
6780 break;
6781
6782 case LTU:
6783 op3 = operands[3];
6784 if (!thumb1_cmp_operand (op3, SImode))
6785 op3 = force_reg (SImode, op3);
6786 scratch = gen_reg_rtx (SImode);
6787 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
6788 break;
6789
6790 case GTU:
6791 op3 = force_reg (SImode, operands[3]);
6792 scratch = gen_reg_rtx (SImode);
6793 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
6794 break;
6795
6796 /* No good sequences for GT, LT. */
6797 default:
6798 FAIL;
6799 }
6800 DONE;
6801 }")
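;; Worked example for the Thumb-1 GE-against-zero case above (register
;; numbers arbitrary): "r0 = (r1 >= 0)" inverts the value and shifts the
;; sign bit down, ending up as something like
;; mvns r3, r1
;; lsrs r0, r3, #31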
6802
6803 (define_expand "cstorehf4"
6804 [(set (match_operand:SI 0 "s_register_operand")
6805 (match_operator:SI 1 "expandable_comparison_operator"
6806 [(match_operand:HF 2 "s_register_operand")
6807 (match_operand:HF 3 "vfp_compare_operand")]))]
6808 "TARGET_VFP_FP16INST"
6809 {
6810 if (!arm_validize_comparison (&operands[1],
6811 &operands[2],
6812 &operands[3]))
6813 FAIL;
6814
6815 emit_insn (gen_cstore_cc (operands[0], operands[1],
6816 operands[2], operands[3]));
6817 DONE;
6818 }
6819 )
6820
6821 (define_expand "cstoresf4"
6822 [(set (match_operand:SI 0 "s_register_operand")
6823 (match_operator:SI 1 "expandable_comparison_operator"
6824 [(match_operand:SF 2 "s_register_operand")
6825 (match_operand:SF 3 "vfp_compare_operand")]))]
6826 "TARGET_32BIT && TARGET_HARD_FLOAT"
6827 "emit_insn (gen_cstore_cc (operands[0], operands[1],
6828 operands[2], operands[3])); DONE;"
6829 )
6830
6831 (define_expand "cstoredf4"
6832 [(set (match_operand:SI 0 "s_register_operand")
6833 (match_operator:SI 1 "expandable_comparison_operator"
6834 [(match_operand:DF 2 "s_register_operand")
6835 (match_operand:DF 3 "vfp_compare_operand")]))]
6836 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6837 "emit_insn (gen_cstore_cc (operands[0], operands[1],
6838 operands[2], operands[3])); DONE;"
6839 )
6840
6841 (define_expand "cstoredi4"
6842 [(set (match_operand:SI 0 "s_register_operand")
6843 (match_operator:SI 1 "expandable_comparison_operator"
6844 [(match_operand:DI 2 "s_register_operand")
6845 (match_operand:DI 3 "cmpdi_operand")]))]
6846 "TARGET_32BIT"
6847 "{
6848 if (!arm_validize_comparison (&operands[1],
6849 &operands[2],
6850 &operands[3]))
6851 FAIL;
6852 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
6853 operands[3]));
6854 DONE;
6855 }"
6856 )
6857
6858 \f
6859 ;; Conditional move insns
6860
6861 (define_expand "movsicc"
6862 [(set (match_operand:SI 0 "s_register_operand")
6863 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
6864 (match_operand:SI 2 "arm_not_operand")
6865 (match_operand:SI 3 "arm_not_operand")))]
6866 "TARGET_32BIT"
6867 "
6868 {
6869 enum rtx_code code;
6870 rtx ccreg;
6871
6872 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6873 &XEXP (operands[1], 1)))
6874 FAIL;
6875
6876 code = GET_CODE (operands[1]);
6877 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6878 XEXP (operands[1], 1), NULL_RTX);
6879 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6880 }"
6881 )
6882
6883 (define_expand "movhfcc"
6884 [(set (match_operand:HF 0 "s_register_operand")
6885 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
6886 (match_operand:HF 2 "s_register_operand")
6887 (match_operand:HF 3 "s_register_operand")))]
6888 "TARGET_VFP_FP16INST"
6889 "
6890 {
6891 enum rtx_code code = GET_CODE (operands[1]);
6892 rtx ccreg;
6893
6894 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6895 &XEXP (operands[1], 1)))
6896 FAIL;
6897
6898 code = GET_CODE (operands[1]);
6899 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6900 XEXP (operands[1], 1), NULL_RTX);
6901 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6902 }"
6903 )
6904
6905 (define_expand "movsfcc"
6906 [(set (match_operand:SF 0 "s_register_operand")
6907 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
6908 (match_operand:SF 2 "s_register_operand")
6909 (match_operand:SF 3 "s_register_operand")))]
6910 "TARGET_32BIT && TARGET_HARD_FLOAT"
6911 "
6912 {
6913 enum rtx_code code = GET_CODE (operands[1]);
6914 rtx ccreg;
6915
6916 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6917 &XEXP (operands[1], 1)))
6918 FAIL;
6919
6920 code = GET_CODE (operands[1]);
6921 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6922 XEXP (operands[1], 1), NULL_RTX);
6923 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6924 }"
6925 )
6926
6927 (define_expand "movdfcc"
6928 [(set (match_operand:DF 0 "s_register_operand")
6929 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
6930 (match_operand:DF 2 "s_register_operand")
6931 (match_operand:DF 3 "s_register_operand")))]
6932 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
6933 "
6934 {
6935 enum rtx_code code = GET_CODE (operands[1]);
6936 rtx ccreg;
6937
6938 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6939 &XEXP (operands[1], 1)))
6940 FAIL;
6941 code = GET_CODE (operands[1]);
6942 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6943 XEXP (operands[1], 1), NULL_RTX);
6944 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6945 }"
6946 )
6947
6948 (define_insn "*cmov<mode>"
6949 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
6950 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
6951 [(match_operand 2 "cc_register" "") (const_int 0)])
6952 (match_operand:SDF 3 "s_register_operand"
6953 "<F_constraint>")
6954 (match_operand:SDF 4 "s_register_operand"
6955 "<F_constraint>")))]
6956 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
6957 "*
6958 {
6959 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
6960 switch (code)
6961 {
6962 case ARM_GE:
6963 case ARM_GT:
6964 case ARM_EQ:
6965 case ARM_VS:
6966 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
6967 case ARM_LT:
6968 case ARM_LE:
6969 case ARM_NE:
6970 case ARM_VC:
6971 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
6972 default:
6973 gcc_unreachable ();
6974 }
6975 return \"\";
6976 }"
6977 [(set_attr "conds" "use")
6978 (set_attr "type" "fcsel")]
6979 )
6980
6981 (define_insn "*cmovhf"
6982 [(set (match_operand:HF 0 "s_register_operand" "=t")
6983 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
6984 [(match_operand 2 "cc_register" "") (const_int 0)])
6985 (match_operand:HF 3 "s_register_operand" "t")
6986 (match_operand:HF 4 "s_register_operand" "t")))]
6987 "TARGET_VFP_FP16INST"
6988 "*
6989 {
6990 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
6991 switch (code)
6992 {
6993 case ARM_GE:
6994 case ARM_GT:
6995 case ARM_EQ:
6996 case ARM_VS:
6997 return \"vsel%d1.f16\\t%0, %3, %4\";
6998 case ARM_LT:
6999 case ARM_LE:
7000 case ARM_NE:
7001 case ARM_VC:
7002 return \"vsel%D1.f16\\t%0, %4, %3\";
7003 default:
7004 gcc_unreachable ();
7005 }
7006 return \"\";
7007 }"
7008 [(set_attr "conds" "use")
7009 (set_attr "type" "fcsel")]
7010 )
7011
7012 (define_insn_and_split "*movsicc_insn"
7013 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7014 (if_then_else:SI
7015 (match_operator 3 "arm_comparison_operator"
7016 [(match_operand 4 "cc_register" "") (const_int 0)])
7017 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7018 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7019 "TARGET_ARM"
7020 "@
7021 mov%D3\\t%0, %2
7022 mvn%D3\\t%0, #%B2
7023 mov%d3\\t%0, %1
7024 mvn%d3\\t%0, #%B1
7025 #
7026 #
7027 #
7028 #"
7029 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7030 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7031 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7032 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7033 "&& reload_completed"
7034 [(const_int 0)]
7035 {
7036 enum rtx_code rev_code;
7037 machine_mode mode;
7038 rtx rev_cond;
7039
7040 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7041 operands[3],
7042 gen_rtx_SET (operands[0], operands[1])));
7043
7044 rev_code = GET_CODE (operands[3]);
7045 mode = GET_MODE (operands[4]);
7046 if (mode == CCFPmode || mode == CCFPEmode)
7047 rev_code = reverse_condition_maybe_unordered (rev_code);
7048 else
7049 rev_code = reverse_condition (rev_code);
7050
7051 rev_cond = gen_rtx_fmt_ee (rev_code,
7052 VOIDmode,
7053 operands[4],
7054 const0_rtx);
7055 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7056 rev_cond,
7057 gen_rtx_SET (operands[0], operands[2])));
7058 DONE;
7059 }
7060 [(set_attr "length" "4,4,4,4,8,8,8,8")
7061 (set_attr "conds" "use")
7062 (set_attr_alternative "type"
7063 [(if_then_else (match_operand 2 "const_int_operand" "")
7064 (const_string "mov_imm")
7065 (const_string "mov_reg"))
7066 (const_string "mvn_imm")
7067 (if_then_else (match_operand 1 "const_int_operand" "")
7068 (const_string "mov_imm")
7069 (const_string "mov_reg"))
7070 (const_string "mvn_imm")
7071 (const_string "multiple")
7072 (const_string "multiple")
7073 (const_string "multiple")
7074 (const_string "multiple")])]
7075 )
7076
7077 (define_insn "*movsfcc_soft_insn"
7078 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7079 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7080 [(match_operand 4 "cc_register" "") (const_int 0)])
7081 (match_operand:SF 1 "s_register_operand" "0,r")
7082 (match_operand:SF 2 "s_register_operand" "r,0")))]
7083 "TARGET_ARM && TARGET_SOFT_FLOAT"
7084 "@
7085 mov%D3\\t%0, %2
7086 mov%d3\\t%0, %1"
7087 [(set_attr "conds" "use")
7088 (set_attr "type" "mov_reg")]
7089 )
7090
7091 \f
7092 ;; Jump and linkage insns
7093
7094 (define_expand "jump"
7095 [(set (pc)
7096 (label_ref (match_operand 0 "" "")))]
7097 "TARGET_EITHER"
7098 ""
7099 )
7100
7101 (define_insn "*arm_jump"
7102 [(set (pc)
7103 (label_ref (match_operand 0 "" "")))]
7104 "TARGET_32BIT"
7105 "*
7106 {
7107 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7108 {
7109 arm_ccfsm_state += 2;
7110 return \"\";
7111 }
7112 return \"b%?\\t%l0\";
7113 }
7114 "
7115 [(set_attr "predicable" "yes")
7116 (set (attr "length")
7117 (if_then_else
7118 (and (match_test "TARGET_THUMB2")
7119 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7120 (le (minus (match_dup 0) (pc)) (const_int 2048))))
7121 (const_int 2)
7122 (const_int 4)))
7123 (set_attr "type" "branch")]
7124 )
7125
7126 (define_expand "call"
7127 [(parallel [(call (match_operand 0 "memory_operand")
7128 (match_operand 1 "general_operand"))
7129 (use (match_operand 2 "" ""))
7130 (clobber (reg:SI LR_REGNUM))])]
7131 "TARGET_EITHER"
7132 "
7133 {
7134 rtx callee, pat;
7135 tree addr = MEM_EXPR (operands[0]);
7136
7137 /* In an untyped call, we can get NULL for operand 2. */
7138 if (operands[2] == NULL_RTX)
7139 operands[2] = const0_rtx;
7140
7141 /* Decide if we should generate indirect calls by loading the
7142 32-bit address of the callee into a register before performing the
7143 branch and link. */
7144 callee = XEXP (operands[0], 0);
7145 if (GET_CODE (callee) == SYMBOL_REF
7146 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7147 : !REG_P (callee))
7148 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7149
7150 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
7151 /* Indirect call: set r9 with FDPIC value of callee. */
7152 XEXP (operands[0], 0)
7153 = arm_load_function_descriptor (XEXP (operands[0], 0));
7154
7155 if (detect_cmse_nonsecure_call (addr))
7156 {
7157 pat = gen_nonsecure_call_internal (operands[0], operands[1],
7158 operands[2]);
7159 emit_call_insn (pat);
7160 }
7161 else
7162 {
7163 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7164 arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
7165 }
7166
7167 /* Restore FDPIC register (r9) after call. */
7168 if (TARGET_FDPIC)
7169 {
7170 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7171 rtx initial_fdpic_reg
7172 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7173
7174 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7175 initial_fdpic_reg));
7176 }
7177
7178 DONE;
7179 }"
7180 )
7181
7182 (define_insn "restore_pic_register_after_call"
7183 [(set (match_operand:SI 0 "s_register_operand" "+r,r")
7184 (unspec:SI [(match_dup 0)
7185 (match_operand:SI 1 "nonimmediate_operand" "r,m")]
7186 UNSPEC_PIC_RESTORE))]
7187 ""
7188 "@
7189 mov\t%0, %1
7190 ldr\t%0, %1"
7191 )
7192
7193 (define_expand "call_internal"
7194 [(parallel [(call (match_operand 0 "memory_operand")
7195 (match_operand 1 "general_operand"))
7196 (use (match_operand 2 "" ""))
7197 (clobber (reg:SI LR_REGNUM))])])
7198
7199 (define_expand "nonsecure_call_internal"
7200 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
7201 UNSPEC_NONSECURE_MEM)
7202 (match_operand 1 "general_operand"))
7203 (use (match_operand 2 "" ""))
7204 (clobber (reg:SI LR_REGNUM))])]
7205 "use_cmse"
7206 "
7207 {
7208 rtx tmp;
7209 tmp = copy_to_suggested_reg (XEXP (operands[0], 0),
7210 gen_rtx_REG (SImode, R4_REGNUM),
7211 SImode);
7212
7213 operands[0] = replace_equiv_address (operands[0], tmp);
7214 }")
7215
7216 (define_insn "*call_reg_armv5"
7217 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7218 (match_operand 1 "" ""))
7219 (use (match_operand 2 "" ""))
7220 (clobber (reg:SI LR_REGNUM))]
7221 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7222 "blx%?\\t%0"
7223 [(set_attr "type" "call")]
7224 )
7225
7226 (define_insn "*call_reg_arm"
7227 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7228 (match_operand 1 "" ""))
7229 (use (match_operand 2 "" ""))
7230 (clobber (reg:SI LR_REGNUM))]
7231 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7232 "*
7233 return output_call (operands);
7234 "
7235 ;; The length is the worst case; normally it is only two instructions.
7236 [(set_attr "length" "12")
7237 (set_attr "type" "call")]
7238 )
7239
7240
7241 (define_expand "call_value"
7242 [(parallel [(set (match_operand 0 "" "")
7243 (call (match_operand 1 "memory_operand")
7244 (match_operand 2 "general_operand")))
7245 (use (match_operand 3 "" ""))
7246 (clobber (reg:SI LR_REGNUM))])]
7247 "TARGET_EITHER"
7248 "
7249 {
7250 rtx pat, callee;
7251 tree addr = MEM_EXPR (operands[1]);
7252
7253 /* In an untyped call, we can get NULL for operand 3. */
7254 if (operands[3] == 0)
7255 operands[3] = const0_rtx;
7256
7257 /* Decide if we should generate indirect calls by loading the
7258 32-bit address of the callee into a register before performing the
7259 branch and link. */
7260 callee = XEXP (operands[1], 0);
7261 if (GET_CODE (callee) == SYMBOL_REF
7262 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7263 : !REG_P (callee))
7264 XEXP (operands[1], 0) = force_reg (Pmode, callee);
7265
7266 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
7267 /* Indirect call: set r9 with FDPIC value of callee. */
7268 XEXP (operands[1], 0)
7269 = arm_load_function_descriptor (XEXP (operands[1], 0));
7270
7271 if (detect_cmse_nonsecure_call (addr))
7272 {
7273 pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
7274 operands[2], operands[3]);
7275 emit_call_insn (pat);
7276 }
7277 else
7278 {
7279 pat = gen_call_value_internal (operands[0], operands[1],
7280 operands[2], operands[3]);
7281 arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
7282 }
7283
7284 /* Restore FDPIC register (r9) after call. */
7285 if (TARGET_FDPIC)
7286 {
7287 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7288 rtx initial_fdpic_reg
7289 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7290
7291 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7292 initial_fdpic_reg));
7293 }
7294
7295 DONE;
7296 }"
7297 )
7298
7299 (define_expand "call_value_internal"
7300 [(parallel [(set (match_operand 0 "" "")
7301 (call (match_operand 1 "memory_operand")
7302 (match_operand 2 "general_operand")))
7303 (use (match_operand 3 "" ""))
7304 (clobber (reg:SI LR_REGNUM))])])
7305
7306 (define_expand "nonsecure_call_value_internal"
7307 [(parallel [(set (match_operand 0 "" "")
7308 (call (unspec:SI [(match_operand 1 "memory_operand")]
7309 UNSPEC_NONSECURE_MEM)
7310 (match_operand 2 "general_operand")))
7311 (use (match_operand 3 "" ""))
7312 (clobber (reg:SI LR_REGNUM))])]
7313 "use_cmse"
7314 "
7315 {
7316 rtx tmp;
7317 tmp = copy_to_suggested_reg (XEXP (operands[1], 0),
7318 gen_rtx_REG (SImode, R4_REGNUM),
7319 SImode);
7320
7321 operands[1] = replace_equiv_address (operands[1], tmp);
7322 }")
7323
7324 (define_insn "*call_value_reg_armv5"
7325 [(set (match_operand 0 "" "")
7326 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7327 (match_operand 2 "" "")))
7328 (use (match_operand 3 "" ""))
7329 (clobber (reg:SI LR_REGNUM))]
7330 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7331 "blx%?\\t%1"
7332 [(set_attr "type" "call")]
7333 )
7334
7335 (define_insn "*call_value_reg_arm"
7336 [(set (match_operand 0 "" "")
7337 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7338 (match_operand 2 "" "")))
7339 (use (match_operand 3 "" ""))
7340 (clobber (reg:SI LR_REGNUM))]
7341 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7342 "*
7343 return output_call (&operands[1]);
7344 "
7345 [(set_attr "length" "12")
7346 (set_attr "type" "call")]
7347 )
7348
7349 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
7350 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
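;; For instance, under these conditions a call to a global function foo will
;; normally assemble to "bl foo" (or "bl foo(PLT)" when a PLT relocation is
;; needed), while a local callee that changes instruction set can use "blx"
;; on ARMv5T+; see the templates below.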
7351
7352 (define_insn "*call_symbol"
7353 [(call (mem:SI (match_operand:SI 0 "" ""))
7354 (match_operand 1 "" ""))
7355 (use (match_operand 2 "" ""))
7356 (clobber (reg:SI LR_REGNUM))]
7357 "TARGET_32BIT
7358 && !SIBLING_CALL_P (insn)
7359 && (GET_CODE (operands[0]) == SYMBOL_REF)
7360 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
7361 "*
7362 {
7363 rtx op = operands[0];
7364
7365 /* Switch mode now when possible. */
7366 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7367 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7368 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
7369
7370 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
7371 }"
7372 [(set_attr "type" "call")]
7373 )
7374
7375 (define_insn "*call_value_symbol"
7376 [(set (match_operand 0 "" "")
7377 (call (mem:SI (match_operand:SI 1 "" ""))
7378 (match_operand:SI 2 "" "")))
7379 (use (match_operand 3 "" ""))
7380 (clobber (reg:SI LR_REGNUM))]
7381 "TARGET_32BIT
7382 && !SIBLING_CALL_P (insn)
7383 && (GET_CODE (operands[1]) == SYMBOL_REF)
7384 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
7385 "*
7386 {
7387 rtx op = operands[1];
7388
7389 /* Switch mode now when possible. */
7390 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7391 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7392 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
7393
7394 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
7395 }"
7396 [(set_attr "type" "call")]
7397 )
7398
7399 (define_expand "sibcall_internal"
7400 [(parallel [(call (match_operand 0 "memory_operand")
7401 (match_operand 1 "general_operand"))
7402 (return)
7403 (use (match_operand 2 "" ""))])])
7404
7405 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
7406 (define_expand "sibcall"
7407 [(parallel [(call (match_operand 0 "memory_operand")
7408 (match_operand 1 "general_operand"))
7409 (return)
7410 (use (match_operand 2 "" ""))])]
7411 "TARGET_32BIT"
7412 "
7413 {
7414 rtx pat;
7415
7416 if ((!REG_P (XEXP (operands[0], 0))
7417 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
7418 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
7419 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
7420 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
7421
7422 if (operands[2] == NULL_RTX)
7423 operands[2] = const0_rtx;
7424
7425 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
7426 arm_emit_call_insn (pat, operands[0], true);
7427 DONE;
7428 }"
7429 )
7430
7431 (define_expand "sibcall_value_internal"
7432 [(parallel [(set (match_operand 0 "" "")
7433 (call (match_operand 1 "memory_operand")
7434 (match_operand 2 "general_operand")))
7435 (return)
7436 (use (match_operand 3 "" ""))])])
7437
7438 (define_expand "sibcall_value"
7439 [(parallel [(set (match_operand 0 "" "")
7440 (call (match_operand 1 "memory_operand")
7441 (match_operand 2 "general_operand")))
7442 (return)
7443 (use (match_operand 3 "" ""))])]
7444 "TARGET_32BIT"
7445 "
7446 {
7447 rtx pat;
7448
7449 if ((!REG_P (XEXP (operands[1], 0))
7450 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
7451 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
7452 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
7453 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
7454
7455 if (operands[3] == NULL_RTX)
7456 operands[3] = const0_rtx;
7457
7458 pat = gen_sibcall_value_internal (operands[0], operands[1],
7459 operands[2], operands[3]);
7460 arm_emit_call_insn (pat, operands[1], true);
7461 DONE;
7462 }"
7463 )
7464
7465 (define_insn "*sibcall_insn"
7466 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
7467 (match_operand 1 "" ""))
7468 (return)
7469 (use (match_operand 2 "" ""))]
7470 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7471 "*
7472 if (which_alternative == 1)
7473 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
7474 else
7475 {
7476 if (arm_arch5t || arm_arch4t)
7477 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
7478 else
7479 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
7480 }
7481 "
7482 [(set_attr "type" "call")]
7483 )
7484
7485 (define_insn "*sibcall_value_insn"
7486 [(set (match_operand 0 "" "")
7487 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
7488 (match_operand 2 "" "")))
7489 (return)
7490 (use (match_operand 3 "" ""))]
7491 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7492 "*
7493 if (which_alternative == 1)
7494 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
7495 else
7496 {
7497 if (arm_arch5t || arm_arch4t)
7498 return \"bx%?\\t%1\";
7499 else
7500 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
7501 }
7502 "
7503 [(set_attr "type" "call")]
7504 )
7505
7506 (define_expand "<return_str>return"
7507 [(RETURNS)]
7508 "(TARGET_ARM || (TARGET_THUMB2
7509 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
7510 && !IS_STACKALIGN (arm_current_func_type ())))
7511 <return_cond_false>"
7512 "
7513 {
7514 if (TARGET_THUMB2)
7515 {
7516 thumb2_expand_return (<return_simple_p>);
7517 DONE;
7518 }
7519 }
7520 "
7521 )
7522
7523 ;; Often the return insn will be the same as loading from memory, so set the type attribute to match.
7524 (define_insn "*arm_return"
7525 [(return)]
7526 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
7527 "*
7528 {
7529 if (arm_ccfsm_state == 2)
7530 {
7531 arm_ccfsm_state += 2;
7532 return \"\";
7533 }
7534 return output_return_instruction (const_true_rtx, true, false, false);
7535 }"
7536 [(set_attr "type" "load_4")
7537 (set_attr "length" "12")
7538 (set_attr "predicable" "yes")]
7539 )
7540
7541 (define_insn "*cond_<return_str>return"
7542 [(set (pc)
7543 (if_then_else (match_operator 0 "arm_comparison_operator"
7544 [(match_operand 1 "cc_register" "") (const_int 0)])
7545 (RETURNS)
7546 (pc)))]
7547 "TARGET_ARM <return_cond_true>"
7548 "*
7549 {
7550 if (arm_ccfsm_state == 2)
7551 {
7552 arm_ccfsm_state += 2;
7553 return \"\";
7554 }
7555 return output_return_instruction (operands[0], true, false,
7556 <return_simple_p>);
7557 }"
7558 [(set_attr "conds" "use")
7559 (set_attr "length" "12")
7560 (set_attr "type" "load_4")]
7561 )
7562
7563 (define_insn "*cond_<return_str>return_inverted"
7564 [(set (pc)
7565 (if_then_else (match_operator 0 "arm_comparison_operator"
7566 [(match_operand 1 "cc_register" "") (const_int 0)])
7567 (pc)
7568 (RETURNS)))]
7569 "TARGET_ARM <return_cond_true>"
7570 "*
7571 {
7572 if (arm_ccfsm_state == 2)
7573 {
7574 arm_ccfsm_state += 2;
7575 return \"\";
7576 }
7577 return output_return_instruction (operands[0], true, true,
7578 <return_simple_p>);
7579 }"
7580 [(set_attr "conds" "use")
7581 (set_attr "length" "12")
7582 (set_attr "type" "load_4")]
7583 )
7584
7585 (define_insn "*arm_simple_return"
7586 [(simple_return)]
7587 "TARGET_ARM"
7588 "*
7589 {
7590 if (arm_ccfsm_state == 2)
7591 {
7592 arm_ccfsm_state += 2;
7593 return \"\";
7594 }
7595 return output_return_instruction (const_true_rtx, true, false, true);
7596 }"
7597 [(set_attr "type" "branch")
7598 (set_attr "length" "4")
7599 (set_attr "predicable" "yes")]
7600 )
7601
7602 ;; Generate a sequence of instructions to determine if the processor is
7603 ;; in 26-bit or 32-bit mode, and return the appropriate return address
7604 ;; mask.
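;; Roughly, *check_arch2 below emits "teq r0, r0" followed by "teq pc, pc",
;; leaving the Z flag set only on a 32-bit-mode processor; the mask selected
;; is then all ones (every bit of LR is an address bit) or 0x03fffffc (26-bit
;; mode, where the top bits of LR hold the PSR flags).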
7605
7606 (define_expand "return_addr_mask"
7607 [(set (match_dup 1)
7608 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7609 (const_int 0)))
7610 (set (match_operand:SI 0 "s_register_operand")
7611 (if_then_else:SI (eq (match_dup 1) (const_int 0))
7612 (const_int -1)
7613 (const_int 67108860)))] ; 0x03fffffc
7614 "TARGET_ARM"
7615 "
7616 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
7617 ")
7618
7619 (define_insn "*check_arch2"
7620 [(set (match_operand:CC_NOOV 0 "cc_register" "")
7621 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7622 (const_int 0)))]
7623 "TARGET_ARM"
7624 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
7625 [(set_attr "length" "8")
7626 (set_attr "conds" "set")
7627 (set_attr "type" "multiple")]
7628 )
7629
7630 ;; Call subroutine returning any type.
7631
7632 (define_expand "untyped_call"
7633 [(parallel [(call (match_operand 0 "" "")
7634 (const_int 0))
7635 (match_operand 1 "" "")
7636 (match_operand 2 "" "")])]
7637 "TARGET_EITHER && !TARGET_FDPIC"
7638 "
7639 {
7640 int i;
7641 rtx par = gen_rtx_PARALLEL (VOIDmode,
7642 rtvec_alloc (XVECLEN (operands[2], 0)));
7643 rtx addr = gen_reg_rtx (Pmode);
7644 rtx mem;
7645 int size = 0;
7646
7647 emit_move_insn (addr, XEXP (operands[1], 0));
7648 mem = change_address (operands[1], BLKmode, addr);
7649
7650 for (i = 0; i < XVECLEN (operands[2], 0); i++)
7651 {
7652 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
7653
7654 /* Default code only uses r0 as a return value, but we could
7655 be using anything up to 4 registers. */
7656 if (REGNO (src) == R0_REGNUM)
7657 src = gen_rtx_REG (TImode, R0_REGNUM);
7658
7659 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
7660 GEN_INT (size));
7661 size += GET_MODE_SIZE (GET_MODE (src));
7662 }
7663
7664 emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
7665
7666 size = 0;
7667
7668 for (i = 0; i < XVECLEN (par, 0); i++)
7669 {
7670 HOST_WIDE_INT offset = 0;
7671 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
7672
7673 if (size != 0)
7674 emit_move_insn (addr, plus_constant (Pmode, addr, size));
7675
7676 mem = change_address (mem, GET_MODE (reg), NULL);
7677 if (REGNO (reg) == R0_REGNUM)
7678 {
7679 /* On thumb we have to use a write-back instruction. */
7680 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
7681 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7682 size = TARGET_ARM ? 16 : 0;
7683 }
7684 else
7685 {
7686 emit_move_insn (mem, reg);
7687 size = GET_MODE_SIZE (GET_MODE (reg));
7688 }
7689 }
7690
7691 /* The optimizer does not know that the call sets the function value
7692 registers we stored in the result block. We avoid problems by
7693 claiming that all hard registers are used and clobbered at this
7694 point. */
7695 emit_insn (gen_blockage ());
7696
7697 DONE;
7698 }"
7699 )
7700
7701 (define_expand "untyped_return"
7702 [(match_operand:BLK 0 "memory_operand")
7703 (match_operand 1 "" "")]
7704 "TARGET_EITHER && !TARGET_FDPIC"
7705 "
7706 {
7707 int i;
7708 rtx addr = gen_reg_rtx (Pmode);
7709 rtx mem;
7710 int size = 0;
7711
7712 emit_move_insn (addr, XEXP (operands[0], 0));
7713 mem = change_address (operands[0], BLKmode, addr);
7714
7715 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7716 {
7717 HOST_WIDE_INT offset = 0;
7718 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
7719
7720 if (size != 0)
7721 emit_move_insn (addr, plus_constant (Pmode, addr, size));
7722
7723 mem = change_address (mem, GET_MODE (reg), NULL);
7724 if (REGNO (reg) == R0_REGNUM)
7725 {
7726 /* On thumb we have to use a write-back instruction. */
7727 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
7728 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7729 size = TARGET_ARM ? 16 : 0;
7730 }
7731 else
7732 {
7733 emit_move_insn (reg, mem);
7734 size = GET_MODE_SIZE (GET_MODE (reg));
7735 }
7736 }
7737
7738 /* Emit USE insns before the return. */
7739 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7740 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
7741
7742 /* Construct the return. */
7743 expand_naked_return ();
7744
7745 DONE;
7746 }"
7747 )
7748
7749 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
7750 ;; all of memory. This blocks insns from being moved across this point.
7751
7752 (define_insn "blockage"
7753 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
7754 "TARGET_EITHER"
7755 ""
7756 [(set_attr "length" "0")
7757 (set_attr "type" "block")]
7758 )
7759
7760 ;; Since we hard-code r0 here, use the 'o' constraint to avoid provoking
7761 ;; undefined behaviour in the hardware by emitting auto-increment
7762 ;; addressing modes with r0 as the base register.
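;; For example, a post-indexed store such as "str r0, [r0], #4" is
;; architecturally UNPREDICTABLE because the base register is also the value
;; being stored; the offsettable constraint keeps the probe in a plain
;; base-plus-offset form (e.g. "str r0, [sp, #-4096]", as an illustration).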
7763 (define_insn "probe_stack"
7764 [(set (match_operand:SI 0 "memory_operand" "=o")
7765 (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
7766 "TARGET_32BIT"
7767 "str%?\\tr0, %0"
7768 [(set_attr "type" "store_4")
7769 (set_attr "predicable" "yes")]
7770 )
7771
7772 (define_insn "probe_stack_range"
7773 [(set (match_operand:SI 0 "register_operand" "=r")
7774 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
7775 (match_operand:SI 2 "register_operand" "r")]
7776 VUNSPEC_PROBE_STACK_RANGE))]
7777 "TARGET_32BIT"
7778 {
7779 return output_probe_stack_range (operands[0], operands[2]);
7780 }
7781 [(set_attr "type" "multiple")
7782 (set_attr "conds" "clob")]
7783 )
7784
7785 ;; Named patterns for stack smashing protection.
7786 (define_expand "stack_protect_combined_set"
7787 [(parallel
7788 [(set (match_operand:SI 0 "memory_operand")
7789 (unspec:SI [(match_operand:SI 1 "guard_operand")]
7790 UNSPEC_SP_SET))
7791 (clobber (match_scratch:SI 2 ""))
7792 (clobber (match_scratch:SI 3 ""))])]
7793 ""
7794 ""
7795 )
7796
7797 ;; Use a separate insn from the above expand so that the mem can stay outside
7798 ;; operand #1 when register allocation runs. This is needed to stop LRA from
7799 ;; trying to reload the guard, since we need to control how the PIC access is
7800 ;; done in the -fpic/-fPIC case (see the COMPUTE_NOW parameter passed to
7801 ;; legitimize_pic_address ()).
7802 (define_insn_and_split "*stack_protect_combined_set_insn"
7803 [(set (match_operand:SI 0 "memory_operand" "=m,m")
7804 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
7805 UNSPEC_SP_SET))
7806 (clobber (match_scratch:SI 2 "=&l,&r"))
7807 (clobber (match_scratch:SI 3 "=&l,&r"))]
7808 ""
7809 "#"
7810 "reload_completed"
7811 [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
7812 UNSPEC_SP_SET))
7813 (clobber (match_dup 2))])]
7814 "
7815 {
7816 if (flag_pic)
7817 {
7818 rtx pic_reg;
7819
7820 if (TARGET_FDPIC)
7821 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7822 else
7823 pic_reg = operands[3];
7824
7825 /* Forces recomputing of GOT base now. */
7826 legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
7827 true /*compute_now*/);
7828 }
7829 else
7830 {
7831 if (address_operand (operands[1], SImode))
7832 operands[2] = operands[1];
7833 else
7834 {
7835 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
7836 emit_move_insn (operands[2], mem);
7837 }
7838 }
7839 }"
7840 [(set_attr "arch" "t1,32")]
7841 )
7842
7843 ;; DO NOT SPLIT THIS INSN. It's important for security reasons that the
7844 ;; canary value does not live beyond the life of this sequence.
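;; In effect the sequence below is
;;   ldr  rT, [rG]     @ load the canary through the guard address
;;   str  rT, <slot>   @ store it into this frame's canary slot
;;   mov  rT, #0       @ immediately wipe the register copy
;; (rT/rG/<slot> are illustrative names), so no register still holds the
;; canary once the insn has executed.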
7845 (define_insn "*stack_protect_set_insn"
7846 [(set (match_operand:SI 0 "memory_operand" "=m,m")
7847 (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
7848 UNSPEC_SP_SET))
7849 (clobber (match_dup 1))]
7850 ""
7851 "@
7852 ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
7853 ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
7854 [(set_attr "length" "8,12")
7855 (set_attr "conds" "clob,nocond")
7856 (set_attr "type" "multiple")
7857 (set_attr "arch" "t1,32")]
7858 )
7859
7860 (define_expand "stack_protect_combined_test"
7861 [(parallel
7862 [(set (pc)
7863 (if_then_else
7864 (eq (match_operand:SI 0 "memory_operand")
7865 (unspec:SI [(match_operand:SI 1 "guard_operand")]
7866 UNSPEC_SP_TEST))
7867 (label_ref (match_operand 2))
7868 (pc)))
7869 (clobber (match_scratch:SI 3 ""))
7870 (clobber (match_scratch:SI 4 ""))
7871 (clobber (reg:CC CC_REGNUM))])]
7872 ""
7873 ""
7874 )
7875
7876 ;; Use a separate insn from the above expand so that the mem can stay outside
7877 ;; operand #1 when register allocation runs. This is needed to stop LRA from
7878 ;; trying to reload the guard, since we need to control how the PIC access is
7879 ;; done in the -fpic/-fPIC case (see the COMPUTE_NOW parameter passed to
7880 ;; legitimize_pic_address ()).
7881 (define_insn_and_split "*stack_protect_combined_test_insn"
7882 [(set (pc)
7883 (if_then_else
7884 (eq (match_operand:SI 0 "memory_operand" "m,m")
7885 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
7886 UNSPEC_SP_TEST))
7887 (label_ref (match_operand 2))
7888 (pc)))
7889 (clobber (match_scratch:SI 3 "=&l,&r"))
7890 (clobber (match_scratch:SI 4 "=&l,&r"))
7891 (clobber (reg:CC CC_REGNUM))]
7892 ""
7893 "#"
7894 "reload_completed"
7895 [(const_int 0)]
7896 {
7897 rtx eq;
7898
7899 if (flag_pic)
7900 {
7901 rtx pic_reg;
7902
7903 if (TARGET_FDPIC)
7904 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7905 else
7906 pic_reg = operands[4];
7907
7908 /* Forces recomputing of GOT base now. */
7909 legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
7910 true /*compute_now*/);
7911 }
7912 else
7913 {
7914 if (address_operand (operands[1], SImode))
7915 operands[3] = operands[1];
7916 else
7917 {
7918 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
7919 emit_move_insn (operands[3], mem);
7920 }
7921 }
7922 if (TARGET_32BIT)
7923 {
7924 emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
7925 operands[3]));
7926 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
7927 eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
7928 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
7929 }
7930 else
7931 {
7932 emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
7933 operands[3]));
7934 eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
7935 emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
7936 operands[2]));
7937 }
7938 DONE;
7939 }
7940 [(set_attr "arch" "t1,32")]
7941 )
7942
7943 (define_insn "arm_stack_protect_test_insn"
7944 [(set (reg:CC_Z CC_REGNUM)
7945 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
7946 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
7947 UNSPEC_SP_TEST)
7948 (const_int 0)))
7949 (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
7950 (clobber (match_dup 2))]
7951 "TARGET_32BIT"
7952 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0"
7953 [(set_attr "length" "8,12")
7954 (set_attr "conds" "set")
7955 (set_attr "type" "multiple")
7956 (set_attr "arch" "t,32")]
7957 )
7958
7959 (define_expand "casesi"
7960 [(match_operand:SI 0 "s_register_operand") ; index to jump on
7961 (match_operand:SI 1 "const_int_operand") ; lower bound
7962 (match_operand:SI 2 "const_int_operand") ; total range
7963 (match_operand:SI 3 "" "") ; table label
7964 (match_operand:SI 4 "" "")] ; Out of range label
7965 "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
7966 "
7967 {
7968 enum insn_code code;
7969 if (operands[1] != const0_rtx)
7970 {
7971 rtx reg = gen_reg_rtx (SImode);
7972
7973 emit_insn (gen_addsi3 (reg, operands[0],
7974 gen_int_mode (-INTVAL (operands[1]),
7975 SImode)));
7976 operands[0] = reg;
7977 }
7978
7979 if (TARGET_ARM)
7980 code = CODE_FOR_arm_casesi_internal;
7981 else if (TARGET_THUMB1)
7982 code = CODE_FOR_thumb1_casesi_internal_pic;
7983 else if (flag_pic)
7984 code = CODE_FOR_thumb2_casesi_internal_pic;
7985 else
7986 code = CODE_FOR_thumb2_casesi_internal;
7987
7988 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
7989 operands[2] = force_reg (SImode, operands[2]);
7990
7991 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
7992 operands[3], operands[4]));
7993 DONE;
7994 }"
7995 )
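;; A small worked example of the lower-bound adjustment above: for a switch
;; whose case values run from 5 to 12, operands[1] is 5 and operands[2] is 7,
;; so the expander emits "index -= 5" and the dispatch pattern then does an
;; unsigned compare of the adjusted index against 7, branching to the
;; out-of-range label when it is higher.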
7996
7997 ;; The USE in this pattern is needed to tell flow analysis that this is
7998 ;; a CASESI insn. It has no other purpose.
7999 (define_expand "arm_casesi_internal"
8000 [(parallel [(set (pc)
8001 (if_then_else
8002 (leu (match_operand:SI 0 "s_register_operand")
8003 (match_operand:SI 1 "arm_rhs_operand"))
8004 (match_dup 4)
8005 (label_ref:SI (match_operand 3 ""))))
8006 (clobber (reg:CC CC_REGNUM))
8007 (use (label_ref:SI (match_operand 2 "")))])]
8008 "TARGET_ARM"
8009 {
8010 operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
8011 operands[4] = gen_rtx_PLUS (SImode, operands[4],
8012 gen_rtx_LABEL_REF (SImode, operands[2]));
8013 operands[4] = gen_rtx_MEM (SImode, operands[4]);
8014 MEM_READONLY_P (operands[4]) = 1;
8015 MEM_NOTRAP_P (operands[4]) = 1;
8016 })
8017
8018 (define_insn "*arm_casesi_internal"
8019 [(parallel [(set (pc)
8020 (if_then_else
8021 (leu (match_operand:SI 0 "s_register_operand" "r")
8022 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8023 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8024 (label_ref:SI (match_operand 2 "" ""))))
8025 (label_ref:SI (match_operand 3 "" ""))))
8026 (clobber (reg:CC CC_REGNUM))
8027 (use (label_ref:SI (match_dup 2)))])]
8028 "TARGET_ARM"
8029 "*
8030 if (flag_pic)
8031 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8032 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8033 "
8034 [(set_attr "conds" "clob")
8035 (set_attr "length" "12")
8036 (set_attr "type" "multiple")]
8037 )
8038
8039 (define_expand "indirect_jump"
8040 [(set (pc)
8041 (match_operand:SI 0 "s_register_operand"))]
8042 "TARGET_EITHER"
8043 "
8044 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8045 address and use bx. */
8046 if (TARGET_THUMB2)
8047 {
8048 rtx tmp;
8049 tmp = gen_reg_rtx (SImode);
8050 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8051 operands[0] = tmp;
8052 }
8053 "
8054 )
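;; So for Thumb-2 an indirect jump through, say, r0 becomes roughly
;;   orr r3, r0, #1    @ force the Thumb bit (r3 is just an illustration)
;;   bx  r3
;; while ARM mode uses the plain "mov pc, <reg>" pattern below.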
8055
8056 ;; NB Never uses BX.
8057 (define_insn "*arm_indirect_jump"
8058 [(set (pc)
8059 (match_operand:SI 0 "s_register_operand" "r"))]
8060 "TARGET_ARM"
8061 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8062 [(set_attr "predicable" "yes")
8063 (set_attr "type" "branch")]
8064 )
8065
8066 (define_insn "*load_indirect_jump"
8067 [(set (pc)
8068 (match_operand:SI 0 "memory_operand" "m"))]
8069 "TARGET_ARM"
8070 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8071 [(set_attr "type" "load_4")
8072 (set_attr "pool_range" "4096")
8073 (set_attr "neg_pool_range" "4084")
8074 (set_attr "predicable" "yes")]
8075 )
8076
8077 \f
8078 ;; Misc insns
8079
8080 (define_insn "nop"
8081 [(const_int 0)]
8082 "TARGET_EITHER"
8083 "nop"
8084 [(set (attr "length")
8085 (if_then_else (eq_attr "is_thumb" "yes")
8086 (const_int 2)
8087 (const_int 4)))
8088 (set_attr "type" "mov_reg")]
8089 )
8090
8091 (define_insn "trap"
8092 [(trap_if (const_int 1) (const_int 0))]
8093 ""
8094 "*
8095 if (TARGET_ARM)
8096 return \".inst\\t0xe7f000f0\";
8097 else
8098 return \".inst\\t0xdeff\";
8099 "
8100 [(set (attr "length")
8101 (if_then_else (eq_attr "is_thumb" "yes")
8102 (const_int 2)
8103 (const_int 4)))
8104 (set_attr "type" "trap")
8105 (set_attr "conds" "unconditional")]
8106 )
8107
8108 \f
8109 ;; Patterns to allow combination of arithmetic, cond code and shifts
8110
8111 (define_insn "*<arith_shift_insn>_multsi"
8112 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8113 (SHIFTABLE_OPS:SI
8114 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
8115 (match_operand:SI 3 "power_of_two_operand" ""))
8116 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
8117 "TARGET_32BIT"
8118 "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
8119 [(set_attr "predicable" "yes")
8120 (set_attr "shift" "2")
8121 (set_attr "arch" "a,t2")
8122 (set_attr "type" "alu_shift_imm")])
8123
8124 (define_insn "*<arith_shift_insn>_shiftsi"
8125 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8126 (SHIFTABLE_OPS:SI
8127 (match_operator:SI 2 "shift_nomul_operator"
8128 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8129 (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
8130 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
8131 "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
8132 "<arith_shift_insn>%?\\t%0, %1, %3%S2"
8133 [(set_attr "predicable" "yes")
8134 (set_attr "shift" "3")
8135 (set_attr "arch" "a,t2,a")
8136 (set_attr "type" "alu_shift_imm,alu_shift_imm,alu_shift_reg")])
8137
8138 (define_split
8139 [(set (match_operand:SI 0 "s_register_operand" "")
8140 (match_operator:SI 1 "shiftable_operator"
8141 [(match_operator:SI 2 "shiftable_operator"
8142 [(match_operator:SI 3 "shift_operator"
8143 [(match_operand:SI 4 "s_register_operand" "")
8144 (match_operand:SI 5 "reg_or_int_operand" "")])
8145 (match_operand:SI 6 "s_register_operand" "")])
8146 (match_operand:SI 7 "arm_rhs_operand" "")]))
8147 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8148 "TARGET_32BIT"
8149 [(set (match_dup 8)
8150 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8151 (match_dup 6)]))
8152 (set (match_dup 0)
8153 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
8154 "")
8155
8156 (define_insn "*arith_shiftsi_compare0"
8157 [(set (reg:CC_NOOV CC_REGNUM)
8158 (compare:CC_NOOV
8159 (match_operator:SI 1 "shiftable_operator"
8160 [(match_operator:SI 3 "shift_operator"
8161 [(match_operand:SI 4 "s_register_operand" "r,r")
8162 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8163 (match_operand:SI 2 "s_register_operand" "r,r")])
8164 (const_int 0)))
8165 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8166 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8167 (match_dup 2)]))]
8168 "TARGET_32BIT"
8169 "%i1s%?\\t%0, %2, %4%S3"
8170 [(set_attr "conds" "set")
8171 (set_attr "shift" "4")
8172 (set_attr "arch" "32,a")
8173 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8174
8175 (define_insn "*arith_shiftsi_compare0_scratch"
8176 [(set (reg:CC_NOOV CC_REGNUM)
8177 (compare:CC_NOOV
8178 (match_operator:SI 1 "shiftable_operator"
8179 [(match_operator:SI 3 "shift_operator"
8180 [(match_operand:SI 4 "s_register_operand" "r,r")
8181 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8182 (match_operand:SI 2 "s_register_operand" "r,r")])
8183 (const_int 0)))
8184 (clobber (match_scratch:SI 0 "=r,r"))]
8185 "TARGET_32BIT"
8186 "%i1s%?\\t%0, %2, %4%S3"
8187 [(set_attr "conds" "set")
8188 (set_attr "shift" "4")
8189 (set_attr "arch" "32,a")
8190 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8191
8192 (define_insn "*sub_shiftsi"
8193 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8194 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8195 (match_operator:SI 2 "shift_operator"
8196 [(match_operand:SI 3 "s_register_operand" "r,r")
8197 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8198 "TARGET_32BIT"
8199 "sub%?\\t%0, %1, %3%S2"
8200 [(set_attr "predicable" "yes")
8201 (set_attr "predicable_short_it" "no")
8202 (set_attr "shift" "3")
8203 (set_attr "arch" "32,a")
8204 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8205
8206 (define_insn "*sub_shiftsi_compare0"
8207 [(set (reg:CC_NOOV CC_REGNUM)
8208 (compare:CC_NOOV
8209 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8210 (match_operator:SI 2 "shift_operator"
8211 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8212 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8213 (const_int 0)))
8214 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8215 (minus:SI (match_dup 1)
8216 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8217 "TARGET_32BIT"
8218 "subs%?\\t%0, %1, %3%S2"
8219 [(set_attr "conds" "set")
8220 (set_attr "shift" "3")
8221 (set_attr "arch" "32,a,a")
8222 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
8223
8224 (define_insn "*sub_shiftsi_compare0_scratch"
8225 [(set (reg:CC_NOOV CC_REGNUM)
8226 (compare:CC_NOOV
8227 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8228 (match_operator:SI 2 "shift_operator"
8229 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8230 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8231 (const_int 0)))
8232 (clobber (match_scratch:SI 0 "=r,r,r"))]
8233 "TARGET_32BIT"
8234 "subs%?\\t%0, %1, %3%S2"
8235 [(set_attr "conds" "set")
8236 (set_attr "shift" "3")
8237 (set_attr "arch" "32,a,a")
8238 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
8239 \f
8240
8241 (define_insn_and_split "*and_scc"
8242 [(set (match_operand:SI 0 "s_register_operand" "=r")
8243 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8244 [(match_operand 2 "cc_register" "") (const_int 0)])
8245 (match_operand:SI 3 "s_register_operand" "r")))]
8246 "TARGET_ARM"
8247 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
8248 "&& reload_completed"
8249 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
8250 (cond_exec (match_dup 4) (set (match_dup 0)
8251 (and:SI (match_dup 3) (const_int 1))))]
8252 {
8253 machine_mode mode = GET_MODE (operands[2]);
8254 enum rtx_code rc = GET_CODE (operands[1]);
8255
8256 /* Note that operands[4] is the same as operands[1],
8257 but with VOIDmode as the result. */
8258 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8259 if (mode == CCFPmode || mode == CCFPEmode)
8260 rc = reverse_condition_maybe_unordered (rc);
8261 else
8262 rc = reverse_condition (rc);
8263 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8264 }
8265 [(set_attr "conds" "use")
8266 (set_attr "type" "multiple")
8267 (set_attr "length" "8")]
8268 )
8269
8270 (define_insn_and_split "*ior_scc"
8271 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8272 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
8273 [(match_operand 2 "cc_register" "") (const_int 0)])
8274 (match_operand:SI 3 "s_register_operand" "0,?r")))]
8275 "TARGET_ARM"
8276 "@
8277 orr%d1\\t%0, %3, #1
8278 #"
8279 "&& reload_completed
8280 && REGNO (operands [0]) != REGNO (operands[3])"
8281 ;; && which_alternative == 1
8282 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
8283 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
8284 (cond_exec (match_dup 4) (set (match_dup 0)
8285 (ior:SI (match_dup 3) (const_int 1))))]
8286 {
8287 machine_mode mode = GET_MODE (operands[2]);
8288 enum rtx_code rc = GET_CODE (operands[1]);
8289
8290 /* Note that operands[4] is the same as operands[1],
8291 but with VOIDmode as the result. */
8292 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8293 if (mode == CCFPmode || mode == CCFPEmode)
8294 rc = reverse_condition_maybe_unordered (rc);
8295 else
8296 rc = reverse_condition (rc);
8297 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8298 }
8299 [(set_attr "conds" "use")
8300 (set_attr "length" "4,8")
8301 (set_attr "type" "logic_imm,multiple")]
8302 )
8303
8304 ; A series of splitters for the compare_scc pattern below. Note that
8305 ; order is important.
8306 (define_split
8307 [(set (match_operand:SI 0 "s_register_operand" "")
8308 (lt:SI (match_operand:SI 1 "s_register_operand" "")
8309 (const_int 0)))
8310 (clobber (reg:CC CC_REGNUM))]
8311 "TARGET_32BIT && reload_completed"
8312 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
8313
8314 (define_split
8315 [(set (match_operand:SI 0 "s_register_operand" "")
8316 (ge:SI (match_operand:SI 1 "s_register_operand" "")
8317 (const_int 0)))
8318 (clobber (reg:CC CC_REGNUM))]
8319 "TARGET_32BIT && reload_completed"
8320 [(set (match_dup 0) (not:SI (match_dup 1)))
8321 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
8322
8323 (define_split
8324 [(set (match_operand:SI 0 "s_register_operand" "")
8325 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8326 (const_int 0)))
8327 (clobber (reg:CC CC_REGNUM))]
8328 "arm_arch5t && TARGET_32BIT"
8329 [(set (match_dup 0) (clz:SI (match_dup 1)))
8330 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8331 )
8332
8333 (define_split
8334 [(set (match_operand:SI 0 "s_register_operand" "")
8335 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8336 (const_int 0)))
8337 (clobber (reg:CC CC_REGNUM))]
8338 "TARGET_32BIT && reload_completed"
8339 [(parallel
8340 [(set (reg:CC CC_REGNUM)
8341 (compare:CC (const_int 1) (match_dup 1)))
8342 (set (match_dup 0)
8343 (minus:SI (const_int 1) (match_dup 1)))])
8344 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
8345 (set (match_dup 0) (const_int 0)))])
8346
8347 (define_split
8348 [(set (match_operand:SI 0 "s_register_operand" "")
8349 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8350 (match_operand:SI 2 "const_int_operand" "")))
8351 (clobber (reg:CC CC_REGNUM))]
8352 "TARGET_32BIT && reload_completed"
8353 [(parallel
8354 [(set (reg:CC CC_REGNUM)
8355 (compare:CC (match_dup 1) (match_dup 2)))
8356 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
8357 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
8358 (set (match_dup 0) (const_int 1)))]
8359 {
8360 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
8361 })
8362
8363 (define_split
8364 [(set (match_operand:SI 0 "s_register_operand" "")
8365 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8366 (match_operand:SI 2 "arm_add_operand" "")))
8367 (clobber (reg:CC CC_REGNUM))]
8368 "TARGET_32BIT && reload_completed"
8369 [(parallel
8370 [(set (reg:CC_NOOV CC_REGNUM)
8371 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
8372 (const_int 0)))
8373 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
8374 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
8375 (set (match_dup 0) (const_int 1)))])
8376
8377 (define_insn_and_split "*compare_scc"
8378 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8379 (match_operator:SI 1 "arm_comparison_operator"
8380 [(match_operand:SI 2 "s_register_operand" "r,r")
8381 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8382 (clobber (reg:CC CC_REGNUM))]
8383 "TARGET_32BIT"
8384 "#"
8385 "&& reload_completed"
8386 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
8387 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
8388 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
8389 {
8390 rtx tmp1;
8391 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
8392 operands[2], operands[3]);
8393 enum rtx_code rc = GET_CODE (operands[1]);
8394
8395 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
8396
8397 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8398 if (mode == CCFPmode || mode == CCFPEmode)
8399 rc = reverse_condition_maybe_unordered (rc);
8400 else
8401 rc = reverse_condition (rc);
8402 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8403 }
8404 [(set_attr "type" "multiple")]
8405 )
8406
8407 ;; Attempt to improve the sequences generated by the compare_scc splitters
8408 ;; so that they do not use conditional execution.
8409
8410 ;; Rd = (eq (reg1) (const_int 0)) // ARMv5
8411 ;; clz Rd, reg1
8412 ;; lsr Rd, Rd, #5
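;; This works because clz returns 32 only for a zero input, and 32 is the
;; only possible result with bit 5 set, so shifting right by 5 leaves exactly
;; 1 for reg1 == 0 and 0 otherwise.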
8413 (define_peephole2
8414 [(set (reg:CC CC_REGNUM)
8415 (compare:CC (match_operand:SI 1 "register_operand" "")
8416 (const_int 0)))
8417 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8418 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8419 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8420 (set (match_dup 0) (const_int 1)))]
8421 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8422 [(set (match_dup 0) (clz:SI (match_dup 1)))
8423 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8424 )
8425
8426 ;; Rd = (eq (reg1) (const_int 0)) // !ARMv5
8427 ;; negs Rd, reg1
8428 ;; adc Rd, Rd, reg1
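;; Here "negs Rd, reg1" computes 0 - reg1 and sets the carry flag only when
;; reg1 is zero (no borrow occurs), so "adc Rd, Rd, reg1" produces
;; (0 - reg1) + reg1 + C, i.e. 1 when reg1 == 0 and 0 otherwise.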
8429 (define_peephole2
8430 [(set (reg:CC CC_REGNUM)
8431 (compare:CC (match_operand:SI 1 "register_operand" "")
8432 (const_int 0)))
8433 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8434 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8435 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8436 (set (match_dup 0) (const_int 1)))
8437 (match_scratch:SI 2 "r")]
8438 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8439 [(parallel
8440 [(set (reg:CC CC_REGNUM)
8441 (compare:CC (const_int 0) (match_dup 1)))
8442 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
8443 (set (match_dup 0)
8444 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
8445 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
8446 )
8447
8448 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
8449 ;; sub Rd, Reg1, reg2
8450 ;; clz Rd, Rd
8451 ;; lsr Rd, Rd, #5
8452 (define_peephole2
8453 [(set (reg:CC CC_REGNUM)
8454 (compare:CC (match_operand:SI 1 "register_operand" "")
8455 (match_operand:SI 2 "arm_rhs_operand" "")))
8456 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8457 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8458 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8459 (set (match_dup 0) (const_int 1)))]
8460 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
8461 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
8462 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
8463 (set (match_dup 0) (clz:SI (match_dup 0)))
8464 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8465 )
8466
8467
8468 ;; Rd = (eq (reg1) (reg2)) // ! ARMv5 or optimising for size.
8469 ;; sub T1, Reg1, reg2
8470 ;; negs Rd, T1
8471 ;; adc Rd, Rd, T1
8472 (define_peephole2
8473 [(set (reg:CC CC_REGNUM)
8474 (compare:CC (match_operand:SI 1 "register_operand" "")
8475 (match_operand:SI 2 "arm_rhs_operand" "")))
8476 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8477 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8478 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8479 (set (match_dup 0) (const_int 1)))
8480 (match_scratch:SI 3 "r")]
8481 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8482 [(set (match_dup 3) (match_dup 4))
8483 (parallel
8484 [(set (reg:CC CC_REGNUM)
8485 (compare:CC (const_int 0) (match_dup 3)))
8486 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
8487 (set (match_dup 0)
8488 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
8489 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
8490 "
8491 if (CONST_INT_P (operands[2]))
8492 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
8493 else
8494 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
8495 ")
8496
8497 (define_insn "*cond_move"
8498 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8499 (if_then_else:SI (match_operator 3 "equality_operator"
8500 [(match_operator 4 "arm_comparison_operator"
8501 [(match_operand 5 "cc_register" "") (const_int 0)])
8502 (const_int 0)])
8503 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
8504 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
8505 "TARGET_ARM"
8506 "*
8507 if (GET_CODE (operands[3]) == NE)
8508 {
8509 if (which_alternative != 1)
8510 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
8511 if (which_alternative != 0)
8512 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
8513 return \"\";
8514 }
8515 if (which_alternative != 0)
8516 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8517 if (which_alternative != 1)
8518 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
8519 return \"\";
8520 "
8521 [(set_attr "conds" "use")
8522 (set_attr_alternative "type"
8523 [(if_then_else (match_operand 2 "const_int_operand" "")
8524 (const_string "mov_imm")
8525 (const_string "mov_reg"))
8526 (if_then_else (match_operand 1 "const_int_operand" "")
8527 (const_string "mov_imm")
8528 (const_string "mov_reg"))
8529 (const_string "multiple")])
8530 (set_attr "length" "4,4,8")]
8531 )
8532
8533 (define_insn "*cond_arith"
8534 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8535 (match_operator:SI 5 "shiftable_operator"
8536 [(match_operator:SI 4 "arm_comparison_operator"
8537 [(match_operand:SI 2 "s_register_operand" "r,r")
8538 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
8539 (match_operand:SI 1 "s_register_operand" "0,?r")]))
8540 (clobber (reg:CC CC_REGNUM))]
8541 "TARGET_ARM"
8542 "*
8543 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
8544 return \"%i5\\t%0, %1, %2, lsr #31\";
8545
8546 output_asm_insn (\"cmp\\t%2, %3\", operands);
8547 if (GET_CODE (operands[5]) == AND)
8548 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
8549 else if (GET_CODE (operands[5]) == MINUS)
8550 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
8551 else if (which_alternative != 0)
8552 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8553 return \"%i5%d4\\t%0, %1, #1\";
8554 "
8555 [(set_attr "conds" "clob")
8556 (set_attr "length" "12")
8557 (set_attr "type" "multiple")]
8558 )
8559
8560 (define_insn "*cond_sub"
8561 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8562 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
8563 (match_operator:SI 4 "arm_comparison_operator"
8564 [(match_operand:SI 2 "s_register_operand" "r,r")
8565 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
8566 (clobber (reg:CC CC_REGNUM))]
8567 "TARGET_ARM"
8568 "*
8569 output_asm_insn (\"cmp\\t%2, %3\", operands);
8570 if (which_alternative != 0)
8571 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8572 return \"sub%d4\\t%0, %1, #1\";
8573 "
8574 [(set_attr "conds" "clob")
8575 (set_attr "length" "8,12")
8576 (set_attr "type" "multiple")]
8577 )
8578
8579 (define_insn "*cmp_ite0"
8580 [(set (match_operand 6 "dominant_cc_register" "")
8581 (compare
8582 (if_then_else:SI
8583 (match_operator 4 "arm_comparison_operator"
8584 [(match_operand:SI 0 "s_register_operand"
8585 "l,l,l,r,r,r,r,r,r")
8586 (match_operand:SI 1 "arm_add_operand"
8587 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
8588 (match_operator:SI 5 "arm_comparison_operator"
8589 [(match_operand:SI 2 "s_register_operand"
8590 "l,r,r,l,l,r,r,r,r")
8591 (match_operand:SI 3 "arm_add_operand"
8592 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
8593 (const_int 0))
8594 (const_int 0)))]
8595 "TARGET_32BIT"
8596 "*
8597 {
8598 static const char * const cmp1[NUM_OF_COND_CMP][2] =
8599 {
8600 {\"cmp%d5\\t%0, %1\",
8601 \"cmp%d4\\t%2, %3\"},
8602 {\"cmn%d5\\t%0, #%n1\",
8603 \"cmp%d4\\t%2, %3\"},
8604 {\"cmp%d5\\t%0, %1\",
8605 \"cmn%d4\\t%2, #%n3\"},
8606 {\"cmn%d5\\t%0, #%n1\",
8607 \"cmn%d4\\t%2, #%n3\"}
8608 };
8609 static const char * const cmp2[NUM_OF_COND_CMP][2] =
8610 {
8611 {\"cmp\\t%2, %3\",
8612 \"cmp\\t%0, %1\"},
8613 {\"cmp\\t%2, %3\",
8614 \"cmn\\t%0, #%n1\"},
8615 {\"cmn\\t%2, #%n3\",
8616 \"cmp\\t%0, %1\"},
8617 {\"cmn\\t%2, #%n3\",
8618 \"cmn\\t%0, #%n1\"}
8619 };
8620 static const char * const ite[2] =
8621 {
8622 \"it\\t%d5\",
8623 \"it\\t%d4\"
8624 };
8625 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
8626 CMP_CMP, CMN_CMP, CMP_CMP,
8627 CMN_CMP, CMP_CMN, CMN_CMN};
8628 int swap =
8629 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8630
8631 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8632 if (TARGET_THUMB2) {
8633 output_asm_insn (ite[swap], operands);
8634 }
8635 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8636 return \"\";
8637 }"
8638 [(set_attr "conds" "set")
8639 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
8640 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
8641 (set_attr "type" "multiple")
8642 (set_attr_alternative "length"
8643 [(const_int 6)
8644 (const_int 8)
8645 (const_int 8)
8646 (const_int 8)
8647 (const_int 8)
8648 (if_then_else (eq_attr "is_thumb" "no")
8649 (const_int 8)
8650 (const_int 10))
8651 (if_then_else (eq_attr "is_thumb" "no")
8652 (const_int 8)
8653 (const_int 10))
8654 (if_then_else (eq_attr "is_thumb" "no")
8655 (const_int 8)
8656 (const_int 10))
8657 (if_then_else (eq_attr "is_thumb" "no")
8658 (const_int 8)
8659 (const_int 10))])]
8660 )
8661
8662 (define_insn "*cmp_ite1"
8663 [(set (match_operand 6 "dominant_cc_register" "")
8664 (compare
8665 (if_then_else:SI
8666 (match_operator 4 "arm_comparison_operator"
8667 [(match_operand:SI 0 "s_register_operand"
8668 "l,l,l,r,r,r,r,r,r")
8669 (match_operand:SI 1 "arm_add_operand"
8670 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
8671 (match_operator:SI 5 "arm_comparison_operator"
8672 [(match_operand:SI 2 "s_register_operand"
8673 "l,r,r,l,l,r,r,r,r")
8674 (match_operand:SI 3 "arm_add_operand"
8675 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
8676 (const_int 1))
8677 (const_int 0)))]
8678 "TARGET_32BIT"
8679 "*
8680 {
8681 static const char * const cmp1[NUM_OF_COND_CMP][2] =
8682 {
8683 {\"cmp\\t%0, %1\",
8684 \"cmp\\t%2, %3\"},
8685 {\"cmn\\t%0, #%n1\",
8686 \"cmp\\t%2, %3\"},
8687 {\"cmp\\t%0, %1\",
8688 \"cmn\\t%2, #%n3\"},
8689 {\"cmn\\t%0, #%n1\",
8690 \"cmn\\t%2, #%n3\"}
8691 };
8692 static const char * const cmp2[NUM_OF_COND_CMP][2] =
8693 {
8694 {\"cmp%d4\\t%2, %3\",
8695 \"cmp%D5\\t%0, %1\"},
8696 {\"cmp%d4\\t%2, %3\",
8697 \"cmn%D5\\t%0, #%n1\"},
8698 {\"cmn%d4\\t%2, #%n3\",
8699 \"cmp%D5\\t%0, %1\"},
8700 {\"cmn%d4\\t%2, #%n3\",
8701 \"cmn%D5\\t%0, #%n1\"}
8702 };
8703 static const char * const ite[2] =
8704 {
8705 \"it\\t%d4\",
8706 \"it\\t%D5\"
8707 };
8708 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
8709 CMP_CMP, CMN_CMP, CMP_CMP,
8710 CMN_CMP, CMP_CMN, CMN_CMN};
8711 int swap =
8712 comparison_dominates_p (GET_CODE (operands[5]),
8713 reverse_condition (GET_CODE (operands[4])));
8714
8715 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8716 if (TARGET_THUMB2) {
8717 output_asm_insn (ite[swap], operands);
8718 }
8719 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8720 return \"\";
8721 }"
8722 [(set_attr "conds" "set")
8723 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
8724 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
8725 (set_attr_alternative "length"
8726 [(const_int 6)
8727 (const_int 8)
8728 (const_int 8)
8729 (const_int 8)
8730 (const_int 8)
8731 (if_then_else (eq_attr "is_thumb" "no")
8732 (const_int 8)
8733 (const_int 10))
8734 (if_then_else (eq_attr "is_thumb" "no")
8735 (const_int 8)
8736 (const_int 10))
8737 (if_then_else (eq_attr "is_thumb" "no")
8738 (const_int 8)
8739 (const_int 10))
8740 (if_then_else (eq_attr "is_thumb" "no")
8741 (const_int 8)
8742 (const_int 10))])
8743 (set_attr "type" "multiple")]
8744 )
8745
8746 (define_insn "*cmp_and"
8747 [(set (match_operand 6 "dominant_cc_register" "")
8748 (compare
8749 (and:SI
8750 (match_operator 4 "arm_comparison_operator"
8751 [(match_operand:SI 0 "s_register_operand"
8752 "l,l,l,r,r,r,r,r,r,r")
8753 (match_operand:SI 1 "arm_add_operand"
8754 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
8755 (match_operator:SI 5 "arm_comparison_operator"
8756 [(match_operand:SI 2 "s_register_operand"
8757 "l,r,r,l,l,r,r,r,r,r")
8758 (match_operand:SI 3 "arm_add_operand"
8759 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
8760 (const_int 0)))]
8761 "TARGET_32BIT"
8762 "*
8763 {
8764 static const char *const cmp1[NUM_OF_COND_CMP][2] =
8765 {
8766 {\"cmp%d5\\t%0, %1\",
8767 \"cmp%d4\\t%2, %3\"},
8768 {\"cmn%d5\\t%0, #%n1\",
8769 \"cmp%d4\\t%2, %3\"},
8770 {\"cmp%d5\\t%0, %1\",
8771 \"cmn%d4\\t%2, #%n3\"},
8772 {\"cmn%d5\\t%0, #%n1\",
8773 \"cmn%d4\\t%2, #%n3\"}
8774 };
8775 static const char *const cmp2[NUM_OF_COND_CMP][2] =
8776 {
8777 {\"cmp\\t%2, %3\",
8778 \"cmp\\t%0, %1\"},
8779 {\"cmp\\t%2, %3\",
8780 \"cmn\\t%0, #%n1\"},
8781 {\"cmn\\t%2, #%n3\",
8782 \"cmp\\t%0, %1\"},
8783 {\"cmn\\t%2, #%n3\",
8784 \"cmn\\t%0, #%n1\"}
8785 };
8786 static const char *const ite[2] =
8787 {
8788 \"it\\t%d5\",
8789 \"it\\t%d4\"
8790 };
8791 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
8792 CMP_CMP, CMN_CMP, CMP_CMP,
8793 CMP_CMP, CMN_CMP, CMP_CMN,
8794 CMN_CMN};
8795 int swap =
8796 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8797
8798 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8799 if (TARGET_THUMB2) {
8800 output_asm_insn (ite[swap], operands);
8801 }
8802 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8803 return \"\";
8804 }"
8805 [(set_attr "conds" "set")
8806 (set_attr "predicable" "no")
8807 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
8808 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
8809 (set_attr_alternative "length"
8810 [(const_int 6)
8811 (const_int 8)
8812 (const_int 8)
8813 (const_int 8)
8814 (const_int 8)
8815 (const_int 6)
8816 (if_then_else (eq_attr "is_thumb" "no")
8817 (const_int 8)
8818 (const_int 10))
8819 (if_then_else (eq_attr "is_thumb" "no")
8820 (const_int 8)
8821 (const_int 10))
8822 (if_then_else (eq_attr "is_thumb" "no")
8823 (const_int 8)
8824 (const_int 10))
8825 (if_then_else (eq_attr "is_thumb" "no")
8826 (const_int 8)
8827 (const_int 10))])
8828 (set_attr "type" "multiple")]
8829 )
8830
8831 (define_insn "*cmp_ior"
8832 [(set (match_operand 6 "dominant_cc_register" "")
8833 (compare
8834 (ior:SI
8835 (match_operator 4 "arm_comparison_operator"
8836 [(match_operand:SI 0 "s_register_operand"
8837 "l,l,l,r,r,r,r,r,r,r")
8838 (match_operand:SI 1 "arm_add_operand"
8839 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
8840 (match_operator:SI 5 "arm_comparison_operator"
8841 [(match_operand:SI 2 "s_register_operand"
8842 "l,r,r,l,l,r,r,r,r,r")
8843 (match_operand:SI 3 "arm_add_operand"
8844 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
8845 (const_int 0)))]
8846 "TARGET_32BIT"
8847 "*
8848 {
8849 static const char *const cmp1[NUM_OF_COND_CMP][2] =
8850 {
8851 {\"cmp\\t%0, %1\",
8852 \"cmp\\t%2, %3\"},
8853 {\"cmn\\t%0, #%n1\",
8854 \"cmp\\t%2, %3\"},
8855 {\"cmp\\t%0, %1\",
8856 \"cmn\\t%2, #%n3\"},
8857 {\"cmn\\t%0, #%n1\",
8858 \"cmn\\t%2, #%n3\"}
8859 };
8860 static const char *const cmp2[NUM_OF_COND_CMP][2] =
8861 {
8862 {\"cmp%D4\\t%2, %3\",
8863 \"cmp%D5\\t%0, %1\"},
8864 {\"cmp%D4\\t%2, %3\",
8865 \"cmn%D5\\t%0, #%n1\"},
8866 {\"cmn%D4\\t%2, #%n3\",
8867 \"cmp%D5\\t%0, %1\"},
8868 {\"cmn%D4\\t%2, #%n3\",
8869 \"cmn%D5\\t%0, #%n1\"}
8870 };
8871 static const char *const ite[2] =
8872 {
8873 \"it\\t%D4\",
8874 \"it\\t%D5\"
8875 };
8876 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
8877 CMP_CMP, CMN_CMP, CMP_CMP,
8878 CMP_CMP, CMN_CMP, CMP_CMN,
8879 CMN_CMN};
8880 int swap =
8881 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8882
8883 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8884 if (TARGET_THUMB2) {
8885 output_asm_insn (ite[swap], operands);
8886 }
8887 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8888 return \"\";
8889 }
8890 "
8891 [(set_attr "conds" "set")
8892 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
8893 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
8894 (set_attr_alternative "length"
8895 [(const_int 6)
8896 (const_int 8)
8897 (const_int 8)
8898 (const_int 8)
8899 (const_int 8)
8900 (const_int 6)
8901 (if_then_else (eq_attr "is_thumb" "no")
8902 (const_int 8)
8903 (const_int 10))
8904 (if_then_else (eq_attr "is_thumb" "no")
8905 (const_int 8)
8906 (const_int 10))
8907 (if_then_else (eq_attr "is_thumb" "no")
8908 (const_int 8)
8909 (const_int 10))
8910 (if_then_else (eq_attr "is_thumb" "no")
8911 (const_int 8)
8912 (const_int 10))])
8913 (set_attr "type" "multiple")]
8914 )
8915
8916 (define_insn_and_split "*ior_scc_scc"
8917 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8918 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
8919 [(match_operand:SI 1 "s_register_operand" "l,r")
8920 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
8921 (match_operator:SI 6 "arm_comparison_operator"
8922 [(match_operand:SI 4 "s_register_operand" "l,r")
8923 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
8924 (clobber (reg:CC CC_REGNUM))]
8925 "TARGET_32BIT
8926 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
8927 != CCmode)"
8928 "#"
8929 "TARGET_32BIT && reload_completed"
8930 [(set (match_dup 7)
8931 (compare
8932 (ior:SI
8933 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8934 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8935 (const_int 0)))
8936 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
8937 "operands[7]
8938 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
8939 DOM_CC_X_OR_Y),
8940 CC_REGNUM);"
8941 [(set_attr "conds" "clob")
8942 (set_attr "enabled_for_short_it" "yes,no")
8943 (set_attr "length" "16")
8944 (set_attr "type" "multiple")]
8945 )
8946
8947 ; If the above pattern is followed by a CMP insn, then the compare is
8948 ; redundant, since we can rework the conditional instruction that follows.
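; As a rough illustration (the names are arbitrary, not taken from this
; file), given something like
;
;   int f (int a, int b, int c, int d, int *p)
;   {
;     int t = (a == b) | (c == d);
;     *p = t;
;     return t ? 42 : 7;
;   }
;
; the value of T is both stored and then tested; because the flags were
; already set when the OR of the two comparisons was formed, the pattern
; below allows the extra compare of T against zero to be dropped.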
8949 (define_insn_and_split "*ior_scc_scc_cmp"
8950 [(set (match_operand 0 "dominant_cc_register" "")
8951 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
8952 [(match_operand:SI 1 "s_register_operand" "l,r")
8953 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
8954 (match_operator:SI 6 "arm_comparison_operator"
8955 [(match_operand:SI 4 "s_register_operand" "l,r")
8956 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
8957 (const_int 0)))
8958 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
8959 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8960 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
8961 "TARGET_32BIT"
8962 "#"
8963 "TARGET_32BIT && reload_completed"
8964 [(set (match_dup 0)
8965 (compare
8966 (ior:SI
8967 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8968 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8969 (const_int 0)))
8970 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
8971 ""
8972 [(set_attr "conds" "set")
8973 (set_attr "enabled_for_short_it" "yes,no")
8974 (set_attr "length" "16")
8975 (set_attr "type" "multiple")]
8976 )
8977
8978 (define_insn_and_split "*and_scc_scc"
8979 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8980 (and:SI (match_operator:SI 3 "arm_comparison_operator"
8981 [(match_operand:SI 1 "s_register_operand" "l,r")
8982 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
8983 (match_operator:SI 6 "arm_comparison_operator"
8984 [(match_operand:SI 4 "s_register_operand" "l,r")
8985 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
8986 (clobber (reg:CC CC_REGNUM))]
8987 "TARGET_32BIT
8988 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
8989 != CCmode)"
8990 "#"
8991 "TARGET_32BIT && reload_completed
8992 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
8993 != CCmode)"
8994 [(set (match_dup 7)
8995 (compare
8996 (and:SI
8997 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8998 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8999 (const_int 0)))
9000 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9001 "operands[7]
9002 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9003 DOM_CC_X_AND_Y),
9004 CC_REGNUM);"
9005 [(set_attr "conds" "clob")
9006 (set_attr "enabled_for_short_it" "yes,no")
9007 (set_attr "length" "16")
9008 (set_attr "type" "multiple")]
9009 )
9010
9011 ; If the above pattern is followed by a CMP insn, then the compare is
9012 ; redundant, since we can rework the conditional instruction that follows.
9013 (define_insn_and_split "*and_scc_scc_cmp"
9014 [(set (match_operand 0 "dominant_cc_register" "")
9015 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9016 [(match_operand:SI 1 "s_register_operand" "l,r")
9017 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9018 (match_operator:SI 6 "arm_comparison_operator"
9019 [(match_operand:SI 4 "s_register_operand" "l,r")
9020 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9021 (const_int 0)))
9022 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9023 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9024 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9025 "TARGET_32BIT"
9026 "#"
9027 "TARGET_32BIT && reload_completed"
9028 [(set (match_dup 0)
9029 (compare
9030 (and:SI
9031 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9032 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9033 (const_int 0)))
9034 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9035 ""
9036 [(set_attr "conds" "set")
9037 (set_attr "enabled_for_short_it" "yes,no")
9038 (set_attr "length" "16")
9039 (set_attr "type" "multiple")]
9040 )
9041
9042 ;; If there is no dominance in the comparison, then we can still save an
9043 ;; instruction in the AND case, since we know that the second compare
9044 ;; need only zero the value if false (if true, then the value is already
9045 ;; correct).
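;; For instance (an illustrative fragment; the names are arbitrary):
;;
;;   int f (int a, int b, int c, int d)
;;   {
;;     return (a > b) & (c < d);
;;   }
;;
;; Neither condition dominates the other, so two compares are unavoidable,
;; but the 0/1 result of the first comparison only has to be conditionally
;; cleared when the second comparison turns out to be false.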
9046 (define_insn_and_split "*and_scc_scc_nodom"
9047 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
9048 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9049 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9050 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9051 (match_operator:SI 6 "arm_comparison_operator"
9052 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9053 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9054 (clobber (reg:CC CC_REGNUM))]
9055 "TARGET_32BIT
9056 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9057 == CCmode)"
9058 "#"
9059 "TARGET_32BIT && reload_completed"
9060 [(parallel [(set (match_dup 0)
9061 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9062 (clobber (reg:CC CC_REGNUM))])
9063 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9064 (set (match_dup 0)
9065 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9066 (match_dup 0)
9067 (const_int 0)))]
9068 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9069 operands[4], operands[5]),
9070 CC_REGNUM);
9071 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9072 operands[5]);"
9073 [(set_attr "conds" "clob")
9074 (set_attr "length" "20")
9075 (set_attr "type" "multiple")]
9076 )
9077
9078 (define_split
9079 [(set (reg:CC_NOOV CC_REGNUM)
9080 (compare:CC_NOOV (ior:SI
9081 (and:SI (match_operand:SI 0 "s_register_operand" "")
9082 (const_int 1))
9083 (match_operator:SI 1 "arm_comparison_operator"
9084 [(match_operand:SI 2 "s_register_operand" "")
9085 (match_operand:SI 3 "arm_add_operand" "")]))
9086 (const_int 0)))
9087 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9088 "TARGET_ARM"
9089 [(set (match_dup 4)
9090 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9091 (match_dup 0)))
9092 (set (reg:CC_NOOV CC_REGNUM)
9093 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9094 (const_int 0)))]
9095 "")
9096
9097 (define_split
9098 [(set (reg:CC_NOOV CC_REGNUM)
9099 (compare:CC_NOOV (ior:SI
9100 (match_operator:SI 1 "arm_comparison_operator"
9101 [(match_operand:SI 2 "s_register_operand" "")
9102 (match_operand:SI 3 "arm_add_operand" "")])
9103 (and:SI (match_operand:SI 0 "s_register_operand" "")
9104 (const_int 1)))
9105 (const_int 0)))
9106 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9107 "TARGET_ARM"
9108 [(set (match_dup 4)
9109 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9110 (match_dup 0)))
9111 (set (reg:CC_NOOV CC_REGNUM)
9112 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9113 (const_int 0)))]
9114 "")
9115 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9116
9117 (define_insn_and_split "*negscc"
9118 [(set (match_operand:SI 0 "s_register_operand" "=r")
9119 (neg:SI (match_operator 3 "arm_comparison_operator"
9120 [(match_operand:SI 1 "s_register_operand" "r")
9121 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9122 (clobber (reg:CC CC_REGNUM))]
9123 "TARGET_ARM"
9124 "#"
9125 "&& reload_completed"
9126 [(const_int 0)]
9127 {
9128 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
9129
9130 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9131 {
9132 /* Emit mov\\t%0, %1, asr #31 */
9133 emit_insn (gen_rtx_SET (operands[0],
9134 gen_rtx_ASHIFTRT (SImode,
9135 operands[1],
9136 GEN_INT (31))));
9137 DONE;
9138 }
9139 else if (GET_CODE (operands[3]) == NE)
9140 {
9141 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
9142 if (CONST_INT_P (operands[2]))
9143 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
9144 gen_int_mode (-INTVAL (operands[2]),
9145 SImode)));
9146 else
9147 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
9148
9149 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9150 gen_rtx_NE (SImode,
9151 cc_reg,
9152 const0_rtx),
9153 gen_rtx_SET (operands[0],
9154 GEN_INT (~0))));
9155 DONE;
9156 }
9157 else
9158 {
9159 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
9160 emit_insn (gen_rtx_SET (cc_reg,
9161 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
9162 enum rtx_code rc = GET_CODE (operands[3]);
9163
9164 rc = reverse_condition (rc);
9165 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9166 gen_rtx_fmt_ee (rc,
9167 VOIDmode,
9168 cc_reg,
9169 const0_rtx),
9170 gen_rtx_SET (operands[0], const0_rtx)));
9171 rc = GET_CODE (operands[3]);
9172 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9173 gen_rtx_fmt_ee (rc,
9174 VOIDmode,
9175 cc_reg,
9176 const0_rtx),
9177 gen_rtx_SET (operands[0],
9178 GEN_INT (~0))));
9179 DONE;
9180 }
9181 FAIL;
9182 }
9183 [(set_attr "conds" "clob")
9184 (set_attr "length" "12")
9185 (set_attr "type" "multiple")]
9186 )
9187
9188 (define_insn_and_split "movcond_addsi"
9189 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
9190 (if_then_else:SI
9191 (match_operator 5 "comparison_operator"
9192 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
9193 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
9194 (const_int 0)])
9195 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
9196 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
9197 (clobber (reg:CC CC_REGNUM))]
9198 "TARGET_32BIT"
9199 "#"
9200 "&& reload_completed"
9201 [(set (reg:CC_NOOV CC_REGNUM)
9202 (compare:CC_NOOV
9203 (plus:SI (match_dup 3)
9204 (match_dup 4))
9205 (const_int 0)))
9206 (set (match_dup 0) (match_dup 1))
9207 (cond_exec (match_dup 6)
9208 (set (match_dup 0) (match_dup 2)))]
9209 "
9210 {
9211 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
9212 operands[3], operands[4]);
9213 enum rtx_code rc = GET_CODE (operands[5]);
9214 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
9215 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
9216 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
9217 rc = reverse_condition (rc);
9218 else
9219 std::swap (operands[1], operands[2]);
9220
9221 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
9222 }
9223 "
9224 [(set_attr "conds" "clob")
9225 (set_attr "enabled_for_short_it" "no,yes,yes")
9226 (set_attr "type" "multiple")]
9227 )
9228
9229 (define_insn "movcond"
9230 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9231 (if_then_else:SI
9232 (match_operator 5 "arm_comparison_operator"
9233 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9234 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9235 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9236 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9237 (clobber (reg:CC CC_REGNUM))]
9238 "TARGET_ARM"
9239 "*
9240 if (GET_CODE (operands[5]) == LT
9241 && (operands[4] == const0_rtx))
9242 {
9243 if (which_alternative != 1 && REG_P (operands[1]))
9244 {
9245 if (operands[2] == const0_rtx)
9246 return \"and\\t%0, %1, %3, asr #31\";
9247 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9248 }
9249 else if (which_alternative != 0 && REG_P (operands[2]))
9250 {
9251 if (operands[1] == const0_rtx)
9252 return \"bic\\t%0, %2, %3, asr #31\";
9253 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9254 }
9255 /* The only case that falls through to here is when both ops 1 & 2
9256 are constants. */
9257 }
9258
9259 if (GET_CODE (operands[5]) == GE
9260 && (operands[4] == const0_rtx))
9261 {
9262 if (which_alternative != 1 && REG_P (operands[1]))
9263 {
9264 if (operands[2] == const0_rtx)
9265 return \"bic\\t%0, %1, %3, asr #31\";
9266 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9267 }
9268 else if (which_alternative != 0 && REG_P (operands[2]))
9269 {
9270 if (operands[1] == const0_rtx)
9271 return \"and\\t%0, %2, %3, asr #31\";
9272 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9273 }
9274 /* The only case that falls through to here is when both ops 1 & 2
9275 are constants. */
9276 }
9277 if (CONST_INT_P (operands[4])
9278 && !const_ok_for_arm (INTVAL (operands[4])))
9279 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9280 else
9281 output_asm_insn (\"cmp\\t%3, %4\", operands);
9282 if (which_alternative != 0)
9283 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9284 if (which_alternative != 1)
9285 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9286 return \"\";
9287 "
9288 [(set_attr "conds" "clob")
9289 (set_attr "length" "8,8,12")
9290 (set_attr "type" "multiple")]
9291 )
9292
9293 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9294
9295 (define_insn "*ifcompare_plus_move"
9296 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9297 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9298 [(match_operand:SI 4 "s_register_operand" "r,r")
9299 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9300 (plus:SI
9301 (match_operand:SI 2 "s_register_operand" "r,r")
9302 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9303 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9304 (clobber (reg:CC CC_REGNUM))]
9305 "TARGET_ARM"
9306 "#"
9307 [(set_attr "conds" "clob")
9308 (set_attr "length" "8,12")
9309 (set_attr "type" "multiple")]
9310 )
9311
9312 (define_insn "*if_plus_move"
9313 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9314 (if_then_else:SI
9315 (match_operator 4 "arm_comparison_operator"
9316 [(match_operand 5 "cc_register" "") (const_int 0)])
9317 (plus:SI
9318 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9319 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9320 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9321 "TARGET_ARM"
9322 "@
9323 add%d4\\t%0, %2, %3
9324 sub%d4\\t%0, %2, #%n3
9325 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9326 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9327 [(set_attr "conds" "use")
9328 (set_attr "length" "4,4,8,8")
9329 (set_attr_alternative "type"
9330 [(if_then_else (match_operand 3 "const_int_operand" "")
9331 (const_string "alu_imm" )
9332 (const_string "alu_sreg"))
9333 (const_string "alu_imm")
9334 (const_string "multiple")
9335 (const_string "multiple")])]
9336 )
9337
9338 (define_insn "*ifcompare_move_plus"
9339 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9340 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9341 [(match_operand:SI 4 "s_register_operand" "r,r")
9342 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9343 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9344 (plus:SI
9345 (match_operand:SI 2 "s_register_operand" "r,r")
9346 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9347 (clobber (reg:CC CC_REGNUM))]
9348 "TARGET_ARM"
9349 "#"
9350 [(set_attr "conds" "clob")
9351 (set_attr "length" "8,12")
9352 (set_attr "type" "multiple")]
9353 )
9354
9355 (define_insn "*if_move_plus"
9356 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9357 (if_then_else:SI
9358 (match_operator 4 "arm_comparison_operator"
9359 [(match_operand 5 "cc_register" "") (const_int 0)])
9360 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9361 (plus:SI
9362 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9363 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9364 "TARGET_ARM"
9365 "@
9366 add%D4\\t%0, %2, %3
9367 sub%D4\\t%0, %2, #%n3
9368 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9369 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9370 [(set_attr "conds" "use")
9371 (set_attr "length" "4,4,8,8")
9372 (set_attr_alternative "type"
9373 [(if_then_else (match_operand 3 "const_int_operand" "")
9374 (const_string "alu_imm" )
9375 (const_string "alu_sreg"))
9376 (const_string "alu_imm")
9377 (const_string "multiple")
9378 (const_string "multiple")])]
9379 )
9380
9381 (define_insn "*ifcompare_arith_arith"
9382 [(set (match_operand:SI 0 "s_register_operand" "=r")
9383 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9384 [(match_operand:SI 5 "s_register_operand" "r")
9385 (match_operand:SI 6 "arm_add_operand" "rIL")])
9386 (match_operator:SI 8 "shiftable_operator"
9387 [(match_operand:SI 1 "s_register_operand" "r")
9388 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9389 (match_operator:SI 7 "shiftable_operator"
9390 [(match_operand:SI 3 "s_register_operand" "r")
9391 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9392 (clobber (reg:CC CC_REGNUM))]
9393 "TARGET_ARM"
9394 "#"
9395 [(set_attr "conds" "clob")
9396 (set_attr "length" "12")
9397 (set_attr "type" "multiple")]
9398 )
9399
9400 (define_insn "*if_arith_arith"
9401 [(set (match_operand:SI 0 "s_register_operand" "=r")
9402 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9403 [(match_operand 8 "cc_register" "") (const_int 0)])
9404 (match_operator:SI 6 "shiftable_operator"
9405 [(match_operand:SI 1 "s_register_operand" "r")
9406 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9407 (match_operator:SI 7 "shiftable_operator"
9408 [(match_operand:SI 3 "s_register_operand" "r")
9409 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9410 "TARGET_ARM"
9411 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9412 [(set_attr "conds" "use")
9413 (set_attr "length" "8")
9414 (set_attr "type" "multiple")]
9415 )
9416
9417 (define_insn "*ifcompare_arith_move"
9418 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9419 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9420 [(match_operand:SI 2 "s_register_operand" "r,r")
9421 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9422 (match_operator:SI 7 "shiftable_operator"
9423 [(match_operand:SI 4 "s_register_operand" "r,r")
9424 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9425 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9426 (clobber (reg:CC CC_REGNUM))]
9427 "TARGET_ARM"
9428 "*
9429 /* If we have an operation where (op x 0) is the identity operation, the
9430 conditional operator is LT or GE, we are comparing against zero, and
9431 everything is in registers, then we can do this in two instructions.  */
9432 if (operands[3] == const0_rtx
9433 && GET_CODE (operands[7]) != AND
9434 && REG_P (operands[5])
9435 && REG_P (operands[1])
9436 && REGNO (operands[1]) == REGNO (operands[4])
9437 && REGNO (operands[4]) != REGNO (operands[0]))
9438 {
9439 if (GET_CODE (operands[6]) == LT)
9440 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9441 else if (GET_CODE (operands[6]) == GE)
9442 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9443 }
9444 if (CONST_INT_P (operands[3])
9445 && !const_ok_for_arm (INTVAL (operands[3])))
9446 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9447 else
9448 output_asm_insn (\"cmp\\t%2, %3\", operands);
9449 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9450 if (which_alternative != 0)
9451 return \"mov%D6\\t%0, %1\";
9452 return \"\";
9453 "
9454 [(set_attr "conds" "clob")
9455 (set_attr "length" "8,12")
9456 (set_attr "type" "multiple")]
9457 )
9458
9459 (define_insn "*if_arith_move"
9460 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9461 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9462 [(match_operand 6 "cc_register" "") (const_int 0)])
9463 (match_operator:SI 5 "shiftable_operator"
9464 [(match_operand:SI 2 "s_register_operand" "r,r")
9465 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9466 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9467 "TARGET_ARM"
9468 "@
9469 %I5%d4\\t%0, %2, %3
9470 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9471 [(set_attr "conds" "use")
9472 (set_attr "length" "4,8")
9473 (set_attr_alternative "type"
9474 [(if_then_else (match_operand 3 "const_int_operand" "")
9475 (const_string "alu_shift_imm" )
9476 (const_string "alu_shift_reg"))
9477 (const_string "multiple")])]
9478 )
9479
9480 (define_insn "*ifcompare_move_arith"
9481 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9482 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9483 [(match_operand:SI 4 "s_register_operand" "r,r")
9484 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9485 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9486 (match_operator:SI 7 "shiftable_operator"
9487 [(match_operand:SI 2 "s_register_operand" "r,r")
9488 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9489 (clobber (reg:CC CC_REGNUM))]
9490 "TARGET_ARM"
9491 "*
9492 /* If we have an operation where (op x 0) is the identity operation, the
9493 conditional operator is LT or GE, we are comparing against zero, and
9494 everything is in registers, then we can do this in two instructions.  */
9495 if (operands[5] == const0_rtx
9496 && GET_CODE (operands[7]) != AND
9497 && REG_P (operands[3])
9498 && REG_P (operands[1])
9499 && REGNO (operands[1]) == REGNO (operands[2])
9500 && REGNO (operands[2]) != REGNO (operands[0]))
9501 {
9502 if (GET_CODE (operands[6]) == GE)
9503 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9504 else if (GET_CODE (operands[6]) == LT)
9505 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9506 }
9507
9508 if (CONST_INT_P (operands[5])
9509 && !const_ok_for_arm (INTVAL (operands[5])))
9510 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9511 else
9512 output_asm_insn (\"cmp\\t%4, %5\", operands);
9513
9514 if (which_alternative != 0)
9515 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9516 return \"%I7%D6\\t%0, %2, %3\";
9517 "
9518 [(set_attr "conds" "clob")
9519 (set_attr "length" "8,12")
9520 (set_attr "type" "multiple")]
9521 )
9522
9523 (define_insn "*if_move_arith"
9524 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9525 (if_then_else:SI
9526 (match_operator 4 "arm_comparison_operator"
9527 [(match_operand 6 "cc_register" "") (const_int 0)])
9528 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9529 (match_operator:SI 5 "shiftable_operator"
9530 [(match_operand:SI 2 "s_register_operand" "r,r")
9531 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9532 "TARGET_ARM"
9533 "@
9534 %I5%D4\\t%0, %2, %3
9535 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9536 [(set_attr "conds" "use")
9537 (set_attr "length" "4,8")
9538 (set_attr_alternative "type"
9539 [(if_then_else (match_operand 3 "const_int_operand" "")
9540 (const_string "alu_shift_imm" )
9541 (const_string "alu_shift_reg"))
9542 (const_string "multiple")])]
9543 )
9544
9545 (define_insn "*ifcompare_move_not"
9546 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9547 (if_then_else:SI
9548 (match_operator 5 "arm_comparison_operator"
9549 [(match_operand:SI 3 "s_register_operand" "r,r")
9550 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9551 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9552 (not:SI
9553 (match_operand:SI 2 "s_register_operand" "r,r"))))
9554 (clobber (reg:CC CC_REGNUM))]
9555 "TARGET_ARM"
9556 "#"
9557 [(set_attr "conds" "clob")
9558 (set_attr "length" "8,12")
9559 (set_attr "type" "multiple")]
9560 )
9561
9562 (define_insn "*if_move_not"
9563 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9564 (if_then_else:SI
9565 (match_operator 4 "arm_comparison_operator"
9566 [(match_operand 3 "cc_register" "") (const_int 0)])
9567 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9568 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9569 "TARGET_ARM"
9570 "@
9571 mvn%D4\\t%0, %2
9572 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9573 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9574 [(set_attr "conds" "use")
9576 (set_attr "length" "4,8,8")
9577 (set_attr "type" "mvn_reg,multiple,multiple")]
9578 )
9579
9580 (define_insn "*ifcompare_not_move"
9581 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9582 (if_then_else:SI
9583 (match_operator 5 "arm_comparison_operator"
9584 [(match_operand:SI 3 "s_register_operand" "r,r")
9585 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9586 (not:SI
9587 (match_operand:SI 2 "s_register_operand" "r,r"))
9588 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9589 (clobber (reg:CC CC_REGNUM))]
9590 "TARGET_ARM"
9591 "#"
9592 [(set_attr "conds" "clob")
9593 (set_attr "length" "8,12")
9594 (set_attr "type" "multiple")]
9595 )
9596
9597 (define_insn "*if_not_move"
9598 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9599 (if_then_else:SI
9600 (match_operator 4 "arm_comparison_operator"
9601 [(match_operand 3 "cc_register" "") (const_int 0)])
9602 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9603 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9604 "TARGET_ARM"
9605 "@
9606 mvn%d4\\t%0, %2
9607 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9608 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9609 [(set_attr "conds" "use")
9610 (set_attr "type" "mvn_reg,multiple,multiple")
9611 (set_attr "length" "4,8,8")]
9612 )
9613
9614 (define_insn "*ifcompare_shift_move"
9615 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9616 (if_then_else:SI
9617 (match_operator 6 "arm_comparison_operator"
9618 [(match_operand:SI 4 "s_register_operand" "r,r")
9619 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9620 (match_operator:SI 7 "shift_operator"
9621 [(match_operand:SI 2 "s_register_operand" "r,r")
9622 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9623 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9624 (clobber (reg:CC CC_REGNUM))]
9625 "TARGET_ARM"
9626 "#"
9627 [(set_attr "conds" "clob")
9628 (set_attr "length" "8,12")
9629 (set_attr "type" "multiple")]
9630 )
9631
9632 (define_insn "*if_shift_move"
9633 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9634 (if_then_else:SI
9635 (match_operator 5 "arm_comparison_operator"
9636 [(match_operand 6 "cc_register" "") (const_int 0)])
9637 (match_operator:SI 4 "shift_operator"
9638 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9639 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9640 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9641 "TARGET_ARM"
9642 "@
9643 mov%d5\\t%0, %2%S4
9644 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9645 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9646 [(set_attr "conds" "use")
9647 (set_attr "shift" "2")
9648 (set_attr "length" "4,8,8")
9649 (set_attr_alternative "type"
9650 [(if_then_else (match_operand 3 "const_int_operand" "")
9651 (const_string "mov_shift" )
9652 (const_string "mov_shift_reg"))
9653 (const_string "multiple")
9654 (const_string "multiple")])]
9655 )
9656
9657 (define_insn "*ifcompare_move_shift"
9658 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9659 (if_then_else:SI
9660 (match_operator 6 "arm_comparison_operator"
9661 [(match_operand:SI 4 "s_register_operand" "r,r")
9662 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9663 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9664 (match_operator:SI 7 "shift_operator"
9665 [(match_operand:SI 2 "s_register_operand" "r,r")
9666 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9667 (clobber (reg:CC CC_REGNUM))]
9668 "TARGET_ARM"
9669 "#"
9670 [(set_attr "conds" "clob")
9671 (set_attr "length" "8,12")
9672 (set_attr "type" "multiple")]
9673 )
9674
9675 (define_insn "*if_move_shift"
9676 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9677 (if_then_else:SI
9678 (match_operator 5 "arm_comparison_operator"
9679 [(match_operand 6 "cc_register" "") (const_int 0)])
9680 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9681 (match_operator:SI 4 "shift_operator"
9682 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9683 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9684 "TARGET_ARM"
9685 "@
9686 mov%D5\\t%0, %2%S4
9687 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9688 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9689 [(set_attr "conds" "use")
9690 (set_attr "shift" "2")
9691 (set_attr "length" "4,8,8")
9692 (set_attr_alternative "type"
9693 [(if_then_else (match_operand 3 "const_int_operand" "")
9694 (const_string "mov_shift" )
9695 (const_string "mov_shift_reg"))
9696 (const_string "multiple")
9697 (const_string "multiple")])]
9698 )
9699
9700 (define_insn "*ifcompare_shift_shift"
9701 [(set (match_operand:SI 0 "s_register_operand" "=r")
9702 (if_then_else:SI
9703 (match_operator 7 "arm_comparison_operator"
9704 [(match_operand:SI 5 "s_register_operand" "r")
9705 (match_operand:SI 6 "arm_add_operand" "rIL")])
9706 (match_operator:SI 8 "shift_operator"
9707 [(match_operand:SI 1 "s_register_operand" "r")
9708 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9709 (match_operator:SI 9 "shift_operator"
9710 [(match_operand:SI 3 "s_register_operand" "r")
9711 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9712 (clobber (reg:CC CC_REGNUM))]
9713 "TARGET_ARM"
9714 "#"
9715 [(set_attr "conds" "clob")
9716 (set_attr "length" "12")
9717 (set_attr "type" "multiple")]
9718 )
9719
9720 (define_insn "*if_shift_shift"
9721 [(set (match_operand:SI 0 "s_register_operand" "=r")
9722 (if_then_else:SI
9723 (match_operator 5 "arm_comparison_operator"
9724 [(match_operand 8 "cc_register" "") (const_int 0)])
9725 (match_operator:SI 6 "shift_operator"
9726 [(match_operand:SI 1 "s_register_operand" "r")
9727 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9728 (match_operator:SI 7 "shift_operator"
9729 [(match_operand:SI 3 "s_register_operand" "r")
9730 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
9731 "TARGET_ARM"
9732 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
9733 [(set_attr "conds" "use")
9734 (set_attr "shift" "1")
9735 (set_attr "length" "8")
9736 (set (attr "type") (if_then_else
9737 (and (match_operand 2 "const_int_operand" "")
9738 (match_operand 4 "const_int_operand" ""))
9739 (const_string "mov_shift")
9740 (const_string "mov_shift_reg")))]
9741 )
9742
9743 (define_insn "*ifcompare_not_arith"
9744 [(set (match_operand:SI 0 "s_register_operand" "=r")
9745 (if_then_else:SI
9746 (match_operator 6 "arm_comparison_operator"
9747 [(match_operand:SI 4 "s_register_operand" "r")
9748 (match_operand:SI 5 "arm_add_operand" "rIL")])
9749 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9750 (match_operator:SI 7 "shiftable_operator"
9751 [(match_operand:SI 2 "s_register_operand" "r")
9752 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
9753 (clobber (reg:CC CC_REGNUM))]
9754 "TARGET_ARM"
9755 "#"
9756 [(set_attr "conds" "clob")
9757 (set_attr "length" "12")
9758 (set_attr "type" "multiple")]
9759 )
9760
9761 (define_insn "*if_not_arith"
9762 [(set (match_operand:SI 0 "s_register_operand" "=r")
9763 (if_then_else:SI
9764 (match_operator 5 "arm_comparison_operator"
9765 [(match_operand 4 "cc_register" "") (const_int 0)])
9766 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9767 (match_operator:SI 6 "shiftable_operator"
9768 [(match_operand:SI 2 "s_register_operand" "r")
9769 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
9770 "TARGET_ARM"
9771 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
9772 [(set_attr "conds" "use")
9773 (set_attr "type" "mvn_reg")
9774 (set_attr "length" "8")]
9775 )
9776
9777 (define_insn "*ifcompare_arith_not"
9778 [(set (match_operand:SI 0 "s_register_operand" "=r")
9779 (if_then_else:SI
9780 (match_operator 6 "arm_comparison_operator"
9781 [(match_operand:SI 4 "s_register_operand" "r")
9782 (match_operand:SI 5 "arm_add_operand" "rIL")])
9783 (match_operator:SI 7 "shiftable_operator"
9784 [(match_operand:SI 2 "s_register_operand" "r")
9785 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9786 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
9787 (clobber (reg:CC CC_REGNUM))]
9788 "TARGET_ARM"
9789 "#"
9790 [(set_attr "conds" "clob")
9791 (set_attr "length" "12")
9792 (set_attr "type" "multiple")]
9793 )
9794
9795 (define_insn "*if_arith_not"
9796 [(set (match_operand:SI 0 "s_register_operand" "=r")
9797 (if_then_else:SI
9798 (match_operator 5 "arm_comparison_operator"
9799 [(match_operand 4 "cc_register" "") (const_int 0)])
9800 (match_operator:SI 6 "shiftable_operator"
9801 [(match_operand:SI 2 "s_register_operand" "r")
9802 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9803 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
9804 "TARGET_ARM"
9805 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
9806 [(set_attr "conds" "use")
9807 (set_attr "type" "multiple")
9808 (set_attr "length" "8")]
9809 )
9810
9811 (define_insn "*ifcompare_neg_move"
9812 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9813 (if_then_else:SI
9814 (match_operator 5 "arm_comparison_operator"
9815 [(match_operand:SI 3 "s_register_operand" "r,r")
9816 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9817 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
9818 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9819 (clobber (reg:CC CC_REGNUM))]
9820 "TARGET_ARM"
9821 "#"
9822 [(set_attr "conds" "clob")
9823 (set_attr "length" "8,12")
9824 (set_attr "type" "multiple")]
9825 )
9826
9827 (define_insn_and_split "*if_neg_move"
9828 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
9829 (if_then_else:SI
9830 (match_operator 4 "arm_comparison_operator"
9831 [(match_operand 3 "cc_register" "") (const_int 0)])
9832 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
9833 (match_operand:SI 1 "s_register_operand" "0,0")))]
9834 "TARGET_32BIT"
9835 "#"
9836 "&& reload_completed"
9837 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
9838 (set (match_dup 0) (neg:SI (match_dup 2))))]
9839 ""
9840 [(set_attr "conds" "use")
9841 (set_attr "length" "4")
9842 (set_attr "arch" "t2,32")
9843 (set_attr "enabled_for_short_it" "yes,no")
9844 (set_attr "type" "logic_shift_imm")]
9845 )
9846
9847 (define_insn "*ifcompare_move_neg"
9848 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9849 (if_then_else:SI
9850 (match_operator 5 "arm_comparison_operator"
9851 [(match_operand:SI 3 "s_register_operand" "r,r")
9852 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9853 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9854 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
9855 (clobber (reg:CC CC_REGNUM))]
9856 "TARGET_ARM"
9857 "#"
9858 [(set_attr "conds" "clob")
9859 (set_attr "length" "8,12")
9860 (set_attr "type" "multiple")]
9861 )
9862
9863 (define_insn_and_split "*if_move_neg"
9864 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
9865 (if_then_else:SI
9866 (match_operator 4 "arm_comparison_operator"
9867 [(match_operand 3 "cc_register" "") (const_int 0)])
9868 (match_operand:SI 1 "s_register_operand" "0,0")
9869 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
9870 "TARGET_32BIT"
9871 "#"
9872 "&& reload_completed"
9873 [(cond_exec (match_dup 5)
9874 (set (match_dup 0) (neg:SI (match_dup 2))))]
9875 {
9876 machine_mode mode = GET_MODE (operands[3]);
9877 rtx_code rc = GET_CODE (operands[4]);
9878
9879 if (mode == CCFPmode || mode == CCFPEmode)
9880 rc = reverse_condition_maybe_unordered (rc);
9881 else
9882 rc = reverse_condition (rc);
9883
9884 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
9885 }
9886 [(set_attr "conds" "use")
9887 (set_attr "length" "4")
9888 (set_attr "arch" "t2,32")
9889 (set_attr "enabled_for_short_it" "yes,no")
9890 (set_attr "type" "logic_shift_imm")]
9891 )
9892
9893 (define_insn "*arith_adjacentmem"
9894 [(set (match_operand:SI 0 "s_register_operand" "=r")
9895 (match_operator:SI 1 "shiftable_operator"
9896 [(match_operand:SI 2 "memory_operand" "m")
9897 (match_operand:SI 3 "memory_operand" "m")]))
9898 (clobber (match_scratch:SI 4 "=r"))]
9899 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
9900 "*
9901 {
9902 rtx ldm[3];
9903 rtx arith[4];
9904 rtx base_reg;
9905 HOST_WIDE_INT val1 = 0, val2 = 0;
9906
9907 if (REGNO (operands[0]) > REGNO (operands[4]))
9908 {
9909 ldm[1] = operands[4];
9910 ldm[2] = operands[0];
9911 }
9912 else
9913 {
9914 ldm[1] = operands[0];
9915 ldm[2] = operands[4];
9916 }
9917
9918 base_reg = XEXP (operands[2], 0);
9919
9920 if (!REG_P (base_reg))
9921 {
9922 val1 = INTVAL (XEXP (base_reg, 1));
9923 base_reg = XEXP (base_reg, 0);
9924 }
9925
9926 if (!REG_P (XEXP (operands[3], 0)))
9927 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
9928
9929 arith[0] = operands[0];
9930 arith[3] = operands[1];
9931
9932 if (val1 < val2)
9933 {
9934 arith[1] = ldm[1];
9935 arith[2] = ldm[2];
9936 }
9937 else
9938 {
9939 arith[1] = ldm[2];
9940 arith[2] = ldm[1];
9941 }
9942
9943 ldm[0] = base_reg;
9944 if (val1 != 0 && val2 != 0)
9945 {
9946 rtx ops[3];
9947
9948 if (val1 == 4 || val2 == 4)
9949 /* Other val must be 8, since we know they are adjacent and neither
9950 is zero. */
9951 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
9952 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
9953 {
9954 ldm[0] = ops[0] = operands[4];
9955 ops[1] = base_reg;
9956 ops[2] = GEN_INT (val1);
9957 output_add_immediate (ops);
9958 if (val1 < val2)
9959 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
9960 else
9961 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
9962 }
9963 else
9964 {
9965 /* Offset is out of range for a single add, so use two ldr. */
9966 ops[0] = ldm[1];
9967 ops[1] = base_reg;
9968 ops[2] = GEN_INT (val1);
9969 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9970 ops[0] = ldm[2];
9971 ops[2] = GEN_INT (val2);
9972 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9973 }
9974 }
9975 else if (val1 != 0)
9976 {
9977 if (val1 < val2)
9978 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
9979 else
9980 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
9981 }
9982 else
9983 {
9984 if (val1 < val2)
9985 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
9986 else
9987 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
9988 }
9989 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
9990 return \"\";
9991 }"
9992 [(set_attr "length" "12")
9993 (set_attr "predicable" "yes")
9994 (set_attr "type" "load_4")]
9995 )
9996
9997 ; This pattern is never tried by combine, so do it as a peephole
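; For instance, "mov r0, r1" followed by "cmp r1, #0" can be rewritten here
; as a single flag-setting move of r1 into r0 (the register names are purely
; illustrative).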
9998
9999 (define_peephole2
10000 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10001 (match_operand:SI 1 "arm_general_register_operand" ""))
10002 (set (reg:CC CC_REGNUM)
10003 (compare:CC (match_dup 1) (const_int 0)))]
10004 "TARGET_ARM"
10005 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10006 (set (match_dup 0) (match_dup 1))])]
10007 ""
10008 )
10009
10010 (define_split
10011 [(set (match_operand:SI 0 "s_register_operand" "")
10012 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10013 (const_int 0))
10014 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10015 [(match_operand:SI 3 "s_register_operand" "")
10016 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10017 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10018 "TARGET_ARM"
10019 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10020 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10021 (match_dup 5)))]
10022 ""
10023 )
10024
10025 ;; This split can be used because CC_Z mode implies that the following
10026 ;; branch will be an equality, or an unsigned inequality, so the sign
10027 ;; extension is not needed.
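;; For example, a comparison along the lines of
;;
;;   (compare:CC_Z (ashift:SI (subreg:SI (mem:QI ...) 0) (const_int 24))
;;                 (const_int 0x2a000000))
;;
;; can become a zero-extending byte load compared against 0x2a, since only
;; equality (or an unsigned ordering) of the shifted value is ever tested.
;; (The constant is purely illustrative; its low 24 bits must be zero.)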
10028
10029 (define_split
10030 [(set (reg:CC_Z CC_REGNUM)
10031 (compare:CC_Z
10032 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10033 (const_int 24))
10034 (match_operand 1 "const_int_operand" "")))
10035 (clobber (match_scratch:SI 2 ""))]
10036 "TARGET_ARM
10037 && ((UINTVAL (operands[1]))
10038 == ((UINTVAL (operands[1])) >> 24) << 24)"
10039 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10040 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10041 "
10042 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10043 "
10044 )
10045 ;; ??? Check the patterns above for Thumb-2 usefulness
10046
10047 (define_expand "prologue"
10048 [(clobber (const_int 0))]
10049 "TARGET_EITHER"
10050 "if (TARGET_32BIT)
10051 arm_expand_prologue ();
10052 else
10053 thumb1_expand_prologue ();
10054 DONE;
10055 "
10056 )
10057
10058 (define_expand "epilogue"
10059 [(clobber (const_int 0))]
10060 "TARGET_EITHER"
10061 "
10062 if (crtl->calls_eh_return)
10063 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
10064 if (TARGET_THUMB1)
10065 {
10066 thumb1_expand_epilogue ();
10067 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10068 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10069 }
10070 else if (HAVE_return)
10071 {
10072 /* HAVE_return is testing for USE_RETURN_INSN (FALSE), so there is
10073 no need to test it again explicitly.  */
10074 emit_jump_insn (gen_return ());
10075 }
10076 else if (TARGET_32BIT)
10077 {
10078 arm_expand_epilogue (true);
10079 }
10080 DONE;
10081 "
10082 )
10083
10084 ;; Note - although unspec_volatiles USE all hard registers,
10085 ;; USEs are ignored after reload has completed.  Thus we need
10086 ;; to add an unspec of the link register to ensure that flow
10087 ;; does not think that it is unused by the sibcall branch that
10088 ;; will replace the standard function epilogue.
10089 (define_expand "sibcall_epilogue"
10090 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
10091 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10092 "TARGET_32BIT"
10093 "
10094 arm_expand_epilogue (false);
10095 DONE;
10096 "
10097 )
10098
10099 (define_expand "eh_epilogue"
10100 [(use (match_operand:SI 0 "register_operand"))
10101 (use (match_operand:SI 1 "register_operand"))
10102 (use (match_operand:SI 2 "register_operand"))]
10103 "TARGET_EITHER"
10104 "
10105 {
10106 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10107 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
10108 {
10109 rtx ra = gen_rtx_REG (Pmode, 2);
10110
10111 emit_move_insn (ra, operands[2]);
10112 operands[2] = ra;
10113 }
10114 /* This is a hack -- we may have crystallized the function type too
10115 early. */
10116 cfun->machine->func_type = 0;
10117 }"
10118 )
10119
10120 ;; This split is only used during output to reduce the number of patterns
10121 ;; that need assembler instructions added to them.  We allowed the setting
10122 ;; of the conditions to be implicit during rtl generation so that
10123 ;; the conditional compare patterns would work.  However, this conflicts to
10124 ;; some extent with the conditional data operations, so we have to split them
10125 ;; up again here.
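;; As a sketch of the effect of the first splitter below (operand numbers
;; and modes abridged for illustration), an insn of the form
;;
;;   [(set r0 (if_then_else:SI (lt r1 r2) r0 r3))
;;    (clobber CC)]
;;
;; becomes, after reload,
;;
;;   (set CC (compare r1 r2))
;;   (cond_exec (ge CC 0) (set r0 r3))
;;
;; i.e. the comparison is made explicit and the move is executed only when
;; the original condition is false (the then-arm is already in place).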
10126
10127 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10128 ;; conditional execution sufficient?
10129
10130 (define_split
10131 [(set (match_operand:SI 0 "s_register_operand" "")
10132 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10133 [(match_operand 2 "" "") (match_operand 3 "" "")])
10134 (match_dup 0)
10135 (match_operand 4 "" "")))
10136 (clobber (reg:CC CC_REGNUM))]
10137 "TARGET_ARM && reload_completed"
10138 [(set (match_dup 5) (match_dup 6))
10139 (cond_exec (match_dup 7)
10140 (set (match_dup 0) (match_dup 4)))]
10141 "
10142 {
10143 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10144 operands[2], operands[3]);
10145 enum rtx_code rc = GET_CODE (operands[1]);
10146
10147 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10148 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10149 if (mode == CCFPmode || mode == CCFPEmode)
10150 rc = reverse_condition_maybe_unordered (rc);
10151 else
10152 rc = reverse_condition (rc);
10153
10154 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10155 }"
10156 )
10157
10158 (define_split
10159 [(set (match_operand:SI 0 "s_register_operand" "")
10160 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10161 [(match_operand 2 "" "") (match_operand 3 "" "")])
10162 (match_operand 4 "" "")
10163 (match_dup 0)))
10164 (clobber (reg:CC CC_REGNUM))]
10165 "TARGET_ARM && reload_completed"
10166 [(set (match_dup 5) (match_dup 6))
10167 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10168 (set (match_dup 0) (match_dup 4)))]
10169 "
10170 {
10171 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10172 operands[2], operands[3]);
10173
10174 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10175 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10176 }"
10177 )
10178
10179 (define_split
10180 [(set (match_operand:SI 0 "s_register_operand" "")
10181 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10182 [(match_operand 2 "" "") (match_operand 3 "" "")])
10183 (match_operand 4 "" "")
10184 (match_operand 5 "" "")))
10185 (clobber (reg:CC CC_REGNUM))]
10186 "TARGET_ARM && reload_completed"
10187 [(set (match_dup 6) (match_dup 7))
10188 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10189 (set (match_dup 0) (match_dup 4)))
10190 (cond_exec (match_dup 8)
10191 (set (match_dup 0) (match_dup 5)))]
10192 "
10193 {
10194 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10195 operands[2], operands[3]);
10196 enum rtx_code rc = GET_CODE (operands[1]);
10197
10198 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10199 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10200 if (mode == CCFPmode || mode == CCFPEmode)
10201 rc = reverse_condition_maybe_unordered (rc);
10202 else
10203 rc = reverse_condition (rc);
10204
10205 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10206 }"
10207 )
10208
10209 (define_split
10210 [(set (match_operand:SI 0 "s_register_operand" "")
10211 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10212 [(match_operand:SI 2 "s_register_operand" "")
10213 (match_operand:SI 3 "arm_add_operand" "")])
10214 (match_operand:SI 4 "arm_rhs_operand" "")
10215 (not:SI
10216 (match_operand:SI 5 "s_register_operand" ""))))
10217 (clobber (reg:CC CC_REGNUM))]
10218 "TARGET_ARM && reload_completed"
10219 [(set (match_dup 6) (match_dup 7))
10220 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10221 (set (match_dup 0) (match_dup 4)))
10222 (cond_exec (match_dup 8)
10223 (set (match_dup 0) (not:SI (match_dup 5))))]
10224 "
10225 {
10226 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10227 operands[2], operands[3]);
10228 enum rtx_code rc = GET_CODE (operands[1]);
10229
10230 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10231 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10232 if (mode == CCFPmode || mode == CCFPEmode)
10233 rc = reverse_condition_maybe_unordered (rc);
10234 else
10235 rc = reverse_condition (rc);
10236
10237 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10238 }"
10239 )
10240
10241 (define_insn "*cond_move_not"
10242 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10243 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10244 [(match_operand 3 "cc_register" "") (const_int 0)])
10245 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10246 (not:SI
10247 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10248 "TARGET_ARM"
10249 "@
10250 mvn%D4\\t%0, %2
10251 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10252 [(set_attr "conds" "use")
10253 (set_attr "type" "mvn_reg,multiple")
10254 (set_attr "length" "4,8")]
10255 )
10256
10257 ;; The next two patterns occur when an AND operation is followed by a
10258 ;; scc insn sequence.
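;; For instance (an illustrative fragment only):
;;
;;   int f (int x)
;;   {
;;     return -((x >> 5) & 1);   /* 0 or -1, depending on bit 5.  */
;;   }
;;
;; corresponds to a one-bit sign_extract, which the first pattern below
;; emits as an "ands" with the mask 1 << 5 followed by a conditional
;; "mvnne".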
10259
10260 (define_insn "*sign_extract_onebit"
10261 [(set (match_operand:SI 0 "s_register_operand" "=r")
10262 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10263 (const_int 1)
10264 (match_operand:SI 2 "const_int_operand" "n")))
10265 (clobber (reg:CC CC_REGNUM))]
10266 "TARGET_ARM"
10267 "*
10268 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10269 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10270 return \"mvnne\\t%0, #0\";
10271 "
10272 [(set_attr "conds" "clob")
10273 (set_attr "length" "8")
10274 (set_attr "type" "multiple")]
10275 )
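;; Illustrative example (assuming a bit position of 3): the code above emits
;;	ands	r0, r1, #8
;;	mvnne	r0, #0
;; so the result is -1 when the selected bit is set and 0 otherwise, which is
;; the sign extension of a one-bit field.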
10276
10277 (define_insn "*not_signextract_onebit"
10278 [(set (match_operand:SI 0 "s_register_operand" "=r")
10279 (not:SI
10280 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10281 (const_int 1)
10282 (match_operand:SI 2 "const_int_operand" "n"))))
10283 (clobber (reg:CC CC_REGNUM))]
10284 "TARGET_ARM"
10285 "*
10286 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10287 output_asm_insn (\"tst\\t%1, %2\", operands);
10288 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10289 return \"movne\\t%0, #0\";
10290 "
10291 [(set_attr "conds" "clob")
10292 (set_attr "length" "12")
10293 (set_attr "type" "multiple")]
10294 )
10295 ;; ??? The above patterns need auditing for Thumb-2
10296
10297 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10298 ;; expressions. For simplicity, the first register is also in the unspec
10299 ;; part.
10300 ;; To avoid relying on a GNU extension, the length attribute is computed
10301 ;; in the C function arm_attr_length_push_multi.
10302 (define_insn "*push_multi"
10303 [(match_parallel 2 "multi_register_push"
10304 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
10305 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10306 UNSPEC_PUSH_MULT))])]
10307 ""
10308 "*
10309 {
10310 int num_saves = XVECLEN (operands[2], 0);
10311
10312 /* For the StrongARM at least it is faster to
10313 use STR to store only a single register.
10314 In Thumb mode always use push, and the assembler will pick
10315 something appropriate. */
10316 if (num_saves == 1 && TARGET_ARM)
10317 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
10318 else
10319 {
10320 int i;
10321 char pattern[100];
10322
10323 if (TARGET_32BIT)
10324 strcpy (pattern, \"push%?\\t{%1\");
10325 else
10326 strcpy (pattern, \"push\\t{%1\");
10327
10328 for (i = 1; i < num_saves; i++)
10329 {
10330 strcat (pattern, \", %|\");
10331 strcat (pattern,
10332 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10333 }
10334
10335 strcat (pattern, \"}\");
10336 output_asm_insn (pattern, operands);
10337 }
10338
10339 return \"\";
10340 }"
10341 [(set_attr "type" "store_16")
10342 (set (attr "length")
10343 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
10344 )
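;; For example (illustrative only), pushing r4, r5 and lr in 32-bit code
;; prints "push {r4, r5, lr}", while saving a single register in ARM code
;; instead uses the store-with-writeback form "str r4, [sp, #-4]!", as
;; explained in the comment above.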
10345
10346 (define_insn "stack_tie"
10347 [(set (mem:BLK (scratch))
10348 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10349 (match_operand:SI 1 "s_register_operand" "rk")]
10350 UNSPEC_PRLG_STK))]
10351 ""
10352 ""
10353 [(set_attr "length" "0")
10354 (set_attr "type" "block")]
10355 )
10356
10357 ;; Pop (as used in epilogue RTL)
10358 ;;
10359 (define_insn "*load_multiple_with_writeback"
10360 [(match_parallel 0 "load_multiple_operation"
10361 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10362 (plus:SI (match_dup 1)
10363 (match_operand:SI 2 "const_int_I_operand" "I")))
10364 (set (match_operand:SI 3 "s_register_operand" "=rk")
10365 (mem:SI (match_dup 1)))
10366 ])]
10367 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10368 "*
10369 {
10370 arm_output_multireg_pop (operands, /*return_pc=*/false,
10371 /*cond=*/const_true_rtx,
10372 /*reverse=*/false,
10373 /*update=*/true);
10374 return \"\";
10375 }
10376 "
10377 [(set_attr "type" "load_16")
10378 (set_attr "predicable" "yes")
10379 (set (attr "length")
10380 (symbol_ref "arm_attr_length_pop_multi (operands,
10381 /*return_pc=*/false,
10382 /*write_back_p=*/true)"))]
10383 )
10384
10385 ;; Pop with return (as used in epilogue RTL)
10386 ;;
10387 ;; This instruction is generated when the registers are popped at the end of
10388 ;; the epilogue. Instead of popping the value into LR and then generating a
10389 ;; jump to LR, the value is popped directly into PC. Hence the pattern is
10390 ;; combined with (return).
10391 (define_insn "*pop_multiple_with_writeback_and_return"
10392 [(match_parallel 0 "pop_multiple_return"
10393 [(return)
10394 (set (match_operand:SI 1 "s_register_operand" "+rk")
10395 (plus:SI (match_dup 1)
10396 (match_operand:SI 2 "const_int_I_operand" "I")))
10397 (set (match_operand:SI 3 "s_register_operand" "=rk")
10398 (mem:SI (match_dup 1)))
10399 ])]
10400 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10401 "*
10402 {
10403 arm_output_multireg_pop (operands, /*return_pc=*/true,
10404 /*cond=*/const_true_rtx,
10405 /*reverse=*/false,
10406 /*update=*/true);
10407 return \"\";
10408 }
10409 "
10410 [(set_attr "type" "load_16")
10411 (set_attr "predicable" "yes")
10412 (set (attr "length")
10413 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10414 /*write_back_p=*/true)"))]
10415 )
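;; A typical instance (illustrative) is an epilogue such as
;;	pop	{r4, r5, pc}
;; where loading PC both updates the stack pointer and performs the function
;; return in a single instruction.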
10416
10417 (define_insn "*pop_multiple_with_return"
10418 [(match_parallel 0 "pop_multiple_return"
10419 [(return)
10420 (set (match_operand:SI 2 "s_register_operand" "=rk")
10421 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
10422 ])]
10423 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10424 "*
10425 {
10426 arm_output_multireg_pop (operands, /*return_pc=*/true,
10427 /*cond=*/const_true_rtx,
10428 /*reverse=*/false,
10429 /*update=*/false);
10430 return \"\";
10431 }
10432 "
10433 [(set_attr "type" "load_16")
10434 (set_attr "predicable" "yes")
10435 (set (attr "length")
10436 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10437 /*write_back_p=*/false)"))]
10438 )
10439
10440 ;; Load into PC and return
10441 (define_insn "*ldr_with_return"
10442 [(return)
10443 (set (reg:SI PC_REGNUM)
10444 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
10445 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10446 "ldr%?\t%|pc, [%0], #4"
10447 [(set_attr "type" "load_4")
10448 (set_attr "predicable" "yes")]
10449 )
10450 ;; Pop for floating point registers (as used in epilogue RTL)
10451 (define_insn "*vfp_pop_multiple_with_writeback"
10452 [(match_parallel 0 "pop_multiple_fp"
10453 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10454 (plus:SI (match_dup 1)
10455 (match_operand:SI 2 "const_int_I_operand" "I")))
10456 (set (match_operand:DF 3 "vfp_hard_register_operand" "")
10457 (mem:DF (match_dup 1)))])]
10458 "TARGET_32BIT && TARGET_HARD_FLOAT"
10459 "*
10460 {
10461 int num_regs = XVECLEN (operands[0], 0);
10462 char pattern[100];
10463 rtx op_list[2];
10464 strcpy (pattern, \"vldm\\t\");
10465 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
10466 strcat (pattern, \"!, {\");
10467 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
10468 strcat (pattern, \"%P0\");
10469 if ((num_regs - 1) > 1)
10470 {
10471 strcat (pattern, \"-%P1\");
10472 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
10473 }
10474
10475 strcat (pattern, \"}\");
10476 output_asm_insn (pattern, op_list);
10477 return \"\";
10478 }
10479 "
10480 [(set_attr "type" "load_16")
10481 (set_attr "conds" "unconditional")
10482 (set_attr "predicable" "no")]
10483 )
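;; For instance (illustrative), restoring d8-d15 from the stack would print
;;	vldm	sp!, {d8-d15}
;; with the register range built up in the code above from the first and last
;; entries of the parallel.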
10484
10485 ;; Special patterns for dealing with the constant pool
10486
10487 (define_insn "align_4"
10488 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10489 "TARGET_EITHER"
10490 "*
10491 assemble_align (32);
10492 return \"\";
10493 "
10494 [(set_attr "type" "no_insn")]
10495 )
10496
10497 (define_insn "align_8"
10498 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10499 "TARGET_EITHER"
10500 "*
10501 assemble_align (64);
10502 return \"\";
10503 "
10504 [(set_attr "type" "no_insn")]
10505 )
10506
10507 (define_insn "consttable_end"
10508 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10509 "TARGET_EITHER"
10510 "*
10511 making_const_table = FALSE;
10512 return \"\";
10513 "
10514 [(set_attr "type" "no_insn")]
10515 )
10516
10517 (define_insn "consttable_1"
10518 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10519 "TARGET_EITHER"
10520 "*
10521 making_const_table = TRUE;
10522 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10523 assemble_zeros (3);
10524 return \"\";
10525 "
10526 [(set_attr "length" "4")
10527 (set_attr "type" "no_insn")]
10528 )
10529
10530 (define_insn "consttable_2"
10531 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10532 "TARGET_EITHER"
10533 "*
10534 {
10535 rtx x = operands[0];
10536 making_const_table = TRUE;
10537 switch (GET_MODE_CLASS (GET_MODE (x)))
10538 {
10539 case MODE_FLOAT:
10540 arm_emit_fp16_const (x);
10541 break;
10542 default:
10543 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10544 assemble_zeros (2);
10545 break;
10546 }
10547 return \"\";
10548 }"
10549 [(set_attr "length" "4")
10550 (set_attr "type" "no_insn")]
10551 )
10552
10553 (define_insn "consttable_4"
10554 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10555 "TARGET_EITHER"
10556 "*
10557 {
10558 rtx x = operands[0];
10559 making_const_table = TRUE;
10560 scalar_float_mode float_mode;
10561 if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
10562 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
10563 else
10564 {
10565 /* XXX: Sometimes gcc does something really dumb and ends up with
10566 a HIGH in a constant pool entry, usually because it's trying to
10567 load into a VFP register. We know this will always be used in
10568 combination with a LO_SUM which ignores the high bits, so just
10569 strip off the HIGH. */
10570 if (GET_CODE (x) == HIGH)
10571 x = XEXP (x, 0);
10572 assemble_integer (x, 4, BITS_PER_WORD, 1);
10573 mark_symbol_refs_as_used (x);
10574 }
10575 return \"\";
10576 }"
10577 [(set_attr "length" "4")
10578 (set_attr "type" "no_insn")]
10579 )
10580
10581 (define_insn "consttable_8"
10582 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10583 "TARGET_EITHER"
10584 "*
10585 {
10586 making_const_table = TRUE;
10587 scalar_float_mode float_mode;
10588 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
10589 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
10590 float_mode, BITS_PER_WORD);
10591 else
10592 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10593 return \"\";
10594 }"
10595 [(set_attr "length" "8")
10596 (set_attr "type" "no_insn")]
10597 )
10598
10599 (define_insn "consttable_16"
10600 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10601 "TARGET_EITHER"
10602 "*
10603 {
10604 making_const_table = TRUE;
10605 scalar_float_mode float_mode;
10606 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
10607 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
10608 float_mode, BITS_PER_WORD);
10609 else
10610 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10611 return \"\";
10612 }"
10613 [(set_attr "length" "16")
10614 (set_attr "type" "no_insn")]
10615 )
10616
10617 ;; V5 instructions.
10618
10619 (define_insn "clzsi2"
10620 [(set (match_operand:SI 0 "s_register_operand" "=r")
10621 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10622 "TARGET_32BIT && arm_arch5t"
10623 "clz%?\\t%0, %1"
10624 [(set_attr "predicable" "yes")
10625 (set_attr "type" "clz")])
10626
10627 (define_insn "rbitsi2"
10628 [(set (match_operand:SI 0 "s_register_operand" "=r")
10629 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
10630 "TARGET_32BIT && arm_arch_thumb2"
10631 "rbit%?\\t%0, %1"
10632 [(set_attr "predicable" "yes")
10633 (set_attr "type" "clz")])
10634
10635 ;; Keep this as a CTZ expression until after reload and then split
10636 ;; into RBIT + CLZ. Since RBIT is represented as an UNSPEC it is unlikely
10637 ;; to fold with any other expression.
10638
10639 (define_insn_and_split "ctzsi2"
10640 [(set (match_operand:SI 0 "s_register_operand" "=r")
10641 (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10642 "TARGET_32BIT && arm_arch_thumb2"
10643 "#"
10644 "&& reload_completed"
10645 [(const_int 0)]
10646 "
10647 emit_insn (gen_rbitsi2 (operands[0], operands[1]));
10648 emit_insn (gen_clzsi2 (operands[0], operands[0]));
10649 DONE;
10650 ")
10651
10652 ;; V5E instructions.
10653
10654 (define_insn "prefetch"
10655 [(prefetch (match_operand:SI 0 "address_operand" "p")
10656 (match_operand:SI 1 "" "")
10657 (match_operand:SI 2 "" ""))]
10658 "TARGET_32BIT && arm_arch5te"
10659 "pld\\t%a0"
10660 [(set_attr "type" "load_4")]
10661 )
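;; For example (illustrative), __builtin_prefetch (p) with the address in r0
;; emits "pld [r0]"; the read/write and locality hints (operands 1 and 2) are
;; accepted but ignored by this pattern.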
10662
10663 ;; General predication pattern
10664
10665 (define_cond_exec
10666 [(match_operator 0 "arm_comparison_operator"
10667 [(match_operand 1 "cc_register" "")
10668 (const_int 0)])]
10669 "TARGET_32BIT
10670 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
10671 ""
10672 [(set_attr "predicated" "yes")]
10673 )
10674
10675 (define_insn "force_register_use"
10676 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
10677 ""
10678 "%@ %0 needed"
10679 [(set_attr "length" "0")
10680 (set_attr "type" "no_insn")]
10681 )
10682
10683
10684 ;; Patterns for exception handling
10685
10686 (define_expand "eh_return"
10687 [(use (match_operand 0 "general_operand"))]
10688 "TARGET_EITHER"
10689 "
10690 {
10691 if (TARGET_32BIT)
10692 emit_insn (gen_arm_eh_return (operands[0]));
10693 else
10694 emit_insn (gen_thumb_eh_return (operands[0]));
10695 DONE;
10696 }"
10697 )
10698
10699 ;; We can't expand this before we know where the link register is stored.
10700 (define_insn_and_split "arm_eh_return"
10701 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10702 VUNSPEC_EH_RETURN)
10703 (clobber (match_scratch:SI 1 "=&r"))]
10704 "TARGET_ARM"
10705 "#"
10706 "&& reload_completed"
10707 [(const_int 0)]
10708 "
10709 {
10710 arm_set_return_address (operands[0], operands[1]);
10711 DONE;
10712 }"
10713 )
10714
10715 \f
10716 ;; TLS support
10717
10718 (define_insn "load_tp_hard"
10719 [(set (match_operand:SI 0 "register_operand" "=r")
10720 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10721 "TARGET_HARD_TP"
10722 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10723 [(set_attr "predicable" "yes")
10724 (set_attr "type" "mrs")]
10725 )
10726
10727 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10728 (define_insn "load_tp_soft_fdpic"
10729 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10730 (clobber (reg:SI FDPIC_REGNUM))
10731 (clobber (reg:SI LR_REGNUM))
10732 (clobber (reg:SI IP_REGNUM))
10733 (clobber (reg:CC CC_REGNUM))]
10734 "TARGET_SOFT_TP && TARGET_FDPIC"
10735 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10736 [(set_attr "conds" "clob")
10737 (set_attr "type" "branch")]
10738 )
10739
10740 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10741 (define_insn "load_tp_soft"
10742 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10743 (clobber (reg:SI LR_REGNUM))
10744 (clobber (reg:SI IP_REGNUM))
10745 (clobber (reg:CC CC_REGNUM))]
10746 "TARGET_SOFT_TP && !TARGET_FDPIC"
10747 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10748 [(set_attr "conds" "clob")
10749 (set_attr "type" "branch")]
10750 )
10751
10752 ;; tls descriptor call
10753 (define_insn "tlscall"
10754 [(set (reg:SI R0_REGNUM)
10755 (unspec:SI [(reg:SI R0_REGNUM)
10756 (match_operand:SI 0 "" "X")
10757 (match_operand 1 "" "")] UNSPEC_TLS))
10758 (clobber (reg:SI R1_REGNUM))
10759 (clobber (reg:SI LR_REGNUM))
10760 (clobber (reg:SI CC_REGNUM))]
10761 "TARGET_GNU2_TLS"
10762 {
10763 targetm.asm_out.internal_label (asm_out_file, "LPIC",
10764 INTVAL (operands[1]));
10765 return "bl\\t%c0(tlscall)";
10766 }
10767 [(set_attr "conds" "clob")
10768 (set_attr "length" "4")
10769 (set_attr "type" "branch")]
10770 )
10771
10772 ;; For thread pointer builtin
10773 (define_expand "get_thread_pointersi"
10774 [(match_operand:SI 0 "s_register_operand")]
10775 ""
10776 "
10777 {
10778 arm_load_tp (operands[0]);
10779 DONE;
10780 }")
10781
10782 ;;
10783
10784 ;; We only care about the lower 16 bits of the constant
10785 ;; being inserted into the upper 16 bits of the register.
10786 (define_insn "*arm_movtas_ze"
10787 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
10788 (const_int 16)
10789 (const_int 16))
10790 (match_operand:SI 1 "const_int_operand" ""))]
10791 "TARGET_HAVE_MOVT"
10792 "@
10793 movt%?\t%0, %L1
10794 movt\t%0, %L1"
10795 [(set_attr "arch" "32,v8mb")
10796 (set_attr "predicable" "yes")
10797 (set_attr "length" "4")
10798 (set_attr "type" "alu_sreg")]
10799 )
10800
10801 (define_insn "*arm_rev"
10802 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
10803 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
10804 "arm_arch6"
10805 "@
10806 rev\t%0, %1
10807 rev%?\t%0, %1
10808 rev%?\t%0, %1"
10809 [(set_attr "arch" "t1,t2,32")
10810 (set_attr "length" "2,2,4")
10811 (set_attr "predicable" "no,yes,yes")
10812 (set_attr "type" "rev")]
10813 )
10814
10815 (define_expand "arm_legacy_rev"
10816 [(set (match_operand:SI 2 "s_register_operand")
10817 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
10818 (const_int 16))
10819 (match_dup 1)))
10820 (set (match_dup 2)
10821 (lshiftrt:SI (match_dup 2)
10822 (const_int 8)))
10823 (set (match_operand:SI 3 "s_register_operand")
10824 (rotatert:SI (match_dup 1)
10825 (const_int 8)))
10826 (set (match_dup 2)
10827 (and:SI (match_dup 2)
10828 (const_int -65281)))
10829 (set (match_operand:SI 0 "s_register_operand")
10830 (xor:SI (match_dup 3)
10831 (match_dup 2)))]
10832 "TARGET_32BIT"
10833 ""
10834 )
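;; The sequence above is the classic pre-ARMv6 byte-reversal idiom.  As an
;; illustration (register allocation and exact instruction selection are only
;; indicative), it corresponds roughly to:
;;	eor	r2, r1, r1, ror #16
;;	lsr	r2, r2, #8
;;	ror	r3, r1, #8
;;	bic	r2, r2, #0xff00
;;	eor	r0, r3, r2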
10835
10836 ;; Reuse temporaries to keep register pressure down.
10837 (define_expand "thumb_legacy_rev"
10838 [(set (match_operand:SI 2 "s_register_operand")
10839 (ashift:SI (match_operand:SI 1 "s_register_operand")
10840 (const_int 24)))
10841 (set (match_operand:SI 3 "s_register_operand")
10842 (lshiftrt:SI (match_dup 1)
10843 (const_int 24)))
10844 (set (match_dup 3)
10845 (ior:SI (match_dup 3)
10846 (match_dup 2)))
10847 (set (match_operand:SI 4 "s_register_operand")
10848 (const_int 16))
10849 (set (match_operand:SI 5 "s_register_operand")
10850 (rotatert:SI (match_dup 1)
10851 (match_dup 4)))
10852 (set (match_dup 2)
10853 (ashift:SI (match_dup 5)
10854 (const_int 24)))
10855 (set (match_dup 5)
10856 (lshiftrt:SI (match_dup 5)
10857 (const_int 24)))
10858 (set (match_dup 5)
10859 (ior:SI (match_dup 5)
10860 (match_dup 2)))
10861 (set (match_dup 5)
10862 (rotatert:SI (match_dup 5)
10863 (match_dup 4)))
10864 (set (match_operand:SI 0 "s_register_operand")
10865 (ior:SI (match_dup 5)
10866 (match_dup 3)))]
10867 "TARGET_THUMB"
10868 ""
10869 )
10870
10871 ;; ARM-specific expansion of signed mod by power of 2
10872 ;; using conditional negate.
10873 ;; For r0 % n where n is a power of 2 produce:
10874 ;; rsbs r1, r0, #0
10875 ;; and r0, r0, #(n - 1)
10876 ;; and r1, r1, #(n - 1)
10877 ;; rsbpl r0, r1, #0
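;; As a worked example (illustrative), take r0 = -5 and n = 16: rsbs sets
;; r1 = 5 (a positive result, so the "pl" condition holds), the two ANDs give
;; r0 = 11 and r1 = 5, and the final rsbpl replaces r0 with -5, matching the
;; C definition of signed modulo (-5 % 16 == -5).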
10878
10879 (define_expand "modsi3"
10880 [(match_operand:SI 0 "register_operand")
10881 (match_operand:SI 1 "register_operand")
10882 (match_operand:SI 2 "const_int_operand")]
10883 "TARGET_32BIT"
10884 {
10885 HOST_WIDE_INT val = INTVAL (operands[2]);
10886
10887 if (val <= 0
10888 || exact_log2 (val) <= 0)
10889 FAIL;
10890
10891 rtx mask = GEN_INT (val - 1);
10892
10893 /* In the special case of r0 % 2 we can use the even shorter sequence:
10894 cmp r0, #0
10895 and r0, r0, #1
10896 rsblt r0, r0, #0. */
10897
10898 if (val == 2)
10899 {
10900 rtx cc_reg = arm_gen_compare_reg (LT,
10901 operands[1], const0_rtx, NULL_RTX);
10902 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
10903 rtx masked = gen_reg_rtx (SImode);
10904
10905 emit_insn (gen_andsi3 (masked, operands[1], mask));
10906 emit_move_insn (operands[0],
10907 gen_rtx_IF_THEN_ELSE (SImode, cond,
10908 gen_rtx_NEG (SImode,
10909 masked),
10910 masked));
10911 DONE;
10912 }
10913
10914 rtx neg_op = gen_reg_rtx (SImode);
10915 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
10916 operands[1]));
10917
10918 /* Extract the condition register and mode. */
10919 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
10920 rtx cc_reg = SET_DEST (cmp);
10921 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
10922
10923 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
10924
10925 rtx masked_neg = gen_reg_rtx (SImode);
10926 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
10927
10928 /* We want a conditional negate here, but emitting COND_EXEC rtxes
10929 during expand does not always work. Do an IF_THEN_ELSE instead. */
10930 emit_move_insn (operands[0],
10931 gen_rtx_IF_THEN_ELSE (SImode, cond,
10932 gen_rtx_NEG (SImode, masked_neg),
10933 operands[0]));
10934
10935
10936 DONE;
10937 }
10938 )
10939
10940 (define_expand "bswapsi2"
10941 [(set (match_operand:SI 0 "s_register_operand")
10942 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
10943 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
10944 "
10945 if (!arm_arch6)
10946 {
10947 rtx op2 = gen_reg_rtx (SImode);
10948 rtx op3 = gen_reg_rtx (SImode);
10949
10950 if (TARGET_THUMB)
10951 {
10952 rtx op4 = gen_reg_rtx (SImode);
10953 rtx op5 = gen_reg_rtx (SImode);
10954
10955 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
10956 op2, op3, op4, op5));
10957 }
10958 else
10959 {
10960 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
10961 op2, op3));
10962 }
10963
10964 DONE;
10965 }
10966 "
10967 )
10968
10969 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
10970 ;; and unsigned variants, respectively. For rev16, expose
10971 ;; byte-swapping in the lower 16 bits only.
10972 (define_insn "*arm_revsh"
10973 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
10974 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
10975 "arm_arch6"
10976 "@
10977 revsh\t%0, %1
10978 revsh%?\t%0, %1
10979 revsh%?\t%0, %1"
10980 [(set_attr "arch" "t1,t2,32")
10981 (set_attr "length" "2,2,4")
10982 (set_attr "type" "rev")]
10983 )
10984
10985 (define_insn "*arm_rev16"
10986 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
10987 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
10988 "arm_arch6"
10989 "@
10990 rev16\t%0, %1
10991 rev16%?\t%0, %1
10992 rev16%?\t%0, %1"
10993 [(set_attr "arch" "t1,t2,32")
10994 (set_attr "length" "2,2,4")
10995 (set_attr "type" "rev")]
10996 )
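;; As an illustration (not from the sources), __builtin_bswap16 on an
;; unsigned 16-bit value is normally expanded through bswaphi2 below and ends
;; up as a single rev16, whereas a byte swap whose result is immediately
;; sign-extended to 32 bits can match *arm_revsh and use revsh instead.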
10997
10998 ;; There are no canonicalisation rules for the position of the lshiftrt and
10999 ;; ashift operations within an IOR/AND RTX, so we provide two patterns, one
11000 ;; for each valid permutation.
11001
11002 (define_insn "arm_rev16si2"
11003 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11004 (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
11005 (const_int 8))
11006 (match_operand:SI 3 "const_int_operand" "n,n,n"))
11007 (and:SI (lshiftrt:SI (match_dup 1)
11008 (const_int 8))
11009 (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
11010 "arm_arch6
11011 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11012 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11013 "rev16\\t%0, %1"
11014 [(set_attr "arch" "t1,t2,32")
11015 (set_attr "length" "2,2,4")
11016 (set_attr "type" "rev")]
11017 )
11018
11019 (define_insn "arm_rev16si2_alt"
11020 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11021 (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
11022 (const_int 8))
11023 (match_operand:SI 2 "const_int_operand" "n,n,n"))
11024 (and:SI (ashift:SI (match_dup 1)
11025 (const_int 8))
11026 (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
11027 "arm_arch6
11028 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11029 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11030 "rev16\\t%0, %1"
11031 [(set_attr "arch" "t1,t2,32")
11032 (set_attr "length" "2,2,4")
11033 (set_attr "type" "rev")]
11034 )
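;; As an illustration (assuming the usual mask values accepted by the
;; aarch_rev16_* predicates), the C idiom
;;	((x << 8) & 0xff00ff00) | ((x >> 8) & 0x00ff00ff)
;; matches one of the two patterns above and is emitted as a single rev16.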
11035
11036 (define_expand "bswaphi2"
11037 [(set (match_operand:HI 0 "s_register_operand")
11038 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
11039 "arm_arch6"
11040 ""
11041 )
11042
11043 ;; Patterns for LDRD/STRD in Thumb2 mode
11044
11045 (define_insn "*thumb2_ldrd"
11046 [(set (match_operand:SI 0 "s_register_operand" "=r")
11047 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11048 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
11049 (set (match_operand:SI 3 "s_register_operand" "=r")
11050 (mem:SI (plus:SI (match_dup 1)
11051 (match_operand:SI 4 "const_int_operand" ""))))]
11052 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11053 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
11054 && (operands_ok_ldrd_strd (operands[0], operands[3],
11055 operands[1], INTVAL (operands[2]),
11056 false, true))"
11057 "ldrd%?\t%0, %3, [%1, %2]"
11058 [(set_attr "type" "load_8")
11059 (set_attr "predicable" "yes")])
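;; For example (illustrative), two adjacent loads such as
;;	ldr	r0, [r2, #8]
;;	ldr	r1, [r2, #12]
;; can be combined by the peepholes in ldrdstrd.md into
;;	ldrd	r0, r1, [r2, #8]
;; provided operands_ok_ldrd_strd accepts the register pair.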
11060
11061 (define_insn "*thumb2_ldrd_base"
11062 [(set (match_operand:SI 0 "s_register_operand" "=r")
11063 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11064 (set (match_operand:SI 2 "s_register_operand" "=r")
11065 (mem:SI (plus:SI (match_dup 1)
11066 (const_int 4))))]
11067 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11068 && (operands_ok_ldrd_strd (operands[0], operands[2],
11069 operands[1], 0, false, true))"
11070 "ldrd%?\t%0, %2, [%1]"
11071 [(set_attr "type" "load_8")
11072 (set_attr "predicable" "yes")])
11073
11074 (define_insn "*thumb2_ldrd_base_neg"
11075 [(set (match_operand:SI 0 "s_register_operand" "=r")
11076 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11077 (const_int -4))))
11078 (set (match_operand:SI 2 "s_register_operand" "=r")
11079 (mem:SI (match_dup 1)))]
11080 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11081 && (operands_ok_ldrd_strd (operands[0], operands[2],
11082 operands[1], -4, false, true))"
11083 "ldrd%?\t%0, %2, [%1, #-4]"
11084 [(set_attr "type" "load_8")
11085 (set_attr "predicable" "yes")])
11086
11087 (define_insn "*thumb2_strd"
11088 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11089 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
11090 (match_operand:SI 2 "s_register_operand" "r"))
11091 (set (mem:SI (plus:SI (match_dup 0)
11092 (match_operand:SI 3 "const_int_operand" "")))
11093 (match_operand:SI 4 "s_register_operand" "r"))]
11094 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11095 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
11096 && (operands_ok_ldrd_strd (operands[2], operands[4],
11097 operands[0], INTVAL (operands[1]),
11098 false, false))"
11099 "strd%?\t%2, %4, [%0, %1]"
11100 [(set_attr "type" "store_8")
11101 (set_attr "predicable" "yes")])
11102
11103 (define_insn "*thumb2_strd_base"
11104 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
11105 (match_operand:SI 1 "s_register_operand" "r"))
11106 (set (mem:SI (plus:SI (match_dup 0)
11107 (const_int 4)))
11108 (match_operand:SI 2 "s_register_operand" "r"))]
11109 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11110 && (operands_ok_ldrd_strd (operands[1], operands[2],
11111 operands[0], 0, false, false))"
11112 "strd%?\t%1, %2, [%0]"
11113 [(set_attr "type" "store_8")
11114 (set_attr "predicable" "yes")])
11115
11116 (define_insn "*thumb2_strd_base_neg"
11117 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11118 (const_int -4)))
11119 (match_operand:SI 1 "s_register_operand" "r"))
11120 (set (mem:SI (match_dup 0))
11121 (match_operand:SI 2 "s_register_operand" "r"))]
11122 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11123 && (operands_ok_ldrd_strd (operands[1], operands[2],
11124 operands[0], -4, false, false))"
11125 "strd%?\t%1, %2, [%0, #-4]"
11126 [(set_attr "type" "store_8")
11127 (set_attr "predicable" "yes")])
11128
11129 ;; ARMv8 CRC32 instructions.
11130 (define_insn "arm_<crc_variant>"
11131 [(set (match_operand:SI 0 "s_register_operand" "=r")
11132 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
11133 (match_operand:<crc_mode> 2 "s_register_operand" "r")]
11134 CRC))]
11135 "TARGET_CRC32"
11136 "<crc_variant>\\t%0, %1, %2"
11137 [(set_attr "type" "crc")
11138 (set_attr "conds" "unconditional")]
11139 )
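;; As an illustration (assuming the ACLE intrinsics from arm_acle.h), a call
;; such as __crc32w (acc, data) maps onto this pattern and emits something
;; like
;;	crc32w	r0, r0, r1
;; with the accumulator in operand 1 and the data word in operand 2; the
;; register allocation shown is only indicative.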
11140
11141 ;; Load the load/store double peephole optimizations.
11142 (include "ldrdstrd.md")
11143
11144 ;; Load the load/store multiple patterns
11145 (include "ldmstm.md")
11146
11147 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
11148 ;; larger lists without explicit writeback, as generated for the APCS_FRAME
11149 ;; epilogue. The operands are validated through the load_multiple_operation
11150 ;; match_parallel predicate rather than through constraints, so the pattern is
11151 ;; only enabled after reload.
11152 (define_insn "*load_multiple"
11153 [(match_parallel 0 "load_multiple_operation"
11154 [(set (match_operand:SI 2 "s_register_operand" "=rk")
11155 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11156 ])]
11157 "TARGET_32BIT && reload_completed"
11158 "*
11159 {
11160 arm_output_multireg_pop (operands, /*return_pc=*/false,
11161 /*cond=*/const_true_rtx,
11162 /*reverse=*/false,
11163 /*update=*/false);
11164 return \"\";
11165 }
11166 "
11167 [(set_attr "predicable" "yes")]
11168 )
11169
11170 (define_expand "copysignsf3"
11171 [(match_operand:SF 0 "register_operand")
11172 (match_operand:SF 1 "register_operand")
11173 (match_operand:SF 2 "register_operand")]
11174 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11175 "{
11176 emit_move_insn (operands[0], operands[2]);
11177 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
11178 GEN_INT (31), GEN_INT (0),
11179 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
11180 DONE;
11181 }"
11182 )
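;; In other words (illustrative summary), the result starts as a copy of
;; operand 2, the sign source, and the bit-field insert then overwrites its
;; low 31 bits with those of operand 1, so only the sign bit of operand 2
;; survives; the insert itself is typically a single bfi instruction.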
11183
11184 (define_expand "copysigndf3"
11185 [(match_operand:DF 0 "register_operand")
11186 (match_operand:DF 1 "register_operand")
11187 (match_operand:DF 2 "register_operand")]
11188 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11189 "{
11190 rtx op0_low = gen_lowpart (SImode, operands[0]);
11191 rtx op0_high = gen_highpart (SImode, operands[0]);
11192 rtx op1_low = gen_lowpart (SImode, operands[1]);
11193 rtx op1_high = gen_highpart (SImode, operands[1]);
11194 rtx op2_high = gen_highpart (SImode, operands[2]);
11195
11196 rtx scratch1 = gen_reg_rtx (SImode);
11197 rtx scratch2 = gen_reg_rtx (SImode);
11198 emit_move_insn (scratch1, op2_high);
11199 emit_move_insn (scratch2, op1_high);
11200
11201 emit_insn (gen_rtx_SET (scratch1,
11202 gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT (31))));
11203 emit_insn (gen_insv_t2 (scratch2, GEN_INT (1), GEN_INT (31), scratch1));
11204 emit_move_insn (op0_low, op1_low);
11205 emit_move_insn (op0_high, scratch2);
11206
11207 DONE;
11208 }"
11209 )
11210
11211 ;; movmisalign patterns for HImode and SImode.
11212 (define_expand "movmisalign<mode>"
11213 [(match_operand:HSI 0 "general_operand")
11214 (match_operand:HSI 1 "general_operand")]
11215 "unaligned_access"
11216 {
11217 /* This pattern is not permitted to fail during expansion: if both arguments
11218 are non-registers (e.g. memory := constant), force operand 1 into a
11219 register. */
11220 rtx (* gen_unaligned_load)(rtx, rtx);
11221 rtx tmp_dest = operands[0];
11222 if (!s_register_operand (operands[0], <MODE>mode)
11223 && !s_register_operand (operands[1], <MODE>mode))
11224 operands[1] = force_reg (<MODE>mode, operands[1]);
11225
11226 if (<MODE>mode == HImode)
11227 {
11228 gen_unaligned_load = gen_unaligned_loadhiu;
11229 tmp_dest = gen_reg_rtx (SImode);
11230 }
11231 else
11232 gen_unaligned_load = gen_unaligned_loadsi;
11233
11234 if (MEM_P (operands[1]))
11235 {
11236 emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
11237 if (<MODE>mode == HImode)
11238 emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
11239 }
11240 else
11241 emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
11242
11243 DONE;
11244 })
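;; As an illustration (hypothetical source, assuming -munaligned-access is in
;; effect), reading a packed field such as
;;	struct __attribute__((packed)) s { char c; int i; };
;;	int get (struct s *p) { return p->i; }
;; goes through this expander and is typically emitted as a single unaligned
;; load via unaligned_loadsi.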
11245
11246 (define_insn "arm_<cdp>"
11247 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11248 (match_operand:SI 1 "immediate_operand" "n")
11249 (match_operand:SI 2 "immediate_operand" "n")
11250 (match_operand:SI 3 "immediate_operand" "n")
11251 (match_operand:SI 4 "immediate_operand" "n")
11252 (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
11253 "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
11254 {
11255 arm_const_bounds (operands[0], 0, 16);
11256 arm_const_bounds (operands[1], 0, 16);
11257 arm_const_bounds (operands[2], 0, (1 << 5));
11258 arm_const_bounds (operands[3], 0, (1 << 5));
11259 arm_const_bounds (operands[4], 0, (1 << 5));
11260 arm_const_bounds (operands[5], 0, 8);
11261 return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
11262 }
11263 [(set_attr "length" "4")
11264 (set_attr "type" "coproc")])
11265
11266 (define_insn "*ldc"
11267 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11268 (match_operand:SI 1 "immediate_operand" "n")
11269 (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
11270 "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
11271 {
11272 arm_const_bounds (operands[0], 0, 16);
11273 arm_const_bounds (operands[1], 0, (1 << 5));
11274 return "<ldc>\\tp%c0, CR%c1, %2";
11275 }
11276 [(set_attr "length" "4")
11277 (set_attr "type" "coproc")])
11278
11279 (define_insn "*stc"
11280 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11281 (match_operand:SI 1 "immediate_operand" "n")
11282 (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
11283 "arm_coproc_builtin_available (VUNSPEC_<STC>)"
11284 {
11285 arm_const_bounds (operands[0], 0, 16);
11286 arm_const_bounds (operands[1], 0, (1 << 5));
11287 return "<stc>\\tp%c0, CR%c1, %2";
11288 }
11289 [(set_attr "length" "4")
11290 (set_attr "type" "coproc")])
11291
11292 (define_expand "arm_<ldc>"
11293 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11294 (match_operand:SI 1 "immediate_operand")
11295 (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
11296 "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
11297
11298 (define_expand "arm_<stc>"
11299 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11300 (match_operand:SI 1 "immediate_operand")
11301 (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
11302 "arm_coproc_builtin_available (VUNSPEC_<STC>)")
11303
11304 (define_insn "arm_<mcr>"
11305 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11306 (match_operand:SI 1 "immediate_operand" "n")
11307 (match_operand:SI 2 "s_register_operand" "r")
11308 (match_operand:SI 3 "immediate_operand" "n")
11309 (match_operand:SI 4 "immediate_operand" "n")
11310 (match_operand:SI 5 "immediate_operand" "n")] MCRI)
11311 (use (match_dup 2))]
11312 "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
11313 {
11314 arm_const_bounds (operands[0], 0, 16);
11315 arm_const_bounds (operands[1], 0, 8);
11316 arm_const_bounds (operands[3], 0, (1 << 5));
11317 arm_const_bounds (operands[4], 0, (1 << 5));
11318 arm_const_bounds (operands[5], 0, 8);
11319 return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
11320 }
11321 [(set_attr "length" "4")
11322 (set_attr "type" "coproc")])
11323
11324 (define_insn "arm_<mrc>"
11325 [(set (match_operand:SI 0 "s_register_operand" "=r")
11326 (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
11327 (match_operand:SI 2 "immediate_operand" "n")
11328 (match_operand:SI 3 "immediate_operand" "n")
11329 (match_operand:SI 4 "immediate_operand" "n")
11330 (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
11331 "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
11332 {
11333 arm_const_bounds (operands[1], 0, 16);
11334 arm_const_bounds (operands[2], 0, 8);
11335 arm_const_bounds (operands[3], 0, (1 << 5));
11336 arm_const_bounds (operands[4], 0, (1 << 5));
11337 arm_const_bounds (operands[5], 0, 8);
11338 return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
11339 }
11340 [(set_attr "length" "4")
11341 (set_attr "type" "coproc")])
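;; As an illustration (using the ACLE builtin from arm_acle.h), a call such
;; as __arm_mrc (15, 0, 13, 0, 3) maps onto this pattern and reads the same
;; CP15 register (c13, c0, 3) that load_tp_hard above uses for the thread
;; pointer.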
11342
11343 (define_insn "arm_<mcrr>"
11344 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11345 (match_operand:SI 1 "immediate_operand" "n")
11346 (match_operand:DI 2 "s_register_operand" "r")
11347 (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
11348 (use (match_dup 2))]
11349 "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
11350 {
11351 arm_const_bounds (operands[0], 0, 16);
11352 arm_const_bounds (operands[1], 0, 8);
11353 arm_const_bounds (operands[3], 0, (1 << 5));
11354 return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
11355 }
11356 [(set_attr "length" "4")
11357 (set_attr "type" "coproc")])
11358
11359 (define_insn "arm_<mrrc>"
11360 [(set (match_operand:DI 0 "s_register_operand" "=r")
11361 (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
11362 (match_operand:SI 2 "immediate_operand" "n")
11363 (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
11364 "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
11365 {
11366 arm_const_bounds (operands[1], 0, 16);
11367 arm_const_bounds (operands[2], 0, 8);
11368 arm_const_bounds (operands[3], 0, (1 << 5));
11369 return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
11370 }
11371 [(set_attr "length" "4")
11372 (set_attr "type" "coproc")])
11373
11374 (define_expand "speculation_barrier"
11375 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11376 "TARGET_EITHER"
11377 "
11378 /* For thumb1 (except Armv8 derivatives), and for pre-Armv7 we don't
11379 have a usable barrier (and probably don't need one in practice).
11380 But to be safe if such code is run on later architectures, call a
11381 helper function in libgcc that will emit an appropriate barrier for
11382 the system it is actually running on. */
11383 if (!(arm_arch7 || arm_arch8))
11384 {
11385 arm_emit_speculation_barrier_function ();
11386 DONE;
11387 }
11388 "
11389 )
11390
11391 ;; Generate a hard speculation barrier when we have not enabled speculation
11392 ;; tracking.
11393 (define_insn "*speculation_barrier_insn"
11394 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11395 "arm_arch7 || arm_arch8"
11396 "isb\;dsb\\tsy"
11397 [(set_attr "type" "block")
11398 (set_attr "length" "8")]
11399 )
11400
11401 ;; Vector bits common to IWMMXT and Neon
11402 (include "vec-common.md")
11403 ;; Load the Intel Wireless Multimedia Extension patterns
11404 (include "iwmmxt.md")
11405 ;; Load the VFP co-processor patterns
11406 (include "vfp.md")
11407 ;; Thumb-1 patterns
11408 (include "thumb1.md")
11409 ;; Thumb-2 patterns
11410 (include "thumb2.md")
11411 ;; Neon patterns
11412 (include "neon.md")
11413 ;; Crypto patterns
11414 (include "crypto.md")
11415 ;; Synchronization Primitives
11416 (include "sync.md")
11417 ;; Fixed-point patterns
11418 (include "arm-fixed.md")