1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2019 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
6
7 ;; This file is part of GCC.
8
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
13
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
18
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
22
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
24
25 \f
26 ;;---------------------------------------------------------------------------
27 ;; Constants
28
29 ;; Register numbers -- All machine registers should be defined here
30 (define_constants
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (R4_REGNUM 4) ; Fifth CORE register
34 (FDPIC_REGNUM 9) ; FDPIC register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (LAST_ARM_REGNUM 15) ;
40 (CC_REGNUM 100) ; Condition code pseudo register
41 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
42 ]
43 )
44 ;; 3rd operand to select_dominance_cc_mode
45 (define_constants
46 [(DOM_CC_X_AND_Y 0)
47 (DOM_CC_NX_OR_Y 1)
48 (DOM_CC_X_OR_Y 2)
49 ]
50 )
51 ;; conditional compare combination
52 (define_constants
53 [(CMP_CMP 0)
54 (CMN_CMP 1)
55 (CMP_CMN 2)
56 (CMN_CMN 3)
57 (NUM_OF_COND_CMP 4)
58 ]
59 )
60
61 \f
62 ;;---------------------------------------------------------------------------
63 ;; Attributes
64
65 ;; Processor type. This is created automatically from arm-cores.def.
66 (include "arm-tune.md")
67
68 ;; Instruction classification types
69 (include "types.md")
70
71 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
72 ; generating ARM code. This is used to control the length of some insn
73 ; patterns that share the same RTL in both ARM and Thumb code.
74 (define_attr "is_thumb" "yes,no"
75 (const (if_then_else (symbol_ref "TARGET_THUMB")
76 (const_string "yes") (const_string "no"))))
77
78 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
79 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
80
81 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
82 (define_attr "is_thumb1" "yes,no"
83 (const (if_then_else (symbol_ref "TARGET_THUMB1")
84 (const_string "yes") (const_string "no"))))
85
86 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
87 ; The arm_restrict_it flag enables the "short IT" feature which
88 ; restricts IT blocks to a single 16-bit instruction.
89 ; This attribute should only be used on 16-bit Thumb-2 instructions
90 ; which may be predicated (the "predicable" attribute must be set).
91 (define_attr "predicable_short_it" "no,yes" (const_string "no"))
92
93 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
94 ; This attribute should only be used on instructions which may emit
95 ; an IT block in their expansion which is not a short IT.
96 (define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
97
98 ;; Operand number of an input operand that is shifted. Zero if the
99 ;; given instruction does not shift one of its input operands.
100 (define_attr "shift" "" (const_int 0))
101
102 ;; [For compatibility with AArch64 in pipeline models]
103 ;; Attribute that specifies whether or not the instruction touches fp
104 ;; registers.
105 (define_attr "fp" "no,yes" (const_string "no"))
106
107 ; Floating Point Unit. If we only have floating point emulation, then there
108 ; is no point in scheduling the floating point insns. (Well, for best
109 ; performance we should try and group them together).
110 (define_attr "fpu" "none,vfp"
111 (const (symbol_ref "arm_fpu_attr")))
112
113 ; Predicated means that the insn form is conditionally executed based on a
114 ; predicate. We default to 'no' because no Thumb patterns match this rule
115 ; and not all ARM insns do.
116 (define_attr "predicated" "yes,no" (const_string "no"))
117
118 ; LENGTH of an instruction (in bytes)
119 (define_attr "length" ""
120 (const_int 4))
121
122 ; The architecture which supports the instruction (or alternative).
123 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
124 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode, "v6"
125 ; for ARM or Thumb-2 with arm_arch6, "nov6" for ARM without
126 ; arm_arch6, "v6t2" for Thumb-2 with arm_arch6, and "v8mb" for ARMv8-M
127 ; Baseline. This attribute is used to compute the "enabled" attribute;
128 ; use "any" to enable an alternative in all cases.
129 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,v8mb,iwmmxt,iwmmxt2,armv6_or_vfpv3,neon"
130 (const_string "any"))
131
132 (define_attr "arch_enabled" "no,yes"
133 (cond [(eq_attr "arch" "any")
134 (const_string "yes")
135
136 (and (eq_attr "arch" "a")
137 (match_test "TARGET_ARM"))
138 (const_string "yes")
139
140 (and (eq_attr "arch" "t")
141 (match_test "TARGET_THUMB"))
142 (const_string "yes")
143
144 (and (eq_attr "arch" "t1")
145 (match_test "TARGET_THUMB1"))
146 (const_string "yes")
147
148 (and (eq_attr "arch" "t2")
149 (match_test "TARGET_THUMB2"))
150 (const_string "yes")
151
152 (and (eq_attr "arch" "32")
153 (match_test "TARGET_32BIT"))
154 (const_string "yes")
155
156 (and (eq_attr "arch" "v6")
157 (match_test "TARGET_32BIT && arm_arch6"))
158 (const_string "yes")
159
160 (and (eq_attr "arch" "nov6")
161 (match_test "TARGET_32BIT && !arm_arch6"))
162 (const_string "yes")
163
164 (and (eq_attr "arch" "v6t2")
165 (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
166 (const_string "yes")
167
168 (and (eq_attr "arch" "v8mb")
169 (match_test "TARGET_THUMB1 && arm_arch8"))
170 (const_string "yes")
171
172 (and (eq_attr "arch" "iwmmxt2")
173 (match_test "TARGET_REALLY_IWMMXT2"))
174 (const_string "yes")
175
176 (and (eq_attr "arch" "armv6_or_vfpv3")
177 (match_test "arm_arch6 || TARGET_VFP3"))
178 (const_string "yes")
179
180 (and (eq_attr "arch" "neon")
181 (match_test "TARGET_NEON"))
182 (const_string "yes")
183 ]
184
185 (const_string "no")))
186
187 (define_attr "opt" "any,speed,size"
188 (const_string "any"))
189
190 (define_attr "opt_enabled" "no,yes"
191 (cond [(eq_attr "opt" "any")
192 (const_string "yes")
193
194 (and (eq_attr "opt" "speed")
195 (match_test "optimize_function_for_speed_p (cfun)"))
196 (const_string "yes")
197
198 (and (eq_attr "opt" "size")
199 (match_test "optimize_function_for_size_p (cfun)"))
200 (const_string "yes")]
201 (const_string "no")))
202
203 (define_attr "use_literal_pool" "no,yes"
204 (cond [(and (eq_attr "type" "f_loads,f_loadd")
205 (match_test "CONSTANT_P (operands[1])"))
206 (const_string "yes")]
207 (const_string "no")))
208
209 ; Enable all alternatives that are both arch_enabled and insn_enabled.
210 ; FIXME: opt_enabled has been temporarily removed until we have
211 ; an attribute that allows the use of such alternatives.
212 ; This depends on caching speed_p and size_p on a per-alternative
213 ; basis. The problem is that the enabled attribute
214 ; cannot depend on any state that is not cached or is not constant
215 ; for a compilation unit. We probably need a generic "hot/cold"
216 ; alternative which, if implemented, could help with this. We disable this
217 ; until such a time as it is implemented and/or the improvements or
218 ; regressions from removing this attribute are double-checked.
219 ; See ashldi3_neon and <shift>di3_neon in neon.md.
220
221 (define_attr "enabled" "no,yes"
222 (cond [(and (eq_attr "predicable_short_it" "no")
223 (and (eq_attr "predicated" "yes")
224 (match_test "arm_restrict_it")))
225 (const_string "no")
226
227 (and (eq_attr "enabled_for_short_it" "no")
228 (match_test "arm_restrict_it"))
229 (const_string "no")
230
231 (eq_attr "arch_enabled" "no")
232 (const_string "no")]
233 (const_string "yes")))
234
235 ; POOL_RANGE is how far away from a constant pool entry that this insn
236 ; can be placed. If the distance is zero, then this insn will never
237 ; reference the pool.
238 ; Note that for Thumb constant pools the PC value is rounded down to the
239 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
240 ; Thumb insns) should be set to <max_range> - 2.
241 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
242 ; before its address. It is set to <max_range> - (8 + <data_size>).
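; For example (illustrative only): an ARM word load with a 4KB forward
; range and 4-byte data would use a pool_range of 4096 and a
; neg_pool_range of 4096 - (8 + 4) = 4084.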
243 (define_attr "arm_pool_range" "" (const_int 0))
244 (define_attr "thumb2_pool_range" "" (const_int 0))
245 (define_attr "arm_neg_pool_range" "" (const_int 0))
246 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
247
248 (define_attr "pool_range" ""
249 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
250 (attr "arm_pool_range")))
251 (define_attr "neg_pool_range" ""
252 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
253 (attr "arm_neg_pool_range")))
254
255 ; An assembler sequence may clobber the condition codes without us knowing.
256 ; If such an insn references the pool, then we have no way of knowing how,
257 ; so use the most conservative value for pool_range.
258 (define_asm_attributes
259 [(set_attr "conds" "clob")
260 (set_attr "length" "4")
261 (set_attr "pool_range" "250")])
262
263 ; Load scheduling, set from the arm_ld_sched variable
264 ; initialized by arm_option_override()
265 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
266
267 ; condition codes: this one is used by final_prescan_insn to speed up
268 ; conditionalizing instructions. It saves having to scan the rtl to see if
269 ; it uses or alters the condition codes.
270 ;
271 ; USE means that the condition codes are used by the insn in the process of
272 ; outputting code, this means (at present) that we can't use the insn in
273 ; inlined branches
274 ;
275 ; SET means that the purpose of the insn is to set the condition codes in a
276 ; well defined manner.
277 ;
278 ; CLOB means that the condition codes are altered in an undefined manner, if
279 ; they are altered at all
280 ;
281 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
282 ; that the instruction does not use or alter the condition codes.
283 ;
284 ; NOCOND means that the instruction does not use or alter the condition
285 ; codes but can be converted into a conditionally executed instruction.
286
287 (define_attr "conds" "use,set,clob,unconditional,nocond"
288 (if_then_else
289 (ior (eq_attr "is_thumb1" "yes")
290 (eq_attr "type" "call"))
291 (const_string "clob")
292 (if_then_else (eq_attr "is_neon_type" "no")
293 (const_string "nocond")
294 (const_string "unconditional"))))
295
296 ; Predicable means that the insn can be conditionally executed based on
297 ; an automatically added predicate (additional patterns are generated by
298 ; gen...). We default to 'no' because no Thumb patterns match this rule
299 ; and not all ARM patterns do.
300 (define_attr "predicable" "no,yes" (const_string "no"))
301
302 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
303 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
304 ; suffer blockages enough to warrant modelling this (and it can adversely
305 ; affect the schedule).
306 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
307
308 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
309 ; to stall the processor. Used with model_wbuf above.
310 (define_attr "write_conflict" "no,yes"
311 (if_then_else (eq_attr "type"
312 "block,call,load_4")
313 (const_string "yes")
314 (const_string "no")))
315
316 ; Classify the insns into those that take one cycle and those that take more
317 ; than one on the main cpu execution unit.
318 (define_attr "core_cycles" "single,multi"
319 (if_then_else (eq_attr "type"
320 "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
321 alu_shift_imm, alu_shift_reg, alu_dsp_reg, alus_ext, alus_imm, alus_sreg,\
322 alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
323 logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
324 logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
325 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
326 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
327 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
328 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
329 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
330 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
331 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
332 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
333 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
334 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
335 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
336 (const_string "single")
337 (const_string "multi")))
338
339 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
340 ;; distant label. Only applicable to Thumb code.
341 (define_attr "far_jump" "yes,no" (const_string "no"))
342
343
344 ;; The number of machine instructions this pattern expands to.
345 ;; Used for Thumb-2 conditional execution.
346 (define_attr "ce_count" "" (const_int 1))
347
348 ;;---------------------------------------------------------------------------
349 ;; Unspecs
350
351 (include "unspecs.md")
352
353 ;;---------------------------------------------------------------------------
354 ;; Mode iterators
355
356 (include "iterators.md")
357
358 ;;---------------------------------------------------------------------------
359 ;; Predicates
360
361 (include "predicates.md")
362 (include "constraints.md")
363
364 ;;---------------------------------------------------------------------------
365 ;; Pipeline descriptions
366
367 (define_attr "tune_cortexr4" "yes,no"
368 (const (if_then_else
369 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
370 (const_string "yes")
371 (const_string "no"))))
372
373 ;; True if the generic scheduling description should be used.
374
375 (define_attr "generic_sched" "yes,no"
376 (const (if_then_else
377 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
378 arm926ejs,arm10e,arm1026ejs,arm1136js,\
379 arm1136jfs,cortexa5,cortexa7,cortexa8,\
380 cortexa9,cortexa12,cortexa15,cortexa17,\
381 cortexa53,cortexa57,cortexm4,cortexm7,\
382 exynosm1,marvell_pj4,xgene1")
383 (eq_attr "tune_cortexr4" "yes"))
384 (const_string "no")
385 (const_string "yes"))))
386
387 (define_attr "generic_vfp" "yes,no"
388 (const (if_then_else
389 (and (eq_attr "fpu" "vfp")
390 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
391 cortexa8,cortexa9,cortexa53,cortexm4,\
392 cortexm7,marvell_pj4,xgene1")
393 (eq_attr "tune_cortexr4" "no"))
394 (const_string "yes")
395 (const_string "no"))))
396
397 (include "marvell-f-iwmmxt.md")
398 (include "arm-generic.md")
399 (include "arm926ejs.md")
400 (include "arm1020e.md")
401 (include "arm1026ejs.md")
402 (include "arm1136jfs.md")
403 (include "fa526.md")
404 (include "fa606te.md")
405 (include "fa626te.md")
406 (include "fmp626.md")
407 (include "fa726te.md")
408 (include "cortex-a5.md")
409 (include "cortex-a7.md")
410 (include "cortex-a8.md")
411 (include "cortex-a9.md")
412 (include "cortex-a15.md")
413 (include "cortex-a17.md")
414 (include "cortex-a53.md")
415 (include "cortex-a57.md")
416 (include "cortex-r4.md")
417 (include "cortex-r4f.md")
418 (include "cortex-m7.md")
419 (include "cortex-m4.md")
420 (include "cortex-m4-fpu.md")
421 (include "exynos-m1.md")
422 (include "vfp11.md")
423 (include "marvell-pj4.md")
424 (include "xgene1.md")
425
426 \f
427 ;;---------------------------------------------------------------------------
428 ;; Insn patterns
429 ;;
430 ;; Addition insns.
431
432 ;; Note: For DImode insns, there is normally no reason why operands should
433 ;; not be in the same register, what we don't want is for something being
434 ;; written to partially overlap something that is an input.
435
436 (define_expand "adddi3"
437 [(parallel
438 [(set (match_operand:DI 0 "s_register_operand")
439 (plus:DI (match_operand:DI 1 "s_register_operand")
440 (match_operand:DI 2 "reg_or_int_operand")))
441 (clobber (reg:CC CC_REGNUM))])]
442 "TARGET_EITHER"
443 "
444 if (TARGET_THUMB1)
445 {
446 if (!REG_P (operands[2]))
447 operands[2] = force_reg (DImode, operands[2]);
448 }
449 else
450 {
451 rtx lo_result, hi_result, lo_dest, hi_dest;
452 rtx lo_op1, hi_op1, lo_op2, hi_op2;
453 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
454 &lo_op2, &hi_op2);
455 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
456 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
457
458 if (lo_op2 == const0_rtx)
459 {
460 lo_dest = lo_op1;
461 if (!arm_add_operand (hi_op2, SImode))
462 hi_op2 = force_reg (SImode, hi_op2);
463 /* Assume hi_op2 won't also be zero. */
464 emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
465 }
466 else
467 {
468 if (!arm_add_operand (lo_op2, SImode))
469 lo_op2 = force_reg (SImode, lo_op2);
470 if (!arm_not_operand (hi_op2, SImode))
471 hi_op2 = force_reg (SImode, hi_op2);
472
473 emit_insn (gen_addsi3_compare_op1 (lo_dest, lo_op1, lo_op2));
474 rtx carry = gen_rtx_LTU (SImode, gen_rtx_REG (CC_Cmode, CC_REGNUM),
475 const0_rtx);
476 if (hi_op2 == const0_rtx)
477 emit_insn (gen_add0si3_carryin (hi_dest, hi_op1, carry));
478 else
479 emit_insn (gen_addsi3_carryin (hi_dest, hi_op1, hi_op2, carry));
480 }
481
482 if (lo_result != lo_dest)
483 emit_move_insn (lo_result, lo_dest);
484 if (hi_result != hi_dest)
485 emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
486 DONE;
487 }
488 "
489 )
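;; A hedged sketch of what the expander above typically produces when both
;; halves of operand 2 are in registers (register names are illustrative,
;; following the usual AAPCS assignment of a 64-bit value to r0:r1/r2:r3):
;;
;;   long long add64 (long long a, long long b) { return a + b; }
;;
;; becomes roughly
;;
;;   adds  r0, r0, r2    @ low word; sets the carry flag
;;   adc   r1, r1, r3    @ high word; consumes the carry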
490
491 (define_expand "addv<mode>4"
492 [(match_operand:SIDI 0 "register_operand")
493 (match_operand:SIDI 1 "register_operand")
494 (match_operand:SIDI 2 "register_operand")
495 (match_operand 3 "")]
496 "TARGET_32BIT"
497 {
498 emit_insn (gen_add<mode>3_compareV (operands[0], operands[1], operands[2]));
499 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
500
501 DONE;
502 })
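;; addv<mode>4 implements the signed overflow-checking add used by the
;; overflow builtins. A hedged C-level example (function name is made up):
;;
;;   int add_checked (int a, int b, int *res)
;;   {
;;     /* Expands via add<mode>3_compareV and a branch on the V flag,
;;        roughly "adds ...; bvs ..." on this target.  */
;;     return __builtin_add_overflow (a, b, res);
;;   }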
503
504 (define_expand "uaddvsi4"
505 [(match_operand:SI 0 "s_register_operand")
506 (match_operand:SI 1 "s_register_operand")
507 (match_operand:SI 2 "arm_add_operand")
508 (match_operand 3 "")]
509 "TARGET_32BIT"
510 {
511 emit_insn (gen_addsi3_compare_op1 (operands[0], operands[1], operands[2]));
512 arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
513
514 DONE;
515 })
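;; Because operand 2 is an arm_add_operand, an unsigned overflow check
;; against a suitable constant can fold the constant directly into the
;; flag-setting add (or a subs of the negated value) instead of forcing it
;; into a register first. A hedged example (function name is made up):
;;
;;   int inc_checked (unsigned a, unsigned *res)
;;   {
;;     /* Roughly "adds r0, r0, #1" followed by a branch on the carry flag.  */
;;     return __builtin_add_overflow (a, 1u, res);
;;   }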
516
517 (define_expand "uaddvdi4"
518 [(match_operand:DI 0 "s_register_operand")
519 (match_operand:DI 1 "s_register_operand")
520 (match_operand:DI 2 "s_register_operand")
521 (match_operand 3 "")]
522 "TARGET_32BIT"
523 {
524 emit_insn (gen_adddi3_compareC (operands[0], operands[1], operands[2]));
525 arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
526
527 DONE;
528 })
529
530 (define_expand "addsi3"
531 [(set (match_operand:SI 0 "s_register_operand")
532 (plus:SI (match_operand:SI 1 "s_register_operand")
533 (match_operand:SI 2 "reg_or_int_operand")))]
534 "TARGET_EITHER"
535 "
536 if (TARGET_32BIT && CONST_INT_P (operands[2]))
537 {
538 arm_split_constant (PLUS, SImode, NULL_RTX,
539 INTVAL (operands[2]), operands[0], operands[1],
540 optimize && can_create_pseudo_p ());
541 DONE;
542 }
543 "
544 )
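;; arm_split_constant synthesizes additions of constants that cannot be
;; encoded as a single immediate. A hedged example (the exact split can
;; vary with the target options and the constant):
;;
;;   int add_const (int x) { return x + 0x10001; }
;;
;; might be split into two encodable immediates, e.g.
;;
;;   add  r0, r0, #65536
;;   add  r0, r0, #1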
545
546 ; If there is a scratch available, this will be faster than synthesizing the
547 ; addition.
548 (define_peephole2
549 [(match_scratch:SI 3 "r")
550 (set (match_operand:SI 0 "arm_general_register_operand" "")
551 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
552 (match_operand:SI 2 "const_int_operand" "")))]
553 "TARGET_32BIT &&
554 !(const_ok_for_arm (INTVAL (operands[2]))
555 || const_ok_for_arm (-INTVAL (operands[2])))
556 && const_ok_for_arm (~INTVAL (operands[2]))"
557 [(set (match_dup 3) (match_dup 2))
558 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
559 ""
560 )
561
562 ;; The r/r/k alternative is required when reloading the address
563 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
564 ;; put the duplicated register first, and not try the commutative version.
565 (define_insn_and_split "*arm_addsi3"
566 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
567 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
568 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
569 "TARGET_32BIT"
570 "@
571 add%?\\t%0, %0, %2
572 add%?\\t%0, %1, %2
573 add%?\\t%0, %1, %2
574 add%?\\t%0, %1, %2
575 add%?\\t%0, %1, %2
576 add%?\\t%0, %1, %2
577 add%?\\t%0, %2, %1
578 add%?\\t%0, %1, %2
579 addw%?\\t%0, %1, %2
580 addw%?\\t%0, %1, %2
581 sub%?\\t%0, %1, #%n2
582 sub%?\\t%0, %1, #%n2
583 sub%?\\t%0, %1, #%n2
584 subw%?\\t%0, %1, #%n2
585 subw%?\\t%0, %1, #%n2
586 #"
587 "TARGET_32BIT
588 && CONST_INT_P (operands[2])
589 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
590 && (reload_completed || !arm_eliminable_register (operands[1]))"
591 [(clobber (const_int 0))]
592 "
593 arm_split_constant (PLUS, SImode, curr_insn,
594 INTVAL (operands[2]), operands[0],
595 operands[1], 0);
596 DONE;
597 "
598 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
599 (set_attr "predicable" "yes")
600 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
601 (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
602 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
603 (const_string "alu_imm")
604 (const_string "alu_sreg")))
605 ]
606 )
607
608 (define_insn "adddi3_compareV"
609 [(set (reg:CC_V CC_REGNUM)
610 (ne:CC_V
611 (plus:TI
612 (sign_extend:TI (match_operand:DI 1 "s_register_operand" "r"))
613 (sign_extend:TI (match_operand:DI 2 "s_register_operand" "r")))
614 (sign_extend:TI (plus:DI (match_dup 1) (match_dup 2)))))
615 (set (match_operand:DI 0 "s_register_operand" "=&r")
616 (plus:DI (match_dup 1) (match_dup 2)))]
617 "TARGET_32BIT"
618 "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
619 [(set_attr "conds" "set")
620 (set_attr "length" "8")
621 (set_attr "type" "multiple")]
622 )
623
624 (define_insn "addsi3_compareV"
625 [(set (reg:CC_V CC_REGNUM)
626 (ne:CC_V
627 (plus:DI
628 (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
629 (sign_extend:DI (match_operand:SI 2 "register_operand" "r")))
630 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
631 (set (match_operand:SI 0 "register_operand" "=r")
632 (plus:SI (match_dup 1) (match_dup 2)))]
633 "TARGET_32BIT"
634 "adds%?\\t%0, %1, %2"
635 [(set_attr "conds" "set")
636 (set_attr "type" "alus_sreg")]
637 )
638
639 (define_insn "adddi3_compareC"
640 [(set (reg:CC_C CC_REGNUM)
641 (compare:CC_C
642 (plus:DI
643 (match_operand:DI 1 "register_operand" "r")
644 (match_operand:DI 2 "register_operand" "r"))
645 (match_dup 1)))
646 (set (match_operand:DI 0 "register_operand" "=&r")
647 (plus:DI (match_dup 1) (match_dup 2)))]
648 "TARGET_32BIT"
649 "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
650 [(set_attr "conds" "set")
651 (set_attr "length" "8")
652 (set_attr "type" "multiple")]
653 )
654
655 (define_insn "addsi3_compare0"
656 [(set (reg:CC_NOOV CC_REGNUM)
657 (compare:CC_NOOV
658 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
659 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
660 (const_int 0)))
661 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
662 (plus:SI (match_dup 1) (match_dup 2)))]
663 "TARGET_ARM"
664 "@
665 adds%?\\t%0, %1, %2
666 subs%?\\t%0, %1, #%n2
667 adds%?\\t%0, %1, %2"
668 [(set_attr "conds" "set")
669 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
670 )
671
672 (define_insn "*addsi3_compare0_scratch"
673 [(set (reg:CC_NOOV CC_REGNUM)
674 (compare:CC_NOOV
675 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
676 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
677 (const_int 0)))]
678 "TARGET_ARM"
679 "@
680 cmn%?\\t%0, %1
681 cmp%?\\t%0, #%n1
682 cmn%?\\t%0, %1"
683 [(set_attr "conds" "set")
684 (set_attr "predicable" "yes")
685 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
686 )
687
688 (define_insn "*compare_negsi_si"
689 [(set (reg:CC_Z CC_REGNUM)
690 (compare:CC_Z
691 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
692 (match_operand:SI 1 "s_register_operand" "l,r")))]
693 "TARGET_32BIT"
694 "cmn%?\\t%1, %0"
695 [(set_attr "conds" "set")
696 (set_attr "predicable" "yes")
697 (set_attr "arch" "t2,*")
698 (set_attr "length" "2,4")
699 (set_attr "predicable_short_it" "yes,no")
700 (set_attr "type" "alus_sreg")]
701 )
702
703 ;; This is the canonicalization of subsi3_compare when the
704 ;; addend is a constant.
705 (define_insn "cmpsi2_addneg"
706 [(set (reg:CC CC_REGNUM)
707 (compare:CC
708 (match_operand:SI 1 "s_register_operand" "r,r")
709 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
710 (set (match_operand:SI 0 "s_register_operand" "=r,r")
711 (plus:SI (match_dup 1)
712 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
713 "TARGET_32BIT
714 && (INTVAL (operands[2])
715 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
716 {
717 /* For 0 and INT_MIN it is essential that we use subs, as adds will result
718 in different condition codes (like cmn rather than like cmp), so that
719 alternative comes first. Both alternatives can match for any 0x??000000
720 value; except for 0 and INT_MIN it does not matter which we choose. The
721 same holds for -1 and 1 with TARGET_THUMB2; in that case prefer the
722 instruction with #1 as it is shorter. */
723 if (which_alternative == 0 && operands[3] != const1_rtx)
724 return "subs%?\\t%0, %1, #%n3";
725 else
726 return "adds%?\\t%0, %1, %3";
727 }
728 [(set_attr "conds" "set")
729 (set_attr "type" "alus_sreg")]
730 )
731
732 ;; Convert the sequence
733 ;; sub rd, rn, #1
734 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
735 ;; bne dest
736 ;; into
737 ;; subs rd, rn, #1
738 ;; bcs dest ((unsigned)rn >= 1)
739 ;; similarly for the beq variant using bcc.
740 ;; This is a common looping idiom (while (n--))
741 (define_peephole2
742 [(set (match_operand:SI 0 "arm_general_register_operand" "")
743 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
744 (const_int -1)))
745 (set (match_operand 2 "cc_register" "")
746 (compare (match_dup 0) (const_int -1)))
747 (set (pc)
748 (if_then_else (match_operator 3 "equality_operator"
749 [(match_dup 2) (const_int 0)])
750 (match_operand 4 "" "")
751 (match_operand 5 "" "")))]
752 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
753 [(parallel[
754 (set (match_dup 2)
755 (compare:CC
756 (match_dup 1) (const_int 1)))
757 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
758 (set (pc)
759 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
760 (match_dup 4)
761 (match_dup 5)))]
762 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
763 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
764 ? GEU : LTU),
765 VOIDmode,
766 operands[2], const0_rtx);"
767 )
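;; A hedged source-level example of the idiom the peephole above targets:
;;
;;   void zero_bytes (char *p, unsigned n)
;;   {
;;     while (n--)     /* "subs rN, rN, #1; bcs .L<loop>" after the peephole */
;;       *p++ = 0;
;;   }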
768
769 ;; The next four insns work because they compare the result with one of
770 ;; the operands, and we know that the use of the condition code is
771 ;; either GEU or LTU, so we can use the carry flag from the addition
772 ;; instead of doing the compare a second time.
773 (define_insn "addsi3_compare_op1"
774 [(set (reg:CC_C CC_REGNUM)
775 (compare:CC_C
776 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,rk,rk")
777 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rkI,L"))
778 (match_dup 1)))
779 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,rk,rk")
780 (plus:SI (match_dup 1) (match_dup 2)))]
781 "TARGET_32BIT"
782 "@
783 adds%?\\t%0, %1, %2
784 adds%?\\t%0, %0, %2
785 subs%?\\t%0, %1, #%n2
786 subs%?\\t%0, %0, #%n2
787 adds%?\\t%0, %1, %2
788 subs%?\\t%0, %1, #%n2"
789 [(set_attr "conds" "set")
790 (set_attr "arch" "t2,t2,t2,t2,*,*")
791 (set_attr "length" "2,2,2,2,4,4")
792 (set (attr "type")
793 (if_then_else (match_operand 2 "const_int_operand")
794 (const_string "alu_imm")
795 (const_string "alu_sreg")))]
796 )
797
798 (define_insn "*addsi3_compare_op2"
799 [(set (reg:CC_C CC_REGNUM)
800 (compare:CC_C
801 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r")
802 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rI,L"))
803 (match_dup 2)))
804 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r")
805 (plus:SI (match_dup 1) (match_dup 2)))]
806 "TARGET_32BIT"
807 "@
808 adds%?\\t%0, %1, %2
809 adds%?\\t%0, %0, %2
810 subs%?\\t%0, %1, #%n2
811 subs%?\\t%0, %0, #%n2
812 adds%?\\t%0, %1, %2
813 subs%?\\t%0, %1, #%n2"
814 [(set_attr "conds" "set")
815 (set_attr "arch" "t2,t2,t2,t2,*,*")
816 (set_attr "length" "2,2,2,2,4,4")
817 (set (attr "type")
818 (if_then_else (match_operand 2 "const_int_operand")
819 (const_string "alu_imm")
820 (const_string "alu_sreg")))]
821 )
822
823 (define_insn "*compare_addsi2_op0"
824 [(set (reg:CC_C CC_REGNUM)
825 (compare:CC_C
826 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
827 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
828 (match_dup 0)))]
829 "TARGET_32BIT"
830 "@
831 cmn%?\\t%0, %1
832 cmp%?\\t%0, #%n1
833 cmn%?\\t%0, %1
834 cmp%?\\t%0, #%n1"
835 [(set_attr "conds" "set")
836 (set_attr "predicable" "yes")
837 (set_attr "arch" "t2,t2,*,*")
838 (set_attr "predicable_short_it" "yes,yes,no,no")
839 (set_attr "length" "2,2,4,4")
840 (set (attr "type")
841 (if_then_else (match_operand 1 "const_int_operand")
842 (const_string "alu_imm")
843 (const_string "alu_sreg")))]
844 )
845
846 (define_insn "*compare_addsi2_op1"
847 [(set (reg:CC_C CC_REGNUM)
848 (compare:CC_C
849 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
850 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
851 (match_dup 1)))]
852 "TARGET_32BIT"
853 "@
854 cmn%?\\t%0, %1
855 cmp%?\\t%0, #%n1
856 cmn%?\\t%0, %1
857 cmp%?\\t%0, #%n1"
858 [(set_attr "conds" "set")
859 (set_attr "predicable" "yes")
860 (set_attr "arch" "t2,t2,*,*")
861 (set_attr "predicable_short_it" "yes,yes,no,no")
862 (set_attr "length" "2,2,4,4")
863 (set (attr "type")
864 (if_then_else (match_operand 1 "const_int_operand")
865 (const_string "alu_imm")
866 (const_string "alu_sreg")))]
867 )
868
869 (define_insn "addsi3_carryin"
870 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
871 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
872 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
873 (match_operand:SI 3 "arm_carry_operation" "")))]
874 "TARGET_32BIT"
875 "@
876 adc%?\\t%0, %1, %2
877 adc%?\\t%0, %1, %2
878 sbc%?\\t%0, %1, #%B2"
879 [(set_attr "conds" "use")
880 (set_attr "predicable" "yes")
881 (set_attr "arch" "t2,*,*")
882 (set_attr "length" "4")
883 (set_attr "predicable_short_it" "yes,no,no")
884 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
885 )
886
887 ;; Canonicalization of the above when the immediate is zero.
888 (define_insn "add0si3_carryin"
889 [(set (match_operand:SI 0 "s_register_operand" "=r")
890 (plus:SI (match_operand:SI 2 "arm_carry_operation" "")
891 (match_operand:SI 1 "arm_not_operand" "r")))]
892 "TARGET_32BIT"
893 "adc%?\\t%0, %1, #0"
894 [(set_attr "conds" "use")
895 (set_attr "predicable" "yes")
896 (set_attr "length" "4")
897 (set_attr "type" "adc_imm")]
898 )
899
900 (define_insn "*addsi3_carryin_alt2"
901 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
902 (plus:SI (plus:SI (match_operand:SI 3 "arm_carry_operation" "")
903 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
904 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
905 "TARGET_32BIT"
906 "@
907 adc%?\\t%0, %1, %2
908 adc%?\\t%0, %1, %2
909 sbc%?\\t%0, %1, #%B2"
910 [(set_attr "conds" "use")
911 (set_attr "predicable" "yes")
912 (set_attr "arch" "t2,*,*")
913 (set_attr "length" "4")
914 (set_attr "predicable_short_it" "yes,no,no")
915 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
916 )
917
918 (define_insn "*addsi3_carryin_shift"
919 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
920 (plus:SI (plus:SI
921 (match_operator:SI 2 "shift_operator"
922 [(match_operand:SI 3 "s_register_operand" "r,r")
923 (match_operand:SI 4 "shift_amount_operand" "M,r")])
924 (match_operand:SI 5 "arm_carry_operation" ""))
925 (match_operand:SI 1 "s_register_operand" "r,r")))]
926 "TARGET_32BIT"
927 "adc%?\\t%0, %1, %3%S2"
928 [(set_attr "conds" "use")
929 (set_attr "arch" "32,a")
930 (set_attr "shift" "3")
931 (set_attr "predicable" "yes")
932 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
933 (const_string "alu_shift_imm")
934 (const_string "alu_shift_reg")))]
935 )
936
937 (define_insn "*addsi3_carryin_clobercc"
938 [(set (match_operand:SI 0 "s_register_operand" "=r")
939 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
940 (match_operand:SI 2 "arm_rhs_operand" "rI"))
941 (match_operand:SI 3 "arm_carry_operation" "")))
942 (clobber (reg:CC CC_REGNUM))]
943 "TARGET_32BIT"
944 "adcs%?\\t%0, %1, %2"
945 [(set_attr "conds" "set")
946 (set_attr "type" "adcs_reg")]
947 )
948
949 (define_expand "subv<mode>4"
950 [(match_operand:SIDI 0 "register_operand")
951 (match_operand:SIDI 1 "register_operand")
952 (match_operand:SIDI 2 "register_operand")
953 (match_operand 3 "")]
954 "TARGET_32BIT"
955 {
956 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
957 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
958
959 DONE;
960 })
961
962 (define_expand "usubv<mode>4"
963 [(match_operand:SIDI 0 "register_operand")
964 (match_operand:SIDI 1 "register_operand")
965 (match_operand:SIDI 2 "register_operand")
966 (match_operand 3 "")]
967 "TARGET_32BIT"
968 {
969 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
970 arm_gen_unlikely_cbranch (LTU, CCmode, operands[3]);
971
972 DONE;
973 })
974
975 (define_insn "subdi3_compare1"
976 [(set (reg:CC CC_REGNUM)
977 (compare:CC
978 (match_operand:DI 1 "s_register_operand" "r")
979 (match_operand:DI 2 "s_register_operand" "r")))
980 (set (match_operand:DI 0 "s_register_operand" "=&r")
981 (minus:DI (match_dup 1) (match_dup 2)))]
982 "TARGET_32BIT"
983 "subs\\t%Q0, %Q1, %Q2;sbcs\\t%R0, %R1, %R2"
984 [(set_attr "conds" "set")
985 (set_attr "length" "8")
986 (set_attr "type" "multiple")]
987 )
988
989 (define_insn "subsi3_compare1"
990 [(set (reg:CC CC_REGNUM)
991 (compare:CC
992 (match_operand:SI 1 "register_operand" "r")
993 (match_operand:SI 2 "register_operand" "r")))
994 (set (match_operand:SI 0 "register_operand" "=r")
995 (minus:SI (match_dup 1) (match_dup 2)))]
996 "TARGET_32BIT"
997 "subs%?\\t%0, %1, %2"
998 [(set_attr "conds" "set")
999 (set_attr "type" "alus_sreg")]
1000 )
1001
1002 (define_insn "subsi3_carryin"
1003 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1004 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
1005 (match_operand:SI 2 "s_register_operand" "r,r,r"))
1006 (match_operand:SI 3 "arm_borrow_operation" "")))]
1007 "TARGET_32BIT"
1008 "@
1009 sbc%?\\t%0, %1, %2
1010 rsc%?\\t%0, %2, %1
1011 sbc%?\\t%0, %2, %2, lsl #1"
1012 [(set_attr "conds" "use")
1013 (set_attr "arch" "*,a,t2")
1014 (set_attr "predicable" "yes")
1015 (set_attr "type" "adc_reg,adc_imm,alu_shift_imm")]
1016 )
1017
1018 (define_insn "cmpsi3_carryin_<CC_EXTEND>out"
1019 [(set (reg:<CC_EXTEND> CC_REGNUM)
1020 (compare:<CC_EXTEND>
1021 (SE:DI (match_operand:SI 1 "s_register_operand" "0,r"))
1022 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1023 (SE:DI (match_operand:SI 2 "s_register_operand" "l,r")))))
1024 (clobber (match_scratch:SI 0 "=l,r"))]
1025 "TARGET_32BIT"
1026 "sbcs\\t%0, %1, %2"
1027 [(set_attr "conds" "set")
1028 (set_attr "arch" "t2,*")
1029 (set_attr "length" "2,4")
1030 (set_attr "type" "adc_reg")]
1031 )
1032
1033 ;; Similar to the above, but handling a constant which has a different
1034 ;; canonicalization.
1035 (define_insn "cmpsi3_imm_carryin_<CC_EXTEND>out"
1036 [(set (reg:<CC_EXTEND> CC_REGNUM)
1037 (compare:<CC_EXTEND>
1038 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1039 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1040 (match_operand:DI 2 "arm_adcimm_operand" "I,K"))))
1041 (clobber (match_scratch:SI 0 "=l,r"))]
1042 "TARGET_32BIT"
1043 "@
1044 sbcs\\t%0, %1, %2
1045 adcs\\t%0, %1, #%B2"
1046 [(set_attr "conds" "set")
1047 (set_attr "type" "adc_imm")]
1048 )
1049
1050 ;; Further canonicalization when the constant is zero.
1051 (define_insn "cmpsi3_0_carryin_<CC_EXTEND>out"
1052 [(set (reg:<CC_EXTEND> CC_REGNUM)
1053 (compare:<CC_EXTEND>
1054 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1055 (match_operand:DI 2 "arm_borrow_operation" "")))
1056 (clobber (match_scratch:SI 0 "=l,r"))]
1057 "TARGET_32BIT"
1058 "sbcs\\t%0, %1, #0"
1059 [(set_attr "conds" "set")
1060 (set_attr "type" "adc_imm")]
1061 )
1062
1063 (define_insn "*subsi3_carryin_const"
1064 [(set (match_operand:SI 0 "s_register_operand" "=r")
1065 (minus:SI (plus:SI
1066 (match_operand:SI 1 "s_register_operand" "r")
1067 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1068 (match_operand:SI 3 "arm_borrow_operation" "")))]
1069 "TARGET_32BIT"
1070 "sbc\\t%0, %1, #%n2"
1071 [(set_attr "conds" "use")
1072 (set_attr "type" "adc_imm")]
1073 )
1074
1075 (define_insn "*subsi3_carryin_const0"
1076 [(set (match_operand:SI 0 "s_register_operand" "=r")
1077 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1078 (match_operand:SI 2 "arm_borrow_operation" "")))]
1079 "TARGET_32BIT"
1080 "sbc\\t%0, %1, #0"
1081 [(set_attr "conds" "use")
1082 (set_attr "type" "adc_imm")]
1083 )
1084
1085 (define_insn "*subsi3_carryin_shift"
1086 [(set (match_operand:SI 0 "s_register_operand" "=r")
1087 (minus:SI (minus:SI
1088 (match_operand:SI 1 "s_register_operand" "r")
1089 (match_operator:SI 2 "shift_operator"
1090 [(match_operand:SI 3 "s_register_operand" "r")
1091 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
1092 (match_operand:SI 5 "arm_borrow_operation" "")))]
1093 "TARGET_32BIT"
1094 "sbc%?\\t%0, %1, %3%S2"
1095 [(set_attr "conds" "use")
1096 (set_attr "predicable" "yes")
1097 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1098 (const_string "alu_shift_imm")
1099 (const_string "alu_shift_reg")))]
1100 )
1101
1102 (define_insn "*subsi3_carryin_shift_alt"
1103 [(set (match_operand:SI 0 "s_register_operand" "=r")
1104 (minus:SI (minus:SI
1105 (match_operand:SI 1 "s_register_operand" "r")
1106 (match_operand:SI 5 "arm_borrow_operation" ""))
1107 (match_operator:SI 2 "shift_operator"
1108 [(match_operand:SI 3 "s_register_operand" "r")
1109 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
1110 "TARGET_32BIT"
1111 "sbc%?\\t%0, %1, %3%S2"
1112 [(set_attr "conds" "use")
1113 (set_attr "predicable" "yes")
1114 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1115 (const_string "alu_shift_imm")
1116 (const_string "alu_shift_reg")))]
1117 )
1118
1119 (define_insn "*rsbsi3_carryin_shift"
1120 [(set (match_operand:SI 0 "s_register_operand" "=r")
1121 (minus:SI (minus:SI
1122 (match_operator:SI 2 "shift_operator"
1123 [(match_operand:SI 3 "s_register_operand" "r")
1124 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1125 (match_operand:SI 1 "s_register_operand" "r"))
1126 (match_operand:SI 5 "arm_borrow_operation" "")))]
1127 "TARGET_ARM"
1128 "rsc%?\\t%0, %1, %3%S2"
1129 [(set_attr "conds" "use")
1130 (set_attr "predicable" "yes")
1131 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1132 (const_string "alu_shift_imm")
1133 (const_string "alu_shift_reg")))]
1134 )
1135
1136 (define_insn "*rsbsi3_carryin_shift_alt"
1137 [(set (match_operand:SI 0 "s_register_operand" "=r")
1138 (minus:SI (minus:SI
1139 (match_operator:SI 2 "shift_operator"
1140 [(match_operand:SI 3 "s_register_operand" "r")
1141 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1142 (match_operand:SI 5 "arm_borrow_operation" ""))
1143 (match_operand:SI 1 "s_register_operand" "r")))]
1144 "TARGET_ARM"
1145 "rsc%?\\t%0, %1, %3%S2"
1146 [(set_attr "conds" "use")
1147 (set_attr "predicable" "yes")
1148 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1149 (const_string "alu_shift_imm")
1150 (const_string "alu_shift_reg")))]
1151 )
1152
1153 ; Transform ((x << y) - 1) into ~(~(x - 1) << y), where x is a constant.
1154 (define_split
1155 [(set (match_operand:SI 0 "s_register_operand" "")
1156 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1157 (match_operand:SI 2 "s_register_operand" ""))
1158 (const_int -1)))
1159 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1160 "TARGET_32BIT"
1161 [(set (match_dup 3) (match_dup 1))
1162 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1163 "
1164 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1165 ")
1166
1167 (define_expand "addsf3"
1168 [(set (match_operand:SF 0 "s_register_operand")
1169 (plus:SF (match_operand:SF 1 "s_register_operand")
1170 (match_operand:SF 2 "s_register_operand")))]
1171 "TARGET_32BIT && TARGET_HARD_FLOAT"
1172 "
1173 ")
1174
1175 (define_expand "adddf3"
1176 [(set (match_operand:DF 0 "s_register_operand")
1177 (plus:DF (match_operand:DF 1 "s_register_operand")
1178 (match_operand:DF 2 "s_register_operand")))]
1179 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1180 "
1181 ")
1182
1183 (define_expand "subdi3"
1184 [(parallel
1185 [(set (match_operand:DI 0 "s_register_operand")
1186 (minus:DI (match_operand:DI 1 "reg_or_int_operand")
1187 (match_operand:DI 2 "s_register_operand")))
1188 (clobber (reg:CC CC_REGNUM))])]
1189 "TARGET_EITHER"
1190 "
1191 if (TARGET_THUMB1)
1192 {
1193 if (!REG_P (operands[1]))
1194 operands[1] = force_reg (DImode, operands[1]);
1195 }
1196 else
1197 {
1198 rtx lo_result, hi_result, lo_dest, hi_dest;
1199 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1200 rtx condition;
1201
1202 /* Since operands[1] may be an integer, pass it second, so that
1203 any necessary simplifications will be done on the decomposed
1204 constant. */
1205 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1206 &lo_op1, &hi_op1);
1207 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
1208 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
1209
1210 if (!arm_rhs_operand (lo_op1, SImode))
1211 lo_op1 = force_reg (SImode, lo_op1);
1212
1213 if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
1214 || !arm_rhs_operand (hi_op1, SImode))
1215 hi_op1 = force_reg (SImode, hi_op1);
1216
1217 rtx cc_reg;
1218 if (lo_op1 == const0_rtx)
1219 {
1220 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1221 emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
1222 }
1223 else if (CONST_INT_P (lo_op1))
1224 {
1225 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1226 emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
1227 GEN_INT (~UINTVAL (lo_op1))));
1228 }
1229 else
1230 {
1231 cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
1232 emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
1233 }
1234
1235 condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);
1236
1237 if (hi_op1 == const0_rtx)
1238 emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
1239 else
1240 emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));
1241
1242 if (lo_result != lo_dest)
1243 emit_move_insn (lo_result, lo_dest);
1244
1245 if (hi_result != hi_dest)
1246 emit_move_insn (hi_result, hi_dest);
1247
1248 DONE;
1249 }
1250 "
1251 )
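;; As with adddi3 above, a hedged sketch of the typical output when both
;; operands are in registers (register names are illustrative):
;;
;;   long long sub64 (long long a, long long b) { return a - b; }
;;
;; becomes roughly
;;
;;   subs  r0, r0, r2    @ low word; borrow is carry-clear on Arm
;;   sbc   r1, r1, r3    @ high word; consumes the borrow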
1252
1253 (define_expand "subsi3"
1254 [(set (match_operand:SI 0 "s_register_operand")
1255 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1256 (match_operand:SI 2 "s_register_operand")))]
1257 "TARGET_EITHER"
1258 "
1259 if (CONST_INT_P (operands[1]))
1260 {
1261 if (TARGET_32BIT)
1262 {
1263 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1264 operands[1] = force_reg (SImode, operands[1]);
1265 else
1266 {
1267 arm_split_constant (MINUS, SImode, NULL_RTX,
1268 INTVAL (operands[1]), operands[0],
1269 operands[2],
1270 optimize && can_create_pseudo_p ());
1271 DONE;
1272 }
1273 }
1274 else /* TARGET_THUMB1 */
1275 operands[1] = force_reg (SImode, operands[1]);
1276 }
1277 "
1278 )
1279
1280 ; ??? Check Thumb-2 split length
1281 (define_insn_and_split "*arm_subsi3_insn"
1282 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
1283 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
1284 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
1285 "TARGET_32BIT"
1286 "@
1287 sub%?\\t%0, %1, %2
1288 sub%?\\t%0, %2
1289 sub%?\\t%0, %1, %2
1290 rsb%?\\t%0, %2, %1
1291 rsb%?\\t%0, %2, %1
1292 sub%?\\t%0, %1, %2
1293 sub%?\\t%0, %1, %2
1294 sub%?\\t%0, %1, %2
1295 #"
1296 "&& (CONST_INT_P (operands[1])
1297 && !const_ok_for_arm (INTVAL (operands[1])))"
1298 [(clobber (const_int 0))]
1299 "
1300 arm_split_constant (MINUS, SImode, curr_insn,
1301 INTVAL (operands[1]), operands[0], operands[2], 0);
1302 DONE;
1303 "
1304 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
1305 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
1306 (set_attr "predicable" "yes")
1307 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
1308 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
1309 )
1310
1311 (define_peephole2
1312 [(match_scratch:SI 3 "r")
1313 (set (match_operand:SI 0 "arm_general_register_operand" "")
1314 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1315 (match_operand:SI 2 "arm_general_register_operand" "")))]
1316 "TARGET_32BIT
1317 && !const_ok_for_arm (INTVAL (operands[1]))
1318 && const_ok_for_arm (~INTVAL (operands[1]))"
1319 [(set (match_dup 3) (match_dup 1))
1320 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1321 ""
1322 )
1323
1324 (define_insn "subsi3_compare0"
1325 [(set (reg:CC_NOOV CC_REGNUM)
1326 (compare:CC_NOOV
1327 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1328 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1329 (const_int 0)))
1330 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1331 (minus:SI (match_dup 1) (match_dup 2)))]
1332 "TARGET_32BIT"
1333 "@
1334 subs%?\\t%0, %1, %2
1335 subs%?\\t%0, %1, %2
1336 rsbs%?\\t%0, %2, %1"
1337 [(set_attr "conds" "set")
1338 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
1339 )
1340
1341 (define_insn "subsi3_compare"
1342 [(set (reg:CC CC_REGNUM)
1343 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1344 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1345 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1346 (minus:SI (match_dup 1) (match_dup 2)))]
1347 "TARGET_32BIT"
1348 "@
1349 subs%?\\t%0, %1, %2
1350 subs%?\\t%0, %1, %2
1351 rsbs%?\\t%0, %2, %1"
1352 [(set_attr "conds" "set")
1353 (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
1354 )
1355
1356 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
1357 ;; rather than (0 cmp reg). This gives the same results for unsigned
1358 ;; and equality compares which is what we mostly need here.
1359 (define_insn "rsb_imm_compare"
1360 [(set (reg:CC_RSB CC_REGNUM)
1361 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
1362 (match_operand 3 "const_int_operand" "")))
1363 (set (match_operand:SI 0 "s_register_operand" "=r")
1364 (minus:SI (match_operand 1 "arm_immediate_operand" "I")
1365 (match_dup 2)))]
1366 "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
1367 "rsbs\\t%0, %2, %1"
1368 [(set_attr "conds" "set")
1369 (set_attr "type" "alus_imm")]
1370 )
1371
1372 ;; Similarly, but the result is unused.
1373 (define_insn "rsb_imm_compare_scratch"
1374 [(set (reg:CC_RSB CC_REGNUM)
1375 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
1376 (match_operand 1 "arm_not_immediate_operand" "K")))
1377 (clobber (match_scratch:SI 0 "=r"))]
1378 "TARGET_32BIT"
1379 "rsbs\\t%0, %2, #%B1"
1380 [(set_attr "conds" "set")
1381 (set_attr "type" "alus_imm")]
1382 )
1383
1384 ;; Compare the sum of a value plus a carry against a constant. Uses
1385 ;; RSC, so the result is swapped. Only available on Arm.
1386 (define_insn "rscsi3_<CC_EXTEND>out_scratch"
1387 [(set (reg:CC_SWP CC_REGNUM)
1388 (compare:CC_SWP
1389 (plus:DI (SE:DI (match_operand:SI 2 "s_register_operand" "r"))
1390 (match_operand:DI 3 "arm_borrow_operation" ""))
1391 (match_operand 1 "arm_immediate_operand" "I")))
1392 (clobber (match_scratch:SI 0 "=r"))]
1393 "TARGET_ARM"
1394 "rscs\\t%0, %2, %1"
1395 [(set_attr "conds" "set")
1396 (set_attr "type" "alus_imm")]
1397 )
1398
1399 (define_expand "subsf3"
1400 [(set (match_operand:SF 0 "s_register_operand")
1401 (minus:SF (match_operand:SF 1 "s_register_operand")
1402 (match_operand:SF 2 "s_register_operand")))]
1403 "TARGET_32BIT && TARGET_HARD_FLOAT"
1404 "
1405 ")
1406
1407 (define_expand "subdf3"
1408 [(set (match_operand:DF 0 "s_register_operand")
1409 (minus:DF (match_operand:DF 1 "s_register_operand")
1410 (match_operand:DF 2 "s_register_operand")))]
1411 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1412 "
1413 ")
1414
1415 \f
1416 ;; Multiplication insns
1417
1418 (define_expand "mulhi3"
1419 [(set (match_operand:HI 0 "s_register_operand")
1420 (mult:HI (match_operand:HI 1 "s_register_operand")
1421 (match_operand:HI 2 "s_register_operand")))]
1422 "TARGET_DSP_MULTIPLY"
1423 "
1424 {
1425 rtx result = gen_reg_rtx (SImode);
1426 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
1427 emit_move_insn (operands[0], gen_lowpart (HImode, result));
1428 DONE;
1429 }"
1430 )
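;; A hedged sketch of the expansion above (whether a HImode multiply is
;; formed at all depends on the surrounding code):
;;
;;   short mul16 (short a, short b) { return a * b; }
;;
;; can end up as a single "smulbb r0, r0, r1", with only the low 16 bits
;; of the 32-bit product being used.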
1431
1432 (define_expand "mulsi3"
1433 [(set (match_operand:SI 0 "s_register_operand")
1434 (mult:SI (match_operand:SI 2 "s_register_operand")
1435 (match_operand:SI 1 "s_register_operand")))]
1436 "TARGET_EITHER"
1437 ""
1438 )
1439
1440 ;; Use `&' and then `0' to prevent operands 0 and 2 being the same
1441 (define_insn "*mul"
1442 [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
1443 (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
1444 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
1445 "TARGET_32BIT"
1446 "mul%?\\t%0, %2, %1"
1447 [(set_attr "type" "mul")
1448 (set_attr "predicable" "yes")
1449 (set_attr "arch" "t2,v6,nov6,nov6")
1450 (set_attr "length" "4")
1451 (set_attr "predicable_short_it" "yes,no,*,*")]
1452 )
1453
1454 ;; MLA and MLS instructions. Use operand 1 for the accumulator to prefer
1455 ;; reusing the same register.
1456
1457 (define_insn "*mla"
1458 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
1459 (plus:SI
1460 (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
1461 (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
1462 (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
1463 "TARGET_32BIT"
1464 "mla%?\\t%0, %3, %2, %1"
1465 [(set_attr "type" "mla")
1466 (set_attr "predicable" "yes")
1467 (set_attr "arch" "v6,nov6,nov6,nov6")]
1468 )
1469
1470 (define_insn "*mls"
1471 [(set (match_operand:SI 0 "s_register_operand" "=r")
1472 (minus:SI
1473 (match_operand:SI 1 "s_register_operand" "r")
1474 (mult:SI (match_operand:SI 3 "s_register_operand" "r")
1475 (match_operand:SI 2 "s_register_operand" "r"))))]
1476 "TARGET_32BIT && arm_arch_thumb2"
1477 "mls%?\\t%0, %3, %2, %1"
1478 [(set_attr "type" "mla")
1479 (set_attr "predicable" "yes")]
1480 )
1481
1482 (define_insn "*mulsi3_compare0"
1483 [(set (reg:CC_NOOV CC_REGNUM)
1484 (compare:CC_NOOV (mult:SI
1485 (match_operand:SI 2 "s_register_operand" "r,r")
1486 (match_operand:SI 1 "s_register_operand" "%0,r"))
1487 (const_int 0)))
1488 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1489 (mult:SI (match_dup 2) (match_dup 1)))]
1490 "TARGET_ARM && !arm_arch6"
1491 "muls%?\\t%0, %2, %1"
1492 [(set_attr "conds" "set")
1493 (set_attr "type" "muls")]
1494 )
1495
1496 (define_insn "*mulsi3_compare0_v6"
1497 [(set (reg:CC_NOOV CC_REGNUM)
1498 (compare:CC_NOOV (mult:SI
1499 (match_operand:SI 2 "s_register_operand" "r")
1500 (match_operand:SI 1 "s_register_operand" "r"))
1501 (const_int 0)))
1502 (set (match_operand:SI 0 "s_register_operand" "=r")
1503 (mult:SI (match_dup 2) (match_dup 1)))]
1504 "TARGET_ARM && arm_arch6 && optimize_size"
1505 "muls%?\\t%0, %2, %1"
1506 [(set_attr "conds" "set")
1507 (set_attr "type" "muls")]
1508 )
1509
1510 (define_insn "*mulsi_compare0_scratch"
1511 [(set (reg:CC_NOOV CC_REGNUM)
1512 (compare:CC_NOOV (mult:SI
1513 (match_operand:SI 2 "s_register_operand" "r,r")
1514 (match_operand:SI 1 "s_register_operand" "%0,r"))
1515 (const_int 0)))
1516 (clobber (match_scratch:SI 0 "=&r,&r"))]
1517 "TARGET_ARM && !arm_arch6"
1518 "muls%?\\t%0, %2, %1"
1519 [(set_attr "conds" "set")
1520 (set_attr "type" "muls")]
1521 )
1522
1523 (define_insn "*mulsi_compare0_scratch_v6"
1524 [(set (reg:CC_NOOV CC_REGNUM)
1525 (compare:CC_NOOV (mult:SI
1526 (match_operand:SI 2 "s_register_operand" "r")
1527 (match_operand:SI 1 "s_register_operand" "r"))
1528 (const_int 0)))
1529 (clobber (match_scratch:SI 0 "=r"))]
1530 "TARGET_ARM && arm_arch6 && optimize_size"
1531 "muls%?\\t%0, %2, %1"
1532 [(set_attr "conds" "set")
1533 (set_attr "type" "muls")]
1534 )
1535
1536 (define_insn "*mulsi3addsi_compare0"
1537 [(set (reg:CC_NOOV CC_REGNUM)
1538 (compare:CC_NOOV
1539 (plus:SI (mult:SI
1540 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1541 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1542 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1543 (const_int 0)))
1544 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1545 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1546 (match_dup 3)))]
1547 "TARGET_ARM && arm_arch6"
1548 "mlas%?\\t%0, %2, %1, %3"
1549 [(set_attr "conds" "set")
1550 (set_attr "type" "mlas")]
1551 )
1552
1553 (define_insn "*mulsi3addsi_compare0_v6"
1554 [(set (reg:CC_NOOV CC_REGNUM)
1555 (compare:CC_NOOV
1556 (plus:SI (mult:SI
1557 (match_operand:SI 2 "s_register_operand" "r")
1558 (match_operand:SI 1 "s_register_operand" "r"))
1559 (match_operand:SI 3 "s_register_operand" "r"))
1560 (const_int 0)))
1561 (set (match_operand:SI 0 "s_register_operand" "=r")
1562 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1563 (match_dup 3)))]
1564 "TARGET_ARM && arm_arch6 && optimize_size"
1565 "mlas%?\\t%0, %2, %1, %3"
1566 [(set_attr "conds" "set")
1567 (set_attr "type" "mlas")]
1568 )
1569
1570 (define_insn "*mulsi3addsi_compare0_scratch"
1571 [(set (reg:CC_NOOV CC_REGNUM)
1572 (compare:CC_NOOV
1573 (plus:SI (mult:SI
1574 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1575 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1576 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1577 (const_int 0)))
1578 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1579 "TARGET_ARM && !arm_arch6"
1580 "mlas%?\\t%0, %2, %1, %3"
1581 [(set_attr "conds" "set")
1582 (set_attr "type" "mlas")]
1583 )
1584
1585 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1586 [(set (reg:CC_NOOV CC_REGNUM)
1587 (compare:CC_NOOV
1588 (plus:SI (mult:SI
1589 (match_operand:SI 2 "s_register_operand" "r")
1590 (match_operand:SI 1 "s_register_operand" "r"))
1591 (match_operand:SI 3 "s_register_operand" "r"))
1592 (const_int 0)))
1593 (clobber (match_scratch:SI 0 "=r"))]
1594 "TARGET_ARM && arm_arch6 && optimize_size"
1595 "mlas%?\\t%0, %2, %1, %3"
1596 [(set_attr "conds" "set")
1597 (set_attr "type" "mlas")]
1598 )
1599
1600 ;; 32x32->64 widening multiply.
1601 ;; The only difference between the v3-5 and v6+ versions is the requirement
1602 ;; that the output does not overlap with either input.
1603
1604 (define_expand "<Us>mulsidi3"
1605 [(set (match_operand:DI 0 "s_register_operand")
1606 (mult:DI
1607 (SE:DI (match_operand:SI 1 "s_register_operand"))
1608 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
1609 "TARGET_32BIT"
1610 {
1611 emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
1612 gen_highpart (SImode, operands[0]),
1613 operands[1], operands[2]));
1614 DONE;
1615 }
1616 )
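;; A hedged example of the widening multiply (signed case shown, register
;; names illustrative):
;;
;;   long long wmul (int a, int b) { return (long long) a * b; }
;;
;; maps onto this expander and typically emits
;;
;;   smull  r0, r1, r0, r1   @ low half in r0, high half in r1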
1617
1618 (define_insn "<US>mull"
1619 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
1620 (mult:SI
1621 (match_operand:SI 2 "s_register_operand" "%r,r")
1622 (match_operand:SI 3 "s_register_operand" "r,r")))
1623 (set (match_operand:SI 1 "s_register_operand" "=r,&r")
1624 (truncate:SI
1625 (lshiftrt:DI
1626 (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
1627 (const_int 32))))]
1628 "TARGET_32BIT"
1629 "<US>mull%?\\t%0, %1, %2, %3"
1630 [(set_attr "type" "umull")
1631 (set_attr "predicable" "yes")
1632 (set_attr "arch" "v6,nov6")]
1633 )
1634
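;; A hedged example for the multiply-accumulate expander below (editor's
;; sketch, not taken from the sources):
;;   long long f (int a, int b, long long acc) { return acc + (long long) a * b; }
;; should collapse to a single smlal (umlal when unsigned), with the low and
;; high halves of the 64-bit accumulator passed as separate SImode operands.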
1635 (define_expand "<Us>maddsidi4"
1636 [(set (match_operand:DI 0 "s_register_operand")
1637 (plus:DI
1638 (mult:DI
1639 (SE:DI (match_operand:SI 1 "s_register_operand"))
1640 (SE:DI (match_operand:SI 2 "s_register_operand")))
1641 (match_operand:DI 3 "s_register_operand")))]
1642 "TARGET_32BIT"
1643 {
1644 emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
1645 gen_lowpart (SImode, operands[3]),
1646 gen_highpart (SImode, operands[0]),
1647 gen_highpart (SImode, operands[3]),
1648 operands[1], operands[2]));
1649 DONE;
1650 }
1651 )
1652
1653 (define_insn "<US>mlal"
1654 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
1655 (plus:SI
1656 (mult:SI
1657 (match_operand:SI 4 "s_register_operand" "%r,r")
1658 (match_operand:SI 5 "s_register_operand" "r,r"))
1659 (match_operand:SI 1 "s_register_operand" "0,0")))
1660 (set (match_operand:SI 2 "s_register_operand" "=r,&r")
1661 (plus:SI
1662 (truncate:SI
1663 (lshiftrt:DI
1664 (plus:DI
1665 (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
1666 (zero_extend:DI (match_dup 1)))
1667 (const_int 32)))
1668 (match_operand:SI 3 "s_register_operand" "2,2")))]
1669 "TARGET_32BIT"
1670 "<US>mlal%?\\t%0, %2, %4, %5"
1671 [(set_attr "type" "umlal")
1672 (set_attr "predicable" "yes")
1673 (set_attr "arch" "v6,nov6")]
1674 )
1675
1676 (define_expand "<US>mulsi3_highpart"
1677 [(parallel
1678 [(set (match_operand:SI 0 "s_register_operand")
1679 (truncate:SI
1680 (lshiftrt:DI
1681 (mult:DI
1682 (SE:DI (match_operand:SI 1 "s_register_operand"))
1683 (SE:DI (match_operand:SI 2 "s_register_operand")))
1684 (const_int 32))))
1685 (clobber (match_scratch:SI 3 ""))])]
1686 "TARGET_32BIT"
1687 ""
1688 )
1689
1690 (define_insn "*<US>mull_high"
1691 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
1692 (truncate:SI
1693 (lshiftrt:DI
1694 (mult:DI
1695 (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
1696 (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
1697 (const_int 32))))
1698 (clobber (match_scratch:SI 3 "=r,&r,&r"))]
1699 "TARGET_32BIT"
1700 "<US>mull%?\\t%3, %0, %2, %1"
1701 [(set_attr "type" "umull")
1702 (set_attr "predicable" "yes")
1703 (set_attr "arch" "v6,nov6,nov6")]
1704 )
1705
1706 (define_insn "mulhisi3"
1707 [(set (match_operand:SI 0 "s_register_operand" "=r")
1708 (mult:SI (sign_extend:SI
1709 (match_operand:HI 1 "s_register_operand" "%r"))
1710 (sign_extend:SI
1711 (match_operand:HI 2 "s_register_operand" "r"))))]
1712 "TARGET_DSP_MULTIPLY"
1713 "smulbb%?\\t%0, %1, %2"
1714 [(set_attr "type" "smulxy")
1715 (set_attr "predicable" "yes")]
1716 )
1717
1718 (define_insn "*mulhisi3tb"
1719 [(set (match_operand:SI 0 "s_register_operand" "=r")
1720 (mult:SI (ashiftrt:SI
1721 (match_operand:SI 1 "s_register_operand" "r")
1722 (const_int 16))
1723 (sign_extend:SI
1724 (match_operand:HI 2 "s_register_operand" "r"))))]
1725 "TARGET_DSP_MULTIPLY"
1726 "smultb%?\\t%0, %1, %2"
1727 [(set_attr "type" "smulxy")
1728 (set_attr "predicable" "yes")]
1729 )
1730
1731 (define_insn "*mulhisi3bt"
1732 [(set (match_operand:SI 0 "s_register_operand" "=r")
1733 (mult:SI (sign_extend:SI
1734 (match_operand:HI 1 "s_register_operand" "r"))
1735 (ashiftrt:SI
1736 (match_operand:SI 2 "s_register_operand" "r")
1737 (const_int 16))))]
1738 "TARGET_DSP_MULTIPLY"
1739 "smulbt%?\\t%0, %1, %2"
1740 [(set_attr "type" "smulxy")
1741 (set_attr "predicable" "yes")]
1742 )
1743
1744 (define_insn "*mulhisi3tt"
1745 [(set (match_operand:SI 0 "s_register_operand" "=r")
1746 (mult:SI (ashiftrt:SI
1747 (match_operand:SI 1 "s_register_operand" "r")
1748 (const_int 16))
1749 (ashiftrt:SI
1750 (match_operand:SI 2 "s_register_operand" "r")
1751 (const_int 16))))]
1752 "TARGET_DSP_MULTIPLY"
1753 "smultt%?\\t%0, %1, %2"
1754 [(set_attr "type" "smulxy")
1755 (set_attr "predicable" "yes")]
1756 )
1757
1758 (define_insn "maddhisi4"
1759 [(set (match_operand:SI 0 "s_register_operand" "=r")
1760 (plus:SI (mult:SI (sign_extend:SI
1761 (match_operand:HI 1 "s_register_operand" "r"))
1762 (sign_extend:SI
1763 (match_operand:HI 2 "s_register_operand" "r")))
1764 (match_operand:SI 3 "s_register_operand" "r")))]
1765 "TARGET_DSP_MULTIPLY"
1766 "smlabb%?\\t%0, %1, %2, %3"
1767 [(set_attr "type" "smlaxy")
1768 (set_attr "predicable" "yes")]
1769 )
1770
1771 ;; Note: there is no maddhisi4bt pattern because this one is the canonical form
1772 (define_insn "*maddhisi4tb"
1773 [(set (match_operand:SI 0 "s_register_operand" "=r")
1774 (plus:SI (mult:SI (ashiftrt:SI
1775 (match_operand:SI 1 "s_register_operand" "r")
1776 (const_int 16))
1777 (sign_extend:SI
1778 (match_operand:HI 2 "s_register_operand" "r")))
1779 (match_operand:SI 3 "s_register_operand" "r")))]
1780 "TARGET_DSP_MULTIPLY"
1781 "smlatb%?\\t%0, %1, %2, %3"
1782 [(set_attr "type" "smlaxy")
1783 (set_attr "predicable" "yes")]
1784 )
1785
1786 (define_insn "*maddhisi4tt"
1787 [(set (match_operand:SI 0 "s_register_operand" "=r")
1788 (plus:SI (mult:SI (ashiftrt:SI
1789 (match_operand:SI 1 "s_register_operand" "r")
1790 (const_int 16))
1791 (ashiftrt:SI
1792 (match_operand:SI 2 "s_register_operand" "r")
1793 (const_int 16)))
1794 (match_operand:SI 3 "s_register_operand" "r")))]
1795 "TARGET_DSP_MULTIPLY"
1796 "smlatt%?\\t%0, %1, %2, %3"
1797 [(set_attr "type" "smlaxy")
1798 (set_attr "predicable" "yes")]
1799 )
1800
1801 (define_insn "maddhidi4"
1802 [(set (match_operand:DI 0 "s_register_operand" "=r")
1803 (plus:DI
1804 (mult:DI (sign_extend:DI
1805 (match_operand:HI 1 "s_register_operand" "r"))
1806 (sign_extend:DI
1807 (match_operand:HI 2 "s_register_operand" "r")))
1808 (match_operand:DI 3 "s_register_operand" "0")))]
1809 "TARGET_DSP_MULTIPLY"
1810 "smlalbb%?\\t%Q0, %R0, %1, %2"
1811 [(set_attr "type" "smlalxy")
1812 (set_attr "predicable" "yes")])
1813
1814 ;; Note: there is no maddhidi4bt pattern because this one is the canonical form
1815 (define_insn "*maddhidi4tb"
1816 [(set (match_operand:DI 0 "s_register_operand" "=r")
1817 (plus:DI
1818 (mult:DI (sign_extend:DI
1819 (ashiftrt:SI
1820 (match_operand:SI 1 "s_register_operand" "r")
1821 (const_int 16)))
1822 (sign_extend:DI
1823 (match_operand:HI 2 "s_register_operand" "r")))
1824 (match_operand:DI 3 "s_register_operand" "0")))]
1825 "TARGET_DSP_MULTIPLY"
1826 "smlaltb%?\\t%Q0, %R0, %1, %2"
1827 [(set_attr "type" "smlalxy")
1828 (set_attr "predicable" "yes")])
1829
1830 (define_insn "*maddhidi4tt"
1831 [(set (match_operand:DI 0 "s_register_operand" "=r")
1832 (plus:DI
1833 (mult:DI (sign_extend:DI
1834 (ashiftrt:SI
1835 (match_operand:SI 1 "s_register_operand" "r")
1836 (const_int 16)))
1837 (sign_extend:DI
1838 (ashiftrt:SI
1839 (match_operand:SI 2 "s_register_operand" "r")
1840 (const_int 16))))
1841 (match_operand:DI 3 "s_register_operand" "0")))]
1842 "TARGET_DSP_MULTIPLY"
1843 "smlaltt%?\\t%Q0, %R0, %1, %2"
1844 [(set_attr "type" "smlalxy")
1845 (set_attr "predicable" "yes")])
1846
1847 (define_expand "mulsf3"
1848 [(set (match_operand:SF 0 "s_register_operand")
1849 (mult:SF (match_operand:SF 1 "s_register_operand")
1850 (match_operand:SF 2 "s_register_operand")))]
1851 "TARGET_32BIT && TARGET_HARD_FLOAT"
1852 "
1853 ")
1854
1855 (define_expand "muldf3"
1856 [(set (match_operand:DF 0 "s_register_operand")
1857 (mult:DF (match_operand:DF 1 "s_register_operand")
1858 (match_operand:DF 2 "s_register_operand")))]
1859 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1860 "
1861 ")
1862 \f
1863 ;; Division insns
1864
1865 (define_expand "divsf3"
1866 [(set (match_operand:SF 0 "s_register_operand")
1867 (div:SF (match_operand:SF 1 "s_register_operand")
1868 (match_operand:SF 2 "s_register_operand")))]
1869 "TARGET_32BIT && TARGET_HARD_FLOAT"
1870 "")
1871
1872 (define_expand "divdf3"
1873 [(set (match_operand:DF 0 "s_register_operand")
1874 (div:DF (match_operand:DF 1 "s_register_operand")
1875 (match_operand:DF 2 "s_register_operand")))]
1876 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
1877 "")
1878 \f
1879
1880 ; Expand logical operations. The mid-end expander does not split off memory
1881 ; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
1882 ; So an explicit expander is needed to generate better code.
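; As an illustrative sketch of this expansion (editor's example): a DImode
; AND with the constant 0xffffffff00000000 becomes two SImode operations,
; roughly  lo(d) = lo(x) & 0  and  hi(d) = hi(x) & 0xffffffff,  so each half
; can be simplified and matched independently.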
1883
1884 (define_expand "<LOGICAL:optab>di3"
1885 [(set (match_operand:DI 0 "s_register_operand")
1886 (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
1887 (match_operand:DI 2 "arm_<optab>di_operand")))]
1888 "TARGET_32BIT"
1889 {
1890 rtx low = simplify_gen_binary (<CODE>, SImode,
1891 gen_lowpart (SImode, operands[1]),
1892 gen_lowpart (SImode, operands[2]));
1893 rtx high = simplify_gen_binary (<CODE>, SImode,
1894 gen_highpart (SImode, operands[1]),
1895 gen_highpart_mode (SImode, DImode,
1896 operands[2]));
1897
1898 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
1899 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
1900 DONE;
1901 }
1902 )
1903
1904 (define_expand "one_cmpldi2"
1905 [(set (match_operand:DI 0 "s_register_operand")
1906 (not:DI (match_operand:DI 1 "s_register_operand")))]
1907 "TARGET_32BIT"
1908 {
1909 rtx low = simplify_gen_unary (NOT, SImode,
1910 gen_lowpart (SImode, operands[1]),
1911 SImode);
1912 rtx high = simplify_gen_unary (NOT, SImode,
1913 gen_highpart_mode (SImode, DImode,
1914 operands[1]),
1915 SImode);
1916
1917 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
1918 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
1919 DONE;
1920 }
1921 )
1922
1923 ;; Split DImode and, ior, xor operations. Simply perform the logical
1924 ;; operation on the upper and lower halves of the registers.
1925 ;; This is needed for atomic operations in arm_split_atomic_op.
1926 ;; Avoid splitting IWMMXT instructions.
1927 (define_split
1928 [(set (match_operand:DI 0 "s_register_operand" "")
1929 (match_operator:DI 6 "logical_binary_operator"
1930 [(match_operand:DI 1 "s_register_operand" "")
1931 (match_operand:DI 2 "s_register_operand" "")]))]
1932 "TARGET_32BIT && reload_completed
1933 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1934 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1935 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1936 "
1937 {
1938 operands[3] = gen_highpart (SImode, operands[0]);
1939 operands[0] = gen_lowpart (SImode, operands[0]);
1940 operands[4] = gen_highpart (SImode, operands[1]);
1941 operands[1] = gen_lowpart (SImode, operands[1]);
1942 operands[5] = gen_highpart (SImode, operands[2]);
1943 operands[2] = gen_lowpart (SImode, operands[2]);
1944 }"
1945 )
1946
1947 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
1948 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
1949 (define_split
1950 [(set (match_operand:DI 0 "s_register_operand")
1951 (not:DI (match_operand:DI 1 "s_register_operand")))]
1952 "TARGET_32BIT"
1953 [(set (match_dup 0) (not:SI (match_dup 1)))
1954 (set (match_dup 2) (not:SI (match_dup 3)))]
1955 "
1956 {
1957 operands[2] = gen_highpart (SImode, operands[0]);
1958 operands[0] = gen_lowpart (SImode, operands[0]);
1959 operands[3] = gen_highpart (SImode, operands[1]);
1960 operands[1] = gen_lowpart (SImode, operands[1]);
1961 }"
1962 )
1963
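;; The andsi3 expander below special-cases several Thumb-1 constant masks
;; (a descriptive note added for clarity): a mask whose complement fits in
;; eight bits is rewritten as a bic, a mask of the form (1 << n) - 1 as a
;; zero extract, and a mask that clears only the low n bits as a logical
;; right shift followed by a left shift.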
1964 (define_expand "andsi3"
1965 [(set (match_operand:SI 0 "s_register_operand")
1966 (and:SI (match_operand:SI 1 "s_register_operand")
1967 (match_operand:SI 2 "reg_or_int_operand")))]
1968 "TARGET_EITHER"
1969 "
1970 if (TARGET_32BIT)
1971 {
1972 if (CONST_INT_P (operands[2]))
1973 {
1974 if (INTVAL (operands[2]) == 255 && arm_arch6)
1975 {
1976 operands[1] = convert_to_mode (QImode, operands[1], 1);
1977 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
1978 operands[1]));
1979 DONE;
1980 }
1981 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
1982 operands[2] = force_reg (SImode, operands[2]);
1983 else
1984 {
1985 arm_split_constant (AND, SImode, NULL_RTX,
1986 INTVAL (operands[2]), operands[0],
1987 operands[1],
1988 optimize && can_create_pseudo_p ());
1989
1990 DONE;
1991 }
1992 }
1993 }
1994 else /* TARGET_THUMB1 */
1995 {
1996 if (!CONST_INT_P (operands[2]))
1997 {
1998 rtx tmp = force_reg (SImode, operands[2]);
1999 if (rtx_equal_p (operands[0], operands[1]))
2000 operands[2] = tmp;
2001 else
2002 {
2003 operands[2] = operands[1];
2004 operands[1] = tmp;
2005 }
2006 }
2007 else
2008 {
2009 int i;
2010
2011 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2012 {
2013 operands[2] = force_reg (SImode,
2014 GEN_INT (~INTVAL (operands[2])));
2015
2016 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2017
2018 DONE;
2019 }
2020
2021 for (i = 9; i <= 31; i++)
2022 {
2023 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
2024 {
2025 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2026 const0_rtx));
2027 DONE;
2028 }
2029 else if ((HOST_WIDE_INT_1 << i) - 1
2030 == ~INTVAL (operands[2]))
2031 {
2032 rtx shift = GEN_INT (i);
2033 rtx reg = gen_reg_rtx (SImode);
2034
2035 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2036 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2037
2038 DONE;
2039 }
2040 }
2041
2042 operands[2] = force_reg (SImode, operands[2]);
2043 }
2044 }
2045 "
2046 )
2047
2048 ; ??? Check split length for Thumb-2
2049 (define_insn_and_split "*arm_andsi3_insn"
2050 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2051 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2052 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2053 "TARGET_32BIT"
2054 "@
2055 and%?\\t%0, %1, %2
2056 and%?\\t%0, %1, %2
2057 bic%?\\t%0, %1, #%B2
2058 and%?\\t%0, %1, %2
2059 #"
2060 "TARGET_32BIT
2061 && CONST_INT_P (operands[2])
2062 && !(const_ok_for_arm (INTVAL (operands[2]))
2063 || const_ok_for_arm (~INTVAL (operands[2])))"
2064 [(clobber (const_int 0))]
2065 "
2066 arm_split_constant (AND, SImode, curr_insn,
2067 INTVAL (operands[2]), operands[0], operands[1], 0);
2068 DONE;
2069 "
2070 [(set_attr "length" "4,4,4,4,16")
2071 (set_attr "predicable" "yes")
2072 (set_attr "predicable_short_it" "no,yes,no,no,no")
2073 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
2074 )
2075
2076 (define_insn "*andsi3_compare0"
2077 [(set (reg:CC_NOOV CC_REGNUM)
2078 (compare:CC_NOOV
2079 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2080 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
2081 (const_int 0)))
2082 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2083 (and:SI (match_dup 1) (match_dup 2)))]
2084 "TARGET_32BIT"
2085 "@
2086 ands%?\\t%0, %1, %2
2087 bics%?\\t%0, %1, #%B2
2088 ands%?\\t%0, %1, %2"
2089 [(set_attr "conds" "set")
2090 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
2091 )
2092
2093 (define_insn "*andsi3_compare0_scratch"
2094 [(set (reg:CC_NOOV CC_REGNUM)
2095 (compare:CC_NOOV
2096 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
2097 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
2098 (const_int 0)))
2099 (clobber (match_scratch:SI 2 "=X,r,X"))]
2100 "TARGET_32BIT"
2101 "@
2102 tst%?\\t%0, %1
2103 bics%?\\t%2, %0, #%B1
2104 tst%?\\t%0, %1"
2105 [(set_attr "conds" "set")
2106 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
2107 )
2108
2109 (define_insn "*zeroextractsi_compare0_scratch"
2110 [(set (reg:CC_NOOV CC_REGNUM)
2111 (compare:CC_NOOV (zero_extract:SI
2112 (match_operand:SI 0 "s_register_operand" "r")
2113 (match_operand 1 "const_int_operand" "n")
2114 (match_operand 2 "const_int_operand" "n"))
2115 (const_int 0)))]
2116 "TARGET_32BIT
2117 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2118 && INTVAL (operands[1]) > 0
2119 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2120 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2121 "*
2122 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2123 << INTVAL (operands[2]));
2124 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2125 return \"\";
2126 "
2127 [(set_attr "conds" "set")
2128 (set_attr "predicable" "yes")
2129 (set_attr "type" "logics_imm")]
2130 )
2131
2132 (define_insn_and_split "*ne_zeroextractsi"
2133 [(set (match_operand:SI 0 "s_register_operand" "=r")
2134 (ne:SI (zero_extract:SI
2135 (match_operand:SI 1 "s_register_operand" "r")
2136 (match_operand:SI 2 "const_int_operand" "n")
2137 (match_operand:SI 3 "const_int_operand" "n"))
2138 (const_int 0)))
2139 (clobber (reg:CC CC_REGNUM))]
2140 "TARGET_32BIT
2141 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2142 && INTVAL (operands[2]) > 0
2143 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2144 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2145 "#"
2146 "TARGET_32BIT
2147 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2148 && INTVAL (operands[2]) > 0
2149 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2150 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2151 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2152 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2153 (const_int 0)))
2154 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2155 (set (match_dup 0)
2156 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2157 (match_dup 0) (const_int 1)))]
2158 "
2159 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2160 << INTVAL (operands[3]));
2161 "
2162 [(set_attr "conds" "clob")
2163 (set (attr "length")
2164 (if_then_else (eq_attr "is_thumb" "yes")
2165 (const_int 12)
2166 (const_int 8)))
2167 (set_attr "type" "multiple")]
2168 )
2169
2170 (define_insn_and_split "*ne_zeroextractsi_shifted"
2171 [(set (match_operand:SI 0 "s_register_operand" "=r")
2172 (ne:SI (zero_extract:SI
2173 (match_operand:SI 1 "s_register_operand" "r")
2174 (match_operand:SI 2 "const_int_operand" "n")
2175 (const_int 0))
2176 (const_int 0)))
2177 (clobber (reg:CC CC_REGNUM))]
2178 "TARGET_ARM"
2179 "#"
2180 "TARGET_ARM"
2181 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2182 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2183 (const_int 0)))
2184 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2185 (set (match_dup 0)
2186 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2187 (match_dup 0) (const_int 1)))]
2188 "
2189 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2190 "
2191 [(set_attr "conds" "clob")
2192 (set_attr "length" "8")
2193 (set_attr "type" "multiple")]
2194 )
2195
2196 (define_insn_and_split "*ite_ne_zeroextractsi"
2197 [(set (match_operand:SI 0 "s_register_operand" "=r")
2198 (if_then_else:SI (ne (zero_extract:SI
2199 (match_operand:SI 1 "s_register_operand" "r")
2200 (match_operand:SI 2 "const_int_operand" "n")
2201 (match_operand:SI 3 "const_int_operand" "n"))
2202 (const_int 0))
2203 (match_operand:SI 4 "arm_not_operand" "rIK")
2204 (const_int 0)))
2205 (clobber (reg:CC CC_REGNUM))]
2206 "TARGET_ARM
2207 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2208 && INTVAL (operands[2]) > 0
2209 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2210 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2211 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2212 "#"
2213 "TARGET_ARM
2214 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2215 && INTVAL (operands[2]) > 0
2216 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2217 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2218 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2219 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2220 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2221 (const_int 0)))
2222 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2223 (set (match_dup 0)
2224 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2225 (match_dup 0) (match_dup 4)))]
2226 "
2227 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2228 << INTVAL (operands[3]));
2229 "
2230 [(set_attr "conds" "clob")
2231 (set_attr "length" "8")
2232 (set_attr "type" "multiple")]
2233 )
2234
2235 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2236 [(set (match_operand:SI 0 "s_register_operand" "=r")
2237 (if_then_else:SI (ne (zero_extract:SI
2238 (match_operand:SI 1 "s_register_operand" "r")
2239 (match_operand:SI 2 "const_int_operand" "n")
2240 (const_int 0))
2241 (const_int 0))
2242 (match_operand:SI 3 "arm_not_operand" "rIK")
2243 (const_int 0)))
2244 (clobber (reg:CC CC_REGNUM))]
2245 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2246 "#"
2247 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2248 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2249 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2250 (const_int 0)))
2251 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2252 (set (match_dup 0)
2253 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2254 (match_dup 0) (match_dup 3)))]
2255 "
2256 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2257 "
2258 [(set_attr "conds" "clob")
2259 (set_attr "length" "8")
2260 (set_attr "type" "multiple")]
2261 )
2262
2263 ;; ??? Use the Thumb-2 bitfield insert/extract instructions here.
2264 (define_split
2265 [(set (match_operand:SI 0 "s_register_operand" "")
2266 (match_operator:SI 1 "shiftable_operator"
2267 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2268 (match_operand:SI 3 "const_int_operand" "")
2269 (match_operand:SI 4 "const_int_operand" ""))
2270 (match_operand:SI 5 "s_register_operand" "")]))
2271 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2272 "TARGET_ARM"
2273 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2274 (set (match_dup 0)
2275 (match_op_dup 1
2276 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2277 (match_dup 5)]))]
2278 "{
2279 HOST_WIDE_INT temp = INTVAL (operands[3]);
2280
2281 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2282 operands[4] = GEN_INT (32 - temp);
2283 }"
2284 )
2285
2286 (define_split
2287 [(set (match_operand:SI 0 "s_register_operand" "")
2288 (match_operator:SI 1 "shiftable_operator"
2289 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2290 (match_operand:SI 3 "const_int_operand" "")
2291 (match_operand:SI 4 "const_int_operand" ""))
2292 (match_operand:SI 5 "s_register_operand" "")]))
2293 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2294 "TARGET_ARM"
2295 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2296 (set (match_dup 0)
2297 (match_op_dup 1
2298 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2299 (match_dup 5)]))]
2300 "{
2301 HOST_WIDE_INT temp = INTVAL (operands[3]);
2302
2303 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2304 operands[4] = GEN_INT (32 - temp);
2305 }"
2306 )
2307
2308 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2309 ;;; represented by the bitfield, then this will produce incorrect results.
2310 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2311 ;;; which have a real bit-field insert instruction, the truncation happens
2312 ;;; in the bit-field insert instruction itself. Since arm does not have a
2313 ;;; bit-field insert instruction, we would have to emit code here to truncate
2314 ;;; the value before we insert. This loses some of the advantage of having
2315 ;;; this insv pattern, so this pattern needs to be re-evaluated.
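;;; As an illustrative example of the concern (editor's note): inserting the
;;; value 0x1ff into an 8-bit field only behaves correctly if the value is
;;; first truncated to 0xff; otherwise the stray ninth bit can corrupt the
;;; bits just above the field.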
2316
2317 (define_expand "insv"
2318 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
2319 (match_operand 1 "general_operand")
2320 (match_operand 2 "general_operand"))
2321 (match_operand 3 "reg_or_int_operand"))]
2322 "TARGET_ARM || arm_arch_thumb2"
2323 "
2324 {
2325 int start_bit = INTVAL (operands[2]);
2326 int width = INTVAL (operands[1]);
2327 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
2328 rtx target, subtarget;
2329
2330 if (arm_arch_thumb2)
2331 {
2332 if (unaligned_access && MEM_P (operands[0])
2333 && s_register_operand (operands[3], GET_MODE (operands[3]))
2334 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2335 {
2336 rtx base_addr;
2337
2338 if (BYTES_BIG_ENDIAN)
2339 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2340 - start_bit;
2341
2342 if (width == 32)
2343 {
2344 base_addr = adjust_address (operands[0], SImode,
2345 start_bit / BITS_PER_UNIT);
2346 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2347 }
2348 else
2349 {
2350 rtx tmp = gen_reg_rtx (HImode);
2351
2352 base_addr = adjust_address (operands[0], HImode,
2353 start_bit / BITS_PER_UNIT);
2354 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2355 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2356 }
2357 DONE;
2358 }
2359 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2360 {
2361 bool use_bfi = TRUE;
2362
2363 if (CONST_INT_P (operands[3]))
2364 {
2365 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2366
2367 if (val == 0)
2368 {
2369 emit_insn (gen_insv_zero (operands[0], operands[1],
2370 operands[2]));
2371 DONE;
2372 }
2373
2374 /* See if the set can be done with a single orr instruction. */
2375 if (val == mask && const_ok_for_arm (val << start_bit))
2376 use_bfi = FALSE;
2377 }
2378
2379 if (use_bfi)
2380 {
2381 if (!REG_P (operands[3]))
2382 operands[3] = force_reg (SImode, operands[3]);
2383
2384 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2385 operands[3]));
2386 DONE;
2387 }
2388 }
2389 else
2390 FAIL;
2391 }
2392
2393 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
2394 FAIL;
2395
2396 target = copy_rtx (operands[0]);
2397 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2398 subreg as the final target. */
2399 if (GET_CODE (target) == SUBREG)
2400 {
2401 subtarget = gen_reg_rtx (SImode);
2402 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2403 < GET_MODE_SIZE (SImode))
2404 target = SUBREG_REG (target);
2405 }
2406 else
2407 subtarget = target;
2408
2409 if (CONST_INT_P (operands[3]))
2410 {
2411 /* Since we are inserting a known constant, we may be able to
2412 reduce the number of bits that we have to clear so that
2413 the mask becomes simple. */
2414 /* ??? This code does not check to see if the new mask is actually
2415 simpler. It may not be. */
2416 rtx op1 = gen_reg_rtx (SImode);
2417 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2418 start of this pattern. */
2419 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2420 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2421
2422 emit_insn (gen_andsi3 (op1, operands[0],
2423 gen_int_mode (~mask2, SImode)));
2424 emit_insn (gen_iorsi3 (subtarget, op1,
2425 gen_int_mode (op3_value << start_bit, SImode)));
2426 }
2427 else if (start_bit == 0
2428 && !(const_ok_for_arm (mask)
2429 || const_ok_for_arm (~mask)))
2430 {
2431 /* A trick: since we are setting the bottom bits in the word,
2432 we can shift operand[3] up, operand[0] down, OR them together
2433 and rotate the result back again. This takes 3 insns, and
2434 the third might be mergeable into another op. */
2435 /* The shift up copes with the possibility that operand[3] is
2436 wider than the bitfield. */
2437 rtx op0 = gen_reg_rtx (SImode);
2438 rtx op1 = gen_reg_rtx (SImode);
2439
2440 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2441 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2442 emit_insn (gen_iorsi3 (op1, op1, op0));
2443 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2444 }
2445 else if ((width + start_bit == 32)
2446 && !(const_ok_for_arm (mask)
2447 || const_ok_for_arm (~mask)))
2448 {
2449 /* Similar trick, but slightly less efficient. */
2450
2451 rtx op0 = gen_reg_rtx (SImode);
2452 rtx op1 = gen_reg_rtx (SImode);
2453
2454 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2455 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2456 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2457 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2458 }
2459 else
2460 {
2461 rtx op0 = gen_int_mode (mask, SImode);
2462 rtx op1 = gen_reg_rtx (SImode);
2463 rtx op2 = gen_reg_rtx (SImode);
2464
2465 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2466 {
2467 rtx tmp = gen_reg_rtx (SImode);
2468
2469 emit_insn (gen_movsi (tmp, op0));
2470 op0 = tmp;
2471 }
2472
2473 /* Mask out any bits in operand[3] that are not needed. */
2474 emit_insn (gen_andsi3 (op1, operands[3], op0));
2475
2476 if (CONST_INT_P (op0)
2477 && (const_ok_for_arm (mask << start_bit)
2478 || const_ok_for_arm (~(mask << start_bit))))
2479 {
2480 op0 = gen_int_mode (~(mask << start_bit), SImode);
2481 emit_insn (gen_andsi3 (op2, operands[0], op0));
2482 }
2483 else
2484 {
2485 if (CONST_INT_P (op0))
2486 {
2487 rtx tmp = gen_reg_rtx (SImode);
2488
2489 emit_insn (gen_movsi (tmp, op0));
2490 op0 = tmp;
2491 }
2492
2493 if (start_bit != 0)
2494 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2495
2496 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2497 }
2498
2499 if (start_bit != 0)
2500 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2501
2502 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2503 }
2504
2505 if (subtarget != target)
2506 {
2507 /* If TARGET is still a SUBREG, then it must be wider than a word,
2508 so we must be careful only to set the subword we were asked to. */
2509 if (GET_CODE (target) == SUBREG)
2510 emit_move_insn (target, subtarget);
2511 else
2512 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2513 }
2514
2515 DONE;
2516 }"
2517 )
2518
2519 (define_insn "insv_zero"
2520 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2521 (match_operand:SI 1 "const_int_M_operand" "M")
2522 (match_operand:SI 2 "const_int_M_operand" "M"))
2523 (const_int 0))]
2524 "arm_arch_thumb2"
2525 "bfc%?\t%0, %2, %1"
2526 [(set_attr "length" "4")
2527 (set_attr "predicable" "yes")
2528 (set_attr "type" "bfm")]
2529 )
2530
2531 (define_insn "insv_t2"
2532 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2533 (match_operand:SI 1 "const_int_M_operand" "M")
2534 (match_operand:SI 2 "const_int_M_operand" "M"))
2535 (match_operand:SI 3 "s_register_operand" "r"))]
2536 "arm_arch_thumb2"
2537 "bfi%?\t%0, %3, %2, %1"
2538 [(set_attr "length" "4")
2539 (set_attr "predicable" "yes")
2540 (set_attr "type" "bfm")]
2541 )
2542
2543 (define_insn "andsi_notsi_si"
2544 [(set (match_operand:SI 0 "s_register_operand" "=r")
2545 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2546 (match_operand:SI 1 "s_register_operand" "r")))]
2547 "TARGET_32BIT"
2548 "bic%?\\t%0, %1, %2"
2549 [(set_attr "predicable" "yes")
2550 (set_attr "type" "logic_reg")]
2551 )
2552
2553 (define_insn "andsi_not_shiftsi_si"
2554 [(set (match_operand:SI 0 "s_register_operand" "=r")
2555 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2556 [(match_operand:SI 2 "s_register_operand" "r")
2557 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2558 (match_operand:SI 1 "s_register_operand" "r")))]
2559 "TARGET_ARM"
2560 "bic%?\\t%0, %1, %2%S4"
2561 [(set_attr "predicable" "yes")
2562 (set_attr "shift" "2")
2563 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2564 (const_string "logic_shift_imm")
2565 (const_string "logic_shift_reg")))]
2566 )
2567
2568 ;; Shifted bics pattern used only to set up the CC status register, without
2569 ;; reusing the bics output.  The pattern restricts the Thumb-2 shift operand
2570 ;; to a constant, since bics in Thumb-2 does not support a shift by register.
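;; A hedged example of the kind of instruction this pattern emits:
;;   bics    r4, r3, r1, lsl #2
;; which sets the condition codes from r3 & ~(r1 << 2); the r4 result is
;; only a scratch and is not used again.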
2571 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
2572 [(set (reg:CC_NOOV CC_REGNUM)
2573 (compare:CC_NOOV
2574 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2575 [(match_operand:SI 1 "s_register_operand" "r")
2576 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2577 (match_operand:SI 3 "s_register_operand" "r"))
2578 (const_int 0)))
2579 (clobber (match_scratch:SI 4 "=r"))]
2580 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2581 "bics%?\\t%4, %3, %1%S0"
2582 [(set_attr "predicable" "yes")
2583 (set_attr "conds" "set")
2584 (set_attr "shift" "1")
2585 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2586 (const_string "logic_shift_imm")
2587 (const_string "logic_shift_reg")))]
2588 )
2589
2590 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but here the bics result is
2591 ;; also used later.
2592 (define_insn "andsi_not_shiftsi_si_scc"
2593 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2594 (compare:CC_NOOV
2595 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2596 [(match_operand:SI 1 "s_register_operand" "r")
2597 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2598 (match_operand:SI 3 "s_register_operand" "r"))
2599 (const_int 0)))
2600 (set (match_operand:SI 4 "s_register_operand" "=r")
2601 (and:SI (not:SI (match_op_dup 0
2602 [(match_dup 1)
2603 (match_dup 2)]))
2604 (match_dup 3)))])]
2605 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2606 "bics%?\\t%4, %3, %1%S0"
2607 [(set_attr "predicable" "yes")
2608 (set_attr "conds" "set")
2609 (set_attr "shift" "1")
2610 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2611 (const_string "logic_shift_imm")
2612 (const_string "logic_shift_reg")))]
2613 )
2614
2615 (define_insn "*andsi_notsi_si_compare0"
2616 [(set (reg:CC_NOOV CC_REGNUM)
2617 (compare:CC_NOOV
2618 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2619 (match_operand:SI 1 "s_register_operand" "r"))
2620 (const_int 0)))
2621 (set (match_operand:SI 0 "s_register_operand" "=r")
2622 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2623 "TARGET_32BIT"
2624 "bics\\t%0, %1, %2"
2625 [(set_attr "conds" "set")
2626 (set_attr "type" "logics_shift_reg")]
2627 )
2628
2629 (define_insn "*andsi_notsi_si_compare0_scratch"
2630 [(set (reg:CC_NOOV CC_REGNUM)
2631 (compare:CC_NOOV
2632 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2633 (match_operand:SI 1 "s_register_operand" "r"))
2634 (const_int 0)))
2635 (clobber (match_scratch:SI 0 "=r"))]
2636 "TARGET_32BIT"
2637 "bics\\t%0, %1, %2"
2638 [(set_attr "conds" "set")
2639 (set_attr "type" "logics_shift_reg")]
2640 )
2641
2642 (define_expand "iorsi3"
2643 [(set (match_operand:SI 0 "s_register_operand")
2644 (ior:SI (match_operand:SI 1 "s_register_operand")
2645 (match_operand:SI 2 "reg_or_int_operand")))]
2646 "TARGET_EITHER"
2647 "
2648 if (CONST_INT_P (operands[2]))
2649 {
2650 if (TARGET_32BIT)
2651 {
2652 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
2653 operands[2] = force_reg (SImode, operands[2]);
2654 else
2655 {
2656 arm_split_constant (IOR, SImode, NULL_RTX,
2657 INTVAL (operands[2]), operands[0],
2658 operands[1],
2659 optimize && can_create_pseudo_p ());
2660 DONE;
2661 }
2662 }
2663 else /* TARGET_THUMB1 */
2664 {
2665 rtx tmp = force_reg (SImode, operands[2]);
2666 if (rtx_equal_p (operands[0], operands[1]))
2667 operands[2] = tmp;
2668 else
2669 {
2670 operands[2] = operands[1];
2671 operands[1] = tmp;
2672 }
2673 }
2674 }
2675 "
2676 )
2677
2678 (define_insn_and_split "*iorsi3_insn"
2679 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2680 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2681 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2682 "TARGET_32BIT"
2683 "@
2684 orr%?\\t%0, %1, %2
2685 orr%?\\t%0, %1, %2
2686 orn%?\\t%0, %1, #%B2
2687 orr%?\\t%0, %1, %2
2688 #"
2689 "TARGET_32BIT
2690 && CONST_INT_P (operands[2])
2691 && !(const_ok_for_arm (INTVAL (operands[2]))
2692 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2693 [(clobber (const_int 0))]
2694 {
2695 arm_split_constant (IOR, SImode, curr_insn,
2696 INTVAL (operands[2]), operands[0], operands[1], 0);
2697 DONE;
2698 }
2699 [(set_attr "length" "4,4,4,4,16")
2700 (set_attr "arch" "32,t2,t2,32,32")
2701 (set_attr "predicable" "yes")
2702 (set_attr "predicable_short_it" "no,yes,no,no,no")
2703 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
2704 )
2705
2706 (define_peephole2
2707 [(match_scratch:SI 3 "r")
2708 (set (match_operand:SI 0 "arm_general_register_operand" "")
2709 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2710 (match_operand:SI 2 "const_int_operand" "")))]
2711 "TARGET_ARM
2712 && !const_ok_for_arm (INTVAL (operands[2]))
2713 && const_ok_for_arm (~INTVAL (operands[2]))"
2714 [(set (match_dup 3) (match_dup 2))
2715 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2716 ""
2717 )
2718
2719 (define_insn "*iorsi3_compare0"
2720 [(set (reg:CC_NOOV CC_REGNUM)
2721 (compare:CC_NOOV
2722 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2723 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2724 (const_int 0)))
2725 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
2726 (ior:SI (match_dup 1) (match_dup 2)))]
2727 "TARGET_32BIT"
2728 "orrs%?\\t%0, %1, %2"
2729 [(set_attr "conds" "set")
2730 (set_attr "arch" "*,t2,*")
2731 (set_attr "length" "4,2,4")
2732 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
2733 )
2734
2735 (define_insn "*iorsi3_compare0_scratch"
2736 [(set (reg:CC_NOOV CC_REGNUM)
2737 (compare:CC_NOOV
2738 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2739 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2740 (const_int 0)))
2741 (clobber (match_scratch:SI 0 "=r,l,r"))]
2742 "TARGET_32BIT"
2743 "orrs%?\\t%0, %1, %2"
2744 [(set_attr "conds" "set")
2745 (set_attr "arch" "*,t2,*")
2746 (set_attr "length" "4,2,4")
2747 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
2748 )
2749
2750 (define_expand "xorsi3"
2751 [(set (match_operand:SI 0 "s_register_operand")
2752 (xor:SI (match_operand:SI 1 "s_register_operand")
2753 (match_operand:SI 2 "reg_or_int_operand")))]
2754 "TARGET_EITHER"
2755 "if (CONST_INT_P (operands[2]))
2756 {
2757 if (TARGET_32BIT)
2758 {
2759 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
2760 operands[2] = force_reg (SImode, operands[2]);
2761 else
2762 {
2763 arm_split_constant (XOR, SImode, NULL_RTX,
2764 INTVAL (operands[2]), operands[0],
2765 operands[1],
2766 optimize && can_create_pseudo_p ());
2767 DONE;
2768 }
2769 }
2770 else /* TARGET_THUMB1 */
2771 {
2772 rtx tmp = force_reg (SImode, operands[2]);
2773 if (rtx_equal_p (operands[0], operands[1]))
2774 operands[2] = tmp;
2775 else
2776 {
2777 operands[2] = operands[1];
2778 operands[1] = tmp;
2779 }
2780 }
2781 }"
2782 )
2783
2784 (define_insn_and_split "*arm_xorsi3"
2785 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
2786 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
2787 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
2788 "TARGET_32BIT"
2789 "@
2790 eor%?\\t%0, %1, %2
2791 eor%?\\t%0, %1, %2
2792 eor%?\\t%0, %1, %2
2793 #"
2794 "TARGET_32BIT
2795 && CONST_INT_P (operands[2])
2796 && !const_ok_for_arm (INTVAL (operands[2]))"
2797 [(clobber (const_int 0))]
2798 {
2799 arm_split_constant (XOR, SImode, curr_insn,
2800 INTVAL (operands[2]), operands[0], operands[1], 0);
2801 DONE;
2802 }
2803 [(set_attr "length" "4,4,4,16")
2804 (set_attr "predicable" "yes")
2805 (set_attr "predicable_short_it" "no,yes,no,no")
2806 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
2807 )
2808
2809 (define_insn "*xorsi3_compare0"
2810 [(set (reg:CC_NOOV CC_REGNUM)
2811 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
2812 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
2813 (const_int 0)))
2814 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2815 (xor:SI (match_dup 1) (match_dup 2)))]
2816 "TARGET_32BIT"
2817 "eors%?\\t%0, %1, %2"
2818 [(set_attr "conds" "set")
2819 (set_attr "type" "logics_imm,logics_reg")]
2820 )
2821
2822 (define_insn "*xorsi3_compare0_scratch"
2823 [(set (reg:CC_NOOV CC_REGNUM)
2824 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
2825 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
2826 (const_int 0)))]
2827 "TARGET_32BIT"
2828 "teq%?\\t%0, %1"
2829 [(set_attr "conds" "set")
2830 (set_attr "type" "logics_imm,logics_reg")]
2831 )
2832
2833 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2834 ; (NOT D) we can sometimes merge the final NOT into one of the following
2835 ; insns.
2836
2837 (define_split
2838 [(set (match_operand:SI 0 "s_register_operand" "")
2839 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2840 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2841 (match_operand:SI 3 "arm_rhs_operand" "")))
2842 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2843 "TARGET_32BIT"
2844 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2845 (not:SI (match_dup 3))))
2846 (set (match_dup 0) (not:SI (match_dup 4)))]
2847 ""
2848 )
2849
2850 (define_insn_and_split "*andsi_iorsi3_notsi"
2851 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2852 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
2853 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2854 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2855 "TARGET_32BIT"
2856 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2857 "&& reload_completed"
2858 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
2859 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
2860 {
2861 /* If operands[3] is a constant make sure to fold the NOT into it
2862 to avoid creating a NOT of a CONST_INT. */
2863 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
2864 if (CONST_INT_P (not_rtx))
2865 {
2866 operands[4] = operands[0];
2867 operands[5] = not_rtx;
2868 }
2869 else
2870 {
2871 operands[5] = operands[0];
2872 operands[4] = not_rtx;
2873 }
2874 }
2875 [(set_attr "length" "8")
2876 (set_attr "ce_count" "2")
2877 (set_attr "predicable" "yes")
2878 (set_attr "type" "multiple")]
2879 )
2880
2881 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2882 ; insns are available?
2883 (define_split
2884 [(set (match_operand:SI 0 "s_register_operand" "")
2885 (match_operator:SI 1 "logical_binary_operator"
2886 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2887 (match_operand:SI 3 "const_int_operand" "")
2888 (match_operand:SI 4 "const_int_operand" ""))
2889 (match_operator:SI 9 "logical_binary_operator"
2890 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2891 (match_operand:SI 6 "const_int_operand" ""))
2892 (match_operand:SI 7 "s_register_operand" "")])]))
2893 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2894 "TARGET_32BIT
2895 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2896 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2897 [(set (match_dup 8)
2898 (match_op_dup 1
2899 [(ashift:SI (match_dup 2) (match_dup 4))
2900 (match_dup 5)]))
2901 (set (match_dup 0)
2902 (match_op_dup 1
2903 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2904 (match_dup 7)]))]
2905 "
2906 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2907 ")
2908
2909 (define_split
2910 [(set (match_operand:SI 0 "s_register_operand" "")
2911 (match_operator:SI 1 "logical_binary_operator"
2912 [(match_operator:SI 9 "logical_binary_operator"
2913 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2914 (match_operand:SI 6 "const_int_operand" ""))
2915 (match_operand:SI 7 "s_register_operand" "")])
2916 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2917 (match_operand:SI 3 "const_int_operand" "")
2918 (match_operand:SI 4 "const_int_operand" ""))]))
2919 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2920 "TARGET_32BIT
2921 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2922 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2923 [(set (match_dup 8)
2924 (match_op_dup 1
2925 [(ashift:SI (match_dup 2) (match_dup 4))
2926 (match_dup 5)]))
2927 (set (match_dup 0)
2928 (match_op_dup 1
2929 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2930 (match_dup 7)]))]
2931 "
2932 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2933 ")
2934
2935 (define_split
2936 [(set (match_operand:SI 0 "s_register_operand" "")
2937 (match_operator:SI 1 "logical_binary_operator"
2938 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2939 (match_operand:SI 3 "const_int_operand" "")
2940 (match_operand:SI 4 "const_int_operand" ""))
2941 (match_operator:SI 9 "logical_binary_operator"
2942 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2943 (match_operand:SI 6 "const_int_operand" ""))
2944 (match_operand:SI 7 "s_register_operand" "")])]))
2945 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2946 "TARGET_32BIT
2947 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2948 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2949 [(set (match_dup 8)
2950 (match_op_dup 1
2951 [(ashift:SI (match_dup 2) (match_dup 4))
2952 (match_dup 5)]))
2953 (set (match_dup 0)
2954 (match_op_dup 1
2955 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2956 (match_dup 7)]))]
2957 "
2958 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2959 ")
2960
2961 (define_split
2962 [(set (match_operand:SI 0 "s_register_operand" "")
2963 (match_operator:SI 1 "logical_binary_operator"
2964 [(match_operator:SI 9 "logical_binary_operator"
2965 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2966 (match_operand:SI 6 "const_int_operand" ""))
2967 (match_operand:SI 7 "s_register_operand" "")])
2968 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2969 (match_operand:SI 3 "const_int_operand" "")
2970 (match_operand:SI 4 "const_int_operand" ""))]))
2971 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2972 "TARGET_32BIT
2973 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2974 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2975 [(set (match_dup 8)
2976 (match_op_dup 1
2977 [(ashift:SI (match_dup 2) (match_dup 4))
2978 (match_dup 5)]))
2979 (set (match_dup 0)
2980 (match_op_dup 1
2981 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2982 (match_dup 7)]))]
2983 "
2984 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2985 ")
2986 \f
2987
2988 ;; Minimum and maximum insns
2989
2990 (define_expand "smaxsi3"
2991 [(parallel [
2992 (set (match_operand:SI 0 "s_register_operand")
2993 (smax:SI (match_operand:SI 1 "s_register_operand")
2994 (match_operand:SI 2 "arm_rhs_operand")))
2995 (clobber (reg:CC CC_REGNUM))])]
2996 "TARGET_32BIT"
2997 "
2998 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2999 {
3000 /* No need for a clobber of the condition code register here. */
3001 emit_insn (gen_rtx_SET (operands[0],
3002 gen_rtx_SMAX (SImode, operands[1],
3003 operands[2])));
3004 DONE;
3005 }
3006 ")
3007
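;; The patterns below rely on a standard ARM idiom (noted here for clarity):
;; "x, asr #31" is zero when x is non-negative and all-ones when x is
;; negative, so "bic %0, %1, %1, asr #31" computes max (x, 0),
;; "orr %0, %1, %1, asr #31" computes max (x, -1), and the matching
;; "and %0, %1, %1, asr #31" further down computes min (x, 0).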
3008 (define_insn "*smax_0"
3009 [(set (match_operand:SI 0 "s_register_operand" "=r")
3010 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3011 (const_int 0)))]
3012 "TARGET_32BIT"
3013 "bic%?\\t%0, %1, %1, asr #31"
3014 [(set_attr "predicable" "yes")
3015 (set_attr "type" "logic_shift_reg")]
3016 )
3017
3018 (define_insn "*smax_m1"
3019 [(set (match_operand:SI 0 "s_register_operand" "=r")
3020 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3021 (const_int -1)))]
3022 "TARGET_32BIT"
3023 "orr%?\\t%0, %1, %1, asr #31"
3024 [(set_attr "predicable" "yes")
3025 (set_attr "type" "logic_shift_reg")]
3026 )
3027
3028 (define_insn_and_split "*arm_smax_insn"
3029 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3030 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3031 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3032 (clobber (reg:CC CC_REGNUM))]
3033 "TARGET_ARM"
3034 "#"
3035 ; cmp\\t%1, %2\;movlt\\t%0, %2
3036 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3037 "TARGET_ARM"
3038 [(set (reg:CC CC_REGNUM)
3039 (compare:CC (match_dup 1) (match_dup 2)))
3040 (set (match_dup 0)
3041 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
3042 (match_dup 1)
3043 (match_dup 2)))]
3044 ""
3045 [(set_attr "conds" "clob")
3046 (set_attr "length" "8,12")
3047 (set_attr "type" "multiple")]
3048 )
3049
3050 (define_expand "sminsi3"
3051 [(parallel [
3052 (set (match_operand:SI 0 "s_register_operand")
3053 (smin:SI (match_operand:SI 1 "s_register_operand")
3054 (match_operand:SI 2 "arm_rhs_operand")))
3055 (clobber (reg:CC CC_REGNUM))])]
3056 "TARGET_32BIT"
3057 "
3058 if (operands[2] == const0_rtx)
3059 {
3060 /* No need for a clobber of the condition code register here. */
3061 emit_insn (gen_rtx_SET (operands[0],
3062 gen_rtx_SMIN (SImode, operands[1],
3063 operands[2])));
3064 DONE;
3065 }
3066 ")
3067
3068 (define_insn "*smin_0"
3069 [(set (match_operand:SI 0 "s_register_operand" "=r")
3070 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3071 (const_int 0)))]
3072 "TARGET_32BIT"
3073 "and%?\\t%0, %1, %1, asr #31"
3074 [(set_attr "predicable" "yes")
3075 (set_attr "type" "logic_shift_reg")]
3076 )
3077
3078 (define_insn_and_split "*arm_smin_insn"
3079 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3080 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3081 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3082 (clobber (reg:CC CC_REGNUM))]
3083 "TARGET_ARM"
3084 "#"
3085 ; cmp\\t%1, %2\;movge\\t%0, %2
3086 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3087 "TARGET_ARM"
3088 [(set (reg:CC CC_REGNUM)
3089 (compare:CC (match_dup 1) (match_dup 2)))
3090 (set (match_dup 0)
3091 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
3092 (match_dup 1)
3093 (match_dup 2)))]
3094 ""
3095 [(set_attr "conds" "clob")
3096 (set_attr "length" "8,12")
3097 (set_attr "type" "multiple,multiple")]
3098 )
3099
3100 (define_expand "umaxsi3"
3101 [(parallel [
3102 (set (match_operand:SI 0 "s_register_operand")
3103 (umax:SI (match_operand:SI 1 "s_register_operand")
3104 (match_operand:SI 2 "arm_rhs_operand")))
3105 (clobber (reg:CC CC_REGNUM))])]
3106 "TARGET_32BIT"
3107 ""
3108 )
3109
3110 (define_insn_and_split "*arm_umaxsi3"
3111 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3112 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3113 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3114 (clobber (reg:CC CC_REGNUM))]
3115 "TARGET_ARM"
3116 "#"
3117 ; cmp\\t%1, %2\;movcc\\t%0, %2
3118 ; cmp\\t%1, %2\;movcs\\t%0, %1
3119 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3120 "TARGET_ARM"
3121 [(set (reg:CC CC_REGNUM)
3122 (compare:CC (match_dup 1) (match_dup 2)))
3123 (set (match_dup 0)
3124 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
3125 (match_dup 1)
3126 (match_dup 2)))]
3127 ""
3128 [(set_attr "conds" "clob")
3129 (set_attr "length" "8,8,12")
3130 (set_attr "type" "store_4")]
3131 )
3132
3133 (define_expand "uminsi3"
3134 [(parallel [
3135 (set (match_operand:SI 0 "s_register_operand")
3136 (umin:SI (match_operand:SI 1 "s_register_operand")
3137 (match_operand:SI 2 "arm_rhs_operand")))
3138 (clobber (reg:CC CC_REGNUM))])]
3139 "TARGET_32BIT"
3140 ""
3141 )
3142
3143 (define_insn_and_split "*arm_uminsi3"
3144 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3145 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3146 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3147 (clobber (reg:CC CC_REGNUM))]
3148 "TARGET_ARM"
3149 "#"
3150 ; cmp\\t%1, %2\;movcs\\t%0, %2
3151 ; cmp\\t%1, %2\;movcc\\t%0, %1
3152 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3153 "TARGET_ARM"
3154 [(set (reg:CC CC_REGNUM)
3155 (compare:CC (match_dup 1) (match_dup 2)))
3156 (set (match_dup 0)
3157 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
3158 (match_dup 1)
3159 (match_dup 2)))]
3160 ""
3161 [(set_attr "conds" "clob")
3162 (set_attr "length" "8,8,12")
3163 (set_attr "type" "store_4")]
3164 )
3165
3166 (define_insn "*store_minmaxsi"
3167 [(set (match_operand:SI 0 "memory_operand" "=m")
3168 (match_operator:SI 3 "minmax_operator"
3169 [(match_operand:SI 1 "s_register_operand" "r")
3170 (match_operand:SI 2 "s_register_operand" "r")]))
3171 (clobber (reg:CC CC_REGNUM))]
3172 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
3173 "*
3174 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3175 operands[1], operands[2]);
3176 output_asm_insn (\"cmp\\t%1, %2\", operands);
3177 if (TARGET_THUMB2)
3178 output_asm_insn (\"ite\t%d3\", operands);
3179 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3180 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3181 return \"\";
3182 "
3183 [(set_attr "conds" "clob")
3184 (set (attr "length")
3185 (if_then_else (eq_attr "is_thumb" "yes")
3186 (const_int 14)
3187 (const_int 12)))
3188 (set_attr "type" "store_4")]
3189 )
3190
3191 ; Reject the frame pointer in operand[1], since reloading this after
3192 ; it has been eliminated can cause carnage.
3193 (define_insn "*minmax_arithsi"
3194 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3195 (match_operator:SI 4 "shiftable_operator"
3196 [(match_operator:SI 5 "minmax_operator"
3197 [(match_operand:SI 2 "s_register_operand" "r,r")
3198 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3199 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3200 (clobber (reg:CC CC_REGNUM))]
3201 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
3202 "*
3203 {
3204 enum rtx_code code = GET_CODE (operands[4]);
3205 bool need_else;
3206
3207 if (which_alternative != 0 || operands[3] != const0_rtx
3208 || (code != PLUS && code != IOR && code != XOR))
3209 need_else = true;
3210 else
3211 need_else = false;
3212
3213 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3214 operands[2], operands[3]);
3215 output_asm_insn (\"cmp\\t%2, %3\", operands);
3216 if (TARGET_THUMB2)
3217 {
3218 if (need_else)
3219 output_asm_insn (\"ite\\t%d5\", operands);
3220 else
3221 output_asm_insn (\"it\\t%d5\", operands);
3222 }
3223 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3224 if (need_else)
3225 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3226 return \"\";
3227 }"
3228 [(set_attr "conds" "clob")
3229 (set (attr "length")
3230 (if_then_else (eq_attr "is_thumb" "yes")
3231 (const_int 14)
3232 (const_int 12)))
3233 (set_attr "type" "multiple")]
3234 )
3235
3236 ; Reject the frame pointer in operand[1], since reloading this after
3237 ; it has been eliminated can cause carnage.
3238 (define_insn_and_split "*minmax_arithsi_non_canon"
3239 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
3240 (minus:SI
3241 (match_operand:SI 1 "s_register_operand" "0,?Ts")
3242 (match_operator:SI 4 "minmax_operator"
3243 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
3244 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
3245 (clobber (reg:CC CC_REGNUM))]
3246 "TARGET_32BIT && !arm_eliminable_register (operands[1])
3247 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
3248 "#"
3249 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
3250 [(set (reg:CC CC_REGNUM)
3251 (compare:CC (match_dup 2) (match_dup 3)))
3252
3253 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
3254 (set (match_dup 0)
3255 (minus:SI (match_dup 1)
3256 (match_dup 2))))
3257 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
3258 (set (match_dup 0)
3259 (match_dup 6)))]
3260 {
3261 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
3262 operands[2], operands[3]);
3263 enum rtx_code rc = minmax_code (operands[4]);
3264 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
3265 operands[2], operands[3]);
3266
3267 if (mode == CCFPmode || mode == CCFPEmode)
3268 rc = reverse_condition_maybe_unordered (rc);
3269 else
3270 rc = reverse_condition (rc);
3271 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
3272 if (CONST_INT_P (operands[3]))
3273 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
3274 else
3275 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
3276 }
3277 [(set_attr "conds" "clob")
3278 (set (attr "length")
3279 (if_then_else (eq_attr "is_thumb" "yes")
3280 (const_int 14)
3281 (const_int 12)))
3282 (set_attr "type" "multiple")]
3283 )
3284
3285 (define_code_iterator SAT [smin smax])
3286 (define_code_attr SATrev [(smin "smax") (smax "smin")])
3287 (define_code_attr SATlo [(smin "1") (smax "2")])
3288 (define_code_attr SAThi [(smin "2") (smax "1")])
3289
3290 (define_insn "*satsi_<SAT:code>"
3291 [(set (match_operand:SI 0 "s_register_operand" "=r")
3292 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
3293 (match_operand:SI 1 "const_int_operand" "i"))
3294 (match_operand:SI 2 "const_int_operand" "i")))]
3295 "TARGET_32BIT && arm_arch6
3296 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3297 {
3298 int mask;
3299 bool signed_sat;
3300 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3301 &mask, &signed_sat))
3302 gcc_unreachable ();
3303
3304 operands[1] = GEN_INT (mask);
3305 if (signed_sat)
3306 return "ssat%?\t%0, %1, %3";
3307 else
3308 return "usat%?\t%0, %1, %3";
3309 }
3310 [(set_attr "predicable" "yes")
3311 (set_attr "type" "alus_imm")]
3312 )
3313
3314 (define_insn "*satsi_<SAT:code>_shift"
3315 [(set (match_operand:SI 0 "s_register_operand" "=r")
3316 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
3317 [(match_operand:SI 4 "s_register_operand" "r")
3318 (match_operand:SI 5 "const_int_operand" "i")])
3319 (match_operand:SI 1 "const_int_operand" "i"))
3320 (match_operand:SI 2 "const_int_operand" "i")))]
3321 "TARGET_32BIT && arm_arch6
3322 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3323 {
3324 int mask;
3325 bool signed_sat;
3326 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3327 &mask, &signed_sat))
3328 gcc_unreachable ();
3329
3330 operands[1] = GEN_INT (mask);
3331 if (signed_sat)
3332 return "ssat%?\t%0, %1, %4%S3";
3333 else
3334 return "usat%?\t%0, %1, %4%S3";
3335 }
3336 [(set_attr "predicable" "yes")
3337 (set_attr "shift" "3")
3338 (set_attr "type" "logic_shift_reg")])
3339 \f
3340 ;; Shift and rotation insns
3341
3342 (define_expand "ashldi3"
3343 [(set (match_operand:DI 0 "s_register_operand")
3344 (ashift:DI (match_operand:DI 1 "s_register_operand")
3345 (match_operand:SI 2 "reg_or_int_operand")))]
3346 "TARGET_32BIT"
3347 "
3348 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
3349 operands[2], gen_reg_rtx (SImode),
3350 gen_reg_rtx (SImode));
3351 DONE;
3352 ")
3353
3354 (define_expand "ashlsi3"
3355 [(set (match_operand:SI 0 "s_register_operand")
3356 (ashift:SI (match_operand:SI 1 "s_register_operand")
3357 (match_operand:SI 2 "arm_rhs_operand")))]
3358 "TARGET_EITHER"
3359 "
3360 if (CONST_INT_P (operands[2])
3361 && (UINTVAL (operands[2])) > 31)
3362 {
3363 emit_insn (gen_movsi (operands[0], const0_rtx));
3364 DONE;
3365 }
3366 "
3367 )
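;; The special case above exists because a constant shift count of 32 or
;; more has no defined SImode result, so the expander is free to pin the
;; result to zero; ashrsi3 below instead clamps the count to 31 so the sign
;; bit is still replicated.  A minimal C sketch of the chosen semantics
;; (hypothetical helper, not part of the original sources):
;;
;;   uint32_t shl32 (uint32_t x, unsigned n)
;;   {
;;     return n > 31 ? 0 : x << n;
;;   }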
3368
3369 (define_expand "ashrdi3"
3370 [(set (match_operand:DI 0 "s_register_operand")
3371 (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
3372 (match_operand:SI 2 "reg_or_int_operand")))]
3373 "TARGET_32BIT"
3374 "
3375 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
3376 operands[2], gen_reg_rtx (SImode),
3377 gen_reg_rtx (SImode));
3378 DONE;
3379 ")
3380
3381 (define_expand "ashrsi3"
3382 [(set (match_operand:SI 0 "s_register_operand")
3383 (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
3384 (match_operand:SI 2 "arm_rhs_operand")))]
3385 "TARGET_EITHER"
3386 "
3387 if (CONST_INT_P (operands[2])
3388 && UINTVAL (operands[2]) > 31)
3389 operands[2] = GEN_INT (31);
3390 "
3391 )
3392
3393 (define_expand "lshrdi3"
3394 [(set (match_operand:DI 0 "s_register_operand")
3395 (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
3396 (match_operand:SI 2 "reg_or_int_operand")))]
3397 "TARGET_32BIT"
3398 "
3399 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
3400 operands[2], gen_reg_rtx (SImode),
3401 gen_reg_rtx (SImode));
3402 DONE;
3403 ")
3404
3405 (define_expand "lshrsi3"
3406 [(set (match_operand:SI 0 "s_register_operand")
3407 (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
3408 (match_operand:SI 2 "arm_rhs_operand")))]
3409 "TARGET_EITHER"
3410 "
3411 if (CONST_INT_P (operands[2])
3412 && (UINTVAL (operands[2])) > 31)
3413 {
3414 emit_insn (gen_movsi (operands[0], const0_rtx));
3415 DONE;
3416 }
3417 "
3418 )
3419
3420 (define_expand "rotlsi3"
3421 [(set (match_operand:SI 0 "s_register_operand")
3422 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3423 (match_operand:SI 2 "reg_or_int_operand")))]
3424 "TARGET_32BIT"
3425 "
3426 if (CONST_INT_P (operands[2]))
3427 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3428 else
3429 {
3430 rtx reg = gen_reg_rtx (SImode);
3431 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3432 operands[2] = reg;
3433 }
3434 "
3435 )
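;; The core only has a rotate-right, so a rotate-left by N is rewritten as a
;; rotate-right by (32 - N) % 32 (or by a computed 32 - N for register
;; counts).  Illustrative C model of the identity being relied on
;; (hypothetical helper name):
;;
;;   uint32_t rotl32 (uint32_t x, unsigned n)
;;   {
;;     n &= 31;
;;     return (x << n) | (x >> ((32 - n) & 31));   /* same as ROR by 32-n */
;;   }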
3436
3437 (define_expand "rotrsi3"
3438 [(set (match_operand:SI 0 "s_register_operand")
3439 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3440 (match_operand:SI 2 "arm_rhs_operand")))]
3441 "TARGET_EITHER"
3442 "
3443 if (TARGET_32BIT)
3444 {
3445 if (CONST_INT_P (operands[2])
3446 && UINTVAL (operands[2]) > 31)
3447 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3448 }
3449 else /* TARGET_THUMB1 */
3450 {
3451 if (CONST_INT_P (operands [2]))
3452 operands [2] = force_reg (SImode, operands[2]);
3453 }
3454 "
3455 )
3456
3457 (define_insn "*arm_shiftsi3"
3458 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
3459 (match_operator:SI 3 "shift_operator"
3460 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
3461 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
3462 "TARGET_32BIT"
3463 "* return arm_output_shift(operands, 0);"
3464 [(set_attr "predicable" "yes")
3465 (set_attr "arch" "t2,t2,*,*")
3466 (set_attr "predicable_short_it" "yes,yes,no,no")
3467 (set_attr "length" "4")
3468 (set_attr "shift" "1")
3469 (set_attr "type" "alu_shift_reg,alu_shift_imm,alu_shift_imm,alu_shift_reg")]
3470 )
3471
3472 (define_insn "*shiftsi3_compare0"
3473 [(set (reg:CC_NOOV CC_REGNUM)
3474 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3475 [(match_operand:SI 1 "s_register_operand" "r,r")
3476 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3477 (const_int 0)))
3478 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3479 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3480 "TARGET_32BIT"
3481 "* return arm_output_shift(operands, 1);"
3482 [(set_attr "conds" "set")
3483 (set_attr "shift" "1")
3484 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
3485 )
3486
3487 (define_insn "*shiftsi3_compare0_scratch"
3488 [(set (reg:CC_NOOV CC_REGNUM)
3489 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3490 [(match_operand:SI 1 "s_register_operand" "r,r")
3491 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3492 (const_int 0)))
3493 (clobber (match_scratch:SI 0 "=r,r"))]
3494 "TARGET_32BIT"
3495 "* return arm_output_shift(operands, 1);"
3496 [(set_attr "conds" "set")
3497 (set_attr "shift" "1")
3498 (set_attr "type" "shift_imm,shift_reg")]
3499 )
3500
3501 (define_insn "*not_shiftsi"
3502 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3503 (not:SI (match_operator:SI 3 "shift_operator"
3504 [(match_operand:SI 1 "s_register_operand" "r,r")
3505 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3506 "TARGET_32BIT"
3507 "mvn%?\\t%0, %1%S3"
3508 [(set_attr "predicable" "yes")
3509 (set_attr "shift" "1")
3510 (set_attr "arch" "32,a")
3511 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3512
3513 (define_insn "*not_shiftsi_compare0"
3514 [(set (reg:CC_NOOV CC_REGNUM)
3515 (compare:CC_NOOV
3516 (not:SI (match_operator:SI 3 "shift_operator"
3517 [(match_operand:SI 1 "s_register_operand" "r,r")
3518 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3519 (const_int 0)))
3520 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3521 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3522 "TARGET_32BIT"
3523 "mvns%?\\t%0, %1%S3"
3524 [(set_attr "conds" "set")
3525 (set_attr "shift" "1")
3526 (set_attr "arch" "32,a")
3527 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3528
3529 (define_insn "*not_shiftsi_compare0_scratch"
3530 [(set (reg:CC_NOOV CC_REGNUM)
3531 (compare:CC_NOOV
3532 (not:SI (match_operator:SI 3 "shift_operator"
3533 [(match_operand:SI 1 "s_register_operand" "r,r")
3534 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3535 (const_int 0)))
3536 (clobber (match_scratch:SI 0 "=r,r"))]
3537 "TARGET_32BIT"
3538 "mvns%?\\t%0, %1%S3"
3539 [(set_attr "conds" "set")
3540 (set_attr "shift" "1")
3541 (set_attr "arch" "32,a")
3542 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3543
3544 ;; We don't really have extzv, but defining this using shifts helps
3545 ;; to reduce register pressure later on.
3546
3547 (define_expand "extzv"
3548 [(set (match_operand 0 "s_register_operand")
3549 (zero_extract (match_operand 1 "nonimmediate_operand")
3550 (match_operand 2 "const_int_operand")
3551 (match_operand 3 "const_int_operand")))]
3552 "TARGET_THUMB1 || arm_arch_thumb2"
3553 "
3554 {
3555 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3556 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3557
3558 if (arm_arch_thumb2)
3559 {
3560 HOST_WIDE_INT width = INTVAL (operands[2]);
3561 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3562
3563 if (unaligned_access && MEM_P (operands[1])
3564 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
3565 {
3566 rtx base_addr;
3567
3568 if (BYTES_BIG_ENDIAN)
3569 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
3570 - bitpos;
3571
3572 if (width == 32)
3573 {
3574 base_addr = adjust_address (operands[1], SImode,
3575 bitpos / BITS_PER_UNIT);
3576 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3577 }
3578 else
3579 {
3580 rtx dest = operands[0];
3581 rtx tmp = gen_reg_rtx (SImode);
3582
3583 /* We may get a paradoxical subreg here. Strip it off. */
3584 if (GET_CODE (dest) == SUBREG
3585 && GET_MODE (dest) == SImode
3586 && GET_MODE (SUBREG_REG (dest)) == HImode)
3587 dest = SUBREG_REG (dest);
3588
3589 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3590 FAIL;
3591
3592 base_addr = adjust_address (operands[1], HImode,
3593 bitpos / BITS_PER_UNIT);
3594 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
3595 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3596 }
3597 DONE;
3598 }
3599 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
3600 {
3601 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3602 operands[3]));
3603 DONE;
3604 }
3605 else
3606 FAIL;
3607 }
3608
3609 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3610 FAIL;
3611
3612 operands[3] = GEN_INT (rshift);
3613
3614 if (lshift == 0)
3615 {
3616 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3617 DONE;
3618 }
3619
3620 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
3621 operands[3], gen_reg_rtx (SImode)));
3622 DONE;
3623 }"
3624 )
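;; When the UBFX path is not taken, the extraction is done with the two
;; shifts computed above.  A rough C model, assuming 1 <= width and
;; width + pos <= 32 (hypothetical helper name):
;;
;;   uint32_t extract_unsigned (uint32_t x, unsigned width, unsigned pos)
;;   {
;;     return (x << (32 - width - pos)) >> (32 - width);
;;   }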
3625
3626 ;; Helper for extzv, for the Thumb-1 register-shifts case.
3627
3628 (define_expand "extzv_t1"
3629 [(set (match_operand:SI 4 "s_register_operand")
3630 (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
3631 (match_operand:SI 2 "const_int_operand")))
3632 (set (match_operand:SI 0 "s_register_operand")
3633 (lshiftrt:SI (match_dup 4)
3634 (match_operand:SI 3 "const_int_operand")))]
3635 "TARGET_THUMB1"
3636 "")
3637
3638 (define_expand "extv"
3639 [(set (match_operand 0 "s_register_operand")
3640 (sign_extract (match_operand 1 "nonimmediate_operand")
3641 (match_operand 2 "const_int_operand")
3642 (match_operand 3 "const_int_operand")))]
3643 "arm_arch_thumb2"
3644 {
3645 HOST_WIDE_INT width = INTVAL (operands[2]);
3646 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3647
3648 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
3649 && (bitpos % BITS_PER_UNIT) == 0)
3650 {
3651 rtx base_addr;
3652
3653 if (BYTES_BIG_ENDIAN)
3654 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
3655
3656 if (width == 32)
3657 {
3658 base_addr = adjust_address (operands[1], SImode,
3659 bitpos / BITS_PER_UNIT);
3660 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3661 }
3662 else
3663 {
3664 rtx dest = operands[0];
3665 rtx tmp = gen_reg_rtx (SImode);
3666
3667 /* We may get a paradoxical subreg here. Strip it off. */
3668 if (GET_CODE (dest) == SUBREG
3669 && GET_MODE (dest) == SImode
3670 && GET_MODE (SUBREG_REG (dest)) == HImode)
3671 dest = SUBREG_REG (dest);
3672
3673 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3674 FAIL;
3675
3676 base_addr = adjust_address (operands[1], HImode,
3677 bitpos / BITS_PER_UNIT);
3678 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
3679 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3680 }
3681
3682 DONE;
3683 }
3684 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3685 FAIL;
3686 else if (GET_MODE (operands[0]) == SImode
3687 && GET_MODE (operands[1]) == SImode)
3688 {
3689 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
3690 operands[3]));
3691 DONE;
3692 }
3693
3694 FAIL;
3695 })
3696
3697 ; Helper to expand register forms of extv with the proper modes.
3698
3699 (define_expand "extv_regsi"
3700 [(set (match_operand:SI 0 "s_register_operand")
3701 (sign_extract:SI (match_operand:SI 1 "s_register_operand")
3702 (match_operand 2 "const_int_operand")
3703 (match_operand 3 "const_int_operand")))]
3704 ""
3705 {
3706 })
3707
3708 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
3709
3710 (define_insn "unaligned_loaddi"
3711 [(set (match_operand:DI 0 "s_register_operand" "=r")
3712 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
3713 UNSPEC_UNALIGNED_LOAD))]
3714 "TARGET_32BIT && TARGET_LDRD"
3715 "*
3716 return output_move_double (operands, true, NULL);
3717 "
3718 [(set_attr "length" "8")
3719 (set_attr "type" "load_8")])
3720
3721 (define_insn "unaligned_loadsi"
3722 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
3723 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
3724 UNSPEC_UNALIGNED_LOAD))]
3725 "unaligned_access"
3726 "@
3727 ldr\t%0, %1\t@ unaligned
3728 ldr%?\t%0, %1\t@ unaligned
3729 ldr%?\t%0, %1\t@ unaligned"
3730 [(set_attr "arch" "t1,t2,32")
3731 (set_attr "length" "2,2,4")
3732 (set_attr "predicable" "no,yes,yes")
3733 (set_attr "predicable_short_it" "no,yes,no")
3734 (set_attr "type" "load_4")])
3735
3736 ;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
3737 ;; address (there's no immediate format). That's tricky to support
3738 ;; here and we don't really need this pattern for that case, so only
3739 ;; enable for 32-bit ISAs.
3740 (define_insn "unaligned_loadhis"
3741 [(set (match_operand:SI 0 "s_register_operand" "=r")
3742 (sign_extend:SI
3743 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
3744 UNSPEC_UNALIGNED_LOAD)))]
3745 "unaligned_access && TARGET_32BIT"
3746 "ldrsh%?\t%0, %1\t@ unaligned"
3747 [(set_attr "predicable" "yes")
3748 (set_attr "type" "load_byte")])
3749
3750 (define_insn "unaligned_loadhiu"
3751 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
3752 (zero_extend:SI
3753 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
3754 UNSPEC_UNALIGNED_LOAD)))]
3755 "unaligned_access"
3756 "@
3757 ldrh\t%0, %1\t@ unaligned
3758 ldrh%?\t%0, %1\t@ unaligned
3759 ldrh%?\t%0, %1\t@ unaligned"
3760 [(set_attr "arch" "t1,t2,32")
3761 (set_attr "length" "2,2,4")
3762 (set_attr "predicable" "no,yes,yes")
3763 (set_attr "predicable_short_it" "no,yes,no")
3764 (set_attr "type" "load_byte")])
3765
3766 (define_insn "unaligned_storedi"
3767 [(set (match_operand:DI 0 "memory_operand" "=m")
3768 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
3769 UNSPEC_UNALIGNED_STORE))]
3770 "TARGET_32BIT && TARGET_LDRD"
3771 "*
3772 return output_move_double (operands, true, NULL);
3773 "
3774 [(set_attr "length" "8")
3775 (set_attr "type" "store_8")])
3776
3777 (define_insn "unaligned_storesi"
3778 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
3779 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
3780 UNSPEC_UNALIGNED_STORE))]
3781 "unaligned_access"
3782 "@
3783 str\t%1, %0\t@ unaligned
3784 str%?\t%1, %0\t@ unaligned
3785 str%?\t%1, %0\t@ unaligned"
3786 [(set_attr "arch" "t1,t2,32")
3787 (set_attr "length" "2,2,4")
3788 (set_attr "predicable" "no,yes,yes")
3789 (set_attr "predicable_short_it" "no,yes,no")
3790 (set_attr "type" "store_4")])
3791
3792 (define_insn "unaligned_storehi"
3793 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
3794 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
3795 UNSPEC_UNALIGNED_STORE))]
3796 "unaligned_access"
3797 "@
3798 strh\t%1, %0\t@ unaligned
3799 strh%?\t%1, %0\t@ unaligned
3800 strh%?\t%1, %0\t@ unaligned"
3801 [(set_attr "arch" "t1,t2,32")
3802 (set_attr "length" "2,2,4")
3803 (set_attr "predicable" "no,yes,yes")
3804 (set_attr "predicable_short_it" "no,yes,no")
3805 (set_attr "type" "store_4")])
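;; A typical source-level construct that ends up using these patterns when
;; unaligned_access is enabled (illustrative sketch only):
;;
;;   struct __attribute__ ((packed)) rec { char tag; int val; };
;;
;;   int get_val (struct rec *r)
;;   {
;;     return r->val;           /* may expand to an unaligned 32-bit load */
;;   }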
3806
3807
3808 (define_insn "*extv_reg"
3809 [(set (match_operand:SI 0 "s_register_operand" "=r")
3810 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3811 (match_operand:SI 2 "const_int_operand" "n")
3812 (match_operand:SI 3 "const_int_operand" "n")))]
3813 "arm_arch_thumb2
3814 && IN_RANGE (INTVAL (operands[3]), 0, 31)
3815 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
3816 "sbfx%?\t%0, %1, %3, %2"
3817 [(set_attr "length" "4")
3818 (set_attr "predicable" "yes")
3819 (set_attr "type" "bfm")]
3820 )
3821
3822 (define_insn "extzv_t2"
3823 [(set (match_operand:SI 0 "s_register_operand" "=r")
3824 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3825 (match_operand:SI 2 "const_int_operand" "n")
3826 (match_operand:SI 3 "const_int_operand" "n")))]
3827 "arm_arch_thumb2
3828 && IN_RANGE (INTVAL (operands[3]), 0, 31)
3829 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
3830 "ubfx%?\t%0, %1, %3, %2"
3831 [(set_attr "length" "4")
3832 (set_attr "predicable" "yes")
3833 (set_attr "type" "bfm")]
3834 )
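;; SBFX/UBFX extract a WIDTH-bit field starting at bit LSB.  Rough C models
;; (hypothetical helpers; assume 1 <= width, lsb + width <= 32, and GCC's
;; arithmetic right shift of signed values):
;;
;;   uint32_t ubfx (uint32_t x, unsigned lsb, unsigned width)
;;   {
;;     return (x << (32 - lsb - width)) >> (32 - width);
;;   }
;;
;;   int32_t sbfx (uint32_t x, unsigned lsb, unsigned width)
;;   {
;;     return (int32_t) (x << (32 - lsb - width)) >> (32 - width);
;;   }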
3835
3836
3837 ;; Division instructions
3838 (define_insn "divsi3"
3839 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3840 (div:SI (match_operand:SI 1 "s_register_operand" "r,r")
3841 (match_operand:SI 2 "s_register_operand" "r,r")))]
3842 "TARGET_IDIV"
3843 "@
3844 sdiv%?\t%0, %1, %2
3845 sdiv\t%0, %1, %2"
3846 [(set_attr "arch" "32,v8mb")
3847 (set_attr "predicable" "yes")
3848 (set_attr "type" "sdiv")]
3849 )
3850
3851 (define_insn "udivsi3"
3852 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3853 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
3854 (match_operand:SI 2 "s_register_operand" "r,r")))]
3855 "TARGET_IDIV"
3856 "@
3857 udiv%?\t%0, %1, %2
3858 udiv\t%0, %1, %2"
3859 [(set_attr "arch" "32,v8mb")
3860 (set_attr "predicable" "yes")
3861 (set_attr "type" "udiv")]
3862 )
3863
3864 \f
3865 ;; Unary arithmetic insns
3866
3867 (define_expand "negvsi3"
3868 [(match_operand:SI 0 "register_operand")
3869 (match_operand:SI 1 "register_operand")
3870 (match_operand 2 "")]
3871 "TARGET_32BIT"
3872 {
3873 emit_insn (gen_subsi3_compare (operands[0], const0_rtx, operands[1]));
3874 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
3875
3876 DONE;
3877 })
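;; Signed negation overflows only when the operand is INT_MIN; the expansion
;; computes 0 - x with the flags set and branches on the V flag.  A minimal
;; C statement of the condition being tested (illustrative only):
;;
;;   int negation_overflows (int32_t x)
;;   {
;;     return x == INT32_MIN;    /* the only value whose negation wraps */
;;   }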
3878
3879 (define_expand "negvdi3"
3880 [(match_operand:DI 0 "s_register_operand")
3881 (match_operand:DI 1 "s_register_operand")
3882 (match_operand 2 "")]
3883 "TARGET_ARM"
3884 {
3885 emit_insn (gen_negdi2_compare (operands[0], operands[1]));
3886 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
3887
3888 DONE;
3889 })
3890
3891
3892 (define_insn "negdi2_compare"
3893 [(set (reg:CC CC_REGNUM)
3894 (compare:CC
3895 (const_int 0)
3896 (match_operand:DI 1 "register_operand" "r,r")))
3897 (set (match_operand:DI 0 "register_operand" "=&r,&r")
3898 (minus:DI (const_int 0) (match_dup 1)))]
3899 "TARGET_ARM"
3900 "@
3901 rsbs\\t%Q0, %Q1, #0;rscs\\t%R0, %R1, #0
3902 rsbs\\t%Q0, %Q1, #0;sbcs\\t%R0, %R1, %R1, lsl #1"
3903 [(set_attr "conds" "set")
3904 (set_attr "arch" "a,t2")
3905 (set_attr "length" "8")
3906 (set_attr "type" "multiple")]
3907 )
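;; Both alternatives implement a 64-bit negate on the core register pair:
;; negate the low word, then negate the high word minus the borrow out of
;; the low word.  Rough C model (hypothetical helper name):
;;
;;   void neg64 (uint32_t lo, uint32_t hi, uint32_t *rlo, uint32_t *rhi)
;;   {
;;     *rlo = 0u - lo;
;;     *rhi = 0u - hi - (lo != 0);   /* borrow iff the low word was non-zero */
;;   }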
3908
3909 (define_expand "negsi2"
3910 [(set (match_operand:SI 0 "s_register_operand")
3911 (neg:SI (match_operand:SI 1 "s_register_operand")))]
3912 "TARGET_EITHER"
3913 ""
3914 )
3915
3916 (define_insn "*arm_negsi2"
3917 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
3918 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
3919 "TARGET_32BIT"
3920 "rsb%?\\t%0, %1, #0"
3921 [(set_attr "predicable" "yes")
3922 (set_attr "predicable_short_it" "yes,no")
3923 (set_attr "arch" "t2,*")
3924 (set_attr "length" "4")
3925 (set_attr "type" "alu_imm")]
3926 )
3927
3928 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
3929 ;; rather than (0 cmp reg). This gives the same results for unsigned
3930 ;; and equality compares, which is what we mostly need here.
3931 (define_insn "negsi2_0compare"
3932 [(set (reg:CC_RSB CC_REGNUM)
3933 (compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r"))
3934 (const_int -1)))
3935 (set (match_operand:SI 0 "s_register_operand" "=l,r")
3936 (neg:SI (match_dup 1)))]
3937 "TARGET_32BIT"
3938 "@
3939 negs\\t%0, %1
3940 rsbs\\t%0, %1, #0"
3941 [(set_attr "conds" "set")
3942 (set_attr "arch" "t2,*")
3943 (set_attr "length" "2,*")
3944 (set_attr "type" "alus_imm")]
3945 )
3946
3947 (define_insn "negsi2_carryin"
3948 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3949 (minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r"))
3950 (match_operand:SI 2 "arm_borrow_operation" "")))]
3951 "TARGET_32BIT"
3952 "@
3953 rsc\\t%0, %1, #0
3954 sbc\\t%0, %1, %1, lsl #1"
3955 [(set_attr "conds" "use")
3956 (set_attr "arch" "a,t2")
3957 (set_attr "type" "adc_imm,adc_reg")]
3958 )
3959
3960 (define_expand "negsf2"
3961 [(set (match_operand:SF 0 "s_register_operand")
3962 (neg:SF (match_operand:SF 1 "s_register_operand")))]
3963 "TARGET_32BIT && TARGET_HARD_FLOAT"
3964 ""
3965 )
3966
3967 (define_expand "negdf2"
3968 [(set (match_operand:DF 0 "s_register_operand")
3969 (neg:DF (match_operand:DF 1 "s_register_operand")))]
3970 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
3971 "")
3972
3973 ;; abssi2 doesn't really clobber the condition codes if a different register
3974 ;; is being set. To keep things simple, assume during rtl manipulations that
3975 ;; it does, but tell the final scan operator the truth. Similarly for
3976 ;; (neg (abs...))
3977
3978 (define_expand "abssi2"
3979 [(parallel
3980 [(set (match_operand:SI 0 "s_register_operand")
3981 (abs:SI (match_operand:SI 1 "s_register_operand")))
3982 (clobber (match_dup 2))])]
3983 "TARGET_EITHER"
3984 "
3985 if (TARGET_THUMB1)
3986 operands[2] = gen_rtx_SCRATCH (SImode);
3987 else
3988 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3989 ")
3990
3991 (define_insn_and_split "*arm_abssi2"
3992 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3993 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3994 (clobber (reg:CC CC_REGNUM))]
3995 "TARGET_ARM"
3996 "#"
3997 "&& reload_completed"
3998 [(const_int 0)]
3999 {
4000 /* if (which_alternative == 0) */
4001 if (REGNO(operands[0]) == REGNO(operands[1]))
4002 {
4003 /* Emit the pattern:
4004 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
4005 [(set (reg:CC CC_REGNUM)
4006 (compare:CC (match_dup 0) (const_int 0)))
4007 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
4008 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
4009 */
4010 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
4011 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4012 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4013 (gen_rtx_LT (SImode,
4014 gen_rtx_REG (CCmode, CC_REGNUM),
4015 const0_rtx)),
4016 (gen_rtx_SET (operands[0],
4017 (gen_rtx_MINUS (SImode,
4018 const0_rtx,
4019 operands[1]))))));
4020 DONE;
4021 }
4022 else
4023 {
4024 /* Emit the pattern:
4025 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
4026 [(set (match_dup 0)
4027 (xor:SI (match_dup 1)
4028 (ashiftrt:SI (match_dup 1) (const_int 31))))
4029 (set (match_dup 0)
4030 (minus:SI (match_dup 0)
4031 (ashiftrt:SI (match_dup 1) (const_int 31))))]
4032 */
4033 emit_insn (gen_rtx_SET (operands[0],
4034 gen_rtx_XOR (SImode,
4035 gen_rtx_ASHIFTRT (SImode,
4036 operands[1],
4037 GEN_INT (31)),
4038 operands[1])));
4039 emit_insn (gen_rtx_SET (operands[0],
4040 gen_rtx_MINUS (SImode,
4041 operands[0],
4042 gen_rtx_ASHIFTRT (SImode,
4043 operands[1],
4044 GEN_INT (31)))));
4045 DONE;
4046 }
4047 }
4048 [(set_attr "conds" "clob,*")
4049 (set_attr "shift" "1")
4050 (set_attr "predicable" "no, yes")
4051 (set_attr "length" "8")
4052 (set_attr "type" "multiple")]
4053 )
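;; The second alternative uses the classic sign-mask identity: with
;; m = x >> 31 (arithmetic), abs (x) = (x ^ m) - m, which is exactly the
;; eor/sub pair emitted above.  Illustrative C sketch (wraps for INT_MIN,
;; just like the instruction sequence):
;;
;;   int32_t abs32 (int32_t x)
;;   {
;;     int32_t m = x >> 31;        /* 0 or -1 (GCC: arithmetic shift) */
;;     return (x ^ m) - m;
;;   }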
4054
4055 (define_insn_and_split "*arm_neg_abssi2"
4056 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4057 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
4058 (clobber (reg:CC CC_REGNUM))]
4059 "TARGET_ARM"
4060 "#"
4061 "&& reload_completed"
4062 [(const_int 0)]
4063 {
4064 /* if (which_alternative == 0) */
4065 if (REGNO (operands[0]) == REGNO (operands[1]))
4066 {
4067 /* Emit the pattern:
4068 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
4069 */
4070 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
4071 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4072 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4073 gen_rtx_GT (SImode,
4074 gen_rtx_REG (CCmode, CC_REGNUM),
4075 const0_rtx),
4076 gen_rtx_SET (operands[0],
4077 (gen_rtx_MINUS (SImode,
4078 const0_rtx,
4079 operands[1])))));
4080 }
4081 else
4082 {
4083 /* Emit the pattern:
4084 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
4085 */
4086 emit_insn (gen_rtx_SET (operands[0],
4087 gen_rtx_XOR (SImode,
4088 gen_rtx_ASHIFTRT (SImode,
4089 operands[1],
4090 GEN_INT (31)),
4091 operands[1])));
4092 emit_insn (gen_rtx_SET (operands[0],
4093 gen_rtx_MINUS (SImode,
4094 gen_rtx_ASHIFTRT (SImode,
4095 operands[1],
4096 GEN_INT (31)),
4097 operands[0])));
4098 }
4099 DONE;
4100 }
4101 [(set_attr "conds" "clob,*")
4102 (set_attr "shift" "1")
4103 (set_attr "predicable" "no, yes")
4104 (set_attr "length" "8")
4105 (set_attr "type" "multiple")]
4106 )
4107
4108 (define_expand "abssf2"
4109 [(set (match_operand:SF 0 "s_register_operand")
4110 (abs:SF (match_operand:SF 1 "s_register_operand")))]
4111 "TARGET_32BIT && TARGET_HARD_FLOAT"
4112 "")
4113
4114 (define_expand "absdf2"
4115 [(set (match_operand:DF 0 "s_register_operand")
4116 (abs:DF (match_operand:DF 1 "s_register_operand")))]
4117 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4118 "")
4119
4120 (define_expand "sqrtsf2"
4121 [(set (match_operand:SF 0 "s_register_operand")
4122 (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
4123 "TARGET_32BIT && TARGET_HARD_FLOAT"
4124 "")
4125
4126 (define_expand "sqrtdf2"
4127 [(set (match_operand:DF 0 "s_register_operand")
4128 (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
4129 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4130 "")
4131
4132 (define_expand "one_cmplsi2"
4133 [(set (match_operand:SI 0 "s_register_operand")
4134 (not:SI (match_operand:SI 1 "s_register_operand")))]
4135 "TARGET_EITHER"
4136 ""
4137 )
4138
4139 (define_insn "*arm_one_cmplsi2"
4140 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4141 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4142 "TARGET_32BIT"
4143 "mvn%?\\t%0, %1"
4144 [(set_attr "predicable" "yes")
4145 (set_attr "predicable_short_it" "yes,no")
4146 (set_attr "arch" "t2,*")
4147 (set_attr "length" "4")
4148 (set_attr "type" "mvn_reg")]
4149 )
4150
4151 (define_insn "*notsi_compare0"
4152 [(set (reg:CC_NOOV CC_REGNUM)
4153 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4154 (const_int 0)))
4155 (set (match_operand:SI 0 "s_register_operand" "=r")
4156 (not:SI (match_dup 1)))]
4157 "TARGET_32BIT"
4158 "mvns%?\\t%0, %1"
4159 [(set_attr "conds" "set")
4160 (set_attr "type" "mvn_reg")]
4161 )
4162
4163 (define_insn "*notsi_compare0_scratch"
4164 [(set (reg:CC_NOOV CC_REGNUM)
4165 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4166 (const_int 0)))
4167 (clobber (match_scratch:SI 0 "=r"))]
4168 "TARGET_32BIT"
4169 "mvns%?\\t%0, %1"
4170 [(set_attr "conds" "set")
4171 (set_attr "type" "mvn_reg")]
4172 )
4173 \f
4174 ;; Fixed <--> Floating conversion insns
4175
4176 (define_expand "floatsihf2"
4177 [(set (match_operand:HF 0 "general_operand")
4178 (float:HF (match_operand:SI 1 "general_operand")))]
4179 "TARGET_EITHER"
4180 "
4181 {
4182 rtx op1 = gen_reg_rtx (SFmode);
4183 expand_float (op1, operands[1], 0);
4184 op1 = convert_to_mode (HFmode, op1, 0);
4185 emit_move_insn (operands[0], op1);
4186 DONE;
4187 }"
4188 )
4189
4190 (define_expand "floatdihf2"
4191 [(set (match_operand:HF 0 "general_operand")
4192 (float:HF (match_operand:DI 1 "general_operand")))]
4193 "TARGET_EITHER"
4194 "
4195 {
4196 rtx op1 = gen_reg_rtx (SFmode);
4197 expand_float (op1, operands[1], 0);
4198 op1 = convert_to_mode (HFmode, op1, 0);
4199 emit_move_insn (operands[0], op1);
4200 DONE;
4201 }"
4202 )
4203
4204 (define_expand "floatsisf2"
4205 [(set (match_operand:SF 0 "s_register_operand")
4206 (float:SF (match_operand:SI 1 "s_register_operand")))]
4207 "TARGET_32BIT && TARGET_HARD_FLOAT"
4208 "
4209 ")
4210
4211 (define_expand "floatsidf2"
4212 [(set (match_operand:DF 0 "s_register_operand")
4213 (float:DF (match_operand:SI 1 "s_register_operand")))]
4214 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4215 "
4216 ")
4217
4218 (define_expand "fix_trunchfsi2"
4219 [(set (match_operand:SI 0 "general_operand")
4220 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
4221 "TARGET_EITHER"
4222 "
4223 {
4224 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4225 expand_fix (operands[0], op1, 0);
4226 DONE;
4227 }"
4228 )
4229
4230 (define_expand "fix_trunchfdi2"
4231 [(set (match_operand:DI 0 "general_operand")
4232 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
4233 "TARGET_EITHER"
4234 "
4235 {
4236 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4237 expand_fix (operands[0], op1, 0);
4238 DONE;
4239 }"
4240 )
4241
4242 (define_expand "fix_truncsfsi2"
4243 [(set (match_operand:SI 0 "s_register_operand")
4244 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
4245 "TARGET_32BIT && TARGET_HARD_FLOAT"
4246 "
4247 ")
4248
4249 (define_expand "fix_truncdfsi2"
4250 [(set (match_operand:SI 0 "s_register_operand")
4251 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
4252 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4253 "
4254 ")
4255
4256 ;; Truncation insns
4257
4258 (define_expand "truncdfsf2"
4259 [(set (match_operand:SF 0 "s_register_operand")
4260 (float_truncate:SF
4261 (match_operand:DF 1 "s_register_operand")))]
4262 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4263 ""
4264 )
4265
4266 ;; DFmode to HFmode conversions on targets without a single-step hardware
4267 ;; instruction for it would have to go through SFmode. This is dangerous
4268 ;; as it introduces double rounding.
4269 ;;
4270 ;; Disable this pattern unless we are in an unsafe math mode, or we have
4271 ;; a single-step instruction.
4272
4273 (define_expand "truncdfhf2"
4274 [(set (match_operand:HF 0 "s_register_operand")
4275 (float_truncate:HF
4276 (match_operand:DF 1 "s_register_operand")))]
4277 "(TARGET_EITHER && flag_unsafe_math_optimizations)
4278 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
4279 {
4280 /* We don't have a direct instruction for this, so we must be in
4281 an unsafe math mode and go via SFmode. */
4282
4283 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4284 {
4285 rtx op1;
4286 op1 = convert_to_mode (SFmode, operands[1], 0);
4287 op1 = convert_to_mode (HFmode, op1, 0);
4288 emit_move_insn (operands[0], op1);
4289 DONE;
4290 }
4291 /* Otherwise, we will pick this up as a single instruction with
4292 no intermediate rounding. */
4293 }
4294 )
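;; A hand-picked example of the double rounding being avoided (illustrative
;; only, assuming IEEE binary16 for HFmode and round-to-nearest-even):
;;
;;   d = 1 + 2^-11 + 2^-25
;;   DF -> HF directly :  1 + 2^-10   (d is just above the halfway point)
;;   DF -> SF -> HF    :  1.0         (d first rounds down to 1 + 2^-11,
;;                                     which then ties to even)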
4295 \f
4296 ;; Zero and sign extension instructions.
4297
4298 (define_expand "zero_extend<mode>di2"
4299 [(set (match_operand:DI 0 "s_register_operand" "")
4300 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
4301 "TARGET_32BIT <qhs_zextenddi_cond>"
4302 {
4303 rtx res_lo, res_hi, op0_lo, op0_hi;
4304 res_lo = gen_lowpart (SImode, operands[0]);
4305 res_hi = gen_highpart (SImode, operands[0]);
4306 if (can_create_pseudo_p ())
4307 {
4308 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4309 op0_hi = gen_reg_rtx (SImode);
4310 }
4311 else
4312 {
4313 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4314 op0_hi = res_hi;
4315 }
4316 if (<MODE>mode != SImode)
4317 emit_insn (gen_rtx_SET (op0_lo,
4318 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4319 emit_insn (gen_movsi (op0_hi, const0_rtx));
4320 if (res_lo != op0_lo)
4321 emit_move_insn (res_lo, op0_lo);
4322 if (res_hi != op0_hi)
4323 emit_move_insn (res_hi, op0_hi);
4324 DONE;
4325 }
4326 )
4327
4328 (define_expand "extend<mode>di2"
4329 [(set (match_operand:DI 0 "s_register_operand" "")
4330 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
4331 "TARGET_32BIT <qhs_sextenddi_cond>"
4332 {
4333 rtx res_lo, res_hi, op0_lo, op0_hi;
4334 res_lo = gen_lowpart (SImode, operands[0]);
4335 res_hi = gen_highpart (SImode, operands[0]);
4336 if (can_create_pseudo_p ())
4337 {
4338 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4339 op0_hi = gen_reg_rtx (SImode);
4340 }
4341 else
4342 {
4343 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4344 op0_hi = res_hi;
4345 }
4346 if (<MODE>mode != SImode)
4347 emit_insn (gen_rtx_SET (op0_lo,
4348 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4349 emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
4350 if (res_lo != op0_lo)
4351 emit_move_insn (res_lo, op0_lo);
4352 if (res_hi != op0_hi)
4353 emit_move_insn (res_hi, op0_hi);
4354 DONE;
4355 }
4356 )
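;; Both DImode extension expanders split the result into SImode halves: for
;; zero extension the high word is simply 0, for sign extension it is the
;; (extended) low word shifted right arithmetically by 31.  Rough C model
;; (hypothetical helper name):
;;
;;   void sext_si_di (int32_t x, uint32_t *lo, uint32_t *hi)
;;   {
;;     *lo = (uint32_t) x;
;;     *hi = (uint32_t) (x >> 31);   /* all ones if negative, else zero */
;;   }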
4357
4358 ;; Splits for all extensions to DImode
4359 (define_split
4360 [(set (match_operand:DI 0 "s_register_operand" "")
4361 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4362 "TARGET_32BIT"
4363 [(set (match_dup 0) (match_dup 1))]
4364 {
4365 rtx lo_part = gen_lowpart (SImode, operands[0]);
4366 machine_mode src_mode = GET_MODE (operands[1]);
4367
4368 if (src_mode == SImode)
4369 emit_move_insn (lo_part, operands[1]);
4370 else
4371 emit_insn (gen_rtx_SET (lo_part,
4372 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4373 operands[0] = gen_highpart (SImode, operands[0]);
4374 operands[1] = const0_rtx;
4375 })
4376
4377 (define_split
4378 [(set (match_operand:DI 0 "s_register_operand" "")
4379 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4380 "TARGET_32BIT"
4381 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4382 {
4383 rtx lo_part = gen_lowpart (SImode, operands[0]);
4384 machine_mode src_mode = GET_MODE (operands[1]);
4385
4386 if (src_mode == SImode)
4387 emit_move_insn (lo_part, operands[1]);
4388 else
4389 emit_insn (gen_rtx_SET (lo_part,
4390 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4391 operands[1] = lo_part;
4392 operands[0] = gen_highpart (SImode, operands[0]);
4393 })
4394
4395 (define_expand "zero_extendhisi2"
4396 [(set (match_operand:SI 0 "s_register_operand")
4397 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4398 "TARGET_EITHER"
4399 {
4400 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4401 {
4402 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4403 DONE;
4404 }
4405 if (!arm_arch6 && !MEM_P (operands[1]))
4406 {
4407 rtx t = gen_lowpart (SImode, operands[1]);
4408 rtx tmp = gen_reg_rtx (SImode);
4409 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4410 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
4411 DONE;
4412 }
4413 })
4414
4415 (define_split
4416 [(set (match_operand:SI 0 "s_register_operand" "")
4417 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
4418 "!TARGET_THUMB2 && !arm_arch6"
4419 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4420 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4421 {
4422 operands[2] = gen_lowpart (SImode, operands[1]);
4423 })
4424
4425 (define_insn "*arm_zero_extendhisi2"
4426 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4427 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4428 "TARGET_ARM && arm_arch4 && !arm_arch6"
4429 "@
4430 #
4431 ldrh%?\\t%0, %1"
4432 [(set_attr "type" "alu_shift_reg,load_byte")
4433 (set_attr "predicable" "yes")]
4434 )
4435
4436 (define_insn "*arm_zero_extendhisi2_v6"
4437 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4438 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4439 "TARGET_ARM && arm_arch6"
4440 "@
4441 uxth%?\\t%0, %1
4442 ldrh%?\\t%0, %1"
4443 [(set_attr "predicable" "yes")
4444 (set_attr "type" "extend,load_byte")]
4445 )
4446
4447 (define_insn "*arm_zero_extendhisi2addsi"
4448 [(set (match_operand:SI 0 "s_register_operand" "=r")
4449 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4450 (match_operand:SI 2 "s_register_operand" "r")))]
4451 "TARGET_INT_SIMD"
4452 "uxtah%?\\t%0, %2, %1"
4453 [(set_attr "type" "alu_shift_reg")
4454 (set_attr "predicable" "yes")]
4455 )
4456
4457 (define_expand "zero_extendqisi2"
4458 [(set (match_operand:SI 0 "s_register_operand")
4459 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
4460 "TARGET_EITHER"
4461 {
4462 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
4463 {
4464 emit_insn (gen_andsi3 (operands[0],
4465 gen_lowpart (SImode, operands[1]),
4466 GEN_INT (255)));
4467 DONE;
4468 }
4469 if (!arm_arch6 && !MEM_P (operands[1]))
4470 {
4471 rtx t = gen_lowpart (SImode, operands[1]);
4472 rtx tmp = gen_reg_rtx (SImode);
4473 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4474 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
4475 DONE;
4476 }
4477 })
4478
4479 (define_split
4480 [(set (match_operand:SI 0 "s_register_operand" "")
4481 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
4482 "!arm_arch6"
4483 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4484 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4485 {
4486 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4487 if (TARGET_ARM)
4488 {
4489 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
4490 DONE;
4491 }
4492 })
4493
4494 (define_insn "*arm_zero_extendqisi2"
4495 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4496 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4497 "TARGET_ARM && !arm_arch6"
4498 "@
4499 #
4500 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4501 [(set_attr "length" "8,4")
4502 (set_attr "type" "alu_shift_reg,load_byte")
4503 (set_attr "predicable" "yes")]
4504 )
4505
4506 (define_insn "*arm_zero_extendqisi2_v6"
4507 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4508 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
4509 "TARGET_ARM && arm_arch6"
4510 "@
4511 uxtb%?\\t%0, %1
4512 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4513 [(set_attr "type" "extend,load_byte")
4514 (set_attr "predicable" "yes")]
4515 )
4516
4517 (define_insn "*arm_zero_extendqisi2addsi"
4518 [(set (match_operand:SI 0 "s_register_operand" "=r")
4519 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4520 (match_operand:SI 2 "s_register_operand" "r")))]
4521 "TARGET_INT_SIMD"
4522 "uxtab%?\\t%0, %2, %1"
4523 [(set_attr "predicable" "yes")
4524 (set_attr "type" "alu_shift_reg")]
4525 )
4526
4527 (define_split
4528 [(set (match_operand:SI 0 "s_register_operand" "")
4529 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4530 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4531 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
4532 [(set (match_dup 2) (match_dup 1))
4533 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4534 ""
4535 )
4536
4537 (define_split
4538 [(set (match_operand:SI 0 "s_register_operand" "")
4539 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4540 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4541 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
4542 [(set (match_dup 2) (match_dup 1))
4543 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4544 ""
4545 )
4546
4547
4548 (define_split
4549 [(set (match_operand:SI 0 "s_register_operand" "")
4550 (IOR_XOR:SI (and:SI (ashift:SI
4551 (match_operand:SI 1 "s_register_operand" "")
4552 (match_operand:SI 2 "const_int_operand" ""))
4553 (match_operand:SI 3 "const_int_operand" ""))
4554 (zero_extend:SI
4555 (match_operator 5 "subreg_lowpart_operator"
4556 [(match_operand:SI 4 "s_register_operand" "")]))))]
4557 "TARGET_32BIT
4558 && (UINTVAL (operands[3])
4559 == (GET_MODE_MASK (GET_MODE (operands[5]))
4560 & (GET_MODE_MASK (GET_MODE (operands[5]))
4561 << (INTVAL (operands[2])))))"
4562 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
4563 (match_dup 4)))
4564 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4565 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4566 )
4567
4568 (define_insn "*compareqi_eq0"
4569 [(set (reg:CC_Z CC_REGNUM)
4570 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4571 (const_int 0)))]
4572 "TARGET_32BIT"
4573 "tst%?\\t%0, #255"
4574 [(set_attr "conds" "set")
4575 (set_attr "predicable" "yes")
4576 (set_attr "type" "logic_imm")]
4577 )
4578
4579 (define_expand "extendhisi2"
4580 [(set (match_operand:SI 0 "s_register_operand")
4581 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4582 "TARGET_EITHER"
4583 {
4584 if (TARGET_THUMB1)
4585 {
4586 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4587 DONE;
4588 }
4589 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4590 {
4591 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4592 DONE;
4593 }
4594
4595 if (!arm_arch6 && !MEM_P (operands[1]))
4596 {
4597 rtx t = gen_lowpart (SImode, operands[1]);
4598 rtx tmp = gen_reg_rtx (SImode);
4599 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4600 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
4601 DONE;
4602 }
4603 })
4604
4605 (define_split
4606 [(parallel
4607 [(set (match_operand:SI 0 "register_operand" "")
4608 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4609 (clobber (match_scratch:SI 2 ""))])]
4610 "!arm_arch6"
4611 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4612 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4613 {
4614 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4615 })
4616
4617 ;; This pattern will only be used when ldrsh is not available
4618 (define_expand "extendhisi2_mem"
4619 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4620 (set (match_dup 3)
4621 (zero_extend:SI (match_dup 7)))
4622 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4623 (set (match_operand:SI 0 "" "")
4624 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4625 "TARGET_ARM"
4626 "
4627 {
4628 rtx mem1, mem2;
4629 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4630
4631 mem1 = change_address (operands[1], QImode, addr);
4632 mem2 = change_address (operands[1], QImode,
4633 plus_constant (Pmode, addr, 1));
4634 operands[0] = gen_lowpart (SImode, operands[0]);
4635 operands[1] = mem1;
4636 operands[2] = gen_reg_rtx (SImode);
4637 operands[3] = gen_reg_rtx (SImode);
4638 operands[6] = gen_reg_rtx (SImode);
4639 operands[7] = mem2;
4640
4641 if (BYTES_BIG_ENDIAN)
4642 {
4643 operands[4] = operands[2];
4644 operands[5] = operands[3];
4645 }
4646 else
4647 {
4648 operands[4] = operands[3];
4649 operands[5] = operands[2];
4650 }
4651 }"
4652 )
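;; A rough C model of the byte assembly above for the little-endian case
;; (hypothetical helper; relies on GCC's arithmetic right shift of signed
;; values):
;;
;;   int32_t load_s16 (const uint8_t *p)
;;   {
;;     uint32_t lo = p[0], hi = p[1];
;;     int32_t top = (int32_t) (hi << 24) >> 16;   /* sign-extended high byte */
;;     return top | (int32_t) lo;
;;   }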
4653
4654 (define_split
4655 [(set (match_operand:SI 0 "register_operand" "")
4656 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4657 "!arm_arch6"
4658 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4659 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4660 {
4661 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4662 })
4663
4664 (define_insn "*arm_extendhisi2"
4665 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4666 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4667 "TARGET_ARM && arm_arch4 && !arm_arch6"
4668 "@
4669 #
4670 ldrsh%?\\t%0, %1"
4671 [(set_attr "length" "8,4")
4672 (set_attr "type" "alu_shift_reg,load_byte")
4673 (set_attr "predicable" "yes")]
4674 )
4675
4676 ;; ??? Check Thumb-2 pool range
4677 (define_insn "*arm_extendhisi2_v6"
4678 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4679 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4680 "TARGET_32BIT && arm_arch6"
4681 "@
4682 sxth%?\\t%0, %1
4683 ldrsh%?\\t%0, %1"
4684 [(set_attr "type" "extend,load_byte")
4685 (set_attr "predicable" "yes")]
4686 )
4687
4688 (define_insn "*arm_extendhisi2addsi"
4689 [(set (match_operand:SI 0 "s_register_operand" "=r")
4690 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4691 (match_operand:SI 2 "s_register_operand" "r")))]
4692 "TARGET_INT_SIMD"
4693 "sxtah%?\\t%0, %2, %1"
4694 [(set_attr "type" "alu_shift_reg")]
4695 )
4696
4697 (define_expand "extendqihi2"
4698 [(set (match_dup 2)
4699 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
4700 (const_int 24)))
4701 (set (match_operand:HI 0 "s_register_operand")
4702 (ashiftrt:SI (match_dup 2)
4703 (const_int 24)))]
4704 "TARGET_ARM"
4705 "
4706 {
4707 if (arm_arch4 && MEM_P (operands[1]))
4708 {
4709 emit_insn (gen_rtx_SET (operands[0],
4710 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4711 DONE;
4712 }
4713 if (!s_register_operand (operands[1], QImode))
4714 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4715 operands[0] = gen_lowpart (SImode, operands[0]);
4716 operands[1] = gen_lowpart (SImode, operands[1]);
4717 operands[2] = gen_reg_rtx (SImode);
4718 }"
4719 )
4720
4721 (define_insn "*arm_extendqihi_insn"
4722 [(set (match_operand:HI 0 "s_register_operand" "=r")
4723 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4724 "TARGET_ARM && arm_arch4"
4725 "ldrsb%?\\t%0, %1"
4726 [(set_attr "type" "load_byte")
4727 (set_attr "predicable" "yes")]
4728 )
4729
4730 (define_expand "extendqisi2"
4731 [(set (match_operand:SI 0 "s_register_operand")
4732 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
4733 "TARGET_EITHER"
4734 {
4735 if (!arm_arch4 && MEM_P (operands[1]))
4736 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4737
4738 if (!arm_arch6 && !MEM_P (operands[1]))
4739 {
4740 rtx t = gen_lowpart (SImode, operands[1]);
4741 rtx tmp = gen_reg_rtx (SImode);
4742 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4743 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
4744 DONE;
4745 }
4746 })
4747
4748 (define_split
4749 [(set (match_operand:SI 0 "register_operand" "")
4750 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
4751 "!arm_arch6"
4752 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4753 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
4754 {
4755 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4756 })
4757
4758 (define_insn "*arm_extendqisi"
4759 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4760 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4761 "TARGET_ARM && arm_arch4 && !arm_arch6"
4762 "@
4763 #
4764 ldrsb%?\\t%0, %1"
4765 [(set_attr "length" "8,4")
4766 (set_attr "type" "alu_shift_reg,load_byte")
4767 (set_attr "predicable" "yes")]
4768 )
4769
4770 (define_insn "*arm_extendqisi_v6"
4771 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4772 (sign_extend:SI
4773 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4774 "TARGET_ARM && arm_arch6"
4775 "@
4776 sxtb%?\\t%0, %1
4777 ldrsb%?\\t%0, %1"
4778 [(set_attr "type" "extend,load_byte")
4779 (set_attr "predicable" "yes")]
4780 )
4781
4782 (define_insn "*arm_extendqisi2addsi"
4783 [(set (match_operand:SI 0 "s_register_operand" "=r")
4784 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4785 (match_operand:SI 2 "s_register_operand" "r")))]
4786 "TARGET_INT_SIMD"
4787 "sxtab%?\\t%0, %2, %1"
4788 [(set_attr "type" "alu_shift_reg")
4789 (set_attr "predicable" "yes")]
4790 )
4791
4792 (define_insn "arm_<sup>xtb16"
4793 [(set (match_operand:SI 0 "s_register_operand" "=r")
4794 (unspec:SI
4795 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
4796 "TARGET_INT_SIMD"
4797 "<sup>xtb16%?\\t%0, %1"
4798 [(set_attr "predicable" "yes")
4799 (set_attr "type" "alu_dsp_reg")])
4800
4801 (define_insn "arm_<simd32_op>"
4802 [(set (match_operand:SI 0 "s_register_operand" "=r")
4803 (unspec:SI
4804 [(match_operand:SI 1 "s_register_operand" "r")
4805 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
4806 "TARGET_INT_SIMD"
4807 "<simd32_op>%?\\t%0, %1, %2"
4808 [(set_attr "predicable" "yes")
4809 (set_attr "type" "alu_dsp_reg")])
4810
4811 (define_insn "arm_usada8"
4812 [(set (match_operand:SI 0 "s_register_operand" "=r")
4813 (unspec:SI
4814 [(match_operand:SI 1 "s_register_operand" "r")
4815 (match_operand:SI 2 "s_register_operand" "r")
4816 (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
4817 "TARGET_INT_SIMD"
4818 "usada8%?\\t%0, %1, %2, %3"
4819 [(set_attr "predicable" "yes")
4820 (set_attr "type" "alu_dsp_reg")])
4821
4822 (define_insn "arm_<simd32_op>"
4823 [(set (match_operand:DI 0 "s_register_operand" "=r")
4824 (unspec:DI
4825 [(match_operand:SI 1 "s_register_operand" "r")
4826 (match_operand:SI 2 "s_register_operand" "r")
4827 (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
4828 "TARGET_INT_SIMD"
4829 "<simd32_op>%?\\t%Q0, %R0, %1, %2"
4830 [(set_attr "predicable" "yes")
4831 (set_attr "type" "smlald")])
4832
4833 (define_expand "extendsfdf2"
4834 [(set (match_operand:DF 0 "s_register_operand")
4835 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
4836 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4837 ""
4838 )
4839
4840 ;; HFmode -> DFmode conversions for which we don't have an instruction
4841 ;; must go through SFmode.
4842 ;;
4843 ;; This is always safe for an extend.
4844
4845 (define_expand "extendhfdf2"
4846 [(set (match_operand:DF 0 "s_register_operand")
4847 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
4848 "TARGET_EITHER"
4849 {
4850 /* We don't have a direct instruction for this, so go via SFmode. */
4851 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4852 {
4853 rtx op1;
4854 op1 = convert_to_mode (SFmode, operands[1], 0);
4855 op1 = convert_to_mode (DFmode, op1, 0);
4856 emit_insn (gen_movdf (operands[0], op1));
4857 DONE;
4858 }
4859 /* Otherwise, we're done producing RTL and will pick up the correct
4860 pattern to do this with one rounding-step in a single instruction. */
4861 }
4862 )
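;; The two-step widening is exact because every binary16 value is exactly
;; representable in SFmode and every SFmode value in DFmode; e.g., assuming
;; the __fp16 extension type is available (hypothetical sketch):
;;
;;   double widen (__fp16 h) { return (double) (float) h; }
;;
;; always returns the same value as a direct conversion to double.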
4863 \f
4864 ;; Move insns (including loads and stores)
4865
4866 ;; XXX Just some ideas about movti.
4867 ;; I don't think these are a good idea on the ARM; there just aren't enough
4868 ;; registers.
4869 ;;(define_expand "loadti"
4870 ;; [(set (match_operand:TI 0 "s_register_operand")
4871 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
4872 ;; "" "")
4873
4874 ;;(define_expand "storeti"
4875 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
4876 ;; (match_operand:TI 1 "s_register_operand"))]
4877 ;; "" "")
4878
4879 ;;(define_expand "movti"
4880 ;; [(set (match_operand:TI 0 "general_operand")
4881 ;; (match_operand:TI 1 "general_operand"))]
4882 ;; ""
4883 ;; "
4884 ;;{
4885 ;; rtx insn;
4886 ;;
4887 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
4888 ;; operands[1] = copy_to_reg (operands[1]);
4889 ;; if (MEM_P (operands[0]))
4890 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4891 ;; else if (MEM_P (operands[1]))
4892 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4893 ;; else
4894 ;; FAIL;
4895 ;;
4896 ;; emit_insn (insn);
4897 ;; DONE;
4898 ;;}")
4899
4900 ;; Recognize garbage generated above.
4901
4902 ;;(define_insn ""
4903 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4904 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4905 ;; ""
4906 ;; "*
4907 ;; {
4908 ;; register mem = (which_alternative < 3);
4909 ;; register const char *template;
4910 ;;
4911 ;; operands[mem] = XEXP (operands[mem], 0);
4912 ;; switch (which_alternative)
4913 ;; {
4914 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4915 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4916 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4917 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4918 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4919 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4920 ;; }
4921 ;; output_asm_insn (template, operands);
4922 ;; return \"\";
4923 ;; }")
4924
4925 (define_expand "movdi"
4926 [(set (match_operand:DI 0 "general_operand")
4927 (match_operand:DI 1 "general_operand"))]
4928 "TARGET_EITHER"
4929 "
4930 gcc_checking_assert (aligned_operand (operands[0], DImode));
4931 gcc_checking_assert (aligned_operand (operands[1], DImode));
4932 if (can_create_pseudo_p ())
4933 {
4934 if (!REG_P (operands[0]))
4935 operands[1] = force_reg (DImode, operands[1]);
4936 }
4937 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
4938 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
4939 {
4940 /* Avoid LDRD's into an odd-numbered register pair in ARM state
4941 when expanding function calls. */
4942 gcc_assert (can_create_pseudo_p ());
4943 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
4944 {
4945 /* Perform load into legal reg pair first, then move. */
4946 rtx reg = gen_reg_rtx (DImode);
4947 emit_insn (gen_movdi (reg, operands[1]));
4948 operands[1] = reg;
4949 }
4950 emit_move_insn (gen_lowpart (SImode, operands[0]),
4951 gen_lowpart (SImode, operands[1]));
4952 emit_move_insn (gen_highpart (SImode, operands[0]),
4953 gen_highpart (SImode, operands[1]));
4954 DONE;
4955 }
4956 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
4957 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
4958 {
4959 /* Avoid STRD's from an odd-numbered register pair in ARM state
4960 when expanding function prologue. */
4961 gcc_assert (can_create_pseudo_p ());
4962 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
4963 ? gen_reg_rtx (DImode)
4964 : operands[0];
4965 emit_move_insn (gen_lowpart (SImode, split_dest),
4966 gen_lowpart (SImode, operands[1]));
4967 emit_move_insn (gen_highpart (SImode, split_dest),
4968 gen_highpart (SImode, operands[1]));
4969 if (split_dest != operands[0])
4970 emit_insn (gen_movdi (operands[0], split_dest));
4971 DONE;
4972 }
4973 "
4974 )
4975
4976 (define_insn "*arm_movdi"
4977 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4978 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4979 "TARGET_32BIT
4980 && !(TARGET_HARD_FLOAT)
4981 && !TARGET_IWMMXT
4982 && ( register_operand (operands[0], DImode)
4983 || register_operand (operands[1], DImode))"
4984 "*
4985 switch (which_alternative)
4986 {
4987 case 0:
4988 case 1:
4989 case 2:
4990 return \"#\";
4991 case 3:
4992 /* Cannot load it directly, split to load it via MOV / MOVT. */
4993 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
4994 return \"#\";
4995 /* Fall through. */
4996 default:
4997 return output_move_double (operands, true, NULL);
4998 }
4999 "
5000 [(set_attr "length" "8,12,16,8,8")
5001 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
5002 (set_attr "arm_pool_range" "*,*,*,1020,*")
5003 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
5004 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
5005 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
5006 )
5007
5008 (define_split
5009 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5010 (match_operand:ANY64 1 "immediate_operand" ""))]
5011 "TARGET_32BIT
5012 && reload_completed
5013 && (arm_disable_literal_pool
5014 || (arm_const_double_inline_cost (operands[1])
5015 <= arm_max_const_double_inline_cost ()))"
5016 [(const_int 0)]
5017 "
5018 arm_split_constant (SET, SImode, curr_insn,
5019 INTVAL (gen_lowpart (SImode, operands[1])),
5020 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
5021 arm_split_constant (SET, SImode, curr_insn,
5022 INTVAL (gen_highpart_mode (SImode,
5023 GET_MODE (operands[0]),
5024 operands[1])),
5025 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
5026 DONE;
5027 "
5028 )
5029
5030 ; If optimizing for size, or if we have load delay slots, then
5031 ; we want to split the constant into two separate operations.
5032 ; In both cases this may split a trivial part into a single data op
5033 ; leaving a single complex constant to load. We can also get longer
5034 ; offsets in a LDR which means we get better chances of sharing the pool
5035 ; entries. Finally, we can normally do a better job of scheduling
5036 ; LDR instructions than we can with LDM.
5037 ; This pattern will only match if the one above did not.
5038 (define_split
5039 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5040 (match_operand:ANY64 1 "const_double_operand" ""))]
5041 "TARGET_ARM && reload_completed
5042 && arm_const_double_by_parts (operands[1])"
5043 [(set (match_dup 0) (match_dup 1))
5044 (set (match_dup 2) (match_dup 3))]
5045 "
5046 operands[2] = gen_highpart (SImode, operands[0]);
5047 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
5048 operands[1]);
5049 operands[0] = gen_lowpart (SImode, operands[0]);
5050 operands[1] = gen_lowpart (SImode, operands[1]);
5051 "
5052 )
5053
5054 (define_split
5055 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5056 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
5057 "TARGET_EITHER && reload_completed"
5058 [(set (match_dup 0) (match_dup 1))
5059 (set (match_dup 2) (match_dup 3))]
5060 "
5061 operands[2] = gen_highpart (SImode, operands[0]);
5062 operands[3] = gen_highpart (SImode, operands[1]);
5063 operands[0] = gen_lowpart (SImode, operands[0]);
5064 operands[1] = gen_lowpart (SImode, operands[1]);
5065
5066 /* Handle a partial overlap. */
5067 if (rtx_equal_p (operands[0], operands[3]))
5068 {
5069 rtx tmp0 = operands[0];
5070 rtx tmp1 = operands[1];
5071
5072 operands[0] = operands[2];
5073 operands[1] = operands[3];
5074 operands[2] = tmp0;
5075 operands[3] = tmp1;
5076 }
5077 "
5078 )
5079
5080 ;; We can't actually do base+index doubleword loads if the index and
5081 ;; destination overlap. Split here so that we at least have chance to
5082 ;; schedule.
5083 (define_split
5084 [(set (match_operand:DI 0 "s_register_operand" "")
5085 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
5086 (match_operand:SI 2 "s_register_operand" ""))))]
5087 "TARGET_LDRD
5088 && reg_overlap_mentioned_p (operands[0], operands[1])
5089 && reg_overlap_mentioned_p (operands[0], operands[2])"
5090 [(set (match_dup 4)
5091 (plus:SI (match_dup 1)
5092 (match_dup 2)))
5093 (set (match_dup 0)
5094 (mem:DI (match_dup 4)))]
5095 "
5096 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
5097 "
5098 )
5099
5100 (define_expand "movsi"
5101 [(set (match_operand:SI 0 "general_operand")
5102 (match_operand:SI 1 "general_operand"))]
5103 "TARGET_EITHER"
5104 "
5105 {
5106 rtx base, offset, tmp;
5107
5108 gcc_checking_assert (aligned_operand (operands[0], SImode));
5109 gcc_checking_assert (aligned_operand (operands[1], SImode));
5110 if (TARGET_32BIT || TARGET_HAVE_MOVT)
5111 {
5112 /* Everything except mem = const or mem = mem can be done easily. */
5113 if (MEM_P (operands[0]))
5114 operands[1] = force_reg (SImode, operands[1]);
5115 if (arm_general_register_operand (operands[0], SImode)
5116 && CONST_INT_P (operands[1])
5117 && !(const_ok_for_arm (INTVAL (operands[1]))
5118 || const_ok_for_arm (~INTVAL (operands[1]))))
5119 {
5120 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
5121 {
5122 emit_insn (gen_rtx_SET (operands[0], operands[1]));
5123 DONE;
5124 }
5125 else
5126 {
5127 arm_split_constant (SET, SImode, NULL_RTX,
5128 INTVAL (operands[1]), operands[0], NULL_RTX,
5129 optimize && can_create_pseudo_p ());
5130 DONE;
5131 }
5132 }
5133 }
5134 else /* Target doesn't have MOVT... */
5135 {
5136 if (can_create_pseudo_p ())
5137 {
5138 if (!REG_P (operands[0]))
5139 operands[1] = force_reg (SImode, operands[1]);
5140 }
5141 }
5142
5143 split_const (operands[1], &base, &offset);
5144 if (INTVAL (offset) != 0
5145 && targetm.cannot_force_const_mem (SImode, operands[1]))
5146 {
5147 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5148 emit_move_insn (tmp, base);
5149 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5150 DONE;
5151 }
5152
5153 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
5154
5155 /* Recognize the case where operand[1] is a reference to thread-local
5156 data and load its address to a register. Offsets have been split off
5157 already. */
5158 if (arm_tls_referenced_p (operands[1]))
5159 operands[1] = legitimize_tls_address (operands[1], tmp);
5160 else if (flag_pic
5161 && (CONSTANT_P (operands[1])
5162 || symbol_mentioned_p (operands[1])
5163 || label_mentioned_p (operands[1])))
5164 operands[1] =
5165 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
5166 }
5167 "
5168 )
5169
5170 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5171 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5172 ;; so this does not matter.
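;; An illustrative sketch (register and symbol names are placeholders) of the
;; resulting pair:
;;   movw    r0, #:lower16:sym   @ HIGH: sets the low 16 bits, clears the top
;;   movt    r0, #:upper16:sym   @ LO_SUM (*arm_movt below): sets the top 16 bits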
5173 (define_insn "*arm_movt"
5174 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
5175 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
5176 (match_operand:SI 2 "general_operand" "i,i")))]
5177 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
5178 "@
5179 movt%?\t%0, #:upper16:%c2
5180 movt\t%0, #:upper16:%c2"
5181 [(set_attr "arch" "32,v8mb")
5182 (set_attr "predicable" "yes")
5183 (set_attr "length" "4")
5184 (set_attr "type" "alu_sreg")]
5185 )
5186
5187 (define_insn "*arm_movsi_insn"
5188 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5189 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5190 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
5191 && ( register_operand (operands[0], SImode)
5192 || register_operand (operands[1], SImode))"
5193 "@
5194 mov%?\\t%0, %1
5195 mov%?\\t%0, %1
5196 mvn%?\\t%0, #%B1
5197 movw%?\\t%0, %1
5198 ldr%?\\t%0, %1
5199 str%?\\t%1, %0"
5200 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
5201 (set_attr "predicable" "yes")
5202 (set_attr "arch" "*,*,*,v6t2,*,*")
5203 (set_attr "pool_range" "*,*,*,*,4096,*")
5204 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5205 )
5206
5207 (define_split
5208 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5209 (match_operand:SI 1 "const_int_operand" ""))]
5210 "(TARGET_32BIT || TARGET_HAVE_MOVT)
5211 && (!(const_ok_for_arm (INTVAL (operands[1]))
5212 || const_ok_for_arm (~INTVAL (operands[1]))))"
5213 [(clobber (const_int 0))]
5214 "
5215 arm_split_constant (SET, SImode, NULL_RTX,
5216 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5217 DONE;
5218 "
5219 )
5220
5221 ;; The straightforward way to materialize (symbol + offset) needs at least
5222 ;; three instructions (depending on how large the offset is), as below:
5223 ;; movw r0, #:lower16:g
5224 ;; movt r0, #:upper16:g
5225 ;; adds r0, #4
5226 ;;
5227 ;; A better way would be:
5228 ;; movw r0, #:lower16:g+4
5229 ;; movt r0, #:upper16:g+4
5230 ;;
5231 ;; The limitation of this approach is that the offset must fit in a signed
5232 ;; 16-bit value, because the current assembler only supports REL-type
5233 ;; relocations for this case. If the more powerful RELA type is supported
5234 ;; in the future, this pattern should be updated to use the better sequence.
5235 (define_split
5236 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5237 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
5238 (match_operand:SI 2 "const_int_operand" ""))))]
5239 "TARGET_THUMB
5240 && TARGET_HAVE_MOVT
5241 && arm_disable_literal_pool
5242 && reload_completed
5243 && GET_CODE (operands[1]) == SYMBOL_REF"
5244 [(clobber (const_int 0))]
5245 "
5246 int offset = INTVAL (operands[2]);
5247
5248 if (offset < -0x8000 || offset > 0x7fff)
5249 {
5250 arm_emit_movpair (operands[0], operands[1]);
5251 emit_insn (gen_rtx_SET (operands[0],
5252 gen_rtx_PLUS (SImode, operands[0], operands[2])));
5253 }
5254 else
5255 {
5256 rtx op = gen_rtx_CONST (SImode,
5257 gen_rtx_PLUS (SImode, operands[1], operands[2]));
5258 arm_emit_movpair (operands[0], op);
5259 }
5260 "
5261 )
5262
5263 ;; Split symbol_refs at a later stage (after cprop), instead of generating
5264 ;; the movt/movw pair directly at expand time. Otherwise the corresponding
5265 ;; high and lo_sum would be merged back into a memory load by cprop. When
5266 ;; the default is to prefer movt/movw over a load from the constant pool,
5267 ;; this gives better performance.
5268 (define_split
5269 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5270 (match_operand:SI 1 "general_operand" ""))]
5271 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
5272 && !target_word_relocations
5273 && !arm_tls_referenced_p (operands[1])"
5274 [(clobber (const_int 0))]
5275 {
5276 arm_emit_movpair (operands[0], operands[1]);
5277 DONE;
5278 })
5279
5280 ;; When generating PIC, we need to load the symbol offset into a register.
5281 ;; So that the optimizer does not confuse this with a normal symbol load,
5282 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5283 ;; since that is the only type of relocation we can use.
5284
5285 ;; Wrap calculation of the whole PIC address in a single pattern for the
5286 ;; benefit of optimizers, particularly PRE and HOIST. Calculation of
5287 ;; a PIC address involves two loads from memory, so we want to CSE it
5288 ;; as often as possible.
5289 ;; This pattern will be split into one of the pic_load_addr_* patterns
5290 ;; and a move after GCSE optimizations.
5291 ;;
5292 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
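;; As an illustration (register names and the pool label are placeholders),
;; after the split the two loads typically end up as:
;;   ldr     r3, .LCn            @ pic_load_addr_*: GOT offset from the pool
;;   ldr     r0, [rPIC, r3]      @ the remaining move: load from the GOT slot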
5293 (define_expand "calculate_pic_address"
5294 [(set (match_operand:SI 0 "register_operand")
5295 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
5296 (unspec:SI [(match_operand:SI 2 "" "")]
5297 UNSPEC_PIC_SYM))))]
5298 "flag_pic"
5299 )
5300
5301 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5302 (define_split
5303 [(set (match_operand:SI 0 "register_operand" "")
5304 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5305 (unspec:SI [(match_operand:SI 2 "" "")]
5306 UNSPEC_PIC_SYM))))]
5307 "flag_pic"
5308 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5309 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5310 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5311 )
5312
5313 ;; operand1 is the memory address to go into
5314 ;; pic_load_addr_32bit.
5315 ;; operand2 is the PIC label to be emitted
5316 ;; from pic_add_dot_plus_eight.
5317 ;; We do this to allow hoisting of the entire insn.
5318 (define_insn_and_split "pic_load_addr_unified"
5319 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
5320 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
5321 (match_operand:SI 2 "" "")]
5322 UNSPEC_PIC_UNIFIED))]
5323 "flag_pic"
5324 "#"
5325 "&& reload_completed"
5326 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
5327 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
5328 (match_dup 2)] UNSPEC_PIC_BASE))]
5329 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
5330 [(set_attr "type" "load_4,load_4,load_4")
5331 (set_attr "pool_range" "4096,4094,1022")
5332 (set_attr "neg_pool_range" "4084,0,0")
5333 (set_attr "arch" "a,t2,t1")
5334 (set_attr "length" "8,6,4")]
5335 )
5336
5337 ;; The rather odd constraints on the following are to force reload to leave
5338 ;; the insn alone, and to force the minipool generation pass to then move
5339 ;; the GOT symbol to memory.
5340
5341 (define_insn "pic_load_addr_32bit"
5342 [(set (match_operand:SI 0 "s_register_operand" "=r")
5343 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5344 "TARGET_32BIT && flag_pic"
5345 "ldr%?\\t%0, %1"
5346 [(set_attr "type" "load_4")
5347 (set (attr "pool_range")
5348 (if_then_else (eq_attr "is_thumb" "no")
5349 (const_int 4096)
5350 (const_int 4094)))
5351 (set (attr "neg_pool_range")
5352 (if_then_else (eq_attr "is_thumb" "no")
5353 (const_int 4084)
5354 (const_int 0)))]
5355 )
5356
5357 (define_insn "pic_load_addr_thumb1"
5358 [(set (match_operand:SI 0 "s_register_operand" "=l")
5359 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5360 "TARGET_THUMB1 && flag_pic"
5361 "ldr\\t%0, %1"
5362 [(set_attr "type" "load_4")
5363 (set (attr "pool_range") (const_int 1018))]
5364 )
5365
5366 (define_insn "pic_add_dot_plus_four"
5367 [(set (match_operand:SI 0 "register_operand" "=r")
5368 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5369 (const_int 4)
5370 (match_operand 2 "" "")]
5371 UNSPEC_PIC_BASE))]
5372 "TARGET_THUMB"
5373 "*
5374 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5375 INTVAL (operands[2]));
5376 return \"add\\t%0, %|pc\";
5377 "
5378 [(set_attr "length" "2")
5379 (set_attr "type" "alu_sreg")]
5380 )
5381
5382 (define_insn "pic_add_dot_plus_eight"
5383 [(set (match_operand:SI 0 "register_operand" "=r")
5384 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5385 (const_int 8)
5386 (match_operand 2 "" "")]
5387 UNSPEC_PIC_BASE))]
5388 "TARGET_ARM"
5389 "*
5390 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5391 INTVAL (operands[2]));
5392 return \"add%?\\t%0, %|pc, %1\";
5393 "
5394 [(set_attr "predicable" "yes")
5395 (set_attr "type" "alu_sreg")]
5396 )
5397
5398 (define_insn "tls_load_dot_plus_eight"
5399 [(set (match_operand:SI 0 "register_operand" "=r")
5400 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5401 (const_int 8)
5402 (match_operand 2 "" "")]
5403 UNSPEC_PIC_BASE)))]
5404 "TARGET_ARM"
5405 "*
5406 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5407 INTVAL (operands[2]));
5408 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5409 "
5410 [(set_attr "predicable" "yes")
5411 (set_attr "type" "load_4")]
5412 )
5413
5414 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5415 ;; followed by a load. These sequences can be crunched down to
5416 ;; tls_load_dot_plus_eight by a peephole.
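;; That is (an illustrative sketch, register names are placeholders), the pair
;;   add     r3, pc, r2
;;   ldr     r0, [r3]
;; is folded into the single
;;   ldr     r0, [pc, r2]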
5417
5418 (define_peephole2
5419 [(set (match_operand:SI 0 "register_operand" "")
5420 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5421 (const_int 8)
5422 (match_operand 1 "" "")]
5423 UNSPEC_PIC_BASE))
5424 (set (match_operand:SI 2 "arm_general_register_operand" "")
5425 (mem:SI (match_dup 0)))]
5426 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5427 [(set (match_dup 2)
5428 (mem:SI (unspec:SI [(match_dup 3)
5429 (const_int 8)
5430 (match_dup 1)]
5431 UNSPEC_PIC_BASE)))]
5432 ""
5433 )
5434
5435 (define_insn "pic_offset_arm"
5436 [(set (match_operand:SI 0 "register_operand" "=r")
5437 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5438 (unspec:SI [(match_operand:SI 2 "" "X")]
5439 UNSPEC_PIC_OFFSET))))]
5440 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5441 "ldr%?\\t%0, [%1,%2]"
5442 [(set_attr "type" "load_4")]
5443 )
5444
5445 (define_expand "builtin_setjmp_receiver"
5446 [(label_ref (match_operand 0 "" ""))]
5447 "flag_pic"
5448 "
5449 {
5450   /* r3 is clobbered by setjmp/longjmp, so we can use it as a scratch
5451 register. */
5452 if (arm_pic_register != INVALID_REGNUM)
5453 arm_load_pic_register (1UL << 3, NULL_RTX);
5454 DONE;
5455 }")
5456
5457 ;; If copying one reg to another we can set the condition codes according to
5458 ;; its value. Such a move is common after a return from a subroutine when the
5459 ;; result is being tested against zero.
5460
5461 (define_insn "*movsi_compare0"
5462 [(set (reg:CC CC_REGNUM)
5463 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5464 (const_int 0)))
5465 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5466 (match_dup 1))]
5467 "TARGET_32BIT"
5468 "@
5469 cmp%?\\t%0, #0
5470 subs%?\\t%0, %1, #0"
5471 [(set_attr "conds" "set")
5472 (set_attr "type" "alus_imm,alus_imm")]
5473 )
5474
5475 ;; Subroutine to store a half word from a register into memory.
5476 ;; Operand 0 is the source register (HImode)
5477 ;; Operand 1 is the destination address in a register (SImode)
5478
5479 ;; In both this routine and the next, we must be careful not to spill
5480 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5481 ;; can generate unrecognizable rtl.
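;; An illustrative little-endian sketch of what the expansion below emits
;; (rS = source, rA = address, rT = scratch are placeholders):
;;   strb    rS, [rA]            @ store the low byte
;;   mov     rT, rS, asr #8      @ extract the high byte
;;   strb    rT, [rA, #1]        @ store the high byte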
5482
5483 (define_expand "storehi"
5484 [;; store the low byte
5485 (set (match_operand 1 "" "") (match_dup 3))
5486 ;; extract the high byte
5487 (set (match_dup 2)
5488 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5489 ;; store the high byte
5490 (set (match_dup 4) (match_dup 5))]
5491 "TARGET_ARM"
5492 "
5493 {
5494 rtx op1 = operands[1];
5495 rtx addr = XEXP (op1, 0);
5496 enum rtx_code code = GET_CODE (addr);
5497
5498 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5499 || code == MINUS)
5500 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5501
5502 operands[4] = adjust_address (op1, QImode, 1);
5503 operands[1] = adjust_address (operands[1], QImode, 0);
5504 operands[3] = gen_lowpart (QImode, operands[0]);
5505 operands[0] = gen_lowpart (SImode, operands[0]);
5506 operands[2] = gen_reg_rtx (SImode);
5507 operands[5] = gen_lowpart (QImode, operands[2]);
5508 }"
5509 )
5510
5511 (define_expand "storehi_bigend"
5512 [(set (match_dup 4) (match_dup 3))
5513 (set (match_dup 2)
5514 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5515 (set (match_operand 1 "" "") (match_dup 5))]
5516 "TARGET_ARM"
5517 "
5518 {
5519 rtx op1 = operands[1];
5520 rtx addr = XEXP (op1, 0);
5521 enum rtx_code code = GET_CODE (addr);
5522
5523 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5524 || code == MINUS)
5525 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5526
5527 operands[4] = adjust_address (op1, QImode, 1);
5528 operands[1] = adjust_address (operands[1], QImode, 0);
5529 operands[3] = gen_lowpart (QImode, operands[0]);
5530 operands[0] = gen_lowpart (SImode, operands[0]);
5531 operands[2] = gen_reg_rtx (SImode);
5532 operands[5] = gen_lowpart (QImode, operands[2]);
5533 }"
5534 )
5535
5536 ;; Subroutine to store a half word integer constant into memory.
5537 (define_expand "storeinthi"
5538 [(set (match_operand 0 "" "")
5539 (match_operand 1 "" ""))
5540 (set (match_dup 3) (match_dup 2))]
5541 "TARGET_ARM"
5542 "
5543 {
5544 HOST_WIDE_INT value = INTVAL (operands[1]);
5545 rtx addr = XEXP (operands[0], 0);
5546 rtx op0 = operands[0];
5547 enum rtx_code code = GET_CODE (addr);
5548
5549 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5550 || code == MINUS)
5551 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5552
5553 operands[1] = gen_reg_rtx (SImode);
5554 if (BYTES_BIG_ENDIAN)
5555 {
5556 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5557 if ((value & 255) == ((value >> 8) & 255))
5558 operands[2] = operands[1];
5559 else
5560 {
5561 operands[2] = gen_reg_rtx (SImode);
5562 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5563 }
5564 }
5565 else
5566 {
5567 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5568 if ((value & 255) == ((value >> 8) & 255))
5569 operands[2] = operands[1];
5570 else
5571 {
5572 operands[2] = gen_reg_rtx (SImode);
5573 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5574 }
5575 }
5576
5577 operands[3] = adjust_address (op0, QImode, 1);
5578 operands[0] = adjust_address (operands[0], QImode, 0);
5579 operands[2] = gen_lowpart (QImode, operands[2]);
5580 operands[1] = gen_lowpart (QImode, operands[1]);
5581 }"
5582 )
5583
5584 (define_expand "storehi_single_op"
5585 [(set (match_operand:HI 0 "memory_operand")
5586 (match_operand:HI 1 "general_operand"))]
5587 "TARGET_32BIT && arm_arch4"
5588 "
5589 if (!s_register_operand (operands[1], HImode))
5590 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5591 "
5592 )
5593
5594 (define_expand "movhi"
5595 [(set (match_operand:HI 0 "general_operand")
5596 (match_operand:HI 1 "general_operand"))]
5597 "TARGET_EITHER"
5598 "
5599 gcc_checking_assert (aligned_operand (operands[0], HImode));
5600 gcc_checking_assert (aligned_operand (operands[1], HImode));
5601 if (TARGET_ARM)
5602 {
5603 if (can_create_pseudo_p ())
5604 {
5605 if (MEM_P (operands[0]))
5606 {
5607 if (arm_arch4)
5608 {
5609 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5610 DONE;
5611 }
5612 if (CONST_INT_P (operands[1]))
5613 emit_insn (gen_storeinthi (operands[0], operands[1]));
5614 else
5615 {
5616 if (MEM_P (operands[1]))
5617 operands[1] = force_reg (HImode, operands[1]);
5618 if (BYTES_BIG_ENDIAN)
5619 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5620 else
5621 emit_insn (gen_storehi (operands[1], operands[0]));
5622 }
5623 DONE;
5624 }
5625 /* Sign extend a constant, and keep it in an SImode reg. */
5626 else if (CONST_INT_P (operands[1]))
5627 {
5628 rtx reg = gen_reg_rtx (SImode);
5629 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5630
5631 /* If the constant is already valid, leave it alone. */
5632 if (!const_ok_for_arm (val))
5633 {
5634 /* If setting all the top bits will make the constant
5635 loadable in a single instruction, then set them.
5636 Otherwise, sign extend the number. */
5637
5638 if (const_ok_for_arm (~(val | ~0xffff)))
5639 val |= ~0xffff;
5640 else if (val & 0x8000)
5641 val |= ~0xffff;
5642 }
5643
5644 emit_insn (gen_movsi (reg, GEN_INT (val)));
5645 operands[1] = gen_lowpart (HImode, reg);
5646 }
5647 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5648 && MEM_P (operands[1]))
5649 {
5650 rtx reg = gen_reg_rtx (SImode);
5651
5652 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5653 operands[1] = gen_lowpart (HImode, reg);
5654 }
5655 else if (!arm_arch4)
5656 {
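	  /* Pre-ARMv4 cores have no ldrh.  If the address is known to be
	     32-bit aligned, load the word containing the halfword and, if
	     needed, shift the wanted half down; otherwise fall back to two
	     byte loads via movhi_bytes below.  */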
5657 if (MEM_P (operands[1]))
5658 {
5659 rtx base;
5660 rtx offset = const0_rtx;
5661 rtx reg = gen_reg_rtx (SImode);
5662
5663 if ((REG_P (base = XEXP (operands[1], 0))
5664 || (GET_CODE (base) == PLUS
5665 && (CONST_INT_P (offset = XEXP (base, 1)))
5666 && ((INTVAL(offset) & 1) != 1)
5667 && REG_P (base = XEXP (base, 0))))
5668 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5669 {
5670 rtx new_rtx;
5671
5672 new_rtx = widen_memory_access (operands[1], SImode,
5673 ((INTVAL (offset) & ~3)
5674 - INTVAL (offset)));
5675 emit_insn (gen_movsi (reg, new_rtx));
5676 if (((INTVAL (offset) & 2) != 0)
5677 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5678 {
5679 rtx reg2 = gen_reg_rtx (SImode);
5680
5681 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5682 reg = reg2;
5683 }
5684 }
5685 else
5686 emit_insn (gen_movhi_bytes (reg, operands[1]));
5687
5688 operands[1] = gen_lowpart (HImode, reg);
5689 }
5690 }
5691 }
5692 /* Handle loading a large integer during reload. */
5693 else if (CONST_INT_P (operands[1])
5694 && !const_ok_for_arm (INTVAL (operands[1]))
5695 && !const_ok_for_arm (~INTVAL (operands[1])))
5696 {
5697 /* Writing a constant to memory needs a scratch, which should
5698 be handled with SECONDARY_RELOADs. */
5699 gcc_assert (REG_P (operands[0]));
5700
5701 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5702 emit_insn (gen_movsi (operands[0], operands[1]));
5703 DONE;
5704 }
5705 }
5706 else if (TARGET_THUMB2)
5707 {
5708 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5709 if (can_create_pseudo_p ())
5710 {
5711 if (!REG_P (operands[0]))
5712 operands[1] = force_reg (HImode, operands[1]);
5713 /* Zero extend a constant, and keep it in an SImode reg. */
5714 else if (CONST_INT_P (operands[1]))
5715 {
5716 rtx reg = gen_reg_rtx (SImode);
5717 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5718
5719 emit_insn (gen_movsi (reg, GEN_INT (val)));
5720 operands[1] = gen_lowpart (HImode, reg);
5721 }
5722 }
5723 }
5724 else /* TARGET_THUMB1 */
5725 {
5726 if (can_create_pseudo_p ())
5727 {
5728 if (CONST_INT_P (operands[1]))
5729 {
5730 rtx reg = gen_reg_rtx (SImode);
5731
5732 emit_insn (gen_movsi (reg, operands[1]));
5733 operands[1] = gen_lowpart (HImode, reg);
5734 }
5735
5736 /* ??? We shouldn't really get invalid addresses here, but this can
5737 happen if we are passed a SP (never OK for HImode/QImode) or
5738 virtual register (also rejected as illegitimate for HImode/QImode)
5739 relative address. */
5740 /* ??? This should perhaps be fixed elsewhere, for instance, in
5741 fixup_stack_1, by checking for other kinds of invalid addresses,
5742 e.g. a bare reference to a virtual register. This may confuse the
5743 alpha though, which must handle this case differently. */
5744 if (MEM_P (operands[0])
5745 && !memory_address_p (GET_MODE (operands[0]),
5746 XEXP (operands[0], 0)))
5747 operands[0]
5748 = replace_equiv_address (operands[0],
5749 copy_to_reg (XEXP (operands[0], 0)));
5750
5751 if (MEM_P (operands[1])
5752 && !memory_address_p (GET_MODE (operands[1]),
5753 XEXP (operands[1], 0)))
5754 operands[1]
5755 = replace_equiv_address (operands[1],
5756 copy_to_reg (XEXP (operands[1], 0)));
5757
5758 if (MEM_P (operands[1]) && optimize > 0)
5759 {
5760 rtx reg = gen_reg_rtx (SImode);
5761
5762 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5763 operands[1] = gen_lowpart (HImode, reg);
5764 }
5765
5766 if (MEM_P (operands[0]))
5767 operands[1] = force_reg (HImode, operands[1]);
5768 }
5769 else if (CONST_INT_P (operands[1])
5770 && !satisfies_constraint_I (operands[1]))
5771 {
5772 /* Handle loading a large integer during reload. */
5773
5774 /* Writing a constant to memory needs a scratch, which should
5775 be handled with SECONDARY_RELOADs. */
5776 gcc_assert (REG_P (operands[0]));
5777
5778 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5779 emit_insn (gen_movsi (operands[0], operands[1]));
5780 DONE;
5781 }
5782 }
5783 "
5784 )
5785
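;; movhi_bytes loads an HImode value as two zero-extended byte loads combined
;; with a shift and an OR; it is used by movhi above when ldrh is unavailable.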
5786 (define_expand "movhi_bytes"
5787 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5788 (set (match_dup 3)
5789 (zero_extend:SI (match_dup 6)))
5790 (set (match_operand:SI 0 "" "")
5791 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5792 "TARGET_ARM"
5793 "
5794 {
5795 rtx mem1, mem2;
5796 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5797
5798 mem1 = change_address (operands[1], QImode, addr);
5799 mem2 = change_address (operands[1], QImode,
5800 plus_constant (Pmode, addr, 1));
5801 operands[0] = gen_lowpart (SImode, operands[0]);
5802 operands[1] = mem1;
5803 operands[2] = gen_reg_rtx (SImode);
5804 operands[3] = gen_reg_rtx (SImode);
5805 operands[6] = mem2;
5806
5807 if (BYTES_BIG_ENDIAN)
5808 {
5809 operands[4] = operands[2];
5810 operands[5] = operands[3];
5811 }
5812 else
5813 {
5814 operands[4] = operands[3];
5815 operands[5] = operands[2];
5816 }
5817 }"
5818 )
5819
5820 (define_expand "movhi_bigend"
5821 [(set (match_dup 2)
5822 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
5823 (const_int 16)))
5824 (set (match_dup 3)
5825 (ashiftrt:SI (match_dup 2) (const_int 16)))
5826 (set (match_operand:HI 0 "s_register_operand")
5827 (match_dup 4))]
5828 "TARGET_ARM"
5829 "
5830 operands[2] = gen_reg_rtx (SImode);
5831 operands[3] = gen_reg_rtx (SImode);
5832 operands[4] = gen_lowpart (HImode, operands[3]);
5833 "
5834 )
5835
5836 ;; Pattern to recognize the insn generated by the default case above
5837 (define_insn "*movhi_insn_arch4"
5838 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
5839 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))]
5840 "TARGET_ARM
5841 && arm_arch4 && !TARGET_HARD_FLOAT
5842 && (register_operand (operands[0], HImode)
5843 || register_operand (operands[1], HImode))"
5844 "@
5845 mov%?\\t%0, %1\\t%@ movhi
5846 mvn%?\\t%0, #%B1\\t%@ movhi
5847 movw%?\\t%0, %L1\\t%@ movhi
5848 strh%?\\t%1, %0\\t%@ movhi
5849 ldrh%?\\t%0, %1\\t%@ movhi"
5850 [(set_attr "predicable" "yes")
5851 (set_attr "pool_range" "*,*,*,*,256")
5852 (set_attr "neg_pool_range" "*,*,*,*,244")
5853 (set_attr "arch" "*,*,v6t2,*,*")
5854 (set_attr_alternative "type"
5855 [(if_then_else (match_operand 1 "const_int_operand" "")
5856 (const_string "mov_imm" )
5857 (const_string "mov_reg"))
5858 (const_string "mvn_imm")
5859 (const_string "mov_imm")
5860 (const_string "store_4")
5861 (const_string "load_4")])]
5862 )
5863
5864 (define_insn "*movhi_bytes"
5865 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
5866 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))]
5867 "TARGET_ARM && !TARGET_HARD_FLOAT"
5868 "@
5869 mov%?\\t%0, %1\\t%@ movhi
5870 mov%?\\t%0, %1\\t%@ movhi
5871 mvn%?\\t%0, #%B1\\t%@ movhi"
5872 [(set_attr "predicable" "yes")
5873 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
5874 )
5875
5876 ;; We use a DImode scratch because we may occasionally need an additional
5877 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5878 ;; to take any notice of the "o" constraints on the reload_memory_operand.
5879 ;; The reload_in<m> and reload_out<m> patterns require special constraints
5880 ;; to be correctly handled in default_secondary_reload function.
5881 (define_expand "reload_outhi"
5882 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5883 (match_operand:HI 1 "s_register_operand" "r")
5884 (match_operand:DI 2 "s_register_operand" "=&l")])]
5885 "TARGET_EITHER"
5886 "if (TARGET_ARM)
5887 arm_reload_out_hi (operands);
5888 else
5889 thumb_reload_out_hi (operands);
5890 DONE;
5891 "
5892 )
5893
5894 (define_expand "reload_inhi"
5895 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5896 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5897 (match_operand:DI 2 "s_register_operand" "=&r")])]
5898 "TARGET_EITHER"
5899 "
5900 if (TARGET_ARM)
5901 arm_reload_in_hi (operands);
5902 else
5903 thumb_reload_out_hi (operands);
5904 DONE;
5905 ")
5906
5907 (define_expand "movqi"
5908 [(set (match_operand:QI 0 "general_operand")
5909 (match_operand:QI 1 "general_operand"))]
5910 "TARGET_EITHER"
5911 "
5912   /* Everything except mem = const or mem = mem can be done easily.  */
5913
5914 if (can_create_pseudo_p ())
5915 {
5916 if (CONST_INT_P (operands[1]))
5917 {
5918 rtx reg = gen_reg_rtx (SImode);
5919
5920       /* For Thumb we want an unsigned immediate, since then we are more
5921          likely to be able to use a movs insn.  */
5922 if (TARGET_THUMB)
5923 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
5924
5925 emit_insn (gen_movsi (reg, operands[1]));
5926 operands[1] = gen_lowpart (QImode, reg);
5927 }
5928
5929 if (TARGET_THUMB)
5930 {
5931 /* ??? We shouldn't really get invalid addresses here, but this can
5932 happen if we are passed a SP (never OK for HImode/QImode) or
5933 virtual register (also rejected as illegitimate for HImode/QImode)
5934 relative address. */
5935 /* ??? This should perhaps be fixed elsewhere, for instance, in
5936 fixup_stack_1, by checking for other kinds of invalid addresses,
5937 e.g. a bare reference to a virtual register. This may confuse the
5938 alpha though, which must handle this case differently. */
5939 if (MEM_P (operands[0])
5940 && !memory_address_p (GET_MODE (operands[0]),
5941 XEXP (operands[0], 0)))
5942 operands[0]
5943 = replace_equiv_address (operands[0],
5944 copy_to_reg (XEXP (operands[0], 0)));
5945 if (MEM_P (operands[1])
5946 && !memory_address_p (GET_MODE (operands[1]),
5947 XEXP (operands[1], 0)))
5948 operands[1]
5949 = replace_equiv_address (operands[1],
5950 copy_to_reg (XEXP (operands[1], 0)));
5951 }
5952
5953 if (MEM_P (operands[1]) && optimize > 0)
5954 {
5955 rtx reg = gen_reg_rtx (SImode);
5956
5957 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5958 operands[1] = gen_lowpart (QImode, reg);
5959 }
5960
5961 if (MEM_P (operands[0]))
5962 operands[1] = force_reg (QImode, operands[1]);
5963 }
5964 else if (TARGET_THUMB
5965 && CONST_INT_P (operands[1])
5966 && !satisfies_constraint_I (operands[1]))
5967 {
5968 /* Handle loading a large integer during reload. */
5969
5970 /* Writing a constant to memory needs a scratch, which should
5971 be handled with SECONDARY_RELOADs. */
5972 gcc_assert (REG_P (operands[0]));
5973
5974 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5975 emit_insn (gen_movsi (operands[0], operands[1]));
5976 DONE;
5977 }
5978 "
5979 )
5980
5981 (define_insn "*arm_movqi_insn"
5982 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
5983 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
5984 "TARGET_32BIT
5985 && ( register_operand (operands[0], QImode)
5986 || register_operand (operands[1], QImode))"
5987 "@
5988 mov%?\\t%0, %1
5989 mov%?\\t%0, %1
5990 mov%?\\t%0, %1
5991 mov%?\\t%0, %1
5992 mvn%?\\t%0, #%B1
5993 ldrb%?\\t%0, %1
5994 strb%?\\t%1, %0
5995 ldrb%?\\t%0, %1
5996 strb%?\\t%1, %0"
5997 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
5998 (set_attr "predicable" "yes")
5999 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
6000 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
6001 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
6002 )
6003
6004 ;; HFmode moves
6005 (define_expand "movhf"
6006 [(set (match_operand:HF 0 "general_operand")
6007 (match_operand:HF 1 "general_operand"))]
6008 "TARGET_EITHER"
6009 "
6010 gcc_checking_assert (aligned_operand (operands[0], HFmode));
6011 gcc_checking_assert (aligned_operand (operands[1], HFmode));
6012 if (TARGET_32BIT)
6013 {
6014 if (MEM_P (operands[0]))
6015 operands[1] = force_reg (HFmode, operands[1]);
6016 }
6017 else /* TARGET_THUMB1 */
6018 {
6019 if (can_create_pseudo_p ())
6020 {
6021 if (!REG_P (operands[0]))
6022 operands[1] = force_reg (HFmode, operands[1]);
6023 }
6024 }
6025 "
6026 )
6027
6028 (define_insn "*arm32_movhf"
6029 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
6030 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
6031 "TARGET_32BIT && !TARGET_HARD_FLOAT
6032 && ( s_register_operand (operands[0], HFmode)
6033 || s_register_operand (operands[1], HFmode))"
6034 "*
6035 switch (which_alternative)
6036 {
6037 case 0: /* ARM register from memory */
6038 return \"ldrh%?\\t%0, %1\\t%@ __fp16\";
6039 case 1: /* memory from ARM register */
6040 return \"strh%?\\t%1, %0\\t%@ __fp16\";
6041 case 2: /* ARM register from ARM register */
6042 return \"mov%?\\t%0, %1\\t%@ __fp16\";
6043 case 3: /* ARM register from constant */
6044 {
6045 long bits;
6046 rtx ops[4];
6047
6048 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
6049 HFmode);
6050 ops[0] = operands[0];
6051 ops[1] = GEN_INT (bits);
6052 ops[2] = GEN_INT (bits & 0xff00);
6053 ops[3] = GEN_INT (bits & 0x00ff);
6054
6055 if (arm_arch_thumb2)
6056 output_asm_insn (\"movw%?\\t%0, %1\", ops);
6057 else
6058 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6059 return \"\";
6060 }
6061 default:
6062 gcc_unreachable ();
6063 }
6064 "
6065 [(set_attr "conds" "unconditional")
6066 (set_attr "type" "load_4,store_4,mov_reg,multiple")
6067 (set_attr "length" "4,4,4,8")
6068 (set_attr "predicable" "yes")]
6069 )
6070
6071 (define_expand "movsf"
6072 [(set (match_operand:SF 0 "general_operand")
6073 (match_operand:SF 1 "general_operand"))]
6074 "TARGET_EITHER"
6075 "
6076 gcc_checking_assert (aligned_operand (operands[0], SFmode));
6077 gcc_checking_assert (aligned_operand (operands[1], SFmode));
6078 if (TARGET_32BIT)
6079 {
6080 if (MEM_P (operands[0]))
6081 operands[1] = force_reg (SFmode, operands[1]);
6082 }
6083 else /* TARGET_THUMB1 */
6084 {
6085 if (can_create_pseudo_p ())
6086 {
6087 if (!REG_P (operands[0]))
6088 operands[1] = force_reg (SFmode, operands[1]);
6089 }
6090 }
6091
6092   /* Cannot load it directly; generate a load with a clobber so that it
6093      can be loaded via a GPR with MOV / MOVT.  */
6094 if (arm_disable_literal_pool
6095 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6096 && CONST_DOUBLE_P (operands[1])
6097 && TARGET_HARD_FLOAT
6098 && !vfp3_const_double_rtx (operands[1]))
6099 {
6100 rtx clobreg = gen_reg_rtx (SFmode);
6101 emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
6102 clobreg));
6103 DONE;
6104 }
6105 "
6106 )
6107
6108 ;; Transform a floating-point move of a constant to a core register into
6109 ;; an SImode operation.
6110 (define_split
6111 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6112 (match_operand:SF 1 "immediate_operand" ""))]
6113 "TARGET_EITHER
6114 && reload_completed
6115 && CONST_DOUBLE_P (operands[1])"
6116 [(set (match_dup 2) (match_dup 3))]
6117 "
6118 operands[2] = gen_lowpart (SImode, operands[0]);
6119 operands[3] = gen_lowpart (SImode, operands[1]);
6120 if (operands[2] == 0 || operands[3] == 0)
6121 FAIL;
6122 "
6123 )
6124
6125 (define_insn "*arm_movsf_soft_insn"
6126 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6127 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6128 "TARGET_32BIT
6129 && TARGET_SOFT_FLOAT
6130 && (!MEM_P (operands[0])
6131 || register_operand (operands[1], SFmode))"
6132 {
6133 switch (which_alternative)
6134 {
6135 case 0: return \"mov%?\\t%0, %1\";
6136 case 1:
6137     /* Cannot load it directly; split to load it via MOV / MOVT.  */
6138 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6139 return \"#\";
6140 return \"ldr%?\\t%0, %1\\t%@ float\";
6141 case 2: return \"str%?\\t%1, %0\\t%@ float\";
6142 default: gcc_unreachable ();
6143 }
6144 }
6145 [(set_attr "predicable" "yes")
6146 (set_attr "type" "mov_reg,load_4,store_4")
6147 (set_attr "arm_pool_range" "*,4096,*")
6148 (set_attr "thumb2_pool_range" "*,4094,*")
6149 (set_attr "arm_neg_pool_range" "*,4084,*")
6150 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6151 )
6152
6153 ;; Splitter for the above.
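;; E.g. (illustrative) moving the SFmode constant 1.0 into a core register
;; becomes a plain SImode move of its bit pattern 0x3f800000.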
6154 (define_split
6155 [(set (match_operand:SF 0 "s_register_operand")
6156 (match_operand:SF 1 "const_double_operand"))]
6157 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6158 [(const_int 0)]
6159 {
6160 long buf;
6161 real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
6162 rtx cst = gen_int_mode (buf, SImode);
6163 emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
6164 DONE;
6165 }
6166 )
6167
6168 (define_expand "movdf"
6169 [(set (match_operand:DF 0 "general_operand")
6170 (match_operand:DF 1 "general_operand"))]
6171 "TARGET_EITHER"
6172 "
6173 gcc_checking_assert (aligned_operand (operands[0], DFmode));
6174 gcc_checking_assert (aligned_operand (operands[1], DFmode));
6175 if (TARGET_32BIT)
6176 {
6177 if (MEM_P (operands[0]))
6178 operands[1] = force_reg (DFmode, operands[1]);
6179 }
6180 else /* TARGET_THUMB */
6181 {
6182 if (can_create_pseudo_p ())
6183 {
6184 if (!REG_P (operands[0]))
6185 operands[1] = force_reg (DFmode, operands[1]);
6186 }
6187 }
6188
6189   /* Cannot load it directly; generate a load with a clobber so that it
6190      can be loaded via a GPR with MOV / MOVT.  */
6191 if (arm_disable_literal_pool
6192 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6193 && CONSTANT_P (operands[1])
6194 && TARGET_HARD_FLOAT
6195 && !arm_const_double_rtx (operands[1])
6196 && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
6197 {
6198 rtx clobreg = gen_reg_rtx (DFmode);
6199 emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
6200 clobreg));
6201 DONE;
6202 }
6203 "
6204 )
6205
6206 ;; Reloading a DFmode value stored in integer regs to memory can require a
6207 ;; scratch reg.
6208 ;; Another reload_out<m> pattern that requires special constraints.
6209 (define_expand "reload_outdf"
6210 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6211 (match_operand:DF 1 "s_register_operand" "r")
6212 (match_operand:SI 2 "s_register_operand" "=&r")]
6213 "TARGET_THUMB2"
6214 "
6215 {
6216 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6217
6218 if (code == REG)
6219 operands[2] = XEXP (operands[0], 0);
6220 else if (code == POST_INC || code == PRE_DEC)
6221 {
6222 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6223 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6224 emit_insn (gen_movdi (operands[0], operands[1]));
6225 DONE;
6226 }
6227 else if (code == PRE_INC)
6228 {
6229 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6230
6231 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6232 operands[2] = reg;
6233 }
6234 else if (code == POST_DEC)
6235 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6236 else
6237 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6238 XEXP (XEXP (operands[0], 0), 1)));
6239
6240 emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
6241 operands[1]));
6242
6243 if (code == POST_DEC)
6244 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6245
6246 DONE;
6247 }"
6248 )
6249
6250 (define_insn "*movdf_soft_insn"
6251 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6252 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6253 "TARGET_32BIT && TARGET_SOFT_FLOAT
6254 && ( register_operand (operands[0], DFmode)
6255 || register_operand (operands[1], DFmode))"
6256 "*
6257 switch (which_alternative)
6258 {
6259 case 0:
6260 case 1:
6261 case 2:
6262 return \"#\";
6263 case 3:
6264     /* Cannot load it directly; split to load it via MOV / MOVT.  */
6265 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6266 return \"#\";
6267 /* Fall through. */
6268 default:
6269 return output_move_double (operands, true, NULL);
6270 }
6271 "
6272 [(set_attr "length" "8,12,16,8,8")
6273 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6274 (set_attr "arm_pool_range" "*,*,*,1020,*")
6275 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
6276 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6277 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6278 )
6279
6280 ;; Splitter for the above.
6281 (define_split
6282 [(set (match_operand:DF 0 "s_register_operand")
6283 (match_operand:DF 1 "const_double_operand"))]
6284 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6285 [(const_int 0)]
6286 {
6287 long buf[2];
6288 int order = BYTES_BIG_ENDIAN ? 1 : 0;
6289 real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
6290 unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
6291 ival |= (zext_hwi (buf[1 - order], 32) << 32);
6292 rtx cst = gen_int_mode (ival, DImode);
6293 emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
6294 DONE;
6295 }
6296 )
6297 \f
6298
6299 ;; load- and store-multiple insns
6300 ;; The ARM can load/store any set of registers, provided that they are in
6301 ;; ascending order, but these expanders assume a contiguous set.
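;; For example (illustrative), loading the contiguous set r4-r7 from the
;; address in r0 is the single instruction
;;   ldmia   r0, {r4, r5, r6, r7}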
6302
6303 (define_expand "load_multiple"
6304 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6305 (match_operand:SI 1 "" ""))
6306 (use (match_operand:SI 2 "" ""))])]
6307 "TARGET_32BIT"
6308 {
6309 HOST_WIDE_INT offset = 0;
6310
6311   /* Support only fixed-point (core) registers.  */
6312 if (!CONST_INT_P (operands[2])
6313 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6314 || INTVAL (operands[2]) < 2
6315 || !MEM_P (operands[1])
6316 || !REG_P (operands[0])
6317 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6318 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6319 FAIL;
6320
6321 operands[3]
6322 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6323 INTVAL (operands[2]),
6324 force_reg (SImode, XEXP (operands[1], 0)),
6325 FALSE, operands[1], &offset);
6326 })
6327
6328 (define_expand "store_multiple"
6329 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6330 (match_operand:SI 1 "" ""))
6331 (use (match_operand:SI 2 "" ""))])]
6332 "TARGET_32BIT"
6333 {
6334 HOST_WIDE_INT offset = 0;
6335
6336   /* Support only fixed-point (core) registers.  */
6337 if (!CONST_INT_P (operands[2])
6338 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6339 || INTVAL (operands[2]) < 2
6340 || !REG_P (operands[1])
6341 || !MEM_P (operands[0])
6342 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6343 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6344 FAIL;
6345
6346 operands[3]
6347 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6348 INTVAL (operands[2]),
6349 force_reg (SImode, XEXP (operands[0], 0)),
6350 FALSE, operands[0], &offset);
6351 })
6352
6353
6354 (define_expand "setmemsi"
6355 [(match_operand:BLK 0 "general_operand")
6356 (match_operand:SI 1 "const_int_operand")
6357 (match_operand:SI 2 "const_int_operand")
6358 (match_operand:SI 3 "const_int_operand")]
6359 "TARGET_32BIT"
6360 {
6361 if (arm_gen_setmem (operands))
6362 DONE;
6363
6364 FAIL;
6365 })
6366
6367
6368 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6369 ;; We could let this apply to smaller blocks as well, but it clobbers so
6370 ;; many registers that there is then probably a better way.
6371
6372 (define_expand "cpymemqi"
6373 [(match_operand:BLK 0 "general_operand")
6374 (match_operand:BLK 1 "general_operand")
6375 (match_operand:SI 2 "const_int_operand")
6376 (match_operand:SI 3 "const_int_operand")]
6377 ""
6378 "
6379 if (TARGET_32BIT)
6380 {
6381 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
6382 && !optimize_function_for_size_p (cfun))
6383 {
6384 if (gen_cpymem_ldrd_strd (operands))
6385 DONE;
6386 FAIL;
6387 }
6388
6389 if (arm_gen_cpymemqi (operands))
6390 DONE;
6391 FAIL;
6392 }
6393 else /* TARGET_THUMB1 */
6394 {
6395 if ( INTVAL (operands[3]) != 4
6396 || INTVAL (operands[2]) > 48)
6397 FAIL;
6398
6399 thumb_expand_cpymemqi (operands);
6400 DONE;
6401 }
6402 "
6403 )
6404 \f
6405
6406 ;; Compare & branch insns
6407 ;; The range calculations are as follows:
6408 ;; For forward branches, the address calculation returns the address of
6409 ;; the next instruction. This is 2 beyond the branch instruction.
6410 ;; For backward branches, the address calculation returns the address of
6411 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6412 ;; instruction for the shortest sequence, and 4 before the branch instruction
6413 ;; if we have to jump around an unconditional branch.
6414 ;; To the basic branch range the PC offset must be added (this is +4).
6415 ;; So for forward branches we have
6416 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6417 ;; And for backward branches we have
6418 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6419 ;;
6420 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6421 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
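;; As a worked check of the numbers above:
;;   'b':       forward  2046 - 2 + 4 = 2048,  backward -2048 - (-4) + 4 = -2040
;;   'b<cond>': forward   254 - 2 + 4 =  256,  backward  -256 - (-2) + 4 =  -250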
6422
6423 (define_expand "cbranchsi4"
6424 [(set (pc) (if_then_else
6425 (match_operator 0 "expandable_comparison_operator"
6426 [(match_operand:SI 1 "s_register_operand")
6427 (match_operand:SI 2 "nonmemory_operand")])
6428 (label_ref (match_operand 3 "" ""))
6429 (pc)))]
6430 "TARGET_EITHER"
6431 "
6432 if (!TARGET_THUMB1)
6433 {
6434 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6435 FAIL;
6436 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6437 operands[3]));
6438 DONE;
6439 }
6440 if (thumb1_cmpneg_operand (operands[2], SImode))
6441 {
6442 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6443 operands[3], operands[0]));
6444 DONE;
6445 }
6446 if (!thumb1_cmp_operand (operands[2], SImode))
6447 operands[2] = force_reg (SImode, operands[2]);
6448 ")
6449
6450 (define_expand "cbranchsf4"
6451 [(set (pc) (if_then_else
6452 (match_operator 0 "expandable_comparison_operator"
6453 [(match_operand:SF 1 "s_register_operand")
6454 (match_operand:SF 2 "vfp_compare_operand")])
6455 (label_ref (match_operand 3 "" ""))
6456 (pc)))]
6457 "TARGET_32BIT && TARGET_HARD_FLOAT"
6458 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6459 operands[3])); DONE;"
6460 )
6461
6462 (define_expand "cbranchdf4"
6463 [(set (pc) (if_then_else
6464 (match_operator 0 "expandable_comparison_operator"
6465 [(match_operand:DF 1 "s_register_operand")
6466 (match_operand:DF 2 "vfp_compare_operand")])
6467 (label_ref (match_operand 3 "" ""))
6468 (pc)))]
6469 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6470 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6471 operands[3])); DONE;"
6472 )
6473
6474 (define_expand "cbranchdi4"
6475 [(set (pc) (if_then_else
6476 (match_operator 0 "expandable_comparison_operator"
6477 [(match_operand:DI 1 "s_register_operand")
6478 (match_operand:DI 2 "reg_or_int_operand")])
6479 (label_ref (match_operand 3 "" ""))
6480 (pc)))]
6481 "TARGET_32BIT"
6482 "{
6483 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6484 FAIL;
6485 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6486 operands[3]));
6487 DONE;
6488 }"
6489 )
6490
6491 ;; Comparison and test insns
6492
6493 (define_insn "*arm_cmpsi_insn"
6494 [(set (reg:CC CC_REGNUM)
6495 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
6496 (match_operand:SI 1 "arm_add_operand" "Py,r,r,I,L")))]
6497 "TARGET_32BIT"
6498 "@
6499 cmp%?\\t%0, %1
6500 cmp%?\\t%0, %1
6501 cmp%?\\t%0, %1
6502 cmp%?\\t%0, %1
6503 cmn%?\\t%0, #%n1"
6504 [(set_attr "conds" "set")
6505 (set_attr "arch" "t2,t2,any,any,any")
6506 (set_attr "length" "2,2,4,4,4")
6507 (set_attr "predicable" "yes")
6508 (set_attr "predicable_short_it" "yes,yes,yes,no,no")
6509 (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
6510 )
6511
6512 (define_insn "*cmpsi_shiftsi"
6513 [(set (reg:CC CC_REGNUM)
6514 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r,r")
6515 (match_operator:SI 3 "shift_operator"
6516 [(match_operand:SI 1 "s_register_operand" "r,r,r")
6517 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])))]
6518 "TARGET_32BIT"
6519 "cmp\\t%0, %1%S3"
6520 [(set_attr "conds" "set")
6521 (set_attr "shift" "1")
6522 (set_attr "arch" "32,a,a")
6523 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
6524
6525 (define_insn "*cmpsi_shiftsi_swp"
6526 [(set (reg:CC_SWP CC_REGNUM)
6527 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
6528 [(match_operand:SI 1 "s_register_operand" "r,r,r")
6529 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])
6530 (match_operand:SI 0 "s_register_operand" "r,r,r")))]
6531 "TARGET_32BIT"
6532 "cmp%?\\t%0, %1%S3"
6533 [(set_attr "conds" "set")
6534 (set_attr "shift" "1")
6535 (set_attr "arch" "32,a,a")
6536 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
6537
6538 (define_insn "*arm_cmpsi_negshiftsi_si"
6539 [(set (reg:CC_Z CC_REGNUM)
6540 (compare:CC_Z
6541 (neg:SI (match_operator:SI 1 "shift_operator"
6542 [(match_operand:SI 2 "s_register_operand" "r")
6543 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
6544 (match_operand:SI 0 "s_register_operand" "r")))]
6545 "TARGET_ARM"
6546 "cmn%?\\t%0, %2%S1"
6547 [(set_attr "conds" "set")
6548 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
6549 (const_string "alus_shift_imm")
6550 (const_string "alus_shift_reg")))
6551 (set_attr "predicable" "yes")]
6552 )
6553
6554 ; This insn allows redundant compares to be removed by cse; nothing should
6555 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
6556 ; is deleted later on. The match_dup will match the mode here, so that
6557 ; mode changes of the condition codes aren't lost by this even though we don't
6558 ; specify what they are.
6559
6560 (define_insn "*deleted_compare"
6561 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
6562 "TARGET_32BIT"
6563 "\\t%@ deleted compare"
6564 [(set_attr "conds" "set")
6565 (set_attr "length" "0")
6566 (set_attr "type" "no_insn")]
6567 )
6568
6569 \f
6570 ;; Conditional branch insns
6571
6572 (define_expand "cbranch_cc"
6573 [(set (pc)
6574 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
6575 (match_operand 2 "" "")])
6576 (label_ref (match_operand 3 "" ""))
6577 (pc)))]
6578 "TARGET_32BIT"
6579 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
6580 operands[1], operands[2], NULL_RTX);
6581 operands[2] = const0_rtx;"
6582 )
6583
6584 ;;
6585 ;; Patterns to match conditional branch insns.
6586 ;;
6587
6588 (define_insn "arm_cond_branch"
6589 [(set (pc)
6590 (if_then_else (match_operator 1 "arm_comparison_operator"
6591 [(match_operand 2 "cc_register" "") (const_int 0)])
6592 (label_ref (match_operand 0 "" ""))
6593 (pc)))]
6594 "TARGET_32BIT"
6595 "*
6596 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
6597 {
6598 arm_ccfsm_state += 2;
6599 return \"\";
6600 }
6601 return \"b%d1\\t%l0\";
6602 "
6603 [(set_attr "conds" "use")
6604 (set_attr "type" "branch")
6605 (set (attr "length")
6606 (if_then_else
6607 (and (match_test "TARGET_THUMB2")
6608 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
6609 (le (minus (match_dup 0) (pc)) (const_int 256))))
6610 (const_int 2)
6611 (const_int 4)))]
6612 )
6613
6614 (define_insn "*arm_cond_branch_reversed"
6615 [(set (pc)
6616 (if_then_else (match_operator 1 "arm_comparison_operator"
6617 [(match_operand 2 "cc_register" "") (const_int 0)])
6618 (pc)
6619 (label_ref (match_operand 0 "" ""))))]
6620 "TARGET_32BIT"
6621 "*
6622 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
6623 {
6624 arm_ccfsm_state += 2;
6625 return \"\";
6626 }
6627 return \"b%D1\\t%l0\";
6628 "
6629 [(set_attr "conds" "use")
6630 (set_attr "type" "branch")
6631 (set (attr "length")
6632 (if_then_else
6633 (and (match_test "TARGET_THUMB2")
6634 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
6635 (le (minus (match_dup 0) (pc)) (const_int 256))))
6636 (const_int 2)
6637 (const_int 4)))]
6638 )
6639
6640 \f
6641
6642 ; scc insns
6643
6644 (define_expand "cstore_cc"
6645 [(set (match_operand:SI 0 "s_register_operand")
6646 (match_operator:SI 1 "" [(match_operand 2 "" "")
6647 (match_operand 3 "" "")]))]
6648 "TARGET_32BIT"
6649 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
6650 operands[2], operands[3], NULL_RTX);
6651 operands[3] = const0_rtx;"
6652 )
6653
6654 (define_insn_and_split "*mov_scc"
6655 [(set (match_operand:SI 0 "s_register_operand" "=r")
6656 (match_operator:SI 1 "arm_comparison_operator_mode"
6657 [(match_operand 2 "cc_register" "") (const_int 0)]))]
6658 "TARGET_ARM"
6659 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
6660 "TARGET_ARM"
6661 [(set (match_dup 0)
6662 (if_then_else:SI (match_dup 1)
6663 (const_int 1)
6664 (const_int 0)))]
6665 ""
6666 [(set_attr "conds" "use")
6667 (set_attr "length" "8")
6668 (set_attr "type" "multiple")]
6669 )
6670
6671 (define_insn "*negscc_borrow"
6672 [(set (match_operand:SI 0 "s_register_operand" "=r")
6673 (neg:SI (match_operand:SI 1 "arm_borrow_operation" "")))]
6674 "TARGET_32BIT"
6675 "sbc\\t%0, %0, %0"
6676 [(set_attr "conds" "use")
6677 (set_attr "length" "4")
6678 (set_attr "type" "adc_reg")]
6679 )
6680
6681 (define_insn_and_split "*mov_negscc"
6682 [(set (match_operand:SI 0 "s_register_operand" "=r")
6683 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
6684 [(match_operand 2 "cc_register" "") (const_int 0)])))]
6685 "TARGET_ARM && !arm_borrow_operation (operands[1], SImode)"
6686 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
6687 "&& true"
6688 [(set (match_dup 0)
6689 (if_then_else:SI (match_dup 1)
6690 (match_dup 3)
6691 (const_int 0)))]
6692 {
6693 operands[3] = GEN_INT (~0);
6694 }
6695 [(set_attr "conds" "use")
6696 (set_attr "length" "8")
6697 (set_attr "type" "multiple")]
6698 )
6699
6700 (define_insn_and_split "*mov_notscc"
6701 [(set (match_operand:SI 0 "s_register_operand" "=r")
6702 (not:SI (match_operator:SI 1 "arm_comparison_operator"
6703 [(match_operand 2 "cc_register" "") (const_int 0)])))]
6704 "TARGET_ARM"
6705 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
6706 "TARGET_ARM"
6707 [(set (match_dup 0)
6708 (if_then_else:SI (match_dup 1)
6709 (match_dup 3)
6710 (match_dup 4)))]
6711 {
6712 operands[3] = GEN_INT (~1);
6713 operands[4] = GEN_INT (~0);
6714 }
6715 [(set_attr "conds" "use")
6716 (set_attr "length" "8")
6717 (set_attr "type" "multiple")]
6718 )
6719
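;; For Thumb-1, the comparisons against zero below are open-coded with shift
;; sequences. A sketch of the identities relied on (x = operands[2], all
;; shifts are by 31, result is 0 or 1):
;;   x <= 0  :  ((x - 1) | x) >> 31       (logical final shift)
;;   x >= 0  :  (~x) >> 31                (logical shift)
;;   x >  0  :  ((x >> 31) - x) >> 31     (arithmetic, then logical shift)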
6720 (define_expand "cstoresi4"
6721 [(set (match_operand:SI 0 "s_register_operand")
6722 (match_operator:SI 1 "expandable_comparison_operator"
6723 [(match_operand:SI 2 "s_register_operand")
6724 (match_operand:SI 3 "reg_or_int_operand")]))]
6725 "TARGET_32BIT || TARGET_THUMB1"
6726 "{
6727 rtx op3, scratch, scratch2;
6728
6729 if (!TARGET_THUMB1)
6730 {
6731 if (!arm_add_operand (operands[3], SImode))
6732 operands[3] = force_reg (SImode, operands[3]);
6733 emit_insn (gen_cstore_cc (operands[0], operands[1],
6734 operands[2], operands[3]));
6735 DONE;
6736 }
6737
6738 if (operands[3] == const0_rtx)
6739 {
6740 switch (GET_CODE (operands[1]))
6741 {
6742 case EQ:
6743 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
6744 break;
6745
6746 case NE:
6747 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
6748 break;
6749
6750 case LE:
6751 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
6752 NULL_RTX, 0, OPTAB_WIDEN);
6753 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
6754 NULL_RTX, 0, OPTAB_WIDEN);
6755 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
6756 operands[0], 1, OPTAB_WIDEN);
6757 break;
6758
6759 case GE:
6760 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
6761 NULL_RTX, 1);
6762 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
6763 NULL_RTX, 1, OPTAB_WIDEN);
6764 break;
6765
6766 case GT:
6767 scratch = expand_binop (SImode, ashr_optab, operands[2],
6768 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
6769 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
6770 NULL_RTX, 0, OPTAB_WIDEN);
6771 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
6772 0, OPTAB_WIDEN);
6773 break;
6774
6775 /* LT is handled by generic code. No need for unsigned with 0. */
6776 default:
6777 FAIL;
6778 }
6779 DONE;
6780 }
6781
6782 switch (GET_CODE (operands[1]))
6783 {
6784 case EQ:
6785 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
6786 NULL_RTX, 0, OPTAB_WIDEN);
6787 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
6788 break;
6789
6790 case NE:
6791 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
6792 NULL_RTX, 0, OPTAB_WIDEN);
6793 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
6794 break;
6795
6796 case LE:
6797 op3 = force_reg (SImode, operands[3]);
6798
6799 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
6800 NULL_RTX, 1, OPTAB_WIDEN);
6801 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
6802 NULL_RTX, 0, OPTAB_WIDEN);
6803 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
6804 op3, operands[2]));
6805 break;
6806
6807 case GE:
6808 op3 = operands[3];
6809 if (!thumb1_cmp_operand (op3, SImode))
6810 op3 = force_reg (SImode, op3);
6811 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
6812 NULL_RTX, 0, OPTAB_WIDEN);
6813 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
6814 NULL_RTX, 1, OPTAB_WIDEN);
6815 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
6816 operands[2], op3));
6817 break;
6818
6819 case LEU:
6820 op3 = force_reg (SImode, operands[3]);
6821 scratch = force_reg (SImode, const0_rtx);
6822 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
6823 op3, operands[2]));
6824 break;
6825
6826 case GEU:
6827 op3 = operands[3];
6828 if (!thumb1_cmp_operand (op3, SImode))
6829 op3 = force_reg (SImode, op3);
6830 scratch = force_reg (SImode, const0_rtx);
6831 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
6832 operands[2], op3));
6833 break;
6834
6835 case LTU:
6836 op3 = operands[3];
6837 if (!thumb1_cmp_operand (op3, SImode))
6838 op3 = force_reg (SImode, op3);
6839 scratch = gen_reg_rtx (SImode);
6840 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
6841 break;
6842
6843 case GTU:
6844 op3 = force_reg (SImode, operands[3]);
6845 scratch = gen_reg_rtx (SImode);
6846 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
6847 break;
6848
6849 /* No good sequences for GT, LT. */
6850 default:
6851 FAIL;
6852 }
6853 DONE;
6854 }")
6855
6856 (define_expand "cstorehf4"
6857 [(set (match_operand:SI 0 "s_register_operand")
6858 (match_operator:SI 1 "expandable_comparison_operator"
6859 [(match_operand:HF 2 "s_register_operand")
6860 (match_operand:HF 3 "vfp_compare_operand")]))]
6861 "TARGET_VFP_FP16INST"
6862 {
6863 if (!arm_validize_comparison (&operands[1],
6864 &operands[2],
6865 &operands[3]))
6866 FAIL;
6867
6868 emit_insn (gen_cstore_cc (operands[0], operands[1],
6869 operands[2], operands[3]));
6870 DONE;
6871 }
6872 )
6873
6874 (define_expand "cstoresf4"
6875 [(set (match_operand:SI 0 "s_register_operand")
6876 (match_operator:SI 1 "expandable_comparison_operator"
6877 [(match_operand:SF 2 "s_register_operand")
6878 (match_operand:SF 3 "vfp_compare_operand")]))]
6879 "TARGET_32BIT && TARGET_HARD_FLOAT"
6880 "emit_insn (gen_cstore_cc (operands[0], operands[1],
6881 operands[2], operands[3])); DONE;"
6882 )
6883
6884 (define_expand "cstoredf4"
6885 [(set (match_operand:SI 0 "s_register_operand")
6886 (match_operator:SI 1 "expandable_comparison_operator"
6887 [(match_operand:DF 2 "s_register_operand")
6888 (match_operand:DF 3 "vfp_compare_operand")]))]
6889 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6890 "emit_insn (gen_cstore_cc (operands[0], operands[1],
6891 operands[2], operands[3])); DONE;"
6892 )
6893
6894 (define_expand "cstoredi4"
6895 [(set (match_operand:SI 0 "s_register_operand")
6896 (match_operator:SI 1 "expandable_comparison_operator"
6897 [(match_operand:DI 2 "s_register_operand")
6898 (match_operand:DI 3 "reg_or_int_operand")]))]
6899 "TARGET_32BIT"
6900 "{
6901 if (!arm_validize_comparison (&operands[1],
6902 &operands[2],
6903 &operands[3]))
6904 FAIL;
6905 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
6906 operands[3]));
6907 DONE;
6908 }"
6909 )
6910
6911 \f
6912 ;; Conditional move insns
6913
6914 (define_expand "movsicc"
6915 [(set (match_operand:SI 0 "s_register_operand")
6916 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
6917 (match_operand:SI 2 "arm_not_operand")
6918 (match_operand:SI 3 "arm_not_operand")))]
6919 "TARGET_32BIT"
6920 "
6921 {
6922 enum rtx_code code;
6923 rtx ccreg;
6924
6925 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6926 &XEXP (operands[1], 1)))
6927 FAIL;
6928
6929 code = GET_CODE (operands[1]);
6930 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6931 XEXP (operands[1], 1), NULL_RTX);
6932 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6933 }"
6934 )
6935
6936 (define_expand "movhfcc"
6937 [(set (match_operand:HF 0 "s_register_operand")
6938 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
6939 (match_operand:HF 2 "s_register_operand")
6940 (match_operand:HF 3 "s_register_operand")))]
6941 "TARGET_VFP_FP16INST"
6942 "
6943 {
6944 enum rtx_code code = GET_CODE (operands[1]);
6945 rtx ccreg;
6946
6947 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6948 &XEXP (operands[1], 1)))
6949 FAIL;
6950
6951 code = GET_CODE (operands[1]);
6952 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6953 XEXP (operands[1], 1), NULL_RTX);
6954 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6955 }"
6956 )
6957
6958 (define_expand "movsfcc"
6959 [(set (match_operand:SF 0 "s_register_operand")
6960 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
6961 (match_operand:SF 2 "s_register_operand")
6962 (match_operand:SF 3 "s_register_operand")))]
6963 "TARGET_32BIT && TARGET_HARD_FLOAT"
6964 "
6965 {
6966 enum rtx_code code = GET_CODE (operands[1]);
6967 rtx ccreg;
6968
6969 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6970 &XEXP (operands[1], 1)))
6971 FAIL;
6972
6973 code = GET_CODE (operands[1]);
6974 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6975 XEXP (operands[1], 1), NULL_RTX);
6976 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6977 }"
6978 )
6979
6980 (define_expand "movdfcc"
6981 [(set (match_operand:DF 0 "s_register_operand")
6982 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
6983 (match_operand:DF 2 "s_register_operand")
6984 (match_operand:DF 3 "s_register_operand")))]
6985 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
6986 "
6987 {
6988 enum rtx_code code = GET_CODE (operands[1]);
6989 rtx ccreg;
6990
6991 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6992 &XEXP (operands[1], 1)))
6993 FAIL;
6994 code = GET_CODE (operands[1]);
6995 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6996 XEXP (operands[1], 1), NULL_RTX);
6997 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6998 }"
6999 )
7000
7001 (define_insn "*cmov<mode>"
7002 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
7003 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
7004 [(match_operand 2 "cc_register" "") (const_int 0)])
7005 (match_operand:SDF 3 "s_register_operand"
7006 "<F_constraint>")
7007 (match_operand:SDF 4 "s_register_operand"
7008 "<F_constraint>")))]
7009 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
7010 "*
7011 {
7012 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7013 switch (code)
7014 {
7015 case ARM_GE:
7016 case ARM_GT:
7017 case ARM_EQ:
7018 case ARM_VS:
7019 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
7020 case ARM_LT:
7021 case ARM_LE:
7022 case ARM_NE:
7023 case ARM_VC:
7024 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
7025 default:
7026 gcc_unreachable ();
7027 }
7028 return \"\";
7029 }"
7030 [(set_attr "conds" "use")
7031 (set_attr "type" "fcsel")]
7032 )
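;; Illustrative only: VSEL provides just the GE/GT/EQ/VS condition codes, so
;; the inverse conditions are handled above by swapping the two source
;; operands.  A C sketch of source that can map onto this pattern (whether it
;; does depends on the FPU and optimization options):
;;
;;   float fsel (float a, float b, float c, float d)
;;   {
;;     return a < b ? c : d;   /* e.g. vcmp + vselge with c and d swapped */
;;   }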
7033
7034 (define_insn "*cmovhf"
7035 [(set (match_operand:HF 0 "s_register_operand" "=t")
7036 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
7037 [(match_operand 2 "cc_register" "") (const_int 0)])
7038 (match_operand:HF 3 "s_register_operand" "t")
7039 (match_operand:HF 4 "s_register_operand" "t")))]
7040 "TARGET_VFP_FP16INST"
7041 "*
7042 {
7043 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7044 switch (code)
7045 {
7046 case ARM_GE:
7047 case ARM_GT:
7048 case ARM_EQ:
7049 case ARM_VS:
7050 return \"vsel%d1.f16\\t%0, %3, %4\";
7051 case ARM_LT:
7052 case ARM_LE:
7053 case ARM_NE:
7054 case ARM_VC:
7055 return \"vsel%D1.f16\\t%0, %4, %3\";
7056 default:
7057 gcc_unreachable ();
7058 }
7059 return \"\";
7060 }"
7061 [(set_attr "conds" "use")
7062 (set_attr "type" "fcsel")]
7063 )
7064
7065 (define_insn_and_split "*movsicc_insn"
7066 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7067 (if_then_else:SI
7068 (match_operator 3 "arm_comparison_operator"
7069 [(match_operand 4 "cc_register" "") (const_int 0)])
7070 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7071 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7072 "TARGET_ARM"
7073 "@
7074 mov%D3\\t%0, %2
7075 mvn%D3\\t%0, #%B2
7076 mov%d3\\t%0, %1
7077 mvn%d3\\t%0, #%B1
7078 #
7079 #
7080 #
7081 #"
7082 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7083 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7084 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7085 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7086 "&& reload_completed"
7087 [(const_int 0)]
7088 {
7089 enum rtx_code rev_code;
7090 machine_mode mode;
7091 rtx rev_cond;
7092
7093 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7094 operands[3],
7095 gen_rtx_SET (operands[0], operands[1])));
7096
7097 rev_code = GET_CODE (operands[3]);
7098 mode = GET_MODE (operands[4]);
7099 if (mode == CCFPmode || mode == CCFPEmode)
7100 rev_code = reverse_condition_maybe_unordered (rev_code);
7101 else
7102 rev_code = reverse_condition (rev_code);
7103
7104 rev_cond = gen_rtx_fmt_ee (rev_code,
7105 VOIDmode,
7106 operands[4],
7107 const0_rtx);
7108 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7109 rev_cond,
7110 gen_rtx_SET (operands[0], operands[2])));
7111 DONE;
7112 }
7113 [(set_attr "length" "4,4,4,4,8,8,8,8")
7114 (set_attr "conds" "use")
7115 (set_attr_alternative "type"
7116 [(if_then_else (match_operand 2 "const_int_operand" "")
7117 (const_string "mov_imm")
7118 (const_string "mov_reg"))
7119 (const_string "mvn_imm")
7120 (if_then_else (match_operand 1 "const_int_operand" "")
7121 (const_string "mov_imm")
7122 (const_string "mov_reg"))
7123 (const_string "mvn_imm")
7124 (const_string "multiple")
7125 (const_string "multiple")
7126 (const_string "multiple")
7127 (const_string "multiple")])]
7128 )
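;; Illustrative only: the alternatives above cover simple C selections once
;; the condition is already in the flags; a sketch (actual code generation
;; depends on how the comparison is formed):
;;
;;   int select (int cond, int a, int b)
;;   {
;;     return cond ? a : b;   /* e.g. cmp cond, #0 ; movne/moveq pair (alt4) */
;;   }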
7129
7130 (define_insn "*movsfcc_soft_insn"
7131 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7132 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7133 [(match_operand 4 "cc_register" "") (const_int 0)])
7134 (match_operand:SF 1 "s_register_operand" "0,r")
7135 (match_operand:SF 2 "s_register_operand" "r,0")))]
7136 "TARGET_ARM && TARGET_SOFT_FLOAT"
7137 "@
7138 mov%D3\\t%0, %2
7139 mov%d3\\t%0, %1"
7140 [(set_attr "conds" "use")
7141 (set_attr "type" "mov_reg")]
7142 )
7143
7144 \f
7145 ;; Jump and linkage insns
7146
7147 (define_expand "jump"
7148 [(set (pc)
7149 (label_ref (match_operand 0 "" "")))]
7150 "TARGET_EITHER"
7151 ""
7152 )
7153
7154 (define_insn "*arm_jump"
7155 [(set (pc)
7156 (label_ref (match_operand 0 "" "")))]
7157 "TARGET_32BIT"
7158 "*
7159 {
7160 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7161 {
7162 arm_ccfsm_state += 2;
7163 return \"\";
7164 }
7165 return \"b%?\\t%l0\";
7166 }
7167 "
7168 [(set_attr "predicable" "yes")
7169 (set (attr "length")
7170 (if_then_else
7171 (and (match_test "TARGET_THUMB2")
7172 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7173 (le (minus (match_dup 0) (pc)) (const_int 2048))))
7174 (const_int 2)
7175 (const_int 4)))
7176 (set_attr "type" "branch")]
7177 )
7178
7179 (define_expand "call"
7180 [(parallel [(call (match_operand 0 "memory_operand")
7181 (match_operand 1 "general_operand"))
7182 (use (match_operand 2 "" ""))
7183 (clobber (reg:SI LR_REGNUM))])]
7184 "TARGET_EITHER"
7185 "
7186 {
7187 rtx callee, pat;
7188 tree addr = MEM_EXPR (operands[0]);
7189
7190 /* In an untyped call, we can get NULL for operand 2. */
7191 if (operands[2] == NULL_RTX)
7192 operands[2] = const0_rtx;
7193
7194 /* Decide if we should generate indirect calls by loading the
7195 32-bit address of the callee into a register before performing the
7196 branch and link. */
7197 callee = XEXP (operands[0], 0);
7198 if (GET_CODE (callee) == SYMBOL_REF
7199 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7200 : !REG_P (callee))
7201 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7202
7203 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
7204 /* Indirect call: set r9 with FDPIC value of callee. */
7205 XEXP (operands[0], 0)
7206 = arm_load_function_descriptor (XEXP (operands[0], 0));
7207
7208 if (detect_cmse_nonsecure_call (addr))
7209 {
7210 pat = gen_nonsecure_call_internal (operands[0], operands[1],
7211 operands[2]);
7212 emit_call_insn (pat);
7213 }
7214 else
7215 {
7216 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7217 arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
7218 }
7219
7220 /* Restore FDPIC register (r9) after call. */
7221 if (TARGET_FDPIC)
7222 {
7223 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7224 rtx initial_fdpic_reg
7225 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7226
7227 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7228 initial_fdpic_reg));
7229 }
7230
7231 DONE;
7232 }"
7233 )
7234
7235 (define_insn "restore_pic_register_after_call"
7236 [(set (match_operand:SI 0 "s_register_operand" "+r,r")
7237 (unspec:SI [(match_dup 0)
7238 (match_operand:SI 1 "nonimmediate_operand" "r,m")]
7239 UNSPEC_PIC_RESTORE))]
7240 ""
7241 "@
7242 mov\t%0, %1
7243 ldr\t%0, %1"
7244 )
7245
7246 (define_expand "call_internal"
7247 [(parallel [(call (match_operand 0 "memory_operand")
7248 (match_operand 1 "general_operand"))
7249 (use (match_operand 2 "" ""))
7250 (clobber (reg:SI LR_REGNUM))])])
7251
7252 (define_expand "nonsecure_call_internal"
7253 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
7254 UNSPEC_NONSECURE_MEM)
7255 (match_operand 1 "general_operand"))
7256 (use (match_operand 2 "" ""))
7257 (clobber (reg:SI LR_REGNUM))])]
7258 "use_cmse"
7259 "
7260 {
7261 rtx tmp;
7262 tmp = copy_to_suggested_reg (XEXP (operands[0], 0),
7263 gen_rtx_REG (SImode, R4_REGNUM),
7264 SImode);
7265
7266 operands[0] = replace_equiv_address (operands[0], tmp);
7267 }")
7268
7269 (define_insn "*call_reg_armv5"
7270 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7271 (match_operand 1 "" ""))
7272 (use (match_operand 2 "" ""))
7273 (clobber (reg:SI LR_REGNUM))]
7274 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7275 "blx%?\\t%0"
7276 [(set_attr "type" "call")]
7277 )
7278
7279 (define_insn "*call_reg_arm"
7280 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7281 (match_operand 1 "" ""))
7282 (use (match_operand 2 "" ""))
7283 (clobber (reg:SI LR_REGNUM))]
7284 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7285 "*
7286 return output_call (operands);
7287 "
7288 ;; The length is the worst case; normally only two instructions are needed.
7289 [(set_attr "length" "12")
7290 (set_attr "type" "call")]
7291 )
7292
7293
7294 (define_expand "call_value"
7295 [(parallel [(set (match_operand 0 "" "")
7296 (call (match_operand 1 "memory_operand")
7297 (match_operand 2 "general_operand")))
7298 (use (match_operand 3 "" ""))
7299 (clobber (reg:SI LR_REGNUM))])]
7300 "TARGET_EITHER"
7301 "
7302 {
7303 rtx pat, callee;
7304 tree addr = MEM_EXPR (operands[1]);
7305
7306 /* In an untyped call, we can get NULL for operand 3. */
7307 if (operands[3] == 0)
7308 operands[3] = const0_rtx;
7309
7310 /* Decide if we should generate indirect calls by loading the
7311 32-bit address of the callee into a register before performing the
7312 branch and link. */
7313 callee = XEXP (operands[1], 0);
7314 if (GET_CODE (callee) == SYMBOL_REF
7315 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7316 : !REG_P (callee))
7317 XEXP (operands[1], 0) = force_reg (Pmode, callee);
7318
7319 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
7320 /* Indirect call: set r9 with FDPIC value of callee. */
7321 XEXP (operands[1], 0)
7322 = arm_load_function_descriptor (XEXP (operands[1], 0));
7323
7324 if (detect_cmse_nonsecure_call (addr))
7325 {
7326 pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
7327 operands[2], operands[3]);
7328 emit_call_insn (pat);
7329 }
7330 else
7331 {
7332 pat = gen_call_value_internal (operands[0], operands[1],
7333 operands[2], operands[3]);
7334 arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
7335 }
7336
7337 /* Restore FDPIC register (r9) after call. */
7338 if (TARGET_FDPIC)
7339 {
7340 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7341 rtx initial_fdpic_reg
7342 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7343
7344 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7345 initial_fdpic_reg));
7346 }
7347
7348 DONE;
7349 }"
7350 )
7351
7352 (define_expand "call_value_internal"
7353 [(parallel [(set (match_operand 0 "" "")
7354 (call (match_operand 1 "memory_operand")
7355 (match_operand 2 "general_operand")))
7356 (use (match_operand 3 "" ""))
7357 (clobber (reg:SI LR_REGNUM))])])
7358
7359 (define_expand "nonsecure_call_value_internal"
7360 [(parallel [(set (match_operand 0 "" "")
7361 (call (unspec:SI [(match_operand 1 "memory_operand")]
7362 UNSPEC_NONSECURE_MEM)
7363 (match_operand 2 "general_operand")))
7364 (use (match_operand 3 "" ""))
7365 (clobber (reg:SI LR_REGNUM))])]
7366 "use_cmse"
7367 "
7368 {
7369 rtx tmp;
7370 tmp = copy_to_suggested_reg (XEXP (operands[1], 0),
7371 gen_rtx_REG (SImode, R4_REGNUM),
7372 SImode);
7373
7374 operands[1] = replace_equiv_address (operands[1], tmp);
7375 }")
7376
7377 (define_insn "*call_value_reg_armv5"
7378 [(set (match_operand 0 "" "")
7379 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7380 (match_operand 2 "" "")))
7381 (use (match_operand 3 "" ""))
7382 (clobber (reg:SI LR_REGNUM))]
7383 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7384 "blx%?\\t%1"
7385 [(set_attr "type" "call")]
7386 )
7387
7388 (define_insn "*call_value_reg_arm"
7389 [(set (match_operand 0 "" "")
7390 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7391 (match_operand 2 "" "")))
7392 (use (match_operand 3 "" ""))
7393 (clobber (reg:SI LR_REGNUM))]
7394 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7395 "*
7396 return output_call (&operands[1]);
7397 "
7398 [(set_attr "length" "12")
7399 (set_attr "type" "call")]
7400 )
7401
7402 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
7403 ;; The '%a' output modifier causes the operand to be printed as an address, i.e. no '#' is output.
7404
7405 (define_insn "*call_symbol"
7406 [(call (mem:SI (match_operand:SI 0 "" ""))
7407 (match_operand 1 "" ""))
7408 (use (match_operand 2 "" ""))
7409 (clobber (reg:SI LR_REGNUM))]
7410 "TARGET_32BIT
7411 && !SIBLING_CALL_P (insn)
7412 && (GET_CODE (operands[0]) == SYMBOL_REF)
7413 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
7414 "*
7415 {
7416 rtx op = operands[0];
7417
7418 /* Switch mode now when possible. */
7419 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7420 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7421 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
7422
7423 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
7424 }"
7425 [(set_attr "type" "call")]
7426 )
7427
7428 (define_insn "*call_value_symbol"
7429 [(set (match_operand 0 "" "")
7430 (call (mem:SI (match_operand:SI 1 "" ""))
7431 (match_operand:SI 2 "" "")))
7432 (use (match_operand 3 "" ""))
7433 (clobber (reg:SI LR_REGNUM))]
7434 "TARGET_32BIT
7435 && !SIBLING_CALL_P (insn)
7436 && (GET_CODE (operands[1]) == SYMBOL_REF)
7437 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
7438 "*
7439 {
7440 rtx op = operands[1];
7441
7442 /* Switch mode now when possible. */
7443 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7444 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7445 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
7446
7447 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
7448 }"
7449 [(set_attr "type" "call")]
7450 )
7451
7452 (define_expand "sibcall_internal"
7453 [(parallel [(call (match_operand 0 "memory_operand")
7454 (match_operand 1 "general_operand"))
7455 (return)
7456 (use (match_operand 2 "" ""))])])
7457
7458 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
7459 (define_expand "sibcall"
7460 [(parallel [(call (match_operand 0 "memory_operand")
7461 (match_operand 1 "general_operand"))
7462 (return)
7463 (use (match_operand 2 "" ""))])]
7464 "TARGET_32BIT"
7465 "
7466 {
7467 rtx pat;
7468
7469 if ((!REG_P (XEXP (operands[0], 0))
7470 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
7471 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
7472 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
7473 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
7474
7475 if (operands[2] == NULL_RTX)
7476 operands[2] = const0_rtx;
7477
7478 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
7479 arm_emit_call_insn (pat, operands[0], true);
7480 DONE;
7481 }"
7482 )
7483
7484 (define_expand "sibcall_value_internal"
7485 [(parallel [(set (match_operand 0 "" "")
7486 (call (match_operand 1 "memory_operand")
7487 (match_operand 2 "general_operand")))
7488 (return)
7489 (use (match_operand 3 "" ""))])])
7490
7491 (define_expand "sibcall_value"
7492 [(parallel [(set (match_operand 0 "" "")
7493 (call (match_operand 1 "memory_operand")
7494 (match_operand 2 "general_operand")))
7495 (return)
7496 (use (match_operand 3 "" ""))])]
7497 "TARGET_32BIT"
7498 "
7499 {
7500 rtx pat;
7501
7502 if ((!REG_P (XEXP (operands[1], 0))
7503 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
7504 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
7505 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
7506 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
7507
7508 if (operands[3] == NULL_RTX)
7509 operands[3] = const0_rtx;
7510
7511 pat = gen_sibcall_value_internal (operands[0], operands[1],
7512 operands[2], operands[3]);
7513 arm_emit_call_insn (pat, operands[1], true);
7514 DONE;
7515 }"
7516 )
7517
7518 (define_insn "*sibcall_insn"
7519 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
7520 (match_operand 1 "" ""))
7521 (return)
7522 (use (match_operand 2 "" ""))]
7523 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7524 "*
7525 if (which_alternative == 1)
7526 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
7527 else
7528 {
7529 if (arm_arch5t || arm_arch4t)
7530 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
7531 else
7532 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
7533 }
7534 "
7535 [(set_attr "type" "call")]
7536 )
7537
7538 (define_insn "*sibcall_value_insn"
7539 [(set (match_operand 0 "" "")
7540 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
7541 (match_operand 2 "" "")))
7542 (return)
7543 (use (match_operand 3 "" ""))]
7544 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7545 "*
7546 if (which_alternative == 1)
7547 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
7548 else
7549 {
7550 if (arm_arch5t || arm_arch4t)
7551 return \"bx%?\\t%1\";
7552 else
7553 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
7554 }
7555 "
7556 [(set_attr "type" "call")]
7557 )
7558
7559 (define_expand "<return_str>return"
7560 [(RETURNS)]
7561 "(TARGET_ARM || (TARGET_THUMB2
7562 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
7563 && !IS_STACKALIGN (arm_current_func_type ())))
7564 <return_cond_false>"
7565 "
7566 {
7567 if (TARGET_THUMB2)
7568 {
7569 thumb2_expand_return (<return_simple_p>);
7570 DONE;
7571 }
7572 }
7573 "
7574 )
7575
7576 ;; Often the return insn will be the same as loading from memory, so set the type attribute to load_4.
7577 (define_insn "*arm_return"
7578 [(return)]
7579 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
7580 "*
7581 {
7582 if (arm_ccfsm_state == 2)
7583 {
7584 arm_ccfsm_state += 2;
7585 return \"\";
7586 }
7587 return output_return_instruction (const_true_rtx, true, false, false);
7588 }"
7589 [(set_attr "type" "load_4")
7590 (set_attr "length" "12")
7591 (set_attr "predicable" "yes")]
7592 )
7593
7594 (define_insn "*cond_<return_str>return"
7595 [(set (pc)
7596 (if_then_else (match_operator 0 "arm_comparison_operator"
7597 [(match_operand 1 "cc_register" "") (const_int 0)])
7598 (RETURNS)
7599 (pc)))]
7600 "TARGET_ARM <return_cond_true>"
7601 "*
7602 {
7603 if (arm_ccfsm_state == 2)
7604 {
7605 arm_ccfsm_state += 2;
7606 return \"\";
7607 }
7608 return output_return_instruction (operands[0], true, false,
7609 <return_simple_p>);
7610 }"
7611 [(set_attr "conds" "use")
7612 (set_attr "length" "12")
7613 (set_attr "type" "load_4")]
7614 )
7615
7616 (define_insn "*cond_<return_str>return_inverted"
7617 [(set (pc)
7618 (if_then_else (match_operator 0 "arm_comparison_operator"
7619 [(match_operand 1 "cc_register" "") (const_int 0)])
7620 (pc)
7621 (RETURNS)))]
7622 "TARGET_ARM <return_cond_true>"
7623 "*
7624 {
7625 if (arm_ccfsm_state == 2)
7626 {
7627 arm_ccfsm_state += 2;
7628 return \"\";
7629 }
7630 return output_return_instruction (operands[0], true, true,
7631 <return_simple_p>);
7632 }"
7633 [(set_attr "conds" "use")
7634 (set_attr "length" "12")
7635 (set_attr "type" "load_4")]
7636 )
7637
7638 (define_insn "*arm_simple_return"
7639 [(simple_return)]
7640 "TARGET_ARM"
7641 "*
7642 {
7643 if (arm_ccfsm_state == 2)
7644 {
7645 arm_ccfsm_state += 2;
7646 return \"\";
7647 }
7648 return output_return_instruction (const_true_rtx, true, false, true);
7649 }"
7650 [(set_attr "type" "branch")
7651 (set_attr "length" "4")
7652 (set_attr "predicable" "yes")]
7653 )
7654
7655 ;; Generate a sequence of instructions to determine if the processor is
7656 ;; in 26-bit or 32-bit mode, and return the appropriate return address
7657 ;; mask.
7658
7659 (define_expand "return_addr_mask"
7660 [(set (match_dup 1)
7661 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7662 (const_int 0)))
7663 (set (match_operand:SI 0 "s_register_operand")
7664 (if_then_else:SI (eq (match_dup 1) (const_int 0))
7665 (const_int -1)
7666 (const_int 67108860)))] ; 0x03fffffc
7667 "TARGET_ARM"
7668 "
7669 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
7670 ")
7671
7672 (define_insn "*check_arch2"
7673 [(set (match_operand:CC_NOOV 0 "cc_register" "")
7674 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7675 (const_int 0)))]
7676 "TARGET_ARM"
7677 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
7678 [(set_attr "length" "8")
7679 (set_attr "conds" "set")
7680 (set_attr "type" "multiple")]
7681 )
7682
7683 ;; Call subroutine returning any type.
7684
7685 (define_expand "untyped_call"
7686 [(parallel [(call (match_operand 0 "" "")
7687 (const_int 0))
7688 (match_operand 1 "" "")
7689 (match_operand 2 "" "")])]
7690 "TARGET_EITHER && !TARGET_FDPIC"
7691 "
7692 {
7693 int i;
7694 rtx par = gen_rtx_PARALLEL (VOIDmode,
7695 rtvec_alloc (XVECLEN (operands[2], 0)));
7696 rtx addr = gen_reg_rtx (Pmode);
7697 rtx mem;
7698 int size = 0;
7699
7700 emit_move_insn (addr, XEXP (operands[1], 0));
7701 mem = change_address (operands[1], BLKmode, addr);
7702
7703 for (i = 0; i < XVECLEN (operands[2], 0); i++)
7704 {
7705 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
7706
7707 /* Default code only uses r0 as a return value, but we could
7708 be using anything up to 4 registers. */
7709 if (REGNO (src) == R0_REGNUM)
7710 src = gen_rtx_REG (TImode, R0_REGNUM);
7711
7712 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
7713 GEN_INT (size));
7714 size += GET_MODE_SIZE (GET_MODE (src));
7715 }
7716
7717 emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
7718
7719 size = 0;
7720
7721 for (i = 0; i < XVECLEN (par, 0); i++)
7722 {
7723 HOST_WIDE_INT offset = 0;
7724 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
7725
7726 if (size != 0)
7727 emit_move_insn (addr, plus_constant (Pmode, addr, size));
7728
7729 mem = change_address (mem, GET_MODE (reg), NULL);
7730 if (REGNO (reg) == R0_REGNUM)
7731 {
7732 /* On thumb we have to use a write-back instruction. */
7733 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
7734 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7735 size = TARGET_ARM ? 16 : 0;
7736 }
7737 else
7738 {
7739 emit_move_insn (mem, reg);
7740 size = GET_MODE_SIZE (GET_MODE (reg));
7741 }
7742 }
7743
7744 /* The optimizer does not know that the call sets the function value
7745 registers we stored in the result block. We avoid problems by
7746 claiming that all hard registers are used and clobbered at this
7747 point. */
7748 emit_insn (gen_blockage ());
7749
7750 DONE;
7751 }"
7752 )
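;; Illustrative only: untyped_call is the expander behind __builtin_apply,
;; which forwards an already-packed argument block to another function and
;; captures its return value whatever its type.  A minimal GNU C sketch:
;;
;;   /* Forward ARGS (SIZE bytes, built with __builtin_apply_args) to F and
;;      return a pointer to the block holding F's return value.  */
;;   void *forward_call (void (*f) (), void *args, int size)
;;   {
;;     return __builtin_apply (f, args, size);
;;   }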
7753
7754 (define_expand "untyped_return"
7755 [(match_operand:BLK 0 "memory_operand")
7756 (match_operand 1 "" "")]
7757 "TARGET_EITHER && !TARGET_FDPIC"
7758 "
7759 {
7760 int i;
7761 rtx addr = gen_reg_rtx (Pmode);
7762 rtx mem;
7763 int size = 0;
7764
7765 emit_move_insn (addr, XEXP (operands[0], 0));
7766 mem = change_address (operands[0], BLKmode, addr);
7767
7768 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7769 {
7770 HOST_WIDE_INT offset = 0;
7771 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
7772
7773 if (size != 0)
7774 emit_move_insn (addr, plus_constant (Pmode, addr, size));
7775
7776 mem = change_address (mem, GET_MODE (reg), NULL);
7777 if (REGNO (reg) == R0_REGNUM)
7778 {
7779 /* On thumb we have to use a write-back instruction. */
7780 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
7781 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7782 size = TARGET_ARM ? 16 : 0;
7783 }
7784 else
7785 {
7786 emit_move_insn (reg, mem);
7787 size = GET_MODE_SIZE (GET_MODE (reg));
7788 }
7789 }
7790
7791 /* Emit USE insns before the return. */
7792 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7793 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
7794
7795 /* Construct the return. */
7796 expand_naked_return ();
7797
7798 DONE;
7799 }"
7800 )
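;; Illustrative only: untyped_return pairs with untyped_call to implement
;; __builtin_return, which hands a saved return-value block back to our own
;; caller.  A GNU C sketch (the helper name and the 64-byte argument-block
;; size are arbitrary choices for the example):
;;
;;   void (*forward_target) ();
;;   void forwarder ()
;;   {
;;     __builtin_return (__builtin_apply (forward_target,
;;                                        __builtin_apply_args (), 64));
;;   }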
7801
7802 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
7803 ;; all of memory. This blocks insns from being moved across this point.
7804
7805 (define_insn "blockage"
7806 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
7807 "TARGET_EITHER"
7808 ""
7809 [(set_attr "length" "0")
7810 (set_attr "type" "block")]
7811 )
7812
7813 ;; Since we hard-code r0 here, use the 'o' constraint to prevent
7814 ;; auto-increment addressing modes from being emitted with r0 as the base
7815 ;; register, which would provoke undefined behaviour in the hardware.
7816 (define_insn "probe_stack"
7817 [(set (match_operand:SI 0 "memory_operand" "=o")
7818 (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
7819 "TARGET_32BIT"
7820 "str%?\\tr0, %0"
7821 [(set_attr "type" "store_4")
7822 (set_attr "predicable" "yes")]
7823 )
7824
7825 (define_insn "probe_stack_range"
7826 [(set (match_operand:SI 0 "register_operand" "=r")
7827 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
7828 (match_operand:SI 2 "register_operand" "r")]
7829 VUNSPEC_PROBE_STACK_RANGE))]
7830 "TARGET_32BIT"
7831 {
7832 return output_probe_stack_range (operands[0], operands[2]);
7833 }
7834 [(set_attr "type" "multiple")
7835 (set_attr "conds" "clob")]
7836 )
7837
7838 ;; Named patterns for stack smashing protection.
7839 (define_expand "stack_protect_combined_set"
7840 [(parallel
7841 [(set (match_operand:SI 0 "memory_operand")
7842 (unspec:SI [(match_operand:SI 1 "guard_operand")]
7843 UNSPEC_SP_SET))
7844 (clobber (match_scratch:SI 2 ""))
7845 (clobber (match_scratch:SI 3 ""))])]
7846 ""
7847 ""
7848 )
7849
7850 ;; Use a separate insn from the expand above so that the mem can stay outside
7851 ;; operand #1 when register allocation happens.  This is needed to prevent LRA
7852 ;; from trying to reload the guard, since we need to control how PIC access is
7853 ;; done in the -fpic/-fPIC case (see the COMPUTE_NOW parameter when calling
7854 ;; legitimize_pic_address ()).
7855 (define_insn_and_split "*stack_protect_combined_set_insn"
7856 [(set (match_operand:SI 0 "memory_operand" "=m,m")
7857 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
7858 UNSPEC_SP_SET))
7859 (clobber (match_scratch:SI 2 "=&l,&r"))
7860 (clobber (match_scratch:SI 3 "=&l,&r"))]
7861 ""
7862 "#"
7863 "reload_completed"
7864 [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
7865 UNSPEC_SP_SET))
7866 (clobber (match_dup 2))])]
7867 "
7868 {
7869 if (flag_pic)
7870 {
7871 rtx pic_reg;
7872
7873 if (TARGET_FDPIC)
7874 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7875 else
7876 pic_reg = operands[3];
7877
7878 /* Forces recomputing of GOT base now. */
7879 legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
7880 true /*compute_now*/);
7881 }
7882 else
7883 {
7884 if (address_operand (operands[1], SImode))
7885 operands[2] = operands[1];
7886 else
7887 {
7888 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
7889 emit_move_insn (operands[2], mem);
7890 }
7891 }
7892 }"
7893 [(set_attr "arch" "t1,32")]
7894 )
7895
7896 ;; DO NOT SPLIT THIS INSN. It's important for security reasons that the
7897 ;; canary value does not live beyond the life of this sequence.
7898 (define_insn "*stack_protect_set_insn"
7899 [(set (match_operand:SI 0 "memory_operand" "=m,m")
7900 (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
7901 UNSPEC_SP_SET))
7902 (clobber (match_dup 1))]
7903 ""
7904 "@
7905 ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
7906 ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
7907 [(set_attr "length" "8,12")
7908 (set_attr "conds" "clob,nocond")
7909 (set_attr "type" "multiple")
7910 (set_attr "arch" "t1,32")]
7911 )
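;; Illustrative only: with -fstack-protector this is the prologue store of the
;; canary, and the final move of #0 is why the insn must never be split -- the
;; guard value must not stay live in a register.  A conceptual C sketch
;; (canary_slot stands in for the protected stack slot; __stack_chk_guard is
;; the usual guard symbol on GNU/Linux targets):
;;
;;   extern unsigned long __stack_chk_guard;
;;   static unsigned long canary_slot;
;;   static void prologue_sketch (void)
;;   {
;;     unsigned long tmp = __stack_chk_guard;   /* ldr  tmp, [guard]  */
;;     canary_slot = tmp;                       /* str  tmp, slot     */
;;     tmp = 0;                                 /* mov  tmp, #0       */
;;   }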
7912
7913 (define_expand "stack_protect_combined_test"
7914 [(parallel
7915 [(set (pc)
7916 (if_then_else
7917 (eq (match_operand:SI 0 "memory_operand")
7918 (unspec:SI [(match_operand:SI 1 "guard_operand")]
7919 UNSPEC_SP_TEST))
7920 (label_ref (match_operand 2))
7921 (pc)))
7922 (clobber (match_scratch:SI 3 ""))
7923 (clobber (match_scratch:SI 4 ""))
7924 (clobber (reg:CC CC_REGNUM))])]
7925 ""
7926 ""
7927 )
7928
7929 ;; Use a separate insn from the expand above so that the mem can stay outside
7930 ;; operand #1 when register allocation happens.  This is needed to prevent LRA
7931 ;; from trying to reload the guard, since we need to control how PIC access is
7932 ;; done in the -fpic/-fPIC case (see the COMPUTE_NOW parameter when calling
7933 ;; legitimize_pic_address ()).
7934 (define_insn_and_split "*stack_protect_combined_test_insn"
7935 [(set (pc)
7936 (if_then_else
7937 (eq (match_operand:SI 0 "memory_operand" "m,m")
7938 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
7939 UNSPEC_SP_TEST))
7940 (label_ref (match_operand 2))
7941 (pc)))
7942 (clobber (match_scratch:SI 3 "=&l,&r"))
7943 (clobber (match_scratch:SI 4 "=&l,&r"))
7944 (clobber (reg:CC CC_REGNUM))]
7945 ""
7946 "#"
7947 "reload_completed"
7948 [(const_int 0)]
7949 {
7950 rtx eq;
7951
7952 if (flag_pic)
7953 {
7954 rtx pic_reg;
7955
7956 if (TARGET_FDPIC)
7957 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7958 else
7959 pic_reg = operands[4];
7960
7961 /* Forces recomputing of GOT base now. */
7962 legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
7963 true /*compute_now*/);
7964 }
7965 else
7966 {
7967 if (address_operand (operands[1], SImode))
7968 operands[3] = operands[1];
7969 else
7970 {
7971 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
7972 emit_move_insn (operands[3], mem);
7973 }
7974 }
7975 if (TARGET_32BIT)
7976 {
7977 emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
7978 operands[3]));
7979 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
7980 eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
7981 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
7982 }
7983 else
7984 {
7985 emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
7986 operands[3]));
7987 eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
7988 emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
7989 operands[2]));
7990 }
7991 DONE;
7992 }
7993 [(set_attr "arch" "t1,32")]
7994 )
7995
7996 (define_insn "arm_stack_protect_test_insn"
7997 [(set (reg:CC_Z CC_REGNUM)
7998 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
7999 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
8000 UNSPEC_SP_TEST)
8001 (const_int 0)))
8002 (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
8003 (clobber (match_dup 2))]
8004 "TARGET_32BIT"
8005 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0"
8006 [(set_attr "length" "8,12")
8007 (set_attr "conds" "set")
8008 (set_attr "type" "multiple")
8009 (set_attr "arch" "t,32")]
8010 )
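;; Illustrative only: the matching epilogue check reloads the slot and the
;; guard and compares them (the eors above), branching to the failure handler
;; on a mismatch.  A conceptual C sketch using the same names as the prologue
;; sketch above:
;;
;;   extern unsigned long __stack_chk_guard;
;;   extern unsigned long canary_slot;
;;   extern void __stack_chk_fail (void);
;;   static void epilogue_sketch (void)
;;   {
;;     if (canary_slot != __stack_chk_guard)    /* ldr/ldr/eors + bne */
;;       __stack_chk_fail ();
;;   }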
8011
8012 (define_expand "casesi"
8013 [(match_operand:SI 0 "s_register_operand") ; index to jump on
8014 (match_operand:SI 1 "const_int_operand") ; lower bound
8015 (match_operand:SI 2 "const_int_operand") ; total range
8016 (match_operand:SI 3 "" "") ; table label
8017 (match_operand:SI 4 "" "")] ; Out of range label
8018 "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
8019 "
8020 {
8021 enum insn_code code;
8022 if (operands[1] != const0_rtx)
8023 {
8024 rtx reg = gen_reg_rtx (SImode);
8025
8026 emit_insn (gen_addsi3 (reg, operands[0],
8027 gen_int_mode (-INTVAL (operands[1]),
8028 SImode)));
8029 operands[0] = reg;
8030 }
8031
8032 if (TARGET_ARM)
8033 code = CODE_FOR_arm_casesi_internal;
8034 else if (TARGET_THUMB1)
8035 code = CODE_FOR_thumb1_casesi_internal_pic;
8036 else if (flag_pic)
8037 code = CODE_FOR_thumb2_casesi_internal_pic;
8038 else
8039 code = CODE_FOR_thumb2_casesi_internal;
8040
8041 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8042 operands[2] = force_reg (SImode, operands[2]);
8043
8044 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8045 operands[3], operands[4]));
8046 DONE;
8047 }"
8048 )
8049
8050 ;; The USE in this pattern is needed to tell flow analysis that this is
8051 ;; a CASESI insn. It has no other purpose.
8052 (define_expand "arm_casesi_internal"
8053 [(parallel [(set (pc)
8054 (if_then_else
8055 (leu (match_operand:SI 0 "s_register_operand")
8056 (match_operand:SI 1 "arm_rhs_operand"))
8057 (match_dup 4)
8058 (label_ref:SI (match_operand 3 ""))))
8059 (clobber (reg:CC CC_REGNUM))
8060 (use (label_ref:SI (match_operand 2 "")))])]
8061 "TARGET_ARM"
8062 {
8063 operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
8064 operands[4] = gen_rtx_PLUS (SImode, operands[4],
8065 gen_rtx_LABEL_REF (SImode, operands[2]));
8066 operands[4] = gen_rtx_MEM (SImode, operands[4]);
8067 MEM_READONLY_P (operands[4]) = 1;
8068 MEM_NOTRAP_P (operands[4]) = 1;
8069 })
8070
8071 (define_insn "*arm_casesi_internal"
8072 [(parallel [(set (pc)
8073 (if_then_else
8074 (leu (match_operand:SI 0 "s_register_operand" "r")
8075 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8076 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8077 (label_ref:SI (match_operand 2 "" ""))))
8078 (label_ref:SI (match_operand 3 "" ""))))
8079 (clobber (reg:CC CC_REGNUM))
8080 (use (label_ref:SI (match_dup 2)))])]
8081 "TARGET_ARM"
8082 "*
8083 if (flag_pic)
8084 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8085 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8086 "
8087 [(set_attr "conds" "clob")
8088 (set_attr "length" "12")
8089 (set_attr "type" "multiple")]
8090 )
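;; Illustrative only: this is the dispatch a dense C switch can lower to; in
;; the non-PIC ARM form the jump is a single load into pc from the inline
;; table (cmp / ldrls pc, [pc, index, asl #2] / b default).  A sketch (whether
;; a table is used depends on case density and optimization options):
;;
;;   int dispatch (int i)
;;   {
;;     switch (i)
;;       {
;;       case 0: return 10;
;;       case 1: return 11;
;;       case 2: return 12;
;;       case 3: return 13;
;;       default: return -1;
;;       }
;;   }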
8091
8092 (define_expand "indirect_jump"
8093 [(set (pc)
8094 (match_operand:SI 0 "s_register_operand"))]
8095 "TARGET_EITHER"
8096 "
8097 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8098 address and use bx. */
8099 if (TARGET_THUMB2)
8100 {
8101 rtx tmp;
8102 tmp = gen_reg_rtx (SImode);
8103 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8104 operands[0] = tmp;
8105 }
8106 "
8107 )
8108
8109 ;; NB Never uses BX.
8110 (define_insn "*arm_indirect_jump"
8111 [(set (pc)
8112 (match_operand:SI 0 "s_register_operand" "r"))]
8113 "TARGET_ARM"
8114 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8115 [(set_attr "predicable" "yes")
8116 (set_attr "type" "branch")]
8117 )
8118
8119 (define_insn "*load_indirect_jump"
8120 [(set (pc)
8121 (match_operand:SI 0 "memory_operand" "m"))]
8122 "TARGET_ARM"
8123 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8124 [(set_attr "type" "load_4")
8125 (set_attr "pool_range" "4096")
8126 (set_attr "neg_pool_range" "4084")
8127 (set_attr "predicable" "yes")]
8128 )
8129
8130 \f
8131 ;; Misc insns
8132
8133 (define_insn "nop"
8134 [(const_int 0)]
8135 "TARGET_EITHER"
8136 "nop"
8137 [(set (attr "length")
8138 (if_then_else (eq_attr "is_thumb" "yes")
8139 (const_int 2)
8140 (const_int 4)))
8141 (set_attr "type" "mov_reg")]
8142 )
8143
8144 (define_insn "trap"
8145 [(trap_if (const_int 1) (const_int 0))]
8146 ""
8147 "*
8148 if (TARGET_ARM)
8149 return \".inst\\t0xe7f000f0\";
8150 else
8151 return \".inst\\t0xdeff\";
8152 "
8153 [(set (attr "length")
8154 (if_then_else (eq_attr "is_thumb" "yes")
8155 (const_int 2)
8156 (const_int 4)))
8157 (set_attr "type" "trap")
8158 (set_attr "conds" "unconditional")]
8159 )
8160
8161 \f
8162 ;; Patterns to allow combination of arithmetic, cond code and shifts
8163
8164 (define_insn "*<arith_shift_insn>_multsi"
8165 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8166 (SHIFTABLE_OPS:SI
8167 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
8168 (match_operand:SI 3 "power_of_two_operand" ""))
8169 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
8170 "TARGET_32BIT"
8171 "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
8172 [(set_attr "predicable" "yes")
8173 (set_attr "shift" "2")
8174 (set_attr "arch" "a,t2")
8175 (set_attr "type" "alu_shift_imm")])
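;; Illustrative only: the power_of_two_operand multiply above is printed as a
;; shifted operand, so a scaled index can fold into one ALU instruction.  A C
;; sketch (not a guaranteed code-generation result):
;;
;;   int scale_add (int a, int b)
;;   {
;;     return a + b * 8;       /* add r0, r0, r1, lsl #3 */
;;   }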
8176
8177 (define_insn "*<arith_shift_insn>_shiftsi"
8178 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8179 (SHIFTABLE_OPS:SI
8180 (match_operator:SI 2 "shift_nomul_operator"
8181 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8182 (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
8183 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
8184 "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
8185 "<arith_shift_insn>%?\\t%0, %1, %3%S2"
8186 [(set_attr "predicable" "yes")
8187 (set_attr "shift" "3")
8188 (set_attr "arch" "a,t2,a")
8189 (set_attr "type" "alu_shift_imm,alu_shift_imm,alu_shift_reg")])
8190
8191 (define_split
8192 [(set (match_operand:SI 0 "s_register_operand" "")
8193 (match_operator:SI 1 "shiftable_operator"
8194 [(match_operator:SI 2 "shiftable_operator"
8195 [(match_operator:SI 3 "shift_operator"
8196 [(match_operand:SI 4 "s_register_operand" "")
8197 (match_operand:SI 5 "reg_or_int_operand" "")])
8198 (match_operand:SI 6 "s_register_operand" "")])
8199 (match_operand:SI 7 "arm_rhs_operand" "")]))
8200 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8201 "TARGET_32BIT"
8202 [(set (match_dup 8)
8203 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8204 (match_dup 6)]))
8205 (set (match_dup 0)
8206 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
8207 "")
8208
8209 (define_insn "*arith_shiftsi_compare0"
8210 [(set (reg:CC_NOOV CC_REGNUM)
8211 (compare:CC_NOOV
8212 (match_operator:SI 1 "shiftable_operator"
8213 [(match_operator:SI 3 "shift_operator"
8214 [(match_operand:SI 4 "s_register_operand" "r,r")
8215 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8216 (match_operand:SI 2 "s_register_operand" "r,r")])
8217 (const_int 0)))
8218 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8219 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8220 (match_dup 2)]))]
8221 "TARGET_32BIT"
8222 "%i1s%?\\t%0, %2, %4%S3"
8223 [(set_attr "conds" "set")
8224 (set_attr "shift" "4")
8225 (set_attr "arch" "32,a")
8226 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8227
8228 (define_insn "*arith_shiftsi_compare0_scratch"
8229 [(set (reg:CC_NOOV CC_REGNUM)
8230 (compare:CC_NOOV
8231 (match_operator:SI 1 "shiftable_operator"
8232 [(match_operator:SI 3 "shift_operator"
8233 [(match_operand:SI 4 "s_register_operand" "r,r")
8234 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8235 (match_operand:SI 2 "s_register_operand" "r,r")])
8236 (const_int 0)))
8237 (clobber (match_scratch:SI 0 "=r,r"))]
8238 "TARGET_32BIT"
8239 "%i1s%?\\t%0, %2, %4%S3"
8240 [(set_attr "conds" "set")
8241 (set_attr "shift" "4")
8242 (set_attr "arch" "32,a")
8243 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8244
8245 (define_insn "*sub_shiftsi"
8246 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8247 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8248 (match_operator:SI 2 "shift_operator"
8249 [(match_operand:SI 3 "s_register_operand" "r,r")
8250 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8251 "TARGET_32BIT"
8252 "sub%?\\t%0, %1, %3%S2"
8253 [(set_attr "predicable" "yes")
8254 (set_attr "predicable_short_it" "no")
8255 (set_attr "shift" "3")
8256 (set_attr "arch" "32,a")
8257 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8258
8259 (define_insn "*sub_shiftsi_compare0"
8260 [(set (reg:CC_NOOV CC_REGNUM)
8261 (compare:CC_NOOV
8262 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8263 (match_operator:SI 2 "shift_operator"
8264 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8265 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8266 (const_int 0)))
8267 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8268 (minus:SI (match_dup 1)
8269 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8270 "TARGET_32BIT"
8271 "subs%?\\t%0, %1, %3%S2"
8272 [(set_attr "conds" "set")
8273 (set_attr "shift" "3")
8274 (set_attr "arch" "32,a,a")
8275 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
8276
8277 (define_insn "*sub_shiftsi_compare0_scratch"
8278 [(set (reg:CC_NOOV CC_REGNUM)
8279 (compare:CC_NOOV
8280 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8281 (match_operator:SI 2 "shift_operator"
8282 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8283 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8284 (const_int 0)))
8285 (clobber (match_scratch:SI 0 "=r,r,r"))]
8286 "TARGET_32BIT"
8287 "subs%?\\t%0, %1, %3%S2"
8288 [(set_attr "conds" "set")
8289 (set_attr "shift" "3")
8290 (set_attr "arch" "32,a,a")
8291 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
8292 \f
8293
8294 (define_insn_and_split "*and_scc"
8295 [(set (match_operand:SI 0 "s_register_operand" "=r")
8296 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8297 [(match_operand 2 "cc_register" "") (const_int 0)])
8298 (match_operand:SI 3 "s_register_operand" "r")))]
8299 "TARGET_ARM"
8300 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
8301 "&& reload_completed"
8302 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
8303 (cond_exec (match_dup 4) (set (match_dup 0)
8304 (and:SI (match_dup 3) (const_int 1))))]
8305 {
8306 machine_mode mode = GET_MODE (operands[2]);
8307 enum rtx_code rc = GET_CODE (operands[1]);
8308
8309 /* Note that operands[4] is the same as operands[1],
8310 but with VOIDmode as the result. */
8311 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8312 if (mode == CCFPmode || mode == CCFPEmode)
8313 rc = reverse_condition_maybe_unordered (rc);
8314 else
8315 rc = reverse_condition (rc);
8316 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8317 }
8318 [(set_attr "conds" "use")
8319 (set_attr "type" "multiple")
8320 (set_attr "length" "8")]
8321 )
8322
8323 (define_insn_and_split "*ior_scc"
8324 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8325 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
8326 [(match_operand 2 "cc_register" "") (const_int 0)])
8327 (match_operand:SI 3 "s_register_operand" "0,?r")))]
8328 "TARGET_ARM"
8329 "@
8330 orr%d1\\t%0, %3, #1
8331 #"
8332 "&& reload_completed
8333 && REGNO (operands [0]) != REGNO (operands[3])"
8334 ;; && which_alternative == 1
8335 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
8336 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
8337 (cond_exec (match_dup 4) (set (match_dup 0)
8338 (ior:SI (match_dup 3) (const_int 1))))]
8339 {
8340 machine_mode mode = GET_MODE (operands[2]);
8341 enum rtx_code rc = GET_CODE (operands[1]);
8342
8343 /* Note that operands[4] is the same as operands[1],
8344 but with VOIDmode as the result. */
8345 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8346 if (mode == CCFPmode || mode == CCFPEmode)
8347 rc = reverse_condition_maybe_unordered (rc);
8348 else
8349 rc = reverse_condition (rc);
8350 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8351 }
8352 [(set_attr "conds" "use")
8353 (set_attr "length" "4,8")
8354 (set_attr "type" "logic_imm,multiple")]
8355 )
8356
8357 ; A series of splitters for the compare_scc pattern below. Note that
8358 ; order is important.
8359 (define_split
8360 [(set (match_operand:SI 0 "s_register_operand" "")
8361 (lt:SI (match_operand:SI 1 "s_register_operand" "")
8362 (const_int 0)))
8363 (clobber (reg:CC CC_REGNUM))]
8364 "TARGET_32BIT && reload_completed"
8365 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
8366
8367 (define_split
8368 [(set (match_operand:SI 0 "s_register_operand" "")
8369 (ge:SI (match_operand:SI 1 "s_register_operand" "")
8370 (const_int 0)))
8371 (clobber (reg:CC CC_REGNUM))]
8372 "TARGET_32BIT && reload_completed"
8373 [(set (match_dup 0) (not:SI (match_dup 1)))
8374 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
8375
8376 (define_split
8377 [(set (match_operand:SI 0 "s_register_operand" "")
8378 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8379 (const_int 0)))
8380 (clobber (reg:CC CC_REGNUM))]
8381 "arm_arch5t && TARGET_32BIT"
8382 [(set (match_dup 0) (clz:SI (match_dup 1)))
8383 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8384 )
8385
8386 (define_split
8387 [(set (match_operand:SI 0 "s_register_operand" "")
8388 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8389 (const_int 0)))
8390 (clobber (reg:CC CC_REGNUM))]
8391 "TARGET_32BIT && reload_completed"
8392 [(parallel
8393 [(set (reg:CC CC_REGNUM)
8394 (compare:CC (const_int 1) (match_dup 1)))
8395 (set (match_dup 0)
8396 (minus:SI (const_int 1) (match_dup 1)))])
8397 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
8398 (set (match_dup 0) (const_int 0)))])
8399
8400 (define_split
8401 [(set (match_operand:SI 0 "s_register_operand" "")
8402 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8403 (match_operand:SI 2 "const_int_operand" "")))
8404 (clobber (reg:CC CC_REGNUM))]
8405 "TARGET_32BIT && reload_completed"
8406 [(parallel
8407 [(set (reg:CC CC_REGNUM)
8408 (compare:CC (match_dup 1) (match_dup 2)))
8409 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
8410 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
8411 (set (match_dup 0) (const_int 1)))]
8412 {
8413 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
8414 })
8415
8416 (define_split
8417 [(set (match_operand:SI 0 "s_register_operand" "")
8418 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8419 (match_operand:SI 2 "arm_add_operand" "")))
8420 (clobber (reg:CC CC_REGNUM))]
8421 "TARGET_32BIT && reload_completed"
8422 [(parallel
8423 [(set (reg:CC_NOOV CC_REGNUM)
8424 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
8425 (const_int 0)))
8426 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
8427 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
8428 (set (match_dup 0) (const_int 1)))])
8429
8430 (define_insn_and_split "*compare_scc"
8431 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8432 (match_operator:SI 1 "arm_comparison_operator"
8433 [(match_operand:SI 2 "s_register_operand" "r,r")
8434 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8435 (clobber (reg:CC CC_REGNUM))]
8436 "TARGET_32BIT"
8437 "#"
8438 "&& reload_completed"
8439 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
8440 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
8441 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
8442 {
8443 rtx tmp1;
8444 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
8445 operands[2], operands[3]);
8446 enum rtx_code rc = GET_CODE (operands[1]);
8447
8448 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
8449
8450 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8451 if (mode == CCFPmode || mode == CCFPEmode)
8452 rc = reverse_condition_maybe_unordered (rc);
8453 else
8454 rc = reverse_condition (rc);
8455 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8456 }
8457 [(set_attr "type" "multiple")]
8458 )
8459
8460 ;; Attempt to improve the sequences generated by the compare_scc splitters
8461 ;; so that they do not use conditional execution.
8462
8463 ;; Rd = (eq (reg1) (const_int0)) // ARMv5
8464 ;; clz Rd, reg1
8465 ;; lsr Rd, Rd, #5
8466 (define_peephole2
8467 [(set (reg:CC CC_REGNUM)
8468 (compare:CC (match_operand:SI 1 "register_operand" "")
8469 (const_int 0)))
8470 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8471 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8472 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8473 (set (match_dup 0) (const_int 1)))]
8474 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8475 [(set (match_dup 0) (clz:SI (match_dup 1)))
8476 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8477 )
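;; Illustrative only: the transformation works because the ARM CLZ instruction
;; returns 32 for a zero input and at most 31 otherwise, so a logical shift
;; right by 5 yields exactly (x == 0).  A C sketch of the equivalence
;; (__builtin_clz is undefined for 0 in C, hence the explicit guard):
;;
;;   unsigned int is_zero (unsigned int x)
;;   {
;;     unsigned int clz = x ? (unsigned int) __builtin_clz (x) : 32u;
;;     return clz >> 5;        /* 1 iff x == 0 */
;;   }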
8478
8479 ;; Rd = (eq (reg1) (const_int0)) // !ARMv5
8480 ;; negs Rd, reg1
8481 ;; adc Rd, Rd, reg1
8482 (define_peephole2
8483 [(set (reg:CC CC_REGNUM)
8484 (compare:CC (match_operand:SI 1 "register_operand" "")
8485 (const_int 0)))
8486 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8487 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8488 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8489 (set (match_dup 0) (const_int 1)))
8490 (match_scratch:SI 2 "r")]
8491 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8492 [(parallel
8493 [(set (reg:CC CC_REGNUM)
8494 (compare:CC (const_int 0) (match_dup 1)))
8495 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
8496 (set (match_dup 0)
8497 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
8498 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
8499 )
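;; Illustrative only: here the carry left by NEGS (RSBS from zero) is set only
;; when the input is zero, so adding x, -x and the carry yields (x == 0)
;; without needing CLZ.  A C sketch of the arithmetic:
;;
;;   unsigned int is_zero_noclz (unsigned int x)
;;   {
;;     unsigned int neg = 0u - x;        /* negs rd, x : carry = (x == 0) */
;;     unsigned int carry = (x == 0);    /* modelled explicitly in C */
;;     return neg + x + carry;           /* adc rd, rd, x  ->  0 or 1 */
;;   }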
8500
8501 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
8502 ;; sub Rd, Reg1, reg2
8503 ;; clz Rd, Rd
8504 ;; lsr Rd, Rd, #5
8505 (define_peephole2
8506 [(set (reg:CC CC_REGNUM)
8507 (compare:CC (match_operand:SI 1 "register_operand" "")
8508 (match_operand:SI 2 "arm_rhs_operand" "")))
8509 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8510 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8511 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8512 (set (match_dup 0) (const_int 1)))]
8513 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
8514 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
8515 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
8516 (set (match_dup 0) (clz:SI (match_dup 0)))
8517 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8518 )
8519
8520
8521 ;; Rd = (eq (reg1) (reg2)) // ! ARMv5 or optimising for size.
8522 ;; sub T1, Reg1, reg2
8523 ;; negs Rd, T1
8524 ;; adc Rd, Rd, T1
8525 (define_peephole2
8526 [(set (reg:CC CC_REGNUM)
8527 (compare:CC (match_operand:SI 1 "register_operand" "")
8528 (match_operand:SI 2 "arm_rhs_operand" "")))
8529 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8530 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8531 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8532 (set (match_dup 0) (const_int 1)))
8533 (match_scratch:SI 3 "r")]
8534 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8535 [(set (match_dup 3) (match_dup 4))
8536 (parallel
8537 [(set (reg:CC CC_REGNUM)
8538 (compare:CC (const_int 0) (match_dup 3)))
8539 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
8540 (set (match_dup 0)
8541 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
8542 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
8543 "
8544 if (CONST_INT_P (operands[2]))
8545 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
8546 else
8547 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
8548 ")
8549
8550 (define_insn "*cond_move"
8551 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8552 (if_then_else:SI (match_operator 3 "equality_operator"
8553 [(match_operator 4 "arm_comparison_operator"
8554 [(match_operand 5 "cc_register" "") (const_int 0)])
8555 (const_int 0)])
8556 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
8557 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
8558 "TARGET_ARM"
8559 "*
8560 if (GET_CODE (operands[3]) == NE)
8561 {
8562 if (which_alternative != 1)
8563 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
8564 if (which_alternative != 0)
8565 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
8566 return \"\";
8567 }
8568 if (which_alternative != 0)
8569 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8570 if (which_alternative != 1)
8571 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
8572 return \"\";
8573 "
8574 [(set_attr "conds" "use")
8575 (set_attr_alternative "type"
8576 [(if_then_else (match_operand 2 "const_int_operand" "")
8577 (const_string "mov_imm")
8578 (const_string "mov_reg"))
8579 (if_then_else (match_operand 1 "const_int_operand" "")
8580 (const_string "mov_imm")
8581 (const_string "mov_reg"))
8582 (const_string "multiple")])
8583 (set_attr "length" "4,4,8")]
8584 )
8585
8586 (define_insn "*cond_arith"
8587 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8588 (match_operator:SI 5 "shiftable_operator"
8589 [(match_operator:SI 4 "arm_comparison_operator"
8590 [(match_operand:SI 2 "s_register_operand" "r,r")
8591 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
8592 (match_operand:SI 1 "s_register_operand" "0,?r")]))
8593 (clobber (reg:CC CC_REGNUM))]
8594 "TARGET_ARM"
8595 "*
8596 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
8597 return \"%i5\\t%0, %1, %2, lsr #31\";
8598
8599 output_asm_insn (\"cmp\\t%2, %3\", operands);
8600 if (GET_CODE (operands[5]) == AND)
8601 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
8602 else if (GET_CODE (operands[5]) == MINUS)
8603 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
8604 else if (which_alternative != 0)
8605 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8606 return \"%i5%d4\\t%0, %1, #1\";
8607 "
8608 [(set_attr "conds" "clob")
8609 (set_attr "length" "12")
8610 (set_attr "type" "multiple")]
8611 )
8612
8613 (define_insn "*cond_sub"
8614 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8615 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
8616 (match_operator:SI 4 "arm_comparison_operator"
8617 [(match_operand:SI 2 "s_register_operand" "r,r")
8618 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
8619 (clobber (reg:CC CC_REGNUM))]
8620 "TARGET_ARM"
8621 "*
8622 output_asm_insn (\"cmp\\t%2, %3\", operands);
8623 if (which_alternative != 0)
8624 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8625 return \"sub%d4\\t%0, %1, #1\";
8626 "
8627 [(set_attr "conds" "clob")
8628 (set_attr "length" "8,12")
8629 (set_attr "type" "multiple")]
8630 )
8631
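;; Conditional-compare pattern: an if_then_else of two comparisons with 0 as
;; the "else" arm is the logical AND of the two conditions.  For example,
;; (r0 == r1) && (r2 == r3) can be tested with a sequence like
;;	cmp	r2, r3
;;	cmpeq	r0, r1
;; (preceded by an IT instruction for Thumb-2), leaving the Z flag set only
;; when both pairs are equal.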
8632 (define_insn "*cmp_ite0"
8633 [(set (match_operand 6 "dominant_cc_register" "")
8634 (compare
8635 (if_then_else:SI
8636 (match_operator 4 "arm_comparison_operator"
8637 [(match_operand:SI 0 "s_register_operand"
8638 "l,l,l,r,r,r,r,r,r")
8639 (match_operand:SI 1 "arm_add_operand"
8640 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
8641 (match_operator:SI 5 "arm_comparison_operator"
8642 [(match_operand:SI 2 "s_register_operand"
8643 "l,r,r,l,l,r,r,r,r")
8644 (match_operand:SI 3 "arm_add_operand"
8645 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
8646 (const_int 0))
8647 (const_int 0)))]
8648 "TARGET_32BIT"
8649 "*
8650 {
8651 static const char * const cmp1[NUM_OF_COND_CMP][2] =
8652 {
8653 {\"cmp%d5\\t%0, %1\",
8654 \"cmp%d4\\t%2, %3\"},
8655 {\"cmn%d5\\t%0, #%n1\",
8656 \"cmp%d4\\t%2, %3\"},
8657 {\"cmp%d5\\t%0, %1\",
8658 \"cmn%d4\\t%2, #%n3\"},
8659 {\"cmn%d5\\t%0, #%n1\",
8660 \"cmn%d4\\t%2, #%n3\"}
8661 };
8662 static const char * const cmp2[NUM_OF_COND_CMP][2] =
8663 {
8664 {\"cmp\\t%2, %3\",
8665 \"cmp\\t%0, %1\"},
8666 {\"cmp\\t%2, %3\",
8667 \"cmn\\t%0, #%n1\"},
8668 {\"cmn\\t%2, #%n3\",
8669 \"cmp\\t%0, %1\"},
8670 {\"cmn\\t%2, #%n3\",
8671 \"cmn\\t%0, #%n1\"}
8672 };
8673 static const char * const ite[2] =
8674 {
8675 \"it\\t%d5\",
8676 \"it\\t%d4\"
8677 };
8678 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
8679 CMP_CMP, CMN_CMP, CMP_CMP,
8680 CMN_CMP, CMP_CMN, CMN_CMN};
8681 int swap =
8682 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8683
8684 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8685 if (TARGET_THUMB2) {
8686 output_asm_insn (ite[swap], operands);
8687 }
8688 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8689 return \"\";
8690 }"
8691 [(set_attr "conds" "set")
8692 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
8693 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
8694 (set_attr "type" "multiple")
8695 (set_attr_alternative "length"
8696 [(const_int 6)
8697 (const_int 8)
8698 (const_int 8)
8699 (const_int 8)
8700 (const_int 8)
8701 (if_then_else (eq_attr "is_thumb" "no")
8702 (const_int 8)
8703 (const_int 10))
8704 (if_then_else (eq_attr "is_thumb" "no")
8705 (const_int 8)
8706 (const_int 10))
8707 (if_then_else (eq_attr "is_thumb" "no")
8708 (const_int 8)
8709 (const_int 10))
8710 (if_then_else (eq_attr "is_thumb" "no")
8711 (const_int 8)
8712 (const_int 10))])]
8713 )
8714
8715 (define_insn "*cmp_ite1"
8716 [(set (match_operand 6 "dominant_cc_register" "")
8717 (compare
8718 (if_then_else:SI
8719 (match_operator 4 "arm_comparison_operator"
8720 [(match_operand:SI 0 "s_register_operand"
8721 "l,l,l,r,r,r,r,r,r")
8722 (match_operand:SI 1 "arm_add_operand"
8723 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
8724 (match_operator:SI 5 "arm_comparison_operator"
8725 [(match_operand:SI 2 "s_register_operand"
8726 "l,r,r,l,l,r,r,r,r")
8727 (match_operand:SI 3 "arm_add_operand"
8728 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
8729 (const_int 1))
8730 (const_int 0)))]
8731 "TARGET_32BIT"
8732 "*
8733 {
8734 static const char * const cmp1[NUM_OF_COND_CMP][2] =
8735 {
8736 {\"cmp\\t%0, %1\",
8737 \"cmp\\t%2, %3\"},
8738 {\"cmn\\t%0, #%n1\",
8739 \"cmp\\t%2, %3\"},
8740 {\"cmp\\t%0, %1\",
8741 \"cmn\\t%2, #%n3\"},
8742 {\"cmn\\t%0, #%n1\",
8743 \"cmn\\t%2, #%n3\"}
8744 };
8745 static const char * const cmp2[NUM_OF_COND_CMP][2] =
8746 {
8747 {\"cmp%d4\\t%2, %3\",
8748 \"cmp%D5\\t%0, %1\"},
8749 {\"cmp%d4\\t%2, %3\",
8750 \"cmn%D5\\t%0, #%n1\"},
8751 {\"cmn%d4\\t%2, #%n3\",
8752 \"cmp%D5\\t%0, %1\"},
8753 {\"cmn%d4\\t%2, #%n3\",
8754 \"cmn%D5\\t%0, #%n1\"}
8755 };
8756 static const char * const ite[2] =
8757 {
8758 \"it\\t%d4\",
8759 \"it\\t%D5\"
8760 };
8761 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
8762 CMP_CMP, CMN_CMP, CMP_CMP,
8763 CMN_CMP, CMP_CMN, CMN_CMN};
8764 int swap =
8765 comparison_dominates_p (GET_CODE (operands[5]),
8766 reverse_condition (GET_CODE (operands[4])));
8767
8768 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8769 if (TARGET_THUMB2) {
8770 output_asm_insn (ite[swap], operands);
8771 }
8772 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8773 return \"\";
8774 }"
8775 [(set_attr "conds" "set")
8776 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
8777 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
8778 (set_attr_alternative "length"
8779 [(const_int 6)
8780 (const_int 8)
8781 (const_int 8)
8782 (const_int 8)
8783 (const_int 8)
8784 (if_then_else (eq_attr "is_thumb" "no")
8785 (const_int 8)
8786 (const_int 10))
8787 (if_then_else (eq_attr "is_thumb" "no")
8788 (const_int 8)
8789 (const_int 10))
8790 (if_then_else (eq_attr "is_thumb" "no")
8791 (const_int 8)
8792 (const_int 10))
8793 (if_then_else (eq_attr "is_thumb" "no")
8794 (const_int 8)
8795 (const_int 10))])
8796 (set_attr "type" "multiple")]
8797 )
8798
8799 (define_insn "*cmp_and"
8800 [(set (match_operand 6 "dominant_cc_register" "")
8801 (compare
8802 (and:SI
8803 (match_operator 4 "arm_comparison_operator"
8804 [(match_operand:SI 0 "s_register_operand"
8805 "l,l,l,r,r,r,r,r,r,r")
8806 (match_operand:SI 1 "arm_add_operand"
8807 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
8808 (match_operator:SI 5 "arm_comparison_operator"
8809 [(match_operand:SI 2 "s_register_operand"
8810 "l,r,r,l,l,r,r,r,r,r")
8811 (match_operand:SI 3 "arm_add_operand"
8812 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
8813 (const_int 0)))]
8814 "TARGET_32BIT"
8815 "*
8816 {
8817 static const char *const cmp1[NUM_OF_COND_CMP][2] =
8818 {
8819 {\"cmp%d5\\t%0, %1\",
8820 \"cmp%d4\\t%2, %3\"},
8821 {\"cmn%d5\\t%0, #%n1\",
8822 \"cmp%d4\\t%2, %3\"},
8823 {\"cmp%d5\\t%0, %1\",
8824 \"cmn%d4\\t%2, #%n3\"},
8825 {\"cmn%d5\\t%0, #%n1\",
8826 \"cmn%d4\\t%2, #%n3\"}
8827 };
8828 static const char *const cmp2[NUM_OF_COND_CMP][2] =
8829 {
8830 {\"cmp\\t%2, %3\",
8831 \"cmp\\t%0, %1\"},
8832 {\"cmp\\t%2, %3\",
8833 \"cmn\\t%0, #%n1\"},
8834 {\"cmn\\t%2, #%n3\",
8835 \"cmp\\t%0, %1\"},
8836 {\"cmn\\t%2, #%n3\",
8837 \"cmn\\t%0, #%n1\"}
8838 };
8839 static const char *const ite[2] =
8840 {
8841 \"it\\t%d5\",
8842 \"it\\t%d4\"
8843 };
8844 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
8845 CMP_CMP, CMN_CMP, CMP_CMP,
8846 CMP_CMP, CMN_CMP, CMP_CMN,
8847 CMN_CMN};
8848 int swap =
8849 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8850
8851 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8852 if (TARGET_THUMB2) {
8853 output_asm_insn (ite[swap], operands);
8854 }
8855 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8856 return \"\";
8857 }"
8858 [(set_attr "conds" "set")
8859 (set_attr "predicable" "no")
8860 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
8861 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
8862 (set_attr_alternative "length"
8863 [(const_int 6)
8864 (const_int 8)
8865 (const_int 8)
8866 (const_int 8)
8867 (const_int 8)
8868 (const_int 6)
8869 (if_then_else (eq_attr "is_thumb" "no")
8870 (const_int 8)
8871 (const_int 10))
8872 (if_then_else (eq_attr "is_thumb" "no")
8873 (const_int 8)
8874 (const_int 10))
8875 (if_then_else (eq_attr "is_thumb" "no")
8876 (const_int 8)
8877 (const_int 10))
8878 (if_then_else (eq_attr "is_thumb" "no")
8879 (const_int 8)
8880 (const_int 10))])
8881 (set_attr "type" "multiple")]
8882 )
8883
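;; Conditional-compare pattern for the inclusive OR of two conditions.
;; For example, (r0 == 0) || (r1 == 0) can be tested with a sequence like
;;	cmp	r0, #0
;;	cmpne	r1, #0
;; where the second, conditional compare runs only if the first test failed,
;; so the final flags report the OR of the two tests.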
8884 (define_insn "*cmp_ior"
8885 [(set (match_operand 6 "dominant_cc_register" "")
8886 (compare
8887 (ior:SI
8888 (match_operator 4 "arm_comparison_operator"
8889 [(match_operand:SI 0 "s_register_operand"
8890 "l,l,l,r,r,r,r,r,r,r")
8891 (match_operand:SI 1 "arm_add_operand"
8892 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
8893 (match_operator:SI 5 "arm_comparison_operator"
8894 [(match_operand:SI 2 "s_register_operand"
8895 "l,r,r,l,l,r,r,r,r,r")
8896 (match_operand:SI 3 "arm_add_operand"
8897 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
8898 (const_int 0)))]
8899 "TARGET_32BIT"
8900 "*
8901 {
8902 static const char *const cmp1[NUM_OF_COND_CMP][2] =
8903 {
8904 {\"cmp\\t%0, %1\",
8905 \"cmp\\t%2, %3\"},
8906 {\"cmn\\t%0, #%n1\",
8907 \"cmp\\t%2, %3\"},
8908 {\"cmp\\t%0, %1\",
8909 \"cmn\\t%2, #%n3\"},
8910 {\"cmn\\t%0, #%n1\",
8911 \"cmn\\t%2, #%n3\"}
8912 };
8913 static const char *const cmp2[NUM_OF_COND_CMP][2] =
8914 {
8915 {\"cmp%D4\\t%2, %3\",
8916 \"cmp%D5\\t%0, %1\"},
8917 {\"cmp%D4\\t%2, %3\",
8918 \"cmn%D5\\t%0, #%n1\"},
8919 {\"cmn%D4\\t%2, #%n3\",
8920 \"cmp%D5\\t%0, %1\"},
8921 {\"cmn%D4\\t%2, #%n3\",
8922 \"cmn%D5\\t%0, #%n1\"}
8923 };
8924 static const char *const ite[2] =
8925 {
8926 \"it\\t%D4\",
8927 \"it\\t%D5\"
8928 };
8929 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
8930 CMP_CMP, CMN_CMP, CMP_CMP,
8931 CMP_CMP, CMN_CMP, CMP_CMN,
8932 CMN_CMN};
8933 int swap =
8934 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8935
8936 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8937 if (TARGET_THUMB2) {
8938 output_asm_insn (ite[swap], operands);
8939 }
8940 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8941 return \"\";
8942 }
8943 "
8944 [(set_attr "conds" "set")
8945 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
8946 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
8947 (set_attr_alternative "length"
8948 [(const_int 6)
8949 (const_int 8)
8950 (const_int 8)
8951 (const_int 8)
8952 (const_int 8)
8953 (const_int 6)
8954 (if_then_else (eq_attr "is_thumb" "no")
8955 (const_int 8)
8956 (const_int 10))
8957 (if_then_else (eq_attr "is_thumb" "no")
8958 (const_int 8)
8959 (const_int 10))
8960 (if_then_else (eq_attr "is_thumb" "no")
8961 (const_int 8)
8962 (const_int 10))
8963 (if_then_else (eq_attr "is_thumb" "no")
8964 (const_int 8)
8965 (const_int 10))])
8966 (set_attr "type" "multiple")]
8967 )
8968
8969 (define_insn_and_split "*ior_scc_scc"
8970 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8971 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
8972 [(match_operand:SI 1 "s_register_operand" "l,r")
8973 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
8974 (match_operator:SI 6 "arm_comparison_operator"
8975 [(match_operand:SI 4 "s_register_operand" "l,r")
8976 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
8977 (clobber (reg:CC CC_REGNUM))]
8978 "TARGET_32BIT
8979 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
8980 != CCmode)"
8981 "#"
8982 "TARGET_32BIT && reload_completed"
8983 [(set (match_dup 7)
8984 (compare
8985 (ior:SI
8986 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8987 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8988 (const_int 0)))
8989 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
8990 "operands[7]
8991 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
8992 DOM_CC_X_OR_Y),
8993 CC_REGNUM);"
8994 [(set_attr "conds" "clob")
8995 (set_attr "enabled_for_short_it" "yes,no")
8996 (set_attr "length" "16")
8997 (set_attr "type" "multiple")]
8998 )
8999
9000 ; If the above pattern is followed by a CMP insn, then the compare is
9001 ; redundant, since we can rework the conditional instruction that follows.
9002 (define_insn_and_split "*ior_scc_scc_cmp"
9003 [(set (match_operand 0 "dominant_cc_register" "")
9004 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9005 [(match_operand:SI 1 "s_register_operand" "l,r")
9006 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9007 (match_operator:SI 6 "arm_comparison_operator"
9008 [(match_operand:SI 4 "s_register_operand" "l,r")
9009 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9010 (const_int 0)))
9011 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9012 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9013 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9014 "TARGET_32BIT"
9015 "#"
9016 "TARGET_32BIT && reload_completed"
9017 [(set (match_dup 0)
9018 (compare
9019 (ior:SI
9020 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9021 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9022 (const_int 0)))
9023 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9024 ""
9025 [(set_attr "conds" "set")
9026 (set_attr "enabled_for_short_it" "yes,no")
9027 (set_attr "length" "16")
9028 (set_attr "type" "multiple")]
9029 )
9030
9031 (define_insn_and_split "*and_scc_scc"
9032 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9033 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9034 [(match_operand:SI 1 "s_register_operand" "l,r")
9035 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9036 (match_operator:SI 6 "arm_comparison_operator"
9037 [(match_operand:SI 4 "s_register_operand" "l,r")
9038 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
9039 (clobber (reg:CC CC_REGNUM))]
9040 "TARGET_32BIT
9041 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9042 != CCmode)"
9043 "#"
9044 "TARGET_32BIT && reload_completed
9045 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9046 != CCmode)"
9047 [(set (match_dup 7)
9048 (compare
9049 (and:SI
9050 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9051 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9052 (const_int 0)))
9053 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9054 "operands[7]
9055 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9056 DOM_CC_X_AND_Y),
9057 CC_REGNUM);"
9058 [(set_attr "conds" "clob")
9059 (set_attr "enabled_for_short_it" "yes,no")
9060 (set_attr "length" "16")
9061 (set_attr "type" "multiple")]
9062 )
9063
9064 ; If the above pattern is followed by a CMP insn, then the compare is
9065 ; redundant, since we can rework the conditional instruction that follows.
9066 (define_insn_and_split "*and_scc_scc_cmp"
9067 [(set (match_operand 0 "dominant_cc_register" "")
9068 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9069 [(match_operand:SI 1 "s_register_operand" "l,r")
9070 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9071 (match_operator:SI 6 "arm_comparison_operator"
9072 [(match_operand:SI 4 "s_register_operand" "l,r")
9073 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9074 (const_int 0)))
9075 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9076 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9077 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9078 "TARGET_32BIT"
9079 "#"
9080 "TARGET_32BIT && reload_completed"
9081 [(set (match_dup 0)
9082 (compare
9083 (and:SI
9084 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9085 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9086 (const_int 0)))
9087 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9088 ""
9089 [(set_attr "conds" "set")
9090 (set_attr "enabled_for_short_it" "yes,no")
9091 (set_attr "length" "16")
9092 (set_attr "type" "multiple")]
9093 )
9094
9095 ;; If there is no dominance in the comparison, then we can still save an
9096 ;; instruction in the AND case, since we know that the second compare
9097 ;; need only zero the value if false (if true, then the value is already
9098 ;; correct).
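;; Schematically, the split below emits an scc of the first comparison into
;; the destination, a compare for the second pair, and then a single
;; conditional "mov Rd, #0" that clears the result when the second
;; condition is false; no conditional "mov Rd, #1" is needed.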
9099 (define_insn_and_split "*and_scc_scc_nodom"
9100 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
9101 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9102 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9103 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9104 (match_operator:SI 6 "arm_comparison_operator"
9105 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9106 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9107 (clobber (reg:CC CC_REGNUM))]
9108 "TARGET_32BIT
9109 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9110 == CCmode)"
9111 "#"
9112 "TARGET_32BIT && reload_completed"
9113 [(parallel [(set (match_dup 0)
9114 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9115 (clobber (reg:CC CC_REGNUM))])
9116 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9117 (set (match_dup 0)
9118 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9119 (match_dup 0)
9120 (const_int 0)))]
9121 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9122 operands[4], operands[5]),
9123 CC_REGNUM);
9124 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9125 operands[5]);"
9126 [(set_attr "conds" "clob")
9127 (set_attr "length" "20")
9128 (set_attr "type" "multiple")]
9129 )
9130
9131 (define_split
9132 [(set (reg:CC_NOOV CC_REGNUM)
9133 (compare:CC_NOOV (ior:SI
9134 (and:SI (match_operand:SI 0 "s_register_operand" "")
9135 (const_int 1))
9136 (match_operator:SI 1 "arm_comparison_operator"
9137 [(match_operand:SI 2 "s_register_operand" "")
9138 (match_operand:SI 3 "arm_add_operand" "")]))
9139 (const_int 0)))
9140 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9141 "TARGET_ARM"
9142 [(set (match_dup 4)
9143 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9144 (match_dup 0)))
9145 (set (reg:CC_NOOV CC_REGNUM)
9146 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9147 (const_int 0)))]
9148 "")
9149
9150 (define_split
9151 [(set (reg:CC_NOOV CC_REGNUM)
9152 (compare:CC_NOOV (ior:SI
9153 (match_operator:SI 1 "arm_comparison_operator"
9154 [(match_operand:SI 2 "s_register_operand" "")
9155 (match_operand:SI 3 "arm_add_operand" "")])
9156 (and:SI (match_operand:SI 0 "s_register_operand" "")
9157 (const_int 1)))
9158 (const_int 0)))
9159 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9160 "TARGET_ARM"
9161 [(set (match_dup 4)
9162 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9163 (match_dup 0)))
9164 (set (reg:CC_NOOV CC_REGNUM)
9165 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9166 (const_int 0)))]
9167 "")
9168 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9169
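;; Rd = -(reg1 <cond> reg2): 0 when the comparison is false and -1 (all bits
;; set) when it is true.  The LT-against-zero case needs only a single
;; "mov Rd, Reg1, asr #31", since the arithmetic shift replicates the sign
;; bit across the whole word.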
9170 (define_insn_and_split "*negscc"
9171 [(set (match_operand:SI 0 "s_register_operand" "=r")
9172 (neg:SI (match_operator 3 "arm_comparison_operator"
9173 [(match_operand:SI 1 "s_register_operand" "r")
9174 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9175 (clobber (reg:CC CC_REGNUM))]
9176 "TARGET_ARM"
9177 "#"
9178 "&& reload_completed"
9179 [(const_int 0)]
9180 {
9181 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
9182
9183 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9184 {
9185 /* Emit mov\\t%0, %1, asr #31 */
9186 emit_insn (gen_rtx_SET (operands[0],
9187 gen_rtx_ASHIFTRT (SImode,
9188 operands[1],
9189 GEN_INT (31))));
9190 DONE;
9191 }
9192 else if (GET_CODE (operands[3]) == NE)
9193 {
9194 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
9195 if (CONST_INT_P (operands[2]))
9196 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
9197 gen_int_mode (-INTVAL (operands[2]),
9198 SImode)));
9199 else
9200 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
9201
9202 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9203 gen_rtx_NE (SImode,
9204 cc_reg,
9205 const0_rtx),
9206 gen_rtx_SET (operands[0],
9207 GEN_INT (~0))));
9208 DONE;
9209 }
9210 else
9211 {
9212 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
9213 emit_insn (gen_rtx_SET (cc_reg,
9214 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
9215 enum rtx_code rc = GET_CODE (operands[3]);
9216
9217 rc = reverse_condition (rc);
9218 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9219 gen_rtx_fmt_ee (rc,
9220 VOIDmode,
9221 cc_reg,
9222 const0_rtx),
9223 gen_rtx_SET (operands[0], const0_rtx)));
9224 rc = GET_CODE (operands[3]);
9225 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9226 gen_rtx_fmt_ee (rc,
9227 VOIDmode,
9228 cc_reg,
9229 const0_rtx),
9230 gen_rtx_SET (operands[0],
9231 GEN_INT (~0))));
9232 DONE;
9233 }
9234 FAIL;
9235 }
9236 [(set_attr "conds" "clob")
9237 (set_attr "length" "12")
9238 (set_attr "type" "multiple")]
9239 )
9240
9241 (define_insn_and_split "movcond_addsi"
9242 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
9243 (if_then_else:SI
9244 (match_operator 5 "comparison_operator"
9245 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
9246 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
9247 (const_int 0)])
9248 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
9249 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
9250 (clobber (reg:CC CC_REGNUM))]
9251 "TARGET_32BIT"
9252 "#"
9253 "&& reload_completed"
9254 [(set (reg:CC_NOOV CC_REGNUM)
9255 (compare:CC_NOOV
9256 (plus:SI (match_dup 3)
9257 (match_dup 4))
9258 (const_int 0)))
9259 (set (match_dup 0) (match_dup 1))
9260 (cond_exec (match_dup 6)
9261 (set (match_dup 0) (match_dup 2)))]
9262 "
9263 {
9264 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
9265 operands[3], operands[4]);
9266 enum rtx_code rc = GET_CODE (operands[5]);
9267 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
9268 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
9269 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
9270 rc = reverse_condition (rc);
9271 else
9272 std::swap (operands[1], operands[2]);
9273
9274 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
9275 }
9276 "
9277 [(set_attr "conds" "clob")
9278 (set_attr "enabled_for_short_it" "no,yes,yes")
9279 (set_attr "type" "multiple")]
9280 )
9281
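;; "movcond" open-codes some compare-against-zero selections with a sign
;; mask: for example, Rd = (x < 0) ? y : 0 becomes "and Rd, y, x, asr #31",
;; because "x, asr #31" is all ones when x is negative and 0 otherwise.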
9282 (define_insn "movcond"
9283 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9284 (if_then_else:SI
9285 (match_operator 5 "arm_comparison_operator"
9286 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9287 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9288 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9289 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9290 (clobber (reg:CC CC_REGNUM))]
9291 "TARGET_ARM"
9292 "*
9293 if (GET_CODE (operands[5]) == LT
9294 && (operands[4] == const0_rtx))
9295 {
9296 if (which_alternative != 1 && REG_P (operands[1]))
9297 {
9298 if (operands[2] == const0_rtx)
9299 return \"and\\t%0, %1, %3, asr #31\";
9300 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9301 }
9302 else if (which_alternative != 0 && REG_P (operands[2]))
9303 {
9304 if (operands[1] == const0_rtx)
9305 return \"bic\\t%0, %2, %3, asr #31\";
9306 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9307 }
9308 /* The only case that falls through to here is when both ops 1 & 2
9309 are constants. */
9310 }
9311
9312 if (GET_CODE (operands[5]) == GE
9313 && (operands[4] == const0_rtx))
9314 {
9315 if (which_alternative != 1 && REG_P (operands[1]))
9316 {
9317 if (operands[2] == const0_rtx)
9318 return \"bic\\t%0, %1, %3, asr #31\";
9319 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9320 }
9321 else if (which_alternative != 0 && REG_P (operands[2]))
9322 {
9323 if (operands[1] == const0_rtx)
9324 return \"and\\t%0, %2, %3, asr #31\";
9325 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9326 }
9327 /* The only case that falls through to here is when both ops 1 & 2
9328 are constants. */
9329 }
9330 if (CONST_INT_P (operands[4])
9331 && !const_ok_for_arm (INTVAL (operands[4])))
9332 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9333 else
9334 output_asm_insn (\"cmp\\t%3, %4\", operands);
9335 if (which_alternative != 0)
9336 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9337 if (which_alternative != 1)
9338 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9339 return \"\";
9340 "
9341 [(set_attr "conds" "clob")
9342 (set_attr "length" "8,8,12")
9343 (set_attr "type" "multiple")]
9344 )
9345
9346 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9347
9348 (define_insn "*ifcompare_plus_move"
9349 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9350 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9351 [(match_operand:SI 4 "s_register_operand" "r,r")
9352 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9353 (plus:SI
9354 (match_operand:SI 2 "s_register_operand" "r,r")
9355 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9356 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9357 (clobber (reg:CC CC_REGNUM))]
9358 "TARGET_ARM"
9359 "#"
9360 [(set_attr "conds" "clob")
9361 (set_attr "length" "8,12")
9362 (set_attr "type" "multiple")]
9363 )
9364
9365 (define_insn "*if_plus_move"
9366 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9367 (if_then_else:SI
9368 (match_operator 4 "arm_comparison_operator"
9369 [(match_operand 5 "cc_register" "") (const_int 0)])
9370 (plus:SI
9371 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9372 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9373 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9374 "TARGET_ARM"
9375 "@
9376 add%d4\\t%0, %2, %3
9377 sub%d4\\t%0, %2, #%n3
9378 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9379 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9380 [(set_attr "conds" "use")
9381 (set_attr "length" "4,4,8,8")
9382 (set_attr_alternative "type"
9383 [(if_then_else (match_operand 3 "const_int_operand" "")
9384 (const_string "alu_imm")
9385 (const_string "alu_sreg"))
9386 (const_string "alu_imm")
9387 (const_string "multiple")
9388 (const_string "multiple")])]
9389 )
9390
9391 (define_insn "*ifcompare_move_plus"
9392 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9393 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9394 [(match_operand:SI 4 "s_register_operand" "r,r")
9395 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9396 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9397 (plus:SI
9398 (match_operand:SI 2 "s_register_operand" "r,r")
9399 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9400 (clobber (reg:CC CC_REGNUM))]
9401 "TARGET_ARM"
9402 "#"
9403 [(set_attr "conds" "clob")
9404 (set_attr "length" "8,12")
9405 (set_attr "type" "multiple")]
9406 )
9407
9408 (define_insn "*if_move_plus"
9409 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9410 (if_then_else:SI
9411 (match_operator 4 "arm_comparison_operator"
9412 [(match_operand 5 "cc_register" "") (const_int 0)])
9413 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9414 (plus:SI
9415 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9416 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9417 "TARGET_ARM"
9418 "@
9419 add%D4\\t%0, %2, %3
9420 sub%D4\\t%0, %2, #%n3
9421 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9422 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9423 [(set_attr "conds" "use")
9424 (set_attr "length" "4,4,8,8")
9425 (set_attr_alternative "type"
9426 [(if_then_else (match_operand 3 "const_int_operand" "")
9427 (const_string "alu_imm")
9428 (const_string "alu_sreg"))
9429 (const_string "alu_imm")
9430 (const_string "multiple")
9431 (const_string "multiple")])]
9432 )
9433
9434 (define_insn "*ifcompare_arith_arith"
9435 [(set (match_operand:SI 0 "s_register_operand" "=r")
9436 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9437 [(match_operand:SI 5 "s_register_operand" "r")
9438 (match_operand:SI 6 "arm_add_operand" "rIL")])
9439 (match_operator:SI 8 "shiftable_operator"
9440 [(match_operand:SI 1 "s_register_operand" "r")
9441 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9442 (match_operator:SI 7 "shiftable_operator"
9443 [(match_operand:SI 3 "s_register_operand" "r")
9444 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9445 (clobber (reg:CC CC_REGNUM))]
9446 "TARGET_ARM"
9447 "#"
9448 [(set_attr "conds" "clob")
9449 (set_attr "length" "12")
9450 (set_attr "type" "multiple")]
9451 )
9452
9453 (define_insn "*if_arith_arith"
9454 [(set (match_operand:SI 0 "s_register_operand" "=r")
9455 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9456 [(match_operand 8 "cc_register" "") (const_int 0)])
9457 (match_operator:SI 6 "shiftable_operator"
9458 [(match_operand:SI 1 "s_register_operand" "r")
9459 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9460 (match_operator:SI 7 "shiftable_operator"
9461 [(match_operand:SI 3 "s_register_operand" "r")
9462 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9463 "TARGET_ARM"
9464 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9465 [(set_attr "conds" "use")
9466 (set_attr "length" "8")
9467 (set_attr "type" "multiple")]
9468 )
9469
9470 (define_insn "*ifcompare_arith_move"
9471 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9472 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9473 [(match_operand:SI 2 "s_register_operand" "r,r")
9474 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9475 (match_operator:SI 7 "shiftable_operator"
9476 [(match_operand:SI 4 "s_register_operand" "r,r")
9477 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9478 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9479 (clobber (reg:CC CC_REGNUM))]
9480 "TARGET_ARM"
9481 "*
9482 /* If we have an operation where (op x 0) is the identity operation, the
9483 conditional operator is LT or GE, we are comparing against zero, and
9484 everything is in registers, then we can do this in two instructions.  */
9485 if (operands[3] == const0_rtx
9486 && GET_CODE (operands[7]) != AND
9487 && REG_P (operands[5])
9488 && REG_P (operands[1])
9489 && REGNO (operands[1]) == REGNO (operands[4])
9490 && REGNO (operands[4]) != REGNO (operands[0]))
9491 {
9492 if (GET_CODE (operands[6]) == LT)
9493 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9494 else if (GET_CODE (operands[6]) == GE)
9495 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9496 }
9497 if (CONST_INT_P (operands[3])
9498 && !const_ok_for_arm (INTVAL (operands[3])))
9499 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9500 else
9501 output_asm_insn (\"cmp\\t%2, %3\", operands);
9502 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9503 if (which_alternative != 0)
9504 return \"mov%D6\\t%0, %1\";
9505 return \"\";
9506 "
9507 [(set_attr "conds" "clob")
9508 (set_attr "length" "8,12")
9509 (set_attr "type" "multiple")]
9510 )
9511
9512 (define_insn "*if_arith_move"
9513 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9514 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9515 [(match_operand 6 "cc_register" "") (const_int 0)])
9516 (match_operator:SI 5 "shiftable_operator"
9517 [(match_operand:SI 2 "s_register_operand" "r,r")
9518 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9519 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9520 "TARGET_ARM"
9521 "@
9522 %I5%d4\\t%0, %2, %3
9523 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9524 [(set_attr "conds" "use")
9525 (set_attr "length" "4,8")
9526 (set_attr_alternative "type"
9527 [(if_then_else (match_operand 3 "const_int_operand" "")
9528 (const_string "alu_shift_imm")
9529 (const_string "alu_shift_reg"))
9530 (const_string "multiple")])]
9531 )
9532
9533 (define_insn "*ifcompare_move_arith"
9534 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9535 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9536 [(match_operand:SI 4 "s_register_operand" "r,r")
9537 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9538 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9539 (match_operator:SI 7 "shiftable_operator"
9540 [(match_operand:SI 2 "s_register_operand" "r,r")
9541 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9542 (clobber (reg:CC CC_REGNUM))]
9543 "TARGET_ARM"
9544 "*
9545 /* If we have an operation where (op x 0) is the identity operation, the
9546 conditional operator is LT or GE, we are comparing against zero, and
9547 everything is in registers, then we can do this in two instructions.  */
9548 if (operands[5] == const0_rtx
9549 && GET_CODE (operands[7]) != AND
9550 && REG_P (operands[3])
9551 && REG_P (operands[1])
9552 && REGNO (operands[1]) == REGNO (operands[2])
9553 && REGNO (operands[2]) != REGNO (operands[0]))
9554 {
9555 if (GET_CODE (operands[6]) == GE)
9556 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9557 else if (GET_CODE (operands[6]) == LT)
9558 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9559 }
9560
9561 if (CONST_INT_P (operands[5])
9562 && !const_ok_for_arm (INTVAL (operands[5])))
9563 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9564 else
9565 output_asm_insn (\"cmp\\t%4, %5\", operands);
9566
9567 if (which_alternative != 0)
9568 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9569 return \"%I7%D6\\t%0, %2, %3\";
9570 "
9571 [(set_attr "conds" "clob")
9572 (set_attr "length" "8,12")
9573 (set_attr "type" "multiple")]
9574 )
9575
9576 (define_insn "*if_move_arith"
9577 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9578 (if_then_else:SI
9579 (match_operator 4 "arm_comparison_operator"
9580 [(match_operand 6 "cc_register" "") (const_int 0)])
9581 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9582 (match_operator:SI 5 "shiftable_operator"
9583 [(match_operand:SI 2 "s_register_operand" "r,r")
9584 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9585 "TARGET_ARM"
9586 "@
9587 %I5%D4\\t%0, %2, %3
9588 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9589 [(set_attr "conds" "use")
9590 (set_attr "length" "4,8")
9591 (set_attr_alternative "type"
9592 [(if_then_else (match_operand 3 "const_int_operand" "")
9593 (const_string "alu_shift_imm")
9594 (const_string "alu_shift_reg"))
9595 (const_string "multiple")])]
9596 )
9597
9598 (define_insn "*ifcompare_move_not"
9599 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9600 (if_then_else:SI
9601 (match_operator 5 "arm_comparison_operator"
9602 [(match_operand:SI 3 "s_register_operand" "r,r")
9603 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9604 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9605 (not:SI
9606 (match_operand:SI 2 "s_register_operand" "r,r"))))
9607 (clobber (reg:CC CC_REGNUM))]
9608 "TARGET_ARM"
9609 "#"
9610 [(set_attr "conds" "clob")
9611 (set_attr "length" "8,12")
9612 (set_attr "type" "multiple")]
9613 )
9614
9615 (define_insn "*if_move_not"
9616 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9617 (if_then_else:SI
9618 (match_operator 4 "arm_comparison_operator"
9619 [(match_operand 3 "cc_register" "") (const_int 0)])
9620 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9621 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9622 "TARGET_ARM"
9623 "@
9624 mvn%D4\\t%0, %2
9625 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9626 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9627 [(set_attr "conds" "use")
9628 (set_attr "type" "mvn_reg,multiple,multiple")
9629 (set_attr "length" "4,8,8")]
9631 )
9632
9633 (define_insn "*ifcompare_not_move"
9634 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9635 (if_then_else:SI
9636 (match_operator 5 "arm_comparison_operator"
9637 [(match_operand:SI 3 "s_register_operand" "r,r")
9638 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9639 (not:SI
9640 (match_operand:SI 2 "s_register_operand" "r,r"))
9641 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9642 (clobber (reg:CC CC_REGNUM))]
9643 "TARGET_ARM"
9644 "#"
9645 [(set_attr "conds" "clob")
9646 (set_attr "length" "8,12")
9647 (set_attr "type" "multiple")]
9648 )
9649
9650 (define_insn "*if_not_move"
9651 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9652 (if_then_else:SI
9653 (match_operator 4 "arm_comparison_operator"
9654 [(match_operand 3 "cc_register" "") (const_int 0)])
9655 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9656 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9657 "TARGET_ARM"
9658 "@
9659 mvn%d4\\t%0, %2
9660 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9661 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9662 [(set_attr "conds" "use")
9663 (set_attr "type" "mvn_reg,multiple,multiple")
9664 (set_attr "length" "4,8,8")]
9665 )
9666
9667 (define_insn "*ifcompare_shift_move"
9668 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9669 (if_then_else:SI
9670 (match_operator 6 "arm_comparison_operator"
9671 [(match_operand:SI 4 "s_register_operand" "r,r")
9672 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9673 (match_operator:SI 7 "shift_operator"
9674 [(match_operand:SI 2 "s_register_operand" "r,r")
9675 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9676 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9677 (clobber (reg:CC CC_REGNUM))]
9678 "TARGET_ARM"
9679 "#"
9680 [(set_attr "conds" "clob")
9681 (set_attr "length" "8,12")
9682 (set_attr "type" "multiple")]
9683 )
9684
9685 (define_insn "*if_shift_move"
9686 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9687 (if_then_else:SI
9688 (match_operator 5 "arm_comparison_operator"
9689 [(match_operand 6 "cc_register" "") (const_int 0)])
9690 (match_operator:SI 4 "shift_operator"
9691 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9692 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9693 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9694 "TARGET_ARM"
9695 "@
9696 mov%d5\\t%0, %2%S4
9697 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9698 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9699 [(set_attr "conds" "use")
9700 (set_attr "shift" "2")
9701 (set_attr "length" "4,8,8")
9702 (set_attr_alternative "type"
9703 [(if_then_else (match_operand 3 "const_int_operand" "")
9704 (const_string "mov_shift")
9705 (const_string "mov_shift_reg"))
9706 (const_string "multiple")
9707 (const_string "multiple")])]
9708 )
9709
9710 (define_insn "*ifcompare_move_shift"
9711 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9712 (if_then_else:SI
9713 (match_operator 6 "arm_comparison_operator"
9714 [(match_operand:SI 4 "s_register_operand" "r,r")
9715 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9716 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9717 (match_operator:SI 7 "shift_operator"
9718 [(match_operand:SI 2 "s_register_operand" "r,r")
9719 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9720 (clobber (reg:CC CC_REGNUM))]
9721 "TARGET_ARM"
9722 "#"
9723 [(set_attr "conds" "clob")
9724 (set_attr "length" "8,12")
9725 (set_attr "type" "multiple")]
9726 )
9727
9728 (define_insn "*if_move_shift"
9729 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9730 (if_then_else:SI
9731 (match_operator 5 "arm_comparison_operator"
9732 [(match_operand 6 "cc_register" "") (const_int 0)])
9733 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9734 (match_operator:SI 4 "shift_operator"
9735 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9736 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9737 "TARGET_ARM"
9738 "@
9739 mov%D5\\t%0, %2%S4
9740 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9741 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9742 [(set_attr "conds" "use")
9743 (set_attr "shift" "2")
9744 (set_attr "length" "4,8,8")
9745 (set_attr_alternative "type"
9746 [(if_then_else (match_operand 3 "const_int_operand" "")
9747 (const_string "mov_shift")
9748 (const_string "mov_shift_reg"))
9749 (const_string "multiple")
9750 (const_string "multiple")])]
9751 )
9752
9753 (define_insn "*ifcompare_shift_shift"
9754 [(set (match_operand:SI 0 "s_register_operand" "=r")
9755 (if_then_else:SI
9756 (match_operator 7 "arm_comparison_operator"
9757 [(match_operand:SI 5 "s_register_operand" "r")
9758 (match_operand:SI 6 "arm_add_operand" "rIL")])
9759 (match_operator:SI 8 "shift_operator"
9760 [(match_operand:SI 1 "s_register_operand" "r")
9761 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9762 (match_operator:SI 9 "shift_operator"
9763 [(match_operand:SI 3 "s_register_operand" "r")
9764 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9765 (clobber (reg:CC CC_REGNUM))]
9766 "TARGET_ARM"
9767 "#"
9768 [(set_attr "conds" "clob")
9769 (set_attr "length" "12")
9770 (set_attr "type" "multiple")]
9771 )
9772
9773 (define_insn "*if_shift_shift"
9774 [(set (match_operand:SI 0 "s_register_operand" "=r")
9775 (if_then_else:SI
9776 (match_operator 5 "arm_comparison_operator"
9777 [(match_operand 8 "cc_register" "") (const_int 0)])
9778 (match_operator:SI 6 "shift_operator"
9779 [(match_operand:SI 1 "s_register_operand" "r")
9780 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9781 (match_operator:SI 7 "shift_operator"
9782 [(match_operand:SI 3 "s_register_operand" "r")
9783 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
9784 "TARGET_ARM"
9785 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
9786 [(set_attr "conds" "use")
9787 (set_attr "shift" "1")
9788 (set_attr "length" "8")
9789 (set (attr "type") (if_then_else
9790 (and (match_operand 2 "const_int_operand" "")
9791 (match_operand 4 "const_int_operand" ""))
9792 (const_string "mov_shift")
9793 (const_string "mov_shift_reg")))]
9794 )
9795
9796 (define_insn "*ifcompare_not_arith"
9797 [(set (match_operand:SI 0 "s_register_operand" "=r")
9798 (if_then_else:SI
9799 (match_operator 6 "arm_comparison_operator"
9800 [(match_operand:SI 4 "s_register_operand" "r")
9801 (match_operand:SI 5 "arm_add_operand" "rIL")])
9802 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9803 (match_operator:SI 7 "shiftable_operator"
9804 [(match_operand:SI 2 "s_register_operand" "r")
9805 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
9806 (clobber (reg:CC CC_REGNUM))]
9807 "TARGET_ARM"
9808 "#"
9809 [(set_attr "conds" "clob")
9810 (set_attr "length" "12")
9811 (set_attr "type" "multiple")]
9812 )
9813
9814 (define_insn "*if_not_arith"
9815 [(set (match_operand:SI 0 "s_register_operand" "=r")
9816 (if_then_else:SI
9817 (match_operator 5 "arm_comparison_operator"
9818 [(match_operand 4 "cc_register" "") (const_int 0)])
9819 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9820 (match_operator:SI 6 "shiftable_operator"
9821 [(match_operand:SI 2 "s_register_operand" "r")
9822 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
9823 "TARGET_ARM"
9824 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
9825 [(set_attr "conds" "use")
9826 (set_attr "type" "mvn_reg")
9827 (set_attr "length" "8")]
9828 )
9829
9830 (define_insn "*ifcompare_arith_not"
9831 [(set (match_operand:SI 0 "s_register_operand" "=r")
9832 (if_then_else:SI
9833 (match_operator 6 "arm_comparison_operator"
9834 [(match_operand:SI 4 "s_register_operand" "r")
9835 (match_operand:SI 5 "arm_add_operand" "rIL")])
9836 (match_operator:SI 7 "shiftable_operator"
9837 [(match_operand:SI 2 "s_register_operand" "r")
9838 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9839 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
9840 (clobber (reg:CC CC_REGNUM))]
9841 "TARGET_ARM"
9842 "#"
9843 [(set_attr "conds" "clob")
9844 (set_attr "length" "12")
9845 (set_attr "type" "multiple")]
9846 )
9847
9848 (define_insn "*if_arith_not"
9849 [(set (match_operand:SI 0 "s_register_operand" "=r")
9850 (if_then_else:SI
9851 (match_operator 5 "arm_comparison_operator"
9852 [(match_operand 4 "cc_register" "") (const_int 0)])
9853 (match_operator:SI 6 "shiftable_operator"
9854 [(match_operand:SI 2 "s_register_operand" "r")
9855 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9856 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
9857 "TARGET_ARM"
9858 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
9859 [(set_attr "conds" "use")
9860 (set_attr "type" "multiple")
9861 (set_attr "length" "8")]
9862 )
9863
9864 (define_insn "*ifcompare_neg_move"
9865 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9866 (if_then_else:SI
9867 (match_operator 5 "arm_comparison_operator"
9868 [(match_operand:SI 3 "s_register_operand" "r,r")
9869 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9870 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
9871 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9872 (clobber (reg:CC CC_REGNUM))]
9873 "TARGET_ARM"
9874 "#"
9875 [(set_attr "conds" "clob")
9876 (set_attr "length" "8,12")
9877 (set_attr "type" "multiple")]
9878 )
9879
9880 (define_insn_and_split "*if_neg_move"
9881 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
9882 (if_then_else:SI
9883 (match_operator 4 "arm_comparison_operator"
9884 [(match_operand 3 "cc_register" "") (const_int 0)])
9885 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
9886 (match_operand:SI 1 "s_register_operand" "0,0")))]
9887 "TARGET_32BIT"
9888 "#"
9889 "&& reload_completed"
9890 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
9891 (set (match_dup 0) (neg:SI (match_dup 2))))]
9892 ""
9893 [(set_attr "conds" "use")
9894 (set_attr "length" "4")
9895 (set_attr "arch" "t2,32")
9896 (set_attr "enabled_for_short_it" "yes,no")
9897 (set_attr "type" "logic_shift_imm")]
9898 )
9899
9900 (define_insn "*ifcompare_move_neg"
9901 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9902 (if_then_else:SI
9903 (match_operator 5 "arm_comparison_operator"
9904 [(match_operand:SI 3 "s_register_operand" "r,r")
9905 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9906 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9907 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
9908 (clobber (reg:CC CC_REGNUM))]
9909 "TARGET_ARM"
9910 "#"
9911 [(set_attr "conds" "clob")
9912 (set_attr "length" "8,12")
9913 (set_attr "type" "multiple")]
9914 )
9915
9916 (define_insn_and_split "*if_move_neg"
9917 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
9918 (if_then_else:SI
9919 (match_operator 4 "arm_comparison_operator"
9920 [(match_operand 3 "cc_register" "") (const_int 0)])
9921 (match_operand:SI 1 "s_register_operand" "0,0")
9922 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
9923 "TARGET_32BIT"
9924 "#"
9925 "&& reload_completed"
9926 [(cond_exec (match_dup 5)
9927 (set (match_dup 0) (neg:SI (match_dup 2))))]
9928 {
9929 machine_mode mode = GET_MODE (operands[3]);
9930 rtx_code rc = GET_CODE (operands[4]);
9931
9932 if (mode == CCFPmode || mode == CCFPEmode)
9933 rc = reverse_condition_maybe_unordered (rc);
9934 else
9935 rc = reverse_condition (rc);
9936
9937 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
9938 }
9939 [(set_attr "conds" "use")
9940 (set_attr "length" "4")
9941 (set_attr "arch" "t2,32")
9942 (set_attr "enabled_for_short_it" "yes,no")
9943 (set_attr "type" "logic_shift_imm")]
9944 )
9945
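;; Combine two loads from adjacent words with an arithmetic operation on the
;; loaded values.  For instance, r0 = mem[rB + 4] + mem[rB + 8] can become
;;	ldmib	rB, {r0, r4}	@ loads rB+4 into r0 and rB+8 into r4
;;	add	r0, r0, r4
;; (register names are illustrative; r4 stands for the scratch operand).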
9946 (define_insn "*arith_adjacentmem"
9947 [(set (match_operand:SI 0 "s_register_operand" "=r")
9948 (match_operator:SI 1 "shiftable_operator"
9949 [(match_operand:SI 2 "memory_operand" "m")
9950 (match_operand:SI 3 "memory_operand" "m")]))
9951 (clobber (match_scratch:SI 4 "=r"))]
9952 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
9953 "*
9954 {
9955 rtx ldm[3];
9956 rtx arith[4];
9957 rtx base_reg;
9958 HOST_WIDE_INT val1 = 0, val2 = 0;
9959
9960 if (REGNO (operands[0]) > REGNO (operands[4]))
9961 {
9962 ldm[1] = operands[4];
9963 ldm[2] = operands[0];
9964 }
9965 else
9966 {
9967 ldm[1] = operands[0];
9968 ldm[2] = operands[4];
9969 }
9970
9971 base_reg = XEXP (operands[2], 0);
9972
9973 if (!REG_P (base_reg))
9974 {
9975 val1 = INTVAL (XEXP (base_reg, 1));
9976 base_reg = XEXP (base_reg, 0);
9977 }
9978
9979 if (!REG_P (XEXP (operands[3], 0)))
9980 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
9981
9982 arith[0] = operands[0];
9983 arith[3] = operands[1];
9984
9985 if (val1 < val2)
9986 {
9987 arith[1] = ldm[1];
9988 arith[2] = ldm[2];
9989 }
9990 else
9991 {
9992 arith[1] = ldm[2];
9993 arith[2] = ldm[1];
9994 }
9995
9996 ldm[0] = base_reg;
9997 if (val1 != 0 && val2 != 0)
9998 {
9999 rtx ops[3];
10000
10001 if (val1 == 4 || val2 == 4)
10002 /* Other val must be 8, since we know they are adjacent and neither
10003 is zero. */
10004 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
10005 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10006 {
10007 ldm[0] = ops[0] = operands[4];
10008 ops[1] = base_reg;
10009 ops[2] = GEN_INT (val1);
10010 output_add_immediate (ops);
10011 if (val1 < val2)
10012 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10013 else
10014 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10015 }
10016 else
10017 {
10018 /* Offset is out of range for a single add, so use two ldr. */
10019 ops[0] = ldm[1];
10020 ops[1] = base_reg;
10021 ops[2] = GEN_INT (val1);
10022 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10023 ops[0] = ldm[2];
10024 ops[2] = GEN_INT (val2);
10025 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10026 }
10027 }
10028 else if (val1 != 0)
10029 {
10030 if (val1 < val2)
10031 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10032 else
10033 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10034 }
10035 else
10036 {
10037 if (val1 < val2)
10038 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10039 else
10040 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10041 }
10042 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10043 return \"\";
10044 }"
10045 [(set_attr "length" "12")
10046 (set_attr "predicable" "yes")
10047 (set_attr "type" "load_4")]
10048 )
10049
10050 ; This pattern is never tried by combine, so do it as a peephole
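; For example, "mov r0, r1" followed by "cmp r1, #0" can typically be fused
; into a single flag-setting move such as "subs r0, r1, #0".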
10051
10052 (define_peephole2
10053 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10054 (match_operand:SI 1 "arm_general_register_operand" ""))
10055 (set (reg:CC CC_REGNUM)
10056 (compare:CC (match_dup 1) (const_int 0)))]
10057 "TARGET_ARM"
10058 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10059 (set (match_dup 0) (match_dup 1))])]
10060 ""
10061 )
10062
10063 (define_split
10064 [(set (match_operand:SI 0 "s_register_operand" "")
10065 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10066 (const_int 0))
10067 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10068 [(match_operand:SI 3 "s_register_operand" "")
10069 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10070 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10071 "TARGET_ARM"
10072 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10073 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10074 (match_dup 5)))]
10075 ""
10076 )
10077
10078 ;; This split can be used because CC_Z mode implies that the following
10079 ;; branch will be an equality, or an unsigned inequality, so the sign
10080 ;; extension is not needed.
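;; For example, comparing (x << 24) against 0xab000000 (a constant whose low
;; 24 bits are zero, as the split condition requires) is equivalent to
;; comparing zero_extend (x) against 0xab, avoiding the shift altogether.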
10081
10082 (define_split
10083 [(set (reg:CC_Z CC_REGNUM)
10084 (compare:CC_Z
10085 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10086 (const_int 24))
10087 (match_operand 1 "const_int_operand" "")))
10088 (clobber (match_scratch:SI 2 ""))]
10089 "TARGET_ARM
10090 && ((UINTVAL (operands[1]))
10091 == ((UINTVAL (operands[1])) >> 24) << 24)"
10092 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10093 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10094 "
10095 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10096 "
10097 )
10098 ;; ??? Check the patterns above for Thumb-2 usefulness
10099
10100 (define_expand "prologue"
10101 [(clobber (const_int 0))]
10102 "TARGET_EITHER"
10103 "if (TARGET_32BIT)
10104 arm_expand_prologue ();
10105 else
10106 thumb1_expand_prologue ();
10107 DONE;
10108 "
10109 )
10110
10111 (define_expand "epilogue"
10112 [(clobber (const_int 0))]
10113 "TARGET_EITHER"
10114 "
10115 if (crtl->calls_eh_return)
10116 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
10117 if (TARGET_THUMB1)
10118 {
10119 thumb1_expand_epilogue ();
10120 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10121 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10122 }
10123 else if (HAVE_return)
10124 {
10125 /* HAVE_return is testing for USE_RETURN_INSN (FALSE). Hence,
10126 no need for explicit testing again. */
10127 emit_jump_insn (gen_return ());
10128 }
10129 else if (TARGET_32BIT)
10130 {
10131 arm_expand_epilogue (true);
10132 }
10133 DONE;
10134 "
10135 )
10136
10137 ;; Note - although unspec_volatiles USE all hard registers,
10138 ;; USEs are ignored after reload has completed. Thus we need
10139 ;; to add an unspec of the link register to ensure that flow
10140 ;; does not think that it is unused by the sibcall branch that
10141 ;; will replace the standard function epilogue.
10142 (define_expand "sibcall_epilogue"
10143 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
10144 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10145 "TARGET_32BIT"
10146 "
10147 arm_expand_epilogue (false);
10148 DONE;
10149 "
10150 )
10151
10152 (define_expand "eh_epilogue"
10153 [(use (match_operand:SI 0 "register_operand"))
10154 (use (match_operand:SI 1 "register_operand"))
10155 (use (match_operand:SI 2 "register_operand"))]
10156 "TARGET_EITHER"
10157 "
10158 {
10159 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10160 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
10161 {
10162 rtx ra = gen_rtx_REG (Pmode, 2);
10163
10164 emit_move_insn (ra, operands[2]);
10165 operands[2] = ra;
10166 }
10167 /* This is a hack -- we may have crystallized the function type too
10168 early. */
10169 cfun->machine->func_type = 0;
10170 }"
10171 )
10172
10173 ;; This split is only used during output to reduce the number of patterns
10174 ;; that need assembler instructions adding to them. We allowed the setting
10175 ;; of the conditions to be implicit during rtl generation so that
10176 ;; the conditional compare patterns would work. However this conflicts to
10177 ;; some extent with the conditional data operations, so we have to split them
10178 ;; up again here.
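;; For example, an if_then_else whose destination is also its "true" arm
;; splits into a compare that sets the condition codes followed by a single
;; conditional move with the comparison reversed, e.g.
;;	cmp	r2, r3
;;	movge	r0, r4
;; so that only the "else" value needs a conditional instruction.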
10179
10180 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10181 ;; conditional execution sufficient?
10182
10183 (define_split
10184 [(set (match_operand:SI 0 "s_register_operand" "")
10185 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10186 [(match_operand 2 "" "") (match_operand 3 "" "")])
10187 (match_dup 0)
10188 (match_operand 4 "" "")))
10189 (clobber (reg:CC CC_REGNUM))]
10190 "TARGET_ARM && reload_completed"
10191 [(set (match_dup 5) (match_dup 6))
10192 (cond_exec (match_dup 7)
10193 (set (match_dup 0) (match_dup 4)))]
10194 "
10195 {
10196 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10197 operands[2], operands[3]);
10198 enum rtx_code rc = GET_CODE (operands[1]);
10199
10200 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10201 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10202 if (mode == CCFPmode || mode == CCFPEmode)
10203 rc = reverse_condition_maybe_unordered (rc);
10204 else
10205 rc = reverse_condition (rc);
10206
10207 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10208 }"
10209 )
10210
10211 (define_split
10212 [(set (match_operand:SI 0 "s_register_operand" "")
10213 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10214 [(match_operand 2 "" "") (match_operand 3 "" "")])
10215 (match_operand 4 "" "")
10216 (match_dup 0)))
10217 (clobber (reg:CC CC_REGNUM))]
10218 "TARGET_ARM && reload_completed"
10219 [(set (match_dup 5) (match_dup 6))
10220 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10221 (set (match_dup 0) (match_dup 4)))]
10222 "
10223 {
10224 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10225 operands[2], operands[3]);
10226
10227 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10228 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10229 }"
10230 )
10231
10232 (define_split
10233 [(set (match_operand:SI 0 "s_register_operand" "")
10234 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10235 [(match_operand 2 "" "") (match_operand 3 "" "")])
10236 (match_operand 4 "" "")
10237 (match_operand 5 "" "")))
10238 (clobber (reg:CC CC_REGNUM))]
10239 "TARGET_ARM && reload_completed"
10240 [(set (match_dup 6) (match_dup 7))
10241 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10242 (set (match_dup 0) (match_dup 4)))
10243 (cond_exec (match_dup 8)
10244 (set (match_dup 0) (match_dup 5)))]
10245 "
10246 {
10247 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10248 operands[2], operands[3]);
10249 enum rtx_code rc = GET_CODE (operands[1]);
10250
10251 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10252 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10253 if (mode == CCFPmode || mode == CCFPEmode)
10254 rc = reverse_condition_maybe_unordered (rc);
10255 else
10256 rc = reverse_condition (rc);
10257
10258 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10259 }"
10260 )
10261
10262 (define_split
10263 [(set (match_operand:SI 0 "s_register_operand" "")
10264 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10265 [(match_operand:SI 2 "s_register_operand" "")
10266 (match_operand:SI 3 "arm_add_operand" "")])
10267 (match_operand:SI 4 "arm_rhs_operand" "")
10268 (not:SI
10269 (match_operand:SI 5 "s_register_operand" ""))))
10270 (clobber (reg:CC CC_REGNUM))]
10271 "TARGET_ARM && reload_completed"
10272 [(set (match_dup 6) (match_dup 7))
10273 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10274 (set (match_dup 0) (match_dup 4)))
10275 (cond_exec (match_dup 8)
10276 (set (match_dup 0) (not:SI (match_dup 5))))]
10277 "
10278 {
10279 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10280 operands[2], operands[3]);
10281 enum rtx_code rc = GET_CODE (operands[1]);
10282
10283 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10284 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10285 if (mode == CCFPmode || mode == CCFPEmode)
10286 rc = reverse_condition_maybe_unordered (rc);
10287 else
10288 rc = reverse_condition (rc);
10289
10290 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10291 }"
10292 )
10293
10294 (define_insn "*cond_move_not"
10295 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10296 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10297 [(match_operand 3 "cc_register" "") (const_int 0)])
10298 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10299 (not:SI
10300 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10301 "TARGET_ARM"
10302 "@
10303 mvn%D4\\t%0, %2
10304 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10305 [(set_attr "conds" "use")
10306 (set_attr "type" "mvn_reg,multiple")
10307 (set_attr "length" "4,8")]
10308 )
10309
10310 ;; The next two patterns occur when an AND operation is followed by an
10311 ;; scc insn sequence.
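;; For example, sign-extracting a single bit N of r1 into r0 yields either
;; 0 or -1; the first pattern below emits, schematically,
;;   ands  r0, r1, #(1 << N)
;;   mvnne r0, #0
;; (a sketch only; N stands for the constant bit position in operand 2).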
10312
10313 (define_insn "*sign_extract_onebit"
10314 [(set (match_operand:SI 0 "s_register_operand" "=r")
10315 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10316 (const_int 1)
10317 (match_operand:SI 2 "const_int_operand" "n")))
10318 (clobber (reg:CC CC_REGNUM))]
10319 "TARGET_ARM"
10320 "*
10321 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10322 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10323 return \"mvnne\\t%0, #0\";
10324 "
10325 [(set_attr "conds" "clob")
10326 (set_attr "length" "8")
10327 (set_attr "type" "multiple")]
10328 )
10329
10330 (define_insn "*not_signextract_onebit"
10331 [(set (match_operand:SI 0 "s_register_operand" "=r")
10332 (not:SI
10333 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10334 (const_int 1)
10335 (match_operand:SI 2 "const_int_operand" "n"))))
10336 (clobber (reg:CC CC_REGNUM))]
10337 "TARGET_ARM"
10338 "*
10339 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10340 output_asm_insn (\"tst\\t%1, %2\", operands);
10341 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10342 return \"movne\\t%0, #0\";
10343 "
10344 [(set_attr "conds" "clob")
10345 (set_attr "length" "12")
10346 (set_attr "type" "multiple")]
10347 )
10348 ;; ??? The above patterns need auditing for Thumb-2
10349
10350 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10351 ;; expressions. For simplicity, the first register is also in the unspec
10352 ;; part.
10353 ;; To avoid the use of a GNU extension, the length attribute is computed
10354 ;; by the C function arm_attr_length_push_multi.
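;; As a rough illustration of the two code paths in the output template
;; below: saving {r4, r5, lr} emits "push {r4, r5, lr}", while a
;; single-register save on ARM uses "str rN, [sp, #-4]!" instead (see the
;; StrongARM note in the code).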
10355 (define_insn "*push_multi"
10356 [(match_parallel 2 "multi_register_push"
10357 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
10358 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10359 UNSPEC_PUSH_MULT))])]
10360 ""
10361 "*
10362 {
10363 int num_saves = XVECLEN (operands[2], 0);
10364
10365 /* For the StrongARM at least it is faster to
10366 use STR to store only a single register.
10367 In Thumb mode always use push, and the assembler will pick
10368 something appropriate. */
10369 if (num_saves == 1 && TARGET_ARM)
10370 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
10371 else
10372 {
10373 int i;
10374 char pattern[100];
10375
10376 if (TARGET_32BIT)
10377 strcpy (pattern, \"push%?\\t{%1\");
10378 else
10379 strcpy (pattern, \"push\\t{%1\");
10380
10381 for (i = 1; i < num_saves; i++)
10382 {
10383 strcat (pattern, \", %|\");
10384 strcat (pattern,
10385 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10386 }
10387
10388 strcat (pattern, \"}\");
10389 output_asm_insn (pattern, operands);
10390 }
10391
10392 return \"\";
10393 }"
10394 [(set_attr "type" "store_16")
10395 (set (attr "length")
10396 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
10397 )
10398
10399 (define_insn "stack_tie"
10400 [(set (mem:BLK (scratch))
10401 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10402 (match_operand:SI 1 "s_register_operand" "rk")]
10403 UNSPEC_PRLG_STK))]
10404 ""
10405 ""
10406 [(set_attr "length" "0")
10407 (set_attr "type" "block")]
10408 )
10409
10410 ;; Pop (as used in epilogue RTL)
10411 ;;
10412 (define_insn "*load_multiple_with_writeback"
10413 [(match_parallel 0 "load_multiple_operation"
10414 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10415 (plus:SI (match_dup 1)
10416 (match_operand:SI 2 "const_int_I_operand" "I")))
10417 (set (match_operand:SI 3 "s_register_operand" "=rk")
10418 (mem:SI (match_dup 1)))
10419 ])]
10420 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10421 "*
10422 {
10423 arm_output_multireg_pop (operands, /*return_pc=*/false,
10424 /*cond=*/const_true_rtx,
10425 /*reverse=*/false,
10426 /*update=*/true);
10427 return \"\";
10428 }
10429 "
10430 [(set_attr "type" "load_16")
10431 (set_attr "predicable" "yes")
10432 (set (attr "length")
10433 (symbol_ref "arm_attr_length_pop_multi (operands,
10434 /*return_pc=*/false,
10435 /*write_back_p=*/true)"))]
10436 )
10437
10438 ;; Pop with return (as used in epilogue RTL)
10439 ;;
10440 ;; This instruction is generated when the registers are popped at the end of
10441 ;; the epilogue.  Here, instead of popping the value into LR and then generating
10442 ;; a jump to LR, the value is popped into PC directly.  Hence, the pattern is
10443 ;; combined with (return).
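;; Schematically, an epilogue that restores r4-r6 and returns emits a
;; single instruction such as
;;   pop   {r4, r5, r6, pc}
;; rather than popping into LR and branching to it (an illustration only;
;; the exact mnemonic is chosen by arm_output_multireg_pop).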
10444 (define_insn "*pop_multiple_with_writeback_and_return"
10445 [(match_parallel 0 "pop_multiple_return"
10446 [(return)
10447 (set (match_operand:SI 1 "s_register_operand" "+rk")
10448 (plus:SI (match_dup 1)
10449 (match_operand:SI 2 "const_int_I_operand" "I")))
10450 (set (match_operand:SI 3 "s_register_operand" "=rk")
10451 (mem:SI (match_dup 1)))
10452 ])]
10453 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10454 "*
10455 {
10456 arm_output_multireg_pop (operands, /*return_pc=*/true,
10457 /*cond=*/const_true_rtx,
10458 /*reverse=*/false,
10459 /*update=*/true);
10460 return \"\";
10461 }
10462 "
10463 [(set_attr "type" "load_16")
10464 (set_attr "predicable" "yes")
10465 (set (attr "length")
10466 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10467 /*write_back_p=*/true)"))]
10468 )
10469
10470 (define_insn "*pop_multiple_with_return"
10471 [(match_parallel 0 "pop_multiple_return"
10472 [(return)
10473 (set (match_operand:SI 2 "s_register_operand" "=rk")
10474 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
10475 ])]
10476 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10477 "*
10478 {
10479 arm_output_multireg_pop (operands, /*return_pc=*/true,
10480 /*cond=*/const_true_rtx,
10481 /*reverse=*/false,
10482 /*update=*/false);
10483 return \"\";
10484 }
10485 "
10486 [(set_attr "type" "load_16")
10487 (set_attr "predicable" "yes")
10488 (set (attr "length")
10489 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10490 /*write_back_p=*/false)"))]
10491 )
10492
10493 ;; Load into PC and return
10494 (define_insn "*ldr_with_return"
10495 [(return)
10496 (set (reg:SI PC_REGNUM)
10497 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
10498 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10499 "ldr%?\t%|pc, [%0], #4"
10500 [(set_attr "type" "load_4")
10501 (set_attr "predicable" "yes")]
10502 )
10503 ;; Pop for floating-point registers (as used in epilogue RTL)
10504 (define_insn "*vfp_pop_multiple_with_writeback"
10505 [(match_parallel 0 "pop_multiple_fp"
10506 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10507 (plus:SI (match_dup 1)
10508 (match_operand:SI 2 "const_int_I_operand" "I")))
10509 (set (match_operand:DF 3 "vfp_hard_register_operand" "")
10510 (mem:DF (match_dup 1)))])]
10511 "TARGET_32BIT && TARGET_HARD_FLOAT"
10512 "*
10513 {
10514 int num_regs = XVECLEN (operands[0], 0);
10515 char pattern[100];
10516 rtx op_list[2];
10517 strcpy (pattern, \"vldm\\t\");
10518 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
10519 strcat (pattern, \"!, {\");
10520 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
10521 strcat (pattern, \"%P0\");
10522 if ((num_regs - 1) > 1)
10523 {
10524 strcat (pattern, \"-%P1\");
10525 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
10526 }
10527
10528 strcat (pattern, \"}\");
10529 output_asm_insn (pattern, op_list);
10530 return \"\";
10531 }
10532 "
10533 [(set_attr "type" "load_16")
10534 (set_attr "conds" "unconditional")
10535 (set_attr "predicable" "no")]
10536 )
10537
10538 ;; Special patterns for dealing with the constant pool
10539
10540 (define_insn "align_4"
10541 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10542 "TARGET_EITHER"
10543 "*
10544 assemble_align (32);
10545 return \"\";
10546 "
10547 [(set_attr "type" "no_insn")]
10548 )
10549
10550 (define_insn "align_8"
10551 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10552 "TARGET_EITHER"
10553 "*
10554 assemble_align (64);
10555 return \"\";
10556 "
10557 [(set_attr "type" "no_insn")]
10558 )
10559
10560 (define_insn "consttable_end"
10561 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10562 "TARGET_EITHER"
10563 "*
10564 making_const_table = FALSE;
10565 return \"\";
10566 "
10567 [(set_attr "type" "no_insn")]
10568 )
10569
10570 (define_insn "consttable_1"
10571 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10572 "TARGET_EITHER"
10573 "*
10574 making_const_table = TRUE;
10575 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10576 assemble_zeros (3);
10577 return \"\";
10578 "
10579 [(set_attr "length" "4")
10580 (set_attr "type" "no_insn")]
10581 )
10582
10583 (define_insn "consttable_2"
10584 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10585 "TARGET_EITHER"
10586 "*
10587 {
10588 rtx x = operands[0];
10589 making_const_table = TRUE;
10590 switch (GET_MODE_CLASS (GET_MODE (x)))
10591 {
10592 case MODE_FLOAT:
10593 arm_emit_fp16_const (x);
10594 break;
10595 default:
10596 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10597 assemble_zeros (2);
10598 break;
10599 }
10600 return \"\";
10601 }"
10602 [(set_attr "length" "4")
10603 (set_attr "type" "no_insn")]
10604 )
10605
10606 (define_insn "consttable_4"
10607 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10608 "TARGET_EITHER"
10609 "*
10610 {
10611 rtx x = operands[0];
10612 making_const_table = TRUE;
10613 scalar_float_mode float_mode;
10614 if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
10615 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
10616 else
10617 {
10618 /* XXX: Sometimes gcc does something really dumb and ends up with
10619 a HIGH in a constant pool entry, usually because it's trying to
10620 load into a VFP register. We know this will always be used in
10621 combination with a LO_SUM which ignores the high bits, so just
10622 strip off the HIGH. */
10623 if (GET_CODE (x) == HIGH)
10624 x = XEXP (x, 0);
10625 assemble_integer (x, 4, BITS_PER_WORD, 1);
10626 mark_symbol_refs_as_used (x);
10627 }
10628 return \"\";
10629 }"
10630 [(set_attr "length" "4")
10631 (set_attr "type" "no_insn")]
10632 )
10633
10634 (define_insn "consttable_8"
10635 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10636 "TARGET_EITHER"
10637 "*
10638 {
10639 making_const_table = TRUE;
10640 scalar_float_mode float_mode;
10641 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
10642 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
10643 float_mode, BITS_PER_WORD);
10644 else
10645 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10646 return \"\";
10647 }"
10648 [(set_attr "length" "8")
10649 (set_attr "type" "no_insn")]
10650 )
10651
10652 (define_insn "consttable_16"
10653 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10654 "TARGET_EITHER"
10655 "*
10656 {
10657 making_const_table = TRUE;
10658 scalar_float_mode float_mode;
10659 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
10660 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
10661 float_mode, BITS_PER_WORD);
10662 else
10663 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10664 return \"\";
10665 }"
10666 [(set_attr "length" "16")
10667 (set_attr "type" "no_insn")]
10668 )
10669
10670 ;; V5 instructions.
10671
10672 (define_insn "clzsi2"
10673 [(set (match_operand:SI 0 "s_register_operand" "=r")
10674 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10675 "TARGET_32BIT && arm_arch5t"
10676 "clz%?\\t%0, %1"
10677 [(set_attr "predicable" "yes")
10678 (set_attr "type" "clz")])
10679
10680 (define_insn "rbitsi2"
10681 [(set (match_operand:SI 0 "s_register_operand" "=r")
10682 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
10683 "TARGET_32BIT && arm_arch_thumb2"
10684 "rbit%?\\t%0, %1"
10685 [(set_attr "predicable" "yes")
10686 (set_attr "type" "clz")])
10687
10688 ;; Keep this as a CTZ expression until after reload and then split
10689 ;; into RBIT + CLZ. Since RBIT is represented as an UNSPEC it is unlikely
10690 ;; to fold with any other expression.
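;; For example, a count-trailing-zeros of r0 is kept as a CTZ rtx until
;; after reload and is then split into, schematically,
;;   rbit  r0, r0
;;   clz   r0, r0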
10691
10692 (define_insn_and_split "ctzsi2"
10693 [(set (match_operand:SI 0 "s_register_operand" "=r")
10694 (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10695 "TARGET_32BIT && arm_arch_thumb2"
10696 "#"
10697 "&& reload_completed"
10698 [(const_int 0)]
10699 "
10700 emit_insn (gen_rbitsi2 (operands[0], operands[1]));
10701 emit_insn (gen_clzsi2 (operands[0], operands[0]));
10702 DONE;
10703 ")
10704
10705 ;; V5E instructions.
10706
10707 (define_insn "prefetch"
10708 [(prefetch (match_operand:SI 0 "address_operand" "p")
10709 (match_operand:SI 1 "" "")
10710 (match_operand:SI 2 "" ""))]
10711 "TARGET_32BIT && arm_arch5te"
10712 "pld\\t%a0"
10713 [(set_attr "type" "load_4")]
10714 )
10715
10716 ;; General predication pattern
10717
10718 (define_cond_exec
10719 [(match_operator 0 "arm_comparison_operator"
10720 [(match_operand 1 "cc_register" "")
10721 (const_int 0)])]
10722 "TARGET_32BIT
10723 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
10724 ""
10725 [(set_attr "predicated" "yes")]
10726 )
10727
10728 (define_insn "force_register_use"
10729 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
10730 ""
10731 "%@ %0 needed"
10732 [(set_attr "length" "0")
10733 (set_attr "type" "no_insn")]
10734 )
10735
10736
10737 ;; Patterns for exception handling
10738
10739 (define_expand "eh_return"
10740 [(use (match_operand 0 "general_operand"))]
10741 "TARGET_EITHER"
10742 "
10743 {
10744 if (TARGET_32BIT)
10745 emit_insn (gen_arm_eh_return (operands[0]));
10746 else
10747 emit_insn (gen_thumb_eh_return (operands[0]));
10748 DONE;
10749 }"
10750 )
10751
10752 ;; We can't expand this before we know where the link register is stored.
10753 (define_insn_and_split "arm_eh_return"
10754 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10755 VUNSPEC_EH_RETURN)
10756 (clobber (match_scratch:SI 1 "=&r"))]
10757 "TARGET_ARM"
10758 "#"
10759 "&& reload_completed"
10760 [(const_int 0)]
10761 "
10762 {
10763 arm_set_return_address (operands[0], operands[1]);
10764 DONE;
10765 }"
10766 )
10767
10768 \f
10769 ;; TLS support
10770
10771 (define_insn "load_tp_hard"
10772 [(set (match_operand:SI 0 "register_operand" "=r")
10773 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10774 "TARGET_HARD_TP"
10775 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10776 [(set_attr "predicable" "yes")
10777 (set_attr "type" "mrs")]
10778 )
10779
10780 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10781 (define_insn "load_tp_soft_fdpic"
10782 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10783 (clobber (reg:SI FDPIC_REGNUM))
10784 (clobber (reg:SI LR_REGNUM))
10785 (clobber (reg:SI IP_REGNUM))
10786 (clobber (reg:CC CC_REGNUM))]
10787 "TARGET_SOFT_TP && TARGET_FDPIC"
10788 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10789 [(set_attr "conds" "clob")
10790 (set_attr "type" "branch")]
10791 )
10792
10793 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10794 (define_insn "load_tp_soft"
10795 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10796 (clobber (reg:SI LR_REGNUM))
10797 (clobber (reg:SI IP_REGNUM))
10798 (clobber (reg:CC CC_REGNUM))]
10799 "TARGET_SOFT_TP && !TARGET_FDPIC"
10800 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10801 [(set_attr "conds" "clob")
10802 (set_attr "type" "branch")]
10803 )
10804
10805 ;; tls descriptor call
10806 (define_insn "tlscall"
10807 [(set (reg:SI R0_REGNUM)
10808 (unspec:SI [(reg:SI R0_REGNUM)
10809 (match_operand:SI 0 "" "X")
10810 (match_operand 1 "" "")] UNSPEC_TLS))
10811 (clobber (reg:SI R1_REGNUM))
10812 (clobber (reg:SI LR_REGNUM))
10813 (clobber (reg:CC CC_REGNUM))]
10814 "TARGET_GNU2_TLS"
10815 {
10816 targetm.asm_out.internal_label (asm_out_file, "LPIC",
10817 INTVAL (operands[1]));
10818 return "bl\\t%c0(tlscall)";
10819 }
10820 [(set_attr "conds" "clob")
10821 (set_attr "length" "4")
10822 (set_attr "type" "branch")]
10823 )
10824
10825 ;; For thread pointer builtin
10826 (define_expand "get_thread_pointersi"
10827 [(match_operand:SI 0 "s_register_operand")]
10828 ""
10829 "
10830 {
10831 arm_load_tp (operands[0]);
10832 DONE;
10833 }")
10834
10835 ;;
10836
10837 ;; We only care about the lower 16 bits of the constant
10838 ;; being inserted into the upper 16 bits of the register.
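;; For example, if operand 1 is 0x12345678 only the low halfword 0x5678 is
;; written into bits 16-31 of operand 0 (the %L output modifier prints the
;; low 16 bits of the constant); bits 0-15 of the register are unchanged.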
10839 (define_insn "*arm_movtas_ze"
10840 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
10841 (const_int 16)
10842 (const_int 16))
10843 (match_operand:SI 1 "const_int_operand" ""))]
10844 "TARGET_HAVE_MOVT"
10845 "@
10846 movt%?\t%0, %L1
10847 movt\t%0, %L1"
10848 [(set_attr "arch" "32,v8mb")
10849 (set_attr "predicable" "yes")
10850 (set_attr "length" "4")
10851 (set_attr "type" "alu_sreg")]
10852 )
10853
10854 (define_insn "*arm_rev"
10855 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
10856 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
10857 "arm_arch6"
10858 "@
10859 rev\t%0, %1
10860 rev%?\t%0, %1
10861 rev%?\t%0, %1"
10862 [(set_attr "arch" "t1,t2,32")
10863 (set_attr "length" "2,2,4")
10864 (set_attr "predicable" "no,yes,yes")
10865 (set_attr "type" "rev")]
10866 )
10867
10868 (define_expand "arm_legacy_rev"
10869 [(set (match_operand:SI 2 "s_register_operand")
10870 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
10871 (const_int 16))
10872 (match_dup 1)))
10873 (set (match_dup 2)
10874 (lshiftrt:SI (match_dup 2)
10875 (const_int 8)))
10876 (set (match_operand:SI 3 "s_register_operand")
10877 (rotatert:SI (match_dup 1)
10878 (const_int 8)))
10879 (set (match_dup 2)
10880 (and:SI (match_dup 2)
10881 (const_int -65281)))
10882 (set (match_operand:SI 0 "s_register_operand")
10883 (xor:SI (match_dup 3)
10884 (match_dup 2)))]
10885 "TARGET_32BIT"
10886 ""
10887 )
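;; A rough C-level sketch of the sequence above, assuming 32-bit unsigned
;; arithmetic and writing ror (x, n) for a rotate right by n bits, with the
;; bytes of x named a, b, c, d from most to least significant:
;;   t  = x ^ ror (x, 16);   /* bytes: a^c, b^d, c^a, d^b */
;;   t >>= 8;                /* bytes: 0,   a^c, b^d, c^a */
;;   r  = ror (x, 8);        /* bytes: d,   a,   b,   c   */
;;   t &= 0xffff00ff;        /* bytes: 0,   a^c, 0,   c^a */
;;   result = r ^ t;         /* bytes: d,   c,   b,   a   */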
10888
10889 ;; Reuse temporaries to keep register pressure down.
10890 (define_expand "thumb_legacy_rev"
10891 [(set (match_operand:SI 2 "s_register_operand")
10892 (ashift:SI (match_operand:SI 1 "s_register_operand")
10893 (const_int 24)))
10894 (set (match_operand:SI 3 "s_register_operand")
10895 (lshiftrt:SI (match_dup 1)
10896 (const_int 24)))
10897 (set (match_dup 3)
10898 (ior:SI (match_dup 3)
10899 (match_dup 2)))
10900 (set (match_operand:SI 4 "s_register_operand")
10901 (const_int 16))
10902 (set (match_operand:SI 5 "s_register_operand")
10903 (rotatert:SI (match_dup 1)
10904 (match_dup 4)))
10905 (set (match_dup 2)
10906 (ashift:SI (match_dup 5)
10907 (const_int 24)))
10908 (set (match_dup 5)
10909 (lshiftrt:SI (match_dup 5)
10910 (const_int 24)))
10911 (set (match_dup 5)
10912 (ior:SI (match_dup 5)
10913 (match_dup 2)))
10914 (set (match_dup 5)
10915 (rotatert:SI (match_dup 5)
10916 (match_dup 4)))
10917 (set (match_operand:SI 0 "s_register_operand")
10918 (ior:SI (match_dup 5)
10919 (match_dup 3)))]
10920 "TARGET_THUMB"
10921 ""
10922 )
10923
10924 ;; ARM-specific expansion of signed mod by power of 2
10925 ;; using conditional negate.
10926 ;; For r0 % n where n is a power of 2 produce:
10927 ;; rsbs r1, r0, #0
10928 ;; and r0, r0, #(n - 1)
10929 ;; and r1, r1, #(n - 1)
10930 ;; rsbpl r0, r1, #0
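;; This matches C truncated-division semantics: for positive dividends the
;; result is simply r0 & (n - 1), while for zero or negative dividends it
;; is -((-r0) & (n - 1)).  The "rsbs r1, r0, #0" computes -r0 and sets the
;; flags, so the final "rsbpl r0, r1, #0" only executes when -r0 >= 0,
;; i.e. when the dividend was zero or negative.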
10931
10932 (define_expand "modsi3"
10933 [(match_operand:SI 0 "register_operand")
10934 (match_operand:SI 1 "register_operand")
10935 (match_operand:SI 2 "const_int_operand")]
10936 "TARGET_32BIT"
10937 {
10938 HOST_WIDE_INT val = INTVAL (operands[2]);
10939
10940 if (val <= 0
10941 || exact_log2 (val) <= 0)
10942 FAIL;
10943
10944 rtx mask = GEN_INT (val - 1);
10945
10946 /* In the special case of r0 % 2 we can do the even shorter:
10947 cmp r0, #0
10948 and r0, r0, #1
10949 rsblt r0, r0, #0. */
10950
10951 if (val == 2)
10952 {
10953 rtx cc_reg = arm_gen_compare_reg (LT,
10954 operands[1], const0_rtx, NULL_RTX);
10955 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
10956 rtx masked = gen_reg_rtx (SImode);
10957
10958 emit_insn (gen_andsi3 (masked, operands[1], mask));
10959 emit_move_insn (operands[0],
10960 gen_rtx_IF_THEN_ELSE (SImode, cond,
10961 gen_rtx_NEG (SImode,
10962 masked),
10963 masked));
10964 DONE;
10965 }
10966
10967 rtx neg_op = gen_reg_rtx (SImode);
10968 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
10969 operands[1]));
10970
10971 /* Extract the condition register and mode. */
10972 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
10973 rtx cc_reg = SET_DEST (cmp);
10974 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
10975
10976 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
10977
10978 rtx masked_neg = gen_reg_rtx (SImode);
10979 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
10980
10981 /* We want a conditional negate here, but emitting COND_EXEC rtxes
10982 during expand does not always work. Do an IF_THEN_ELSE instead. */
10983 emit_move_insn (operands[0],
10984 gen_rtx_IF_THEN_ELSE (SImode, cond,
10985 gen_rtx_NEG (SImode, masked_neg),
10986 operands[0]));
10987
10988
10989 DONE;
10990 }
10991 )
10992
10993 (define_expand "bswapsi2"
10994 [(set (match_operand:SI 0 "s_register_operand")
10995 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
10996 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
10997 "
10998 if (!arm_arch6)
10999 {
11000 rtx op2 = gen_reg_rtx (SImode);
11001 rtx op3 = gen_reg_rtx (SImode);
11002
11003 if (TARGET_THUMB)
11004 {
11005 rtx op4 = gen_reg_rtx (SImode);
11006 rtx op5 = gen_reg_rtx (SImode);
11007
11008 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
11009 op2, op3, op4, op5));
11010 }
11011 else
11012 {
11013 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
11014 op2, op3));
11015 }
11016
11017 DONE;
11018 }
11019 "
11020 )
11021
11022 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
11023 ;; and unsigned variants, respectively. For rev16, expose
11024 ;; byte-swapping in the lower 16 bits only.
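;; For example, applied to a register holding 0x11223344, rev16 produces
;; 0x22114433 (each halfword byte-swapped independently), while revsh
;; byte-swaps the low halfword and sign-extends the 16-bit result.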
11025 (define_insn "*arm_revsh"
11026 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
11027 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
11028 "arm_arch6"
11029 "@
11030 revsh\t%0, %1
11031 revsh%?\t%0, %1
11032 revsh%?\t%0, %1"
11033 [(set_attr "arch" "t1,t2,32")
11034 (set_attr "length" "2,2,4")
11035 (set_attr "type" "rev")]
11036 )
11037
11038 (define_insn "*arm_rev16"
11039 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
11040 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
11041 "arm_arch6"
11042 "@
11043 rev16\t%0, %1
11044 rev16%?\t%0, %1
11045 rev16%?\t%0, %1"
11046 [(set_attr "arch" "t1,t2,32")
11047 (set_attr "length" "2,2,4")
11048 (set_attr "type" "rev")]
11049 )
11050
11051 ;; There are no canonicalisation rules for the position of the lshiftrt, ashift
11052 ;; operations within an IOR/AND RTX; therefore we have two patterns, one
11053 ;; matching each valid permutation.
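;; Both patterns recognise the same computation, roughly
;;   ((x << 8) & 0xff00ff00) | ((x >> 8) & 0x00ff00ff)
;; with the two AND sub-expressions appearing in either order inside the
;; IOR.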
11054
11055 (define_insn "arm_rev16si2"
11056 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11057 (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
11058 (const_int 8))
11059 (match_operand:SI 3 "const_int_operand" "n,n,n"))
11060 (and:SI (lshiftrt:SI (match_dup 1)
11061 (const_int 8))
11062 (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
11063 "arm_arch6
11064 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11065 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11066 "rev16\\t%0, %1"
11067 [(set_attr "arch" "t1,t2,32")
11068 (set_attr "length" "2,2,4")
11069 (set_attr "type" "rev")]
11070 )
11071
11072 (define_insn "arm_rev16si2_alt"
11073 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11074 (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
11075 (const_int 8))
11076 (match_operand:SI 2 "const_int_operand" "n,n,n"))
11077 (and:SI (ashift:SI (match_dup 1)
11078 (const_int 8))
11079 (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
11080 "arm_arch6
11081 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11082 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11083 "rev16\\t%0, %1"
11084 [(set_attr "arch" "t1,t2,32")
11085 (set_attr "length" "2,2,4")
11086 (set_attr "type" "rev")]
11087 )
11088
11089 (define_expand "bswaphi2"
11090 [(set (match_operand:HI 0 "s_register_operand")
11091 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
11092 "arm_arch6"
11093 ""
11094 )
11095
11096 ;; Patterns for LDRD/STRD in Thumb2 mode
11097
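;; For example, two adjacent word loads such as
;;   r0 = *(base + 8);  r3 = *(base + 12);
;; can be matched by the first pattern below and emitted as a single
;;   ldrd  r0, r3, [base, #8]
;; provided the register pair passes operands_ok_ldrd_strd (an
;; illustrative sketch, not compiler output for a specific test case).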
11098 (define_insn "*thumb2_ldrd"
11099 [(set (match_operand:SI 0 "s_register_operand" "=r")
11100 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11101 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
11102 (set (match_operand:SI 3 "s_register_operand" "=r")
11103 (mem:SI (plus:SI (match_dup 1)
11104 (match_operand:SI 4 "const_int_operand" ""))))]
11105 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11106 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
11107 && (operands_ok_ldrd_strd (operands[0], operands[3],
11108 operands[1], INTVAL (operands[2]),
11109 false, true))"
11110 "ldrd%?\t%0, %3, [%1, %2]"
11111 [(set_attr "type" "load_8")
11112 (set_attr "predicable" "yes")])
11113
11114 (define_insn "*thumb2_ldrd_base"
11115 [(set (match_operand:SI 0 "s_register_operand" "=r")
11116 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11117 (set (match_operand:SI 2 "s_register_operand" "=r")
11118 (mem:SI (plus:SI (match_dup 1)
11119 (const_int 4))))]
11120 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11121 && (operands_ok_ldrd_strd (operands[0], operands[2],
11122 operands[1], 0, false, true))"
11123 "ldrd%?\t%0, %2, [%1]"
11124 [(set_attr "type" "load_8")
11125 (set_attr "predicable" "yes")])
11126
11127 (define_insn "*thumb2_ldrd_base_neg"
11128 [(set (match_operand:SI 0 "s_register_operand" "=r")
11129 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11130 (const_int -4))))
11131 (set (match_operand:SI 2 "s_register_operand" "=r")
11132 (mem:SI (match_dup 1)))]
11133 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11134 && (operands_ok_ldrd_strd (operands[0], operands[2],
11135 operands[1], -4, false, true))"
11136 "ldrd%?\t%0, %2, [%1, #-4]"
11137 [(set_attr "type" "load_8")
11138 (set_attr "predicable" "yes")])
11139
11140 (define_insn "*thumb2_strd"
11141 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11142 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
11143 (match_operand:SI 2 "s_register_operand" "r"))
11144 (set (mem:SI (plus:SI (match_dup 0)
11145 (match_operand:SI 3 "const_int_operand" "")))
11146 (match_operand:SI 4 "s_register_operand" "r"))]
11147 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11148 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
11149 && (operands_ok_ldrd_strd (operands[2], operands[4],
11150 operands[0], INTVAL (operands[1]),
11151 false, false))"
11152 "strd%?\t%2, %4, [%0, %1]"
11153 [(set_attr "type" "store_8")
11154 (set_attr "predicable" "yes")])
11155
11156 (define_insn "*thumb2_strd_base"
11157 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
11158 (match_operand:SI 1 "s_register_operand" "r"))
11159 (set (mem:SI (plus:SI (match_dup 0)
11160 (const_int 4)))
11161 (match_operand:SI 2 "s_register_operand" "r"))]
11162 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11163 && (operands_ok_ldrd_strd (operands[1], operands[2],
11164 operands[0], 0, false, false))"
11165 "strd%?\t%1, %2, [%0]"
11166 [(set_attr "type" "store_8")
11167 (set_attr "predicable" "yes")])
11168
11169 (define_insn "*thumb2_strd_base_neg"
11170 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11171 (const_int -4)))
11172 (match_operand:SI 1 "s_register_operand" "r"))
11173 (set (mem:SI (match_dup 0))
11174 (match_operand:SI 2 "s_register_operand" "r"))]
11175 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11176 && (operands_ok_ldrd_strd (operands[1], operands[2],
11177 operands[0], -4, false, false))"
11178 "strd%?\t%1, %2, [%0, #-4]"
11179 [(set_attr "type" "store_8")
11180 (set_attr "predicable" "yes")])
11181
11182 ;; ARMv8 CRC32 instructions.
11183 (define_insn "arm_<crc_variant>"
11184 [(set (match_operand:SI 0 "s_register_operand" "=r")
11185 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
11186 (match_operand:<crc_mode> 2 "s_register_operand" "r")]
11187 CRC))]
11188 "TARGET_CRC32"
11189 "<crc_variant>\\t%0, %1, %2"
11190 [(set_attr "type" "crc")
11191 (set_attr "conds" "unconditional")]
11192 )
11193
11194 ;; Load the load/store double peephole optimizations.
11195 (include "ldrdstrd.md")
11196
11197 ;; Load the load/store multiple patterns
11198 (include "ldmstm.md")
11199
11200 ;; Patterns in ldmstm.md don't cover more than 4 registers.  This pattern covers
11201 ;; larger lists without explicit writeback, as generated for the APCS_FRAME
11202 ;; epilogue.  The operands are validated through the load_multiple_operation
11203 ;; match_parallel predicate rather than through constraints, so the pattern is
11204 ;; only enabled after reload.
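;; For example, restoring r4-r8 from the address held in r1 without
;; updating the base register can be emitted as
;;   ldm   r1, {r4, r5, r6, r7, r8}
;; (a sketch; the exact form is produced by arm_output_multireg_pop).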
11205 (define_insn "*load_multiple"
11206 [(match_parallel 0 "load_multiple_operation"
11207 [(set (match_operand:SI 2 "s_register_operand" "=rk")
11208 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11209 ])]
11210 "TARGET_32BIT && reload_completed"
11211 "*
11212 {
11213 arm_output_multireg_pop (operands, /*return_pc=*/false,
11214 /*cond=*/const_true_rtx,
11215 /*reverse=*/false,
11216 /*update=*/false);
11217 return \"\";
11218 }
11219 "
11220 [(set_attr "predicable" "yes")]
11221 )
11222
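;; copysign expanders for soft-float targets that have the Thumb-2 bitfield
;; insert instruction: the result takes its magnitude from operand 1 and
;; its sign from operand 2, with insv_t2 (BFI) used to splice the relevant
;; bits together.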
11223 (define_expand "copysignsf3"
11224 [(match_operand:SF 0 "register_operand")
11225 (match_operand:SF 1 "register_operand")
11226 (match_operand:SF 2 "register_operand")]
11227 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11228 "{
11229 emit_move_insn (operands[0], operands[2]);
11230 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
11231 GEN_INT (31), GEN_INT (0),
11232 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
11233 DONE;
11234 }"
11235 )
11236
11237 (define_expand "copysigndf3"
11238 [(match_operand:DF 0 "register_operand")
11239 (match_operand:DF 1 "register_operand")
11240 (match_operand:DF 2 "register_operand")]
11241 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11242 "{
11243 rtx op0_low = gen_lowpart (SImode, operands[0]);
11244 rtx op0_high = gen_highpart (SImode, operands[0]);
11245 rtx op1_low = gen_lowpart (SImode, operands[1]);
11246 rtx op1_high = gen_highpart (SImode, operands[1]);
11247 rtx op2_high = gen_highpart (SImode, operands[2]);
11248
11249 rtx scratch1 = gen_reg_rtx (SImode);
11250 rtx scratch2 = gen_reg_rtx (SImode);
11251 emit_move_insn (scratch1, op2_high);
11252 emit_move_insn (scratch2, op1_high);
11253
11254 emit_insn (gen_rtx_SET (scratch1,
11255 gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT (31))));
11256 emit_insn (gen_insv_t2 (scratch2, GEN_INT (1), GEN_INT (31), scratch1));
11257 emit_move_insn (op0_low, op1_low);
11258 emit_move_insn (op0_high, scratch2);
11259
11260 DONE;
11261 }"
11262 )
11263
11264 ;; movmisalign patterns for HImode and SImode.
11265 (define_expand "movmisalign<mode>"
11266 [(match_operand:HSI 0 "general_operand")
11267 (match_operand:HSI 1 "general_operand")]
11268 "unaligned_access"
11269 {
11270 /* This pattern is not permitted to fail during expansion: if both arguments
11271 are non-registers (e.g. memory := constant), force operand 1 into a
11272 register. */
11273 rtx (* gen_unaligned_load)(rtx, rtx);
11274 rtx tmp_dest = operands[0];
11275 if (!s_register_operand (operands[0], <MODE>mode)
11276 && !s_register_operand (operands[1], <MODE>mode))
11277 operands[1] = force_reg (<MODE>mode, operands[1]);
11278
11279 if (<MODE>mode == HImode)
11280 {
11281 gen_unaligned_load = gen_unaligned_loadhiu;
11282 tmp_dest = gen_reg_rtx (SImode);
11283 }
11284 else
11285 gen_unaligned_load = gen_unaligned_loadsi;
11286
11287 if (MEM_P (operands[1]))
11288 {
11289 emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
11290 if (<MODE>mode == HImode)
11291 emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
11292 }
11293 else
11294 emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
11295
11296 DONE;
11297 })
11298
11299 (define_insn "arm_<cdp>"
11300 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11301 (match_operand:SI 1 "immediate_operand" "n")
11302 (match_operand:SI 2 "immediate_operand" "n")
11303 (match_operand:SI 3 "immediate_operand" "n")
11304 (match_operand:SI 4 "immediate_operand" "n")
11305 (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
11306 "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
11307 {
11308 arm_const_bounds (operands[0], 0, 16);
11309 arm_const_bounds (operands[1], 0, 16);
11310 arm_const_bounds (operands[2], 0, (1 << 5));
11311 arm_const_bounds (operands[3], 0, (1 << 5));
11312 arm_const_bounds (operands[4], 0, (1 << 5));
11313 arm_const_bounds (operands[5], 0, 8);
11314 return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
11315 }
11316 [(set_attr "length" "4")
11317 (set_attr "type" "coproc")])
11318
11319 (define_insn "*ldc"
11320 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11321 (match_operand:SI 1 "immediate_operand" "n")
11322 (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
11323 "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
11324 {
11325 arm_const_bounds (operands[0], 0, 16);
11326 arm_const_bounds (operands[1], 0, (1 << 5));
11327 return "<ldc>\\tp%c0, CR%c1, %2";
11328 }
11329 [(set_attr "length" "4")
11330 (set_attr "type" "coproc")])
11331
11332 (define_insn "*stc"
11333 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11334 (match_operand:SI 1 "immediate_operand" "n")
11335 (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
11336 "arm_coproc_builtin_available (VUNSPEC_<STC>)"
11337 {
11338 arm_const_bounds (operands[0], 0, 16);
11339 arm_const_bounds (operands[1], 0, (1 << 5));
11340 return "<stc>\\tp%c0, CR%c1, %2";
11341 }
11342 [(set_attr "length" "4")
11343 (set_attr "type" "coproc")])
11344
11345 (define_expand "arm_<ldc>"
11346 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11347 (match_operand:SI 1 "immediate_operand")
11348 (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
11349 "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
11350
11351 (define_expand "arm_<stc>"
11352 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11353 (match_operand:SI 1 "immediate_operand")
11354 (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
11355 "arm_coproc_builtin_available (VUNSPEC_<STC>)")
11356
11357 (define_insn "arm_<mcr>"
11358 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11359 (match_operand:SI 1 "immediate_operand" "n")
11360 (match_operand:SI 2 "s_register_operand" "r")
11361 (match_operand:SI 3 "immediate_operand" "n")
11362 (match_operand:SI 4 "immediate_operand" "n")
11363 (match_operand:SI 5 "immediate_operand" "n")] MCRI)
11364 (use (match_dup 2))]
11365 "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
11366 {
11367 arm_const_bounds (operands[0], 0, 16);
11368 arm_const_bounds (operands[1], 0, 8);
11369 arm_const_bounds (operands[3], 0, (1 << 5));
11370 arm_const_bounds (operands[4], 0, (1 << 5));
11371 arm_const_bounds (operands[5], 0, 8);
11372 return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
11373 }
11374 [(set_attr "length" "4")
11375 (set_attr "type" "coproc")])
11376
11377 (define_insn "arm_<mrc>"
11378 [(set (match_operand:SI 0 "s_register_operand" "=r")
11379 (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
11380 (match_operand:SI 2 "immediate_operand" "n")
11381 (match_operand:SI 3 "immediate_operand" "n")
11382 (match_operand:SI 4 "immediate_operand" "n")
11383 (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
11384 "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
11385 {
11386 arm_const_bounds (operands[1], 0, 16);
11387 arm_const_bounds (operands[2], 0, 8);
11388 arm_const_bounds (operands[3], 0, (1 << 5));
11389 arm_const_bounds (operands[4], 0, (1 << 5));
11390 arm_const_bounds (operands[5], 0, 8);
11391 return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
11392 }
11393 [(set_attr "length" "4")
11394 (set_attr "type" "coproc")])
11395
11396 (define_insn "arm_<mcrr>"
11397 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11398 (match_operand:SI 1 "immediate_operand" "n")
11399 (match_operand:DI 2 "s_register_operand" "r")
11400 (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
11401 (use (match_dup 2))]
11402 "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
11403 {
11404 arm_const_bounds (operands[0], 0, 16);
11405 arm_const_bounds (operands[1], 0, 8);
11406 arm_const_bounds (operands[3], 0, (1 << 5));
11407 return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
11408 }
11409 [(set_attr "length" "4")
11410 (set_attr "type" "coproc")])
11411
11412 (define_insn "arm_<mrrc>"
11413 [(set (match_operand:DI 0 "s_register_operand" "=r")
11414 (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
11415 (match_operand:SI 2 "immediate_operand" "n")
11416 (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
11417 "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
11418 {
11419 arm_const_bounds (operands[1], 0, 16);
11420 arm_const_bounds (operands[2], 0, 8);
11421 arm_const_bounds (operands[3], 0, (1 << 5));
11422 return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
11423 }
11424 [(set_attr "length" "4")
11425 (set_attr "type" "coproc")])
11426
11427 (define_expand "speculation_barrier"
11428 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11429 "TARGET_EITHER"
11430 "
11431 /* For thumb1 (except Armv8 derivatives), and for pre-Armv7 we don't
11432 have a usable barrier (and probably don't need one in practice).
11433 But to be safe if such code is run on later architectures, call a
11434 helper function in libgcc that will do the right thing for the active
11435 system. */
11436 if (!(arm_arch7 || arm_arch8))
11437 {
11438 arm_emit_speculation_barrier_function ();
11439 DONE;
11440 }
11441 "
11442 )
11443
11444 ;; Generate a hard speculation barrier when we have not enabled speculation
11445 ;; tracking.
11446 (define_insn "*speculation_barrier_insn"
11447 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11448 "arm_arch7 || arm_arch8"
11449 "isb\;dsb\\tsy"
11450 [(set_attr "type" "block")
11451 (set_attr "length" "8")]
11452 )
11453
11454 ;; Vector bits common to IWMMXT and Neon
11455 (include "vec-common.md")
11456 ;; Load the Intel Wireless Multimedia Extension patterns
11457 (include "iwmmxt.md")
11458 ;; Load the VFP co-processor patterns
11459 (include "vfp.md")
11460 ;; Thumb-1 patterns
11461 (include "thumb1.md")
11462 ;; Thumb-2 patterns
11463 (include "thumb2.md")
11464 ;; Neon patterns
11465 (include "neon.md")
11466 ;; Crypto patterns
11467 (include "crypto.md")
11468 ;; Synchronization Primitives
11469 (include "sync.md")
11470 ;; Fixed-point patterns
11471 (include "arm-fixed.md")