1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2019 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
6
7 ;; This file is part of GCC.
8
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
13
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
18
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
22
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
24
25 \f
26 ;;---------------------------------------------------------------------------
27 ;; Constants
28
29 ;; Register numbers -- All machine registers should be defined here
30 (define_constants
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (R4_REGNUM 4) ; Fifth CORE register
34 (FDPIC_REGNUM 9) ; FDPIC register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (LAST_ARM_REGNUM 15) ;
40 (CC_REGNUM 100) ; Condition code pseudo register
41 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
42 ]
43 )
44 ;; 3rd operand to select_dominance_cc_mode
45 (define_constants
46 [(DOM_CC_X_AND_Y 0)
47 (DOM_CC_NX_OR_Y 1)
48 (DOM_CC_X_OR_Y 2)
49 ]
50 )
51 ;; conditional compare combination
52 (define_constants
53 [(CMP_CMP 0)
54 (CMN_CMP 1)
55 (CMP_CMN 2)
56 (CMN_CMN 3)
57 (NUM_OF_COND_CMP 4)
58 ]
59 )
60
61 \f
62 ;;---------------------------------------------------------------------------
63 ;; Attributes
64
65 ;; Processor type. This is created automatically from arm-cores.def.
66 (include "arm-tune.md")
67
68 ;; Instruction classification types
69 (include "types.md")
70
71 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
72 ; generating ARM code. This is used to control the length of some insn
73 ; patterns that share the same RTL in both ARM and Thumb code.
74 (define_attr "is_thumb" "yes,no"
75 (const (if_then_else (symbol_ref "TARGET_THUMB")
76 (const_string "yes") (const_string "no"))))
77
78 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
79 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
80
81 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
82 (define_attr "is_thumb1" "yes,no"
83 (const (if_then_else (symbol_ref "TARGET_THUMB1")
84 (const_string "yes") (const_string "no"))))
85
86 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
87 ; The arm_restrict_it flag enables the "short IT" feature which
88 ; restricts IT blocks to a single 16-bit instruction.
89 ; This attribute should only be used on 16-bit Thumb-2 instructions
90 ; which may be predicated (the "predicable" attribute must be set).
91 (define_attr "predicable_short_it" "no,yes" (const_string "no"))
92
93 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
94 ; This attribute should only be used on instructions which may emit
95 ; an IT block in their expansion which is not a short IT.
96 (define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
97
98 ;; Operand number of an input operand that is shifted. Zero if the
99 ;; given instruction does not shift one of its input operands.
100 (define_attr "shift" "" (const_int 0))
101
102 ;; [For compatibility with AArch64 in pipeline models]
103 ;; Attribute that specifies whether or not the instruction touches fp
104 ;; registers.
105 (define_attr "fp" "no,yes" (const_string "no"))
106
107 ; Floating Point Unit. If we only have floating point emulation, then there
108 ; is no point in scheduling the floating point insns. (Well, for best
109 ; performance we should try to group them together).
110 (define_attr "fpu" "none,vfp"
111 (const (symbol_ref "arm_fpu_attr")))
112
113 ; Predicated means that the insn form is conditionally executed based on a
114 ; predicate. We default to 'no' because no Thumb patterns match this rule
115 ; and not all ARM insns do.
116 (define_attr "predicated" "yes,no" (const_string "no"))
117
118 ; LENGTH of an instruction (in bytes)
119 (define_attr "length" ""
120 (const_int 4))
121
122 ; The architecture which supports the instruction (or alternative).
123 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
124 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
125 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
126 ; arm_arch6. "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
127 ; Baseline. This attribute is used to compute attribute "enabled",
128 ; use type "any" to enable an alternative in all cases.
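; As an illustrative example of per-alternative use (not part of the
; original comment): an insn whose first alternative is Thumb-2 only and
; whose second is available everywhere would use (set_attr "arch" "t2,*"),
; as several patterns later in this file do.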
129 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,v8mb,iwmmxt,iwmmxt2,armv6_or_vfpv3,neon"
130 (const_string "any"))
131
132 (define_attr "arch_enabled" "no,yes"
133 (cond [(eq_attr "arch" "any")
134 (const_string "yes")
135
136 (and (eq_attr "arch" "a")
137 (match_test "TARGET_ARM"))
138 (const_string "yes")
139
140 (and (eq_attr "arch" "t")
141 (match_test "TARGET_THUMB"))
142 (const_string "yes")
143
144 (and (eq_attr "arch" "t1")
145 (match_test "TARGET_THUMB1"))
146 (const_string "yes")
147
148 (and (eq_attr "arch" "t2")
149 (match_test "TARGET_THUMB2"))
150 (const_string "yes")
151
152 (and (eq_attr "arch" "32")
153 (match_test "TARGET_32BIT"))
154 (const_string "yes")
155
156 (and (eq_attr "arch" "v6")
157 (match_test "TARGET_32BIT && arm_arch6"))
158 (const_string "yes")
159
160 (and (eq_attr "arch" "nov6")
161 (match_test "TARGET_32BIT && !arm_arch6"))
162 (const_string "yes")
163
164 (and (eq_attr "arch" "v6t2")
165 (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
166 (const_string "yes")
167
168 (and (eq_attr "arch" "v8mb")
169 (match_test "TARGET_THUMB1 && arm_arch8"))
170 (const_string "yes")
171
172 (and (eq_attr "arch" "iwmmxt2")
173 (match_test "TARGET_REALLY_IWMMXT2"))
174 (const_string "yes")
175
176 (and (eq_attr "arch" "armv6_or_vfpv3")
177 (match_test "arm_arch6 || TARGET_VFP3"))
178 (const_string "yes")
179
180 (and (eq_attr "arch" "neon")
181 (match_test "TARGET_NEON"))
182 (const_string "yes")
183 ]
184
185 (const_string "no")))
186
187 (define_attr "opt" "any,speed,size"
188 (const_string "any"))
189
190 (define_attr "opt_enabled" "no,yes"
191 (cond [(eq_attr "opt" "any")
192 (const_string "yes")
193
194 (and (eq_attr "opt" "speed")
195 (match_test "optimize_function_for_speed_p (cfun)"))
196 (const_string "yes")
197
198 (and (eq_attr "opt" "size")
199 (match_test "optimize_function_for_size_p (cfun)"))
200 (const_string "yes")]
201 (const_string "no")))
202
203 (define_attr "use_literal_pool" "no,yes"
204 (cond [(and (eq_attr "type" "f_loads,f_loadd")
205 (match_test "CONSTANT_P (operands[1])"))
206 (const_string "yes")]
207 (const_string "no")))
208
209 ; Enable all alternatives that are both arch_enabled and insn_enabled.
210 ; FIXME: opt_enabled has been temporarily removed until we have an
211 ; attribute that allows the use of such alternatives.
212 ; This depends on caching of speed_p, size_p on a per
213 ; alternative basis. The problem is that the enabled attribute
214 ; cannot depend on any state that is not cached or is not constant
215 ; for a compilation unit. We probably need a generic "hot/cold"
216 ; alternative which if implemented can help with this. We disable this
217 ; until such a time as this is implemented and / or the improvements or
218 ; regressions with removing this attribute are double checked.
219 ; See ashldi3_neon and <shift>di3_neon in neon.md.
220
221 (define_attr "enabled" "no,yes"
222 (cond [(and (eq_attr "predicable_short_it" "no")
223 (and (eq_attr "predicated" "yes")
224 (match_test "arm_restrict_it")))
225 (const_string "no")
226
227 (and (eq_attr "enabled_for_short_it" "no")
228 (match_test "arm_restrict_it"))
229 (const_string "no")
230
231 (eq_attr "arch_enabled" "no")
232 (const_string "no")]
233 (const_string "yes")))
234
235 ; POOL_RANGE is how far away from a constant pool entry that this insn
236 ; can be placed. If the distance is zero, then this insn will never
237 ; reference the pool.
238 ; Note that for Thumb constant pools the PC value is rounded down to the
239 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
240 ; Thumb insns) should be set to <max_range> - 2.
241 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
242 ; before its address. It is set to <max_range> - (8 + <data_size>).
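; As a worked example of the formula above (illustrative only): a 4-byte
; ARM-state load with a maximum forward offset of 4096 would set
; arm_pool_range to 4096 and arm_neg_pool_range to 4096 - (8 + 4) = 4084.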
243 (define_attr "arm_pool_range" "" (const_int 0))
244 (define_attr "thumb2_pool_range" "" (const_int 0))
245 (define_attr "arm_neg_pool_range" "" (const_int 0))
246 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
247
248 (define_attr "pool_range" ""
249 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
250 (attr "arm_pool_range")))
251 (define_attr "neg_pool_range" ""
252 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
253 (attr "arm_neg_pool_range")))
254
255 ; An assembler sequence may clobber the condition codes without us knowing.
256 ; If such an insn references the pool, then we have no way of knowing how,
257 ; so use the most conservative value for pool_range.
258 (define_asm_attributes
259 [(set_attr "conds" "clob")
260 (set_attr "length" "4")
261 (set_attr "pool_range" "250")])
262
263 ; Load scheduling, set from the arm_ld_sched variable
264 ; initialized by arm_option_override()
265 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
266
267 ; condition codes: this one is used by final_prescan_insn to speed up
268 ; conditionalizing instructions. It saves having to scan the rtl to see if
269 ; it uses or alters the condition codes.
270 ;
271 ; USE means that the condition codes are used by the insn in the process of
272 ; outputting code; this means (at present) that we can't use the insn in
273 ; inlined branches.
274 ;
275 ; SET means that the purpose of the insn is to set the condition codes in a
276 ; well defined manner.
277 ;
278 ; CLOB means that the condition codes are altered in an undefined manner, if
279 ; they are altered at all
280 ;
281 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
282 ; that the instruction does not use or alter the condition codes.
283 ;
284 ; NOCOND means that the instruction does not use or alter the condition
285 ; codes but can be converted into a conditionally executed instruction.
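;
; For example, addsi3_compare0 below exists to set the flags and is marked
; with (set_attr "conds" "set"), whereas *arm_addsi3 leaves them alone and
; takes the default "nocond".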
286
287 (define_attr "conds" "use,set,clob,unconditional,nocond"
288 (if_then_else
289 (ior (eq_attr "is_thumb1" "yes")
290 (eq_attr "type" "call"))
291 (const_string "clob")
292 (if_then_else (eq_attr "is_neon_type" "no")
293 (const_string "nocond")
294 (const_string "unconditional"))))
295
296 ; Predicable means that the insn can be conditionally executed based on
297 ; an automatically added predicate (additional patterns are generated by
298 ; gen...). We default to 'no' because no Thumb patterns match this rule
299 ; and not all ARM patterns do.
300 (define_attr "predicable" "no,yes" (const_string "no"))
301
302 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
303 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
304 ; suffer blockages enough to warrant modelling this (and it can adversely
305 ; affect the schedule).
306 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
307
308 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
309 ; to stall the processor. Used with model_wbuf above.
310 (define_attr "write_conflict" "no,yes"
311 (if_then_else (eq_attr "type"
312 "block,call,load_4")
313 (const_string "yes")
314 (const_string "no")))
315
316 ; Classify the insns into those that take one cycle and those that take more
317 ; than one on the main cpu execution unit.
318 (define_attr "core_cycles" "single,multi"
319 (if_then_else (eq_attr "type"
320 "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
321 alu_shift_imm, alu_shift_reg, alu_dsp_reg, alus_ext, alus_imm, alus_sreg,\
322 alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
323 logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
324 logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
325 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
326 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
327 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
328 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
329 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
330 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
331 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
332 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
333 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
334 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
335 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
336 (const_string "single")
337 (const_string "multi")))
338
339 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
340 ;; distant label. Only applicable to Thumb code.
341 (define_attr "far_jump" "yes,no" (const_string "no"))
342
343
344 ;; The number of machine instructions this pattern expands to.
345 ;; Used for Thumb-2 conditional execution.
346 (define_attr "ce_count" "" (const_int 1))
347
348 ;;---------------------------------------------------------------------------
349 ;; Unspecs
350
351 (include "unspecs.md")
352
353 ;;---------------------------------------------------------------------------
354 ;; Mode iterators
355
356 (include "iterators.md")
357
358 ;;---------------------------------------------------------------------------
359 ;; Predicates
360
361 (include "predicates.md")
362 (include "constraints.md")
363
364 ;;---------------------------------------------------------------------------
365 ;; Pipeline descriptions
366
367 (define_attr "tune_cortexr4" "yes,no"
368 (const (if_then_else
369 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
370 (const_string "yes")
371 (const_string "no"))))
372
373 ;; True if the generic scheduling description should be used.
374
375 (define_attr "generic_sched" "yes,no"
376 (const (if_then_else
377 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
378 arm926ejs,arm10e,arm1026ejs,arm1136js,\
379 arm1136jfs,cortexa5,cortexa7,cortexa8,\
380 cortexa9,cortexa12,cortexa15,cortexa17,\
381 cortexa53,cortexa57,cortexm4,cortexm7,\
382 exynosm1,marvell_pj4,xgene1")
383 (eq_attr "tune_cortexr4" "yes"))
384 (const_string "no")
385 (const_string "yes"))))
386
387 (define_attr "generic_vfp" "yes,no"
388 (const (if_then_else
389 (and (eq_attr "fpu" "vfp")
390 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
391 cortexa8,cortexa9,cortexa53,cortexm4,\
392 cortexm7,marvell_pj4,xgene1")
393 (eq_attr "tune_cortexr4" "no"))
394 (const_string "yes")
395 (const_string "no"))))
396
397 (include "marvell-f-iwmmxt.md")
398 (include "arm-generic.md")
399 (include "arm926ejs.md")
400 (include "arm1020e.md")
401 (include "arm1026ejs.md")
402 (include "arm1136jfs.md")
403 (include "fa526.md")
404 (include "fa606te.md")
405 (include "fa626te.md")
406 (include "fmp626.md")
407 (include "fa726te.md")
408 (include "cortex-a5.md")
409 (include "cortex-a7.md")
410 (include "cortex-a8.md")
411 (include "cortex-a9.md")
412 (include "cortex-a15.md")
413 (include "cortex-a17.md")
414 (include "cortex-a53.md")
415 (include "cortex-a57.md")
416 (include "cortex-r4.md")
417 (include "cortex-r4f.md")
418 (include "cortex-m7.md")
419 (include "cortex-m4.md")
420 (include "cortex-m4-fpu.md")
421 (include "exynos-m1.md")
422 (include "vfp11.md")
423 (include "marvell-pj4.md")
424 (include "xgene1.md")
425
426 \f
427 ;;---------------------------------------------------------------------------
428 ;; Insn patterns
429 ;;
430 ;; Addition insns.
431
432 ;; Note: For DImode insns, there is normally no reason why operands should
433 ;; not be in the same register; what we don't want is for something being
434 ;; written to partially overlap something that is an input.
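;; For example, writing the low word of the result into a register that
;; still holds the high word of an input would corrupt the second half of
;; the computation; insns such as subdi3_compare1 below therefore use an
;; early-clobber ("=&r") output.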
435
436 (define_expand "adddi3"
437 [(parallel
438 [(set (match_operand:DI 0 "s_register_operand")
439 (plus:DI (match_operand:DI 1 "s_register_operand")
440 (match_operand:DI 2 "reg_or_int_operand")))
441 (clobber (reg:CC CC_REGNUM))])]
442 "TARGET_EITHER"
443 "
444 if (TARGET_THUMB1)
445 {
446 if (!REG_P (operands[2]))
447 operands[2] = force_reg (DImode, operands[2]);
448 }
449 else
450 {
451 rtx lo_result, hi_result, lo_dest, hi_dest;
452 rtx lo_op1, hi_op1, lo_op2, hi_op2;
453 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
454 &lo_op2, &hi_op2);
455 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
456 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
457
458 if (lo_op2 == const0_rtx)
459 {
460 lo_dest = lo_op1;
461 if (!arm_add_operand (hi_op2, SImode))
462 hi_op2 = force_reg (SImode, hi_op2);
463 /* Assume hi_op2 won't also be zero. */
464 emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
465 }
466 else
467 {
468 if (!arm_add_operand (lo_op2, SImode))
469 lo_op2 = force_reg (SImode, lo_op2);
470 if (!arm_not_operand (hi_op2, SImode))
471 hi_op2 = force_reg (SImode, hi_op2);
472
473 emit_insn (gen_addsi3_compare_op1 (lo_dest, lo_op1, lo_op2));
474 rtx carry = gen_rtx_LTU (SImode, gen_rtx_REG (CC_Cmode, CC_REGNUM),
475 const0_rtx);
476 if (hi_op2 == const0_rtx)
477 emit_insn (gen_add0si3_carryin (hi_dest, hi_op1, carry));
478 else
479 emit_insn (gen_addsi3_carryin (hi_dest, hi_op1, hi_op2, carry));
480 }
481
482 if (lo_result != lo_dest)
483 emit_move_insn (lo_result, lo_dest);
484 if (hi_result != hi_dest)
485 emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
486 DONE;
487 }
488 "
489 )
490
491 (define_expand "addvsi4"
492 [(match_operand:SI 0 "s_register_operand")
493 (match_operand:SI 1 "s_register_operand")
494 (match_operand:SI 2 "arm_add_operand")
495 (match_operand 3 "")]
496 "TARGET_32BIT"
497 {
498 if (CONST_INT_P (operands[2]))
499 emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1], operands[2]));
500 else
501 emit_insn (gen_addsi3_compareV_reg (operands[0], operands[1], operands[2]));
502 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
503
504 DONE;
505 })
506
507 (define_expand "addvdi4"
508 [(match_operand:DI 0 "s_register_operand")
509 (match_operand:DI 1 "s_register_operand")
510 (match_operand:DI 2 "reg_or_int_operand")
511 (match_operand 3 "")]
512 "TARGET_32BIT"
513 {
514 rtx lo_result, hi_result;
515 rtx lo_op1, hi_op1, lo_op2, hi_op2;
516 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
517 &lo_op2, &hi_op2);
518 lo_result = gen_lowpart (SImode, operands[0]);
519 hi_result = gen_highpart (SImode, operands[0]);
520
521 if (lo_op2 == const0_rtx)
522 {
523 emit_move_insn (lo_result, lo_op1);
524 if (!arm_add_operand (hi_op2, SImode))
525 hi_op2 = force_reg (SImode, hi_op2);
526
527 emit_insn (gen_addvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
528 }
529 else
530 {
531 if (!arm_add_operand (lo_op2, SImode))
532 lo_op2 = force_reg (SImode, lo_op2);
533 if (!arm_not_operand (hi_op2, SImode))
534 hi_op2 = force_reg (SImode, hi_op2);
535
536 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
537
538 if (hi_op2 == const0_rtx)
539 emit_insn (gen_addsi3_cin_vout_0 (hi_result, hi_op1));
540 else if (CONST_INT_P (hi_op2))
541 emit_insn (gen_addsi3_cin_vout_imm (hi_result, hi_op1, hi_op2));
542 else
543 emit_insn (gen_addsi3_cin_vout_reg (hi_result, hi_op1, hi_op2));
544
545 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
546 }
547
548 DONE;
549 })
550
551 (define_expand "addsi3_cin_vout_reg"
552 [(parallel
553 [(set (match_dup 3)
554 (compare:CC_V
555 (plus:DI
556 (plus:DI (match_dup 4)
557 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
558 (sign_extend:DI (match_operand:SI 2 "s_register_operand")))
559 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
560 (match_dup 2)))))
561 (set (match_operand:SI 0 "s_register_operand")
562 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
563 (match_dup 2)))])]
564 "TARGET_32BIT"
565 {
566 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
567 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
568 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
569 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
570 }
571 )
572
573 (define_insn "*addsi3_cin_vout_reg_insn"
574 [(set (reg:CC_V CC_REGNUM)
575 (compare:CC_V
576 (plus:DI
577 (plus:DI
578 (match_operand:DI 3 "arm_carry_operation" "")
579 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
580 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
581 (sign_extend:DI
582 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
583 (match_dup 1))
584 (match_dup 2)))))
585 (set (match_operand:SI 0 "s_register_operand" "=l,r")
586 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
587 (match_dup 2)))]
588 "TARGET_32BIT"
589 "@
590 adcs%?\\t%0, %0, %2
591 adcs%?\\t%0, %1, %2"
592 [(set_attr "type" "alus_sreg")
593 (set_attr "arch" "t2,*")
594 (set_attr "length" "2,4")]
595 )
596
597 (define_expand "addsi3_cin_vout_imm"
598 [(parallel
599 [(set (match_dup 3)
600 (compare:CC_V
601 (plus:DI
602 (plus:DI (match_dup 4)
603 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
604 (match_dup 2))
605 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
606 (match_dup 2)))))
607 (set (match_operand:SI 0 "s_register_operand")
608 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
609 (match_operand 2 "arm_adcimm_operand")))])]
610 "TARGET_32BIT"
611 {
612 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
613 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
614 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
615 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
616 }
617 )
618
619 (define_insn "*addsi3_cin_vout_imm_insn"
620 [(set (reg:CC_V CC_REGNUM)
621 (compare:CC_V
622 (plus:DI
623 (plus:DI
624 (match_operand:DI 3 "arm_carry_operation" "")
625 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
626 (match_operand 2 "arm_adcimm_operand" "I,K"))
627 (sign_extend:DI
628 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
629 (match_dup 1))
630 (match_dup 2)))))
631 (set (match_operand:SI 0 "s_register_operand" "=r,r")
632 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
633 (match_dup 2)))]
634 "TARGET_32BIT"
635 "@
636 adcs%?\\t%0, %1, %2
637 sbcs%?\\t%0, %1, #%B2"
638 [(set_attr "type" "alus_imm")]
639 )
640
641 (define_expand "addsi3_cin_vout_0"
642 [(parallel
643 [(set (match_dup 2)
644 (compare:CC_V
645 (plus:DI (match_dup 3)
646 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
647 (sign_extend:DI (plus:SI (match_dup 4) (match_dup 1)))))
648 (set (match_operand:SI 0 "s_register_operand")
649 (plus:SI (match_dup 4) (match_dup 1)))])]
650 "TARGET_32BIT"
651 {
652 operands[2] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
653 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
654 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
655 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
656 }
657 )
658
659 (define_insn "*addsi3_cin_vout_0_insn"
660 [(set (reg:CC_V CC_REGNUM)
661 (compare:CC_V
662 (plus:DI
663 (match_operand:DI 2 "arm_carry_operation" "")
664 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
665 (sign_extend:DI (plus:SI
666 (match_operand:SI 3 "arm_carry_operation" "")
667 (match_dup 1)))))
668 (set (match_operand:SI 0 "s_register_operand" "=r")
669 (plus:SI (match_dup 3) (match_dup 1)))]
670 "TARGET_32BIT"
671 "adcs%?\\t%0, %1, #0"
672 [(set_attr "type" "alus_imm")]
673 )
674
675 (define_expand "uaddvsi4"
676 [(match_operand:SI 0 "s_register_operand")
677 (match_operand:SI 1 "s_register_operand")
678 (match_operand:SI 2 "arm_add_operand")
679 (match_operand 3 "")]
680 "TARGET_32BIT"
681 {
682 emit_insn (gen_addsi3_compare_op1 (operands[0], operands[1], operands[2]));
683 arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
684
685 DONE;
686 })
687
688 (define_expand "uaddvdi4"
689 [(match_operand:DI 0 "s_register_operand")
690 (match_operand:DI 1 "s_register_operand")
691 (match_operand:DI 2 "reg_or_int_operand")
692 (match_operand 3 "")]
693 "TARGET_32BIT"
694 {
695 rtx lo_result, hi_result;
696 rtx lo_op1, hi_op1, lo_op2, hi_op2;
697 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
698 &lo_op2, &hi_op2);
699 lo_result = gen_lowpart (SImode, operands[0]);
700 hi_result = gen_highpart (SImode, operands[0]);
701
702 if (lo_op2 == const0_rtx)
703 {
704 emit_move_insn (lo_result, lo_op1);
705 if (!arm_add_operand (hi_op2, SImode))
706 hi_op2 = force_reg (SImode, hi_op2);
707
708 gen_uaddvsi4 (hi_result, hi_op1, hi_op2, operands[3]);
709 }
710 else
711 {
712 if (!arm_add_operand (lo_op2, SImode))
713 lo_op2 = force_reg (SImode, lo_op2);
714 if (!arm_not_operand (hi_op2, SImode))
715 hi_op2 = force_reg (SImode, hi_op2);
716
717 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
718
719 if (hi_op2 == const0_rtx)
720 emit_insn (gen_addsi3_cin_cout_0 (hi_result, hi_op1));
721 else if (CONST_INT_P (hi_op2))
722 emit_insn (gen_addsi3_cin_cout_imm (hi_result, hi_op1, hi_op2));
723 else
724 emit_insn (gen_addsi3_cin_cout_reg (hi_result, hi_op1, hi_op2));
725
726 arm_gen_unlikely_cbranch (GEU, CC_ADCmode, operands[3]);
727 }
728
729 DONE;
730 })
731
732 (define_expand "addsi3_cin_cout_reg"
733 [(parallel
734 [(set (match_dup 3)
735 (compare:CC_ADC
736 (plus:DI
737 (plus:DI (match_dup 4)
738 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
739 (zero_extend:DI (match_operand:SI 2 "s_register_operand")))
740 (const_int 4294967296)))
741 (set (match_operand:SI 0 "s_register_operand")
742 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
743 (match_dup 2)))])]
744 "TARGET_32BIT"
745 {
746 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
747 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
748 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
749 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
750 }
751 )
752
753 (define_insn "*addsi3_cin_cout_reg_insn"
754 [(set (reg:CC_ADC CC_REGNUM)
755 (compare:CC_ADC
756 (plus:DI
757 (plus:DI
758 (match_operand:DI 3 "arm_carry_operation" "")
759 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
760 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
761 (const_int 4294967296)))
762 (set (match_operand:SI 0 "s_register_operand" "=l,r")
763 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
764 (match_dup 1))
765 (match_dup 2)))]
766 "TARGET_32BIT"
767 "@
768 adcs%?\\t%0, %0, %2
769 adcs%?\\t%0, %1, %2"
770 [(set_attr "type" "alus_sreg")
771 (set_attr "arch" "t2,*")
772 (set_attr "length" "2,4")]
773 )
774
775 (define_expand "addsi3_cin_cout_imm"
776 [(parallel
777 [(set (match_dup 3)
778 (compare:CC_ADC
779 (plus:DI
780 (plus:DI (match_dup 4)
781 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
782 (match_dup 6))
783 (const_int 4294967296)))
784 (set (match_operand:SI 0 "s_register_operand")
785 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
786 (match_operand:SI 2 "arm_adcimm_operand")))])]
787 "TARGET_32BIT"
788 {
789 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
790 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
791 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
792 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
793 operands[6] = GEN_INT (UINTVAL (operands[2]) & 0xffffffff);
794 }
795 )
796
797 (define_insn "*addsi3_cin_cout_imm_insn"
798 [(set (reg:CC_ADC CC_REGNUM)
799 (compare:CC_ADC
800 (plus:DI
801 (plus:DI
802 (match_operand:DI 3 "arm_carry_operation" "")
803 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
804 (match_operand:DI 5 "const_int_operand" "n,n"))
805 (const_int 4294967296)))
806 (set (match_operand:SI 0 "s_register_operand" "=r,r")
807 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
808 (match_dup 1))
809 (match_operand:SI 2 "arm_adcimm_operand" "I,K")))]
810 "TARGET_32BIT
811 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[5])"
812 "@
813 adcs%?\\t%0, %1, %2
814 sbcs%?\\t%0, %1, #%B2"
815 [(set_attr "type" "alus_imm")]
816 )
817
818 (define_expand "addsi3_cin_cout_0"
819 [(parallel
820 [(set (match_dup 2)
821 (compare:CC_ADC
822 (plus:DI (match_dup 3)
823 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
824 (const_int 4294967296)))
825 (set (match_operand:SI 0 "s_register_operand")
826 (plus:SI (match_dup 4) (match_dup 1)))])]
827 "TARGET_32BIT"
828 {
829 operands[2] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
830 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
831 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
832 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
833 }
834 )
835
836 (define_insn "*addsi3_cin_cout_0_insn"
837 [(set (reg:CC_ADC CC_REGNUM)
838 (compare:CC_ADC
839 (plus:DI
840 (match_operand:DI 2 "arm_carry_operation" "")
841 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
842 (const_int 4294967296)))
843 (set (match_operand:SI 0 "s_register_operand" "=r")
844 (plus:SI (match_operand:SI 3 "arm_carry_operation" "") (match_dup 1)))]
845 "TARGET_32BIT"
846 "adcs%?\\t%0, %1, #0"
847 [(set_attr "type" "alus_imm")]
848 )
849
850 (define_expand "addsi3"
851 [(set (match_operand:SI 0 "s_register_operand")
852 (plus:SI (match_operand:SI 1 "s_register_operand")
853 (match_operand:SI 2 "reg_or_int_operand")))]
854 "TARGET_EITHER"
855 "
856 if (TARGET_32BIT && CONST_INT_P (operands[2]))
857 {
858 arm_split_constant (PLUS, SImode, NULL_RTX,
859 INTVAL (operands[2]), operands[0], operands[1],
860 optimize && can_create_pseudo_p ());
861 DONE;
862 }
863 "
864 )
865
866 ; If there is a scratch available, this will be faster than synthesizing the
867 ; addition.
868 (define_peephole2
869 [(match_scratch:SI 3 "r")
870 (set (match_operand:SI 0 "arm_general_register_operand" "")
871 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
872 (match_operand:SI 2 "const_int_operand" "")))]
873 "TARGET_32BIT &&
874 !(const_ok_for_arm (INTVAL (operands[2]))
875 || const_ok_for_arm (-INTVAL (operands[2])))
876 && const_ok_for_arm (~INTVAL (operands[2]))"
877 [(set (match_dup 3) (match_dup 2))
878 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
879 ""
880 )
881
882 ;; The r/r/k alternative is required when reloading the address
883 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
884 ;; put the duplicated register first, and not try the commutative version.
885 (define_insn_and_split "*arm_addsi3"
886 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
887 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
888 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
889 "TARGET_32BIT"
890 "@
891 add%?\\t%0, %0, %2
892 add%?\\t%0, %1, %2
893 add%?\\t%0, %1, %2
894 add%?\\t%0, %1, %2
895 add%?\\t%0, %1, %2
896 add%?\\t%0, %1, %2
897 add%?\\t%0, %2, %1
898 add%?\\t%0, %1, %2
899 addw%?\\t%0, %1, %2
900 addw%?\\t%0, %1, %2
901 sub%?\\t%0, %1, #%n2
902 sub%?\\t%0, %1, #%n2
903 sub%?\\t%0, %1, #%n2
904 subw%?\\t%0, %1, #%n2
905 subw%?\\t%0, %1, #%n2
906 #"
907 "TARGET_32BIT
908 && CONST_INT_P (operands[2])
909 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
910 && (reload_completed || !arm_eliminable_register (operands[1]))"
911 [(clobber (const_int 0))]
912 "
913 arm_split_constant (PLUS, SImode, curr_insn,
914 INTVAL (operands[2]), operands[0],
915 operands[1], 0);
916 DONE;
917 "
918 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
919 (set_attr "predicable" "yes")
920 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
921 (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
922 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
923 (const_string "alu_imm")
924 (const_string "alu_sreg")))
925 ]
926 )
927
928 (define_insn "addsi3_compareV_reg"
929 [(set (reg:CC_V CC_REGNUM)
930 (compare:CC_V
931 (plus:DI
932 (sign_extend:DI (match_operand:SI 1 "register_operand" "%l,0,r"))
933 (sign_extend:DI (match_operand:SI 2 "register_operand" "l,r,r")))
934 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
935 (set (match_operand:SI 0 "register_operand" "=l,r,r")
936 (plus:SI (match_dup 1) (match_dup 2)))]
937 "TARGET_32BIT"
938 "adds%?\\t%0, %1, %2"
939 [(set_attr "conds" "set")
940 (set_attr "arch" "t2,t2,*")
941 (set_attr "length" "2,2,4")
942 (set_attr "type" "alus_sreg")]
943 )
944
945 (define_insn "*addsi3_compareV_reg_nosum"
946 [(set (reg:CC_V CC_REGNUM)
947 (compare:CC_V
948 (plus:DI
949 (sign_extend:DI (match_operand:SI 0 "register_operand" "%l,r"))
950 (sign_extend:DI (match_operand:SI 1 "register_operand" "l,r")))
951 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
952 "TARGET_32BIT"
953 "cmn%?\\t%0, %1"
954 [(set_attr "conds" "set")
955 (set_attr "arch" "t2,*")
956 (set_attr "length" "2,4")
957 (set_attr "type" "alus_sreg")]
958 )
959
960 (define_insn "addsi3_compareV_imm"
961 [(set (reg:CC_V CC_REGNUM)
962 (compare:CC_V
963 (plus:DI
964 (sign_extend:DI
965 (match_operand:SI 1 "register_operand" "l,0,l,0,r,r"))
966 (match_operand 2 "arm_addimm_operand" "Pd,Py,Px,Pw,I,L"))
967 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
968 (set (match_operand:SI 0 "register_operand" "=l,l,l,l,r,r")
969 (plus:SI (match_dup 1) (match_dup 2)))]
970 "TARGET_32BIT
971 && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
972 "@
973 adds%?\\t%0, %1, %2
974 adds%?\\t%0, %0, %2
975 subs%?\\t%0, %1, #%n2
976 subs%?\\t%0, %0, #%n2
977 adds%?\\t%0, %1, %2
978 subs%?\\t%0, %1, #%n2"
979 [(set_attr "conds" "set")
980 (set_attr "arch" "t2,t2,t2,t2,*,*")
981 (set_attr "length" "2,2,2,2,4,4")
982 (set_attr "type" "alus_imm")]
983 )
984
985 (define_insn "addsi3_compareV_imm_nosum"
986 [(set (reg:CC_V CC_REGNUM)
987 (compare:CC_V
988 (plus:DI
989 (sign_extend:DI
990 (match_operand:SI 0 "register_operand" "l,r,r"))
991 (match_operand 1 "arm_addimm_operand" "Pw,I,L"))
992 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
993 "TARGET_32BIT
994 && INTVAL (operands[1]) == ARM_SIGN_EXTEND (INTVAL (operands[1]))"
995 "@
996 cmp%?\\t%0, #%n1
997 cmn%?\\t%0, %1
998 cmp%?\\t%0, #%n1"
999 [(set_attr "conds" "set")
1000 (set_attr "arch" "t2,*,*")
1001 (set_attr "length" "2,4,4")
1002 (set_attr "type" "alus_imm")]
1003 )
1004
1005 ;; We can handle more constants efficiently if we can clobber either a scratch
1006 ;; or the other source operand. We deliberately leave this late as in
1007 ;; high register pressure situations it's not worth forcing any reloads.
1008 (define_peephole2
1009 [(match_scratch:SI 2 "l")
1010 (set (reg:CC_V CC_REGNUM)
1011 (compare:CC_V
1012 (plus:DI
1013 (sign_extend:DI
1014 (match_operand:SI 0 "low_register_operand"))
1015 (match_operand 1 "const_int_operand"))
1016 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1017 "TARGET_THUMB2
1018 && satisfies_constraint_Pd (operands[1])"
1019 [(parallel[
1020 (set (reg:CC_V CC_REGNUM)
1021 (compare:CC_V
1022 (plus:DI (sign_extend:DI (match_dup 0))
1023 (sign_extend:DI (match_dup 1)))
1024 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1025 (set (match_dup 2) (plus:SI (match_dup 0) (match_dup 1)))])]
1026 )
1027
1028 (define_peephole2
1029 [(set (reg:CC_V CC_REGNUM)
1030 (compare:CC_V
1031 (plus:DI
1032 (sign_extend:DI
1033 (match_operand:SI 0 "low_register_operand"))
1034 (match_operand 1 "const_int_operand"))
1035 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1036 "TARGET_THUMB2
1037 && dead_or_set_p (peep2_next_insn (0), operands[0])
1038 && satisfies_constraint_Py (operands[1])"
1039 [(parallel[
1040 (set (reg:CC_V CC_REGNUM)
1041 (compare:CC_V
1042 (plus:DI (sign_extend:DI (match_dup 0))
1043 (sign_extend:DI (match_dup 1)))
1044 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1045 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 1)))])]
1046 )
1047
1048 (define_insn "addsi3_compare0"
1049 [(set (reg:CC_NOOV CC_REGNUM)
1050 (compare:CC_NOOV
1051 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
1052 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
1053 (const_int 0)))
1054 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1055 (plus:SI (match_dup 1) (match_dup 2)))]
1056 "TARGET_ARM"
1057 "@
1058 adds%?\\t%0, %1, %2
1059 subs%?\\t%0, %1, #%n2
1060 adds%?\\t%0, %1, %2"
1061 [(set_attr "conds" "set")
1062 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
1063 )
1064
1065 (define_insn "*addsi3_compare0_scratch"
1066 [(set (reg:CC_NOOV CC_REGNUM)
1067 (compare:CC_NOOV
1068 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
1069 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
1070 (const_int 0)))]
1071 "TARGET_ARM"
1072 "@
1073 cmn%?\\t%0, %1
1074 cmp%?\\t%0, #%n1
1075 cmn%?\\t%0, %1"
1076 [(set_attr "conds" "set")
1077 (set_attr "predicable" "yes")
1078 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
1079 )
1080
1081 (define_insn "*compare_negsi_si"
1082 [(set (reg:CC_Z CC_REGNUM)
1083 (compare:CC_Z
1084 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
1085 (match_operand:SI 1 "s_register_operand" "l,r")))]
1086 "TARGET_32BIT"
1087 "cmn%?\\t%1, %0"
1088 [(set_attr "conds" "set")
1089 (set_attr "predicable" "yes")
1090 (set_attr "arch" "t2,*")
1091 (set_attr "length" "2,4")
1092 (set_attr "predicable_short_it" "yes,no")
1093 (set_attr "type" "alus_sreg")]
1094 )
1095
1096 ;; This is the canonicalization of subsi3_compare when the
1097 ;; addend is a constant.
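;; For example, comparing a register with 1 while also computing reg - 1 is
;; represented here as a compare against 1 paired with a plus of -1, and is
;; normally emitted as a single "subs" instruction.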
1098 (define_insn "cmpsi2_addneg"
1099 [(set (reg:CC CC_REGNUM)
1100 (compare:CC
1101 (match_operand:SI 1 "s_register_operand" "r,r")
1102 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
1103 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1104 (plus:SI (match_dup 1)
1105 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
1106 "TARGET_32BIT
1107 && (INTVAL (operands[2])
1108 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
1109 {
1110 /* For 0 and INT_MIN it is essential that we use subs, as adds will result
1111 in different condition codes (like cmn rather than like cmp), so that
1112 alternative comes first.  Both alternatives can match for any 0x??000000
1113 value; except for 0 and INT_MIN it does not matter which we choose.  They
1114 also both match for -1 and 1 with TARGET_THUMB2; in that case prefer the
1115 form using #1, as it is shorter.  */
1116 if (which_alternative == 0 && operands[3] != const1_rtx)
1117 return "subs%?\\t%0, %1, #%n3";
1118 else
1119 return "adds%?\\t%0, %1, %3";
1120 }
1121 [(set_attr "conds" "set")
1122 (set_attr "type" "alus_sreg")]
1123 )
1124
1125 ;; Convert the sequence
1126 ;; sub rd, rn, #1
1127 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
1128 ;; bne dest
1129 ;; into
1130 ;; subs rd, rn, #1
1131 ;; bcs dest ((unsigned)rn >= 1)
1132 ;; similarly for the beq variant using bcc.
1133 ;; This is a common looping idiom (while (n--))
1134 (define_peephole2
1135 [(set (match_operand:SI 0 "arm_general_register_operand" "")
1136 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
1137 (const_int -1)))
1138 (set (match_operand 2 "cc_register" "")
1139 (compare (match_dup 0) (const_int -1)))
1140 (set (pc)
1141 (if_then_else (match_operator 3 "equality_operator"
1142 [(match_dup 2) (const_int 0)])
1143 (match_operand 4 "" "")
1144 (match_operand 5 "" "")))]
1145 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
1146 [(parallel[
1147 (set (match_dup 2)
1148 (compare:CC
1149 (match_dup 1) (const_int 1)))
1150 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
1151 (set (pc)
1152 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
1153 (match_dup 4)
1154 (match_dup 5)))]
1155 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
1156 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
1157 ? GEU : LTU),
1158 VOIDmode,
1159 operands[2], const0_rtx);"
1160 )
1161
1162 ;; The next four insns work because they compare the result with one of
1163 ;; the operands, and we know that the use of the condition code is
1164 ;; either GEU or LTU, so we can use the carry flag from the addition
1165 ;; instead of doing the compare a second time.
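;; For example, after "adds r0, r1, r2" the carry flag is set exactly when
;; the unsigned addition wrapped, i.e. when the result is (unsigned) less
;; than either input, so a following GEU/LTU test against one of the
;; operands can simply reuse the carry flag rather than issue a second
;; compare.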
1166 (define_insn "addsi3_compare_op1"
1167 [(set (reg:CC_C CC_REGNUM)
1168 (compare:CC_C
1169 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,rk,rk")
1170 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rkI,L"))
1171 (match_dup 1)))
1172 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,rk,rk")
1173 (plus:SI (match_dup 1) (match_dup 2)))]
1174 "TARGET_32BIT"
1175 "@
1176 adds%?\\t%0, %1, %2
1177 adds%?\\t%0, %0, %2
1178 subs%?\\t%0, %1, #%n2
1179 subs%?\\t%0, %0, #%n2
1180 adds%?\\t%0, %1, %2
1181 subs%?\\t%0, %1, #%n2"
1182 [(set_attr "conds" "set")
1183 (set_attr "arch" "t2,t2,t2,t2,*,*")
1184 (set_attr "length" "2,2,2,2,4,4")
1185 (set (attr "type")
1186 (if_then_else (match_operand 2 "const_int_operand")
1187 (const_string "alu_imm")
1188 (const_string "alu_sreg")))]
1189 )
1190
1191 (define_insn "*addsi3_compare_op2"
1192 [(set (reg:CC_C CC_REGNUM)
1193 (compare:CC_C
1194 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r")
1195 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rI,L"))
1196 (match_dup 2)))
1197 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r")
1198 (plus:SI (match_dup 1) (match_dup 2)))]
1199 "TARGET_32BIT"
1200 "@
1201 adds%?\\t%0, %1, %2
1202 adds%?\\t%0, %0, %2
1203 subs%?\\t%0, %1, #%n2
1204 subs%?\\t%0, %0, #%n2
1205 adds%?\\t%0, %1, %2
1206 subs%?\\t%0, %1, #%n2"
1207 [(set_attr "conds" "set")
1208 (set_attr "arch" "t2,t2,t2,t2,*,*")
1209 (set_attr "length" "2,2,2,2,4,4")
1210 (set (attr "type")
1211 (if_then_else (match_operand 2 "const_int_operand")
1212 (const_string "alu_imm")
1213 (const_string "alu_sreg")))]
1214 )
1215
1216 (define_insn "*compare_addsi2_op0"
1217 [(set (reg:CC_C CC_REGNUM)
1218 (compare:CC_C
1219 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1220 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1221 (match_dup 0)))]
1222 "TARGET_32BIT"
1223 "@
1224 cmn%?\\t%0, %1
1225 cmp%?\\t%0, #%n1
1226 cmn%?\\t%0, %1
1227 cmp%?\\t%0, #%n1"
1228 [(set_attr "conds" "set")
1229 (set_attr "predicable" "yes")
1230 (set_attr "arch" "t2,t2,*,*")
1231 (set_attr "predicable_short_it" "yes,yes,no,no")
1232 (set_attr "length" "2,2,4,4")
1233 (set (attr "type")
1234 (if_then_else (match_operand 1 "const_int_operand")
1235 (const_string "alu_imm")
1236 (const_string "alu_sreg")))]
1237 )
1238
1239 (define_insn "*compare_addsi2_op1"
1240 [(set (reg:CC_C CC_REGNUM)
1241 (compare:CC_C
1242 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1243 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1244 (match_dup 1)))]
1245 "TARGET_32BIT"
1246 "@
1247 cmn%?\\t%0, %1
1248 cmp%?\\t%0, #%n1
1249 cmn%?\\t%0, %1
1250 cmp%?\\t%0, #%n1"
1251 [(set_attr "conds" "set")
1252 (set_attr "predicable" "yes")
1253 (set_attr "arch" "t2,t2,*,*")
1254 (set_attr "predicable_short_it" "yes,yes,no,no")
1255 (set_attr "length" "2,2,4,4")
1256 (set (attr "type")
1257 (if_then_else (match_operand 1 "const_int_operand")
1258 (const_string "alu_imm")
1259 (const_string "alu_sreg")))]
1260 )
1261
1262 (define_insn "addsi3_carryin"
1263 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1264 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
1265 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
1266 (match_operand:SI 3 "arm_carry_operation" "")))]
1267 "TARGET_32BIT"
1268 "@
1269 adc%?\\t%0, %1, %2
1270 adc%?\\t%0, %1, %2
1271 sbc%?\\t%0, %1, #%B2"
1272 [(set_attr "conds" "use")
1273 (set_attr "predicable" "yes")
1274 (set_attr "arch" "t2,*,*")
1275 (set_attr "length" "4")
1276 (set_attr "predicable_short_it" "yes,no,no")
1277 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1278 )
1279
1280 ;; Canonicalization of the above when the immediate is zero.
1281 (define_insn "add0si3_carryin"
1282 [(set (match_operand:SI 0 "s_register_operand" "=r")
1283 (plus:SI (match_operand:SI 2 "arm_carry_operation" "")
1284 (match_operand:SI 1 "arm_not_operand" "r")))]
1285 "TARGET_32BIT"
1286 "adc%?\\t%0, %1, #0"
1287 [(set_attr "conds" "use")
1288 (set_attr "predicable" "yes")
1289 (set_attr "length" "4")
1290 (set_attr "type" "adc_imm")]
1291 )
1292
1293 (define_insn "*addsi3_carryin_alt2"
1294 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1295 (plus:SI (plus:SI (match_operand:SI 3 "arm_carry_operation" "")
1296 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
1297 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
1298 "TARGET_32BIT"
1299 "@
1300 adc%?\\t%0, %1, %2
1301 adc%?\\t%0, %1, %2
1302 sbc%?\\t%0, %1, #%B2"
1303 [(set_attr "conds" "use")
1304 (set_attr "predicable" "yes")
1305 (set_attr "arch" "t2,*,*")
1306 (set_attr "length" "4")
1307 (set_attr "predicable_short_it" "yes,no,no")
1308 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1309 )
1310
1311 (define_insn "*addsi3_carryin_shift"
1312 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1313 (plus:SI (plus:SI
1314 (match_operator:SI 2 "shift_operator"
1315 [(match_operand:SI 3 "s_register_operand" "r,r")
1316 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1317 (match_operand:SI 5 "arm_carry_operation" ""))
1318 (match_operand:SI 1 "s_register_operand" "r,r")))]
1319 "TARGET_32BIT"
1320 "adc%?\\t%0, %1, %3%S2"
1321 [(set_attr "conds" "use")
1322 (set_attr "arch" "32,a")
1323 (set_attr "shift" "3")
1324 (set_attr "predicable" "yes")
1325 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1326 (const_string "alu_shift_imm")
1327 (const_string "alu_shift_reg")))]
1328 )
1329
1330 (define_insn "*addsi3_carryin_clobercc"
1331 [(set (match_operand:SI 0 "s_register_operand" "=r")
1332 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1333 (match_operand:SI 2 "arm_rhs_operand" "rI"))
1334 (match_operand:SI 3 "arm_carry_operation" "")))
1335 (clobber (reg:CC CC_REGNUM))]
1336 "TARGET_32BIT"
1337 "adcs%?\\t%0, %1, %2"
1338 [(set_attr "conds" "set")
1339 (set_attr "type" "adcs_reg")]
1340 )
1341
1342 (define_expand "subv<mode>4"
1343 [(match_operand:SIDI 0 "register_operand")
1344 (match_operand:SIDI 1 "register_operand")
1345 (match_operand:SIDI 2 "register_operand")
1346 (match_operand 3 "")]
1347 "TARGET_32BIT"
1348 {
1349 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
1350 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
1351
1352 DONE;
1353 })
1354
1355 (define_expand "usubvsi4"
1356 [(match_operand:SI 0 "s_register_operand")
1357 (match_operand:SI 1 "arm_rhs_operand")
1358 (match_operand:SI 2 "arm_add_operand")
1359 (match_operand 3 "")]
1360 "TARGET_32BIT"
1361 {
1362 machine_mode mode = CCmode;
1363 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1364 {
1365 /* If both operands are constants we can decide the result statically. */
1366 wi::overflow_type overflow;
1367 wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1368 rtx_mode_t (operands[2], SImode),
1369 UNSIGNED, &overflow);
1370 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1371 if (overflow != wi::OVF_NONE)
1372 emit_jump_insn (gen_jump (operands[3]));
1373 DONE;
1374 }
1375 else if (CONST_INT_P (operands[2]))
1376 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
1377 GEN_INT (-INTVAL (operands[2]))));
1378 else if (CONST_INT_P (operands[1]))
1379 {
1380 mode = CC_RSBmode;
1381 emit_insn (gen_rsb_imm_compare (operands[0], operands[1], operands[2],
1382 GEN_INT (~UINTVAL (operands[1]))));
1383 }
1384 else
1385 emit_insn (gen_subsi3_compare1 (operands[0], operands[1], operands[2]));
1386 arm_gen_unlikely_cbranch (LTU, mode, operands[3]);
1387
1388 DONE;
1389 })
1390
1391 (define_expand "usubvdi4"
1392 [(match_operand:DI 0 "s_register_operand")
1393 (match_operand:DI 1 "reg_or_int_operand")
1394 (match_operand:DI 2 "reg_or_int_operand")
1395 (match_operand 3 "")]
1396 "TARGET_32BIT"
1397 {
1398 rtx lo_result, hi_result;
1399 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1400 lo_result = gen_lowpart (SImode, operands[0]);
1401 hi_result = gen_highpart (SImode, operands[0]);
1402 machine_mode mode = CCmode;
1403
1404 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1405 {
1406 /* If both operands are constants we can decide the result statically. */
1407 wi::overflow_type overflow;
1408 wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1409 rtx_mode_t (operands[2], DImode),
1410 UNSIGNED, &overflow);
1411 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1412 if (overflow != wi::OVF_NONE)
1413 emit_jump_insn (gen_jump (operands[3]));
1414 DONE;
1415 }
1416 else if (CONST_INT_P (operands[1]))
1417 {
1418 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1419 &lo_op1, &hi_op1);
1420 if (const_ok_for_arm (INTVAL (lo_op1)))
1421 {
1422 emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1423 GEN_INT (~UINTVAL (lo_op1))));
1424 /* We could potentially use RSC here in Arm state, but not
1425 in Thumb, so it's probably not worth the effort of handling
1426 this. */
1427 hi_op1 = force_reg (SImode, hi_op1);
1428 mode = CC_RSBmode;
1429 goto highpart;
1430 }
1431 operands[1] = force_reg (DImode, operands[1]);
1432 }
1433
1434 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1435 &lo_op2, &hi_op2);
1436 if (lo_op2 == const0_rtx)
1437 {
1438 emit_move_insn (lo_result, lo_op1);
1439 if (!arm_add_operand (hi_op2, SImode))
1440 hi_op2 = force_reg (SImode, hi_op2);
1441 emit_insn (gen_usubvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1442 DONE;
1443 }
1444
1445 if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1446 lo_op2 = force_reg (SImode, lo_op2);
1447 if (CONST_INT_P (lo_op2))
1448 emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1449 GEN_INT (-INTVAL (lo_op2))));
1450 else
1451 emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1452
1453 highpart:
1454 if (!arm_not_operand (hi_op2, SImode))
1455 hi_op2 = force_reg (SImode, hi_op2);
1456 rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1457 if (CONST_INT_P (hi_op2))
1458 emit_insn (gen_usubvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1459 GEN_INT (UINTVAL (hi_op2) & 0xffffffff),
1460 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1461 gen_rtx_LTU (DImode, ccreg,
1462 const0_rtx)));
1463 else
1464 emit_insn (gen_usubvsi3_borrow (hi_result, hi_op1, hi_op2,
1465 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1466 gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1467 arm_gen_unlikely_cbranch (LTU, CC_Bmode, operands[3]);
1468
1469 DONE;
1470 })
1471
1472 (define_insn "subdi3_compare1"
1473 [(set (reg:CC CC_REGNUM)
1474 (compare:CC
1475 (match_operand:DI 1 "s_register_operand" "r")
1476 (match_operand:DI 2 "s_register_operand" "r")))
1477 (set (match_operand:DI 0 "s_register_operand" "=&r")
1478 (minus:DI (match_dup 1) (match_dup 2)))]
1479 "TARGET_32BIT"
1480 "subs\\t%Q0, %Q1, %Q2;sbcs\\t%R0, %R1, %R2"
1481 [(set_attr "conds" "set")
1482 (set_attr "length" "8")
1483 (set_attr "type" "multiple")]
1484 )
1485
1486 (define_insn "subsi3_compare1"
1487 [(set (reg:CC CC_REGNUM)
1488 (compare:CC
1489 (match_operand:SI 1 "register_operand" "r")
1490 (match_operand:SI 2 "register_operand" "r")))
1491 (set (match_operand:SI 0 "register_operand" "=r")
1492 (minus:SI (match_dup 1) (match_dup 2)))]
1493 "TARGET_32BIT"
1494 "subs%?\\t%0, %1, %2"
1495 [(set_attr "conds" "set")
1496 (set_attr "type" "alus_sreg")]
1497 )
1498
1499 (define_insn "subsi3_carryin"
1500 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1501 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
1502 (match_operand:SI 2 "s_register_operand" "r,r,r"))
1503 (match_operand:SI 3 "arm_borrow_operation" "")))]
1504 "TARGET_32BIT"
1505 "@
1506 sbc%?\\t%0, %1, %2
1507 rsc%?\\t%0, %2, %1
1508 sbc%?\\t%0, %2, %2, lsl #1"
1509 [(set_attr "conds" "use")
1510 (set_attr "arch" "*,a,t2")
1511 (set_attr "predicable" "yes")
1512 (set_attr "type" "adc_reg,adc_imm,alu_shift_imm")]
1513 )
1514
1515 (define_insn "cmpsi3_carryin_<CC_EXTEND>out"
1516 [(set (reg:<CC_EXTEND> CC_REGNUM)
1517 (compare:<CC_EXTEND>
1518 (SE:DI (match_operand:SI 1 "s_register_operand" "0,r"))
1519 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1520 (SE:DI (match_operand:SI 2 "s_register_operand" "l,r")))))
1521 (clobber (match_scratch:SI 0 "=l,r"))]
1522 "TARGET_32BIT"
1523 "sbcs\\t%0, %1, %2"
1524 [(set_attr "conds" "set")
1525 (set_attr "arch" "t2,*")
1526 (set_attr "length" "2,4")
1527 (set_attr "type" "adc_reg")]
1528 )
1529
1530 ;; Similar to the above, but handling a constant which has a different
1531 ;; canonicalization.
1532 (define_insn "cmpsi3_imm_carryin_<CC_EXTEND>out"
1533 [(set (reg:<CC_EXTEND> CC_REGNUM)
1534 (compare:<CC_EXTEND>
1535 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1536 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1537 (match_operand:DI 2 "arm_adcimm_operand" "I,K"))))
1538 (clobber (match_scratch:SI 0 "=l,r"))]
1539 "TARGET_32BIT"
1540 "@
1541 sbcs\\t%0, %1, %2
1542 adcs\\t%0, %1, #%B2"
1543 [(set_attr "conds" "set")
1544 (set_attr "type" "adc_imm")]
1545 )
1546
1547 ;; Further canonicalization when the constant is zero.
1548 (define_insn "cmpsi3_0_carryin_<CC_EXTEND>out"
1549 [(set (reg:<CC_EXTEND> CC_REGNUM)
1550 (compare:<CC_EXTEND>
1551 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1552 (match_operand:DI 2 "arm_borrow_operation" "")))
1553 (clobber (match_scratch:SI 0 "=l,r"))]
1554 "TARGET_32BIT"
1555 "sbcs\\t%0, %1, #0"
1556 [(set_attr "conds" "set")
1557 (set_attr "type" "adc_imm")]
1558 )
1559
1560 (define_insn "*subsi3_carryin_const"
1561 [(set (match_operand:SI 0 "s_register_operand" "=r")
1562 (minus:SI (plus:SI
1563 (match_operand:SI 1 "s_register_operand" "r")
1564 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1565 (match_operand:SI 3 "arm_borrow_operation" "")))]
1566 "TARGET_32BIT"
1567 "sbc\\t%0, %1, #%n2"
1568 [(set_attr "conds" "use")
1569 (set_attr "type" "adc_imm")]
1570 )
1571
1572 (define_insn "*subsi3_carryin_const0"
1573 [(set (match_operand:SI 0 "s_register_operand" "=r")
1574 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1575 (match_operand:SI 2 "arm_borrow_operation" "")))]
1576 "TARGET_32BIT"
1577 "sbc\\t%0, %1, #0"
1578 [(set_attr "conds" "use")
1579 (set_attr "type" "adc_imm")]
1580 )
1581
1582 (define_insn "*subsi3_carryin_shift"
1583 [(set (match_operand:SI 0 "s_register_operand" "=r")
1584 (minus:SI (minus:SI
1585 (match_operand:SI 1 "s_register_operand" "r")
1586 (match_operator:SI 2 "shift_operator"
1587 [(match_operand:SI 3 "s_register_operand" "r")
1588 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
1589 (match_operand:SI 5 "arm_borrow_operation" "")))]
1590 "TARGET_32BIT"
1591 "sbc%?\\t%0, %1, %3%S2"
1592 [(set_attr "conds" "use")
1593 (set_attr "predicable" "yes")
1594 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1595 (const_string "alu_shift_imm")
1596 (const_string "alu_shift_reg")))]
1597 )
1598
1599 (define_insn "*subsi3_carryin_shift_alt"
1600 [(set (match_operand:SI 0 "s_register_operand" "=r")
1601 (minus:SI (minus:SI
1602 (match_operand:SI 1 "s_register_operand" "r")
1603 (match_operand:SI 5 "arm_borrow_operation" ""))
1604 (match_operator:SI 2 "shift_operator"
1605 [(match_operand:SI 3 "s_register_operand" "r")
1606 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
1607 "TARGET_32BIT"
1608 "sbc%?\\t%0, %1, %3%S2"
1609 [(set_attr "conds" "use")
1610 (set_attr "predicable" "yes")
1611 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1612 (const_string "alu_shift_imm")
1613 (const_string "alu_shift_reg")))]
1614 )
1615
1616 (define_insn "*rsbsi3_carryin_shift"
1617 [(set (match_operand:SI 0 "s_register_operand" "=r")
1618 (minus:SI (minus:SI
1619 (match_operator:SI 2 "shift_operator"
1620 [(match_operand:SI 3 "s_register_operand" "r")
1621 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1622 (match_operand:SI 1 "s_register_operand" "r"))
1623 (match_operand:SI 5 "arm_borrow_operation" "")))]
1624 "TARGET_ARM"
1625 "rsc%?\\t%0, %1, %3%S2"
1626 [(set_attr "conds" "use")
1627 (set_attr "predicable" "yes")
1628 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1629 (const_string "alu_shift_imm")
1630 (const_string "alu_shift_reg")))]
1631 )
1632
1633 (define_insn "*rsbsi3_carryin_shift_alt"
1634 [(set (match_operand:SI 0 "s_register_operand" "=r")
1635 (minus:SI (minus:SI
1636 (match_operator:SI 2 "shift_operator"
1637 [(match_operand:SI 3 "s_register_operand" "r")
1638 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1639 (match_operand:SI 5 "arm_borrow_operation" ""))
1640 (match_operand:SI 1 "s_register_operand" "r")))]
1641 "TARGET_ARM"
1642 "rsc%?\\t%0, %1, %3%S2"
1643 [(set_attr "conds" "use")
1644 (set_attr "predicable" "yes")
1645 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1646 (const_string "alu_shift_imm")
1647 (const_string "alu_shift_reg")))]
1648 )
1649
1650 ; transform ((x << y) - 1) to ~(~(x-1) << y) where x is a constant.
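; (Added sanity check of the identity, working mod 2^32:  ~(x-1) = -(x-1)-1 = -x,
;  so ~(x-1) << y = (-x) << y = -(x << y), and ~(-(x << y)) = (x << y) - 1.)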
1651 (define_split
1652 [(set (match_operand:SI 0 "s_register_operand" "")
1653 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1654 (match_operand:SI 2 "s_register_operand" ""))
1655 (const_int -1)))
1656 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1657 "TARGET_32BIT"
1658 [(set (match_dup 3) (match_dup 1))
1659 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1660 "
1661 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1662 ")
1663
1664 (define_expand "addsf3"
1665 [(set (match_operand:SF 0 "s_register_operand")
1666 (plus:SF (match_operand:SF 1 "s_register_operand")
1667 (match_operand:SF 2 "s_register_operand")))]
1668 "TARGET_32BIT && TARGET_HARD_FLOAT"
1669 "
1670 ")
1671
1672 (define_expand "adddf3"
1673 [(set (match_operand:DF 0 "s_register_operand")
1674 (plus:DF (match_operand:DF 1 "s_register_operand")
1675 (match_operand:DF 2 "s_register_operand")))]
1676 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1677 "
1678 ")
1679
1680 (define_expand "subdi3"
1681 [(parallel
1682 [(set (match_operand:DI 0 "s_register_operand")
1683 (minus:DI (match_operand:DI 1 "reg_or_int_operand")
1684 (match_operand:DI 2 "s_register_operand")))
1685 (clobber (reg:CC CC_REGNUM))])]
1686 "TARGET_EITHER"
1687 "
1688 if (TARGET_THUMB1)
1689 {
1690 if (!REG_P (operands[1]))
1691 operands[1] = force_reg (DImode, operands[1]);
1692 }
1693 else
1694 {
1695 rtx lo_result, hi_result, lo_dest, hi_dest;
1696 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1697 rtx condition;
1698
1699 /* Since operands[1] may be an integer, pass it second, so that
1700 any necessary simplifications will be done on the decomposed
1701 constant. */
1702 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1703 &lo_op1, &hi_op1);
1704 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
1705 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
1706
1707 if (!arm_rhs_operand (lo_op1, SImode))
1708 lo_op1 = force_reg (SImode, lo_op1);
1709
1710 if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
1711 || !arm_rhs_operand (hi_op1, SImode))
1712 hi_op1 = force_reg (SImode, hi_op1);
1713
1714 rtx cc_reg;
1715 if (lo_op1 == const0_rtx)
1716 {
1717 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1718 emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
1719 }
1720 else if (CONST_INT_P (lo_op1))
1721 {
1722 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1723 emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
1724 GEN_INT (~UINTVAL (lo_op1))));
1725 }
1726 else
1727 {
1728 cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
1729 emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
1730 }
1731
1732 condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);
1733
1734 if (hi_op1 == const0_rtx)
1735 emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
1736 else
1737 emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));
1738
1739 if (lo_result != lo_dest)
1740 emit_move_insn (lo_result, lo_dest);
1741
1742 if (hi_result != hi_dest)
1743 emit_move_insn (hi_result, hi_dest);
1744
1745 DONE;
1746 }
1747 "
1748 )
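
;; A minimal illustration (not part of the original sources) of what the
;; subdi3 expansion above handles:
;;
;;   unsigned long long sub64 (unsigned long long a, unsigned long long b)
;;   {
;;     return a - b;
;;   }
;;
;; which is expected to become a SUBS on the low words followed by an SBC on
;; the high words, e.g. (register assignment purely illustrative):
;;
;;   subs  r0, r0, r2
;;   sbc   r1, r1, r3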
1749
1750 (define_expand "subsi3"
1751 [(set (match_operand:SI 0 "s_register_operand")
1752 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1753 (match_operand:SI 2 "s_register_operand")))]
1754 "TARGET_EITHER"
1755 "
1756 if (CONST_INT_P (operands[1]))
1757 {
1758 if (TARGET_32BIT)
1759 {
1760 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1761 operands[1] = force_reg (SImode, operands[1]);
1762 else
1763 {
1764 arm_split_constant (MINUS, SImode, NULL_RTX,
1765 INTVAL (operands[1]), operands[0],
1766 operands[2],
1767 optimize && can_create_pseudo_p ());
1768 DONE;
1769 }
1770 }
1771 else /* TARGET_THUMB1 */
1772 operands[1] = force_reg (SImode, operands[1]);
1773 }
1774 "
1775 )
1776
1777 ; ??? Check Thumb-2 split length
1778 (define_insn_and_split "*arm_subsi3_insn"
1779 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
1780 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
1781 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
1782 "TARGET_32BIT"
1783 "@
1784 sub%?\\t%0, %1, %2
1785 sub%?\\t%0, %2
1786 sub%?\\t%0, %1, %2
1787 rsb%?\\t%0, %2, %1
1788 rsb%?\\t%0, %2, %1
1789 sub%?\\t%0, %1, %2
1790 sub%?\\t%0, %1, %2
1791 sub%?\\t%0, %1, %2
1792 #"
1793 "&& (CONST_INT_P (operands[1])
1794 && !const_ok_for_arm (INTVAL (operands[1])))"
1795 [(clobber (const_int 0))]
1796 "
1797 arm_split_constant (MINUS, SImode, curr_insn,
1798 INTVAL (operands[1]), operands[0], operands[2], 0);
1799 DONE;
1800 "
1801 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
1802 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
1803 (set_attr "predicable" "yes")
1804 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
1805 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
1806 )
1807
1808 (define_peephole2
1809 [(match_scratch:SI 3 "r")
1810 (set (match_operand:SI 0 "arm_general_register_operand" "")
1811 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1812 (match_operand:SI 2 "arm_general_register_operand" "")))]
1813 "TARGET_32BIT
1814 && !const_ok_for_arm (INTVAL (operands[1]))
1815 && const_ok_for_arm (~INTVAL (operands[1]))"
1816 [(set (match_dup 3) (match_dup 1))
1817 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1818 ""
1819 )
1820
1821 (define_insn "subsi3_compare0"
1822 [(set (reg:CC_NOOV CC_REGNUM)
1823 (compare:CC_NOOV
1824 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1825 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1826 (const_int 0)))
1827 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1828 (minus:SI (match_dup 1) (match_dup 2)))]
1829 "TARGET_32BIT"
1830 "@
1831 subs%?\\t%0, %1, %2
1832 subs%?\\t%0, %1, %2
1833 rsbs%?\\t%0, %2, %1"
1834 [(set_attr "conds" "set")
1835 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
1836 )
1837
1838 (define_insn "subsi3_compare"
1839 [(set (reg:CC CC_REGNUM)
1840 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1841 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1842 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1843 (minus:SI (match_dup 1) (match_dup 2)))]
1844 "TARGET_32BIT"
1845 "@
1846 subs%?\\t%0, %1, %2
1847 subs%?\\t%0, %1, %2
1848 rsbs%?\\t%0, %2, %1"
1849 [(set_attr "conds" "set")
1850 (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
1851 )
1852
1853 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
1854 ;; rather than (0 cmp reg). This gives the same results for unsigned
1855 ;; and equality compares, which is what we mostly need here.
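;; (Added note: for unsigned orderings ~x <u ~y iff y <u x, and equality is
;; unchanged by complementing both sides, so the flags still describe the
;; original comparison with the operands swapped.)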
1856 (define_insn "rsb_imm_compare"
1857 [(set (reg:CC_RSB CC_REGNUM)
1858 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
1859 (match_operand 3 "const_int_operand" "")))
1860 (set (match_operand:SI 0 "s_register_operand" "=r")
1861 (minus:SI (match_operand 1 "arm_immediate_operand" "I")
1862 (match_dup 2)))]
1863 "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
1864 "rsbs\\t%0, %2, %1"
1865 [(set_attr "conds" "set")
1866 (set_attr "type" "alus_imm")]
1867 )
1868
1869 ;; Similarly, but the result is unused.
1870 (define_insn "rsb_imm_compare_scratch"
1871 [(set (reg:CC_RSB CC_REGNUM)
1872 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
1873 (match_operand 1 "arm_not_immediate_operand" "K")))
1874 (clobber (match_scratch:SI 0 "=r"))]
1875 "TARGET_32BIT"
1876 "rsbs\\t%0, %2, #%B1"
1877 [(set_attr "conds" "set")
1878 (set_attr "type" "alus_imm")]
1879 )
1880
1881 ;; Compare the sum of a value plus a carry against a constant. Uses
1882 ;; RSC, so the result is swapped. Only available in Arm state.
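;; (Added note: RSC computes Op2 - Rn - NOT(C), so rscs %0, %2, %1 evaluates
;; imm - (reg + borrow); the flags therefore describe the comparison with its
;; operands swapped, which is why CC_SWP is used.)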
1883 (define_insn "rscsi3_<CC_EXTEND>out_scratch"
1884 [(set (reg:CC_SWP CC_REGNUM)
1885 (compare:CC_SWP
1886 (plus:DI (SE:DI (match_operand:SI 2 "s_register_operand" "r"))
1887 (match_operand:DI 3 "arm_borrow_operation" ""))
1888 (match_operand 1 "arm_immediate_operand" "I")))
1889 (clobber (match_scratch:SI 0 "=r"))]
1890 "TARGET_ARM"
1891 "rscs\\t%0, %2, %1"
1892 [(set_attr "conds" "set")
1893 (set_attr "type" "alus_imm")]
1894 )
1895
1896 (define_insn "usubvsi3_borrow"
1897 [(set (reg:CC_B CC_REGNUM)
1898 (compare:CC_B
1899 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
1900 (plus:DI (match_operand:DI 4 "arm_borrow_operation" "")
1901 (zero_extend:DI
1902 (match_operand:SI 2 "s_register_operand" "l,r")))))
1903 (set (match_operand:SI 0 "s_register_operand" "=l,r")
1904 (minus:SI (match_dup 1)
1905 (plus:SI (match_operand:SI 3 "arm_borrow_operation" "")
1906 (match_dup 2))))]
1907 "TARGET_32BIT"
1908 "sbcs%?\\t%0, %1, %2"
1909 [(set_attr "conds" "set")
1910 (set_attr "arch" "t2,*")
1911 (set_attr "length" "2,4")]
1912 )
1913
1914 (define_insn "usubvsi3_borrow_imm"
1915 [(set (reg:CC_B CC_REGNUM)
1916 (compare:CC_B
1917 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1918 (plus:DI (match_operand:DI 5 "arm_borrow_operation" "")
1919 (match_operand:DI 3 "const_int_operand" "n,n"))))
1920 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1921 (minus:SI (match_dup 1)
1922 (plus:SI (match_operand:SI 4 "arm_borrow_operation" "")
1923 (match_operand:SI 2 "arm_adcimm_operand" "I,K"))))]
1924 "TARGET_32BIT
1925 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[3])"
1926 "@
1927 sbcs%?\\t%0, %1, %2
1928 adcs%?\\t%0, %1, #%B2"
1929 [(set_attr "conds" "set")
1930 (set_attr "type" "alus_imm")]
1931 )
1932
1933 (define_expand "subsf3"
1934 [(set (match_operand:SF 0 "s_register_operand")
1935 (minus:SF (match_operand:SF 1 "s_register_operand")
1936 (match_operand:SF 2 "s_register_operand")))]
1937 "TARGET_32BIT && TARGET_HARD_FLOAT"
1938 "
1939 ")
1940
1941 (define_expand "subdf3"
1942 [(set (match_operand:DF 0 "s_register_operand")
1943 (minus:DF (match_operand:DF 1 "s_register_operand")
1944 (match_operand:DF 2 "s_register_operand")))]
1945 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1946 "
1947 ")
1948
1949 \f
1950 ;; Multiplication insns
1951
1952 (define_expand "mulhi3"
1953 [(set (match_operand:HI 0 "s_register_operand")
1954 (mult:HI (match_operand:HI 1 "s_register_operand")
1955 (match_operand:HI 2 "s_register_operand")))]
1956 "TARGET_DSP_MULTIPLY"
1957 "
1958 {
1959 rtx result = gen_reg_rtx (SImode);
1960 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
1961 emit_move_insn (operands[0], gen_lowpart (HImode, result));
1962 DONE;
1963 }"
1964 )
1965
1966 (define_expand "mulsi3"
1967 [(set (match_operand:SI 0 "s_register_operand")
1968 (mult:SI (match_operand:SI 2 "s_register_operand")
1969 (match_operand:SI 1 "s_register_operand")))]
1970 "TARGET_EITHER"
1971 ""
1972 )
1973
1974 ;; Use `&' and then `0' to prevent operands 0 and 2 being the same
1975 (define_insn "*mul"
1976 [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
1977 (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
1978 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
1979 "TARGET_32BIT"
1980 "mul%?\\t%0, %2, %1"
1981 [(set_attr "type" "mul")
1982 (set_attr "predicable" "yes")
1983 (set_attr "arch" "t2,v6,nov6,nov6")
1984 (set_attr "length" "4")
1985 (set_attr "predicable_short_it" "yes,no,*,*")]
1986 )
1987
1988 ;; MLA and MLS instructions.  Use operand 1 for the accumulator to prefer
1989 ;; reusing the same register.
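;; For illustration only (not from the original sources), C along the lines of
;;
;;   int f (int acc, int a, int b) { return acc + a * b; }   /* -> mla */
;;   int g (int acc, int a, int b) { return acc - a * b; }   /* -> mls */
;;
;; is expected to use these patterns; MLS additionally needs ARMv6T2 or later
;; (arm_arch_thumb2).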
1990
1991 (define_insn "*mla"
1992 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
1993 (plus:SI
1994 (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
1995 (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
1996 (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
1997 "TARGET_32BIT"
1998 "mla%?\\t%0, %3, %2, %1"
1999 [(set_attr "type" "mla")
2000 (set_attr "predicable" "yes")
2001 (set_attr "arch" "v6,nov6,nov6,nov6")]
2002 )
2003
2004 (define_insn "*mls"
2005 [(set (match_operand:SI 0 "s_register_operand" "=r")
2006 (minus:SI
2007 (match_operand:SI 1 "s_register_operand" "r")
2008 (mult:SI (match_operand:SI 3 "s_register_operand" "r")
2009 (match_operand:SI 2 "s_register_operand" "r"))))]
2010 "TARGET_32BIT && arm_arch_thumb2"
2011 "mls%?\\t%0, %3, %2, %1"
2012 [(set_attr "type" "mla")
2013 (set_attr "predicable" "yes")]
2014 )
2015
2016 (define_insn "*mulsi3_compare0"
2017 [(set (reg:CC_NOOV CC_REGNUM)
2018 (compare:CC_NOOV (mult:SI
2019 (match_operand:SI 2 "s_register_operand" "r,r")
2020 (match_operand:SI 1 "s_register_operand" "%0,r"))
2021 (const_int 0)))
2022 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
2023 (mult:SI (match_dup 2) (match_dup 1)))]
2024 "TARGET_ARM && !arm_arch6"
2025 "muls%?\\t%0, %2, %1"
2026 [(set_attr "conds" "set")
2027 (set_attr "type" "muls")]
2028 )
2029
2030 (define_insn "*mulsi3_compare0_v6"
2031 [(set (reg:CC_NOOV CC_REGNUM)
2032 (compare:CC_NOOV (mult:SI
2033 (match_operand:SI 2 "s_register_operand" "r")
2034 (match_operand:SI 1 "s_register_operand" "r"))
2035 (const_int 0)))
2036 (set (match_operand:SI 0 "s_register_operand" "=r")
2037 (mult:SI (match_dup 2) (match_dup 1)))]
2038 "TARGET_ARM && arm_arch6 && optimize_size"
2039 "muls%?\\t%0, %2, %1"
2040 [(set_attr "conds" "set")
2041 (set_attr "type" "muls")]
2042 )
2043
2044 (define_insn "*mulsi_compare0_scratch"
2045 [(set (reg:CC_NOOV CC_REGNUM)
2046 (compare:CC_NOOV (mult:SI
2047 (match_operand:SI 2 "s_register_operand" "r,r")
2048 (match_operand:SI 1 "s_register_operand" "%0,r"))
2049 (const_int 0)))
2050 (clobber (match_scratch:SI 0 "=&r,&r"))]
2051 "TARGET_ARM && !arm_arch6"
2052 "muls%?\\t%0, %2, %1"
2053 [(set_attr "conds" "set")
2054 (set_attr "type" "muls")]
2055 )
2056
2057 (define_insn "*mulsi_compare0_scratch_v6"
2058 [(set (reg:CC_NOOV CC_REGNUM)
2059 (compare:CC_NOOV (mult:SI
2060 (match_operand:SI 2 "s_register_operand" "r")
2061 (match_operand:SI 1 "s_register_operand" "r"))
2062 (const_int 0)))
2063 (clobber (match_scratch:SI 0 "=r"))]
2064 "TARGET_ARM && arm_arch6 && optimize_size"
2065 "muls%?\\t%0, %2, %1"
2066 [(set_attr "conds" "set")
2067 (set_attr "type" "muls")]
2068 )
2069
2070 (define_insn "*mulsi3addsi_compare0"
2071 [(set (reg:CC_NOOV CC_REGNUM)
2072 (compare:CC_NOOV
2073 (plus:SI (mult:SI
2074 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2075 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2076 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
2077 (const_int 0)))
2078 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
2079 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2080 (match_dup 3)))]
2081 "TARGET_ARM && arm_arch6"
2082 "mlas%?\\t%0, %2, %1, %3"
2083 [(set_attr "conds" "set")
2084 (set_attr "type" "mlas")]
2085 )
2086
2087 (define_insn "*mulsi3addsi_compare0_v6"
2088 [(set (reg:CC_NOOV CC_REGNUM)
2089 (compare:CC_NOOV
2090 (plus:SI (mult:SI
2091 (match_operand:SI 2 "s_register_operand" "r")
2092 (match_operand:SI 1 "s_register_operand" "r"))
2093 (match_operand:SI 3 "s_register_operand" "r"))
2094 (const_int 0)))
2095 (set (match_operand:SI 0 "s_register_operand" "=r")
2096 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2097 (match_dup 3)))]
2098 "TARGET_ARM && arm_arch6 && optimize_size"
2099 "mlas%?\\t%0, %2, %1, %3"
2100 [(set_attr "conds" "set")
2101 (set_attr "type" "mlas")]
2102 )
2103
2104 (define_insn "*mulsi3addsi_compare0_scratch"
2105 [(set (reg:CC_NOOV CC_REGNUM)
2106 (compare:CC_NOOV
2107 (plus:SI (mult:SI
2108 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2109 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2110 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
2111 (const_int 0)))
2112 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
2113 "TARGET_ARM && !arm_arch6"
2114 "mlas%?\\t%0, %2, %1, %3"
2115 [(set_attr "conds" "set")
2116 (set_attr "type" "mlas")]
2117 )
2118
2119 (define_insn "*mulsi3addsi_compare0_scratch_v6"
2120 [(set (reg:CC_NOOV CC_REGNUM)
2121 (compare:CC_NOOV
2122 (plus:SI (mult:SI
2123 (match_operand:SI 2 "s_register_operand" "r")
2124 (match_operand:SI 1 "s_register_operand" "r"))
2125 (match_operand:SI 3 "s_register_operand" "r"))
2126 (const_int 0)))
2127 (clobber (match_scratch:SI 0 "=r"))]
2128 "TARGET_ARM && arm_arch6 && optimize_size"
2129 "mlas%?\\t%0, %2, %1, %3"
2130 [(set_attr "conds" "set")
2131 (set_attr "type" "mlas")]
2132 )
2133
2134 ;; 32x32->64 widening multiply.
2135 ;; The only difference between the v3-5 and v6+ versions is the requirement
2136 ;; that the output does not overlap with either input.
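;; For illustration only (not from the original sources):
;;
;;   long long smul (int a, int b)
;;   { return (long long) a * b; }                    /* -> smull */
;;   unsigned long long umul (unsigned a, unsigned b)
;;   { return (unsigned long long) a * b; }           /* -> umull */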
2137
2138 (define_expand "<Us>mulsidi3"
2139 [(set (match_operand:DI 0 "s_register_operand")
2140 (mult:DI
2141 (SE:DI (match_operand:SI 1 "s_register_operand"))
2142 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
2143 "TARGET_32BIT"
2144 {
2145 emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
2146 gen_highpart (SImode, operands[0]),
2147 operands[1], operands[2]));
2148 DONE;
2149 }
2150 )
2151
2152 (define_insn "<US>mull"
2153 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2154 (mult:SI
2155 (match_operand:SI 2 "s_register_operand" "%r,r")
2156 (match_operand:SI 3 "s_register_operand" "r,r")))
2157 (set (match_operand:SI 1 "s_register_operand" "=r,&r")
2158 (truncate:SI
2159 (lshiftrt:DI
2160 (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
2161 (const_int 32))))]
2162 "TARGET_32BIT"
2163 "<US>mull%?\\t%0, %1, %2, %3"
2164 [(set_attr "type" "umull")
2165 (set_attr "predicable" "yes")
2166 (set_attr "arch" "v6,nov6")]
2167 )
2168
2169 (define_expand "<Us>maddsidi4"
2170 [(set (match_operand:DI 0 "s_register_operand")
2171 (plus:DI
2172 (mult:DI
2173 (SE:DI (match_operand:SI 1 "s_register_operand"))
2174 (SE:DI (match_operand:SI 2 "s_register_operand")))
2175 (match_operand:DI 3 "s_register_operand")))]
2176 "TARGET_32BIT"
2177 {
2178 emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
2179 gen_lowpart (SImode, operands[3]),
2180 gen_highpart (SImode, operands[0]),
2181 gen_highpart (SImode, operands[3]),
2182 operands[1], operands[2]));
2183 DONE;
2184 }
2185 )
2186
2187 (define_insn "<US>mlal"
2188 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2189 (plus:SI
2190 (mult:SI
2191 (match_operand:SI 4 "s_register_operand" "%r,r")
2192 (match_operand:SI 5 "s_register_operand" "r,r"))
2193 (match_operand:SI 1 "s_register_operand" "0,0")))
2194 (set (match_operand:SI 2 "s_register_operand" "=r,&r")
2195 (plus:SI
2196 (truncate:SI
2197 (lshiftrt:DI
2198 (plus:DI
2199 (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
2200 (zero_extend:DI (match_dup 1)))
2201 (const_int 32)))
2202 (match_operand:SI 3 "s_register_operand" "2,2")))]
2203 "TARGET_32BIT"
2204 "<US>mlal%?\\t%0, %2, %4, %5"
2205 [(set_attr "type" "umlal")
2206 (set_attr "predicable" "yes")
2207 (set_attr "arch" "v6,nov6")]
2208 )
2209
2210 (define_expand "<US>mulsi3_highpart"
2211 [(parallel
2212 [(set (match_operand:SI 0 "s_register_operand")
2213 (truncate:SI
2214 (lshiftrt:DI
2215 (mult:DI
2216 (SE:DI (match_operand:SI 1 "s_register_operand"))
2217 (SE:DI (match_operand:SI 2 "s_register_operand")))
2218 (const_int 32))))
2219 (clobber (match_scratch:SI 3 ""))])]
2220 "TARGET_32BIT"
2221 ""
2222 )
2223
2224 (define_insn "*<US>mull_high"
2225 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
2226 (truncate:SI
2227 (lshiftrt:DI
2228 (mult:DI
2229 (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
2230 (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
2231 (const_int 32))))
2232 (clobber (match_scratch:SI 3 "=r,&r,&r"))]
2233 "TARGET_32BIT"
2234 "<US>mull%?\\t%3, %0, %2, %1"
2235 [(set_attr "type" "umull")
2236 (set_attr "predicable" "yes")
2237 (set_attr "arch" "v6,nov6,nov6")]
2238 )
2239
2240 (define_insn "mulhisi3"
2241 [(set (match_operand:SI 0 "s_register_operand" "=r")
2242 (mult:SI (sign_extend:SI
2243 (match_operand:HI 1 "s_register_operand" "%r"))
2244 (sign_extend:SI
2245 (match_operand:HI 2 "s_register_operand" "r"))))]
2246 "TARGET_DSP_MULTIPLY"
2247 "smulbb%?\\t%0, %1, %2"
2248 [(set_attr "type" "smulxy")
2249 (set_attr "predicable" "yes")]
2250 )
2251
2252 (define_insn "*mulhisi3tb"
2253 [(set (match_operand:SI 0 "s_register_operand" "=r")
2254 (mult:SI (ashiftrt:SI
2255 (match_operand:SI 1 "s_register_operand" "r")
2256 (const_int 16))
2257 (sign_extend:SI
2258 (match_operand:HI 2 "s_register_operand" "r"))))]
2259 "TARGET_DSP_MULTIPLY"
2260 "smultb%?\\t%0, %1, %2"
2261 [(set_attr "type" "smulxy")
2262 (set_attr "predicable" "yes")]
2263 )
2264
2265 (define_insn "*mulhisi3bt"
2266 [(set (match_operand:SI 0 "s_register_operand" "=r")
2267 (mult:SI (sign_extend:SI
2268 (match_operand:HI 1 "s_register_operand" "r"))
2269 (ashiftrt:SI
2270 (match_operand:SI 2 "s_register_operand" "r")
2271 (const_int 16))))]
2272 "TARGET_DSP_MULTIPLY"
2273 "smulbt%?\\t%0, %1, %2"
2274 [(set_attr "type" "smulxy")
2275 (set_attr "predicable" "yes")]
2276 )
2277
2278 (define_insn "*mulhisi3tt"
2279 [(set (match_operand:SI 0 "s_register_operand" "=r")
2280 (mult:SI (ashiftrt:SI
2281 (match_operand:SI 1 "s_register_operand" "r")
2282 (const_int 16))
2283 (ashiftrt:SI
2284 (match_operand:SI 2 "s_register_operand" "r")
2285 (const_int 16))))]
2286 "TARGET_DSP_MULTIPLY"
2287 "smultt%?\\t%0, %1, %2"
2288 [(set_attr "type" "smulxy")
2289 (set_attr "predicable" "yes")]
2290 )
2291
2292 (define_insn "maddhisi4"
2293 [(set (match_operand:SI 0 "s_register_operand" "=r")
2294 (plus:SI (mult:SI (sign_extend:SI
2295 (match_operand:HI 1 "s_register_operand" "r"))
2296 (sign_extend:SI
2297 (match_operand:HI 2 "s_register_operand" "r")))
2298 (match_operand:SI 3 "s_register_operand" "r")))]
2299 "TARGET_DSP_MULTIPLY"
2300 "smlabb%?\\t%0, %1, %2, %3"
2301 [(set_attr "type" "smlaxy")
2302 (set_attr "predicable" "yes")]
2303 )
2304
2305 ;; Note: there is no maddhisi4bt because this one is the canonical form
2306 (define_insn "*maddhisi4tb"
2307 [(set (match_operand:SI 0 "s_register_operand" "=r")
2308 (plus:SI (mult:SI (ashiftrt:SI
2309 (match_operand:SI 1 "s_register_operand" "r")
2310 (const_int 16))
2311 (sign_extend:SI
2312 (match_operand:HI 2 "s_register_operand" "r")))
2313 (match_operand:SI 3 "s_register_operand" "r")))]
2314 "TARGET_DSP_MULTIPLY"
2315 "smlatb%?\\t%0, %1, %2, %3"
2316 [(set_attr "type" "smlaxy")
2317 (set_attr "predicable" "yes")]
2318 )
2319
2320 (define_insn "*maddhisi4tt"
2321 [(set (match_operand:SI 0 "s_register_operand" "=r")
2322 (plus:SI (mult:SI (ashiftrt:SI
2323 (match_operand:SI 1 "s_register_operand" "r")
2324 (const_int 16))
2325 (ashiftrt:SI
2326 (match_operand:SI 2 "s_register_operand" "r")
2327 (const_int 16)))
2328 (match_operand:SI 3 "s_register_operand" "r")))]
2329 "TARGET_DSP_MULTIPLY"
2330 "smlatt%?\\t%0, %1, %2, %3"
2331 [(set_attr "type" "smlaxy")
2332 (set_attr "predicable" "yes")]
2333 )
2334
2335 (define_insn "maddhidi4"
2336 [(set (match_operand:DI 0 "s_register_operand" "=r")
2337 (plus:DI
2338 (mult:DI (sign_extend:DI
2339 (match_operand:HI 1 "s_register_operand" "r"))
2340 (sign_extend:DI
2341 (match_operand:HI 2 "s_register_operand" "r")))
2342 (match_operand:DI 3 "s_register_operand" "0")))]
2343 "TARGET_DSP_MULTIPLY"
2344 "smlalbb%?\\t%Q0, %R0, %1, %2"
2345 [(set_attr "type" "smlalxy")
2346 (set_attr "predicable" "yes")])
2347
2348 ;; Note: there is no maddhidi4bt because this one is the canonical form
2349 (define_insn "*maddhidi4tb"
2350 [(set (match_operand:DI 0 "s_register_operand" "=r")
2351 (plus:DI
2352 (mult:DI (sign_extend:DI
2353 (ashiftrt:SI
2354 (match_operand:SI 1 "s_register_operand" "r")
2355 (const_int 16)))
2356 (sign_extend:DI
2357 (match_operand:HI 2 "s_register_operand" "r")))
2358 (match_operand:DI 3 "s_register_operand" "0")))]
2359 "TARGET_DSP_MULTIPLY"
2360 "smlaltb%?\\t%Q0, %R0, %1, %2"
2361 [(set_attr "type" "smlalxy")
2362 (set_attr "predicable" "yes")])
2363
2364 (define_insn "*maddhidi4tt"
2365 [(set (match_operand:DI 0 "s_register_operand" "=r")
2366 (plus:DI
2367 (mult:DI (sign_extend:DI
2368 (ashiftrt:SI
2369 (match_operand:SI 1 "s_register_operand" "r")
2370 (const_int 16)))
2371 (sign_extend:DI
2372 (ashiftrt:SI
2373 (match_operand:SI 2 "s_register_operand" "r")
2374 (const_int 16))))
2375 (match_operand:DI 3 "s_register_operand" "0")))]
2376 "TARGET_DSP_MULTIPLY"
2377 "smlaltt%?\\t%Q0, %R0, %1, %2"
2378 [(set_attr "type" "smlalxy")
2379 (set_attr "predicable" "yes")])
2380
2381 (define_expand "mulsf3"
2382 [(set (match_operand:SF 0 "s_register_operand")
2383 (mult:SF (match_operand:SF 1 "s_register_operand")
2384 (match_operand:SF 2 "s_register_operand")))]
2385 "TARGET_32BIT && TARGET_HARD_FLOAT"
2386 "
2387 ")
2388
2389 (define_expand "muldf3"
2390 [(set (match_operand:DF 0 "s_register_operand")
2391 (mult:DF (match_operand:DF 1 "s_register_operand")
2392 (match_operand:DF 2 "s_register_operand")))]
2393 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2394 "
2395 ")
2396 \f
2397 ;; Division insns
2398
2399 (define_expand "divsf3"
2400 [(set (match_operand:SF 0 "s_register_operand")
2401 (div:SF (match_operand:SF 1 "s_register_operand")
2402 (match_operand:SF 2 "s_register_operand")))]
2403 "TARGET_32BIT && TARGET_HARD_FLOAT"
2404 "")
2405
2406 (define_expand "divdf3"
2407 [(set (match_operand:DF 0 "s_register_operand")
2408 (div:DF (match_operand:DF 1 "s_register_operand")
2409 (match_operand:DF 2 "s_register_operand")))]
2410 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
2411 "")
2412 \f
2413
2414 ; Expand logical operations. The mid-end expander does not split off memory
2415 ; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
2416 ; So an explicit expander is needed to generate better code.
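; As an illustration (not part of the original sources), a 64-bit AND such as
;
;   unsigned long long f (unsigned long long a, unsigned long long b)
;   { return a & b; }
;
; is expanded here into two SImode ANDs, one on the low halves and one on the
; high halves, rather than being left for the generic DImode lowering.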
2417
2418 (define_expand "<LOGICAL:optab>di3"
2419 [(set (match_operand:DI 0 "s_register_operand")
2420 (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
2421 (match_operand:DI 2 "arm_<optab>di_operand")))]
2422 "TARGET_32BIT"
2423 {
2424 rtx low = simplify_gen_binary (<CODE>, SImode,
2425 gen_lowpart (SImode, operands[1]),
2426 gen_lowpart (SImode, operands[2]));
2427 rtx high = simplify_gen_binary (<CODE>, SImode,
2428 gen_highpart (SImode, operands[1]),
2429 gen_highpart_mode (SImode, DImode,
2430 operands[2]));
2431
2432 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2433 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2434 DONE;
2435 }
2436 )
2437
2438 (define_expand "one_cmpldi2"
2439 [(set (match_operand:DI 0 "s_register_operand")
2440 (not:DI (match_operand:DI 1 "s_register_operand")))]
2441 "TARGET_32BIT"
2442 {
2443 rtx low = simplify_gen_unary (NOT, SImode,
2444 gen_lowpart (SImode, operands[1]),
2445 SImode);
2446 rtx high = simplify_gen_unary (NOT, SImode,
2447 gen_highpart_mode (SImode, DImode,
2448 operands[1]),
2449 SImode);
2450
2451 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2452 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2453 DONE;
2454 }
2455 )
2456
2457 ;; Split DImode and, ior, xor operations. Simply perform the logical
2458 ;; operation on the upper and lower halves of the registers.
2459 ;; This is needed for atomic operations in arm_split_atomic_op.
2460 ;; Avoid splitting IWMMXT instructions.
2461 (define_split
2462 [(set (match_operand:DI 0 "s_register_operand" "")
2463 (match_operator:DI 6 "logical_binary_operator"
2464 [(match_operand:DI 1 "s_register_operand" "")
2465 (match_operand:DI 2 "s_register_operand" "")]))]
2466 "TARGET_32BIT && reload_completed
2467 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2468 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2469 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
2470 "
2471 {
2472 operands[3] = gen_highpart (SImode, operands[0]);
2473 operands[0] = gen_lowpart (SImode, operands[0]);
2474 operands[4] = gen_highpart (SImode, operands[1]);
2475 operands[1] = gen_lowpart (SImode, operands[1]);
2476 operands[5] = gen_highpart (SImode, operands[2]);
2477 operands[2] = gen_lowpart (SImode, operands[2]);
2478 }"
2479 )
2480
2481 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
2482 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
2483 (define_split
2484 [(set (match_operand:DI 0 "s_register_operand")
2485 (not:DI (match_operand:DI 1 "s_register_operand")))]
2486 "TARGET_32BIT"
2487 [(set (match_dup 0) (not:SI (match_dup 1)))
2488 (set (match_dup 2) (not:SI (match_dup 3)))]
2489 "
2490 {
2491 operands[2] = gen_highpart (SImode, operands[0]);
2492 operands[0] = gen_lowpart (SImode, operands[0]);
2493 operands[3] = gen_highpart (SImode, operands[1]);
2494 operands[1] = gen_lowpart (SImode, operands[1]);
2495 }"
2496 )
2497
2498 (define_expand "andsi3"
2499 [(set (match_operand:SI 0 "s_register_operand")
2500 (and:SI (match_operand:SI 1 "s_register_operand")
2501 (match_operand:SI 2 "reg_or_int_operand")))]
2502 "TARGET_EITHER"
2503 "
2504 if (TARGET_32BIT)
2505 {
2506 if (CONST_INT_P (operands[2]))
2507 {
2508 if (INTVAL (operands[2]) == 255 && arm_arch6)
2509 {
2510 operands[1] = convert_to_mode (QImode, operands[1], 1);
2511 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2512 operands[1]));
2513 DONE;
2514 }
2515 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
2516 operands[2] = force_reg (SImode, operands[2]);
2517 else
2518 {
2519 arm_split_constant (AND, SImode, NULL_RTX,
2520 INTVAL (operands[2]), operands[0],
2521 operands[1],
2522 optimize && can_create_pseudo_p ());
2523
2524 DONE;
2525 }
2526 }
2527 }
2528 else /* TARGET_THUMB1 */
2529 {
2530 if (!CONST_INT_P (operands[2]))
2531 {
2532 rtx tmp = force_reg (SImode, operands[2]);
2533 if (rtx_equal_p (operands[0], operands[1]))
2534 operands[2] = tmp;
2535 else
2536 {
2537 operands[2] = operands[1];
2538 operands[1] = tmp;
2539 }
2540 }
2541 else
2542 {
2543 int i;
2544
2545 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2546 {
2547 operands[2] = force_reg (SImode,
2548 GEN_INT (~INTVAL (operands[2])));
2549
2550 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2551
2552 DONE;
2553 }
2554
2555 for (i = 9; i <= 31; i++)
2556 {
2557 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
2558 {
2559 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2560 const0_rtx));
2561 DONE;
2562 }
2563 else if ((HOST_WIDE_INT_1 << i) - 1
2564 == ~INTVAL (operands[2]))
2565 {
2566 rtx shift = GEN_INT (i);
2567 rtx reg = gen_reg_rtx (SImode);
2568
2569 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2570 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2571
2572 DONE;
2573 }
2574 }
2575
2576 operands[2] = force_reg (SImode, operands[2]);
2577 }
2578 }
2579 "
2580 )
2581
2582 ; ??? Check split length for Thumb-2
2583 (define_insn_and_split "*arm_andsi3_insn"
2584 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2585 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2586 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2587 "TARGET_32BIT"
2588 "@
2589 and%?\\t%0, %1, %2
2590 and%?\\t%0, %1, %2
2591 bic%?\\t%0, %1, #%B2
2592 and%?\\t%0, %1, %2
2593 #"
2594 "TARGET_32BIT
2595 && CONST_INT_P (operands[2])
2596 && !(const_ok_for_arm (INTVAL (operands[2]))
2597 || const_ok_for_arm (~INTVAL (operands[2])))"
2598 [(clobber (const_int 0))]
2599 "
2600 arm_split_constant (AND, SImode, curr_insn,
2601 INTVAL (operands[2]), operands[0], operands[1], 0);
2602 DONE;
2603 "
2604 [(set_attr "length" "4,4,4,4,16")
2605 (set_attr "predicable" "yes")
2606 (set_attr "predicable_short_it" "no,yes,no,no,no")
2607 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
2608 )
2609
2610 (define_insn "*andsi3_compare0"
2611 [(set (reg:CC_NOOV CC_REGNUM)
2612 (compare:CC_NOOV
2613 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2614 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
2615 (const_int 0)))
2616 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2617 (and:SI (match_dup 1) (match_dup 2)))]
2618 "TARGET_32BIT"
2619 "@
2620 ands%?\\t%0, %1, %2
2621 bics%?\\t%0, %1, #%B2
2622 ands%?\\t%0, %1, %2"
2623 [(set_attr "conds" "set")
2624 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
2625 )
2626
2627 (define_insn "*andsi3_compare0_scratch"
2628 [(set (reg:CC_NOOV CC_REGNUM)
2629 (compare:CC_NOOV
2630 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
2631 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
2632 (const_int 0)))
2633 (clobber (match_scratch:SI 2 "=X,r,X"))]
2634 "TARGET_32BIT"
2635 "@
2636 tst%?\\t%0, %1
2637 bics%?\\t%2, %0, #%B1
2638 tst%?\\t%0, %1"
2639 [(set_attr "conds" "set")
2640 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
2641 )
2642
2643 (define_insn "*zeroextractsi_compare0_scratch"
2644 [(set (reg:CC_NOOV CC_REGNUM)
2645 (compare:CC_NOOV (zero_extract:SI
2646 (match_operand:SI 0 "s_register_operand" "r")
2647 (match_operand 1 "const_int_operand" "n")
2648 (match_operand 2 "const_int_operand" "n"))
2649 (const_int 0)))]
2650 "TARGET_32BIT
2651 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2652 && INTVAL (operands[1]) > 0
2653 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2654 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2655 "*
2656 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2657 << INTVAL (operands[2]));
2658 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2659 return \"\";
2660 "
2661 [(set_attr "conds" "set")
2662 (set_attr "predicable" "yes")
2663 (set_attr "type" "logics_imm")]
2664 )
2665
2666 (define_insn_and_split "*ne_zeroextractsi"
2667 [(set (match_operand:SI 0 "s_register_operand" "=r")
2668 (ne:SI (zero_extract:SI
2669 (match_operand:SI 1 "s_register_operand" "r")
2670 (match_operand:SI 2 "const_int_operand" "n")
2671 (match_operand:SI 3 "const_int_operand" "n"))
2672 (const_int 0)))
2673 (clobber (reg:CC CC_REGNUM))]
2674 "TARGET_32BIT
2675 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2676 && INTVAL (operands[2]) > 0
2677 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2678 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2679 "#"
2680 "TARGET_32BIT
2681 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2682 && INTVAL (operands[2]) > 0
2683 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2684 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2685 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2686 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2687 (const_int 0)))
2688 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2689 (set (match_dup 0)
2690 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2691 (match_dup 0) (const_int 1)))]
2692 "
2693 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2694 << INTVAL (operands[3]));
2695 "
2696 [(set_attr "conds" "clob")
2697 (set (attr "length")
2698 (if_then_else (eq_attr "is_thumb" "yes")
2699 (const_int 12)
2700 (const_int 8)))
2701 (set_attr "type" "multiple")]
2702 )
2703
2704 (define_insn_and_split "*ne_zeroextractsi_shifted"
2705 [(set (match_operand:SI 0 "s_register_operand" "=r")
2706 (ne:SI (zero_extract:SI
2707 (match_operand:SI 1 "s_register_operand" "r")
2708 (match_operand:SI 2 "const_int_operand" "n")
2709 (const_int 0))
2710 (const_int 0)))
2711 (clobber (reg:CC CC_REGNUM))]
2712 "TARGET_ARM"
2713 "#"
2714 "TARGET_ARM"
2715 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2716 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2717 (const_int 0)))
2718 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2719 (set (match_dup 0)
2720 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2721 (match_dup 0) (const_int 1)))]
2722 "
2723 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2724 "
2725 [(set_attr "conds" "clob")
2726 (set_attr "length" "8")
2727 (set_attr "type" "multiple")]
2728 )
2729
2730 (define_insn_and_split "*ite_ne_zeroextractsi"
2731 [(set (match_operand:SI 0 "s_register_operand" "=r")
2732 (if_then_else:SI (ne (zero_extract:SI
2733 (match_operand:SI 1 "s_register_operand" "r")
2734 (match_operand:SI 2 "const_int_operand" "n")
2735 (match_operand:SI 3 "const_int_operand" "n"))
2736 (const_int 0))
2737 (match_operand:SI 4 "arm_not_operand" "rIK")
2738 (const_int 0)))
2739 (clobber (reg:CC CC_REGNUM))]
2740 "TARGET_ARM
2741 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2742 && INTVAL (operands[2]) > 0
2743 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2744 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2745 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2746 "#"
2747 "TARGET_ARM
2748 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2749 && INTVAL (operands[2]) > 0
2750 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2751 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2752 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2753 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2754 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2755 (const_int 0)))
2756 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2757 (set (match_dup 0)
2758 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2759 (match_dup 0) (match_dup 4)))]
2760 "
2761 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2762 << INTVAL (operands[3]));
2763 "
2764 [(set_attr "conds" "clob")
2765 (set_attr "length" "8")
2766 (set_attr "type" "multiple")]
2767 )
2768
2769 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2770 [(set (match_operand:SI 0 "s_register_operand" "=r")
2771 (if_then_else:SI (ne (zero_extract:SI
2772 (match_operand:SI 1 "s_register_operand" "r")
2773 (match_operand:SI 2 "const_int_operand" "n")
2774 (const_int 0))
2775 (const_int 0))
2776 (match_operand:SI 3 "arm_not_operand" "rIK")
2777 (const_int 0)))
2778 (clobber (reg:CC CC_REGNUM))]
2779 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2780 "#"
2781 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2782 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2783 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2784 (const_int 0)))
2785 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2786 (set (match_dup 0)
2787 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2788 (match_dup 0) (match_dup 3)))]
2789 "
2790 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2791 "
2792 [(set_attr "conds" "clob")
2793 (set_attr "length" "8")
2794 (set_attr "type" "multiple")]
2795 )
2796
2797 ;; ??? Use the Thumb-2 bitfield insert/extract instructions.
2798 (define_split
2799 [(set (match_operand:SI 0 "s_register_operand" "")
2800 (match_operator:SI 1 "shiftable_operator"
2801 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2802 (match_operand:SI 3 "const_int_operand" "")
2803 (match_operand:SI 4 "const_int_operand" ""))
2804 (match_operand:SI 5 "s_register_operand" "")]))
2805 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2806 "TARGET_ARM"
2807 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2808 (set (match_dup 0)
2809 (match_op_dup 1
2810 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2811 (match_dup 5)]))]
2812 "{
2813 HOST_WIDE_INT temp = INTVAL (operands[3]);
2814
2815 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2816 operands[4] = GEN_INT (32 - temp);
2817 }"
2818 )
2819
2820 (define_split
2821 [(set (match_operand:SI 0 "s_register_operand" "")
2822 (match_operator:SI 1 "shiftable_operator"
2823 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2824 (match_operand:SI 3 "const_int_operand" "")
2825 (match_operand:SI 4 "const_int_operand" ""))
2826 (match_operand:SI 5 "s_register_operand" "")]))
2827 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2828 "TARGET_ARM"
2829 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2830 (set (match_dup 0)
2831 (match_op_dup 1
2832 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2833 (match_dup 5)]))]
2834 "{
2835 HOST_WIDE_INT temp = INTVAL (operands[3]);
2836
2837 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2838 operands[4] = GEN_INT (32 - temp);
2839 }"
2840 )
2841
2842 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2843 ;;; represented by the bitfield, then this will produce incorrect results.
2844 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2845 ;;; which have a real bit-field insert instruction, the truncation happens
2846 ;;; in the bit-field insert instruction itself. Since arm does not have a
2847 ;;; bit-field insert instruction, we would have to emit code here to truncate
2848 ;;; the value before we insert. This loses some of the advantage of having
2849 ;;; this insv pattern, so this pattern needs to be reevaluated.
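;;; (Added example: inserting the value 0x1ff into an 8-bit field at bit 0
;;; without first truncating it to 0xff would also disturb bit 8, i.e. a bit
;;; outside the field, which is the kind of incorrect result meant above.)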
2850
2851 (define_expand "insv"
2852 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
2853 (match_operand 1 "general_operand")
2854 (match_operand 2 "general_operand"))
2855 (match_operand 3 "reg_or_int_operand"))]
2856 "TARGET_ARM || arm_arch_thumb2"
2857 "
2858 {
2859 int start_bit = INTVAL (operands[2]);
2860 int width = INTVAL (operands[1]);
2861 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
2862 rtx target, subtarget;
2863
2864 if (arm_arch_thumb2)
2865 {
2866 if (unaligned_access && MEM_P (operands[0])
2867 && s_register_operand (operands[3], GET_MODE (operands[3]))
2868 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2869 {
2870 rtx base_addr;
2871
2872 if (BYTES_BIG_ENDIAN)
2873 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2874 - start_bit;
2875
2876 if (width == 32)
2877 {
2878 base_addr = adjust_address (operands[0], SImode,
2879 start_bit / BITS_PER_UNIT);
2880 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2881 }
2882 else
2883 {
2884 rtx tmp = gen_reg_rtx (HImode);
2885
2886 base_addr = adjust_address (operands[0], HImode,
2887 start_bit / BITS_PER_UNIT);
2888 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2889 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2890 }
2891 DONE;
2892 }
2893 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2894 {
2895 bool use_bfi = TRUE;
2896
2897 if (CONST_INT_P (operands[3]))
2898 {
2899 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2900
2901 if (val == 0)
2902 {
2903 emit_insn (gen_insv_zero (operands[0], operands[1],
2904 operands[2]));
2905 DONE;
2906 }
2907
2908 /* See if the set can be done with a single orr instruction. */
2909 if (val == mask && const_ok_for_arm (val << start_bit))
2910 use_bfi = FALSE;
2911 }
2912
2913 if (use_bfi)
2914 {
2915 if (!REG_P (operands[3]))
2916 operands[3] = force_reg (SImode, operands[3]);
2917
2918 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2919 operands[3]));
2920 DONE;
2921 }
2922 }
2923 else
2924 FAIL;
2925 }
2926
2927 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
2928 FAIL;
2929
2930 target = copy_rtx (operands[0]);
2931 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2932 subreg as the final target. */
2933 if (GET_CODE (target) == SUBREG)
2934 {
2935 subtarget = gen_reg_rtx (SImode);
2936 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2937 < GET_MODE_SIZE (SImode))
2938 target = SUBREG_REG (target);
2939 }
2940 else
2941 subtarget = target;
2942
2943 if (CONST_INT_P (operands[3]))
2944 {
2945 /* Since we are inserting a known constant, we may be able to
2946 reduce the number of bits that we have to clear so that
2947 the mask becomes simple. */
2948 /* ??? This code does not check to see if the new mask is actually
2949 simpler. It may not be. */
2950 rtx op1 = gen_reg_rtx (SImode);
2951 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2952 start of this pattern. */
2953 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2954 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2955
2956 emit_insn (gen_andsi3 (op1, operands[0],
2957 gen_int_mode (~mask2, SImode)));
2958 emit_insn (gen_iorsi3 (subtarget, op1,
2959 gen_int_mode (op3_value << start_bit, SImode)));
2960 }
2961 else if (start_bit == 0
2962 && !(const_ok_for_arm (mask)
2963 || const_ok_for_arm (~mask)))
2964 {
2965 /* A trick: since we are setting the bottom bits in the word,
2966 we can shift operand[3] up, operand[0] down, OR them together
2967 and rotate the result back again. This takes 3 insns, and
2968 the third might be mergeable into another op. */
2969 /* The shift up copes with the possibility that operand[3] is
2970 wider than the bitfield. */
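         /* Added illustration: inserting v into the low 8 bits of x becomes
              op0 = v << 24;  op1 = x >> 8;  op1 |= op0;  result = rotl (op1, 8);
            which yields (x & ~0xff) | (v & 0xff).  */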
2971 rtx op0 = gen_reg_rtx (SImode);
2972 rtx op1 = gen_reg_rtx (SImode);
2973
2974 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2975 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2976 emit_insn (gen_iorsi3 (op1, op1, op0));
2977 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2978 }
2979 else if ((width + start_bit == 32)
2980 && !(const_ok_for_arm (mask)
2981 || const_ok_for_arm (~mask)))
2982 {
2983 /* Similar trick, but slightly less efficient. */
2984
2985 rtx op0 = gen_reg_rtx (SImode);
2986 rtx op1 = gen_reg_rtx (SImode);
2987
2988 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2989 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2990 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2991 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2992 }
2993 else
2994 {
2995 rtx op0 = gen_int_mode (mask, SImode);
2996 rtx op1 = gen_reg_rtx (SImode);
2997 rtx op2 = gen_reg_rtx (SImode);
2998
2999 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
3000 {
3001 rtx tmp = gen_reg_rtx (SImode);
3002
3003 emit_insn (gen_movsi (tmp, op0));
3004 op0 = tmp;
3005 }
3006
3007 /* Mask out any bits in operand[3] that are not needed. */
3008 emit_insn (gen_andsi3 (op1, operands[3], op0));
3009
3010 if (CONST_INT_P (op0)
3011 && (const_ok_for_arm (mask << start_bit)
3012 || const_ok_for_arm (~(mask << start_bit))))
3013 {
3014 op0 = gen_int_mode (~(mask << start_bit), SImode);
3015 emit_insn (gen_andsi3 (op2, operands[0], op0));
3016 }
3017 else
3018 {
3019 if (CONST_INT_P (op0))
3020 {
3021 rtx tmp = gen_reg_rtx (SImode);
3022
3023 emit_insn (gen_movsi (tmp, op0));
3024 op0 = tmp;
3025 }
3026
3027 if (start_bit != 0)
3028 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
3029
3030 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
3031 }
3032
3033 if (start_bit != 0)
3034 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
3035
3036 emit_insn (gen_iorsi3 (subtarget, op1, op2));
3037 }
3038
3039 if (subtarget != target)
3040 {
3041 /* If TARGET is still a SUBREG, then it must be wider than a word,
3042 so we must be careful only to set the subword we were asked to. */
3043 if (GET_CODE (target) == SUBREG)
3044 emit_move_insn (target, subtarget);
3045 else
3046 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
3047 }
3048
3049 DONE;
3050 }"
3051 )
3052
3053 (define_insn "insv_zero"
3054 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3055 (match_operand:SI 1 "const_int_M_operand" "M")
3056 (match_operand:SI 2 "const_int_M_operand" "M"))
3057 (const_int 0))]
3058 "arm_arch_thumb2"
3059 "bfc%?\t%0, %2, %1"
3060 [(set_attr "length" "4")
3061 (set_attr "predicable" "yes")
3062 (set_attr "type" "bfm")]
3063 )
3064
3065 (define_insn "insv_t2"
3066 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3067 (match_operand:SI 1 "const_int_M_operand" "M")
3068 (match_operand:SI 2 "const_int_M_operand" "M"))
3069 (match_operand:SI 3 "s_register_operand" "r"))]
3070 "arm_arch_thumb2"
3071 "bfi%?\t%0, %3, %2, %1"
3072 [(set_attr "length" "4")
3073 (set_attr "predicable" "yes")
3074 (set_attr "type" "bfm")]
3075 )
3076
3077 (define_insn "andsi_notsi_si"
3078 [(set (match_operand:SI 0 "s_register_operand" "=r")
3079 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3080 (match_operand:SI 1 "s_register_operand" "r")))]
3081 "TARGET_32BIT"
3082 "bic%?\\t%0, %1, %2"
3083 [(set_attr "predicable" "yes")
3084 (set_attr "type" "logic_reg")]
3085 )
3086
3087 (define_insn "andsi_not_shiftsi_si"
3088 [(set (match_operand:SI 0 "s_register_operand" "=r")
3089 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
3090 [(match_operand:SI 2 "s_register_operand" "r")
3091 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
3092 (match_operand:SI 1 "s_register_operand" "r")))]
3093 "TARGET_ARM"
3094 "bic%?\\t%0, %1, %2%S4"
3095 [(set_attr "predicable" "yes")
3096 (set_attr "shift" "2")
3097 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
3098 (const_string "logic_shift_imm")
3099 (const_string "logic_shift_reg")))]
3100 )
3101
3102 ;; Shifted bics pattern used to set up the CC status register without
3103 ;; reusing the bics output.  The pattern restricts the Thumb-2 shift operand
3104 ;; to a constant, since bics in Thumb-2 does not support a shift by register.
3105 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
3106 [(set (reg:CC_NOOV CC_REGNUM)
3107 (compare:CC_NOOV
3108 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3109 [(match_operand:SI 1 "s_register_operand" "r")
3110 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3111 (match_operand:SI 3 "s_register_operand" "r"))
3112 (const_int 0)))
3113 (clobber (match_scratch:SI 4 "=r"))]
3114 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
3115 "bics%?\\t%4, %3, %1%S0"
3116 [(set_attr "predicable" "yes")
3117 (set_attr "conds" "set")
3118 (set_attr "shift" "1")
3119 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3120 (const_string "logic_shift_imm")
3121 (const_string "logic_shift_reg")))]
3122 )
3123
3124 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the bics result is also
3125 ;; getting reused later.
3126 (define_insn "andsi_not_shiftsi_si_scc"
3127 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
3128 (compare:CC_NOOV
3129 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3130 [(match_operand:SI 1 "s_register_operand" "r")
3131 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3132 (match_operand:SI 3 "s_register_operand" "r"))
3133 (const_int 0)))
3134 (set (match_operand:SI 4 "s_register_operand" "=r")
3135 (and:SI (not:SI (match_op_dup 0
3136 [(match_dup 1)
3137 (match_dup 2)]))
3138 (match_dup 3)))])]
3139 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
3140 "bics%?\\t%4, %3, %1%S0"
3141 [(set_attr "predicable" "yes")
3142 (set_attr "conds" "set")
3143 (set_attr "shift" "1")
3144 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3145 (const_string "logic_shift_imm")
3146 (const_string "logic_shift_reg")))]
3147 )
3148
3149 (define_insn "*andsi_notsi_si_compare0"
3150 [(set (reg:CC_NOOV CC_REGNUM)
3151 (compare:CC_NOOV
3152 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3153 (match_operand:SI 1 "s_register_operand" "r"))
3154 (const_int 0)))
3155 (set (match_operand:SI 0 "s_register_operand" "=r")
3156 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
3157 "TARGET_32BIT"
3158 "bics\\t%0, %1, %2"
3159 [(set_attr "conds" "set")
3160 (set_attr "type" "logics_shift_reg")]
3161 )
3162
3163 (define_insn "*andsi_notsi_si_compare0_scratch"
3164 [(set (reg:CC_NOOV CC_REGNUM)
3165 (compare:CC_NOOV
3166 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3167 (match_operand:SI 1 "s_register_operand" "r"))
3168 (const_int 0)))
3169 (clobber (match_scratch:SI 0 "=r"))]
3170 "TARGET_32BIT"
3171 "bics\\t%0, %1, %2"
3172 [(set_attr "conds" "set")
3173 (set_attr "type" "logics_shift_reg")]
3174 )
3175
3176 (define_expand "iorsi3"
3177 [(set (match_operand:SI 0 "s_register_operand")
3178 (ior:SI (match_operand:SI 1 "s_register_operand")
3179 (match_operand:SI 2 "reg_or_int_operand")))]
3180 "TARGET_EITHER"
3181 "
3182 if (CONST_INT_P (operands[2]))
3183 {
3184 if (TARGET_32BIT)
3185 {
3186 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
3187 operands[2] = force_reg (SImode, operands[2]);
3188 else
3189 {
3190 arm_split_constant (IOR, SImode, NULL_RTX,
3191 INTVAL (operands[2]), operands[0],
3192 operands[1],
3193 optimize && can_create_pseudo_p ());
3194 DONE;
3195 }
3196 }
3197 else /* TARGET_THUMB1 */
3198 {
3199 rtx tmp = force_reg (SImode, operands[2]);
3200 if (rtx_equal_p (operands[0], operands[1]))
3201 operands[2] = tmp;
3202 else
3203 {
3204 operands[2] = operands[1];
3205 operands[1] = tmp;
3206 }
3207 }
3208 }
3209 "
3210 )
3211
3212 (define_insn_and_split "*iorsi3_insn"
3213 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
3214 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
3215 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
3216 "TARGET_32BIT"
3217 "@
3218 orr%?\\t%0, %1, %2
3219 orr%?\\t%0, %1, %2
3220 orn%?\\t%0, %1, #%B2
3221 orr%?\\t%0, %1, %2
3222 #"
3223 "TARGET_32BIT
3224 && CONST_INT_P (operands[2])
3225 && !(const_ok_for_arm (INTVAL (operands[2]))
3226 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
3227 [(clobber (const_int 0))]
3228 {
3229 arm_split_constant (IOR, SImode, curr_insn,
3230 INTVAL (operands[2]), operands[0], operands[1], 0);
3231 DONE;
3232 }
3233 [(set_attr "length" "4,4,4,4,16")
3234 (set_attr "arch" "32,t2,t2,32,32")
3235 (set_attr "predicable" "yes")
3236 (set_attr "predicable_short_it" "no,yes,no,no,no")
3237 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
3238 )
3239
3240 (define_peephole2
3241 [(match_scratch:SI 3 "r")
3242 (set (match_operand:SI 0 "arm_general_register_operand" "")
3243 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
3244 (match_operand:SI 2 "const_int_operand" "")))]
3245 "TARGET_ARM
3246 && !const_ok_for_arm (INTVAL (operands[2]))
3247 && const_ok_for_arm (~INTVAL (operands[2]))"
3248 [(set (match_dup 3) (match_dup 2))
3249 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
3250 ""
3251 )
3252
3253 (define_insn "*iorsi3_compare0"
3254 [(set (reg:CC_NOOV CC_REGNUM)
3255 (compare:CC_NOOV
3256 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3257 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3258 (const_int 0)))
3259 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
3260 (ior:SI (match_dup 1) (match_dup 2)))]
3261 "TARGET_32BIT"
3262 "orrs%?\\t%0, %1, %2"
3263 [(set_attr "conds" "set")
3264 (set_attr "arch" "*,t2,*")
3265 (set_attr "length" "4,2,4")
3266 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
3267 )
3268
3269 (define_insn "*iorsi3_compare0_scratch"
3270 [(set (reg:CC_NOOV CC_REGNUM)
3271 (compare:CC_NOOV
3272 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3273 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3274 (const_int 0)))
3275 (clobber (match_scratch:SI 0 "=r,l,r"))]
3276 "TARGET_32BIT"
3277 "orrs%?\\t%0, %1, %2"
3278 [(set_attr "conds" "set")
3279 (set_attr "arch" "*,t2,*")
3280 (set_attr "length" "4,2,4")
3281 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
3282 )
3283
3284 (define_expand "xorsi3"
3285 [(set (match_operand:SI 0 "s_register_operand")
3286 (xor:SI (match_operand:SI 1 "s_register_operand")
3287 (match_operand:SI 2 "reg_or_int_operand")))]
3288 "TARGET_EITHER"
3289 "if (CONST_INT_P (operands[2]))
3290 {
3291 if (TARGET_32BIT)
3292 {
3293 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
3294 operands[2] = force_reg (SImode, operands[2]);
3295 else
3296 {
3297 arm_split_constant (XOR, SImode, NULL_RTX,
3298 INTVAL (operands[2]), operands[0],
3299 operands[1],
3300 optimize && can_create_pseudo_p ());
3301 DONE;
3302 }
3303 }
3304 else /* TARGET_THUMB1 */
3305 {
3306 rtx tmp = force_reg (SImode, operands[2]);
3307 if (rtx_equal_p (operands[0], operands[1]))
3308 operands[2] = tmp;
3309 else
3310 {
3311 operands[2] = operands[1];
3312 operands[1] = tmp;
3313 }
3314 }
3315 }"
3316 )
3317
3318 (define_insn_and_split "*arm_xorsi3"
3319 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
3320 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
3321 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
3322 "TARGET_32BIT"
3323 "@
3324 eor%?\\t%0, %1, %2
3325 eor%?\\t%0, %1, %2
3326 eor%?\\t%0, %1, %2
3327 #"
3328 "TARGET_32BIT
3329 && CONST_INT_P (operands[2])
3330 && !const_ok_for_arm (INTVAL (operands[2]))"
3331 [(clobber (const_int 0))]
3332 {
3333 arm_split_constant (XOR, SImode, curr_insn,
3334 INTVAL (operands[2]), operands[0], operands[1], 0);
3335 DONE;
3336 }
3337 [(set_attr "length" "4,4,4,16")
3338 (set_attr "predicable" "yes")
3339 (set_attr "predicable_short_it" "no,yes,no,no")
3340 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
3341 )
3342
3343 (define_insn "*xorsi3_compare0"
3344 [(set (reg:CC_NOOV CC_REGNUM)
3345 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
3346 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3347 (const_int 0)))
3348 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3349 (xor:SI (match_dup 1) (match_dup 2)))]
3350 "TARGET_32BIT"
3351 "eors%?\\t%0, %1, %2"
3352 [(set_attr "conds" "set")
3353 (set_attr "type" "logics_imm,logics_reg")]
3354 )
3355
3356 (define_insn "*xorsi3_compare0_scratch"
3357 [(set (reg:CC_NOOV CC_REGNUM)
3358 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
3359 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
3360 (const_int 0)))]
3361 "TARGET_32BIT"
3362 "teq%?\\t%0, %1"
3363 [(set_attr "conds" "set")
3364 (set_attr "type" "logics_imm,logics_reg")]
3365 )
3366
3367 ; By splitting (IOR (AND (NOT A) (NOT B)) C) into D = AND (IOR A B) (NOT C)
3368 ; followed by (NOT D), we can sometimes merge the final NOT into one of the
3369 ; following insns.
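;
; As an illustration (just restating the identity above): since
;   (~a & ~b) | c == ~(a | b) | c == ~((a | b) & ~c),
; the scratch register D holds (a | b) & ~c and the final NOT of D is a
; separate insn that a later pass may fold into a NOT-absorbing insn
; (e.g. a BIC or MVN form).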
3370
3371 (define_split
3372 [(set (match_operand:SI 0 "s_register_operand" "")
3373 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3374 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3375 (match_operand:SI 3 "arm_rhs_operand" "")))
3376 (clobber (match_operand:SI 4 "s_register_operand" ""))]
3377 "TARGET_32BIT"
3378 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3379 (not:SI (match_dup 3))))
3380 (set (match_dup 0) (not:SI (match_dup 4)))]
3381 ""
3382 )
3383
3384 (define_insn_and_split "*andsi_iorsi3_notsi"
3385 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3386 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3387 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3388 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3389 "TARGET_32BIT"
3390 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3391 "&& reload_completed"
3392 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
3393 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
3394 {
3395 /* If operands[3] is a constant make sure to fold the NOT into it
3396 to avoid creating a NOT of a CONST_INT. */
3397 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
3398 if (CONST_INT_P (not_rtx))
3399 {
3400 operands[4] = operands[0];
3401 operands[5] = not_rtx;
3402 }
3403 else
3404 {
3405 operands[5] = operands[0];
3406 operands[4] = not_rtx;
3407 }
3408 }
3409 [(set_attr "length" "8")
3410 (set_attr "ce_count" "2")
3411 (set_attr "predicable" "yes")
3412 (set_attr "type" "multiple")]
3413 )
3414
3415 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3416 ; insns are available?
3417 (define_split
3418 [(set (match_operand:SI 0 "s_register_operand" "")
3419 (match_operator:SI 1 "logical_binary_operator"
3420 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3421 (match_operand:SI 3 "const_int_operand" "")
3422 (match_operand:SI 4 "const_int_operand" ""))
3423 (match_operator:SI 9 "logical_binary_operator"
3424 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3425 (match_operand:SI 6 "const_int_operand" ""))
3426 (match_operand:SI 7 "s_register_operand" "")])]))
3427 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3428 "TARGET_32BIT
3429 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3430 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3431 [(set (match_dup 8)
3432 (match_op_dup 1
3433 [(ashift:SI (match_dup 2) (match_dup 4))
3434 (match_dup 5)]))
3435 (set (match_dup 0)
3436 (match_op_dup 1
3437 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3438 (match_dup 7)]))]
3439 "
3440 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3441 ")
3442
3443 (define_split
3444 [(set (match_operand:SI 0 "s_register_operand" "")
3445 (match_operator:SI 1 "logical_binary_operator"
3446 [(match_operator:SI 9 "logical_binary_operator"
3447 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3448 (match_operand:SI 6 "const_int_operand" ""))
3449 (match_operand:SI 7 "s_register_operand" "")])
3450 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3451 (match_operand:SI 3 "const_int_operand" "")
3452 (match_operand:SI 4 "const_int_operand" ""))]))
3453 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3454 "TARGET_32BIT
3455 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3456 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3457 [(set (match_dup 8)
3458 (match_op_dup 1
3459 [(ashift:SI (match_dup 2) (match_dup 4))
3460 (match_dup 5)]))
3461 (set (match_dup 0)
3462 (match_op_dup 1
3463 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3464 (match_dup 7)]))]
3465 "
3466 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3467 ")
3468
3469 (define_split
3470 [(set (match_operand:SI 0 "s_register_operand" "")
3471 (match_operator:SI 1 "logical_binary_operator"
3472 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3473 (match_operand:SI 3 "const_int_operand" "")
3474 (match_operand:SI 4 "const_int_operand" ""))
3475 (match_operator:SI 9 "logical_binary_operator"
3476 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3477 (match_operand:SI 6 "const_int_operand" ""))
3478 (match_operand:SI 7 "s_register_operand" "")])]))
3479 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3480 "TARGET_32BIT
3481 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3482 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3483 [(set (match_dup 8)
3484 (match_op_dup 1
3485 [(ashift:SI (match_dup 2) (match_dup 4))
3486 (match_dup 5)]))
3487 (set (match_dup 0)
3488 (match_op_dup 1
3489 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3490 (match_dup 7)]))]
3491 "
3492 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3493 ")
3494
3495 (define_split
3496 [(set (match_operand:SI 0 "s_register_operand" "")
3497 (match_operator:SI 1 "logical_binary_operator"
3498 [(match_operator:SI 9 "logical_binary_operator"
3499 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3500 (match_operand:SI 6 "const_int_operand" ""))
3501 (match_operand:SI 7 "s_register_operand" "")])
3502 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3503 (match_operand:SI 3 "const_int_operand" "")
3504 (match_operand:SI 4 "const_int_operand" ""))]))
3505 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3506 "TARGET_32BIT
3507 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3508 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3509 [(set (match_dup 8)
3510 (match_op_dup 1
3511 [(ashift:SI (match_dup 2) (match_dup 4))
3512 (match_dup 5)]))
3513 (set (match_dup 0)
3514 (match_op_dup 1
3515 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3516 (match_dup 7)]))]
3517 "
3518 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3519 ")
3520 \f
3521
3522 ;; Minimum and maximum insns
3523
3524 (define_expand "smaxsi3"
3525 [(parallel [
3526 (set (match_operand:SI 0 "s_register_operand")
3527 (smax:SI (match_operand:SI 1 "s_register_operand")
3528 (match_operand:SI 2 "arm_rhs_operand")))
3529 (clobber (reg:CC CC_REGNUM))])]
3530 "TARGET_32BIT"
3531 "
3532 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3533 {
3534 /* No need for a clobber of the condition code register here. */
3535 emit_insn (gen_rtx_SET (operands[0],
3536 gen_rtx_SMAX (SImode, operands[1],
3537 operands[2])));
3538 DONE;
3539 }
3540 ")
3541
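;; A rough sketch of why those two constants need no CC clobber: with an
;; arithmetic shift, x >> 31 is 0 for x >= 0 and all-ones for x < 0, so
;;   smax (x, 0)  == x & ~(x >> 31)   -> bic %0, %1, %1, asr #31
;;   smax (x, -1) == x | (x >> 31)    -> orr %0, %1, %1, asr #31
;;   smin (x, 0)  == x & (x >> 31)    -> and %0, %1, %1, asr #31
;; which *smax_0, *smax_m1 and *smin_0 below implement without touching
;; the flags.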
3542 (define_insn "*smax_0"
3543 [(set (match_operand:SI 0 "s_register_operand" "=r")
3544 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3545 (const_int 0)))]
3546 "TARGET_32BIT"
3547 "bic%?\\t%0, %1, %1, asr #31"
3548 [(set_attr "predicable" "yes")
3549 (set_attr "type" "logic_shift_reg")]
3550 )
3551
3552 (define_insn "*smax_m1"
3553 [(set (match_operand:SI 0 "s_register_operand" "=r")
3554 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3555 (const_int -1)))]
3556 "TARGET_32BIT"
3557 "orr%?\\t%0, %1, %1, asr #31"
3558 [(set_attr "predicable" "yes")
3559 (set_attr "type" "logic_shift_reg")]
3560 )
3561
3562 (define_insn_and_split "*arm_smax_insn"
3563 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3564 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3565 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3566 (clobber (reg:CC CC_REGNUM))]
3567 "TARGET_ARM"
3568 "#"
3569 ; cmp\\t%1, %2\;movlt\\t%0, %2
3570 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3571 "TARGET_ARM"
3572 [(set (reg:CC CC_REGNUM)
3573 (compare:CC (match_dup 1) (match_dup 2)))
3574 (set (match_dup 0)
3575 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
3576 (match_dup 1)
3577 (match_dup 2)))]
3578 ""
3579 [(set_attr "conds" "clob")
3580 (set_attr "length" "8,12")
3581 (set_attr "type" "multiple")]
3582 )
3583
3584 (define_expand "sminsi3"
3585 [(parallel [
3586 (set (match_operand:SI 0 "s_register_operand")
3587 (smin:SI (match_operand:SI 1 "s_register_operand")
3588 (match_operand:SI 2 "arm_rhs_operand")))
3589 (clobber (reg:CC CC_REGNUM))])]
3590 "TARGET_32BIT"
3591 "
3592 if (operands[2] == const0_rtx)
3593 {
3594 /* No need for a clobber of the condition code register here. */
3595 emit_insn (gen_rtx_SET (operands[0],
3596 gen_rtx_SMIN (SImode, operands[1],
3597 operands[2])));
3598 DONE;
3599 }
3600 ")
3601
3602 (define_insn "*smin_0"
3603 [(set (match_operand:SI 0 "s_register_operand" "=r")
3604 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3605 (const_int 0)))]
3606 "TARGET_32BIT"
3607 "and%?\\t%0, %1, %1, asr #31"
3608 [(set_attr "predicable" "yes")
3609 (set_attr "type" "logic_shift_reg")]
3610 )
3611
3612 (define_insn_and_split "*arm_smin_insn"
3613 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3614 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3615 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3616 (clobber (reg:CC CC_REGNUM))]
3617 "TARGET_ARM"
3618 "#"
3619 ; cmp\\t%1, %2\;movge\\t%0, %2
3620 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3621 "TARGET_ARM"
3622 [(set (reg:CC CC_REGNUM)
3623 (compare:CC (match_dup 1) (match_dup 2)))
3624 (set (match_dup 0)
3625 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
3626 (match_dup 1)
3627 (match_dup 2)))]
3628 ""
3629 [(set_attr "conds" "clob")
3630 (set_attr "length" "8,12")
3631 (set_attr "type" "multiple,multiple")]
3632 )
3633
3634 (define_expand "umaxsi3"
3635 [(parallel [
3636 (set (match_operand:SI 0 "s_register_operand")
3637 (umax:SI (match_operand:SI 1 "s_register_operand")
3638 (match_operand:SI 2 "arm_rhs_operand")))
3639 (clobber (reg:CC CC_REGNUM))])]
3640 "TARGET_32BIT"
3641 ""
3642 )
3643
3644 (define_insn_and_split "*arm_umaxsi3"
3645 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3646 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3647 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3648 (clobber (reg:CC CC_REGNUM))]
3649 "TARGET_ARM"
3650 "#"
3651 ; cmp\\t%1, %2\;movcc\\t%0, %2
3652 ; cmp\\t%1, %2\;movcs\\t%0, %1
3653 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3654 "TARGET_ARM"
3655 [(set (reg:CC CC_REGNUM)
3656 (compare:CC (match_dup 1) (match_dup 2)))
3657 (set (match_dup 0)
3658 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
3659 (match_dup 1)
3660 (match_dup 2)))]
3661 ""
3662 [(set_attr "conds" "clob")
3663 (set_attr "length" "8,8,12")
3664 (set_attr "type" "multiple")]
3665 )
3666
3667 (define_expand "uminsi3"
3668 [(parallel [
3669 (set (match_operand:SI 0 "s_register_operand")
3670 (umin:SI (match_operand:SI 1 "s_register_operand")
3671 (match_operand:SI 2 "arm_rhs_operand")))
3672 (clobber (reg:CC CC_REGNUM))])]
3673 "TARGET_32BIT"
3674 ""
3675 )
3676
3677 (define_insn_and_split "*arm_uminsi3"
3678 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3679 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3680 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3681 (clobber (reg:CC CC_REGNUM))]
3682 "TARGET_ARM"
3683 "#"
3684 ; cmp\\t%1, %2\;movcs\\t%0, %2
3685 ; cmp\\t%1, %2\;movcc\\t%0, %1
3686 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3687 "TARGET_ARM"
3688 [(set (reg:CC CC_REGNUM)
3689 (compare:CC (match_dup 1) (match_dup 2)))
3690 (set (match_dup 0)
3691 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
3692 (match_dup 1)
3693 (match_dup 2)))]
3694 ""
3695 [(set_attr "conds" "clob")
3696 (set_attr "length" "8,8,12")
3697 (set_attr "type" "multiple")]
3698 )
3699
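;; (Illustrative only: when optimizing for size, C source along the lines of
;;    void f (int *p, int a, int b) { *p = a < b ? a : b; }
;;  may be combined into the *store_minmaxsi pattern below and emitted
;;  roughly as
;;    cmp a, b; strlt a, [p]; strge b, [p]
;;  instead of materialising the minimum in a register first.)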
3700 (define_insn "*store_minmaxsi"
3701 [(set (match_operand:SI 0 "memory_operand" "=m")
3702 (match_operator:SI 3 "minmax_operator"
3703 [(match_operand:SI 1 "s_register_operand" "r")
3704 (match_operand:SI 2 "s_register_operand" "r")]))
3705 (clobber (reg:CC CC_REGNUM))]
3706 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
3707 "*
3708 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3709 operands[1], operands[2]);
3710 output_asm_insn (\"cmp\\t%1, %2\", operands);
3711 if (TARGET_THUMB2)
3712 output_asm_insn (\"ite\t%d3\", operands);
3713 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3714 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3715 return \"\";
3716 "
3717 [(set_attr "conds" "clob")
3718 (set (attr "length")
3719 (if_then_else (eq_attr "is_thumb" "yes")
3720 (const_int 14)
3721 (const_int 12)))
3722 (set_attr "type" "store_4")]
3723 )
3724
3725 ; Reject the frame pointer in operand[1], since reloading this after
3726 ; it has been eliminated can cause carnage.
3727 (define_insn "*minmax_arithsi"
3728 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3729 (match_operator:SI 4 "shiftable_operator"
3730 [(match_operator:SI 5 "minmax_operator"
3731 [(match_operand:SI 2 "s_register_operand" "r,r")
3732 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3733 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3734 (clobber (reg:CC CC_REGNUM))]
3735 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
3736 "*
3737 {
3738 enum rtx_code code = GET_CODE (operands[4]);
3739 bool need_else;
3740
3741 if (which_alternative != 0 || operands[3] != const0_rtx
3742 || (code != PLUS && code != IOR && code != XOR))
3743 need_else = true;
3744 else
3745 need_else = false;
3746
3747 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3748 operands[2], operands[3]);
3749 output_asm_insn (\"cmp\\t%2, %3\", operands);
3750 if (TARGET_THUMB2)
3751 {
3752 if (need_else)
3753 output_asm_insn (\"ite\\t%d5\", operands);
3754 else
3755 output_asm_insn (\"it\\t%d5\", operands);
3756 }
3757 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3758 if (need_else)
3759 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3760 return \"\";
3761 }"
3762 [(set_attr "conds" "clob")
3763 (set (attr "length")
3764 (if_then_else (eq_attr "is_thumb" "yes")
3765 (const_int 14)
3766 (const_int 12)))
3767 (set_attr "type" "multiple")]
3768 )
3769
3770 ; Reject the frame pointer in operand[1], since reloading this after
3771 ; it has been eliminated can cause carnage.
3772 (define_insn_and_split "*minmax_arithsi_non_canon"
3773 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
3774 (minus:SI
3775 (match_operand:SI 1 "s_register_operand" "0,?Ts")
3776 (match_operator:SI 4 "minmax_operator"
3777 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
3778 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
3779 (clobber (reg:CC CC_REGNUM))]
3780 "TARGET_32BIT && !arm_eliminable_register (operands[1])
3781 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
3782 "#"
3783 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
3784 [(set (reg:CC CC_REGNUM)
3785 (compare:CC (match_dup 2) (match_dup 3)))
3786
3787 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
3788 (set (match_dup 0)
3789 (minus:SI (match_dup 1)
3790 (match_dup 2))))
3791 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
3792 (set (match_dup 0)
3793 (match_dup 6)))]
3794 {
3795 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
3796 operands[2], operands[3]);
3797 enum rtx_code rc = minmax_code (operands[4]);
3798 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
3799 operands[2], operands[3]);
3800
3801 if (mode == CCFPmode || mode == CCFPEmode)
3802 rc = reverse_condition_maybe_unordered (rc);
3803 else
3804 rc = reverse_condition (rc);
3805 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
3806 if (CONST_INT_P (operands[3]))
3807 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
3808 else
3809 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
3810 }
3811 [(set_attr "conds" "clob")
3812 (set (attr "length")
3813 (if_then_else (eq_attr "is_thumb" "yes")
3814 (const_int 14)
3815 (const_int 12)))
3816 (set_attr "type" "multiple")]
3817 )
3818
3819 (define_code_iterator SAT [smin smax])
3820 (define_code_attr SATrev [(smin "smax") (smax "smin")])
3821 (define_code_attr SATlo [(smin "1") (smax "2")])
3822 (define_code_attr SAThi [(smin "2") (smax "1")])
3823
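;; As a hedged illustration of what these iterators match: a signed clamp
;; such as
;;   int clamp8 (int x) { return x < -128 ? -128 : x > 127 ? 127 : x; }
;; is typically folded to MIN (MAX (x, -128), 127), matching
;; smin (smax (x, -128), 127) here; when arm_sat_operator_match accepts the
;; bounds it is emitted as a single ssat (bit-width 8), and the analogous
;; 0 .. 2^n-1 clamp becomes usat.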
3824 (define_insn "*satsi_<SAT:code>"
3825 [(set (match_operand:SI 0 "s_register_operand" "=r")
3826 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
3827 (match_operand:SI 1 "const_int_operand" "i"))
3828 (match_operand:SI 2 "const_int_operand" "i")))]
3829 "TARGET_32BIT && arm_arch6
3830 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3831 {
3832 int mask;
3833 bool signed_sat;
3834 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3835 &mask, &signed_sat))
3836 gcc_unreachable ();
3837
3838 operands[1] = GEN_INT (mask);
3839 if (signed_sat)
3840 return "ssat%?\t%0, %1, %3";
3841 else
3842 return "usat%?\t%0, %1, %3";
3843 }
3844 [(set_attr "predicable" "yes")
3845 (set_attr "type" "alus_imm")]
3846 )
3847
3848 (define_insn "*satsi_<SAT:code>_shift"
3849 [(set (match_operand:SI 0 "s_register_operand" "=r")
3850 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
3851 [(match_operand:SI 4 "s_register_operand" "r")
3852 (match_operand:SI 5 "const_int_operand" "i")])
3853 (match_operand:SI 1 "const_int_operand" "i"))
3854 (match_operand:SI 2 "const_int_operand" "i")))]
3855 "TARGET_32BIT && arm_arch6
3856 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3857 {
3858 int mask;
3859 bool signed_sat;
3860 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3861 &mask, &signed_sat))
3862 gcc_unreachable ();
3863
3864 operands[1] = GEN_INT (mask);
3865 if (signed_sat)
3866 return "ssat%?\t%0, %1, %4%S3";
3867 else
3868 return "usat%?\t%0, %1, %4%S3";
3869 }
3870 [(set_attr "predicable" "yes")
3871 (set_attr "shift" "3")
3872 (set_attr "type" "logic_shift_reg")])
3873 \f
3874 ;; Shift and rotation insns
3875
3876 (define_expand "ashldi3"
3877 [(set (match_operand:DI 0 "s_register_operand")
3878 (ashift:DI (match_operand:DI 1 "s_register_operand")
3879 (match_operand:SI 2 "reg_or_int_operand")))]
3880 "TARGET_32BIT"
3881 "
3882 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
3883 operands[2], gen_reg_rtx (SImode),
3884 gen_reg_rtx (SImode));
3885 DONE;
3886 ")
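;; Conceptually (a hedged sketch, not the exact insn sequence), the DImode
;; shifts expanded via arm_emit_coreregs_64bit_shift split into 32-bit
;; halves; for a left shift by n:
;;   hi' = (hi << n) | (lo >> (32 - n)) | (lo << (n - 32))
;;   lo' = lo << n
;; where, as with ARM register-specified shifts, any amount of 32 or more
;; contributes zero, so only one of the lo terms is live for a given n,
;; and the two SImode scratch registers hold intermediates.  The ashrdi3
;; and lshrdi3 expanders below are handled analogously.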
3887
3888 (define_expand "ashlsi3"
3889 [(set (match_operand:SI 0 "s_register_operand")
3890 (ashift:SI (match_operand:SI 1 "s_register_operand")
3891 (match_operand:SI 2 "arm_rhs_operand")))]
3892 "TARGET_EITHER"
3893 "
3894 if (CONST_INT_P (operands[2])
3895 && (UINTVAL (operands[2])) > 31)
3896 {
3897 emit_insn (gen_movsi (operands[0], const0_rtx));
3898 DONE;
3899 }
3900 "
3901 )
3902
3903 (define_expand "ashrdi3"
3904 [(set (match_operand:DI 0 "s_register_operand")
3905 (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
3906 (match_operand:SI 2 "reg_or_int_operand")))]
3907 "TARGET_32BIT"
3908 "
3909 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
3910 operands[2], gen_reg_rtx (SImode),
3911 gen_reg_rtx (SImode));
3912 DONE;
3913 ")
3914
3915 (define_expand "ashrsi3"
3916 [(set (match_operand:SI 0 "s_register_operand")
3917 (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
3918 (match_operand:SI 2 "arm_rhs_operand")))]
3919 "TARGET_EITHER"
3920 "
3921 if (CONST_INT_P (operands[2])
3922 && UINTVAL (operands[2]) > 31)
3923 operands[2] = GEN_INT (31);
3924 "
3925 )
3926
3927 (define_expand "lshrdi3"
3928 [(set (match_operand:DI 0 "s_register_operand")
3929 (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
3930 (match_operand:SI 2 "reg_or_int_operand")))]
3931 "TARGET_32BIT"
3932 "
3933 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
3934 operands[2], gen_reg_rtx (SImode),
3935 gen_reg_rtx (SImode));
3936 DONE;
3937 ")
3938
3939 (define_expand "lshrsi3"
3940 [(set (match_operand:SI 0 "s_register_operand")
3941 (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
3942 (match_operand:SI 2 "arm_rhs_operand")))]
3943 "TARGET_EITHER"
3944 "
3945 if (CONST_INT_P (operands[2])
3946 && (UINTVAL (operands[2])) > 31)
3947 {
3948 emit_insn (gen_movsi (operands[0], const0_rtx));
3949 DONE;
3950 }
3951 "
3952 )
3953
3954 (define_expand "rotlsi3"
3955 [(set (match_operand:SI 0 "s_register_operand")
3956 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3957 (match_operand:SI 2 "reg_or_int_operand")))]
3958 "TARGET_32BIT"
3959 "
3960 if (CONST_INT_P (operands[2]))
3961 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3962 else
3963 {
3964 rtx reg = gen_reg_rtx (SImode);
3965 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3966 operands[2] = reg;
3967 }
3968 "
3969 )
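;; i.e. a rotate-left is rewritten as a rotate-right by (32 - n) % 32, since
;; the core instruction set only has ROR; for example a rotate-left by 8
;; becomes a ror by 24.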
3970
3971 (define_expand "rotrsi3"
3972 [(set (match_operand:SI 0 "s_register_operand")
3973 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3974 (match_operand:SI 2 "arm_rhs_operand")))]
3975 "TARGET_EITHER"
3976 "
3977 if (TARGET_32BIT)
3978 {
3979 if (CONST_INT_P (operands[2])
3980 && UINTVAL (operands[2]) > 31)
3981 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3982 }
3983 else /* TARGET_THUMB1 */
3984 {
3985 if (CONST_INT_P (operands [2]))
3986 operands [2] = force_reg (SImode, operands[2]);
3987 }
3988 "
3989 )
3990
3991 (define_insn "*arm_shiftsi3"
3992 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
3993 (match_operator:SI 3 "shift_operator"
3994 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
3995 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
3996 "TARGET_32BIT"
3997 "* return arm_output_shift(operands, 0);"
3998 [(set_attr "predicable" "yes")
3999 (set_attr "arch" "t2,t2,*,*")
4000 (set_attr "predicable_short_it" "yes,yes,no,no")
4001 (set_attr "length" "4")
4002 (set_attr "shift" "1")
4003 (set_attr "type" "alu_shift_reg,alu_shift_imm,alu_shift_imm,alu_shift_reg")]
4004 )
4005
4006 (define_insn "*shiftsi3_compare0"
4007 [(set (reg:CC_NOOV CC_REGNUM)
4008 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4009 [(match_operand:SI 1 "s_register_operand" "r,r")
4010 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4011 (const_int 0)))
4012 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4013 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
4014 "TARGET_32BIT"
4015 "* return arm_output_shift(operands, 1);"
4016 [(set_attr "conds" "set")
4017 (set_attr "shift" "1")
4018 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
4019 )
4020
4021 (define_insn "*shiftsi3_compare0_scratch"
4022 [(set (reg:CC_NOOV CC_REGNUM)
4023 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4024 [(match_operand:SI 1 "s_register_operand" "r,r")
4025 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4026 (const_int 0)))
4027 (clobber (match_scratch:SI 0 "=r,r"))]
4028 "TARGET_32BIT"
4029 "* return arm_output_shift(operands, 1);"
4030 [(set_attr "conds" "set")
4031 (set_attr "shift" "1")
4032 (set_attr "type" "shift_imm,shift_reg")]
4033 )
4034
4035 (define_insn "*not_shiftsi"
4036 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4037 (not:SI (match_operator:SI 3 "shift_operator"
4038 [(match_operand:SI 1 "s_register_operand" "r,r")
4039 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
4040 "TARGET_32BIT"
4041 "mvn%?\\t%0, %1%S3"
4042 [(set_attr "predicable" "yes")
4043 (set_attr "shift" "1")
4044 (set_attr "arch" "32,a")
4045 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4046
4047 (define_insn "*not_shiftsi_compare0"
4048 [(set (reg:CC_NOOV CC_REGNUM)
4049 (compare:CC_NOOV
4050 (not:SI (match_operator:SI 3 "shift_operator"
4051 [(match_operand:SI 1 "s_register_operand" "r,r")
4052 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
4053 (const_int 0)))
4054 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4055 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
4056 "TARGET_32BIT"
4057 "mvns%?\\t%0, %1%S3"
4058 [(set_attr "conds" "set")
4059 (set_attr "shift" "1")
4060 (set_attr "arch" "32,a")
4061 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4062
4063 (define_insn "*not_shiftsi_compare0_scratch"
4064 [(set (reg:CC_NOOV CC_REGNUM)
4065 (compare:CC_NOOV
4066 (not:SI (match_operator:SI 3 "shift_operator"
4067 [(match_operand:SI 1 "s_register_operand" "r,r")
4068 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
4069 (const_int 0)))
4070 (clobber (match_scratch:SI 0 "=r,r"))]
4071 "TARGET_32BIT"
4072 "mvns%?\\t%0, %1%S3"
4073 [(set_attr "conds" "set")
4074 (set_attr "shift" "1")
4075 (set_attr "arch" "32,a")
4076 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4077
4078 ;; We don't really have extzv, but defining this using shifts helps
4079 ;; to reduce register pressure later on.
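;;
;; As a worked illustration: extracting <width> bits starting at bit <pos>
;; with shifts alone is
;;   ((unsigned) x << (32 - pos - width)) >> (32 - width)
;; e.g. width 5 at bit 3 gives (x << 24) >> 27, matching the lshift/rshift
;; values computed below.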
4080
4081 (define_expand "extzv"
4082 [(set (match_operand 0 "s_register_operand")
4083 (zero_extract (match_operand 1 "nonimmediate_operand")
4084 (match_operand 2 "const_int_operand")
4085 (match_operand 3 "const_int_operand")))]
4086 "TARGET_THUMB1 || arm_arch_thumb2"
4087 "
4088 {
4089 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
4090 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
4091
4092 if (arm_arch_thumb2)
4093 {
4094 HOST_WIDE_INT width = INTVAL (operands[2]);
4095 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4096
4097 if (unaligned_access && MEM_P (operands[1])
4098 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
4099 {
4100 rtx base_addr;
4101
4102 if (BYTES_BIG_ENDIAN)
4103 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
4104 - bitpos;
4105
4106 if (width == 32)
4107 {
4108 base_addr = adjust_address (operands[1], SImode,
4109 bitpos / BITS_PER_UNIT);
4110 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4111 }
4112 else
4113 {
4114 rtx dest = operands[0];
4115 rtx tmp = gen_reg_rtx (SImode);
4116
4117 /* We may get a paradoxical subreg here. Strip it off. */
4118 if (GET_CODE (dest) == SUBREG
4119 && GET_MODE (dest) == SImode
4120 && GET_MODE (SUBREG_REG (dest)) == HImode)
4121 dest = SUBREG_REG (dest);
4122
4123 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4124 FAIL;
4125
4126 base_addr = adjust_address (operands[1], HImode,
4127 bitpos / BITS_PER_UNIT);
4128 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
4129 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4130 }
4131 DONE;
4132 }
4133 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
4134 {
4135 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
4136 operands[3]));
4137 DONE;
4138 }
4139 else
4140 FAIL;
4141 }
4142
4143 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4144 FAIL;
4145
4146 operands[3] = GEN_INT (rshift);
4147
4148 if (lshift == 0)
4149 {
4150 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
4151 DONE;
4152 }
4153
4154 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
4155 operands[3], gen_reg_rtx (SImode)));
4156 DONE;
4157 }"
4158 )
4159
4160 ;; Helper for extzv, for the Thumb-1 register-shifts case.
4161
4162 (define_expand "extzv_t1"
4163 [(set (match_operand:SI 4 "s_register_operand")
4164 (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
4165 (match_operand:SI 2 "const_int_operand")))
4166 (set (match_operand:SI 0 "s_register_operand")
4167 (lshiftrt:SI (match_dup 4)
4168 (match_operand:SI 3 "const_int_operand")))]
4169 "TARGET_THUMB1"
4170 "")
4171
4172 (define_expand "extv"
4173 [(set (match_operand 0 "s_register_operand")
4174 (sign_extract (match_operand 1 "nonimmediate_operand")
4175 (match_operand 2 "const_int_operand")
4176 (match_operand 3 "const_int_operand")))]
4177 "arm_arch_thumb2"
4178 {
4179 HOST_WIDE_INT width = INTVAL (operands[2]);
4180 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4181
4182 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
4183 && (bitpos % BITS_PER_UNIT) == 0)
4184 {
4185 rtx base_addr;
4186
4187 if (BYTES_BIG_ENDIAN)
4188 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
4189
4190 if (width == 32)
4191 {
4192 base_addr = adjust_address (operands[1], SImode,
4193 bitpos / BITS_PER_UNIT);
4194 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4195 }
4196 else
4197 {
4198 rtx dest = operands[0];
4199 rtx tmp = gen_reg_rtx (SImode);
4200
4201 /* We may get a paradoxical subreg here. Strip it off. */
4202 if (GET_CODE (dest) == SUBREG
4203 && GET_MODE (dest) == SImode
4204 && GET_MODE (SUBREG_REG (dest)) == HImode)
4205 dest = SUBREG_REG (dest);
4206
4207 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4208 FAIL;
4209
4210 base_addr = adjust_address (operands[1], HImode,
4211 bitpos / BITS_PER_UNIT);
4212 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
4213 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4214 }
4215
4216 DONE;
4217 }
4218 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4219 FAIL;
4220 else if (GET_MODE (operands[0]) == SImode
4221 && GET_MODE (operands[1]) == SImode)
4222 {
4223 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
4224 operands[3]));
4225 DONE;
4226 }
4227
4228 FAIL;
4229 })
4230
4231 ; Helper to expand register forms of extv with the proper modes.
4232
4233 (define_expand "extv_regsi"
4234 [(set (match_operand:SI 0 "s_register_operand")
4235 (sign_extract:SI (match_operand:SI 1 "s_register_operand")
4236 (match_operand 2 "const_int_operand")
4237 (match_operand 3 "const_int_operand")))]
4238 ""
4239 {
4240 })
4241
4242 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
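;
; A minimal, illustrative C trigger (assuming unaligned_access is enabled,
; as it is by default on most ARMv6+ configurations):
;   struct __attribute__((packed)) rec { char tag; int val; };
;   int get_val (struct rec *r) { return r->val; }
; the misaligned word read may be expanded through the extv/extzv paths
; above into the unaligned_loadsi pattern below (an unaligned ldr).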
4243
4244 (define_insn "unaligned_loaddi"
4245 [(set (match_operand:DI 0 "s_register_operand" "=r")
4246 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
4247 UNSPEC_UNALIGNED_LOAD))]
4248 "TARGET_32BIT && TARGET_LDRD"
4249 "*
4250 return output_move_double (operands, true, NULL);
4251 "
4252 [(set_attr "length" "8")
4253 (set_attr "type" "load_8")])
4254
4255 (define_insn "unaligned_loadsi"
4256 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4257 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
4258 UNSPEC_UNALIGNED_LOAD))]
4259 "unaligned_access"
4260 "@
4261 ldr\t%0, %1\t@ unaligned
4262 ldr%?\t%0, %1\t@ unaligned
4263 ldr%?\t%0, %1\t@ unaligned"
4264 [(set_attr "arch" "t1,t2,32")
4265 (set_attr "length" "2,2,4")
4266 (set_attr "predicable" "no,yes,yes")
4267 (set_attr "predicable_short_it" "no,yes,no")
4268 (set_attr "type" "load_4")])
4269
4270 ;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
4271 ;; address (there's no immediate format). That's tricky to support
4272 ;; here and we don't really need this pattern for that case, so only
4273 ;; enable for 32-bit ISAs.
4274 (define_insn "unaligned_loadhis"
4275 [(set (match_operand:SI 0 "s_register_operand" "=r")
4276 (sign_extend:SI
4277 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
4278 UNSPEC_UNALIGNED_LOAD)))]
4279 "unaligned_access && TARGET_32BIT"
4280 "ldrsh%?\t%0, %1\t@ unaligned"
4281 [(set_attr "predicable" "yes")
4282 (set_attr "type" "load_byte")])
4283
4284 (define_insn "unaligned_loadhiu"
4285 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4286 (zero_extend:SI
4287 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
4288 UNSPEC_UNALIGNED_LOAD)))]
4289 "unaligned_access"
4290 "@
4291 ldrh\t%0, %1\t@ unaligned
4292 ldrh%?\t%0, %1\t@ unaligned
4293 ldrh%?\t%0, %1\t@ unaligned"
4294 [(set_attr "arch" "t1,t2,32")
4295 (set_attr "length" "2,2,4")
4296 (set_attr "predicable" "no,yes,yes")
4297 (set_attr "predicable_short_it" "no,yes,no")
4298 (set_attr "type" "load_byte")])
4299
4300 (define_insn "unaligned_storedi"
4301 [(set (match_operand:DI 0 "memory_operand" "=m")
4302 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
4303 UNSPEC_UNALIGNED_STORE))]
4304 "TARGET_32BIT && TARGET_LDRD"
4305 "*
4306 return output_move_double (operands, true, NULL);
4307 "
4308 [(set_attr "length" "8")
4309 (set_attr "type" "store_8")])
4310
4311 (define_insn "unaligned_storesi"
4312 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
4313 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
4314 UNSPEC_UNALIGNED_STORE))]
4315 "unaligned_access"
4316 "@
4317 str\t%1, %0\t@ unaligned
4318 str%?\t%1, %0\t@ unaligned
4319 str%?\t%1, %0\t@ unaligned"
4320 [(set_attr "arch" "t1,t2,32")
4321 (set_attr "length" "2,2,4")
4322 (set_attr "predicable" "no,yes,yes")
4323 (set_attr "predicable_short_it" "no,yes,no")
4324 (set_attr "type" "store_4")])
4325
4326 (define_insn "unaligned_storehi"
4327 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
4328 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
4329 UNSPEC_UNALIGNED_STORE))]
4330 "unaligned_access"
4331 "@
4332 strh\t%1, %0\t@ unaligned
4333 strh%?\t%1, %0\t@ unaligned
4334 strh%?\t%1, %0\t@ unaligned"
4335 [(set_attr "arch" "t1,t2,32")
4336 (set_attr "length" "2,2,4")
4337 (set_attr "predicable" "no,yes,yes")
4338 (set_attr "predicable_short_it" "no,yes,no")
4339 (set_attr "type" "store_4")])
4340
4341
4342 (define_insn "*extv_reg"
4343 [(set (match_operand:SI 0 "s_register_operand" "=r")
4344 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4345 (match_operand:SI 2 "const_int_operand" "n")
4346 (match_operand:SI 3 "const_int_operand" "n")))]
4347 "arm_arch_thumb2
4348 && IN_RANGE (INTVAL (operands[3]), 0, 31)
4349 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
4350 "sbfx%?\t%0, %1, %3, %2"
4351 [(set_attr "length" "4")
4352 (set_attr "predicable" "yes")
4353 (set_attr "type" "bfm")]
4354 )
4355
4356 (define_insn "extzv_t2"
4357 [(set (match_operand:SI 0 "s_register_operand" "=r")
4358 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4359 (match_operand:SI 2 "const_int_operand" "n")
4360 (match_operand:SI 3 "const_int_operand" "n")))]
4361 "arm_arch_thumb2
4362 && IN_RANGE (INTVAL (operands[3]), 0, 31)
4363 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
4364 "ubfx%?\t%0, %1, %3, %2"
4365 [(set_attr "length" "4")
4366 (set_attr "predicable" "yes")
4367 (set_attr "type" "bfm")]
4368 )
4369
4370
4371 ;; Division instructions
4372 (define_insn "divsi3"
4373 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4374 (div:SI (match_operand:SI 1 "s_register_operand" "r,r")
4375 (match_operand:SI 2 "s_register_operand" "r,r")))]
4376 "TARGET_IDIV"
4377 "@
4378 sdiv%?\t%0, %1, %2
4379 sdiv\t%0, %1, %2"
4380 [(set_attr "arch" "32,v8mb")
4381 (set_attr "predicable" "yes")
4382 (set_attr "type" "sdiv")]
4383 )
4384
4385 (define_insn "udivsi3"
4386 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4387 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
4388 (match_operand:SI 2 "s_register_operand" "r,r")))]
4389 "TARGET_IDIV"
4390 "@
4391 udiv%?\t%0, %1, %2
4392 udiv\t%0, %1, %2"
4393 [(set_attr "arch" "32,v8mb")
4394 (set_attr "predicable" "yes")
4395 (set_attr "type" "udiv")]
4396 )
4397
4398 \f
4399 ;; Unary arithmetic insns
4400
4401 (define_expand "negvsi3"
4402 [(match_operand:SI 0 "register_operand")
4403 (match_operand:SI 1 "register_operand")
4404 (match_operand 2 "")]
4405 "TARGET_32BIT"
4406 {
4407 emit_insn (gen_subsi3_compare (operands[0], const0_rtx, operands[1]));
4408 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
4409
4410 DONE;
4411 })
4412
4413 (define_expand "negvdi3"
4414 [(match_operand:DI 0 "s_register_operand")
4415 (match_operand:DI 1 "s_register_operand")
4416 (match_operand 2 "")]
4417 "TARGET_ARM"
4418 {
4419 emit_insn (gen_negdi2_compare (operands[0], operands[1]));
4420 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
4421
4422 DONE;
4423 })
4424
4425
4426 (define_insn "negdi2_compare"
4427 [(set (reg:CC CC_REGNUM)
4428 (compare:CC
4429 (const_int 0)
4430 (match_operand:DI 1 "register_operand" "r,r")))
4431 (set (match_operand:DI 0 "register_operand" "=&r,&r")
4432 (minus:DI (const_int 0) (match_dup 1)))]
4433 "TARGET_ARM"
4434 "@
4435 rsbs\\t%Q0, %Q1, #0;rscs\\t%R0, %R1, #0
4436 rsbs\\t%Q0, %Q1, #0;sbcs\\t%R0, %R1, %R1, lsl #1"
4437 [(set_attr "conds" "set")
4438 (set_attr "arch" "a,t2")
4439 (set_attr "length" "8")
4440 (set_attr "type" "multiple")]
4441 )
4442
4443 (define_expand "negsi2"
4444 [(set (match_operand:SI 0 "s_register_operand")
4445 (neg:SI (match_operand:SI 1 "s_register_operand")))]
4446 "TARGET_EITHER"
4447 ""
4448 )
4449
4450 (define_insn "*arm_negsi2"
4451 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4452 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4453 "TARGET_32BIT"
4454 "rsb%?\\t%0, %1, #0"
4455 [(set_attr "predicable" "yes")
4456 (set_attr "predicable_short_it" "yes,no")
4457 (set_attr "arch" "t2,*")
4458 (set_attr "length" "4")
4459 (set_attr "type" "alu_imm")]
4460 )
4461
4462 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
4463 ;; rather than (0 cmp reg). This gives the same results for unsigned
4464 ;; and equality compares, which is what we mostly need here.
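;; (A worked note on the identity: ~x == -x - 1, so the subtraction described
;; by (compare (not x) (const_int -1)) computes (-x - 1) - (-1) == 0 - x,
;; i.e. exactly the operation the NEGS/RSBS alternatives below perform, and
;; the unsigned and equality flag results therefore agree with the
;; non-canonical (0 cmp reg) form.)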
4465 (define_insn "negsi2_0compare"
4466 [(set (reg:CC_RSB CC_REGNUM)
4467 (compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r"))
4468 (const_int -1)))
4469 (set (match_operand:SI 0 "s_register_operand" "=l,r")
4470 (neg:SI (match_dup 1)))]
4471 "TARGET_32BIT"
4472 "@
4473 negs\\t%0, %1
4474 rsbs\\t%0, %1, #0"
4475 [(set_attr "conds" "set")
4476 (set_attr "arch" "t2,*")
4477 (set_attr "length" "2,*")
4478 (set_attr "type" "alus_imm")]
4479 )
4480
4481 (define_insn "negsi2_carryin"
4482 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4483 (minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r"))
4484 (match_operand:SI 2 "arm_borrow_operation" "")))]
4485 "TARGET_32BIT"
4486 "@
4487 rsc\\t%0, %1, #0
4488 sbc\\t%0, %1, %1, lsl #1"
4489 [(set_attr "conds" "use")
4490 (set_attr "arch" "a,t2")
4491 (set_attr "type" "adc_imm,adc_reg")]
4492 )
4493
4494 (define_expand "negsf2"
4495 [(set (match_operand:SF 0 "s_register_operand")
4496 (neg:SF (match_operand:SF 1 "s_register_operand")))]
4497 "TARGET_32BIT && TARGET_HARD_FLOAT"
4498 ""
4499 )
4500
4501 (define_expand "negdf2"
4502 [(set (match_operand:DF 0 "s_register_operand")
4503 (neg:DF (match_operand:DF 1 "s_register_operand")))]
4504 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4505 "")
4506
4507 ;; abssi2 doesn't really clobber the condition codes if a different register
4508 ;; is being set. To keep things simple, assume during rtl manipulations that
4509 ;; it does, but tell the final scan operator the truth. Similarly for
4510 ;; (neg (abs...))
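;; (For reference, the register-to-register alternatives below rely on the
;; identity abs (x) == (x ^ (x >> 31)) - (x >> 31), with >> arithmetic,
;; which the eor/sub sequence implements; the eor/rsb variant produces the
;; negated value for (neg (abs ...)).)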
4511
4512 (define_expand "abssi2"
4513 [(parallel
4514 [(set (match_operand:SI 0 "s_register_operand")
4515 (abs:SI (match_operand:SI 1 "s_register_operand")))
4516 (clobber (match_dup 2))])]
4517 "TARGET_EITHER"
4518 "
4519 if (TARGET_THUMB1)
4520 operands[2] = gen_rtx_SCRATCH (SImode);
4521 else
4522 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
4523 ")
4524
4525 (define_insn_and_split "*arm_abssi2"
4526 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4527 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
4528 (clobber (reg:CC CC_REGNUM))]
4529 "TARGET_ARM"
4530 "#"
4531 "&& reload_completed"
4532 [(const_int 0)]
4533 {
4534 /* if (which_alternative == 0) */
4535 if (REGNO(operands[0]) == REGNO(operands[1]))
4536 {
4537 /* Emit the pattern:
4538 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
4539 [(set (reg:CC CC_REGNUM)
4540 (compare:CC (match_dup 0) (const_int 0)))
4541 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
4542 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
4543 */
4544 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
4545 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4546 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4547 (gen_rtx_LT (SImode,
4548 gen_rtx_REG (CCmode, CC_REGNUM),
4549 const0_rtx)),
4550 (gen_rtx_SET (operands[0],
4551 (gen_rtx_MINUS (SImode,
4552 const0_rtx,
4553 operands[1]))))));
4554 DONE;
4555 }
4556 else
4557 {
4558 /* Emit the pattern:
4559 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
4560 [(set (match_dup 0)
4561 (xor:SI (match_dup 1)
4562 (ashiftrt:SI (match_dup 1) (const_int 31))))
4563 (set (match_dup 0)
4564 (minus:SI (match_dup 0)
4565 (ashiftrt:SI (match_dup 1) (const_int 31))))]
4566 */
4567 emit_insn (gen_rtx_SET (operands[0],
4568 gen_rtx_XOR (SImode,
4569 gen_rtx_ASHIFTRT (SImode,
4570 operands[1],
4571 GEN_INT (31)),
4572 operands[1])));
4573 emit_insn (gen_rtx_SET (operands[0],
4574 gen_rtx_MINUS (SImode,
4575 operands[0],
4576 gen_rtx_ASHIFTRT (SImode,
4577 operands[1],
4578 GEN_INT (31)))));
4579 DONE;
4580 }
4581 }
4582 [(set_attr "conds" "clob,*")
4583 (set_attr "shift" "1")
4584 (set_attr "predicable" "no, yes")
4585 (set_attr "length" "8")
4586 (set_attr "type" "multiple")]
4587 )
4588
4589 (define_insn_and_split "*arm_neg_abssi2"
4590 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4591 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
4592 (clobber (reg:CC CC_REGNUM))]
4593 "TARGET_ARM"
4594 "#"
4595 "&& reload_completed"
4596 [(const_int 0)]
4597 {
4598 /* if (which_alternative == 0) */
4599 if (REGNO (operands[0]) == REGNO (operands[1]))
4600 {
4601 /* Emit the pattern:
4602 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
4603 */
4604 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
4605 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4606 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4607 gen_rtx_GT (SImode,
4608 gen_rtx_REG (CCmode, CC_REGNUM),
4609 const0_rtx),
4610 gen_rtx_SET (operands[0],
4611 (gen_rtx_MINUS (SImode,
4612 const0_rtx,
4613 operands[1])))));
4614 }
4615 else
4616 {
4617 /* Emit the pattern:
4618 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
4619 */
4620 emit_insn (gen_rtx_SET (operands[0],
4621 gen_rtx_XOR (SImode,
4622 gen_rtx_ASHIFTRT (SImode,
4623 operands[1],
4624 GEN_INT (31)),
4625 operands[1])));
4626 emit_insn (gen_rtx_SET (operands[0],
4627 gen_rtx_MINUS (SImode,
4628 gen_rtx_ASHIFTRT (SImode,
4629 operands[1],
4630 GEN_INT (31)),
4631 operands[0])));
4632 }
4633 DONE;
4634 }
4635 [(set_attr "conds" "clob,*")
4636 (set_attr "shift" "1")
4637 (set_attr "predicable" "no, yes")
4638 (set_attr "length" "8")
4639 (set_attr "type" "multiple")]
4640 )
4641
4642 (define_expand "abssf2"
4643 [(set (match_operand:SF 0 "s_register_operand")
4644 (abs:SF (match_operand:SF 1 "s_register_operand")))]
4645 "TARGET_32BIT && TARGET_HARD_FLOAT"
4646 "")
4647
4648 (define_expand "absdf2"
4649 [(set (match_operand:DF 0 "s_register_operand")
4650 (abs:DF (match_operand:DF 1 "s_register_operand")))]
4651 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4652 "")
4653
4654 (define_expand "sqrtsf2"
4655 [(set (match_operand:SF 0 "s_register_operand")
4656 (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
4657 "TARGET_32BIT && TARGET_HARD_FLOAT"
4658 "")
4659
4660 (define_expand "sqrtdf2"
4661 [(set (match_operand:DF 0 "s_register_operand")
4662 (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
4663 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4664 "")
4665
4666 (define_expand "one_cmplsi2"
4667 [(set (match_operand:SI 0 "s_register_operand")
4668 (not:SI (match_operand:SI 1 "s_register_operand")))]
4669 "TARGET_EITHER"
4670 ""
4671 )
4672
4673 (define_insn "*arm_one_cmplsi2"
4674 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4675 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4676 "TARGET_32BIT"
4677 "mvn%?\\t%0, %1"
4678 [(set_attr "predicable" "yes")
4679 (set_attr "predicable_short_it" "yes,no")
4680 (set_attr "arch" "t2,*")
4681 (set_attr "length" "4")
4682 (set_attr "type" "mvn_reg")]
4683 )
4684
4685 (define_insn "*notsi_compare0"
4686 [(set (reg:CC_NOOV CC_REGNUM)
4687 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4688 (const_int 0)))
4689 (set (match_operand:SI 0 "s_register_operand" "=r")
4690 (not:SI (match_dup 1)))]
4691 "TARGET_32BIT"
4692 "mvns%?\\t%0, %1"
4693 [(set_attr "conds" "set")
4694 (set_attr "type" "mvn_reg")]
4695 )
4696
4697 (define_insn "*notsi_compare0_scratch"
4698 [(set (reg:CC_NOOV CC_REGNUM)
4699 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4700 (const_int 0)))
4701 (clobber (match_scratch:SI 0 "=r"))]
4702 "TARGET_32BIT"
4703 "mvns%?\\t%0, %1"
4704 [(set_attr "conds" "set")
4705 (set_attr "type" "mvn_reg")]
4706 )
4707 \f
4708 ;; Fixed <--> Floating conversion insns
4709
4710 (define_expand "floatsihf2"
4711 [(set (match_operand:HF 0 "general_operand")
4712 (float:HF (match_operand:SI 1 "general_operand")))]
4713 "TARGET_EITHER"
4714 "
4715 {
4716 rtx op1 = gen_reg_rtx (SFmode);
4717 expand_float (op1, operands[1], 0);
4718 op1 = convert_to_mode (HFmode, op1, 0);
4719 emit_move_insn (operands[0], op1);
4720 DONE;
4721 }"
4722 )
4723
4724 (define_expand "floatdihf2"
4725 [(set (match_operand:HF 0 "general_operand")
4726 (float:HF (match_operand:DI 1 "general_operand")))]
4727 "TARGET_EITHER"
4728 "
4729 {
4730 rtx op1 = gen_reg_rtx (SFmode);
4731 expand_float (op1, operands[1], 0);
4732 op1 = convert_to_mode (HFmode, op1, 0);
4733 emit_move_insn (operands[0], op1);
4734 DONE;
4735 }"
4736 )
4737
4738 (define_expand "floatsisf2"
4739 [(set (match_operand:SF 0 "s_register_operand")
4740 (float:SF (match_operand:SI 1 "s_register_operand")))]
4741 "TARGET_32BIT && TARGET_HARD_FLOAT"
4742 "
4743 ")
4744
4745 (define_expand "floatsidf2"
4746 [(set (match_operand:DF 0 "s_register_operand")
4747 (float:DF (match_operand:SI 1 "s_register_operand")))]
4748 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4749 "
4750 ")
4751
4752 (define_expand "fix_trunchfsi2"
4753 [(set (match_operand:SI 0 "general_operand")
4754 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
4755 "TARGET_EITHER"
4756 "
4757 {
4758 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4759 expand_fix (operands[0], op1, 0);
4760 DONE;
4761 }"
4762 )
4763
4764 (define_expand "fix_trunchfdi2"
4765 [(set (match_operand:DI 0 "general_operand")
4766 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
4767 "TARGET_EITHER"
4768 "
4769 {
4770 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4771 expand_fix (operands[0], op1, 0);
4772 DONE;
4773 }"
4774 )
4775
4776 (define_expand "fix_truncsfsi2"
4777 [(set (match_operand:SI 0 "s_register_operand")
4778 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
4779 "TARGET_32BIT && TARGET_HARD_FLOAT"
4780 "
4781 ")
4782
4783 (define_expand "fix_truncdfsi2"
4784 [(set (match_operand:SI 0 "s_register_operand")
4785 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
4786 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4787 "
4788 ")
4789
4790 ;; Truncation insns
4791
4792 (define_expand "truncdfsf2"
4793 [(set (match_operand:SF 0 "s_register_operand")
4794 (float_truncate:SF
4795 (match_operand:DF 1 "s_register_operand")))]
4796 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4797 ""
4798 )
4799
4800 ;; DFmode to HFmode conversions on targets without a single-step hardware
4801 ;; instruction for it would have to go through SFmode. This is dangerous
4802 ;; as it introduces double rounding.
4803 ;;
4804 ;; Disable this pattern unless we are in an unsafe math mode, or we have
4805 ;; a single-step instruction.
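;;
;; (A toy example of the hazard, using tiny binary significands rather than
;; the real DF/SF/HF widths: for x = 1.01001b, rounding straight to a 2-bit
;; significand gives 1.1b, but rounding first to 3 bits gives 1.01b, which
;; then ties and rounds-to-even down to 1.0b -- a different final result,
;; caused purely by the intermediate rounding step.)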
4806
4807 (define_expand "truncdfhf2"
4808 [(set (match_operand:HF 0 "s_register_operand")
4809 (float_truncate:HF
4810 (match_operand:DF 1 "s_register_operand")))]
4811 "(TARGET_EITHER && flag_unsafe_math_optimizations)
4812 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
4813 {
4814 /* We don't have a direct instruction for this, so we must be in
4815 an unsafe math mode and will go via SFmode. */
4816
4817 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4818 {
4819 rtx op1;
4820 op1 = convert_to_mode (SFmode, operands[1], 0);
4821 op1 = convert_to_mode (HFmode, op1, 0);
4822 emit_move_insn (operands[0], op1);
4823 DONE;
4824 }
4825 /* Otherwise, we will pick this up as a single instruction with
4826 no intermediary rounding. */
4827 }
4828 )
4829 \f
4830 ;; Zero and sign extension instructions.
4831
4832 (define_expand "zero_extend<mode>di2"
4833 [(set (match_operand:DI 0 "s_register_operand" "")
4834 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
4835 "TARGET_32BIT <qhs_zextenddi_cond>"
4836 {
4837 rtx res_lo, res_hi, op0_lo, op0_hi;
4838 res_lo = gen_lowpart (SImode, operands[0]);
4839 res_hi = gen_highpart (SImode, operands[0]);
4840 if (can_create_pseudo_p ())
4841 {
4842 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4843 op0_hi = gen_reg_rtx (SImode);
4844 }
4845 else
4846 {
4847 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4848 op0_hi = res_hi;
4849 }
4850 if (<MODE>mode != SImode)
4851 emit_insn (gen_rtx_SET (op0_lo,
4852 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4853 emit_insn (gen_movsi (op0_hi, const0_rtx));
4854 if (res_lo != op0_lo)
4855 emit_move_insn (res_lo, op0_lo);
4856 if (res_hi != op0_hi)
4857 emit_move_insn (res_hi, op0_hi);
4858 DONE;
4859 }
4860 )
4861
4862 (define_expand "extend<mode>di2"
4863 [(set (match_operand:DI 0 "s_register_operand" "")
4864 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
4865 "TARGET_32BIT <qhs_sextenddi_cond>"
4866 {
4867 rtx res_lo, res_hi, op0_lo, op0_hi;
4868 res_lo = gen_lowpart (SImode, operands[0]);
4869 res_hi = gen_highpart (SImode, operands[0]);
4870 if (can_create_pseudo_p ())
4871 {
4872 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4873 op0_hi = gen_reg_rtx (SImode);
4874 }
4875 else
4876 {
4877 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4878 op0_hi = res_hi;
4879 }
4880 if (<MODE>mode != SImode)
4881 emit_insn (gen_rtx_SET (op0_lo,
4882 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4883 emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
4884 if (res_lo != op0_lo)
4885 emit_move_insn (res_lo, op0_lo);
4886 if (res_hi != op0_hi)
4887 emit_move_insn (res_hi, op0_hi);
4888 DONE;
4889 }
4890 )
4891
4892 ;; Splits for all extensions to DImode
4893 (define_split
4894 [(set (match_operand:DI 0 "s_register_operand" "")
4895 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4896 "TARGET_32BIT"
4897 [(set (match_dup 0) (match_dup 1))]
4898 {
4899 rtx lo_part = gen_lowpart (SImode, operands[0]);
4900 machine_mode src_mode = GET_MODE (operands[1]);
4901
4902 if (src_mode == SImode)
4903 emit_move_insn (lo_part, operands[1]);
4904 else
4905 emit_insn (gen_rtx_SET (lo_part,
4906 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4907 operands[0] = gen_highpart (SImode, operands[0]);
4908 operands[1] = const0_rtx;
4909 })
4910
4911 (define_split
4912 [(set (match_operand:DI 0 "s_register_operand" "")
4913 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4914 "TARGET_32BIT"
4915 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4916 {
4917 rtx lo_part = gen_lowpart (SImode, operands[0]);
4918 machine_mode src_mode = GET_MODE (operands[1]);
4919
4920 if (src_mode == SImode)
4921 emit_move_insn (lo_part, operands[1]);
4922 else
4923 emit_insn (gen_rtx_SET (lo_part,
4924 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4925 operands[1] = lo_part;
4926 operands[0] = gen_highpart (SImode, operands[0]);
4927 })
4928
4929 (define_expand "zero_extendhisi2"
4930 [(set (match_operand:SI 0 "s_register_operand")
4931 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4932 "TARGET_EITHER"
4933 {
4934 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4935 {
4936 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4937 DONE;
4938 }
4939 if (!arm_arch6 && !MEM_P (operands[1]))
4940 {
4941 rtx t = gen_lowpart (SImode, operands[1]);
4942 rtx tmp = gen_reg_rtx (SImode);
4943 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4944 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
4945 DONE;
4946 }
4947 })
4948
4949 (define_split
4950 [(set (match_operand:SI 0 "s_register_operand" "")
4951 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
4952 "!TARGET_THUMB2 && !arm_arch6"
4953 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4954 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4955 {
4956 operands[2] = gen_lowpart (SImode, operands[1]);
4957 })
4958
4959 (define_insn "*arm_zero_extendhisi2"
4960 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4961 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4962 "TARGET_ARM && arm_arch4 && !arm_arch6"
4963 "@
4964 #
4965 ldrh%?\\t%0, %1"
4966 [(set_attr "type" "alu_shift_reg,load_byte")
4967 (set_attr "predicable" "yes")]
4968 )
4969
4970 (define_insn "*arm_zero_extendhisi2_v6"
4971 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4972 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4973 "TARGET_ARM && arm_arch6"
4974 "@
4975 uxth%?\\t%0, %1
4976 ldrh%?\\t%0, %1"
4977 [(set_attr "predicable" "yes")
4978 (set_attr "type" "extend,load_byte")]
4979 )
4980
4981 (define_insn "*arm_zero_extendhisi2addsi"
4982 [(set (match_operand:SI 0 "s_register_operand" "=r")
4983 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4984 (match_operand:SI 2 "s_register_operand" "r")))]
4985 "TARGET_INT_SIMD"
4986 "uxtah%?\\t%0, %2, %1"
4987 [(set_attr "type" "alu_shift_reg")
4988 (set_attr "predicable" "yes")]
4989 )
4990
4991 (define_expand "zero_extendqisi2"
4992 [(set (match_operand:SI 0 "s_register_operand")
4993 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
4994 "TARGET_EITHER"
4995 {
4996 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
4997 {
4998 emit_insn (gen_andsi3 (operands[0],
4999 gen_lowpart (SImode, operands[1]),
5000 GEN_INT (255)));
5001 DONE;
5002 }
5003 if (!arm_arch6 && !MEM_P (operands[1]))
5004 {
5005 rtx t = gen_lowpart (SImode, operands[1]);
5006 rtx tmp = gen_reg_rtx (SImode);
5007 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5008 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
5009 DONE;
5010 }
5011 })
5012
5013 (define_split
5014 [(set (match_operand:SI 0 "s_register_operand" "")
5015 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
5016 "!arm_arch6"
5017 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5018 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
5019 {
5020 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5021 if (TARGET_ARM)
5022 {
5023 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
5024 DONE;
5025 }
5026 })
5027
5028 (define_insn "*arm_zero_extendqisi2"
5029 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5030 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
5031 "TARGET_ARM && !arm_arch6"
5032 "@
5033 #
5034 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5035 [(set_attr "length" "8,4")
5036 (set_attr "type" "alu_shift_reg,load_byte")
5037 (set_attr "predicable" "yes")]
5038 )
5039
5040 (define_insn "*arm_zero_extendqisi2_v6"
5041 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5042 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
5043 "TARGET_ARM && arm_arch6"
5044 "@
5045 uxtb%?\\t%0, %1
5046 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5047 [(set_attr "type" "extend,load_byte")
5048 (set_attr "predicable" "yes")]
5049 )
5050
5051 (define_insn "*arm_zero_extendqisi2addsi"
5052 [(set (match_operand:SI 0 "s_register_operand" "=r")
5053 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5054 (match_operand:SI 2 "s_register_operand" "r")))]
5055 "TARGET_INT_SIMD"
5056 "uxtab%?\\t%0, %2, %1"
5057 [(set_attr "predicable" "yes")
5058 (set_attr "type" "alu_shift_reg")]
5059 )
5060
5061 (define_split
5062 [(set (match_operand:SI 0 "s_register_operand" "")
5063 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
5064 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5065 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
5066 [(set (match_dup 2) (match_dup 1))
5067 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5068 ""
5069 )
5070
5071 (define_split
5072 [(set (match_operand:SI 0 "s_register_operand" "")
5073 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
5074 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5075 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
5076 [(set (match_dup 2) (match_dup 1))
5077 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5078 ""
5079 )
5080
5081
5082 (define_split
5083 [(set (match_operand:SI 0 "s_register_operand" "")
5084 (IOR_XOR:SI (and:SI (ashift:SI
5085 (match_operand:SI 1 "s_register_operand" "")
5086 (match_operand:SI 2 "const_int_operand" ""))
5087 (match_operand:SI 3 "const_int_operand" ""))
5088 (zero_extend:SI
5089 (match_operator 5 "subreg_lowpart_operator"
5090 [(match_operand:SI 4 "s_register_operand" "")]))))]
5091 "TARGET_32BIT
5092 && (UINTVAL (operands[3])
5093 == (GET_MODE_MASK (GET_MODE (operands[5]))
5094 & (GET_MODE_MASK (GET_MODE (operands[5]))
5095 << (INTVAL (operands[2])))))"
5096 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
5097 (match_dup 4)))
5098 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
5099 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
5100 )
5101
5102 (define_insn "*compareqi_eq0"
5103 [(set (reg:CC_Z CC_REGNUM)
5104 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
5105 (const_int 0)))]
5106 "TARGET_32BIT"
5107 "tst%?\\t%0, #255"
5108 [(set_attr "conds" "set")
5109 (set_attr "predicable" "yes")
5110 (set_attr "type" "logic_imm")]
5111 )
5112
5113 (define_expand "extendhisi2"
5114 [(set (match_operand:SI 0 "s_register_operand")
5115 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5116 "TARGET_EITHER"
5117 {
5118 if (TARGET_THUMB1)
5119 {
5120 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
5121 DONE;
5122 }
5123 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
5124 {
5125 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
5126 DONE;
5127 }
5128
5129 if (!arm_arch6 && !MEM_P (operands[1]))
5130 {
5131 rtx t = gen_lowpart (SImode, operands[1]);
5132 rtx tmp = gen_reg_rtx (SImode);
5133 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5134 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
5135 DONE;
5136 }
5137 })
5138
5139 (define_split
5140 [(parallel
5141 [(set (match_operand:SI 0 "register_operand" "")
5142 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
5143 (clobber (match_scratch:SI 2 ""))])]
5144 "!arm_arch6"
5145 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5146 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5147 {
5148 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5149 })
5150
5151 ;; This pattern will only be used when ldrsh is not available
5152 (define_expand "extendhisi2_mem"
5153 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5154 (set (match_dup 3)
5155 (zero_extend:SI (match_dup 7)))
5156 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
5157 (set (match_operand:SI 0 "" "")
5158 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
5159 "TARGET_ARM"
5160 "
5161 {
5162 rtx mem1, mem2;
5163 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5164
5165 mem1 = change_address (operands[1], QImode, addr);
5166 mem2 = change_address (operands[1], QImode,
5167 plus_constant (Pmode, addr, 1));
5168 operands[0] = gen_lowpart (SImode, operands[0]);
5169 operands[1] = mem1;
5170 operands[2] = gen_reg_rtx (SImode);
5171 operands[3] = gen_reg_rtx (SImode);
5172 operands[6] = gen_reg_rtx (SImode);
5173 operands[7] = mem2;
5174
5175 if (BYTES_BIG_ENDIAN)
5176 {
5177 operands[4] = operands[2];
5178 operands[5] = operands[3];
5179 }
5180 else
5181 {
5182 operands[4] = operands[3];
5183 operands[5] = operands[2];
5184 }
5185 }"
5186 )
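
;; Illustrative sketch (not GCC source; helper name is hypothetical) of what
;; the little-endian case of the expansion above computes: the two bytes are
;; loaded separately, the byte holding the sign is shifted to the top and then
;; arithmetic-shifted back down, so the result is the sign-extended halfword:
;;   int loadhi_sext_le (const unsigned char *p)
;;   {
;;     return (short) (p[0] | (p[1] << 8));
;;   }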
5187
5188 (define_split
5189 [(set (match_operand:SI 0 "register_operand" "")
5190 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
5191 "!arm_arch6"
5192 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5193 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5194 {
5195 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5196 })
5197
5198 (define_insn "*arm_extendhisi2"
5199 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5200 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5201 "TARGET_ARM && arm_arch4 && !arm_arch6"
5202 "@
5203 #
5204 ldrsh%?\\t%0, %1"
5205 [(set_attr "length" "8,4")
5206 (set_attr "type" "alu_shift_reg,load_byte")
5207 (set_attr "predicable" "yes")]
5208 )
5209
5210 ;; ??? Check Thumb-2 pool range
5211 (define_insn "*arm_extendhisi2_v6"
5212 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5213 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5214 "TARGET_32BIT && arm_arch6"
5215 "@
5216 sxth%?\\t%0, %1
5217 ldrsh%?\\t%0, %1"
5218 [(set_attr "type" "extend,load_byte")
5219 (set_attr "predicable" "yes")]
5220 )
5221
5222 (define_insn "*arm_extendhisi2addsi"
5223 [(set (match_operand:SI 0 "s_register_operand" "=r")
5224 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5225 (match_operand:SI 2 "s_register_operand" "r")))]
5226 "TARGET_INT_SIMD"
5227 "sxtah%?\\t%0, %2, %1"
5228 [(set_attr "type" "alu_shift_reg")]
5229 )
5230
5231 (define_expand "extendqihi2"
5232 [(set (match_dup 2)
5233 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
5234 (const_int 24)))
5235 (set (match_operand:HI 0 "s_register_operand")
5236 (ashiftrt:SI (match_dup 2)
5237 (const_int 24)))]
5238 "TARGET_ARM"
5239 "
5240 {
5241 if (arm_arch4 && MEM_P (operands[1]))
5242 {
5243 emit_insn (gen_rtx_SET (operands[0],
5244 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
5245 DONE;
5246 }
5247 if (!s_register_operand (operands[1], QImode))
5248 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5249 operands[0] = gen_lowpart (SImode, operands[0]);
5250 operands[1] = gen_lowpart (SImode, operands[1]);
5251 operands[2] = gen_reg_rtx (SImode);
5252 }"
5253 )
5254
5255 (define_insn "*arm_extendqihi_insn"
5256 [(set (match_operand:HI 0 "s_register_operand" "=r")
5257 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
5258 "TARGET_ARM && arm_arch4"
5259 "ldrsb%?\\t%0, %1"
5260 [(set_attr "type" "load_byte")
5261 (set_attr "predicable" "yes")]
5262 )
5263
5264 (define_expand "extendqisi2"
5265 [(set (match_operand:SI 0 "s_register_operand")
5266 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
5267 "TARGET_EITHER"
5268 {
5269 if (!arm_arch4 && MEM_P (operands[1]))
5270 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5271
5272 if (!arm_arch6 && !MEM_P (operands[1]))
5273 {
5274 rtx t = gen_lowpart (SImode, operands[1]);
5275 rtx tmp = gen_reg_rtx (SImode);
5276 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5277 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
5278 DONE;
5279 }
5280 })
5281
5282 (define_split
5283 [(set (match_operand:SI 0 "register_operand" "")
5284 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
5285 "!arm_arch6"
5286 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5287 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
5288 {
5289 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5290 })
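
;; Illustrative sketch (not GCC source; helper name is hypothetical): without
;; SXTB the split above sign-extends a byte with a shift pair, roughly
;;   int sext_qi (int x) { return (int) ((unsigned int) x << 24) >> 24; }
;; where the right shift is arithmetic.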
5291
5292 (define_insn "*arm_extendqisi"
5293 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5294 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5295 "TARGET_ARM && arm_arch4 && !arm_arch6"
5296 "@
5297 #
5298 ldrsb%?\\t%0, %1"
5299 [(set_attr "length" "8,4")
5300 (set_attr "type" "alu_shift_reg,load_byte")
5301 (set_attr "predicable" "yes")]
5302 )
5303
5304 (define_insn "*arm_extendqisi_v6"
5305 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5306 (sign_extend:SI
5307 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5308 "TARGET_ARM && arm_arch6"
5309 "@
5310 sxtb%?\\t%0, %1
5311 ldrsb%?\\t%0, %1"
5312 [(set_attr "type" "extend,load_byte")
5313 (set_attr "predicable" "yes")]
5314 )
5315
5316 (define_insn "*arm_extendqisi2addsi"
5317 [(set (match_operand:SI 0 "s_register_operand" "=r")
5318 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5319 (match_operand:SI 2 "s_register_operand" "r")))]
5320 "TARGET_INT_SIMD"
5321 "sxtab%?\\t%0, %2, %1"
5322 [(set_attr "type" "alu_shift_reg")
5323 (set_attr "predicable" "yes")]
5324 )
5325
5326 (define_insn "arm_<sup>xtb16"
5327 [(set (match_operand:SI 0 "s_register_operand" "=r")
5328 (unspec:SI
5329 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
5330 "TARGET_INT_SIMD"
5331 "<sup>xtb16%?\\t%0, %1"
5332 [(set_attr "predicable" "yes")
5333 (set_attr "type" "alu_dsp_reg")])
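
;; Illustrative sketch (not GCC source; helper name is hypothetical): with a
;; zero rotation UXTB16 zero-extends bytes 0 and 2 into the two halfwords of
;; the result, i.e. roughly
;;   unsigned int uxtb16_example (unsigned int x) { return x & 0x00ff00ffU; }
;; while SXTB16 sign-extends each of those bytes into its halfword.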
5334
5335 (define_insn "arm_<simd32_op>"
5336 [(set (match_operand:SI 0 "s_register_operand" "=r")
5337 (unspec:SI
5338 [(match_operand:SI 1 "s_register_operand" "r")
5339 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
5340 "TARGET_INT_SIMD"
5341 "<simd32_op>%?\\t%0, %1, %2"
5342 [(set_attr "predicable" "yes")
5343 (set_attr "type" "alu_dsp_reg")])
5344
5345 (define_insn "arm_usada8"
5346 [(set (match_operand:SI 0 "s_register_operand" "=r")
5347 (unspec:SI
5348 [(match_operand:SI 1 "s_register_operand" "r")
5349 (match_operand:SI 2 "s_register_operand" "r")
5350 (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
5351 "TARGET_INT_SIMD"
5352 "usada8%?\\t%0, %1, %2, %3"
5353 [(set_attr "predicable" "yes")
5354 (set_attr "type" "alu_dsp_reg")])
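
;; Illustrative sketch (not GCC source; helper name is hypothetical) of the
;; USADA8 semantics matched above: sum of absolute differences of the four
;; byte lanes, added to the accumulator:
;;   unsigned int usada8_example (unsigned int a, unsigned int b,
;;                                unsigned int acc)
;;   {
;;     for (int i = 0; i < 32; i += 8)
;;       {
;;         unsigned int x = (a >> i) & 0xff, y = (b >> i) & 0xff;
;;         acc += x > y ? x - y : y - x;
;;       }
;;     return acc;
;;   }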
5355
5356 (define_insn "arm_<simd32_op>"
5357 [(set (match_operand:DI 0 "s_register_operand" "=r")
5358 (unspec:DI
5359 [(match_operand:SI 1 "s_register_operand" "r")
5360 (match_operand:SI 2 "s_register_operand" "r")
5361 (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
5362 "TARGET_INT_SIMD"
5363 "<simd32_op>%?\\t%Q0, %R0, %1, %2"
5364 [(set_attr "predicable" "yes")
5365 (set_attr "type" "smlald")])
5366
5367 (define_expand "extendsfdf2"
5368 [(set (match_operand:DF 0 "s_register_operand")
5369 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
5370 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5371 ""
5372 )
5373
5374 ;; HFmode -> DFmode conversions for which we don't have a direct instruction
5375 ;; must go through SFmode.
5376 ;;
5377 ;; This is always safe for an extend.
5378
5379 (define_expand "extendhfdf2"
5380 [(set (match_operand:DF 0 "s_register_operand")
5381 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
5382 "TARGET_EITHER"
5383 {
5384 /* We don't have a direct instruction for this, so go via SFmode. */
5385 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
5386 {
5387 rtx op1;
5388 op1 = convert_to_mode (SFmode, operands[1], 0);
5389 op1 = convert_to_mode (DFmode, op1, 0);
5390 emit_insn (gen_movdf (operands[0], op1));
5391 DONE;
5392 }
5393 /* Otherwise, we're done producing RTL and will pick up the correct
5394 pattern to do this with one rounding-step in a single instruction. */
5395 }
5396 )
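
;; A minimal illustrative sketch (not GCC source; helper name is hypothetical)
;; of the fallback path above, using the ARM __fp16 extension:
;;   double extend_hf_df (__fp16 x) { return (double) (float) x; }
;; Both steps are exact, so the double-rounding concern that affects
;; truncations does not arise for an extend.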
5397 \f
5398 ;; Move insns (including loads and stores)
5399
5400 ;; XXX Just some ideas about movti.
5401 ;; I don't think these are a good idea on the ARM; there just aren't enough
5402 ;; registers.
5403 ;;(define_expand "loadti"
5404 ;; [(set (match_operand:TI 0 "s_register_operand")
5405 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
5406 ;; "" "")
5407
5408 ;;(define_expand "storeti"
5409 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
5410 ;; (match_operand:TI 1 "s_register_operand"))]
5411 ;; "" "")
5412
5413 ;;(define_expand "movti"
5414 ;; [(set (match_operand:TI 0 "general_operand")
5415 ;; (match_operand:TI 1 "general_operand"))]
5416 ;; ""
5417 ;; "
5418 ;;{
5419 ;; rtx insn;
5420 ;;
5421 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
5422 ;; operands[1] = copy_to_reg (operands[1]);
5423 ;; if (MEM_P (operands[0]))
5424 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
5425 ;; else if (MEM_P (operands[1]))
5426 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
5427 ;; else
5428 ;; FAIL;
5429 ;;
5430 ;; emit_insn (insn);
5431 ;; DONE;
5432 ;;}")
5433
5434 ;; Recognize garbage generated above.
5435
5436 ;;(define_insn ""
5437 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
5438 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
5439 ;; ""
5440 ;; "*
5441 ;; {
5442 ;; register mem = (which_alternative < 3);
5443 ;; register const char *template;
5444 ;;
5445 ;; operands[mem] = XEXP (operands[mem], 0);
5446 ;; switch (which_alternative)
5447 ;; {
5448 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
5449 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
5450 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
5451 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
5452 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
5453 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
5454 ;; }
5455 ;; output_asm_insn (template, operands);
5456 ;; return \"\";
5457 ;; }")
5458
5459 (define_expand "movdi"
5460 [(set (match_operand:DI 0 "general_operand")
5461 (match_operand:DI 1 "general_operand"))]
5462 "TARGET_EITHER"
5463 "
5464 gcc_checking_assert (aligned_operand (operands[0], DImode));
5465 gcc_checking_assert (aligned_operand (operands[1], DImode));
5466 if (can_create_pseudo_p ())
5467 {
5468 if (!REG_P (operands[0]))
5469 operands[1] = force_reg (DImode, operands[1]);
5470 }
5471 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
5472 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
5473 {
5474 /* Avoid LDRD's into an odd-numbered register pair in ARM state
5475 when expanding function calls. */
5476 gcc_assert (can_create_pseudo_p ());
5477 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
5478 {
5479 /* Perform load into legal reg pair first, then move. */
5480 rtx reg = gen_reg_rtx (DImode);
5481 emit_insn (gen_movdi (reg, operands[1]));
5482 operands[1] = reg;
5483 }
5484 emit_move_insn (gen_lowpart (SImode, operands[0]),
5485 gen_lowpart (SImode, operands[1]));
5486 emit_move_insn (gen_highpart (SImode, operands[0]),
5487 gen_highpart (SImode, operands[1]));
5488 DONE;
5489 }
5490 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
5491 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
5492 {
5493 /* Avoid STRD's from an odd-numbered register pair in ARM state
5494 when expanding function prologue. */
5495 gcc_assert (can_create_pseudo_p ());
5496 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
5497 ? gen_reg_rtx (DImode)
5498 : operands[0];
5499 emit_move_insn (gen_lowpart (SImode, split_dest),
5500 gen_lowpart (SImode, operands[1]));
5501 emit_move_insn (gen_highpart (SImode, split_dest),
5502 gen_highpart (SImode, operands[1]));
5503 if (split_dest != operands[0])
5504 emit_insn (gen_movdi (operands[0], split_dest));
5505 DONE;
5506 }
5507 "
5508 )
5509
5510 (define_insn "*arm_movdi"
5511 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
5512 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
5513 "TARGET_32BIT
5514 && !(TARGET_HARD_FLOAT)
5515 && !TARGET_IWMMXT
5516 && ( register_operand (operands[0], DImode)
5517 || register_operand (operands[1], DImode))"
5518 "*
5519 switch (which_alternative)
5520 {
5521 case 0:
5522 case 1:
5523 case 2:
5524 return \"#\";
5525 case 3:
5526 /* Cannot load it directly, split to load it via MOV / MOVT. */
5527 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
5528 return \"#\";
5529 /* Fall through. */
5530 default:
5531 return output_move_double (operands, true, NULL);
5532 }
5533 "
5534 [(set_attr "length" "8,12,16,8,8")
5535 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
5536 (set_attr "arm_pool_range" "*,*,*,1020,*")
5537 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
5538 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
5539 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
5540 )
5541
5542 (define_split
5543 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5544 (match_operand:ANY64 1 "immediate_operand" ""))]
5545 "TARGET_32BIT
5546 && reload_completed
5547 && (arm_disable_literal_pool
5548 || (arm_const_double_inline_cost (operands[1])
5549 <= arm_max_const_double_inline_cost ()))"
5550 [(const_int 0)]
5551 "
5552 arm_split_constant (SET, SImode, curr_insn,
5553 INTVAL (gen_lowpart (SImode, operands[1])),
5554 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
5555 arm_split_constant (SET, SImode, curr_insn,
5556 INTVAL (gen_highpart_mode (SImode,
5557 GET_MODE (operands[0]),
5558 operands[1])),
5559 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
5560 DONE;
5561 "
5562 )
5563
5564 ; If optimizing for size, or if we have load delay slots, then
5565 ; we want to split the constant into two separate operations.
5566 ; In both cases this may split a trivial part into a single data op,
5567 ; leaving a single complex constant to load.  We can also get longer
5568 ; offsets in an LDR, which means we get better chances of sharing the pool
5569 ; entries.  Finally, we can normally do a better job of scheduling
5570 ; LDR instructions than we can with LDM.
5571 ; This pattern will only match if the one above did not.
5572 (define_split
5573 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5574 (match_operand:ANY64 1 "const_double_operand" ""))]
5575 "TARGET_ARM && reload_completed
5576 && arm_const_double_by_parts (operands[1])"
5577 [(set (match_dup 0) (match_dup 1))
5578 (set (match_dup 2) (match_dup 3))]
5579 "
5580 operands[2] = gen_highpart (SImode, operands[0]);
5581 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
5582 operands[1]);
5583 operands[0] = gen_lowpart (SImode, operands[0]);
5584 operands[1] = gen_lowpart (SImode, operands[1]);
5585 "
5586 )
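
;; Illustrative example (hypothetical constant and registers, not actual GCC
;; output): for a DImode value such as 0x00000001deadbeef the split above
;; leaves something along the lines of
;;   ldr   r0, .Lpool      @ 0xdeadbeef, the complex half, from the pool
;;   mov   r1, #1          @ the trivial half as a single data op
;; instead of one LDRD/LDM covering both words.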
5587
5588 (define_split
5589 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5590 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
5591 "TARGET_EITHER && reload_completed"
5592 [(set (match_dup 0) (match_dup 1))
5593 (set (match_dup 2) (match_dup 3))]
5594 "
5595 operands[2] = gen_highpart (SImode, operands[0]);
5596 operands[3] = gen_highpart (SImode, operands[1]);
5597 operands[0] = gen_lowpart (SImode, operands[0]);
5598 operands[1] = gen_lowpart (SImode, operands[1]);
5599
5600 /* Handle a partial overlap. */
5601 if (rtx_equal_p (operands[0], operands[3]))
5602 {
5603 rtx tmp0 = operands[0];
5604 rtx tmp1 = operands[1];
5605
5606 operands[0] = operands[2];
5607 operands[1] = operands[3];
5608 operands[2] = tmp0;
5609 operands[3] = tmp1;
5610 }
5611 "
5612 )
5613
5614 ;; We can't actually do base+index doubleword loads if the index and
5615 ;; destination overlap. Split here so that we at least have chance to
5616 ;; schedule.
5617 (define_split
5618 [(set (match_operand:DI 0 "s_register_operand" "")
5619 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
5620 (match_operand:SI 2 "s_register_operand" ""))))]
5621 "TARGET_LDRD
5622 && reg_overlap_mentioned_p (operands[0], operands[1])
5623 && reg_overlap_mentioned_p (operands[0], operands[2])"
5624 [(set (match_dup 4)
5625 (plus:SI (match_dup 1)
5626 (match_dup 2)))
5627 (set (match_dup 0)
5628 (mem:DI (match_dup 4)))]
5629 "
5630 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
5631 "
5632 )
5633
5634 (define_expand "movsi"
5635 [(set (match_operand:SI 0 "general_operand")
5636 (match_operand:SI 1 "general_operand"))]
5637 "TARGET_EITHER"
5638 "
5639 {
5640 rtx base, offset, tmp;
5641
5642 gcc_checking_assert (aligned_operand (operands[0], SImode));
5643 gcc_checking_assert (aligned_operand (operands[1], SImode));
5644 if (TARGET_32BIT || TARGET_HAVE_MOVT)
5645 {
5646 /* Everything except mem = const or mem = mem can be done easily. */
5647 if (MEM_P (operands[0]))
5648 operands[1] = force_reg (SImode, operands[1]);
5649 if (arm_general_register_operand (operands[0], SImode)
5650 && CONST_INT_P (operands[1])
5651 && !(const_ok_for_arm (INTVAL (operands[1]))
5652 || const_ok_for_arm (~INTVAL (operands[1]))))
5653 {
5654 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
5655 {
5656 emit_insn (gen_rtx_SET (operands[0], operands[1]));
5657 DONE;
5658 }
5659 else
5660 {
5661 arm_split_constant (SET, SImode, NULL_RTX,
5662 INTVAL (operands[1]), operands[0], NULL_RTX,
5663 optimize && can_create_pseudo_p ());
5664 DONE;
5665 }
5666 }
5667 }
5668 else /* Target doesn't have MOVT... */
5669 {
5670 if (can_create_pseudo_p ())
5671 {
5672 if (!REG_P (operands[0]))
5673 operands[1] = force_reg (SImode, operands[1]);
5674 }
5675 }
5676
5677 split_const (operands[1], &base, &offset);
5678 if (INTVAL (offset) != 0
5679 && targetm.cannot_force_const_mem (SImode, operands[1]))
5680 {
5681 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5682 emit_move_insn (tmp, base);
5683 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5684 DONE;
5685 }
5686
5687 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
5688
5689 /* Recognize the case where operand[1] is a reference to thread-local
5690 data and load its address to a register. Offsets have been split off
5691 already. */
5692 if (arm_tls_referenced_p (operands[1]))
5693 operands[1] = legitimize_tls_address (operands[1], tmp);
5694 else if (flag_pic
5695 && (CONSTANT_P (operands[1])
5696 || symbol_mentioned_p (operands[1])
5697 || label_mentioned_p (operands[1])))
5698 operands[1] =
5699 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
5700 }
5701 "
5702 )
5703
5704 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5705 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5706 ;; so this does not matter.
5707 (define_insn "*arm_movt"
5708 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
5709 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
5710 (match_operand:SI 2 "general_operand" "i,i")))]
5711 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
5712 "@
5713 movt%?\t%0, #:upper16:%c2
5714 movt\t%0, #:upper16:%c2"
5715 [(set_attr "arch" "32,v8mb")
5716 (set_attr "predicable" "yes")
5717 (set_attr "length" "4")
5718 (set_attr "type" "alu_sreg")]
5719 )
5720
5721 (define_insn "*arm_movsi_insn"
5722 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5723 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5724 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
5725 && ( register_operand (operands[0], SImode)
5726 || register_operand (operands[1], SImode))"
5727 "@
5728 mov%?\\t%0, %1
5729 mov%?\\t%0, %1
5730 mvn%?\\t%0, #%B1
5731 movw%?\\t%0, %1
5732 ldr%?\\t%0, %1
5733 str%?\\t%1, %0"
5734 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
5735 (set_attr "predicable" "yes")
5736 (set_attr "arch" "*,*,*,v6t2,*,*")
5737 (set_attr "pool_range" "*,*,*,*,4096,*")
5738 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5739 )
5740
5741 (define_split
5742 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5743 (match_operand:SI 1 "const_int_operand" ""))]
5744 "(TARGET_32BIT || TARGET_HAVE_MOVT)
5745 && (!(const_ok_for_arm (INTVAL (operands[1]))
5746 || const_ok_for_arm (~INTVAL (operands[1]))))"
5747 [(clobber (const_int 0))]
5748 "
5749 arm_split_constant (SET, SImode, NULL_RTX,
5750 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5751 DONE;
5752 "
5753 )
5754
5755 ;; A normal way to do (symbol + offset) requires at least three instructions
5756 ;; (depending on how big the offset is), as below:
5757 ;;   movw r0, #:lower16:g
5758 ;;   movt r0, #:upper16:g
5759 ;;   adds r0, #4
5760 ;;
5761 ;; A better way would be:
5762 ;;   movw r0, #:lower16:g+4
5763 ;;   movt r0, #:upper16:g+4
5764 ;;
5765 ;; The limitation of this approach is that the offset must fit in a 16-bit
5766 ;; signed value, because the current assembler only supports REL-type
5767 ;; relocations for this case.  If the more powerful RELA type is supported
5768 ;; in future, we should update this pattern to use the better sequence.
5769 (define_split
5770 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5771 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
5772 (match_operand:SI 2 "const_int_operand" ""))))]
5773 "TARGET_THUMB
5774 && TARGET_HAVE_MOVT
5775 && arm_disable_literal_pool
5776 && reload_completed
5777 && GET_CODE (operands[1]) == SYMBOL_REF"
5778 [(clobber (const_int 0))]
5779 "
5780 int offset = INTVAL (operands[2]);
5781
5782 if (offset < -0x8000 || offset > 0x7fff)
5783 {
5784 arm_emit_movpair (operands[0], operands[1]);
5785 emit_insn (gen_rtx_SET (operands[0],
5786 gen_rtx_PLUS (SImode, operands[0], operands[2])));
5787 }
5788 else
5789 {
5790 rtx op = gen_rtx_CONST (SImode,
5791 gen_rtx_PLUS (SImode, operands[1], operands[2]));
5792 arm_emit_movpair (operands[0], op);
5793 }
5794 "
5795 )
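
;; Illustrative example (hypothetical source, not GCC output): for
;;   extern int g[16];
;;   int *p = &g[1];
;; the address needed is g+4; since the offset fits in 16 bits, the split
;; above folds it into the relocations as in the "better way" shown above.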
5796
5797 ;; Split symbol_refs at a later stage (after cprop), instead of generating the
5798 ;; movt/movw pair directly at expand time.  Otherwise the corresponding high_sum
5799 ;; and lo_sum would be merged back into a memory load at cprop.  However, if
5800 ;; the default is to prefer a movt/movw pair over a load from the constant
5801 ;; pool, performance is better.
5802 (define_split
5803 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5804 (match_operand:SI 1 "general_operand" ""))]
5805 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
5806 && !target_word_relocations
5807 && !arm_tls_referenced_p (operands[1])"
5808 [(clobber (const_int 0))]
5809 {
5810 arm_emit_movpair (operands[0], operands[1]);
5811 DONE;
5812 })
5813
5814 ;; When generating PIC code, we need to load the symbol offset into a register.
5815 ;; So that the optimizer does not confuse this with a normal symbol load
5816 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5817 ;; since that is the only type of relocation we can use.
5818
5819 ;; Wrap calculation of the whole PIC address in a single pattern for the
5820 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5821 ;; a PIC address involves two loads from memory, so we want to CSE it
5822 ;; as often as possible.
5823 ;; This pattern will be split into one of the pic_load_addr_* patterns
5824 ;; and a move after GCSE optimizations.
5825 ;;
5826 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
5827 (define_expand "calculate_pic_address"
5828 [(set (match_operand:SI 0 "register_operand")
5829 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
5830 (unspec:SI [(match_operand:SI 2 "" "")]
5831 UNSPEC_PIC_SYM))))]
5832 "flag_pic"
5833 )
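
;; Illustrative sketch (conceptual only, not GCC source): the address computed
;; by the pattern above is roughly
;;   sym_addr = *(pic_reg + *pool_entry_holding_got_offset_of_sym);
;; i.e. one load from the constant pool and one load from the GOT, which is
;; why CSEing the whole calculation is worthwhile.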
5834
5835 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5836 (define_split
5837 [(set (match_operand:SI 0 "register_operand" "")
5838 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5839 (unspec:SI [(match_operand:SI 2 "" "")]
5840 UNSPEC_PIC_SYM))))]
5841 "flag_pic"
5842 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5843 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5844 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5845 )
5846
5847 ;; operand1 is the memory address to go into
5848 ;; pic_load_addr_32bit.
5849 ;; operand2 is the PIC label to be emitted
5850 ;; from pic_add_dot_plus_eight.
5851 ;; We do this to allow hoisting of the entire insn.
5852 (define_insn_and_split "pic_load_addr_unified"
5853 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
5854 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
5855 (match_operand:SI 2 "" "")]
5856 UNSPEC_PIC_UNIFIED))]
5857 "flag_pic"
5858 "#"
5859 "&& reload_completed"
5860 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
5861 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
5862 (match_dup 2)] UNSPEC_PIC_BASE))]
5863 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
5864 [(set_attr "type" "load_4,load_4,load_4")
5865 (set_attr "pool_range" "4096,4094,1022")
5866 (set_attr "neg_pool_range" "4084,0,0")
5867 (set_attr "arch" "a,t2,t1")
5868 (set_attr "length" "8,6,4")]
5869 )
5870
5871 ;; The rather odd constraints on the following are to force reload to leave
5872 ;; the insn alone, and to force the minipool generation pass to then move
5873 ;; the GOT symbol to memory.
5874
5875 (define_insn "pic_load_addr_32bit"
5876 [(set (match_operand:SI 0 "s_register_operand" "=r")
5877 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5878 "TARGET_32BIT && flag_pic"
5879 "ldr%?\\t%0, %1"
5880 [(set_attr "type" "load_4")
5881 (set (attr "pool_range")
5882 (if_then_else (eq_attr "is_thumb" "no")
5883 (const_int 4096)
5884 (const_int 4094)))
5885 (set (attr "neg_pool_range")
5886 (if_then_else (eq_attr "is_thumb" "no")
5887 (const_int 4084)
5888 (const_int 0)))]
5889 )
5890
5891 (define_insn "pic_load_addr_thumb1"
5892 [(set (match_operand:SI 0 "s_register_operand" "=l")
5893 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5894 "TARGET_THUMB1 && flag_pic"
5895 "ldr\\t%0, %1"
5896 [(set_attr "type" "load_4")
5897 (set (attr "pool_range") (const_int 1018))]
5898 )
5899
5900 (define_insn "pic_add_dot_plus_four"
5901 [(set (match_operand:SI 0 "register_operand" "=r")
5902 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5903 (const_int 4)
5904 (match_operand 2 "" "")]
5905 UNSPEC_PIC_BASE))]
5906 "TARGET_THUMB"
5907 "*
5908 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5909 INTVAL (operands[2]));
5910 return \"add\\t%0, %|pc\";
5911 "
5912 [(set_attr "length" "2")
5913 (set_attr "type" "alu_sreg")]
5914 )
5915
5916 (define_insn "pic_add_dot_plus_eight"
5917 [(set (match_operand:SI 0 "register_operand" "=r")
5918 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5919 (const_int 8)
5920 (match_operand 2 "" "")]
5921 UNSPEC_PIC_BASE))]
5922 "TARGET_ARM"
5923 "*
5924 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5925 INTVAL (operands[2]));
5926 return \"add%?\\t%0, %|pc, %1\";
5927 "
5928 [(set_attr "predicable" "yes")
5929 (set_attr "type" "alu_sreg")]
5930 )
5931
5932 (define_insn "tls_load_dot_plus_eight"
5933 [(set (match_operand:SI 0 "register_operand" "=r")
5934 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5935 (const_int 8)
5936 (match_operand 2 "" "")]
5937 UNSPEC_PIC_BASE)))]
5938 "TARGET_ARM"
5939 "*
5940 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5941 INTVAL (operands[2]));
5942 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5943 "
5944 [(set_attr "predicable" "yes")
5945 (set_attr "type" "load_4")]
5946 )
5947
5948 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5949 ;; followed by a load. These sequences can be crunched down to
5950 ;; tls_load_dot_plus_eight by a peephole.
5951
5952 (define_peephole2
5953 [(set (match_operand:SI 0 "register_operand" "")
5954 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5955 (const_int 8)
5956 (match_operand 1 "" "")]
5957 UNSPEC_PIC_BASE))
5958 (set (match_operand:SI 2 "arm_general_register_operand" "")
5959 (mem:SI (match_dup 0)))]
5960 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5961 [(set (match_dup 2)
5962 (mem:SI (unspec:SI [(match_dup 3)
5963 (const_int 8)
5964 (match_dup 1)]
5965 UNSPEC_PIC_BASE)))]
5966 ""
5967 )
5968
5969 (define_insn "pic_offset_arm"
5970 [(set (match_operand:SI 0 "register_operand" "=r")
5971 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5972 (unspec:SI [(match_operand:SI 2 "" "X")]
5973 UNSPEC_PIC_OFFSET))))]
5974 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5975 "ldr%?\\t%0, [%1,%2]"
5976 [(set_attr "type" "load_4")]
5977 )
5978
5979 (define_expand "builtin_setjmp_receiver"
5980 [(label_ref (match_operand 0 "" ""))]
5981 "flag_pic"
5982 "
5983 {
5984 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5985 register. */
5986 if (arm_pic_register != INVALID_REGNUM)
5987 arm_load_pic_register (1UL << 3, NULL_RTX);
5988 DONE;
5989 }")
5990
5991 ;; If copying one reg to another we can set the condition codes according to
5992 ;; its value.  Such a move is common after a return from a subroutine when the
5993 ;; result is being tested against zero.
5994
5995 (define_insn "*movsi_compare0"
5996 [(set (reg:CC CC_REGNUM)
5997 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5998 (const_int 0)))
5999 (set (match_operand:SI 0 "s_register_operand" "=r,r")
6000 (match_dup 1))]
6001 "TARGET_32BIT"
6002 "@
6003 cmp%?\\t%0, #0
6004 subs%?\\t%0, %1, #0"
6005 [(set_attr "conds" "set")
6006 (set_attr "type" "alus_imm,alus_imm")]
6007 )
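
;; Illustrative example (hypothetical source and registers): code such as
;;   int r = f ();
;;   if (r == 0) ...
;; can match the pattern above, e.g. as "subs r0, r1, #0", so the copy and the
;; comparison against zero become a single instruction.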
6008
6009 ;; Subroutine to store a half word from a register into memory.
6010 ;; Operand 0 is the source register (HImode)
6011 ;; Operand 1 is the destination address in a register (SImode)
6012
6013 ;; In both this routine and the next, we must be careful not to spill
6014 ;; a memory address of reg+large_const into a separate PLUS insn, since this
6015 ;; can generate unrecognizable rtl.
6016
6017 (define_expand "storehi"
6018 [;; store the low byte
6019 (set (match_operand 1 "" "") (match_dup 3))
6020 ;; extract the high byte
6021 (set (match_dup 2)
6022 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6023 ;; store the high byte
6024 (set (match_dup 4) (match_dup 5))]
6025 "TARGET_ARM"
6026 "
6027 {
6028 rtx op1 = operands[1];
6029 rtx addr = XEXP (op1, 0);
6030 enum rtx_code code = GET_CODE (addr);
6031
6032 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6033 || code == MINUS)
6034 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
6035
6036 operands[4] = adjust_address (op1, QImode, 1);
6037 operands[1] = adjust_address (operands[1], QImode, 0);
6038 operands[3] = gen_lowpart (QImode, operands[0]);
6039 operands[0] = gen_lowpart (SImode, operands[0]);
6040 operands[2] = gen_reg_rtx (SImode);
6041 operands[5] = gen_lowpart (QImode, operands[2]);
6042 }"
6043 )
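
;; Illustrative sketch (not GCC source; helper name is hypothetical) of the
;; little-endian expansion above:
;;   void storehi_le (unsigned char *p, unsigned short v)
;;   {
;;     p[0] = v & 0xff;   /* low byte  */
;;     p[1] = v >> 8;     /* high byte */
;;   }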
6044
6045 (define_expand "storehi_bigend"
6046 [(set (match_dup 4) (match_dup 3))
6047 (set (match_dup 2)
6048 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6049 (set (match_operand 1 "" "") (match_dup 5))]
6050 "TARGET_ARM"
6051 "
6052 {
6053 rtx op1 = operands[1];
6054 rtx addr = XEXP (op1, 0);
6055 enum rtx_code code = GET_CODE (addr);
6056
6057 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6058 || code == MINUS)
6059 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
6060
6061 operands[4] = adjust_address (op1, QImode, 1);
6062 operands[1] = adjust_address (operands[1], QImode, 0);
6063 operands[3] = gen_lowpart (QImode, operands[0]);
6064 operands[0] = gen_lowpart (SImode, operands[0]);
6065 operands[2] = gen_reg_rtx (SImode);
6066 operands[5] = gen_lowpart (QImode, operands[2]);
6067 }"
6068 )
6069
6070 ;; Subroutine to store a half word integer constant into memory.
6071 (define_expand "storeinthi"
6072 [(set (match_operand 0 "" "")
6073 (match_operand 1 "" ""))
6074 (set (match_dup 3) (match_dup 2))]
6075 "TARGET_ARM"
6076 "
6077 {
6078 HOST_WIDE_INT value = INTVAL (operands[1]);
6079 rtx addr = XEXP (operands[0], 0);
6080 rtx op0 = operands[0];
6081 enum rtx_code code = GET_CODE (addr);
6082
6083 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6084 || code == MINUS)
6085 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
6086
6087 operands[1] = gen_reg_rtx (SImode);
6088 if (BYTES_BIG_ENDIAN)
6089 {
6090 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
6091 if ((value & 255) == ((value >> 8) & 255))
6092 operands[2] = operands[1];
6093 else
6094 {
6095 operands[2] = gen_reg_rtx (SImode);
6096 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
6097 }
6098 }
6099 else
6100 {
6101 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
6102 if ((value & 255) == ((value >> 8) & 255))
6103 operands[2] = operands[1];
6104 else
6105 {
6106 operands[2] = gen_reg_rtx (SImode);
6107 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
6108 }
6109 }
6110
6111 operands[3] = adjust_address (op0, QImode, 1);
6112 operands[0] = adjust_address (operands[0], QImode, 0);
6113 operands[2] = gen_lowpart (QImode, operands[2]);
6114 operands[1] = gen_lowpart (QImode, operands[1]);
6115 }"
6116 )
6117
6118 (define_expand "storehi_single_op"
6119 [(set (match_operand:HI 0 "memory_operand")
6120 (match_operand:HI 1 "general_operand"))]
6121 "TARGET_32BIT && arm_arch4"
6122 "
6123 if (!s_register_operand (operands[1], HImode))
6124 operands[1] = copy_to_mode_reg (HImode, operands[1]);
6125 "
6126 )
6127
6128 (define_expand "movhi"
6129 [(set (match_operand:HI 0 "general_operand")
6130 (match_operand:HI 1 "general_operand"))]
6131 "TARGET_EITHER"
6132 "
6133 gcc_checking_assert (aligned_operand (operands[0], HImode));
6134 gcc_checking_assert (aligned_operand (operands[1], HImode));
6135 if (TARGET_ARM)
6136 {
6137 if (can_create_pseudo_p ())
6138 {
6139 if (MEM_P (operands[0]))
6140 {
6141 if (arm_arch4)
6142 {
6143 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
6144 DONE;
6145 }
6146 if (CONST_INT_P (operands[1]))
6147 emit_insn (gen_storeinthi (operands[0], operands[1]));
6148 else
6149 {
6150 if (MEM_P (operands[1]))
6151 operands[1] = force_reg (HImode, operands[1]);
6152 if (BYTES_BIG_ENDIAN)
6153 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
6154 else
6155 emit_insn (gen_storehi (operands[1], operands[0]));
6156 }
6157 DONE;
6158 }
6159 /* Sign extend a constant, and keep it in an SImode reg. */
6160 else if (CONST_INT_P (operands[1]))
6161 {
6162 rtx reg = gen_reg_rtx (SImode);
6163 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6164
6165 /* If the constant is already valid, leave it alone. */
6166 if (!const_ok_for_arm (val))
6167 {
6168 /* If setting all the top bits will make the constant
6169 loadable in a single instruction, then set them.
6170 Otherwise, sign extend the number. */
6171
6172 if (const_ok_for_arm (~(val | ~0xffff)))
6173 val |= ~0xffff;
6174 else if (val & 0x8000)
6175 val |= ~0xffff;
6176 }
6177
6178 emit_insn (gen_movsi (reg, GEN_INT (val)));
6179 operands[1] = gen_lowpart (HImode, reg);
6180 }
6181 else if (arm_arch4 && optimize && can_create_pseudo_p ()
6182 && MEM_P (operands[1]))
6183 {
6184 rtx reg = gen_reg_rtx (SImode);
6185
6186 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6187 operands[1] = gen_lowpart (HImode, reg);
6188 }
6189 else if (!arm_arch4)
6190 {
6191 if (MEM_P (operands[1]))
6192 {
6193 rtx base;
6194 rtx offset = const0_rtx;
6195 rtx reg = gen_reg_rtx (SImode);
6196
6197 if ((REG_P (base = XEXP (operands[1], 0))
6198 || (GET_CODE (base) == PLUS
6199 && (CONST_INT_P (offset = XEXP (base, 1)))
6200 && ((INTVAL(offset) & 1) != 1)
6201 && REG_P (base = XEXP (base, 0))))
6202 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
6203 {
6204 rtx new_rtx;
6205
6206 new_rtx = widen_memory_access (operands[1], SImode,
6207 ((INTVAL (offset) & ~3)
6208 - INTVAL (offset)));
6209 emit_insn (gen_movsi (reg, new_rtx));
6210 if (((INTVAL (offset) & 2) != 0)
6211 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
6212 {
6213 rtx reg2 = gen_reg_rtx (SImode);
6214
6215 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
6216 reg = reg2;
6217 }
6218 }
6219 else
6220 emit_insn (gen_movhi_bytes (reg, operands[1]));
6221
6222 operands[1] = gen_lowpart (HImode, reg);
6223 }
6224 }
6225 }
6226 /* Handle loading a large integer during reload. */
6227 else if (CONST_INT_P (operands[1])
6228 && !const_ok_for_arm (INTVAL (operands[1]))
6229 && !const_ok_for_arm (~INTVAL (operands[1])))
6230 {
6231 /* Writing a constant to memory needs a scratch, which should
6232 be handled with SECONDARY_RELOADs. */
6233 gcc_assert (REG_P (operands[0]));
6234
6235 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6236 emit_insn (gen_movsi (operands[0], operands[1]));
6237 DONE;
6238 }
6239 }
6240 else if (TARGET_THUMB2)
6241 {
6242 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
6243 if (can_create_pseudo_p ())
6244 {
6245 if (!REG_P (operands[0]))
6246 operands[1] = force_reg (HImode, operands[1]);
6247 /* Zero extend a constant, and keep it in an SImode reg. */
6248 else if (CONST_INT_P (operands[1]))
6249 {
6250 rtx reg = gen_reg_rtx (SImode);
6251 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6252
6253 emit_insn (gen_movsi (reg, GEN_INT (val)));
6254 operands[1] = gen_lowpart (HImode, reg);
6255 }
6256 }
6257 }
6258 else /* TARGET_THUMB1 */
6259 {
6260 if (can_create_pseudo_p ())
6261 {
6262 if (CONST_INT_P (operands[1]))
6263 {
6264 rtx reg = gen_reg_rtx (SImode);
6265
6266 emit_insn (gen_movsi (reg, operands[1]));
6267 operands[1] = gen_lowpart (HImode, reg);
6268 }
6269
6270 /* ??? We shouldn't really get invalid addresses here, but this can
6271          happen if we are passed an SP (never OK for HImode/QImode) or
6272 virtual register (also rejected as illegitimate for HImode/QImode)
6273 relative address. */
6274 /* ??? This should perhaps be fixed elsewhere, for instance, in
6275 fixup_stack_1, by checking for other kinds of invalid addresses,
6276 e.g. a bare reference to a virtual register. This may confuse the
6277 alpha though, which must handle this case differently. */
6278 if (MEM_P (operands[0])
6279 && !memory_address_p (GET_MODE (operands[0]),
6280 XEXP (operands[0], 0)))
6281 operands[0]
6282 = replace_equiv_address (operands[0],
6283 copy_to_reg (XEXP (operands[0], 0)));
6284
6285 if (MEM_P (operands[1])
6286 && !memory_address_p (GET_MODE (operands[1]),
6287 XEXP (operands[1], 0)))
6288 operands[1]
6289 = replace_equiv_address (operands[1],
6290 copy_to_reg (XEXP (operands[1], 0)));
6291
6292 if (MEM_P (operands[1]) && optimize > 0)
6293 {
6294 rtx reg = gen_reg_rtx (SImode);
6295
6296 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6297 operands[1] = gen_lowpart (HImode, reg);
6298 }
6299
6300 if (MEM_P (operands[0]))
6301 operands[1] = force_reg (HImode, operands[1]);
6302 }
6303 else if (CONST_INT_P (operands[1])
6304 && !satisfies_constraint_I (operands[1]))
6305 {
6306 /* Handle loading a large integer during reload. */
6307
6308 /* Writing a constant to memory needs a scratch, which should
6309 be handled with SECONDARY_RELOADs. */
6310 gcc_assert (REG_P (operands[0]));
6311
6312 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6313 emit_insn (gen_movsi (operands[0], operands[1]));
6314 DONE;
6315 }
6316 }
6317 "
6318 )
6319
6320 (define_expand "movhi_bytes"
6321 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
6322 (set (match_dup 3)
6323 (zero_extend:SI (match_dup 6)))
6324 (set (match_operand:SI 0 "" "")
6325 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
6326 "TARGET_ARM"
6327 "
6328 {
6329 rtx mem1, mem2;
6330 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
6331
6332 mem1 = change_address (operands[1], QImode, addr);
6333 mem2 = change_address (operands[1], QImode,
6334 plus_constant (Pmode, addr, 1));
6335 operands[0] = gen_lowpart (SImode, operands[0]);
6336 operands[1] = mem1;
6337 operands[2] = gen_reg_rtx (SImode);
6338 operands[3] = gen_reg_rtx (SImode);
6339 operands[6] = mem2;
6340
6341 if (BYTES_BIG_ENDIAN)
6342 {
6343 operands[4] = operands[2];
6344 operands[5] = operands[3];
6345 }
6346 else
6347 {
6348 operands[4] = operands[3];
6349 operands[5] = operands[2];
6350 }
6351 }"
6352 )
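
;; Illustrative sketch (not GCC source; helper name is hypothetical) of the
;; little-endian case of movhi_bytes above:
;;   unsigned short loadhi_le (const unsigned char *p)
;;   {
;;     return (unsigned short) (p[0] | (p[1] << 8));
;;   }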
6353
6354 (define_expand "movhi_bigend"
6355 [(set (match_dup 2)
6356 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
6357 (const_int 16)))
6358 (set (match_dup 3)
6359 (ashiftrt:SI (match_dup 2) (const_int 16)))
6360 (set (match_operand:HI 0 "s_register_operand")
6361 (match_dup 4))]
6362 "TARGET_ARM"
6363 "
6364 operands[2] = gen_reg_rtx (SImode);
6365 operands[3] = gen_reg_rtx (SImode);
6366 operands[4] = gen_lowpart (HImode, operands[3]);
6367 "
6368 )
6369
6370 ;; Pattern to recognize the insn generated by the default case above
6371 (define_insn "*movhi_insn_arch4"
6372 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
6373 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))]
6374 "TARGET_ARM
6375 && arm_arch4 && !TARGET_HARD_FLOAT
6376 && (register_operand (operands[0], HImode)
6377 || register_operand (operands[1], HImode))"
6378 "@
6379 mov%?\\t%0, %1\\t%@ movhi
6380 mvn%?\\t%0, #%B1\\t%@ movhi
6381 movw%?\\t%0, %L1\\t%@ movhi
6382 strh%?\\t%1, %0\\t%@ movhi
6383 ldrh%?\\t%0, %1\\t%@ movhi"
6384 [(set_attr "predicable" "yes")
6385 (set_attr "pool_range" "*,*,*,*,256")
6386 (set_attr "neg_pool_range" "*,*,*,*,244")
6387 (set_attr "arch" "*,*,v6t2,*,*")
6388 (set_attr_alternative "type"
6389 [(if_then_else (match_operand 1 "const_int_operand" "")
6390 (const_string "mov_imm" )
6391 (const_string "mov_reg"))
6392 (const_string "mvn_imm")
6393 (const_string "mov_imm")
6394 (const_string "store_4")
6395 (const_string "load_4")])]
6396 )
6397
6398 (define_insn "*movhi_bytes"
6399 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
6400 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))]
6401 "TARGET_ARM && !TARGET_HARD_FLOAT"
6402 "@
6403 mov%?\\t%0, %1\\t%@ movhi
6404 mov%?\\t%0, %1\\t%@ movhi
6405 mvn%?\\t%0, #%B1\\t%@ movhi"
6406 [(set_attr "predicable" "yes")
6407 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
6408 )
6409
6410 ;; We use a DImode scratch because we may occasionally need an additional
6411 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
6412 ;; to take any notice of the "o" constraints on the reload_memory_operand operand.
6413 ;; The reload_in<m> and reload_out<m> patterns require special constraints
6414 ;; to be correctly handled in default_secondary_reload function.
6415 (define_expand "reload_outhi"
6416 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
6417 (match_operand:HI 1 "s_register_operand" "r")
6418 (match_operand:DI 2 "s_register_operand" "=&l")])]
6419 "TARGET_EITHER"
6420 "if (TARGET_ARM)
6421 arm_reload_out_hi (operands);
6422 else
6423 thumb_reload_out_hi (operands);
6424 DONE;
6425 "
6426 )
6427
6428 (define_expand "reload_inhi"
6429 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
6430 (match_operand:HI 1 "arm_reload_memory_operand" "o")
6431 (match_operand:DI 2 "s_register_operand" "=&r")])]
6432 "TARGET_EITHER"
6433 "
6434 if (TARGET_ARM)
6435 arm_reload_in_hi (operands);
6436 else
6437 thumb_reload_out_hi (operands);
6438 DONE;
6439 ")
6440
6441 (define_expand "movqi"
6442 [(set (match_operand:QI 0 "general_operand")
6443 (match_operand:QI 1 "general_operand"))]
6444 "TARGET_EITHER"
6445 "
6446   /* Everything except mem = const or mem = mem can be done easily.  */
6447
6448 if (can_create_pseudo_p ())
6449 {
6450 if (CONST_INT_P (operands[1]))
6451 {
6452 rtx reg = gen_reg_rtx (SImode);
6453
6454 /* For thumb we want an unsigned immediate, then we are more likely
6455 to be able to use a movs insn. */
6456 if (TARGET_THUMB)
6457 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
6458
6459 emit_insn (gen_movsi (reg, operands[1]));
6460 operands[1] = gen_lowpart (QImode, reg);
6461 }
6462
6463 if (TARGET_THUMB)
6464 {
6465 /* ??? We shouldn't really get invalid addresses here, but this can
6466          happen if we are passed an SP (never OK for HImode/QImode) or
6467 virtual register (also rejected as illegitimate for HImode/QImode)
6468 relative address. */
6469 /* ??? This should perhaps be fixed elsewhere, for instance, in
6470 fixup_stack_1, by checking for other kinds of invalid addresses,
6471 e.g. a bare reference to a virtual register. This may confuse the
6472 alpha though, which must handle this case differently. */
6473 if (MEM_P (operands[0])
6474 && !memory_address_p (GET_MODE (operands[0]),
6475 XEXP (operands[0], 0)))
6476 operands[0]
6477 = replace_equiv_address (operands[0],
6478 copy_to_reg (XEXP (operands[0], 0)));
6479 if (MEM_P (operands[1])
6480 && !memory_address_p (GET_MODE (operands[1]),
6481 XEXP (operands[1], 0)))
6482 operands[1]
6483 = replace_equiv_address (operands[1],
6484 copy_to_reg (XEXP (operands[1], 0)));
6485 }
6486
6487 if (MEM_P (operands[1]) && optimize > 0)
6488 {
6489 rtx reg = gen_reg_rtx (SImode);
6490
6491 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
6492 operands[1] = gen_lowpart (QImode, reg);
6493 }
6494
6495 if (MEM_P (operands[0]))
6496 operands[1] = force_reg (QImode, operands[1]);
6497 }
6498 else if (TARGET_THUMB
6499 && CONST_INT_P (operands[1])
6500 && !satisfies_constraint_I (operands[1]))
6501 {
6502 /* Handle loading a large integer during reload. */
6503
6504 /* Writing a constant to memory needs a scratch, which should
6505 be handled with SECONDARY_RELOADs. */
6506 gcc_assert (REG_P (operands[0]));
6507
6508 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6509 emit_insn (gen_movsi (operands[0], operands[1]));
6510 DONE;
6511 }
6512 "
6513 )
6514
6515 (define_insn "*arm_movqi_insn"
6516 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
6517 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
6518 "TARGET_32BIT
6519 && ( register_operand (operands[0], QImode)
6520 || register_operand (operands[1], QImode))"
6521 "@
6522 mov%?\\t%0, %1
6523 mov%?\\t%0, %1
6524 mov%?\\t%0, %1
6525 mov%?\\t%0, %1
6526 mvn%?\\t%0, #%B1
6527 ldrb%?\\t%0, %1
6528 strb%?\\t%1, %0
6529 ldrb%?\\t%0, %1
6530 strb%?\\t%1, %0"
6531 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
6532 (set_attr "predicable" "yes")
6533 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
6534 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
6535 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
6536 )
6537
6538 ;; HFmode moves
6539 (define_expand "movhf"
6540 [(set (match_operand:HF 0 "general_operand")
6541 (match_operand:HF 1 "general_operand"))]
6542 "TARGET_EITHER"
6543 "
6544 gcc_checking_assert (aligned_operand (operands[0], HFmode));
6545 gcc_checking_assert (aligned_operand (operands[1], HFmode));
6546 if (TARGET_32BIT)
6547 {
6548 if (MEM_P (operands[0]))
6549 operands[1] = force_reg (HFmode, operands[1]);
6550 }
6551 else /* TARGET_THUMB1 */
6552 {
6553 if (can_create_pseudo_p ())
6554 {
6555 if (!REG_P (operands[0]))
6556 operands[1] = force_reg (HFmode, operands[1]);
6557 }
6558 }
6559 "
6560 )
6561
6562 (define_insn "*arm32_movhf"
6563 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
6564 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
6565 "TARGET_32BIT && !TARGET_HARD_FLOAT
6566 && ( s_register_operand (operands[0], HFmode)
6567 || s_register_operand (operands[1], HFmode))"
6568 "*
6569 switch (which_alternative)
6570 {
6571 case 0: /* ARM register from memory */
6572 return \"ldrh%?\\t%0, %1\\t%@ __fp16\";
6573 case 1: /* memory from ARM register */
6574 return \"strh%?\\t%1, %0\\t%@ __fp16\";
6575 case 2: /* ARM register from ARM register */
6576 return \"mov%?\\t%0, %1\\t%@ __fp16\";
6577 case 3: /* ARM register from constant */
6578 {
6579 long bits;
6580 rtx ops[4];
6581
6582 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
6583 HFmode);
6584 ops[0] = operands[0];
6585 ops[1] = GEN_INT (bits);
6586 ops[2] = GEN_INT (bits & 0xff00);
6587 ops[3] = GEN_INT (bits & 0x00ff);
6588
6589 if (arm_arch_thumb2)
6590 output_asm_insn (\"movw%?\\t%0, %1\", ops);
6591 else
6592 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6593 return \"\";
6594 }
6595 default:
6596 gcc_unreachable ();
6597 }
6598 "
6599 [(set_attr "conds" "unconditional")
6600 (set_attr "type" "load_4,store_4,mov_reg,multiple")
6601 (set_attr "length" "4,4,4,8")
6602 (set_attr "predicable" "yes")]
6603 )
6604
6605 (define_expand "movsf"
6606 [(set (match_operand:SF 0 "general_operand")
6607 (match_operand:SF 1 "general_operand"))]
6608 "TARGET_EITHER"
6609 "
6610 gcc_checking_assert (aligned_operand (operands[0], SFmode));
6611 gcc_checking_assert (aligned_operand (operands[1], SFmode));
6612 if (TARGET_32BIT)
6613 {
6614 if (MEM_P (operands[0]))
6615 operands[1] = force_reg (SFmode, operands[1]);
6616 }
6617 else /* TARGET_THUMB1 */
6618 {
6619 if (can_create_pseudo_p ())
6620 {
6621 if (!REG_P (operands[0]))
6622 operands[1] = force_reg (SFmode, operands[1]);
6623 }
6624 }
6625
6626 /* Cannot load it directly, generate a load with clobber so that it can be
6627 loaded via GPR with MOV / MOVT. */
6628 if (arm_disable_literal_pool
6629 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6630 && CONST_DOUBLE_P (operands[1])
6631 && TARGET_HARD_FLOAT
6632 && !vfp3_const_double_rtx (operands[1]))
6633 {
6634 rtx clobreg = gen_reg_rtx (SFmode);
6635 emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
6636 clobreg));
6637 DONE;
6638 }
6639 "
6640 )
6641
6642 ;; When moving a floating-point constant into a core register, transform the
6643 ;; move into an SImode operation.
6644 (define_split
6645 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6646 (match_operand:SF 1 "immediate_operand" ""))]
6647 "TARGET_EITHER
6648 && reload_completed
6649 && CONST_DOUBLE_P (operands[1])"
6650 [(set (match_dup 2) (match_dup 3))]
6651 "
6652 operands[2] = gen_lowpart (SImode, operands[0]);
6653 operands[3] = gen_lowpart (SImode, operands[1]);
6654 if (operands[2] == 0 || operands[3] == 0)
6655 FAIL;
6656 "
6657 )
6658
6659 (define_insn "*arm_movsf_soft_insn"
6660 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6661 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6662 "TARGET_32BIT
6663 && TARGET_SOFT_FLOAT
6664 && (!MEM_P (operands[0])
6665 || register_operand (operands[1], SFmode))"
6666 {
6667 switch (which_alternative)
6668 {
6669 case 0: return \"mov%?\\t%0, %1\";
6670 case 1:
6671 /* Cannot load it directly, split to load it via MOV / MOVT. */
6672 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6673 return \"#\";
6674 return \"ldr%?\\t%0, %1\\t%@ float\";
6675 case 2: return \"str%?\\t%1, %0\\t%@ float\";
6676 default: gcc_unreachable ();
6677 }
6678 }
6679 [(set_attr "predicable" "yes")
6680 (set_attr "type" "mov_reg,load_4,store_4")
6681 (set_attr "arm_pool_range" "*,4096,*")
6682 (set_attr "thumb2_pool_range" "*,4094,*")
6683 (set_attr "arm_neg_pool_range" "*,4084,*")
6684 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6685 )
6686
6687 ;; Splitter for the above.
6688 (define_split
6689 [(set (match_operand:SF 0 "s_register_operand")
6690 (match_operand:SF 1 "const_double_operand"))]
6691 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6692 [(const_int 0)]
6693 {
6694 long buf;
6695 real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
6696 rtx cst = gen_int_mode (buf, SImode);
6697 emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
6698 DONE;
6699 }
6700 )
6701
6702 (define_expand "movdf"
6703 [(set (match_operand:DF 0 "general_operand")
6704 (match_operand:DF 1 "general_operand"))]
6705 "TARGET_EITHER"
6706 "
6707 gcc_checking_assert (aligned_operand (operands[0], DFmode));
6708 gcc_checking_assert (aligned_operand (operands[1], DFmode));
6709 if (TARGET_32BIT)
6710 {
6711 if (MEM_P (operands[0]))
6712 operands[1] = force_reg (DFmode, operands[1]);
6713 }
6714 else /* TARGET_THUMB */
6715 {
6716 if (can_create_pseudo_p ())
6717 {
6718 if (!REG_P (operands[0]))
6719 operands[1] = force_reg (DFmode, operands[1]);
6720 }
6721 }
6722
6723 /* Cannot load it directly, generate a load with clobber so that it can be
6724 loaded via GPR with MOV / MOVT. */
6725 if (arm_disable_literal_pool
6726 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6727 && CONSTANT_P (operands[1])
6728 && TARGET_HARD_FLOAT
6729 && !arm_const_double_rtx (operands[1])
6730 && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
6731 {
6732 rtx clobreg = gen_reg_rtx (DFmode);
6733 emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
6734 clobreg));
6735 DONE;
6736 }
6737 "
6738 )
6739
6740 ;; Reloading a DFmode value stored in integer regs to memory can require a
6741 ;; scratch reg.
6742 ;; Another reload_out<m> pattern that requires special constraints.
6743 (define_expand "reload_outdf"
6744 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6745 (match_operand:DF 1 "s_register_operand" "r")
6746 (match_operand:SI 2 "s_register_operand" "=&r")]
6747 "TARGET_THUMB2"
6748 "
6749 {
6750 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6751
6752 if (code == REG)
6753 operands[2] = XEXP (operands[0], 0);
6754 else if (code == POST_INC || code == PRE_DEC)
6755 {
6756 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6757 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6758 emit_insn (gen_movdi (operands[0], operands[1]));
6759 DONE;
6760 }
6761 else if (code == PRE_INC)
6762 {
6763 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6764
6765 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6766 operands[2] = reg;
6767 }
6768 else if (code == POST_DEC)
6769 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6770 else
6771 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6772 XEXP (XEXP (operands[0], 0), 1)));
6773
6774 emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
6775 operands[1]));
6776
6777 if (code == POST_DEC)
6778 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6779
6780 DONE;
6781 }"
6782 )
6783
6784 (define_insn "*movdf_soft_insn"
6785 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6786 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6787 "TARGET_32BIT && TARGET_SOFT_FLOAT
6788 && ( register_operand (operands[0], DFmode)
6789 || register_operand (operands[1], DFmode))"
6790 "*
6791 switch (which_alternative)
6792 {
6793 case 0:
6794 case 1:
6795 case 2:
6796 return \"#\";
6797 case 3:
6798 /* Cannot load it directly, split to load it via MOV / MOVT. */
6799 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6800 return \"#\";
6801 /* Fall through. */
6802 default:
6803 return output_move_double (operands, true, NULL);
6804 }
6805 "
6806 [(set_attr "length" "8,12,16,8,8")
6807 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6808 (set_attr "arm_pool_range" "*,*,*,1020,*")
6809 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
6810 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6811 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6812 )
6813
6814 ;; Splitter for the above.
6815 (define_split
6816 [(set (match_operand:DF 0 "s_register_operand")
6817 (match_operand:DF 1 "const_double_operand"))]
6818 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6819 [(const_int 0)]
6820 {
6821 long buf[2];
6822 int order = BYTES_BIG_ENDIAN ? 1 : 0;
6823 real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
6824 unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
6825 ival |= (zext_hwi (buf[1 - order], 32) << 32);
6826 rtx cst = gen_int_mode (ival, DImode);
6827 emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
6828 DONE;
6829 }
6830 )
6831 \f
6832
6833 ;; load- and store-multiple insns
6834 ;; The ARM can load/store any set of registers, provided that they are in
6835 ;; ascending order, but these expanders assume a contiguous set.
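;; As a rough illustration (the register choices are hypothetical), a
;; load_multiple of three registers starting at r4, with the base address in
;; r0, is expected to expand to a single block transfer such as
;;
;;	ldm	r0, {r4, r5, r6}
;;
;; and the matching store_multiple to
;;
;;	stm	r0, {r4, r5, r6}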
6836
6837 (define_expand "load_multiple"
6838 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6839 (match_operand:SI 1 "" ""))
6840 (use (match_operand:SI 2 "" ""))])]
6841 "TARGET_32BIT"
6842 {
6843 HOST_WIDE_INT offset = 0;
6844
6845 /* Support only fixed-point registers. */
6846 if (!CONST_INT_P (operands[2])
6847 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6848 || INTVAL (operands[2]) < 2
6849 || !MEM_P (operands[1])
6850 || !REG_P (operands[0])
6851 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6852 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6853 FAIL;
6854
6855 operands[3]
6856 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6857 INTVAL (operands[2]),
6858 force_reg (SImode, XEXP (operands[1], 0)),
6859 FALSE, operands[1], &offset);
6860 })
6861
6862 (define_expand "store_multiple"
6863 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6864 (match_operand:SI 1 "" ""))
6865 (use (match_operand:SI 2 "" ""))])]
6866 "TARGET_32BIT"
6867 {
6868 HOST_WIDE_INT offset = 0;
6869
6870 /* Support only fixed-point registers. */
6871 if (!CONST_INT_P (operands[2])
6872 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6873 || INTVAL (operands[2]) < 2
6874 || !REG_P (operands[1])
6875 || !MEM_P (operands[0])
6876 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6877 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6878 FAIL;
6879
6880 operands[3]
6881 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6882 INTVAL (operands[2]),
6883 force_reg (SImode, XEXP (operands[0], 0)),
6884 FALSE, operands[0], &offset);
6885 })
6886
6887
6888 (define_expand "setmemsi"
6889 [(match_operand:BLK 0 "general_operand")
6890 (match_operand:SI 1 "const_int_operand")
6891 (match_operand:SI 2 "const_int_operand")
6892 (match_operand:SI 3 "const_int_operand")]
6893 "TARGET_32BIT"
6894 {
6895 if (arm_gen_setmem (operands))
6896 DONE;
6897
6898 FAIL;
6899 })
6900
6901
6902 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6903 ;; We could let this apply for blocks of less than this, but it clobbers so
6904 ;; many registers that there is then probably a better way.
6905
6906 (define_expand "cpymemqi"
6907 [(match_operand:BLK 0 "general_operand")
6908 (match_operand:BLK 1 "general_operand")
6909 (match_operand:SI 2 "const_int_operand")
6910 (match_operand:SI 3 "const_int_operand")]
6911 ""
6912 "
6913 if (TARGET_32BIT)
6914 {
6915 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
6916 && !optimize_function_for_size_p (cfun))
6917 {
6918 if (gen_cpymem_ldrd_strd (operands))
6919 DONE;
6920 FAIL;
6921 }
6922
6923 if (arm_gen_cpymemqi (operands))
6924 DONE;
6925 FAIL;
6926 }
6927 else /* TARGET_THUMB1 */
6928 {
6929 if ( INTVAL (operands[3]) != 4
6930 || INTVAL (operands[2]) > 48)
6931 FAIL;
6932
6933 thumb_expand_cpymemqi (operands);
6934 DONE;
6935 }
6936 "
6937 )
6938 \f
6939
6940 ;; Compare & branch insns
6941 ;; The range calculations are derived as follows:
6942 ;; For forward branches, the address calculation returns the address of
6943 ;; the next instruction. This is 2 beyond the branch instruction.
6944 ;; For backward branches, the address calculation returns the address of
6945 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6946 ;; instruction for the shortest sequence, and 4 before the branch instruction
6947 ;; if we have to jump around an unconditional branch.
6948 ;; To the basic branch range the PC offset must be added (this is +4).
6949 ;; So for forward branches we have
6950 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6951 ;; And for backward branches we have
6952 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6953 ;;
6954 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6955 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
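;; As a worked example for the 16-bit 'b<cond>' case above:
;;   forward:  pos_range - pos_base_offs + pc_offs =  254 - 2    + 4 =  256
;;   backward: neg_range - neg_base_offs + pc_offs = -256 - (-2) + 4 = -250
;; giving the quoted range of (-250 -> 256).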
6956
6957 (define_expand "cbranchsi4"
6958 [(set (pc) (if_then_else
6959 (match_operator 0 "expandable_comparison_operator"
6960 [(match_operand:SI 1 "s_register_operand")
6961 (match_operand:SI 2 "nonmemory_operand")])
6962 (label_ref (match_operand 3 "" ""))
6963 (pc)))]
6964 "TARGET_EITHER"
6965 "
6966 if (!TARGET_THUMB1)
6967 {
6968 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6969 FAIL;
6970 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6971 operands[3]));
6972 DONE;
6973 }
6974 if (thumb1_cmpneg_operand (operands[2], SImode))
6975 {
6976 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6977 operands[3], operands[0]));
6978 DONE;
6979 }
6980 if (!thumb1_cmp_operand (operands[2], SImode))
6981 operands[2] = force_reg (SImode, operands[2]);
6982 ")
6983
6984 (define_expand "cbranchsf4"
6985 [(set (pc) (if_then_else
6986 (match_operator 0 "expandable_comparison_operator"
6987 [(match_operand:SF 1 "s_register_operand")
6988 (match_operand:SF 2 "vfp_compare_operand")])
6989 (label_ref (match_operand 3 "" ""))
6990 (pc)))]
6991 "TARGET_32BIT && TARGET_HARD_FLOAT"
6992 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6993 operands[3])); DONE;"
6994 )
6995
6996 (define_expand "cbranchdf4"
6997 [(set (pc) (if_then_else
6998 (match_operator 0 "expandable_comparison_operator"
6999 [(match_operand:DF 1 "s_register_operand")
7000 (match_operand:DF 2 "vfp_compare_operand")])
7001 (label_ref (match_operand 3 "" ""))
7002 (pc)))]
7003 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7004 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7005 operands[3])); DONE;"
7006 )
7007
7008 (define_expand "cbranchdi4"
7009 [(set (pc) (if_then_else
7010 (match_operator 0 "expandable_comparison_operator"
7011 [(match_operand:DI 1 "s_register_operand")
7012 (match_operand:DI 2 "reg_or_int_operand")])
7013 (label_ref (match_operand 3 "" ""))
7014 (pc)))]
7015 "TARGET_32BIT"
7016 "{
7017 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7018 FAIL;
7019 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7020 operands[3]));
7021 DONE;
7022 }"
7023 )
7024
7025 ;; Comparison and test insns
7026
7027 (define_insn "*arm_cmpsi_insn"
7028 [(set (reg:CC CC_REGNUM)
7029 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
7030 (match_operand:SI 1 "arm_add_operand" "Py,r,r,I,L")))]
7031 "TARGET_32BIT"
7032 "@
7033 cmp%?\\t%0, %1
7034 cmp%?\\t%0, %1
7035 cmp%?\\t%0, %1
7036 cmp%?\\t%0, %1
7037 cmn%?\\t%0, #%n1"
7038 [(set_attr "conds" "set")
7039 (set_attr "arch" "t2,t2,any,any,any")
7040 (set_attr "length" "2,2,4,4,4")
7041 (set_attr "predicable" "yes")
7042 (set_attr "predicable_short_it" "yes,yes,yes,no,no")
7043 (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
7044 )
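;; As a rough illustration of the final alternative above (the register
;; number is hypothetical): comparing against a negative constant whose
;; negation is a valid immediate, e.g. testing x == -3, can be emitted as
;;
;;	cmn	r0, #3
;;
;; since x - (-3) and x + 3 set the same condition flags.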
7045
7046 (define_insn "*cmpsi_shiftsi"
7047 [(set (reg:CC CC_REGNUM)
7048 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r,r")
7049 (match_operator:SI 3 "shift_operator"
7050 [(match_operand:SI 1 "s_register_operand" "r,r,r")
7051 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])))]
7052 "TARGET_32BIT"
7053 "cmp\\t%0, %1%S3"
7054 [(set_attr "conds" "set")
7055 (set_attr "shift" "1")
7056 (set_attr "arch" "32,a,a")
7057 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
7058
7059 (define_insn "*cmpsi_shiftsi_swp"
7060 [(set (reg:CC_SWP CC_REGNUM)
7061 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7062 [(match_operand:SI 1 "s_register_operand" "r,r,r")
7063 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])
7064 (match_operand:SI 0 "s_register_operand" "r,r,r")))]
7065 "TARGET_32BIT"
7066 "cmp%?\\t%0, %1%S3"
7067 [(set_attr "conds" "set")
7068 (set_attr "shift" "1")
7069 (set_attr "arch" "32,a,a")
7070 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
7071
7072 (define_insn "*arm_cmpsi_negshiftsi_si"
7073 [(set (reg:CC_Z CC_REGNUM)
7074 (compare:CC_Z
7075 (neg:SI (match_operator:SI 1 "shift_operator"
7076 [(match_operand:SI 2 "s_register_operand" "r")
7077 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7078 (match_operand:SI 0 "s_register_operand" "r")))]
7079 "TARGET_ARM"
7080 "cmn%?\\t%0, %2%S1"
7081 [(set_attr "conds" "set")
7082 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7083 (const_string "alus_shift_imm")
7084 (const_string "alus_shift_reg")))
7085 (set_attr "predicable" "yes")]
7086 )
7087
7088 ; This insn allows redundant compares to be removed by cse. Nothing should
7089 ; ever appear in the output file, since (set (reg x) (reg x)) is a no-op that
7090 ; is deleted later on. The match_dup will match the mode here, so that
7091 ; mode changes of the condition codes aren't lost even though we don't
7092 ; specify what they are.
7093
7094 (define_insn "*deleted_compare"
7095 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7096 "TARGET_32BIT"
7097 "\\t%@ deleted compare"
7098 [(set_attr "conds" "set")
7099 (set_attr "length" "0")
7100 (set_attr "type" "no_insn")]
7101 )
7102
7103 \f
7104 ;; Conditional branch insns
7105
7106 (define_expand "cbranch_cc"
7107 [(set (pc)
7108 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7109 (match_operand 2 "" "")])
7110 (label_ref (match_operand 3 "" ""))
7111 (pc)))]
7112 "TARGET_32BIT"
7113 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7114 operands[1], operands[2], NULL_RTX);
7115 operands[2] = const0_rtx;"
7116 )
7117
7118 ;;
7119 ;; Patterns to match conditional branch insns.
7120 ;;
7121
7122 (define_insn "arm_cond_branch"
7123 [(set (pc)
7124 (if_then_else (match_operator 1 "arm_comparison_operator"
7125 [(match_operand 2 "cc_register" "") (const_int 0)])
7126 (label_ref (match_operand 0 "" ""))
7127 (pc)))]
7128 "TARGET_32BIT"
7129 "*
7130 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7131 {
7132 arm_ccfsm_state += 2;
7133 return \"\";
7134 }
7135 return \"b%d1\\t%l0\";
7136 "
7137 [(set_attr "conds" "use")
7138 (set_attr "type" "branch")
7139 (set (attr "length")
7140 (if_then_else
7141 (and (match_test "TARGET_THUMB2")
7142 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7143 (le (minus (match_dup 0) (pc)) (const_int 256))))
7144 (const_int 2)
7145 (const_int 4)))]
7146 )
7147
7148 (define_insn "*arm_cond_branch_reversed"
7149 [(set (pc)
7150 (if_then_else (match_operator 1 "arm_comparison_operator"
7151 [(match_operand 2 "cc_register" "") (const_int 0)])
7152 (pc)
7153 (label_ref (match_operand 0 "" ""))))]
7154 "TARGET_32BIT"
7155 "*
7156 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7157 {
7158 arm_ccfsm_state += 2;
7159 return \"\";
7160 }
7161 return \"b%D1\\t%l0\";
7162 "
7163 [(set_attr "conds" "use")
7164 (set_attr "type" "branch")
7165 (set (attr "length")
7166 (if_then_else
7167 (and (match_test "TARGET_THUMB2")
7168 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7169 (le (minus (match_dup 0) (pc)) (const_int 256))))
7170 (const_int 2)
7171 (const_int 4)))]
7172 )
7173
7174 \f
7175
7176 ; scc insns
7177
7178 (define_expand "cstore_cc"
7179 [(set (match_operand:SI 0 "s_register_operand")
7180 (match_operator:SI 1 "" [(match_operand 2 "" "")
7181 (match_operand 3 "" "")]))]
7182 "TARGET_32BIT"
7183 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7184 operands[2], operands[3], NULL_RTX);
7185 operands[3] = const0_rtx;"
7186 )
7187
7188 (define_insn_and_split "*mov_scc"
7189 [(set (match_operand:SI 0 "s_register_operand" "=r")
7190 (match_operator:SI 1 "arm_comparison_operator_mode"
7191 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7192 "TARGET_ARM"
7193 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7194 "TARGET_ARM"
7195 [(set (match_dup 0)
7196 (if_then_else:SI (match_dup 1)
7197 (const_int 1)
7198 (const_int 0)))]
7199 ""
7200 [(set_attr "conds" "use")
7201 (set_attr "length" "8")
7202 (set_attr "type" "multiple")]
7203 )
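;; As a rough illustration (register names are hypothetical), storing the
;; result of a comparison such as r0 = (r1 == r2) is expected to end up as
;; the commented-out template above, i.e. roughly:
;;
;;	cmp	r1, r2
;;	movne	r0, #0
;;	moveq	r0, #1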
7204
7205 (define_insn "*negscc_borrow"
7206 [(set (match_operand:SI 0 "s_register_operand" "=r")
7207 (neg:SI (match_operand:SI 1 "arm_borrow_operation" "")))]
7208 "TARGET_32BIT"
7209 "sbc\\t%0, %0, %0"
7210 [(set_attr "conds" "use")
7211 (set_attr "length" "4")
7212 (set_attr "type" "adc_reg")]
7213 )
7214
7215 (define_insn_and_split "*mov_negscc"
7216 [(set (match_operand:SI 0 "s_register_operand" "=r")
7217 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
7218 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7219 "TARGET_ARM && !arm_borrow_operation (operands[1], SImode)"
7220 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7221 "&& true"
7222 [(set (match_dup 0)
7223 (if_then_else:SI (match_dup 1)
7224 (match_dup 3)
7225 (const_int 0)))]
7226 {
7227 operands[3] = GEN_INT (~0);
7228 }
7229 [(set_attr "conds" "use")
7230 (set_attr "length" "8")
7231 (set_attr "type" "multiple")]
7232 )
7233
7234 (define_insn_and_split "*mov_notscc"
7235 [(set (match_operand:SI 0 "s_register_operand" "=r")
7236 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7237 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7238 "TARGET_ARM"
7239 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7240 "TARGET_ARM"
7241 [(set (match_dup 0)
7242 (if_then_else:SI (match_dup 1)
7243 (match_dup 3)
7244 (match_dup 4)))]
7245 {
7246 operands[3] = GEN_INT (~1);
7247 operands[4] = GEN_INT (~0);
7248 }
7249 [(set_attr "conds" "use")
7250 (set_attr "length" "8")
7251 (set_attr "type" "multiple")]
7252 )
7253
7254 (define_expand "cstoresi4"
7255 [(set (match_operand:SI 0 "s_register_operand")
7256 (match_operator:SI 1 "expandable_comparison_operator"
7257 [(match_operand:SI 2 "s_register_operand")
7258 (match_operand:SI 3 "reg_or_int_operand")]))]
7259 "TARGET_32BIT || TARGET_THUMB1"
7260 "{
7261 rtx op3, scratch, scratch2;
7262
7263 if (!TARGET_THUMB1)
7264 {
7265 if (!arm_add_operand (operands[3], SImode))
7266 operands[3] = force_reg (SImode, operands[3]);
7267 emit_insn (gen_cstore_cc (operands[0], operands[1],
7268 operands[2], operands[3]));
7269 DONE;
7270 }
7271
7272 if (operands[3] == const0_rtx)
7273 {
7274 switch (GET_CODE (operands[1]))
7275 {
7276 case EQ:
7277 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7278 break;
7279
7280 case NE:
7281 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7282 break;
7283
7284 case LE:
7285 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7286 NULL_RTX, 0, OPTAB_WIDEN);
7287 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7288 NULL_RTX, 0, OPTAB_WIDEN);
7289 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7290 operands[0], 1, OPTAB_WIDEN);
7291 break;
7292
7293 case GE:
7294 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7295 NULL_RTX, 1);
7296 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7297 operands[0], 1, OPTAB_WIDEN);
7298 break;
7299
7300 case GT:
7301 scratch = expand_binop (SImode, ashr_optab, operands[2],
7302 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7303 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7304 NULL_RTX, 0, OPTAB_WIDEN);
7305 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7306 0, OPTAB_WIDEN);
7307 break;
7308
7309 /* LT is handled by generic code. No need for unsigned with 0. */
7310 default:
7311 FAIL;
7312 }
7313 DONE;
7314 }
7315
7316 switch (GET_CODE (operands[1]))
7317 {
7318 case EQ:
7319 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7320 NULL_RTX, 0, OPTAB_WIDEN);
7321 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7322 break;
7323
7324 case NE:
7325 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7326 NULL_RTX, 0, OPTAB_WIDEN);
7327 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7328 break;
7329
7330 case LE:
7331 op3 = force_reg (SImode, operands[3]);
7332
7333 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7334 NULL_RTX, 1, OPTAB_WIDEN);
7335 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7336 NULL_RTX, 0, OPTAB_WIDEN);
7337 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7338 op3, operands[2]));
7339 break;
7340
7341 case GE:
7342 op3 = operands[3];
7343 if (!thumb1_cmp_operand (op3, SImode))
7344 op3 = force_reg (SImode, op3);
7345 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7346 NULL_RTX, 0, OPTAB_WIDEN);
7347 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7348 NULL_RTX, 1, OPTAB_WIDEN);
7349 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7350 operands[2], op3));
7351 break;
7352
7353 case LEU:
7354 op3 = force_reg (SImode, operands[3]);
7355 scratch = force_reg (SImode, const0_rtx);
7356 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7357 op3, operands[2]));
7358 break;
7359
7360 case GEU:
7361 op3 = operands[3];
7362 if (!thumb1_cmp_operand (op3, SImode))
7363 op3 = force_reg (SImode, op3);
7364 scratch = force_reg (SImode, const0_rtx);
7365 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7366 operands[2], op3));
7367 break;
7368
7369 case LTU:
7370 op3 = operands[3];
7371 if (!thumb1_cmp_operand (op3, SImode))
7372 op3 = force_reg (SImode, op3);
7373 scratch = gen_reg_rtx (SImode);
7374 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
7375 break;
7376
7377 case GTU:
7378 op3 = force_reg (SImode, operands[3]);
7379 scratch = gen_reg_rtx (SImode);
7380 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
7381 break;
7382
7383 /* No good sequences for GT, LT. */
7384 default:
7385 FAIL;
7386 }
7387 DONE;
7388 }")
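;; A brief sketch of the branchless Thumb-1 sequences used above when the
;; comparison is against zero (x is operands[2], the result is operands[0],
;; ">>l" is a logical and ">>a" an arithmetic shift right):
;;   GE:  (~x) >>l 31             -- 1 iff x >= 0
;;   LE:  (x | (x - 1)) >>l 31    -- 1 iff x <= 0
;;   GT:  ((x >>a 31) - x) >>l 31 -- 1 iff x > 0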
7389
7390 (define_expand "cstorehf4"
7391 [(set (match_operand:SI 0 "s_register_operand")
7392 (match_operator:SI 1 "expandable_comparison_operator"
7393 [(match_operand:HF 2 "s_register_operand")
7394 (match_operand:HF 3 "vfp_compare_operand")]))]
7395 "TARGET_VFP_FP16INST"
7396 {
7397 if (!arm_validize_comparison (&operands[1],
7398 &operands[2],
7399 &operands[3]))
7400 FAIL;
7401
7402 emit_insn (gen_cstore_cc (operands[0], operands[1],
7403 operands[2], operands[3]));
7404 DONE;
7405 }
7406 )
7407
7408 (define_expand "cstoresf4"
7409 [(set (match_operand:SI 0 "s_register_operand")
7410 (match_operator:SI 1 "expandable_comparison_operator"
7411 [(match_operand:SF 2 "s_register_operand")
7412 (match_operand:SF 3 "vfp_compare_operand")]))]
7413 "TARGET_32BIT && TARGET_HARD_FLOAT"
7414 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7415 operands[2], operands[3])); DONE;"
7416 )
7417
7418 (define_expand "cstoredf4"
7419 [(set (match_operand:SI 0 "s_register_operand")
7420 (match_operator:SI 1 "expandable_comparison_operator"
7421 [(match_operand:DF 2 "s_register_operand")
7422 (match_operand:DF 3 "vfp_compare_operand")]))]
7423 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7424 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7425 operands[2], operands[3])); DONE;"
7426 )
7427
7428 (define_expand "cstoredi4"
7429 [(set (match_operand:SI 0 "s_register_operand")
7430 (match_operator:SI 1 "expandable_comparison_operator"
7431 [(match_operand:DI 2 "s_register_operand")
7432 (match_operand:DI 3 "reg_or_int_operand")]))]
7433 "TARGET_32BIT"
7434 "{
7435 if (!arm_validize_comparison (&operands[1],
7436 &operands[2],
7437 &operands[3]))
7438 FAIL;
7439 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
7440 operands[3]));
7441 DONE;
7442 }"
7443 )
7444
7445 \f
7446 ;; Conditional move insns
7447
7448 (define_expand "movsicc"
7449 [(set (match_operand:SI 0 "s_register_operand")
7450 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
7451 (match_operand:SI 2 "arm_not_operand")
7452 (match_operand:SI 3 "arm_not_operand")))]
7453 "TARGET_32BIT"
7454 "
7455 {
7456 enum rtx_code code;
7457 rtx ccreg;
7458
7459 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7460 &XEXP (operands[1], 1)))
7461 FAIL;
7462
7463 code = GET_CODE (operands[1]);
7464 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7465 XEXP (operands[1], 1), NULL_RTX);
7466 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7467 }"
7468 )
7469
7470 (define_expand "movhfcc"
7471 [(set (match_operand:HF 0 "s_register_operand")
7472 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
7473 (match_operand:HF 2 "s_register_operand")
7474 (match_operand:HF 3 "s_register_operand")))]
7475 "TARGET_VFP_FP16INST"
7476 "
7477 {
7478 enum rtx_code code = GET_CODE (operands[1]);
7479 rtx ccreg;
7480
7481 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7482 &XEXP (operands[1], 1)))
7483 FAIL;
7484
7485 code = GET_CODE (operands[1]);
7486 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7487 XEXP (operands[1], 1), NULL_RTX);
7488 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7489 }"
7490 )
7491
7492 (define_expand "movsfcc"
7493 [(set (match_operand:SF 0 "s_register_operand")
7494 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
7495 (match_operand:SF 2 "s_register_operand")
7496 (match_operand:SF 3 "s_register_operand")))]
7497 "TARGET_32BIT && TARGET_HARD_FLOAT"
7498 "
7499 {
7500 enum rtx_code code = GET_CODE (operands[1]);
7501 rtx ccreg;
7502
7503 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7504 &XEXP (operands[1], 1)))
7505 FAIL;
7506
7507 code = GET_CODE (operands[1]);
7508 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7509 XEXP (operands[1], 1), NULL_RTX);
7510 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7511 }"
7512 )
7513
7514 (define_expand "movdfcc"
7515 [(set (match_operand:DF 0 "s_register_operand")
7516 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
7517 (match_operand:DF 2 "s_register_operand")
7518 (match_operand:DF 3 "s_register_operand")))]
7519 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
7520 "
7521 {
7522 enum rtx_code code = GET_CODE (operands[1]);
7523 rtx ccreg;
7524
7525 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7526 &XEXP (operands[1], 1)))
7527 FAIL;
7528 code = GET_CODE (operands[1]);
7529 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7530 XEXP (operands[1], 1), NULL_RTX);
7531 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7532 }"
7533 )
7534
7535 (define_insn "*cmov<mode>"
7536 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
7537 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
7538 [(match_operand 2 "cc_register" "") (const_int 0)])
7539 (match_operand:SDF 3 "s_register_operand"
7540 "<F_constraint>")
7541 (match_operand:SDF 4 "s_register_operand"
7542 "<F_constraint>")))]
7543 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
7544 "*
7545 {
7546 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7547 switch (code)
7548 {
7549 case ARM_GE:
7550 case ARM_GT:
7551 case ARM_EQ:
7552 case ARM_VS:
7553 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
7554 case ARM_LT:
7555 case ARM_LE:
7556 case ARM_NE:
7557 case ARM_VC:
7558 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
7559 default:
7560 gcc_unreachable ();
7561 }
7562 return \"\";
7563 }"
7564 [(set_attr "conds" "use")
7565 (set_attr "type" "fcsel")]
7566 )
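;; VSEL only encodes the GE, GT, EQ and VS conditions; the remaining cases
;; above are handled by inverting the condition (%D1) and swapping the two
;; source operands. As a rough illustration (register names are
;; hypothetical), an LT select on doubles is expected to come out as
;;
;;	vselge.f64	d0, d2, d1
;;
;; where d1 (the "then" value) is chosen exactly when GE does not hold.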
7567
7568 (define_insn "*cmovhf"
7569 [(set (match_operand:HF 0 "s_register_operand" "=t")
7570 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
7571 [(match_operand 2 "cc_register" "") (const_int 0)])
7572 (match_operand:HF 3 "s_register_operand" "t")
7573 (match_operand:HF 4 "s_register_operand" "t")))]
7574 "TARGET_VFP_FP16INST"
7575 "*
7576 {
7577 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7578 switch (code)
7579 {
7580 case ARM_GE:
7581 case ARM_GT:
7582 case ARM_EQ:
7583 case ARM_VS:
7584 return \"vsel%d1.f16\\t%0, %3, %4\";
7585 case ARM_LT:
7586 case ARM_LE:
7587 case ARM_NE:
7588 case ARM_VC:
7589 return \"vsel%D1.f16\\t%0, %4, %3\";
7590 default:
7591 gcc_unreachable ();
7592 }
7593 return \"\";
7594 }"
7595 [(set_attr "conds" "use")
7596 (set_attr "type" "fcsel")]
7597 )
7598
7599 (define_insn_and_split "*movsicc_insn"
7600 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7601 (if_then_else:SI
7602 (match_operator 3 "arm_comparison_operator"
7603 [(match_operand 4 "cc_register" "") (const_int 0)])
7604 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7605 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7606 "TARGET_ARM"
7607 "@
7608 mov%D3\\t%0, %2
7609 mvn%D3\\t%0, #%B2
7610 mov%d3\\t%0, %1
7611 mvn%d3\\t%0, #%B1
7612 #
7613 #
7614 #
7615 #"
7616 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7617 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7618 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7619 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7620 "&& reload_completed"
7621 [(const_int 0)]
7622 {
7623 enum rtx_code rev_code;
7624 machine_mode mode;
7625 rtx rev_cond;
7626
7627 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7628 operands[3],
7629 gen_rtx_SET (operands[0], operands[1])));
7630
7631 rev_code = GET_CODE (operands[3]);
7632 mode = GET_MODE (operands[4]);
7633 if (mode == CCFPmode || mode == CCFPEmode)
7634 rev_code = reverse_condition_maybe_unordered (rev_code);
7635 else
7636 rev_code = reverse_condition (rev_code);
7637
7638 rev_cond = gen_rtx_fmt_ee (rev_code,
7639 VOIDmode,
7640 operands[4],
7641 const0_rtx);
7642 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7643 rev_cond,
7644 gen_rtx_SET (operands[0], operands[2])));
7645 DONE;
7646 }
7647 [(set_attr "length" "4,4,4,4,8,8,8,8")
7648 (set_attr "conds" "use")
7649 (set_attr_alternative "type"
7650 [(if_then_else (match_operand 2 "const_int_operand" "")
7651 (const_string "mov_imm")
7652 (const_string "mov_reg"))
7653 (const_string "mvn_imm")
7654 (if_then_else (match_operand 1 "const_int_operand" "")
7655 (const_string "mov_imm")
7656 (const_string "mov_reg"))
7657 (const_string "mvn_imm")
7658 (const_string "multiple")
7659 (const_string "multiple")
7660 (const_string "multiple")
7661 (const_string "multiple")])]
7662 )
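;; As a rough illustration (register names are hypothetical), a conditional
;; move such as r0 = (r1 != 0) ? r2 : r3 can come out as predicated moves:
;;
;;	cmp	r1, #0
;;	movne	r0, r2
;;	moveq	r0, r3
;;
;; with one of the moves dropped when r0 already holds the required value.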
7663
7664 (define_insn "*movsfcc_soft_insn"
7665 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7666 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7667 [(match_operand 4 "cc_register" "") (const_int 0)])
7668 (match_operand:SF 1 "s_register_operand" "0,r")
7669 (match_operand:SF 2 "s_register_operand" "r,0")))]
7670 "TARGET_ARM && TARGET_SOFT_FLOAT"
7671 "@
7672 mov%D3\\t%0, %2
7673 mov%d3\\t%0, %1"
7674 [(set_attr "conds" "use")
7675 (set_attr "type" "mov_reg")]
7676 )
7677
7678 \f
7679 ;; Jump and linkage insns
7680
7681 (define_expand "jump"
7682 [(set (pc)
7683 (label_ref (match_operand 0 "" "")))]
7684 "TARGET_EITHER"
7685 ""
7686 )
7687
7688 (define_insn "*arm_jump"
7689 [(set (pc)
7690 (label_ref (match_operand 0 "" "")))]
7691 "TARGET_32BIT"
7692 "*
7693 {
7694 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7695 {
7696 arm_ccfsm_state += 2;
7697 return \"\";
7698 }
7699 return \"b%?\\t%l0\";
7700 }
7701 "
7702 [(set_attr "predicable" "yes")
7703 (set (attr "length")
7704 (if_then_else
7705 (and (match_test "TARGET_THUMB2")
7706 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7707 (le (minus (match_dup 0) (pc)) (const_int 2048))))
7708 (const_int 2)
7709 (const_int 4)))
7710 (set_attr "type" "branch")]
7711 )
7712
7713 (define_expand "call"
7714 [(parallel [(call (match_operand 0 "memory_operand")
7715 (match_operand 1 "general_operand"))
7716 (use (match_operand 2 "" ""))
7717 (clobber (reg:SI LR_REGNUM))])]
7718 "TARGET_EITHER"
7719 "
7720 {
7721 rtx callee, pat;
7722 tree addr = MEM_EXPR (operands[0]);
7723
7724 /* In an untyped call, we can get NULL for operand 2. */
7725 if (operands[2] == NULL_RTX)
7726 operands[2] = const0_rtx;
7727
7728 /* Decide if we should generate indirect calls by loading the
7729 32-bit address of the callee into a register before performing the
7730 branch and link. */
7731 callee = XEXP (operands[0], 0);
7732 if (GET_CODE (callee) == SYMBOL_REF
7733 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7734 : !REG_P (callee))
7735 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7736
7737 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
7738 /* Indirect call: set r9 with FDPIC value of callee. */
7739 XEXP (operands[0], 0)
7740 = arm_load_function_descriptor (XEXP (operands[0], 0));
7741
7742 if (detect_cmse_nonsecure_call (addr))
7743 {
7744 pat = gen_nonsecure_call_internal (operands[0], operands[1],
7745 operands[2]);
7746 emit_call_insn (pat);
7747 }
7748 else
7749 {
7750 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7751 arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
7752 }
7753
7754 /* Restore FDPIC register (r9) after call. */
7755 if (TARGET_FDPIC)
7756 {
7757 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7758 rtx initial_fdpic_reg
7759 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7760
7761 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7762 initial_fdpic_reg));
7763 }
7764
7765 DONE;
7766 }"
7767 )
7768
7769 (define_insn "restore_pic_register_after_call"
7770 [(set (match_operand:SI 0 "s_register_operand" "+r,r")
7771 (unspec:SI [(match_dup 0)
7772 (match_operand:SI 1 "nonimmediate_operand" "r,m")]
7773 UNSPEC_PIC_RESTORE))]
7774 ""
7775 "@
7776 mov\t%0, %1
7777 ldr\t%0, %1"
7778 )
7779
7780 (define_expand "call_internal"
7781 [(parallel [(call (match_operand 0 "memory_operand")
7782 (match_operand 1 "general_operand"))
7783 (use (match_operand 2 "" ""))
7784 (clobber (reg:SI LR_REGNUM))])])
7785
7786 (define_expand "nonsecure_call_internal"
7787 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
7788 UNSPEC_NONSECURE_MEM)
7789 (match_operand 1 "general_operand"))
7790 (use (match_operand 2 "" ""))
7791 (clobber (reg:SI LR_REGNUM))])]
7792 "use_cmse"
7793 "
7794 {
7795 rtx tmp;
7796 tmp = copy_to_suggested_reg (XEXP (operands[0], 0),
7797 gen_rtx_REG (SImode, R4_REGNUM),
7798 SImode);
7799
7800 operands[0] = replace_equiv_address (operands[0], tmp);
7801 }")
7802
7803 (define_insn "*call_reg_armv5"
7804 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7805 (match_operand 1 "" ""))
7806 (use (match_operand 2 "" ""))
7807 (clobber (reg:SI LR_REGNUM))]
7808 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7809 "blx%?\\t%0"
7810 [(set_attr "type" "call")]
7811 )
7812
7813 (define_insn "*call_reg_arm"
7814 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7815 (match_operand 1 "" ""))
7816 (use (match_operand 2 "" ""))
7817 (clobber (reg:SI LR_REGNUM))]
7818 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7819 "*
7820 return output_call (operands);
7821 "
7822 ;; The length is the worst case; normally it is only two instructions.
7823 [(set_attr "length" "12")
7824 (set_attr "type" "call")]
7825 )
7826
7827
7828 (define_expand "call_value"
7829 [(parallel [(set (match_operand 0 "" "")
7830 (call (match_operand 1 "memory_operand")
7831 (match_operand 2 "general_operand")))
7832 (use (match_operand 3 "" ""))
7833 (clobber (reg:SI LR_REGNUM))])]
7834 "TARGET_EITHER"
7835 "
7836 {
7837 rtx pat, callee;
7838 tree addr = MEM_EXPR (operands[1]);
7839
7840 /* In an untyped call, we can get NULL for operand 3. */
7841 if (operands[3] == 0)
7842 operands[3] = const0_rtx;
7843
7844 /* Decide if we should generate indirect calls by loading the
7845 32-bit address of the callee into a register before performing the
7846 branch and link. */
7847 callee = XEXP (operands[1], 0);
7848 if (GET_CODE (callee) == SYMBOL_REF
7849 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7850 : !REG_P (callee))
7851 XEXP (operands[1], 0) = force_reg (Pmode, callee);
7852
7853 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
7854 /* Indirect call: set r9 with FDPIC value of callee. */
7855 XEXP (operands[1], 0)
7856 = arm_load_function_descriptor (XEXP (operands[1], 0));
7857
7858 if (detect_cmse_nonsecure_call (addr))
7859 {
7860 pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
7861 operands[2], operands[3]);
7862 emit_call_insn (pat);
7863 }
7864 else
7865 {
7866 pat = gen_call_value_internal (operands[0], operands[1],
7867 operands[2], operands[3]);
7868 arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
7869 }
7870
7871 /* Restore FDPIC register (r9) after call. */
7872 if (TARGET_FDPIC)
7873 {
7874 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7875 rtx initial_fdpic_reg
7876 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7877
7878 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7879 initial_fdpic_reg));
7880 }
7881
7882 DONE;
7883 }"
7884 )
7885
7886 (define_expand "call_value_internal"
7887 [(parallel [(set (match_operand 0 "" "")
7888 (call (match_operand 1 "memory_operand")
7889 (match_operand 2 "general_operand")))
7890 (use (match_operand 3 "" ""))
7891 (clobber (reg:SI LR_REGNUM))])])
7892
7893 (define_expand "nonsecure_call_value_internal"
7894 [(parallel [(set (match_operand 0 "" "")
7895 (call (unspec:SI [(match_operand 1 "memory_operand")]
7896 UNSPEC_NONSECURE_MEM)
7897 (match_operand 2 "general_operand")))
7898 (use (match_operand 3 "" ""))
7899 (clobber (reg:SI LR_REGNUM))])]
7900 "use_cmse"
7901 "
7902 {
7903 rtx tmp;
7904 tmp = copy_to_suggested_reg (XEXP (operands[1], 0),
7905 gen_rtx_REG (SImode, R4_REGNUM),
7906 SImode);
7907
7908 operands[1] = replace_equiv_address (operands[1], tmp);
7909 }")
7910
7911 (define_insn "*call_value_reg_armv5"
7912 [(set (match_operand 0 "" "")
7913 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7914 (match_operand 2 "" "")))
7915 (use (match_operand 3 "" ""))
7916 (clobber (reg:SI LR_REGNUM))]
7917 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7918 "blx%?\\t%1"
7919 [(set_attr "type" "call")]
7920 )
7921
7922 (define_insn "*call_value_reg_arm"
7923 [(set (match_operand 0 "" "")
7924 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7925 (match_operand 2 "" "")))
7926 (use (match_operand 3 "" ""))
7927 (clobber (reg:SI LR_REGNUM))]
7928 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7929 "*
7930 return output_call (&operands[1]);
7931 "
7932 [(set_attr "length" "12")
7933 (set_attr "type" "call")]
7934 )
7935
7936 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
7937 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
7938
7939 (define_insn "*call_symbol"
7940 [(call (mem:SI (match_operand:SI 0 "" ""))
7941 (match_operand 1 "" ""))
7942 (use (match_operand 2 "" ""))
7943 (clobber (reg:SI LR_REGNUM))]
7944 "TARGET_32BIT
7945 && !SIBLING_CALL_P (insn)
7946 && (GET_CODE (operands[0]) == SYMBOL_REF)
7947 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
7948 "*
7949 {
7950 rtx op = operands[0];
7951
7952 /* Switch mode now when possible. */
7953 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7954 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7955 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
7956
7957 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
7958 }"
7959 [(set_attr "type" "call")]
7960 )
7961
7962 (define_insn "*call_value_symbol"
7963 [(set (match_operand 0 "" "")
7964 (call (mem:SI (match_operand:SI 1 "" ""))
7965 (match_operand:SI 2 "" "")))
7966 (use (match_operand 3 "" ""))
7967 (clobber (reg:SI LR_REGNUM))]
7968 "TARGET_32BIT
7969 && !SIBLING_CALL_P (insn)
7970 && (GET_CODE (operands[1]) == SYMBOL_REF)
7971 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
7972 "*
7973 {
7974 rtx op = operands[1];
7975
7976 /* Switch mode now when possible. */
7977 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7978 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7979 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
7980
7981 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
7982 }"
7983 [(set_attr "type" "call")]
7984 )
7985
7986 (define_expand "sibcall_internal"
7987 [(parallel [(call (match_operand 0 "memory_operand")
7988 (match_operand 1 "general_operand"))
7989 (return)
7990 (use (match_operand 2 "" ""))])])
7991
7992 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
7993 (define_expand "sibcall"
7994 [(parallel [(call (match_operand 0 "memory_operand")
7995 (match_operand 1 "general_operand"))
7996 (return)
7997 (use (match_operand 2 "" ""))])]
7998 "TARGET_32BIT"
7999 "
8000 {
8001 rtx pat;
8002
8003 if ((!REG_P (XEXP (operands[0], 0))
8004 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
8005 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
8006 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
8007 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
8008
8009 if (operands[2] == NULL_RTX)
8010 operands[2] = const0_rtx;
8011
8012 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
8013 arm_emit_call_insn (pat, operands[0], true);
8014 DONE;
8015 }"
8016 )
8017
8018 (define_expand "sibcall_value_internal"
8019 [(parallel [(set (match_operand 0 "" "")
8020 (call (match_operand 1 "memory_operand")
8021 (match_operand 2 "general_operand")))
8022 (return)
8023 (use (match_operand 3 "" ""))])])
8024
8025 (define_expand "sibcall_value"
8026 [(parallel [(set (match_operand 0 "" "")
8027 (call (match_operand 1 "memory_operand")
8028 (match_operand 2 "general_operand")))
8029 (return)
8030 (use (match_operand 3 "" ""))])]
8031 "TARGET_32BIT"
8032 "
8033 {
8034 rtx pat;
8035
8036 if ((!REG_P (XEXP (operands[1], 0))
8037 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
8038 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
8039 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
8040 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
8041
8042 if (operands[3] == NULL_RTX)
8043 operands[3] = const0_rtx;
8044
8045 pat = gen_sibcall_value_internal (operands[0], operands[1],
8046 operands[2], operands[3]);
8047 arm_emit_call_insn (pat, operands[1], true);
8048 DONE;
8049 }"
8050 )
8051
8052 (define_insn "*sibcall_insn"
8053 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
8054 (match_operand 1 "" ""))
8055 (return)
8056 (use (match_operand 2 "" ""))]
8057 "TARGET_32BIT && SIBLING_CALL_P (insn)"
8058 "*
8059 if (which_alternative == 1)
8060 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8061 else
8062 {
8063 if (arm_arch5t || arm_arch4t)
8064 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
8065 else
8066 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
8067 }
8068 "
8069 [(set_attr "type" "call")]
8070 )
8071
8072 (define_insn "*sibcall_value_insn"
8073 [(set (match_operand 0 "" "")
8074 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
8075 (match_operand 2 "" "")))
8076 (return)
8077 (use (match_operand 3 "" ""))]
8078 "TARGET_32BIT && SIBLING_CALL_P (insn)"
8079 "*
8080 if (which_alternative == 1)
8081 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8082 else
8083 {
8084 if (arm_arch5t || arm_arch4t)
8085 return \"bx%?\\t%1\";
8086 else
8087 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
8088 }
8089 "
8090 [(set_attr "type" "call")]
8091 )
8092
8093 (define_expand "<return_str>return"
8094 [(RETURNS)]
8095 "(TARGET_ARM || (TARGET_THUMB2
8096 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
8097 && !IS_STACKALIGN (arm_current_func_type ())))
8098 <return_cond_false>"
8099 "
8100 {
8101 if (TARGET_THUMB2)
8102 {
8103 thumb2_expand_return (<return_simple_p>);
8104 DONE;
8105 }
8106 }
8107 "
8108 )
8109
8110 ;; Often the return insn will be the same as loading from memory, so set the type attribute accordingly.
8111 (define_insn "*arm_return"
8112 [(return)]
8113 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8114 "*
8115 {
8116 if (arm_ccfsm_state == 2)
8117 {
8118 arm_ccfsm_state += 2;
8119 return \"\";
8120 }
8121 return output_return_instruction (const_true_rtx, true, false, false);
8122 }"
8123 [(set_attr "type" "load_4")
8124 (set_attr "length" "12")
8125 (set_attr "predicable" "yes")]
8126 )
8127
8128 (define_insn "*cond_<return_str>return"
8129 [(set (pc)
8130 (if_then_else (match_operator 0 "arm_comparison_operator"
8131 [(match_operand 1 "cc_register" "") (const_int 0)])
8132 (RETURNS)
8133 (pc)))]
8134 "TARGET_ARM <return_cond_true>"
8135 "*
8136 {
8137 if (arm_ccfsm_state == 2)
8138 {
8139 arm_ccfsm_state += 2;
8140 return \"\";
8141 }
8142 return output_return_instruction (operands[0], true, false,
8143 <return_simple_p>);
8144 }"
8145 [(set_attr "conds" "use")
8146 (set_attr "length" "12")
8147 (set_attr "type" "load_4")]
8148 )
8149
8150 (define_insn "*cond_<return_str>return_inverted"
8151 [(set (pc)
8152 (if_then_else (match_operator 0 "arm_comparison_operator"
8153 [(match_operand 1 "cc_register" "") (const_int 0)])
8154 (pc)
8155 (RETURNS)))]
8156 "TARGET_ARM <return_cond_true>"
8157 "*
8158 {
8159 if (arm_ccfsm_state == 2)
8160 {
8161 arm_ccfsm_state += 2;
8162 return \"\";
8163 }
8164 return output_return_instruction (operands[0], true, true,
8165 <return_simple_p>);
8166 }"
8167 [(set_attr "conds" "use")
8168 (set_attr "length" "12")
8169 (set_attr "type" "load_4")]
8170 )
8171
8172 (define_insn "*arm_simple_return"
8173 [(simple_return)]
8174 "TARGET_ARM"
8175 "*
8176 {
8177 if (arm_ccfsm_state == 2)
8178 {
8179 arm_ccfsm_state += 2;
8180 return \"\";
8181 }
8182 return output_return_instruction (const_true_rtx, true, false, true);
8183 }"
8184 [(set_attr "type" "branch")
8185 (set_attr "length" "4")
8186 (set_attr "predicable" "yes")]
8187 )
8188
8189 ;; Generate a sequence of instructions to determine if the processor is
8190 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8191 ;; mask.
8192
8193 (define_expand "return_addr_mask"
8194 [(set (match_dup 1)
8195 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8196 (const_int 0)))
8197 (set (match_operand:SI 0 "s_register_operand")
8198 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8199 (const_int -1)
8200 (const_int 67108860)))] ; 0x03fffffc
8201 "TARGET_ARM"
8202 "
8203 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8204 ")
8205
8206 (define_insn "*check_arch2"
8207 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8208 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8209 (const_int 0)))]
8210 "TARGET_ARM"
8211 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8212 [(set_attr "length" "8")
8213 (set_attr "conds" "set")
8214 (set_attr "type" "multiple")]
8215 )
8216
8217 ;; Call subroutine returning any type.
8218
8219 (define_expand "untyped_call"
8220 [(parallel [(call (match_operand 0 "" "")
8221 (const_int 0))
8222 (match_operand 1 "" "")
8223 (match_operand 2 "" "")])]
8224 "TARGET_EITHER && !TARGET_FDPIC"
8225 "
8226 {
8227 int i;
8228 rtx par = gen_rtx_PARALLEL (VOIDmode,
8229 rtvec_alloc (XVECLEN (operands[2], 0)));
8230 rtx addr = gen_reg_rtx (Pmode);
8231 rtx mem;
8232 int size = 0;
8233
8234 emit_move_insn (addr, XEXP (operands[1], 0));
8235 mem = change_address (operands[1], BLKmode, addr);
8236
8237 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8238 {
8239 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8240
8241 /* Default code only uses r0 as a return value, but we could
8242 be using anything up to 4 registers. */
8243 if (REGNO (src) == R0_REGNUM)
8244 src = gen_rtx_REG (TImode, R0_REGNUM);
8245
8246 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8247 GEN_INT (size));
8248 size += GET_MODE_SIZE (GET_MODE (src));
8249 }
8250
8251 emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
8252
8253 size = 0;
8254
8255 for (i = 0; i < XVECLEN (par, 0); i++)
8256 {
8257 HOST_WIDE_INT offset = 0;
8258 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8259
8260 if (size != 0)
8261 emit_move_insn (addr, plus_constant (Pmode, addr, size));
8262
8263 mem = change_address (mem, GET_MODE (reg), NULL);
8264 if (REGNO (reg) == R0_REGNUM)
8265 {
8266 /* On thumb we have to use a write-back instruction. */
8267 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
8268 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8269 size = TARGET_ARM ? 16 : 0;
8270 }
8271 else
8272 {
8273 emit_move_insn (mem, reg);
8274 size = GET_MODE_SIZE (GET_MODE (reg));
8275 }
8276 }
8277
8278 /* The optimizer does not know that the call sets the function value
8279 registers we stored in the result block. We avoid problems by
8280 claiming that all hard registers are used and clobbered at this
8281 point. */
8282 emit_insn (gen_blockage ());
8283
8284 DONE;
8285 }"
8286 )
8287
8288 (define_expand "untyped_return"
8289 [(match_operand:BLK 0 "memory_operand")
8290 (match_operand 1 "" "")]
8291 "TARGET_EITHER && !TARGET_FDPIC"
8292 "
8293 {
8294 int i;
8295 rtx addr = gen_reg_rtx (Pmode);
8296 rtx mem;
8297 int size = 0;
8298
8299 emit_move_insn (addr, XEXP (operands[0], 0));
8300 mem = change_address (operands[0], BLKmode, addr);
8301
8302 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8303 {
8304 HOST_WIDE_INT offset = 0;
8305 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8306
8307 if (size != 0)
8308 emit_move_insn (addr, plus_constant (Pmode, addr, size));
8309
8310 mem = change_address (mem, GET_MODE (reg), NULL);
8311 if (REGNO (reg) == R0_REGNUM)
8312 {
8313 /* On thumb we have to use a write-back instruction. */
8314 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
8315 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8316 size = TARGET_ARM ? 16 : 0;
8317 }
8318 else
8319 {
8320 emit_move_insn (reg, mem);
8321 size = GET_MODE_SIZE (GET_MODE (reg));
8322 }
8323 }
8324
8325 /* Emit USE insns before the return. */
8326 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8327 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8328
8329 /* Construct the return. */
8330 expand_naked_return ();
8331
8332 DONE;
8333 }"
8334 )
8335
8336 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8337 ;; all of memory. This blocks insns from being moved across this point.
8338
8339 (define_insn "blockage"
8340 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8341 "TARGET_EITHER"
8342 ""
8343 [(set_attr "length" "0")
8344 (set_attr "type" "block")]
8345 )
8346
8347 ;; Since we hard-code r0 here, use the 'o' constraint to prevent the
8348 ;; compiler from emitting auto-increment operations that could use r0 as
8349 ;; the base register, which would provoke undefined behaviour in the hardware.
8350 (define_insn "probe_stack"
8351 [(set (match_operand:SI 0 "memory_operand" "=o")
8352 (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
8353 "TARGET_32BIT"
8354 "str%?\\tr0, %0"
8355 [(set_attr "type" "store_4")
8356 (set_attr "predicable" "yes")]
8357 )
8358
8359 (define_insn "probe_stack_range"
8360 [(set (match_operand:SI 0 "register_operand" "=r")
8361 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
8362 (match_operand:SI 2 "register_operand" "r")]
8363 VUNSPEC_PROBE_STACK_RANGE))]
8364 "TARGET_32BIT"
8365 {
8366 return output_probe_stack_range (operands[0], operands[2]);
8367 }
8368 [(set_attr "type" "multiple")
8369 (set_attr "conds" "clob")]
8370 )
8371
8372 ;; Named patterns for stack smashing protection.
8373 (define_expand "stack_protect_combined_set"
8374 [(parallel
8375 [(set (match_operand:SI 0 "memory_operand")
8376 (unspec:SI [(match_operand:SI 1 "guard_operand")]
8377 UNSPEC_SP_SET))
8378 (clobber (match_scratch:SI 2 ""))
8379 (clobber (match_scratch:SI 3 ""))])]
8380 ""
8381 ""
8382 )
8383
8384 ;; Use a separate insn from the above expand so that the mem can be kept
8385 ;; outside operand #1 when register allocation happens. This is needed to
8386 ;; stop LRA from trying to reload the guard, since we need to control how
8387 ;; PIC access is done in the -fpic/-fPIC case (see the COMPUTE_NOW parameter
8388 ;; when calling legitimize_pic_address ()).
8389 (define_insn_and_split "*stack_protect_combined_set_insn"
8390 [(set (match_operand:SI 0 "memory_operand" "=m,m")
8391 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
8392 UNSPEC_SP_SET))
8393 (clobber (match_scratch:SI 2 "=&l,&r"))
8394 (clobber (match_scratch:SI 3 "=&l,&r"))]
8395 ""
8396 "#"
8397 "reload_completed"
8398 [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
8399 UNSPEC_SP_SET))
8400 (clobber (match_dup 2))])]
8401 "
8402 {
8403 if (flag_pic)
8404 {
8405 rtx pic_reg;
8406
8407 if (TARGET_FDPIC)
8408 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8409 else
8410 pic_reg = operands[3];
8411
8412 /* Forces recomputing of GOT base now. */
8413 legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
8414 true /*compute_now*/);
8415 }
8416 else
8417 {
8418 if (address_operand (operands[1], SImode))
8419 operands[2] = operands[1];
8420 else
8421 {
8422 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
8423 emit_move_insn (operands[2], mem);
8424 }
8425 }
8426 }"
8427 [(set_attr "arch" "t1,32")]
8428 )
8429
8430 ;; DO NOT SPLIT THIS INSN. It's important for security reasons that the
8431 ;; canary value does not live beyond the life of this sequence.
8432 (define_insn "*stack_protect_set_insn"
8433 [(set (match_operand:SI 0 "memory_operand" "=m,m")
8434 (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
8435 UNSPEC_SP_SET))
8436 (clobber (match_dup 1))]
8437 ""
8438 "@
8439 ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
8440 ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
8441 [(set_attr "length" "8,12")
8442 (set_attr "conds" "clob,nocond")
8443 (set_attr "type" "multiple")
8444 (set_attr "arch" "t1,32")]
8445 )
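;; The sequence above loads the guard value, stores it into the canary slot
;; and then wipes the scratch register, roughly:
;;
;;	ldr	rX, [rX]	@ load the guard value
;;	str	rX, [canary]	@ store it into the protected slot (operand 0)
;;	mov	rX, #0		@ clear rX so the guard value cannot leak
;;
;; where rX stands for operand 1.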
8446
8447 (define_expand "stack_protect_combined_test"
8448 [(parallel
8449 [(set (pc)
8450 (if_then_else
8451 (eq (match_operand:SI 0 "memory_operand")
8452 (unspec:SI [(match_operand:SI 1 "guard_operand")]
8453 UNSPEC_SP_TEST))
8454 (label_ref (match_operand 2))
8455 (pc)))
8456 (clobber (match_scratch:SI 3 ""))
8457 (clobber (match_scratch:SI 4 ""))
8458 (clobber (reg:CC CC_REGNUM))])]
8459 ""
8460 ""
8461 )
8462
8463 ;; Use a separate insn from the above expand so that the mem can be kept
8464 ;; outside operand #1 when register allocation happens. This is needed to
8465 ;; stop LRA from trying to reload the guard, since we need to control how
8466 ;; PIC access is done in the -fpic/-fPIC case (see the COMPUTE_NOW parameter
8467 ;; when calling legitimize_pic_address ()).
8468 (define_insn_and_split "*stack_protect_combined_test_insn"
8469 [(set (pc)
8470 (if_then_else
8471 (eq (match_operand:SI 0 "memory_operand" "m,m")
8472 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
8473 UNSPEC_SP_TEST))
8474 (label_ref (match_operand 2))
8475 (pc)))
8476 (clobber (match_scratch:SI 3 "=&l,&r"))
8477 (clobber (match_scratch:SI 4 "=&l,&r"))
8478 (clobber (reg:CC CC_REGNUM))]
8479 ""
8480 "#"
8481 "reload_completed"
8482 [(const_int 0)]
8483 {
8484 rtx eq;
8485
8486 if (flag_pic)
8487 {
8488 rtx pic_reg;
8489
8490 if (TARGET_FDPIC)
8491 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8492 else
8493 pic_reg = operands[4];
8494
8495 /* Forces recomputing of GOT base now. */
8496 legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
8497 true /*compute_now*/);
8498 }
8499 else
8500 {
8501 if (address_operand (operands[1], SImode))
8502 operands[3] = operands[1];
8503 else
8504 {
8505 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
8506 emit_move_insn (operands[3], mem);
8507 }
8508 }
8509 if (TARGET_32BIT)
8510 {
8511 emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
8512 operands[3]));
8513 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
8514 eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
8515 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
8516 }
8517 else
8518 {
8519 emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
8520 operands[3]));
8521 eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
8522 emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
8523 operands[2]));
8524 }
8525 DONE;
8526 }
8527 [(set_attr "arch" "t1,32")]
8528 )
8529
8530 (define_insn "arm_stack_protect_test_insn"
8531 [(set (reg:CC_Z CC_REGNUM)
8532 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
8533 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
8534 UNSPEC_SP_TEST)
8535 (const_int 0)))
8536 (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
8537 (clobber (match_dup 2))]
8538 "TARGET_32BIT"
8539 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0"
8540 [(set_attr "length" "8,12")
8541 (set_attr "conds" "set")
8542 (set_attr "type" "multiple")
8543 (set_attr "arch" "t,32")]
8544 )
8545
8546 (define_expand "casesi"
8547 [(match_operand:SI 0 "s_register_operand") ; index to jump on
8548 (match_operand:SI 1 "const_int_operand") ; lower bound
8549 (match_operand:SI 2 "const_int_operand") ; total range
8550 (match_operand:SI 3 "" "") ; table label
8551 (match_operand:SI 4 "" "")] ; Out of range label
8552 "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
8553 "
8554 {
8555 enum insn_code code;
8556 if (operands[1] != const0_rtx)
8557 {
8558 rtx reg = gen_reg_rtx (SImode);
8559
8560 emit_insn (gen_addsi3 (reg, operands[0],
8561 gen_int_mode (-INTVAL (operands[1]),
8562 SImode)));
8563 operands[0] = reg;
8564 }
8565
8566 if (TARGET_ARM)
8567 code = CODE_FOR_arm_casesi_internal;
8568 else if (TARGET_THUMB1)
8569 code = CODE_FOR_thumb1_casesi_internal_pic;
8570 else if (flag_pic)
8571 code = CODE_FOR_thumb2_casesi_internal_pic;
8572 else
8573 code = CODE_FOR_thumb2_casesi_internal;
8574
8575 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8576 operands[2] = force_reg (SImode, operands[2]);
8577
8578 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8579 operands[3], operands[4]));
8580 DONE;
8581 }"
8582 )
8583
8584 ;; The USE in this pattern is needed to tell flow analysis that this is
8585 ;; a CASESI insn. It has no other purpose.
8586 (define_expand "arm_casesi_internal"
8587 [(parallel [(set (pc)
8588 (if_then_else
8589 (leu (match_operand:SI 0 "s_register_operand")
8590 (match_operand:SI 1 "arm_rhs_operand"))
8591 (match_dup 4)
8592 (label_ref:SI (match_operand 3 ""))))
8593 (clobber (reg:CC CC_REGNUM))
8594 (use (label_ref:SI (match_operand 2 "")))])]
8595 "TARGET_ARM"
8596 {
8597 operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
8598 operands[4] = gen_rtx_PLUS (SImode, operands[4],
8599 gen_rtx_LABEL_REF (SImode, operands[2]));
8600 operands[4] = gen_rtx_MEM (SImode, operands[4]);
8601 MEM_READONLY_P (operands[4]) = 1;
8602 MEM_NOTRAP_P (operands[4]) = 1;
8603 })
8604
8605 (define_insn "*arm_casesi_internal"
8606 [(parallel [(set (pc)
8607 (if_then_else
8608 (leu (match_operand:SI 0 "s_register_operand" "r")
8609 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8610 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8611 (label_ref:SI (match_operand 2 "" ""))))
8612 (label_ref:SI (match_operand 3 "" ""))))
8613 (clobber (reg:CC CC_REGNUM))
8614 (use (label_ref:SI (match_dup 2)))])]
8615 "TARGET_ARM"
8616 "*
8617 if (flag_pic)
8618 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8619 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8620 "
8621 [(set_attr "conds" "clob")
8622 (set_attr "length" "12")
8623 (set_attr "type" "multiple")]
8624 )
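;; Illustrative only (register numbers and labels are hypothetical): for a
;; dense switch such as
;;	switch (i) { case 0: ... case 1: ... case 2: ... case 3: ... }
;; the pattern above dispatches through a pc-relative jump table:
;;	cmp	r0, #3
;;	ldrls	pc, [pc, r0, asl #2]	@ in range: load the target address
;;	b	.Ldefault		@ out of range: default label
;; The PIC variant adds the scaled index to the pc instead, so the table
;; entries are themselves branch instructions.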
8625
8626 (define_expand "indirect_jump"
8627 [(set (pc)
8628 (match_operand:SI 0 "s_register_operand"))]
8629 "TARGET_EITHER"
8630 "
8631 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8632 address and use bx. */
8633 if (TARGET_THUMB2)
8634 {
8635 rtx tmp;
8636 tmp = gen_reg_rtx (SImode);
8637 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT (1)));
8638 operands[0] = tmp;
8639 }
8640 "
8641 )
8642
8643 ;; NB Never uses BX.
8644 (define_insn "*arm_indirect_jump"
8645 [(set (pc)
8646 (match_operand:SI 0 "s_register_operand" "r"))]
8647 "TARGET_ARM"
8648 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8649 [(set_attr "predicable" "yes")
8650 (set_attr "type" "branch")]
8651 )
8652
8653 (define_insn "*load_indirect_jump"
8654 [(set (pc)
8655 (match_operand:SI 0 "memory_operand" "m"))]
8656 "TARGET_ARM"
8657 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8658 [(set_attr "type" "load_4")
8659 (set_attr "pool_range" "4096")
8660 (set_attr "neg_pool_range" "4084")
8661 (set_attr "predicable" "yes")]
8662 )
8663
8664 \f
8665 ;; Misc insns
8666
8667 (define_insn "nop"
8668 [(const_int 0)]
8669 "TARGET_EITHER"
8670 "nop"
8671 [(set (attr "length")
8672 (if_then_else (eq_attr "is_thumb" "yes")
8673 (const_int 2)
8674 (const_int 4)))
8675 (set_attr "type" "mov_reg")]
8676 )
8677
8678 (define_insn "trap"
8679 [(trap_if (const_int 1) (const_int 0))]
8680 ""
8681 "*
8682 if (TARGET_ARM)
8683 return \".inst\\t0xe7f000f0\";
8684 else
8685 return \".inst\\t0xdeff\";
8686 "
8687 [(set (attr "length")
8688 (if_then_else (eq_attr "is_thumb" "yes")
8689 (const_int 2)
8690 (const_int 4)))
8691 (set_attr "type" "trap")
8692 (set_attr "conds" "unconditional")]
8693 )
8694
8695 \f
8696 ;; Patterns to allow combination of arithmetic, cond code and shifts
8697
8698 (define_insn "*<arith_shift_insn>_multsi"
8699 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8700 (SHIFTABLE_OPS:SI
8701 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
8702 (match_operand:SI 3 "power_of_two_operand" ""))
8703 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
8704 "TARGET_32BIT"
8705 "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
8706 [(set_attr "predicable" "yes")
8707 (set_attr "shift" "2")
8708 (set_attr "arch" "a,t2")
8709 (set_attr "type" "alu_shift_imm")])
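;; Illustrative only (register allocation is hypothetical): the pattern
;; above folds a multiply by a power of two into the shifter operand, so
;;	int f (int x, int y) { return x + y * 8; }
;; can become a single
;;	add	r0, r0, r1, lsl #3
;; rather than a separate shift followed by an add.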
8710
8711 (define_insn "*<arith_shift_insn>_shiftsi"
8712 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8713 (SHIFTABLE_OPS:SI
8714 (match_operator:SI 2 "shift_nomul_operator"
8715 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8716 (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
8717 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
8718 "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
8719 "<arith_shift_insn>%?\\t%0, %1, %3%S2"
8720 [(set_attr "predicable" "yes")
8721 (set_attr "shift" "3")
8722 (set_attr "arch" "a,t2,a")
8723 (set_attr "type" "alu_shift_imm,alu_shift_imm,alu_shift_reg")])
8724
8725 (define_split
8726 [(set (match_operand:SI 0 "s_register_operand" "")
8727 (match_operator:SI 1 "shiftable_operator"
8728 [(match_operator:SI 2 "shiftable_operator"
8729 [(match_operator:SI 3 "shift_operator"
8730 [(match_operand:SI 4 "s_register_operand" "")
8731 (match_operand:SI 5 "reg_or_int_operand" "")])
8732 (match_operand:SI 6 "s_register_operand" "")])
8733 (match_operand:SI 7 "arm_rhs_operand" "")]))
8734 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8735 "TARGET_32BIT"
8736 [(set (match_dup 8)
8737 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8738 (match_dup 6)]))
8739 (set (match_dup 0)
8740 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
8741 "")
8742
8743 (define_insn "*arith_shiftsi_compare0"
8744 [(set (reg:CC_NOOV CC_REGNUM)
8745 (compare:CC_NOOV
8746 (match_operator:SI 1 "shiftable_operator"
8747 [(match_operator:SI 3 "shift_operator"
8748 [(match_operand:SI 4 "s_register_operand" "r,r")
8749 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8750 (match_operand:SI 2 "s_register_operand" "r,r")])
8751 (const_int 0)))
8752 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8753 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8754 (match_dup 2)]))]
8755 "TARGET_32BIT"
8756 "%i1s%?\\t%0, %2, %4%S3"
8757 [(set_attr "conds" "set")
8758 (set_attr "shift" "4")
8759 (set_attr "arch" "32,a")
8760 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8761
8762 (define_insn "*arith_shiftsi_compare0_scratch"
8763 [(set (reg:CC_NOOV CC_REGNUM)
8764 (compare:CC_NOOV
8765 (match_operator:SI 1 "shiftable_operator"
8766 [(match_operator:SI 3 "shift_operator"
8767 [(match_operand:SI 4 "s_register_operand" "r,r")
8768 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8769 (match_operand:SI 2 "s_register_operand" "r,r")])
8770 (const_int 0)))
8771 (clobber (match_scratch:SI 0 "=r,r"))]
8772 "TARGET_32BIT"
8773 "%i1s%?\\t%0, %2, %4%S3"
8774 [(set_attr "conds" "set")
8775 (set_attr "shift" "4")
8776 (set_attr "arch" "32,a")
8777 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8778
8779 (define_insn "*sub_shiftsi"
8780 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8781 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8782 (match_operator:SI 2 "shift_operator"
8783 [(match_operand:SI 3 "s_register_operand" "r,r")
8784 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8785 "TARGET_32BIT"
8786 "sub%?\\t%0, %1, %3%S2"
8787 [(set_attr "predicable" "yes")
8788 (set_attr "predicable_short_it" "no")
8789 (set_attr "shift" "3")
8790 (set_attr "arch" "32,a")
8791 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8792
8793 (define_insn "*sub_shiftsi_compare0"
8794 [(set (reg:CC_NOOV CC_REGNUM)
8795 (compare:CC_NOOV
8796 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8797 (match_operator:SI 2 "shift_operator"
8798 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8799 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8800 (const_int 0)))
8801 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8802 (minus:SI (match_dup 1)
8803 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8804 "TARGET_32BIT"
8805 "subs%?\\t%0, %1, %3%S2"
8806 [(set_attr "conds" "set")
8807 (set_attr "shift" "3")
8808 (set_attr "arch" "32,a,a")
8809 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
8810
8811 (define_insn "*sub_shiftsi_compare0_scratch"
8812 [(set (reg:CC_NOOV CC_REGNUM)
8813 (compare:CC_NOOV
8814 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8815 (match_operator:SI 2 "shift_operator"
8816 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8817 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8818 (const_int 0)))
8819 (clobber (match_scratch:SI 0 "=r,r,r"))]
8820 "TARGET_32BIT"
8821 "subs%?\\t%0, %1, %3%S2"
8822 [(set_attr "conds" "set")
8823 (set_attr "shift" "3")
8824 (set_attr "arch" "32,a,a")
8825 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
8826 \f
8827
8828 (define_insn_and_split "*and_scc"
8829 [(set (match_operand:SI 0 "s_register_operand" "=r")
8830 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8831 [(match_operand 2 "cc_register" "") (const_int 0)])
8832 (match_operand:SI 3 "s_register_operand" "r")))]
8833 "TARGET_ARM"
8834 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
8835 "&& reload_completed"
8836 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
8837 (cond_exec (match_dup 4) (set (match_dup 0)
8838 (and:SI (match_dup 3) (const_int 1))))]
8839 {
8840 machine_mode mode = GET_MODE (operands[2]);
8841 enum rtx_code rc = GET_CODE (operands[1]);
8842
8843 /* Note that operands[4] is the same as operands[1],
8844 but with VOIDmode as the result. */
8845 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8846 if (mode == CCFPmode || mode == CCFPEmode)
8847 rc = reverse_condition_maybe_unordered (rc);
8848 else
8849 rc = reverse_condition (rc);
8850 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8851 }
8852 [(set_attr "conds" "use")
8853 (set_attr "type" "multiple")
8854 (set_attr "length" "8")]
8855 )
8856
8857 (define_insn_and_split "*ior_scc"
8858 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8859 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
8860 [(match_operand 2 "cc_register" "") (const_int 0)])
8861 (match_operand:SI 3 "s_register_operand" "0,?r")))]
8862 "TARGET_ARM"
8863 "@
8864 orr%d1\\t%0, %3, #1
8865 #"
8866 "&& reload_completed
8867 && REGNO (operands[0]) != REGNO (operands[3])"
8868 ;; && which_alternative == 1
8869 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
8870 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
8871 (cond_exec (match_dup 4) (set (match_dup 0)
8872 (ior:SI (match_dup 3) (const_int 1))))]
8873 {
8874 machine_mode mode = GET_MODE (operands[2]);
8875 enum rtx_code rc = GET_CODE (operands[1]);
8876
8877 /* Note that operands[4] is the same as operands[1],
8878 but with VOIDmode as the result. */
8879 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8880 if (mode == CCFPmode || mode == CCFPEmode)
8881 rc = reverse_condition_maybe_unordered (rc);
8882 else
8883 rc = reverse_condition (rc);
8884 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8885 }
8886 [(set_attr "conds" "use")
8887 (set_attr "length" "4,8")
8888 (set_attr "type" "logic_imm,multiple")]
8889 )
8890
8891 ; A series of splitters for the compare_scc pattern below. Note that
8892 ; order is important.
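; For example, the splitters below turn "x < 0" into a single
; "lsr Rd, x, #31", "x >= 0" into "mvn Rd, x" followed by
; "lsr Rd, Rd, #31", and (when CLZ is available, arm_arch5t) "x == 0"
; into "clz Rd, x" followed by "lsr Rd, Rd, #5".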
8893 (define_split
8894 [(set (match_operand:SI 0 "s_register_operand" "")
8895 (lt:SI (match_operand:SI 1 "s_register_operand" "")
8896 (const_int 0)))
8897 (clobber (reg:CC CC_REGNUM))]
8898 "TARGET_32BIT && reload_completed"
8899 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
8900
8901 (define_split
8902 [(set (match_operand:SI 0 "s_register_operand" "")
8903 (ge:SI (match_operand:SI 1 "s_register_operand" "")
8904 (const_int 0)))
8905 (clobber (reg:CC CC_REGNUM))]
8906 "TARGET_32BIT && reload_completed"
8907 [(set (match_dup 0) (not:SI (match_dup 1)))
8908 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
8909
8910 (define_split
8911 [(set (match_operand:SI 0 "s_register_operand" "")
8912 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8913 (const_int 0)))
8914 (clobber (reg:CC CC_REGNUM))]
8915 "arm_arch5t && TARGET_32BIT"
8916 [(set (match_dup 0) (clz:SI (match_dup 1)))
8917 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8918 )
8919
8920 (define_split
8921 [(set (match_operand:SI 0 "s_register_operand" "")
8922 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8923 (const_int 0)))
8924 (clobber (reg:CC CC_REGNUM))]
8925 "TARGET_32BIT && reload_completed"
8926 [(parallel
8927 [(set (reg:CC CC_REGNUM)
8928 (compare:CC (const_int 1) (match_dup 1)))
8929 (set (match_dup 0)
8930 (minus:SI (const_int 1) (match_dup 1)))])
8931 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
8932 (set (match_dup 0) (const_int 0)))])
8933
8934 (define_split
8935 [(set (match_operand:SI 0 "s_register_operand" "")
8936 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8937 (match_operand:SI 2 "const_int_operand" "")))
8938 (clobber (reg:CC CC_REGNUM))]
8939 "TARGET_32BIT && reload_completed"
8940 [(parallel
8941 [(set (reg:CC CC_REGNUM)
8942 (compare:CC (match_dup 1) (match_dup 2)))
8943 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
8944 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
8945 (set (match_dup 0) (const_int 1)))]
8946 {
8947 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
8948 })
8949
8950 (define_split
8951 [(set (match_operand:SI 0 "s_register_operand" "")
8952 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8953 (match_operand:SI 2 "arm_add_operand" "")))
8954 (clobber (reg:CC CC_REGNUM))]
8955 "TARGET_32BIT && reload_completed"
8956 [(parallel
8957 [(set (reg:CC_NOOV CC_REGNUM)
8958 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
8959 (const_int 0)))
8960 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
8961 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
8962 (set (match_dup 0) (const_int 1)))])
8963
8964 (define_insn_and_split "*compare_scc"
8965 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8966 (match_operator:SI 1 "arm_comparison_operator"
8967 [(match_operand:SI 2 "s_register_operand" "r,r")
8968 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8969 (clobber (reg:CC CC_REGNUM))]
8970 "TARGET_32BIT"
8971 "#"
8972 "&& reload_completed"
8973 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
8974 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
8975 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
8976 {
8977 rtx tmp1;
8978 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
8979 operands[2], operands[3]);
8980 enum rtx_code rc = GET_CODE (operands[1]);
8981
8982 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
8983
8984 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8985 if (mode == CCFPmode || mode == CCFPEmode)
8986 rc = reverse_condition_maybe_unordered (rc);
8987 else
8988 rc = reverse_condition (rc);
8989 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8990 }
8991 [(set_attr "type" "multiple")]
8992 )
8993
8994 ;; Attempt to improve the sequence generated by the compare_scc splitters
8995 ;; not to use conditional execution.
8996
8997 ;; Rd = (eq (reg1) (const_int 0)) // ARMv5
8998 ;; clz Rd, reg1
8999 ;; lsr Rd, Rd, #5
9000 (define_peephole2
9001 [(set (reg:CC CC_REGNUM)
9002 (compare:CC (match_operand:SI 1 "register_operand" "")
9003 (const_int 0)))
9004 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9005 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9006 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9007 (set (match_dup 0) (const_int 1)))]
9008 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9009 [(set (match_dup 0) (clz:SI (match_dup 1)))
9010 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9011 )
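;; Why the clz/lsr pair works: CLZ returns 32 for a zero input and a value
;; in the range 0..31 for any non-zero input, so shifting the result right
;; by five yields 1 exactly when the input was zero.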
9012
9013 ;; Rd = (eq (reg1) (const_int 0)) // !ARMv5
9014 ;; negs Rd, reg1
9015 ;; adc Rd, Rd, reg1
9016 (define_peephole2
9017 [(set (reg:CC CC_REGNUM)
9018 (compare:CC (match_operand:SI 1 "register_operand" "")
9019 (const_int 0)))
9020 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9021 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9022 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9023 (set (match_dup 0) (const_int 1)))
9024 (match_scratch:SI 2 "r")]
9025 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9026 [(parallel
9027 [(set (reg:CC CC_REGNUM)
9028 (compare:CC (const_int 0) (match_dup 1)))
9029 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
9030 (set (match_dup 0)
9031 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
9032 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
9033 )
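;; Why the negs/adc pair works: "negs Rd, reg1" computes 0 - reg1 and sets
;; the carry flag only when no borrow occurs, i.e. only when reg1 is zero;
;; "adc Rd, Rd, reg1" then computes (0 - reg1) + reg1 + C = C, which is 1
;; for reg1 == 0 and 0 otherwise.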
9034
9035 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
9036 ;; sub Rd, Reg1, reg2
9037 ;; clz Rd, Rd
9038 ;; lsr Rd, Rd, #5
9039 (define_peephole2
9040 [(set (reg:CC CC_REGNUM)
9041 (compare:CC (match_operand:SI 1 "register_operand" "")
9042 (match_operand:SI 2 "arm_rhs_operand" "")))
9043 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9044 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9045 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9046 (set (match_dup 0) (const_int 1)))]
9047 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
9048 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
9049 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
9050 (set (match_dup 0) (clz:SI (match_dup 0)))
9051 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9052 )
9053
9054
9055 ;; Rd = (eq (reg1) (reg2)) // ! ARMv5 or optimising for size.
9056 ;; sub T1, Reg1, reg2
9057 ;; negs Rd, T1
9058 ;; adc Rd, Rd, T1
9059 (define_peephole2
9060 [(set (reg:CC CC_REGNUM)
9061 (compare:CC (match_operand:SI 1 "register_operand" "")
9062 (match_operand:SI 2 "arm_rhs_operand" "")))
9063 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9064 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9065 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9066 (set (match_dup 0) (const_int 1)))
9067 (match_scratch:SI 3 "r")]
9068 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9069 [(set (match_dup 3) (match_dup 4))
9070 (parallel
9071 [(set (reg:CC CC_REGNUM)
9072 (compare:CC (const_int 0) (match_dup 3)))
9073 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
9074 (set (match_dup 0)
9075 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
9076 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
9077 "
9078 if (CONST_INT_P (operands[2]))
9079 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
9080 else
9081 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
9082 ")
9083
9084 (define_insn "*cond_move"
9085 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9086 (if_then_else:SI (match_operator 3 "equality_operator"
9087 [(match_operator 4 "arm_comparison_operator"
9088 [(match_operand 5 "cc_register" "") (const_int 0)])
9089 (const_int 0)])
9090 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9091 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9092 "TARGET_ARM"
9093 "*
9094 if (GET_CODE (operands[3]) == NE)
9095 {
9096 if (which_alternative != 1)
9097 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9098 if (which_alternative != 0)
9099 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9100 return \"\";
9101 }
9102 if (which_alternative != 0)
9103 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9104 if (which_alternative != 1)
9105 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9106 return \"\";
9107 "
9108 [(set_attr "conds" "use")
9109 (set_attr_alternative "type"
9110 [(if_then_else (match_operand 2 "const_int_operand" "")
9111 (const_string "mov_imm")
9112 (const_string "mov_reg"))
9113 (if_then_else (match_operand 1 "const_int_operand" "")
9114 (const_string "mov_imm")
9115 (const_string "mov_reg"))
9116 (const_string "multiple")])
9117 (set_attr "length" "4,4,8")]
9118 )
9119
9120 (define_insn "*cond_arith"
9121 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9122 (match_operator:SI 5 "shiftable_operator"
9123 [(match_operator:SI 4 "arm_comparison_operator"
9124 [(match_operand:SI 2 "s_register_operand" "r,r")
9125 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9126 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9127 (clobber (reg:CC CC_REGNUM))]
9128 "TARGET_ARM"
9129 "*
9130 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9131 return \"%i5\\t%0, %1, %2, lsr #31\";
9132
9133 output_asm_insn (\"cmp\\t%2, %3\", operands);
9134 if (GET_CODE (operands[5]) == AND)
9135 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9136 else if (GET_CODE (operands[5]) == MINUS)
9137 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9138 else if (which_alternative != 0)
9139 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9140 return \"%i5%d4\\t%0, %1, #1\";
9141 "
9142 [(set_attr "conds" "clob")
9143 (set_attr "length" "12")
9144 (set_attr "type" "multiple")]
9145 )
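;; Illustrative only (register allocation is hypothetical): the special
;; LT-against-zero case handled first in the output code above means that
;;	int f (int x, int y) { return y + (x < 0); }
;; can be emitted as a single
;;	add	r0, r1, r0, lsr #31
;; using the sign bit directly rather than a compare plus conditional ops.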
9146
9147 (define_insn "*cond_sub"
9148 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9149 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9150 (match_operator:SI 4 "arm_comparison_operator"
9151 [(match_operand:SI 2 "s_register_operand" "r,r")
9152 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9153 (clobber (reg:CC CC_REGNUM))]
9154 "TARGET_ARM"
9155 "*
9156 output_asm_insn (\"cmp\\t%2, %3\", operands);
9157 if (which_alternative != 0)
9158 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9159 return \"sub%d4\\t%0, %1, #1\";
9160 "
9161 [(set_attr "conds" "clob")
9162 (set_attr "length" "8,12")
9163 (set_attr "type" "multiple")]
9164 )
9165
9166 (define_insn "*cmp_ite0"
9167 [(set (match_operand 6 "dominant_cc_register" "")
9168 (compare
9169 (if_then_else:SI
9170 (match_operator 4 "arm_comparison_operator"
9171 [(match_operand:SI 0 "s_register_operand"
9172 "l,l,l,r,r,r,r,r,r")
9173 (match_operand:SI 1 "arm_add_operand"
9174 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9175 (match_operator:SI 5 "arm_comparison_operator"
9176 [(match_operand:SI 2 "s_register_operand"
9177 "l,r,r,l,l,r,r,r,r")
9178 (match_operand:SI 3 "arm_add_operand"
9179 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9180 (const_int 0))
9181 (const_int 0)))]
9182 "TARGET_32BIT"
9183 "*
9184 {
9185 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9186 {
9187 {\"cmp%d5\\t%0, %1\",
9188 \"cmp%d4\\t%2, %3\"},
9189 {\"cmn%d5\\t%0, #%n1\",
9190 \"cmp%d4\\t%2, %3\"},
9191 {\"cmp%d5\\t%0, %1\",
9192 \"cmn%d4\\t%2, #%n3\"},
9193 {\"cmn%d5\\t%0, #%n1\",
9194 \"cmn%d4\\t%2, #%n3\"}
9195 };
9196 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9197 {
9198 {\"cmp\\t%2, %3\",
9199 \"cmp\\t%0, %1\"},
9200 {\"cmp\\t%2, %3\",
9201 \"cmn\\t%0, #%n1\"},
9202 {\"cmn\\t%2, #%n3\",
9203 \"cmp\\t%0, %1\"},
9204 {\"cmn\\t%2, #%n3\",
9205 \"cmn\\t%0, #%n1\"}
9206 };
9207 static const char * const ite[2] =
9208 {
9209 \"it\\t%d5\",
9210 \"it\\t%d4\"
9211 };
9212 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9213 CMP_CMP, CMN_CMP, CMP_CMP,
9214 CMN_CMP, CMP_CMN, CMN_CMN};
9215 int swap =
9216 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9217
9218 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9219 if (TARGET_THUMB2) {
9220 output_asm_insn (ite[swap], operands);
9221 }
9222 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9223 return \"\";
9224 }"
9225 [(set_attr "conds" "set")
9226 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9227 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
9228 (set_attr "type" "multiple")
9229 (set_attr_alternative "length"
9230 [(const_int 6)
9231 (const_int 8)
9232 (const_int 8)
9233 (const_int 8)
9234 (const_int 8)
9235 (if_then_else (eq_attr "is_thumb" "no")
9236 (const_int 8)
9237 (const_int 10))
9238 (if_then_else (eq_attr "is_thumb" "no")
9239 (const_int 8)
9240 (const_int 10))
9241 (if_then_else (eq_attr "is_thumb" "no")
9242 (const_int 8)
9243 (const_int 10))
9244 (if_then_else (eq_attr "is_thumb" "no")
9245 (const_int 8)
9246 (const_int 10))])]
9247 )
9248
9249 (define_insn "*cmp_ite1"
9250 [(set (match_operand 6 "dominant_cc_register" "")
9251 (compare
9252 (if_then_else:SI
9253 (match_operator 4 "arm_comparison_operator"
9254 [(match_operand:SI 0 "s_register_operand"
9255 "l,l,l,r,r,r,r,r,r")
9256 (match_operand:SI 1 "arm_add_operand"
9257 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9258 (match_operator:SI 5 "arm_comparison_operator"
9259 [(match_operand:SI 2 "s_register_operand"
9260 "l,r,r,l,l,r,r,r,r")
9261 (match_operand:SI 3 "arm_add_operand"
9262 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9263 (const_int 1))
9264 (const_int 0)))]
9265 "TARGET_32BIT"
9266 "*
9267 {
9268 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9269 {
9270 {\"cmp\\t%0, %1\",
9271 \"cmp\\t%2, %3\"},
9272 {\"cmn\\t%0, #%n1\",
9273 \"cmp\\t%2, %3\"},
9274 {\"cmp\\t%0, %1\",
9275 \"cmn\\t%2, #%n3\"},
9276 {\"cmn\\t%0, #%n1\",
9277 \"cmn\\t%2, #%n3\"}
9278 };
9279 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9280 {
9281 {\"cmp%d4\\t%2, %3\",
9282 \"cmp%D5\\t%0, %1\"},
9283 {\"cmp%d4\\t%2, %3\",
9284 \"cmn%D5\\t%0, #%n1\"},
9285 {\"cmn%d4\\t%2, #%n3\",
9286 \"cmp%D5\\t%0, %1\"},
9287 {\"cmn%d4\\t%2, #%n3\",
9288 \"cmn%D5\\t%0, #%n1\"}
9289 };
9290 static const char * const ite[2] =
9291 {
9292 \"it\\t%d4\",
9293 \"it\\t%D5\"
9294 };
9295 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9296 CMP_CMP, CMN_CMP, CMP_CMP,
9297 CMN_CMP, CMP_CMN, CMN_CMN};
9298 int swap =
9299 comparison_dominates_p (GET_CODE (operands[5]),
9300 reverse_condition (GET_CODE (operands[4])));
9301
9302 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9303 if (TARGET_THUMB2) {
9304 output_asm_insn (ite[swap], operands);
9305 }
9306 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9307 return \"\";
9308 }"
9309 [(set_attr "conds" "set")
9310 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9311 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
9312 (set_attr_alternative "length"
9313 [(const_int 6)
9314 (const_int 8)
9315 (const_int 8)
9316 (const_int 8)
9317 (const_int 8)
9318 (if_then_else (eq_attr "is_thumb" "no")
9319 (const_int 8)
9320 (const_int 10))
9321 (if_then_else (eq_attr "is_thumb" "no")
9322 (const_int 8)
9323 (const_int 10))
9324 (if_then_else (eq_attr "is_thumb" "no")
9325 (const_int 8)
9326 (const_int 10))
9327 (if_then_else (eq_attr "is_thumb" "no")
9328 (const_int 8)
9329 (const_int 10))])
9330 (set_attr "type" "multiple")]
9331 )
9332
9333 (define_insn "*cmp_and"
9334 [(set (match_operand 6 "dominant_cc_register" "")
9335 (compare
9336 (and:SI
9337 (match_operator 4 "arm_comparison_operator"
9338 [(match_operand:SI 0 "s_register_operand"
9339 "l,l,l,r,r,r,r,r,r,r")
9340 (match_operand:SI 1 "arm_add_operand"
9341 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
9342 (match_operator:SI 5 "arm_comparison_operator"
9343 [(match_operand:SI 2 "s_register_operand"
9344 "l,r,r,l,l,r,r,r,r,r")
9345 (match_operand:SI 3 "arm_add_operand"
9346 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
9347 (const_int 0)))]
9348 "TARGET_32BIT"
9349 "*
9350 {
9351 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9352 {
9353 {\"cmp%d5\\t%0, %1\",
9354 \"cmp%d4\\t%2, %3\"},
9355 {\"cmn%d5\\t%0, #%n1\",
9356 \"cmp%d4\\t%2, %3\"},
9357 {\"cmp%d5\\t%0, %1\",
9358 \"cmn%d4\\t%2, #%n3\"},
9359 {\"cmn%d5\\t%0, #%n1\",
9360 \"cmn%d4\\t%2, #%n3\"}
9361 };
9362 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9363 {
9364 {\"cmp\\t%2, %3\",
9365 \"cmp\\t%0, %1\"},
9366 {\"cmp\\t%2, %3\",
9367 \"cmn\\t%0, #%n1\"},
9368 {\"cmn\\t%2, #%n3\",
9369 \"cmp\\t%0, %1\"},
9370 {\"cmn\\t%2, #%n3\",
9371 \"cmn\\t%0, #%n1\"}
9372 };
9373 static const char *const ite[2] =
9374 {
9375 \"it\\t%d5\",
9376 \"it\\t%d4\"
9377 };
9378 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
9379 CMP_CMP, CMN_CMP, CMP_CMP,
9380 CMP_CMP, CMN_CMP, CMP_CMN,
9381 CMN_CMN};
9382 int swap =
9383 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9384
9385 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9386 if (TARGET_THUMB2) {
9387 output_asm_insn (ite[swap], operands);
9388 }
9389 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9390 return \"\";
9391 }"
9392 [(set_attr "conds" "set")
9393 (set_attr "predicable" "no")
9394 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
9395 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
9396 (set_attr_alternative "length"
9397 [(const_int 6)
9398 (const_int 8)
9399 (const_int 8)
9400 (const_int 8)
9401 (const_int 8)
9402 (const_int 6)
9403 (if_then_else (eq_attr "is_thumb" "no")
9404 (const_int 8)
9405 (const_int 10))
9406 (if_then_else (eq_attr "is_thumb" "no")
9407 (const_int 8)
9408 (const_int 10))
9409 (if_then_else (eq_attr "is_thumb" "no")
9410 (const_int 8)
9411 (const_int 10))
9412 (if_then_else (eq_attr "is_thumb" "no")
9413 (const_int 8)
9414 (const_int 10))])
9415 (set_attr "type" "multiple")]
9416 )
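;; Illustrative only (register names are hypothetical): for a pair of tests
;; where one condition dominates, such as
;;	if (a >= 0 && b >= 0) ...
;; the pattern above emits a conditional-compare sequence of the form
;;	cmp	ra, #0
;;	cmpge	rb, #0		@ re-tested only when the first test held
;; (with an "it ge" inserted before the predicated compare on Thumb-2),
;; leaving a single dominant condition in the flags.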
9417
9418 (define_insn "*cmp_ior"
9419 [(set (match_operand 6 "dominant_cc_register" "")
9420 (compare
9421 (ior:SI
9422 (match_operator 4 "arm_comparison_operator"
9423 [(match_operand:SI 0 "s_register_operand"
9424 "l,l,l,r,r,r,r,r,r,r")
9425 (match_operand:SI 1 "arm_add_operand"
9426 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
9427 (match_operator:SI 5 "arm_comparison_operator"
9428 [(match_operand:SI 2 "s_register_operand"
9429 "l,r,r,l,l,r,r,r,r,r")
9430 (match_operand:SI 3 "arm_add_operand"
9431 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
9432 (const_int 0)))]
9433 "TARGET_32BIT"
9434 "*
9435 {
9436 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9437 {
9438 {\"cmp\\t%0, %1\",
9439 \"cmp\\t%2, %3\"},
9440 {\"cmn\\t%0, #%n1\",
9441 \"cmp\\t%2, %3\"},
9442 {\"cmp\\t%0, %1\",
9443 \"cmn\\t%2, #%n3\"},
9444 {\"cmn\\t%0, #%n1\",
9445 \"cmn\\t%2, #%n3\"}
9446 };
9447 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9448 {
9449 {\"cmp%D4\\t%2, %3\",
9450 \"cmp%D5\\t%0, %1\"},
9451 {\"cmp%D4\\t%2, %3\",
9452 \"cmn%D5\\t%0, #%n1\"},
9453 {\"cmn%D4\\t%2, #%n3\",
9454 \"cmp%D5\\t%0, %1\"},
9455 {\"cmn%D4\\t%2, #%n3\",
9456 \"cmn%D5\\t%0, #%n1\"}
9457 };
9458 static const char *const ite[2] =
9459 {
9460 \"it\\t%D4\",
9461 \"it\\t%D5\"
9462 };
9463 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
9464 CMP_CMP, CMN_CMP, CMP_CMP,
9465 CMP_CMP, CMN_CMP, CMP_CMN,
9466 CMN_CMN};
9467 int swap =
9468 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9469
9470 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9471 if (TARGET_THUMB2) {
9472 output_asm_insn (ite[swap], operands);
9473 }
9474 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9475 return \"\";
9476 }
9477 "
9478 [(set_attr "conds" "set")
9479 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
9480 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
9481 (set_attr_alternative "length"
9482 [(const_int 6)
9483 (const_int 8)
9484 (const_int 8)
9485 (const_int 8)
9486 (const_int 8)
9487 (const_int 6)
9488 (if_then_else (eq_attr "is_thumb" "no")
9489 (const_int 8)
9490 (const_int 10))
9491 (if_then_else (eq_attr "is_thumb" "no")
9492 (const_int 8)
9493 (const_int 10))
9494 (if_then_else (eq_attr "is_thumb" "no")
9495 (const_int 8)
9496 (const_int 10))
9497 (if_then_else (eq_attr "is_thumb" "no")
9498 (const_int 8)
9499 (const_int 10))])
9500 (set_attr "type" "multiple")]
9501 )
9502
9503 (define_insn_and_split "*ior_scc_scc"
9504 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9505 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9506 [(match_operand:SI 1 "s_register_operand" "l,r")
9507 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9508 (match_operator:SI 6 "arm_comparison_operator"
9509 [(match_operand:SI 4 "s_register_operand" "l,r")
9510 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
9511 (clobber (reg:CC CC_REGNUM))]
9512 "TARGET_32BIT
9513 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9514 != CCmode)"
9515 "#"
9516 "TARGET_32BIT && reload_completed"
9517 [(set (match_dup 7)
9518 (compare
9519 (ior:SI
9520 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9521 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9522 (const_int 0)))
9523 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9524 "operands[7]
9525 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9526 DOM_CC_X_OR_Y),
9527 CC_REGNUM);"
9528 [(set_attr "conds" "clob")
9529 (set_attr "enabled_for_short_it" "yes,no")
9530 (set_attr "length" "16")
9531 (set_attr "type" "multiple")]
9532 )
9533
9534 ; If the above pattern is followed by a CMP insn, then the compare is
9535 ; redundant, since we can rework the conditional instruction that follows.
9536 (define_insn_and_split "*ior_scc_scc_cmp"
9537 [(set (match_operand 0 "dominant_cc_register" "")
9538 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9539 [(match_operand:SI 1 "s_register_operand" "l,r")
9540 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9541 (match_operator:SI 6 "arm_comparison_operator"
9542 [(match_operand:SI 4 "s_register_operand" "l,r")
9543 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9544 (const_int 0)))
9545 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9546 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9547 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9548 "TARGET_32BIT"
9549 "#"
9550 "TARGET_32BIT && reload_completed"
9551 [(set (match_dup 0)
9552 (compare
9553 (ior:SI
9554 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9555 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9556 (const_int 0)))
9557 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9558 ""
9559 [(set_attr "conds" "set")
9560 (set_attr "enabled_for_short_it" "yes,no")
9561 (set_attr "length" "16")
9562 (set_attr "type" "multiple")]
9563 )
9564
9565 (define_insn_and_split "*and_scc_scc"
9566 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9567 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9568 [(match_operand:SI 1 "s_register_operand" "l,r")
9569 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9570 (match_operator:SI 6 "arm_comparison_operator"
9571 [(match_operand:SI 4 "s_register_operand" "l,r")
9572 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
9573 (clobber (reg:CC CC_REGNUM))]
9574 "TARGET_32BIT
9575 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9576 != CCmode)"
9577 "#"
9578 "TARGET_32BIT && reload_completed
9579 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9580 != CCmode)"
9581 [(set (match_dup 7)
9582 (compare
9583 (and:SI
9584 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9585 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9586 (const_int 0)))
9587 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9588 "operands[7]
9589 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9590 DOM_CC_X_AND_Y),
9591 CC_REGNUM);"
9592 [(set_attr "conds" "clob")
9593 (set_attr "enabled_for_short_it" "yes,no")
9594 (set_attr "length" "16")
9595 (set_attr "type" "multiple")]
9596 )
9597
9598 ; If the above pattern is followed by a CMP insn, then the compare is
9599 ; redundant, since we can rework the conditional instruction that follows.
9600 (define_insn_and_split "*and_scc_scc_cmp"
9601 [(set (match_operand 0 "dominant_cc_register" "")
9602 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9603 [(match_operand:SI 1 "s_register_operand" "l,r")
9604 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9605 (match_operator:SI 6 "arm_comparison_operator"
9606 [(match_operand:SI 4 "s_register_operand" "l,r")
9607 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9608 (const_int 0)))
9609 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9610 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9611 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9612 "TARGET_32BIT"
9613 "#"
9614 "TARGET_32BIT && reload_completed"
9615 [(set (match_dup 0)
9616 (compare
9617 (and:SI
9618 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9619 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9620 (const_int 0)))
9621 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9622 ""
9623 [(set_attr "conds" "set")
9624 (set_attr "enabled_for_short_it" "yes,no")
9625 (set_attr "length" "16")
9626 (set_attr "type" "multiple")]
9627 )
9628
9629 ;; If there is no dominance in the comparison, then we can still save an
9630 ;; instruction in the AND case, since we know that the second compare
9631 ;; need only zero the value if false (if true, then the value is already
9632 ;; correct).
9633 (define_insn_and_split "*and_scc_scc_nodom"
9634 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
9635 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9636 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9637 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9638 (match_operator:SI 6 "arm_comparison_operator"
9639 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9640 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9641 (clobber (reg:CC CC_REGNUM))]
9642 "TARGET_32BIT
9643 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9644 == CCmode)"
9645 "#"
9646 "TARGET_32BIT && reload_completed"
9647 [(parallel [(set (match_dup 0)
9648 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9649 (clobber (reg:CC CC_REGNUM))])
9650 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9651 (set (match_dup 0)
9652 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9653 (match_dup 0)
9654 (const_int 0)))]
9655 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9656 operands[4], operands[5]),
9657 CC_REGNUM);
9658 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9659 operands[5]);"
9660 [(set_attr "conds" "clob")
9661 (set_attr "length" "20")
9662 (set_attr "type" "multiple")]
9663 )
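;; A rough sketch of the split above (not authoritative): the first
;; comparison is materialised as a 0/1 value in the destination (itself a
;; compare plus two conditional moves), the second comparison then sets the
;; flags directly, and a final conditional move clears the destination when
;; the second test fails; the 20-byte length corresponds to five ARM
;; instructions.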
9664
9665 (define_split
9666 [(set (reg:CC_NOOV CC_REGNUM)
9667 (compare:CC_NOOV (ior:SI
9668 (and:SI (match_operand:SI 0 "s_register_operand" "")
9669 (const_int 1))
9670 (match_operator:SI 1 "arm_comparison_operator"
9671 [(match_operand:SI 2 "s_register_operand" "")
9672 (match_operand:SI 3 "arm_add_operand" "")]))
9673 (const_int 0)))
9674 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9675 "TARGET_ARM"
9676 [(set (match_dup 4)
9677 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9678 (match_dup 0)))
9679 (set (reg:CC_NOOV CC_REGNUM)
9680 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9681 (const_int 0)))]
9682 "")
9683
9684 (define_split
9685 [(set (reg:CC_NOOV CC_REGNUM)
9686 (compare:CC_NOOV (ior:SI
9687 (match_operator:SI 1 "arm_comparison_operator"
9688 [(match_operand:SI 2 "s_register_operand" "")
9689 (match_operand:SI 3 "arm_add_operand" "")])
9690 (and:SI (match_operand:SI 0 "s_register_operand" "")
9691 (const_int 1)))
9692 (const_int 0)))
9693 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9694 "TARGET_ARM"
9695 [(set (match_dup 4)
9696 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9697 (match_dup 0)))
9698 (set (reg:CC_NOOV CC_REGNUM)
9699 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9700 (const_int 0)))]
9701 "")
9702 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness.
9703
9704 (define_insn_and_split "*negscc"
9705 [(set (match_operand:SI 0 "s_register_operand" "=r")
9706 (neg:SI (match_operator 3 "arm_comparison_operator"
9707 [(match_operand:SI 1 "s_register_operand" "r")
9708 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9709 (clobber (reg:CC CC_REGNUM))]
9710 "TARGET_ARM"
9711 "#"
9712 "&& reload_completed"
9713 [(const_int 0)]
9714 {
9715 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
9716
9717 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9718 {
9719 /* Emit mov\\t%0, %1, asr #31 */
9720 emit_insn (gen_rtx_SET (operands[0],
9721 gen_rtx_ASHIFTRT (SImode,
9722 operands[1],
9723 GEN_INT (31))));
9724 DONE;
9725 }
9726 else if (GET_CODE (operands[3]) == NE)
9727 {
9728 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
9729 if (CONST_INT_P (operands[2]))
9730 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
9731 gen_int_mode (-INTVAL (operands[2]),
9732 SImode)));
9733 else
9734 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
9735
9736 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9737 gen_rtx_NE (SImode,
9738 cc_reg,
9739 const0_rtx),
9740 gen_rtx_SET (operands[0],
9741 GEN_INT (~0))));
9742 DONE;
9743 }
9744 else
9745 {
9746 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
9747 emit_insn (gen_rtx_SET (cc_reg,
9748 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
9749 enum rtx_code rc = GET_CODE (operands[3]);
9750
9751 rc = reverse_condition (rc);
9752 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9753 gen_rtx_fmt_ee (rc,
9754 VOIDmode,
9755 cc_reg,
9756 const0_rtx),
9757 gen_rtx_SET (operands[0], const0_rtx)));
9758 rc = GET_CODE (operands[3]);
9759 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9760 gen_rtx_fmt_ee (rc,
9761 VOIDmode,
9762 cc_reg,
9763 const0_rtx),
9764 gen_rtx_SET (operands[0],
9765 GEN_INT (~0))));
9766 DONE;
9767 }
9768 FAIL;
9769 }
9770 [(set_attr "conds" "clob")
9771 (set_attr "length" "12")
9772 (set_attr "type" "multiple")]
9773 )
9774
9775 (define_insn_and_split "movcond_addsi"
9776 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
9777 (if_then_else:SI
9778 (match_operator 5 "comparison_operator"
9779 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
9780 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
9781 (const_int 0)])
9782 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
9783 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
9784 (clobber (reg:CC CC_REGNUM))]
9785 "TARGET_32BIT"
9786 "#"
9787 "&& reload_completed"
9788 [(set (reg:CC_NOOV CC_REGNUM)
9789 (compare:CC_NOOV
9790 (plus:SI (match_dup 3)
9791 (match_dup 4))
9792 (const_int 0)))
9793 (set (match_dup 0) (match_dup 1))
9794 (cond_exec (match_dup 6)
9795 (set (match_dup 0) (match_dup 2)))]
9796 "
9797 {
9798 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
9799 operands[3], operands[4]);
9800 enum rtx_code rc = GET_CODE (operands[5]);
9801 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
9802 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
9803 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
9804 rc = reverse_condition (rc);
9805 else
9806 std::swap (operands[1], operands[2]);
9807
9808 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
9809 }
9810 "
9811 [(set_attr "conds" "clob")
9812 (set_attr "enabled_for_short_it" "no,yes,yes")
9813 (set_attr "type" "multiple")]
9814 )
9815
9816 (define_insn "movcond"
9817 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9818 (if_then_else:SI
9819 (match_operator 5 "arm_comparison_operator"
9820 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9821 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9822 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9823 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9824 (clobber (reg:CC CC_REGNUM))]
9825 "TARGET_ARM"
9826 "*
9827 if (GET_CODE (operands[5]) == LT
9828 && (operands[4] == const0_rtx))
9829 {
9830 if (which_alternative != 1 && REG_P (operands[1]))
9831 {
9832 if (operands[2] == const0_rtx)
9833 return \"and\\t%0, %1, %3, asr #31\";
9834 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9835 }
9836 else if (which_alternative != 0 && REG_P (operands[2]))
9837 {
9838 if (operands[1] == const0_rtx)
9839 return \"bic\\t%0, %2, %3, asr #31\";
9840 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9841 }
9842 /* The only case that falls through to here is when both ops 1 & 2
9843 are constants. */
9844 }
9845
9846 if (GET_CODE (operands[5]) == GE
9847 && (operands[4] == const0_rtx))
9848 {
9849 if (which_alternative != 1 && REG_P (operands[1]))
9850 {
9851 if (operands[2] == const0_rtx)
9852 return \"bic\\t%0, %1, %3, asr #31\";
9853 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9854 }
9855 else if (which_alternative != 0 && REG_P (operands[2]))
9856 {
9857 if (operands[1] == const0_rtx)
9858 return \"and\\t%0, %2, %3, asr #31\";
9859 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9860 }
9861 /* The only case that falls through to here is when both ops 1 & 2
9862 are constants. */
9863 }
9864 if (CONST_INT_P (operands[4])
9865 && !const_ok_for_arm (INTVAL (operands[4])))
9866 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9867 else
9868 output_asm_insn (\"cmp\\t%3, %4\", operands);
9869 if (which_alternative != 0)
9870 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9871 if (which_alternative != 1)
9872 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9873 return \"\";
9874 "
9875 [(set_attr "conds" "clob")
9876 (set_attr "length" "8,8,12")
9877 (set_attr "type" "multiple")]
9878 )
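;; Illustrative only (register allocation is hypothetical): the LT/GE
;; special cases above use the sign bit as a mask, so for example
;;	int f (int a, int b) { return a < 0 ? b : 0; }
;; can be emitted as a single
;;	and	r0, r1, r0, asr #31
;; instead of a compare followed by two conditional moves.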
9879
9880 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9881
9882 (define_insn "*ifcompare_plus_move"
9883 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9884 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9885 [(match_operand:SI 4 "s_register_operand" "r,r")
9886 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9887 (plus:SI
9888 (match_operand:SI 2 "s_register_operand" "r,r")
9889 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9890 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9891 (clobber (reg:CC CC_REGNUM))]
9892 "TARGET_ARM"
9893 "#"
9894 [(set_attr "conds" "clob")
9895 (set_attr "length" "8,12")
9896 (set_attr "type" "multiple")]
9897 )
9898
9899 (define_insn "*if_plus_move"
9900 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9901 (if_then_else:SI
9902 (match_operator 4 "arm_comparison_operator"
9903 [(match_operand 5 "cc_register" "") (const_int 0)])
9904 (plus:SI
9905 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9906 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9907 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9908 "TARGET_ARM"
9909 "@
9910 add%d4\\t%0, %2, %3
9911 sub%d4\\t%0, %2, #%n3
9912 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9913 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9914 [(set_attr "conds" "use")
9915 (set_attr "length" "4,4,8,8")
9916 (set_attr_alternative "type"
9917 [(if_then_else (match_operand 3 "const_int_operand" "")
9918 (const_string "alu_imm" )
9919 (const_string "alu_sreg"))
9920 (const_string "alu_imm")
9921 (const_string "multiple")
9922 (const_string "multiple")])]
9923 )
9924
9925 (define_insn "*ifcompare_move_plus"
9926 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9927 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9928 [(match_operand:SI 4 "s_register_operand" "r,r")
9929 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9930 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9931 (plus:SI
9932 (match_operand:SI 2 "s_register_operand" "r,r")
9933 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9934 (clobber (reg:CC CC_REGNUM))]
9935 "TARGET_ARM"
9936 "#"
9937 [(set_attr "conds" "clob")
9938 (set_attr "length" "8,12")
9939 (set_attr "type" "multiple")]
9940 )
9941
9942 (define_insn "*if_move_plus"
9943 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9944 (if_then_else:SI
9945 (match_operator 4 "arm_comparison_operator"
9946 [(match_operand 5 "cc_register" "") (const_int 0)])
9947 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9948 (plus:SI
9949 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9950 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9951 "TARGET_ARM"
9952 "@
9953 add%D4\\t%0, %2, %3
9954 sub%D4\\t%0, %2, #%n3
9955 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9956 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9957 [(set_attr "conds" "use")
9958 (set_attr "length" "4,4,8,8")
9959 (set_attr_alternative "type"
9960 [(if_then_else (match_operand 3 "const_int_operand" "")
9961 (const_string "alu_imm" )
9962 (const_string "alu_sreg"))
9963 (const_string "alu_imm")
9964 (const_string "multiple")
9965 (const_string "multiple")])]
9966 )
9967
9968 (define_insn "*ifcompare_arith_arith"
9969 [(set (match_operand:SI 0 "s_register_operand" "=r")
9970 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9971 [(match_operand:SI 5 "s_register_operand" "r")
9972 (match_operand:SI 6 "arm_add_operand" "rIL")])
9973 (match_operator:SI 8 "shiftable_operator"
9974 [(match_operand:SI 1 "s_register_operand" "r")
9975 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9976 (match_operator:SI 7 "shiftable_operator"
9977 [(match_operand:SI 3 "s_register_operand" "r")
9978 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9979 (clobber (reg:CC CC_REGNUM))]
9980 "TARGET_ARM"
9981 "#"
9982 [(set_attr "conds" "clob")
9983 (set_attr "length" "12")
9984 (set_attr "type" "multiple")]
9985 )
9986
9987 (define_insn "*if_arith_arith"
9988 [(set (match_operand:SI 0 "s_register_operand" "=r")
9989 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9990 [(match_operand 8 "cc_register" "") (const_int 0)])
9991 (match_operator:SI 6 "shiftable_operator"
9992 [(match_operand:SI 1 "s_register_operand" "r")
9993 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9994 (match_operator:SI 7 "shiftable_operator"
9995 [(match_operand:SI 3 "s_register_operand" "r")
9996 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9997 "TARGET_ARM"
9998 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9999 [(set_attr "conds" "use")
10000 (set_attr "length" "8")
10001 (set_attr "type" "multiple")]
10002 )
10003
10004 (define_insn "*ifcompare_arith_move"
10005 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10006 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10007 [(match_operand:SI 2 "s_register_operand" "r,r")
10008 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
10009 (match_operator:SI 7 "shiftable_operator"
10010 [(match_operand:SI 4 "s_register_operand" "r,r")
10011 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
10012 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10013 (clobber (reg:CC CC_REGNUM))]
10014 "TARGET_ARM"
10015 "*
10016 /* If we have an operation where (op x 0) is the identity operation,
10017 the conditional operator is LT or GE, we are comparing against zero,
10018 and everything is in registers, then we can do this in two instructions. */
10019 if (operands[3] == const0_rtx
10020 && GET_CODE (operands[7]) != AND
10021 && REG_P (operands[5])
10022 && REG_P (operands[1])
10023 && REGNO (operands[1]) == REGNO (operands[4])
10024 && REGNO (operands[4]) != REGNO (operands[0]))
10025 {
10026 if (GET_CODE (operands[6]) == LT)
10027 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10028 else if (GET_CODE (operands[6]) == GE)
10029 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10030 }
10031 if (CONST_INT_P (operands[3])
10032 && !const_ok_for_arm (INTVAL (operands[3])))
10033 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
10034 else
10035 output_asm_insn (\"cmp\\t%2, %3\", operands);
10036 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
10037 if (which_alternative != 0)
10038 return \"mov%D6\\t%0, %1\";
10039 return \"\";
10040 "
10041 [(set_attr "conds" "clob")
10042 (set_attr "length" "8,12")
10043 (set_attr "type" "multiple")]
10044 )
10045
10046 (define_insn "*if_arith_move"
10047 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10048 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10049 [(match_operand 6 "cc_register" "") (const_int 0)])
10050 (match_operator:SI 5 "shiftable_operator"
10051 [(match_operand:SI 2 "s_register_operand" "r,r")
10052 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10053 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
10054 "TARGET_ARM"
10055 "@
10056 %I5%d4\\t%0, %2, %3
10057 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
10058 [(set_attr "conds" "use")
10059 (set_attr "length" "4,8")
10060 (set_attr_alternative "type"
10061 [(if_then_else (match_operand 3 "const_int_operand" "")
10062 (const_string "alu_shift_imm" )
10063 (const_string "alu_shift_reg"))
10064 (const_string "multiple")])]
10065 )
10066
10067 (define_insn "*ifcompare_move_arith"
10068 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10069 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10070 [(match_operand:SI 4 "s_register_operand" "r,r")
10071 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10072 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10073 (match_operator:SI 7 "shiftable_operator"
10074 [(match_operand:SI 2 "s_register_operand" "r,r")
10075 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10076 (clobber (reg:CC CC_REGNUM))]
10077 "TARGET_ARM"
10078 "*
10079 /* If we have an operation where (op x 0) is the identity operation,
10080 the conditional operator is LT or GE, we are comparing against zero,
10081 and everything is in registers, then we can do this in two instructions. */
10082 if (operands[5] == const0_rtx
10083 && GET_CODE (operands[7]) != AND
10084 && REG_P (operands[3])
10085 && REG_P (operands[1])
10086 && REGNO (operands[1]) == REGNO (operands[2])
10087 && REGNO (operands[2]) != REGNO (operands[0]))
10088 {
10089 if (GET_CODE (operands[6]) == GE)
10090 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10091 else if (GET_CODE (operands[6]) == LT)
10092 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10093 }
10094
10095 if (CONST_INT_P (operands[5])
10096 && !const_ok_for_arm (INTVAL (operands[5])))
10097 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
10098 else
10099 output_asm_insn (\"cmp\\t%4, %5\", operands);
10100
10101 if (which_alternative != 0)
10102 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
10103 return \"%I7%D6\\t%0, %2, %3\";
10104 "
10105 [(set_attr "conds" "clob")
10106 (set_attr "length" "8,12")
10107 (set_attr "type" "multiple")]
10108 )
10109
10110 (define_insn "*if_move_arith"
10111 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10112 (if_then_else:SI
10113 (match_operator 4 "arm_comparison_operator"
10114 [(match_operand 6 "cc_register" "") (const_int 0)])
10115 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10116 (match_operator:SI 5 "shiftable_operator"
10117 [(match_operand:SI 2 "s_register_operand" "r,r")
10118 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
10119 "TARGET_ARM"
10120 "@
10121 %I5%D4\\t%0, %2, %3
10122 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
10123 [(set_attr "conds" "use")
10124 (set_attr "length" "4,8")
10125 (set_attr_alternative "type"
10126 [(if_then_else (match_operand 3 "const_int_operand" "")
10127 (const_string "alu_shift_imm" )
10128 (const_string "alu_shift_reg"))
10129 (const_string "multiple")])]
10130 )
10131
10132 (define_insn "*ifcompare_move_not"
10133 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10134 (if_then_else:SI
10135 (match_operator 5 "arm_comparison_operator"
10136 [(match_operand:SI 3 "s_register_operand" "r,r")
10137 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10138 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10139 (not:SI
10140 (match_operand:SI 2 "s_register_operand" "r,r"))))
10141 (clobber (reg:CC CC_REGNUM))]
10142 "TARGET_ARM"
10143 "#"
10144 [(set_attr "conds" "clob")
10145 (set_attr "length" "8,12")
10146 (set_attr "type" "multiple")]
10147 )
10148
10149 (define_insn "*if_move_not"
10150 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10151 (if_then_else:SI
10152 (match_operator 4 "arm_comparison_operator"
10153 [(match_operand 3 "cc_register" "") (const_int 0)])
10154 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10155 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10156 "TARGET_ARM"
10157 "@
10158 mvn%D4\\t%0, %2
10159 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
10160 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
10161 [(set_attr "conds" "use")
10163 (set_attr "length" "4,8,8")
10164 (set_attr "type" "mvn_reg,multiple,multiple")]
10165 )
10166
10167 (define_insn "*ifcompare_not_move"
10168 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10169 (if_then_else:SI
10170 (match_operator 5 "arm_comparison_operator"
10171 [(match_operand:SI 3 "s_register_operand" "r,r")
10172 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10173 (not:SI
10174 (match_operand:SI 2 "s_register_operand" "r,r"))
10175 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10176 (clobber (reg:CC CC_REGNUM))]
10177 "TARGET_ARM"
10178 "#"
10179 [(set_attr "conds" "clob")
10180 (set_attr "length" "8,12")
10181 (set_attr "type" "multiple")]
10182 )
10183
10184 (define_insn "*if_not_move"
10185 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10186 (if_then_else:SI
10187 (match_operator 4 "arm_comparison_operator"
10188 [(match_operand 3 "cc_register" "") (const_int 0)])
10189 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10190 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10191 "TARGET_ARM"
10192 "@
10193 mvn%d4\\t%0, %2
10194 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
10195 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
10196 [(set_attr "conds" "use")
10197 (set_attr "type" "mvn_reg,multiple,multiple")
10198 (set_attr "length" "4,8,8")]
10199 )
10200
10201 (define_insn "*ifcompare_shift_move"
10202 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10203 (if_then_else:SI
10204 (match_operator 6 "arm_comparison_operator"
10205 [(match_operand:SI 4 "s_register_operand" "r,r")
10206 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10207 (match_operator:SI 7 "shift_operator"
10208 [(match_operand:SI 2 "s_register_operand" "r,r")
10209 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
10210 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10211 (clobber (reg:CC CC_REGNUM))]
10212 "TARGET_ARM"
10213 "#"
10214 [(set_attr "conds" "clob")
10215 (set_attr "length" "8,12")
10216 (set_attr "type" "multiple")]
10217 )
10218
10219 (define_insn "*if_shift_move"
10220 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10221 (if_then_else:SI
10222 (match_operator 5 "arm_comparison_operator"
10223 [(match_operand 6 "cc_register" "") (const_int 0)])
10224 (match_operator:SI 4 "shift_operator"
10225 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10226 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
10227 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10228 "TARGET_ARM"
10229 "@
10230 mov%d5\\t%0, %2%S4
10231 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
10232 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
10233 [(set_attr "conds" "use")
10234 (set_attr "shift" "2")
10235 (set_attr "length" "4,8,8")
10236 (set_attr_alternative "type"
10237 [(if_then_else (match_operand 3 "const_int_operand" "")
10238 (const_string "mov_shift" )
10239 (const_string "mov_shift_reg"))
10240 (const_string "multiple")
10241 (const_string "multiple")])]
10242 )
10243
10244 (define_insn "*ifcompare_move_shift"
10245 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10246 (if_then_else:SI
10247 (match_operator 6 "arm_comparison_operator"
10248 [(match_operand:SI 4 "s_register_operand" "r,r")
10249 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10250 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10251 (match_operator:SI 7 "shift_operator"
10252 [(match_operand:SI 2 "s_register_operand" "r,r")
10253 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
10254 (clobber (reg:CC CC_REGNUM))]
10255 "TARGET_ARM"
10256 "#"
10257 [(set_attr "conds" "clob")
10258 (set_attr "length" "8,12")
10259 (set_attr "type" "multiple")]
10260 )
10261
10262 (define_insn "*if_move_shift"
10263 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10264 (if_then_else:SI
10265 (match_operator 5 "arm_comparison_operator"
10266 [(match_operand 6 "cc_register" "") (const_int 0)])
10267 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10268 (match_operator:SI 4 "shift_operator"
10269 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10270 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10271 "TARGET_ARM"
10272 "@
10273 mov%D5\\t%0, %2%S4
10274 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10275 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10276 [(set_attr "conds" "use")
10277 (set_attr "shift" "2")
10278 (set_attr "length" "4,8,8")
10279 (set_attr_alternative "type"
10280 [(if_then_else (match_operand 3 "const_int_operand" "")
10281 (const_string "mov_shift" )
10282 (const_string "mov_shift_reg"))
10283 (const_string "multiple")
10284 (const_string "multiple")])]
10285 )
10286
10287 (define_insn "*ifcompare_shift_shift"
10288 [(set (match_operand:SI 0 "s_register_operand" "=r")
10289 (if_then_else:SI
10290 (match_operator 7 "arm_comparison_operator"
10291 [(match_operand:SI 5 "s_register_operand" "r")
10292 (match_operand:SI 6 "arm_add_operand" "rIL")])
10293 (match_operator:SI 8 "shift_operator"
10294 [(match_operand:SI 1 "s_register_operand" "r")
10295 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10296 (match_operator:SI 9 "shift_operator"
10297 [(match_operand:SI 3 "s_register_operand" "r")
10298 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10299 (clobber (reg:CC CC_REGNUM))]
10300 "TARGET_ARM"
10301 "#"
10302 [(set_attr "conds" "clob")
10303 (set_attr "length" "12")
10304 (set_attr "type" "multiple")]
10305 )
10306
10307 (define_insn "*if_shift_shift"
10308 [(set (match_operand:SI 0 "s_register_operand" "=r")
10309 (if_then_else:SI
10310 (match_operator 5 "arm_comparison_operator"
10311 [(match_operand 8 "cc_register" "") (const_int 0)])
10312 (match_operator:SI 6 "shift_operator"
10313 [(match_operand:SI 1 "s_register_operand" "r")
10314 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10315 (match_operator:SI 7 "shift_operator"
10316 [(match_operand:SI 3 "s_register_operand" "r")
10317 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10318 "TARGET_ARM"
10319 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10320 [(set_attr "conds" "use")
10321 (set_attr "shift" "1")
10322 (set_attr "length" "8")
10323 (set (attr "type") (if_then_else
10324 (and (match_operand 2 "const_int_operand" "")
10325 (match_operand 4 "const_int_operand" ""))
10326 (const_string "mov_shift")
10327 (const_string "mov_shift_reg")))]
10328 )
10329
10330 (define_insn "*ifcompare_not_arith"
10331 [(set (match_operand:SI 0 "s_register_operand" "=r")
10332 (if_then_else:SI
10333 (match_operator 6 "arm_comparison_operator"
10334 [(match_operand:SI 4 "s_register_operand" "r")
10335 (match_operand:SI 5 "arm_add_operand" "rIL")])
10336 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10337 (match_operator:SI 7 "shiftable_operator"
10338 [(match_operand:SI 2 "s_register_operand" "r")
10339 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10340 (clobber (reg:CC CC_REGNUM))]
10341 "TARGET_ARM"
10342 "#"
10343 [(set_attr "conds" "clob")
10344 (set_attr "length" "12")
10345 (set_attr "type" "multiple")]
10346 )
10347
10348 (define_insn "*if_not_arith"
10349 [(set (match_operand:SI 0 "s_register_operand" "=r")
10350 (if_then_else:SI
10351 (match_operator 5 "arm_comparison_operator"
10352 [(match_operand 4 "cc_register" "") (const_int 0)])
10353 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10354 (match_operator:SI 6 "shiftable_operator"
10355 [(match_operand:SI 2 "s_register_operand" "r")
10356 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10357 "TARGET_ARM"
10358 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10359 [(set_attr "conds" "use")
10360 (set_attr "type" "mvn_reg")
10361 (set_attr "length" "8")]
10362 )
10363
10364 (define_insn "*ifcompare_arith_not"
10365 [(set (match_operand:SI 0 "s_register_operand" "=r")
10366 (if_then_else:SI
10367 (match_operator 6 "arm_comparison_operator"
10368 [(match_operand:SI 4 "s_register_operand" "r")
10369 (match_operand:SI 5 "arm_add_operand" "rIL")])
10370 (match_operator:SI 7 "shiftable_operator"
10371 [(match_operand:SI 2 "s_register_operand" "r")
10372 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10373 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10374 (clobber (reg:CC CC_REGNUM))]
10375 "TARGET_ARM"
10376 "#"
10377 [(set_attr "conds" "clob")
10378 (set_attr "length" "12")
10379 (set_attr "type" "multiple")]
10380 )
10381
10382 (define_insn "*if_arith_not"
10383 [(set (match_operand:SI 0 "s_register_operand" "=r")
10384 (if_then_else:SI
10385 (match_operator 5 "arm_comparison_operator"
10386 [(match_operand 4 "cc_register" "") (const_int 0)])
10387 (match_operator:SI 6 "shiftable_operator"
10388 [(match_operand:SI 2 "s_register_operand" "r")
10389 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10390 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10391 "TARGET_ARM"
10392 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10393 [(set_attr "conds" "use")
10394 (set_attr "type" "multiple")
10395 (set_attr "length" "8")]
10396 )
10397
10398 (define_insn "*ifcompare_neg_move"
10399 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10400 (if_then_else:SI
10401 (match_operator 5 "arm_comparison_operator"
10402 [(match_operand:SI 3 "s_register_operand" "r,r")
10403 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10404 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10405 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10406 (clobber (reg:CC CC_REGNUM))]
10407 "TARGET_ARM"
10408 "#"
10409 [(set_attr "conds" "clob")
10410 (set_attr "length" "8,12")
10411 (set_attr "type" "multiple")]
10412 )
10413
10414 (define_insn_and_split "*if_neg_move"
10415 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
10416 (if_then_else:SI
10417 (match_operator 4 "arm_comparison_operator"
10418 [(match_operand 3 "cc_register" "") (const_int 0)])
10419 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
10420 (match_operand:SI 1 "s_register_operand" "0,0")))]
10421 "TARGET_32BIT"
10422 "#"
10423 "&& reload_completed"
10424 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
10425 (set (match_dup 0) (neg:SI (match_dup 2))))]
10426 ""
10427 [(set_attr "conds" "use")
10428 (set_attr "length" "4")
10429 (set_attr "arch" "t2,32")
10430 (set_attr "enabled_for_short_it" "yes,no")
10431 (set_attr "type" "logic_shift_imm")]
10432 )
10433
10434 (define_insn "*ifcompare_move_neg"
10435 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10436 (if_then_else:SI
10437 (match_operator 5 "arm_comparison_operator"
10438 [(match_operand:SI 3 "s_register_operand" "r,r")
10439 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10440 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10441 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10442 (clobber (reg:CC CC_REGNUM))]
10443 "TARGET_ARM"
10444 "#"
10445 [(set_attr "conds" "clob")
10446 (set_attr "length" "8,12")
10447 (set_attr "type" "multiple")]
10448 )
10449
10450 (define_insn_and_split "*if_move_neg"
10451 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
10452 (if_then_else:SI
10453 (match_operator 4 "arm_comparison_operator"
10454 [(match_operand 3 "cc_register" "") (const_int 0)])
10455 (match_operand:SI 1 "s_register_operand" "0,0")
10456 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
10457 "TARGET_32BIT"
10458 "#"
10459 "&& reload_completed"
10460 [(cond_exec (match_dup 5)
10461 (set (match_dup 0) (neg:SI (match_dup 2))))]
10462 {
10463 machine_mode mode = GET_MODE (operands[3]);
10464 rtx_code rc = GET_CODE (operands[4]);
10465
10466 if (mode == CCFPmode || mode == CCFPEmode)
10467 rc = reverse_condition_maybe_unordered (rc);
10468 else
10469 rc = reverse_condition (rc);
10470
10471 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
10472 }
10473 [(set_attr "conds" "use")
10474 (set_attr "length" "4")
10475 (set_attr "arch" "t2,32")
10476 (set_attr "enabled_for_short_it" "yes,no")
10477 (set_attr "type" "logic_shift_imm")]
10478 )
10479
10480 (define_insn "*arith_adjacentmem"
10481 [(set (match_operand:SI 0 "s_register_operand" "=r")
10482 (match_operator:SI 1 "shiftable_operator"
10483 [(match_operand:SI 2 "memory_operand" "m")
10484 (match_operand:SI 3 "memory_operand" "m")]))
10485 (clobber (match_scratch:SI 4 "=r"))]
10486 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10487 "*
10488 {
10489 rtx ldm[3];
10490 rtx arith[4];
10491 rtx base_reg;
10492 HOST_WIDE_INT val1 = 0, val2 = 0;
10493
10494 if (REGNO (operands[0]) > REGNO (operands[4]))
10495 {
10496 ldm[1] = operands[4];
10497 ldm[2] = operands[0];
10498 }
10499 else
10500 {
10501 ldm[1] = operands[0];
10502 ldm[2] = operands[4];
10503 }
10504
10505 base_reg = XEXP (operands[2], 0);
10506
10507 if (!REG_P (base_reg))
10508 {
10509 val1 = INTVAL (XEXP (base_reg, 1));
10510 base_reg = XEXP (base_reg, 0);
10511 }
10512
10513 if (!REG_P (XEXP (operands[3], 0)))
10514 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10515
10516 arith[0] = operands[0];
10517 arith[3] = operands[1];
10518
10519 if (val1 < val2)
10520 {
10521 arith[1] = ldm[1];
10522 arith[2] = ldm[2];
10523 }
10524 else
10525 {
10526 arith[1] = ldm[2];
10527 arith[2] = ldm[1];
10528 }
10529
10530 ldm[0] = base_reg;
10531 if (val1 != 0 && val2 != 0)
10532 {
10533 rtx ops[3];
10534
10535 if (val1 == 4 || val2 == 4)
10536 /* Other val must be 8, since we know they are adjacent and neither
10537 is zero. */
10538 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
10539 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10540 {
10541 ldm[0] = ops[0] = operands[4];
10542 ops[1] = base_reg;
10543 ops[2] = GEN_INT (val1);
10544 output_add_immediate (ops);
10545 if (val1 < val2)
10546 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10547 else
10548 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10549 }
10550 else
10551 {
10552 /* Offset is out of range for a single add, so use two ldr. */
10553 ops[0] = ldm[1];
10554 ops[1] = base_reg;
10555 ops[2] = GEN_INT (val1);
10556 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10557 ops[0] = ldm[2];
10558 ops[2] = GEN_INT (val2);
10559 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10560 }
10561 }
10562 else if (val1 != 0)
10563 {
10564 if (val1 < val2)
10565 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10566 else
10567 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10568 }
10569 else
10570 {
10571 if (val1 < val2)
10572 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10573 else
10574 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10575 }
10576 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10577 return \"\";
10578 }"
10579 [(set_attr "length" "12")
10580 (set_attr "predicable" "yes")
10581 (set_attr "type" "load_4")]
10582 )
10583
10584 ; This pattern is never tried by combine, so do it as a peephole
10585
10586 (define_peephole2
10587 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10588 (match_operand:SI 1 "arm_general_register_operand" ""))
10589 (set (reg:CC CC_REGNUM)
10590 (compare:CC (match_dup 1) (const_int 0)))]
10591 "TARGET_ARM"
10592 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10593 (set (match_dup 0) (match_dup 1))])]
10594 ""
10595 )
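
;; As a rough illustration of the peephole above (register numbers are
;; arbitrary), the two-insn sequence
;;	mov	r0, r1
;;	cmp	r1, #0
;; is re-paired so that the compare-and-move patterns can emit a single
;; flag-setting instruction for it, e.g. "subs r0, r1, #0".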
10596
10597 (define_split
10598 [(set (match_operand:SI 0 "s_register_operand" "")
10599 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10600 (const_int 0))
10601 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10602 [(match_operand:SI 3 "s_register_operand" "")
10603 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10604 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10605 "TARGET_ARM"
10606 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10607 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10608 (match_dup 5)))]
10609 ""
10610 )
10611
10612 ;; This split can be used because CC_Z mode implies that the following
10613 ;; branch will be an equality, or an unsigned inequality, so the sign
10614 ;; extension is not needed.
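;; For example (illustrative only; register numbers are arbitrary), a test
;; of the form (*p << 24) == 0x2a000000 can be emitted as
;;	ldrb	r3, [r0]
;;	cmp	r3, #42
;; i.e. a zero-extending load compared against the constant shifted down
;; by 24 bits.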
10615
10616 (define_split
10617 [(set (reg:CC_Z CC_REGNUM)
10618 (compare:CC_Z
10619 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10620 (const_int 24))
10621 (match_operand 1 "const_int_operand" "")))
10622 (clobber (match_scratch:SI 2 ""))]
10623 "TARGET_ARM
10624 && ((UINTVAL (operands[1]))
10625 == ((UINTVAL (operands[1])) >> 24) << 24)"
10626 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10627 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10628 "
10629 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10630 "
10631 )
10632 ;; ??? Check the patterns above for Thumb-2 usefulness
10633
10634 (define_expand "prologue"
10635 [(clobber (const_int 0))]
10636 "TARGET_EITHER"
10637 "if (TARGET_32BIT)
10638 arm_expand_prologue ();
10639 else
10640 thumb1_expand_prologue ();
10641 DONE;
10642 "
10643 )
10644
10645 (define_expand "epilogue"
10646 [(clobber (const_int 0))]
10647 "TARGET_EITHER"
10648 "
10649 if (crtl->calls_eh_return)
10650 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
10651 if (TARGET_THUMB1)
10652 {
10653 thumb1_expand_epilogue ();
10654 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10655 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10656 }
10657 else if (HAVE_return)
10658 {
10659 /* HAVE_return is testing for USE_RETURN_INSN (FALSE). Hence,
10660 no need for explicit testing again. */
10661 emit_jump_insn (gen_return ());
10662 }
10663 else if (TARGET_32BIT)
10664 {
10665 arm_expand_epilogue (true);
10666 }
10667 DONE;
10668 "
10669 )
10670
10671 ;; Note - although unspec_volatiles USE all hard registers,
10672 ;; USEs are ignored after reload has completed.  Thus we need
10673 ;; to add an unspec of the link register to ensure that flow
10674 ;; does not think that it is unused by the sibcall branch that
10675 ;; will replace the standard function epilogue.
10676 (define_expand "sibcall_epilogue"
10677 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
10678 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10679 "TARGET_32BIT"
10680 "
10681 arm_expand_epilogue (false);
10682 DONE;
10683 "
10684 )
10685
10686 (define_expand "eh_epilogue"
10687 [(use (match_operand:SI 0 "register_operand"))
10688 (use (match_operand:SI 1 "register_operand"))
10689 (use (match_operand:SI 2 "register_operand"))]
10690 "TARGET_EITHER"
10691 "
10692 {
10693 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10694 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
10695 {
10696 rtx ra = gen_rtx_REG (Pmode, 2);
10697
10698 emit_move_insn (ra, operands[2]);
10699 operands[2] = ra;
10700 }
10701 /* This is a hack -- we may have crystallized the function type too
10702 early. */
10703 cfun->machine->func_type = 0;
10704 }"
10705 )
10706
10707 ;; This split is only used during output to reduce the number of patterns
10708 ;; that need assembler instructions added to them.  We allowed the setting
10709 ;; of the conditions to be implicit during rtl generation so that
10710 ;; the conditional compare patterns would work. However this conflicts to
10711 ;; some extent with the conditional data operations, so we have to split them
10712 ;; up again here.
10713
10714 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10715 ;; conditional execution sufficient?
10716
10717 (define_split
10718 [(set (match_operand:SI 0 "s_register_operand" "")
10719 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10720 [(match_operand 2 "" "") (match_operand 3 "" "")])
10721 (match_dup 0)
10722 (match_operand 4 "" "")))
10723 (clobber (reg:CC CC_REGNUM))]
10724 "TARGET_ARM && reload_completed"
10725 [(set (match_dup 5) (match_dup 6))
10726 (cond_exec (match_dup 7)
10727 (set (match_dup 0) (match_dup 4)))]
10728 "
10729 {
10730 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10731 operands[2], operands[3]);
10732 enum rtx_code rc = GET_CODE (operands[1]);
10733
10734 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10735 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10736 if (mode == CCFPmode || mode == CCFPEmode)
10737 rc = reverse_condition_maybe_unordered (rc);
10738 else
10739 rc = reverse_condition (rc);
10740
10741 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10742 }"
10743 )
10744
10745 (define_split
10746 [(set (match_operand:SI 0 "s_register_operand" "")
10747 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10748 [(match_operand 2 "" "") (match_operand 3 "" "")])
10749 (match_operand 4 "" "")
10750 (match_dup 0)))
10751 (clobber (reg:CC CC_REGNUM))]
10752 "TARGET_ARM && reload_completed"
10753 [(set (match_dup 5) (match_dup 6))
10754 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10755 (set (match_dup 0) (match_dup 4)))]
10756 "
10757 {
10758 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10759 operands[2], operands[3]);
10760
10761 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10762 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10763 }"
10764 )
10765
10766 (define_split
10767 [(set (match_operand:SI 0 "s_register_operand" "")
10768 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10769 [(match_operand 2 "" "") (match_operand 3 "" "")])
10770 (match_operand 4 "" "")
10771 (match_operand 5 "" "")))
10772 (clobber (reg:CC CC_REGNUM))]
10773 "TARGET_ARM && reload_completed"
10774 [(set (match_dup 6) (match_dup 7))
10775 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10776 (set (match_dup 0) (match_dup 4)))
10777 (cond_exec (match_dup 8)
10778 (set (match_dup 0) (match_dup 5)))]
10779 "
10780 {
10781 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10782 operands[2], operands[3]);
10783 enum rtx_code rc = GET_CODE (operands[1]);
10784
10785 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10786 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10787 if (mode == CCFPmode || mode == CCFPEmode)
10788 rc = reverse_condition_maybe_unordered (rc);
10789 else
10790 rc = reverse_condition (rc);
10791
10792 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10793 }"
10794 )
10795
10796 (define_split
10797 [(set (match_operand:SI 0 "s_register_operand" "")
10798 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10799 [(match_operand:SI 2 "s_register_operand" "")
10800 (match_operand:SI 3 "arm_add_operand" "")])
10801 (match_operand:SI 4 "arm_rhs_operand" "")
10802 (not:SI
10803 (match_operand:SI 5 "s_register_operand" ""))))
10804 (clobber (reg:CC CC_REGNUM))]
10805 "TARGET_ARM && reload_completed"
10806 [(set (match_dup 6) (match_dup 7))
10807 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10808 (set (match_dup 0) (match_dup 4)))
10809 (cond_exec (match_dup 8)
10810 (set (match_dup 0) (not:SI (match_dup 5))))]
10811 "
10812 {
10813 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10814 operands[2], operands[3]);
10815 enum rtx_code rc = GET_CODE (operands[1]);
10816
10817 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10818 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10819 if (mode == CCFPmode || mode == CCFPEmode)
10820 rc = reverse_condition_maybe_unordered (rc);
10821 else
10822 rc = reverse_condition (rc);
10823
10824 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10825 }"
10826 )
10827
10828 (define_insn "*cond_move_not"
10829 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10830 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10831 [(match_operand 3 "cc_register" "") (const_int 0)])
10832 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10833 (not:SI
10834 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10835 "TARGET_ARM"
10836 "@
10837 mvn%D4\\t%0, %2
10838 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10839 [(set_attr "conds" "use")
10840 (set_attr "type" "mvn_reg,multiple")
10841 (set_attr "length" "4,8")]
10842 )
10843
10844 ;; The next two patterns occur when an AND operation is followed by an
10845 ;; scc insn sequence.
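;; For instance (illustrative only; register numbers and the bit position
;; are arbitrary), computing "(r1 & (1 << 3)) ? -1 : 0" into r0 is emitted
;; by the first pattern below as
;;	ands	r0, r1, #8
;;	mvnne	r0, #0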
10846
10847 (define_insn "*sign_extract_onebit"
10848 [(set (match_operand:SI 0 "s_register_operand" "=r")
10849 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10850 (const_int 1)
10851 (match_operand:SI 2 "const_int_operand" "n")))
10852 (clobber (reg:CC CC_REGNUM))]
10853 "TARGET_ARM"
10854 "*
10855 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10856 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10857 return \"mvnne\\t%0, #0\";
10858 "
10859 [(set_attr "conds" "clob")
10860 (set_attr "length" "8")
10861 (set_attr "type" "multiple")]
10862 )
10863
10864 (define_insn "*not_signextract_onebit"
10865 [(set (match_operand:SI 0 "s_register_operand" "=r")
10866 (not:SI
10867 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10868 (const_int 1)
10869 (match_operand:SI 2 "const_int_operand" "n"))))
10870 (clobber (reg:CC CC_REGNUM))]
10871 "TARGET_ARM"
10872 "*
10873 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10874 output_asm_insn (\"tst\\t%1, %2\", operands);
10875 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10876 return \"movne\\t%0, #0\";
10877 "
10878 [(set_attr "conds" "clob")
10879 (set_attr "length" "12")
10880 (set_attr "type" "multiple")]
10881 )
10882 ;; ??? The above patterns need auditing for Thumb-2
10883
10884 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10885 ;; expressions. For simplicity, the first register is also in the unspec
10886 ;; part.
10887 ;; To avoid using a GNU extension, the length attribute is computed
10888 ;; in a C function arm_attr_length_push_multi.
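;; As a rough illustration (the register list is arbitrary), saving r4, r5
;; and lr produces
;;	push	{r4, r5, lr}
;; while a single-register save in ARM state is emitted as
;;	str	r4, [sp, #-4]!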
10889 (define_insn "*push_multi"
10890 [(match_parallel 2 "multi_register_push"
10891 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
10892 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10893 UNSPEC_PUSH_MULT))])]
10894 ""
10895 "*
10896 {
10897 int num_saves = XVECLEN (operands[2], 0);
10898
10899 /* For the StrongARM at least it is faster to
10900 use STR to store only a single register.
10901 In Thumb mode always use push, and the assembler will pick
10902 something appropriate. */
10903 if (num_saves == 1 && TARGET_ARM)
10904 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
10905 else
10906 {
10907 int i;
10908 char pattern[100];
10909
10910 if (TARGET_32BIT)
10911 strcpy (pattern, \"push%?\\t{%1\");
10912 else
10913 strcpy (pattern, \"push\\t{%1\");
10914
10915 for (i = 1; i < num_saves; i++)
10916 {
10917 strcat (pattern, \", %|\");
10918 strcat (pattern,
10919 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10920 }
10921
10922 strcat (pattern, \"}\");
10923 output_asm_insn (pattern, operands);
10924 }
10925
10926 return \"\";
10927 }"
10928 [(set_attr "type" "store_16")
10929 (set (attr "length")
10930 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
10931 )
10932
10933 (define_insn "stack_tie"
10934 [(set (mem:BLK (scratch))
10935 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10936 (match_operand:SI 1 "s_register_operand" "rk")]
10937 UNSPEC_PRLG_STK))]
10938 ""
10939 ""
10940 [(set_attr "length" "0")
10941 (set_attr "type" "block")]
10942 )
10943
10944 ;; Pop (as used in epilogue RTL)
10945 ;;
10946 (define_insn "*load_multiple_with_writeback"
10947 [(match_parallel 0 "load_multiple_operation"
10948 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10949 (plus:SI (match_dup 1)
10950 (match_operand:SI 2 "const_int_I_operand" "I")))
10951 (set (match_operand:SI 3 "s_register_operand" "=rk")
10952 (mem:SI (match_dup 1)))
10953 ])]
10954 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10955 "*
10956 {
10957 arm_output_multireg_pop (operands, /*return_pc=*/false,
10958 /*cond=*/const_true_rtx,
10959 /*reverse=*/false,
10960 /*update=*/true);
10961 return \"\";
10962 }
10963 "
10964 [(set_attr "type" "load_16")
10965 (set_attr "predicable" "yes")
10966 (set (attr "length")
10967 (symbol_ref "arm_attr_length_pop_multi (operands,
10968 /*return_pc=*/false,
10969 /*write_back_p=*/true)"))]
10970 )
10971
10972 ;; Pop with return (as used in epilogue RTL)
10973 ;;
10974 ;; This instruction is generated when the registers are popped at the end of
10975 ;; the epilogue.  Here, instead of popping the value into LR and then
10976 ;; generating a jump to LR, the value is popped directly into PC.  Hence, the
10977 ;; pattern is combined with (return).
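;; As an illustration (the register list is arbitrary), a typical instance is
;;	pop	{r4, r5, pc}
;; which restores the saved registers and returns in one instruction.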
10978 (define_insn "*pop_multiple_with_writeback_and_return"
10979 [(match_parallel 0 "pop_multiple_return"
10980 [(return)
10981 (set (match_operand:SI 1 "s_register_operand" "+rk")
10982 (plus:SI (match_dup 1)
10983 (match_operand:SI 2 "const_int_I_operand" "I")))
10984 (set (match_operand:SI 3 "s_register_operand" "=rk")
10985 (mem:SI (match_dup 1)))
10986 ])]
10987 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10988 "*
10989 {
10990 arm_output_multireg_pop (operands, /*return_pc=*/true,
10991 /*cond=*/const_true_rtx,
10992 /*reverse=*/false,
10993 /*update=*/true);
10994 return \"\";
10995 }
10996 "
10997 [(set_attr "type" "load_16")
10998 (set_attr "predicable" "yes")
10999 (set (attr "length")
11000 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11001 /*write_back_p=*/true)"))]
11002 )
11003
11004 (define_insn "*pop_multiple_with_return"
11005 [(match_parallel 0 "pop_multiple_return"
11006 [(return)
11007 (set (match_operand:SI 2 "s_register_operand" "=rk")
11008 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11009 ])]
11010 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11011 "*
11012 {
11013 arm_output_multireg_pop (operands, /*return_pc=*/true,
11014 /*cond=*/const_true_rtx,
11015 /*reverse=*/false,
11016 /*update=*/false);
11017 return \"\";
11018 }
11019 "
11020 [(set_attr "type" "load_16")
11021 (set_attr "predicable" "yes")
11022 (set (attr "length")
11023 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11024 /*write_back_p=*/false)"))]
11025 )
11026
11027 ;; Load into PC and return
11028 (define_insn "*ldr_with_return"
11029 [(return)
11030 (set (reg:SI PC_REGNUM)
11031 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
11032 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11033 "ldr%?\t%|pc, [%0], #4"
11034 [(set_attr "type" "load_4")
11035 (set_attr "predicable" "yes")]
11036 )
11037 ;; Pop for floating point registers (as used in epilogue RTL)
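;; A rough example of the output (the exact register range depends on which
;; registers were saved):
;;	vldm	sp!, {d8-d15}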
11038 (define_insn "*vfp_pop_multiple_with_writeback"
11039 [(match_parallel 0 "pop_multiple_fp"
11040 [(set (match_operand:SI 1 "s_register_operand" "+rk")
11041 (plus:SI (match_dup 1)
11042 (match_operand:SI 2 "const_int_I_operand" "I")))
11043 (set (match_operand:DF 3 "vfp_hard_register_operand" "")
11044 (mem:DF (match_dup 1)))])]
11045 "TARGET_32BIT && TARGET_HARD_FLOAT"
11046 "*
11047 {
11048 int num_regs = XVECLEN (operands[0], 0);
11049 char pattern[100];
11050 rtx op_list[2];
11051 strcpy (pattern, \"vldm\\t\");
11052 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
11053 strcat (pattern, \"!, {\");
11054 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
11055 strcat (pattern, \"%P0\");
11056 if ((num_regs - 1) > 1)
11057 {
11058 strcat (pattern, \"-%P1\");
11059 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
11060 }
11061
11062 strcat (pattern, \"}\");
11063 output_asm_insn (pattern, op_list);
11064 return \"\";
11065 }
11066 "
11067 [(set_attr "type" "load_16")
11068 (set_attr "conds" "unconditional")
11069 (set_attr "predicable" "no")]
11070 )
11071
11072 ;; Special patterns for dealing with the constant pool
11073
11074 (define_insn "align_4"
11075 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
11076 "TARGET_EITHER"
11077 "*
11078 assemble_align (32);
11079 return \"\";
11080 "
11081 [(set_attr "type" "no_insn")]
11082 )
11083
11084 (define_insn "align_8"
11085 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
11086 "TARGET_EITHER"
11087 "*
11088 assemble_align (64);
11089 return \"\";
11090 "
11091 [(set_attr "type" "no_insn")]
11092 )
11093
11094 (define_insn "consttable_end"
11095 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
11096 "TARGET_EITHER"
11097 "*
11098 making_const_table = FALSE;
11099 return \"\";
11100 "
11101 [(set_attr "type" "no_insn")]
11102 )
11103
11104 (define_insn "consttable_1"
11105 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
11106 "TARGET_EITHER"
11107 "*
11108 making_const_table = TRUE;
11109 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
11110 assemble_zeros (3);
11111 return \"\";
11112 "
11113 [(set_attr "length" "4")
11114 (set_attr "type" "no_insn")]
11115 )
11116
11117 (define_insn "consttable_2"
11118 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
11119 "TARGET_EITHER"
11120 "*
11121 {
11122 rtx x = operands[0];
11123 making_const_table = TRUE;
11124 switch (GET_MODE_CLASS (GET_MODE (x)))
11125 {
11126 case MODE_FLOAT:
11127 arm_emit_fp16_const (x);
11128 break;
11129 default:
11130 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
11131 assemble_zeros (2);
11132 break;
11133 }
11134 return \"\";
11135 }"
11136 [(set_attr "length" "4")
11137 (set_attr "type" "no_insn")]
11138 )
11139
11140 (define_insn "consttable_4"
11141 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
11142 "TARGET_EITHER"
11143 "*
11144 {
11145 rtx x = operands[0];
11146 making_const_table = TRUE;
11147 scalar_float_mode float_mode;
11148 if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
11149 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
11150 else
11151 {
11152 /* XXX: Sometimes gcc does something really dumb and ends up with
11153 a HIGH in a constant pool entry, usually because it's trying to
11154 load into a VFP register. We know this will always be used in
11155 combination with a LO_SUM which ignores the high bits, so just
11156 strip off the HIGH. */
11157 if (GET_CODE (x) == HIGH)
11158 x = XEXP (x, 0);
11159 assemble_integer (x, 4, BITS_PER_WORD, 1);
11160 mark_symbol_refs_as_used (x);
11161 }
11162 return \"\";
11163 }"
11164 [(set_attr "length" "4")
11165 (set_attr "type" "no_insn")]
11166 )
11167
11168 (define_insn "consttable_8"
11169 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
11170 "TARGET_EITHER"
11171 "*
11172 {
11173 making_const_table = TRUE;
11174 scalar_float_mode float_mode;
11175 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
11176 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
11177 float_mode, BITS_PER_WORD);
11178 else
11179 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
11180 return \"\";
11181 }"
11182 [(set_attr "length" "8")
11183 (set_attr "type" "no_insn")]
11184 )
11185
11186 (define_insn "consttable_16"
11187 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
11188 "TARGET_EITHER"
11189 "*
11190 {
11191 making_const_table = TRUE;
11192 scalar_float_mode float_mode;
11193 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
11194 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
11195 float_mode, BITS_PER_WORD);
11196 else
11197 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
11198 return \"\";
11199 }"
11200 [(set_attr "length" "16")
11201 (set_attr "type" "no_insn")]
11202 )
11203
11204 ;; V5 instructions.
11205
11206 (define_insn "clzsi2"
11207 [(set (match_operand:SI 0 "s_register_operand" "=r")
11208 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11209 "TARGET_32BIT && arm_arch5t"
11210 "clz%?\\t%0, %1"
11211 [(set_attr "predicable" "yes")
11212 (set_attr "type" "clz")])
11213
11214 (define_insn "rbitsi2"
11215 [(set (match_operand:SI 0 "s_register_operand" "=r")
11216 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
11217 "TARGET_32BIT && arm_arch_thumb2"
11218 "rbit%?\\t%0, %1"
11219 [(set_attr "predicable" "yes")
11220 (set_attr "type" "clz")])
11221
11222 ;; Keep this as a CTZ expression until after reload and then split
11223 ;; into RBIT + CLZ. Since RBIT is represented as an UNSPEC it is unlikely
11224 ;; to fold with any other expression.
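;; After the split, ctz (r1) is thus emitted as (register numbers are
;; arbitrary):
;;	rbit	r0, r1
;;	clz	r0, r0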
11225
11226 (define_insn_and_split "ctzsi2"
11227 [(set (match_operand:SI 0 "s_register_operand" "=r")
11228 (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11229 "TARGET_32BIT && arm_arch_thumb2"
11230 "#"
11231 "&& reload_completed"
11232 [(const_int 0)]
11233 "
11234 emit_insn (gen_rbitsi2 (operands[0], operands[1]));
11235 emit_insn (gen_clzsi2 (operands[0], operands[0]));
11236 DONE;
11237 ")
11238
11239 ;; V5E instructions.
11240
11241 (define_insn "prefetch"
11242 [(prefetch (match_operand:SI 0 "address_operand" "p")
11243 (match_operand:SI 1 "" "")
11244 (match_operand:SI 2 "" ""))]
11245 "TARGET_32BIT && arm_arch5te"
11246 "pld\\t%a0"
11247 [(set_attr "type" "load_4")]
11248 )
11249
11250 ;; General predication pattern
11251
11252 (define_cond_exec
11253 [(match_operator 0 "arm_comparison_operator"
11254 [(match_operand 1 "cc_register" "")
11255 (const_int 0)])]
11256 "TARGET_32BIT
11257 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
11258 ""
11259 [(set_attr "predicated" "yes")]
11260 )
11261
11262 (define_insn "force_register_use"
11263 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
11264 ""
11265 "%@ %0 needed"
11266 [(set_attr "length" "0")
11267 (set_attr "type" "no_insn")]
11268 )
11269
11270
11271 ;; Patterns for exception handling
11272
11273 (define_expand "eh_return"
11274 [(use (match_operand 0 "general_operand"))]
11275 "TARGET_EITHER"
11276 "
11277 {
11278 if (TARGET_32BIT)
11279 emit_insn (gen_arm_eh_return (operands[0]));
11280 else
11281 emit_insn (gen_thumb_eh_return (operands[0]));
11282 DONE;
11283 }"
11284 )
11285
11286 ;; We can't expand this before we know where the link register is stored.
11287 (define_insn_and_split "arm_eh_return"
11288 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
11289 VUNSPEC_EH_RETURN)
11290 (clobber (match_scratch:SI 1 "=&r"))]
11291 "TARGET_ARM"
11292 "#"
11293 "&& reload_completed"
11294 [(const_int 0)]
11295 "
11296 {
11297 arm_set_return_address (operands[0], operands[1]);
11298 DONE;
11299 }"
11300 )
11301
11302 \f
11303 ;; TLS support
11304
11305 (define_insn "load_tp_hard"
11306 [(set (match_operand:SI 0 "register_operand" "=r")
11307 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
11308 "TARGET_HARD_TP"
11309 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
11310 [(set_attr "predicable" "yes")
11311 (set_attr "type" "mrs")]
11312 )
11313
11314 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11315 (define_insn "load_tp_soft_fdpic"
11316 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11317 (clobber (reg:SI FDPIC_REGNUM))
11318 (clobber (reg:SI LR_REGNUM))
11319 (clobber (reg:SI IP_REGNUM))
11320 (clobber (reg:CC CC_REGNUM))]
11321 "TARGET_SOFT_TP && TARGET_FDPIC"
11322 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11323 [(set_attr "conds" "clob")
11324 (set_attr "type" "branch")]
11325 )
11326
11327 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11328 (define_insn "load_tp_soft"
11329 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11330 (clobber (reg:SI LR_REGNUM))
11331 (clobber (reg:SI IP_REGNUM))
11332 (clobber (reg:CC CC_REGNUM))]
11333 "TARGET_SOFT_TP && !TARGET_FDPIC"
11334 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11335 [(set_attr "conds" "clob")
11336 (set_attr "type" "branch")]
11337 )
11338
11339 ;; tls descriptor call
11340 (define_insn "tlscall"
11341 [(set (reg:SI R0_REGNUM)
11342 (unspec:SI [(reg:SI R0_REGNUM)
11343 (match_operand:SI 0 "" "X")
11344 (match_operand 1 "" "")] UNSPEC_TLS))
11345 (clobber (reg:SI R1_REGNUM))
11346 (clobber (reg:SI LR_REGNUM))
11347 (clobber (reg:SI CC_REGNUM))]
11348 "TARGET_GNU2_TLS"
11349 {
11350 targetm.asm_out.internal_label (asm_out_file, "LPIC",
11351 INTVAL (operands[1]));
11352 return "bl\\t%c0(tlscall)";
11353 }
11354 [(set_attr "conds" "clob")
11355 (set_attr "length" "4")
11356 (set_attr "type" "branch")]
11357 )
11358
11359 ;; For thread pointer builtin
11360 (define_expand "get_thread_pointersi"
11361 [(match_operand:SI 0 "s_register_operand")]
11362 ""
11363 "
11364 {
11365 arm_load_tp (operands[0]);
11366 DONE;
11367 }")
11368
11369 ;;
11370
11371 ;; We only care about the lower 16 bits of the constant
11372 ;; being inserted into the upper 16 bits of the register.
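;; For example (constant and register chosen for illustration), inserting
;; 0x12345678 writes only its low half, 0x5678, into bits 16-31 of r0,
;; roughly
;;	movt	r0, #0x5678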
11373 (define_insn "*arm_movtas_ze"
11374 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
11375 (const_int 16)
11376 (const_int 16))
11377 (match_operand:SI 1 "const_int_operand" ""))]
11378 "TARGET_HAVE_MOVT"
11379 "@
11380 movt%?\t%0, %L1
11381 movt\t%0, %L1"
11382 [(set_attr "arch" "32,v8mb")
11383 (set_attr "predicable" "yes")
11384 (set_attr "length" "4")
11385 (set_attr "type" "alu_sreg")]
11386 )
11387
11388 (define_insn "*arm_rev"
11389 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
11390 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
11391 "arm_arch6"
11392 "@
11393 rev\t%0, %1
11394 rev%?\t%0, %1
11395 rev%?\t%0, %1"
11396 [(set_attr "arch" "t1,t2,32")
11397 (set_attr "length" "2,2,4")
11398 (set_attr "predicable" "no,yes,yes")
11399 (set_attr "type" "rev")]
11400 )
11401
11402 (define_expand "arm_legacy_rev"
11403 [(set (match_operand:SI 2 "s_register_operand")
11404 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
11405 (const_int 16))
11406 (match_dup 1)))
11407 (set (match_dup 2)
11408 (lshiftrt:SI (match_dup 2)
11409 (const_int 8)))
11410 (set (match_operand:SI 3 "s_register_operand")
11411 (rotatert:SI (match_dup 1)
11412 (const_int 8)))
11413 (set (match_dup 2)
11414 (and:SI (match_dup 2)
11415 (const_int -65281)))
11416 (set (match_operand:SI 0 "s_register_operand")
11417 (xor:SI (match_dup 3)
11418 (match_dup 2)))]
11419 "TARGET_32BIT"
11420 ""
11421 )
11422
11423 ;; Reuse temporaries to keep register pressure down.
11424 (define_expand "thumb_legacy_rev"
11425 [(set (match_operand:SI 2 "s_register_operand")
11426 (ashift:SI (match_operand:SI 1 "s_register_operand")
11427 (const_int 24)))
11428 (set (match_operand:SI 3 "s_register_operand")
11429 (lshiftrt:SI (match_dup 1)
11430 (const_int 24)))
11431 (set (match_dup 3)
11432 (ior:SI (match_dup 3)
11433 (match_dup 2)))
11434 (set (match_operand:SI 4 "s_register_operand")
11435 (const_int 16))
11436 (set (match_operand:SI 5 "s_register_operand")
11437 (rotatert:SI (match_dup 1)
11438 (match_dup 4)))
11439 (set (match_dup 2)
11440 (ashift:SI (match_dup 5)
11441 (const_int 24)))
11442 (set (match_dup 5)
11443 (lshiftrt:SI (match_dup 5)
11444 (const_int 24)))
11445 (set (match_dup 5)
11446 (ior:SI (match_dup 5)
11447 (match_dup 2)))
11448 (set (match_dup 5)
11449 (rotatert:SI (match_dup 5)
11450 (match_dup 4)))
11451 (set (match_operand:SI 0 "s_register_operand")
11452 (ior:SI (match_dup 5)
11453 (match_dup 3)))]
11454 "TARGET_THUMB"
11455 ""
11456 )
11457
11458 ;; ARM-specific expansion of signed mod by power of 2
11459 ;; using conditional negate.
11460 ;; For r0 % n where n is a power of 2 produce:
11461 ;; rsbs r1, r0, #0
11462 ;; and r0, r0, #(n - 1)
11463 ;; and r1, r1, #(n - 1)
11464 ;; rsbpl r0, r1, #0
11465
11466 (define_expand "modsi3"
11467 [(match_operand:SI 0 "register_operand")
11468 (match_operand:SI 1 "register_operand")
11469 (match_operand:SI 2 "const_int_operand")]
11470 "TARGET_32BIT"
11471 {
11472 HOST_WIDE_INT val = INTVAL (operands[2]);
11473
11474 if (val <= 0
11475 || exact_log2 (val) <= 0)
11476 FAIL;
11477
11478 rtx mask = GEN_INT (val - 1);
11479
11480 /* In the special case of x0 % 2 we can do the even shorter:
11481 cmp r0, #0
11482 and r0, r0, #1
11483 rsblt r0, r0, #0. */
11484
11485 if (val == 2)
11486 {
11487 rtx cc_reg = arm_gen_compare_reg (LT,
11488 operands[1], const0_rtx, NULL_RTX);
11489 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
11490 rtx masked = gen_reg_rtx (SImode);
11491
11492 emit_insn (gen_andsi3 (masked, operands[1], mask));
11493 emit_move_insn (operands[0],
11494 gen_rtx_IF_THEN_ELSE (SImode, cond,
11495 gen_rtx_NEG (SImode,
11496 masked),
11497 masked));
11498 DONE;
11499 }
11500
11501 rtx neg_op = gen_reg_rtx (SImode);
11502 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
11503 operands[1]));
11504
11505 /* Extract the condition register and mode. */
11506 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
11507 rtx cc_reg = SET_DEST (cmp);
11508 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
11509
11510 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
11511
11512 rtx masked_neg = gen_reg_rtx (SImode);
11513 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
11514
11515 /* We want a conditional negate here, but emitting COND_EXEC rtxes
11516 during expand does not always work. Do an IF_THEN_ELSE instead. */
11517 emit_move_insn (operands[0],
11518 gen_rtx_IF_THEN_ELSE (SImode, cond,
11519 gen_rtx_NEG (SImode, masked_neg),
11520 operands[0]));
11521
11522
11523 DONE;
11524 }
11525 )
11526
11527 (define_expand "bswapsi2"
11528 [(set (match_operand:SI 0 "s_register_operand")
11529 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
11530 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
11531 "
11532 if (!arm_arch6)
11533 {
11534 rtx op2 = gen_reg_rtx (SImode);
11535 rtx op3 = gen_reg_rtx (SImode);
11536
11537 if (TARGET_THUMB)
11538 {
11539 rtx op4 = gen_reg_rtx (SImode);
11540 rtx op5 = gen_reg_rtx (SImode);
11541
11542 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
11543 op2, op3, op4, op5));
11544 }
11545 else
11546 {
11547 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
11548 op2, op3));
11549 }
11550
11551 DONE;
11552 }
11553 "
11554 )
11555
11556 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
11557 ;; and unsigned variants, respectively. For rev16, expose
11558 ;; byte-swapping in the lower 16 bits only.
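;; For example, with the low halfword of r1 equal to 0x12bc (illustrative
;; value):
;;	revsh	r0, r1		@ r0 = 0xffffbc12 (byte-swapped, sign-extended)
;;	rev16	r0, r1		@ swaps the bytes within each halfword instead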
11559 (define_insn "*arm_revsh"
11560 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
11561 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
11562 "arm_arch6"
11563 "@
11564 revsh\t%0, %1
11565 revsh%?\t%0, %1
11566 revsh%?\t%0, %1"
11567 [(set_attr "arch" "t1,t2,32")
11568 (set_attr "length" "2,2,4")
11569 (set_attr "type" "rev")]
11570 )
11571
11572 (define_insn "*arm_rev16"
11573 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
11574 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
11575 "arm_arch6"
11576 "@
11577 rev16\t%0, %1
11578 rev16%?\t%0, %1
11579 rev16%?\t%0, %1"
11580 [(set_attr "arch" "t1,t2,32")
11581 (set_attr "length" "2,2,4")
11582 (set_attr "type" "rev")]
11583 )
11584
11585 ;; There are no canonicalisation rules for the position of the lshiftrt, ashift
11586 ;; operations within an IOR/AND RTX; therefore we have two patterns matching
11587 ;; each valid permutation.
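;; Both forms describe the same computation, roughly
;;	((x << 8) & 0xff00ff00) | ((x >> 8) & 0x00ff00ff)
;; which a single rev16 instruction implements.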
11588
11589 (define_insn "arm_rev16si2"
11590 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11591 (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
11592 (const_int 8))
11593 (match_operand:SI 3 "const_int_operand" "n,n,n"))
11594 (and:SI (lshiftrt:SI (match_dup 1)
11595 (const_int 8))
11596 (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
11597 "arm_arch6
11598 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11599 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11600 "rev16\\t%0, %1"
11601 [(set_attr "arch" "t1,t2,32")
11602 (set_attr "length" "2,2,4")
11603 (set_attr "type" "rev")]
11604 )
11605
11606 (define_insn "arm_rev16si2_alt"
11607 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11608 (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
11609 (const_int 8))
11610 (match_operand:SI 2 "const_int_operand" "n,n,n"))
11611 (and:SI (ashift:SI (match_dup 1)
11612 (const_int 8))
11613 (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
11614 "arm_arch6
11615 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11616 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11617 "rev16\\t%0, %1"
11618 [(set_attr "arch" "t1,t2,32")
11619 (set_attr "length" "2,2,4")
11620 (set_attr "type" "rev")]
11621 )
11622
11623 (define_expand "bswaphi2"
11624 [(set (match_operand:HI 0 "s_register_operand")
11625 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
11626 "arm_arch6"
11627 ""
11628 )
11629
11630 ;; Patterns for LDRD/STRD in Thumb2 mode
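;; For example (operands chosen for illustration), two adjacent SImode loads
;; from [r2, #8] and [r2, #12] can be emitted as the single instruction
;;	ldrd	r0, r1, [r2, #8]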
11631
11632 (define_insn "*thumb2_ldrd"
11633 [(set (match_operand:SI 0 "s_register_operand" "=r")
11634 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11635 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
11636 (set (match_operand:SI 3 "s_register_operand" "=r")
11637 (mem:SI (plus:SI (match_dup 1)
11638 (match_operand:SI 4 "const_int_operand" ""))))]
11639 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11640 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
11641 && (operands_ok_ldrd_strd (operands[0], operands[3],
11642 operands[1], INTVAL (operands[2]),
11643 false, true))"
11644 "ldrd%?\t%0, %3, [%1, %2]"
11645 [(set_attr "type" "load_8")
11646 (set_attr "predicable" "yes")])
11647
11648 (define_insn "*thumb2_ldrd_base"
11649 [(set (match_operand:SI 0 "s_register_operand" "=r")
11650 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11651 (set (match_operand:SI 2 "s_register_operand" "=r")
11652 (mem:SI (plus:SI (match_dup 1)
11653 (const_int 4))))]
11654 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11655 && (operands_ok_ldrd_strd (operands[0], operands[2],
11656 operands[1], 0, false, true))"
11657 "ldrd%?\t%0, %2, [%1]"
11658 [(set_attr "type" "load_8")
11659 (set_attr "predicable" "yes")])
11660
11661 (define_insn "*thumb2_ldrd_base_neg"
11662 [(set (match_operand:SI 0 "s_register_operand" "=r")
11663 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11664 (const_int -4))))
11665 (set (match_operand:SI 2 "s_register_operand" "=r")
11666 (mem:SI (match_dup 1)))]
11667 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11668 && (operands_ok_ldrd_strd (operands[0], operands[2],
11669 operands[1], -4, false, true))"
11670 "ldrd%?\t%0, %2, [%1, #-4]"
11671 [(set_attr "type" "load_8")
11672 (set_attr "predicable" "yes")])
11673
11674 (define_insn "*thumb2_strd"
11675 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11676 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
11677 (match_operand:SI 2 "s_register_operand" "r"))
11678 (set (mem:SI (plus:SI (match_dup 0)
11679 (match_operand:SI 3 "const_int_operand" "")))
11680 (match_operand:SI 4 "s_register_operand" "r"))]
11681 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11682 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
11683 && (operands_ok_ldrd_strd (operands[2], operands[4],
11684 operands[0], INTVAL (operands[1]),
11685 false, false))"
11686 "strd%?\t%2, %4, [%0, %1]"
11687 [(set_attr "type" "store_8")
11688 (set_attr "predicable" "yes")])
11689
11690 (define_insn "*thumb2_strd_base"
11691 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
11692 (match_operand:SI 1 "s_register_operand" "r"))
11693 (set (mem:SI (plus:SI (match_dup 0)
11694 (const_int 4)))
11695 (match_operand:SI 2 "s_register_operand" "r"))]
11696 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11697 && (operands_ok_ldrd_strd (operands[1], operands[2],
11698 operands[0], 0, false, false))"
11699 "strd%?\t%1, %2, [%0]"
11700 [(set_attr "type" "store_8")
11701 (set_attr "predicable" "yes")])
11702
11703 (define_insn "*thumb2_strd_base_neg"
11704 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11705 (const_int -4)))
11706 (match_operand:SI 1 "s_register_operand" "r"))
11707 (set (mem:SI (match_dup 0))
11708 (match_operand:SI 2 "s_register_operand" "r"))]
11709 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11710 && (operands_ok_ldrd_strd (operands[1], operands[2],
11711 operands[0], -4, false, false))"
11712 "strd%?\t%1, %2, [%0, #-4]"
11713 [(set_attr "type" "store_8")
11714 (set_attr "predicable" "yes")])
11715
11716 ;; ARMv8 CRC32 instructions.
11717 (define_insn "arm_<crc_variant>"
11718 [(set (match_operand:SI 0 "s_register_operand" "=r")
11719 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
11720 (match_operand:<crc_mode> 2 "s_register_operand" "r")]
11721 CRC))]
11722 "TARGET_CRC32"
11723 "<crc_variant>\\t%0, %1, %2"
11724 [(set_attr "type" "crc")
11725 (set_attr "conds" "unconditional")]
11726 )
11727
11728 ;; Load the load/store double peephole optimizations.
11729 (include "ldrdstrd.md")
11730
11731 ;; Load the load/store multiple patterns
11732 (include "ldmstm.md")
11733
11734 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
11735 ;; large lists without explicit writeback generated for APCS_FRAME epilogue.
11736 ;; The operands are validated through the load_multiple_operation
11737 ;; match_parallel predicate rather than through constraints so enable it only
11738 ;; after reload.
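;; Illustrative only (the register list depends on the frame being restored):
;; a single load-multiple such as
;;	ldm	r10, {r4, r5, r6, r7, r8, r9}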
11739 (define_insn "*load_multiple"
11740 [(match_parallel 0 "load_multiple_operation"
11741 [(set (match_operand:SI 2 "s_register_operand" "=rk")
11742 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11743 ])]
11744 "TARGET_32BIT && reload_completed"
11745 "*
11746 {
11747 arm_output_multireg_pop (operands, /*return_pc=*/false,
11748 /*cond=*/const_true_rtx,
11749 /*reverse=*/false,
11750 /*update=*/false);
11751 return \"\";
11752 }
11753 "
11754 [(set_attr "predicable" "yes")]
11755 )
11756
11757 (define_expand "copysignsf3"
11758 [(match_operand:SF 0 "register_operand")
11759 (match_operand:SF 1 "register_operand")
11760 (match_operand:SF 2 "register_operand")]
11761 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11762 "{
11763 emit_move_insn (operands[0], operands[2]);
11764 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
11765 GEN_INT (31), GEN_INT (0),
11766 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
11767 DONE;
11768 }"
11769 )
11770
11771 (define_expand "copysigndf3"
11772 [(match_operand:DF 0 "register_operand")
11773 (match_operand:DF 1 "register_operand")
11774 (match_operand:DF 2 "register_operand")]
11775 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11776 "{
11777 rtx op0_low = gen_lowpart (SImode, operands[0]);
11778 rtx op0_high = gen_highpart (SImode, operands[0]);
11779 rtx op1_low = gen_lowpart (SImode, operands[1]);
11780 rtx op1_high = gen_highpart (SImode, operands[1]);
11781 rtx op2_high = gen_highpart (SImode, operands[2]);
11782
11783 rtx scratch1 = gen_reg_rtx (SImode);
11784 rtx scratch2 = gen_reg_rtx (SImode);
11785 emit_move_insn (scratch1, op2_high);
11786 emit_move_insn (scratch2, op1_high);
11787
11788 emit_insn (gen_rtx_SET (scratch1,
11789 gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT (31))));
11790 emit_insn (gen_insv_t2 (scratch2, GEN_INT (1), GEN_INT (31), scratch1));
11791 emit_move_insn (op0_low, op1_low);
11792 emit_move_insn (op0_high, scratch2);
11793
11794 DONE;
11795 }"
11796 )
11797
11798 ;; movmisalign patterns for HImode and SImode.
11799 (define_expand "movmisalign<mode>"
11800 [(match_operand:HSI 0 "general_operand")
11801 (match_operand:HSI 1 "general_operand")]
11802 "unaligned_access"
11803 {
11804 /* This pattern is not permitted to fail during expansion: if both arguments
11805 are non-registers (e.g. memory := constant), force operand 1 into a
11806 register. */
11807 rtx (* gen_unaligned_load)(rtx, rtx);
11808 rtx tmp_dest = operands[0];
11809 if (!s_register_operand (operands[0], <MODE>mode)
11810 && !s_register_operand (operands[1], <MODE>mode))
11811 operands[1] = force_reg (<MODE>mode, operands[1]);
11812
11813 if (<MODE>mode == HImode)
11814 {
11815 gen_unaligned_load = gen_unaligned_loadhiu;
11816 tmp_dest = gen_reg_rtx (SImode);
11817 }
11818 else
11819 gen_unaligned_load = gen_unaligned_loadsi;
11820
11821 if (MEM_P (operands[1]))
11822 {
11823 emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
11824 if (<MODE>mode == HImode)
11825 emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
11826 }
11827 else
11828 emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
11829
11830 DONE;
11831 })
11832
11833 (define_insn "arm_<cdp>"
11834 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11835 (match_operand:SI 1 "immediate_operand" "n")
11836 (match_operand:SI 2 "immediate_operand" "n")
11837 (match_operand:SI 3 "immediate_operand" "n")
11838 (match_operand:SI 4 "immediate_operand" "n")
11839 (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
11840 "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
11841 {
11842 arm_const_bounds (operands[0], 0, 16);
11843 arm_const_bounds (operands[1], 0, 16);
11844 arm_const_bounds (operands[2], 0, (1 << 5));
11845 arm_const_bounds (operands[3], 0, (1 << 5));
11846 arm_const_bounds (operands[4], 0, (1 << 5));
11847 arm_const_bounds (operands[5], 0, 8);
11848 return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
11849 }
11850 [(set_attr "length" "4")
11851 (set_attr "type" "coproc")])
11852
11853 (define_insn "*ldc"
11854 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11855 (match_operand:SI 1 "immediate_operand" "n")
11856 (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
11857 "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
11858 {
11859 arm_const_bounds (operands[0], 0, 16);
11860 arm_const_bounds (operands[1], 0, (1 << 5));
11861 return "<ldc>\\tp%c0, CR%c1, %2";
11862 }
11863 [(set_attr "length" "4")
11864 (set_attr "type" "coproc")])
11865
11866 (define_insn "*stc"
11867 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11868 (match_operand:SI 1 "immediate_operand" "n")
11869 (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
11870 "arm_coproc_builtin_available (VUNSPEC_<STC>)"
11871 {
11872 arm_const_bounds (operands[0], 0, 16);
11873 arm_const_bounds (operands[1], 0, (1 << 5));
11874 return "<stc>\\tp%c0, CR%c1, %2";
11875 }
11876 [(set_attr "length" "4")
11877 (set_attr "type" "coproc")])
11878
11879 (define_expand "arm_<ldc>"
11880 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11881 (match_operand:SI 1 "immediate_operand")
11882 (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
11883 "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
11884
11885 (define_expand "arm_<stc>"
11886 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11887 (match_operand:SI 1 "immediate_operand")
11888 (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
11889 "arm_coproc_builtin_available (VUNSPEC_<STC>)")
11890
11891 (define_insn "arm_<mcr>"
11892 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11893 (match_operand:SI 1 "immediate_operand" "n")
11894 (match_operand:SI 2 "s_register_operand" "r")
11895 (match_operand:SI 3 "immediate_operand" "n")
11896 (match_operand:SI 4 "immediate_operand" "n")
11897 (match_operand:SI 5 "immediate_operand" "n")] MCRI)
11898 (use (match_dup 2))]
11899 "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
11900 {
11901 arm_const_bounds (operands[0], 0, 16);
11902 arm_const_bounds (operands[1], 0, 8);
11903 arm_const_bounds (operands[3], 0, (1 << 5));
11904 arm_const_bounds (operands[4], 0, (1 << 5));
11905 arm_const_bounds (operands[5], 0, 8);
11906 return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
11907 }
11908 [(set_attr "length" "4")
11909 (set_attr "type" "coproc")])
11910
11911 (define_insn "arm_<mrc>"
11912 [(set (match_operand:SI 0 "s_register_operand" "=r")
11913 (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
11914 (match_operand:SI 2 "immediate_operand" "n")
11915 (match_operand:SI 3 "immediate_operand" "n")
11916 (match_operand:SI 4 "immediate_operand" "n")
11917 (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
11918 "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
11919 {
11920 arm_const_bounds (operands[1], 0, 16);
11921 arm_const_bounds (operands[2], 0, 8);
11922 arm_const_bounds (operands[3], 0, (1 << 5));
11923 arm_const_bounds (operands[4], 0, (1 << 5));
11924 arm_const_bounds (operands[5], 0, 8);
11925 return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
11926 }
11927 [(set_attr "length" "4")
11928 (set_attr "type" "coproc")])
11929
11930 (define_insn "arm_<mcrr>"
11931 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11932 (match_operand:SI 1 "immediate_operand" "n")
11933 (match_operand:DI 2 "s_register_operand" "r")
11934 (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
11935 (use (match_dup 2))]
11936 "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
11937 {
11938 arm_const_bounds (operands[0], 0, 16);
11939 arm_const_bounds (operands[1], 0, 8);
11940 arm_const_bounds (operands[3], 0, (1 << 5));
11941 return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
11942 }
11943 [(set_attr "length" "4")
11944 (set_attr "type" "coproc")])
11945
11946 (define_insn "arm_<mrrc>"
11947 [(set (match_operand:DI 0 "s_register_operand" "=r")
11948 (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
11949 (match_operand:SI 2 "immediate_operand" "n")
11950 (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
11951 "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
11952 {
11953 arm_const_bounds (operands[1], 0, 16);
11954 arm_const_bounds (operands[2], 0, 8);
11955 arm_const_bounds (operands[3], 0, (1 << 5));
11956 return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
11957 }
11958 [(set_attr "length" "4")
11959 (set_attr "type" "coproc")])
11960
11961 (define_expand "speculation_barrier"
11962 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11963 "TARGET_EITHER"
11964 "
11965 /* For thumb1 (except Armv8 derivatives), and for pre-Armv7 we don't
11966 have a usable barrier (and probably don't need one in practice).
11967 But to be safe if such code is run on later architectures, call a
11968 helper function in libgcc that will do the right thing for the active
11969 system. */
11970 if (!(arm_arch7 || arm_arch8))
11971 {
11972 arm_emit_speculation_barrier_function ();
11973 DONE;
11974 }
11975 "
11976 )
11977
11978 ;; Generate a hard speculation barrier when we have not enabled speculation
11979 ;; tracking.
11980 (define_insn "*speculation_barrier_insn"
11981 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11982 "arm_arch7 || arm_arch8"
11983 "isb\;dsb\\tsy"
11984 [(set_attr "type" "block")
11985 (set_attr "length" "8")]
11986 )
11987
11988 ;; Vector bits common to IWMMXT and Neon
11989 (include "vec-common.md")
11990 ;; Load the Intel Wireless Multimedia Extension patterns
11991 (include "iwmmxt.md")
11992 ;; Load the VFP co-processor patterns
11993 (include "vfp.md")
11994 ;; Thumb-1 patterns
11995 (include "thumb1.md")
11996 ;; Thumb-2 patterns
11997 (include "thumb2.md")
11998 ;; Neon patterns
11999 (include "neon.md")
12000 ;; Crypto patterns
12001 (include "crypto.md")
12002 ;; Synchronization Primitives
12003 (include "sync.md")
12004 ;; Fixed-point patterns
12005 (include "arm-fixed.md")