1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2019 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
6
7 ;; This file is part of GCC.
8
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
13
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
18
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
22
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
24
25 \f
26 ;;---------------------------------------------------------------------------
27 ;; Constants
28
29 ;; Register numbers -- All machine registers should be defined here
30 (define_constants
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (R4_REGNUM 4) ; Fifth CORE register
34 (FDPIC_REGNUM 9) ; FDPIC register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (LAST_ARM_REGNUM 15) ;
40 (CC_REGNUM 100) ; Condition code pseudo register
41 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
42 ]
43 )
44 ;; 3rd operand to select_dominance_cc_mode
45 (define_constants
46 [(DOM_CC_X_AND_Y 0)
47 (DOM_CC_NX_OR_Y 1)
48 (DOM_CC_X_OR_Y 2)
49 ]
50 )
51 ;; conditional compare combination
52 (define_constants
53 [(CMP_CMP 0)
54 (CMN_CMP 1)
55 (CMP_CMN 2)
56 (CMN_CMN 3)
57 (NUM_OF_COND_CMP 4)
58 ]
59 )
60
61 \f
62 ;;---------------------------------------------------------------------------
63 ;; Attributes
64
65 ;; Processor type. This is created automatically from arm-cores.def.
66 (include "arm-tune.md")
67
68 ;; Instruction classification types
69 (include "types.md")
70
71 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
72 ; generating ARM code. This is used to control the length of some insn
73 ; patterns that share the same RTL in both ARM and Thumb code.
74 (define_attr "is_thumb" "yes,no"
75 (const (if_then_else (symbol_ref "TARGET_THUMB")
76 (const_string "yes") (const_string "no"))))
77
78 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
79 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
80
81 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
82 (define_attr "is_thumb1" "yes,no"
83 (const (if_then_else (symbol_ref "TARGET_THUMB1")
84 (const_string "yes") (const_string "no"))))
85
86 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
87 ; The arm_restrict_it flag enables the "short IT" feature which
88 ; restricts IT blocks to a single 16-bit instruction.
89 ; This attribute should only be used on 16-bit Thumb-2 instructions
90 ; which may be predicated (the "predicable" attribute must be set).
91 (define_attr "predicable_short_it" "no,yes" (const_string "no"))
92
93 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
94 ; This attribute should only be used on instructions whose expansion may
95 ; emit an IT block that is not a short IT.
96 (define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
97
98 ;; Operand number of an input operand that is shifted. Zero if the
99 ;; given instruction does not shift one of its input operands.
100 (define_attr "shift" "" (const_int 0))
101
102 ;; [For compatibility with AArch64 in pipeline models]
103 ;; Attribute that specifies whether or not the instruction touches fp
104 ;; registers.
105 (define_attr "fp" "no,yes" (const_string "no"))
106
107 ; Floating Point Unit. If we only have floating point emulation, then there
108 ; is no point in scheduling the floating point insns. (Well, for best
109 ; performance we should try to group them together).
110 (define_attr "fpu" "none,vfp"
111 (const (symbol_ref "arm_fpu_attr")))
112
113 ; Predicated means that the insn form is conditionally executed based on a
114 ; predicate. We default to 'no' because no Thumb patterns match this rule
115 ; and not all ARM insns do.
116 (define_attr "predicated" "yes,no" (const_string "no"))
117
118 ; LENGTH of an instruction (in bytes)
119 (define_attr "length" ""
120 (const_int 4))
121
122 ; The architecture which supports the instruction (or alternative).
123 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
124 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
125 ; for ARM or Thumb-2 with arm_arch6, and "nov6" for ARM without
126 ; arm_arch6. "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
127 ; Baseline. This attribute is used to compute attribute "enabled";
128 ; use value "any" to enable an alternative in all cases.
129 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,v8mb,iwmmxt,iwmmxt2,armv6_or_vfpv3,neon"
130 (const_string "any"))
131
132 (define_attr "arch_enabled" "no,yes"
133 (cond [(eq_attr "arch" "any")
134 (const_string "yes")
135
136 (and (eq_attr "arch" "a")
137 (match_test "TARGET_ARM"))
138 (const_string "yes")
139
140 (and (eq_attr "arch" "t")
141 (match_test "TARGET_THUMB"))
142 (const_string "yes")
143
144 (and (eq_attr "arch" "t1")
145 (match_test "TARGET_THUMB1"))
146 (const_string "yes")
147
148 (and (eq_attr "arch" "t2")
149 (match_test "TARGET_THUMB2"))
150 (const_string "yes")
151
152 (and (eq_attr "arch" "32")
153 (match_test "TARGET_32BIT"))
154 (const_string "yes")
155
156 (and (eq_attr "arch" "v6")
157 (match_test "TARGET_32BIT && arm_arch6"))
158 (const_string "yes")
159
160 (and (eq_attr "arch" "nov6")
161 (match_test "TARGET_32BIT && !arm_arch6"))
162 (const_string "yes")
163
164 (and (eq_attr "arch" "v6t2")
165 (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
166 (const_string "yes")
167
168 (and (eq_attr "arch" "v8mb")
169 (match_test "TARGET_THUMB1 && arm_arch8"))
170 (const_string "yes")
171
172 (and (eq_attr "arch" "iwmmxt2")
173 (match_test "TARGET_REALLY_IWMMXT2"))
174 (const_string "yes")
175
176 (and (eq_attr "arch" "armv6_or_vfpv3")
177 (match_test "arm_arch6 || TARGET_VFP3"))
178 (const_string "yes")
179
180 (and (eq_attr "arch" "neon")
181 (match_test "TARGET_NEON"))
182 (const_string "yes")
183 ]
184
185 (const_string "no")))
186
187 (define_attr "opt" "any,speed,size"
188 (const_string "any"))
189
190 (define_attr "opt_enabled" "no,yes"
191 (cond [(eq_attr "opt" "any")
192 (const_string "yes")
193
194 (and (eq_attr "opt" "speed")
195 (match_test "optimize_function_for_speed_p (cfun)"))
196 (const_string "yes")
197
198 (and (eq_attr "opt" "size")
199 (match_test "optimize_function_for_size_p (cfun)"))
200 (const_string "yes")]
201 (const_string "no")))
202
203 (define_attr "use_literal_pool" "no,yes"
204 (cond [(and (eq_attr "type" "f_loads,f_loadd")
205 (match_test "CONSTANT_P (operands[1])"))
206 (const_string "yes")]
207 (const_string "no")))
208
209 ; Enable all alternatives that are both arch_enabled and insn_enabled.
210 ; FIXME: opt_enabled has been temporarily removed until we have
211 ; an attribute that allows the use of such alternatives.
212 ; This depends on caching of speed_p, size_p on a per
213 ; alternative basis. The problem is that the enabled attribute
214 ; cannot depend on any state that is not cached or is not constant
215 ; for a compilation unit. We probably need a generic "hot/cold"
216 ; alternative which, if implemented, could help with this. We disable
217 ; this until such a time as it is implemented and/or the improvements
218 ; or regressions from removing this attribute are double-checked.
219 ; See ashldi3_neon and <shift>di3_neon in neon.md.
220
221 (define_attr "enabled" "no,yes"
222 (cond [(and (eq_attr "predicable_short_it" "no")
223 (and (eq_attr "predicated" "yes")
224 (match_test "arm_restrict_it")))
225 (const_string "no")
226
227 (and (eq_attr "enabled_for_short_it" "no")
228 (match_test "arm_restrict_it"))
229 (const_string "no")
230
231 (eq_attr "arch_enabled" "no")
232 (const_string "no")]
233 (const_string "yes")))
234
235 ; POOL_RANGE is how far away from a constant pool entry that this insn
236 ; can be placed. If the distance is zero, then this insn will never
237 ; reference the pool.
238 ; Note that for Thumb constant pools the PC value is rounded down to the
239 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
240 ; Thumb insns) should be set to <max_range> - 2.
241 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
242 ; before its address. It is set to <max_range> - (8 + <data_size>).
243 (define_attr "arm_pool_range" "" (const_int 0))
244 (define_attr "thumb2_pool_range" "" (const_int 0))
245 (define_attr "arm_neg_pool_range" "" (const_int 0))
246 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
247
248 (define_attr "pool_range" ""
249 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
250 (attr "arm_pool_range")))
251 (define_attr "neg_pool_range" ""
252 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
253 (attr "arm_neg_pool_range")))
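
;; As a worked example of the formulae above (illustrative numbers only;
;; the real values are set per-pattern elsewhere in this file): a Thumb
;; insn whose hardware addressing range is 4096 bytes would use a
;; pool_range of 4094 (4096 - 2 for the PC rounding), and, if it loads
;; 4 bytes of data, a neg_pool_range of 4096 - (8 + 4) = 4084.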
254
255 ; An assembler sequence may clobber the condition codes without us knowing.
256 ; If such an insn references the pool, then we have no way of knowing how,
257 ; so use the most conservative value for pool_range.
258 (define_asm_attributes
259 [(set_attr "conds" "clob")
260 (set_attr "length" "4")
261 (set_attr "pool_range" "250")])
262
263 ; Load scheduling, set from the arm_ld_sched variable
264 ; initialized by arm_option_override()
265 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
266
267 ; condition codes: this one is used by final_prescan_insn to speed up
268 ; conditionalizing instructions. It saves having to scan the rtl to see if
269 ; it uses or alters the condition codes.
270 ;
271 ; USE means that the condition codes are used by the insn in the process of
272 ; outputting code; this means (at present) that we can't use the insn in
273 ; inlined branches
274 ;
275 ; SET means that the purpose of the insn is to set the condition codes in a
276 ; well defined manner.
277 ;
278 ; CLOB means that the condition codes are altered in an undefined manner, if
279 ; they are altered at all
280 ;
281 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
282 ; that the instruction does not use or alter the condition codes.
283 ;
284 ; NOCOND means that the instruction does not use or alter the condition
285 ; codes but can be converted into a conditionally executed instruction.
286
287 (define_attr "conds" "use,set,clob,unconditional,nocond"
288 (if_then_else
289 (ior (eq_attr "is_thumb1" "yes")
290 (eq_attr "type" "call"))
291 (const_string "clob")
292 (if_then_else (eq_attr "is_neon_type" "no")
293 (const_string "nocond")
294 (const_string "unconditional"))))
295
296 ; Predicable means that the insn can be conditionally executed based on
297 ; an automatically added predicate (additional patterns are generated by
298 ; gen...). We default to 'no' because no Thumb patterns match this rule
299 ; and not all ARM patterns do.
300 (define_attr "predicable" "no,yes" (const_string "no"))
301
302 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
303 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
304 ; suffer blockages enough to warrant modelling this (and it can adversely
305 ; affect the schedule).
306 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
307
308 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
309 ; to stall the processor. Used with model_wbuf above.
310 (define_attr "write_conflict" "no,yes"
311 (if_then_else (eq_attr "type"
312 "block,call,load_4")
313 (const_string "yes")
314 (const_string "no")))
315
316 ; Classify the insns into those that take one cycle and those that take more
317 ; than one on the main cpu execution unit.
318 (define_attr "core_cycles" "single,multi"
319 (if_then_else (eq_attr "type"
320 "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
321 alu_shift_imm, alu_shift_reg, alu_dsp_reg, alus_ext, alus_imm, alus_sreg,\
322 alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
323 logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
324 logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
325 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
326 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
327 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
328 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
329 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
330 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
331 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
332 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
333 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
334 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
335 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
336 (const_string "single")
337 (const_string "multi")))
338
339 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
340 ;; distant label. Only applicable to Thumb code.
341 (define_attr "far_jump" "yes,no" (const_string "no"))
342
343
344 ;; The number of machine instructions this pattern expands to.
345 ;; Used for Thumb-2 conditional execution.
346 (define_attr "ce_count" "" (const_int 1))
347
348 ;;---------------------------------------------------------------------------
349 ;; Unspecs
350
351 (include "unspecs.md")
352
353 ;;---------------------------------------------------------------------------
354 ;; Mode iterators
355
356 (include "iterators.md")
357
358 ;;---------------------------------------------------------------------------
359 ;; Predicates
360
361 (include "predicates.md")
362 (include "constraints.md")
363
364 ;;---------------------------------------------------------------------------
365 ;; Pipeline descriptions
366
367 (define_attr "tune_cortexr4" "yes,no"
368 (const (if_then_else
369 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
370 (const_string "yes")
371 (const_string "no"))))
372
373 ;; True if the generic scheduling description should be used.
374
375 (define_attr "generic_sched" "yes,no"
376 (const (if_then_else
377 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
378 arm926ejs,arm10e,arm1026ejs,arm1136js,\
379 arm1136jfs,cortexa5,cortexa7,cortexa8,\
380 cortexa9,cortexa12,cortexa15,cortexa17,\
381 cortexa53,cortexa57,cortexm4,cortexm7,\
382 exynosm1,marvell_pj4,xgene1")
383 (eq_attr "tune_cortexr4" "yes"))
384 (const_string "no")
385 (const_string "yes"))))
386
387 (define_attr "generic_vfp" "yes,no"
388 (const (if_then_else
389 (and (eq_attr "fpu" "vfp")
390 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
391 cortexa8,cortexa9,cortexa53,cortexm4,\
392 cortexm7,marvell_pj4,xgene1")
393 (eq_attr "tune_cortexr4" "no"))
394 (const_string "yes")
395 (const_string "no"))))
396
397 (include "marvell-f-iwmmxt.md")
398 (include "arm-generic.md")
399 (include "arm926ejs.md")
400 (include "arm1020e.md")
401 (include "arm1026ejs.md")
402 (include "arm1136jfs.md")
403 (include "fa526.md")
404 (include "fa606te.md")
405 (include "fa626te.md")
406 (include "fmp626.md")
407 (include "fa726te.md")
408 (include "cortex-a5.md")
409 (include "cortex-a7.md")
410 (include "cortex-a8.md")
411 (include "cortex-a9.md")
412 (include "cortex-a15.md")
413 (include "cortex-a17.md")
414 (include "cortex-a53.md")
415 (include "cortex-a57.md")
416 (include "cortex-r4.md")
417 (include "cortex-r4f.md")
418 (include "cortex-m7.md")
419 (include "cortex-m4.md")
420 (include "cortex-m4-fpu.md")
421 (include "exynos-m1.md")
422 (include "vfp11.md")
423 (include "marvell-pj4.md")
424 (include "xgene1.md")
425
426 \f
427 ;;---------------------------------------------------------------------------
428 ;; Insn patterns
429 ;;
430 ;; Addition insns.
431
432 ;; Note: For DImode insns, there is normally no reason why operands should
433 ;; not be in the same register; what we don't want is for something being
434 ;; written to partially overlap something that is an input.
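
;; As a rough illustration (not itself a pattern): for a simple 64-bit
;; addition such as the hypothetical function below, the adddi3 expander
;; splits the operation into an ADDS of the low words followed by an ADC
;; of the high words, so the carry from the low half feeds the high half.
;;
;;   long long add64 (long long a, long long b) { return a + b; }
;;
;;   @ approximate Arm code, assuming a in r0/r1 and b in r2/r3 (AAPCS)
;;   adds  r0, r0, r2   @ low words; sets the carry flag
;;   adc   r1, r1, r3   @ high words plus carry-in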
435
436 (define_expand "adddi3"
437 [(parallel
438 [(set (match_operand:DI 0 "s_register_operand")
439 (plus:DI (match_operand:DI 1 "s_register_operand")
440 (match_operand:DI 2 "reg_or_int_operand")))
441 (clobber (reg:CC CC_REGNUM))])]
442 "TARGET_EITHER"
443 "
444 if (TARGET_THUMB1)
445 {
446 if (!REG_P (operands[2]))
447 operands[2] = force_reg (DImode, operands[2]);
448 }
449 else
450 {
451 rtx lo_result, hi_result, lo_dest, hi_dest;
452 rtx lo_op1, hi_op1, lo_op2, hi_op2;
453 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
454 &lo_op2, &hi_op2);
455 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
456 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
457
458 if (lo_op2 == const0_rtx)
459 {
460 lo_dest = lo_op1;
461 if (!arm_add_operand (hi_op2, SImode))
462 hi_op2 = force_reg (SImode, hi_op2);
463 /* Assume hi_op2 won't also be zero. */
464 emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
465 }
466 else
467 {
468 if (!arm_add_operand (lo_op2, SImode))
469 lo_op2 = force_reg (SImode, lo_op2);
470 if (!arm_not_operand (hi_op2, SImode))
471 hi_op2 = force_reg (SImode, hi_op2);
472
473 emit_insn (gen_addsi3_compare_op1 (lo_dest, lo_op1, lo_op2));
474 rtx carry = gen_rtx_LTU (SImode, gen_rtx_REG (CC_Cmode, CC_REGNUM),
475 const0_rtx);
476 if (hi_op2 == const0_rtx)
477 emit_insn (gen_add0si3_carryin (hi_dest, hi_op1, carry));
478 else
479 emit_insn (gen_addsi3_carryin (hi_dest, hi_op1, hi_op2, carry));
480 }
481
482 if (lo_result != lo_dest)
483 emit_move_insn (lo_result, lo_dest);
484 if (hi_result != hi_dest)
485 emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
486 DONE;
487 }
488 "
489 )
490
491 (define_expand "addvsi4"
492 [(match_operand:SI 0 "s_register_operand")
493 (match_operand:SI 1 "s_register_operand")
494 (match_operand:SI 2 "arm_add_operand")
495 (match_operand 3 "")]
496 "TARGET_32BIT"
497 {
498 if (CONST_INT_P (operands[2]))
499 emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1], operands[2]));
500 else
501 emit_insn (gen_addsi3_compareV_reg (operands[0], operands[1], operands[2]));
502 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
503
504 DONE;
505 })
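
;; Illustrative sketch only (label names and registers are assumptions):
;; this expansion underlies signed-overflow checks such as
;; __builtin_add_overflow, which become an ADDS that sets the V flag
;; followed by a branch that is expected not to be taken:
;;
;;   adds  r0, r0, r1
;;   bvs   .Loverflow   @ taken only on signed overflow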
506
507 (define_expand "addvdi4"
508 [(match_operand:DI 0 "s_register_operand")
509 (match_operand:DI 1 "s_register_operand")
510 (match_operand:DI 2 "reg_or_int_operand")
511 (match_operand 3 "")]
512 "TARGET_32BIT"
513 {
514 rtx lo_result, hi_result;
515 rtx lo_op1, hi_op1, lo_op2, hi_op2;
516 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
517 &lo_op2, &hi_op2);
518 lo_result = gen_lowpart (SImode, operands[0]);
519 hi_result = gen_highpart (SImode, operands[0]);
520
521 if (lo_op2 == const0_rtx)
522 {
523 emit_move_insn (lo_result, lo_op1);
524 if (!arm_add_operand (hi_op2, SImode))
525 hi_op2 = force_reg (SImode, hi_op2);
526
527 emit_insn (gen_addvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
528 }
529 else
530 {
531 if (!arm_add_operand (lo_op2, SImode))
532 lo_op2 = force_reg (SImode, lo_op2);
533 if (!arm_not_operand (hi_op2, SImode))
534 hi_op2 = force_reg (SImode, hi_op2);
535
536 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
537
538 if (hi_op2 == const0_rtx)
539 emit_insn (gen_addsi3_cin_vout_0 (hi_result, hi_op1));
540 else if (CONST_INT_P (hi_op2))
541 emit_insn (gen_addsi3_cin_vout_imm (hi_result, hi_op1, hi_op2));
542 else
543 emit_insn (gen_addsi3_cin_vout_reg (hi_result, hi_op1, hi_op2));
544
545 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
546 }
547
548 DONE;
549 })
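
;; Illustrative sketch only: a full 64-bit signed addition with overflow
;; check is expected to become a three-instruction core sequence, e.g.
;;
;;   adds  r0, r0, r2    @ low words, carry out
;;   adcs  r1, r1, r3    @ high words plus carry, V flag from the top word
;;   bvs   .Loverflow    @ label name is hypothetical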
550
551 (define_expand "addsi3_cin_vout_reg"
552 [(parallel
553 [(set (match_dup 3)
554 (compare:CC_V
555 (plus:DI
556 (plus:DI (match_dup 4)
557 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
558 (sign_extend:DI (match_operand:SI 2 "s_register_operand")))
559 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
560 (match_dup 2)))))
561 (set (match_operand:SI 0 "s_register_operand")
562 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
563 (match_dup 2)))])]
564 "TARGET_32BIT"
565 {
566 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
567 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
568 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
569 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
570 }
571 )
572
573 (define_insn "*addsi3_cin_vout_reg_insn"
574 [(set (reg:CC_V CC_REGNUM)
575 (compare:CC_V
576 (plus:DI
577 (plus:DI
578 (match_operand:DI 3 "arm_carry_operation" "")
579 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
580 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
581 (sign_extend:DI
582 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
583 (match_dup 1))
584 (match_dup 2)))))
585 (set (match_operand:SI 0 "s_register_operand" "=l,r")
586 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
587 (match_dup 2)))]
588 "TARGET_32BIT"
589 "@
590 adcs%?\\t%0, %0, %2
591 adcs%?\\t%0, %1, %2"
592 [(set_attr "type" "alus_sreg")
593 (set_attr "arch" "t2,*")
594 (set_attr "length" "2,4")]
595 )
596
597 (define_expand "addsi3_cin_vout_imm"
598 [(parallel
599 [(set (match_dup 3)
600 (compare:CC_V
601 (plus:DI
602 (plus:DI (match_dup 4)
603 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
604 (match_dup 2))
605 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
606 (match_dup 2)))))
607 (set (match_operand:SI 0 "s_register_operand")
608 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
609 (match_operand 2 "arm_adcimm_operand")))])]
610 "TARGET_32BIT"
611 {
612 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
613 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
614 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
615 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
616 }
617 )
618
619 (define_insn "*addsi3_cin_vout_imm_insn"
620 [(set (reg:CC_V CC_REGNUM)
621 (compare:CC_V
622 (plus:DI
623 (plus:DI
624 (match_operand:DI 3 "arm_carry_operation" "")
625 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
626 (match_operand 2 "arm_adcimm_operand" "I,K"))
627 (sign_extend:DI
628 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
629 (match_dup 1))
630 (match_dup 2)))))
631 (set (match_operand:SI 0 "s_register_operand" "=r,r")
632 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
633 (match_dup 2)))]
634 "TARGET_32BIT"
635 "@
636 adcs%?\\t%0, %1, %2
637 sbcs%?\\t%0, %1, #%B2"
638 [(set_attr "type" "alus_imm")]
639 )
640
641 (define_expand "addsi3_cin_vout_0"
642 [(parallel
643 [(set (match_dup 2)
644 (compare:CC_V
645 (plus:DI (match_dup 3)
646 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
647 (sign_extend:DI (plus:SI (match_dup 4) (match_dup 1)))))
648 (set (match_operand:SI 0 "s_register_operand")
649 (plus:SI (match_dup 4) (match_dup 1)))])]
650 "TARGET_32BIT"
651 {
652 operands[2] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
653 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
654 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
655 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
656 }
657 )
658
659 (define_insn "*addsi3_cin_vout_0_insn"
660 [(set (reg:CC_V CC_REGNUM)
661 (compare:CC_V
662 (plus:DI
663 (match_operand:DI 2 "arm_carry_operation" "")
664 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
665 (sign_extend:DI (plus:SI
666 (match_operand:SI 3 "arm_carry_operation" "")
667 (match_dup 1)))))
668 (set (match_operand:SI 0 "s_register_operand" "=r")
669 (plus:SI (match_dup 3) (match_dup 1)))]
670 "TARGET_32BIT"
671 "adcs%?\\t%0, %1, #0"
672 [(set_attr "type" "alus_imm")]
673 )
674
675 (define_expand "uaddvsi4"
676 [(match_operand:SI 0 "s_register_operand")
677 (match_operand:SI 1 "s_register_operand")
678 (match_operand:SI 2 "arm_add_operand")
679 (match_operand 3 "")]
680 "TARGET_32BIT"
681 {
682 emit_insn (gen_addsi3_compare_op1 (operands[0], operands[1], operands[2]));
683 arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
684
685 DONE;
686 })
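
;; Illustrative sketch only: an unsigned overflow check lowers to an ADDS
;; followed by a branch on the carry flag, since unsigned overflow of an
;; addition is simply carry-out:
;;
;;   adds  r0, r0, r1
;;   bcs   .Loverflow   @ C set => unsigned wrap-around (label hypothetical)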
687
688 (define_expand "uaddvdi4"
689 [(match_operand:DI 0 "s_register_operand")
690 (match_operand:DI 1 "s_register_operand")
691 (match_operand:DI 2 "reg_or_int_operand")
692 (match_operand 3 "")]
693 "TARGET_32BIT"
694 {
695 rtx lo_result, hi_result;
696 rtx lo_op1, hi_op1, lo_op2, hi_op2;
697 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
698 &lo_op2, &hi_op2);
699 lo_result = gen_lowpart (SImode, operands[0]);
700 hi_result = gen_highpart (SImode, operands[0]);
701
702 if (lo_op2 == const0_rtx)
703 {
704 emit_move_insn (lo_result, lo_op1);
705 if (!arm_add_operand (hi_op2, SImode))
706 hi_op2 = force_reg (SImode, hi_op2);
707
708 gen_uaddvsi4 (hi_result, hi_op1, hi_op2, operands[3]);
709 }
710 else
711 {
712 if (!arm_add_operand (lo_op2, SImode))
713 lo_op2 = force_reg (SImode, lo_op2);
714 if (!arm_not_operand (hi_op2, SImode))
715 hi_op2 = force_reg (SImode, hi_op2);
716
717 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
718
719 if (hi_op2 == const0_rtx)
720 emit_insn (gen_addsi3_cin_cout_0 (hi_result, hi_op1));
721 else if (CONST_INT_P (hi_op2))
722 emit_insn (gen_addsi3_cin_cout_imm (hi_result, hi_op1, hi_op2));
723 else
724 emit_insn (gen_addsi3_cin_cout_reg (hi_result, hi_op1, hi_op2));
725
726 arm_gen_unlikely_cbranch (GEU, CC_ADCmode, operands[3]);
727 }
728
729 DONE;
730 })
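
;; Illustrative sketch only: the 64-bit unsigned case similarly aims at
;;
;;   adds  r0, r0, r2
;;   adcs  r1, r1, r3
;;   bcs   .Loverflow   @ carry out of the top word => unsigned overflow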
731
732 (define_expand "addsi3_cin_cout_reg"
733 [(parallel
734 [(set (match_dup 3)
735 (compare:CC_ADC
736 (plus:DI
737 (plus:DI (match_dup 4)
738 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
739 (zero_extend:DI (match_operand:SI 2 "s_register_operand")))
740 (const_int 4294967296)))
741 (set (match_operand:SI 0 "s_register_operand")
742 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
743 (match_dup 2)))])]
744 "TARGET_32BIT"
745 {
746 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
747 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
748 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
749 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
750 }
751 )
752
753 (define_insn "*addsi3_cin_cout_reg_insn"
754 [(set (reg:CC_ADC CC_REGNUM)
755 (compare:CC_ADC
756 (plus:DI
757 (plus:DI
758 (match_operand:DI 3 "arm_carry_operation" "")
759 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
760 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
761 (const_int 4294967296)))
762 (set (match_operand:SI 0 "s_register_operand" "=l,r")
763 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
764 (match_dup 1))
765 (match_dup 2)))]
766 "TARGET_32BIT"
767 "@
768 adcs%?\\t%0, %0, %2
769 adcs%?\\t%0, %1, %2"
770 [(set_attr "type" "alus_sreg")
771 (set_attr "arch" "t2,*")
772 (set_attr "length" "2,4")]
773 )
774
775 (define_expand "addsi3_cin_cout_imm"
776 [(parallel
777 [(set (match_dup 3)
778 (compare:CC_ADC
779 (plus:DI
780 (plus:DI (match_dup 4)
781 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
782 (match_dup 6))
783 (const_int 4294967296)))
784 (set (match_operand:SI 0 "s_register_operand")
785 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
786 (match_operand:SI 2 "arm_adcimm_operand")))])]
787 "TARGET_32BIT"
788 {
789 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
790 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
791 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
792 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
793 operands[6] = GEN_INT (UINTVAL (operands[2]) & 0xffffffff);
794 }
795 )
796
797 (define_insn "*addsi3_cin_cout_imm_insn"
798 [(set (reg:CC_ADC CC_REGNUM)
799 (compare:CC_ADC
800 (plus:DI
801 (plus:DI
802 (match_operand:DI 3 "arm_carry_operation" "")
803 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
804 (match_operand:DI 5 "const_int_operand" "n,n"))
805 (const_int 4294967296)))
806 (set (match_operand:SI 0 "s_register_operand" "=r,r")
807 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
808 (match_dup 1))
809 (match_operand:SI 2 "arm_adcimm_operand" "I,K")))]
810 "TARGET_32BIT
811 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[5])"
812 "@
813 adcs%?\\t%0, %1, %2
814 sbcs%?\\t%0, %1, #%B2"
815 [(set_attr "type" "alus_imm")]
816 )
817
818 (define_expand "addsi3_cin_cout_0"
819 [(parallel
820 [(set (match_dup 2)
821 (compare:CC_ADC
822 (plus:DI (match_dup 3)
823 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
824 (const_int 4294967296)))
825 (set (match_operand:SI 0 "s_register_operand")
826 (plus:SI (match_dup 4) (match_dup 1)))])]
827 "TARGET_32BIT"
828 {
829 operands[2] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
830 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
831 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
832 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
833 }
834 )
835
836 (define_insn "*addsi3_cin_cout_0_insn"
837 [(set (reg:CC_ADC CC_REGNUM)
838 (compare:CC_ADC
839 (plus:DI
840 (match_operand:DI 2 "arm_carry_operation" "")
841 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
842 (const_int 4294967296)))
843 (set (match_operand:SI 0 "s_register_operand" "=r")
844 (plus:SI (match_operand:SI 3 "arm_carry_operation" "") (match_dup 1)))]
845 "TARGET_32BIT"
846 "adcs%?\\t%0, %1, #0"
847 [(set_attr "type" "alus_imm")]
848 )
849
850 (define_expand "addsi3"
851 [(set (match_operand:SI 0 "s_register_operand")
852 (plus:SI (match_operand:SI 1 "s_register_operand")
853 (match_operand:SI 2 "reg_or_int_operand")))]
854 "TARGET_EITHER"
855 "
856 if (TARGET_32BIT && CONST_INT_P (operands[2]))
857 {
858 arm_split_constant (PLUS, SImode, NULL_RTX,
859 INTVAL (operands[2]), operands[0], operands[1],
860 optimize && can_create_pseudo_p ());
861 DONE;
862 }
863 "
864 )
865
866 ; If there is a scratch available, this will be faster than synthesizing the
867 ; addition.
868 (define_peephole2
869 [(match_scratch:SI 3 "r")
870 (set (match_operand:SI 0 "arm_general_register_operand" "")
871 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
872 (match_operand:SI 2 "const_int_operand" "")))]
873 "TARGET_32BIT &&
874 !(const_ok_for_arm (INTVAL (operands[2]))
875 || const_ok_for_arm (-INTVAL (operands[2])))
876 && const_ok_for_arm (~INTVAL (operands[2]))"
877 [(set (match_dup 3) (match_dup 2))
878 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
879 ""
880 )
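
;; Hypothetical example: 0xffff00ff is not a valid add/sub immediate, but
;; its bitwise complement 0x0000ff00 is, so with a spare register the
;; peephole can produce something like
;;
;;   mvn   r3, #0x0000ff00   @ r3 = 0xffff00ff
;;   add   r0, r1, r3
;;
;; instead of synthesizing the constant with a longer sequence.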
881
882 ;; The r/r/k alternative is required when reloading the address
883 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
884 ;; put the duplicated register first, and not try the commutative version.
885 (define_insn_and_split "*arm_addsi3"
886 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
887 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
888 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
889 "TARGET_32BIT"
890 "@
891 add%?\\t%0, %0, %2
892 add%?\\t%0, %1, %2
893 add%?\\t%0, %1, %2
894 add%?\\t%0, %1, %2
895 add%?\\t%0, %1, %2
896 add%?\\t%0, %1, %2
897 add%?\\t%0, %2, %1
898 add%?\\t%0, %1, %2
899 addw%?\\t%0, %1, %2
900 addw%?\\t%0, %1, %2
901 sub%?\\t%0, %1, #%n2
902 sub%?\\t%0, %1, #%n2
903 sub%?\\t%0, %1, #%n2
904 subw%?\\t%0, %1, #%n2
905 subw%?\\t%0, %1, #%n2
906 #"
907 "TARGET_32BIT
908 && CONST_INT_P (operands[2])
909 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
910 && (reload_completed || !arm_eliminable_register (operands[1]))"
911 [(clobber (const_int 0))]
912 "
913 arm_split_constant (PLUS, SImode, curr_insn,
914 INTVAL (operands[2]), operands[0],
915 operands[1], 0);
916 DONE;
917 "
918 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
919 (set_attr "predicable" "yes")
920 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
921 (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
922 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
923 (const_string "alu_imm")
924 (const_string "alu_sreg")))
925 ]
926 )
927
928 (define_insn "addsi3_compareV_reg"
929 [(set (reg:CC_V CC_REGNUM)
930 (compare:CC_V
931 (plus:DI
932 (sign_extend:DI (match_operand:SI 1 "register_operand" "%l,0,r"))
933 (sign_extend:DI (match_operand:SI 2 "register_operand" "l,r,r")))
934 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
935 (set (match_operand:SI 0 "register_operand" "=l,r,r")
936 (plus:SI (match_dup 1) (match_dup 2)))]
937 "TARGET_32BIT"
938 "adds%?\\t%0, %1, %2"
939 [(set_attr "conds" "set")
940 (set_attr "arch" "t2,t2,*")
941 (set_attr "length" "2,2,4")
942 (set_attr "type" "alus_sreg")]
943 )
944
945 (define_insn "*addsi3_compareV_reg_nosum"
946 [(set (reg:CC_V CC_REGNUM)
947 (compare:CC_V
948 (plus:DI
949 (sign_extend:DI (match_operand:SI 0 "register_operand" "%l,r"))
950 (sign_extend:DI (match_operand:SI 1 "register_operand" "l,r")))
951 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
952 "TARGET_32BIT"
953 "cmn%?\\t%0, %1"
954 [(set_attr "conds" "set")
955 (set_attr "arch" "t2,*")
956 (set_attr "length" "2,4")
957 (set_attr "type" "alus_sreg")]
958 )
959
960 (define_insn "subvsi3_intmin"
961 [(set (reg:CC_V CC_REGNUM)
962 (compare:CC_V
963 (plus:DI
964 (sign_extend:DI
965 (match_operand:SI 1 "register_operand" "r"))
966 (const_int 2147483648))
967 (sign_extend:DI (plus:SI (match_dup 1) (const_int -2147483648)))))
968 (set (match_operand:SI 0 "register_operand" "=r")
969 (plus:SI (match_dup 1) (const_int -2147483648)))]
970 "TARGET_32BIT"
971 "subs%?\\t%0, %1, #-2147483648"
972 [(set_attr "conds" "set")
973 (set_attr "type" "alus_imm")]
974 )
975
976 (define_insn "addsi3_compareV_imm"
977 [(set (reg:CC_V CC_REGNUM)
978 (compare:CC_V
979 (plus:DI
980 (sign_extend:DI
981 (match_operand:SI 1 "register_operand" "l,0,l,0,r,r"))
982 (match_operand 2 "arm_addimm_operand" "Pd,Py,Px,Pw,I,L"))
983 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
984 (set (match_operand:SI 0 "register_operand" "=l,l,l,l,r,r")
985 (plus:SI (match_dup 1) (match_dup 2)))]
986 "TARGET_32BIT
987 && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
988 "@
989 adds%?\\t%0, %1, %2
990 adds%?\\t%0, %0, %2
991 subs%?\\t%0, %1, #%n2
992 subs%?\\t%0, %0, #%n2
993 adds%?\\t%0, %1, %2
994 subs%?\\t%0, %1, #%n2"
995 [(set_attr "conds" "set")
996 (set_attr "arch" "t2,t2,t2,t2,*,*")
997 (set_attr "length" "2,2,2,2,4,4")
998 (set_attr "type" "alus_imm")]
999 )
1000
1001 (define_insn "addsi3_compareV_imm_nosum"
1002 [(set (reg:CC_V CC_REGNUM)
1003 (compare:CC_V
1004 (plus:DI
1005 (sign_extend:DI
1006 (match_operand:SI 0 "register_operand" "l,r,r"))
1007 (match_operand 1 "arm_addimm_operand" "Pw,I,L"))
1008 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1009 "TARGET_32BIT
1010 && INTVAL (operands[1]) == ARM_SIGN_EXTEND (INTVAL (operands[1]))"
1011 "@
1012 cmp%?\\t%0, #%n1
1013 cmn%?\\t%0, %1
1014 cmp%?\\t%0, #%n1"
1015 [(set_attr "conds" "set")
1016 (set_attr "arch" "t2,*,*")
1017 (set_attr "length" "2,4,4")
1018 (set_attr "type" "alus_imm")]
1019 )
1020
1021 ;; We can handle more constants efficiently if we can clobber either a scratch
1022 ;; or the other source operand. We deliberately leave this late as in
1023 ;; high register pressure situations it's not worth forcing any reloads.
1024 (define_peephole2
1025 [(match_scratch:SI 2 "l")
1026 (set (reg:CC_V CC_REGNUM)
1027 (compare:CC_V
1028 (plus:DI
1029 (sign_extend:DI
1030 (match_operand:SI 0 "low_register_operand"))
1031 (match_operand 1 "const_int_operand"))
1032 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1033 "TARGET_THUMB2
1034 && satisfies_constraint_Pd (operands[1])"
1035 [(parallel[
1036 (set (reg:CC_V CC_REGNUM)
1037 (compare:CC_V
1038 (plus:DI (sign_extend:DI (match_dup 0))
1039 (sign_extend:DI (match_dup 1)))
1040 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1041 (set (match_dup 2) (plus:SI (match_dup 0) (match_dup 1)))])]
1042 )
1043
1044 (define_peephole2
1045 [(set (reg:CC_V CC_REGNUM)
1046 (compare:CC_V
1047 (plus:DI
1048 (sign_extend:DI
1049 (match_operand:SI 0 "low_register_operand"))
1050 (match_operand 1 "const_int_operand"))
1051 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1052 "TARGET_THUMB2
1053 && dead_or_set_p (peep2_next_insn (0), operands[0])
1054 && satisfies_constraint_Py (operands[1])"
1055 [(parallel[
1056 (set (reg:CC_V CC_REGNUM)
1057 (compare:CC_V
1058 (plus:DI (sign_extend:DI (match_dup 0))
1059 (sign_extend:DI (match_dup 1)))
1060 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1061 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 1)))])]
1062 )
1063
1064 (define_insn "addsi3_compare0"
1065 [(set (reg:CC_NOOV CC_REGNUM)
1066 (compare:CC_NOOV
1067 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
1068 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
1069 (const_int 0)))
1070 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1071 (plus:SI (match_dup 1) (match_dup 2)))]
1072 "TARGET_ARM"
1073 "@
1074 adds%?\\t%0, %1, %2
1075 subs%?\\t%0, %1, #%n2
1076 adds%?\\t%0, %1, %2"
1077 [(set_attr "conds" "set")
1078 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
1079 )
1080
1081 (define_insn "*addsi3_compare0_scratch"
1082 [(set (reg:CC_NOOV CC_REGNUM)
1083 (compare:CC_NOOV
1084 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
1085 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
1086 (const_int 0)))]
1087 "TARGET_ARM"
1088 "@
1089 cmn%?\\t%0, %1
1090 cmp%?\\t%0, #%n1
1091 cmn%?\\t%0, %1"
1092 [(set_attr "conds" "set")
1093 (set_attr "predicable" "yes")
1094 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
1095 )
1096
1097 (define_insn "*compare_negsi_si"
1098 [(set (reg:CC_Z CC_REGNUM)
1099 (compare:CC_Z
1100 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
1101 (match_operand:SI 1 "s_register_operand" "l,r")))]
1102 "TARGET_32BIT"
1103 "cmn%?\\t%1, %0"
1104 [(set_attr "conds" "set")
1105 (set_attr "predicable" "yes")
1106 (set_attr "arch" "t2,*")
1107 (set_attr "length" "2,4")
1108 (set_attr "predicable_short_it" "yes,no")
1109 (set_attr "type" "alus_sreg")]
1110 )
1111
1112 ;; This is the canonicalization of subsi3_compare when the
1113 ;; addend is a constant.
1114 (define_insn "cmpsi2_addneg"
1115 [(set (reg:CC CC_REGNUM)
1116 (compare:CC
1117 (match_operand:SI 1 "s_register_operand" "r,r")
1118 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
1119 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1120 (plus:SI (match_dup 1)
1121 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
1122 "TARGET_32BIT
1123 && (INTVAL (operands[2])
1124 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
1125 {
1126 /* For 0 and INT_MIN it is essential that we use subs, as adds will result
1127 in different condition codes (like cmn rather than like cmp), so that
1128 alternative comes first. Both alternatives can match for any 0x??000000
1129 value, where except for 0 and INT_MIN it doesn't matter which we choose,
1130 and also for -1 and 1 with TARGET_THUMB2; in that case prefer the
1131 instruction with #1 as it is shorter. */
1132 if (which_alternative == 0 && operands[3] != const1_rtx)
1133 return "subs%?\\t%0, %1, #%n3";
1134 else
1135 return "adds%?\\t%0, %1, %3";
1136 }
1137 [(set_attr "conds" "set")
1138 (set_attr "type" "alus_sreg")]
1139 )
1140
1141 ;; Convert the sequence
1142 ;; sub rd, rn, #1
1143 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
1144 ;; bne dest
1145 ;; into
1146 ;; subs rd, rn, #1
1147 ;; bcs dest ((unsigned)rn >= 1)
1148 ;; similarly for the beq variant using bcc.
1149 ;; This is a common looping idiom (while (n--))
1150 (define_peephole2
1151 [(set (match_operand:SI 0 "arm_general_register_operand" "")
1152 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
1153 (const_int -1)))
1154 (set (match_operand 2 "cc_register" "")
1155 (compare (match_dup 0) (const_int -1)))
1156 (set (pc)
1157 (if_then_else (match_operator 3 "equality_operator"
1158 [(match_dup 2) (const_int 0)])
1159 (match_operand 4 "" "")
1160 (match_operand 5 "" "")))]
1161 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
1162 [(parallel[
1163 (set (match_dup 2)
1164 (compare:CC
1165 (match_dup 1) (const_int 1)))
1166 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
1167 (set (pc)
1168 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
1169 (match_dup 4)
1170 (match_dup 5)))]
1171 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
1172 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
1173 ? GEU : LTU),
1174 VOIDmode,
1175 operands[2], const0_rtx);"
1176 )
1177
1178 ;; The next four insns work because they compare the result with one of
1179 ;; the operands, and we know that the use of the condition code is
1180 ;; either GEU or LTU, so we can use the carry flag from the addition
1181 ;; instead of doing the compare a second time.
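; For instance (illustrative only), a sequence such as
;   adds  r0, r1, r2
;   bcs   .Lwrapped
; needs no separate "cmp r0, r1": the carry produced by the ADDS already
; tells us whether the unsigned result wrapped past the operand.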
1182 (define_insn "addsi3_compare_op1"
1183 [(set (reg:CC_C CC_REGNUM)
1184 (compare:CC_C
1185 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,rk,rk")
1186 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rkI,L"))
1187 (match_dup 1)))
1188 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,rk,rk")
1189 (plus:SI (match_dup 1) (match_dup 2)))]
1190 "TARGET_32BIT"
1191 "@
1192 adds%?\\t%0, %1, %2
1193 adds%?\\t%0, %0, %2
1194 subs%?\\t%0, %1, #%n2
1195 subs%?\\t%0, %0, #%n2
1196 adds%?\\t%0, %1, %2
1197 subs%?\\t%0, %1, #%n2"
1198 [(set_attr "conds" "set")
1199 (set_attr "arch" "t2,t2,t2,t2,*,*")
1200 (set_attr "length" "2,2,2,2,4,4")
1201 (set (attr "type")
1202 (if_then_else (match_operand 2 "const_int_operand")
1203 (const_string "alu_imm")
1204 (const_string "alu_sreg")))]
1205 )
1206
1207 (define_insn "*addsi3_compare_op2"
1208 [(set (reg:CC_C CC_REGNUM)
1209 (compare:CC_C
1210 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r")
1211 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rI,L"))
1212 (match_dup 2)))
1213 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r")
1214 (plus:SI (match_dup 1) (match_dup 2)))]
1215 "TARGET_32BIT"
1216 "@
1217 adds%?\\t%0, %1, %2
1218 adds%?\\t%0, %0, %2
1219 subs%?\\t%0, %1, #%n2
1220 subs%?\\t%0, %0, #%n2
1221 adds%?\\t%0, %1, %2
1222 subs%?\\t%0, %1, #%n2"
1223 [(set_attr "conds" "set")
1224 (set_attr "arch" "t2,t2,t2,t2,*,*")
1225 (set_attr "length" "2,2,2,2,4,4")
1226 (set (attr "type")
1227 (if_then_else (match_operand 2 "const_int_operand")
1228 (const_string "alu_imm")
1229 (const_string "alu_sreg")))]
1230 )
1231
1232 (define_insn "*compare_addsi2_op0"
1233 [(set (reg:CC_C CC_REGNUM)
1234 (compare:CC_C
1235 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1236 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1237 (match_dup 0)))]
1238 "TARGET_32BIT"
1239 "@
1240 cmn%?\\t%0, %1
1241 cmp%?\\t%0, #%n1
1242 cmn%?\\t%0, %1
1243 cmp%?\\t%0, #%n1"
1244 [(set_attr "conds" "set")
1245 (set_attr "predicable" "yes")
1246 (set_attr "arch" "t2,t2,*,*")
1247 (set_attr "predicable_short_it" "yes,yes,no,no")
1248 (set_attr "length" "2,2,4,4")
1249 (set (attr "type")
1250 (if_then_else (match_operand 1 "const_int_operand")
1251 (const_string "alu_imm")
1252 (const_string "alu_sreg")))]
1253 )
1254
1255 (define_insn "*compare_addsi2_op1"
1256 [(set (reg:CC_C CC_REGNUM)
1257 (compare:CC_C
1258 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1259 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1260 (match_dup 1)))]
1261 "TARGET_32BIT"
1262 "@
1263 cmn%?\\t%0, %1
1264 cmp%?\\t%0, #%n1
1265 cmn%?\\t%0, %1
1266 cmp%?\\t%0, #%n1"
1267 [(set_attr "conds" "set")
1268 (set_attr "predicable" "yes")
1269 (set_attr "arch" "t2,t2,*,*")
1270 (set_attr "predicable_short_it" "yes,yes,no,no")
1271 (set_attr "length" "2,2,4,4")
1272 (set (attr "type")
1273 (if_then_else (match_operand 1 "const_int_operand")
1274 (const_string "alu_imm")
1275 (const_string "alu_sreg")))]
1276 )
1277
1278 (define_insn "addsi3_carryin"
1279 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1280 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
1281 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
1282 (match_operand:SI 3 "arm_carry_operation" "")))]
1283 "TARGET_32BIT"
1284 "@
1285 adc%?\\t%0, %1, %2
1286 adc%?\\t%0, %1, %2
1287 sbc%?\\t%0, %1, #%B2"
1288 [(set_attr "conds" "use")
1289 (set_attr "predicable" "yes")
1290 (set_attr "arch" "t2,*,*")
1291 (set_attr "length" "4")
1292 (set_attr "predicable_short_it" "yes,no,no")
1293 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1294 )
1295
1296 ;; Canonicalization of the above when the immediate is zero.
1297 (define_insn "add0si3_carryin"
1298 [(set (match_operand:SI 0 "s_register_operand" "=r")
1299 (plus:SI (match_operand:SI 2 "arm_carry_operation" "")
1300 (match_operand:SI 1 "arm_not_operand" "r")))]
1301 "TARGET_32BIT"
1302 "adc%?\\t%0, %1, #0"
1303 [(set_attr "conds" "use")
1304 (set_attr "predicable" "yes")
1305 (set_attr "length" "4")
1306 (set_attr "type" "adc_imm")]
1307 )
1308
1309 (define_insn "*addsi3_carryin_alt2"
1310 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1311 (plus:SI (plus:SI (match_operand:SI 3 "arm_carry_operation" "")
1312 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
1313 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
1314 "TARGET_32BIT"
1315 "@
1316 adc%?\\t%0, %1, %2
1317 adc%?\\t%0, %1, %2
1318 sbc%?\\t%0, %1, #%B2"
1319 [(set_attr "conds" "use")
1320 (set_attr "predicable" "yes")
1321 (set_attr "arch" "t2,*,*")
1322 (set_attr "length" "4")
1323 (set_attr "predicable_short_it" "yes,no,no")
1324 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1325 )
1326
1327 (define_insn "*addsi3_carryin_shift"
1328 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1329 (plus:SI (plus:SI
1330 (match_operator:SI 2 "shift_operator"
1331 [(match_operand:SI 3 "s_register_operand" "r,r")
1332 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1333 (match_operand:SI 5 "arm_carry_operation" ""))
1334 (match_operand:SI 1 "s_register_operand" "r,r")))]
1335 "TARGET_32BIT"
1336 "adc%?\\t%0, %1, %3%S2"
1337 [(set_attr "conds" "use")
1338 (set_attr "arch" "32,a")
1339 (set_attr "shift" "3")
1340 (set_attr "predicable" "yes")
1341 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1342 (const_string "alu_shift_imm")
1343 (const_string "alu_shift_reg")))]
1344 )
1345
1346 (define_insn "*addsi3_carryin_clobercc"
1347 [(set (match_operand:SI 0 "s_register_operand" "=r")
1348 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1349 (match_operand:SI 2 "arm_rhs_operand" "rI"))
1350 (match_operand:SI 3 "arm_carry_operation" "")))
1351 (clobber (reg:CC CC_REGNUM))]
1352 "TARGET_32BIT"
1353 "adcs%?\\t%0, %1, %2"
1354 [(set_attr "conds" "set")
1355 (set_attr "type" "adcs_reg")]
1356 )
1357
1358 (define_expand "subvsi4"
1359 [(match_operand:SI 0 "s_register_operand")
1360 (match_operand:SI 1 "arm_rhs_operand")
1361 (match_operand:SI 2 "arm_add_operand")
1362 (match_operand 3 "")]
1363 "TARGET_32BIT"
1364 {
1365 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1366 {
1367 /* If both operands are constants we can decide the result statically. */
1368 wi::overflow_type overflow;
1369 wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1370 rtx_mode_t (operands[2], SImode),
1371 SIGNED, &overflow);
1372 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1373 if (overflow != wi::OVF_NONE)
1374 emit_jump_insn (gen_jump (operands[3]));
1375 DONE;
1376 }
1377 else if (CONST_INT_P (operands[2]))
1378 {
1379 operands[2] = GEN_INT (-INTVAL (operands[2]));
1380 /* Special case for INT_MIN. */
1381 if (INTVAL (operands[2]) == 0x80000000)
1382 emit_insn (gen_subvsi3_intmin (operands[0], operands[1]));
1383 else
1384 emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1],
1385 operands[2]));
1386 }
1387 else if (CONST_INT_P (operands[1]))
1388 emit_insn (gen_subvsi3_imm1 (operands[0], operands[1], operands[2]));
1389 else
1390 emit_insn (gen_subvsi3 (operands[0], operands[1], operands[2]));
1391
1392 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
1393 DONE;
1394 })
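
;; Illustrative sketch only: in the common register/register case this
;; signed-overflow subtraction check becomes
;;
;;   subs  r0, r0, r1
;;   bvs   .Loverflow   @ label name is hypothetical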
1395
1396 (define_expand "subvdi4"
1397 [(match_operand:DI 0 "s_register_operand")
1398 (match_operand:DI 1 "reg_or_int_operand")
1399 (match_operand:DI 2 "reg_or_int_operand")
1400 (match_operand 3 "")]
1401 "TARGET_32BIT"
1402 {
1403 rtx lo_result, hi_result;
1404 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1405 lo_result = gen_lowpart (SImode, operands[0]);
1406 hi_result = gen_highpart (SImode, operands[0]);
1407 machine_mode mode = CCmode;
1408
1409 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1410 {
1411 /* If both operands are constants we can decide the result statically. */
1412 wi::overflow_type overflow;
1413 wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1414 rtx_mode_t (operands[2], DImode),
1415 SIGNED, &overflow);
1416 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1417 if (overflow != wi::OVF_NONE)
1418 emit_jump_insn (gen_jump (operands[3]));
1419 DONE;
1420 }
1421 else if (CONST_INT_P (operands[1]))
1422 {
1423 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1424 &lo_op1, &hi_op1);
1425 if (const_ok_for_arm (INTVAL (lo_op1)))
1426 {
1427 emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1428 GEN_INT (~UINTVAL (lo_op1))));
1429 /* We could potentially use RSC here in Arm state, but not
1430 in Thumb, so it's probably not worth the effort of handling
1431 this. */
1432 hi_op1 = force_reg (SImode, hi_op1);
1433 mode = CC_RSBmode;
1434 goto highpart;
1435 }
1436 operands[1] = force_reg (DImode, operands[1]);
1437 }
1438
1439 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1440 &lo_op2, &hi_op2);
1441 if (lo_op2 == const0_rtx)
1442 {
1443 emit_move_insn (lo_result, lo_op1);
1444 if (!arm_add_operand (hi_op2, SImode))
1445 hi_op2 = force_reg (SImode, hi_op2);
1446 emit_insn (gen_subvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1447 DONE;
1448 }
1449
1450 if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1451 lo_op2 = force_reg (SImode, lo_op2);
1452 if (CONST_INT_P (lo_op2))
1453 emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1454 GEN_INT (-INTVAL (lo_op2))));
1455 else
1456 emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1457
1458 highpart:
1459 if (!arm_not_operand (hi_op2, SImode))
1460 hi_op2 = force_reg (SImode, hi_op2);
1461 rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1462 if (CONST_INT_P (hi_op2))
1463 emit_insn (gen_subvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1464 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1465 gen_rtx_LTU (DImode, ccreg,
1466 const0_rtx)));
1467 else
1468 emit_insn (gen_subvsi3_borrow (hi_result, hi_op1, hi_op2,
1469 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1470 gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1471 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
1472
1473 DONE;
1474 })
1475
1476 (define_expand "usubvsi4"
1477 [(match_operand:SI 0 "s_register_operand")
1478 (match_operand:SI 1 "arm_rhs_operand")
1479 (match_operand:SI 2 "arm_add_operand")
1480 (match_operand 3 "")]
1481 "TARGET_32BIT"
1482 {
1483 machine_mode mode = CCmode;
1484 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1485 {
1486 /* If both operands are constants we can decide the result statically. */
1487 wi::overflow_type overflow;
1488 wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1489 rtx_mode_t (operands[2], SImode),
1490 UNSIGNED, &overflow);
1491 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1492 if (overflow != wi::OVF_NONE)
1493 emit_jump_insn (gen_jump (operands[3]));
1494 DONE;
1495 }
1496 else if (CONST_INT_P (operands[2]))
1497 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
1498 GEN_INT (-INTVAL (operands[2]))));
1499 else if (CONST_INT_P (operands[1]))
1500 {
1501 mode = CC_RSBmode;
1502 emit_insn (gen_rsb_imm_compare (operands[0], operands[1], operands[2],
1503 GEN_INT (~UINTVAL (operands[1]))));
1504 }
1505 else
1506 emit_insn (gen_subsi3_compare1 (operands[0], operands[1], operands[2]));
1507 arm_gen_unlikely_cbranch (LTU, mode, operands[3]);
1508
1509 DONE;
1510 })
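
;; Illustrative sketch only: the unsigned case branches on a borrow
;; (carry clear after SUBS) instead of on the V flag:
;;
;;   subs  r0, r0, r1
;;   bcc   .Loverflow   @ borrow => unsigned underflow (label hypothetical)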
1511
1512 (define_expand "usubvdi4"
1513 [(match_operand:DI 0 "s_register_operand")
1514 (match_operand:DI 1 "reg_or_int_operand")
1515 (match_operand:DI 2 "reg_or_int_operand")
1516 (match_operand 3 "")]
1517 "TARGET_32BIT"
1518 {
1519 rtx lo_result, hi_result;
1520 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1521 lo_result = gen_lowpart (SImode, operands[0]);
1522 hi_result = gen_highpart (SImode, operands[0]);
1523 machine_mode mode = CCmode;
1524
1525 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1526 {
1527 /* If both operands are constants we can decide the result statically. */
1528 wi::overflow_type overflow;
1529 wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1530 rtx_mode_t (operands[2], DImode),
1531 UNSIGNED, &overflow);
1532 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1533 if (overflow != wi::OVF_NONE)
1534 emit_jump_insn (gen_jump (operands[3]));
1535 DONE;
1536 }
1537 else if (CONST_INT_P (operands[1]))
1538 {
1539 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1540 &lo_op1, &hi_op1);
1541 if (const_ok_for_arm (INTVAL (lo_op1)))
1542 {
1543 emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1544 GEN_INT (~UINTVAL (lo_op1))));
1545 /* We could potentially use RSC here in Arm state, but not
1546 in Thumb, so it's probably not worth the effort of handling
1547 this. */
1548 hi_op1 = force_reg (SImode, hi_op1);
1549 mode = CC_RSBmode;
1550 goto highpart;
1551 }
1552 operands[1] = force_reg (DImode, operands[1]);
1553 }
1554
1555 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1556 &lo_op2, &hi_op2);
1557 if (lo_op2 == const0_rtx)
1558 {
1559 emit_move_insn (lo_result, lo_op1);
1560 if (!arm_add_operand (hi_op2, SImode))
1561 hi_op2 = force_reg (SImode, hi_op2);
1562 emit_insn (gen_usubvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1563 DONE;
1564 }
1565
1566 if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1567 lo_op2 = force_reg (SImode, lo_op2);
1568 if (CONST_INT_P (lo_op2))
1569 emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1570 GEN_INT (-INTVAL (lo_op2))));
1571 else
1572 emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1573
1574 highpart:
1575 if (!arm_not_operand (hi_op2, SImode))
1576 hi_op2 = force_reg (SImode, hi_op2);
1577 rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1578 if (CONST_INT_P (hi_op2))
1579 emit_insn (gen_usubvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1580 GEN_INT (UINTVAL (hi_op2) & 0xffffffff),
1581 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1582 gen_rtx_LTU (DImode, ccreg,
1583 const0_rtx)));
1584 else
1585 emit_insn (gen_usubvsi3_borrow (hi_result, hi_op1, hi_op2,
1586 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1587 gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1588 arm_gen_unlikely_cbranch (LTU, CC_Bmode, operands[3]);
1589
1590 DONE;
1591 })
1592
1593 (define_insn "subsi3_compare1"
1594 [(set (reg:CC CC_REGNUM)
1595 (compare:CC
1596 (match_operand:SI 1 "register_operand" "r")
1597 (match_operand:SI 2 "register_operand" "r")))
1598 (set (match_operand:SI 0 "register_operand" "=r")
1599 (minus:SI (match_dup 1) (match_dup 2)))]
1600 "TARGET_32BIT"
1601 "subs%?\\t%0, %1, %2"
1602 [(set_attr "conds" "set")
1603 (set_attr "type" "alus_sreg")]
1604 )
1605
1606 (define_insn "subvsi3"
1607 [(set (reg:CC_V CC_REGNUM)
1608 (compare:CC_V
1609 (minus:DI
1610 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "l,r"))
1611 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
1612 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1613 (set (match_operand:SI 0 "s_register_operand" "=l,r")
1614 (minus:SI (match_dup 1) (match_dup 2)))]
1615 "TARGET_32BIT"
1616 "subs%?\\t%0, %1, %2"
1617 [(set_attr "conds" "set")
1618 (set_attr "arch" "t2,*")
1619 (set_attr "length" "2,4")
1620 (set_attr "type" "alus_sreg")]
1621 )
1622
1623 (define_insn "subvsi3_imm1"
1624 [(set (reg:CC_V CC_REGNUM)
1625 (compare:CC_V
1626 (minus:DI
1627 (match_operand 1 "arm_immediate_operand" "I")
1628 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1629 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1630 (set (match_operand:SI 0 "s_register_operand" "=r")
1631 (minus:SI (match_dup 1) (match_dup 2)))]
1632 "TARGET_32BIT"
1633 "rsbs%?\\t%0, %2, %1"
1634 [(set_attr "conds" "set")
1635 (set_attr "type" "alus_imm")]
1636 )
1637
1638 (define_insn "subsi3_carryin"
1639 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1640 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
1641 (match_operand:SI 2 "s_register_operand" "r,r,r"))
1642 (match_operand:SI 3 "arm_borrow_operation" "")))]
1643 "TARGET_32BIT"
1644 "@
1645 sbc%?\\t%0, %1, %2
1646 rsc%?\\t%0, %2, %1
1647 sbc%?\\t%0, %2, %2, lsl #1"
1648 [(set_attr "conds" "use")
1649 (set_attr "arch" "*,a,t2")
1650 (set_attr "predicable" "yes")
1651 (set_attr "type" "adc_reg,adc_imm,alu_shift_imm")]
1652 )
1653
1654 (define_insn "cmpsi3_carryin_<CC_EXTEND>out"
1655 [(set (reg:<CC_EXTEND> CC_REGNUM)
1656 (compare:<CC_EXTEND>
1657 (SE:DI (match_operand:SI 1 "s_register_operand" "0,r"))
1658 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1659 (SE:DI (match_operand:SI 2 "s_register_operand" "l,r")))))
1660 (clobber (match_scratch:SI 0 "=l,r"))]
1661 "TARGET_32BIT"
1662 "sbcs\\t%0, %1, %2"
1663 [(set_attr "conds" "set")
1664 (set_attr "arch" "t2,*")
1665 (set_attr "length" "2,4")
1666 (set_attr "type" "adc_reg")]
1667 )
1668
1669 ;; Similar to the above, but handling a constant which has a different
1670 ;; canonicalization.
1671 (define_insn "cmpsi3_imm_carryin_<CC_EXTEND>out"
1672 [(set (reg:<CC_EXTEND> CC_REGNUM)
1673 (compare:<CC_EXTEND>
1674 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1675 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1676 (match_operand:DI 2 "arm_adcimm_operand" "I,K"))))
1677 (clobber (match_scratch:SI 0 "=l,r"))]
1678 "TARGET_32BIT"
1679 "@
1680 sbcs\\t%0, %1, %2
1681 adcs\\t%0, %1, #%B2"
1682 [(set_attr "conds" "set")
1683 (set_attr "type" "adc_imm")]
1684 )
1685
1686 ;; Further canonicalization when the constant is zero.
1687 (define_insn "cmpsi3_0_carryin_<CC_EXTEND>out"
1688 [(set (reg:<CC_EXTEND> CC_REGNUM)
1689 (compare:<CC_EXTEND>
1690 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1691 (match_operand:DI 2 "arm_borrow_operation" "")))
1692 (clobber (match_scratch:SI 0 "=l,r"))]
1693 "TARGET_32BIT"
1694 "sbcs\\t%0, %1, #0"
1695 [(set_attr "conds" "set")
1696 (set_attr "type" "adc_imm")]
1697 )
1698
1699 (define_insn "*subsi3_carryin_const"
1700 [(set (match_operand:SI 0 "s_register_operand" "=r")
1701 (minus:SI (plus:SI
1702 (match_operand:SI 1 "s_register_operand" "r")
1703 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1704 (match_operand:SI 3 "arm_borrow_operation" "")))]
1705 "TARGET_32BIT"
1706 "sbc\\t%0, %1, #%n2"
1707 [(set_attr "conds" "use")
1708 (set_attr "type" "adc_imm")]
1709 )
1710
1711 (define_insn "*subsi3_carryin_const0"
1712 [(set (match_operand:SI 0 "s_register_operand" "=r")
1713 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1714 (match_operand:SI 2 "arm_borrow_operation" "")))]
1715 "TARGET_32BIT"
1716 "sbc\\t%0, %1, #0"
1717 [(set_attr "conds" "use")
1718 (set_attr "type" "adc_imm")]
1719 )
1720
1721 (define_insn "*subsi3_carryin_shift"
1722 [(set (match_operand:SI 0 "s_register_operand" "=r")
1723 (minus:SI (minus:SI
1724 (match_operand:SI 1 "s_register_operand" "r")
1725 (match_operator:SI 2 "shift_operator"
1726 [(match_operand:SI 3 "s_register_operand" "r")
1727 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
1728 (match_operand:SI 5 "arm_borrow_operation" "")))]
1729 "TARGET_32BIT"
1730 "sbc%?\\t%0, %1, %3%S2"
1731 [(set_attr "conds" "use")
1732 (set_attr "predicable" "yes")
1733 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1734 (const_string "alu_shift_imm")
1735 (const_string "alu_shift_reg")))]
1736 )
1737
1738 (define_insn "*subsi3_carryin_shift_alt"
1739 [(set (match_operand:SI 0 "s_register_operand" "=r")
1740 (minus:SI (minus:SI
1741 (match_operand:SI 1 "s_register_operand" "r")
1742 (match_operand:SI 5 "arm_borrow_operation" ""))
1743 (match_operator:SI 2 "shift_operator"
1744 [(match_operand:SI 3 "s_register_operand" "r")
1745 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
1746 "TARGET_32BIT"
1747 "sbc%?\\t%0, %1, %3%S2"
1748 [(set_attr "conds" "use")
1749 (set_attr "predicable" "yes")
1750 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1751 (const_string "alu_shift_imm")
1752 (const_string "alu_shift_reg")))]
1753 )
1754
1755 (define_insn "*rsbsi3_carryin_shift"
1756 [(set (match_operand:SI 0 "s_register_operand" "=r")
1757 (minus:SI (minus:SI
1758 (match_operator:SI 2 "shift_operator"
1759 [(match_operand:SI 3 "s_register_operand" "r")
1760 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1761 (match_operand:SI 1 "s_register_operand" "r"))
1762 (match_operand:SI 5 "arm_borrow_operation" "")))]
1763 "TARGET_ARM"
1764 "rsc%?\\t%0, %1, %3%S2"
1765 [(set_attr "conds" "use")
1766 (set_attr "predicable" "yes")
1767 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1768 (const_string "alu_shift_imm")
1769 (const_string "alu_shift_reg")))]
1770 )
1771
1772 (define_insn "*rsbsi3_carryin_shift_alt"
1773 [(set (match_operand:SI 0 "s_register_operand" "=r")
1774 (minus:SI (minus:SI
1775 (match_operator:SI 2 "shift_operator"
1776 [(match_operand:SI 3 "s_register_operand" "r")
1777 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1778 (match_operand:SI 5 "arm_borrow_operation" ""))
1779 (match_operand:SI 1 "s_register_operand" "r")))]
1780 "TARGET_ARM"
1781 "rsc%?\\t%0, %1, %3%S2"
1782 [(set_attr "conds" "use")
1783 (set_attr "predicable" "yes")
1784 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1785 (const_string "alu_shift_imm")
1786 (const_string "alu_shift_reg")))]
1787 )
1788
1789 ; Transform ((x << y) - 1) into ~(~(x - 1) << y), where x is a constant.
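; For example, with x = 4 and y = 2: (4 << 2) - 1 = 15, and
; ~(~(4 - 1) << 2) = ~(0xfffffffc << 2) = ~0xfffffff0 = 15.  Folding the
; "- 1" into the complemented constant means the result can be formed from a
; constant load and a NOT of the shifted value, with no separate subtract.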
1790 (define_split
1791 [(set (match_operand:SI 0 "s_register_operand" "")
1792 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1793 (match_operand:SI 2 "s_register_operand" ""))
1794 (const_int -1)))
1795 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1796 "TARGET_32BIT"
1797 [(set (match_dup 3) (match_dup 1))
1798 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1799 "
1800 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1801 ")
1802
1803 (define_expand "addsf3"
1804 [(set (match_operand:SF 0 "s_register_operand")
1805 (plus:SF (match_operand:SF 1 "s_register_operand")
1806 (match_operand:SF 2 "s_register_operand")))]
1807 "TARGET_32BIT && TARGET_HARD_FLOAT"
1808 "
1809 ")
1810
1811 (define_expand "adddf3"
1812 [(set (match_operand:DF 0 "s_register_operand")
1813 (plus:DF (match_operand:DF 1 "s_register_operand")
1814 (match_operand:DF 2 "s_register_operand")))]
1815 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1816 "
1817 ")
1818
1819 (define_expand "subdi3"
1820 [(parallel
1821 [(set (match_operand:DI 0 "s_register_operand")
1822 (minus:DI (match_operand:DI 1 "reg_or_int_operand")
1823 (match_operand:DI 2 "s_register_operand")))
1824 (clobber (reg:CC CC_REGNUM))])]
1825 "TARGET_EITHER"
1826 "
1827 if (TARGET_THUMB1)
1828 {
1829 if (!REG_P (operands[1]))
1830 operands[1] = force_reg (DImode, operands[1]);
1831 }
1832 else
1833 {
1834 rtx lo_result, hi_result, lo_dest, hi_dest;
1835 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1836 rtx condition;
1837
1838 /* Since operands[1] may be an integer, pass it second, so that
1839 any necessary simplifications will be done on the decomposed
1840 constant. */
1841 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1842 &lo_op1, &hi_op1);
1843 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
1844 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
1845
1846 if (!arm_rhs_operand (lo_op1, SImode))
1847 lo_op1 = force_reg (SImode, lo_op1);
1848
1849 if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
1850 || !arm_rhs_operand (hi_op1, SImode))
1851 hi_op1 = force_reg (SImode, hi_op1);
1852
1853 rtx cc_reg;
1854 if (lo_op1 == const0_rtx)
1855 {
1856 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1857 emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
1858 }
1859 else if (CONST_INT_P (lo_op1))
1860 {
1861 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1862 emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
1863 GEN_INT (~UINTVAL (lo_op1))));
1864 }
1865 else
1866 {
1867 cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
1868 emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
1869 }
1870
1871 condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);
1872
1873 if (hi_op1 == const0_rtx)
1874 emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
1875 else
1876 emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));
1877
1878 if (lo_result != lo_dest)
1879 emit_move_insn (lo_result, lo_dest);
1880
1881 if (hi_result != hi_dest)
1882 emit_move_insn (hi_result, hi_dest);
1883
1884 DONE;
1885 }
1886 "
1887 )
1888
1889 (define_expand "subsi3"
1890 [(set (match_operand:SI 0 "s_register_operand")
1891 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1892 (match_operand:SI 2 "s_register_operand")))]
1893 "TARGET_EITHER"
1894 "
1895 if (CONST_INT_P (operands[1]))
1896 {
1897 if (TARGET_32BIT)
1898 {
1899 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1900 operands[1] = force_reg (SImode, operands[1]);
1901 else
1902 {
1903 arm_split_constant (MINUS, SImode, NULL_RTX,
1904 INTVAL (operands[1]), operands[0],
1905 operands[2],
1906 optimize && can_create_pseudo_p ());
1907 DONE;
1908 }
1909 }
1910 else /* TARGET_THUMB1 */
1911 operands[1] = force_reg (SImode, operands[1]);
1912 }
1913 "
1914 )
1915
1916 ; ??? Check Thumb-2 split length
1917 (define_insn_and_split "*arm_subsi3_insn"
1918 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
1919 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
1920 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
1921 "TARGET_32BIT"
1922 "@
1923 sub%?\\t%0, %1, %2
1924 sub%?\\t%0, %2
1925 sub%?\\t%0, %1, %2
1926 rsb%?\\t%0, %2, %1
1927 rsb%?\\t%0, %2, %1
1928 sub%?\\t%0, %1, %2
1929 sub%?\\t%0, %1, %2
1930 sub%?\\t%0, %1, %2
1931 #"
1932 "&& (CONST_INT_P (operands[1])
1933 && !const_ok_for_arm (INTVAL (operands[1])))"
1934 [(clobber (const_int 0))]
1935 "
1936 arm_split_constant (MINUS, SImode, curr_insn,
1937 INTVAL (operands[1]), operands[0], operands[2], 0);
1938 DONE;
1939 "
1940 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
1941 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
1942 (set_attr "predicable" "yes")
1943 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
1944 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
1945 )
1946
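; If the constant in (const - reg) cannot be encoded as an ARM immediate but
; its bitwise complement can, the peephole below first materialises the
; constant in a scratch register (the move can then use MVN) and falls back
; to a register-register subtract.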
1947 (define_peephole2
1948 [(match_scratch:SI 3 "r")
1949 (set (match_operand:SI 0 "arm_general_register_operand" "")
1950 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1951 (match_operand:SI 2 "arm_general_register_operand" "")))]
1952 "TARGET_32BIT
1953 && !const_ok_for_arm (INTVAL (operands[1]))
1954 && const_ok_for_arm (~INTVAL (operands[1]))"
1955 [(set (match_dup 3) (match_dup 1))
1956 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1957 ""
1958 )
1959
1960 (define_insn "subsi3_compare0"
1961 [(set (reg:CC_NOOV CC_REGNUM)
1962 (compare:CC_NOOV
1963 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1964 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1965 (const_int 0)))
1966 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1967 (minus:SI (match_dup 1) (match_dup 2)))]
1968 "TARGET_32BIT"
1969 "@
1970 subs%?\\t%0, %1, %2
1971 subs%?\\t%0, %1, %2
1972 rsbs%?\\t%0, %2, %1"
1973 [(set_attr "conds" "set")
1974 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
1975 )
1976
1977 (define_insn "subsi3_compare"
1978 [(set (reg:CC CC_REGNUM)
1979 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1980 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1981 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1982 (minus:SI (match_dup 1) (match_dup 2)))]
1983 "TARGET_32BIT"
1984 "@
1985 subs%?\\t%0, %1, %2
1986 subs%?\\t%0, %1, %2
1987 rsbs%?\\t%0, %2, %1"
1988 [(set_attr "conds" "set")
1989 (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
1990 )
1991
1992 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
1993 ;; rather than (0 cmp reg). This gives the same results for unsigned
1994 ;; and equality compares, which is what we mostly need here.
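;; Bitwise NOT reverses the unsigned order: for 32-bit unsigned a and b,
;; ~a < ~b exactly when b < a, and ~a == ~b exactly when a == b.  So the
;; (~reg cmp ~imm) form below describes the same unsigned and equality
;; conditions as (imm cmp reg), which is what RSBS actually computes.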
1995 (define_insn "rsb_imm_compare"
1996 [(set (reg:CC_RSB CC_REGNUM)
1997 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
1998 (match_operand 3 "const_int_operand" "")))
1999 (set (match_operand:SI 0 "s_register_operand" "=r")
2000 (minus:SI (match_operand 1 "arm_immediate_operand" "I")
2001 (match_dup 2)))]
2002 "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
2003 "rsbs\\t%0, %2, %1"
2004 [(set_attr "conds" "set")
2005 (set_attr "type" "alus_imm")]
2006 )
2007
2008 ;; Similarly, but the result is unused.
2009 (define_insn "rsb_imm_compare_scratch"
2010 [(set (reg:CC_RSB CC_REGNUM)
2011 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2012 (match_operand 1 "arm_not_immediate_operand" "K")))
2013 (clobber (match_scratch:SI 0 "=r"))]
2014 "TARGET_32BIT"
2015 "rsbs\\t%0, %2, #%B1"
2016 [(set_attr "conds" "set")
2017 (set_attr "type" "alus_imm")]
2018 )
2019
2020 ;; Compare the sum of a value and a carry against a constant. Uses
2021 ;; RSC, so the result is swapped. Only available in Arm state.
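;; Concretely, "rscs %0, %2, %1" computes %1 - %2 - NOT(carry), i.e. the
;; constant minus (value + borrow), so the flags describe a compare with the
;; operands swapped relative to a normal subtract; hence the CC_SWP mode.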
2022 (define_insn "rscsi3_<CC_EXTEND>out_scratch"
2023 [(set (reg:CC_SWP CC_REGNUM)
2024 (compare:CC_SWP
2025 (plus:DI (SE:DI (match_operand:SI 2 "s_register_operand" "r"))
2026 (match_operand:DI 3 "arm_borrow_operation" ""))
2027 (match_operand 1 "arm_immediate_operand" "I")))
2028 (clobber (match_scratch:SI 0 "=r"))]
2029 "TARGET_ARM"
2030 "rscs\\t%0, %2, %1"
2031 [(set_attr "conds" "set")
2032 (set_attr "type" "alus_imm")]
2033 )
2034
2035 (define_insn "usubvsi3_borrow"
2036 [(set (reg:CC_B CC_REGNUM)
2037 (compare:CC_B
2038 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
2039 (plus:DI (match_operand:DI 4 "arm_borrow_operation" "")
2040 (zero_extend:DI
2041 (match_operand:SI 2 "s_register_operand" "l,r")))))
2042 (set (match_operand:SI 0 "s_register_operand" "=l,r")
2043 (minus:SI (match_dup 1)
2044 (plus:SI (match_operand:SI 3 "arm_borrow_operation" "")
2045 (match_dup 2))))]
2046 "TARGET_32BIT"
2047 "sbcs%?\\t%0, %1, %2"
2048 [(set_attr "conds" "set")
2049 (set_attr "arch" "t2,*")
2050 (set_attr "length" "2,4")]
2051 )
2052
2053 (define_insn "usubvsi3_borrow_imm"
2054 [(set (reg:CC_B CC_REGNUM)
2055 (compare:CC_B
2056 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
2057 (plus:DI (match_operand:DI 5 "arm_borrow_operation" "")
2058 (match_operand:DI 3 "const_int_operand" "n,n"))))
2059 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2060 (minus:SI (match_dup 1)
2061 (plus:SI (match_operand:SI 4 "arm_borrow_operation" "")
2062 (match_operand:SI 2 "arm_adcimm_operand" "I,K"))))]
2063 "TARGET_32BIT
2064 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[3])"
2065 "@
2066 sbcs%?\\t%0, %1, %2
2067 adcs%?\\t%0, %1, #%B2"
2068 [(set_attr "conds" "set")
2069 (set_attr "type" "alus_imm")]
2070 )
2071
2072 (define_insn "subvsi3_borrow"
2073 [(set (reg:CC_V CC_REGNUM)
2074 (compare:CC_V
2075 (minus:DI
2076 (minus:DI
2077 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
2078 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
2079 (match_operand:DI 4 "arm_borrow_operation" ""))
2080 (sign_extend:DI
2081 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2082 (match_operand:SI 3 "arm_borrow_operation" "")))))
2083 (set (match_operand:SI 0 "s_register_operand" "=l,r")
2084 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2085 (match_dup 3)))]
2086 "TARGET_32BIT"
2087 "sbcs%?\\t%0, %1, %2"
2088 [(set_attr "conds" "set")
2089 (set_attr "arch" "t2,*")
2090 (set_attr "length" "2,4")]
2091 )
2092
2093 (define_insn "subvsi3_borrow_imm"
2094 [(set (reg:CC_V CC_REGNUM)
2095 (compare:CC_V
2096 (minus:DI
2097 (minus:DI
2098 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
2099 (match_operand 2 "arm_adcimm_operand" "I,K"))
2100 (match_operand:DI 4 "arm_borrow_operation" ""))
2101 (sign_extend:DI
2102 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2103 (match_operand:SI 3 "arm_borrow_operation" "")))))
2104 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2105 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2106 (match_dup 3)))]
2107 "TARGET_32BIT
2108 && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
2109 "@
2110 sbcs%?\\t%0, %1, %2
2111 adcs%?\\t%0, %1, #%B2"
2112 [(set_attr "conds" "set")
2113 (set_attr "type" "alus_imm")]
2114 )
2115
2116 (define_expand "subsf3"
2117 [(set (match_operand:SF 0 "s_register_operand")
2118 (minus:SF (match_operand:SF 1 "s_register_operand")
2119 (match_operand:SF 2 "s_register_operand")))]
2120 "TARGET_32BIT && TARGET_HARD_FLOAT"
2121 "
2122 ")
2123
2124 (define_expand "subdf3"
2125 [(set (match_operand:DF 0 "s_register_operand")
2126 (minus:DF (match_operand:DF 1 "s_register_operand")
2127 (match_operand:DF 2 "s_register_operand")))]
2128 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2129 "
2130 ")
2131
2132 \f
2133 ;; Multiplication insns
2134
2135 (define_expand "mulhi3"
2136 [(set (match_operand:HI 0 "s_register_operand")
2137 (mult:HI (match_operand:HI 1 "s_register_operand")
2138 (match_operand:HI 2 "s_register_operand")))]
2139 "TARGET_DSP_MULTIPLY"
2140 "
2141 {
2142 rtx result = gen_reg_rtx (SImode);
2143 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
2144 emit_move_insn (operands[0], gen_lowpart (HImode, result));
2145 DONE;
2146 }"
2147 )
2148
2149 (define_expand "mulsi3"
2150 [(set (match_operand:SI 0 "s_register_operand")
2151 (mult:SI (match_operand:SI 2 "s_register_operand")
2152 (match_operand:SI 1 "s_register_operand")))]
2153 "TARGET_EITHER"
2154 ""
2155 )
2156
2157 ;; Use `&' and then `0' to prevent operands 0 and 2 from being the same.
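;; On cores before ARMv6, MUL requires the destination to differ from the
;; first source operand (Rd != Rm); the early-clobber and matching
;; constraints in the "nov6" alternatives below enforce this, while the v6
;; and Thumb-2 alternatives carry no such restriction.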
2158 (define_insn "*mul"
2159 [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
2160 (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
2161 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
2162 "TARGET_32BIT"
2163 "mul%?\\t%0, %2, %1"
2164 [(set_attr "type" "mul")
2165 (set_attr "predicable" "yes")
2166 (set_attr "arch" "t2,v6,nov6,nov6")
2167 (set_attr "length" "4")
2168 (set_attr "predicable_short_it" "yes,no,*,*")]
2169 )
2170
2171 ;; MLA and MLS instructions. Use operand 1 for the accumulator to prefer
2172 ;; reusing the same register.
2173
2174 (define_insn "*mla"
2175 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
2176 (plus:SI
2177 (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
2178 (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
2179 (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
2180 "TARGET_32BIT"
2181 "mla%?\\t%0, %3, %2, %1"
2182 [(set_attr "type" "mla")
2183 (set_attr "predicable" "yes")
2184 (set_attr "arch" "v6,nov6,nov6,nov6")]
2185 )
2186
2187 (define_insn "*mls"
2188 [(set (match_operand:SI 0 "s_register_operand" "=r")
2189 (minus:SI
2190 (match_operand:SI 1 "s_register_operand" "r")
2191 (mult:SI (match_operand:SI 3 "s_register_operand" "r")
2192 (match_operand:SI 2 "s_register_operand" "r"))))]
2193 "TARGET_32BIT && arm_arch_thumb2"
2194 "mls%?\\t%0, %3, %2, %1"
2195 [(set_attr "type" "mla")
2196 (set_attr "predicable" "yes")]
2197 )
2198
2199 (define_insn "*mulsi3_compare0"
2200 [(set (reg:CC_NOOV CC_REGNUM)
2201 (compare:CC_NOOV (mult:SI
2202 (match_operand:SI 2 "s_register_operand" "r,r")
2203 (match_operand:SI 1 "s_register_operand" "%0,r"))
2204 (const_int 0)))
2205 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
2206 (mult:SI (match_dup 2) (match_dup 1)))]
2207 "TARGET_ARM && !arm_arch6"
2208 "muls%?\\t%0, %2, %1"
2209 [(set_attr "conds" "set")
2210 (set_attr "type" "muls")]
2211 )
2212
2213 (define_insn "*mulsi3_compare0_v6"
2214 [(set (reg:CC_NOOV CC_REGNUM)
2215 (compare:CC_NOOV (mult:SI
2216 (match_operand:SI 2 "s_register_operand" "r")
2217 (match_operand:SI 1 "s_register_operand" "r"))
2218 (const_int 0)))
2219 (set (match_operand:SI 0 "s_register_operand" "=r")
2220 (mult:SI (match_dup 2) (match_dup 1)))]
2221 "TARGET_ARM && arm_arch6 && optimize_size"
2222 "muls%?\\t%0, %2, %1"
2223 [(set_attr "conds" "set")
2224 (set_attr "type" "muls")]
2225 )
2226
2227 (define_insn "*mulsi_compare0_scratch"
2228 [(set (reg:CC_NOOV CC_REGNUM)
2229 (compare:CC_NOOV (mult:SI
2230 (match_operand:SI 2 "s_register_operand" "r,r")
2231 (match_operand:SI 1 "s_register_operand" "%0,r"))
2232 (const_int 0)))
2233 (clobber (match_scratch:SI 0 "=&r,&r"))]
2234 "TARGET_ARM && !arm_arch6"
2235 "muls%?\\t%0, %2, %1"
2236 [(set_attr "conds" "set")
2237 (set_attr "type" "muls")]
2238 )
2239
2240 (define_insn "*mulsi_compare0_scratch_v6"
2241 [(set (reg:CC_NOOV CC_REGNUM)
2242 (compare:CC_NOOV (mult:SI
2243 (match_operand:SI 2 "s_register_operand" "r")
2244 (match_operand:SI 1 "s_register_operand" "r"))
2245 (const_int 0)))
2246 (clobber (match_scratch:SI 0 "=r"))]
2247 "TARGET_ARM && arm_arch6 && optimize_size"
2248 "muls%?\\t%0, %2, %1"
2249 [(set_attr "conds" "set")
2250 (set_attr "type" "muls")]
2251 )
2252
2253 (define_insn "*mulsi3addsi_compare0"
2254 [(set (reg:CC_NOOV CC_REGNUM)
2255 (compare:CC_NOOV
2256 (plus:SI (mult:SI
2257 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2258 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2259 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
2260 (const_int 0)))
2261 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
2262 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2263 (match_dup 3)))]
2264 "TARGET_ARM && arm_arch6"
2265 "mlas%?\\t%0, %2, %1, %3"
2266 [(set_attr "conds" "set")
2267 (set_attr "type" "mlas")]
2268 )
2269
2270 (define_insn "*mulsi3addsi_compare0_v6"
2271 [(set (reg:CC_NOOV CC_REGNUM)
2272 (compare:CC_NOOV
2273 (plus:SI (mult:SI
2274 (match_operand:SI 2 "s_register_operand" "r")
2275 (match_operand:SI 1 "s_register_operand" "r"))
2276 (match_operand:SI 3 "s_register_operand" "r"))
2277 (const_int 0)))
2278 (set (match_operand:SI 0 "s_register_operand" "=r")
2279 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2280 (match_dup 3)))]
2281 "TARGET_ARM && arm_arch6 && optimize_size"
2282 "mlas%?\\t%0, %2, %1, %3"
2283 [(set_attr "conds" "set")
2284 (set_attr "type" "mlas")]
2285 )
2286
2287 (define_insn "*mulsi3addsi_compare0_scratch"
2288 [(set (reg:CC_NOOV CC_REGNUM)
2289 (compare:CC_NOOV
2290 (plus:SI (mult:SI
2291 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2292 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2293 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
2294 (const_int 0)))
2295 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
2296 "TARGET_ARM && !arm_arch6"
2297 "mlas%?\\t%0, %2, %1, %3"
2298 [(set_attr "conds" "set")
2299 (set_attr "type" "mlas")]
2300 )
2301
2302 (define_insn "*mulsi3addsi_compare0_scratch_v6"
2303 [(set (reg:CC_NOOV CC_REGNUM)
2304 (compare:CC_NOOV
2305 (plus:SI (mult:SI
2306 (match_operand:SI 2 "s_register_operand" "r")
2307 (match_operand:SI 1 "s_register_operand" "r"))
2308 (match_operand:SI 3 "s_register_operand" "r"))
2309 (const_int 0)))
2310 (clobber (match_scratch:SI 0 "=r"))]
2311 "TARGET_ARM && arm_arch6 && optimize_size"
2312 "mlas%?\\t%0, %2, %1, %3"
2313 [(set_attr "conds" "set")
2314 (set_attr "type" "mlas")]
2315 )
2316
2317 ;; 32x32->64 widening multiply.
2318 ;; The only difference between the v3-5 and v6+ versions is the requirement
2319 ;; that the output does not overlap with either input.
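;; For example, "umull r0, r1, r2, r3" puts the low 32 bits of the unsigned
;; 64-bit product r2 * r3 in r0 and the high 32 bits in r1; SMULL is the
;; signed counterpart.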
2320
2321 (define_expand "<Us>mulsidi3"
2322 [(set (match_operand:DI 0 "s_register_operand")
2323 (mult:DI
2324 (SE:DI (match_operand:SI 1 "s_register_operand"))
2325 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
2326 "TARGET_32BIT"
2327 {
2328 emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
2329 gen_highpart (SImode, operands[0]),
2330 operands[1], operands[2]));
2331 DONE;
2332 }
2333 )
2334
2335 (define_insn "<US>mull"
2336 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2337 (mult:SI
2338 (match_operand:SI 2 "s_register_operand" "%r,r")
2339 (match_operand:SI 3 "s_register_operand" "r,r")))
2340 (set (match_operand:SI 1 "s_register_operand" "=r,&r")
2341 (truncate:SI
2342 (lshiftrt:DI
2343 (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
2344 (const_int 32))))]
2345 "TARGET_32BIT"
2346 "<US>mull%?\\t%0, %1, %2, %3"
2347 [(set_attr "type" "umull")
2348 (set_attr "predicable" "yes")
2349 (set_attr "arch" "v6,nov6")]
2350 )
2351
2352 (define_expand "<Us>maddsidi4"
2353 [(set (match_operand:DI 0 "s_register_operand")
2354 (plus:DI
2355 (mult:DI
2356 (SE:DI (match_operand:SI 1 "s_register_operand"))
2357 (SE:DI (match_operand:SI 2 "s_register_operand")))
2358 (match_operand:DI 3 "s_register_operand")))]
2359 "TARGET_32BIT"
2360 {
2361 emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
2362 gen_lowpart (SImode, operands[3]),
2363 gen_highpart (SImode, operands[0]),
2364 gen_highpart (SImode, operands[3]),
2365 operands[1], operands[2]));
2366 DONE;
2367 }
2368 )
2369
2370 (define_insn "<US>mlal"
2371 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2372 (plus:SI
2373 (mult:SI
2374 (match_operand:SI 4 "s_register_operand" "%r,r")
2375 (match_operand:SI 5 "s_register_operand" "r,r"))
2376 (match_operand:SI 1 "s_register_operand" "0,0")))
2377 (set (match_operand:SI 2 "s_register_operand" "=r,&r")
2378 (plus:SI
2379 (truncate:SI
2380 (lshiftrt:DI
2381 (plus:DI
2382 (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
2383 (zero_extend:DI (match_dup 1)))
2384 (const_int 32)))
2385 (match_operand:SI 3 "s_register_operand" "2,2")))]
2386 "TARGET_32BIT"
2387 "<US>mlal%?\\t%0, %2, %4, %5"
2388 [(set_attr "type" "umlal")
2389 (set_attr "predicable" "yes")
2390 (set_attr "arch" "v6,nov6")]
2391 )
2392
2393 (define_expand "<US>mulsi3_highpart"
2394 [(parallel
2395 [(set (match_operand:SI 0 "s_register_operand")
2396 (truncate:SI
2397 (lshiftrt:DI
2398 (mult:DI
2399 (SE:DI (match_operand:SI 1 "s_register_operand"))
2400 (SE:DI (match_operand:SI 2 "s_register_operand")))
2401 (const_int 32))))
2402 (clobber (match_scratch:SI 3 ""))])]
2403 "TARGET_32BIT"
2404 ""
2405 )
2406
2407 (define_insn "*<US>mull_high"
2408 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
2409 (truncate:SI
2410 (lshiftrt:DI
2411 (mult:DI
2412 (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
2413 (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
2414 (const_int 32))))
2415 (clobber (match_scratch:SI 3 "=r,&r,&r"))]
2416 "TARGET_32BIT"
2417 "<US>mull%?\\t%3, %0, %2, %1"
2418 [(set_attr "type" "umull")
2419 (set_attr "predicable" "yes")
2420 (set_attr "arch" "v6,nov6,nov6")]
2421 )
2422
2423 (define_insn "mulhisi3"
2424 [(set (match_operand:SI 0 "s_register_operand" "=r")
2425 (mult:SI (sign_extend:SI
2426 (match_operand:HI 1 "s_register_operand" "%r"))
2427 (sign_extend:SI
2428 (match_operand:HI 2 "s_register_operand" "r"))))]
2429 "TARGET_DSP_MULTIPLY"
2430 "smulbb%?\\t%0, %1, %2"
2431 [(set_attr "type" "smulxy")
2432 (set_attr "predicable" "yes")]
2433 )
2434
2435 (define_insn "*mulhisi3tb"
2436 [(set (match_operand:SI 0 "s_register_operand" "=r")
2437 (mult:SI (ashiftrt:SI
2438 (match_operand:SI 1 "s_register_operand" "r")
2439 (const_int 16))
2440 (sign_extend:SI
2441 (match_operand:HI 2 "s_register_operand" "r"))))]
2442 "TARGET_DSP_MULTIPLY"
2443 "smultb%?\\t%0, %1, %2"
2444 [(set_attr "type" "smulxy")
2445 (set_attr "predicable" "yes")]
2446 )
2447
2448 (define_insn "*mulhisi3bt"
2449 [(set (match_operand:SI 0 "s_register_operand" "=r")
2450 (mult:SI (sign_extend:SI
2451 (match_operand:HI 1 "s_register_operand" "r"))
2452 (ashiftrt:SI
2453 (match_operand:SI 2 "s_register_operand" "r")
2454 (const_int 16))))]
2455 "TARGET_DSP_MULTIPLY"
2456 "smulbt%?\\t%0, %1, %2"
2457 [(set_attr "type" "smulxy")
2458 (set_attr "predicable" "yes")]
2459 )
2460
2461 (define_insn "*mulhisi3tt"
2462 [(set (match_operand:SI 0 "s_register_operand" "=r")
2463 (mult:SI (ashiftrt:SI
2464 (match_operand:SI 1 "s_register_operand" "r")
2465 (const_int 16))
2466 (ashiftrt:SI
2467 (match_operand:SI 2 "s_register_operand" "r")
2468 (const_int 16))))]
2469 "TARGET_DSP_MULTIPLY"
2470 "smultt%?\\t%0, %1, %2"
2471 [(set_attr "type" "smulxy")
2472 (set_attr "predicable" "yes")]
2473 )
2474
2475 (define_insn "maddhisi4"
2476 [(set (match_operand:SI 0 "s_register_operand" "=r")
2477 (plus:SI (mult:SI (sign_extend:SI
2478 (match_operand:HI 1 "s_register_operand" "r"))
2479 (sign_extend:SI
2480 (match_operand:HI 2 "s_register_operand" "r")))
2481 (match_operand:SI 3 "s_register_operand" "r")))]
2482 "TARGET_DSP_MULTIPLY"
2483 "smlabb%?\\t%0, %1, %2, %3"
2484 [(set_attr "type" "smlaxy")
2485 (set_attr "predicable" "yes")]
2486 )
2487
2488 ;; Note: there is no maddhisi4ibt because this one is the canonical form.
2489 (define_insn "*maddhisi4tb"
2490 [(set (match_operand:SI 0 "s_register_operand" "=r")
2491 (plus:SI (mult:SI (ashiftrt:SI
2492 (match_operand:SI 1 "s_register_operand" "r")
2493 (const_int 16))
2494 (sign_extend:SI
2495 (match_operand:HI 2 "s_register_operand" "r")))
2496 (match_operand:SI 3 "s_register_operand" "r")))]
2497 "TARGET_DSP_MULTIPLY"
2498 "smlatb%?\\t%0, %1, %2, %3"
2499 [(set_attr "type" "smlaxy")
2500 (set_attr "predicable" "yes")]
2501 )
2502
2503 (define_insn "*maddhisi4tt"
2504 [(set (match_operand:SI 0 "s_register_operand" "=r")
2505 (plus:SI (mult:SI (ashiftrt:SI
2506 (match_operand:SI 1 "s_register_operand" "r")
2507 (const_int 16))
2508 (ashiftrt:SI
2509 (match_operand:SI 2 "s_register_operand" "r")
2510 (const_int 16)))
2511 (match_operand:SI 3 "s_register_operand" "r")))]
2512 "TARGET_DSP_MULTIPLY"
2513 "smlatt%?\\t%0, %1, %2, %3"
2514 [(set_attr "type" "smlaxy")
2515 (set_attr "predicable" "yes")]
2516 )
2517
2518 (define_insn "maddhidi4"
2519 [(set (match_operand:DI 0 "s_register_operand" "=r")
2520 (plus:DI
2521 (mult:DI (sign_extend:DI
2522 (match_operand:HI 1 "s_register_operand" "r"))
2523 (sign_extend:DI
2524 (match_operand:HI 2 "s_register_operand" "r")))
2525 (match_operand:DI 3 "s_register_operand" "0")))]
2526 "TARGET_DSP_MULTIPLY"
2527 "smlalbb%?\\t%Q0, %R0, %1, %2"
2528 [(set_attr "type" "smlalxy")
2529 (set_attr "predicable" "yes")])
2530
2531 ;; Note: there is no maddhidi4ibt because this one is the canonical form.
2532 (define_insn "*maddhidi4tb"
2533 [(set (match_operand:DI 0 "s_register_operand" "=r")
2534 (plus:DI
2535 (mult:DI (sign_extend:DI
2536 (ashiftrt:SI
2537 (match_operand:SI 1 "s_register_operand" "r")
2538 (const_int 16)))
2539 (sign_extend:DI
2540 (match_operand:HI 2 "s_register_operand" "r")))
2541 (match_operand:DI 3 "s_register_operand" "0")))]
2542 "TARGET_DSP_MULTIPLY"
2543 "smlaltb%?\\t%Q0, %R0, %1, %2"
2544 [(set_attr "type" "smlalxy")
2545 (set_attr "predicable" "yes")])
2546
2547 (define_insn "*maddhidi4tt"
2548 [(set (match_operand:DI 0 "s_register_operand" "=r")
2549 (plus:DI
2550 (mult:DI (sign_extend:DI
2551 (ashiftrt:SI
2552 (match_operand:SI 1 "s_register_operand" "r")
2553 (const_int 16)))
2554 (sign_extend:DI
2555 (ashiftrt:SI
2556 (match_operand:SI 2 "s_register_operand" "r")
2557 (const_int 16))))
2558 (match_operand:DI 3 "s_register_operand" "0")))]
2559 "TARGET_DSP_MULTIPLY"
2560 "smlaltt%?\\t%Q0, %R0, %1, %2"
2561 [(set_attr "type" "smlalxy")
2562 (set_attr "predicable" "yes")])
2563
2564 (define_expand "mulsf3"
2565 [(set (match_operand:SF 0 "s_register_operand")
2566 (mult:SF (match_operand:SF 1 "s_register_operand")
2567 (match_operand:SF 2 "s_register_operand")))]
2568 "TARGET_32BIT && TARGET_HARD_FLOAT"
2569 "
2570 ")
2571
2572 (define_expand "muldf3"
2573 [(set (match_operand:DF 0 "s_register_operand")
2574 (mult:DF (match_operand:DF 1 "s_register_operand")
2575 (match_operand:DF 2 "s_register_operand")))]
2576 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2577 "
2578 ")
2579 \f
2580 ;; Division insns
2581
2582 (define_expand "divsf3"
2583 [(set (match_operand:SF 0 "s_register_operand")
2584 (div:SF (match_operand:SF 1 "s_register_operand")
2585 (match_operand:SF 2 "s_register_operand")))]
2586 "TARGET_32BIT && TARGET_HARD_FLOAT"
2587 "")
2588
2589 (define_expand "divdf3"
2590 [(set (match_operand:DF 0 "s_register_operand")
2591 (div:DF (match_operand:DF 1 "s_register_operand")
2592 (match_operand:DF 2 "s_register_operand")))]
2593 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
2594 "")
2595 \f
2596
2597 ; Expand logical operations. The mid-end expander does not split off memory
2598 ; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
2599 ; So an explicit expander is needed to generate better code.
2600
2601 (define_expand "<LOGICAL:optab>di3"
2602 [(set (match_operand:DI 0 "s_register_operand")
2603 (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
2604 (match_operand:DI 2 "arm_<optab>di_operand")))]
2605 "TARGET_32BIT"
2606 {
2607 rtx low = simplify_gen_binary (<CODE>, SImode,
2608 gen_lowpart (SImode, operands[1]),
2609 gen_lowpart (SImode, operands[2]));
2610 rtx high = simplify_gen_binary (<CODE>, SImode,
2611 gen_highpart (SImode, operands[1]),
2612 gen_highpart_mode (SImode, DImode,
2613 operands[2]));
2614
2615 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2616 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2617 DONE;
2618 }
2619 )
2620
2621 (define_expand "one_cmpldi2"
2622 [(set (match_operand:DI 0 "s_register_operand")
2623 (not:DI (match_operand:DI 1 "s_register_operand")))]
2624 "TARGET_32BIT"
2625 {
2626 rtx low = simplify_gen_unary (NOT, SImode,
2627 gen_lowpart (SImode, operands[1]),
2628 SImode);
2629 rtx high = simplify_gen_unary (NOT, SImode,
2630 gen_highpart_mode (SImode, DImode,
2631 operands[1]),
2632 SImode);
2633
2634 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2635 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2636 DONE;
2637 }
2638 )
2639
2640 ;; Split DImode and/ior/xor operations. Simply perform the logical
2641 ;; operation on the upper and lower halves of the registers.
2642 ;; This is needed for atomic operations in arm_split_atomic_op.
2643 ;; Avoid splitting IWMMXT instructions.
2644 (define_split
2645 [(set (match_operand:DI 0 "s_register_operand" "")
2646 (match_operator:DI 6 "logical_binary_operator"
2647 [(match_operand:DI 1 "s_register_operand" "")
2648 (match_operand:DI 2 "s_register_operand" "")]))]
2649 "TARGET_32BIT && reload_completed
2650 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2651 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2652 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
2653 "
2654 {
2655 operands[3] = gen_highpart (SImode, operands[0]);
2656 operands[0] = gen_lowpart (SImode, operands[0]);
2657 operands[4] = gen_highpart (SImode, operands[1]);
2658 operands[1] = gen_lowpart (SImode, operands[1]);
2659 operands[5] = gen_highpart (SImode, operands[2]);
2660 operands[2] = gen_lowpart (SImode, operands[2]);
2661 }"
2662 )
2663
2664 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
2665 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
2666 (define_split
2667 [(set (match_operand:DI 0 "s_register_operand")
2668 (not:DI (match_operand:DI 1 "s_register_operand")))]
2669 "TARGET_32BIT"
2670 [(set (match_dup 0) (not:SI (match_dup 1)))
2671 (set (match_dup 2) (not:SI (match_dup 3)))]
2672 "
2673 {
2674 operands[2] = gen_highpart (SImode, operands[0]);
2675 operands[0] = gen_lowpart (SImode, operands[0]);
2676 operands[3] = gen_highpart (SImode, operands[1]);
2677 operands[1] = gen_lowpart (SImode, operands[1]);
2678 }"
2679 )
2680
2681 (define_expand "andsi3"
2682 [(set (match_operand:SI 0 "s_register_operand")
2683 (and:SI (match_operand:SI 1 "s_register_operand")
2684 (match_operand:SI 2 "reg_or_int_operand")))]
2685 "TARGET_EITHER"
2686 "
2687 if (TARGET_32BIT)
2688 {
2689 if (CONST_INT_P (operands[2]))
2690 {
2691 if (INTVAL (operands[2]) == 255 && arm_arch6)
2692 {
2693 operands[1] = convert_to_mode (QImode, operands[1], 1);
2694 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2695 operands[1]));
2696 DONE;
2697 }
2698 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
2699 operands[2] = force_reg (SImode, operands[2]);
2700 else
2701 {
2702 arm_split_constant (AND, SImode, NULL_RTX,
2703 INTVAL (operands[2]), operands[0],
2704 operands[1],
2705 optimize && can_create_pseudo_p ());
2706
2707 DONE;
2708 }
2709 }
2710 }
2711 else /* TARGET_THUMB1 */
2712 {
2713 if (!CONST_INT_P (operands[2]))
2714 {
2715 rtx tmp = force_reg (SImode, operands[2]);
2716 if (rtx_equal_p (operands[0], operands[1]))
2717 operands[2] = tmp;
2718 else
2719 {
2720 operands[2] = operands[1];
2721 operands[1] = tmp;
2722 }
2723 }
2724 else
2725 {
2726 int i;
2727
2728 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2729 {
2730 operands[2] = force_reg (SImode,
2731 GEN_INT (~INTVAL (operands[2])));
2732
2733 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2734
2735 DONE;
2736 }
2737
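/* Handle masks of the form (1 << i) - 1 with a single zero-extract, and
their complements by shifting the low bits out and back in again.  */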
2738 for (i = 9; i <= 31; i++)
2739 {
2740 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
2741 {
2742 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2743 const0_rtx));
2744 DONE;
2745 }
2746 else if ((HOST_WIDE_INT_1 << i) - 1
2747 == ~INTVAL (operands[2]))
2748 {
2749 rtx shift = GEN_INT (i);
2750 rtx reg = gen_reg_rtx (SImode);
2751
2752 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2753 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2754
2755 DONE;
2756 }
2757 }
2758
2759 operands[2] = force_reg (SImode, operands[2]);
2760 }
2761 }
2762 "
2763 )
2764
2765 ; ??? Check split length for Thumb-2
2766 (define_insn_and_split "*arm_andsi3_insn"
2767 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2768 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2769 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2770 "TARGET_32BIT"
2771 "@
2772 and%?\\t%0, %1, %2
2773 and%?\\t%0, %1, %2
2774 bic%?\\t%0, %1, #%B2
2775 and%?\\t%0, %1, %2
2776 #"
2777 "TARGET_32BIT
2778 && CONST_INT_P (operands[2])
2779 && !(const_ok_for_arm (INTVAL (operands[2]))
2780 || const_ok_for_arm (~INTVAL (operands[2])))"
2781 [(clobber (const_int 0))]
2782 "
2783 arm_split_constant (AND, SImode, curr_insn,
2784 INTVAL (operands[2]), operands[0], operands[1], 0);
2785 DONE;
2786 "
2787 [(set_attr "length" "4,4,4,4,16")
2788 (set_attr "predicable" "yes")
2789 (set_attr "predicable_short_it" "no,yes,no,no,no")
2790 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
2791 )
2792
2793 (define_insn "*andsi3_compare0"
2794 [(set (reg:CC_NOOV CC_REGNUM)
2795 (compare:CC_NOOV
2796 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2797 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
2798 (const_int 0)))
2799 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2800 (and:SI (match_dup 1) (match_dup 2)))]
2801 "TARGET_32BIT"
2802 "@
2803 ands%?\\t%0, %1, %2
2804 bics%?\\t%0, %1, #%B2
2805 ands%?\\t%0, %1, %2"
2806 [(set_attr "conds" "set")
2807 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
2808 )
2809
2810 (define_insn "*andsi3_compare0_scratch"
2811 [(set (reg:CC_NOOV CC_REGNUM)
2812 (compare:CC_NOOV
2813 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
2814 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
2815 (const_int 0)))
2816 (clobber (match_scratch:SI 2 "=X,r,X"))]
2817 "TARGET_32BIT"
2818 "@
2819 tst%?\\t%0, %1
2820 bics%?\\t%2, %0, #%B1
2821 tst%?\\t%0, %1"
2822 [(set_attr "conds" "set")
2823 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
2824 )
2825
2826 (define_insn "*zeroextractsi_compare0_scratch"
2827 [(set (reg:CC_NOOV CC_REGNUM)
2828 (compare:CC_NOOV (zero_extract:SI
2829 (match_operand:SI 0 "s_register_operand" "r")
2830 (match_operand 1 "const_int_operand" "n")
2831 (match_operand 2 "const_int_operand" "n"))
2832 (const_int 0)))]
2833 "TARGET_32BIT
2834 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2835 && INTVAL (operands[1]) > 0
2836 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2837 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2838 "*
2839 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2840 << INTVAL (operands[2]));
2841 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2842 return \"\";
2843 "
2844 [(set_attr "conds" "set")
2845 (set_attr "predicable" "yes")
2846 (set_attr "type" "logics_imm")]
2847 )
2848
2849 (define_insn_and_split "*ne_zeroextractsi"
2850 [(set (match_operand:SI 0 "s_register_operand" "=r")
2851 (ne:SI (zero_extract:SI
2852 (match_operand:SI 1 "s_register_operand" "r")
2853 (match_operand:SI 2 "const_int_operand" "n")
2854 (match_operand:SI 3 "const_int_operand" "n"))
2855 (const_int 0)))
2856 (clobber (reg:CC CC_REGNUM))]
2857 "TARGET_32BIT
2858 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2859 && INTVAL (operands[2]) > 0
2860 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2861 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2862 "#"
2863 "TARGET_32BIT
2864 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2865 && INTVAL (operands[2]) > 0
2866 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2867 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2868 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2869 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2870 (const_int 0)))
2871 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2872 (set (match_dup 0)
2873 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2874 (match_dup 0) (const_int 1)))]
2875 "
2876 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2877 << INTVAL (operands[3]));
2878 "
2879 [(set_attr "conds" "clob")
2880 (set (attr "length")
2881 (if_then_else (eq_attr "is_thumb" "yes")
2882 (const_int 12)
2883 (const_int 8)))
2884 (set_attr "type" "multiple")]
2885 )
2886
2887 (define_insn_and_split "*ne_zeroextractsi_shifted"
2888 [(set (match_operand:SI 0 "s_register_operand" "=r")
2889 (ne:SI (zero_extract:SI
2890 (match_operand:SI 1 "s_register_operand" "r")
2891 (match_operand:SI 2 "const_int_operand" "n")
2892 (const_int 0))
2893 (const_int 0)))
2894 (clobber (reg:CC CC_REGNUM))]
2895 "TARGET_ARM"
2896 "#"
2897 "TARGET_ARM"
2898 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2899 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2900 (const_int 0)))
2901 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2902 (set (match_dup 0)
2903 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2904 (match_dup 0) (const_int 1)))]
2905 "
2906 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2907 "
2908 [(set_attr "conds" "clob")
2909 (set_attr "length" "8")
2910 (set_attr "type" "multiple")]
2911 )
2912
2913 (define_insn_and_split "*ite_ne_zeroextractsi"
2914 [(set (match_operand:SI 0 "s_register_operand" "=r")
2915 (if_then_else:SI (ne (zero_extract:SI
2916 (match_operand:SI 1 "s_register_operand" "r")
2917 (match_operand:SI 2 "const_int_operand" "n")
2918 (match_operand:SI 3 "const_int_operand" "n"))
2919 (const_int 0))
2920 (match_operand:SI 4 "arm_not_operand" "rIK")
2921 (const_int 0)))
2922 (clobber (reg:CC CC_REGNUM))]
2923 "TARGET_ARM
2924 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2925 && INTVAL (operands[2]) > 0
2926 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2927 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2928 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2929 "#"
2930 "TARGET_ARM
2931 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2932 && INTVAL (operands[2]) > 0
2933 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2934 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2935 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2936 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2937 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2938 (const_int 0)))
2939 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2940 (set (match_dup 0)
2941 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2942 (match_dup 0) (match_dup 4)))]
2943 "
2944 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2945 << INTVAL (operands[3]));
2946 "
2947 [(set_attr "conds" "clob")
2948 (set_attr "length" "8")
2949 (set_attr "type" "multiple")]
2950 )
2951
2952 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2953 [(set (match_operand:SI 0 "s_register_operand" "=r")
2954 (if_then_else:SI (ne (zero_extract:SI
2955 (match_operand:SI 1 "s_register_operand" "r")
2956 (match_operand:SI 2 "const_int_operand" "n")
2957 (const_int 0))
2958 (const_int 0))
2959 (match_operand:SI 3 "arm_not_operand" "rIK")
2960 (const_int 0)))
2961 (clobber (reg:CC CC_REGNUM))]
2962 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2963 "#"
2964 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2965 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2966 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2967 (const_int 0)))
2968 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2969 (set (match_dup 0)
2970 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2971 (match_dup 0) (match_dup 3)))]
2972 "
2973 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2974 "
2975 [(set_attr "conds" "clob")
2976 (set_attr "length" "8")
2977 (set_attr "type" "multiple")]
2978 )
2979
2980 ;; ??? Use the Thumb-2 bitfield insert/extract instructions here.
2981 (define_split
2982 [(set (match_operand:SI 0 "s_register_operand" "")
2983 (match_operator:SI 1 "shiftable_operator"
2984 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2985 (match_operand:SI 3 "const_int_operand" "")
2986 (match_operand:SI 4 "const_int_operand" ""))
2987 (match_operand:SI 5 "s_register_operand" "")]))
2988 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2989 "TARGET_ARM"
2990 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2991 (set (match_dup 0)
2992 (match_op_dup 1
2993 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2994 (match_dup 5)]))]
2995 "{
2996 HOST_WIDE_INT temp = INTVAL (operands[3]);
2997
2998 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2999 operands[4] = GEN_INT (32 - temp);
3000 }"
3001 )
3002
3003 (define_split
3004 [(set (match_operand:SI 0 "s_register_operand" "")
3005 (match_operator:SI 1 "shiftable_operator"
3006 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3007 (match_operand:SI 3 "const_int_operand" "")
3008 (match_operand:SI 4 "const_int_operand" ""))
3009 (match_operand:SI 5 "s_register_operand" "")]))
3010 (clobber (match_operand:SI 6 "s_register_operand" ""))]
3011 "TARGET_ARM"
3012 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
3013 (set (match_dup 0)
3014 (match_op_dup 1
3015 [(ashiftrt:SI (match_dup 6) (match_dup 4))
3016 (match_dup 5)]))]
3017 "{
3018 HOST_WIDE_INT temp = INTVAL (operands[3]);
3019
3020 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
3021 operands[4] = GEN_INT (32 - temp);
3022 }"
3023 )
3024
3025 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
3026 ;;; represented by the bitfield, then this will produce incorrect results.
3027 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
3028 ;;; which have a real bit-field insert instruction, the truncation happens
3029 ;;; in the bit-field insert instruction itself. Since arm does not have a
3030 ;;; bit-field insert instruction, we would have to emit code here to truncate
3031 ;;; the value before we insert. This loses some of the advantage of having
3032 ;;; this insv pattern, so it needs to be reevaluated.
3033
3034 (define_expand "insv"
3035 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
3036 (match_operand 1 "general_operand")
3037 (match_operand 2 "general_operand"))
3038 (match_operand 3 "reg_or_int_operand"))]
3039 "TARGET_ARM || arm_arch_thumb2"
3040 "
3041 {
3042 int start_bit = INTVAL (operands[2]);
3043 int width = INTVAL (operands[1]);
3044 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
3045 rtx target, subtarget;
3046
3047 if (arm_arch_thumb2)
3048 {
3049 if (unaligned_access && MEM_P (operands[0])
3050 && s_register_operand (operands[3], GET_MODE (operands[3]))
3051 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
3052 {
3053 rtx base_addr;
3054
3055 if (BYTES_BIG_ENDIAN)
3056 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
3057 - start_bit;
3058
3059 if (width == 32)
3060 {
3061 base_addr = adjust_address (operands[0], SImode,
3062 start_bit / BITS_PER_UNIT);
3063 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
3064 }
3065 else
3066 {
3067 rtx tmp = gen_reg_rtx (HImode);
3068
3069 base_addr = adjust_address (operands[0], HImode,
3070 start_bit / BITS_PER_UNIT);
3071 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
3072 emit_insn (gen_unaligned_storehi (base_addr, tmp));
3073 }
3074 DONE;
3075 }
3076 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
3077 {
3078 bool use_bfi = TRUE;
3079
3080 if (CONST_INT_P (operands[3]))
3081 {
3082 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
3083
3084 if (val == 0)
3085 {
3086 emit_insn (gen_insv_zero (operands[0], operands[1],
3087 operands[2]));
3088 DONE;
3089 }
3090
3091 /* See if the set can be done with a single orr instruction. */
3092 if (val == mask && const_ok_for_arm (val << start_bit))
3093 use_bfi = FALSE;
3094 }
3095
3096 if (use_bfi)
3097 {
3098 if (!REG_P (operands[3]))
3099 operands[3] = force_reg (SImode, operands[3]);
3100
3101 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
3102 operands[3]));
3103 DONE;
3104 }
3105 }
3106 else
3107 FAIL;
3108 }
3109
3110 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
3111 FAIL;
3112
3113 target = copy_rtx (operands[0]);
3114 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
3115 subreg as the final target. */
3116 if (GET_CODE (target) == SUBREG)
3117 {
3118 subtarget = gen_reg_rtx (SImode);
3119 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
3120 < GET_MODE_SIZE (SImode))
3121 target = SUBREG_REG (target);
3122 }
3123 else
3124 subtarget = target;
3125
3126 if (CONST_INT_P (operands[3]))
3127 {
3128 /* Since we are inserting a known constant, we may be able to
3129 reduce the number of bits that we have to clear so that
3130 the mask becomes simple. */
3131 /* ??? This code does not check to see if the new mask is actually
3132 simpler. It may not be. */
3133 rtx op1 = gen_reg_rtx (SImode);
3134 /* ??? Truncate operand3 to fit in the bitfield. See comment before
3135 start of this pattern. */
3136 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
3137 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
3138
3139 emit_insn (gen_andsi3 (op1, operands[0],
3140 gen_int_mode (~mask2, SImode)));
3141 emit_insn (gen_iorsi3 (subtarget, op1,
3142 gen_int_mode (op3_value << start_bit, SImode)));
3143 }
3144 else if (start_bit == 0
3145 && !(const_ok_for_arm (mask)
3146 || const_ok_for_arm (~mask)))
3147 {
3148 /* A trick: since we are setting the bottom bits in the word,
3149 we can shift operand[3] up, operand[0] down, OR them together
3150 and rotate the result back again. This takes 3 insns, and
3151 the third might be mergeable into another op. */
3152 /* The shift up copes with the possibility that operand[3] is
3153 wider than the bitfield. */
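/* Concretely the sequence computes
rotl ((operands[0] >> width) | (operands[3] << (32 - width)), width):
after the rotate, bits [0, width) come from operands[3] and the remaining
bits are the original high bits of operands[0].  */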
3154 rtx op0 = gen_reg_rtx (SImode);
3155 rtx op1 = gen_reg_rtx (SImode);
3156
3157 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3158 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
3159 emit_insn (gen_iorsi3 (op1, op1, op0));
3160 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
3161 }
3162 else if ((width + start_bit == 32)
3163 && !(const_ok_for_arm (mask)
3164 || const_ok_for_arm (~mask)))
3165 {
3166 /* Similar trick, but slightly less efficient. */
3167
3168 rtx op0 = gen_reg_rtx (SImode);
3169 rtx op1 = gen_reg_rtx (SImode);
3170
3171 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3172 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
3173 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
3174 emit_insn (gen_iorsi3 (subtarget, op1, op0));
3175 }
3176 else
3177 {
3178 rtx op0 = gen_int_mode (mask, SImode);
3179 rtx op1 = gen_reg_rtx (SImode);
3180 rtx op2 = gen_reg_rtx (SImode);
3181
3182 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
3183 {
3184 rtx tmp = gen_reg_rtx (SImode);
3185
3186 emit_insn (gen_movsi (tmp, op0));
3187 op0 = tmp;
3188 }
3189
3190 /* Mask out any bits in operand[3] that are not needed. */
3191 emit_insn (gen_andsi3 (op1, operands[3], op0));
3192
3193 if (CONST_INT_P (op0)
3194 && (const_ok_for_arm (mask << start_bit)
3195 || const_ok_for_arm (~(mask << start_bit))))
3196 {
3197 op0 = gen_int_mode (~(mask << start_bit), SImode);
3198 emit_insn (gen_andsi3 (op2, operands[0], op0));
3199 }
3200 else
3201 {
3202 if (CONST_INT_P (op0))
3203 {
3204 rtx tmp = gen_reg_rtx (SImode);
3205
3206 emit_insn (gen_movsi (tmp, op0));
3207 op0 = tmp;
3208 }
3209
3210 if (start_bit != 0)
3211 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
3212
3213 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
3214 }
3215
3216 if (start_bit != 0)
3217 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
3218
3219 emit_insn (gen_iorsi3 (subtarget, op1, op2));
3220 }
3221
3222 if (subtarget != target)
3223 {
3224 /* If TARGET is still a SUBREG, then it must be wider than a word,
3225 so we must be careful only to set the subword we were asked to. */
3226 if (GET_CODE (target) == SUBREG)
3227 emit_move_insn (target, subtarget);
3228 else
3229 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
3230 }
3231
3232 DONE;
3233 }"
3234 )
3235
3236 (define_insn "insv_zero"
3237 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3238 (match_operand:SI 1 "const_int_M_operand" "M")
3239 (match_operand:SI 2 "const_int_M_operand" "M"))
3240 (const_int 0))]
3241 "arm_arch_thumb2"
3242 "bfc%?\t%0, %2, %1"
3243 [(set_attr "length" "4")
3244 (set_attr "predicable" "yes")
3245 (set_attr "type" "bfm")]
3246 )
3247
3248 (define_insn "insv_t2"
3249 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3250 (match_operand:SI 1 "const_int_M_operand" "M")
3251 (match_operand:SI 2 "const_int_M_operand" "M"))
3252 (match_operand:SI 3 "s_register_operand" "r"))]
3253 "arm_arch_thumb2"
3254 "bfi%?\t%0, %3, %2, %1"
3255 [(set_attr "length" "4")
3256 (set_attr "predicable" "yes")
3257 (set_attr "type" "bfm")]
3258 )
3259
3260 (define_insn "andsi_notsi_si"
3261 [(set (match_operand:SI 0 "s_register_operand" "=r")
3262 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3263 (match_operand:SI 1 "s_register_operand" "r")))]
3264 "TARGET_32BIT"
3265 "bic%?\\t%0, %1, %2"
3266 [(set_attr "predicable" "yes")
3267 (set_attr "type" "logic_reg")]
3268 )
3269
3270 (define_insn "andsi_not_shiftsi_si"
3271 [(set (match_operand:SI 0 "s_register_operand" "=r")
3272 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
3273 [(match_operand:SI 2 "s_register_operand" "r")
3274 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
3275 (match_operand:SI 1 "s_register_operand" "r")))]
3276 "TARGET_ARM"
3277 "bic%?\\t%0, %1, %2%S4"
3278 [(set_attr "predicable" "yes")
3279 (set_attr "shift" "2")
3280 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
3281 (const_string "logic_shift_imm")
3282 (const_string "logic_shift_reg")))]
3283 )
3284
3285 ;; Shifted BICS pattern used to set up the CC status register without reusing
3286 ;; the BICS output. The pattern restricts the Thumb-2 shift operand to an
3287 ;; immediate, since BICS in Thumb-2 does not support a shift by register.
3288 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
3289 [(set (reg:CC_NOOV CC_REGNUM)
3290 (compare:CC_NOOV
3291 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3292 [(match_operand:SI 1 "s_register_operand" "r")
3293 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3294 (match_operand:SI 3 "s_register_operand" "r"))
3295 (const_int 0)))
3296 (clobber (match_scratch:SI 4 "=r"))]
3297 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
3298 "bics%?\\t%4, %3, %1%S0"
3299 [(set_attr "predicable" "yes")
3300 (set_attr "conds" "set")
3301 (set_attr "shift" "1")
3302 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3303 (const_string "logic_shift_imm")
3304 (const_string "logic_shift_reg")))]
3305 )
3306
3307 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the BICS result is also
3308 ;; reused later.
3309 (define_insn "andsi_not_shiftsi_si_scc"
3310 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
3311 (compare:CC_NOOV
3312 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3313 [(match_operand:SI 1 "s_register_operand" "r")
3314 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3315 (match_operand:SI 3 "s_register_operand" "r"))
3316 (const_int 0)))
3317 (set (match_operand:SI 4 "s_register_operand" "=r")
3318 (and:SI (not:SI (match_op_dup 0
3319 [(match_dup 1)
3320 (match_dup 2)]))
3321 (match_dup 3)))])]
3322 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
3323 "bics%?\\t%4, %3, %1%S0"
3324 [(set_attr "predicable" "yes")
3325 (set_attr "conds" "set")
3326 (set_attr "shift" "1")
3327 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3328 (const_string "logic_shift_imm")
3329 (const_string "logic_shift_reg")))]
3330 )
3331
3332 (define_insn "*andsi_notsi_si_compare0"
3333 [(set (reg:CC_NOOV CC_REGNUM)
3334 (compare:CC_NOOV
3335 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3336 (match_operand:SI 1 "s_register_operand" "r"))
3337 (const_int 0)))
3338 (set (match_operand:SI 0 "s_register_operand" "=r")
3339 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
3340 "TARGET_32BIT"
3341 "bics\\t%0, %1, %2"
3342 [(set_attr "conds" "set")
3343 (set_attr "type" "logics_shift_reg")]
3344 )
3345
3346 (define_insn "*andsi_notsi_si_compare0_scratch"
3347 [(set (reg:CC_NOOV CC_REGNUM)
3348 (compare:CC_NOOV
3349 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3350 (match_operand:SI 1 "s_register_operand" "r"))
3351 (const_int 0)))
3352 (clobber (match_scratch:SI 0 "=r"))]
3353 "TARGET_32BIT"
3354 "bics\\t%0, %1, %2"
3355 [(set_attr "conds" "set")
3356 (set_attr "type" "logics_shift_reg")]
3357 )
3358
3359 (define_expand "iorsi3"
3360 [(set (match_operand:SI 0 "s_register_operand")
3361 (ior:SI (match_operand:SI 1 "s_register_operand")
3362 (match_operand:SI 2 "reg_or_int_operand")))]
3363 "TARGET_EITHER"
3364 "
3365 if (CONST_INT_P (operands[2]))
3366 {
3367 if (TARGET_32BIT)
3368 {
3369 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
3370 operands[2] = force_reg (SImode, operands[2]);
3371 else
3372 {
3373 arm_split_constant (IOR, SImode, NULL_RTX,
3374 INTVAL (operands[2]), operands[0],
3375 operands[1],
3376 optimize && can_create_pseudo_p ());
3377 DONE;
3378 }
3379 }
3380 else /* TARGET_THUMB1 */
3381 {
3382 rtx tmp = force_reg (SImode, operands[2]);
3383 if (rtx_equal_p (operands[0], operands[1]))
3384 operands[2] = tmp;
3385 else
3386 {
3387 operands[2] = operands[1];
3388 operands[1] = tmp;
3389 }
3390 }
3391 }
3392 "
3393 )
3394
3395 (define_insn_and_split "*iorsi3_insn"
3396 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
3397 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
3398 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
3399 "TARGET_32BIT"
3400 "@
3401 orr%?\\t%0, %1, %2
3402 orr%?\\t%0, %1, %2
3403 orn%?\\t%0, %1, #%B2
3404 orr%?\\t%0, %1, %2
3405 #"
3406 "TARGET_32BIT
3407 && CONST_INT_P (operands[2])
3408 && !(const_ok_for_arm (INTVAL (operands[2]))
3409 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
3410 [(clobber (const_int 0))]
3411 {
3412 arm_split_constant (IOR, SImode, curr_insn,
3413 INTVAL (operands[2]), operands[0], operands[1], 0);
3414 DONE;
3415 }
3416 [(set_attr "length" "4,4,4,4,16")
3417 (set_attr "arch" "32,t2,t2,32,32")
3418 (set_attr "predicable" "yes")
3419 (set_attr "predicable_short_it" "no,yes,no,no,no")
3420 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
3421 )
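;; Illustrative sketch (hypothetical function; the exact split chosen by
;; arm_split_constant can vary): when the OR constant is not a valid ARM
;; immediate (an 8-bit value rotated by an even amount), the insn is split
;; into several ORRs of encodable pieces rather than loading the constant.
;;
;;   unsigned int or_mask (unsigned int x)
;;   {
;;     return x | 0x00ff00ffu;   /* typically: orr r0, r0, #255
;;                                             orr r0, r0, #16711680 */
;;   }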
3422
3423 (define_peephole2
3424 [(match_scratch:SI 3 "r")
3425 (set (match_operand:SI 0 "arm_general_register_operand" "")
3426 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
3427 (match_operand:SI 2 "const_int_operand" "")))]
3428 "TARGET_ARM
3429 && !const_ok_for_arm (INTVAL (operands[2]))
3430 && const_ok_for_arm (~INTVAL (operands[2]))"
3431 [(set (match_dup 3) (match_dup 2))
3432 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
3433 ""
3434 )
3435
3436 (define_insn "*iorsi3_compare0"
3437 [(set (reg:CC_NOOV CC_REGNUM)
3438 (compare:CC_NOOV
3439 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3440 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3441 (const_int 0)))
3442 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
3443 (ior:SI (match_dup 1) (match_dup 2)))]
3444 "TARGET_32BIT"
3445 "orrs%?\\t%0, %1, %2"
3446 [(set_attr "conds" "set")
3447 (set_attr "arch" "*,t2,*")
3448 (set_attr "length" "4,2,4")
3449 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
3450 )
3451
3452 (define_insn "*iorsi3_compare0_scratch"
3453 [(set (reg:CC_NOOV CC_REGNUM)
3454 (compare:CC_NOOV
3455 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3456 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3457 (const_int 0)))
3458 (clobber (match_scratch:SI 0 "=r,l,r"))]
3459 "TARGET_32BIT"
3460 "orrs%?\\t%0, %1, %2"
3461 [(set_attr "conds" "set")
3462 (set_attr "arch" "*,t2,*")
3463 (set_attr "length" "4,2,4")
3464 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
3465 )
3466
3467 (define_expand "xorsi3"
3468 [(set (match_operand:SI 0 "s_register_operand")
3469 (xor:SI (match_operand:SI 1 "s_register_operand")
3470 (match_operand:SI 2 "reg_or_int_operand")))]
3471 "TARGET_EITHER"
3472 "if (CONST_INT_P (operands[2]))
3473 {
3474 if (TARGET_32BIT)
3475 {
3476 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
3477 operands[2] = force_reg (SImode, operands[2]);
3478 else
3479 {
3480 arm_split_constant (XOR, SImode, NULL_RTX,
3481 INTVAL (operands[2]), operands[0],
3482 operands[1],
3483 optimize && can_create_pseudo_p ());
3484 DONE;
3485 }
3486 }
3487 else /* TARGET_THUMB1 */
3488 {
3489 rtx tmp = force_reg (SImode, operands[2]);
3490 if (rtx_equal_p (operands[0], operands[1]))
3491 operands[2] = tmp;
3492 else
3493 {
3494 operands[2] = operands[1];
3495 operands[1] = tmp;
3496 }
3497 }
3498 }"
3499 )
3500
3501 (define_insn_and_split "*arm_xorsi3"
3502 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
3503 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
3504 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
3505 "TARGET_32BIT"
3506 "@
3507 eor%?\\t%0, %1, %2
3508 eor%?\\t%0, %1, %2
3509 eor%?\\t%0, %1, %2
3510 #"
3511 "TARGET_32BIT
3512 && CONST_INT_P (operands[2])
3513 && !const_ok_for_arm (INTVAL (operands[2]))"
3514 [(clobber (const_int 0))]
3515 {
3516 arm_split_constant (XOR, SImode, curr_insn,
3517 INTVAL (operands[2]), operands[0], operands[1], 0);
3518 DONE;
3519 }
3520 [(set_attr "length" "4,4,4,16")
3521 (set_attr "predicable" "yes")
3522 (set_attr "predicable_short_it" "no,yes,no,no")
3523 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
3524 )
3525
3526 (define_insn "*xorsi3_compare0"
3527 [(set (reg:CC_NOOV CC_REGNUM)
3528 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
3529 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3530 (const_int 0)))
3531 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3532 (xor:SI (match_dup 1) (match_dup 2)))]
3533 "TARGET_32BIT"
3534 "eors%?\\t%0, %1, %2"
3535 [(set_attr "conds" "set")
3536 (set_attr "type" "logics_imm,logics_reg")]
3537 )
3538
3539 (define_insn "*xorsi3_compare0_scratch"
3540 [(set (reg:CC_NOOV CC_REGNUM)
3541 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
3542 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
3543 (const_int 0)))]
3544 "TARGET_32BIT"
3545 "teq%?\\t%0, %1"
3546 [(set_attr "conds" "set")
3547 (set_attr "type" "logics_imm,logics_reg")]
3548 )
3549
3550 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
3551 ; (NOT D) we can sometimes merge the final NOT into one of the following
3552 ; insns.
3553
3554 (define_split
3555 [(set (match_operand:SI 0 "s_register_operand" "")
3556 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3557 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3558 (match_operand:SI 3 "arm_rhs_operand" "")))
3559 (clobber (match_operand:SI 4 "s_register_operand" ""))]
3560 "TARGET_32BIT"
3561 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3562 (not:SI (match_dup 3))))
3563 (set (match_dup 0) (not:SI (match_dup 4)))]
3564 ""
3565 )
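;; Illustrative check of the identity behind the split above (not from the
;; source): by De Morgan, ~A & ~B = ~(A | B), so
;;   (~A & ~B) | C  =  ~(A | B) | C  =  ~((A | B) & ~C)
;; which is exactly the AND/IOR form with a trailing NOT that the split
;; produces.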
3566
3567 (define_insn_and_split "*andsi_iorsi3_notsi"
3568 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3569 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3570 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3571 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3572 "TARGET_32BIT"
3573 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3574 "&& reload_completed"
3575 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
3576 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
3577 {
3578 /* If operands[3] is a constant, make sure to fold the NOT into it
3579 to avoid creating a NOT of a CONST_INT. */
3580 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
3581 if (CONST_INT_P (not_rtx))
3582 {
3583 operands[4] = operands[0];
3584 operands[5] = not_rtx;
3585 }
3586 else
3587 {
3588 operands[5] = operands[0];
3589 operands[4] = not_rtx;
3590 }
3591 }
3592 [(set_attr "length" "8")
3593 (set_attr "ce_count" "2")
3594 (set_attr "predicable" "yes")
3595 (set_attr "type" "multiple")]
3596 )
3597
3598 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3599 ; insns are available?
3600 (define_split
3601 [(set (match_operand:SI 0 "s_register_operand" "")
3602 (match_operator:SI 1 "logical_binary_operator"
3603 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3604 (match_operand:SI 3 "const_int_operand" "")
3605 (match_operand:SI 4 "const_int_operand" ""))
3606 (match_operator:SI 9 "logical_binary_operator"
3607 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3608 (match_operand:SI 6 "const_int_operand" ""))
3609 (match_operand:SI 7 "s_register_operand" "")])]))
3610 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3611 "TARGET_32BIT
3612 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3613 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3614 [(set (match_dup 8)
3615 (match_op_dup 1
3616 [(ashift:SI (match_dup 2) (match_dup 4))
3617 (match_dup 5)]))
3618 (set (match_dup 0)
3619 (match_op_dup 1
3620 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3621 (match_dup 7)]))]
3622 "
3623 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3624 ")
3625
3626 (define_split
3627 [(set (match_operand:SI 0 "s_register_operand" "")
3628 (match_operator:SI 1 "logical_binary_operator"
3629 [(match_operator:SI 9 "logical_binary_operator"
3630 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3631 (match_operand:SI 6 "const_int_operand" ""))
3632 (match_operand:SI 7 "s_register_operand" "")])
3633 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3634 (match_operand:SI 3 "const_int_operand" "")
3635 (match_operand:SI 4 "const_int_operand" ""))]))
3636 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3637 "TARGET_32BIT
3638 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3639 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3640 [(set (match_dup 8)
3641 (match_op_dup 1
3642 [(ashift:SI (match_dup 2) (match_dup 4))
3643 (match_dup 5)]))
3644 (set (match_dup 0)
3645 (match_op_dup 1
3646 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3647 (match_dup 7)]))]
3648 "
3649 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3650 ")
3651
3652 (define_split
3653 [(set (match_operand:SI 0 "s_register_operand" "")
3654 (match_operator:SI 1 "logical_binary_operator"
3655 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3656 (match_operand:SI 3 "const_int_operand" "")
3657 (match_operand:SI 4 "const_int_operand" ""))
3658 (match_operator:SI 9 "logical_binary_operator"
3659 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3660 (match_operand:SI 6 "const_int_operand" ""))
3661 (match_operand:SI 7 "s_register_operand" "")])]))
3662 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3663 "TARGET_32BIT
3664 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3665 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3666 [(set (match_dup 8)
3667 (match_op_dup 1
3668 [(ashift:SI (match_dup 2) (match_dup 4))
3669 (match_dup 5)]))
3670 (set (match_dup 0)
3671 (match_op_dup 1
3672 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3673 (match_dup 7)]))]
3674 "
3675 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3676 ")
3677
3678 (define_split
3679 [(set (match_operand:SI 0 "s_register_operand" "")
3680 (match_operator:SI 1 "logical_binary_operator"
3681 [(match_operator:SI 9 "logical_binary_operator"
3682 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3683 (match_operand:SI 6 "const_int_operand" ""))
3684 (match_operand:SI 7 "s_register_operand" "")])
3685 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3686 (match_operand:SI 3 "const_int_operand" "")
3687 (match_operand:SI 4 "const_int_operand" ""))]))
3688 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3689 "TARGET_32BIT
3690 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3691 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3692 [(set (match_dup 8)
3693 (match_op_dup 1
3694 [(ashift:SI (match_dup 2) (match_dup 4))
3695 (match_dup 5)]))
3696 (set (match_dup 0)
3697 (match_op_dup 1
3698 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3699 (match_dup 7)]))]
3700 "
3701 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3702 ")
3703 \f
3704
3705 ;; Minimum and maximum insns
3706
3707 (define_expand "smaxsi3"
3708 [(parallel [
3709 (set (match_operand:SI 0 "s_register_operand")
3710 (smax:SI (match_operand:SI 1 "s_register_operand")
3711 (match_operand:SI 2 "arm_rhs_operand")))
3712 (clobber (reg:CC CC_REGNUM))])]
3713 "TARGET_32BIT"
3714 "
3715 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3716 {
3717 /* No need for a clobber of the condition code register here. */
3718 emit_insn (gen_rtx_SET (operands[0],
3719 gen_rtx_SMAX (SImode, operands[1],
3720 operands[2])));
3721 DONE;
3722 }
3723 ")
3724
3725 (define_insn "*smax_0"
3726 [(set (match_operand:SI 0 "s_register_operand" "=r")
3727 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3728 (const_int 0)))]
3729 "TARGET_32BIT"
3730 "bic%?\\t%0, %1, %1, asr #31"
3731 [(set_attr "predicable" "yes")
3732 (set_attr "type" "logic_shift_reg")]
3733 )
3734
3735 (define_insn "*smax_m1"
3736 [(set (match_operand:SI 0 "s_register_operand" "=r")
3737 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3738 (const_int -1)))]
3739 "TARGET_32BIT"
3740 "orr%?\\t%0, %1, %1, asr #31"
3741 [(set_attr "predicable" "yes")
3742 (set_attr "type" "logic_shift_reg")]
3743 )
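;; A worked sketch of why the two single-instruction forms above are correct
;; (illustrative, not from the source).  For a 32-bit signed x, "x asr #31"
;; is 0 when x >= 0 and all-ones (-1) when x < 0, so:
;;   max (x, 0)  = x & ~(x >> 31)   ->  bic  %0, %1, %1, asr #31
;;   max (x, -1) = x | (x >> 31)    ->  orr  %0, %1, %1, asr #31
;;
;;   int max0  (int x) { return x > 0  ? x : 0;  }   /* may become the BIC form */
;;   int maxm1 (int x) { return x > -1 ? x : -1; }   /* may become the ORR form */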
3744
3745 (define_insn_and_split "*arm_smax_insn"
3746 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3747 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3748 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3749 (clobber (reg:CC CC_REGNUM))]
3750 "TARGET_ARM"
3751 "#"
3752 ; cmp\\t%1, %2\;movlt\\t%0, %2
3753 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3754 "TARGET_ARM"
3755 [(set (reg:CC CC_REGNUM)
3756 (compare:CC (match_dup 1) (match_dup 2)))
3757 (set (match_dup 0)
3758 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
3759 (match_dup 1)
3760 (match_dup 2)))]
3761 ""
3762 [(set_attr "conds" "clob")
3763 (set_attr "length" "8,12")
3764 (set_attr "type" "multiple")]
3765 )
3766
3767 (define_expand "sminsi3"
3768 [(parallel [
3769 (set (match_operand:SI 0 "s_register_operand")
3770 (smin:SI (match_operand:SI 1 "s_register_operand")
3771 (match_operand:SI 2 "arm_rhs_operand")))
3772 (clobber (reg:CC CC_REGNUM))])]
3773 "TARGET_32BIT"
3774 "
3775 if (operands[2] == const0_rtx)
3776 {
3777 /* No need for a clobber of the condition code register here. */
3778 emit_insn (gen_rtx_SET (operands[0],
3779 gen_rtx_SMIN (SImode, operands[1],
3780 operands[2])));
3781 DONE;
3782 }
3783 ")
3784
3785 (define_insn "*smin_0"
3786 [(set (match_operand:SI 0 "s_register_operand" "=r")
3787 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3788 (const_int 0)))]
3789 "TARGET_32BIT"
3790 "and%?\\t%0, %1, %1, asr #31"
3791 [(set_attr "predicable" "yes")
3792 (set_attr "type" "logic_shift_reg")]
3793 )
3794
3795 (define_insn_and_split "*arm_smin_insn"
3796 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3797 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3798 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3799 (clobber (reg:CC CC_REGNUM))]
3800 "TARGET_ARM"
3801 "#"
3802 ; cmp\\t%1, %2\;movge\\t%0, %2
3803 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3804 "TARGET_ARM"
3805 [(set (reg:CC CC_REGNUM)
3806 (compare:CC (match_dup 1) (match_dup 2)))
3807 (set (match_dup 0)
3808 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
3809 (match_dup 1)
3810 (match_dup 2)))]
3811 ""
3812 [(set_attr "conds" "clob")
3813 (set_attr "length" "8,12")
3814 (set_attr "type" "multiple,multiple")]
3815 )
3816
3817 (define_expand "umaxsi3"
3818 [(parallel [
3819 (set (match_operand:SI 0 "s_register_operand")
3820 (umax:SI (match_operand:SI 1 "s_register_operand")
3821 (match_operand:SI 2 "arm_rhs_operand")))
3822 (clobber (reg:CC CC_REGNUM))])]
3823 "TARGET_32BIT"
3824 ""
3825 )
3826
3827 (define_insn_and_split "*arm_umaxsi3"
3828 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3829 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3830 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3831 (clobber (reg:CC CC_REGNUM))]
3832 "TARGET_ARM"
3833 "#"
3834 ; cmp\\t%1, %2\;movcc\\t%0, %2
3835 ; cmp\\t%1, %2\;movcs\\t%0, %1
3836 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3837 "TARGET_ARM"
3838 [(set (reg:CC CC_REGNUM)
3839 (compare:CC (match_dup 1) (match_dup 2)))
3840 (set (match_dup 0)
3841 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
3842 (match_dup 1)
3843 (match_dup 2)))]
3844 ""
3845 [(set_attr "conds" "clob")
3846 (set_attr "length" "8,8,12")
3847 (set_attr "type" "store_4")]
3848 )
3849
3850 (define_expand "uminsi3"
3851 [(parallel [
3852 (set (match_operand:SI 0 "s_register_operand")
3853 (umin:SI (match_operand:SI 1 "s_register_operand")
3854 (match_operand:SI 2 "arm_rhs_operand")))
3855 (clobber (reg:CC CC_REGNUM))])]
3856 "TARGET_32BIT"
3857 ""
3858 )
3859
3860 (define_insn_and_split "*arm_uminsi3"
3861 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3862 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3863 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3864 (clobber (reg:CC CC_REGNUM))]
3865 "TARGET_ARM"
3866 "#"
3867 ; cmp\\t%1, %2\;movcs\\t%0, %2
3868 ; cmp\\t%1, %2\;movcc\\t%0, %1
3869 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3870 "TARGET_ARM"
3871 [(set (reg:CC CC_REGNUM)
3872 (compare:CC (match_dup 1) (match_dup 2)))
3873 (set (match_dup 0)
3874 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
3875 (match_dup 1)
3876 (match_dup 2)))]
3877 ""
3878 [(set_attr "conds" "clob")
3879 (set_attr "length" "8,8,12")
3880 (set_attr "type" "store_4")]
3881 )
3882
3883 (define_insn "*store_minmaxsi"
3884 [(set (match_operand:SI 0 "memory_operand" "=m")
3885 (match_operator:SI 3 "minmax_operator"
3886 [(match_operand:SI 1 "s_register_operand" "r")
3887 (match_operand:SI 2 "s_register_operand" "r")]))
3888 (clobber (reg:CC CC_REGNUM))]
3889 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
3890 "*
3891 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3892 operands[1], operands[2]);
3893 output_asm_insn (\"cmp\\t%1, %2\", operands);
3894 if (TARGET_THUMB2)
3895 output_asm_insn (\"ite\t%d3\", operands);
3896 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3897 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3898 return \"\";
3899 "
3900 [(set_attr "conds" "clob")
3901 (set (attr "length")
3902 (if_then_else (eq_attr "is_thumb" "yes")
3903 (const_int 14)
3904 (const_int 12)))
3905 (set_attr "type" "store_4")]
3906 )
3907
3908 ; Reject the frame pointer in operand[1], since reloading this after
3909 ; it has been eliminated can cause carnage.
3910 (define_insn "*minmax_arithsi"
3911 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3912 (match_operator:SI 4 "shiftable_operator"
3913 [(match_operator:SI 5 "minmax_operator"
3914 [(match_operand:SI 2 "s_register_operand" "r,r")
3915 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3916 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3917 (clobber (reg:CC CC_REGNUM))]
3918 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
3919 "*
3920 {
3921 enum rtx_code code = GET_CODE (operands[4]);
3922 bool need_else;
3923
3924 if (which_alternative != 0 || operands[3] != const0_rtx
3925 || (code != PLUS && code != IOR && code != XOR))
3926 need_else = true;
3927 else
3928 need_else = false;
3929
3930 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3931 operands[2], operands[3]);
3932 output_asm_insn (\"cmp\\t%2, %3\", operands);
3933 if (TARGET_THUMB2)
3934 {
3935 if (need_else)
3936 output_asm_insn (\"ite\\t%d5\", operands);
3937 else
3938 output_asm_insn (\"it\\t%d5\", operands);
3939 }
3940 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3941 if (need_else)
3942 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3943 return \"\";
3944 }"
3945 [(set_attr "conds" "clob")
3946 (set (attr "length")
3947 (if_then_else (eq_attr "is_thumb" "yes")
3948 (const_int 14)
3949 (const_int 12)))
3950 (set_attr "type" "multiple")]
3951 )
3952
3953 ; Reject the frame pointer in operand[1], since reloading this after
3954 ; it has been eliminated can cause carnage.
3955 (define_insn_and_split "*minmax_arithsi_non_canon"
3956 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
3957 (minus:SI
3958 (match_operand:SI 1 "s_register_operand" "0,?Ts")
3959 (match_operator:SI 4 "minmax_operator"
3960 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
3961 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
3962 (clobber (reg:CC CC_REGNUM))]
3963 "TARGET_32BIT && !arm_eliminable_register (operands[1])
3964 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
3965 "#"
3966 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
3967 [(set (reg:CC CC_REGNUM)
3968 (compare:CC (match_dup 2) (match_dup 3)))
3969
3970 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
3971 (set (match_dup 0)
3972 (minus:SI (match_dup 1)
3973 (match_dup 2))))
3974 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
3975 (set (match_dup 0)
3976 (match_dup 6)))]
3977 {
3978 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
3979 operands[2], operands[3]);
3980 enum rtx_code rc = minmax_code (operands[4]);
3981 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
3982 operands[2], operands[3]);
3983
3984 if (mode == CCFPmode || mode == CCFPEmode)
3985 rc = reverse_condition_maybe_unordered (rc);
3986 else
3987 rc = reverse_condition (rc);
3988 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
3989 if (CONST_INT_P (operands[3]))
3990 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
3991 else
3992 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
3993 }
3994 [(set_attr "conds" "clob")
3995 (set (attr "length")
3996 (if_then_else (eq_attr "is_thumb" "yes")
3997 (const_int 14)
3998 (const_int 12)))
3999 (set_attr "type" "multiple")]
4000 )
4001
4002 (define_code_iterator SAT [smin smax])
4003 (define_code_attr SATrev [(smin "smax") (smax "smin")])
4004 (define_code_attr SATlo [(smin "1") (smax "2")])
4005 (define_code_attr SAThi [(smin "2") (smax "1")])
4006
4007 (define_insn "*satsi_<SAT:code>"
4008 [(set (match_operand:SI 0 "s_register_operand" "=r")
4009 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
4010 (match_operand:SI 1 "const_int_operand" "i"))
4011 (match_operand:SI 2 "const_int_operand" "i")))]
4012 "TARGET_32BIT && arm_arch6
4013 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4014 {
4015 int mask;
4016 bool signed_sat;
4017 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4018 &mask, &signed_sat))
4019 gcc_unreachable ();
4020
4021 operands[1] = GEN_INT (mask);
4022 if (signed_sat)
4023 return "ssat%?\t%0, %1, %3";
4024 else
4025 return "usat%?\t%0, %1, %3";
4026 }
4027 [(set_attr "predicable" "yes")
4028 (set_attr "type" "alus_imm")]
4029 )
4030
4031 (define_insn "*satsi_<SAT:code>_shift"
4032 [(set (match_operand:SI 0 "s_register_operand" "=r")
4033 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
4034 [(match_operand:SI 4 "s_register_operand" "r")
4035 (match_operand:SI 5 "const_int_operand" "i")])
4036 (match_operand:SI 1 "const_int_operand" "i"))
4037 (match_operand:SI 2 "const_int_operand" "i")))]
4038 "TARGET_32BIT && arm_arch6
4039 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4040 {
4041 int mask;
4042 bool signed_sat;
4043 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4044 &mask, &signed_sat))
4045 gcc_unreachable ();
4046
4047 operands[1] = GEN_INT (mask);
4048 if (signed_sat)
4049 return "ssat%?\t%0, %1, %4%S3";
4050 else
4051 return "usat%?\t%0, %1, %4%S3";
4052 }
4053 [(set_attr "predicable" "yes")
4054 (set_attr "shift" "3")
4055 (set_attr "type" "logic_shift_reg")])
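;; Illustrative sketch (hypothetical function): the smin/smax nest matched by
;; the SAT patterns above is what a C clamp to a power-of-two signed range
;; looks like after canonicalisation; on ARMv6+ it can become a single SSAT.
;;
;;   int clamp_s8 (int x)
;;   {
;;     if (x > 127)  x = 127;
;;     if (x < -128) x = -128;
;;     return x;               /* may become: ssat r0, #8, r0 */
;;   }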
4056 \f
4057 ;; Shift and rotation insns
4058
4059 (define_expand "ashldi3"
4060 [(set (match_operand:DI 0 "s_register_operand")
4061 (ashift:DI (match_operand:DI 1 "s_register_operand")
4062 (match_operand:SI 2 "reg_or_int_operand")))]
4063 "TARGET_32BIT"
4064 "
4065 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
4066 operands[2], gen_reg_rtx (SImode),
4067 gen_reg_rtx (SImode));
4068 DONE;
4069 ")
4070
4071 (define_expand "ashlsi3"
4072 [(set (match_operand:SI 0 "s_register_operand")
4073 (ashift:SI (match_operand:SI 1 "s_register_operand")
4074 (match_operand:SI 2 "arm_rhs_operand")))]
4075 "TARGET_EITHER"
4076 "
4077 if (CONST_INT_P (operands[2])
4078 && (UINTVAL (operands[2])) > 31)
4079 {
4080 emit_insn (gen_movsi (operands[0], const0_rtx));
4081 DONE;
4082 }
4083 "
4084 )
4085
4086 (define_expand "ashrdi3"
4087 [(set (match_operand:DI 0 "s_register_operand")
4088 (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
4089 (match_operand:SI 2 "reg_or_int_operand")))]
4090 "TARGET_32BIT"
4091 "
4092 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
4093 operands[2], gen_reg_rtx (SImode),
4094 gen_reg_rtx (SImode));
4095 DONE;
4096 ")
4097
4098 (define_expand "ashrsi3"
4099 [(set (match_operand:SI 0 "s_register_operand")
4100 (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
4101 (match_operand:SI 2 "arm_rhs_operand")))]
4102 "TARGET_EITHER"
4103 "
4104 if (CONST_INT_P (operands[2])
4105 && UINTVAL (operands[2]) > 31)
4106 operands[2] = GEN_INT (31);
4107 "
4108 )
4109
4110 (define_expand "lshrdi3"
4111 [(set (match_operand:DI 0 "s_register_operand")
4112 (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
4113 (match_operand:SI 2 "reg_or_int_operand")))]
4114 "TARGET_32BIT"
4115 "
4116 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
4117 operands[2], gen_reg_rtx (SImode),
4118 gen_reg_rtx (SImode));
4119 DONE;
4120 ")
4121
4122 (define_expand "lshrsi3"
4123 [(set (match_operand:SI 0 "s_register_operand")
4124 (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
4125 (match_operand:SI 2 "arm_rhs_operand")))]
4126 "TARGET_EITHER"
4127 "
4128 if (CONST_INT_P (operands[2])
4129 && (UINTVAL (operands[2])) > 31)
4130 {
4131 emit_insn (gen_movsi (operands[0], const0_rtx));
4132 DONE;
4133 }
4134 "
4135 )
4136
4137 (define_expand "rotlsi3"
4138 [(set (match_operand:SI 0 "s_register_operand")
4139 (rotatert:SI (match_operand:SI 1 "s_register_operand")
4140 (match_operand:SI 2 "reg_or_int_operand")))]
4141 "TARGET_32BIT"
4142 "
4143 if (CONST_INT_P (operands[2]))
4144 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
4145 else
4146 {
4147 rtx reg = gen_reg_rtx (SImode);
4148 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
4149 operands[2] = reg;
4150 }
4151 "
4152 )
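;; Illustrative sketch (hypothetical function): ARM only has a rotate-right
;; operation, so the expander above rewrites a rotate-left by n as a
;; rotate-right by (32 - n) % 32, which yields the same result.
;;
;;   unsigned int rotl8 (unsigned int x)
;;   {
;;     return (x << 8) | (x >> 24);   /* may become: ror r0, r0, #24 */
;;   }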
4153
4154 (define_expand "rotrsi3"
4155 [(set (match_operand:SI 0 "s_register_operand")
4156 (rotatert:SI (match_operand:SI 1 "s_register_operand")
4157 (match_operand:SI 2 "arm_rhs_operand")))]
4158 "TARGET_EITHER"
4159 "
4160 if (TARGET_32BIT)
4161 {
4162 if (CONST_INT_P (operands[2])
4163 && UINTVAL (operands[2]) > 31)
4164 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
4165 }
4166 else /* TARGET_THUMB1 */
4167 {
4168 if (CONST_INT_P (operands [2]))
4169 operands [2] = force_reg (SImode, operands[2]);
4170 }
4171 "
4172 )
4173
4174 (define_insn "*arm_shiftsi3"
4175 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
4176 (match_operator:SI 3 "shift_operator"
4177 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
4178 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
4179 "TARGET_32BIT"
4180 "* return arm_output_shift(operands, 0);"
4181 [(set_attr "predicable" "yes")
4182 (set_attr "arch" "t2,t2,*,*")
4183 (set_attr "predicable_short_it" "yes,yes,no,no")
4184 (set_attr "length" "4")
4185 (set_attr "shift" "1")
4186 (set_attr "type" "alu_shift_reg,alu_shift_imm,alu_shift_imm,alu_shift_reg")]
4187 )
4188
4189 (define_insn "*shiftsi3_compare0"
4190 [(set (reg:CC_NOOV CC_REGNUM)
4191 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4192 [(match_operand:SI 1 "s_register_operand" "r,r")
4193 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4194 (const_int 0)))
4195 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4196 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
4197 "TARGET_32BIT"
4198 "* return arm_output_shift(operands, 1);"
4199 [(set_attr "conds" "set")
4200 (set_attr "shift" "1")
4201 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
4202 )
4203
4204 (define_insn "*shiftsi3_compare0_scratch"
4205 [(set (reg:CC_NOOV CC_REGNUM)
4206 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4207 [(match_operand:SI 1 "s_register_operand" "r,r")
4208 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4209 (const_int 0)))
4210 (clobber (match_scratch:SI 0 "=r,r"))]
4211 "TARGET_32BIT"
4212 "* return arm_output_shift(operands, 1);"
4213 [(set_attr "conds" "set")
4214 (set_attr "shift" "1")
4215 (set_attr "type" "shift_imm,shift_reg")]
4216 )
4217
4218 (define_insn "*not_shiftsi"
4219 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4220 (not:SI (match_operator:SI 3 "shift_operator"
4221 [(match_operand:SI 1 "s_register_operand" "r,r")
4222 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
4223 "TARGET_32BIT"
4224 "mvn%?\\t%0, %1%S3"
4225 [(set_attr "predicable" "yes")
4226 (set_attr "shift" "1")
4227 (set_attr "arch" "32,a")
4228 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4229
4230 (define_insn "*not_shiftsi_compare0"
4231 [(set (reg:CC_NOOV CC_REGNUM)
4232 (compare:CC_NOOV
4233 (not:SI (match_operator:SI 3 "shift_operator"
4234 [(match_operand:SI 1 "s_register_operand" "r,r")
4235 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
4236 (const_int 0)))
4237 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4238 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
4239 "TARGET_32BIT"
4240 "mvns%?\\t%0, %1%S3"
4241 [(set_attr "conds" "set")
4242 (set_attr "shift" "1")
4243 (set_attr "arch" "32,a")
4244 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4245
4246 (define_insn "*not_shiftsi_compare0_scratch"
4247 [(set (reg:CC_NOOV CC_REGNUM)
4248 (compare:CC_NOOV
4249 (not:SI (match_operator:SI 3 "shift_operator"
4250 [(match_operand:SI 1 "s_register_operand" "r,r")
4251 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
4252 (const_int 0)))
4253 (clobber (match_scratch:SI 0 "=r,r"))]
4254 "TARGET_32BIT"
4255 "mvns%?\\t%0, %1%S3"
4256 [(set_attr "conds" "set")
4257 (set_attr "shift" "1")
4258 (set_attr "arch" "32,a")
4259 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4260
4261 ;; We don't really have extzv, but defining this using shifts helps
4262 ;; to reduce register pressure later on.
4263
4264 (define_expand "extzv"
4265 [(set (match_operand 0 "s_register_operand")
4266 (zero_extract (match_operand 1 "nonimmediate_operand")
4267 (match_operand 2 "const_int_operand")
4268 (match_operand 3 "const_int_operand")))]
4269 "TARGET_THUMB1 || arm_arch_thumb2"
4270 "
4271 {
4272 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
4273 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
4274
4275 if (arm_arch_thumb2)
4276 {
4277 HOST_WIDE_INT width = INTVAL (operands[2]);
4278 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4279
4280 if (unaligned_access && MEM_P (operands[1])
4281 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
4282 {
4283 rtx base_addr;
4284
4285 if (BYTES_BIG_ENDIAN)
4286 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
4287 - bitpos;
4288
4289 if (width == 32)
4290 {
4291 base_addr = adjust_address (operands[1], SImode,
4292 bitpos / BITS_PER_UNIT);
4293 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4294 }
4295 else
4296 {
4297 rtx dest = operands[0];
4298 rtx tmp = gen_reg_rtx (SImode);
4299
4300 /* We may get a paradoxical subreg here. Strip it off. */
4301 if (GET_CODE (dest) == SUBREG
4302 && GET_MODE (dest) == SImode
4303 && GET_MODE (SUBREG_REG (dest)) == HImode)
4304 dest = SUBREG_REG (dest);
4305
4306 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4307 FAIL;
4308
4309 base_addr = adjust_address (operands[1], HImode,
4310 bitpos / BITS_PER_UNIT);
4311 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
4312 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4313 }
4314 DONE;
4315 }
4316 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
4317 {
4318 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
4319 operands[3]));
4320 DONE;
4321 }
4322 else
4323 FAIL;
4324 }
4325
4326 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4327 FAIL;
4328
4329 operands[3] = GEN_INT (rshift);
4330
4331 if (lshift == 0)
4332 {
4333 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
4334 DONE;
4335 }
4336
4337 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
4338 operands[3], gen_reg_rtx (SImode)));
4339 DONE;
4340 }"
4341 )
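;; A minimal sketch (hypothetical function) of the two-shift extraction used
;; when the Thumb-2 UBFX form is not available: the left shift discards the
;; bits above the field, and the logical right shift then drops the bits
;; below it while zero-filling from the top.
;;
;;   unsigned int extract_bits (unsigned int x)
;;   {
;;     /* Extract an 8-bit field starting at bit 4:
;;        lshift = 32 - 8 - 4 = 20, rshift = 32 - 8 = 24.  */
;;     return (x << 20) >> 24;
;;   }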
4342
4343 ;; Helper for extzv, for the Thumb-1 register-shifts case.
4344
4345 (define_expand "extzv_t1"
4346 [(set (match_operand:SI 4 "s_register_operand")
4347 (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
4348 (match_operand:SI 2 "const_int_operand")))
4349 (set (match_operand:SI 0 "s_register_operand")
4350 (lshiftrt:SI (match_dup 4)
4351 (match_operand:SI 3 "const_int_operand")))]
4352 "TARGET_THUMB1"
4353 "")
4354
4355 (define_expand "extv"
4356 [(set (match_operand 0 "s_register_operand")
4357 (sign_extract (match_operand 1 "nonimmediate_operand")
4358 (match_operand 2 "const_int_operand")
4359 (match_operand 3 "const_int_operand")))]
4360 "arm_arch_thumb2"
4361 {
4362 HOST_WIDE_INT width = INTVAL (operands[2]);
4363 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4364
4365 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
4366 && (bitpos % BITS_PER_UNIT) == 0)
4367 {
4368 rtx base_addr;
4369
4370 if (BYTES_BIG_ENDIAN)
4371 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
4372
4373 if (width == 32)
4374 {
4375 base_addr = adjust_address (operands[1], SImode,
4376 bitpos / BITS_PER_UNIT);
4377 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4378 }
4379 else
4380 {
4381 rtx dest = operands[0];
4382 rtx tmp = gen_reg_rtx (SImode);
4383
4384 /* We may get a paradoxical subreg here. Strip it off. */
4385 if (GET_CODE (dest) == SUBREG
4386 && GET_MODE (dest) == SImode
4387 && GET_MODE (SUBREG_REG (dest)) == HImode)
4388 dest = SUBREG_REG (dest);
4389
4390 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4391 FAIL;
4392
4393 base_addr = adjust_address (operands[1], HImode,
4394 bitpos / BITS_PER_UNIT);
4395 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
4396 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4397 }
4398
4399 DONE;
4400 }
4401 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4402 FAIL;
4403 else if (GET_MODE (operands[0]) == SImode
4404 && GET_MODE (operands[1]) == SImode)
4405 {
4406 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
4407 operands[3]));
4408 DONE;
4409 }
4410
4411 FAIL;
4412 })
4413
4414 ; Helper to expand register forms of extv with the proper modes.
4415
4416 (define_expand "extv_regsi"
4417 [(set (match_operand:SI 0 "s_register_operand")
4418 (sign_extract:SI (match_operand:SI 1 "s_register_operand")
4419 (match_operand 2 "const_int_operand")
4420 (match_operand 3 "const_int_operand")))]
4421 ""
4422 {
4423 })
4424
4425 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
4426
4427 (define_insn "unaligned_loaddi"
4428 [(set (match_operand:DI 0 "s_register_operand" "=r")
4429 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
4430 UNSPEC_UNALIGNED_LOAD))]
4431 "TARGET_32BIT && TARGET_LDRD"
4432 "*
4433 return output_move_double (operands, true, NULL);
4434 "
4435 [(set_attr "length" "8")
4436 (set_attr "type" "load_8")])
4437
4438 (define_insn "unaligned_loadsi"
4439 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4440 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
4441 UNSPEC_UNALIGNED_LOAD))]
4442 "unaligned_access"
4443 "@
4444 ldr\t%0, %1\t@ unaligned
4445 ldr%?\t%0, %1\t@ unaligned
4446 ldr%?\t%0, %1\t@ unaligned"
4447 [(set_attr "arch" "t1,t2,32")
4448 (set_attr "length" "2,2,4")
4449 (set_attr "predicable" "no,yes,yes")
4450 (set_attr "predicable_short_it" "no,yes,no")
4451 (set_attr "type" "load_4")])
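;; Illustrative sketch (hypothetical types): accesses to under-aligned fields
;; of a packed structure are the usual source of these unaligned_load/store
;; patterns; on ARMv6+ with unaligned_access they become plain LDR/LDRH/STR
;; forms rather than byte-by-byte sequences.
;;
;;   struct __attribute__ ((packed)) rec { char tag; unsigned int value; };
;;
;;   unsigned int get_value (struct rec *r)
;;   {
;;     return r->value;        /* may become: ldr r0, [r0, #1]  @ unaligned */
;;   }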
4452
4453 ;; The 16-bit Thumb-1 variant of ldrsh requires two registers in the
4454 ;; address (there's no immediate format).  That's tricky to support
4455 ;; here and we don't really need this pattern for that case, so it is
4456 ;; only enabled for 32-bit ISAs.
4457 (define_insn "unaligned_loadhis"
4458 [(set (match_operand:SI 0 "s_register_operand" "=r")
4459 (sign_extend:SI
4460 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
4461 UNSPEC_UNALIGNED_LOAD)))]
4462 "unaligned_access && TARGET_32BIT"
4463 "ldrsh%?\t%0, %1\t@ unaligned"
4464 [(set_attr "predicable" "yes")
4465 (set_attr "type" "load_byte")])
4466
4467 (define_insn "unaligned_loadhiu"
4468 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4469 (zero_extend:SI
4470 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
4471 UNSPEC_UNALIGNED_LOAD)))]
4472 "unaligned_access"
4473 "@
4474 ldrh\t%0, %1\t@ unaligned
4475 ldrh%?\t%0, %1\t@ unaligned
4476 ldrh%?\t%0, %1\t@ unaligned"
4477 [(set_attr "arch" "t1,t2,32")
4478 (set_attr "length" "2,2,4")
4479 (set_attr "predicable" "no,yes,yes")
4480 (set_attr "predicable_short_it" "no,yes,no")
4481 (set_attr "type" "load_byte")])
4482
4483 (define_insn "unaligned_storedi"
4484 [(set (match_operand:DI 0 "memory_operand" "=m")
4485 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
4486 UNSPEC_UNALIGNED_STORE))]
4487 "TARGET_32BIT && TARGET_LDRD"
4488 "*
4489 return output_move_double (operands, true, NULL);
4490 "
4491 [(set_attr "length" "8")
4492 (set_attr "type" "store_8")])
4493
4494 (define_insn "unaligned_storesi"
4495 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
4496 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
4497 UNSPEC_UNALIGNED_STORE))]
4498 "unaligned_access"
4499 "@
4500 str\t%1, %0\t@ unaligned
4501 str%?\t%1, %0\t@ unaligned
4502 str%?\t%1, %0\t@ unaligned"
4503 [(set_attr "arch" "t1,t2,32")
4504 (set_attr "length" "2,2,4")
4505 (set_attr "predicable" "no,yes,yes")
4506 (set_attr "predicable_short_it" "no,yes,no")
4507 (set_attr "type" "store_4")])
4508
4509 (define_insn "unaligned_storehi"
4510 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
4511 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
4512 UNSPEC_UNALIGNED_STORE))]
4513 "unaligned_access"
4514 "@
4515 strh\t%1, %0\t@ unaligned
4516 strh%?\t%1, %0\t@ unaligned
4517 strh%?\t%1, %0\t@ unaligned"
4518 [(set_attr "arch" "t1,t2,32")
4519 (set_attr "length" "2,2,4")
4520 (set_attr "predicable" "no,yes,yes")
4521 (set_attr "predicable_short_it" "no,yes,no")
4522 (set_attr "type" "store_4")])
4523
4524
4525 (define_insn "*extv_reg"
4526 [(set (match_operand:SI 0 "s_register_operand" "=r")
4527 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4528 (match_operand:SI 2 "const_int_operand" "n")
4529 (match_operand:SI 3 "const_int_operand" "n")))]
4530 "arm_arch_thumb2
4531 && IN_RANGE (INTVAL (operands[3]), 0, 31)
4532 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
4533 "sbfx%?\t%0, %1, %3, %2"
4534 [(set_attr "length" "4")
4535 (set_attr "predicable" "yes")
4536 (set_attr "type" "bfm")]
4537 )
4538
4539 (define_insn "extzv_t2"
4540 [(set (match_operand:SI 0 "s_register_operand" "=r")
4541 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4542 (match_operand:SI 2 "const_int_operand" "n")
4543 (match_operand:SI 3 "const_int_operand" "n")))]
4544 "arm_arch_thumb2
4545 && IN_RANGE (INTVAL (operands[3]), 0, 31)
4546 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
4547 "ubfx%?\t%0, %1, %3, %2"
4548 [(set_attr "length" "4")
4549 (set_attr "predicable" "yes")
4550 (set_attr "type" "bfm")]
4551 )
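;; A minimal sketch (hypothetical struct and functions): signed and unsigned
;; bitfield reads are the typical sources of the SBFX/UBFX patterns above;
;; the last two assembler operands are the field's LSB position and width.
;;
;;   struct bits { int s : 6; unsigned int u : 6; int rest : 20; };
;;
;;   int          read_s (struct bits *p) { return p->s; }  /* may become: sbfx rN, rM, #0, #6 */
;;   unsigned int read_u (struct bits *p) { return p->u; }  /* may become: ubfx rN, rM, #6, #6 */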
4552
4553
4554 ;; Division instructions
4555 (define_insn "divsi3"
4556 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4557 (div:SI (match_operand:SI 1 "s_register_operand" "r,r")
4558 (match_operand:SI 2 "s_register_operand" "r,r")))]
4559 "TARGET_IDIV"
4560 "@
4561 sdiv%?\t%0, %1, %2
4562 sdiv\t%0, %1, %2"
4563 [(set_attr "arch" "32,v8mb")
4564 (set_attr "predicable" "yes")
4565 (set_attr "type" "sdiv")]
4566 )
4567
4568 (define_insn "udivsi3"
4569 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4570 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
4571 (match_operand:SI 2 "s_register_operand" "r,r")))]
4572 "TARGET_IDIV"
4573 "@
4574 udiv%?\t%0, %1, %2
4575 udiv\t%0, %1, %2"
4576 [(set_attr "arch" "32,v8mb")
4577 (set_attr "predicable" "yes")
4578 (set_attr "type" "udiv")]
4579 )
4580
4581 \f
4582 ;; Unary arithmetic insns
4583
4584 (define_expand "negvsi3"
4585 [(match_operand:SI 0 "register_operand")
4586 (match_operand:SI 1 "register_operand")
4587 (match_operand 2 "")]
4588 "TARGET_32BIT"
4589 {
4590 emit_insn (gen_subsi3_compare (operands[0], const0_rtx, operands[1]));
4591 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
4592
4593 DONE;
4594 })
4595
4596 (define_expand "negvdi3"
4597 [(match_operand:DI 0 "s_register_operand")
4598 (match_operand:DI 1 "s_register_operand")
4599 (match_operand 2 "")]
4600 "TARGET_ARM"
4601 {
4602 emit_insn (gen_negdi2_compare (operands[0], operands[1]));
4603 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
4604
4605 DONE;
4606 })
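;; Illustrative sketch (hypothetical function): the negv expanders above are
;; used for overflow-checked negation, where the only overflowing input is
;; the most negative value.  The subtraction sets the flags and the branch on
;; the V flag goes to the (unlikely) overflow handler.
;;
;;   int checked_neg (int x)
;;   {
;;     int r;
;;     if (__builtin_sub_overflow (0, x, &r))   /* overflows only for INT_MIN */
;;       __builtin_trap ();
;;     return r;
;;   }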
4607
4608
4609 (define_insn "negdi2_compare"
4610 [(set (reg:CC CC_REGNUM)
4611 (compare:CC
4612 (const_int 0)
4613 (match_operand:DI 1 "register_operand" "r,r")))
4614 (set (match_operand:DI 0 "register_operand" "=&r,&r")
4615 (minus:DI (const_int 0) (match_dup 1)))]
4616 "TARGET_ARM"
4617 "@
4618 rsbs\\t%Q0, %Q1, #0;rscs\\t%R0, %R1, #0
4619 rsbs\\t%Q0, %Q1, #0;sbcs\\t%R0, %R1, %R1, lsl #1"
4620 [(set_attr "conds" "set")
4621 (set_attr "arch" "a,t2")
4622 (set_attr "length" "8")
4623 (set_attr "type" "multiple")]
4624 )
4625
4626 (define_expand "negsi2"
4627 [(set (match_operand:SI 0 "s_register_operand")
4628 (neg:SI (match_operand:SI 1 "s_register_operand")))]
4629 "TARGET_EITHER"
4630 ""
4631 )
4632
4633 (define_insn "*arm_negsi2"
4634 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4635 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4636 "TARGET_32BIT"
4637 "rsb%?\\t%0, %1, #0"
4638 [(set_attr "predicable" "yes")
4639 (set_attr "predicable_short_it" "yes,no")
4640 (set_attr "arch" "t2,*")
4641 (set_attr "length" "4")
4642 (set_attr "type" "alu_imm")]
4643 )
4644
4645 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
4646 ;; rather than (0 cmp reg).  This gives the same results for unsigned
4647 ;; and equality compares, which is what we mostly need here.
4648 (define_insn "negsi2_0compare"
4649 [(set (reg:CC_RSB CC_REGNUM)
4650 (compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r"))
4651 (const_int -1)))
4652 (set (match_operand:SI 0 "s_register_operand" "=l,r")
4653 (neg:SI (match_dup 1)))]
4654 "TARGET_32BIT"
4655 "@
4656 negs\\t%0, %1
4657 rsbs\\t%0, %1, #0"
4658 [(set_attr "conds" "set")
4659 (set_attr "arch" "t2,*")
4660 (set_attr "length" "2,*")
4661 (set_attr "type" "alus_imm")]
4662 )
4663
4664 (define_insn "negsi2_carryin"
4665 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4666 (minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r"))
4667 (match_operand:SI 2 "arm_borrow_operation" "")))]
4668 "TARGET_32BIT"
4669 "@
4670 rsc\\t%0, %1, #0
4671 sbc\\t%0, %1, %1, lsl #1"
4672 [(set_attr "conds" "use")
4673 (set_attr "arch" "a,t2")
4674 (set_attr "type" "adc_imm,adc_reg")]
4675 )
4676
4677 (define_expand "negsf2"
4678 [(set (match_operand:SF 0 "s_register_operand")
4679 (neg:SF (match_operand:SF 1 "s_register_operand")))]
4680 "TARGET_32BIT && TARGET_HARD_FLOAT"
4681 ""
4682 )
4683
4684 (define_expand "negdf2"
4685 [(set (match_operand:DF 0 "s_register_operand")
4686 (neg:DF (match_operand:DF 1 "s_register_operand")))]
4687 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4688 "")
4689
4690 ;; abssi2 doesn't really clobber the condition codes if a different register
4691 ;; is being set. To keep things simple, assume during rtl manipulations that
4692 ;; it does, but tell the final scan operator the truth. Similarly for
4693 ;; (neg (abs...))
4694
4695 (define_expand "abssi2"
4696 [(parallel
4697 [(set (match_operand:SI 0 "s_register_operand")
4698 (abs:SI (match_operand:SI 1 "s_register_operand")))
4699 (clobber (match_dup 2))])]
4700 "TARGET_EITHER"
4701 "
4702 if (TARGET_THUMB1)
4703 operands[2] = gen_rtx_SCRATCH (SImode);
4704 else
4705 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
4706 ")
4707
4708 (define_insn_and_split "*arm_abssi2"
4709 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4710 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
4711 (clobber (reg:CC CC_REGNUM))]
4712 "TARGET_ARM"
4713 "#"
4714 "&& reload_completed"
4715 [(const_int 0)]
4716 {
4717 /* if (which_alternative == 0) */
4718 if (REGNO (operands[0]) == REGNO (operands[1]))
4719 {
4720 /* Emit the pattern:
4721 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
4722 [(set (reg:CC CC_REGNUM)
4723 (compare:CC (match_dup 0) (const_int 0)))
4724 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
4725 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
4726 */
4727 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
4728 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4729 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4730 (gen_rtx_LT (SImode,
4731 gen_rtx_REG (CCmode, CC_REGNUM),
4732 const0_rtx)),
4733 (gen_rtx_SET (operands[0],
4734 (gen_rtx_MINUS (SImode,
4735 const0_rtx,
4736 operands[1]))))));
4737 DONE;
4738 }
4739 else
4740 {
4741 /* Emit the pattern:
4742 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
4743 [(set (match_dup 0)
4744 (xor:SI (match_dup 1)
4745 (ashiftrt:SI (match_dup 1) (const_int 31))))
4746 (set (match_dup 0)
4747 (minus:SI (match_dup 0)
4748 (ashiftrt:SI (match_dup 1) (const_int 31))))]
4749 */
4750 emit_insn (gen_rtx_SET (operands[0],
4751 gen_rtx_XOR (SImode,
4752 gen_rtx_ASHIFTRT (SImode,
4753 operands[1],
4754 GEN_INT (31)),
4755 operands[1])));
4756 emit_insn (gen_rtx_SET (operands[0],
4757 gen_rtx_MINUS (SImode,
4758 operands[0],
4759 gen_rtx_ASHIFTRT (SImode,
4760 operands[1],
4761 GEN_INT (31)))));
4762 DONE;
4763 }
4764 }
4765 [(set_attr "conds" "clob,*")
4766 (set_attr "shift" "1")
4767 (set_attr "predicable" "no, yes")
4768 (set_attr "length" "8")
4769 (set_attr "type" "multiple")]
4770 )
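;; A worked sketch of the branchless second alternative above (illustrative,
;; not from the source).  With m = x >> 31 (an arithmetic shift, so m is 0
;; for x >= 0 and -1 for x < 0), abs (x) = (x ^ m) - m: for negative x this
;; is (~x) - (-1) = ~x + 1 = -x, and for non-negative x it is x - 0 = x.
;;
;;   int abs_branchless (int x)
;;   {
;;     int m = x >> 31;         /* implementation-defined in C, but an
;;                                 arithmetic shift on ARM targets */
;;     return (x ^ m) - m;      /* eor r0, r1, r1, asr #31
;;                                 sub r0, r0, r1, asr #31 */
;;   }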
4771
4772 (define_insn_and_split "*arm_neg_abssi2"
4773 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4774 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
4775 (clobber (reg:CC CC_REGNUM))]
4776 "TARGET_ARM"
4777 "#"
4778 "&& reload_completed"
4779 [(const_int 0)]
4780 {
4781 /* if (which_alternative == 0) */
4782 if (REGNO (operands[0]) == REGNO (operands[1]))
4783 {
4784 /* Emit the pattern:
4785 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
4786 */
4787 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
4788 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4789 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4790 gen_rtx_GT (SImode,
4791 gen_rtx_REG (CCmode, CC_REGNUM),
4792 const0_rtx),
4793 gen_rtx_SET (operands[0],
4794 (gen_rtx_MINUS (SImode,
4795 const0_rtx,
4796 operands[1])))));
4797 }
4798 else
4799 {
4800 /* Emit the pattern:
4801 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
4802 */
4803 emit_insn (gen_rtx_SET (operands[0],
4804 gen_rtx_XOR (SImode,
4805 gen_rtx_ASHIFTRT (SImode,
4806 operands[1],
4807 GEN_INT (31)),
4808 operands[1])));
4809 emit_insn (gen_rtx_SET (operands[0],
4810 gen_rtx_MINUS (SImode,
4811 gen_rtx_ASHIFTRT (SImode,
4812 operands[1],
4813 GEN_INT (31)),
4814 operands[0])));
4815 }
4816 DONE;
4817 }
4818 [(set_attr "conds" "clob,*")
4819 (set_attr "shift" "1")
4820 (set_attr "predicable" "no, yes")
4821 (set_attr "length" "8")
4822 (set_attr "type" "multiple")]
4823 )
4824
4825 (define_expand "abssf2"
4826 [(set (match_operand:SF 0 "s_register_operand")
4827 (abs:SF (match_operand:SF 1 "s_register_operand")))]
4828 "TARGET_32BIT && TARGET_HARD_FLOAT"
4829 "")
4830
4831 (define_expand "absdf2"
4832 [(set (match_operand:DF 0 "s_register_operand")
4833 (abs:DF (match_operand:DF 1 "s_register_operand")))]
4834 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4835 "")
4836
4837 (define_expand "sqrtsf2"
4838 [(set (match_operand:SF 0 "s_register_operand")
4839 (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
4840 "TARGET_32BIT && TARGET_HARD_FLOAT"
4841 "")
4842
4843 (define_expand "sqrtdf2"
4844 [(set (match_operand:DF 0 "s_register_operand")
4845 (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
4846 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4847 "")
4848
4849 (define_expand "one_cmplsi2"
4850 [(set (match_operand:SI 0 "s_register_operand")
4851 (not:SI (match_operand:SI 1 "s_register_operand")))]
4852 "TARGET_EITHER"
4853 ""
4854 )
4855
4856 (define_insn "*arm_one_cmplsi2"
4857 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4858 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4859 "TARGET_32BIT"
4860 "mvn%?\\t%0, %1"
4861 [(set_attr "predicable" "yes")
4862 (set_attr "predicable_short_it" "yes,no")
4863 (set_attr "arch" "t2,*")
4864 (set_attr "length" "4")
4865 (set_attr "type" "mvn_reg")]
4866 )
4867
4868 (define_insn "*notsi_compare0"
4869 [(set (reg:CC_NOOV CC_REGNUM)
4870 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4871 (const_int 0)))
4872 (set (match_operand:SI 0 "s_register_operand" "=r")
4873 (not:SI (match_dup 1)))]
4874 "TARGET_32BIT"
4875 "mvns%?\\t%0, %1"
4876 [(set_attr "conds" "set")
4877 (set_attr "type" "mvn_reg")]
4878 )
4879
4880 (define_insn "*notsi_compare0_scratch"
4881 [(set (reg:CC_NOOV CC_REGNUM)
4882 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4883 (const_int 0)))
4884 (clobber (match_scratch:SI 0 "=r"))]
4885 "TARGET_32BIT"
4886 "mvns%?\\t%0, %1"
4887 [(set_attr "conds" "set")
4888 (set_attr "type" "mvn_reg")]
4889 )
4890 \f
4891 ;; Fixed <--> Floating conversion insns
4892
4893 (define_expand "floatsihf2"
4894 [(set (match_operand:HF 0 "general_operand")
4895 (float:HF (match_operand:SI 1 "general_operand")))]
4896 "TARGET_EITHER"
4897 "
4898 {
4899 rtx op1 = gen_reg_rtx (SFmode);
4900 expand_float (op1, operands[1], 0);
4901 op1 = convert_to_mode (HFmode, op1, 0);
4902 emit_move_insn (operands[0], op1);
4903 DONE;
4904 }"
4905 )
4906
4907 (define_expand "floatdihf2"
4908 [(set (match_operand:HF 0 "general_operand")
4909 (float:HF (match_operand:DI 1 "general_operand")))]
4910 "TARGET_EITHER"
4911 "
4912 {
4913 rtx op1 = gen_reg_rtx (SFmode);
4914 expand_float (op1, operands[1], 0);
4915 op1 = convert_to_mode (HFmode, op1, 0);
4916 emit_move_insn (operands[0], op1);
4917 DONE;
4918 }"
4919 )
4920
4921 (define_expand "floatsisf2"
4922 [(set (match_operand:SF 0 "s_register_operand")
4923 (float:SF (match_operand:SI 1 "s_register_operand")))]
4924 "TARGET_32BIT && TARGET_HARD_FLOAT"
4925 "
4926 ")
4927
4928 (define_expand "floatsidf2"
4929 [(set (match_operand:DF 0 "s_register_operand")
4930 (float:DF (match_operand:SI 1 "s_register_operand")))]
4931 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4932 "
4933 ")
4934
4935 (define_expand "fix_trunchfsi2"
4936 [(set (match_operand:SI 0 "general_operand")
4937 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
4938 "TARGET_EITHER"
4939 "
4940 {
4941 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4942 expand_fix (operands[0], op1, 0);
4943 DONE;
4944 }"
4945 )
4946
4947 (define_expand "fix_trunchfdi2"
4948 [(set (match_operand:DI 0 "general_operand")
4949 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
4950 "TARGET_EITHER"
4951 "
4952 {
4953 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4954 expand_fix (operands[0], op1, 0);
4955 DONE;
4956 }"
4957 )
4958
4959 (define_expand "fix_truncsfsi2"
4960 [(set (match_operand:SI 0 "s_register_operand")
4961 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
4962 "TARGET_32BIT && TARGET_HARD_FLOAT"
4963 "
4964 ")
4965
4966 (define_expand "fix_truncdfsi2"
4967 [(set (match_operand:SI 0 "s_register_operand")
4968 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
4969 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4970 "
4971 ")
4972
4973 ;; Truncation insns
4974
4975 (define_expand "truncdfsf2"
4976 [(set (match_operand:SF 0 "s_register_operand")
4977 (float_truncate:SF
4978 (match_operand:DF 1 "s_register_operand")))]
4979 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4980 ""
4981 )
4982
4983 ;; DFmode to HFmode conversions on targets without a single-step hardware
4984 ;; instruction for them would have to go through SFmode.  This is dangerous,
4985 ;; as it introduces double rounding.
4986 ;;
4987 ;; Disable this pattern unless we are in an unsafe math mode, or we have
4988 ;; a single-step instruction.
4989
4990 (define_expand "truncdfhf2"
4991 [(set (match_operand:HF 0 "s_register_operand")
4992 (float_truncate:HF
4993 (match_operand:DF 1 "s_register_operand")))]
4994 "(TARGET_EITHER && flag_unsafe_math_optimizations)
4995 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
4996 {
4997 /* We don't have a direct instruction for this, so we must be in
4998 an unsafe math mode and go via SFmode.  */
4999
5000 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
5001 {
5002 rtx op1;
5003 op1 = convert_to_mode (SFmode, operands[1], 0);
5004 op1 = convert_to_mode (HFmode, op1, 0);
5005 emit_move_insn (operands[0], op1);
5006 DONE;
5007 }
5008 /* Otherwise, we will pick this up as a single instruction with
5009 no intermediary rounding. */
5010 }
5011 )
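;; Illustrative sketch (hypothetical functions) of the two conversion paths
;; the comment above contrasts; __fp16 is the ARM half-precision extension
;; type.  The two-step path can round twice, which is why it is only enabled
;; under -funsafe-math-optimizations when no single-step instruction exists.
;;
;;   __fp16 via_single (double d) { return (__fp16) (float) d; }  /* two roundings */
;;   __fp16 direct     (double d) { return (__fp16) d;         }  /* one rounding  */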
5012 \f
5013 ;; Zero and sign extension instructions.
5014
5015 (define_expand "zero_extend<mode>di2"
5016 [(set (match_operand:DI 0 "s_register_operand" "")
5017 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
5018 "TARGET_32BIT <qhs_zextenddi_cond>"
5019 {
5020 rtx res_lo, res_hi, op0_lo, op0_hi;
5021 res_lo = gen_lowpart (SImode, operands[0]);
5022 res_hi = gen_highpart (SImode, operands[0]);
5023 if (can_create_pseudo_p ())
5024 {
5025 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
5026 op0_hi = gen_reg_rtx (SImode);
5027 }
5028 else
5029 {
5030 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
5031 op0_hi = res_hi;
5032 }
5033 if (<MODE>mode != SImode)
5034 emit_insn (gen_rtx_SET (op0_lo,
5035 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5036 emit_insn (gen_movsi (op0_hi, const0_rtx));
5037 if (res_lo != op0_lo)
5038 emit_move_insn (res_lo, op0_lo);
5039 if (res_hi != op0_hi)
5040 emit_move_insn (res_hi, op0_hi);
5041 DONE;
5042 }
5043 )
5044
5045 (define_expand "extend<mode>di2"
5046 [(set (match_operand:DI 0 "s_register_operand" "")
5047 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
5048 "TARGET_32BIT <qhs_sextenddi_cond>"
5049 {
5050 rtx res_lo, res_hi, op0_lo, op0_hi;
5051 res_lo = gen_lowpart (SImode, operands[0]);
5052 res_hi = gen_highpart (SImode, operands[0]);
5053 if (can_create_pseudo_p ())
5054 {
5055 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
5056 op0_hi = gen_reg_rtx (SImode);
5057 }
5058 else
5059 {
5060 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
5061 op0_hi = res_hi;
5062 }
5063 if (<MODE>mode != SImode)
5064 emit_insn (gen_rtx_SET (op0_lo,
5065 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5066 emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
5067 if (res_lo != op0_lo)
5068 emit_move_insn (res_lo, op0_lo);
5069 if (res_hi != op0_hi)
5070 emit_move_insn (res_hi, op0_hi);
5071 DONE;
5072 }
5073 )
5074
5075 ;; Splits for all extensions to DImode
5076 (define_split
5077 [(set (match_operand:DI 0 "s_register_operand" "")
5078 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5079 "TARGET_32BIT"
5080 [(set (match_dup 0) (match_dup 1))]
5081 {
5082 rtx lo_part = gen_lowpart (SImode, operands[0]);
5083 machine_mode src_mode = GET_MODE (operands[1]);
5084
5085 if (src_mode == SImode)
5086 emit_move_insn (lo_part, operands[1]);
5087 else
5088 emit_insn (gen_rtx_SET (lo_part,
5089 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5090 operands[0] = gen_highpart (SImode, operands[0]);
5091 operands[1] = const0_rtx;
5092 })
5093
5094 (define_split
5095 [(set (match_operand:DI 0 "s_register_operand" "")
5096 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5097 "TARGET_32BIT"
5098 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
5099 {
5100 rtx lo_part = gen_lowpart (SImode, operands[0]);
5101 machine_mode src_mode = GET_MODE (operands[1]);
5102
5103 if (src_mode == SImode)
5104 emit_move_insn (lo_part, operands[1]);
5105 else
5106 emit_insn (gen_rtx_SET (lo_part,
5107 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5108 operands[1] = lo_part;
5109 operands[0] = gen_highpart (SImode, operands[0]);
5110 })
5111
5112 (define_expand "zero_extendhisi2"
5113 [(set (match_operand:SI 0 "s_register_operand")
5114 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5115 "TARGET_EITHER"
5116 {
5117 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
5118 {
5119 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
5120 DONE;
5121 }
5122 if (!arm_arch6 && !MEM_P (operands[1]))
5123 {
5124 rtx t = gen_lowpart (SImode, operands[1]);
5125 rtx tmp = gen_reg_rtx (SImode);
5126 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5127 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
5128 DONE;
5129 }
5130 })
5131
5132 (define_split
5133 [(set (match_operand:SI 0 "s_register_operand" "")
5134 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
5135 "!TARGET_THUMB2 && !arm_arch6"
5136 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5137 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
5138 {
5139 operands[2] = gen_lowpart (SImode, operands[1]);
5140 })
5141
5142 (define_insn "*arm_zero_extendhisi2"
5143 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5144 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5145 "TARGET_ARM && arm_arch4 && !arm_arch6"
5146 "@
5147 #
5148 ldrh%?\\t%0, %1"
5149 [(set_attr "type" "alu_shift_reg,load_byte")
5150 (set_attr "predicable" "yes")]
5151 )
5152
5153 (define_insn "*arm_zero_extendhisi2_v6"
5154 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5155 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5156 "TARGET_ARM && arm_arch6"
5157 "@
5158 uxth%?\\t%0, %1
5159 ldrh%?\\t%0, %1"
5160 [(set_attr "predicable" "yes")
5161 (set_attr "type" "extend,load_byte")]
5162 )
5163
5164 (define_insn "*arm_zero_extendhisi2addsi"
5165 [(set (match_operand:SI 0 "s_register_operand" "=r")
5166 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5167 (match_operand:SI 2 "s_register_operand" "r")))]
5168 "TARGET_INT_SIMD"
5169 "uxtah%?\\t%0, %2, %1"
5170 [(set_attr "type" "alu_shift_reg")
5171 (set_attr "predicable" "yes")]
5172 )
5173
5174 (define_expand "zero_extendqisi2"
5175 [(set (match_operand:SI 0 "s_register_operand")
5176 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
5177 "TARGET_EITHER"
5178 {
5179 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
5180 {
5181 emit_insn (gen_andsi3 (operands[0],
5182 gen_lowpart (SImode, operands[1]),
5183 GEN_INT (255)));
5184 DONE;
5185 }
5186 if (!arm_arch6 && !MEM_P (operands[1]))
5187 {
5188 rtx t = gen_lowpart (SImode, operands[1]);
5189 rtx tmp = gen_reg_rtx (SImode);
5190 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5191 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
5192 DONE;
5193 }
5194 })
5195
5196 (define_split
5197 [(set (match_operand:SI 0 "s_register_operand" "")
5198 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
5199 "!arm_arch6"
5200 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5201 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
5202 {
5203 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5204 if (TARGET_ARM)
5205 {
5206 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
5207 DONE;
5208 }
5209 })
5210
5211 (define_insn "*arm_zero_extendqisi2"
5212 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5213 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
5214 "TARGET_ARM && !arm_arch6"
5215 "@
5216 #
5217 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5218 [(set_attr "length" "8,4")
5219 (set_attr "type" "alu_shift_reg,load_byte")
5220 (set_attr "predicable" "yes")]
5221 )
5222
5223 (define_insn "*arm_zero_extendqisi2_v6"
5224 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5225 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
5226 "TARGET_ARM && arm_arch6"
5227 "@
5228 uxtb%?\\t%0, %1
5229 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5230 [(set_attr "type" "extend,load_byte")
5231 (set_attr "predicable" "yes")]
5232 )
5233
5234 (define_insn "*arm_zero_extendqisi2addsi"
5235 [(set (match_operand:SI 0 "s_register_operand" "=r")
5236 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5237 (match_operand:SI 2 "s_register_operand" "r")))]
5238 "TARGET_INT_SIMD"
5239 "uxtab%?\\t%0, %2, %1"
5240 [(set_attr "predicable" "yes")
5241 (set_attr "type" "alu_shift_reg")]
5242 )
5243
5244 (define_split
5245 [(set (match_operand:SI 0 "s_register_operand" "")
5246 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
5247 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5248 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
5249 [(set (match_dup 2) (match_dup 1))
5250 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5251 ""
5252 )
5253
5254 (define_split
5255 [(set (match_operand:SI 0 "s_register_operand" "")
5256 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
5257 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5258 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
5259 [(set (match_dup 2) (match_dup 1))
5260 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5261 ""
5262 )
5263
5264
5265 (define_split
5266 [(set (match_operand:SI 0 "s_register_operand" "")
5267 (IOR_XOR:SI (and:SI (ashift:SI
5268 (match_operand:SI 1 "s_register_operand" "")
5269 (match_operand:SI 2 "const_int_operand" ""))
5270 (match_operand:SI 3 "const_int_operand" ""))
5271 (zero_extend:SI
5272 (match_operator 5 "subreg_lowpart_operator"
5273 [(match_operand:SI 4 "s_register_operand" "")]))))]
5274 "TARGET_32BIT
5275 && (UINTVAL (operands[3])
5276 == (GET_MODE_MASK (GET_MODE (operands[5]))
5277 & (GET_MODE_MASK (GET_MODE (operands[5]))
5278 << (INTVAL (operands[2])))))"
5279 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
5280 (match_dup 4)))
5281 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
5282 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
5283 )
5284
5285 (define_insn "*compareqi_eq0"
5286 [(set (reg:CC_Z CC_REGNUM)
5287 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
5288 (const_int 0)))]
5289 "TARGET_32BIT"
5290 "tst%?\\t%0, #255"
5291 [(set_attr "conds" "set")
5292 (set_attr "predicable" "yes")
5293 (set_attr "type" "logic_imm")]
5294 )
5295
5296 (define_expand "extendhisi2"
5297 [(set (match_operand:SI 0 "s_register_operand")
5298 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5299 "TARGET_EITHER"
5300 {
5301 if (TARGET_THUMB1)
5302 {
5303 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
5304 DONE;
5305 }
5306 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
5307 {
5308 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
5309 DONE;
5310 }
5311
5312 if (!arm_arch6 && !MEM_P (operands[1]))
5313 {
5314 rtx t = gen_lowpart (SImode, operands[1]);
5315 rtx tmp = gen_reg_rtx (SImode);
5316 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5317 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
5318 DONE;
5319 }
5320 })
5321
5322 (define_split
5323 [(parallel
5324 [(set (match_operand:SI 0 "register_operand" "")
5325 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
5326 (clobber (match_scratch:SI 2 ""))])]
5327 "!arm_arch6"
5328 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5329 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5330 {
5331 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5332 })
5333
5334 ;; This pattern will only be used when ldrsh is not available.
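;; A hedged sketch of the kind of sequence this expands to on a little-endian
;; target (register numbers are illustrative only):
;;	ldrb	r2, [r0]		@ low byte
;;	ldrb	r3, [r0, #1]		@ high byte
;;	mov	r3, r3, lsl #24
;;	orr	r0, r2, r3, asr #16	@ sign-extended halfword result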
5335 (define_expand "extendhisi2_mem"
5336 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5337 (set (match_dup 3)
5338 (zero_extend:SI (match_dup 7)))
5339 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
5340 (set (match_operand:SI 0 "" "")
5341 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
5342 "TARGET_ARM"
5343 "
5344 {
5345 rtx mem1, mem2;
5346 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5347
5348 mem1 = change_address (operands[1], QImode, addr);
5349 mem2 = change_address (operands[1], QImode,
5350 plus_constant (Pmode, addr, 1));
5351 operands[0] = gen_lowpart (SImode, operands[0]);
5352 operands[1] = mem1;
5353 operands[2] = gen_reg_rtx (SImode);
5354 operands[3] = gen_reg_rtx (SImode);
5355 operands[6] = gen_reg_rtx (SImode);
5356 operands[7] = mem2;
5357
5358 if (BYTES_BIG_ENDIAN)
5359 {
5360 operands[4] = operands[2];
5361 operands[5] = operands[3];
5362 }
5363 else
5364 {
5365 operands[4] = operands[3];
5366 operands[5] = operands[2];
5367 }
5368 }"
5369 )
5370
5371 (define_split
5372 [(set (match_operand:SI 0 "register_operand" "")
5373 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
5374 "!arm_arch6"
5375 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5376 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5377 {
5378 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5379 })
5380
5381 (define_insn "*arm_extendhisi2"
5382 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5383 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5384 "TARGET_ARM && arm_arch4 && !arm_arch6"
5385 "@
5386 #
5387 ldrsh%?\\t%0, %1"
5388 [(set_attr "length" "8,4")
5389 (set_attr "type" "alu_shift_reg,load_byte")
5390 (set_attr "predicable" "yes")]
5391 )
5392
5393 ;; ??? Check Thumb-2 pool range
5394 (define_insn "*arm_extendhisi2_v6"
5395 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5396 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5397 "TARGET_32BIT && arm_arch6"
5398 "@
5399 sxth%?\\t%0, %1
5400 ldrsh%?\\t%0, %1"
5401 [(set_attr "type" "extend,load_byte")
5402 (set_attr "predicable" "yes")]
5403 )
5404
5405 (define_insn "*arm_extendhisi2addsi"
5406 [(set (match_operand:SI 0 "s_register_operand" "=r")
5407 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5408 (match_operand:SI 2 "s_register_operand" "r")))]
5409 "TARGET_INT_SIMD"
5410 "sxtah%?\\t%0, %2, %1"
5411 [(set_attr "type" "alu_shift_reg")]
5412 )
5413
5414 (define_expand "extendqihi2"
5415 [(set (match_dup 2)
5416 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
5417 (const_int 24)))
5418 (set (match_operand:HI 0 "s_register_operand")
5419 (ashiftrt:SI (match_dup 2)
5420 (const_int 24)))]
5421 "TARGET_ARM"
5422 "
5423 {
5424 if (arm_arch4 && MEM_P (operands[1]))
5425 {
5426 emit_insn (gen_rtx_SET (operands[0],
5427 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
5428 DONE;
5429 }
5430 if (!s_register_operand (operands[1], QImode))
5431 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5432 operands[0] = gen_lowpart (SImode, operands[0]);
5433 operands[1] = gen_lowpart (SImode, operands[1]);
5434 operands[2] = gen_reg_rtx (SImode);
5435 }"
5436 )
5437
5438 (define_insn "*arm_extendqihi_insn"
5439 [(set (match_operand:HI 0 "s_register_operand" "=r")
5440 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
5441 "TARGET_ARM && arm_arch4"
5442 "ldrsb%?\\t%0, %1"
5443 [(set_attr "type" "load_byte")
5444 (set_attr "predicable" "yes")]
5445 )
5446
5447 (define_expand "extendqisi2"
5448 [(set (match_operand:SI 0 "s_register_operand")
5449 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
5450 "TARGET_EITHER"
5451 {
5452 if (!arm_arch4 && MEM_P (operands[1]))
5453 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5454
5455 if (!arm_arch6 && !MEM_P (operands[1]))
5456 {
5457 rtx t = gen_lowpart (SImode, operands[1]);
5458 rtx tmp = gen_reg_rtx (SImode);
5459 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5460 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
5461 DONE;
5462 }
5463 })
5464
5465 (define_split
5466 [(set (match_operand:SI 0 "register_operand" "")
5467 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
5468 "!arm_arch6"
5469 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5470 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
5471 {
5472 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5473 })
5474
5475 (define_insn "*arm_extendqisi"
5476 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5477 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5478 "TARGET_ARM && arm_arch4 && !arm_arch6"
5479 "@
5480 #
5481 ldrsb%?\\t%0, %1"
5482 [(set_attr "length" "8,4")
5483 (set_attr "type" "alu_shift_reg,load_byte")
5484 (set_attr "predicable" "yes")]
5485 )
5486
5487 (define_insn "*arm_extendqisi_v6"
5488 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5489 (sign_extend:SI
5490 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5491 "TARGET_ARM && arm_arch6"
5492 "@
5493 sxtb%?\\t%0, %1
5494 ldrsb%?\\t%0, %1"
5495 [(set_attr "type" "extend,load_byte")
5496 (set_attr "predicable" "yes")]
5497 )
5498
5499 (define_insn "*arm_extendqisi2addsi"
5500 [(set (match_operand:SI 0 "s_register_operand" "=r")
5501 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5502 (match_operand:SI 2 "s_register_operand" "r")))]
5503 "TARGET_INT_SIMD"
5504 "sxtab%?\\t%0, %2, %1"
5505 [(set_attr "type" "alu_shift_reg")
5506 (set_attr "predicable" "yes")]
5507 )
5508
5509 (define_insn "arm_<sup>xtb16"
5510 [(set (match_operand:SI 0 "s_register_operand" "=r")
5511 (unspec:SI
5512 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
5513 "TARGET_INT_SIMD"
5514 "<sup>xtb16%?\\t%0, %1"
5515 [(set_attr "predicable" "yes")
5516 (set_attr "type" "alu_dsp_reg")])
5517
5518 (define_insn "arm_<simd32_op>"
5519 [(set (match_operand:SI 0 "s_register_operand" "=r")
5520 (unspec:SI
5521 [(match_operand:SI 1 "s_register_operand" "r")
5522 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
5523 "TARGET_INT_SIMD"
5524 "<simd32_op>%?\\t%0, %1, %2"
5525 [(set_attr "predicable" "yes")
5526 (set_attr "type" "alu_dsp_reg")])
5527
5528 (define_insn "arm_usada8"
5529 [(set (match_operand:SI 0 "s_register_operand" "=r")
5530 (unspec:SI
5531 [(match_operand:SI 1 "s_register_operand" "r")
5532 (match_operand:SI 2 "s_register_operand" "r")
5533 (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
5534 "TARGET_INT_SIMD"
5535 "usada8%?\\t%0, %1, %2, %3"
5536 [(set_attr "predicable" "yes")
5537 (set_attr "type" "alu_dsp_reg")])
5538
5539 (define_insn "arm_<simd32_op>"
5540 [(set (match_operand:DI 0 "s_register_operand" "=r")
5541 (unspec:DI
5542 [(match_operand:SI 1 "s_register_operand" "r")
5543 (match_operand:SI 2 "s_register_operand" "r")
5544 (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
5545 "TARGET_INT_SIMD"
5546 "<simd32_op>%?\\t%Q0, %R0, %1, %2"
5547 [(set_attr "predicable" "yes")
5548 (set_attr "type" "smlald")])
5549
5550 (define_expand "extendsfdf2"
5551 [(set (match_operand:DF 0 "s_register_operand")
5552 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
5553 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5554 ""
5555 )
5556
5557 ;; HFmode -> DFmode conversions where we don't have an instruction for it
5558 ;; must go through SFmode. This is always safe for an extend, since every
5559 ;; HFmode value is exactly representable in SFmode, so the intermediate
5560 ;; step cannot round.
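;;
;; A hedged sketch of the two-step path (exact mnemonics depend on the FPU):
;;	vcvtb.f32.f16	s0, s0		@ HF -> SF, exact
;;	vcvt.f64.f32	d0, s0		@ SF -> DF, exact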
5561
5562 (define_expand "extendhfdf2"
5563 [(set (match_operand:DF 0 "s_register_operand")
5564 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
5565 "TARGET_EITHER"
5566 {
5567 /* We don't have a direct instruction for this, so go via SFmode. */
5568 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
5569 {
5570 rtx op1;
5571 op1 = convert_to_mode (SFmode, operands[1], 0);
5572 op1 = convert_to_mode (DFmode, op1, 0);
5573 emit_insn (gen_movdf (operands[0], op1));
5574 DONE;
5575 }
5576 /* Otherwise, we're done producing RTL and will pick up the correct
5577 pattern to do this with one rounding-step in a single instruction. */
5578 }
5579 )
5580 \f
5581 ;; Move insns (including loads and stores)
5582
5583 ;; XXX Just some ideas about movti.
5584 ;; I don't think these are a good idea on the ARM; there just aren't enough
5585 ;; registers.
5586 ;;(define_expand "loadti"
5587 ;; [(set (match_operand:TI 0 "s_register_operand")
5588 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
5589 ;; "" "")
5590
5591 ;;(define_expand "storeti"
5592 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
5593 ;; (match_operand:TI 1 "s_register_operand"))]
5594 ;; "" "")
5595
5596 ;;(define_expand "movti"
5597 ;; [(set (match_operand:TI 0 "general_operand")
5598 ;; (match_operand:TI 1 "general_operand"))]
5599 ;; ""
5600 ;; "
5601 ;;{
5602 ;; rtx insn;
5603 ;;
5604 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
5605 ;; operands[1] = copy_to_reg (operands[1]);
5606 ;; if (MEM_P (operands[0]))
5607 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
5608 ;; else if (MEM_P (operands[1]))
5609 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
5610 ;; else
5611 ;; FAIL;
5612 ;;
5613 ;; emit_insn (insn);
5614 ;; DONE;
5615 ;;}")
5616
5617 ;; Recognize garbage generated above.
5618
5619 ;;(define_insn ""
5620 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
5621 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
5622 ;; ""
5623 ;; "*
5624 ;; {
5625 ;; register mem = (which_alternative < 3);
5626 ;; register const char *template;
5627 ;;
5628 ;; operands[mem] = XEXP (operands[mem], 0);
5629 ;; switch (which_alternative)
5630 ;; {
5631 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
5632 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
5633 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
5634 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
5635 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
5636 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
5637 ;; }
5638 ;; output_asm_insn (template, operands);
5639 ;; return \"\";
5640 ;; }")
5641
5642 (define_expand "movdi"
5643 [(set (match_operand:DI 0 "general_operand")
5644 (match_operand:DI 1 "general_operand"))]
5645 "TARGET_EITHER"
5646 "
5647 gcc_checking_assert (aligned_operand (operands[0], DImode));
5648 gcc_checking_assert (aligned_operand (operands[1], DImode));
5649 if (can_create_pseudo_p ())
5650 {
5651 if (!REG_P (operands[0]))
5652 operands[1] = force_reg (DImode, operands[1]);
5653 }
5654 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
5655 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
5656 {
5657 /* Avoid LDRD's into an odd-numbered register pair in ARM state
5658 when expanding function calls. */
5659 gcc_assert (can_create_pseudo_p ());
5660 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
5661 {
5662 /* Perform load into legal reg pair first, then move. */
5663 rtx reg = gen_reg_rtx (DImode);
5664 emit_insn (gen_movdi (reg, operands[1]));
5665 operands[1] = reg;
5666 }
5667 emit_move_insn (gen_lowpart (SImode, operands[0]),
5668 gen_lowpart (SImode, operands[1]));
5669 emit_move_insn (gen_highpart (SImode, operands[0]),
5670 gen_highpart (SImode, operands[1]));
5671 DONE;
5672 }
5673 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
5674 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
5675 {
5676 /* Avoid STRD's from an odd-numbered register pair in ARM state
5677 when expanding function prologue. */
5678 gcc_assert (can_create_pseudo_p ());
5679 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
5680 ? gen_reg_rtx (DImode)
5681 : operands[0];
5682 emit_move_insn (gen_lowpart (SImode, split_dest),
5683 gen_lowpart (SImode, operands[1]));
5684 emit_move_insn (gen_highpart (SImode, split_dest),
5685 gen_highpart (SImode, operands[1]));
5686 if (split_dest != operands[0])
5687 emit_insn (gen_movdi (operands[0], split_dest));
5688 DONE;
5689 }
5690 "
5691 )
5692
5693 (define_insn "*arm_movdi"
5694 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
5695 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
5696 "TARGET_32BIT
5697 && !(TARGET_HARD_FLOAT)
5698 && !TARGET_IWMMXT
5699 && ( register_operand (operands[0], DImode)
5700 || register_operand (operands[1], DImode))"
5701 "*
5702 switch (which_alternative)
5703 {
5704 case 0:
5705 case 1:
5706 case 2:
5707 return \"#\";
5708 case 3:
5709 /* Cannot load it directly, split to load it via MOV / MOVT. */
5710 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
5711 return \"#\";
5712 /* Fall through. */
5713 default:
5714 return output_move_double (operands, true, NULL);
5715 }
5716 "
5717 [(set_attr "length" "8,12,16,8,8")
5718 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
5719 (set_attr "arm_pool_range" "*,*,*,1020,*")
5720 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
5721 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
5722 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
5723 )
5724
5725 (define_split
5726 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5727 (match_operand:ANY64 1 "immediate_operand" ""))]
5728 "TARGET_32BIT
5729 && reload_completed
5730 && (arm_disable_literal_pool
5731 || (arm_const_double_inline_cost (operands[1])
5732 <= arm_max_const_double_inline_cost ()))"
5733 [(const_int 0)]
5734 "
5735 arm_split_constant (SET, SImode, curr_insn,
5736 INTVAL (gen_lowpart (SImode, operands[1])),
5737 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
5738 arm_split_constant (SET, SImode, curr_insn,
5739 INTVAL (gen_highpart_mode (SImode,
5740 GET_MODE (operands[0]),
5741 operands[1])),
5742 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
5743 DONE;
5744 "
5745 )
5746
5747 ; If optimizing for size, or if we have load delay slots, then
5748 ; we want to split the constant into two separate operations.
5749 ; In both cases this may split a trivial part into a single data op
5750 ; leaving a single complex constant to load. We can also get longer
5751 ; offsets in an LDR, which improves the chances of sharing constant-pool
5752 ; entries. Finally, we can normally do a better job of scheduling
5753 ; LDR instructions than we can with LDM.
5754 ; This pattern will only match if the one above did not.
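; For example (a hedged sketch; the constant and registers are illustrative),
; a DImode load of 0x12345678ffffffff might be split into
;	mvn	r0, #0			; trivial low word as a single data op
;	ldr	r1, .Lpool		; complex high word loaded from the pool
; rather than loading both words from the constant pool.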
5755 (define_split
5756 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5757 (match_operand:ANY64 1 "const_double_operand" ""))]
5758 "TARGET_ARM && reload_completed
5759 && arm_const_double_by_parts (operands[1])"
5760 [(set (match_dup 0) (match_dup 1))
5761 (set (match_dup 2) (match_dup 3))]
5762 "
5763 operands[2] = gen_highpart (SImode, operands[0]);
5764 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
5765 operands[1]);
5766 operands[0] = gen_lowpart (SImode, operands[0]);
5767 operands[1] = gen_lowpart (SImode, operands[1]);
5768 "
5769 )
5770
5771 (define_split
5772 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5773 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
5774 "TARGET_EITHER && reload_completed"
5775 [(set (match_dup 0) (match_dup 1))
5776 (set (match_dup 2) (match_dup 3))]
5777 "
5778 operands[2] = gen_highpart (SImode, operands[0]);
5779 operands[3] = gen_highpart (SImode, operands[1]);
5780 operands[0] = gen_lowpart (SImode, operands[0]);
5781 operands[1] = gen_lowpart (SImode, operands[1]);
5782
5783 /* Handle a partial overlap. */
5784 if (rtx_equal_p (operands[0], operands[3]))
5785 {
5786 rtx tmp0 = operands[0];
5787 rtx tmp1 = operands[1];
5788
5789 operands[0] = operands[2];
5790 operands[1] = operands[3];
5791 operands[2] = tmp0;
5792 operands[3] = tmp1;
5793 }
5794 "
5795 )
5796
5797 ;; We can't actually do base+index doubleword loads if the index and
5798 ;; destination overlap. Split here so that we at least have a chance to
5799 ;; schedule.
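;; For example (a hedged sketch; registers are illustrative), a load such as
;;	ldrd	r0, r1, [r0, r2]
;; overlaps its destination with both base and index, so it is split into
;;	add	r0, r0, r2
;;	ldrd	r0, r1, [r0]
;; which is what the split below produces.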
5800 (define_split
5801 [(set (match_operand:DI 0 "s_register_operand" "")
5802 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
5803 (match_operand:SI 2 "s_register_operand" ""))))]
5804 "TARGET_LDRD
5805 && reg_overlap_mentioned_p (operands[0], operands[1])
5806 && reg_overlap_mentioned_p (operands[0], operands[2])"
5807 [(set (match_dup 4)
5808 (plus:SI (match_dup 1)
5809 (match_dup 2)))
5810 (set (match_dup 0)
5811 (mem:DI (match_dup 4)))]
5812 "
5813 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
5814 "
5815 )
5816
5817 (define_expand "movsi"
5818 [(set (match_operand:SI 0 "general_operand")
5819 (match_operand:SI 1 "general_operand"))]
5820 "TARGET_EITHER"
5821 "
5822 {
5823 rtx base, offset, tmp;
5824
5825 gcc_checking_assert (aligned_operand (operands[0], SImode));
5826 gcc_checking_assert (aligned_operand (operands[1], SImode));
5827 if (TARGET_32BIT || TARGET_HAVE_MOVT)
5828 {
5829 /* Everything except mem = const or mem = mem can be done easily. */
5830 if (MEM_P (operands[0]))
5831 operands[1] = force_reg (SImode, operands[1]);
5832 if (arm_general_register_operand (operands[0], SImode)
5833 && CONST_INT_P (operands[1])
5834 && !(const_ok_for_arm (INTVAL (operands[1]))
5835 || const_ok_for_arm (~INTVAL (operands[1]))))
5836 {
5837 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
5838 {
5839 emit_insn (gen_rtx_SET (operands[0], operands[1]));
5840 DONE;
5841 }
5842 else
5843 {
5844 arm_split_constant (SET, SImode, NULL_RTX,
5845 INTVAL (operands[1]), operands[0], NULL_RTX,
5846 optimize && can_create_pseudo_p ());
5847 DONE;
5848 }
5849 }
5850 }
5851 else /* Target doesn't have MOVT... */
5852 {
5853 if (can_create_pseudo_p ())
5854 {
5855 if (!REG_P (operands[0]))
5856 operands[1] = force_reg (SImode, operands[1]);
5857 }
5858 }
5859
5860 split_const (operands[1], &base, &offset);
5861 if (INTVAL (offset) != 0
5862 && targetm.cannot_force_const_mem (SImode, operands[1]))
5863 {
5864 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5865 emit_move_insn (tmp, base);
5866 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5867 DONE;
5868 }
5869
5870 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
5871
5872 /* Recognize the case where operand[1] is a reference to thread-local
5873 data and load its address to a register. Offsets have been split off
5874 already. */
5875 if (arm_tls_referenced_p (operands[1]))
5876 operands[1] = legitimize_tls_address (operands[1], tmp);
5877 else if (flag_pic
5878 && (CONSTANT_P (operands[1])
5879 || symbol_mentioned_p (operands[1])
5880 || label_mentioned_p (operands[1])))
5881 operands[1] =
5882 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
5883 }
5884 "
5885 )
5886
5887 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5888 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5889 ;; so this does not matter.
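;;
;; A hedged sketch of the resulting pair for loading the address of "g"
;; (register illustrative):
;;	movw	r0, #:lower16:g		@ from the HIGH part
;;	movt	r0, #:upper16:g		@ from the LO_SUM part (this insn)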
5890 (define_insn "*arm_movt"
5891 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
5892 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
5893 (match_operand:SI 2 "general_operand" "i,i")))]
5894 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
5895 "@
5896 movt%?\t%0, #:upper16:%c2
5897 movt\t%0, #:upper16:%c2"
5898 [(set_attr "arch" "32,v8mb")
5899 (set_attr "predicable" "yes")
5900 (set_attr "length" "4")
5901 (set_attr "type" "alu_sreg")]
5902 )
5903
5904 (define_insn "*arm_movsi_insn"
5905 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5906 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5907 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
5908 && ( register_operand (operands[0], SImode)
5909 || register_operand (operands[1], SImode))"
5910 "@
5911 mov%?\\t%0, %1
5912 mov%?\\t%0, %1
5913 mvn%?\\t%0, #%B1
5914 movw%?\\t%0, %1
5915 ldr%?\\t%0, %1
5916 str%?\\t%1, %0"
5917 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
5918 (set_attr "predicable" "yes")
5919 (set_attr "arch" "*,*,*,v6t2,*,*")
5920 (set_attr "pool_range" "*,*,*,*,4096,*")
5921 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5922 )
5923
5924 (define_split
5925 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5926 (match_operand:SI 1 "const_int_operand" ""))]
5927 "(TARGET_32BIT || TARGET_HAVE_MOVT)
5928 && (!(const_ok_for_arm (INTVAL (operands[1]))
5929 || const_ok_for_arm (~INTVAL (operands[1]))))"
5930 [(clobber (const_int 0))]
5931 "
5932 arm_split_constant (SET, SImode, NULL_RTX,
5933 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5934 DONE;
5935 "
5936 )
5937
5938 ;; A normal way to do (symbol + offset) requires at least three instructions
5939 ;; (depending on how big the offset is), as below:
5940 ;; movw r0, #:lower16:g
5941 ;; movt r0, #:upper16:g
5942 ;; adds r0, #4
5943 ;;
5944 ;; A better way would be:
5945 ;; movw r0, #:lower16:g+4
5946 ;; movt r0, #:upper16:g+4
5947 ;;
5948 ;; The limitation of this approach is that the offset must fit in a 16-bit
5949 ;; signed value, because the current assembler only supports REL-type
5950 ;; relocations for this case. If the more powerful RELA type is supported in
5951 ;; the future, we should update this pattern to use the better sequence.
5952 (define_split
5953 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5954 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
5955 (match_operand:SI 2 "const_int_operand" ""))))]
5956 "TARGET_THUMB
5957 && TARGET_HAVE_MOVT
5958 && arm_disable_literal_pool
5959 && reload_completed
5960 && GET_CODE (operands[1]) == SYMBOL_REF"
5961 [(clobber (const_int 0))]
5962 "
5963 int offset = INTVAL (operands[2]);
5964
5965 if (offset < -0x8000 || offset > 0x7fff)
5966 {
5967 arm_emit_movpair (operands[0], operands[1]);
5968 emit_insn (gen_rtx_SET (operands[0],
5969 gen_rtx_PLUS (SImode, operands[0], operands[2])));
5970 }
5971 else
5972 {
5973 rtx op = gen_rtx_CONST (SImode,
5974 gen_rtx_PLUS (SImode, operands[1], operands[2]));
5975 arm_emit_movpair (operands[0], op);
5976 }
5977 "
5978 )
5979
5980 ;; Split symbol_refs at a later stage (after cprop), instead of generating
5981 ;; the movt/movw pair directly at expand time. Otherwise the corresponding
5982 ;; high_sum and lo_sum would be merged back into a memory load at cprop.
5983 ;; When the default is to prefer movt/movw over a load from the constant
5984 ;; pool, this gives better performance.
5985 (define_split
5986 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5987 (match_operand:SI 1 "general_operand" ""))]
5988 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
5989 && !target_word_relocations
5990 && !arm_tls_referenced_p (operands[1])"
5991 [(clobber (const_int 0))]
5992 {
5993 arm_emit_movpair (operands[0], operands[1]);
5994 DONE;
5995 })
5996
5997 ;; When generating PIC, we need to load the symbol offset into a register.
5998 ;; So that the optimizer does not confuse this with a normal symbol load,
5999 ;; we use an unspec. The offset will be loaded from a constant pool entry,
6000 ;; since that is the only type of relocation we can use.
6001
6002 ;; Wrap calculation of the whole PIC address in a single pattern for the
6003 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
6004 ;; a PIC address involves two loads from memory, so we want to CSE it
6005 ;; as often as possible.
6006 ;; This pattern will be split into one of the pic_load_addr_* patterns
6007 ;; and a move after GCSE optimizations.
6008 ;;
6009 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
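;;
;; A hedged sketch of the two loads after splitting (register names are
;; illustrative; operand 1 is whichever register holds the PIC base):
;;	ldr	r3, .LCP		@ pic_load_addr_*: GOT offset from the pool
;;	ldr	r0, [r9, r3]		@ load the symbol's address from the GOT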
6010 (define_expand "calculate_pic_address"
6011 [(set (match_operand:SI 0 "register_operand")
6012 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
6013 (unspec:SI [(match_operand:SI 2 "" "")]
6014 UNSPEC_PIC_SYM))))]
6015 "flag_pic"
6016 )
6017
6018 ;; Split calculate_pic_address into pic_load_addr_* and a move.
6019 (define_split
6020 [(set (match_operand:SI 0 "register_operand" "")
6021 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
6022 (unspec:SI [(match_operand:SI 2 "" "")]
6023 UNSPEC_PIC_SYM))))]
6024 "flag_pic"
6025 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
6026 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
6027 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
6028 )
6029
6030 ;; operand1 is the memory address to go into
6031 ;; pic_load_addr_32bit.
6032 ;; operand2 is the PIC label to be emitted
6033 ;; from pic_add_dot_plus_eight.
6034 ;; We do this to allow hoisting of the entire insn.
6035 (define_insn_and_split "pic_load_addr_unified"
6036 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
6037 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
6038 (match_operand:SI 2 "" "")]
6039 UNSPEC_PIC_UNIFIED))]
6040 "flag_pic"
6041 "#"
6042 "&& reload_completed"
6043 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
6044 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
6045 (match_dup 2)] UNSPEC_PIC_BASE))]
6046 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
6047 [(set_attr "type" "load_4,load_4,load_4")
6048 (set_attr "pool_range" "4096,4094,1022")
6049 (set_attr "neg_pool_range" "4084,0,0")
6050 (set_attr "arch" "a,t2,t1")
6051 (set_attr "length" "8,6,4")]
6052 )
6053
6054 ;; The rather odd constraints on the following are to force reload to leave
6055 ;; the insn alone, and to force the minipool generation pass to then move
6056 ;; the GOT symbol to memory.
6057
6058 (define_insn "pic_load_addr_32bit"
6059 [(set (match_operand:SI 0 "s_register_operand" "=r")
6060 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6061 "TARGET_32BIT && flag_pic"
6062 "ldr%?\\t%0, %1"
6063 [(set_attr "type" "load_4")
6064 (set (attr "pool_range")
6065 (if_then_else (eq_attr "is_thumb" "no")
6066 (const_int 4096)
6067 (const_int 4094)))
6068 (set (attr "neg_pool_range")
6069 (if_then_else (eq_attr "is_thumb" "no")
6070 (const_int 4084)
6071 (const_int 0)))]
6072 )
6073
6074 (define_insn "pic_load_addr_thumb1"
6075 [(set (match_operand:SI 0 "s_register_operand" "=l")
6076 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6077 "TARGET_THUMB1 && flag_pic"
6078 "ldr\\t%0, %1"
6079 [(set_attr "type" "load_4")
6080 (set (attr "pool_range") (const_int 1018))]
6081 )
6082
6083 (define_insn "pic_add_dot_plus_four"
6084 [(set (match_operand:SI 0 "register_operand" "=r")
6085 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
6086 (const_int 4)
6087 (match_operand 2 "" "")]
6088 UNSPEC_PIC_BASE))]
6089 "TARGET_THUMB"
6090 "*
6091 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6092 INTVAL (operands[2]));
6093 return \"add\\t%0, %|pc\";
6094 "
6095 [(set_attr "length" "2")
6096 (set_attr "type" "alu_sreg")]
6097 )
6098
6099 (define_insn "pic_add_dot_plus_eight"
6100 [(set (match_operand:SI 0 "register_operand" "=r")
6101 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6102 (const_int 8)
6103 (match_operand 2 "" "")]
6104 UNSPEC_PIC_BASE))]
6105 "TARGET_ARM"
6106 "*
6107 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6108 INTVAL (operands[2]));
6109 return \"add%?\\t%0, %|pc, %1\";
6110 "
6111 [(set_attr "predicable" "yes")
6112 (set_attr "type" "alu_sreg")]
6113 )
6114
6115 (define_insn "tls_load_dot_plus_eight"
6116 [(set (match_operand:SI 0 "register_operand" "=r")
6117 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6118 (const_int 8)
6119 (match_operand 2 "" "")]
6120 UNSPEC_PIC_BASE)))]
6121 "TARGET_ARM"
6122 "*
6123 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6124 INTVAL (operands[2]));
6125 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
6126 "
6127 [(set_attr "predicable" "yes")
6128 (set_attr "type" "load_4")]
6129 )
6130
6131 ;; PIC references to local variables can generate pic_add_dot_plus_eight
6132 ;; followed by a load. These sequences can be crunched down to
6133 ;; tls_load_dot_plus_eight by a peephole.
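;;
;; A hedged sketch (registers illustrative): the pair
;;	add	r3, pc, r2
;;	ldr	r0, [r3]
;; can be crunched down, when r3 is dead afterwards, to
;;	ldr	r0, [pc, r2]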
6134
6135 (define_peephole2
6136 [(set (match_operand:SI 0 "register_operand" "")
6137 (unspec:SI [(match_operand:SI 3 "register_operand" "")
6138 (const_int 8)
6139 (match_operand 1 "" "")]
6140 UNSPEC_PIC_BASE))
6141 (set (match_operand:SI 2 "arm_general_register_operand" "")
6142 (mem:SI (match_dup 0)))]
6143 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
6144 [(set (match_dup 2)
6145 (mem:SI (unspec:SI [(match_dup 3)
6146 (const_int 8)
6147 (match_dup 1)]
6148 UNSPEC_PIC_BASE)))]
6149 ""
6150 )
6151
6152 (define_insn "pic_offset_arm"
6153 [(set (match_operand:SI 0 "register_operand" "=r")
6154 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
6155 (unspec:SI [(match_operand:SI 2 "" "X")]
6156 UNSPEC_PIC_OFFSET))))]
6157 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
6158 "ldr%?\\t%0, [%1,%2]"
6159 [(set_attr "type" "load_4")]
6160 )
6161
6162 (define_expand "builtin_setjmp_receiver"
6163 [(label_ref (match_operand 0 "" ""))]
6164 "flag_pic"
6165 "
6166 {
6167 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
6168 register. */
6169 if (arm_pic_register != INVALID_REGNUM)
6170 arm_load_pic_register (1UL << 3, NULL_RTX);
6171 DONE;
6172 }")
6173
6174 ;; If copying one reg to another we can set the condition codes according to
6175 ;; its value. Such a move is common after a return from a subroutine when
6176 ;; the result is being tested against zero.
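;;
;; A hedged sketch: instead of emitting
;;	mov	r0, r1
;;	cmp	r1, #0
;; we can emit the single instruction
;;	subs	r0, r1, #0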
6177
6178 (define_insn "*movsi_compare0"
6179 [(set (reg:CC CC_REGNUM)
6180 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
6181 (const_int 0)))
6182 (set (match_operand:SI 0 "s_register_operand" "=r,r")
6183 (match_dup 1))]
6184 "TARGET_32BIT"
6185 "@
6186 cmp%?\\t%0, #0
6187 subs%?\\t%0, %1, #0"
6188 [(set_attr "conds" "set")
6189 (set_attr "type" "alus_imm,alus_imm")]
6190 )
6191
6192 ;; Subroutine to store a half word from a register into memory.
6193 ;; Operand 0 is the source register (HImode)
6194 ;; Operand 1 is the destination address in a register (SImode)
6195
6196 ;; In both this routine and the next, we must be careful not to spill
6197 ;; a memory address of reg+large_const into a separate PLUS insn, since this
6198 ;; can generate unrecognizable rtl.
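;;
;; A hedged sketch of the little-endian expansion (registers illustrative):
;;	strb	r0, [r1]		@ store the low byte
;;	mov	r2, r0, asr #8		@ extract the high byte
;;	strb	r2, [r1, #1]		@ store the high byte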
6199
6200 (define_expand "storehi"
6201 [;; store the low byte
6202 (set (match_operand 1 "" "") (match_dup 3))
6203 ;; extract the high byte
6204 (set (match_dup 2)
6205 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6206 ;; store the high byte
6207 (set (match_dup 4) (match_dup 5))]
6208 "TARGET_ARM"
6209 "
6210 {
6211 rtx op1 = operands[1];
6212 rtx addr = XEXP (op1, 0);
6213 enum rtx_code code = GET_CODE (addr);
6214
6215 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6216 || code == MINUS)
6217 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
6218
6219 operands[4] = adjust_address (op1, QImode, 1);
6220 operands[1] = adjust_address (operands[1], QImode, 0);
6221 operands[3] = gen_lowpart (QImode, operands[0]);
6222 operands[0] = gen_lowpart (SImode, operands[0]);
6223 operands[2] = gen_reg_rtx (SImode);
6224 operands[5] = gen_lowpart (QImode, operands[2]);
6225 }"
6226 )
6227
6228 (define_expand "storehi_bigend"
6229 [(set (match_dup 4) (match_dup 3))
6230 (set (match_dup 2)
6231 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6232 (set (match_operand 1 "" "") (match_dup 5))]
6233 "TARGET_ARM"
6234 "
6235 {
6236 rtx op1 = operands[1];
6237 rtx addr = XEXP (op1, 0);
6238 enum rtx_code code = GET_CODE (addr);
6239
6240 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6241 || code == MINUS)
6242 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
6243
6244 operands[4] = adjust_address (op1, QImode, 1);
6245 operands[1] = adjust_address (operands[1], QImode, 0);
6246 operands[3] = gen_lowpart (QImode, operands[0]);
6247 operands[0] = gen_lowpart (SImode, operands[0]);
6248 operands[2] = gen_reg_rtx (SImode);
6249 operands[5] = gen_lowpart (QImode, operands[2]);
6250 }"
6251 )
6252
6253 ;; Subroutine to store a half word integer constant into memory.
6254 (define_expand "storeinthi"
6255 [(set (match_operand 0 "" "")
6256 (match_operand 1 "" ""))
6257 (set (match_dup 3) (match_dup 2))]
6258 "TARGET_ARM"
6259 "
6260 {
6261 HOST_WIDE_INT value = INTVAL (operands[1]);
6262 rtx addr = XEXP (operands[0], 0);
6263 rtx op0 = operands[0];
6264 enum rtx_code code = GET_CODE (addr);
6265
6266 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6267 || code == MINUS)
6268 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
6269
6270 operands[1] = gen_reg_rtx (SImode);
6271 if (BYTES_BIG_ENDIAN)
6272 {
6273 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
6274 if ((value & 255) == ((value >> 8) & 255))
6275 operands[2] = operands[1];
6276 else
6277 {
6278 operands[2] = gen_reg_rtx (SImode);
6279 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
6280 }
6281 }
6282 else
6283 {
6284 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
6285 if ((value & 255) == ((value >> 8) & 255))
6286 operands[2] = operands[1];
6287 else
6288 {
6289 operands[2] = gen_reg_rtx (SImode);
6290 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
6291 }
6292 }
6293
6294 operands[3] = adjust_address (op0, QImode, 1);
6295 operands[0] = adjust_address (operands[0], QImode, 0);
6296 operands[2] = gen_lowpart (QImode, operands[2]);
6297 operands[1] = gen_lowpart (QImode, operands[1]);
6298 }"
6299 )
6300
6301 (define_expand "storehi_single_op"
6302 [(set (match_operand:HI 0 "memory_operand")
6303 (match_operand:HI 1 "general_operand"))]
6304 "TARGET_32BIT && arm_arch4"
6305 "
6306 if (!s_register_operand (operands[1], HImode))
6307 operands[1] = copy_to_mode_reg (HImode, operands[1]);
6308 "
6309 )
6310
6311 (define_expand "movhi"
6312 [(set (match_operand:HI 0 "general_operand")
6313 (match_operand:HI 1 "general_operand"))]
6314 "TARGET_EITHER"
6315 "
6316 gcc_checking_assert (aligned_operand (operands[0], HImode));
6317 gcc_checking_assert (aligned_operand (operands[1], HImode));
6318 if (TARGET_ARM)
6319 {
6320 if (can_create_pseudo_p ())
6321 {
6322 if (MEM_P (operands[0]))
6323 {
6324 if (arm_arch4)
6325 {
6326 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
6327 DONE;
6328 }
6329 if (CONST_INT_P (operands[1]))
6330 emit_insn (gen_storeinthi (operands[0], operands[1]));
6331 else
6332 {
6333 if (MEM_P (operands[1]))
6334 operands[1] = force_reg (HImode, operands[1]);
6335 if (BYTES_BIG_ENDIAN)
6336 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
6337 else
6338 emit_insn (gen_storehi (operands[1], operands[0]));
6339 }
6340 DONE;
6341 }
6342 /* Sign extend a constant, and keep it in an SImode reg. */
6343 else if (CONST_INT_P (operands[1]))
6344 {
6345 rtx reg = gen_reg_rtx (SImode);
6346 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6347
6348 /* If the constant is already valid, leave it alone. */
6349 if (!const_ok_for_arm (val))
6350 {
6351 /* If setting all the top bits will make the constant
6352 loadable in a single instruction, then set them.
6353 Otherwise, sign extend the number. */
6354
6355 if (const_ok_for_arm (~(val | ~0xffff)))
6356 val |= ~0xffff;
6357 else if (val & 0x8000)
6358 val |= ~0xffff;
6359 }
6360
6361 emit_insn (gen_movsi (reg, GEN_INT (val)));
6362 operands[1] = gen_lowpart (HImode, reg);
6363 }
6364 else if (arm_arch4 && optimize && can_create_pseudo_p ()
6365 && MEM_P (operands[1]))
6366 {
6367 rtx reg = gen_reg_rtx (SImode);
6368
6369 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6370 operands[1] = gen_lowpart (HImode, reg);
6371 }
6372 else if (!arm_arch4)
6373 {
6374 if (MEM_P (operands[1]))
6375 {
6376 rtx base;
6377 rtx offset = const0_rtx;
6378 rtx reg = gen_reg_rtx (SImode);
6379
6380 if ((REG_P (base = XEXP (operands[1], 0))
6381 || (GET_CODE (base) == PLUS
6382 && (CONST_INT_P (offset = XEXP (base, 1)))
6383 && ((INTVAL(offset) & 1) != 1)
6384 && REG_P (base = XEXP (base, 0))))
6385 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
6386 {
6387 rtx new_rtx;
6388
6389 new_rtx = widen_memory_access (operands[1], SImode,
6390 ((INTVAL (offset) & ~3)
6391 - INTVAL (offset)));
6392 emit_insn (gen_movsi (reg, new_rtx));
6393 if (((INTVAL (offset) & 2) != 0)
6394 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
6395 {
6396 rtx reg2 = gen_reg_rtx (SImode);
6397
6398 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
6399 reg = reg2;
6400 }
6401 }
6402 else
6403 emit_insn (gen_movhi_bytes (reg, operands[1]));
6404
6405 operands[1] = gen_lowpart (HImode, reg);
6406 }
6407 }
6408 }
6409 /* Handle loading a large integer during reload. */
6410 else if (CONST_INT_P (operands[1])
6411 && !const_ok_for_arm (INTVAL (operands[1]))
6412 && !const_ok_for_arm (~INTVAL (operands[1])))
6413 {
6414 /* Writing a constant to memory needs a scratch, which should
6415 be handled with SECONDARY_RELOADs. */
6416 gcc_assert (REG_P (operands[0]));
6417
6418 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6419 emit_insn (gen_movsi (operands[0], operands[1]));
6420 DONE;
6421 }
6422 }
6423 else if (TARGET_THUMB2)
6424 {
6425 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
6426 if (can_create_pseudo_p ())
6427 {
6428 if (!REG_P (operands[0]))
6429 operands[1] = force_reg (HImode, operands[1]);
6430 /* Zero extend a constant, and keep it in an SImode reg. */
6431 else if (CONST_INT_P (operands[1]))
6432 {
6433 rtx reg = gen_reg_rtx (SImode);
6434 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6435
6436 emit_insn (gen_movsi (reg, GEN_INT (val)));
6437 operands[1] = gen_lowpart (HImode, reg);
6438 }
6439 }
6440 }
6441 else /* TARGET_THUMB1 */
6442 {
6443 if (can_create_pseudo_p ())
6444 {
6445 if (CONST_INT_P (operands[1]))
6446 {
6447 rtx reg = gen_reg_rtx (SImode);
6448
6449 emit_insn (gen_movsi (reg, operands[1]));
6450 operands[1] = gen_lowpart (HImode, reg);
6451 }
6452
6453 /* ??? We shouldn't really get invalid addresses here, but this can
6454 happen if we are passed a SP (never OK for HImode/QImode) or
6455 virtual register (also rejected as illegitimate for HImode/QImode)
6456 relative address. */
6457 /* ??? This should perhaps be fixed elsewhere, for instance, in
6458 fixup_stack_1, by checking for other kinds of invalid addresses,
6459 e.g. a bare reference to a virtual register. This may confuse the
6460 alpha though, which must handle this case differently. */
6461 if (MEM_P (operands[0])
6462 && !memory_address_p (GET_MODE (operands[0]),
6463 XEXP (operands[0], 0)))
6464 operands[0]
6465 = replace_equiv_address (operands[0],
6466 copy_to_reg (XEXP (operands[0], 0)));
6467
6468 if (MEM_P (operands[1])
6469 && !memory_address_p (GET_MODE (operands[1]),
6470 XEXP (operands[1], 0)))
6471 operands[1]
6472 = replace_equiv_address (operands[1],
6473 copy_to_reg (XEXP (operands[1], 0)));
6474
6475 if (MEM_P (operands[1]) && optimize > 0)
6476 {
6477 rtx reg = gen_reg_rtx (SImode);
6478
6479 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6480 operands[1] = gen_lowpart (HImode, reg);
6481 }
6482
6483 if (MEM_P (operands[0]))
6484 operands[1] = force_reg (HImode, operands[1]);
6485 }
6486 else if (CONST_INT_P (operands[1])
6487 && !satisfies_constraint_I (operands[1]))
6488 {
6489 /* Handle loading a large integer during reload. */
6490
6491 /* Writing a constant to memory needs a scratch, which should
6492 be handled with SECONDARY_RELOADs. */
6493 gcc_assert (REG_P (operands[0]));
6494
6495 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6496 emit_insn (gen_movsi (operands[0], operands[1]));
6497 DONE;
6498 }
6499 }
6500 "
6501 )
6502
6503 (define_expand "movhi_bytes"
6504 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
6505 (set (match_dup 3)
6506 (zero_extend:SI (match_dup 6)))
6507 (set (match_operand:SI 0 "" "")
6508 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
6509 "TARGET_ARM"
6510 "
6511 {
6512 rtx mem1, mem2;
6513 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
6514
6515 mem1 = change_address (operands[1], QImode, addr);
6516 mem2 = change_address (operands[1], QImode,
6517 plus_constant (Pmode, addr, 1));
6518 operands[0] = gen_lowpart (SImode, operands[0]);
6519 operands[1] = mem1;
6520 operands[2] = gen_reg_rtx (SImode);
6521 operands[3] = gen_reg_rtx (SImode);
6522 operands[6] = mem2;
6523
6524 if (BYTES_BIG_ENDIAN)
6525 {
6526 operands[4] = operands[2];
6527 operands[5] = operands[3];
6528 }
6529 else
6530 {
6531 operands[4] = operands[3];
6532 operands[5] = operands[2];
6533 }
6534 }"
6535 )
6536
6537 (define_expand "movhi_bigend"
6538 [(set (match_dup 2)
6539 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
6540 (const_int 16)))
6541 (set (match_dup 3)
6542 (ashiftrt:SI (match_dup 2) (const_int 16)))
6543 (set (match_operand:HI 0 "s_register_operand")
6544 (match_dup 4))]
6545 "TARGET_ARM"
6546 "
6547 operands[2] = gen_reg_rtx (SImode);
6548 operands[3] = gen_reg_rtx (SImode);
6549 operands[4] = gen_lowpart (HImode, operands[3]);
6550 "
6551 )
6552
6553 ;; Pattern to recognize the insn generated by the default case above.
6554 (define_insn "*movhi_insn_arch4"
6555 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
6556 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))]
6557 "TARGET_ARM
6558 && arm_arch4 && !TARGET_HARD_FLOAT
6559 && (register_operand (operands[0], HImode)
6560 || register_operand (operands[1], HImode))"
6561 "@
6562 mov%?\\t%0, %1\\t%@ movhi
6563 mvn%?\\t%0, #%B1\\t%@ movhi
6564 movw%?\\t%0, %L1\\t%@ movhi
6565 strh%?\\t%1, %0\\t%@ movhi
6566 ldrh%?\\t%0, %1\\t%@ movhi"
6567 [(set_attr "predicable" "yes")
6568 (set_attr "pool_range" "*,*,*,*,256")
6569 (set_attr "neg_pool_range" "*,*,*,*,244")
6570 (set_attr "arch" "*,*,v6t2,*,*")
6571 (set_attr_alternative "type"
6572 [(if_then_else (match_operand 1 "const_int_operand" "")
6573 (const_string "mov_imm" )
6574 (const_string "mov_reg"))
6575 (const_string "mvn_imm")
6576 (const_string "mov_imm")
6577 (const_string "store_4")
6578 (const_string "load_4")])]
6579 )
6580
6581 (define_insn "*movhi_bytes"
6582 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
6583 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))]
6584 "TARGET_ARM && !TARGET_HARD_FLOAT"
6585 "@
6586 mov%?\\t%0, %1\\t%@ movhi
6587 mov%?\\t%0, %1\\t%@ movhi
6588 mvn%?\\t%0, #%B1\\t%@ movhi"
6589 [(set_attr "predicable" "yes")
6590 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
6591 )
6592
6593 ;; We use a DImode scratch because we may occasionally need an additional
6594 ;; temporary if the address isn't offsettable -- push_reload doesn't seem to
6595 ;; take any notice of the "o" constraints on the reload_memory_operand operand.
6596 ;; The reload_in<m> and reload_out<m> patterns require special constraints
6597 ;; to be correctly handled in the default_secondary_reload function.
6598 (define_expand "reload_outhi"
6599 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
6600 (match_operand:HI 1 "s_register_operand" "r")
6601 (match_operand:DI 2 "s_register_operand" "=&l")])]
6602 "TARGET_EITHER"
6603 "if (TARGET_ARM)
6604 arm_reload_out_hi (operands);
6605 else
6606 thumb_reload_out_hi (operands);
6607 DONE;
6608 "
6609 )
6610
6611 (define_expand "reload_inhi"
6612 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
6613 (match_operand:HI 1 "arm_reload_memory_operand" "o")
6614 (match_operand:DI 2 "s_register_operand" "=&r")])]
6615 "TARGET_EITHER"
6616 "
6617 if (TARGET_ARM)
6618 arm_reload_in_hi (operands);
6619 else
6620 thumb_reload_out_hi (operands);
6621 DONE;
6622 ")
6623
6624 (define_expand "movqi"
6625 [(set (match_operand:QI 0 "general_operand")
6626 (match_operand:QI 1 "general_operand"))]
6627 "TARGET_EITHER"
6628 "
6629 /* Everything except mem = const or mem = mem can be done easily. */
6630
6631 if (can_create_pseudo_p ())
6632 {
6633 if (CONST_INT_P (operands[1]))
6634 {
6635 rtx reg = gen_reg_rtx (SImode);
6636
6637 /* For Thumb we want an unsigned immediate, since then we are more likely
6638 to be able to use a movs insn. */
6639 if (TARGET_THUMB)
6640 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
6641
6642 emit_insn (gen_movsi (reg, operands[1]));
6643 operands[1] = gen_lowpart (QImode, reg);
6644 }
6645
6646 if (TARGET_THUMB)
6647 {
6648 /* ??? We shouldn't really get invalid addresses here, but this can
6649 happen if we are passed a SP (never OK for HImode/QImode) or
6650 virtual register (also rejected as illegitimate for HImode/QImode)
6651 relative address. */
6652 /* ??? This should perhaps be fixed elsewhere, for instance, in
6653 fixup_stack_1, by checking for other kinds of invalid addresses,
6654 e.g. a bare reference to a virtual register. This may confuse the
6655 alpha though, which must handle this case differently. */
6656 if (MEM_P (operands[0])
6657 && !memory_address_p (GET_MODE (operands[0]),
6658 XEXP (operands[0], 0)))
6659 operands[0]
6660 = replace_equiv_address (operands[0],
6661 copy_to_reg (XEXP (operands[0], 0)));
6662 if (MEM_P (operands[1])
6663 && !memory_address_p (GET_MODE (operands[1]),
6664 XEXP (operands[1], 0)))
6665 operands[1]
6666 = replace_equiv_address (operands[1],
6667 copy_to_reg (XEXP (operands[1], 0)));
6668 }
6669
6670 if (MEM_P (operands[1]) && optimize > 0)
6671 {
6672 rtx reg = gen_reg_rtx (SImode);
6673
6674 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
6675 operands[1] = gen_lowpart (QImode, reg);
6676 }
6677
6678 if (MEM_P (operands[0]))
6679 operands[1] = force_reg (QImode, operands[1]);
6680 }
6681 else if (TARGET_THUMB
6682 && CONST_INT_P (operands[1])
6683 && !satisfies_constraint_I (operands[1]))
6684 {
6685 /* Handle loading a large integer during reload. */
6686
6687 /* Writing a constant to memory needs a scratch, which should
6688 be handled with SECONDARY_RELOADs. */
6689 gcc_assert (REG_P (operands[0]));
6690
6691 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6692 emit_insn (gen_movsi (operands[0], operands[1]));
6693 DONE;
6694 }
6695 "
6696 )
6697
6698 (define_insn "*arm_movqi_insn"
6699 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
6700 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
6701 "TARGET_32BIT
6702 && ( register_operand (operands[0], QImode)
6703 || register_operand (operands[1], QImode))"
6704 "@
6705 mov%?\\t%0, %1
6706 mov%?\\t%0, %1
6707 mov%?\\t%0, %1
6708 mov%?\\t%0, %1
6709 mvn%?\\t%0, #%B1
6710 ldrb%?\\t%0, %1
6711 strb%?\\t%1, %0
6712 ldrb%?\\t%0, %1
6713 strb%?\\t%1, %0"
6714 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
6715 (set_attr "predicable" "yes")
6716 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
6717 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
6718 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
6719 )
6720
6721 ;; HFmode moves
6722 (define_expand "movhf"
6723 [(set (match_operand:HF 0 "general_operand")
6724 (match_operand:HF 1 "general_operand"))]
6725 "TARGET_EITHER"
6726 "
6727 gcc_checking_assert (aligned_operand (operands[0], HFmode));
6728 gcc_checking_assert (aligned_operand (operands[1], HFmode));
6729 if (TARGET_32BIT)
6730 {
6731 if (MEM_P (operands[0]))
6732 operands[1] = force_reg (HFmode, operands[1]);
6733 }
6734 else /* TARGET_THUMB1 */
6735 {
6736 if (can_create_pseudo_p ())
6737 {
6738 if (!REG_P (operands[0]))
6739 operands[1] = force_reg (HFmode, operands[1]);
6740 }
6741 }
6742 "
6743 )
6744
6745 (define_insn "*arm32_movhf"
6746 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
6747 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
6748 "TARGET_32BIT && !TARGET_HARD_FLOAT
6749 && ( s_register_operand (operands[0], HFmode)
6750 || s_register_operand (operands[1], HFmode))"
6751 "*
6752 switch (which_alternative)
6753 {
6754 case 0: /* ARM register from memory */
6755 return \"ldrh%?\\t%0, %1\\t%@ __fp16\";
6756 case 1: /* memory from ARM register */
6757 return \"strh%?\\t%1, %0\\t%@ __fp16\";
6758 case 2: /* ARM register from ARM register */
6759 return \"mov%?\\t%0, %1\\t%@ __fp16\";
6760 case 3: /* ARM register from constant */
6761 {
6762 long bits;
6763 rtx ops[4];
6764
6765 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
6766 HFmode);
6767 ops[0] = operands[0];
6768 ops[1] = GEN_INT (bits);
6769 ops[2] = GEN_INT (bits & 0xff00);
6770 ops[3] = GEN_INT (bits & 0x00ff);
6771
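        /* Illustrative note: without movw (pre-ARMv6T2 cores) the 16-bit
           __fp16 bit pattern is assembled from its two bytes, each of which
           is a valid ARM immediate, using a mov followed by an orr.  */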
6772 if (arm_arch_thumb2)
6773 output_asm_insn (\"movw%?\\t%0, %1\", ops);
6774 else
6775 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6776 return \"\";
6777 }
6778 default:
6779 gcc_unreachable ();
6780 }
6781 "
6782 [(set_attr "conds" "unconditional")
6783 (set_attr "type" "load_4,store_4,mov_reg,multiple")
6784 (set_attr "length" "4,4,4,8")
6785 (set_attr "predicable" "yes")]
6786 )
6787
6788 (define_expand "movsf"
6789 [(set (match_operand:SF 0 "general_operand")
6790 (match_operand:SF 1 "general_operand"))]
6791 "TARGET_EITHER"
6792 "
6793 gcc_checking_assert (aligned_operand (operands[0], SFmode));
6794 gcc_checking_assert (aligned_operand (operands[1], SFmode));
6795 if (TARGET_32BIT)
6796 {
6797 if (MEM_P (operands[0]))
6798 operands[1] = force_reg (SFmode, operands[1]);
6799 }
6800 else /* TARGET_THUMB1 */
6801 {
6802 if (can_create_pseudo_p ())
6803 {
6804 if (!REG_P (operands[0]))
6805 operands[1] = force_reg (SFmode, operands[1]);
6806 }
6807 }
6808
6809 /* Cannot load it directly, generate a load with clobber so that it can be
6810 loaded via GPR with MOV / MOVT. */
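  /* Illustrative example (register choice arbitrary): 1.0f has the bit
     pattern 0x3f800000, which the split sequence can build in a core
     register as "movw rN, #0x0000; movt rN, #0x3f80" before transferring
     it to the VFP register.  */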
6811 if (arm_disable_literal_pool
6812 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6813 && CONST_DOUBLE_P (operands[1])
6814 && TARGET_HARD_FLOAT
6815 && !vfp3_const_double_rtx (operands[1]))
6816 {
6817 rtx clobreg = gen_reg_rtx (SFmode);
6818 emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
6819 clobreg));
6820 DONE;
6821 }
6822 "
6823 )
6824
6825 ;; Transform a floating-point move of a constant into a core register into
6826 ;; an SImode operation.
6827 (define_split
6828 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6829 (match_operand:SF 1 "immediate_operand" ""))]
6830 "TARGET_EITHER
6831 && reload_completed
6832 && CONST_DOUBLE_P (operands[1])"
6833 [(set (match_dup 2) (match_dup 3))]
6834 "
6835 operands[2] = gen_lowpart (SImode, operands[0]);
6836 operands[3] = gen_lowpart (SImode, operands[1]);
6837 if (operands[2] == 0 || operands[3] == 0)
6838 FAIL;
6839 "
6840 )
6841
6842 (define_insn "*arm_movsf_soft_insn"
6843 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6844 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6845 "TARGET_32BIT
6846 && TARGET_SOFT_FLOAT
6847 && (!MEM_P (operands[0])
6848 || register_operand (operands[1], SFmode))"
6849 {
6850 switch (which_alternative)
6851 {
6852 case 0: return \"mov%?\\t%0, %1\";
6853 case 1:
6854 /* Cannot load it directly, split to load it via MOV / MOVT. */
6855 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6856 return \"#\";
6857 return \"ldr%?\\t%0, %1\\t%@ float\";
6858 case 2: return \"str%?\\t%1, %0\\t%@ float\";
6859 default: gcc_unreachable ();
6860 }
6861 }
6862 [(set_attr "predicable" "yes")
6863 (set_attr "type" "mov_reg,load_4,store_4")
6864 (set_attr "arm_pool_range" "*,4096,*")
6865 (set_attr "thumb2_pool_range" "*,4094,*")
6866 (set_attr "arm_neg_pool_range" "*,4084,*")
6867 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6868 )
6869
6870 ;; Splitter for the above.
6871 (define_split
6872 [(set (match_operand:SF 0 "s_register_operand")
6873 (match_operand:SF 1 "const_double_operand"))]
6874 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6875 [(const_int 0)]
6876 {
6877 long buf;
6878 real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
6879 rtx cst = gen_int_mode (buf, SImode);
6880 emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
6881 DONE;
6882 }
6883 )
6884
6885 (define_expand "movdf"
6886 [(set (match_operand:DF 0 "general_operand")
6887 (match_operand:DF 1 "general_operand"))]
6888 "TARGET_EITHER"
6889 "
6890 gcc_checking_assert (aligned_operand (operands[0], DFmode));
6891 gcc_checking_assert (aligned_operand (operands[1], DFmode));
6892 if (TARGET_32BIT)
6893 {
6894 if (MEM_P (operands[0]))
6895 operands[1] = force_reg (DFmode, operands[1]);
6896 }
6897 else /* TARGET_THUMB */
6898 {
6899 if (can_create_pseudo_p ())
6900 {
6901 if (!REG_P (operands[0]))
6902 operands[1] = force_reg (DFmode, operands[1]);
6903 }
6904 }
6905
6906 /* Cannot load it directly, generate a load with clobber so that it can be
6907 loaded via GPR with MOV / MOVT. */
6908 if (arm_disable_literal_pool
6909 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6910 && CONSTANT_P (operands[1])
6911 && TARGET_HARD_FLOAT
6912 && !arm_const_double_rtx (operands[1])
6913 && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
6914 {
6915 rtx clobreg = gen_reg_rtx (DFmode);
6916 emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
6917 clobreg));
6918 DONE;
6919 }
6920 "
6921 )
6922
6923 ;; Reloading a DFmode value stored in integer regs to memory can require a

6924 ;; scratch reg.
6925 ;; Another reload_out<m> pattern that requires special constraints.
6926 (define_expand "reload_outdf"
6927 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6928 (match_operand:DF 1 "s_register_operand" "r")
6929 (match_operand:SI 2 "s_register_operand" "=&r")]
6930 "TARGET_THUMB2"
6931 "
6932 {
6933 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6934
6935 if (code == REG)
6936 operands[2] = XEXP (operands[0], 0);
6937 else if (code == POST_INC || code == PRE_DEC)
6938 {
6939 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6940 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6941 emit_insn (gen_movdi (operands[0], operands[1]));
6942 DONE;
6943 }
6944 else if (code == PRE_INC)
6945 {
6946 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6947
6948 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6949 operands[2] = reg;
6950 }
6951 else if (code == POST_DEC)
6952 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6953 else
6954 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6955 XEXP (XEXP (operands[0], 0), 1)));
6956
6957 emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
6958 operands[1]));
6959
6960 if (code == POST_DEC)
6961 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6962
6963 DONE;
6964 }"
6965 )
6966
6967 (define_insn "*movdf_soft_insn"
6968 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6969 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6970 "TARGET_32BIT && TARGET_SOFT_FLOAT
6971 && ( register_operand (operands[0], DFmode)
6972 || register_operand (operands[1], DFmode))"
6973 "*
6974 switch (which_alternative)
6975 {
6976 case 0:
6977 case 1:
6978 case 2:
6979 return \"#\";
6980 case 3:
6981 /* Cannot load it directly, split to load it via MOV / MOVT. */
6982 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6983 return \"#\";
6984 /* Fall through. */
6985 default:
6986 return output_move_double (operands, true, NULL);
6987 }
6988 "
6989 [(set_attr "length" "8,12,16,8,8")
6990 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6991 (set_attr "arm_pool_range" "*,*,*,1020,*")
6992 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
6993 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6994 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6995 )
6996
6997 ;; Splitter for the above.
6998 (define_split
6999 [(set (match_operand:DF 0 "s_register_operand")
7000 (match_operand:DF 1 "const_double_operand"))]
7001 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
7002 [(const_int 0)]
7003 {
7004 long buf[2];
7005 int order = BYTES_BIG_ENDIAN ? 1 : 0;
7006 real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
7007 unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
7008 ival |= (zext_hwi (buf[1 - order], 32) << 32);
7009 rtx cst = gen_int_mode (ival, DImode);
7010 emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
7011 DONE;
7012 }
7013 )
7014 \f
7015
7016 ;; load- and store-multiple insns
7017 ;; The arm can load/store any set of registers, provided that they are in
7018 ;; ascending order, but these expanders assume a contiguous set.
7019
7020 (define_expand "load_multiple"
7021 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7022 (match_operand:SI 1 "" ""))
7023 (use (match_operand:SI 2 "" ""))])]
7024 "TARGET_32BIT"
7025 {
7026 HOST_WIDE_INT offset = 0;
7027
7028 /* Support only fixed-point (core) registers. */
7029 if (!CONST_INT_P (operands[2])
7030 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
7031 || INTVAL (operands[2]) < 2
7032 || !MEM_P (operands[1])
7033 || !REG_P (operands[0])
7034 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
7035 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7036 FAIL;
7037
7038 operands[3]
7039 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
7040 INTVAL (operands[2]),
7041 force_reg (SImode, XEXP (operands[1], 0)),
7042 FALSE, operands[1], &offset);
7043 })
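;; Illustrative example (register choice arbitrary): loading the three
;; consecutive words at the address held in r1 into r4-r6 can be emitted as a
;; single "ldm r1, {r4, r5, r6}", since the register numbers and the memory
;; offsets ascend together.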
7044
7045 (define_expand "store_multiple"
7046 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7047 (match_operand:SI 1 "" ""))
7048 (use (match_operand:SI 2 "" ""))])]
7049 "TARGET_32BIT"
7050 {
7051 HOST_WIDE_INT offset = 0;
7052
7053 /* Support only fixed point registers. */
7054 if (!CONST_INT_P (operands[2])
7055 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
7056 || INTVAL (operands[2]) < 2
7057 || !REG_P (operands[1])
7058 || !MEM_P (operands[0])
7059 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
7060 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7061 FAIL;
7062
7063 operands[3]
7064 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
7065 INTVAL (operands[2]),
7066 force_reg (SImode, XEXP (operands[0], 0)),
7067 FALSE, operands[0], &offset);
7068 })
7069
7070
7071 (define_expand "setmemsi"
7072 [(match_operand:BLK 0 "general_operand")
7073 (match_operand:SI 1 "const_int_operand")
7074 (match_operand:SI 2 "const_int_operand")
7075 (match_operand:SI 3 "const_int_operand")]
7076 "TARGET_32BIT"
7077 {
7078 if (arm_gen_setmem (operands))
7079 DONE;
7080
7081 FAIL;
7082 })
7083
7084
7085 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
7086 ;; We could let this apply for blocks of less than this, but it clobbers so
7087 ;; many registers that there is then probably a better way.
7088
7089 (define_expand "cpymemqi"
7090 [(match_operand:BLK 0 "general_operand")
7091 (match_operand:BLK 1 "general_operand")
7092 (match_operand:SI 2 "const_int_operand")
7093 (match_operand:SI 3 "const_int_operand")]
7094 ""
7095 "
7096 if (TARGET_32BIT)
7097 {
7098 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
7099 && !optimize_function_for_size_p (cfun))
7100 {
7101 if (gen_cpymem_ldrd_strd (operands))
7102 DONE;
7103 FAIL;
7104 }
7105
7106 if (arm_gen_cpymemqi (operands))
7107 DONE;
7108 FAIL;
7109 }
7110 else /* TARGET_THUMB1 */
7111 {
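      /* Thumb-1 only expands this inline when the alignment operand is
         exactly 4 and the length is at most 48 bytes; anything else FAILs
         and is left to the generic code.  */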
7112 if ( INTVAL (operands[3]) != 4
7113 || INTVAL (operands[2]) > 48)
7114 FAIL;
7115
7116 thumb_expand_cpymemqi (operands);
7117 DONE;
7118 }
7119 "
7120 )
7121 \f
7122
7123 ;; Compare & branch insns
7124 ;; The range calculations are done as follows:
7125 ;; For forward branches, the address calculation returns the address of
7126 ;; the next instruction. This is 2 beyond the branch instruction.
7127 ;; For backward branches, the address calculation returns the address of
7128 ;; the first instruction in this pattern (cmp). This is 2 before the branch
7129 ;; instruction for the shortest sequence, and 4 before the branch instruction
7130 ;; if we have to jump around an unconditional branch.
7131 ;; To the basic branch range the PC offset must be added (this is +4).
7132 ;; So for forward branches we have
7133 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
7134 ;; And for backward branches we have
7135 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
7136 ;;
7137 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
7138 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
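;; Worked example using the formulas above: for a forward 'b' the limit is
;; 2046 - 2 + 4 = 2048, and for a backward 'b' that has to jump around an
;; unconditional branch it is -2048 - (-4) + 4 = -2040, giving the quoted
;; (-2040->2048) range.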
7139
7140 (define_expand "cbranchsi4"
7141 [(set (pc) (if_then_else
7142 (match_operator 0 "expandable_comparison_operator"
7143 [(match_operand:SI 1 "s_register_operand")
7144 (match_operand:SI 2 "nonmemory_operand")])
7145 (label_ref (match_operand 3 "" ""))
7146 (pc)))]
7147 "TARGET_EITHER"
7148 "
7149 if (!TARGET_THUMB1)
7150 {
7151 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7152 FAIL;
7153 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7154 operands[3]));
7155 DONE;
7156 }
7157 if (thumb1_cmpneg_operand (operands[2], SImode))
7158 {
7159 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
7160 operands[3], operands[0]));
7161 DONE;
7162 }
7163 if (!thumb1_cmp_operand (operands[2], SImode))
7164 operands[2] = force_reg (SImode, operands[2]);
7165 ")
7166
7167 (define_expand "cbranchsf4"
7168 [(set (pc) (if_then_else
7169 (match_operator 0 "expandable_comparison_operator"
7170 [(match_operand:SF 1 "s_register_operand")
7171 (match_operand:SF 2 "vfp_compare_operand")])
7172 (label_ref (match_operand 3 "" ""))
7173 (pc)))]
7174 "TARGET_32BIT && TARGET_HARD_FLOAT"
7175 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7176 operands[3])); DONE;"
7177 )
7178
7179 (define_expand "cbranchdf4"
7180 [(set (pc) (if_then_else
7181 (match_operator 0 "expandable_comparison_operator"
7182 [(match_operand:DF 1 "s_register_operand")
7183 (match_operand:DF 2 "vfp_compare_operand")])
7184 (label_ref (match_operand 3 "" ""))
7185 (pc)))]
7186 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7187 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7188 operands[3])); DONE;"
7189 )
7190
7191 (define_expand "cbranchdi4"
7192 [(set (pc) (if_then_else
7193 (match_operator 0 "expandable_comparison_operator"
7194 [(match_operand:DI 1 "s_register_operand")
7195 (match_operand:DI 2 "reg_or_int_operand")])
7196 (label_ref (match_operand 3 "" ""))
7197 (pc)))]
7198 "TARGET_32BIT"
7199 "{
7200 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7201 FAIL;
7202 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7203 operands[3]));
7204 DONE;
7205 }"
7206 )
7207
7208 ;; Comparison and test insns
7209
7210 (define_insn "*arm_cmpsi_insn"
7211 [(set (reg:CC CC_REGNUM)
7212 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
7213 (match_operand:SI 1 "arm_add_operand" "Py,r,r,I,L")))]
7214 "TARGET_32BIT"
7215 "@
7216 cmp%?\\t%0, %1
7217 cmp%?\\t%0, %1
7218 cmp%?\\t%0, %1
7219 cmp%?\\t%0, %1
7220 cmn%?\\t%0, #%n1"
7221 [(set_attr "conds" "set")
7222 (set_attr "arch" "t2,t2,any,any,any")
7223 (set_attr "length" "2,2,4,4,4")
7224 (set_attr "predicable" "yes")
7225 (set_attr "predicable_short_it" "yes,yes,yes,no,no")
7226 (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
7227 )
7228
7229 (define_insn "*cmpsi_shiftsi"
7230 [(set (reg:CC CC_REGNUM)
7231 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r,r")
7232 (match_operator:SI 3 "shift_operator"
7233 [(match_operand:SI 1 "s_register_operand" "r,r,r")
7234 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])))]
7235 "TARGET_32BIT"
7236 "cmp\\t%0, %1%S3"
7237 [(set_attr "conds" "set")
7238 (set_attr "shift" "1")
7239 (set_attr "arch" "32,a,a")
7240 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
7241
7242 (define_insn "*cmpsi_shiftsi_swp"
7243 [(set (reg:CC_SWP CC_REGNUM)
7244 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7245 [(match_operand:SI 1 "s_register_operand" "r,r,r")
7246 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])
7247 (match_operand:SI 0 "s_register_operand" "r,r,r")))]
7248 "TARGET_32BIT"
7249 "cmp%?\\t%0, %1%S3"
7250 [(set_attr "conds" "set")
7251 (set_attr "shift" "1")
7252 (set_attr "arch" "32,a,a")
7253 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
7254
7255 (define_insn "*arm_cmpsi_negshiftsi_si"
7256 [(set (reg:CC_Z CC_REGNUM)
7257 (compare:CC_Z
7258 (neg:SI (match_operator:SI 1 "shift_operator"
7259 [(match_operand:SI 2 "s_register_operand" "r")
7260 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7261 (match_operand:SI 0 "s_register_operand" "r")))]
7262 "TARGET_ARM"
7263 "cmn%?\\t%0, %2%S1"
7264 [(set_attr "conds" "set")
7265 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7266 (const_string "alus_shift_imm")
7267 (const_string "alus_shift_reg")))
7268 (set_attr "predicable" "yes")]
7269 )
7270
7271 ; This insn allows redundant compares to be removed by cse; nothing should
7272 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7273 ; is deleted later on. The match_dup will match the mode here, so that
7274 ; mode changes of the condition codes aren't lost by this even though we don't
7275 ; specify what they are.
7276
7277 (define_insn "*deleted_compare"
7278 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7279 "TARGET_32BIT"
7280 "\\t%@ deleted compare"
7281 [(set_attr "conds" "set")
7282 (set_attr "length" "0")
7283 (set_attr "type" "no_insn")]
7284 )
7285
7286 \f
7287 ;; Conditional branch insns
7288
7289 (define_expand "cbranch_cc"
7290 [(set (pc)
7291 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7292 (match_operand 2 "" "")])
7293 (label_ref (match_operand 3 "" ""))
7294 (pc)))]
7295 "TARGET_32BIT"
7296 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7297 operands[1], operands[2], NULL_RTX);
7298 operands[2] = const0_rtx;"
7299 )
7300
7301 ;;
7302 ;; Patterns to match conditional branch insns.
7303 ;;
7304
7305 (define_insn "arm_cond_branch"
7306 [(set (pc)
7307 (if_then_else (match_operator 1 "arm_comparison_operator"
7308 [(match_operand 2 "cc_register" "") (const_int 0)])
7309 (label_ref (match_operand 0 "" ""))
7310 (pc)))]
7311 "TARGET_32BIT"
7312 "*
7313 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7314 {
7315 arm_ccfsm_state += 2;
7316 return \"\";
7317 }
7318 return \"b%d1\\t%l0\";
7319 "
7320 [(set_attr "conds" "use")
7321 (set_attr "type" "branch")
7322 (set (attr "length")
7323 (if_then_else
7324 (and (match_test "TARGET_THUMB2")
7325 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7326 (le (minus (match_dup 0) (pc)) (const_int 256))))
7327 (const_int 2)
7328 (const_int 4)))]
7329 )
7330
7331 (define_insn "*arm_cond_branch_reversed"
7332 [(set (pc)
7333 (if_then_else (match_operator 1 "arm_comparison_operator"
7334 [(match_operand 2 "cc_register" "") (const_int 0)])
7335 (pc)
7336 (label_ref (match_operand 0 "" ""))))]
7337 "TARGET_32BIT"
7338 "*
7339 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7340 {
7341 arm_ccfsm_state += 2;
7342 return \"\";
7343 }
7344 return \"b%D1\\t%l0\";
7345 "
7346 [(set_attr "conds" "use")
7347 (set_attr "type" "branch")
7348 (set (attr "length")
7349 (if_then_else
7350 (and (match_test "TARGET_THUMB2")
7351 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7352 (le (minus (match_dup 0) (pc)) (const_int 256))))
7353 (const_int 2)
7354 (const_int 4)))]
7355 )
7356
7357 \f
7358
7359 ; scc insns
7360
7361 (define_expand "cstore_cc"
7362 [(set (match_operand:SI 0 "s_register_operand")
7363 (match_operator:SI 1 "" [(match_operand 2 "" "")
7364 (match_operand 3 "" "")]))]
7365 "TARGET_32BIT"
7366 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7367 operands[2], operands[3], NULL_RTX);
7368 operands[3] = const0_rtx;"
7369 )
7370
7371 (define_insn_and_split "*mov_scc"
7372 [(set (match_operand:SI 0 "s_register_operand" "=r")
7373 (match_operator:SI 1 "arm_comparison_operator_mode"
7374 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7375 "TARGET_ARM"
7376 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7377 "TARGET_ARM"
7378 [(set (match_dup 0)
7379 (if_then_else:SI (match_dup 1)
7380 (const_int 1)
7381 (const_int 0)))]
7382 ""
7383 [(set_attr "conds" "use")
7384 (set_attr "length" "8")
7385 (set_attr "type" "multiple")]
7386 )
7387
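;; With both source operands the same, SBC computes 0 - NOT(C), i.e. -1 when
;; the carry flag is clear (a borrow is pending) and 0 when it is set, which
;; is exactly the negation of the borrow operand.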
7388 (define_insn "*negscc_borrow"
7389 [(set (match_operand:SI 0 "s_register_operand" "=r")
7390 (neg:SI (match_operand:SI 1 "arm_borrow_operation" "")))]
7391 "TARGET_32BIT"
7392 "sbc\\t%0, %0, %0"
7393 [(set_attr "conds" "use")
7394 (set_attr "length" "4")
7395 (set_attr "type" "adc_reg")]
7396 )
7397
7398 (define_insn_and_split "*mov_negscc"
7399 [(set (match_operand:SI 0 "s_register_operand" "=r")
7400 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
7401 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7402 "TARGET_ARM && !arm_borrow_operation (operands[1], SImode)"
7403 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7404 "&& true"
7405 [(set (match_dup 0)
7406 (if_then_else:SI (match_dup 1)
7407 (match_dup 3)
7408 (const_int 0)))]
7409 {
7410 operands[3] = GEN_INT (~0);
7411 }
7412 [(set_attr "conds" "use")
7413 (set_attr "length" "8")
7414 (set_attr "type" "multiple")]
7415 )
7416
7417 (define_insn_and_split "*mov_notscc"
7418 [(set (match_operand:SI 0 "s_register_operand" "=r")
7419 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7420 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7421 "TARGET_ARM"
7422 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7423 "TARGET_ARM"
7424 [(set (match_dup 0)
7425 (if_then_else:SI (match_dup 1)
7426 (match_dup 3)
7427 (match_dup 4)))]
7428 {
7429 operands[3] = GEN_INT (~1);
7430 operands[4] = GEN_INT (~0);
7431 }
7432 [(set_attr "conds" "use")
7433 (set_attr "length" "8")
7434 (set_attr "type" "multiple")]
7435 )
7436
7437 (define_expand "cstoresi4"
7438 [(set (match_operand:SI 0 "s_register_operand")
7439 (match_operator:SI 1 "expandable_comparison_operator"
7440 [(match_operand:SI 2 "s_register_operand")
7441 (match_operand:SI 3 "reg_or_int_operand")]))]
7442 "TARGET_32BIT || TARGET_THUMB1"
7443 "{
7444 rtx op3, scratch, scratch2;
7445
7446 if (!TARGET_THUMB1)
7447 {
7448 if (!arm_add_operand (operands[3], SImode))
7449 operands[3] = force_reg (SImode, operands[3]);
7450 emit_insn (gen_cstore_cc (operands[0], operands[1],
7451 operands[2], operands[3]));
7452 DONE;
7453 }
7454
7455 if (operands[3] == const0_rtx)
7456 {
7457 switch (GET_CODE (operands[1]))
7458 {
7459 case EQ:
7460 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7461 break;
7462
7463 case NE:
7464 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7465 break;
7466
7467 case LE:
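          /* x <= 0 exactly when the sign bit of (x | (x - 1)) is set:
             x - 1 is negative for x == 0, x itself is negative for x < 0,
             and both are non-negative for x > 0.  */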
7468 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7469 NULL_RTX, 0, OPTAB_WIDEN);
7470 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7471 NULL_RTX, 0, OPTAB_WIDEN);
7472 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7473 operands[0], 1, OPTAB_WIDEN);
7474 break;
7475
7476 case GE:
7477 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7478 NULL_RTX, 1);
7479 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7480 NULL_RTX, 1, OPTAB_WIDEN);
7481 break;
7482
7483 case GT:
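          /* x > 0 exactly when (x >> 31) - x is negative: the arithmetic
             shift yields 0 for x > 0 (so the difference is -x < 0), 0 for
             x == 0 (difference 0), and -1 for x < 0 (difference -1 - x,
             which is non-negative).  */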
7484 scratch = expand_binop (SImode, ashr_optab, operands[2],
7485 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7486 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7487 NULL_RTX, 0, OPTAB_WIDEN);
7488 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7489 0, OPTAB_WIDEN);
7490 break;
7491
7492 /* LT is handled by generic code. No need for unsigned with 0. */
7493 default:
7494 FAIL;
7495 }
7496 DONE;
7497 }
7498
7499 switch (GET_CODE (operands[1]))
7500 {
7501 case EQ:
7502 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7503 NULL_RTX, 0, OPTAB_WIDEN);
7504 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7505 break;
7506
7507 case NE:
7508 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7509 NULL_RTX, 0, OPTAB_WIDEN);
7510 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7511 break;
7512
7513 case LE:
7514 op3 = force_reg (SImode, operands[3]);
7515
7516 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7517 NULL_RTX, 1, OPTAB_WIDEN);
7518 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7519 NULL_RTX, 0, OPTAB_WIDEN);
7520 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7521 op3, operands[2]));
7522 break;
7523
7524 case GE:
7525 op3 = operands[3];
7526 if (!thumb1_cmp_operand (op3, SImode))
7527 op3 = force_reg (SImode, op3);
7528 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7529 NULL_RTX, 0, OPTAB_WIDEN);
7530 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7531 NULL_RTX, 1, OPTAB_WIDEN);
7532 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7533 operands[2], op3));
7534 break;
7535
7536 case LEU:
7537 op3 = force_reg (SImode, operands[3]);
7538 scratch = force_reg (SImode, const0_rtx);
7539 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7540 op3, operands[2]));
7541 break;
7542
7543 case GEU:
7544 op3 = operands[3];
7545 if (!thumb1_cmp_operand (op3, SImode))
7546 op3 = force_reg (SImode, op3);
7547 scratch = force_reg (SImode, const0_rtx);
7548 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7549 operands[2], op3));
7550 break;
7551
7552 case LTU:
7553 op3 = operands[3];
7554 if (!thumb1_cmp_operand (op3, SImode))
7555 op3 = force_reg (SImode, op3);
7556 scratch = gen_reg_rtx (SImode);
7557 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
7558 break;
7559
7560 case GTU:
7561 op3 = force_reg (SImode, operands[3]);
7562 scratch = gen_reg_rtx (SImode);
7563 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
7564 break;
7565
7566 /* No good sequences for GT, LT. */
7567 default:
7568 FAIL;
7569 }
7570 DONE;
7571 }")
7572
7573 (define_expand "cstorehf4"
7574 [(set (match_operand:SI 0 "s_register_operand")
7575 (match_operator:SI 1 "expandable_comparison_operator"
7576 [(match_operand:HF 2 "s_register_operand")
7577 (match_operand:HF 3 "vfp_compare_operand")]))]
7578 "TARGET_VFP_FP16INST"
7579 {
7580 if (!arm_validize_comparison (&operands[1],
7581 &operands[2],
7582 &operands[3]))
7583 FAIL;
7584
7585 emit_insn (gen_cstore_cc (operands[0], operands[1],
7586 operands[2], operands[3]));
7587 DONE;
7588 }
7589 )
7590
7591 (define_expand "cstoresf4"
7592 [(set (match_operand:SI 0 "s_register_operand")
7593 (match_operator:SI 1 "expandable_comparison_operator"
7594 [(match_operand:SF 2 "s_register_operand")
7595 (match_operand:SF 3 "vfp_compare_operand")]))]
7596 "TARGET_32BIT && TARGET_HARD_FLOAT"
7597 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7598 operands[2], operands[3])); DONE;"
7599 )
7600
7601 (define_expand "cstoredf4"
7602 [(set (match_operand:SI 0 "s_register_operand")
7603 (match_operator:SI 1 "expandable_comparison_operator"
7604 [(match_operand:DF 2 "s_register_operand")
7605 (match_operand:DF 3 "vfp_compare_operand")]))]
7606 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7607 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7608 operands[2], operands[3])); DONE;"
7609 )
7610
7611 (define_expand "cstoredi4"
7612 [(set (match_operand:SI 0 "s_register_operand")
7613 (match_operator:SI 1 "expandable_comparison_operator"
7614 [(match_operand:DI 2 "s_register_operand")
7615 (match_operand:DI 3 "reg_or_int_operand")]))]
7616 "TARGET_32BIT"
7617 "{
7618 if (!arm_validize_comparison (&operands[1],
7619 &operands[2],
7620 &operands[3]))
7621 FAIL;
7622 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
7623 operands[3]));
7624 DONE;
7625 }"
7626 )
7627
7628 \f
7629 ;; Conditional move insns
7630
7631 (define_expand "movsicc"
7632 [(set (match_operand:SI 0 "s_register_operand")
7633 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
7634 (match_operand:SI 2 "arm_not_operand")
7635 (match_operand:SI 3 "arm_not_operand")))]
7636 "TARGET_32BIT"
7637 "
7638 {
7639 enum rtx_code code;
7640 rtx ccreg;
7641
7642 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7643 &XEXP (operands[1], 1)))
7644 FAIL;
7645
7646 code = GET_CODE (operands[1]);
7647 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7648 XEXP (operands[1], 1), NULL_RTX);
7649 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7650 }"
7651 )
7652
7653 (define_expand "movhfcc"
7654 [(set (match_operand:HF 0 "s_register_operand")
7655 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
7656 (match_operand:HF 2 "s_register_operand")
7657 (match_operand:HF 3 "s_register_operand")))]
7658 "TARGET_VFP_FP16INST"
7659 "
7660 {
7661 enum rtx_code code = GET_CODE (operands[1]);
7662 rtx ccreg;
7663
7664 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7665 &XEXP (operands[1], 1)))
7666 FAIL;
7667
7668 code = GET_CODE (operands[1]);
7669 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7670 XEXP (operands[1], 1), NULL_RTX);
7671 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7672 }"
7673 )
7674
7675 (define_expand "movsfcc"
7676 [(set (match_operand:SF 0 "s_register_operand")
7677 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
7678 (match_operand:SF 2 "s_register_operand")
7679 (match_operand:SF 3 "s_register_operand")))]
7680 "TARGET_32BIT && TARGET_HARD_FLOAT"
7681 "
7682 {
7683 enum rtx_code code = GET_CODE (operands[1]);
7684 rtx ccreg;
7685
7686 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7687 &XEXP (operands[1], 1)))
7688 FAIL;
7689
7690 code = GET_CODE (operands[1]);
7691 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7692 XEXP (operands[1], 1), NULL_RTX);
7693 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7694 }"
7695 )
7696
7697 (define_expand "movdfcc"
7698 [(set (match_operand:DF 0 "s_register_operand")
7699 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
7700 (match_operand:DF 2 "s_register_operand")
7701 (match_operand:DF 3 "s_register_operand")))]
7702 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
7703 "
7704 {
7705 enum rtx_code code = GET_CODE (operands[1]);
7706 rtx ccreg;
7707
7708 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7709 &XEXP (operands[1], 1)))
7710 FAIL;
7711 code = GET_CODE (operands[1]);
7712 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7713 XEXP (operands[1], 1), NULL_RTX);
7714 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7715 }"
7716 )
7717
7718 (define_insn "*cmov<mode>"
7719 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
7720 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
7721 [(match_operand 2 "cc_register" "") (const_int 0)])
7722 (match_operand:SDF 3 "s_register_operand"
7723 "<F_constraint>")
7724 (match_operand:SDF 4 "s_register_operand"
7725 "<F_constraint>")))]
7726 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
7727 "*
7728 {
7729 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7730 switch (code)
7731 {
7732 case ARM_GE:
7733 case ARM_GT:
7734 case ARM_EQ:
7735 case ARM_VS:
7736 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
7737 case ARM_LT:
7738 case ARM_LE:
7739 case ARM_NE:
7740 case ARM_VC:
7741 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
7742 default:
7743 gcc_unreachable ();
7744 }
7745 return \"\";
7746 }"
7747 [(set_attr "conds" "use")
7748 (set_attr "type" "fcsel")]
7749 )
7750
7751 (define_insn "*cmovhf"
7752 [(set (match_operand:HF 0 "s_register_operand" "=t")
7753 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
7754 [(match_operand 2 "cc_register" "") (const_int 0)])
7755 (match_operand:HF 3 "s_register_operand" "t")
7756 (match_operand:HF 4 "s_register_operand" "t")))]
7757 "TARGET_VFP_FP16INST"
7758 "*
7759 {
7760 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7761 switch (code)
7762 {
7763 case ARM_GE:
7764 case ARM_GT:
7765 case ARM_EQ:
7766 case ARM_VS:
7767 return \"vsel%d1.f16\\t%0, %3, %4\";
7768 case ARM_LT:
7769 case ARM_LE:
7770 case ARM_NE:
7771 case ARM_VC:
7772 return \"vsel%D1.f16\\t%0, %4, %3\";
7773 default:
7774 gcc_unreachable ();
7775 }
7776 return \"\";
7777 }"
7778 [(set_attr "conds" "use")
7779 (set_attr "type" "fcsel")]
7780 )
7781
7782 (define_insn_and_split "*movsicc_insn"
7783 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7784 (if_then_else:SI
7785 (match_operator 3 "arm_comparison_operator"
7786 [(match_operand 4 "cc_register" "") (const_int 0)])
7787 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7788 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7789 "TARGET_ARM"
7790 "@
7791 mov%D3\\t%0, %2
7792 mvn%D3\\t%0, #%B2
7793 mov%d3\\t%0, %1
7794 mvn%d3\\t%0, #%B1
7795 #
7796 #
7797 #
7798 #"
7799 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7800 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7801 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7802 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7803 "&& reload_completed"
7804 [(const_int 0)]
7805 {
7806 enum rtx_code rev_code;
7807 machine_mode mode;
7808 rtx rev_cond;
7809
7810 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7811 operands[3],
7812 gen_rtx_SET (operands[0], operands[1])));
7813
7814 rev_code = GET_CODE (operands[3]);
7815 mode = GET_MODE (operands[4]);
7816 if (mode == CCFPmode || mode == CCFPEmode)
7817 rev_code = reverse_condition_maybe_unordered (rev_code);
7818 else
7819 rev_code = reverse_condition (rev_code);
7820
7821 rev_cond = gen_rtx_fmt_ee (rev_code,
7822 VOIDmode,
7823 operands[4],
7824 const0_rtx);
7825 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7826 rev_cond,
7827 gen_rtx_SET (operands[0], operands[2])));
7828 DONE;
7829 }
7830 [(set_attr "length" "4,4,4,4,8,8,8,8")
7831 (set_attr "conds" "use")
7832 (set_attr_alternative "type"
7833 [(if_then_else (match_operand 2 "const_int_operand" "")
7834 (const_string "mov_imm")
7835 (const_string "mov_reg"))
7836 (const_string "mvn_imm")
7837 (if_then_else (match_operand 1 "const_int_operand" "")
7838 (const_string "mov_imm")
7839 (const_string "mov_reg"))
7840 (const_string "mvn_imm")
7841 (const_string "multiple")
7842 (const_string "multiple")
7843 (const_string "multiple")
7844 (const_string "multiple")])]
7845 )
7846
7847 (define_insn "*movsfcc_soft_insn"
7848 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7849 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7850 [(match_operand 4 "cc_register" "") (const_int 0)])
7851 (match_operand:SF 1 "s_register_operand" "0,r")
7852 (match_operand:SF 2 "s_register_operand" "r,0")))]
7853 "TARGET_ARM && TARGET_SOFT_FLOAT"
7854 "@
7855 mov%D3\\t%0, %2
7856 mov%d3\\t%0, %1"
7857 [(set_attr "conds" "use")
7858 (set_attr "type" "mov_reg")]
7859 )
7860
7861 \f
7862 ;; Jump and linkage insns
7863
7864 (define_expand "jump"
7865 [(set (pc)
7866 (label_ref (match_operand 0 "" "")))]
7867 "TARGET_EITHER"
7868 ""
7869 )
7870
7871 (define_insn "*arm_jump"
7872 [(set (pc)
7873 (label_ref (match_operand 0 "" "")))]
7874 "TARGET_32BIT"
7875 "*
7876 {
7877 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7878 {
7879 arm_ccfsm_state += 2;
7880 return \"\";
7881 }
7882 return \"b%?\\t%l0\";
7883 }
7884 "
7885 [(set_attr "predicable" "yes")
7886 (set (attr "length")
7887 (if_then_else
7888 (and (match_test "TARGET_THUMB2")
7889 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7890 (le (minus (match_dup 0) (pc)) (const_int 2048))))
7891 (const_int 2)
7892 (const_int 4)))
7893 (set_attr "type" "branch")]
7894 )
7895
7896 (define_expand "call"
7897 [(parallel [(call (match_operand 0 "memory_operand")
7898 (match_operand 1 "general_operand"))
7899 (use (match_operand 2 "" ""))
7900 (clobber (reg:SI LR_REGNUM))])]
7901 "TARGET_EITHER"
7902 "
7903 {
7904 rtx callee, pat;
7905 tree addr = MEM_EXPR (operands[0]);
7906
7907 /* In an untyped call, we can get NULL for operand 2. */
7908 if (operands[2] == NULL_RTX)
7909 operands[2] = const0_rtx;
7910
7911 /* Decide if we should generate indirect calls by loading the
7912 32-bit address of the callee into a register before performing the
7913 branch and link. */
7914 callee = XEXP (operands[0], 0);
7915 if (GET_CODE (callee) == SYMBOL_REF
7916 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7917 : !REG_P (callee))
7918 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7919
7920 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
7921 /* Indirect call: set r9 with FDPIC value of callee. */
7922 XEXP (operands[0], 0)
7923 = arm_load_function_descriptor (XEXP (operands[0], 0));
7924
7925 if (detect_cmse_nonsecure_call (addr))
7926 {
7927 pat = gen_nonsecure_call_internal (operands[0], operands[1],
7928 operands[2]);
7929 emit_call_insn (pat);
7930 }
7931 else
7932 {
7933 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7934 arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
7935 }
7936
7937 /* Restore FDPIC register (r9) after call. */
7938 if (TARGET_FDPIC)
7939 {
7940 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7941 rtx initial_fdpic_reg
7942 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7943
7944 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7945 initial_fdpic_reg));
7946 }
7947
7948 DONE;
7949 }"
7950 )
7951
7952 (define_insn "restore_pic_register_after_call"
7953 [(set (match_operand:SI 0 "s_register_operand" "+r,r")
7954 (unspec:SI [(match_dup 0)
7955 (match_operand:SI 1 "nonimmediate_operand" "r,m")]
7956 UNSPEC_PIC_RESTORE))]
7957 ""
7958 "@
7959 mov\t%0, %1
7960 ldr\t%0, %1"
7961 )
7962
7963 (define_expand "call_internal"
7964 [(parallel [(call (match_operand 0 "memory_operand")
7965 (match_operand 1 "general_operand"))
7966 (use (match_operand 2 "" ""))
7967 (clobber (reg:SI LR_REGNUM))])])
7968
7969 (define_expand "nonsecure_call_internal"
7970 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
7971 UNSPEC_NONSECURE_MEM)
7972 (match_operand 1 "general_operand"))
7973 (use (match_operand 2 "" ""))
7974 (clobber (reg:SI LR_REGNUM))])]
7975 "use_cmse"
7976 "
7977 {
7978 rtx tmp;
7979 tmp = copy_to_suggested_reg (XEXP (operands[0], 0),
7980 gen_rtx_REG (SImode, R4_REGNUM),
7981 SImode);
7982
7983 operands[0] = replace_equiv_address (operands[0], tmp);
7984 }")
7985
7986 (define_insn "*call_reg_armv5"
7987 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7988 (match_operand 1 "" ""))
7989 (use (match_operand 2 "" ""))
7990 (clobber (reg:SI LR_REGNUM))]
7991 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7992 "blx%?\\t%0"
7993 [(set_attr "type" "call")]
7994 )
7995
7996 (define_insn "*call_reg_arm"
7997 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7998 (match_operand 1 "" ""))
7999 (use (match_operand 2 "" ""))
8000 (clobber (reg:SI LR_REGNUM))]
8001 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
8002 "*
8003 return output_call (operands);
8004 "
8005 ;; length is worst case; normally it is only two instructions
8006 [(set_attr "length" "12")
8007 (set_attr "type" "call")]
8008 )
8009
8010
8011 (define_expand "call_value"
8012 [(parallel [(set (match_operand 0 "" "")
8013 (call (match_operand 1 "memory_operand")
8014 (match_operand 2 "general_operand")))
8015 (use (match_operand 3 "" ""))
8016 (clobber (reg:SI LR_REGNUM))])]
8017 "TARGET_EITHER"
8018 "
8019 {
8020 rtx pat, callee;
8021 tree addr = MEM_EXPR (operands[1]);
8022
8023 /* In an untyped call, we can get NULL for operand 3. */
8024 if (operands[3] == 0)
8025 operands[3] = const0_rtx;
8026
8027 /* Decide if we should generate indirect calls by loading the
8028 32-bit address of the callee into a register before performing the
8029 branch and link. */
8030 callee = XEXP (operands[1], 0);
8031 if (GET_CODE (callee) == SYMBOL_REF
8032 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8033 : !REG_P (callee))
8034 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8035
8036 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
8037 /* Indirect call: set r9 with FDPIC value of callee. */
8038 XEXP (operands[1], 0)
8039 = arm_load_function_descriptor (XEXP (operands[1], 0));
8040
8041 if (detect_cmse_nonsecure_call (addr))
8042 {
8043 pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
8044 operands[2], operands[3]);
8045 emit_call_insn (pat);
8046 }
8047 else
8048 {
8049 pat = gen_call_value_internal (operands[0], operands[1],
8050 operands[2], operands[3]);
8051 arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
8052 }
8053
8054 /* Restore FDPIC register (r9) after call. */
8055 if (TARGET_FDPIC)
8056 {
8057 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8058 rtx initial_fdpic_reg
8059 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
8060
8061 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
8062 initial_fdpic_reg));
8063 }
8064
8065 DONE;
8066 }"
8067 )
8068
8069 (define_expand "call_value_internal"
8070 [(parallel [(set (match_operand 0 "" "")
8071 (call (match_operand 1 "memory_operand")
8072 (match_operand 2 "general_operand")))
8073 (use (match_operand 3 "" ""))
8074 (clobber (reg:SI LR_REGNUM))])])
8075
8076 (define_expand "nonsecure_call_value_internal"
8077 [(parallel [(set (match_operand 0 "" "")
8078 (call (unspec:SI [(match_operand 1 "memory_operand")]
8079 UNSPEC_NONSECURE_MEM)
8080 (match_operand 2 "general_operand")))
8081 (use (match_operand 3 "" ""))
8082 (clobber (reg:SI LR_REGNUM))])]
8083 "use_cmse"
8084 "
8085 {
8086 rtx tmp;
8087 tmp = copy_to_suggested_reg (XEXP (operands[1], 0),
8088 gen_rtx_REG (SImode, R4_REGNUM),
8089 SImode);
8090
8091 operands[1] = replace_equiv_address (operands[1], tmp);
8092 }")
8093
8094 (define_insn "*call_value_reg_armv5"
8095 [(set (match_operand 0 "" "")
8096 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8097 (match_operand 2 "" "")))
8098 (use (match_operand 3 "" ""))
8099 (clobber (reg:SI LR_REGNUM))]
8100 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
8101 "blx%?\\t%1"
8102 [(set_attr "type" "call")]
8103 )
8104
8105 (define_insn "*call_value_reg_arm"
8106 [(set (match_operand 0 "" "")
8107 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8108 (match_operand 2 "" "")))
8109 (use (match_operand 3 "" ""))
8110 (clobber (reg:SI LR_REGNUM))]
8111 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
8112 "*
8113 return output_call (&operands[1]);
8114 "
8115 [(set_attr "length" "12")
8116 (set_attr "type" "call")]
8117 )
8118
8119 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses.
8120 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8121
8122 (define_insn "*call_symbol"
8123 [(call (mem:SI (match_operand:SI 0 "" ""))
8124 (match_operand 1 "" ""))
8125 (use (match_operand 2 "" ""))
8126 (clobber (reg:SI LR_REGNUM))]
8127 "TARGET_32BIT
8128 && !SIBLING_CALL_P (insn)
8129 && (GET_CODE (operands[0]) == SYMBOL_REF)
8130 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8131 "*
8132 {
8133 rtx op = operands[0];
8134
8135 /* Switch mode now when possible. */
8136 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8137 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8138 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
8139
8140 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8141 }"
8142 [(set_attr "type" "call")]
8143 )
8144
8145 (define_insn "*call_value_symbol"
8146 [(set (match_operand 0 "" "")
8147 (call (mem:SI (match_operand:SI 1 "" ""))
8148 (match_operand:SI 2 "" "")))
8149 (use (match_operand 3 "" ""))
8150 (clobber (reg:SI LR_REGNUM))]
8151 "TARGET_32BIT
8152 && !SIBLING_CALL_P (insn)
8153 && (GET_CODE (operands[1]) == SYMBOL_REF)
8154 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8155 "*
8156 {
8157 rtx op = operands[1];
8158
8159 /* Switch mode now when possible. */
8160 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8161 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8162 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
8163
8164 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8165 }"
8166 [(set_attr "type" "call")]
8167 )
8168
8169 (define_expand "sibcall_internal"
8170 [(parallel [(call (match_operand 0 "memory_operand")
8171 (match_operand 1 "general_operand"))
8172 (return)
8173 (use (match_operand 2 "" ""))])])
8174
8175 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8176 (define_expand "sibcall"
8177 [(parallel [(call (match_operand 0 "memory_operand")
8178 (match_operand 1 "general_operand"))
8179 (return)
8180 (use (match_operand 2 "" ""))])]
8181 "TARGET_32BIT"
8182 "
8183 {
8184 rtx pat;
8185
8186 if ((!REG_P (XEXP (operands[0], 0))
8187 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
8188 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
8189 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
8190 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
8191
8192 if (operands[2] == NULL_RTX)
8193 operands[2] = const0_rtx;
8194
8195 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
8196 arm_emit_call_insn (pat, operands[0], true);
8197 DONE;
8198 }"
8199 )
8200
8201 (define_expand "sibcall_value_internal"
8202 [(parallel [(set (match_operand 0 "" "")
8203 (call (match_operand 1 "memory_operand")
8204 (match_operand 2 "general_operand")))
8205 (return)
8206 (use (match_operand 3 "" ""))])])
8207
8208 (define_expand "sibcall_value"
8209 [(parallel [(set (match_operand 0 "" "")
8210 (call (match_operand 1 "memory_operand")
8211 (match_operand 2 "general_operand")))
8212 (return)
8213 (use (match_operand 3 "" ""))])]
8214 "TARGET_32BIT"
8215 "
8216 {
8217 rtx pat;
8218
8219 if ((!REG_P (XEXP (operands[1], 0))
8220 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
8221 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
8222 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
8223 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
8224
8225 if (operands[3] == NULL_RTX)
8226 operands[3] = const0_rtx;
8227
8228 pat = gen_sibcall_value_internal (operands[0], operands[1],
8229 operands[2], operands[3]);
8230 arm_emit_call_insn (pat, operands[1], true);
8231 DONE;
8232 }"
8233 )
8234
8235 (define_insn "*sibcall_insn"
8236 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
8237 (match_operand 1 "" ""))
8238 (return)
8239 (use (match_operand 2 "" ""))]
8240 "TARGET_32BIT && SIBLING_CALL_P (insn)"
8241 "*
8242 if (which_alternative == 1)
8243 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8244 else
8245 {
8246 if (arm_arch5t || arm_arch4t)
8247 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
8248 else
8249 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
8250 }
8251 "
8252 [(set_attr "type" "call")]
8253 )
8254
8255 (define_insn "*sibcall_value_insn"
8256 [(set (match_operand 0 "" "")
8257 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
8258 (match_operand 2 "" "")))
8259 (return)
8260 (use (match_operand 3 "" ""))]
8261 "TARGET_32BIT && SIBLING_CALL_P (insn)"
8262 "*
8263 if (which_alternative == 1)
8264 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8265 else
8266 {
8267 if (arm_arch5t || arm_arch4t)
8268 return \"bx%?\\t%1\";
8269 else
8270 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
8271 }
8272 "
8273 [(set_attr "type" "call")]
8274 )
8275
8276 (define_expand "<return_str>return"
8277 [(RETURNS)]
8278 "(TARGET_ARM || (TARGET_THUMB2
8279 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
8280 && !IS_STACKALIGN (arm_current_func_type ())))
8281 <return_cond_false>"
8282 "
8283 {
8284 if (TARGET_THUMB2)
8285 {
8286 thumb2_expand_return (<return_simple_p>);
8287 DONE;
8288 }
8289 }
8290 "
8291 )
8292
8293 ;; Often the return insn will be the same as loading from memory, so set the type attribute accordingly.
8294 (define_insn "*arm_return"
8295 [(return)]
8296 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8297 "*
8298 {
8299 if (arm_ccfsm_state == 2)
8300 {
8301 arm_ccfsm_state += 2;
8302 return \"\";
8303 }
8304 return output_return_instruction (const_true_rtx, true, false, false);
8305 }"
8306 [(set_attr "type" "load_4")
8307 (set_attr "length" "12")
8308 (set_attr "predicable" "yes")]
8309 )
8310
8311 (define_insn "*cond_<return_str>return"
8312 [(set (pc)
8313 (if_then_else (match_operator 0 "arm_comparison_operator"
8314 [(match_operand 1 "cc_register" "") (const_int 0)])
8315 (RETURNS)
8316 (pc)))]
8317 "TARGET_ARM <return_cond_true>"
8318 "*
8319 {
8320 if (arm_ccfsm_state == 2)
8321 {
8322 arm_ccfsm_state += 2;
8323 return \"\";
8324 }
8325 return output_return_instruction (operands[0], true, false,
8326 <return_simple_p>);
8327 }"
8328 [(set_attr "conds" "use")
8329 (set_attr "length" "12")
8330 (set_attr "type" "load_4")]
8331 )
8332
8333 (define_insn "*cond_<return_str>return_inverted"
8334 [(set (pc)
8335 (if_then_else (match_operator 0 "arm_comparison_operator"
8336 [(match_operand 1 "cc_register" "") (const_int 0)])
8337 (pc)
8338 (RETURNS)))]
8339 "TARGET_ARM <return_cond_true>"
8340 "*
8341 {
8342 if (arm_ccfsm_state == 2)
8343 {
8344 arm_ccfsm_state += 2;
8345 return \"\";
8346 }
8347 return output_return_instruction (operands[0], true, true,
8348 <return_simple_p>);
8349 }"
8350 [(set_attr "conds" "use")
8351 (set_attr "length" "12")
8352 (set_attr "type" "load_4")]
8353 )
8354
8355 (define_insn "*arm_simple_return"
8356 [(simple_return)]
8357 "TARGET_ARM"
8358 "*
8359 {
8360 if (arm_ccfsm_state == 2)
8361 {
8362 arm_ccfsm_state += 2;
8363 return \"\";
8364 }
8365 return output_return_instruction (const_true_rtx, true, false, true);
8366 }"
8367 [(set_attr "type" "branch")
8368 (set_attr "length" "4")
8369 (set_attr "predicable" "yes")]
8370 )
8371
8372 ;; Generate a sequence of instructions to determine if the processor is
8373 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8374 ;; mask.
8375
8376 (define_expand "return_addr_mask"
8377 [(set (match_dup 1)
8378 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8379 (const_int 0)))
8380 (set (match_operand:SI 0 "s_register_operand")
8381 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8382 (const_int -1)
8383 (const_int 67108860)))] ; 0x03fffffc
8384 "TARGET_ARM"
8385 "
8386 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8387 ")
8388
8389 (define_insn "*check_arch2"
8390 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8391 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8392 (const_int 0)))]
8393 "TARGET_ARM"
8394 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8395 [(set_attr "length" "8")
8396 (set_attr "conds" "set")
8397 (set_attr "type" "multiple")]
8398 )
8399
8400 ;; Call subroutine returning any type.
8401
8402 (define_expand "untyped_call"
8403 [(parallel [(call (match_operand 0 "" "")
8404 (const_int 0))
8405 (match_operand 1 "" "")
8406 (match_operand 2 "" "")])]
8407 "TARGET_EITHER && !TARGET_FDPIC"
8408 "
8409 {
8410 int i;
8411 rtx par = gen_rtx_PARALLEL (VOIDmode,
8412 rtvec_alloc (XVECLEN (operands[2], 0)));
8413 rtx addr = gen_reg_rtx (Pmode);
8414 rtx mem;
8415 int size = 0;
8416
8417 emit_move_insn (addr, XEXP (operands[1], 0));
8418 mem = change_address (operands[1], BLKmode, addr);
8419
8420 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8421 {
8422 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8423
8424 /* Default code only uses r0 as a return value, but we could
8425 be using anything up to 4 registers. */
8426 if (REGNO (src) == R0_REGNUM)
8427 src = gen_rtx_REG (TImode, R0_REGNUM);
8428
8429 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8430 GEN_INT (size));
8431 size += GET_MODE_SIZE (GET_MODE (src));
8432 }
8433
8434 emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
8435
8436 size = 0;
8437
8438 for (i = 0; i < XVECLEN (par, 0); i++)
8439 {
8440 HOST_WIDE_INT offset = 0;
8441 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8442
8443 if (size != 0)
8444 emit_move_insn (addr, plus_constant (Pmode, addr, size));
8445
8446 mem = change_address (mem, GET_MODE (reg), NULL);
8447 if (REGNO (reg) == R0_REGNUM)
8448 {
8449 /* On thumb we have to use a write-back instruction. */
8450 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
8451 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8452 size = TARGET_ARM ? 16 : 0;
8453 }
8454 else
8455 {
8456 emit_move_insn (mem, reg);
8457 size = GET_MODE_SIZE (GET_MODE (reg));
8458 }
8459 }
8460
8461 /* The optimizer does not know that the call sets the function value
8462 registers we stored in the result block. We avoid problems by
8463 claiming that all hard registers are used and clobbered at this
8464 point. */
8465 emit_insn (gen_blockage ());
8466
8467 DONE;
8468 }"
8469 )
8470
8471 (define_expand "untyped_return"
8472 [(match_operand:BLK 0 "memory_operand")
8473 (match_operand 1 "" "")]
8474 "TARGET_EITHER && !TARGET_FDPIC"
8475 "
8476 {
8477 int i;
8478 rtx addr = gen_reg_rtx (Pmode);
8479 rtx mem;
8480 int size = 0;
8481
8482 emit_move_insn (addr, XEXP (operands[0], 0));
8483 mem = change_address (operands[0], BLKmode, addr);
8484
8485 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8486 {
8487 HOST_WIDE_INT offset = 0;
8488 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8489
8490 if (size != 0)
8491 emit_move_insn (addr, plus_constant (Pmode, addr, size));
8492
8493 mem = change_address (mem, GET_MODE (reg), NULL);
8494 if (REGNO (reg) == R0_REGNUM)
8495 {
8496 /* On thumb we have to use a write-back instruction. */
8497 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
8498 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8499 size = TARGET_ARM ? 16 : 0;
8500 }
8501 else
8502 {
8503 emit_move_insn (reg, mem);
8504 size = GET_MODE_SIZE (GET_MODE (reg));
8505 }
8506 }
8507
8508 /* Emit USE insns before the return. */
8509 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8510 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8511
8512 /* Construct the return. */
8513 expand_naked_return ();
8514
8515 DONE;
8516 }"
8517 )
8518
8519 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8520 ;; all of memory. This blocks insns from being moved across this point.
8521
8522 (define_insn "blockage"
8523 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8524 "TARGET_EITHER"
8525 ""
8526 [(set_attr "length" "0")
8527 (set_attr "type" "block")]
8528 )
8529
8530 ;; Since we hard-code r0 here, use the 'o' constraint to prevent the
8531 ;; output of auto-increment addressing modes that could use r0 as the base
8532 ;; register, which would provoke undefined behaviour in the hardware.
8533 (define_insn "probe_stack"
8534 [(set (match_operand:SI 0 "memory_operand" "=o")
8535 (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
8536 "TARGET_32BIT"
8537 "str%?\\tr0, %0"
8538 [(set_attr "type" "store_4")
8539 (set_attr "predicable" "yes")]
8540 )
8541
8542 (define_insn "probe_stack_range"
8543 [(set (match_operand:SI 0 "register_operand" "=r")
8544 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
8545 (match_operand:SI 2 "register_operand" "r")]
8546 VUNSPEC_PROBE_STACK_RANGE))]
8547 "TARGET_32BIT"
8548 {
8549 return output_probe_stack_range (operands[0], operands[2]);
8550 }
8551 [(set_attr "type" "multiple")
8552 (set_attr "conds" "clob")]
8553 )
8554
8555 ;; Named patterns for stack smashing protection.
8556 (define_expand "stack_protect_combined_set"
8557 [(parallel
8558 [(set (match_operand:SI 0 "memory_operand")
8559 (unspec:SI [(match_operand:SI 1 "guard_operand")]
8560 UNSPEC_SP_SET))
8561 (clobber (match_scratch:SI 2 ""))
8562 (clobber (match_scratch:SI 3 ""))])]
8563 ""
8564 ""
8565 )
8566
8567 ;; Use a separate insn from the above expand so that the mem can be kept
8568 ;; outside operand #1 when register allocation happens. This is needed to
8569 ;; keep LRA from trying to reload the guard, since we need to control how
8570 ;; PIC access is done in the -fpic/-fPIC case (see the COMPUTE_NOW
8571 ;; parameter when calling legitimize_pic_address ()).
8572 (define_insn_and_split "*stack_protect_combined_set_insn"
8573 [(set (match_operand:SI 0 "memory_operand" "=m,m")
8574 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
8575 UNSPEC_SP_SET))
8576 (clobber (match_scratch:SI 2 "=&l,&r"))
8577 (clobber (match_scratch:SI 3 "=&l,&r"))]
8578 ""
8579 "#"
8580 "reload_completed"
8581 [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
8582 UNSPEC_SP_SET))
8583 (clobber (match_dup 2))])]
8584 "
8585 {
8586 if (flag_pic)
8587 {
8588 rtx pic_reg;
8589
8590 if (TARGET_FDPIC)
8591 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8592 else
8593 pic_reg = operands[3];
8594
8595 /* Forces recomputing of GOT base now. */
8596 legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
8597 true /*compute_now*/);
8598 }
8599 else
8600 {
8601 if (address_operand (operands[1], SImode))
8602 operands[2] = operands[1];
8603 else
8604 {
8605 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
8606 emit_move_insn (operands[2], mem);
8607 }
8608 }
8609 }"
8610 [(set_attr "arch" "t1,32")]
8611 )
8612
8613 ;; DO NOT SPLIT THIS INSN. It's important for security reasons that the
8614 ;; canary value does not live beyond the life of this sequence.
8615 (define_insn "*stack_protect_set_insn"
8616 [(set (match_operand:SI 0 "memory_operand" "=m,m")
8617 (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
8618 UNSPEC_SP_SET))
8619 (clobber (match_dup 1))]
8620 ""
8621 "@
8622 ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
8623 ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
8624 [(set_attr "length" "8,12")
8625 (set_attr "conds" "clob,nocond")
8626 (set_attr "type" "multiple")
8627 (set_attr "arch" "t1,32")]
8628 )
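;; In the sequence above, the canary is loaded through the guard address,
;; stored into the frame slot (operand 0), and the register is then cleared
;; immediately, so the canary value itself never stays live in a register.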
8629
8630 (define_expand "stack_protect_combined_test"
8631 [(parallel
8632 [(set (pc)
8633 (if_then_else
8634 (eq (match_operand:SI 0 "memory_operand")
8635 (unspec:SI [(match_operand:SI 1 "guard_operand")]
8636 UNSPEC_SP_TEST))
8637 (label_ref (match_operand 2))
8638 (pc)))
8639 (clobber (match_scratch:SI 3 ""))
8640 (clobber (match_scratch:SI 4 ""))
8641 (clobber (reg:CC CC_REGNUM))])]
8642 ""
8643 ""
8644 )
8645
8646 ;; Use a separate insn from the expand above so that the mem can stay outside
8647 ;; operand #1 when register allocation runs. This is needed to stop LRA from
8648 ;; trying to reload the guard, since we need to control how PIC access is done
8649 ;; in the -fpic/-fPIC case (see the COMPUTE_NOW parameter when calling
8650 ;; legitimize_pic_address ()).
8651 (define_insn_and_split "*stack_protect_combined_test_insn"
8652 [(set (pc)
8653 (if_then_else
8654 (eq (match_operand:SI 0 "memory_operand" "m,m")
8655 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
8656 UNSPEC_SP_TEST))
8657 (label_ref (match_operand 2))
8658 (pc)))
8659 (clobber (match_scratch:SI 3 "=&l,&r"))
8660 (clobber (match_scratch:SI 4 "=&l,&r"))
8661 (clobber (reg:CC CC_REGNUM))]
8662 ""
8663 "#"
8664 "reload_completed"
8665 [(const_int 0)]
8666 {
8667 rtx eq;
8668
8669 if (flag_pic)
8670 {
8671 rtx pic_reg;
8672
8673 if (TARGET_FDPIC)
8674 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8675 else
8676 pic_reg = operands[4];
8677
8678 /* Forces recomputing of GOT base now. */
8679 legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
8680 true /*compute_now*/);
8681 }
8682 else
8683 {
8684 if (address_operand (operands[1], SImode))
8685 operands[3] = operands[1];
8686 else
8687 {
8688 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
8689 emit_move_insn (operands[3], mem);
8690 }
8691 }
8692 if (TARGET_32BIT)
8693 {
8694 emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
8695 operands[3]));
8696 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
8697 eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
8698 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
8699 }
8700 else
8701 {
8702 emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
8703 operands[3]));
8704 eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
8705 emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
8706 operands[2]));
8707 }
8708 DONE;
8709 }
8710 [(set_attr "arch" "t1,32")]
8711 )
8712
8713 (define_insn "arm_stack_protect_test_insn"
8714 [(set (reg:CC_Z CC_REGNUM)
8715 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
8716 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
8717 UNSPEC_SP_TEST)
8718 (const_int 0)))
8719 (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
8720 (clobber (match_dup 2))]
8721 "TARGET_32BIT"
8722 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0"
8723 [(set_attr "length" "8,12")
8724 (set_attr "conds" "set")
8725 (set_attr "type" "multiple")
8726 (set_attr "arch" "t,32")]
8727 )
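;; The test sequence loads the canary through the guard address and the saved
;; copy from the frame slot, then EORs the two, so the Z flag is set exactly
;; when the values still match.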
8728
8729 (define_expand "casesi"
8730 [(match_operand:SI 0 "s_register_operand") ; index to jump on
8731 (match_operand:SI 1 "const_int_operand") ; lower bound
8732 (match_operand:SI 2 "const_int_operand") ; total range
8733 (match_operand:SI 3 "" "") ; table label
8734 (match_operand:SI 4 "" "")] ; Out of range label
8735 "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
8736 "
8737 {
8738 enum insn_code code;
8739 if (operands[1] != const0_rtx)
8740 {
8741 rtx reg = gen_reg_rtx (SImode);
8742
8743 emit_insn (gen_addsi3 (reg, operands[0],
8744 gen_int_mode (-INTVAL (operands[1]),
8745 SImode)));
8746 operands[0] = reg;
8747 }
8748
8749 if (TARGET_ARM)
8750 code = CODE_FOR_arm_casesi_internal;
8751 else if (TARGET_THUMB1)
8752 code = CODE_FOR_thumb1_casesi_internal_pic;
8753 else if (flag_pic)
8754 code = CODE_FOR_thumb2_casesi_internal_pic;
8755 else
8756 code = CODE_FOR_thumb2_casesi_internal;
8757
8758 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8759 operands[2] = force_reg (SImode, operands[2]);
8760
8761 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8762 operands[3], operands[4]));
8763 DONE;
8764 }"
8765 )
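;; For example, a switch whose case labels span 5..12 first gets 5 subtracted
;; from the index (the addsi3 emitted above), so the dispatch table is always
;; indexed from zero and a single unsigned comparison against the total range
;; covers the bounds check.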
8766
8767 ;; The USE in this pattern is needed to tell flow analysis that this is
8768 ;; a CASESI insn. It has no other purpose.
8769 (define_expand "arm_casesi_internal"
8770 [(parallel [(set (pc)
8771 (if_then_else
8772 (leu (match_operand:SI 0 "s_register_operand")
8773 (match_operand:SI 1 "arm_rhs_operand"))
8774 (match_dup 4)
8775 (label_ref:SI (match_operand 3 ""))))
8776 (clobber (reg:CC CC_REGNUM))
8777 (use (label_ref:SI (match_operand 2 "")))])]
8778 "TARGET_ARM"
8779 {
8780 operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
8781 operands[4] = gen_rtx_PLUS (SImode, operands[4],
8782 gen_rtx_LABEL_REF (SImode, operands[2]));
8783 operands[4] = gen_rtx_MEM (SImode, operands[4]);
8784 MEM_READONLY_P (operands[4]) = 1;
8785 MEM_NOTRAP_P (operands[4]) = 1;
8786 })
8787
8788 (define_insn "*arm_casesi_internal"
8789 [(parallel [(set (pc)
8790 (if_then_else
8791 (leu (match_operand:SI 0 "s_register_operand" "r")
8792 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8793 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8794 (label_ref:SI (match_operand 2 "" ""))))
8795 (label_ref:SI (match_operand 3 "" ""))))
8796 (clobber (reg:CC CC_REGNUM))
8797 (use (label_ref:SI (match_dup 2)))])]
8798 "TARGET_ARM"
8799 "*
8800 if (flag_pic)
8801 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8802 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8803 "
8804 [(set_attr "conds" "clob")
8805 (set_attr "length" "12")
8806 (set_attr "type" "multiple")]
8807 )
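;; Both forms first bound-check the index; when it is in range (ls), the
;; non-PIC form loads the target address straight from the PC-relative
;; dispatch table, while the PIC form adds the scaled index to the PC so that
;; the table can hold position-independent branches rather than absolute
;; addresses. An out-of-range index falls through to the branch to %l3.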
8808
8809 (define_expand "indirect_jump"
8810 [(set (pc)
8811 (match_operand:SI 0 "s_register_operand"))]
8812 "TARGET_EITHER"
8813 "
8814 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8815 address and use bx. */
8816 if (TARGET_THUMB2)
8817 {
8818 rtx tmp;
8819 tmp = gen_reg_rtx (SImode);
8820 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8821 operands[0] = tmp;
8822 }
8823 "
8824 )
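;; A minimal sketch of the Thumb-2 path, assuming the target is in r0 and the
;; scratch allocated above ends up in r1:
;;	orr	r1, r0, #1
;;	bx	r1
;; BX uses bit 0 of the address to select the instruction set, so the low bit
;; must be set to stay in Thumb state.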
8825
8826 ;; NB Never uses BX.
8827 (define_insn "*arm_indirect_jump"
8828 [(set (pc)
8829 (match_operand:SI 0 "s_register_operand" "r"))]
8830 "TARGET_ARM"
8831 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8832 [(set_attr "predicable" "yes")
8833 (set_attr "type" "branch")]
8834 )
8835
8836 (define_insn "*load_indirect_jump"
8837 [(set (pc)
8838 (match_operand:SI 0 "memory_operand" "m"))]
8839 "TARGET_ARM"
8840 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8841 [(set_attr "type" "load_4")
8842 (set_attr "pool_range" "4096")
8843 (set_attr "neg_pool_range" "4084")
8844 (set_attr "predicable" "yes")]
8845 )
8846
8847 \f
8848 ;; Misc insns
8849
8850 (define_insn "nop"
8851 [(const_int 0)]
8852 "TARGET_EITHER"
8853 "nop"
8854 [(set (attr "length")
8855 (if_then_else (eq_attr "is_thumb" "yes")
8856 (const_int 2)
8857 (const_int 4)))
8858 (set_attr "type" "mov_reg")]
8859 )
8860
8861 (define_insn "trap"
8862 [(trap_if (const_int 1) (const_int 0))]
8863 ""
8864 "*
8865 if (TARGET_ARM)
8866 return \".inst\\t0xe7f000f0\";
8867 else
8868 return \".inst\\t0xdeff\";
8869 "
8870 [(set (attr "length")
8871 (if_then_else (eq_attr "is_thumb" "yes")
8872 (const_int 2)
8873 (const_int 4)))
8874 (set_attr "type" "trap")
8875 (set_attr "conds" "unconditional")]
8876 )
8877
8878 \f
8879 ;; Patterns to allow combination of arithmetic, cond code and shifts
8880
8881 (define_insn "*<arith_shift_insn>_multsi"
8882 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8883 (SHIFTABLE_OPS:SI
8884 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
8885 (match_operand:SI 3 "power_of_two_operand" ""))
8886 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
8887 "TARGET_32BIT"
8888 "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
8889 [(set_attr "predicable" "yes")
8890 (set_attr "shift" "2")
8891 (set_attr "arch" "a,t2")
8892 (set_attr "type" "alu_shift_imm")])
8893
8894 (define_insn "*<arith_shift_insn>_shiftsi"
8895 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8896 (SHIFTABLE_OPS:SI
8897 (match_operator:SI 2 "shift_nomul_operator"
8898 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8899 (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
8900 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
8901 "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
8902 "<arith_shift_insn>%?\\t%0, %1, %3%S2"
8903 [(set_attr "predicable" "yes")
8904 (set_attr "shift" "3")
8905 (set_attr "arch" "a,t2,a")
8906 (set_attr "type" "alu_shift_imm,alu_shift_imm,alu_shift_reg")])
8907
8908 (define_split
8909 [(set (match_operand:SI 0 "s_register_operand" "")
8910 (match_operator:SI 1 "shiftable_operator"
8911 [(match_operator:SI 2 "shiftable_operator"
8912 [(match_operator:SI 3 "shift_operator"
8913 [(match_operand:SI 4 "s_register_operand" "")
8914 (match_operand:SI 5 "reg_or_int_operand" "")])
8915 (match_operand:SI 6 "s_register_operand" "")])
8916 (match_operand:SI 7 "arm_rhs_operand" "")]))
8917 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8918 "TARGET_32BIT"
8919 [(set (match_dup 8)
8920 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8921 (match_dup 6)]))
8922 (set (match_dup 0)
8923 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
8924 "")
8925
8926 (define_insn "*arith_shiftsi_compare0"
8927 [(set (reg:CC_NOOV CC_REGNUM)
8928 (compare:CC_NOOV
8929 (match_operator:SI 1 "shiftable_operator"
8930 [(match_operator:SI 3 "shift_operator"
8931 [(match_operand:SI 4 "s_register_operand" "r,r")
8932 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8933 (match_operand:SI 2 "s_register_operand" "r,r")])
8934 (const_int 0)))
8935 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8936 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8937 (match_dup 2)]))]
8938 "TARGET_32BIT"
8939 "%i1s%?\\t%0, %2, %4%S3"
8940 [(set_attr "conds" "set")
8941 (set_attr "shift" "4")
8942 (set_attr "arch" "32,a")
8943 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8944
8945 (define_insn "*arith_shiftsi_compare0_scratch"
8946 [(set (reg:CC_NOOV CC_REGNUM)
8947 (compare:CC_NOOV
8948 (match_operator:SI 1 "shiftable_operator"
8949 [(match_operator:SI 3 "shift_operator"
8950 [(match_operand:SI 4 "s_register_operand" "r,r")
8951 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8952 (match_operand:SI 2 "s_register_operand" "r,r")])
8953 (const_int 0)))
8954 (clobber (match_scratch:SI 0 "=r,r"))]
8955 "TARGET_32BIT"
8956 "%i1s%?\\t%0, %2, %4%S3"
8957 [(set_attr "conds" "set")
8958 (set_attr "shift" "4")
8959 (set_attr "arch" "32,a")
8960 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8961
8962 (define_insn "*sub_shiftsi"
8963 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8964 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8965 (match_operator:SI 2 "shift_operator"
8966 [(match_operand:SI 3 "s_register_operand" "r,r")
8967 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8968 "TARGET_32BIT"
8969 "sub%?\\t%0, %1, %3%S2"
8970 [(set_attr "predicable" "yes")
8971 (set_attr "predicable_short_it" "no")
8972 (set_attr "shift" "3")
8973 (set_attr "arch" "32,a")
8974 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8975
8976 (define_insn "*sub_shiftsi_compare0"
8977 [(set (reg:CC_NOOV CC_REGNUM)
8978 (compare:CC_NOOV
8979 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8980 (match_operator:SI 2 "shift_operator"
8981 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8982 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8983 (const_int 0)))
8984 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8985 (minus:SI (match_dup 1)
8986 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8987 "TARGET_32BIT"
8988 "subs%?\\t%0, %1, %3%S2"
8989 [(set_attr "conds" "set")
8990 (set_attr "shift" "3")
8991 (set_attr "arch" "32,a,a")
8992 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
8993
8994 (define_insn "*sub_shiftsi_compare0_scratch"
8995 [(set (reg:CC_NOOV CC_REGNUM)
8996 (compare:CC_NOOV
8997 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8998 (match_operator:SI 2 "shift_operator"
8999 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9000 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
9001 (const_int 0)))
9002 (clobber (match_scratch:SI 0 "=r,r,r"))]
9003 "TARGET_32BIT"
9004 "subs%?\\t%0, %1, %3%S2"
9005 [(set_attr "conds" "set")
9006 (set_attr "shift" "3")
9007 (set_attr "arch" "32,a,a")
9008 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
9009 \f
9010
9011 (define_insn_and_split "*and_scc"
9012 [(set (match_operand:SI 0 "s_register_operand" "=r")
9013 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9014 [(match_operand 2 "cc_register" "") (const_int 0)])
9015 (match_operand:SI 3 "s_register_operand" "r")))]
9016 "TARGET_ARM"
9017 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
9018 "&& reload_completed"
9019 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
9020 (cond_exec (match_dup 4) (set (match_dup 0)
9021 (and:SI (match_dup 3) (const_int 1))))]
9022 {
9023 machine_mode mode = GET_MODE (operands[2]);
9024 enum rtx_code rc = GET_CODE (operands[1]);
9025
9026 /* Note that operands[4] is the same as operands[1],
9027 but with VOIDmode as the result. */
9028 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9029 if (mode == CCFPmode || mode == CCFPEmode)
9030 rc = reverse_condition_maybe_unordered (rc);
9031 else
9032 rc = reverse_condition (rc);
9033 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9034 }
9035 [(set_attr "conds" "use")
9036 (set_attr "type" "multiple")
9037 (set_attr "length" "8")]
9038 )
9039
9040 (define_insn_and_split "*ior_scc"
9041 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9042 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
9043 [(match_operand 2 "cc_register" "") (const_int 0)])
9044 (match_operand:SI 3 "s_register_operand" "0,?r")))]
9045 "TARGET_ARM"
9046 "@
9047 orr%d1\\t%0, %3, #1
9048 #"
9049 "&& reload_completed
9050 && REGNO (operands [0]) != REGNO (operands[3])"
9051 ;; && which_alternative == 1
9052 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
9053 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
9054 (cond_exec (match_dup 4) (set (match_dup 0)
9055 (ior:SI (match_dup 3) (const_int 1))))]
9056 {
9057 machine_mode mode = GET_MODE (operands[2]);
9058 enum rtx_code rc = GET_CODE (operands[1]);
9059
9060 /* Note that operands[4] is the same as operands[1],
9061 but with VOIDmode as the result. */
9062 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9063 if (mode == CCFPmode || mode == CCFPEmode)
9064 rc = reverse_condition_maybe_unordered (rc);
9065 else
9066 rc = reverse_condition (rc);
9067 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9068 }
9069 [(set_attr "conds" "use")
9070 (set_attr "length" "4,8")
9071 (set_attr "type" "logic_imm,multiple")]
9072 )
9073
9074 ; A series of splitters for the compare_scc pattern below. Note that
9075 ; order is important.
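; The first two splits below use the sign bit directly: for signed x, a
; logical shift right by 31 leaves just the sign bit, giving 1 exactly when
; x < 0; inverting x first gives the corresponding test for x >= 0.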
9076 (define_split
9077 [(set (match_operand:SI 0 "s_register_operand" "")
9078 (lt:SI (match_operand:SI 1 "s_register_operand" "")
9079 (const_int 0)))
9080 (clobber (reg:CC CC_REGNUM))]
9081 "TARGET_32BIT && reload_completed"
9082 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
9083
9084 (define_split
9085 [(set (match_operand:SI 0 "s_register_operand" "")
9086 (ge:SI (match_operand:SI 1 "s_register_operand" "")
9087 (const_int 0)))
9088 (clobber (reg:CC CC_REGNUM))]
9089 "TARGET_32BIT && reload_completed"
9090 [(set (match_dup 0) (not:SI (match_dup 1)))
9091 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
9092
9093 (define_split
9094 [(set (match_operand:SI 0 "s_register_operand" "")
9095 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9096 (const_int 0)))
9097 (clobber (reg:CC CC_REGNUM))]
9098 "arm_arch5t && TARGET_32BIT"
9099 [(set (match_dup 0) (clz:SI (match_dup 1)))
9100 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9101 )
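;; clz of zero is 32 and clz of any non-zero value is at most 31, so shifting
;; the clz result right by 5 leaves 1 exactly when the input was zero.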
9102
9103 (define_split
9104 [(set (match_operand:SI 0 "s_register_operand" "")
9105 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9106 (const_int 0)))
9107 (clobber (reg:CC CC_REGNUM))]
9108 "TARGET_32BIT && reload_completed"
9109 [(parallel
9110 [(set (reg:CC CC_REGNUM)
9111 (compare:CC (const_int 1) (match_dup 1)))
9112 (set (match_dup 0)
9113 (minus:SI (const_int 1) (match_dup 1)))])
9114 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
9115 (set (match_dup 0) (const_int 0)))])
9116
9117 (define_split
9118 [(set (match_operand:SI 0 "s_register_operand" "")
9119 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9120 (match_operand:SI 2 "const_int_operand" "")))
9121 (clobber (reg:CC CC_REGNUM))]
9122 "TARGET_32BIT && reload_completed"
9123 [(parallel
9124 [(set (reg:CC CC_REGNUM)
9125 (compare:CC (match_dup 1) (match_dup 2)))
9126 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
9127 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
9128 (set (match_dup 0) (const_int 1)))]
9129 {
9130 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
9131 })
9132
9133 (define_split
9134 [(set (match_operand:SI 0 "s_register_operand" "")
9135 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9136 (match_operand:SI 2 "arm_add_operand" "")))
9137 (clobber (reg:CC CC_REGNUM))]
9138 "TARGET_32BIT && reload_completed"
9139 [(parallel
9140 [(set (reg:CC_NOOV CC_REGNUM)
9141 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
9142 (const_int 0)))
9143 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
9144 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
9145 (set (match_dup 0) (const_int 1)))])
9146
9147 (define_insn_and_split "*compare_scc"
9148 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9149 (match_operator:SI 1 "arm_comparison_operator"
9150 [(match_operand:SI 2 "s_register_operand" "r,r")
9151 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9152 (clobber (reg:CC CC_REGNUM))]
9153 "TARGET_32BIT"
9154 "#"
9155 "&& reload_completed"
9156 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
9157 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
9158 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
9159 {
9160 rtx tmp1;
9161 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
9162 operands[2], operands[3]);
9163 enum rtx_code rc = GET_CODE (operands[1]);
9164
9165 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
9166
9167 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9168 if (mode == CCFPmode || mode == CCFPEmode)
9169 rc = reverse_condition_maybe_unordered (rc);
9170 else
9171 rc = reverse_condition (rc);
9172 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9173 }
9174 [(set_attr "type" "multiple")]
9175 )
9176
9177 ;; Attempt to improve the sequence generated by the compare_scc splitters
9178 ;; so that it does not use conditional execution.
9179
9180 ;; Rd = (eq (reg1) (const_int 0)) // ARMv5
9181 ;; clz Rd, reg1
9182 ;; lsr Rd, Rd, #5
9183 (define_peephole2
9184 [(set (reg:CC CC_REGNUM)
9185 (compare:CC (match_operand:SI 1 "register_operand" "")
9186 (const_int 0)))
9187 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9188 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9189 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9190 (set (match_dup 0) (const_int 1)))]
9191 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9192 [(set (match_dup 0) (clz:SI (match_dup 1)))
9193 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9194 )
9195
9196 ;; Rd = (eq (reg1) (const_int 0)) // !ARMv5
9197 ;; negs Rd, reg1
9198 ;; adc Rd, Rd, reg1
9199 (define_peephole2
9200 [(set (reg:CC CC_REGNUM)
9201 (compare:CC (match_operand:SI 1 "register_operand" "")
9202 (const_int 0)))
9203 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9204 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9205 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9206 (set (match_dup 0) (const_int 1)))
9207 (match_scratch:SI 2 "r")]
9208 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9209 [(parallel
9210 [(set (reg:CC CC_REGNUM)
9211 (compare:CC (const_int 0) (match_dup 1)))
9212 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
9213 (set (match_dup 0)
9214 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
9215 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
9216 )
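;; This works because negs (rsbs Rd, reg1, #0) sets the carry flag only when
;; reg1 is zero (0 - reg1 borrows for every non-zero value), and the adc then
;; computes (-reg1) + reg1 + C, leaving exactly the carry in Rd.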
9217
9218 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
9219 ;; sub Rd, Reg1, reg2
9220 ;; clz Rd, Rd
9221 ;; lsr Rd, Rd, #5
9222 (define_peephole2
9223 [(set (reg:CC CC_REGNUM)
9224 (compare:CC (match_operand:SI 1 "register_operand" "")
9225 (match_operand:SI 2 "arm_rhs_operand" "")))
9226 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9227 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9228 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9229 (set (match_dup 0) (const_int 1)))]
9230 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
9231 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
9232 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
9233 (set (match_dup 0) (clz:SI (match_dup 0)))
9234 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9235 )
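;; reg1 == reg2 exactly when reg1 - reg2 is zero, so the clz/lsr trick above
;; applies directly to the difference.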
9236
9237
9238 ;; Rd = (eq (reg1) (reg2)) // ! ARMv5 or optimising for size.
9239 ;; sub T1, Reg1, reg2
9240 ;; negs Rd, T1
9241 ;; adc Rd, Rd, T1
9242 (define_peephole2
9243 [(set (reg:CC CC_REGNUM)
9244 (compare:CC (match_operand:SI 1 "register_operand" "")
9245 (match_operand:SI 2 "arm_rhs_operand" "")))
9246 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9247 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9248 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9249 (set (match_dup 0) (const_int 1)))
9250 (match_scratch:SI 3 "r")]
9251 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9252 [(set (match_dup 3) (match_dup 4))
9253 (parallel
9254 [(set (reg:CC CC_REGNUM)
9255 (compare:CC (const_int 0) (match_dup 3)))
9256 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
9257 (set (match_dup 0)
9258 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
9259 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
9260 "
9261 if (CONST_INT_P (operands[2]))
9262 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
9263 else
9264 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
9265 ")
9266
9267 (define_insn "*cond_move"
9268 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9269 (if_then_else:SI (match_operator 3 "equality_operator"
9270 [(match_operator 4 "arm_comparison_operator"
9271 [(match_operand 5 "cc_register" "") (const_int 0)])
9272 (const_int 0)])
9273 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9274 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9275 "TARGET_ARM"
9276 "*
9277 if (GET_CODE (operands[3]) == NE)
9278 {
9279 if (which_alternative != 1)
9280 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9281 if (which_alternative != 0)
9282 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9283 return \"\";
9284 }
9285 if (which_alternative != 0)
9286 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9287 if (which_alternative != 1)
9288 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9289 return \"\";
9290 "
9291 [(set_attr "conds" "use")
9292 (set_attr_alternative "type"
9293 [(if_then_else (match_operand 2 "const_int_operand" "")
9294 (const_string "mov_imm")
9295 (const_string "mov_reg"))
9296 (if_then_else (match_operand 1 "const_int_operand" "")
9297 (const_string "mov_imm")
9298 (const_string "mov_reg"))
9299 (const_string "multiple")])
9300 (set_attr "length" "4,4,8")]
9301 )
9302
9303 (define_insn "*cond_arith"
9304 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9305 (match_operator:SI 5 "shiftable_operator"
9306 [(match_operator:SI 4 "arm_comparison_operator"
9307 [(match_operand:SI 2 "s_register_operand" "r,r")
9308 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9309 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9310 (clobber (reg:CC CC_REGNUM))]
9311 "TARGET_ARM"
9312 "*
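  /* (x < 0) as a 0/1 value is just the sign bit, i.e. x lsr #31, so for an
     LT comparison against zero the operation can consume it directly as its
     second operand.  */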
9313 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9314 return \"%i5\\t%0, %1, %2, lsr #31\";
9315
9316 output_asm_insn (\"cmp\\t%2, %3\", operands);
9317 if (GET_CODE (operands[5]) == AND)
9318 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9319 else if (GET_CODE (operands[5]) == MINUS)
9320 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9321 else if (which_alternative != 0)
9322 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9323 return \"%i5%d4\\t%0, %1, #1\";
9324 "
9325 [(set_attr "conds" "clob")
9326 (set_attr "length" "12")
9327 (set_attr "type" "multiple")]
9328 )
9329
9330 (define_insn "*cond_sub"
9331 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9332 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9333 (match_operator:SI 4 "arm_comparison_operator"
9334 [(match_operand:SI 2 "s_register_operand" "r,r")
9335 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9336 (clobber (reg:CC CC_REGNUM))]
9337 "TARGET_ARM"
9338 "*
9339 output_asm_insn (\"cmp\\t%2, %3\", operands);
9340 if (which_alternative != 0)
9341 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9342 return \"sub%d4\\t%0, %1, #1\";
9343 "
9344 [(set_attr "conds" "clob")
9345 (set_attr "length" "8,12")
9346 (set_attr "type" "multiple")]
9347 )
9348
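;; In the conditional-compare patterns below, one compare is emitted
;; unconditionally and the second is predicated on the outcome of the first
;; (on Thumb-2 an IT instruction precedes the predicated compare).
;; Alternatives whose add-operand matches the 'L' constraint hold a constant
;; that is only valid when negated, so those compares are printed as cmn with
;; the %n modifier instead of cmp.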
9349 (define_insn "*cmp_ite0"
9350 [(set (match_operand 6 "dominant_cc_register" "")
9351 (compare
9352 (if_then_else:SI
9353 (match_operator 4 "arm_comparison_operator"
9354 [(match_operand:SI 0 "s_register_operand"
9355 "l,l,l,r,r,r,r,r,r")
9356 (match_operand:SI 1 "arm_add_operand"
9357 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9358 (match_operator:SI 5 "arm_comparison_operator"
9359 [(match_operand:SI 2 "s_register_operand"
9360 "l,r,r,l,l,r,r,r,r")
9361 (match_operand:SI 3 "arm_add_operand"
9362 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9363 (const_int 0))
9364 (const_int 0)))]
9365 "TARGET_32BIT"
9366 "*
9367 {
9368 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9369 {
9370 {\"cmp%d5\\t%0, %1\",
9371 \"cmp%d4\\t%2, %3\"},
9372 {\"cmn%d5\\t%0, #%n1\",
9373 \"cmp%d4\\t%2, %3\"},
9374 {\"cmp%d5\\t%0, %1\",
9375 \"cmn%d4\\t%2, #%n3\"},
9376 {\"cmn%d5\\t%0, #%n1\",
9377 \"cmn%d4\\t%2, #%n3\"}
9378 };
9379 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9380 {
9381 {\"cmp\\t%2, %3\",
9382 \"cmp\\t%0, %1\"},
9383 {\"cmp\\t%2, %3\",
9384 \"cmn\\t%0, #%n1\"},
9385 {\"cmn\\t%2, #%n3\",
9386 \"cmp\\t%0, %1\"},
9387 {\"cmn\\t%2, #%n3\",
9388 \"cmn\\t%0, #%n1\"}
9389 };
9390 static const char * const ite[2] =
9391 {
9392 \"it\\t%d5\",
9393 \"it\\t%d4\"
9394 };
9395 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9396 CMP_CMP, CMN_CMP, CMP_CMP,
9397 CMN_CMP, CMP_CMN, CMN_CMN};
9398 int swap =
9399 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9400
9401 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9402 if (TARGET_THUMB2) {
9403 output_asm_insn (ite[swap], operands);
9404 }
9405 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9406 return \"\";
9407 }"
9408 [(set_attr "conds" "set")
9409 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9410 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
9411 (set_attr "type" "multiple")
9412 (set_attr_alternative "length"
9413 [(const_int 6)
9414 (const_int 8)
9415 (const_int 8)
9416 (const_int 8)
9417 (const_int 8)
9418 (if_then_else (eq_attr "is_thumb" "no")
9419 (const_int 8)
9420 (const_int 10))
9421 (if_then_else (eq_attr "is_thumb" "no")
9422 (const_int 8)
9423 (const_int 10))
9424 (if_then_else (eq_attr "is_thumb" "no")
9425 (const_int 8)
9426 (const_int 10))
9427 (if_then_else (eq_attr "is_thumb" "no")
9428 (const_int 8)
9429 (const_int 10))])]
9430 )
9431
9432 (define_insn "*cmp_ite1"
9433 [(set (match_operand 6 "dominant_cc_register" "")
9434 (compare
9435 (if_then_else:SI
9436 (match_operator 4 "arm_comparison_operator"
9437 [(match_operand:SI 0 "s_register_operand"
9438 "l,l,l,r,r,r,r,r,r")
9439 (match_operand:SI 1 "arm_add_operand"
9440 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9441 (match_operator:SI 5 "arm_comparison_operator"
9442 [(match_operand:SI 2 "s_register_operand"
9443 "l,r,r,l,l,r,r,r,r")
9444 (match_operand:SI 3 "arm_add_operand"
9445 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9446 (const_int 1))
9447 (const_int 0)))]
9448 "TARGET_32BIT"
9449 "*
9450 {
9451 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9452 {
9453 {\"cmp\\t%0, %1\",
9454 \"cmp\\t%2, %3\"},
9455 {\"cmn\\t%0, #%n1\",
9456 \"cmp\\t%2, %3\"},
9457 {\"cmp\\t%0, %1\",
9458 \"cmn\\t%2, #%n3\"},
9459 {\"cmn\\t%0, #%n1\",
9460 \"cmn\\t%2, #%n3\"}
9461 };
9462 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9463 {
9464 {\"cmp%d4\\t%2, %3\",
9465 \"cmp%D5\\t%0, %1\"},
9466 {\"cmp%d4\\t%2, %3\",
9467 \"cmn%D5\\t%0, #%n1\"},
9468 {\"cmn%d4\\t%2, #%n3\",
9469 \"cmp%D5\\t%0, %1\"},
9470 {\"cmn%d4\\t%2, #%n3\",
9471 \"cmn%D5\\t%0, #%n1\"}
9472 };
9473 static const char * const ite[2] =
9474 {
9475 \"it\\t%d4\",
9476 \"it\\t%D5\"
9477 };
9478 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9479 CMP_CMP, CMN_CMP, CMP_CMP,
9480 CMN_CMP, CMP_CMN, CMN_CMN};
9481 int swap =
9482 comparison_dominates_p (GET_CODE (operands[5]),
9483 reverse_condition (GET_CODE (operands[4])));
9484
9485 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9486 if (TARGET_THUMB2) {
9487 output_asm_insn (ite[swap], operands);
9488 }
9489 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9490 return \"\";
9491 }"
9492 [(set_attr "conds" "set")
9493 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9494 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
9495 (set_attr_alternative "length"
9496 [(const_int 6)
9497 (const_int 8)
9498 (const_int 8)
9499 (const_int 8)
9500 (const_int 8)
9501 (if_then_else (eq_attr "is_thumb" "no")
9502 (const_int 8)
9503 (const_int 10))
9504 (if_then_else (eq_attr "is_thumb" "no")
9505 (const_int 8)
9506 (const_int 10))
9507 (if_then_else (eq_attr "is_thumb" "no")
9508 (const_int 8)
9509 (const_int 10))
9510 (if_then_else (eq_attr "is_thumb" "no")
9511 (const_int 8)
9512 (const_int 10))])
9513 (set_attr "type" "multiple")]
9514 )
9515
9516 (define_insn "*cmp_and"
9517 [(set (match_operand 6 "dominant_cc_register" "")
9518 (compare
9519 (and:SI
9520 (match_operator 4 "arm_comparison_operator"
9521 [(match_operand:SI 0 "s_register_operand"
9522 "l,l,l,r,r,r,r,r,r,r")
9523 (match_operand:SI 1 "arm_add_operand"
9524 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
9525 (match_operator:SI 5 "arm_comparison_operator"
9526 [(match_operand:SI 2 "s_register_operand"
9527 "l,r,r,l,l,r,r,r,r,r")
9528 (match_operand:SI 3 "arm_add_operand"
9529 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
9530 (const_int 0)))]
9531 "TARGET_32BIT"
9532 "*
9533 {
9534 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9535 {
9536 {\"cmp%d5\\t%0, %1\",
9537 \"cmp%d4\\t%2, %3\"},
9538 {\"cmn%d5\\t%0, #%n1\",
9539 \"cmp%d4\\t%2, %3\"},
9540 {\"cmp%d5\\t%0, %1\",
9541 \"cmn%d4\\t%2, #%n3\"},
9542 {\"cmn%d5\\t%0, #%n1\",
9543 \"cmn%d4\\t%2, #%n3\"}
9544 };
9545 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9546 {
9547 {\"cmp\\t%2, %3\",
9548 \"cmp\\t%0, %1\"},
9549 {\"cmp\\t%2, %3\",
9550 \"cmn\\t%0, #%n1\"},
9551 {\"cmn\\t%2, #%n3\",
9552 \"cmp\\t%0, %1\"},
9553 {\"cmn\\t%2, #%n3\",
9554 \"cmn\\t%0, #%n1\"}
9555 };
9556 static const char *const ite[2] =
9557 {
9558 \"it\\t%d5\",
9559 \"it\\t%d4\"
9560 };
9561 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
9562 CMP_CMP, CMN_CMP, CMP_CMP,
9563 CMP_CMP, CMN_CMP, CMP_CMN,
9564 CMN_CMN};
9565 int swap =
9566 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9567
9568 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9569 if (TARGET_THUMB2) {
9570 output_asm_insn (ite[swap], operands);
9571 }
9572 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9573 return \"\";
9574 }"
9575 [(set_attr "conds" "set")
9576 (set_attr "predicable" "no")
9577 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
9578 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
9579 (set_attr_alternative "length"
9580 [(const_int 6)
9581 (const_int 8)
9582 (const_int 8)
9583 (const_int 8)
9584 (const_int 8)
9585 (const_int 6)
9586 (if_then_else (eq_attr "is_thumb" "no")
9587 (const_int 8)
9588 (const_int 10))
9589 (if_then_else (eq_attr "is_thumb" "no")
9590 (const_int 8)
9591 (const_int 10))
9592 (if_then_else (eq_attr "is_thumb" "no")
9593 (const_int 8)
9594 (const_int 10))
9595 (if_then_else (eq_attr "is_thumb" "no")
9596 (const_int 8)
9597 (const_int 10))])
9598 (set_attr "type" "multiple")]
9599 )
9600
9601 (define_insn "*cmp_ior"
9602 [(set (match_operand 6 "dominant_cc_register" "")
9603 (compare
9604 (ior:SI
9605 (match_operator 4 "arm_comparison_operator"
9606 [(match_operand:SI 0 "s_register_operand"
9607 "l,l,l,r,r,r,r,r,r,r")
9608 (match_operand:SI 1 "arm_add_operand"
9609 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
9610 (match_operator:SI 5 "arm_comparison_operator"
9611 [(match_operand:SI 2 "s_register_operand"
9612 "l,r,r,l,l,r,r,r,r,r")
9613 (match_operand:SI 3 "arm_add_operand"
9614 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
9615 (const_int 0)))]
9616 "TARGET_32BIT"
9617 "*
9618 {
9619 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9620 {
9621 {\"cmp\\t%0, %1\",
9622 \"cmp\\t%2, %3\"},
9623 {\"cmn\\t%0, #%n1\",
9624 \"cmp\\t%2, %3\"},
9625 {\"cmp\\t%0, %1\",
9626 \"cmn\\t%2, #%n3\"},
9627 {\"cmn\\t%0, #%n1\",
9628 \"cmn\\t%2, #%n3\"}
9629 };
9630 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9631 {
9632 {\"cmp%D4\\t%2, %3\",
9633 \"cmp%D5\\t%0, %1\"},
9634 {\"cmp%D4\\t%2, %3\",
9635 \"cmn%D5\\t%0, #%n1\"},
9636 {\"cmn%D4\\t%2, #%n3\",
9637 \"cmp%D5\\t%0, %1\"},
9638 {\"cmn%D4\\t%2, #%n3\",
9639 \"cmn%D5\\t%0, #%n1\"}
9640 };
9641 static const char *const ite[2] =
9642 {
9643 \"it\\t%D4\",
9644 \"it\\t%D5\"
9645 };
9646 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
9647 CMP_CMP, CMN_CMP, CMP_CMP,
9648 CMP_CMP, CMN_CMP, CMP_CMN,
9649 CMN_CMN};
9650 int swap =
9651 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9652
9653 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9654 if (TARGET_THUMB2) {
9655 output_asm_insn (ite[swap], operands);
9656 }
9657 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9658 return \"\";
9659 }
9660 "
9661 [(set_attr "conds" "set")
9662 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
9663 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
9664 (set_attr_alternative "length"
9665 [(const_int 6)
9666 (const_int 8)
9667 (const_int 8)
9668 (const_int 8)
9669 (const_int 8)
9670 (const_int 6)
9671 (if_then_else (eq_attr "is_thumb" "no")
9672 (const_int 8)
9673 (const_int 10))
9674 (if_then_else (eq_attr "is_thumb" "no")
9675 (const_int 8)
9676 (const_int 10))
9677 (if_then_else (eq_attr "is_thumb" "no")
9678 (const_int 8)
9679 (const_int 10))
9680 (if_then_else (eq_attr "is_thumb" "no")
9681 (const_int 8)
9682 (const_int 10))])
9683 (set_attr "type" "multiple")]
9684 )
9685
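;; When arm_select_dominance_cc_mode reports that one comparison dominates
;; the other, the IOR (or AND) of two scc results can be computed as a
;; conditional-compare sequence followed by a single conditional set of 0 or
;; 1, instead of materialising each scc value separately.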
9686 (define_insn_and_split "*ior_scc_scc"
9687 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9688 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9689 [(match_operand:SI 1 "s_register_operand" "l,r")
9690 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9691 (match_operator:SI 6 "arm_comparison_operator"
9692 [(match_operand:SI 4 "s_register_operand" "l,r")
9693 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
9694 (clobber (reg:CC CC_REGNUM))]
9695 "TARGET_32BIT
9696 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9697 != CCmode)"
9698 "#"
9699 "TARGET_32BIT && reload_completed"
9700 [(set (match_dup 7)
9701 (compare
9702 (ior:SI
9703 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9704 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9705 (const_int 0)))
9706 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9707 "operands[7]
9708 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9709 DOM_CC_X_OR_Y),
9710 CC_REGNUM);"
9711 [(set_attr "conds" "clob")
9712 (set_attr "enabled_for_short_it" "yes,no")
9713 (set_attr "length" "16")
9714 (set_attr "type" "multiple")]
9715 )
9716
9717 ; If the above pattern is followed by a CMP insn, then the compare is
9718 ; redundant, since we can rework the conditional instruction that follows.
9719 (define_insn_and_split "*ior_scc_scc_cmp"
9720 [(set (match_operand 0 "dominant_cc_register" "")
9721 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9722 [(match_operand:SI 1 "s_register_operand" "l,r")
9723 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9724 (match_operator:SI 6 "arm_comparison_operator"
9725 [(match_operand:SI 4 "s_register_operand" "l,r")
9726 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9727 (const_int 0)))
9728 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9729 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9730 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9731 "TARGET_32BIT"
9732 "#"
9733 "TARGET_32BIT && reload_completed"
9734 [(set (match_dup 0)
9735 (compare
9736 (ior:SI
9737 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9738 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9739 (const_int 0)))
9740 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9741 ""
9742 [(set_attr "conds" "set")
9743 (set_attr "enabled_for_short_it" "yes,no")
9744 (set_attr "length" "16")
9745 (set_attr "type" "multiple")]
9746 )
9747
9748 (define_insn_and_split "*and_scc_scc"
9749 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9750 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9751 [(match_operand:SI 1 "s_register_operand" "l,r")
9752 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9753 (match_operator:SI 6 "arm_comparison_operator"
9754 [(match_operand:SI 4 "s_register_operand" "l,r")
9755 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
9756 (clobber (reg:CC CC_REGNUM))]
9757 "TARGET_32BIT
9758 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9759 != CCmode)"
9760 "#"
9761 "TARGET_32BIT && reload_completed
9762 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9763 != CCmode)"
9764 [(set (match_dup 7)
9765 (compare
9766 (and:SI
9767 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9768 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9769 (const_int 0)))
9770 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9771 "operands[7]
9772 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9773 DOM_CC_X_AND_Y),
9774 CC_REGNUM);"
9775 [(set_attr "conds" "clob")
9776 (set_attr "enabled_for_short_it" "yes,no")
9777 (set_attr "length" "16")
9778 (set_attr "type" "multiple")]
9779 )
9780
9781 ; If the above pattern is followed by a CMP insn, then the compare is
9782 ; redundant, since we can rework the conditional instruction that follows.
9783 (define_insn_and_split "*and_scc_scc_cmp"
9784 [(set (match_operand 0 "dominant_cc_register" "")
9785 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9786 [(match_operand:SI 1 "s_register_operand" "l,r")
9787 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9788 (match_operator:SI 6 "arm_comparison_operator"
9789 [(match_operand:SI 4 "s_register_operand" "l,r")
9790 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9791 (const_int 0)))
9792 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9793 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9794 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9795 "TARGET_32BIT"
9796 "#"
9797 "TARGET_32BIT && reload_completed"
9798 [(set (match_dup 0)
9799 (compare
9800 (and:SI
9801 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9802 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9803 (const_int 0)))
9804 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9805 ""
9806 [(set_attr "conds" "set")
9807 (set_attr "enabled_for_short_it" "yes,no")
9808 (set_attr "length" "16")
9809 (set_attr "type" "multiple")]
9810 )
9811
9812 ;; If there is no dominance in the comparison, then we can still save an
9813 ;; instruction in the AND case, since we know that the second compare
9814 ;; only needs to zero the value if false (if true, then the value is already
9815 ;; correct).
9816 (define_insn_and_split "*and_scc_scc_nodom"
9817 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
9818 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9819 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9820 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9821 (match_operator:SI 6 "arm_comparison_operator"
9822 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9823 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9824 (clobber (reg:CC CC_REGNUM))]
9825 "TARGET_32BIT
9826 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9827 == CCmode)"
9828 "#"
9829 "TARGET_32BIT && reload_completed"
9830 [(parallel [(set (match_dup 0)
9831 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9832 (clobber (reg:CC CC_REGNUM))])
9833 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9834 (set (match_dup 0)
9835 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9836 (match_dup 0)
9837 (const_int 0)))]
9838 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9839 operands[4], operands[5]),
9840 CC_REGNUM);
9841 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9842 operands[5]);"
9843 [(set_attr "conds" "clob")
9844 (set_attr "length" "20")
9845 (set_attr "type" "multiple")]
9846 )
9847
9848 (define_split
9849 [(set (reg:CC_NOOV CC_REGNUM)
9850 (compare:CC_NOOV (ior:SI
9851 (and:SI (match_operand:SI 0 "s_register_operand" "")
9852 (const_int 1))
9853 (match_operator:SI 1 "arm_comparison_operator"
9854 [(match_operand:SI 2 "s_register_operand" "")
9855 (match_operand:SI 3 "arm_add_operand" "")]))
9856 (const_int 0)))
9857 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9858 "TARGET_ARM"
9859 [(set (match_dup 4)
9860 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9861 (match_dup 0)))
9862 (set (reg:CC_NOOV CC_REGNUM)
9863 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9864 (const_int 0)))]
9865 "")
9866
9867 (define_split
9868 [(set (reg:CC_NOOV CC_REGNUM)
9869 (compare:CC_NOOV (ior:SI
9870 (match_operator:SI 1 "arm_comparison_operator"
9871 [(match_operand:SI 2 "s_register_operand" "")
9872 (match_operand:SI 3 "arm_add_operand" "")])
9873 (and:SI (match_operand:SI 0 "s_register_operand" "")
9874 (const_int 1)))
9875 (const_int 0)))
9876 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9877 "TARGET_ARM"
9878 [(set (match_dup 4)
9879 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9880 (match_dup 0)))
9881 (set (reg:CC_NOOV CC_REGNUM)
9882 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9883 (const_int 0)))]
9884 "")
9885 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9886
9887 (define_insn_and_split "*negscc"
9888 [(set (match_operand:SI 0 "s_register_operand" "=r")
9889 (neg:SI (match_operator 3 "arm_comparison_operator"
9890 [(match_operand:SI 1 "s_register_operand" "r")
9891 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9892 (clobber (reg:CC CC_REGNUM))]
9893 "TARGET_ARM"
9894 "#"
9895 "&& reload_completed"
9896 [(const_int 0)]
9897 {
9898 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
9899
9900 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9901 {
9902 /* Emit mov\\t%0, %1, asr #31 */
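      /* For signed %1, %1 asr #31 is -1 when %1 < 0 and 0 otherwise,
	 which is exactly -(%1 < 0), so no compare is needed.  */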
9903 emit_insn (gen_rtx_SET (operands[0],
9904 gen_rtx_ASHIFTRT (SImode,
9905 operands[1],
9906 GEN_INT (31))));
9907 DONE;
9908 }
9909 else if (GET_CODE (operands[3]) == NE)
9910 {
9911 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
9912 if (CONST_INT_P (operands[2]))
9913 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
9914 gen_int_mode (-INTVAL (operands[2]),
9915 SImode)));
9916 else
9917 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
9918
9919 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9920 gen_rtx_NE (SImode,
9921 cc_reg,
9922 const0_rtx),
9923 gen_rtx_SET (operands[0],
9924 GEN_INT (~0))));
9925 DONE;
9926 }
9927 else
9928 {
9929 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
9930 emit_insn (gen_rtx_SET (cc_reg,
9931 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
9932 enum rtx_code rc = GET_CODE (operands[3]);
9933
9934 rc = reverse_condition (rc);
9935 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9936 gen_rtx_fmt_ee (rc,
9937 VOIDmode,
9938 cc_reg,
9939 const0_rtx),
9940 gen_rtx_SET (operands[0], const0_rtx)));
9941 rc = GET_CODE (operands[3]);
9942 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9943 gen_rtx_fmt_ee (rc,
9944 VOIDmode,
9945 cc_reg,
9946 const0_rtx),
9947 gen_rtx_SET (operands[0],
9948 GEN_INT (~0))));
9949 DONE;
9950 }
9951 FAIL;
9952 }
9953 [(set_attr "conds" "clob")
9954 (set_attr "length" "12")
9955 (set_attr "type" "multiple")]
9956 )
9957
9958 (define_insn_and_split "movcond_addsi"
9959 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
9960 (if_then_else:SI
9961 (match_operator 5 "comparison_operator"
9962 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
9963 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
9964 (const_int 0)])
9965 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
9966 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
9967 (clobber (reg:CC CC_REGNUM))]
9968 "TARGET_32BIT"
9969 "#"
9970 "&& reload_completed"
9971 [(set (reg:CC_NOOV CC_REGNUM)
9972 (compare:CC_NOOV
9973 (plus:SI (match_dup 3)
9974 (match_dup 4))
9975 (const_int 0)))
9976 (set (match_dup 0) (match_dup 1))
9977 (cond_exec (match_dup 6)
9978 (set (match_dup 0) (match_dup 2)))]
9979 "
9980 {
9981 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
9982 operands[3], operands[4]);
9983 enum rtx_code rc = GET_CODE (operands[5]);
9984 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
9985 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
9986 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
9987 rc = reverse_condition (rc);
9988 else
9989 std::swap (operands[1], operands[2]);
9990
9991 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
9992 }
9993 "
9994 [(set_attr "conds" "clob")
9995 (set_attr "enabled_for_short_it" "no,yes,yes")
9996 (set_attr "type" "multiple")]
9997 )
9998
9999 (define_insn "movcond"
10000 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10001 (if_then_else:SI
10002 (match_operator 5 "arm_comparison_operator"
10003 [(match_operand:SI 3 "s_register_operand" "r,r,r")
10004 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
10005 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
10006 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
10007 (clobber (reg:CC CC_REGNUM))]
10008 "TARGET_ARM"
10009 "*
10010 if (GET_CODE (operands[5]) == LT
10011 && (operands[4] == const0_rtx))
10012 {
10013 if (which_alternative != 1 && REG_P (operands[1]))
10014 {
10015 if (operands[2] == const0_rtx)
10016 return \"and\\t%0, %1, %3, asr #31\";
10017 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
10018 }
10019 else if (which_alternative != 0 && REG_P (operands[2]))
10020 {
10021 if (operands[1] == const0_rtx)
10022 return \"bic\\t%0, %2, %3, asr #31\";
10023 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
10024 }
10025 /* The only case that falls through to here is when both ops 1 & 2
10026 are constants. */
10027 }
10028
10029 if (GET_CODE (operands[5]) == GE
10030 && (operands[4] == const0_rtx))
10031 {
10032 if (which_alternative != 1 && REG_P (operands[1]))
10033 {
10034 if (operands[2] == const0_rtx)
10035 return \"bic\\t%0, %1, %3, asr #31\";
10036 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
10037 }
10038 else if (which_alternative != 0 && REG_P (operands[2]))
10039 {
10040 if (operands[1] == const0_rtx)
10041 return \"and\\t%0, %2, %3, asr #31\";
10042 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
10043 }
10044 /* The only case that falls through to here is when both ops 1 & 2
10045 are constants. */
10046 }
10047 if (CONST_INT_P (operands[4])
10048 && !const_ok_for_arm (INTVAL (operands[4])))
10049 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
10050 else
10051 output_asm_insn (\"cmp\\t%3, %4\", operands);
10052 if (which_alternative != 0)
10053 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
10054 if (which_alternative != 1)
10055 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
10056 return \"\";
10057 "
10058 [(set_attr "conds" "clob")
10059 (set_attr "length" "8,8,12")
10060 (set_attr "type" "multiple")]
10061 )
10062
10063 ;; ??? The patterns below need checking for Thumb-2 usefulness.
10064
10065 (define_insn "*ifcompare_plus_move"
10066 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10067 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10068 [(match_operand:SI 4 "s_register_operand" "r,r")
10069 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10070 (plus:SI
10071 (match_operand:SI 2 "s_register_operand" "r,r")
10072 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
10073 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10074 (clobber (reg:CC CC_REGNUM))]
10075 "TARGET_ARM"
10076 "#"
10077 [(set_attr "conds" "clob")
10078 (set_attr "length" "8,12")
10079 (set_attr "type" "multiple")]
10080 )
10081
10082 (define_insn "*if_plus_move"
10083 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10084 (if_then_else:SI
10085 (match_operator 4 "arm_comparison_operator"
10086 [(match_operand 5 "cc_register" "") (const_int 0)])
10087 (plus:SI
10088 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10089 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
10090 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
10091 "TARGET_ARM"
10092 "@
10093 add%d4\\t%0, %2, %3
10094 sub%d4\\t%0, %2, #%n3
10095 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
10096 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
10097 [(set_attr "conds" "use")
10098 (set_attr "length" "4,4,8,8")
10099 (set_attr_alternative "type"
10100 [(if_then_else (match_operand 3 "const_int_operand" "")
10101 (const_string "alu_imm" )
10102 (const_string "alu_sreg"))
10103 (const_string "alu_imm")
10104 (const_string "multiple")
10105 (const_string "multiple")])]
10106 )
10107
10108 (define_insn "*ifcompare_move_plus"
10109 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10110 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10111 [(match_operand:SI 4 "s_register_operand" "r,r")
10112 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10113 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10114 (plus:SI
10115 (match_operand:SI 2 "s_register_operand" "r,r")
10116 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
10117 (clobber (reg:CC CC_REGNUM))]
10118 "TARGET_ARM"
10119 "#"
10120 [(set_attr "conds" "clob")
10121 (set_attr "length" "8,12")
10122 (set_attr "type" "multiple")]
10123 )
10124
10125 (define_insn "*if_move_plus"
10126 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10127 (if_then_else:SI
10128 (match_operator 4 "arm_comparison_operator"
10129 [(match_operand 5 "cc_register" "") (const_int 0)])
10130 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
10131 (plus:SI
10132 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10133 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
10134 "TARGET_ARM"
10135 "@
10136 add%D4\\t%0, %2, %3
10137 sub%D4\\t%0, %2, #%n3
10138 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
10139 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
10140 [(set_attr "conds" "use")
10141 (set_attr "length" "4,4,8,8")
10142 (set_attr_alternative "type"
10143 [(if_then_else (match_operand 3 "const_int_operand" "")
10144 (const_string "alu_imm" )
10145 (const_string "alu_sreg"))
10146 (const_string "alu_imm")
10147 (const_string "multiple")
10148 (const_string "multiple")])]
10149 )
10150
10151 (define_insn "*ifcompare_arith_arith"
10152 [(set (match_operand:SI 0 "s_register_operand" "=r")
10153 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
10154 [(match_operand:SI 5 "s_register_operand" "r")
10155 (match_operand:SI 6 "arm_add_operand" "rIL")])
10156 (match_operator:SI 8 "shiftable_operator"
10157 [(match_operand:SI 1 "s_register_operand" "r")
10158 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10159 (match_operator:SI 7 "shiftable_operator"
10160 [(match_operand:SI 3 "s_register_operand" "r")
10161 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
10162 (clobber (reg:CC CC_REGNUM))]
10163 "TARGET_ARM"
10164 "#"
10165 [(set_attr "conds" "clob")
10166 (set_attr "length" "12")
10167 (set_attr "type" "multiple")]
10168 )
10169
10170 (define_insn "*if_arith_arith"
10171 [(set (match_operand:SI 0 "s_register_operand" "=r")
10172 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
10173 [(match_operand 8 "cc_register" "") (const_int 0)])
10174 (match_operator:SI 6 "shiftable_operator"
10175 [(match_operand:SI 1 "s_register_operand" "r")
10176 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10177 (match_operator:SI 7 "shiftable_operator"
10178 [(match_operand:SI 3 "s_register_operand" "r")
10179 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
10180 "TARGET_ARM"
10181 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
10182 [(set_attr "conds" "use")
10183 (set_attr "length" "8")
10184 (set_attr "type" "multiple")]
10185 )
10186
10187 (define_insn "*ifcompare_arith_move"
10188 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10189 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10190 [(match_operand:SI 2 "s_register_operand" "r,r")
10191 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
10192 (match_operator:SI 7 "shiftable_operator"
10193 [(match_operand:SI 4 "s_register_operand" "r,r")
10194 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
10195 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10196 (clobber (reg:CC CC_REGNUM))]
10197 "TARGET_ARM"
10198 "*
10199   /* If we have an operation where (op x 0) is the identity operation, the
10200      conditional operator is LT or GE, we are comparing against zero, and
10201      everything is in registers, then we can do this in two instructions.  */
10202 if (operands[3] == const0_rtx
10203 && GET_CODE (operands[7]) != AND
10204 && REG_P (operands[5])
10205 && REG_P (operands[1])
10206 && REGNO (operands[1]) == REGNO (operands[4])
10207 && REGNO (operands[4]) != REGNO (operands[0]))
10208 {
10209 if (GET_CODE (operands[6]) == LT)
10210 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10211 else if (GET_CODE (operands[6]) == GE)
10212 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10213 }
10214 if (CONST_INT_P (operands[3])
10215 && !const_ok_for_arm (INTVAL (operands[3])))
10216 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
10217 else
10218 output_asm_insn (\"cmp\\t%2, %3\", operands);
10219 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
10220 if (which_alternative != 0)
10221 return \"mov%D6\\t%0, %1\";
10222 return \"\";
10223 "
10224 [(set_attr "conds" "clob")
10225 (set_attr "length" "8,12")
10226 (set_attr "type" "multiple")]
10227 )
10228
10229 (define_insn "*if_arith_move"
10230 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10231 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10232 [(match_operand 6 "cc_register" "") (const_int 0)])
10233 (match_operator:SI 5 "shiftable_operator"
10234 [(match_operand:SI 2 "s_register_operand" "r,r")
10235 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10236 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
10237 "TARGET_ARM"
10238 "@
10239 %I5%d4\\t%0, %2, %3
10240 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
10241 [(set_attr "conds" "use")
10242 (set_attr "length" "4,8")
10243 (set_attr_alternative "type"
10244 [(if_then_else (match_operand 3 "const_int_operand" "")
10245 (const_string "alu_shift_imm" )
10246 (const_string "alu_shift_reg"))
10247 (const_string "multiple")])]
10248 )
10249
10250 (define_insn "*ifcompare_move_arith"
10251 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10252 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10253 [(match_operand:SI 4 "s_register_operand" "r,r")
10254 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10255 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10256 (match_operator:SI 7 "shiftable_operator"
10257 [(match_operand:SI 2 "s_register_operand" "r,r")
10258 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10259 (clobber (reg:CC CC_REGNUM))]
10260 "TARGET_ARM"
10261 "*
10262 /* If we have an operation where (op x 0) is the identity operation, the
10263 conditional operator is LT or GE, we are comparing against zero, and
10264 everything is in registers, then we can do this in two instructions. */
10265 if (operands[5] == const0_rtx
10266 && GET_CODE (operands[7]) != AND
10267 && REG_P (operands[3])
10268 && REG_P (operands[1])
10269 && REGNO (operands[1]) == REGNO (operands[2])
10270 && REGNO (operands[2]) != REGNO (operands[0]))
10271 {
10272 if (GET_CODE (operands[6]) == GE)
10273 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10274 else if (GET_CODE (operands[6]) == LT)
10275 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10276 }
10277
10278 if (CONST_INT_P (operands[5])
10279 && !const_ok_for_arm (INTVAL (operands[5])))
10280 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
10281 else
10282 output_asm_insn (\"cmp\\t%4, %5\", operands);
10283
10284 if (which_alternative != 0)
10285 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
10286 return \"%I7%D6\\t%0, %2, %3\";
10287 "
10288 [(set_attr "conds" "clob")
10289 (set_attr "length" "8,12")
10290 (set_attr "type" "multiple")]
10291 )
10292
10293 (define_insn "*if_move_arith"
10294 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10295 (if_then_else:SI
10296 (match_operator 4 "arm_comparison_operator"
10297 [(match_operand 6 "cc_register" "") (const_int 0)])
10298 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10299 (match_operator:SI 5 "shiftable_operator"
10300 [(match_operand:SI 2 "s_register_operand" "r,r")
10301 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
10302 "TARGET_ARM"
10303 "@
10304 %I5%D4\\t%0, %2, %3
10305 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
10306 [(set_attr "conds" "use")
10307 (set_attr "length" "4,8")
10308 (set_attr_alternative "type"
10309 [(if_then_else (match_operand 3 "const_int_operand" "")
10310 (const_string "alu_shift_imm" )
10311 (const_string "alu_shift_reg"))
10312 (const_string "multiple")])]
10313 )
10314
10315 (define_insn "*ifcompare_move_not"
10316 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10317 (if_then_else:SI
10318 (match_operator 5 "arm_comparison_operator"
10319 [(match_operand:SI 3 "s_register_operand" "r,r")
10320 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10321 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10322 (not:SI
10323 (match_operand:SI 2 "s_register_operand" "r,r"))))
10324 (clobber (reg:CC CC_REGNUM))]
10325 "TARGET_ARM"
10326 "#"
10327 [(set_attr "conds" "clob")
10328 (set_attr "length" "8,12")
10329 (set_attr "type" "multiple")]
10330 )
10331
10332 (define_insn "*if_move_not"
10333 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10334 (if_then_else:SI
10335 (match_operator 4 "arm_comparison_operator"
10336 [(match_operand 3 "cc_register" "") (const_int 0)])
10337 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10338 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10339 "TARGET_ARM"
10340 "@
10341 mvn%D4\\t%0, %2
10342 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
10343 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
10344 [(set_attr "conds" "use")
10346 (set_attr "length" "4,8,8")
10347 (set_attr "type" "mvn_reg,multiple,multiple")]
10348 )
10349
10350 (define_insn "*ifcompare_not_move"
10351 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10352 (if_then_else:SI
10353 (match_operator 5 "arm_comparison_operator"
10354 [(match_operand:SI 3 "s_register_operand" "r,r")
10355 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10356 (not:SI
10357 (match_operand:SI 2 "s_register_operand" "r,r"))
10358 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10359 (clobber (reg:CC CC_REGNUM))]
10360 "TARGET_ARM"
10361 "#"
10362 [(set_attr "conds" "clob")
10363 (set_attr "length" "8,12")
10364 (set_attr "type" "multiple")]
10365 )
10366
10367 (define_insn "*if_not_move"
10368 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10369 (if_then_else:SI
10370 (match_operator 4 "arm_comparison_operator"
10371 [(match_operand 3 "cc_register" "") (const_int 0)])
10372 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10373 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10374 "TARGET_ARM"
10375 "@
10376 mvn%d4\\t%0, %2
10377 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
10378 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
10379 [(set_attr "conds" "use")
10380 (set_attr "type" "mvn_reg,multiple,multiple")
10381 (set_attr "length" "4,8,8")]
10382 )
10383
10384 (define_insn "*ifcompare_shift_move"
10385 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10386 (if_then_else:SI
10387 (match_operator 6 "arm_comparison_operator"
10388 [(match_operand:SI 4 "s_register_operand" "r,r")
10389 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10390 (match_operator:SI 7 "shift_operator"
10391 [(match_operand:SI 2 "s_register_operand" "r,r")
10392 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
10393 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10394 (clobber (reg:CC CC_REGNUM))]
10395 "TARGET_ARM"
10396 "#"
10397 [(set_attr "conds" "clob")
10398 (set_attr "length" "8,12")
10399 (set_attr "type" "multiple")]
10400 )
10401
10402 (define_insn "*if_shift_move"
10403 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10404 (if_then_else:SI
10405 (match_operator 5 "arm_comparison_operator"
10406 [(match_operand 6 "cc_register" "") (const_int 0)])
10407 (match_operator:SI 4 "shift_operator"
10408 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10409 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
10410 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10411 "TARGET_ARM"
10412 "@
10413 mov%d5\\t%0, %2%S4
10414 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
10415 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
10416 [(set_attr "conds" "use")
10417 (set_attr "shift" "2")
10418 (set_attr "length" "4,8,8")
10419 (set_attr_alternative "type"
10420 [(if_then_else (match_operand 3 "const_int_operand" "")
10421 (const_string "mov_shift" )
10422 (const_string "mov_shift_reg"))
10423 (const_string "multiple")
10424 (const_string "multiple")])]
10425 )
10426
10427 (define_insn "*ifcompare_move_shift"
10428 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10429 (if_then_else:SI
10430 (match_operator 6 "arm_comparison_operator"
10431 [(match_operand:SI 4 "s_register_operand" "r,r")
10432 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10433 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10434 (match_operator:SI 7 "shift_operator"
10435 [(match_operand:SI 2 "s_register_operand" "r,r")
10436 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
10437 (clobber (reg:CC CC_REGNUM))]
10438 "TARGET_ARM"
10439 "#"
10440 [(set_attr "conds" "clob")
10441 (set_attr "length" "8,12")
10442 (set_attr "type" "multiple")]
10443 )
10444
10445 (define_insn "*if_move_shift"
10446 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10447 (if_then_else:SI
10448 (match_operator 5 "arm_comparison_operator"
10449 [(match_operand 6 "cc_register" "") (const_int 0)])
10450 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10451 (match_operator:SI 4 "shift_operator"
10452 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10453 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10454 "TARGET_ARM"
10455 "@
10456 mov%D5\\t%0, %2%S4
10457 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10458 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10459 [(set_attr "conds" "use")
10460 (set_attr "shift" "2")
10461 (set_attr "length" "4,8,8")
10462 (set_attr_alternative "type"
10463 [(if_then_else (match_operand 3 "const_int_operand" "")
10464 (const_string "mov_shift" )
10465 (const_string "mov_shift_reg"))
10466 (const_string "multiple")
10467 (const_string "multiple")])]
10468 )
10469
10470 (define_insn "*ifcompare_shift_shift"
10471 [(set (match_operand:SI 0 "s_register_operand" "=r")
10472 (if_then_else:SI
10473 (match_operator 7 "arm_comparison_operator"
10474 [(match_operand:SI 5 "s_register_operand" "r")
10475 (match_operand:SI 6 "arm_add_operand" "rIL")])
10476 (match_operator:SI 8 "shift_operator"
10477 [(match_operand:SI 1 "s_register_operand" "r")
10478 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10479 (match_operator:SI 9 "shift_operator"
10480 [(match_operand:SI 3 "s_register_operand" "r")
10481 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10482 (clobber (reg:CC CC_REGNUM))]
10483 "TARGET_ARM"
10484 "#"
10485 [(set_attr "conds" "clob")
10486 (set_attr "length" "12")
10487 (set_attr "type" "multiple")]
10488 )
10489
10490 (define_insn "*if_shift_shift"
10491 [(set (match_operand:SI 0 "s_register_operand" "=r")
10492 (if_then_else:SI
10493 (match_operator 5 "arm_comparison_operator"
10494 [(match_operand 8 "cc_register" "") (const_int 0)])
10495 (match_operator:SI 6 "shift_operator"
10496 [(match_operand:SI 1 "s_register_operand" "r")
10497 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10498 (match_operator:SI 7 "shift_operator"
10499 [(match_operand:SI 3 "s_register_operand" "r")
10500 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10501 "TARGET_ARM"
10502 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10503 [(set_attr "conds" "use")
10504 (set_attr "shift" "1")
10505 (set_attr "length" "8")
10506 (set (attr "type") (if_then_else
10507 (and (match_operand 2 "const_int_operand" "")
10508 (match_operand 4 "const_int_operand" ""))
10509 (const_string "mov_shift")
10510 (const_string "mov_shift_reg")))]
10511 )
10512
10513 (define_insn "*ifcompare_not_arith"
10514 [(set (match_operand:SI 0 "s_register_operand" "=r")
10515 (if_then_else:SI
10516 (match_operator 6 "arm_comparison_operator"
10517 [(match_operand:SI 4 "s_register_operand" "r")
10518 (match_operand:SI 5 "arm_add_operand" "rIL")])
10519 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10520 (match_operator:SI 7 "shiftable_operator"
10521 [(match_operand:SI 2 "s_register_operand" "r")
10522 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10523 (clobber (reg:CC CC_REGNUM))]
10524 "TARGET_ARM"
10525 "#"
10526 [(set_attr "conds" "clob")
10527 (set_attr "length" "12")
10528 (set_attr "type" "multiple")]
10529 )
10530
10531 (define_insn "*if_not_arith"
10532 [(set (match_operand:SI 0 "s_register_operand" "=r")
10533 (if_then_else:SI
10534 (match_operator 5 "arm_comparison_operator"
10535 [(match_operand 4 "cc_register" "") (const_int 0)])
10536 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10537 (match_operator:SI 6 "shiftable_operator"
10538 [(match_operand:SI 2 "s_register_operand" "r")
10539 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10540 "TARGET_ARM"
10541 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10542 [(set_attr "conds" "use")
10543 (set_attr "type" "mvn_reg")
10544 (set_attr "length" "8")]
10545 )
10546
10547 (define_insn "*ifcompare_arith_not"
10548 [(set (match_operand:SI 0 "s_register_operand" "=r")
10549 (if_then_else:SI
10550 (match_operator 6 "arm_comparison_operator"
10551 [(match_operand:SI 4 "s_register_operand" "r")
10552 (match_operand:SI 5 "arm_add_operand" "rIL")])
10553 (match_operator:SI 7 "shiftable_operator"
10554 [(match_operand:SI 2 "s_register_operand" "r")
10555 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10556 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10557 (clobber (reg:CC CC_REGNUM))]
10558 "TARGET_ARM"
10559 "#"
10560 [(set_attr "conds" "clob")
10561 (set_attr "length" "12")
10562 (set_attr "type" "multiple")]
10563 )
10564
10565 (define_insn "*if_arith_not"
10566 [(set (match_operand:SI 0 "s_register_operand" "=r")
10567 (if_then_else:SI
10568 (match_operator 5 "arm_comparison_operator"
10569 [(match_operand 4 "cc_register" "") (const_int 0)])
10570 (match_operator:SI 6 "shiftable_operator"
10571 [(match_operand:SI 2 "s_register_operand" "r")
10572 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10573 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10574 "TARGET_ARM"
10575 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10576 [(set_attr "conds" "use")
10577 (set_attr "type" "multiple")
10578 (set_attr "length" "8")]
10579 )
10580
10581 (define_insn "*ifcompare_neg_move"
10582 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10583 (if_then_else:SI
10584 (match_operator 5 "arm_comparison_operator"
10585 [(match_operand:SI 3 "s_register_operand" "r,r")
10586 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10587 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10588 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10589 (clobber (reg:CC CC_REGNUM))]
10590 "TARGET_ARM"
10591 "#"
10592 [(set_attr "conds" "clob")
10593 (set_attr "length" "8,12")
10594 (set_attr "type" "multiple")]
10595 )
10596
10597 (define_insn_and_split "*if_neg_move"
10598 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
10599 (if_then_else:SI
10600 (match_operator 4 "arm_comparison_operator"
10601 [(match_operand 3 "cc_register" "") (const_int 0)])
10602 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
10603 (match_operand:SI 1 "s_register_operand" "0,0")))]
10604 "TARGET_32BIT"
10605 "#"
10606 "&& reload_completed"
10607 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
10608 (set (match_dup 0) (neg:SI (match_dup 2))))]
10609 ""
10610 [(set_attr "conds" "use")
10611 (set_attr "length" "4")
10612 (set_attr "arch" "t2,32")
10613 (set_attr "enabled_for_short_it" "yes,no")
10614 (set_attr "type" "logic_shift_imm")]
10615 )
10616
10617 (define_insn "*ifcompare_move_neg"
10618 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10619 (if_then_else:SI
10620 (match_operator 5 "arm_comparison_operator"
10621 [(match_operand:SI 3 "s_register_operand" "r,r")
10622 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10623 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10624 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10625 (clobber (reg:CC CC_REGNUM))]
10626 "TARGET_ARM"
10627 "#"
10628 [(set_attr "conds" "clob")
10629 (set_attr "length" "8,12")
10630 (set_attr "type" "multiple")]
10631 )
10632
10633 (define_insn_and_split "*if_move_neg"
10634 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
10635 (if_then_else:SI
10636 (match_operator 4 "arm_comparison_operator"
10637 [(match_operand 3 "cc_register" "") (const_int 0)])
10638 (match_operand:SI 1 "s_register_operand" "0,0")
10639 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
10640 "TARGET_32BIT"
10641 "#"
10642 "&& reload_completed"
10643 [(cond_exec (match_dup 5)
10644 (set (match_dup 0) (neg:SI (match_dup 2))))]
10645 {
10646 machine_mode mode = GET_MODE (operands[3]);
10647 rtx_code rc = GET_CODE (operands[4]);
10648
10649 if (mode == CCFPmode || mode == CCFPEmode)
10650 rc = reverse_condition_maybe_unordered (rc);
10651 else
10652 rc = reverse_condition (rc);
10653
10654 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
10655 }
10656 [(set_attr "conds" "use")
10657 (set_attr "length" "4")
10658 (set_attr "arch" "t2,32")
10659 (set_attr "enabled_for_short_it" "yes,no")
10660 (set_attr "type" "logic_shift_imm")]
10661 )
10662
10663 (define_insn "*arith_adjacentmem"
10664 [(set (match_operand:SI 0 "s_register_operand" "=r")
10665 (match_operator:SI 1 "shiftable_operator"
10666 [(match_operand:SI 2 "memory_operand" "m")
10667 (match_operand:SI 3 "memory_operand" "m")]))
10668 (clobber (match_scratch:SI 4 "=r"))]
10669 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10670 "*
10671 {
10672 rtx ldm[3];
10673 rtx arith[4];
10674 rtx base_reg;
10675 HOST_WIDE_INT val1 = 0, val2 = 0;
10676
10677 if (REGNO (operands[0]) > REGNO (operands[4]))
10678 {
10679 ldm[1] = operands[4];
10680 ldm[2] = operands[0];
10681 }
10682 else
10683 {
10684 ldm[1] = operands[0];
10685 ldm[2] = operands[4];
10686 }
10687
10688 base_reg = XEXP (operands[2], 0);
10689
10690 if (!REG_P (base_reg))
10691 {
10692 val1 = INTVAL (XEXP (base_reg, 1));
10693 base_reg = XEXP (base_reg, 0);
10694 }
10695
10696 if (!REG_P (XEXP (operands[3], 0)))
10697 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10698
10699 arith[0] = operands[0];
10700 arith[3] = operands[1];
10701
10702 if (val1 < val2)
10703 {
10704 arith[1] = ldm[1];
10705 arith[2] = ldm[2];
10706 }
10707 else
10708 {
10709 arith[1] = ldm[2];
10710 arith[2] = ldm[1];
10711 }
10712
10713 ldm[0] = base_reg;
10714 if (val1 != 0 && val2 != 0)
10715 {
10716 rtx ops[3];
10717
10718 if (val1 == 4 || val2 == 4)
10719 /* Other val must be 8, since we know they are adjacent and neither
10720 is zero. */
10721 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
10722 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10723 {
10724 ldm[0] = ops[0] = operands[4];
10725 ops[1] = base_reg;
10726 ops[2] = GEN_INT (val1);
10727 output_add_immediate (ops);
10728 if (val1 < val2)
10729 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10730 else
10731 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10732 }
10733 else
10734 {
10735 /* Offset is out of range for a single add, so use two ldr instructions. */
10736 ops[0] = ldm[1];
10737 ops[1] = base_reg;
10738 ops[2] = GEN_INT (val1);
10739 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10740 ops[0] = ldm[2];
10741 ops[2] = GEN_INT (val2);
10742 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10743 }
10744 }
10745 else if (val1 != 0)
10746 {
10747 if (val1 < val2)
10748 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10749 else
10750 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10751 }
10752 else
10753 {
10754 if (val1 < val2)
10755 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10756 else
10757 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10758 }
10759 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10760 return \"\";
10761 }"
10762 [(set_attr "length" "12")
10763 (set_attr "predicable" "yes")
10764 (set_attr "type" "load_4")]
10765 )
10766
10767 ; This pattern is never tried by combine, so do it as a peephole
10768
10769 (define_peephole2
10770 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10771 (match_operand:SI 1 "arm_general_register_operand" ""))
10772 (set (reg:CC CC_REGNUM)
10773 (compare:CC (match_dup 1) (const_int 0)))]
10774 "TARGET_ARM"
10775 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10776 (set (match_dup 0) (match_dup 1))])]
10777 ""
10778 )
10779
10780 (define_split
10781 [(set (match_operand:SI 0 "s_register_operand" "")
10782 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10783 (const_int 0))
10784 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10785 [(match_operand:SI 3 "s_register_operand" "")
10786 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10787 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10788 "TARGET_ARM"
10789 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10790 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10791 (match_dup 5)))]
10792 ""
10793 )
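;; Illustrative sketch only (not part of the machine description): the split
;; above relies on ~(y >> 31) being an all-ones mask exactly when y >= 0.
;; In C (assuming the arithmetic right shift GCC defines for signed int):
;;
;;   int
;;   ge_zero_mask (int y)
;;   {
;;     return ~(y >> 31);   /* -1 if y >= 0, 0 if y < 0 */
;;   }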
10794
10795 ;; This split can be used because CC_Z mode implies that the following
10796 ;; branch will be an equality, or an unsigned inequality, so the sign
10797 ;; extension is not needed.
10798
10799 (define_split
10800 [(set (reg:CC_Z CC_REGNUM)
10801 (compare:CC_Z
10802 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10803 (const_int 24))
10804 (match_operand 1 "const_int_operand" "")))
10805 (clobber (match_scratch:SI 2 ""))]
10806 "TARGET_ARM
10807 && ((UINTVAL (operands[1]))
10808 == ((UINTVAL (operands[1])) >> 24) << 24)"
10809 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10810 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10811 "
10812 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10813 "
10814 )
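;; Illustrative sketch only: the equivalence the split above exploits, in C.
;; Assuming the constant K has zero low 24 bits (the condition checked above),
;; comparing the byte shifted into the top bits against K is the same equality
;; test as comparing the zero-extended byte against K >> 24:
;;
;;   int
;;   high_byte_equal (unsigned char *p, unsigned int k)
;;   {
;;     /* Both expressions below always agree when (k >> 24) << 24 == k.  */
;;     int shifted  = ((unsigned int) *p << 24) == k;
;;     int extended = (unsigned int) *p == (k >> 24);
;;     return shifted == extended;   /* always 1 */
;;   }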
10815 ;; ??? Check the patterns above for Thumb-2 usefulness
10816
10817 (define_expand "prologue"
10818 [(clobber (const_int 0))]
10819 "TARGET_EITHER"
10820 "if (TARGET_32BIT)
10821 arm_expand_prologue ();
10822 else
10823 thumb1_expand_prologue ();
10824 DONE;
10825 "
10826 )
10827
10828 (define_expand "epilogue"
10829 [(clobber (const_int 0))]
10830 "TARGET_EITHER"
10831 "
10832 if (crtl->calls_eh_return)
10833 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
10834 if (TARGET_THUMB1)
10835 {
10836 thumb1_expand_epilogue ();
10837 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10838 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10839 }
10840 else if (HAVE_return)
10841 {
10842 /* HAVE_return is testing for USE_RETURN_INSN (FALSE). Hence,
10843 no need for explicit testing again. */
10844 emit_jump_insn (gen_return ());
10845 }
10846 else if (TARGET_32BIT)
10847 {
10848 arm_expand_epilogue (true);
10849 }
10850 DONE;
10851 "
10852 )
10853
10854 ;; Note - although unspec_volatiles USE all hard registers,
10855 ;; USEs are ignored after reload has completed. Thus we need
10856 ;; to add an unspec of the link register to ensure that flow
10857 ;; does not think that it is unused by the sibcall branch that
10858 ;; will replace the standard function epilogue.
10859 (define_expand "sibcall_epilogue"
10860 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
10861 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10862 "TARGET_32BIT"
10863 "
10864 arm_expand_epilogue (false);
10865 DONE;
10866 "
10867 )
10868
10869 (define_expand "eh_epilogue"
10870 [(use (match_operand:SI 0 "register_operand"))
10871 (use (match_operand:SI 1 "register_operand"))
10872 (use (match_operand:SI 2 "register_operand"))]
10873 "TARGET_EITHER"
10874 "
10875 {
10876 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10877 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
10878 {
10879 rtx ra = gen_rtx_REG (Pmode, 2);
10880
10881 emit_move_insn (ra, operands[2]);
10882 operands[2] = ra;
10883 }
10884 /* This is a hack -- we may have crystallized the function type too
10885 early. */
10886 cfun->machine->func_type = 0;
10887 }"
10888 )
10889
10890 ;; This split is only used during output to reduce the number of patterns
10891 ;; that need assembler instructions added to them. We allowed the setting
10892 ;; of the conditions to be implicit during rtl generation so that
10893 ;; the conditional compare patterns would work. However, this conflicts to
10894 ;; some extent with the conditional data operations, so we have to split them
10895 ;; up again here.
10896
10897 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10898 ;; conditional execution sufficient?
10899
10900 (define_split
10901 [(set (match_operand:SI 0 "s_register_operand" "")
10902 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10903 [(match_operand 2 "" "") (match_operand 3 "" "")])
10904 (match_dup 0)
10905 (match_operand 4 "" "")))
10906 (clobber (reg:CC CC_REGNUM))]
10907 "TARGET_ARM && reload_completed"
10908 [(set (match_dup 5) (match_dup 6))
10909 (cond_exec (match_dup 7)
10910 (set (match_dup 0) (match_dup 4)))]
10911 "
10912 {
10913 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10914 operands[2], operands[3]);
10915 enum rtx_code rc = GET_CODE (operands[1]);
10916
10917 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10918 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10919 if (mode == CCFPmode || mode == CCFPEmode)
10920 rc = reverse_condition_maybe_unordered (rc);
10921 else
10922 rc = reverse_condition (rc);
10923
10924 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10925 }"
10926 )
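;; Illustrative sketch only: why the FP compare modes above use
;; reverse_condition_maybe_unordered rather than reverse_condition.  With
;; possible NaN operands, !(a < b) is not (a >= b); the reverse of LT is
;; UNGE ("unordered or greater-or-equal").  In C:
;;
;;   int lt   (double a, double b) { return a < b; }
;;   int unge (double a, double b) { return !(a < b); } /* also true for NaNs */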
10927
10928 (define_split
10929 [(set (match_operand:SI 0 "s_register_operand" "")
10930 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10931 [(match_operand 2 "" "") (match_operand 3 "" "")])
10932 (match_operand 4 "" "")
10933 (match_dup 0)))
10934 (clobber (reg:CC CC_REGNUM))]
10935 "TARGET_ARM && reload_completed"
10936 [(set (match_dup 5) (match_dup 6))
10937 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10938 (set (match_dup 0) (match_dup 4)))]
10939 "
10940 {
10941 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10942 operands[2], operands[3]);
10943
10944 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10945 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10946 }"
10947 )
10948
10949 (define_split
10950 [(set (match_operand:SI 0 "s_register_operand" "")
10951 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10952 [(match_operand 2 "" "") (match_operand 3 "" "")])
10953 (match_operand 4 "" "")
10954 (match_operand 5 "" "")))
10955 (clobber (reg:CC CC_REGNUM))]
10956 "TARGET_ARM && reload_completed"
10957 [(set (match_dup 6) (match_dup 7))
10958 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10959 (set (match_dup 0) (match_dup 4)))
10960 (cond_exec (match_dup 8)
10961 (set (match_dup 0) (match_dup 5)))]
10962 "
10963 {
10964 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10965 operands[2], operands[3]);
10966 enum rtx_code rc = GET_CODE (operands[1]);
10967
10968 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10969 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10970 if (mode == CCFPmode || mode == CCFPEmode)
10971 rc = reverse_condition_maybe_unordered (rc);
10972 else
10973 rc = reverse_condition (rc);
10974
10975 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10976 }"
10977 )
10978
10979 (define_split
10980 [(set (match_operand:SI 0 "s_register_operand" "")
10981 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10982 [(match_operand:SI 2 "s_register_operand" "")
10983 (match_operand:SI 3 "arm_add_operand" "")])
10984 (match_operand:SI 4 "arm_rhs_operand" "")
10985 (not:SI
10986 (match_operand:SI 5 "s_register_operand" ""))))
10987 (clobber (reg:CC CC_REGNUM))]
10988 "TARGET_ARM && reload_completed"
10989 [(set (match_dup 6) (match_dup 7))
10990 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10991 (set (match_dup 0) (match_dup 4)))
10992 (cond_exec (match_dup 8)
10993 (set (match_dup 0) (not:SI (match_dup 5))))]
10994 "
10995 {
10996 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10997 operands[2], operands[3]);
10998 enum rtx_code rc = GET_CODE (operands[1]);
10999
11000 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11001 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11002 if (mode == CCFPmode || mode == CCFPEmode)
11003 rc = reverse_condition_maybe_unordered (rc);
11004 else
11005 rc = reverse_condition (rc);
11006
11007 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
11008 }"
11009 )
11010
11011 (define_insn "*cond_move_not"
11012 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11013 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
11014 [(match_operand 3 "cc_register" "") (const_int 0)])
11015 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11016 (not:SI
11017 (match_operand:SI 2 "s_register_operand" "r,r"))))]
11018 "TARGET_ARM"
11019 "@
11020 mvn%D4\\t%0, %2
11021 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
11022 [(set_attr "conds" "use")
11023 (set_attr "type" "mvn_reg,multiple")
11024 (set_attr "length" "4,8")]
11025 )
11026
11027 ;; The next two patterns occur when an AND operation is followed by an
11028 ;; scc insn sequence.
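;; Illustrative sketch only: the value a one-bit sign_extract yields, in C.
;; Bit N of operand 1 is extracted as a signed one-bit field, giving -1 when
;; the bit is set and 0 otherwise (assuming 0 <= n < 32):
;;
;;   int
;;   sign_extract_bit (unsigned int x, int n)
;;   {
;;     return (x & (1u << n)) ? -1 : 0;
;;   }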
11029
11030 (define_insn "*sign_extract_onebit"
11031 [(set (match_operand:SI 0 "s_register_operand" "=r")
11032 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11033 (const_int 1)
11034 (match_operand:SI 2 "const_int_operand" "n")))
11035 (clobber (reg:CC CC_REGNUM))]
11036 "TARGET_ARM"
11037 "*
11038 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11039 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
11040 return \"mvnne\\t%0, #0\";
11041 "
11042 [(set_attr "conds" "clob")
11043 (set_attr "length" "8")
11044 (set_attr "type" "multiple")]
11045 )
11046
11047 (define_insn "*not_signextract_onebit"
11048 [(set (match_operand:SI 0 "s_register_operand" "=r")
11049 (not:SI
11050 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11051 (const_int 1)
11052 (match_operand:SI 2 "const_int_operand" "n"))))
11053 (clobber (reg:CC CC_REGNUM))]
11054 "TARGET_ARM"
11055 "*
11056 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11057 output_asm_insn (\"tst\\t%1, %2\", operands);
11058 output_asm_insn (\"mvneq\\t%0, #0\", operands);
11059 return \"movne\\t%0, #0\";
11060 "
11061 [(set_attr "conds" "clob")
11062 (set_attr "length" "12")
11063 (set_attr "type" "multiple")]
11064 )
11065 ;; ??? The above patterns need auditing for Thumb-2
11066
11067 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
11068 ;; expressions. For simplicity, the first register is also in the unspec
11069 ;; part.
11070 ;; To avoid the use of a GNU extension, the length attribute is computed
11071 ;; in a C function arm_attr_length_push_multi.
11072 (define_insn "*push_multi"
11073 [(match_parallel 2 "multi_register_push"
11074 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
11075 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
11076 UNSPEC_PUSH_MULT))])]
11077 ""
11078 "*
11079 {
11080 int num_saves = XVECLEN (operands[2], 0);
11081
11082 /* For the StrongARM at least it is faster to
11083 use STR to store only a single register.
11084 In Thumb mode always use push, and the assembler will pick
11085 something appropriate. */
11086 if (num_saves == 1 && TARGET_ARM)
11087 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
11088 else
11089 {
11090 int i;
11091 char pattern[100];
11092
11093 if (TARGET_32BIT)
11094 strcpy (pattern, \"push%?\\t{%1\");
11095 else
11096 strcpy (pattern, \"push\\t{%1\");
11097
11098 for (i = 1; i < num_saves; i++)
11099 {
11100 strcat (pattern, \", %|\");
11101 strcat (pattern,
11102 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
11103 }
11104
11105 strcat (pattern, \"}\");
11106 output_asm_insn (pattern, operands);
11107 }
11108
11109 return \"\";
11110 }"
11111 [(set_attr "type" "store_16")
11112 (set (attr "length")
11113 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
11114 )
11115
11116 (define_insn "stack_tie"
11117 [(set (mem:BLK (scratch))
11118 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
11119 (match_operand:SI 1 "s_register_operand" "rk")]
11120 UNSPEC_PRLG_STK))]
11121 ""
11122 ""
11123 [(set_attr "length" "0")
11124 (set_attr "type" "block")]
11125 )
11126
11127 ;; Pop (as used in epilogue RTL)
11128 ;;
11129 (define_insn "*load_multiple_with_writeback"
11130 [(match_parallel 0 "load_multiple_operation"
11131 [(set (match_operand:SI 1 "s_register_operand" "+rk")
11132 (plus:SI (match_dup 1)
11133 (match_operand:SI 2 "const_int_I_operand" "I")))
11134 (set (match_operand:SI 3 "s_register_operand" "=rk")
11135 (mem:SI (match_dup 1)))
11136 ])]
11137 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11138 "*
11139 {
11140 arm_output_multireg_pop (operands, /*return_pc=*/false,
11141 /*cond=*/const_true_rtx,
11142 /*reverse=*/false,
11143 /*update=*/true);
11144 return \"\";
11145 }
11146 "
11147 [(set_attr "type" "load_16")
11148 (set_attr "predicable" "yes")
11149 (set (attr "length")
11150 (symbol_ref "arm_attr_length_pop_multi (operands,
11151 /*return_pc=*/false,
11152 /*write_back_p=*/true)"))]
11153 )
11154
11155 ;; Pop with return (as used in epilogue RTL)
11156 ;;
11157 ;; This instruction is generated when the registers are popped at the end of the
11158 ;; epilogue. Here, instead of popping the value into LR and then generating a
11159 ;; jump to LR, the value is popped into PC directly. Hence, the pattern is
11160 ;; combined with (return).
11161 (define_insn "*pop_multiple_with_writeback_and_return"
11162 [(match_parallel 0 "pop_multiple_return"
11163 [(return)
11164 (set (match_operand:SI 1 "s_register_operand" "+rk")
11165 (plus:SI (match_dup 1)
11166 (match_operand:SI 2 "const_int_I_operand" "I")))
11167 (set (match_operand:SI 3 "s_register_operand" "=rk")
11168 (mem:SI (match_dup 1)))
11169 ])]
11170 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11171 "*
11172 {
11173 arm_output_multireg_pop (operands, /*return_pc=*/true,
11174 /*cond=*/const_true_rtx,
11175 /*reverse=*/false,
11176 /*update=*/true);
11177 return \"\";
11178 }
11179 "
11180 [(set_attr "type" "load_16")
11181 (set_attr "predicable" "yes")
11182 (set (attr "length")
11183 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11184 /*write_back_p=*/true)"))]
11185 )
11186
11187 (define_insn "*pop_multiple_with_return"
11188 [(match_parallel 0 "pop_multiple_return"
11189 [(return)
11190 (set (match_operand:SI 2 "s_register_operand" "=rk")
11191 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11192 ])]
11193 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11194 "*
11195 {
11196 arm_output_multireg_pop (operands, /*return_pc=*/true,
11197 /*cond=*/const_true_rtx,
11198 /*reverse=*/false,
11199 /*update=*/false);
11200 return \"\";
11201 }
11202 "
11203 [(set_attr "type" "load_16")
11204 (set_attr "predicable" "yes")
11205 (set (attr "length")
11206 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11207 /*write_back_p=*/false)"))]
11208 )
11209
11210 ;; Load into PC and return
11211 (define_insn "*ldr_with_return"
11212 [(return)
11213 (set (reg:SI PC_REGNUM)
11214 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
11215 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11216 "ldr%?\t%|pc, [%0], #4"
11217 [(set_attr "type" "load_4")
11218 (set_attr "predicable" "yes")]
11219 )
11220 ;; Pop for floating point registers (as used in epilogue RTL)
11221 (define_insn "*vfp_pop_multiple_with_writeback"
11222 [(match_parallel 0 "pop_multiple_fp"
11223 [(set (match_operand:SI 1 "s_register_operand" "+rk")
11224 (plus:SI (match_dup 1)
11225 (match_operand:SI 2 "const_int_I_operand" "I")))
11226 (set (match_operand:DF 3 "vfp_hard_register_operand" "")
11227 (mem:DF (match_dup 1)))])]
11228 "TARGET_32BIT && TARGET_HARD_FLOAT"
11229 "*
11230 {
11231 int num_regs = XVECLEN (operands[0], 0);
11232 char pattern[100];
11233 rtx op_list[2];
11234 strcpy (pattern, \"vldm\\t\");
11235 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
11236 strcat (pattern, \"!, {\");
11237 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
11238 strcat (pattern, \"%P0\");
11239 if ((num_regs - 1) > 1)
11240 {
11241 strcat (pattern, \"-%P1\");
11242 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
11243 }
11244
11245 strcat (pattern, \"}\");
11246 output_asm_insn (pattern, op_list);
11247 return \"\";
11248 }
11249 "
11250 [(set_attr "type" "load_16")
11251 (set_attr "conds" "unconditional")
11252 (set_attr "predicable" "no")]
11253 )
11254
11255 ;; Special patterns for dealing with the constant pool
11256
11257 (define_insn "align_4"
11258 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
11259 "TARGET_EITHER"
11260 "*
11261 assemble_align (32);
11262 return \"\";
11263 "
11264 [(set_attr "type" "no_insn")]
11265 )
11266
11267 (define_insn "align_8"
11268 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
11269 "TARGET_EITHER"
11270 "*
11271 assemble_align (64);
11272 return \"\";
11273 "
11274 [(set_attr "type" "no_insn")]
11275 )
11276
11277 (define_insn "consttable_end"
11278 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
11279 "TARGET_EITHER"
11280 "*
11281 making_const_table = FALSE;
11282 return \"\";
11283 "
11284 [(set_attr "type" "no_insn")]
11285 )
11286
11287 (define_insn "consttable_1"
11288 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
11289 "TARGET_EITHER"
11290 "*
11291 making_const_table = TRUE;
11292 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
11293 assemble_zeros (3);
11294 return \"\";
11295 "
11296 [(set_attr "length" "4")
11297 (set_attr "type" "no_insn")]
11298 )
11299
11300 (define_insn "consttable_2"
11301 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
11302 "TARGET_EITHER"
11303 "*
11304 {
11305 rtx x = operands[0];
11306 making_const_table = TRUE;
11307 switch (GET_MODE_CLASS (GET_MODE (x)))
11308 {
11309 case MODE_FLOAT:
11310 arm_emit_fp16_const (x);
11311 break;
11312 default:
11313 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
11314 assemble_zeros (2);
11315 break;
11316 }
11317 return \"\";
11318 }"
11319 [(set_attr "length" "4")
11320 (set_attr "type" "no_insn")]
11321 )
11322
11323 (define_insn "consttable_4"
11324 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
11325 "TARGET_EITHER"
11326 "*
11327 {
11328 rtx x = operands[0];
11329 making_const_table = TRUE;
11330 scalar_float_mode float_mode;
11331 if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
11332 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
11333 else
11334 {
11335 /* XXX: Sometimes gcc does something really dumb and ends up with
11336 a HIGH in a constant pool entry, usually because it's trying to
11337 load into a VFP register. We know this will always be used in
11338 combination with a LO_SUM which ignores the high bits, so just
11339 strip off the HIGH. */
11340 if (GET_CODE (x) == HIGH)
11341 x = XEXP (x, 0);
11342 assemble_integer (x, 4, BITS_PER_WORD, 1);
11343 mark_symbol_refs_as_used (x);
11344 }
11345 return \"\";
11346 }"
11347 [(set_attr "length" "4")
11348 (set_attr "type" "no_insn")]
11349 )
11350
11351 (define_insn "consttable_8"
11352 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
11353 "TARGET_EITHER"
11354 "*
11355 {
11356 making_const_table = TRUE;
11357 scalar_float_mode float_mode;
11358 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
11359 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
11360 float_mode, BITS_PER_WORD);
11361 else
11362 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
11363 return \"\";
11364 }"
11365 [(set_attr "length" "8")
11366 (set_attr "type" "no_insn")]
11367 )
11368
11369 (define_insn "consttable_16"
11370 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
11371 "TARGET_EITHER"
11372 "*
11373 {
11374 making_const_table = TRUE;
11375 scalar_float_mode float_mode;
11376 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
11377 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
11378 float_mode, BITS_PER_WORD);
11379 else
11380 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
11381 return \"\";
11382 }"
11383 [(set_attr "length" "16")
11384 (set_attr "type" "no_insn")]
11385 )
11386
11387 ;; V5 instructions.
11388
11389 (define_insn "clzsi2"
11390 [(set (match_operand:SI 0 "s_register_operand" "=r")
11391 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11392 "TARGET_32BIT && arm_arch5t"
11393 "clz%?\\t%0, %1"
11394 [(set_attr "predicable" "yes")
11395 (set_attr "type" "clz")])
11396
11397 (define_insn "rbitsi2"
11398 [(set (match_operand:SI 0 "s_register_operand" "=r")
11399 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
11400 "TARGET_32BIT && arm_arch_thumb2"
11401 "rbit%?\\t%0, %1"
11402 [(set_attr "predicable" "yes")
11403 (set_attr "type" "clz")])
11404
11405 ;; Keep this as a CTZ expression until after reload and then split
11406 ;; into RBIT + CLZ. Since RBIT is represented as an UNSPEC, it is unlikely
11407 ;; to fold with any other expression.
11408
11409 (define_insn_and_split "ctzsi2"
11410 [(set (match_operand:SI 0 "s_register_operand" "=r")
11411 (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11412 "TARGET_32BIT && arm_arch_thumb2"
11413 "#"
11414 "&& reload_completed"
11415 [(const_int 0)]
11416 "
11417 emit_insn (gen_rbitsi2 (operands[0], operands[1]));
11418 emit_insn (gen_clzsi2 (operands[0], operands[0]));
11419 DONE;
11420 ")
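;; Illustrative sketch only: what the RBIT + CLZ split above computes, in C
;; (assuming x != 0; the loop stands in for the RBIT instruction and
;; __builtin_clz for CLZ):
;;
;;   unsigned int
;;   ctz_via_rbit_clz (unsigned int x)
;;   {
;;     unsigned int r = 0;
;;     for (int i = 0; i < 32; i++)        /* bit-reverse, as RBIT does */
;;       r |= ((x >> i) & 1u) << (31 - i);
;;     return __builtin_clz (r);           /* leading zeros of the reversal
;;                                            == trailing zeros of x */
;;   }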
11421
11422 ;; V5E instructions.
11423
11424 (define_insn "prefetch"
11425 [(prefetch (match_operand:SI 0 "address_operand" "p")
11426 (match_operand:SI 1 "" "")
11427 (match_operand:SI 2 "" ""))]
11428 "TARGET_32BIT && arm_arch5te"
11429 "pld\\t%a0"
11430 [(set_attr "type" "load_4")]
11431 )
11432
11433 ;; General predication pattern
11434
11435 (define_cond_exec
11436 [(match_operator 0 "arm_comparison_operator"
11437 [(match_operand 1 "cc_register" "")
11438 (const_int 0)])]
11439 "TARGET_32BIT
11440 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
11441 ""
11442 [(set_attr "predicated" "yes")]
11443 )
11444
11445 (define_insn "force_register_use"
11446 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
11447 ""
11448 "%@ %0 needed"
11449 [(set_attr "length" "0")
11450 (set_attr "type" "no_insn")]
11451 )
11452
11453
11454 ;; Patterns for exception handling
11455
11456 (define_expand "eh_return"
11457 [(use (match_operand 0 "general_operand"))]
11458 "TARGET_EITHER"
11459 "
11460 {
11461 if (TARGET_32BIT)
11462 emit_insn (gen_arm_eh_return (operands[0]));
11463 else
11464 emit_insn (gen_thumb_eh_return (operands[0]));
11465 DONE;
11466 }"
11467 )
11468
11469 ;; We can't expand this before we know where the link register is stored.
11470 (define_insn_and_split "arm_eh_return"
11471 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
11472 VUNSPEC_EH_RETURN)
11473 (clobber (match_scratch:SI 1 "=&r"))]
11474 "TARGET_ARM"
11475 "#"
11476 "&& reload_completed"
11477 [(const_int 0)]
11478 "
11479 {
11480 arm_set_return_address (operands[0], operands[1]);
11481 DONE;
11482 }"
11483 )
11484
11485 \f
11486 ;; TLS support
11487
11488 (define_insn "load_tp_hard"
11489 [(set (match_operand:SI 0 "register_operand" "=r")
11490 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
11491 "TARGET_HARD_TP"
11492 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
11493 [(set_attr "predicable" "yes")
11494 (set_attr "type" "mrs")]
11495 )
11496
11497 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11498 (define_insn "load_tp_soft_fdpic"
11499 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11500 (clobber (reg:SI FDPIC_REGNUM))
11501 (clobber (reg:SI LR_REGNUM))
11502 (clobber (reg:SI IP_REGNUM))
11503 (clobber (reg:CC CC_REGNUM))]
11504 "TARGET_SOFT_TP && TARGET_FDPIC"
11505 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11506 [(set_attr "conds" "clob")
11507 (set_attr "type" "branch")]
11508 )
11509
11510 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11511 (define_insn "load_tp_soft"
11512 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11513 (clobber (reg:SI LR_REGNUM))
11514 (clobber (reg:SI IP_REGNUM))
11515 (clobber (reg:CC CC_REGNUM))]
11516 "TARGET_SOFT_TP && !TARGET_FDPIC"
11517 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11518 [(set_attr "conds" "clob")
11519 (set_attr "type" "branch")]
11520 )
11521
11522 ;; tls descriptor call
11523 (define_insn "tlscall"
11524 [(set (reg:SI R0_REGNUM)
11525 (unspec:SI [(reg:SI R0_REGNUM)
11526 (match_operand:SI 0 "" "X")
11527 (match_operand 1 "" "")] UNSPEC_TLS))
11528 (clobber (reg:SI R1_REGNUM))
11529 (clobber (reg:SI LR_REGNUM))
11530 (clobber (reg:SI CC_REGNUM))]
11531 "TARGET_GNU2_TLS"
11532 {
11533 targetm.asm_out.internal_label (asm_out_file, "LPIC",
11534 INTVAL (operands[1]));
11535 return "bl\\t%c0(tlscall)";
11536 }
11537 [(set_attr "conds" "clob")
11538 (set_attr "length" "4")
11539 (set_attr "type" "branch")]
11540 )
11541
11542 ;; For thread pointer builtin
11543 (define_expand "get_thread_pointersi"
11544 [(match_operand:SI 0 "s_register_operand")]
11545 ""
11546 "
11547 {
11548 arm_load_tp (operands[0]);
11549 DONE;
11550 }")
11551
11552 ;;
11553
11554 ;; We only care about the lower 16 bits of the constant
11555 ;; being inserted into the upper 16 bits of the register.
11556 (define_insn "*arm_movtas_ze"
11557 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
11558 (const_int 16)
11559 (const_int 16))
11560 (match_operand:SI 1 "const_int_operand" ""))]
11561 "TARGET_HAVE_MOVT"
11562 "@
11563 movt%?\t%0, %L1
11564 movt\t%0, %L1"
11565 [(set_attr "arch" "32,v8mb")
11566 (set_attr "predicable" "yes")
11567 (set_attr "length" "4")
11568 (set_attr "type" "alu_sreg")]
11569 )
11570
11571 (define_insn "*arm_rev"
11572 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
11573 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
11574 "arm_arch6"
11575 "@
11576 rev\t%0, %1
11577 rev%?\t%0, %1
11578 rev%?\t%0, %1"
11579 [(set_attr "arch" "t1,t2,32")
11580 (set_attr "length" "2,2,4")
11581 (set_attr "predicable" "no,yes,yes")
11582 (set_attr "type" "rev")]
11583 )
11584
11585 (define_expand "arm_legacy_rev"
11586 [(set (match_operand:SI 2 "s_register_operand")
11587 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
11588 (const_int 16))
11589 (match_dup 1)))
11590 (set (match_dup 2)
11591 (lshiftrt:SI (match_dup 2)
11592 (const_int 8)))
11593 (set (match_operand:SI 3 "s_register_operand")
11594 (rotatert:SI (match_dup 1)
11595 (const_int 8)))
11596 (set (match_dup 2)
11597 (and:SI (match_dup 2)
11598 (const_int -65281)))
11599 (set (match_operand:SI 0 "s_register_operand")
11600 (xor:SI (match_dup 3)
11601 (match_dup 2)))]
11602 "TARGET_32BIT"
11603 ""
11604 )
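;; Illustrative sketch only: the classic four-operation byte swap the
;; arm_legacy_rev sequence above implements, in C (ror denotes a 32-bit
;; rotate right; the constant -65281 above is 0xffff00ff):
;;
;;   static inline unsigned int
;;   ror (unsigned int x, int n)
;;   {
;;     return (x >> n) | (x << (32 - n));
;;   }
;;
;;   unsigned int
;;   legacy_bswap (unsigned int x)
;;   {
;;     unsigned int t = (x ^ ror (x, 16)) >> 8;  /* XOR of swap partners */
;;     t &= 0xffff00ffu;                         /* clear bits 8..15 */
;;     return ror (x, 8) ^ t;                    /* reassemble, bytes reversed */
;;   }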
11605
11606 ;; Reuse temporaries to keep register pressure down.
11607 (define_expand "thumb_legacy_rev"
11608 [(set (match_operand:SI 2 "s_register_operand")
11609 (ashift:SI (match_operand:SI 1 "s_register_operand")
11610 (const_int 24)))
11611 (set (match_operand:SI 3 "s_register_operand")
11612 (lshiftrt:SI (match_dup 1)
11613 (const_int 24)))
11614 (set (match_dup 3)
11615 (ior:SI (match_dup 3)
11616 (match_dup 2)))
11617 (set (match_operand:SI 4 "s_register_operand")
11618 (const_int 16))
11619 (set (match_operand:SI 5 "s_register_operand")
11620 (rotatert:SI (match_dup 1)
11621 (match_dup 4)))
11622 (set (match_dup 2)
11623 (ashift:SI (match_dup 5)
11624 (const_int 24)))
11625 (set (match_dup 5)
11626 (lshiftrt:SI (match_dup 5)
11627 (const_int 24)))
11628 (set (match_dup 5)
11629 (ior:SI (match_dup 5)
11630 (match_dup 2)))
11631 (set (match_dup 5)
11632 (rotatert:SI (match_dup 5)
11633 (match_dup 4)))
11634 (set (match_operand:SI 0 "s_register_operand")
11635 (ior:SI (match_dup 5)
11636 (match_dup 3)))]
11637 "TARGET_THUMB"
11638 ""
11639 )
11640
11641 ;; ARM-specific expansion of signed mod by power of 2
11642 ;; using conditional negate.
11643 ;; For r0 % n where n is a power of 2 produce:
11644 ;; rsbs r1, r0, #0
11645 ;; and r0, r0, #(n - 1)
11646 ;; and r1, r1, #(n - 1)
11647 ;; rsbpl r0, r1, #0
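;; Illustrative sketch only: the same computation in C, for a power-of-two n
;; and C's truncated (round-toward-zero) signed modulo (assuming x != INT_MIN
;; so the negation stays well defined in C):
;;
;;   int
;;   mod_pow2 (int x, int n)
;;   {
;;     int pos = x & (n - 1);          /* candidate for x >= 0 */
;;     int neg = (-x) & (n - 1);       /* candidate magnitude for x < 0 */
;;     return x < 0 ? -neg : pos;
;;   }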
11648
11649 (define_expand "modsi3"
11650 [(match_operand:SI 0 "register_operand")
11651 (match_operand:SI 1 "register_operand")
11652 (match_operand:SI 2 "const_int_operand")]
11653 "TARGET_32BIT"
11654 {
11655 HOST_WIDE_INT val = INTVAL (operands[2]);
11656
11657 if (val <= 0
11658 || exact_log2 (val) <= 0)
11659 FAIL;
11660
11661 rtx mask = GEN_INT (val - 1);
11662
11663 /* In the special case of r0 % 2 we can do the even shorter:
11664 cmp r0, #0
11665 and r0, r0, #1
11666 rsblt r0, r0, #0. */
11667
11668 if (val == 2)
11669 {
11670 rtx cc_reg = arm_gen_compare_reg (LT,
11671 operands[1], const0_rtx, NULL_RTX);
11672 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
11673 rtx masked = gen_reg_rtx (SImode);
11674
11675 emit_insn (gen_andsi3 (masked, operands[1], mask));
11676 emit_move_insn (operands[0],
11677 gen_rtx_IF_THEN_ELSE (SImode, cond,
11678 gen_rtx_NEG (SImode,
11679 masked),
11680 masked));
11681 DONE;
11682 }
11683
11684 rtx neg_op = gen_reg_rtx (SImode);
11685 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
11686 operands[1]));
11687
11688 /* Extract the condition register and mode. */
11689 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
11690 rtx cc_reg = SET_DEST (cmp);
11691 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
11692
11693 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
11694
11695 rtx masked_neg = gen_reg_rtx (SImode);
11696 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
11697
11698 /* We want a conditional negate here, but emitting COND_EXEC rtxes
11699 during expand does not always work. Do an IF_THEN_ELSE instead. */
11700 emit_move_insn (operands[0],
11701 gen_rtx_IF_THEN_ELSE (SImode, cond,
11702 gen_rtx_NEG (SImode, masked_neg),
11703 operands[0]));
11704
11705
11706 DONE;
11707 }
11708 )
11709
11710 (define_expand "bswapsi2"
11711 [(set (match_operand:SI 0 "s_register_operand")
11712 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
11713 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
11714 "
11715 if (!arm_arch6)
11716 {
11717 rtx op2 = gen_reg_rtx (SImode);
11718 rtx op3 = gen_reg_rtx (SImode);
11719
11720 if (TARGET_THUMB)
11721 {
11722 rtx op4 = gen_reg_rtx (SImode);
11723 rtx op5 = gen_reg_rtx (SImode);
11724
11725 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
11726 op2, op3, op4, op5));
11727 }
11728 else
11729 {
11730 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
11731 op2, op3));
11732 }
11733
11734 DONE;
11735 }
11736 "
11737 )
11738
11739 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
11740 ;; and unsigned variants, respectively. For rev16, expose
11741 ;; byte-swapping in the lower 16 bits only.
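;; Illustrative sketch only, in C: revsh swaps the two bytes of the low
;; halfword and sign-extends the result; rev16 (on HImode here) swaps the
;; two bytes without extension:
;;
;;   int
;;   revsh_c (short x)
;;   {
;;     unsigned short u = (unsigned short) x;
;;     return (short) (unsigned short) ((u << 8) | (u >> 8));
;;   }
;;
;;   unsigned short
;;   rev16_hi_c (unsigned short x)
;;   {
;;     return (unsigned short) ((x << 8) | (x >> 8));
;;   }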
11742 (define_insn "*arm_revsh"
11743 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
11744 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
11745 "arm_arch6"
11746 "@
11747 revsh\t%0, %1
11748 revsh%?\t%0, %1
11749 revsh%?\t%0, %1"
11750 [(set_attr "arch" "t1,t2,32")
11751 (set_attr "length" "2,2,4")
11752 (set_attr "type" "rev")]
11753 )
11754
11755 (define_insn "*arm_rev16"
11756 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
11757 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
11758 "arm_arch6"
11759 "@
11760 rev16\t%0, %1
11761 rev16%?\t%0, %1
11762 rev16%?\t%0, %1"
11763 [(set_attr "arch" "t1,t2,32")
11764 (set_attr "length" "2,2,4")
11765 (set_attr "type" "rev")]
11766 )
11767
11768 ;; There are no canonicalisation rules for the position of the lshiftrt, ashift
11769 ;; operations within an IOR/AND RTX; therefore we have two patterns, one
11770 ;; matching each valid permutation.
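;; Illustrative sketch only: the SImode rev16 operation both patterns below
;; describe (a byte swap within each halfword), in C:
;;
;;   unsigned int
;;   rev16_si (unsigned int x)
;;   {
;;     return ((x << 8) & 0xff00ff00u) | ((x >> 8) & 0x00ff00ffu);
;;   }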
11771
11772 (define_insn "arm_rev16si2"
11773 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11774 (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
11775 (const_int 8))
11776 (match_operand:SI 3 "const_int_operand" "n,n,n"))
11777 (and:SI (lshiftrt:SI (match_dup 1)
11778 (const_int 8))
11779 (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
11780 "arm_arch6
11781 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11782 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11783 "rev16\\t%0, %1"
11784 [(set_attr "arch" "t1,t2,32")
11785 (set_attr "length" "2,2,4")
11786 (set_attr "type" "rev")]
11787 )
11788
11789 (define_insn "arm_rev16si2_alt"
11790 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11791 (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
11792 (const_int 8))
11793 (match_operand:SI 2 "const_int_operand" "n,n,n"))
11794 (and:SI (ashift:SI (match_dup 1)
11795 (const_int 8))
11796 (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
11797 "arm_arch6
11798 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11799 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11800 "rev16\\t%0, %1"
11801 [(set_attr "arch" "t1,t2,32")
11802 (set_attr "length" "2,2,4")
11803 (set_attr "type" "rev")]
11804 )
11805
11806 (define_expand "bswaphi2"
11807 [(set (match_operand:HI 0 "s_register_operand")
11808 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
11809 "arm_arch6"
11810 ""
11811 )
11812
11813 ;; Patterns for LDRD/STRD in Thumb2 mode
11814
11815 (define_insn "*thumb2_ldrd"
11816 [(set (match_operand:SI 0 "s_register_operand" "=r")
11817 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11818 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
11819 (set (match_operand:SI 3 "s_register_operand" "=r")
11820 (mem:SI (plus:SI (match_dup 1)
11821 (match_operand:SI 4 "const_int_operand" ""))))]
11822 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11823 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
11824 && (operands_ok_ldrd_strd (operands[0], operands[3],
11825 operands[1], INTVAL (operands[2]),
11826 false, true))"
11827 "ldrd%?\t%0, %3, [%1, %2]"
11828 [(set_attr "type" "load_8")
11829 (set_attr "predicable" "yes")])
11830
11831 (define_insn "*thumb2_ldrd_base"
11832 [(set (match_operand:SI 0 "s_register_operand" "=r")
11833 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11834 (set (match_operand:SI 2 "s_register_operand" "=r")
11835 (mem:SI (plus:SI (match_dup 1)
11836 (const_int 4))))]
11837 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11838 && (operands_ok_ldrd_strd (operands[0], operands[2],
11839 operands[1], 0, false, true))"
11840 "ldrd%?\t%0, %2, [%1]"
11841 [(set_attr "type" "load_8")
11842 (set_attr "predicable" "yes")])
11843
11844 (define_insn "*thumb2_ldrd_base_neg"
11845 [(set (match_operand:SI 0 "s_register_operand" "=r")
11846 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11847 (const_int -4))))
11848 (set (match_operand:SI 2 "s_register_operand" "=r")
11849 (mem:SI (match_dup 1)))]
11850 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11851 && (operands_ok_ldrd_strd (operands[0], operands[2],
11852 operands[1], -4, false, true))"
11853 "ldrd%?\t%0, %2, [%1, #-4]"
11854 [(set_attr "type" "load_8")
11855 (set_attr "predicable" "yes")])
11856
11857 (define_insn "*thumb2_strd"
11858 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11859 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
11860 (match_operand:SI 2 "s_register_operand" "r"))
11861 (set (mem:SI (plus:SI (match_dup 0)
11862 (match_operand:SI 3 "const_int_operand" "")))
11863 (match_operand:SI 4 "s_register_operand" "r"))]
11864 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11865 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
11866 && (operands_ok_ldrd_strd (operands[2], operands[4],
11867 operands[0], INTVAL (operands[1]),
11868 false, false))"
11869 "strd%?\t%2, %4, [%0, %1]"
11870 [(set_attr "type" "store_8")
11871 (set_attr "predicable" "yes")])
11872
11873 (define_insn "*thumb2_strd_base"
11874 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
11875 (match_operand:SI 1 "s_register_operand" "r"))
11876 (set (mem:SI (plus:SI (match_dup 0)
11877 (const_int 4)))
11878 (match_operand:SI 2 "s_register_operand" "r"))]
11879 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11880 && (operands_ok_ldrd_strd (operands[1], operands[2],
11881 operands[0], 0, false, false))"
11882 "strd%?\t%1, %2, [%0]"
11883 [(set_attr "type" "store_8")
11884 (set_attr "predicable" "yes")])
11885
11886 (define_insn "*thumb2_strd_base_neg"
11887 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11888 (const_int -4)))
11889 (match_operand:SI 1 "s_register_operand" "r"))
11890 (set (mem:SI (match_dup 0))
11891 (match_operand:SI 2 "s_register_operand" "r"))]
11892 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11893 && (operands_ok_ldrd_strd (operands[1], operands[2],
11894 operands[0], -4, false, false))"
11895 "strd%?\t%1, %2, [%0, #-4]"
11896 [(set_attr "type" "store_8")
11897 (set_attr "predicable" "yes")])
11898
11899 ;; ARMv8 CRC32 instructions.
11900 (define_insn "arm_<crc_variant>"
11901 [(set (match_operand:SI 0 "s_register_operand" "=r")
11902 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
11903 (match_operand:<crc_mode> 2 "s_register_operand" "r")]
11904 CRC))]
11905 "TARGET_CRC32"
11906 "<crc_variant>\\t%0, %1, %2"
11907 [(set_attr "type" "crc")
11908 (set_attr "conds" "unconditional")]
11909 )
11910
11911 ;; Load the load/store double peephole optimizations.
11912 (include "ldrdstrd.md")
11913
11914 ;; Load the load/store multiple patterns
11915 (include "ldmstm.md")
11916
11917 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
11918 ;; large register lists without explicit writeback, as generated for the
11919 ;; APCS_FRAME epilogue. The operands are validated through the
11920 ;; load_multiple_operation match_parallel predicate rather than through
11921 ;; constraints, so it is only enabled after reload.
11922 (define_insn "*load_multiple"
11923 [(match_parallel 0 "load_multiple_operation"
11924 [(set (match_operand:SI 2 "s_register_operand" "=rk")
11925 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11926 ])]
11927 "TARGET_32BIT && reload_completed"
11928 "*
11929 {
11930 arm_output_multireg_pop (operands, /*return_pc=*/false,
11931 /*cond=*/const_true_rtx,
11932 /*reverse=*/false,
11933 /*update=*/false);
11934 return \"\";
11935 }
11936 "
11937 [(set_attr "predicable" "yes")]
11938 )
11939
11940 (define_expand "copysignsf3"
11941 [(match_operand:SF 0 "register_operand")
11942 (match_operand:SF 1 "register_operand")
11943 (match_operand:SF 2 "register_operand")]
11944 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11945 "{
11946 emit_move_insn (operands[0], operands[2]);
11947 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
11948 GEN_INT (31), GEN_INT (0),
11949 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
11950 DONE;
11951 }"
11952 )
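;; Usage sketch (illustrative only): on a soft-float Thumb-2 target the
;; generic copysign built-in should expand through the pattern above, i.e. a
;; register move plus a BFI that copies the sign bit:
;;
;;   float copy_sign_f (float mag, float sgn)
;;   {
;;     return __builtin_copysignf (mag, sgn);
;;   }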
11953
11954 (define_expand "copysigndf3"
11955 [(match_operand:DF 0 "register_operand")
11956 (match_operand:DF 1 "register_operand")
11957 (match_operand:DF 2 "register_operand")]
11958 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11959 "{
11960 rtx op0_low = gen_lowpart (SImode, operands[0]);
11961 rtx op0_high = gen_highpart (SImode, operands[0]);
11962 rtx op1_low = gen_lowpart (SImode, operands[1]);
11963 rtx op1_high = gen_highpart (SImode, operands[1]);
11964 rtx op2_high = gen_highpart (SImode, operands[2]);
11965
11966 rtx scratch1 = gen_reg_rtx (SImode);
11967 rtx scratch2 = gen_reg_rtx (SImode);
11968 emit_move_insn (scratch1, op2_high);
11969 emit_move_insn (scratch2, op1_high);
11970
11971     emit_insn (gen_rtx_SET (scratch1,
11972                             gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT (31))));
11973     emit_insn (gen_insv_t2 (scratch2, GEN_INT (1), GEN_INT (31), scratch1));
11974 emit_move_insn (op0_low, op1_low);
11975 emit_move_insn (op0_high, scratch2);
11976
11977 DONE;
11978 }"
11979 )
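;; Likewise for DFmode (illustrative only): the low word of operand 1 is
;; copied unchanged, and bit 31 of the high word is replaced by the sign bit
;; of operand 2 via a logical shift right by 31 followed by a BFI:
;;
;;   double copy_sign_d (double mag, double sgn)
;;   {
;;     return __builtin_copysign (mag, sgn);
;;   }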
11980
11981 ;; movmisalign patterns for HImode and SImode.
11982 (define_expand "movmisalign<mode>"
11983 [(match_operand:HSI 0 "general_operand")
11984 (match_operand:HSI 1 "general_operand")]
11985 "unaligned_access"
11986 {
11987 /* This pattern is not permitted to fail during expansion: if both arguments
11988 are non-registers (e.g. memory := constant), force operand 1 into a
11989 register. */
11990   rtx (*gen_unaligned_load) (rtx, rtx);
11991 rtx tmp_dest = operands[0];
11992 if (!s_register_operand (operands[0], <MODE>mode)
11993 && !s_register_operand (operands[1], <MODE>mode))
11994 operands[1] = force_reg (<MODE>mode, operands[1]);
11995
11996 if (<MODE>mode == HImode)
11997 {
11998 gen_unaligned_load = gen_unaligned_loadhiu;
11999 tmp_dest = gen_reg_rtx (SImode);
12000 }
12001 else
12002 gen_unaligned_load = gen_unaligned_loadsi;
12003
12004 if (MEM_P (operands[1]))
12005 {
12006 emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
12007 if (<MODE>mode == HImode)
12008 emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
12009 }
12010 else
12011 emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
12012
12013 DONE;
12014 })
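;; Usage sketch (illustrative only): with -munaligned-access, a misaligned
;; scalar access such as a packed structure field can be expanded through
;; movmisalignsi/movmisalignhi instead of a byte-by-byte sequence:
;;
;;   struct __attribute__ ((packed)) rec { char tag; int value; };
;;
;;   int get_value (struct rec *r)
;;   {
;;     return r->value;
;;   }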
12015
12016 (define_insn "arm_<cdp>"
12017 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12018 (match_operand:SI 1 "immediate_operand" "n")
12019 (match_operand:SI 2 "immediate_operand" "n")
12020 (match_operand:SI 3 "immediate_operand" "n")
12021 (match_operand:SI 4 "immediate_operand" "n")
12022 (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
12023 "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
12024 {
12025 arm_const_bounds (operands[0], 0, 16);
12026 arm_const_bounds (operands[1], 0, 16);
12027 arm_const_bounds (operands[2], 0, (1 << 5));
12028 arm_const_bounds (operands[3], 0, (1 << 5));
12029 arm_const_bounds (operands[4], 0, (1 << 5));
12030 arm_const_bounds (operands[5], 0, 8);
12031 return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
12032 }
12033 [(set_attr "length" "4")
12034 (set_attr "type" "coproc")])
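;; Usage sketch (illustrative only): this implements the ACLE coprocessor
;; data-processing intrinsic from <arm_acle.h>, e.g.:
;;
;;   #include <arm_acle.h>
;;
;;   void issue_cdp (void)
;;   {
;;     __arm_cdp (1, 2, 3, 4, 5, 6);   /* "cdp  p1, 2, CR3, CR4, CR5, 6"  */
;;   }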
12035
12036 (define_insn "*ldc"
12037 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12038 (match_operand:SI 1 "immediate_operand" "n")
12039 (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
12040 "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
12041 {
12042 arm_const_bounds (operands[0], 0, 16);
12043 arm_const_bounds (operands[1], 0, (1 << 5));
12044 return "<ldc>\\tp%c0, CR%c1, %2";
12045 }
12046 [(set_attr "length" "4")
12047 (set_attr "type" "coproc")])
12048
12049 (define_insn "*stc"
12050 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12051 (match_operand:SI 1 "immediate_operand" "n")
12052 (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
12053 "arm_coproc_builtin_available (VUNSPEC_<STC>)"
12054 {
12055 arm_const_bounds (operands[0], 0, 16);
12056 arm_const_bounds (operands[1], 0, (1 << 5));
12057 return "<stc>\\tp%c0, CR%c1, %2";
12058 }
12059 [(set_attr "length" "4")
12060 (set_attr "type" "coproc")])
12061
12062 (define_expand "arm_<ldc>"
12063 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
12064 (match_operand:SI 1 "immediate_operand")
12065 (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
12066 "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
12067
12068 (define_expand "arm_<stc>"
12069 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
12070 (match_operand:SI 1 "immediate_operand")
12071 (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
12072 "arm_coproc_builtin_available (VUNSPEC_<STC>)")
12073
12074 (define_insn "arm_<mcr>"
12075 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12076 (match_operand:SI 1 "immediate_operand" "n")
12077 (match_operand:SI 2 "s_register_operand" "r")
12078 (match_operand:SI 3 "immediate_operand" "n")
12079 (match_operand:SI 4 "immediate_operand" "n")
12080 (match_operand:SI 5 "immediate_operand" "n")] MCRI)
12081 (use (match_dup 2))]
12082 "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
12083 {
12084 arm_const_bounds (operands[0], 0, 16);
12085 arm_const_bounds (operands[1], 0, 8);
12086 arm_const_bounds (operands[3], 0, (1 << 5));
12087 arm_const_bounds (operands[4], 0, (1 << 5));
12088 arm_const_bounds (operands[5], 0, 8);
12089 return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
12090 }
12091 [(set_attr "length" "4")
12092 (set_attr "type" "coproc")])
12093
12094 (define_insn "arm_<mrc>"
12095 [(set (match_operand:SI 0 "s_register_operand" "=r")
12096 (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
12097 (match_operand:SI 2 "immediate_operand" "n")
12098 (match_operand:SI 3 "immediate_operand" "n")
12099 (match_operand:SI 4 "immediate_operand" "n")
12100 (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
12101 "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
12102 {
12103 arm_const_bounds (operands[1], 0, 16);
12104 arm_const_bounds (operands[2], 0, 8);
12105 arm_const_bounds (operands[3], 0, (1 << 5));
12106 arm_const_bounds (operands[4], 0, (1 << 5));
12107 arm_const_bounds (operands[5], 0, 8);
12108 return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
12109 }
12110 [(set_attr "length" "4")
12111 (set_attr "type" "coproc")])
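;; Usage sketch (illustrative only): the ACLE __arm_mcr/__arm_mrc intrinsics
;; move a 32-bit value to/from a coprocessor register; for instance, reading
;; the CP15 Main ID Register:
;;
;;   #include <arm_acle.h>
;;   #include <stdint.h>
;;
;;   uint32_t read_midr (void)
;;   {
;;     return __arm_mrc (15, 0, 0, 0, 0);   /* "mrc  p15, 0, rX, CR0, CR0, 0"  */
;;   }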
12112
12113 (define_insn "arm_<mcrr>"
12114 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12115 (match_operand:SI 1 "immediate_operand" "n")
12116 (match_operand:DI 2 "s_register_operand" "r")
12117 (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
12118 (use (match_dup 2))]
12119 "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
12120 {
12121 arm_const_bounds (operands[0], 0, 16);
12122 arm_const_bounds (operands[1], 0, 8);
12123 arm_const_bounds (operands[3], 0, (1 << 5));
12124 return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
12125 }
12126 [(set_attr "length" "4")
12127 (set_attr "type" "coproc")])
12128
12129 (define_insn "arm_<mrrc>"
12130 [(set (match_operand:DI 0 "s_register_operand" "=r")
12131 (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
12132 (match_operand:SI 2 "immediate_operand" "n")
12133 (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
12134 "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
12135 {
12136 arm_const_bounds (operands[1], 0, 16);
12137 arm_const_bounds (operands[2], 0, 8);
12138 arm_const_bounds (operands[3], 0, (1 << 5));
12139 return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
12140 }
12141 [(set_attr "length" "4")
12142 (set_attr "type" "coproc")])
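;; Usage sketch (illustrative only): the 64-bit __arm_mcrr/__arm_mrrc
;; intrinsics move a register pair to/from a coprocessor; for instance,
;; reading the generic-timer virtual count on an Armv7-A system:
;;
;;   #include <arm_acle.h>
;;   #include <stdint.h>
;;
;;   uint64_t read_cntvct (void)
;;   {
;;     return __arm_mrrc (15, 1, 14);   /* "mrrc  p15, 1, rLo, rHi, CR14"  */
;;   }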
12143
12144 (define_expand "speculation_barrier"
12145 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
12146 "TARGET_EITHER"
12147 "
12148   /* For Thumb-1 (except Armv8 derivatives) and for pre-Armv7 targets we
12149      don't have a usable barrier (and probably don't need one in practice).
12150      But to be safe if such code is run on later architectures, call a
12151      helper function in libgcc that will emit the barrier appropriate to
12152      the system it is actually running on.  */
12153 if (!(arm_arch7 || arm_arch8))
12154 {
12155 arm_emit_speculation_barrier_function ();
12156 DONE;
12157 }
12158 "
12159 )
12160
12161 ;; Generate a hard speculation barrier when we have not enabled speculation
12162 ;; tracking.
12163 (define_insn "*speculation_barrier_insn"
12164 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
12165 "arm_arch7 || arm_arch8"
12166 "isb\;dsb\\tsy"
12167 [(set_attr "type" "block")
12168 (set_attr "length" "8")]
12169 )
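;; Usage sketch (illustrative only): __builtin_speculation_safe_value may fall
;; back to this hard barrier (or to the libgcc helper on older cores) to block
;; a speculative out-of-bounds load after a bounds check:
;;
;;   int load_checked (const int *array, unsigned int idx, unsigned int bound)
;;   {
;;     if (idx < bound)
;;       return array[__builtin_speculation_safe_value (idx)];
;;     return 0;
;;   }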
12170
12171 ;; Vector bits common to IWMMXT and Neon
12172 (include "vec-common.md")
12173 ;; Load the Intel Wireless Multimedia Extension patterns
12174 (include "iwmmxt.md")
12175 ;; Load the VFP co-processor patterns
12176 (include "vfp.md")
12177 ;; Thumb-1 patterns
12178 (include "thumb1.md")
12179 ;; Thumb-2 patterns
12180 (include "thumb2.md")
12181 ;; Neon patterns
12182 (include "neon.md")
12183 ;; Crypto patterns
12184 (include "crypto.md")
12185 ;; Synchronization Primitives
12186 (include "sync.md")
12187 ;; Fixed-point patterns
12188 (include "arm-fixed.md")