1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2019 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
6
7 ;; This file is part of GCC.
8
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
13
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
18
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
22
   23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
24
25 \f
26 ;;---------------------------------------------------------------------------
27 ;; Constants
28
29 ;; Register numbers -- All machine registers should be defined here
30 (define_constants
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (R4_REGNUM 4) ; Fifth CORE register
34 (FDPIC_REGNUM 9) ; FDPIC register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (LAST_ARM_REGNUM 15) ;
40 (CC_REGNUM 100) ; Condition code pseudo register
41 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
42 ]
43 )
44 ;; 3rd operand to select_dominance_cc_mode
45 (define_constants
46 [(DOM_CC_X_AND_Y 0)
47 (DOM_CC_NX_OR_Y 1)
48 (DOM_CC_X_OR_Y 2)
49 ]
50 )
51 ;; conditional compare combination
52 (define_constants
53 [(CMP_CMP 0)
54 (CMN_CMP 1)
55 (CMP_CMN 2)
56 (CMN_CMN 3)
57 (NUM_OF_COND_CMP 4)
58 ]
59 )
60
61 \f
62 ;;---------------------------------------------------------------------------
63 ;; Attributes
64
65 ;; Processor type. This is created automatically from arm-cores.def.
66 (include "arm-tune.md")
67
68 ;; Instruction classification types
69 (include "types.md")
70
71 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
72 ; generating ARM code. This is used to control the length of some insn
73 ; patterns that share the same RTL in both ARM and Thumb code.
74 (define_attr "is_thumb" "yes,no"
75 (const (if_then_else (symbol_ref "TARGET_THUMB")
76 (const_string "yes") (const_string "no"))))
77
   78 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
79 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
80
81 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
82 (define_attr "is_thumb1" "yes,no"
83 (const (if_then_else (symbol_ref "TARGET_THUMB1")
84 (const_string "yes") (const_string "no"))))
85
86 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
87 ; The arm_restrict_it flag enables the "short IT" feature which
88 ; restricts IT blocks to a single 16-bit instruction.
89 ; This attribute should only be used on 16-bit Thumb-2 instructions
90 ; which may be predicated (the "predicable" attribute must be set).
91 (define_attr "predicable_short_it" "no,yes" (const_string "no"))
92
93 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
   94 ; This attribute should only be used on instructions whose expansion may
   95 ; emit an IT block that is not a short IT.
96 (define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
97
98 ;; Operand number of an input operand that is shifted. Zero if the
99 ;; given instruction does not shift one of its input operands.
100 (define_attr "shift" "" (const_int 0))
101
102 ;; [For compatibility with AArch64 in pipeline models]
103 ;; Attribute that specifies whether or not the instruction touches fp
104 ;; registers.
105 (define_attr "fp" "no,yes" (const_string "no"))
106
107 ; Floating Point Unit. If we only have floating point emulation, then there
108 ; is no point in scheduling the floating point insns. (Well, for best
  109 ; performance we should try to group them together).
110 (define_attr "fpu" "none,vfp"
111 (const (symbol_ref "arm_fpu_attr")))
112
113 ; Predicated means that the insn form is conditionally executed based on a
114 ; predicate. We default to 'no' because no Thumb patterns match this rule
115 ; and not all ARM insns do.
116 (define_attr "predicated" "yes,no" (const_string "no"))
117
118 ; LENGTH of an instruction (in bytes)
119 (define_attr "length" ""
120 (const_int 4))
121
122 ; The architecture which supports the instruction (or alternative).
  123 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
  124 ; TARGET_32BIT, or "t1" or "t2" to specify a particular Thumb mode.  "v6"
  125 ; is for ARM or Thumb-2 with arm_arch6, and "nov6" for ARM without
  126 ; arm_arch6.  "v6t2" is for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
  127 ; Baseline.  This attribute is used to compute the "enabled" attribute;
  128 ; use the value "any" to enable an alternative in all cases.
129 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,v8mb,iwmmxt,iwmmxt2,armv6_or_vfpv3,neon"
130 (const_string "any"))
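
;; Illustrative note (example values, not drawn from a specific pattern):
;; an insn with two alternatives can restrict the first to Thumb-2 with
;;   (set_attr "arch" "t2,*")
;; leaving only the second alternative enabled on other targets; the
;; "arch_enabled" attribute below turns these tags into a per-alternative
;; yes/no value.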
131
132 (define_attr "arch_enabled" "no,yes"
133 (cond [(eq_attr "arch" "any")
134 (const_string "yes")
135
136 (and (eq_attr "arch" "a")
137 (match_test "TARGET_ARM"))
138 (const_string "yes")
139
140 (and (eq_attr "arch" "t")
141 (match_test "TARGET_THUMB"))
142 (const_string "yes")
143
144 (and (eq_attr "arch" "t1")
145 (match_test "TARGET_THUMB1"))
146 (const_string "yes")
147
148 (and (eq_attr "arch" "t2")
149 (match_test "TARGET_THUMB2"))
150 (const_string "yes")
151
152 (and (eq_attr "arch" "32")
153 (match_test "TARGET_32BIT"))
154 (const_string "yes")
155
156 (and (eq_attr "arch" "v6")
157 (match_test "TARGET_32BIT && arm_arch6"))
158 (const_string "yes")
159
160 (and (eq_attr "arch" "nov6")
161 (match_test "TARGET_32BIT && !arm_arch6"))
162 (const_string "yes")
163
164 (and (eq_attr "arch" "v6t2")
165 (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
166 (const_string "yes")
167
168 (and (eq_attr "arch" "v8mb")
169 (match_test "TARGET_THUMB1 && arm_arch8"))
170 (const_string "yes")
171
172 (and (eq_attr "arch" "iwmmxt2")
173 (match_test "TARGET_REALLY_IWMMXT2"))
174 (const_string "yes")
175
176 (and (eq_attr "arch" "armv6_or_vfpv3")
177 (match_test "arm_arch6 || TARGET_VFP3"))
178 (const_string "yes")
179
180 (and (eq_attr "arch" "neon")
181 (match_test "TARGET_NEON"))
182 (const_string "yes")
183 ]
184
185 (const_string "no")))
186
187 (define_attr "opt" "any,speed,size"
188 (const_string "any"))
189
190 (define_attr "opt_enabled" "no,yes"
191 (cond [(eq_attr "opt" "any")
192 (const_string "yes")
193
194 (and (eq_attr "opt" "speed")
195 (match_test "optimize_function_for_speed_p (cfun)"))
196 (const_string "yes")
197
198 (and (eq_attr "opt" "size")
199 (match_test "optimize_function_for_size_p (cfun)"))
200 (const_string "yes")]
201 (const_string "no")))
202
203 (define_attr "use_literal_pool" "no,yes"
204 (cond [(and (eq_attr "type" "f_loads,f_loadd")
205 (match_test "CONSTANT_P (operands[1])"))
206 (const_string "yes")]
207 (const_string "no")))
208
209 ; Enable all alternatives that are both arch_enabled and insn_enabled.
  210 ; FIXME: opt_enabled has been temporarily removed until we have
211 ; an attribute that allows the use of such alternatives.
212 ; This depends on caching of speed_p, size_p on a per
213 ; alternative basis. The problem is that the enabled attribute
214 ; cannot depend on any state that is not cached or is not constant
215 ; for a compilation unit. We probably need a generic "hot/cold"
  216 ; alternative which, if implemented, could help with this.  We disable this
  217 ; until such time as this is implemented and/or the improvements or
  218 ; regressions from removing this attribute are double-checked.
219 ; See ashldi3_neon and <shift>di3_neon in neon.md.
220
221 (define_attr "enabled" "no,yes"
222 (cond [(and (eq_attr "predicable_short_it" "no")
223 (and (eq_attr "predicated" "yes")
224 (match_test "arm_restrict_it")))
225 (const_string "no")
226
227 (and (eq_attr "enabled_for_short_it" "no")
228 (match_test "arm_restrict_it"))
229 (const_string "no")
230
231 (eq_attr "arch_enabled" "no")
232 (const_string "no")]
233 (const_string "yes")))
234
  235 ; POOL_RANGE is how far away from a constant pool entry this insn
236 ; can be placed. If the distance is zero, then this insn will never
237 ; reference the pool.
238 ; Note that for Thumb constant pools the PC value is rounded down to the
239 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
240 ; Thumb insns) should be set to <max_range> - 2.
241 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
242 ; before its address. It is set to <max_range> - (8 + <data_size>).
243 (define_attr "arm_pool_range" "" (const_int 0))
244 (define_attr "thumb2_pool_range" "" (const_int 0))
245 (define_attr "arm_neg_pool_range" "" (const_int 0))
246 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
247
248 (define_attr "pool_range" ""
249 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
250 (attr "arm_pool_range")))
251 (define_attr "neg_pool_range" ""
252 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
253 (attr "arm_neg_pool_range")))
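
;; A hedged illustration (typical values, not taken from this excerpt): an
;; ARM-state LDR with a +/-4095-byte offset might declare
;;   (set_attr "arm_pool_range" "4096")
;;   (set_attr "thumb2_pool_range" "4094")
;; with the Thumb-2 value reduced by 2 as explained above.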
254
255 ; An assembler sequence may clobber the condition codes without us knowing.
256 ; If such an insn references the pool, then we have no way of knowing how,
257 ; so use the most conservative value for pool_range.
258 (define_asm_attributes
259 [(set_attr "conds" "clob")
260 (set_attr "length" "4")
261 (set_attr "pool_range" "250")])
262
263 ; Load scheduling, set from the arm_ld_sched variable
264 ; initialized by arm_option_override()
265 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
266
267 ; condition codes: this one is used by final_prescan_insn to speed up
268 ; conditionalizing instructions. It saves having to scan the rtl to see if
269 ; it uses or alters the condition codes.
270 ;
271 ; USE means that the condition codes are used by the insn in the process of
  272 ; outputting code; this means (at present) that we can't use the insn in
273 ; inlined branches
274 ;
275 ; SET means that the purpose of the insn is to set the condition codes in a
276 ; well defined manner.
277 ;
278 ; CLOB means that the condition codes are altered in an undefined manner, if
279 ; they are altered at all
280 ;
281 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
282 ; that the instruction does not use or alter the condition codes.
283 ;
284 ; NOCOND means that the instruction does not use or alter the condition
  285 ; codes but can be converted into a conditionally executed instruction.
286
287 (define_attr "conds" "use,set,clob,unconditional,nocond"
288 (if_then_else
289 (ior (eq_attr "is_thumb1" "yes")
290 (eq_attr "type" "call"))
291 (const_string "clob")
292 (if_then_else (eq_attr "is_neon_type" "no")
293 (const_string "nocond")
294 (const_string "unconditional"))))
295
296 ; Predicable means that the insn can be conditionally executed based on
297 ; an automatically added predicate (additional patterns are generated by
298 ; gen...). We default to 'no' because no Thumb patterns match this rule
299 ; and not all ARM patterns do.
300 (define_attr "predicable" "no,yes" (const_string "no"))
301
302 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
303 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
304 ; suffer blockages enough to warrant modelling this (and it can adversely
305 ; affect the schedule).
306 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
307
308 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
309 ; to stall the processor. Used with model_wbuf above.
310 (define_attr "write_conflict" "no,yes"
311 (if_then_else (eq_attr "type"
312 "block,call,load_4")
313 (const_string "yes")
314 (const_string "no")))
315
316 ; Classify the insns into those that take one cycle and those that take more
317 ; than one on the main cpu execution unit.
318 (define_attr "core_cycles" "single,multi"
319 (if_then_else (eq_attr "type"
320 "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
321 alu_shift_imm, alu_shift_reg, alu_dsp_reg, alus_ext, alus_imm, alus_sreg,\
322 alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
323 logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
324 logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
325 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
326 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
327 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
328 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
329 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
330 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
331 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
332 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
333 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
334 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
335 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
336 (const_string "single")
337 (const_string "multi")))
338
339 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
340 ;; distant label. Only applicable to Thumb code.
341 (define_attr "far_jump" "yes,no" (const_string "no"))
342
343
344 ;; The number of machine instructions this pattern expands to.
345 ;; Used for Thumb-2 conditional execution.
346 (define_attr "ce_count" "" (const_int 1))
347
348 ;;---------------------------------------------------------------------------
349 ;; Unspecs
350
351 (include "unspecs.md")
352
353 ;;---------------------------------------------------------------------------
354 ;; Mode iterators
355
356 (include "iterators.md")
357
358 ;;---------------------------------------------------------------------------
359 ;; Predicates
360
361 (include "predicates.md")
362 (include "constraints.md")
363
364 ;;---------------------------------------------------------------------------
365 ;; Pipeline descriptions
366
367 (define_attr "tune_cortexr4" "yes,no"
368 (const (if_then_else
369 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
370 (const_string "yes")
371 (const_string "no"))))
372
373 ;; True if the generic scheduling description should be used.
374
375 (define_attr "generic_sched" "yes,no"
376 (const (if_then_else
377 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
378 arm926ejs,arm10e,arm1026ejs,arm1136js,\
379 arm1136jfs,cortexa5,cortexa7,cortexa8,\
380 cortexa9,cortexa12,cortexa15,cortexa17,\
381 cortexa53,cortexa57,cortexm4,cortexm7,\
382 exynosm1,marvell_pj4,xgene1")
383 (eq_attr "tune_cortexr4" "yes"))
384 (const_string "no")
385 (const_string "yes"))))
386
387 (define_attr "generic_vfp" "yes,no"
388 (const (if_then_else
389 (and (eq_attr "fpu" "vfp")
390 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
391 cortexa8,cortexa9,cortexa53,cortexm4,\
392 cortexm7,marvell_pj4,xgene1")
393 (eq_attr "tune_cortexr4" "no"))
394 (const_string "yes")
395 (const_string "no"))))
396
397 (include "marvell-f-iwmmxt.md")
398 (include "arm-generic.md")
399 (include "arm926ejs.md")
400 (include "arm1020e.md")
401 (include "arm1026ejs.md")
402 (include "arm1136jfs.md")
403 (include "fa526.md")
404 (include "fa606te.md")
405 (include "fa626te.md")
406 (include "fmp626.md")
407 (include "fa726te.md")
408 (include "cortex-a5.md")
409 (include "cortex-a7.md")
410 (include "cortex-a8.md")
411 (include "cortex-a9.md")
412 (include "cortex-a15.md")
413 (include "cortex-a17.md")
414 (include "cortex-a53.md")
415 (include "cortex-a57.md")
416 (include "cortex-r4.md")
417 (include "cortex-r4f.md")
418 (include "cortex-m7.md")
419 (include "cortex-m4.md")
420 (include "cortex-m4-fpu.md")
421 (include "exynos-m1.md")
422 (include "vfp11.md")
423 (include "marvell-pj4.md")
424 (include "xgene1.md")
425
426 \f
427 ;;---------------------------------------------------------------------------
428 ;; Insn patterns
429 ;;
430 ;; Addition insns.
431
432 ;; Note: For DImode insns, there is normally no reason why operands should
  433 ;; not be in the same register; what we don't want is for something being
  434 ;; written to partially overlap something that is an input.
435
436 (define_expand "adddi3"
437 [(parallel
438 [(set (match_operand:DI 0 "s_register_operand")
439 (plus:DI (match_operand:DI 1 "s_register_operand")
440 (match_operand:DI 2 "reg_or_int_operand")))
441 (clobber (reg:CC CC_REGNUM))])]
442 "TARGET_EITHER"
443 "
444 if (TARGET_THUMB1)
445 {
446 if (!REG_P (operands[2]))
447 operands[2] = force_reg (DImode, operands[2]);
448 }
449 else
450 {
451 rtx lo_result, hi_result, lo_dest, hi_dest;
452 rtx lo_op1, hi_op1, lo_op2, hi_op2;
453 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
454 &lo_op2, &hi_op2);
455 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
456 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
457
458 if (lo_op2 == const0_rtx)
459 {
460 lo_dest = lo_op1;
461 if (!arm_add_operand (hi_op2, SImode))
462 hi_op2 = force_reg (SImode, hi_op2);
463 /* Assume hi_op2 won't also be zero. */
464 emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
465 }
466 else
467 {
468 if (!arm_add_operand (lo_op2, SImode))
469 lo_op2 = force_reg (SImode, lo_op2);
470 if (!arm_not_operand (hi_op2, SImode))
471 hi_op2 = force_reg (SImode, hi_op2);
472
473 emit_insn (gen_addsi3_compare_op1 (lo_dest, lo_op1, lo_op2));
474 rtx carry = gen_rtx_LTU (SImode, gen_rtx_REG (CC_Cmode, CC_REGNUM),
475 const0_rtx);
476 if (hi_op2 == const0_rtx)
477 emit_insn (gen_add0si3_carryin (hi_dest, hi_op1, carry));
478 else
479 emit_insn (gen_addsi3_carryin (hi_dest, hi_op1, hi_op2, carry));
480 }
481
482 if (lo_result != lo_dest)
483 emit_move_insn (lo_result, lo_dest);
484 if (hi_result != hi_dest)
485 emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
486 DONE;
487 }
488 "
489 )
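
;; As an informal illustration (register assignment only indicative), a
;; 64-bit addition such as
;;   unsigned long long add64 (unsigned long long a, unsigned long long b)
;;   { return a + b; }
;; is decomposed by the expander above into an SImode add that sets the
;; carry flag followed by an add-with-carry, typically assembling to
;;   adds  r0, r0, r2
;;   adc   r1, r1, r3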
490
491 (define_expand "addvsi4"
492 [(match_operand:SI 0 "s_register_operand")
493 (match_operand:SI 1 "s_register_operand")
494 (match_operand:SI 2 "arm_add_operand")
495 (match_operand 3 "")]
496 "TARGET_32BIT"
497 {
498 if (CONST_INT_P (operands[2]))
499 emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1], operands[2]));
500 else
501 emit_insn (gen_addsi3_compareV_reg (operands[0], operands[1], operands[2]));
502 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
503
504 DONE;
505 })
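
;; Illustrative use (variable names are ours): signed addition with overflow
;; checking, as in
;;   int res; int ovf = __builtin_add_overflow (a, b, &res);
;; goes through this expander; the add is emitted as ADDS so that the V flag
;; is set on signed overflow, and the unlikely branch tests that flag (BVS).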
506
507 (define_expand "addvdi4"
508 [(match_operand:DI 0 "s_register_operand")
509 (match_operand:DI 1 "s_register_operand")
510 (match_operand:DI 2 "reg_or_int_operand")
511 (match_operand 3 "")]
512 "TARGET_32BIT"
513 {
514 rtx lo_result, hi_result;
515 rtx lo_op1, hi_op1, lo_op2, hi_op2;
516 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
517 &lo_op2, &hi_op2);
518 lo_result = gen_lowpart (SImode, operands[0]);
519 hi_result = gen_highpart (SImode, operands[0]);
520
521 if (lo_op2 == const0_rtx)
522 {
523 emit_move_insn (lo_result, lo_op1);
524 if (!arm_add_operand (hi_op2, SImode))
525 hi_op2 = force_reg (SImode, hi_op2);
526
527 emit_insn (gen_addvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
528 }
529 else
530 {
531 if (!arm_add_operand (lo_op2, SImode))
532 lo_op2 = force_reg (SImode, lo_op2);
533 if (!arm_not_operand (hi_op2, SImode))
534 hi_op2 = force_reg (SImode, hi_op2);
535
536 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
537
538 if (hi_op2 == const0_rtx)
539 emit_insn (gen_addsi3_cin_vout_0 (hi_result, hi_op1));
540 else if (CONST_INT_P (hi_op2))
541 emit_insn (gen_addsi3_cin_vout_imm (hi_result, hi_op1, hi_op2));
542 else
543 emit_insn (gen_addsi3_cin_vout_reg (hi_result, hi_op1, hi_op2));
544
545 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
546 }
547
548 DONE;
549 })
550
551 (define_expand "addsi3_cin_vout_reg"
552 [(parallel
553 [(set (match_dup 3)
554 (compare:CC_V
555 (plus:DI
556 (plus:DI (match_dup 4)
557 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
558 (sign_extend:DI (match_operand:SI 2 "s_register_operand")))
559 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
560 (match_dup 2)))))
561 (set (match_operand:SI 0 "s_register_operand")
562 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
563 (match_dup 2)))])]
564 "TARGET_32BIT"
565 {
566 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
567 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
568 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
569 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
570 }
571 )
572
573 (define_insn "*addsi3_cin_vout_reg_insn"
574 [(set (reg:CC_V CC_REGNUM)
575 (compare:CC_V
576 (plus:DI
577 (plus:DI
578 (match_operand:DI 3 "arm_carry_operation" "")
579 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
580 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
581 (sign_extend:DI
582 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
583 (match_dup 1))
584 (match_dup 2)))))
585 (set (match_operand:SI 0 "s_register_operand" "=l,r")
586 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
587 (match_dup 2)))]
588 "TARGET_32BIT"
589 "@
590 adcs%?\\t%0, %0, %2
591 adcs%?\\t%0, %1, %2"
592 [(set_attr "type" "alus_sreg")
593 (set_attr "arch" "t2,*")
594 (set_attr "length" "2,4")]
595 )
596
597 (define_expand "addsi3_cin_vout_imm"
598 [(parallel
599 [(set (match_dup 3)
600 (compare:CC_V
601 (plus:DI
602 (plus:DI (match_dup 4)
603 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
604 (match_dup 2))
605 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
606 (match_dup 2)))))
607 (set (match_operand:SI 0 "s_register_operand")
608 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
609 (match_operand 2 "arm_adcimm_operand")))])]
610 "TARGET_32BIT"
611 {
612 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
613 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
614 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
615 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
616 }
617 )
618
619 (define_insn "*addsi3_cin_vout_imm_insn"
620 [(set (reg:CC_V CC_REGNUM)
621 (compare:CC_V
622 (plus:DI
623 (plus:DI
624 (match_operand:DI 3 "arm_carry_operation" "")
625 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
626 (match_operand 2 "arm_adcimm_operand" "I,K"))
627 (sign_extend:DI
628 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
629 (match_dup 1))
630 (match_dup 2)))))
631 (set (match_operand:SI 0 "s_register_operand" "=r,r")
632 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
633 (match_dup 2)))]
634 "TARGET_32BIT"
635 "@
636 adcs%?\\t%0, %1, %2
637 sbcs%?\\t%0, %1, #%B2"
638 [(set_attr "type" "alus_imm")]
639 )
640
641 (define_expand "addsi3_cin_vout_0"
642 [(parallel
643 [(set (match_dup 2)
644 (compare:CC_V
645 (plus:DI (match_dup 3)
646 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
647 (sign_extend:DI (plus:SI (match_dup 4) (match_dup 1)))))
648 (set (match_operand:SI 0 "s_register_operand")
649 (plus:SI (match_dup 4) (match_dup 1)))])]
650 "TARGET_32BIT"
651 {
652 operands[2] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
653 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
654 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
655 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
656 }
657 )
658
659 (define_insn "*addsi3_cin_vout_0_insn"
660 [(set (reg:CC_V CC_REGNUM)
661 (compare:CC_V
662 (plus:DI
663 (match_operand:DI 2 "arm_carry_operation" "")
664 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
665 (sign_extend:DI (plus:SI
666 (match_operand:SI 3 "arm_carry_operation" "")
667 (match_dup 1)))))
668 (set (match_operand:SI 0 "s_register_operand" "=r")
669 (plus:SI (match_dup 3) (match_dup 1)))]
670 "TARGET_32BIT"
671 "adcs%?\\t%0, %1, #0"
672 [(set_attr "type" "alus_imm")]
673 )
674
675 (define_expand "uaddvsi4"
676 [(match_operand:SI 0 "s_register_operand")
677 (match_operand:SI 1 "s_register_operand")
678 (match_operand:SI 2 "arm_add_operand")
679 (match_operand 3 "")]
680 "TARGET_32BIT"
681 {
682 emit_insn (gen_addsi3_compare_op1 (operands[0], operands[1], operands[2]));
683 arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
684
685 DONE;
686 })
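
;; Illustrative use (variable names are ours): unsigned addition with
;; overflow checking, as in
;;   unsigned res; int ovf = __builtin_add_overflow (a, b, &res);
;; with unsigned a and b, emits ADDS and then branches on the carry flag
;; (BCS), since the carry out of an ADDS indicates unsigned overflow.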
687
688 (define_expand "uaddvdi4"
689 [(match_operand:DI 0 "s_register_operand")
690 (match_operand:DI 1 "s_register_operand")
691 (match_operand:DI 2 "reg_or_int_operand")
692 (match_operand 3 "")]
693 "TARGET_32BIT"
694 {
695 rtx lo_result, hi_result;
696 rtx lo_op1, hi_op1, lo_op2, hi_op2;
697 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
698 &lo_op2, &hi_op2);
699 lo_result = gen_lowpart (SImode, operands[0]);
700 hi_result = gen_highpart (SImode, operands[0]);
701
702 if (lo_op2 == const0_rtx)
703 {
704 emit_move_insn (lo_result, lo_op1);
705 if (!arm_add_operand (hi_op2, SImode))
706 hi_op2 = force_reg (SImode, hi_op2);
707
708 gen_uaddvsi4 (hi_result, hi_op1, hi_op2, operands[3]);
709 }
710 else
711 {
712 if (!arm_add_operand (lo_op2, SImode))
713 lo_op2 = force_reg (SImode, lo_op2);
714 if (!arm_not_operand (hi_op2, SImode))
715 hi_op2 = force_reg (SImode, hi_op2);
716
717 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
718
719 if (hi_op2 == const0_rtx)
720 emit_insn (gen_addsi3_cin_cout_0 (hi_result, hi_op1));
721 else if (CONST_INT_P (hi_op2))
722 emit_insn (gen_addsi3_cin_cout_imm (hi_result, hi_op1, hi_op2));
723 else
724 emit_insn (gen_addsi3_cin_cout_reg (hi_result, hi_op1, hi_op2));
725
726 arm_gen_unlikely_cbranch (GEU, CC_ADCmode, operands[3]);
727 }
728
729 DONE;
730 })
731
732 (define_expand "addsi3_cin_cout_reg"
733 [(parallel
734 [(set (match_dup 3)
735 (compare:CC_ADC
736 (plus:DI
737 (plus:DI (match_dup 4)
738 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
739 (zero_extend:DI (match_operand:SI 2 "s_register_operand")))
740 (const_int 4294967296)))
741 (set (match_operand:SI 0 "s_register_operand")
742 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
743 (match_dup 2)))])]
744 "TARGET_32BIT"
745 {
746 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
747 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
748 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
749 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
750 }
751 )
752
753 (define_insn "*addsi3_cin_cout_reg_insn"
754 [(set (reg:CC_ADC CC_REGNUM)
755 (compare:CC_ADC
756 (plus:DI
757 (plus:DI
758 (match_operand:DI 3 "arm_carry_operation" "")
759 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
760 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
761 (const_int 4294967296)))
762 (set (match_operand:SI 0 "s_register_operand" "=l,r")
763 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
764 (match_dup 1))
765 (match_dup 2)))]
766 "TARGET_32BIT"
767 "@
768 adcs%?\\t%0, %0, %2
769 adcs%?\\t%0, %1, %2"
770 [(set_attr "type" "alus_sreg")
771 (set_attr "arch" "t2,*")
772 (set_attr "length" "2,4")]
773 )
774
775 (define_expand "addsi3_cin_cout_imm"
776 [(parallel
777 [(set (match_dup 3)
778 (compare:CC_ADC
779 (plus:DI
780 (plus:DI (match_dup 4)
781 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
782 (match_dup 6))
783 (const_int 4294967296)))
784 (set (match_operand:SI 0 "s_register_operand")
785 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
786 (match_operand:SI 2 "arm_adcimm_operand")))])]
787 "TARGET_32BIT"
788 {
789 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
790 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
791 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
792 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
793 operands[6] = GEN_INT (UINTVAL (operands[2]) & 0xffffffff);
794 }
795 )
796
797 (define_insn "*addsi3_cin_cout_imm_insn"
798 [(set (reg:CC_ADC CC_REGNUM)
799 (compare:CC_ADC
800 (plus:DI
801 (plus:DI
802 (match_operand:DI 3 "arm_carry_operation" "")
803 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
804 (match_operand:DI 5 "const_int_operand" "n,n"))
805 (const_int 4294967296)))
806 (set (match_operand:SI 0 "s_register_operand" "=r,r")
807 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
808 (match_dup 1))
809 (match_operand:SI 2 "arm_adcimm_operand" "I,K")))]
810 "TARGET_32BIT
811 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[5])"
812 "@
813 adcs%?\\t%0, %1, %2
814 sbcs%?\\t%0, %1, #%B2"
815 [(set_attr "type" "alus_imm")]
816 )
817
818 (define_expand "addsi3_cin_cout_0"
819 [(parallel
820 [(set (match_dup 2)
821 (compare:CC_ADC
822 (plus:DI (match_dup 3)
823 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
824 (const_int 4294967296)))
825 (set (match_operand:SI 0 "s_register_operand")
826 (plus:SI (match_dup 4) (match_dup 1)))])]
827 "TARGET_32BIT"
828 {
829 operands[2] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
830 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
831 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
832 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
833 }
834 )
835
836 (define_insn "*addsi3_cin_cout_0_insn"
837 [(set (reg:CC_ADC CC_REGNUM)
838 (compare:CC_ADC
839 (plus:DI
840 (match_operand:DI 2 "arm_carry_operation" "")
841 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
842 (const_int 4294967296)))
843 (set (match_operand:SI 0 "s_register_operand" "=r")
844 (plus:SI (match_operand:SI 3 "arm_carry_operation" "") (match_dup 1)))]
845 "TARGET_32BIT"
846 "adcs%?\\t%0, %1, #0"
847 [(set_attr "type" "alus_imm")]
848 )
849
850 (define_expand "addsi3"
851 [(set (match_operand:SI 0 "s_register_operand")
852 (plus:SI (match_operand:SI 1 "s_register_operand")
853 (match_operand:SI 2 "reg_or_int_operand")))]
854 "TARGET_EITHER"
855 "
856 if (TARGET_32BIT && CONST_INT_P (operands[2]))
857 {
858 arm_split_constant (PLUS, SImode, NULL_RTX,
859 INTVAL (operands[2]), operands[0], operands[1],
860 optimize && can_create_pseudo_p ());
861 DONE;
862 }
863 "
864 )
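
;; An informal example of the splitting done above: in ARM state 257 (0x101)
;; is not a valid immediate, so r0 = r1 + 257 may be synthesized by
;; arm_split_constant as two additions, e.g.
;;   add   r0, r1, #256
;;   add   r0, r0, #1
;; rather than loading the constant into a register first.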
865
866 ; If there is a scratch available, this will be faster than synthesizing the
867 ; addition.
868 (define_peephole2
869 [(match_scratch:SI 3 "r")
870 (set (match_operand:SI 0 "arm_general_register_operand" "")
871 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
872 (match_operand:SI 2 "const_int_operand" "")))]
873 "TARGET_32BIT &&
874 !(const_ok_for_arm (INTVAL (operands[2]))
875 || const_ok_for_arm (-INTVAL (operands[2])))
876 && const_ok_for_arm (~INTVAL (operands[2]))"
877 [(set (match_dup 3) (match_dup 2))
878 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
879 ""
880 )
881
882 ;; The r/r/k alternative is required when reloading the address
883 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
884 ;; put the duplicated register first, and not try the commutative version.
885 (define_insn_and_split "*arm_addsi3"
886 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
887 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
888 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
889 "TARGET_32BIT"
890 "@
891 add%?\\t%0, %0, %2
892 add%?\\t%0, %1, %2
893 add%?\\t%0, %1, %2
894 add%?\\t%0, %1, %2
895 add%?\\t%0, %1, %2
896 add%?\\t%0, %1, %2
897 add%?\\t%0, %2, %1
898 add%?\\t%0, %1, %2
899 addw%?\\t%0, %1, %2
900 addw%?\\t%0, %1, %2
901 sub%?\\t%0, %1, #%n2
902 sub%?\\t%0, %1, #%n2
903 sub%?\\t%0, %1, #%n2
904 subw%?\\t%0, %1, #%n2
905 subw%?\\t%0, %1, #%n2
906 #"
907 "TARGET_32BIT
908 && CONST_INT_P (operands[2])
909 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
910 && (reload_completed || !arm_eliminable_register (operands[1]))"
911 [(clobber (const_int 0))]
912 "
913 arm_split_constant (PLUS, SImode, curr_insn,
914 INTVAL (operands[2]), operands[0],
915 operands[1], 0);
916 DONE;
917 "
918 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
919 (set_attr "predicable" "yes")
920 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
921 (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
922 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
923 (const_string "alu_imm")
924 (const_string "alu_sreg")))
925 ]
926 )
927
928 (define_insn "addsi3_compareV_reg"
929 [(set (reg:CC_V CC_REGNUM)
930 (compare:CC_V
931 (plus:DI
932 (sign_extend:DI (match_operand:SI 1 "register_operand" "%l,0,r"))
933 (sign_extend:DI (match_operand:SI 2 "register_operand" "l,r,r")))
934 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
935 (set (match_operand:SI 0 "register_operand" "=l,r,r")
936 (plus:SI (match_dup 1) (match_dup 2)))]
937 "TARGET_32BIT"
938 "adds%?\\t%0, %1, %2"
939 [(set_attr "conds" "set")
940 (set_attr "arch" "t2,t2,*")
941 (set_attr "length" "2,2,4")
942 (set_attr "type" "alus_sreg")]
943 )
944
945 (define_insn "*addsi3_compareV_reg_nosum"
946 [(set (reg:CC_V CC_REGNUM)
947 (compare:CC_V
948 (plus:DI
949 (sign_extend:DI (match_operand:SI 0 "register_operand" "%l,r"))
950 (sign_extend:DI (match_operand:SI 1 "register_operand" "l,r")))
951 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
952 "TARGET_32BIT"
953 "cmn%?\\t%0, %1"
954 [(set_attr "conds" "set")
955 (set_attr "arch" "t2,*")
956 (set_attr "length" "2,4")
957 (set_attr "type" "alus_sreg")]
958 )
959
960 (define_insn "subvsi3_intmin"
961 [(set (reg:CC_V CC_REGNUM)
962 (compare:CC_V
963 (plus:DI
964 (sign_extend:DI
965 (match_operand:SI 1 "register_operand" "r"))
966 (const_int 2147483648))
967 (sign_extend:DI (plus:SI (match_dup 1) (const_int -2147483648)))))
968 (set (match_operand:SI 0 "register_operand" "=r")
969 (plus:SI (match_dup 1) (const_int -2147483648)))]
970 "TARGET_32BIT"
971 "subs%?\\t%0, %1, #-2147483648"
972 [(set_attr "conds" "set")
973 (set_attr "type" "alus_imm")]
974 )
975
976 (define_insn "addsi3_compareV_imm"
977 [(set (reg:CC_V CC_REGNUM)
978 (compare:CC_V
979 (plus:DI
980 (sign_extend:DI
981 (match_operand:SI 1 "register_operand" "l,0,l,0,r,r"))
982 (match_operand 2 "arm_addimm_operand" "Pd,Py,Px,Pw,I,L"))
983 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
984 (set (match_operand:SI 0 "register_operand" "=l,l,l,l,r,r")
985 (plus:SI (match_dup 1) (match_dup 2)))]
986 "TARGET_32BIT
987 && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
988 "@
989 adds%?\\t%0, %1, %2
990 adds%?\\t%0, %0, %2
991 subs%?\\t%0, %1, #%n2
992 subs%?\\t%0, %0, #%n2
993 adds%?\\t%0, %1, %2
994 subs%?\\t%0, %1, #%n2"
995 [(set_attr "conds" "set")
996 (set_attr "arch" "t2,t2,t2,t2,*,*")
997 (set_attr "length" "2,2,2,2,4,4")
998 (set_attr "type" "alus_imm")]
999 )
1000
1001 (define_insn "addsi3_compareV_imm_nosum"
1002 [(set (reg:CC_V CC_REGNUM)
1003 (compare:CC_V
1004 (plus:DI
1005 (sign_extend:DI
1006 (match_operand:SI 0 "register_operand" "l,r,r"))
1007 (match_operand 1 "arm_addimm_operand" "Pw,I,L"))
1008 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1009 "TARGET_32BIT
1010 && INTVAL (operands[1]) == ARM_SIGN_EXTEND (INTVAL (operands[1]))"
1011 "@
1012 cmp%?\\t%0, #%n1
1013 cmn%?\\t%0, %1
1014 cmp%?\\t%0, #%n1"
1015 [(set_attr "conds" "set")
1016 (set_attr "arch" "t2,*,*")
1017 (set_attr "length" "2,4,4")
1018 (set_attr "type" "alus_imm")]
1019 )
1020
 1021 ;; We can handle more constants efficiently if we can clobber either a scratch
 1022 ;; or the other source operand.  We deliberately leave this late, as in
 1023 ;; high-register-pressure situations it's not worth forcing any reloads.
1024 (define_peephole2
1025 [(match_scratch:SI 2 "l")
1026 (set (reg:CC_V CC_REGNUM)
1027 (compare:CC_V
1028 (plus:DI
1029 (sign_extend:DI
1030 (match_operand:SI 0 "low_register_operand"))
1031 (match_operand 1 "const_int_operand"))
1032 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1033 "TARGET_THUMB2
1034 && satisfies_constraint_Pd (operands[1])"
1035 [(parallel[
1036 (set (reg:CC_V CC_REGNUM)
1037 (compare:CC_V
1038 (plus:DI (sign_extend:DI (match_dup 0))
1039 (sign_extend:DI (match_dup 1)))
1040 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1041 (set (match_dup 2) (plus:SI (match_dup 0) (match_dup 1)))])]
1042 )
1043
1044 (define_peephole2
1045 [(set (reg:CC_V CC_REGNUM)
1046 (compare:CC_V
1047 (plus:DI
1048 (sign_extend:DI
1049 (match_operand:SI 0 "low_register_operand"))
1050 (match_operand 1 "const_int_operand"))
1051 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1052 "TARGET_THUMB2
1053 && dead_or_set_p (peep2_next_insn (0), operands[0])
1054 && satisfies_constraint_Py (operands[1])"
1055 [(parallel[
1056 (set (reg:CC_V CC_REGNUM)
1057 (compare:CC_V
1058 (plus:DI (sign_extend:DI (match_dup 0))
1059 (sign_extend:DI (match_dup 1)))
1060 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1061 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 1)))])]
1062 )
1063
1064 (define_insn "addsi3_compare0"
1065 [(set (reg:CC_NOOV CC_REGNUM)
1066 (compare:CC_NOOV
1067 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
1068 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
1069 (const_int 0)))
1070 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1071 (plus:SI (match_dup 1) (match_dup 2)))]
1072 "TARGET_ARM"
1073 "@
1074 adds%?\\t%0, %1, %2
1075 subs%?\\t%0, %1, #%n2
1076 adds%?\\t%0, %1, %2"
1077 [(set_attr "conds" "set")
1078 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
1079 )
1080
1081 (define_insn "*addsi3_compare0_scratch"
1082 [(set (reg:CC_NOOV CC_REGNUM)
1083 (compare:CC_NOOV
1084 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
1085 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
1086 (const_int 0)))]
1087 "TARGET_ARM"
1088 "@
1089 cmn%?\\t%0, %1
1090 cmp%?\\t%0, #%n1
1091 cmn%?\\t%0, %1"
1092 [(set_attr "conds" "set")
1093 (set_attr "predicable" "yes")
1094 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
1095 )
1096
1097 (define_insn "*compare_negsi_si"
1098 [(set (reg:CC_Z CC_REGNUM)
1099 (compare:CC_Z
1100 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
1101 (match_operand:SI 1 "s_register_operand" "l,r")))]
1102 "TARGET_32BIT"
1103 "cmn%?\\t%1, %0"
1104 [(set_attr "conds" "set")
1105 (set_attr "predicable" "yes")
1106 (set_attr "arch" "t2,*")
1107 (set_attr "length" "2,4")
1108 (set_attr "predicable_short_it" "yes,no")
1109 (set_attr "type" "alus_sreg")]
1110 )
1111
1112 ;; This is the canonicalization of subsi3_compare when the
1113 ;; addend is a constant.
1114 (define_insn "cmpsi2_addneg"
1115 [(set (reg:CC CC_REGNUM)
1116 (compare:CC
1117 (match_operand:SI 1 "s_register_operand" "r,r")
1118 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
1119 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1120 (plus:SI (match_dup 1)
1121 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
1122 "TARGET_32BIT
1123 && (INTVAL (operands[2])
1124 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
1125 {
1126 /* For 0 and INT_MIN it is essential that we use subs, as adds will result
1127 in different condition codes (like cmn rather than like cmp), so that
 1128 alternative comes first.  Both alternatives can match for any 0x??000000
 1129 value; except for 0 and INT_MIN it doesn't matter which we choose, and also
 1130 for -1 and 1 with TARGET_THUMB2.  In that case prefer the instruction with
 1131 #1, as it is shorter.  */
1132 if (which_alternative == 0 && operands[3] != const1_rtx)
1133 return "subs%?\\t%0, %1, #%n3";
1134 else
1135 return "adds%?\\t%0, %1, %3";
1136 }
1137 [(set_attr "conds" "set")
1138 (set_attr "type" "alus_sreg")]
1139 )
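
;; A hedged example of the pattern above: computing r1 - 10 while also
;; needing the flags of "cmp r1, #10" collapses to the single instruction
;;   subs  r0, r1, #10
;; (here operand 2 is 10 and operand 3 is -10).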
1140
1141 ;; Convert the sequence
1142 ;; sub rd, rn, #1
1143 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
1144 ;; bne dest
1145 ;; into
1146 ;; subs rd, rn, #1
1147 ;; bcs dest ((unsigned)rn >= 1)
1148 ;; similarly for the beq variant using bcc.
1149 ;; This is a common looping idiom (while (n--))
1150 (define_peephole2
1151 [(set (match_operand:SI 0 "arm_general_register_operand" "")
1152 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
1153 (const_int -1)))
1154 (set (match_operand 2 "cc_register" "")
1155 (compare (match_dup 0) (const_int -1)))
1156 (set (pc)
1157 (if_then_else (match_operator 3 "equality_operator"
1158 [(match_dup 2) (const_int 0)])
1159 (match_operand 4 "" "")
1160 (match_operand 5 "" "")))]
1161 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
1162 [(parallel[
1163 (set (match_dup 2)
1164 (compare:CC
1165 (match_dup 1) (const_int 1)))
1166 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
1167 (set (pc)
1168 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
1169 (match_dup 4)
1170 (match_dup 5)))]
1171 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
1172 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
1173 ? GEU : LTU),
1174 VOIDmode,
1175 operands[2], const0_rtx);"
1176 )
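
;; For illustration (the function name is made up), the source idiom this
;; peephole targets looks roughly like
;;   while (n--)
;;     do_something ();
;; where the decrement and test become a single SUBS followed by a BCS (or
;; BCC for the inverted condition), as described in the comment above.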
1177
1178 ;; The next four insns work because they compare the result with one of
1179 ;; the operands, and we know that the use of the condition code is
1180 ;; either GEU or LTU, so we can use the carry flag from the addition
1181 ;; instead of doing the compare a second time.
1182 (define_insn "addsi3_compare_op1"
1183 [(set (reg:CC_C CC_REGNUM)
1184 (compare:CC_C
1185 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,rk,rk")
1186 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rkI,L"))
1187 (match_dup 1)))
1188 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,rk,rk")
1189 (plus:SI (match_dup 1) (match_dup 2)))]
1190 "TARGET_32BIT"
1191 "@
1192 adds%?\\t%0, %1, %2
1193 adds%?\\t%0, %0, %2
1194 subs%?\\t%0, %1, #%n2
1195 subs%?\\t%0, %0, #%n2
1196 adds%?\\t%0, %1, %2
1197 subs%?\\t%0, %1, #%n2"
1198 [(set_attr "conds" "set")
1199 (set_attr "arch" "t2,t2,t2,t2,*,*")
1200 (set_attr "length" "2,2,2,2,4,4")
1201 (set (attr "type")
1202 (if_then_else (match_operand 2 "const_int_operand")
1203 (const_string "alu_imm")
1204 (const_string "alu_sreg")))]
1205 )
1206
1207 (define_insn "*addsi3_compare_op2"
1208 [(set (reg:CC_C CC_REGNUM)
1209 (compare:CC_C
1210 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r")
1211 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rI,L"))
1212 (match_dup 2)))
1213 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r")
1214 (plus:SI (match_dup 1) (match_dup 2)))]
1215 "TARGET_32BIT"
1216 "@
1217 adds%?\\t%0, %1, %2
1218 adds%?\\t%0, %0, %2
1219 subs%?\\t%0, %1, #%n2
1220 subs%?\\t%0, %0, #%n2
1221 adds%?\\t%0, %1, %2
1222 subs%?\\t%0, %1, #%n2"
1223 [(set_attr "conds" "set")
1224 (set_attr "arch" "t2,t2,t2,t2,*,*")
1225 (set_attr "length" "2,2,2,2,4,4")
1226 (set (attr "type")
1227 (if_then_else (match_operand 2 "const_int_operand")
1228 (const_string "alu_imm")
1229 (const_string "alu_sreg")))]
1230 )
1231
1232 (define_insn "*compare_addsi2_op0"
1233 [(set (reg:CC_C CC_REGNUM)
1234 (compare:CC_C
1235 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1236 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1237 (match_dup 0)))]
1238 "TARGET_32BIT"
1239 "@
1240 cmn%?\\t%0, %1
1241 cmp%?\\t%0, #%n1
1242 cmn%?\\t%0, %1
1243 cmp%?\\t%0, #%n1"
1244 [(set_attr "conds" "set")
1245 (set_attr "predicable" "yes")
1246 (set_attr "arch" "t2,t2,*,*")
1247 (set_attr "predicable_short_it" "yes,yes,no,no")
1248 (set_attr "length" "2,2,4,4")
1249 (set (attr "type")
1250 (if_then_else (match_operand 1 "const_int_operand")
1251 (const_string "alu_imm")
1252 (const_string "alu_sreg")))]
1253 )
1254
1255 (define_insn "*compare_addsi2_op1"
1256 [(set (reg:CC_C CC_REGNUM)
1257 (compare:CC_C
1258 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1259 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1260 (match_dup 1)))]
1261 "TARGET_32BIT"
1262 "@
1263 cmn%?\\t%0, %1
1264 cmp%?\\t%0, #%n1
1265 cmn%?\\t%0, %1
1266 cmp%?\\t%0, #%n1"
1267 [(set_attr "conds" "set")
1268 (set_attr "predicable" "yes")
1269 (set_attr "arch" "t2,t2,*,*")
1270 (set_attr "predicable_short_it" "yes,yes,no,no")
1271 (set_attr "length" "2,2,4,4")
1272 (set (attr "type")
1273 (if_then_else (match_operand 1 "const_int_operand")
1274 (const_string "alu_imm")
1275 (const_string "alu_sreg")))]
1276 )
1277
1278 (define_insn "addsi3_carryin"
1279 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1280 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
1281 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
1282 (match_operand:SI 3 "arm_carry_operation" "")))]
1283 "TARGET_32BIT"
1284 "@
1285 adc%?\\t%0, %1, %2
1286 adc%?\\t%0, %1, %2
1287 sbc%?\\t%0, %1, #%B2"
1288 [(set_attr "conds" "use")
1289 (set_attr "predicable" "yes")
1290 (set_attr "arch" "t2,*,*")
1291 (set_attr "length" "4")
1292 (set_attr "predicable_short_it" "yes,no,no")
1293 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1294 )
1295
1296 ;; Canonicalization of the above when the immediate is zero.
1297 (define_insn "add0si3_carryin"
1298 [(set (match_operand:SI 0 "s_register_operand" "=r")
1299 (plus:SI (match_operand:SI 2 "arm_carry_operation" "")
1300 (match_operand:SI 1 "arm_not_operand" "r")))]
1301 "TARGET_32BIT"
1302 "adc%?\\t%0, %1, #0"
1303 [(set_attr "conds" "use")
1304 (set_attr "predicable" "yes")
1305 (set_attr "length" "4")
1306 (set_attr "type" "adc_imm")]
1307 )
1308
1309 (define_insn "*addsi3_carryin_alt2"
1310 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1311 (plus:SI (plus:SI (match_operand:SI 3 "arm_carry_operation" "")
1312 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
1313 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
1314 "TARGET_32BIT"
1315 "@
1316 adc%?\\t%0, %1, %2
1317 adc%?\\t%0, %1, %2
1318 sbc%?\\t%0, %1, #%B2"
1319 [(set_attr "conds" "use")
1320 (set_attr "predicable" "yes")
1321 (set_attr "arch" "t2,*,*")
1322 (set_attr "length" "4")
1323 (set_attr "predicable_short_it" "yes,no,no")
1324 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1325 )
1326
1327 (define_insn "*addsi3_carryin_shift"
1328 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1329 (plus:SI (plus:SI
1330 (match_operator:SI 2 "shift_operator"
1331 [(match_operand:SI 3 "s_register_operand" "r,r")
1332 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1333 (match_operand:SI 5 "arm_carry_operation" ""))
1334 (match_operand:SI 1 "s_register_operand" "r,r")))]
1335 "TARGET_32BIT"
1336 "adc%?\\t%0, %1, %3%S2"
1337 [(set_attr "conds" "use")
1338 (set_attr "arch" "32,a")
1339 (set_attr "shift" "3")
1340 (set_attr "predicable" "yes")
1341 (set_attr "type" "alu_shift_imm,alu_shift_reg")]
1342 )
1343
1344 (define_insn "*addsi3_carryin_clobercc"
1345 [(set (match_operand:SI 0 "s_register_operand" "=r")
1346 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1347 (match_operand:SI 2 "arm_rhs_operand" "rI"))
1348 (match_operand:SI 3 "arm_carry_operation" "")))
1349 (clobber (reg:CC CC_REGNUM))]
1350 "TARGET_32BIT"
1351 "adcs%?\\t%0, %1, %2"
1352 [(set_attr "conds" "set")
1353 (set_attr "type" "adcs_reg")]
1354 )
1355
1356 (define_expand "subvsi4"
1357 [(match_operand:SI 0 "s_register_operand")
1358 (match_operand:SI 1 "arm_rhs_operand")
1359 (match_operand:SI 2 "arm_add_operand")
1360 (match_operand 3 "")]
1361 "TARGET_32BIT"
1362 {
1363 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1364 {
1365 /* If both operands are constants we can decide the result statically. */
1366 wi::overflow_type overflow;
1367 wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1368 rtx_mode_t (operands[2], SImode),
1369 SIGNED, &overflow);
1370 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1371 if (overflow != wi::OVF_NONE)
1372 emit_jump_insn (gen_jump (operands[3]));
1373 DONE;
1374 }
1375 else if (CONST_INT_P (operands[2]))
1376 {
1377 operands[2] = GEN_INT (-INTVAL (operands[2]));
1378 /* Special case for INT_MIN. */
1379 if (INTVAL (operands[2]) == 0x80000000)
1380 emit_insn (gen_subvsi3_intmin (operands[0], operands[1]));
1381 else
1382 emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1],
1383 operands[2]));
1384 }
1385 else if (CONST_INT_P (operands[1]))
1386 emit_insn (gen_subvsi3_imm1 (operands[0], operands[1], operands[2]));
1387 else
1388 emit_insn (gen_subvsi3 (operands[0], operands[1], operands[2]));
1389
1390 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
1391 DONE;
1392 })
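
;; Rough illustration (variable names are ours): signed subtraction with
;; overflow checking, as in
;;   int res; int ovf = __builtin_sub_overflow (a, b, &res);
;; uses this expander; with register operands it emits SUBS and then an
;; unlikely branch on the V flag (BVS) to the overflow path.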
1393
1394 (define_expand "subvdi4"
1395 [(match_operand:DI 0 "s_register_operand")
1396 (match_operand:DI 1 "reg_or_int_operand")
1397 (match_operand:DI 2 "reg_or_int_operand")
1398 (match_operand 3 "")]
1399 "TARGET_32BIT"
1400 {
1401 rtx lo_result, hi_result;
1402 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1403 lo_result = gen_lowpart (SImode, operands[0]);
1404 hi_result = gen_highpart (SImode, operands[0]);
1405 machine_mode mode = CCmode;
1406
1407 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1408 {
1409 /* If both operands are constants we can decide the result statically. */
1410 wi::overflow_type overflow;
1411 wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1412 rtx_mode_t (operands[2], DImode),
1413 SIGNED, &overflow);
1414 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1415 if (overflow != wi::OVF_NONE)
1416 emit_jump_insn (gen_jump (operands[3]));
1417 DONE;
1418 }
1419 else if (CONST_INT_P (operands[1]))
1420 {
1421 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1422 &lo_op1, &hi_op1);
1423 if (const_ok_for_arm (INTVAL (lo_op1)))
1424 {
1425 emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1426 GEN_INT (~UINTVAL (lo_op1))));
1427 /* We could potentially use RSC here in Arm state, but not
1428 in Thumb, so it's probably not worth the effort of handling
1429 this. */
1430 hi_op1 = force_reg (SImode, hi_op1);
1431 mode = CC_RSBmode;
1432 goto highpart;
1433 }
1434 operands[1] = force_reg (DImode, operands[1]);
1435 }
1436
1437 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1438 &lo_op2, &hi_op2);
1439 if (lo_op2 == const0_rtx)
1440 {
1441 emit_move_insn (lo_result, lo_op1);
1442 if (!arm_add_operand (hi_op2, SImode))
1443 hi_op2 = force_reg (SImode, hi_op2);
1444 emit_insn (gen_subvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1445 DONE;
1446 }
1447
1448 if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1449 lo_op2 = force_reg (SImode, lo_op2);
1450 if (CONST_INT_P (lo_op2))
1451 emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1452 GEN_INT (-INTVAL (lo_op2))));
1453 else
1454 emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1455
1456 highpart:
1457 if (!arm_not_operand (hi_op2, SImode))
1458 hi_op2 = force_reg (SImode, hi_op2);
1459 rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1460 if (CONST_INT_P (hi_op2))
1461 emit_insn (gen_subvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1462 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1463 gen_rtx_LTU (DImode, ccreg,
1464 const0_rtx)));
1465 else
1466 emit_insn (gen_subvsi3_borrow (hi_result, hi_op1, hi_op2,
1467 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1468 gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1469 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
1470
1471 DONE;
1472 })
1473
1474 (define_expand "usubvsi4"
1475 [(match_operand:SI 0 "s_register_operand")
1476 (match_operand:SI 1 "arm_rhs_operand")
1477 (match_operand:SI 2 "arm_add_operand")
1478 (match_operand 3 "")]
1479 "TARGET_32BIT"
1480 {
1481 machine_mode mode = CCmode;
1482 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1483 {
1484 /* If both operands are constants we can decide the result statically. */
1485 wi::overflow_type overflow;
1486 wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1487 rtx_mode_t (operands[2], SImode),
1488 UNSIGNED, &overflow);
1489 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1490 if (overflow != wi::OVF_NONE)
1491 emit_jump_insn (gen_jump (operands[3]));
1492 DONE;
1493 }
1494 else if (CONST_INT_P (operands[2]))
1495 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
1496 GEN_INT (-INTVAL (operands[2]))));
1497 else if (CONST_INT_P (operands[1]))
1498 {
1499 mode = CC_RSBmode;
1500 emit_insn (gen_rsb_imm_compare (operands[0], operands[1], operands[2],
1501 GEN_INT (~UINTVAL (operands[1]))));
1502 }
1503 else
1504 emit_insn (gen_subsi3_compare1 (operands[0], operands[1], operands[2]));
1505 arm_gen_unlikely_cbranch (LTU, mode, operands[3]);
1506
1507 DONE;
1508 })
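
;; Rough illustration (variable names are ours): unsigned subtraction with
;; overflow checking, as in
;;   unsigned res; int ovf = __builtin_sub_overflow (a, b, &res);
;; with unsigned a and b, emits SUBS and branches when the carry flag is
;; clear (BCC/BLO), since a clear carry after SUBS indicates a borrow,
;; i.e. unsigned underflow.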
1509
1510 (define_expand "usubvdi4"
1511 [(match_operand:DI 0 "s_register_operand")
1512 (match_operand:DI 1 "reg_or_int_operand")
1513 (match_operand:DI 2 "reg_or_int_operand")
1514 (match_operand 3 "")]
1515 "TARGET_32BIT"
1516 {
1517 rtx lo_result, hi_result;
1518 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1519 lo_result = gen_lowpart (SImode, operands[0]);
1520 hi_result = gen_highpart (SImode, operands[0]);
1521 machine_mode mode = CCmode;
1522
1523 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1524 {
1525 /* If both operands are constants we can decide the result statically. */
1526 wi::overflow_type overflow;
1527 wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1528 rtx_mode_t (operands[2], DImode),
1529 UNSIGNED, &overflow);
1530 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1531 if (overflow != wi::OVF_NONE)
1532 emit_jump_insn (gen_jump (operands[3]));
1533 DONE;
1534 }
1535 else if (CONST_INT_P (operands[1]))
1536 {
1537 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1538 &lo_op1, &hi_op1);
1539 if (const_ok_for_arm (INTVAL (lo_op1)))
1540 {
1541 emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1542 GEN_INT (~UINTVAL (lo_op1))));
1543 /* We could potentially use RSC here in Arm state, but not
1544 in Thumb, so it's probably not worth the effort of handling
1545 this. */
1546 hi_op1 = force_reg (SImode, hi_op1);
1547 mode = CC_RSBmode;
1548 goto highpart;
1549 }
1550 operands[1] = force_reg (DImode, operands[1]);
1551 }
1552
1553 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1554 &lo_op2, &hi_op2);
1555 if (lo_op2 == const0_rtx)
1556 {
1557 emit_move_insn (lo_result, lo_op1);
1558 if (!arm_add_operand (hi_op2, SImode))
1559 hi_op2 = force_reg (SImode, hi_op2);
1560 emit_insn (gen_usubvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1561 DONE;
1562 }
1563
1564 if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1565 lo_op2 = force_reg (SImode, lo_op2);
1566 if (CONST_INT_P (lo_op2))
1567 emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1568 GEN_INT (-INTVAL (lo_op2))));
1569 else
1570 emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1571
1572 highpart:
1573 if (!arm_not_operand (hi_op2, SImode))
1574 hi_op2 = force_reg (SImode, hi_op2);
1575 rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1576 if (CONST_INT_P (hi_op2))
1577 emit_insn (gen_usubvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1578 GEN_INT (UINTVAL (hi_op2) & 0xffffffff),
1579 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1580 gen_rtx_LTU (DImode, ccreg,
1581 const0_rtx)));
1582 else
1583 emit_insn (gen_usubvsi3_borrow (hi_result, hi_op1, hi_op2,
1584 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1585 gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1586 arm_gen_unlikely_cbranch (LTU, CC_Bmode, operands[3]);
1587
1588 DONE;
1589 })
1590
1591 (define_insn "subsi3_compare1"
1592 [(set (reg:CC CC_REGNUM)
1593 (compare:CC
1594 (match_operand:SI 1 "register_operand" "r")
1595 (match_operand:SI 2 "register_operand" "r")))
1596 (set (match_operand:SI 0 "register_operand" "=r")
1597 (minus:SI (match_dup 1) (match_dup 2)))]
1598 "TARGET_32BIT"
1599 "subs%?\\t%0, %1, %2"
1600 [(set_attr "conds" "set")
1601 (set_attr "type" "alus_sreg")]
1602 )
1603
1604 (define_insn "subvsi3"
1605 [(set (reg:CC_V CC_REGNUM)
1606 (compare:CC_V
1607 (minus:DI
1608 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "l,r"))
1609 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
1610 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1611 (set (match_operand:SI 0 "s_register_operand" "=l,r")
1612 (minus:SI (match_dup 1) (match_dup 2)))]
1613 "TARGET_32BIT"
1614 "subs%?\\t%0, %1, %2"
1615 [(set_attr "conds" "set")
1616 (set_attr "arch" "t2,*")
1617 (set_attr "length" "2,4")
1618 (set_attr "type" "alus_sreg")]
1619 )
1620
1621 (define_insn "subvsi3_imm1"
1622 [(set (reg:CC_V CC_REGNUM)
1623 (compare:CC_V
1624 (minus:DI
1625 (match_operand 1 "arm_immediate_operand" "I")
1626 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1627 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1628 (set (match_operand:SI 0 "s_register_operand" "=r")
1629 (minus:SI (match_dup 1) (match_dup 2)))]
1630 "TARGET_32BIT"
1631 "rsbs%?\\t%0, %2, %1"
1632 [(set_attr "conds" "set")
1633 (set_attr "type" "alus_imm")]
1634 )
1635
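;; The third alternative below (Thumb-2 only, where RSC is unavailable) handles
;; a first operand of zero (the `Pz' alternative): SBC Rd, Rm, Rm, LSL #1
;; computes Rm - 2*Rm - borrow, i.e. (0 - Rm) - borrow, which matches the RTL
;; when operand 1 is zero.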
1636 (define_insn "subsi3_carryin"
1637 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1638 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
1639 (match_operand:SI 2 "s_register_operand" "r,r,r"))
1640 (match_operand:SI 3 "arm_borrow_operation" "")))]
1641 "TARGET_32BIT"
1642 "@
1643 sbc%?\\t%0, %1, %2
1644 rsc%?\\t%0, %2, %1
1645 sbc%?\\t%0, %2, %2, lsl #1"
1646 [(set_attr "conds" "use")
1647 (set_attr "arch" "*,a,t2")
1648 (set_attr "predicable" "yes")
1649 (set_attr "type" "adc_reg,adc_imm,alu_shift_imm")]
1650 )
1651
1652 (define_insn "cmpsi3_carryin_<CC_EXTEND>out"
1653 [(set (reg:<CC_EXTEND> CC_REGNUM)
1654 (compare:<CC_EXTEND>
1655 (SE:DI (match_operand:SI 1 "s_register_operand" "0,r"))
1656 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1657 (SE:DI (match_operand:SI 2 "s_register_operand" "l,r")))))
1658 (clobber (match_scratch:SI 0 "=l,r"))]
1659 "TARGET_32BIT"
1660 "sbcs\\t%0, %1, %2"
1661 [(set_attr "conds" "set")
1662 (set_attr "arch" "t2,*")
1663 (set_attr "length" "2,4")
1664 (set_attr "type" "adc_reg")]
1665 )
1666
1667 ;; Similar to the above, but handling a constant which has a different
1668 ;; canonicalization.
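;; Subtracting an immediate together with the borrow is the same as adding its
;; bitwise complement together with the carry: x - imm - borrow == x + ~imm + carry,
;; since ~imm == -imm - 1 and carry == 1 - borrow.  Hence the ADCS alternative
;; for constants whose complement is a valid immediate.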
1669 (define_insn "cmpsi3_imm_carryin_<CC_EXTEND>out"
1670 [(set (reg:<CC_EXTEND> CC_REGNUM)
1671 (compare:<CC_EXTEND>
1672 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1673 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1674 (match_operand:DI 2 "arm_adcimm_operand" "I,K"))))
1675 (clobber (match_scratch:SI 0 "=l,r"))]
1676 "TARGET_32BIT"
1677 "@
1678 sbcs\\t%0, %1, %2
1679 adcs\\t%0, %1, #%B2"
1680 [(set_attr "conds" "set")
1681 (set_attr "type" "adc_imm")]
1682 )
1683
1684 ;; Further canonicalization when the constant is zero.
1685 (define_insn "cmpsi3_0_carryin_<CC_EXTEND>out"
1686 [(set (reg:<CC_EXTEND> CC_REGNUM)
1687 (compare:<CC_EXTEND>
1688 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1689 (match_operand:DI 2 "arm_borrow_operation" "")))
1690 (clobber (match_scratch:SI 0 "=l,r"))]
1691 "TARGET_32BIT"
1692 "sbcs\\t%0, %1, #0"
1693 [(set_attr "conds" "set")
1694 (set_attr "type" "adc_imm")]
1695 )
1696
1697 (define_insn "*subsi3_carryin_const"
1698 [(set (match_operand:SI 0 "s_register_operand" "=r")
1699 (minus:SI (plus:SI
1700 (match_operand:SI 1 "s_register_operand" "r")
1701 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1702 (match_operand:SI 3 "arm_borrow_operation" "")))]
1703 "TARGET_32BIT"
1704 "sbc\\t%0, %1, #%n2"
1705 [(set_attr "conds" "use")
1706 (set_attr "type" "adc_imm")]
1707 )
1708
1709 (define_insn "*subsi3_carryin_const0"
1710 [(set (match_operand:SI 0 "s_register_operand" "=r")
1711 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1712 (match_operand:SI 2 "arm_borrow_operation" "")))]
1713 "TARGET_32BIT"
1714 "sbc\\t%0, %1, #0"
1715 [(set_attr "conds" "use")
1716 (set_attr "type" "adc_imm")]
1717 )
1718
1719 (define_insn "*subsi3_carryin_shift"
1720 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1721 (minus:SI (minus:SI
1722 (match_operand:SI 1 "s_register_operand" "r,r")
1723 (match_operator:SI 2 "shift_operator"
1724 [(match_operand:SI 3 "s_register_operand" "r,r")
1725 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
1726 (match_operand:SI 5 "arm_borrow_operation" "")))]
1727 "TARGET_32BIT"
1728 "sbc%?\\t%0, %1, %3%S2"
1729 [(set_attr "conds" "use")
1730 (set_attr "arch" "32,a")
1731 (set_attr "shift" "3")
1732 (set_attr "predicable" "yes")
1733 (set_attr "type" "alu_shift_imm,alu_shift_reg")]
1734 )
1735
1736 (define_insn "*subsi3_carryin_shift_alt"
1737 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1738 (minus:SI (minus:SI
1739 (match_operand:SI 1 "s_register_operand" "r,r")
1740 (match_operand:SI 5 "arm_borrow_operation" ""))
1741 (match_operator:SI 2 "shift_operator"
1742 [(match_operand:SI 3 "s_register_operand" "r,r")
1743 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
1744 "TARGET_32BIT"
1745 "sbc%?\\t%0, %1, %3%S2"
1746 [(set_attr "conds" "use")
1747 (set_attr "arch" "32,a")
1748 (set_attr "shift" "3")
1749 (set_attr "predicable" "yes")
1750 (set_attr "type" "alu_shift_imm,alu_shift_reg")]
1751 )
1752
1753 ;; No RSC in Thumb-2.
1754 (define_insn "*rsbsi3_carryin_shift"
1755 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1756 (minus:SI (minus:SI
1757 (match_operator:SI 2 "shift_operator"
1758 [(match_operand:SI 3 "s_register_operand" "r,r")
1759 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1760 (match_operand:SI 1 "s_register_operand" "r,r"))
1761 (match_operand:SI 5 "arm_borrow_operation" "")))]
1762 "TARGET_ARM"
1763 "rsc%?\\t%0, %1, %3%S2"
1764 [(set_attr "conds" "use")
1765 (set_attr "predicable" "yes")
1766 (set_attr "type" "alu_shift_imm,alu_shift_reg")]
1767 )
1768
1769 (define_insn "*rsbsi3_carryin_shift_alt"
1770 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1771 (minus:SI (minus:SI
1772 (match_operator:SI 2 "shift_operator"
1773 [(match_operand:SI 3 "s_register_operand" "r,r")
1774 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1775 (match_operand:SI 5 "arm_borrow_operation" ""))
1776 (match_operand:SI 1 "s_register_operand" "r,r")))]
1777 "TARGET_ARM"
1778 "rsc%?\\t%0, %1, %3%S2"
1779 [(set_attr "conds" "use")
1780 (set_attr "predicable" "yes")
1781 (set_attr "type" "alu_shift_imm,alu_shift_reg")]
1782 )
1783
1784 ; Transform ((x << y) - 1) into ~(~(x - 1) << y), where x is a constant.
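; For example, with x == 1 this is the familiar mask idiom
;   (1 << y) - 1  ==  ~(~0 << y),
; where the right-hand side maps onto an inverted shift (e.g. MVN with LSL in
; Arm state).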
1785 (define_split
1786 [(set (match_operand:SI 0 "s_register_operand" "")
1787 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1788 (match_operand:SI 2 "s_register_operand" ""))
1789 (const_int -1)))
1790 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1791 "TARGET_32BIT"
1792 [(set (match_dup 3) (match_dup 1))
1793 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1794 "
1795 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1796 ")
1797
1798 (define_expand "addsf3"
1799 [(set (match_operand:SF 0 "s_register_operand")
1800 (plus:SF (match_operand:SF 1 "s_register_operand")
1801 (match_operand:SF 2 "s_register_operand")))]
1802 "TARGET_32BIT && TARGET_HARD_FLOAT"
1803 "
1804 ")
1805
1806 (define_expand "adddf3"
1807 [(set (match_operand:DF 0 "s_register_operand")
1808 (plus:DF (match_operand:DF 1 "s_register_operand")
1809 (match_operand:DF 2 "s_register_operand")))]
1810 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1811 "
1812 ")
1813
1814 (define_expand "subdi3"
1815 [(parallel
1816 [(set (match_operand:DI 0 "s_register_operand")
1817 (minus:DI (match_operand:DI 1 "reg_or_int_operand")
1818 (match_operand:DI 2 "s_register_operand")))
1819 (clobber (reg:CC CC_REGNUM))])]
1820 "TARGET_EITHER"
1821 "
1822 if (TARGET_THUMB1)
1823 {
1824 if (!REG_P (operands[1]))
1825 operands[1] = force_reg (DImode, operands[1]);
1826 }
1827 else
1828 {
1829 rtx lo_result, hi_result, lo_dest, hi_dest;
1830 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1831 rtx condition;
1832
1833 /* Since operands[1] may be an integer, pass it second, so that
1834 any necessary simplifications will be done on the decomposed
1835 constant. */
1836 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1837 &lo_op1, &hi_op1);
1838 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
1839 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
1840
1841 if (!arm_rhs_operand (lo_op1, SImode))
1842 lo_op1 = force_reg (SImode, lo_op1);
1843
1844 if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
1845 || !arm_rhs_operand (hi_op1, SImode))
1846 hi_op1 = force_reg (SImode, hi_op1);
1847
1848 rtx cc_reg;
1849 if (lo_op1 == const0_rtx)
1850 {
1851 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1852 emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
1853 }
1854 else if (CONST_INT_P (lo_op1))
1855 {
1856 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1857 emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
1858 GEN_INT (~UINTVAL (lo_op1))));
1859 }
1860 else
1861 {
1862 cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
1863 emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
1864 }
1865
1866 condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);
1867
1868 if (hi_op1 == const0_rtx)
1869 emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
1870 else
1871 emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));
1872
1873 if (lo_result != lo_dest)
1874 emit_move_insn (lo_result, lo_dest);
1875
1876 if (hi_result != hi_dest)
1877 emit_move_insn (hi_result, hi_dest);
1878
1879 DONE;
1880 }
1881 "
1882 )
1883
1884 (define_expand "subsi3"
1885 [(set (match_operand:SI 0 "s_register_operand")
1886 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1887 (match_operand:SI 2 "s_register_operand")))]
1888 "TARGET_EITHER"
1889 "
1890 if (CONST_INT_P (operands[1]))
1891 {
1892 if (TARGET_32BIT)
1893 {
1894 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1895 operands[1] = force_reg (SImode, operands[1]);
1896 else
1897 {
1898 arm_split_constant (MINUS, SImode, NULL_RTX,
1899 INTVAL (operands[1]), operands[0],
1900 operands[2],
1901 optimize && can_create_pseudo_p ());
1902 DONE;
1903 }
1904 }
1905 else /* TARGET_THUMB1 */
1906 operands[1] = force_reg (SImode, operands[1]);
1907 }
1908 "
1909 )
1910
1911 ; ??? Check Thumb-2 split length
1912 (define_insn_and_split "*arm_subsi3_insn"
1913 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
1914 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
1915 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
1916 "TARGET_32BIT"
1917 "@
1918 sub%?\\t%0, %1, %2
1919 sub%?\\t%0, %2
1920 sub%?\\t%0, %1, %2
1921 rsb%?\\t%0, %2, %1
1922 rsb%?\\t%0, %2, %1
1923 sub%?\\t%0, %1, %2
1924 sub%?\\t%0, %1, %2
1925 sub%?\\t%0, %1, %2
1926 #"
1927 "&& (CONST_INT_P (operands[1])
1928 && !const_ok_for_arm (INTVAL (operands[1])))"
1929 [(clobber (const_int 0))]
1930 "
1931 arm_split_constant (MINUS, SImode, curr_insn,
1932 INTVAL (operands[1]), operands[0], operands[2], 0);
1933 DONE;
1934 "
1935 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
1936 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
1937 (set_attr "predicable" "yes")
1938 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
1939 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
1940 )
1941
1942 (define_peephole2
1943 [(match_scratch:SI 3 "r")
1944 (set (match_operand:SI 0 "arm_general_register_operand" "")
1945 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1946 (match_operand:SI 2 "arm_general_register_operand" "")))]
1947 "TARGET_32BIT
1948 && !const_ok_for_arm (INTVAL (operands[1]))
1949 && const_ok_for_arm (~INTVAL (operands[1]))"
1950 [(set (match_dup 3) (match_dup 1))
1951 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1952 ""
1953 )
1954
1955 (define_insn "subsi3_compare0"
1956 [(set (reg:CC_NOOV CC_REGNUM)
1957 (compare:CC_NOOV
1958 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1959 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1960 (const_int 0)))
1961 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1962 (minus:SI (match_dup 1) (match_dup 2)))]
1963 "TARGET_32BIT"
1964 "@
1965 subs%?\\t%0, %1, %2
1966 subs%?\\t%0, %1, %2
1967 rsbs%?\\t%0, %2, %1"
1968 [(set_attr "conds" "set")
1969 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
1970 )
1971
1972 (define_insn "subsi3_compare"
1973 [(set (reg:CC CC_REGNUM)
1974 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1975 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1976 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1977 (minus:SI (match_dup 1) (match_dup 2)))]
1978 "TARGET_32BIT"
1979 "@
1980 subs%?\\t%0, %1, %2
1981 subs%?\\t%0, %1, %2
1982 rsbs%?\\t%0, %2, %1"
1983 [(set_attr "conds" "set")
1984 (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
1985 )
1986
1987 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
1988 ;; rather than (0 cmp reg). This gives the same results for unsigned
1989 ;; and equality compares, which is what we mostly need here.
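;; Concretely, (~reg) - (~imm) == (0xffffffff - reg) - (0xffffffff - imm)
;; == imm - reg, so the difference, and hence the C and Z flags, match the
;; direct comparison; only signed overflow (V) can differ, which is why only
;; unsigned and equality tests are reliable here.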
1990 (define_insn "rsb_imm_compare"
1991 [(set (reg:CC_RSB CC_REGNUM)
1992 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
1993 (match_operand 3 "const_int_operand" "")))
1994 (set (match_operand:SI 0 "s_register_operand" "=r")
1995 (minus:SI (match_operand 1 "arm_immediate_operand" "I")
1996 (match_dup 2)))]
1997 "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
1998 "rsbs\\t%0, %2, %1"
1999 [(set_attr "conds" "set")
2000 (set_attr "type" "alus_imm")]
2001 )
2002
2003 ;; Similarly, but the result is unused.
2004 (define_insn "rsb_imm_compare_scratch"
2005 [(set (reg:CC_RSB CC_REGNUM)
2006 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2007 (match_operand 1 "arm_not_immediate_operand" "K")))
2008 (clobber (match_scratch:SI 0 "=r"))]
2009 "TARGET_32BIT"
2010 "rsbs\\t%0, %2, #%B1"
2011 [(set_attr "conds" "set")
2012 (set_attr "type" "alus_imm")]
2013 )
2014
2015 ;; Compare the sum of a value plus a carry against a constant. Uses
2016 ;; RSC, so the result is swapped. Only available in Arm state.
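;; That is, RSCS %0, %2, %1 computes %1 - (%2 + borrow), while the comparison
;; below is written as (%2 + borrow) against %1; the swapped CC_SWP mode
;; records that the sense of the flags is reversed.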
2017 (define_insn "rscsi3_<CC_EXTEND>out_scratch"
2018 [(set (reg:CC_SWP CC_REGNUM)
2019 (compare:CC_SWP
2020 (plus:DI (SE:DI (match_operand:SI 2 "s_register_operand" "r"))
2021 (match_operand:DI 3 "arm_borrow_operation" ""))
2022 (match_operand 1 "arm_immediate_operand" "I")))
2023 (clobber (match_scratch:SI 0 "=r"))]
2024 "TARGET_ARM"
2025 "rscs\\t%0, %2, %1"
2026 [(set_attr "conds" "set")
2027 (set_attr "type" "alus_imm")]
2028 )
2029
2030 (define_insn "usubvsi3_borrow"
2031 [(set (reg:CC_B CC_REGNUM)
2032 (compare:CC_B
2033 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
2034 (plus:DI (match_operand:DI 4 "arm_borrow_operation" "")
2035 (zero_extend:DI
2036 (match_operand:SI 2 "s_register_operand" "l,r")))))
2037 (set (match_operand:SI 0 "s_register_operand" "=l,r")
2038 (minus:SI (match_dup 1)
2039 (plus:SI (match_operand:SI 3 "arm_borrow_operation" "")
2040 (match_dup 2))))]
2041 "TARGET_32BIT"
2042 "sbcs%?\\t%0, %1, %2"
2043 [(set_attr "conds" "set")
2044 (set_attr "arch" "t2,*")
2045 (set_attr "length" "2,4")]
2046 )
2047
2048 (define_insn "usubvsi3_borrow_imm"
2049 [(set (reg:CC_B CC_REGNUM)
2050 (compare:CC_B
2051 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
2052 (plus:DI (match_operand:DI 5 "arm_borrow_operation" "")
2053 (match_operand:DI 3 "const_int_operand" "n,n"))))
2054 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2055 (minus:SI (match_dup 1)
2056 (plus:SI (match_operand:SI 4 "arm_borrow_operation" "")
2057 (match_operand:SI 2 "arm_adcimm_operand" "I,K"))))]
2058 "TARGET_32BIT
2059 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[3])"
2060 "@
2061 sbcs%?\\t%0, %1, %2
2062 adcs%?\\t%0, %1, #%B2"
2063 [(set_attr "conds" "set")
2064 (set_attr "type" "alus_imm")]
2065 )
2066
2067 (define_insn "subvsi3_borrow"
2068 [(set (reg:CC_V CC_REGNUM)
2069 (compare:CC_V
2070 (minus:DI
2071 (minus:DI
2072 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
2073 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
2074 (match_operand:DI 4 "arm_borrow_operation" ""))
2075 (sign_extend:DI
2076 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2077 (match_operand:SI 3 "arm_borrow_operation" "")))))
2078 (set (match_operand:SI 0 "s_register_operand" "=l,r")
2079 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2080 (match_dup 3)))]
2081 "TARGET_32BIT"
2082 "sbcs%?\\t%0, %1, %2"
2083 [(set_attr "conds" "set")
2084 (set_attr "arch" "t2,*")
2085 (set_attr "length" "2,4")]
2086 )
2087
2088 (define_insn "subvsi3_borrow_imm"
2089 [(set (reg:CC_V CC_REGNUM)
2090 (compare:CC_V
2091 (minus:DI
2092 (minus:DI
2093 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
2094 (match_operand 2 "arm_adcimm_operand" "I,K"))
2095 (match_operand:DI 4 "arm_borrow_operation" ""))
2096 (sign_extend:DI
2097 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2098 (match_operand:SI 3 "arm_borrow_operation" "")))))
2099 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2100 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2101 (match_dup 3)))]
2102 "TARGET_32BIT
2103 && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
2104 "@
2105 sbcs%?\\t%0, %1, %2
2106 adcs%?\\t%0, %1, #%B2"
2107 [(set_attr "conds" "set")
2108 (set_attr "type" "alus_imm")]
2109 )
2110
2111 (define_expand "subsf3"
2112 [(set (match_operand:SF 0 "s_register_operand")
2113 (minus:SF (match_operand:SF 1 "s_register_operand")
2114 (match_operand:SF 2 "s_register_operand")))]
2115 "TARGET_32BIT && TARGET_HARD_FLOAT"
2116 "
2117 ")
2118
2119 (define_expand "subdf3"
2120 [(set (match_operand:DF 0 "s_register_operand")
2121 (minus:DF (match_operand:DF 1 "s_register_operand")
2122 (match_operand:DF 2 "s_register_operand")))]
2123 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2124 "
2125 ")
2126
2127 \f
2128 ;; Multiplication insns
2129
2130 (define_expand "mulhi3"
2131 [(set (match_operand:HI 0 "s_register_operand")
2132 (mult:HI (match_operand:HI 1 "s_register_operand")
2133 (match_operand:HI 2 "s_register_operand")))]
2134 "TARGET_DSP_MULTIPLY"
2135 "
2136 {
2137 rtx result = gen_reg_rtx (SImode);
2138 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
2139 emit_move_insn (operands[0], gen_lowpart (HImode, result));
2140 DONE;
2141 }"
2142 )
2143
2144 (define_expand "mulsi3"
2145 [(set (match_operand:SI 0 "s_register_operand")
2146 (mult:SI (match_operand:SI 2 "s_register_operand")
2147 (match_operand:SI 1 "s_register_operand")))]
2148 "TARGET_EITHER"
2149 ""
2150 )
2151
2152 ;; Use `&' and then `0' to prevent operands 0 and 2 from being the same.
2153 (define_insn "*mul"
2154 [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
2155 (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
2156 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
2157 "TARGET_32BIT"
2158 "mul%?\\t%0, %2, %1"
2159 [(set_attr "type" "mul")
2160 (set_attr "predicable" "yes")
2161 (set_attr "arch" "t2,v6,nov6,nov6")
2162 (set_attr "length" "4")
2163 (set_attr "predicable_short_it" "yes,no,*,*")]
2164 )
2165
2166 ;; MLA and MLS instructions. Use operand 1 for the accumulator to prefer
2167 ;; reusing the same register.
2168
2169 (define_insn "*mla"
2170 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
2171 (plus:SI
2172 (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
2173 (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
2174 (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
2175 "TARGET_32BIT"
2176 "mla%?\\t%0, %3, %2, %1"
2177 [(set_attr "type" "mla")
2178 (set_attr "predicable" "yes")
2179 (set_attr "arch" "v6,nov6,nov6,nov6")]
2180 )
2181
2182 (define_insn "*mls"
2183 [(set (match_operand:SI 0 "s_register_operand" "=r")
2184 (minus:SI
2185 (match_operand:SI 1 "s_register_operand" "r")
2186 (mult:SI (match_operand:SI 3 "s_register_operand" "r")
2187 (match_operand:SI 2 "s_register_operand" "r"))))]
2188 "TARGET_32BIT && arm_arch_thumb2"
2189 "mls%?\\t%0, %3, %2, %1"
2190 [(set_attr "type" "mla")
2191 (set_attr "predicable" "yes")]
2192 )
2193
2194 (define_insn "*mulsi3_compare0"
2195 [(set (reg:CC_NOOV CC_REGNUM)
2196 (compare:CC_NOOV (mult:SI
2197 (match_operand:SI 2 "s_register_operand" "r,r")
2198 (match_operand:SI 1 "s_register_operand" "%0,r"))
2199 (const_int 0)))
2200 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
2201 (mult:SI (match_dup 2) (match_dup 1)))]
2202 "TARGET_ARM && !arm_arch6"
2203 "muls%?\\t%0, %2, %1"
2204 [(set_attr "conds" "set")
2205 (set_attr "type" "muls")]
2206 )
2207
2208 (define_insn "*mulsi3_compare0_v6"
2209 [(set (reg:CC_NOOV CC_REGNUM)
2210 (compare:CC_NOOV (mult:SI
2211 (match_operand:SI 2 "s_register_operand" "r")
2212 (match_operand:SI 1 "s_register_operand" "r"))
2213 (const_int 0)))
2214 (set (match_operand:SI 0 "s_register_operand" "=r")
2215 (mult:SI (match_dup 2) (match_dup 1)))]
2216 "TARGET_ARM && arm_arch6 && optimize_size"
2217 "muls%?\\t%0, %2, %1"
2218 [(set_attr "conds" "set")
2219 (set_attr "type" "muls")]
2220 )
2221
2222 (define_insn "*mulsi_compare0_scratch"
2223 [(set (reg:CC_NOOV CC_REGNUM)
2224 (compare:CC_NOOV (mult:SI
2225 (match_operand:SI 2 "s_register_operand" "r,r")
2226 (match_operand:SI 1 "s_register_operand" "%0,r"))
2227 (const_int 0)))
2228 (clobber (match_scratch:SI 0 "=&r,&r"))]
2229 "TARGET_ARM && !arm_arch6"
2230 "muls%?\\t%0, %2, %1"
2231 [(set_attr "conds" "set")
2232 (set_attr "type" "muls")]
2233 )
2234
2235 (define_insn "*mulsi_compare0_scratch_v6"
2236 [(set (reg:CC_NOOV CC_REGNUM)
2237 (compare:CC_NOOV (mult:SI
2238 (match_operand:SI 2 "s_register_operand" "r")
2239 (match_operand:SI 1 "s_register_operand" "r"))
2240 (const_int 0)))
2241 (clobber (match_scratch:SI 0 "=r"))]
2242 "TARGET_ARM && arm_arch6 && optimize_size"
2243 "muls%?\\t%0, %2, %1"
2244 [(set_attr "conds" "set")
2245 (set_attr "type" "muls")]
2246 )
2247
2248 (define_insn "*mulsi3addsi_compare0"
2249 [(set (reg:CC_NOOV CC_REGNUM)
2250 (compare:CC_NOOV
2251 (plus:SI (mult:SI
2252 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2253 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2254 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
2255 (const_int 0)))
2256 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
2257 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2258 (match_dup 3)))]
2259 "TARGET_ARM && arm_arch6"
2260 "mlas%?\\t%0, %2, %1, %3"
2261 [(set_attr "conds" "set")
2262 (set_attr "type" "mlas")]
2263 )
2264
2265 (define_insn "*mulsi3addsi_compare0_v6"
2266 [(set (reg:CC_NOOV CC_REGNUM)
2267 (compare:CC_NOOV
2268 (plus:SI (mult:SI
2269 (match_operand:SI 2 "s_register_operand" "r")
2270 (match_operand:SI 1 "s_register_operand" "r"))
2271 (match_operand:SI 3 "s_register_operand" "r"))
2272 (const_int 0)))
2273 (set (match_operand:SI 0 "s_register_operand" "=r")
2274 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2275 (match_dup 3)))]
2276 "TARGET_ARM && arm_arch6 && optimize_size"
2277 "mlas%?\\t%0, %2, %1, %3"
2278 [(set_attr "conds" "set")
2279 (set_attr "type" "mlas")]
2280 )
2281
2282 (define_insn "*mulsi3addsi_compare0_scratch"
2283 [(set (reg:CC_NOOV CC_REGNUM)
2284 (compare:CC_NOOV
2285 (plus:SI (mult:SI
2286 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2287 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2288 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
2289 (const_int 0)))
2290 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
2291 "TARGET_ARM && !arm_arch6"
2292 "mlas%?\\t%0, %2, %1, %3"
2293 [(set_attr "conds" "set")
2294 (set_attr "type" "mlas")]
2295 )
2296
2297 (define_insn "*mulsi3addsi_compare0_scratch_v6"
2298 [(set (reg:CC_NOOV CC_REGNUM)
2299 (compare:CC_NOOV
2300 (plus:SI (mult:SI
2301 (match_operand:SI 2 "s_register_operand" "r")
2302 (match_operand:SI 1 "s_register_operand" "r"))
2303 (match_operand:SI 3 "s_register_operand" "r"))
2304 (const_int 0)))
2305 (clobber (match_scratch:SI 0 "=r"))]
2306 "TARGET_ARM && arm_arch6 && optimize_size"
2307 "mlas%?\\t%0, %2, %1, %3"
2308 [(set_attr "conds" "set")
2309 (set_attr "type" "mlas")]
2310 )
2311
2312 ;; 32x32->64 widening multiply.
2313 ;; The only difference between the v3-5 and v6+ versions is the requirement
2314 ;; that the output does not overlap with either input.
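;; (For pre-v6 this requirement is expressed below with the early-clobber `&r'
;; output alternatives selected by the `nov6' arch attribute.)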
2315
2316 (define_expand "<Us>mulsidi3"
2317 [(set (match_operand:DI 0 "s_register_operand")
2318 (mult:DI
2319 (SE:DI (match_operand:SI 1 "s_register_operand"))
2320 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
2321 "TARGET_32BIT"
2322 {
2323 emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
2324 gen_highpart (SImode, operands[0]),
2325 operands[1], operands[2]));
2326 DONE;
2327 }
2328 )
2329
2330 (define_insn "<US>mull"
2331 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2332 (mult:SI
2333 (match_operand:SI 2 "s_register_operand" "%r,r")
2334 (match_operand:SI 3 "s_register_operand" "r,r")))
2335 (set (match_operand:SI 1 "s_register_operand" "=r,&r")
2336 (truncate:SI
2337 (lshiftrt:DI
2338 (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
2339 (const_int 32))))]
2340 "TARGET_32BIT"
2341 "<US>mull%?\\t%0, %1, %2, %3"
2342 [(set_attr "type" "umull")
2343 (set_attr "predicable" "yes")
2344 (set_attr "arch" "v6,nov6")]
2345 )
2346
2347 (define_expand "<Us>maddsidi4"
2348 [(set (match_operand:DI 0 "s_register_operand")
2349 (plus:DI
2350 (mult:DI
2351 (SE:DI (match_operand:SI 1 "s_register_operand"))
2352 (SE:DI (match_operand:SI 2 "s_register_operand")))
2353 (match_operand:DI 3 "s_register_operand")))]
2354 "TARGET_32BIT"
2355 {
2356 emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
2357 gen_lowpart (SImode, operands[3]),
2358 gen_highpart (SImode, operands[0]),
2359 gen_highpart (SImode, operands[3]),
2360 operands[1], operands[2]));
2361 DONE;
2362 }
2363 )
2364
2365 (define_insn "<US>mlal"
2366 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2367 (plus:SI
2368 (mult:SI
2369 (match_operand:SI 4 "s_register_operand" "%r,r")
2370 (match_operand:SI 5 "s_register_operand" "r,r"))
2371 (match_operand:SI 1 "s_register_operand" "0,0")))
2372 (set (match_operand:SI 2 "s_register_operand" "=r,&r")
2373 (plus:SI
2374 (truncate:SI
2375 (lshiftrt:DI
2376 (plus:DI
2377 (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
2378 (zero_extend:DI (match_dup 1)))
2379 (const_int 32)))
2380 (match_operand:SI 3 "s_register_operand" "2,2")))]
2381 "TARGET_32BIT"
2382 "<US>mlal%?\\t%0, %2, %4, %5"
2383 [(set_attr "type" "umlal")
2384 (set_attr "predicable" "yes")
2385 (set_attr "arch" "v6,nov6")]
2386 )
2387
2388 (define_expand "<US>mulsi3_highpart"
2389 [(parallel
2390 [(set (match_operand:SI 0 "s_register_operand")
2391 (truncate:SI
2392 (lshiftrt:DI
2393 (mult:DI
2394 (SE:DI (match_operand:SI 1 "s_register_operand"))
2395 (SE:DI (match_operand:SI 2 "s_register_operand")))
2396 (const_int 32))))
2397 (clobber (match_scratch:SI 3 ""))])]
2398 "TARGET_32BIT"
2399 ""
2400 )
2401
2402 (define_insn "*<US>mull_high"
2403 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
2404 (truncate:SI
2405 (lshiftrt:DI
2406 (mult:DI
2407 (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
2408 (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
2409 (const_int 32))))
2410 (clobber (match_scratch:SI 3 "=r,&r,&r"))]
2411 "TARGET_32BIT"
2412 "<US>mull%?\\t%3, %0, %2, %1"
2413 [(set_attr "type" "umull")
2414 (set_attr "predicable" "yes")
2415 (set_attr "arch" "v6,nov6,nov6")]
2416 )
2417
2418 (define_insn "mulhisi3"
2419 [(set (match_operand:SI 0 "s_register_operand" "=r")
2420 (mult:SI (sign_extend:SI
2421 (match_operand:HI 1 "s_register_operand" "%r"))
2422 (sign_extend:SI
2423 (match_operand:HI 2 "s_register_operand" "r"))))]
2424 "TARGET_DSP_MULTIPLY"
2425 "smulbb%?\\t%0, %1, %2"
2426 [(set_attr "type" "smulxy")
2427 (set_attr "predicable" "yes")]
2428 )
2429
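;; In the smul<x><y> patterns below, <x> and <y> select the bottom (b) or top
;; (t) halfword of the first and second operand respectively; a top halfword
;; is represented in RTL as an arithmetic shift right by 16 of the full
;; SImode register.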
2430 (define_insn "*mulhisi3tb"
2431 [(set (match_operand:SI 0 "s_register_operand" "=r")
2432 (mult:SI (ashiftrt:SI
2433 (match_operand:SI 1 "s_register_operand" "r")
2434 (const_int 16))
2435 (sign_extend:SI
2436 (match_operand:HI 2 "s_register_operand" "r"))))]
2437 "TARGET_DSP_MULTIPLY"
2438 "smultb%?\\t%0, %1, %2"
2439 [(set_attr "type" "smulxy")
2440 (set_attr "predicable" "yes")]
2441 )
2442
2443 (define_insn "*mulhisi3bt"
2444 [(set (match_operand:SI 0 "s_register_operand" "=r")
2445 (mult:SI (sign_extend:SI
2446 (match_operand:HI 1 "s_register_operand" "r"))
2447 (ashiftrt:SI
2448 (match_operand:SI 2 "s_register_operand" "r")
2449 (const_int 16))))]
2450 "TARGET_DSP_MULTIPLY"
2451 "smulbt%?\\t%0, %1, %2"
2452 [(set_attr "type" "smulxy")
2453 (set_attr "predicable" "yes")]
2454 )
2455
2456 (define_insn "*mulhisi3tt"
2457 [(set (match_operand:SI 0 "s_register_operand" "=r")
2458 (mult:SI (ashiftrt:SI
2459 (match_operand:SI 1 "s_register_operand" "r")
2460 (const_int 16))
2461 (ashiftrt:SI
2462 (match_operand:SI 2 "s_register_operand" "r")
2463 (const_int 16))))]
2464 "TARGET_DSP_MULTIPLY"
2465 "smultt%?\\t%0, %1, %2"
2466 [(set_attr "type" "smulxy")
2467 (set_attr "predicable" "yes")]
2468 )
2469
2470 (define_insn "maddhisi4"
2471 [(set (match_operand:SI 0 "s_register_operand" "=r")
2472 (plus:SI (mult:SI (sign_extend:SI
2473 (match_operand:HI 1 "s_register_operand" "r"))
2474 (sign_extend:SI
2475 (match_operand:HI 2 "s_register_operand" "r")))
2476 (match_operand:SI 3 "s_register_operand" "r")))]
2477 "TARGET_DSP_MULTIPLY"
2478 "smlabb%?\\t%0, %1, %2, %3"
2479 [(set_attr "type" "smlaxy")
2480 (set_attr "predicable" "yes")]
2481 )
2482
2483 ;; Note: there is no maddhisi4bt pattern because this one is the canonical form
2484 (define_insn "*maddhisi4tb"
2485 [(set (match_operand:SI 0 "s_register_operand" "=r")
2486 (plus:SI (mult:SI (ashiftrt:SI
2487 (match_operand:SI 1 "s_register_operand" "r")
2488 (const_int 16))
2489 (sign_extend:SI
2490 (match_operand:HI 2 "s_register_operand" "r")))
2491 (match_operand:SI 3 "s_register_operand" "r")))]
2492 "TARGET_DSP_MULTIPLY"
2493 "smlatb%?\\t%0, %1, %2, %3"
2494 [(set_attr "type" "smlaxy")
2495 (set_attr "predicable" "yes")]
2496 )
2497
2498 (define_insn "*maddhisi4tt"
2499 [(set (match_operand:SI 0 "s_register_operand" "=r")
2500 (plus:SI (mult:SI (ashiftrt:SI
2501 (match_operand:SI 1 "s_register_operand" "r")
2502 (const_int 16))
2503 (ashiftrt:SI
2504 (match_operand:SI 2 "s_register_operand" "r")
2505 (const_int 16)))
2506 (match_operand:SI 3 "s_register_operand" "r")))]
2507 "TARGET_DSP_MULTIPLY"
2508 "smlatt%?\\t%0, %1, %2, %3"
2509 [(set_attr "type" "smlaxy")
2510 (set_attr "predicable" "yes")]
2511 )
2512
2513 (define_insn "maddhidi4"
2514 [(set (match_operand:DI 0 "s_register_operand" "=r")
2515 (plus:DI
2516 (mult:DI (sign_extend:DI
2517 (match_operand:HI 1 "s_register_operand" "r"))
2518 (sign_extend:DI
2519 (match_operand:HI 2 "s_register_operand" "r")))
2520 (match_operand:DI 3 "s_register_operand" "0")))]
2521 "TARGET_DSP_MULTIPLY"
2522 "smlalbb%?\\t%Q0, %R0, %1, %2"
2523 [(set_attr "type" "smlalxy")
2524 (set_attr "predicable" "yes")])
2525
2526 ;; Note: there is no maddhidi4bt pattern because this one is the canonical form
2527 (define_insn "*maddhidi4tb"
2528 [(set (match_operand:DI 0 "s_register_operand" "=r")
2529 (plus:DI
2530 (mult:DI (sign_extend:DI
2531 (ashiftrt:SI
2532 (match_operand:SI 1 "s_register_operand" "r")
2533 (const_int 16)))
2534 (sign_extend:DI
2535 (match_operand:HI 2 "s_register_operand" "r")))
2536 (match_operand:DI 3 "s_register_operand" "0")))]
2537 "TARGET_DSP_MULTIPLY"
2538 "smlaltb%?\\t%Q0, %R0, %1, %2"
2539 [(set_attr "type" "smlalxy")
2540 (set_attr "predicable" "yes")])
2541
2542 (define_insn "*maddhidi4tt"
2543 [(set (match_operand:DI 0 "s_register_operand" "=r")
2544 (plus:DI
2545 (mult:DI (sign_extend:DI
2546 (ashiftrt:SI
2547 (match_operand:SI 1 "s_register_operand" "r")
2548 (const_int 16)))
2549 (sign_extend:DI
2550 (ashiftrt:SI
2551 (match_operand:SI 2 "s_register_operand" "r")
2552 (const_int 16))))
2553 (match_operand:DI 3 "s_register_operand" "0")))]
2554 "TARGET_DSP_MULTIPLY"
2555 "smlaltt%?\\t%Q0, %R0, %1, %2"
2556 [(set_attr "type" "smlalxy")
2557 (set_attr "predicable" "yes")])
2558
2559 (define_expand "mulsf3"
2560 [(set (match_operand:SF 0 "s_register_operand")
2561 (mult:SF (match_operand:SF 1 "s_register_operand")
2562 (match_operand:SF 2 "s_register_operand")))]
2563 "TARGET_32BIT && TARGET_HARD_FLOAT"
2564 "
2565 ")
2566
2567 (define_expand "muldf3"
2568 [(set (match_operand:DF 0 "s_register_operand")
2569 (mult:DF (match_operand:DF 1 "s_register_operand")
2570 (match_operand:DF 2 "s_register_operand")))]
2571 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2572 "
2573 ")
2574 \f
2575 ;; Division insns
2576
2577 (define_expand "divsf3"
2578 [(set (match_operand:SF 0 "s_register_operand")
2579 (div:SF (match_operand:SF 1 "s_register_operand")
2580 (match_operand:SF 2 "s_register_operand")))]
2581 "TARGET_32BIT && TARGET_HARD_FLOAT"
2582 "")
2583
2584 (define_expand "divdf3"
2585 [(set (match_operand:DF 0 "s_register_operand")
2586 (div:DF (match_operand:DF 1 "s_register_operand")
2587 (match_operand:DF 2 "s_register_operand")))]
2588 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
2589 "")
2590 \f
2591
2592 ; Expand logical operations. The mid-end expander does not split off memory
2593 ; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
2594 ; So an explicit expander is needed to generate better code.
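; For example, <optab>di3 below becomes two SImode operations, one on the low
; halves and one on the high halves, so each 32-bit immediate can be checked
; and simplified independently.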
2595
2596 (define_expand "<LOGICAL:optab>di3"
2597 [(set (match_operand:DI 0 "s_register_operand")
2598 (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
2599 (match_operand:DI 2 "arm_<optab>di_operand")))]
2600 "TARGET_32BIT"
2601 {
2602 rtx low = simplify_gen_binary (<CODE>, SImode,
2603 gen_lowpart (SImode, operands[1]),
2604 gen_lowpart (SImode, operands[2]));
2605 rtx high = simplify_gen_binary (<CODE>, SImode,
2606 gen_highpart (SImode, operands[1]),
2607 gen_highpart_mode (SImode, DImode,
2608 operands[2]));
2609
2610 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2611 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2612 DONE;
2613 }
2614 )
2615
2616 (define_expand "one_cmpldi2"
2617 [(set (match_operand:DI 0 "s_register_operand")
2618 (not:DI (match_operand:DI 1 "s_register_operand")))]
2619 "TARGET_32BIT"
2620 {
2621 rtx low = simplify_gen_unary (NOT, SImode,
2622 gen_lowpart (SImode, operands[1]),
2623 SImode);
2624 rtx high = simplify_gen_unary (NOT, SImode,
2625 gen_highpart_mode (SImode, DImode,
2626 operands[1]),
2627 SImode);
2628
2629 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2630 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2631 DONE;
2632 }
2633 )
2634
2635 ;; Split DImode and, ior, xor operations. Simply perform the logical
2636 ;; operation on the upper and lower halves of the registers.
2637 ;; This is needed for atomic operations in arm_split_atomic_op.
2638 ;; Avoid splitting IWMMXT instructions.
2639 (define_split
2640 [(set (match_operand:DI 0 "s_register_operand" "")
2641 (match_operator:DI 6 "logical_binary_operator"
2642 [(match_operand:DI 1 "s_register_operand" "")
2643 (match_operand:DI 2 "s_register_operand" "")]))]
2644 "TARGET_32BIT && reload_completed
2645 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2646 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2647 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
2648 "
2649 {
2650 operands[3] = gen_highpart (SImode, operands[0]);
2651 operands[0] = gen_lowpart (SImode, operands[0]);
2652 operands[4] = gen_highpart (SImode, operands[1]);
2653 operands[1] = gen_lowpart (SImode, operands[1]);
2654 operands[5] = gen_highpart (SImode, operands[2]);
2655 operands[2] = gen_lowpart (SImode, operands[2]);
2656 }"
2657 )
2658
2659 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
2660 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
2661 (define_split
2662 [(set (match_operand:DI 0 "s_register_operand")
2663 (not:DI (match_operand:DI 1 "s_register_operand")))]
2664 "TARGET_32BIT"
2665 [(set (match_dup 0) (not:SI (match_dup 1)))
2666 (set (match_dup 2) (not:SI (match_dup 3)))]
2667 "
2668 {
2669 operands[2] = gen_highpart (SImode, operands[0]);
2670 operands[0] = gen_lowpart (SImode, operands[0]);
2671 operands[3] = gen_highpart (SImode, operands[1]);
2672 operands[1] = gen_lowpart (SImode, operands[1]);
2673 }"
2674 )
2675
2676 (define_expand "andsi3"
2677 [(set (match_operand:SI 0 "s_register_operand")
2678 (and:SI (match_operand:SI 1 "s_register_operand")
2679 (match_operand:SI 2 "reg_or_int_operand")))]
2680 "TARGET_EITHER"
2681 "
2682 if (TARGET_32BIT)
2683 {
2684 if (CONST_INT_P (operands[2]))
2685 {
2686 if (INTVAL (operands[2]) == 255 && arm_arch6)
2687 {
2688 operands[1] = convert_to_mode (QImode, operands[1], 1);
2689 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2690 operands[1]));
2691 DONE;
2692 }
2693 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
2694 operands[2] = force_reg (SImode, operands[2]);
2695 else
2696 {
2697 arm_split_constant (AND, SImode, NULL_RTX,
2698 INTVAL (operands[2]), operands[0],
2699 operands[1],
2700 optimize && can_create_pseudo_p ());
2701
2702 DONE;
2703 }
2704 }
2705 }
2706 else /* TARGET_THUMB1 */
2707 {
2708 if (!CONST_INT_P (operands[2]))
2709 {
2710 rtx tmp = force_reg (SImode, operands[2]);
2711 if (rtx_equal_p (operands[0], operands[1]))
2712 operands[2] = tmp;
2713 else
2714 {
2715 operands[2] = operands[1];
2716 operands[1] = tmp;
2717 }
2718 }
2719 else
2720 {
2721 int i;
2722
2723 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2724 {
2725 operands[2] = force_reg (SImode,
2726 GEN_INT (~INTVAL (operands[2])));
2727
2728 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2729
2730 DONE;
2731 }
2732
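/* Otherwise look for a mask that is a right-aligned run of ones,
(1 << i) - 1, which a single extract handles, or whose complement is such
a run, in which case shifting right and then left by i clears the low i
bits; e.g. AND with 0xffffff00 (i == 8) becomes LSR #8 followed by LSL #8.  */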
2733 for (i = 9; i <= 31; i++)
2734 {
2735 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
2736 {
2737 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2738 const0_rtx));
2739 DONE;
2740 }
2741 else if ((HOST_WIDE_INT_1 << i) - 1
2742 == ~INTVAL (operands[2]))
2743 {
2744 rtx shift = GEN_INT (i);
2745 rtx reg = gen_reg_rtx (SImode);
2746
2747 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2748 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2749
2750 DONE;
2751 }
2752 }
2753
2754 operands[2] = force_reg (SImode, operands[2]);
2755 }
2756 }
2757 "
2758 )
2759
2760 ; ??? Check split length for Thumb-2
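; The `K' alternative matches constants whose bitwise complement is a valid
; immediate; AND with such a constant is emitted as BIC with the complemented
; constant.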
2761 (define_insn_and_split "*arm_andsi3_insn"
2762 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2763 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2764 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2765 "TARGET_32BIT"
2766 "@
2767 and%?\\t%0, %1, %2
2768 and%?\\t%0, %1, %2
2769 bic%?\\t%0, %1, #%B2
2770 and%?\\t%0, %1, %2
2771 #"
2772 "TARGET_32BIT
2773 && CONST_INT_P (operands[2])
2774 && !(const_ok_for_arm (INTVAL (operands[2]))
2775 || const_ok_for_arm (~INTVAL (operands[2])))"
2776 [(clobber (const_int 0))]
2777 "
2778 arm_split_constant (AND, SImode, curr_insn,
2779 INTVAL (operands[2]), operands[0], operands[1], 0);
2780 DONE;
2781 "
2782 [(set_attr "length" "4,4,4,4,16")
2783 (set_attr "predicable" "yes")
2784 (set_attr "predicable_short_it" "no,yes,no,no,no")
2785 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
2786 )
2787
2788 (define_insn "*andsi3_compare0"
2789 [(set (reg:CC_NOOV CC_REGNUM)
2790 (compare:CC_NOOV
2791 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2792 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
2793 (const_int 0)))
2794 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2795 (and:SI (match_dup 1) (match_dup 2)))]
2796 "TARGET_32BIT"
2797 "@
2798 ands%?\\t%0, %1, %2
2799 bics%?\\t%0, %1, #%B2
2800 ands%?\\t%0, %1, %2"
2801 [(set_attr "conds" "set")
2802 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
2803 )
2804
2805 (define_insn "*andsi3_compare0_scratch"
2806 [(set (reg:CC_NOOV CC_REGNUM)
2807 (compare:CC_NOOV
2808 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
2809 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
2810 (const_int 0)))
2811 (clobber (match_scratch:SI 2 "=X,r,X"))]
2812 "TARGET_32BIT"
2813 "@
2814 tst%?\\t%0, %1
2815 bics%?\\t%2, %0, #%B1
2816 tst%?\\t%0, %1"
2817 [(set_attr "conds" "set")
2818 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
2819 )
2820
2821 (define_insn "*zeroextractsi_compare0_scratch"
2822 [(set (reg:CC_NOOV CC_REGNUM)
2823 (compare:CC_NOOV (zero_extract:SI
2824 (match_operand:SI 0 "s_register_operand" "r")
2825 (match_operand 1 "const_int_operand" "n")
2826 (match_operand 2 "const_int_operand" "n"))
2827 (const_int 0)))]
2828 "TARGET_32BIT
2829 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2830 && INTVAL (operands[1]) > 0
2831 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2832 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2833 "*
2834 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2835 << INTVAL (operands[2]));
2836 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2837 return \"\";
2838 "
2839 [(set_attr "conds" "set")
2840 (set_attr "predicable" "yes")
2841 (set_attr "type" "logics_imm")]
2842 )
2843
2844 (define_insn_and_split "*ne_zeroextractsi"
2845 [(set (match_operand:SI 0 "s_register_operand" "=r")
2846 (ne:SI (zero_extract:SI
2847 (match_operand:SI 1 "s_register_operand" "r")
2848 (match_operand:SI 2 "const_int_operand" "n")
2849 (match_operand:SI 3 "const_int_operand" "n"))
2850 (const_int 0)))
2851 (clobber (reg:CC CC_REGNUM))]
2852 "TARGET_32BIT
2853 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2854 && INTVAL (operands[2]) > 0
2855 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2856 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2857 "#"
2858 "TARGET_32BIT
2859 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2860 && INTVAL (operands[2]) > 0
2861 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2862 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2863 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2864 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2865 (const_int 0)))
2866 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2867 (set (match_dup 0)
2868 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2869 (match_dup 0) (const_int 1)))]
2870 "
2871 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2872 << INTVAL (operands[3]));
2873 "
2874 [(set_attr "conds" "clob")
2875 (set (attr "length")
2876 (if_then_else (eq_attr "is_thumb" "yes")
2877 (const_int 12)
2878 (const_int 8)))
2879 (set_attr "type" "multiple")]
2880 )
2881
2882 (define_insn_and_split "*ne_zeroextractsi_shifted"
2883 [(set (match_operand:SI 0 "s_register_operand" "=r")
2884 (ne:SI (zero_extract:SI
2885 (match_operand:SI 1 "s_register_operand" "r")
2886 (match_operand:SI 2 "const_int_operand" "n")
2887 (const_int 0))
2888 (const_int 0)))
2889 (clobber (reg:CC CC_REGNUM))]
2890 "TARGET_ARM"
2891 "#"
2892 "TARGET_ARM"
2893 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2894 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2895 (const_int 0)))
2896 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2897 (set (match_dup 0)
2898 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2899 (match_dup 0) (const_int 1)))]
2900 "
2901 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2902 "
2903 [(set_attr "conds" "clob")
2904 (set_attr "length" "8")
2905 (set_attr "type" "multiple")]
2906 )
2907
2908 (define_insn_and_split "*ite_ne_zeroextractsi"
2909 [(set (match_operand:SI 0 "s_register_operand" "=r")
2910 (if_then_else:SI (ne (zero_extract:SI
2911 (match_operand:SI 1 "s_register_operand" "r")
2912 (match_operand:SI 2 "const_int_operand" "n")
2913 (match_operand:SI 3 "const_int_operand" "n"))
2914 (const_int 0))
2915 (match_operand:SI 4 "arm_not_operand" "rIK")
2916 (const_int 0)))
2917 (clobber (reg:CC CC_REGNUM))]
2918 "TARGET_ARM
2919 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2920 && INTVAL (operands[2]) > 0
2921 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2922 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2923 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2924 "#"
2925 "TARGET_ARM
2926 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2927 && INTVAL (operands[2]) > 0
2928 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2929 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2930 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2931 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2932 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2933 (const_int 0)))
2934 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2935 (set (match_dup 0)
2936 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2937 (match_dup 0) (match_dup 4)))]
2938 "
2939 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2940 << INTVAL (operands[3]));
2941 "
2942 [(set_attr "conds" "clob")
2943 (set_attr "length" "8")
2944 (set_attr "type" "multiple")]
2945 )
2946
2947 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2948 [(set (match_operand:SI 0 "s_register_operand" "=r")
2949 (if_then_else:SI (ne (zero_extract:SI
2950 (match_operand:SI 1 "s_register_operand" "r")
2951 (match_operand:SI 2 "const_int_operand" "n")
2952 (const_int 0))
2953 (const_int 0))
2954 (match_operand:SI 3 "arm_not_operand" "rIK")
2955 (const_int 0)))
2956 (clobber (reg:CC CC_REGNUM))]
2957 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2958 "#"
2959 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2960 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2961 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2962 (const_int 0)))
2963 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2964 (set (match_dup 0)
2965 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2966 (match_dup 0) (match_dup 3)))]
2967 "
2968 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2969 "
2970 [(set_attr "conds" "clob")
2971 (set_attr "length" "8")
2972 (set_attr "type" "multiple")]
2973 )
2974
2975 ;; ??? Thumb-2 has bitfield insert/extract instructions that could be used here.
2976 (define_split
2977 [(set (match_operand:SI 0 "s_register_operand" "")
2978 (match_operator:SI 1 "shiftable_operator"
2979 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2980 (match_operand:SI 3 "const_int_operand" "")
2981 (match_operand:SI 4 "const_int_operand" ""))
2982 (match_operand:SI 5 "s_register_operand" "")]))
2983 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2984 "TARGET_ARM"
2985 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2986 (set (match_dup 0)
2987 (match_op_dup 1
2988 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2989 (match_dup 5)]))]
2990 "{
2991 HOST_WIDE_INT temp = INTVAL (operands[3]);
2992
2993 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2994 operands[4] = GEN_INT (32 - temp);
2995 }"
2996 )
2997
2998 (define_split
2999 [(set (match_operand:SI 0 "s_register_operand" "")
3000 (match_operator:SI 1 "shiftable_operator"
3001 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3002 (match_operand:SI 3 "const_int_operand" "")
3003 (match_operand:SI 4 "const_int_operand" ""))
3004 (match_operand:SI 5 "s_register_operand" "")]))
3005 (clobber (match_operand:SI 6 "s_register_operand" ""))]
3006 "TARGET_ARM"
3007 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
3008 (set (match_dup 0)
3009 (match_op_dup 1
3010 [(ashiftrt:SI (match_dup 6) (match_dup 4))
3011 (match_dup 5)]))]
3012 "{
3013 HOST_WIDE_INT temp = INTVAL (operands[3]);
3014
3015 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
3016 operands[4] = GEN_INT (32 - temp);
3017 }"
3018 )
3019
3020 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
3021 ;;; represented by the bitfield, then this will produce incorrect results.
3022 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
3023 ;;; which have a real bit-field insert instruction, the truncation happens
3024 ;;; in the bit-field insert instruction itself. Since arm does not have a
3025 ;;; bit-field insert instruction, we would have to emit code here to truncate
3026 ;;; the value before we insert. This loses some of the advantage of having
3027 ;;; this insv pattern, so this pattern needs to be reevaluated.
3028
3029 (define_expand "insv"
3030 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
3031 (match_operand 1 "general_operand")
3032 (match_operand 2 "general_operand"))
3033 (match_operand 3 "reg_or_int_operand"))]
3034 "TARGET_ARM || arm_arch_thumb2"
3035 "
3036 {
3037 int start_bit = INTVAL (operands[2]);
3038 int width = INTVAL (operands[1]);
3039 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
3040 rtx target, subtarget;
3041
3042 if (arm_arch_thumb2)
3043 {
3044 if (unaligned_access && MEM_P (operands[0])
3045 && s_register_operand (operands[3], GET_MODE (operands[3]))
3046 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
3047 {
3048 rtx base_addr;
3049
3050 if (BYTES_BIG_ENDIAN)
3051 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
3052 - start_bit;
3053
3054 if (width == 32)
3055 {
3056 base_addr = adjust_address (operands[0], SImode,
3057 start_bit / BITS_PER_UNIT);
3058 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
3059 }
3060 else
3061 {
3062 rtx tmp = gen_reg_rtx (HImode);
3063
3064 base_addr = adjust_address (operands[0], HImode,
3065 start_bit / BITS_PER_UNIT);
3066 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
3067 emit_insn (gen_unaligned_storehi (base_addr, tmp));
3068 }
3069 DONE;
3070 }
3071 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
3072 {
3073 bool use_bfi = TRUE;
3074
3075 if (CONST_INT_P (operands[3]))
3076 {
3077 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
3078
3079 if (val == 0)
3080 {
3081 emit_insn (gen_insv_zero (operands[0], operands[1],
3082 operands[2]));
3083 DONE;
3084 }
3085
3086 /* See if the set can be done with a single orr instruction. */
3087 if (val == mask && const_ok_for_arm (val << start_bit))
3088 use_bfi = FALSE;
3089 }
3090
3091 if (use_bfi)
3092 {
3093 if (!REG_P (operands[3]))
3094 operands[3] = force_reg (SImode, operands[3]);
3095
3096 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
3097 operands[3]));
3098 DONE;
3099 }
3100 }
3101 else
3102 FAIL;
3103 }
3104
3105 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
3106 FAIL;
3107
3108 target = copy_rtx (operands[0]);
3109 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
3110 subreg as the final target. */
3111 if (GET_CODE (target) == SUBREG)
3112 {
3113 subtarget = gen_reg_rtx (SImode);
3114 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
3115 < GET_MODE_SIZE (SImode))
3116 target = SUBREG_REG (target);
3117 }
3118 else
3119 subtarget = target;
3120
3121 if (CONST_INT_P (operands[3]))
3122 {
3123 /* Since we are inserting a known constant, we may be able to
3124 reduce the number of bits that we have to clear so that
3125 the mask becomes simple. */
3126 /* ??? This code does not check to see if the new mask is actually
3127 simpler. It may not be. */
3128 rtx op1 = gen_reg_rtx (SImode);
3129 /* ??? Truncate operand3 to fit in the bitfield. See comment before
3130 start of this pattern. */
3131 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
3132 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
3133
3134 emit_insn (gen_andsi3 (op1, operands[0],
3135 gen_int_mode (~mask2, SImode)));
3136 emit_insn (gen_iorsi3 (subtarget, op1,
3137 gen_int_mode (op3_value << start_bit, SImode)));
3138 }
3139 else if (start_bit == 0
3140 && !(const_ok_for_arm (mask)
3141 || const_ok_for_arm (~mask)))
3142 {
3143 /* A trick: since we are setting the bottom bits in the word,
3144 we can shift operand[3] up, operand[0] down, OR them together
3145 and rotate the result back again. This takes 3 insns, and
3146 the third might be mergeable into another op. */
3147 /* The shift up copes with the possibility that operand[3] is
3148 wider than the bitfield. */
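/* For example, inserting a 12-bit field at bit 0 of r:
   op0 = value << 20;  op1 = r >> 12;  op1 |= op0;
   result = rotate-left (op1, 12);
   which restores the top 20 bits of r and leaves the (truncated) value in
   the low 12 bits.  */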
3149 rtx op0 = gen_reg_rtx (SImode);
3150 rtx op1 = gen_reg_rtx (SImode);
3151
3152 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3153 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
3154 emit_insn (gen_iorsi3 (op1, op1, op0));
3155 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
3156 }
3157 else if ((width + start_bit == 32)
3158 && !(const_ok_for_arm (mask)
3159 || const_ok_for_arm (~mask)))
3160 {
3161 /* Similar trick, but slightly less efficient. */
3162
3163 rtx op0 = gen_reg_rtx (SImode);
3164 rtx op1 = gen_reg_rtx (SImode);
3165
3166 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3167 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
3168 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
3169 emit_insn (gen_iorsi3 (subtarget, op1, op0));
3170 }
3171 else
3172 {
3173 rtx op0 = gen_int_mode (mask, SImode);
3174 rtx op1 = gen_reg_rtx (SImode);
3175 rtx op2 = gen_reg_rtx (SImode);
3176
3177 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
3178 {
3179 rtx tmp = gen_reg_rtx (SImode);
3180
3181 emit_insn (gen_movsi (tmp, op0));
3182 op0 = tmp;
3183 }
3184
3185 /* Mask out any bits in operand[3] that are not needed. */
3186 emit_insn (gen_andsi3 (op1, operands[3], op0));
3187
3188 if (CONST_INT_P (op0)
3189 && (const_ok_for_arm (mask << start_bit)
3190 || const_ok_for_arm (~(mask << start_bit))))
3191 {
3192 op0 = gen_int_mode (~(mask << start_bit), SImode);
3193 emit_insn (gen_andsi3 (op2, operands[0], op0));
3194 }
3195 else
3196 {
3197 if (CONST_INT_P (op0))
3198 {
3199 rtx tmp = gen_reg_rtx (SImode);
3200
3201 emit_insn (gen_movsi (tmp, op0));
3202 op0 = tmp;
3203 }
3204
3205 if (start_bit != 0)
3206 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
3207
3208 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
3209 }
3210
3211 if (start_bit != 0)
3212 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
3213
3214 emit_insn (gen_iorsi3 (subtarget, op1, op2));
3215 }
3216
3217 if (subtarget != target)
3218 {
3219 /* If TARGET is still a SUBREG, then it must be wider than a word,
3220 so we must be careful only to set the subword we were asked to. */
3221 if (GET_CODE (target) == SUBREG)
3222 emit_move_insn (target, subtarget);
3223 else
3224 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
3225 }
3226
3227 DONE;
3228 }"
3229 )
3230
3231 (define_insn "insv_zero"
3232 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3233 (match_operand:SI 1 "const_int_M_operand" "M")
3234 (match_operand:SI 2 "const_int_M_operand" "M"))
3235 (const_int 0))]
3236 "arm_arch_thumb2"
3237 "bfc%?\t%0, %2, %1"
3238 [(set_attr "length" "4")
3239 (set_attr "predicable" "yes")
3240 (set_attr "type" "bfm")]
3241 )
3242
3243 (define_insn "insv_t2"
3244 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3245 (match_operand:SI 1 "const_int_M_operand" "M")
3246 (match_operand:SI 2 "const_int_M_operand" "M"))
3247 (match_operand:SI 3 "s_register_operand" "r"))]
3248 "arm_arch_thumb2"
3249 "bfi%?\t%0, %3, %2, %1"
3250 [(set_attr "length" "4")
3251 (set_attr "predicable" "yes")
3252 (set_attr "type" "bfm")]
3253 )
3254
3255 (define_insn "andsi_notsi_si"
3256 [(set (match_operand:SI 0 "s_register_operand" "=r")
3257 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3258 (match_operand:SI 1 "s_register_operand" "r")))]
3259 "TARGET_32BIT"
3260 "bic%?\\t%0, %1, %2"
3261 [(set_attr "predicable" "yes")
3262 (set_attr "type" "logic_reg")]
3263 )
3264
3265 (define_insn "andsi_not_shiftsi_si"
3266 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3267 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
3268 [(match_operand:SI 2 "s_register_operand" "r,r")
3269 (match_operand:SI 3 "shift_amount_operand" "M,r")]))
3270 (match_operand:SI 1 "s_register_operand" "r,r")))]
3271 "TARGET_32BIT"
3272 "bic%?\\t%0, %1, %2%S4"
3273 [(set_attr "predicable" "yes")
3274 (set_attr "shift" "2")
3275 (set_attr "arch" "32,a")
3276 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3277 )
3278
3279 ;; Shifted bics pattern used to set up the CC status register without reusing
3280 ;; the bics output. The pattern restricts the shift operand to an immediate for
3281 ;; Thumb-2, since bics in Thumb-2 does not support a shift by register.
3282 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
3283 [(set (reg:CC_NOOV CC_REGNUM)
3284 (compare:CC_NOOV
3285 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3286 [(match_operand:SI 1 "s_register_operand" "r,r")
3287 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
3288 (match_operand:SI 3 "s_register_operand" "r,r"))
3289 (const_int 0)))
3290 (clobber (match_scratch:SI 4 "=r,r"))]
3291 "TARGET_32BIT"
3292 "bics%?\\t%4, %3, %1%S0"
3293 [(set_attr "predicable" "yes")
3294 (set_attr "arch" "32,a")
3295 (set_attr "conds" "set")
3296 (set_attr "shift" "1")
3297 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3298 )
3299
3300 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the bics result is also
3301 ;; reused later.
3302 (define_insn "andsi_not_shiftsi_si_scc"
3303 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
3304 (compare:CC_NOOV
3305 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3306 [(match_operand:SI 1 "s_register_operand" "r,r")
3307 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
3308 (match_operand:SI 3 "s_register_operand" "r,r"))
3309 (const_int 0)))
3310 (set (match_operand:SI 4 "s_register_operand" "=r,r")
3311 (and:SI (not:SI (match_op_dup 0
3312 [(match_dup 1)
3313 (match_dup 2)]))
3314 (match_dup 3)))])]
3315 "TARGET_32BIT"
3316 "bics%?\\t%4, %3, %1%S0"
3317 [(set_attr "predicable" "yes")
3318 (set_attr "arch" "32,a")
3319 (set_attr "conds" "set")
3320 (set_attr "shift" "1")
3321 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3322 )
3323
3324 (define_insn "*andsi_notsi_si_compare0"
3325 [(set (reg:CC_NOOV CC_REGNUM)
3326 (compare:CC_NOOV
3327 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3328 (match_operand:SI 1 "s_register_operand" "r"))
3329 (const_int 0)))
3330 (set (match_operand:SI 0 "s_register_operand" "=r")
3331 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
3332 "TARGET_32BIT"
3333 "bics\\t%0, %1, %2"
3334 [(set_attr "conds" "set")
3335 (set_attr "type" "logics_shift_reg")]
3336 )
3337
3338 (define_insn "*andsi_notsi_si_compare0_scratch"
3339 [(set (reg:CC_NOOV CC_REGNUM)
3340 (compare:CC_NOOV
3341 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3342 (match_operand:SI 1 "s_register_operand" "r"))
3343 (const_int 0)))
3344 (clobber (match_scratch:SI 0 "=r"))]
3345 "TARGET_32BIT"
3346 "bics\\t%0, %1, %2"
3347 [(set_attr "conds" "set")
3348 (set_attr "type" "logics_shift_reg")]
3349 )
3350
3351 (define_expand "iorsi3"
3352 [(set (match_operand:SI 0 "s_register_operand")
3353 (ior:SI (match_operand:SI 1 "s_register_operand")
3354 (match_operand:SI 2 "reg_or_int_operand")))]
3355 "TARGET_EITHER"
3356 "
3357 if (CONST_INT_P (operands[2]))
3358 {
3359 if (TARGET_32BIT)
3360 {
3361 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
3362 operands[2] = force_reg (SImode, operands[2]);
3363 else
3364 {
3365 arm_split_constant (IOR, SImode, NULL_RTX,
3366 INTVAL (operands[2]), operands[0],
3367 operands[1],
3368 optimize && can_create_pseudo_p ());
3369 DONE;
3370 }
3371 }
3372 else /* TARGET_THUMB1 */
3373 {
3374 rtx tmp = force_reg (SImode, operands[2]);
3375 if (rtx_equal_p (operands[0], operands[1]))
3376 operands[2] = tmp;
3377 else
3378 {
3379 operands[2] = operands[1];
3380 operands[1] = tmp;
3381 }
3382 }
3383 }
3384 "
3385 )
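;; An illustrative sketch (added commentary, not taken from the expander
;; above): when the IOR constant cannot be encoded as a single modified
;; immediate, arm_split_constant breaks it into pieces that can.  Roughly,
;; and depending on the target ISA,
;;
;;   r0 = r1 | 0x00FF0FF0;
;;
;; may be emitted as two ORRs of encodable immediates:
;;
;;   orr r0, r1, #0x00FF0000
;;   orr r0, r0, #0x00000FF0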
3386
3387 (define_insn_and_split "*iorsi3_insn"
3388 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
3389 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
3390 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
3391 "TARGET_32BIT"
3392 "@
3393 orr%?\\t%0, %1, %2
3394 orr%?\\t%0, %1, %2
3395 orn%?\\t%0, %1, #%B2
3396 orr%?\\t%0, %1, %2
3397 #"
3398 "TARGET_32BIT
3399 && CONST_INT_P (operands[2])
3400 && !(const_ok_for_arm (INTVAL (operands[2]))
3401 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
3402 [(clobber (const_int 0))]
3403 {
3404 arm_split_constant (IOR, SImode, curr_insn,
3405 INTVAL (operands[2]), operands[0], operands[1], 0);
3406 DONE;
3407 }
3408 [(set_attr "length" "4,4,4,4,16")
3409 (set_attr "arch" "32,t2,t2,32,32")
3410 (set_attr "predicable" "yes")
3411 (set_attr "predicable_short_it" "no,yes,no,no,no")
3412 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
3413 )
3414
3415 (define_peephole2
3416 [(match_scratch:SI 3 "r")
3417 (set (match_operand:SI 0 "arm_general_register_operand" "")
3418 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
3419 (match_operand:SI 2 "const_int_operand" "")))]
3420 "TARGET_ARM
3421 && !const_ok_for_arm (INTVAL (operands[2]))
3422 && const_ok_for_arm (~INTVAL (operands[2]))"
3423 [(set (match_dup 3) (match_dup 2))
3424 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
3425 ""
3426 )
3427
3428 (define_insn "*iorsi3_compare0"
3429 [(set (reg:CC_NOOV CC_REGNUM)
3430 (compare:CC_NOOV
3431 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3432 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3433 (const_int 0)))
3434 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
3435 (ior:SI (match_dup 1) (match_dup 2)))]
3436 "TARGET_32BIT"
3437 "orrs%?\\t%0, %1, %2"
3438 [(set_attr "conds" "set")
3439 (set_attr "arch" "*,t2,*")
3440 (set_attr "length" "4,2,4")
3441 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
3442 )
3443
3444 (define_insn "*iorsi3_compare0_scratch"
3445 [(set (reg:CC_NOOV CC_REGNUM)
3446 (compare:CC_NOOV
3447 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3448 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3449 (const_int 0)))
3450 (clobber (match_scratch:SI 0 "=r,l,r"))]
3451 "TARGET_32BIT"
3452 "orrs%?\\t%0, %1, %2"
3453 [(set_attr "conds" "set")
3454 (set_attr "arch" "*,t2,*")
3455 (set_attr "length" "4,2,4")
3456 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
3457 )
3458
3459 (define_expand "xorsi3"
3460 [(set (match_operand:SI 0 "s_register_operand")
3461 (xor:SI (match_operand:SI 1 "s_register_operand")
3462 (match_operand:SI 2 "reg_or_int_operand")))]
3463 "TARGET_EITHER"
3464 "if (CONST_INT_P (operands[2]))
3465 {
3466 if (TARGET_32BIT)
3467 {
3468 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
3469 operands[2] = force_reg (SImode, operands[2]);
3470 else
3471 {
3472 arm_split_constant (XOR, SImode, NULL_RTX,
3473 INTVAL (operands[2]), operands[0],
3474 operands[1],
3475 optimize && can_create_pseudo_p ());
3476 DONE;
3477 }
3478 }
3479 else /* TARGET_THUMB1 */
3480 {
3481 rtx tmp = force_reg (SImode, operands[2]);
3482 if (rtx_equal_p (operands[0], operands[1]))
3483 operands[2] = tmp;
3484 else
3485 {
3486 operands[2] = operands[1];
3487 operands[1] = tmp;
3488 }
3489 }
3490 }"
3491 )
3492
3493 (define_insn_and_split "*arm_xorsi3"
3494 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
3495 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
3496 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
3497 "TARGET_32BIT"
3498 "@
3499 eor%?\\t%0, %1, %2
3500 eor%?\\t%0, %1, %2
3501 eor%?\\t%0, %1, %2
3502 #"
3503 "TARGET_32BIT
3504 && CONST_INT_P (operands[2])
3505 && !const_ok_for_arm (INTVAL (operands[2]))"
3506 [(clobber (const_int 0))]
3507 {
3508 arm_split_constant (XOR, SImode, curr_insn,
3509 INTVAL (operands[2]), operands[0], operands[1], 0);
3510 DONE;
3511 }
3512 [(set_attr "length" "4,4,4,16")
3513 (set_attr "predicable" "yes")
3514 (set_attr "predicable_short_it" "no,yes,no,no")
3515 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
3516 )
3517
3518 (define_insn "*xorsi3_compare0"
3519 [(set (reg:CC_NOOV CC_REGNUM)
3520 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
3521 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3522 (const_int 0)))
3523 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3524 (xor:SI (match_dup 1) (match_dup 2)))]
3525 "TARGET_32BIT"
3526 "eors%?\\t%0, %1, %2"
3527 [(set_attr "conds" "set")
3528 (set_attr "type" "logics_imm,logics_reg")]
3529 )
3530
3531 (define_insn "*xorsi3_compare0_scratch"
3532 [(set (reg:CC_NOOV CC_REGNUM)
3533 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
3534 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
3535 (const_int 0)))]
3536 "TARGET_32BIT"
3537 "teq%?\\t%0, %1"
3538 [(set_attr "conds" "set")
3539 (set_attr "type" "logics_imm,logics_reg")]
3540 )
3541
3542 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C)
3543 ; followed by (NOT D), we can sometimes merge the final NOT into one of the
3544 ; following insns.
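;
; An illustrative derivation (added commentary): by De Morgan,
; (~A & ~B) | C == ~(A | B) | C == ~((A | B) & ~C), so computing
; D = (A | B) & ~C and then ~D yields the same value while exposing the
; trailing NOT for combination with a later insn.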
3545
3546 (define_split
3547 [(set (match_operand:SI 0 "s_register_operand" "")
3548 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3549 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3550 (match_operand:SI 3 "arm_rhs_operand" "")))
3551 (clobber (match_operand:SI 4 "s_register_operand" ""))]
3552 "TARGET_32BIT"
3553 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3554 (not:SI (match_dup 3))))
3555 (set (match_dup 0) (not:SI (match_dup 4)))]
3556 ""
3557 )
3558
3559 (define_insn_and_split "*andsi_iorsi3_notsi"
3560 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3561 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3562 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3563 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3564 "TARGET_32BIT"
3565 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3566 "&& reload_completed"
3567 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
3568 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
3569 {
3570 /* If operands[3] is a constant, make sure to fold the NOT into it
3571 to avoid creating a NOT of a CONST_INT. */
3572 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
3573 if (CONST_INT_P (not_rtx))
3574 {
3575 operands[4] = operands[0];
3576 operands[5] = not_rtx;
3577 }
3578 else
3579 {
3580 operands[5] = operands[0];
3581 operands[4] = not_rtx;
3582 }
3583 }
3584 [(set_attr "length" "8")
3585 (set_attr "ce_count" "2")
3586 (set_attr "predicable" "yes")
3587 (set_attr "type" "multiple")]
3588 )
3589
3590 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3591 ; insns are available?
3592 (define_split
3593 [(set (match_operand:SI 0 "s_register_operand" "")
3594 (match_operator:SI 1 "logical_binary_operator"
3595 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3596 (match_operand:SI 3 "const_int_operand" "")
3597 (match_operand:SI 4 "const_int_operand" ""))
3598 (match_operator:SI 9 "logical_binary_operator"
3599 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3600 (match_operand:SI 6 "const_int_operand" ""))
3601 (match_operand:SI 7 "s_register_operand" "")])]))
3602 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3603 "TARGET_32BIT
3604 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3605 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3606 [(set (match_dup 8)
3607 (match_op_dup 1
3608 [(ashift:SI (match_dup 2) (match_dup 4))
3609 (match_dup 5)]))
3610 (set (match_dup 0)
3611 (match_op_dup 1
3612 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3613 (match_dup 7)]))]
3614 "
3615 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3616 ")
3617
3618 (define_split
3619 [(set (match_operand:SI 0 "s_register_operand" "")
3620 (match_operator:SI 1 "logical_binary_operator"
3621 [(match_operator:SI 9 "logical_binary_operator"
3622 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3623 (match_operand:SI 6 "const_int_operand" ""))
3624 (match_operand:SI 7 "s_register_operand" "")])
3625 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3626 (match_operand:SI 3 "const_int_operand" "")
3627 (match_operand:SI 4 "const_int_operand" ""))]))
3628 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3629 "TARGET_32BIT
3630 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3631 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3632 [(set (match_dup 8)
3633 (match_op_dup 1
3634 [(ashift:SI (match_dup 2) (match_dup 4))
3635 (match_dup 5)]))
3636 (set (match_dup 0)
3637 (match_op_dup 1
3638 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3639 (match_dup 7)]))]
3640 "
3641 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3642 ")
3643
3644 (define_split
3645 [(set (match_operand:SI 0 "s_register_operand" "")
3646 (match_operator:SI 1 "logical_binary_operator"
3647 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3648 (match_operand:SI 3 "const_int_operand" "")
3649 (match_operand:SI 4 "const_int_operand" ""))
3650 (match_operator:SI 9 "logical_binary_operator"
3651 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3652 (match_operand:SI 6 "const_int_operand" ""))
3653 (match_operand:SI 7 "s_register_operand" "")])]))
3654 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3655 "TARGET_32BIT
3656 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3657 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3658 [(set (match_dup 8)
3659 (match_op_dup 1
3660 [(ashift:SI (match_dup 2) (match_dup 4))
3661 (match_dup 5)]))
3662 (set (match_dup 0)
3663 (match_op_dup 1
3664 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3665 (match_dup 7)]))]
3666 "
3667 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3668 ")
3669
3670 (define_split
3671 [(set (match_operand:SI 0 "s_register_operand" "")
3672 (match_operator:SI 1 "logical_binary_operator"
3673 [(match_operator:SI 9 "logical_binary_operator"
3674 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3675 (match_operand:SI 6 "const_int_operand" ""))
3676 (match_operand:SI 7 "s_register_operand" "")])
3677 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3678 (match_operand:SI 3 "const_int_operand" "")
3679 (match_operand:SI 4 "const_int_operand" ""))]))
3680 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3681 "TARGET_32BIT
3682 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3683 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3684 [(set (match_dup 8)
3685 (match_op_dup 1
3686 [(ashift:SI (match_dup 2) (match_dup 4))
3687 (match_dup 5)]))
3688 (set (match_dup 0)
3689 (match_op_dup 1
3690 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3691 (match_dup 7)]))]
3692 "
3693 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3694 ")
3695 \f
3696
3697 ;; Minimum and maximum insns
3698
3699 (define_expand "smaxsi3"
3700 [(parallel [
3701 (set (match_operand:SI 0 "s_register_operand")
3702 (smax:SI (match_operand:SI 1 "s_register_operand")
3703 (match_operand:SI 2 "arm_rhs_operand")))
3704 (clobber (reg:CC CC_REGNUM))])]
3705 "TARGET_32BIT"
3706 "
3707 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3708 {
3709 /* No need for a clobber of the condition code register here. */
3710 emit_insn (gen_rtx_SET (operands[0],
3711 gen_rtx_SMAX (SImode, operands[1],
3712 operands[2])));
3713 DONE;
3714 }
3715 ")
3716
3717 (define_insn "*smax_0"
3718 [(set (match_operand:SI 0 "s_register_operand" "=r")
3719 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3720 (const_int 0)))]
3721 "TARGET_32BIT"
3722 "bic%?\\t%0, %1, %1, asr #31"
3723 [(set_attr "predicable" "yes")
3724 (set_attr "type" "logic_shift_reg")]
3725 )
3726
3727 (define_insn "*smax_m1"
3728 [(set (match_operand:SI 0 "s_register_operand" "=r")
3729 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3730 (const_int -1)))]
3731 "TARGET_32BIT"
3732 "orr%?\\t%0, %1, %1, asr #31"
3733 [(set_attr "predicable" "yes")
3734 (set_attr "type" "logic_shift_reg")]
3735 )
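;; An explanatory note (added commentary, not from the original sources):
;; x, asr #31 is 0 when x >= 0 and all ones when x < 0, so it acts as a sign
;; mask.  Hence BIC with that mask implements smax (x, 0), ORR with it
;; implements smax (x, -1), and AND with it implements smin (x, 0) (see
;; *smin_0 below).  For example, with x = -5 the mask is 0xffffffff, so
;; bic x, x, x, asr #31 yields 0, which is smax (-5, 0).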
3736
3737 (define_insn_and_split "*arm_smax_insn"
3738 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3739 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3740 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3741 (clobber (reg:CC CC_REGNUM))]
3742 "TARGET_ARM"
3743 "#"
3744 ; cmp\\t%1, %2\;movlt\\t%0, %2
3745 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3746 "TARGET_ARM"
3747 [(set (reg:CC CC_REGNUM)
3748 (compare:CC (match_dup 1) (match_dup 2)))
3749 (set (match_dup 0)
3750 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
3751 (match_dup 1)
3752 (match_dup 2)))]
3753 ""
3754 [(set_attr "conds" "clob")
3755 (set_attr "length" "8,12")
3756 (set_attr "type" "multiple")]
3757 )
3758
3759 (define_expand "sminsi3"
3760 [(parallel [
3761 (set (match_operand:SI 0 "s_register_operand")
3762 (smin:SI (match_operand:SI 1 "s_register_operand")
3763 (match_operand:SI 2 "arm_rhs_operand")))
3764 (clobber (reg:CC CC_REGNUM))])]
3765 "TARGET_32BIT"
3766 "
3767 if (operands[2] == const0_rtx)
3768 {
3769 /* No need for a clobber of the condition code register here. */
3770 emit_insn (gen_rtx_SET (operands[0],
3771 gen_rtx_SMIN (SImode, operands[1],
3772 operands[2])));
3773 DONE;
3774 }
3775 ")
3776
3777 (define_insn "*smin_0"
3778 [(set (match_operand:SI 0 "s_register_operand" "=r")
3779 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3780 (const_int 0)))]
3781 "TARGET_32BIT"
3782 "and%?\\t%0, %1, %1, asr #31"
3783 [(set_attr "predicable" "yes")
3784 (set_attr "type" "logic_shift_reg")]
3785 )
3786
3787 (define_insn_and_split "*arm_smin_insn"
3788 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3789 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3790 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3791 (clobber (reg:CC CC_REGNUM))]
3792 "TARGET_ARM"
3793 "#"
3794 ; cmp\\t%1, %2\;movge\\t%0, %2
3795 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3796 "TARGET_ARM"
3797 [(set (reg:CC CC_REGNUM)
3798 (compare:CC (match_dup 1) (match_dup 2)))
3799 (set (match_dup 0)
3800 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
3801 (match_dup 1)
3802 (match_dup 2)))]
3803 ""
3804 [(set_attr "conds" "clob")
3805 (set_attr "length" "8,12")
3806 (set_attr "type" "multiple,multiple")]
3807 )
3808
3809 (define_expand "umaxsi3"
3810 [(parallel [
3811 (set (match_operand:SI 0 "s_register_operand")
3812 (umax:SI (match_operand:SI 1 "s_register_operand")
3813 (match_operand:SI 2 "arm_rhs_operand")))
3814 (clobber (reg:CC CC_REGNUM))])]
3815 "TARGET_32BIT"
3816 ""
3817 )
3818
3819 (define_insn_and_split "*arm_umaxsi3"
3820 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3821 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3822 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3823 (clobber (reg:CC CC_REGNUM))]
3824 "TARGET_ARM"
3825 "#"
3826 ; cmp\\t%1, %2\;movcc\\t%0, %2
3827 ; cmp\\t%1, %2\;movcs\\t%0, %1
3828 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3829 "TARGET_ARM"
3830 [(set (reg:CC CC_REGNUM)
3831 (compare:CC (match_dup 1) (match_dup 2)))
3832 (set (match_dup 0)
3833 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
3834 (match_dup 1)
3835 (match_dup 2)))]
3836 ""
3837 [(set_attr "conds" "clob")
3838 (set_attr "length" "8,8,12")
3839 (set_attr "type" "store_4")]
3840 )
3841
3842 (define_expand "uminsi3"
3843 [(parallel [
3844 (set (match_operand:SI 0 "s_register_operand")
3845 (umin:SI (match_operand:SI 1 "s_register_operand")
3846 (match_operand:SI 2 "arm_rhs_operand")))
3847 (clobber (reg:CC CC_REGNUM))])]
3848 "TARGET_32BIT"
3849 ""
3850 )
3851
3852 (define_insn_and_split "*arm_uminsi3"
3853 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3854 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3855 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3856 (clobber (reg:CC CC_REGNUM))]
3857 "TARGET_ARM"
3858 "#"
3859 ; cmp\\t%1, %2\;movcs\\t%0, %2
3860 ; cmp\\t%1, %2\;movcc\\t%0, %1
3861 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3862 "TARGET_ARM"
3863 [(set (reg:CC CC_REGNUM)
3864 (compare:CC (match_dup 1) (match_dup 2)))
3865 (set (match_dup 0)
3866 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
3867 (match_dup 1)
3868 (match_dup 2)))]
3869 ""
3870 [(set_attr "conds" "clob")
3871 (set_attr "length" "8,8,12")
3872 (set_attr "type" "store_4")]
3873 )
3874
3875 (define_insn "*store_minmaxsi"
3876 [(set (match_operand:SI 0 "memory_operand" "=m")
3877 (match_operator:SI 3 "minmax_operator"
3878 [(match_operand:SI 1 "s_register_operand" "r")
3879 (match_operand:SI 2 "s_register_operand" "r")]))
3880 (clobber (reg:CC CC_REGNUM))]
3881 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
3882 "*
3883 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3884 operands[1], operands[2]);
3885 output_asm_insn (\"cmp\\t%1, %2\", operands);
3886 if (TARGET_THUMB2)
3887 output_asm_insn (\"ite\t%d3\", operands);
3888 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3889 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3890 return \"\";
3891 "
3892 [(set_attr "conds" "clob")
3893 (set (attr "length")
3894 (if_then_else (eq_attr "is_thumb" "yes")
3895 (const_int 14)
3896 (const_int 12)))
3897 (set_attr "type" "store_4")]
3898 )
3899
3900 ; Reject the frame pointer in operand[1], since reloading this after
3901 ; it has been eliminated can cause carnage.
3902 (define_insn "*minmax_arithsi"
3903 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3904 (match_operator:SI 4 "shiftable_operator"
3905 [(match_operator:SI 5 "minmax_operator"
3906 [(match_operand:SI 2 "s_register_operand" "r,r")
3907 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3908 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3909 (clobber (reg:CC CC_REGNUM))]
3910 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
3911 "*
3912 {
3913 enum rtx_code code = GET_CODE (operands[4]);
3914 bool need_else;
3915
3916 if (which_alternative != 0 || operands[3] != const0_rtx
3917 || (code != PLUS && code != IOR && code != XOR))
3918 need_else = true;
3919 else
3920 need_else = false;
3921
3922 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3923 operands[2], operands[3]);
3924 output_asm_insn (\"cmp\\t%2, %3\", operands);
3925 if (TARGET_THUMB2)
3926 {
3927 if (need_else)
3928 output_asm_insn (\"ite\\t%d5\", operands);
3929 else
3930 output_asm_insn (\"it\\t%d5\", operands);
3931 }
3932 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3933 if (need_else)
3934 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3935 return \"\";
3936 }"
3937 [(set_attr "conds" "clob")
3938 (set (attr "length")
3939 (if_then_else (eq_attr "is_thumb" "yes")
3940 (const_int 14)
3941 (const_int 12)))
3942 (set_attr "type" "multiple")]
3943 )
3944
3945 ; Reject the frame pointer in operand[1], since reloading this after
3946 ; it has been eliminated can cause carnage.
3947 (define_insn_and_split "*minmax_arithsi_non_canon"
3948 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
3949 (minus:SI
3950 (match_operand:SI 1 "s_register_operand" "0,?Ts")
3951 (match_operator:SI 4 "minmax_operator"
3952 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
3953 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
3954 (clobber (reg:CC CC_REGNUM))]
3955 "TARGET_32BIT && !arm_eliminable_register (operands[1])
3956 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
3957 "#"
3958 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
3959 [(set (reg:CC CC_REGNUM)
3960 (compare:CC (match_dup 2) (match_dup 3)))
3961
3962 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
3963 (set (match_dup 0)
3964 (minus:SI (match_dup 1)
3965 (match_dup 2))))
3966 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
3967 (set (match_dup 0)
3968 (match_dup 6)))]
3969 {
3970 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
3971 operands[2], operands[3]);
3972 enum rtx_code rc = minmax_code (operands[4]);
3973 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
3974 operands[2], operands[3]);
3975
3976 if (mode == CCFPmode || mode == CCFPEmode)
3977 rc = reverse_condition_maybe_unordered (rc);
3978 else
3979 rc = reverse_condition (rc);
3980 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
3981 if (CONST_INT_P (operands[3]))
3982 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
3983 else
3984 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
3985 }
3986 [(set_attr "conds" "clob")
3987 (set (attr "length")
3988 (if_then_else (eq_attr "is_thumb" "yes")
3989 (const_int 14)
3990 (const_int 12)))
3991 (set_attr "type" "multiple")]
3992 )
3993
3994 (define_code_iterator SAT [smin smax])
3995 (define_code_attr SATrev [(smin "smax") (smax "smin")])
3996 (define_code_attr SATlo [(smin "1") (smax "2")])
3997 (define_code_attr SAThi [(smin "2") (smax "1")])
3998
3999 (define_insn "*satsi_<SAT:code>"
4000 [(set (match_operand:SI 0 "s_register_operand" "=r")
4001 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
4002 (match_operand:SI 1 "const_int_operand" "i"))
4003 (match_operand:SI 2 "const_int_operand" "i")))]
4004 "TARGET_32BIT && arm_arch6
4005 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4006 {
4007 int mask;
4008 bool signed_sat;
4009 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4010 &mask, &signed_sat))
4011 gcc_unreachable ();
4012
4013 operands[1] = GEN_INT (mask);
4014 if (signed_sat)
4015 return "ssat%?\t%0, %1, %3";
4016 else
4017 return "usat%?\t%0, %1, %3";
4018 }
4019 [(set_attr "predicable" "yes")
4020 (set_attr "type" "alus_imm")]
4021 )
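;; An illustrative mapping (added commentary; the C fragment below is only a
;; hypothetical example, not taken from GCC): a clamp to a power-of-two
;; signed or unsigned range matches this pattern and the shifted variant
;; below, e.g.
;;
;;   y = x < -32768 ? -32768 : x > 32767 ? 32767 : x;
;;
;; may be recognised as smin (smax (x, -32768), 32767) and emitted as
;;
;;   ssat r0, #16, r1
;;
;; while a clamp to [0, 255] may become  usat r0, #8, r1.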
4022
4023 (define_insn "*satsi_<SAT:code>_shift"
4024 [(set (match_operand:SI 0 "s_register_operand" "=r")
4025 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
4026 [(match_operand:SI 4 "s_register_operand" "r")
4027 (match_operand:SI 5 "const_int_operand" "i")])
4028 (match_operand:SI 1 "const_int_operand" "i"))
4029 (match_operand:SI 2 "const_int_operand" "i")))]
4030 "TARGET_32BIT && arm_arch6
4031 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4032 {
4033 int mask;
4034 bool signed_sat;
4035 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4036 &mask, &signed_sat))
4037 gcc_unreachable ();
4038
4039 operands[1] = GEN_INT (mask);
4040 if (signed_sat)
4041 return "ssat%?\t%0, %1, %4%S3";
4042 else
4043 return "usat%?\t%0, %1, %4%S3";
4044 }
4045 [(set_attr "predicable" "yes")
4046 (set_attr "shift" "3")
4047 (set_attr "type" "logic_shift_reg")])
4048 \f
4049 ;; Shift and rotation insns
4050
4051 (define_expand "ashldi3"
4052 [(set (match_operand:DI 0 "s_register_operand")
4053 (ashift:DI (match_operand:DI 1 "s_register_operand")
4054 (match_operand:SI 2 "reg_or_int_operand")))]
4055 "TARGET_32BIT"
4056 "
4057 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
4058 operands[2], gen_reg_rtx (SImode),
4059 gen_reg_rtx (SImode));
4060 DONE;
4061 ")
4062
4063 (define_expand "ashlsi3"
4064 [(set (match_operand:SI 0 "s_register_operand")
4065 (ashift:SI (match_operand:SI 1 "s_register_operand")
4066 (match_operand:SI 2 "arm_rhs_operand")))]
4067 "TARGET_EITHER"
4068 "
4069 if (CONST_INT_P (operands[2])
4070 && (UINTVAL (operands[2])) > 31)
4071 {
4072 emit_insn (gen_movsi (operands[0], const0_rtx));
4073 DONE;
4074 }
4075 "
4076 )
4077
4078 (define_expand "ashrdi3"
4079 [(set (match_operand:DI 0 "s_register_operand")
4080 (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
4081 (match_operand:SI 2 "reg_or_int_operand")))]
4082 "TARGET_32BIT"
4083 "
4084 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
4085 operands[2], gen_reg_rtx (SImode),
4086 gen_reg_rtx (SImode));
4087 DONE;
4088 ")
4089
4090 (define_expand "ashrsi3"
4091 [(set (match_operand:SI 0 "s_register_operand")
4092 (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
4093 (match_operand:SI 2 "arm_rhs_operand")))]
4094 "TARGET_EITHER"
4095 "
4096 if (CONST_INT_P (operands[2])
4097 && UINTVAL (operands[2]) > 31)
4098 operands[2] = GEN_INT (31);
4099 "
4100 )
4101
4102 (define_expand "lshrdi3"
4103 [(set (match_operand:DI 0 "s_register_operand")
4104 (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
4105 (match_operand:SI 2 "reg_or_int_operand")))]
4106 "TARGET_32BIT"
4107 "
4108 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
4109 operands[2], gen_reg_rtx (SImode),
4110 gen_reg_rtx (SImode));
4111 DONE;
4112 ")
4113
4114 (define_expand "lshrsi3"
4115 [(set (match_operand:SI 0 "s_register_operand")
4116 (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
4117 (match_operand:SI 2 "arm_rhs_operand")))]
4118 "TARGET_EITHER"
4119 "
4120 if (CONST_INT_P (operands[2])
4121 && (UINTVAL (operands[2])) > 31)
4122 {
4123 emit_insn (gen_movsi (operands[0], const0_rtx));
4124 DONE;
4125 }
4126 "
4127 )
4128
4129 (define_expand "rotlsi3"
4130 [(set (match_operand:SI 0 "s_register_operand")
4131 (rotatert:SI (match_operand:SI 1 "s_register_operand")
4132 (match_operand:SI 2 "reg_or_int_operand")))]
4133 "TARGET_32BIT"
4134 "
4135 if (CONST_INT_P (operands[2]))
4136 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
4137 else
4138 {
4139 rtx reg = gen_reg_rtx (SImode);
4140 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
4141 operands[2] = reg;
4142 }
4143 "
4144 )
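;; Added note (an explanatory sketch, not from the original file): the core
;; ISA only has a rotate-right, so a rotate-left by N is rewritten above as a
;; rotate-right by (32 - N) % 32; for a register amount the expander computes
;; 32 - N into a scratch register first.  For example, rotating left by 8
;; becomes  ror r0, r1, #24.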
4145
4146 (define_expand "rotrsi3"
4147 [(set (match_operand:SI 0 "s_register_operand")
4148 (rotatert:SI (match_operand:SI 1 "s_register_operand")
4149 (match_operand:SI 2 "arm_rhs_operand")))]
4150 "TARGET_EITHER"
4151 "
4152 if (TARGET_32BIT)
4153 {
4154 if (CONST_INT_P (operands[2])
4155 && UINTVAL (operands[2]) > 31)
4156 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
4157 }
4158 else /* TARGET_THUMB1 */
4159 {
4160 if (CONST_INT_P (operands [2]))
4161 operands [2] = force_reg (SImode, operands[2]);
4162 }
4163 "
4164 )
4165
4166 (define_insn "*arm_shiftsi3"
4167 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
4168 (match_operator:SI 3 "shift_operator"
4169 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
4170 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
4171 "TARGET_32BIT"
4172 "* return arm_output_shift(operands, 0);"
4173 [(set_attr "predicable" "yes")
4174 (set_attr "arch" "t2,t2,*,*")
4175 (set_attr "predicable_short_it" "yes,yes,no,no")
4176 (set_attr "length" "4")
4177 (set_attr "shift" "1")
4178 (set_attr "type" "alu_shift_reg,alu_shift_imm,alu_shift_imm,alu_shift_reg")]
4179 )
4180
4181 (define_insn "*shiftsi3_compare0"
4182 [(set (reg:CC_NOOV CC_REGNUM)
4183 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4184 [(match_operand:SI 1 "s_register_operand" "r,r")
4185 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4186 (const_int 0)))
4187 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4188 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
4189 "TARGET_32BIT"
4190 "* return arm_output_shift(operands, 1);"
4191 [(set_attr "conds" "set")
4192 (set_attr "shift" "1")
4193 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
4194 )
4195
4196 (define_insn "*shiftsi3_compare0_scratch"
4197 [(set (reg:CC_NOOV CC_REGNUM)
4198 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4199 [(match_operand:SI 1 "s_register_operand" "r,r")
4200 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4201 (const_int 0)))
4202 (clobber (match_scratch:SI 0 "=r,r"))]
4203 "TARGET_32BIT"
4204 "* return arm_output_shift(operands, 1);"
4205 [(set_attr "conds" "set")
4206 (set_attr "shift" "1")
4207 (set_attr "type" "shift_imm,shift_reg")]
4208 )
4209
4210 (define_insn "*not_shiftsi"
4211 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4212 (not:SI (match_operator:SI 3 "shift_operator"
4213 [(match_operand:SI 1 "s_register_operand" "r,r")
4214 (match_operand:SI 2 "shift_amount_operand" "M,r")])))]
4215 "TARGET_32BIT"
4216 "mvn%?\\t%0, %1%S3"
4217 [(set_attr "predicable" "yes")
4218 (set_attr "shift" "1")
4219 (set_attr "arch" "32,a")
4220 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4221
4222 (define_insn "*not_shiftsi_compare0"
4223 [(set (reg:CC_NOOV CC_REGNUM)
4224 (compare:CC_NOOV
4225 (not:SI (match_operator:SI 3 "shift_operator"
4226 [(match_operand:SI 1 "s_register_operand" "r,r")
4227 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
4228 (const_int 0)))
4229 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4230 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
4231 "TARGET_32BIT"
4232 "mvns%?\\t%0, %1%S3"
4233 [(set_attr "conds" "set")
4234 (set_attr "shift" "1")
4235 (set_attr "arch" "32,a")
4236 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4237
4238 (define_insn "*not_shiftsi_compare0_scratch"
4239 [(set (reg:CC_NOOV CC_REGNUM)
4240 (compare:CC_NOOV
4241 (not:SI (match_operator:SI 3 "shift_operator"
4242 [(match_operand:SI 1 "s_register_operand" "r,r")
4243 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
4244 (const_int 0)))
4245 (clobber (match_scratch:SI 0 "=r,r"))]
4246 "TARGET_32BIT"
4247 "mvns%?\\t%0, %1%S3"
4248 [(set_attr "conds" "set")
4249 (set_attr "shift" "1")
4250 (set_attr "arch" "32,a")
4251 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4252
4253 ;; We don't really have extzv, but defining this using shifts helps
4254 ;; to reduce register pressure later on.
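;; An illustrative example (added commentary): extracting a WIDTH-bit field
;; at bit position POS becomes a left shift by (32 - WIDTH - POS) followed by
;; a logical right shift by (32 - WIDTH).  For instance, an 8-bit field at
;; bit 4 is extracted as
;;
;;   lsl r0, r1, #20
;;   lsr r0, r0, #24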
4255
4256 (define_expand "extzv"
4257 [(set (match_operand 0 "s_register_operand")
4258 (zero_extract (match_operand 1 "nonimmediate_operand")
4259 (match_operand 2 "const_int_operand")
4260 (match_operand 3 "const_int_operand")))]
4261 "TARGET_THUMB1 || arm_arch_thumb2"
4262 "
4263 {
4264 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
4265 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
4266
4267 if (arm_arch_thumb2)
4268 {
4269 HOST_WIDE_INT width = INTVAL (operands[2]);
4270 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4271
4272 if (unaligned_access && MEM_P (operands[1])
4273 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
4274 {
4275 rtx base_addr;
4276
4277 if (BYTES_BIG_ENDIAN)
4278 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
4279 - bitpos;
4280
4281 if (width == 32)
4282 {
4283 base_addr = adjust_address (operands[1], SImode,
4284 bitpos / BITS_PER_UNIT);
4285 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4286 }
4287 else
4288 {
4289 rtx dest = operands[0];
4290 rtx tmp = gen_reg_rtx (SImode);
4291
4292 /* We may get a paradoxical subreg here. Strip it off. */
4293 if (GET_CODE (dest) == SUBREG
4294 && GET_MODE (dest) == SImode
4295 && GET_MODE (SUBREG_REG (dest)) == HImode)
4296 dest = SUBREG_REG (dest);
4297
4298 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4299 FAIL;
4300
4301 base_addr = adjust_address (operands[1], HImode,
4302 bitpos / BITS_PER_UNIT);
4303 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
4304 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4305 }
4306 DONE;
4307 }
4308 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
4309 {
4310 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
4311 operands[3]));
4312 DONE;
4313 }
4314 else
4315 FAIL;
4316 }
4317
4318 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4319 FAIL;
4320
4321 operands[3] = GEN_INT (rshift);
4322
4323 if (lshift == 0)
4324 {
4325 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
4326 DONE;
4327 }
4328
4329 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
4330 operands[3], gen_reg_rtx (SImode)));
4331 DONE;
4332 }"
4333 )
4334
4335 ;; Helper for extzv, for the Thumb-1 register-shifts case.
4336
4337 (define_expand "extzv_t1"
4338 [(set (match_operand:SI 4 "s_register_operand")
4339 (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
4340 (match_operand:SI 2 "const_int_operand")))
4341 (set (match_operand:SI 0 "s_register_operand")
4342 (lshiftrt:SI (match_dup 4)
4343 (match_operand:SI 3 "const_int_operand")))]
4344 "TARGET_THUMB1"
4345 "")
4346
4347 (define_expand "extv"
4348 [(set (match_operand 0 "s_register_operand")
4349 (sign_extract (match_operand 1 "nonimmediate_operand")
4350 (match_operand 2 "const_int_operand")
4351 (match_operand 3 "const_int_operand")))]
4352 "arm_arch_thumb2"
4353 {
4354 HOST_WIDE_INT width = INTVAL (operands[2]);
4355 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4356
4357 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
4358 && (bitpos % BITS_PER_UNIT) == 0)
4359 {
4360 rtx base_addr;
4361
4362 if (BYTES_BIG_ENDIAN)
4363 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
4364
4365 if (width == 32)
4366 {
4367 base_addr = adjust_address (operands[1], SImode,
4368 bitpos / BITS_PER_UNIT);
4369 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4370 }
4371 else
4372 {
4373 rtx dest = operands[0];
4374 rtx tmp = gen_reg_rtx (SImode);
4375
4376 /* We may get a paradoxical subreg here. Strip it off. */
4377 if (GET_CODE (dest) == SUBREG
4378 && GET_MODE (dest) == SImode
4379 && GET_MODE (SUBREG_REG (dest)) == HImode)
4380 dest = SUBREG_REG (dest);
4381
4382 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4383 FAIL;
4384
4385 base_addr = adjust_address (operands[1], HImode,
4386 bitpos / BITS_PER_UNIT);
4387 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
4388 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4389 }
4390
4391 DONE;
4392 }
4393 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4394 FAIL;
4395 else if (GET_MODE (operands[0]) == SImode
4396 && GET_MODE (operands[1]) == SImode)
4397 {
4398 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
4399 operands[3]));
4400 DONE;
4401 }
4402
4403 FAIL;
4404 })
4405
4406 ; Helper to expand register forms of extv with the proper modes.
4407
4408 (define_expand "extv_regsi"
4409 [(set (match_operand:SI 0 "s_register_operand")
4410 (sign_extract:SI (match_operand:SI 1 "s_register_operand")
4411 (match_operand 2 "const_int_operand")
4412 (match_operand 3 "const_int_operand")))]
4413 ""
4414 {
4415 })
4416
4417 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
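;
; An illustrative C fragment (added commentary; the struct and function below
; are hypothetical examples, not part of GCC):
;
;   struct __attribute__((packed)) s { char c; int x; };
;   int get_x (struct s *p) { return p->x; }   /* p->x is misaligned */
;
; With unaligned access enabled (the default from ARMv6), the misaligned word
; load may be emitted through the unaligned_loadsi pattern below.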
4418
4419 (define_insn "unaligned_loaddi"
4420 [(set (match_operand:DI 0 "s_register_operand" "=r")
4421 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
4422 UNSPEC_UNALIGNED_LOAD))]
4423 "TARGET_32BIT && TARGET_LDRD"
4424 "*
4425 return output_move_double (operands, true, NULL);
4426 "
4427 [(set_attr "length" "8")
4428 (set_attr "type" "load_8")])
4429
4430 (define_insn "unaligned_loadsi"
4431 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4432 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
4433 UNSPEC_UNALIGNED_LOAD))]
4434 "unaligned_access"
4435 "@
4436 ldr\t%0, %1\t@ unaligned
4437 ldr%?\t%0, %1\t@ unaligned
4438 ldr%?\t%0, %1\t@ unaligned"
4439 [(set_attr "arch" "t1,t2,32")
4440 (set_attr "length" "2,2,4")
4441 (set_attr "predicable" "no,yes,yes")
4442 (set_attr "predicable_short_it" "no,yes,no")
4443 (set_attr "type" "load_4")])
4444
4445 ;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
4446 ;; address (there's no immediate format). That's tricky to support
4447 ;; here and we don't really need this pattern for that case, so only
4448 ;; enable for 32-bit ISAs.
4449 (define_insn "unaligned_loadhis"
4450 [(set (match_operand:SI 0 "s_register_operand" "=r")
4451 (sign_extend:SI
4452 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
4453 UNSPEC_UNALIGNED_LOAD)))]
4454 "unaligned_access && TARGET_32BIT"
4455 "ldrsh%?\t%0, %1\t@ unaligned"
4456 [(set_attr "predicable" "yes")
4457 (set_attr "type" "load_byte")])
4458
4459 (define_insn "unaligned_loadhiu"
4460 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4461 (zero_extend:SI
4462 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
4463 UNSPEC_UNALIGNED_LOAD)))]
4464 "unaligned_access"
4465 "@
4466 ldrh\t%0, %1\t@ unaligned
4467 ldrh%?\t%0, %1\t@ unaligned
4468 ldrh%?\t%0, %1\t@ unaligned"
4469 [(set_attr "arch" "t1,t2,32")
4470 (set_attr "length" "2,2,4")
4471 (set_attr "predicable" "no,yes,yes")
4472 (set_attr "predicable_short_it" "no,yes,no")
4473 (set_attr "type" "load_byte")])
4474
4475 (define_insn "unaligned_storedi"
4476 [(set (match_operand:DI 0 "memory_operand" "=m")
4477 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
4478 UNSPEC_UNALIGNED_STORE))]
4479 "TARGET_32BIT && TARGET_LDRD"
4480 "*
4481 return output_move_double (operands, true, NULL);
4482 "
4483 [(set_attr "length" "8")
4484 (set_attr "type" "store_8")])
4485
4486 (define_insn "unaligned_storesi"
4487 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
4488 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
4489 UNSPEC_UNALIGNED_STORE))]
4490 "unaligned_access"
4491 "@
4492 str\t%1, %0\t@ unaligned
4493 str%?\t%1, %0\t@ unaligned
4494 str%?\t%1, %0\t@ unaligned"
4495 [(set_attr "arch" "t1,t2,32")
4496 (set_attr "length" "2,2,4")
4497 (set_attr "predicable" "no,yes,yes")
4498 (set_attr "predicable_short_it" "no,yes,no")
4499 (set_attr "type" "store_4")])
4500
4501 (define_insn "unaligned_storehi"
4502 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
4503 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
4504 UNSPEC_UNALIGNED_STORE))]
4505 "unaligned_access"
4506 "@
4507 strh\t%1, %0\t@ unaligned
4508 strh%?\t%1, %0\t@ unaligned
4509 strh%?\t%1, %0\t@ unaligned"
4510 [(set_attr "arch" "t1,t2,32")
4511 (set_attr "length" "2,2,4")
4512 (set_attr "predicable" "no,yes,yes")
4513 (set_attr "predicable_short_it" "no,yes,no")
4514 (set_attr "type" "store_4")])
4515
4516
4517 (define_insn "*extv_reg"
4518 [(set (match_operand:SI 0 "s_register_operand" "=r")
4519 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4520 (match_operand:SI 2 "const_int_operand" "n")
4521 (match_operand:SI 3 "const_int_operand" "n")))]
4522 "arm_arch_thumb2
4523 && IN_RANGE (INTVAL (operands[3]), 0, 31)
4524 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
4525 "sbfx%?\t%0, %1, %3, %2"
4526 [(set_attr "length" "4")
4527 (set_attr "predicable" "yes")
4528 (set_attr "type" "bfm")]
4529 )
4530
4531 (define_insn "extzv_t2"
4532 [(set (match_operand:SI 0 "s_register_operand" "=r")
4533 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4534 (match_operand:SI 2 "const_int_operand" "n")
4535 (match_operand:SI 3 "const_int_operand" "n")))]
4536 "arm_arch_thumb2
4537 && IN_RANGE (INTVAL (operands[3]), 0, 31)
4538 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
4539 "ubfx%?\t%0, %1, %3, %2"
4540 [(set_attr "length" "4")
4541 (set_attr "predicable" "yes")
4542 (set_attr "type" "bfm")]
4543 )
4544
4545
4546 ;; Division instructions
4547 (define_insn "divsi3"
4548 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4549 (div:SI (match_operand:SI 1 "s_register_operand" "r,r")
4550 (match_operand:SI 2 "s_register_operand" "r,r")))]
4551 "TARGET_IDIV"
4552 "@
4553 sdiv%?\t%0, %1, %2
4554 sdiv\t%0, %1, %2"
4555 [(set_attr "arch" "32,v8mb")
4556 (set_attr "predicable" "yes")
4557 (set_attr "type" "sdiv")]
4558 )
4559
4560 (define_insn "udivsi3"
4561 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4562 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
4563 (match_operand:SI 2 "s_register_operand" "r,r")))]
4564 "TARGET_IDIV"
4565 "@
4566 udiv%?\t%0, %1, %2
4567 udiv\t%0, %1, %2"
4568 [(set_attr "arch" "32,v8mb")
4569 (set_attr "predicable" "yes")
4570 (set_attr "type" "udiv")]
4571 )
4572
4573 \f
4574 ;; Unary arithmetic insns
4575
4576 (define_expand "negv<SIDI:mode>3"
4577 [(match_operand:SIDI 0 "s_register_operand")
4578 (match_operand:SIDI 1 "s_register_operand")
4579 (match_operand 2 "")]
4580 "TARGET_32BIT"
4581 {
4582 emit_insn (gen_subv<mode>4 (operands[0], const0_rtx, operands[1],
4583 operands[2]));
4584 DONE;
4585 })
4586
4587 (define_expand "negsi2"
4588 [(set (match_operand:SI 0 "s_register_operand")
4589 (neg:SI (match_operand:SI 1 "s_register_operand")))]
4590 "TARGET_EITHER"
4591 ""
4592 )
4593
4594 (define_insn "*arm_negsi2"
4595 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4596 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4597 "TARGET_32BIT"
4598 "rsb%?\\t%0, %1, #0"
4599 [(set_attr "predicable" "yes")
4600 (set_attr "predicable_short_it" "yes,no")
4601 (set_attr "arch" "t2,*")
4602 (set_attr "length" "4")
4603 (set_attr "type" "alu_imm")]
4604 )
4605
4606 ;; To keep the comparison in canonical form, we express it as (~reg cmp ~0)
4607 ;; rather than (0 cmp reg).  This gives the same results for unsigned
4608 ;; and equality compares, which is what we mostly need here.
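;; Added reasoning (an explanatory note, not from the original comment):
;; bitwise NOT reverses the unsigned order, so 0 <u x exactly when
;; ~x <u ~0, and x == 0 exactly when ~x == ~0; the same condition-code tests
;; therefore apply to both forms for unsigned and equality comparisons.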
4609 (define_insn "negsi2_0compare"
4610 [(set (reg:CC_RSB CC_REGNUM)
4611 (compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r"))
4612 (const_int -1)))
4613 (set (match_operand:SI 0 "s_register_operand" "=l,r")
4614 (neg:SI (match_dup 1)))]
4615 "TARGET_32BIT"
4616 "@
4617 negs\\t%0, %1
4618 rsbs\\t%0, %1, #0"
4619 [(set_attr "conds" "set")
4620 (set_attr "arch" "t2,*")
4621 (set_attr "length" "2,*")
4622 (set_attr "type" "alus_imm")]
4623 )
4624
4625 (define_insn "negsi2_carryin"
4626 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4627 (minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r"))
4628 (match_operand:SI 2 "arm_borrow_operation" "")))]
4629 "TARGET_32BIT"
4630 "@
4631 rsc\\t%0, %1, #0
4632 sbc\\t%0, %1, %1, lsl #1"
4633 [(set_attr "conds" "use")
4634 (set_attr "arch" "a,t2")
4635 (set_attr "type" "adc_imm,adc_reg")]
4636 )
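;; Added note (an explanatory sketch): Thumb-2 has no RSC, so the second
;; alternative above rewrites  -x - borrow  as  x - (x << 1) - borrow, which
;; a single SBC with a shifted operand can compute.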
4637
4638 (define_expand "negsf2"
4639 [(set (match_operand:SF 0 "s_register_operand")
4640 (neg:SF (match_operand:SF 1 "s_register_operand")))]
4641 "TARGET_32BIT && TARGET_HARD_FLOAT"
4642 ""
4643 )
4644
4645 (define_expand "negdf2"
4646 [(set (match_operand:DF 0 "s_register_operand")
4647 (neg:DF (match_operand:DF 1 "s_register_operand")))]
4648 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4649 "")
4650
4651 ;; abssi2 doesn't really clobber the condition codes if a different register
4652 ;; is being set. To keep things simple, assume during rtl manipulations that
4653 ;; it does, but tell the final scan operator the truth. Similarly for
4654 ;; (neg (abs...))
4655
4656 (define_expand "abssi2"
4657 [(parallel
4658 [(set (match_operand:SI 0 "s_register_operand")
4659 (abs:SI (match_operand:SI 1 "s_register_operand")))
4660 (clobber (match_dup 2))])]
4661 "TARGET_EITHER"
4662 "
4663 if (TARGET_THUMB1)
4664 operands[2] = gen_rtx_SCRATCH (SImode);
4665 else
4666 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
4667 ")
4668
4669 (define_insn_and_split "*arm_abssi2"
4670 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4671 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
4672 (clobber (reg:CC CC_REGNUM))]
4673 "TARGET_ARM"
4674 "#"
4675 "&& reload_completed"
4676 [(const_int 0)]
4677 {
4678 /* if (which_alternative == 0) */
4679 if (REGNO(operands[0]) == REGNO(operands[1]))
4680 {
4681 /* Emit the pattern:
4682 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
4683 [(set (reg:CC CC_REGNUM)
4684 (compare:CC (match_dup 0) (const_int 0)))
4685 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
4686 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
4687 */
4688 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
4689 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4690 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4691 (gen_rtx_LT (SImode,
4692 gen_rtx_REG (CCmode, CC_REGNUM),
4693 const0_rtx)),
4694 (gen_rtx_SET (operands[0],
4695 (gen_rtx_MINUS (SImode,
4696 const0_rtx,
4697 operands[1]))))));
4698 DONE;
4699 }
4700 else
4701 {
4702 /* Emit the pattern:
4703 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
4704 [(set (match_dup 0)
4705 (xor:SI (match_dup 1)
4706 (ashiftrt:SI (match_dup 1) (const_int 31))))
4707 (set (match_dup 0)
4708 (minus:SI (match_dup 0)
4709 (ashiftrt:SI (match_dup 1) (const_int 31))))]
4710 */
4711 emit_insn (gen_rtx_SET (operands[0],
4712 gen_rtx_XOR (SImode,
4713 gen_rtx_ASHIFTRT (SImode,
4714 operands[1],
4715 GEN_INT (31)),
4716 operands[1])));
4717 emit_insn (gen_rtx_SET (operands[0],
4718 gen_rtx_MINUS (SImode,
4719 operands[0],
4720 gen_rtx_ASHIFTRT (SImode,
4721 operands[1],
4722 GEN_INT (31)))));
4723 DONE;
4724 }
4725 }
4726 [(set_attr "conds" "clob,*")
4727 (set_attr "shift" "1")
4728 (set_attr "predicable" "no, yes")
4729 (set_attr "length" "8")
4730 (set_attr "type" "multiple")]
4731 )
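;; Added note (an explanatory example, not from the original file): the
;; second alternative above uses the identity  abs (x) = (x ^ m) - m  where
;; m = x >> 31 (arithmetic).  For x = -5: m = -1, x ^ m = 4, and
;; 4 - (-1) = 5.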
4732
4733 (define_insn_and_split "*arm_neg_abssi2"
4734 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4735 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
4736 (clobber (reg:CC CC_REGNUM))]
4737 "TARGET_ARM"
4738 "#"
4739 "&& reload_completed"
4740 [(const_int 0)]
4741 {
4742 /* if (which_alternative == 0) */
4743 if (REGNO (operands[0]) == REGNO (operands[1]))
4744 {
4745 /* Emit the pattern:
4746 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
4747 */
4748 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
4749 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4750 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4751 gen_rtx_GT (SImode,
4752 gen_rtx_REG (CCmode, CC_REGNUM),
4753 const0_rtx),
4754 gen_rtx_SET (operands[0],
4755 (gen_rtx_MINUS (SImode,
4756 const0_rtx,
4757 operands[1])))));
4758 }
4759 else
4760 {
4761 /* Emit the pattern:
4762 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
4763 */
4764 emit_insn (gen_rtx_SET (operands[0],
4765 gen_rtx_XOR (SImode,
4766 gen_rtx_ASHIFTRT (SImode,
4767 operands[1],
4768 GEN_INT (31)),
4769 operands[1])));
4770 emit_insn (gen_rtx_SET (operands[0],
4771 gen_rtx_MINUS (SImode,
4772 gen_rtx_ASHIFTRT (SImode,
4773 operands[1],
4774 GEN_INT (31)),
4775 operands[0])));
4776 }
4777 DONE;
4778 }
4779 [(set_attr "conds" "clob,*")
4780 (set_attr "shift" "1")
4781 (set_attr "predicable" "no, yes")
4782 (set_attr "length" "8")
4783 (set_attr "type" "multiple")]
4784 )
4785
4786 (define_expand "abssf2"
4787 [(set (match_operand:SF 0 "s_register_operand")
4788 (abs:SF (match_operand:SF 1 "s_register_operand")))]
4789 "TARGET_32BIT && TARGET_HARD_FLOAT"
4790 "")
4791
4792 (define_expand "absdf2"
4793 [(set (match_operand:DF 0 "s_register_operand")
4794 (abs:DF (match_operand:DF 1 "s_register_operand")))]
4795 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4796 "")
4797
4798 (define_expand "sqrtsf2"
4799 [(set (match_operand:SF 0 "s_register_operand")
4800 (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
4801 "TARGET_32BIT && TARGET_HARD_FLOAT"
4802 "")
4803
4804 (define_expand "sqrtdf2"
4805 [(set (match_operand:DF 0 "s_register_operand")
4806 (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
4807 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4808 "")
4809
4810 (define_expand "one_cmplsi2"
4811 [(set (match_operand:SI 0 "s_register_operand")
4812 (not:SI (match_operand:SI 1 "s_register_operand")))]
4813 "TARGET_EITHER"
4814 ""
4815 )
4816
4817 (define_insn "*arm_one_cmplsi2"
4818 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4819 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4820 "TARGET_32BIT"
4821 "mvn%?\\t%0, %1"
4822 [(set_attr "predicable" "yes")
4823 (set_attr "predicable_short_it" "yes,no")
4824 (set_attr "arch" "t2,*")
4825 (set_attr "length" "4")
4826 (set_attr "type" "mvn_reg")]
4827 )
4828
4829 (define_insn "*notsi_compare0"
4830 [(set (reg:CC_NOOV CC_REGNUM)
4831 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4832 (const_int 0)))
4833 (set (match_operand:SI 0 "s_register_operand" "=r")
4834 (not:SI (match_dup 1)))]
4835 "TARGET_32BIT"
4836 "mvns%?\\t%0, %1"
4837 [(set_attr "conds" "set")
4838 (set_attr "type" "mvn_reg")]
4839 )
4840
4841 (define_insn "*notsi_compare0_scratch"
4842 [(set (reg:CC_NOOV CC_REGNUM)
4843 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4844 (const_int 0)))
4845 (clobber (match_scratch:SI 0 "=r"))]
4846 "TARGET_32BIT"
4847 "mvns%?\\t%0, %1"
4848 [(set_attr "conds" "set")
4849 (set_attr "type" "mvn_reg")]
4850 )
4851 \f
4852 ;; Fixed <--> Floating conversion insns
4853
4854 (define_expand "floatsihf2"
4855 [(set (match_operand:HF 0 "general_operand")
4856 (float:HF (match_operand:SI 1 "general_operand")))]
4857 "TARGET_EITHER"
4858 "
4859 {
4860 rtx op1 = gen_reg_rtx (SFmode);
4861 expand_float (op1, operands[1], 0);
4862 op1 = convert_to_mode (HFmode, op1, 0);
4863 emit_move_insn (operands[0], op1);
4864 DONE;
4865 }"
4866 )
4867
4868 (define_expand "floatdihf2"
4869 [(set (match_operand:HF 0 "general_operand")
4870 (float:HF (match_operand:DI 1 "general_operand")))]
4871 "TARGET_EITHER"
4872 "
4873 {
4874 rtx op1 = gen_reg_rtx (SFmode);
4875 expand_float (op1, operands[1], 0);
4876 op1 = convert_to_mode (HFmode, op1, 0);
4877 emit_move_insn (operands[0], op1);
4878 DONE;
4879 }"
4880 )
4881
4882 (define_expand "floatsisf2"
4883 [(set (match_operand:SF 0 "s_register_operand")
4884 (float:SF (match_operand:SI 1 "s_register_operand")))]
4885 "TARGET_32BIT && TARGET_HARD_FLOAT"
4886 "
4887 ")
4888
4889 (define_expand "floatsidf2"
4890 [(set (match_operand:DF 0 "s_register_operand")
4891 (float:DF (match_operand:SI 1 "s_register_operand")))]
4892 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4893 "
4894 ")
4895
4896 (define_expand "fix_trunchfsi2"
4897 [(set (match_operand:SI 0 "general_operand")
4898 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
4899 "TARGET_EITHER"
4900 "
4901 {
4902 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4903 expand_fix (operands[0], op1, 0);
4904 DONE;
4905 }"
4906 )
4907
4908 (define_expand "fix_trunchfdi2"
4909 [(set (match_operand:DI 0 "general_operand")
4910 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
4911 "TARGET_EITHER"
4912 "
4913 {
4914 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4915 expand_fix (operands[0], op1, 0);
4916 DONE;
4917 }"
4918 )
4919
4920 (define_expand "fix_truncsfsi2"
4921 [(set (match_operand:SI 0 "s_register_operand")
4922 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
4923 "TARGET_32BIT && TARGET_HARD_FLOAT"
4924 "
4925 ")
4926
4927 (define_expand "fix_truncdfsi2"
4928 [(set (match_operand:SI 0 "s_register_operand")
4929 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
4930 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4931 "
4932 ")
4933
4934 ;; Truncation insns
4935
4936 (define_expand "truncdfsf2"
4937 [(set (match_operand:SF 0 "s_register_operand")
4938 (float_truncate:SF
4939 (match_operand:DF 1 "s_register_operand")))]
4940 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4941 ""
4942 )
4943
4944 ;; DFmode to HFmode conversions on targets without a single-step hardware
4945 ;; instruction for it would have to go through SFmode. This is dangerous
4946 ;; as it introduces double rounding.
4947 ;;
4948 ;; Disable this pattern unless we are in an unsafe math mode, or we have
4949 ;; a single-step instruction.
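;;
;; Added illustration (an explanatory note): double rounding can occur when
;; the intermediate SFmode rounding lands exactly on a halfway point of
;; HFmode.  Schematically, a DF value whose bits just below the HF precision
;; are 0111...1 followed by further set bits rounds down when converted
;; directly to HF, but rounding to SF first can carry those bits up to the
;; halfway pattern 1000...0, after which ties-to-even may round the HF
;; result up.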
4950
4951 (define_expand "truncdfhf2"
4952 [(set (match_operand:HF 0 "s_register_operand")
4953 (float_truncate:HF
4954 (match_operand:DF 1 "s_register_operand")))]
4955 "(TARGET_EITHER && flag_unsafe_math_optimizations)
4956 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
4957 {
4958 /* We don't have a direct instruction for this, so we must be in
4959 an unsafe math mode and go via SFmode. */
4960
4961 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4962 {
4963 rtx op1;
4964 op1 = convert_to_mode (SFmode, operands[1], 0);
4965 op1 = convert_to_mode (HFmode, op1, 0);
4966 emit_move_insn (operands[0], op1);
4967 DONE;
4968 }
4969 /* Otherwise, we will pick this up as a single instruction with
4970 no intermediary rounding. */
4971 }
4972 )
4973 \f
4974 ;; Zero and sign extension instructions.
4975
4976 (define_expand "zero_extend<mode>di2"
4977 [(set (match_operand:DI 0 "s_register_operand" "")
4978 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
4979 "TARGET_32BIT <qhs_zextenddi_cond>"
4980 {
4981 rtx res_lo, res_hi, op0_lo, op0_hi;
4982 res_lo = gen_lowpart (SImode, operands[0]);
4983 res_hi = gen_highpart (SImode, operands[0]);
4984 if (can_create_pseudo_p ())
4985 {
4986 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4987 op0_hi = gen_reg_rtx (SImode);
4988 }
4989 else
4990 {
4991 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4992 op0_hi = res_hi;
4993 }
4994 if (<MODE>mode != SImode)
4995 emit_insn (gen_rtx_SET (op0_lo,
4996 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4997 emit_insn (gen_movsi (op0_hi, const0_rtx));
4998 if (res_lo != op0_lo)
4999 emit_move_insn (res_lo, op0_lo);
5000 if (res_hi != op0_hi)
5001 emit_move_insn (res_hi, op0_hi);
5002 DONE;
5003 }
5004 )
5005
5006 (define_expand "extend<mode>di2"
5007 [(set (match_operand:DI 0 "s_register_operand" "")
5008 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
5009 "TARGET_32BIT <qhs_sextenddi_cond>"
5010 {
5011 rtx res_lo, res_hi, op0_lo, op0_hi;
5012 res_lo = gen_lowpart (SImode, operands[0]);
5013 res_hi = gen_highpart (SImode, operands[0]);
5014 if (can_create_pseudo_p ())
5015 {
5016 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
5017 op0_hi = gen_reg_rtx (SImode);
5018 }
5019 else
5020 {
5021 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
5022 op0_hi = res_hi;
5023 }
5024 if (<MODE>mode != SImode)
5025 emit_insn (gen_rtx_SET (op0_lo,
5026 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5027 emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
5028 if (res_lo != op0_lo)
5029 emit_move_insn (res_lo, op0_lo);
5030 if (res_hi != op0_hi)
5031 emit_move_insn (res_hi, op0_hi);
5032 DONE;
5033 }
5034 )
5035
5036 ;; Splits for all extensions to DImode
5037 (define_split
5038 [(set (match_operand:DI 0 "s_register_operand" "")
5039 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5040 "TARGET_32BIT"
5041 [(set (match_dup 0) (match_dup 1))]
5042 {
5043 rtx lo_part = gen_lowpart (SImode, operands[0]);
5044 machine_mode src_mode = GET_MODE (operands[1]);
5045
5046 if (src_mode == SImode)
5047 emit_move_insn (lo_part, operands[1]);
5048 else
5049 emit_insn (gen_rtx_SET (lo_part,
5050 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5051 operands[0] = gen_highpart (SImode, operands[0]);
5052 operands[1] = const0_rtx;
5053 })
5054
5055 (define_split
5056 [(set (match_operand:DI 0 "s_register_operand" "")
5057 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5058 "TARGET_32BIT"
5059 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
5060 {
5061 rtx lo_part = gen_lowpart (SImode, operands[0]);
5062 machine_mode src_mode = GET_MODE (operands[1]);
5063
5064 if (src_mode == SImode)
5065 emit_move_insn (lo_part, operands[1]);
5066 else
5067 emit_insn (gen_rtx_SET (lo_part,
5068 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5069 operands[1] = lo_part;
5070 operands[0] = gen_highpart (SImode, operands[0]);
5071 })
5072
5073 (define_expand "zero_extendhisi2"
5074 [(set (match_operand:SI 0 "s_register_operand")
5075 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5076 "TARGET_EITHER"
5077 {
5078 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
5079 {
5080 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
5081 DONE;
5082 }
5083 if (!arm_arch6 && !MEM_P (operands[1]))
5084 {
5085 rtx t = gen_lowpart (SImode, operands[1]);
5086 rtx tmp = gen_reg_rtx (SImode);
5087 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5088 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
5089 DONE;
5090 }
5091 })
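
;; As an illustration of the strategies above (the exact code depends on
;; the architecture level and on whether the source is in memory):
;;   unsigned int f (unsigned int x) { return (unsigned short) x; }
;;     /* pre-ARMv6: lsl/lsr by 16 via the splitter below;
;;        ARMv6+:    uxth r0, r0 */
;;   unsigned int g (unsigned short *p) { return *p; }
;;     /* ARMv4+:        ldrh r0, [r0];
;;        pre-ARMv4 ARM: byte loads via movhi_bytes */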
5092
5093 (define_split
5094 [(set (match_operand:SI 0 "s_register_operand" "")
5095 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
5096 "!TARGET_THUMB2 && !arm_arch6"
5097 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5098 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
5099 {
5100 operands[2] = gen_lowpart (SImode, operands[1]);
5101 })
5102
5103 (define_insn "*arm_zero_extendhisi2"
5104 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5105 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5106 "TARGET_ARM && arm_arch4 && !arm_arch6"
5107 "@
5108 #
5109 ldrh%?\\t%0, %1"
5110 [(set_attr "type" "alu_shift_reg,load_byte")
5111 (set_attr "predicable" "yes")]
5112 )
5113
5114 (define_insn "*arm_zero_extendhisi2_v6"
5115 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5116 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5117 "TARGET_ARM && arm_arch6"
5118 "@
5119 uxth%?\\t%0, %1
5120 ldrh%?\\t%0, %1"
5121 [(set_attr "predicable" "yes")
5122 (set_attr "type" "extend,load_byte")]
5123 )
5124
5125 (define_insn "*arm_zero_extendhisi2addsi"
5126 [(set (match_operand:SI 0 "s_register_operand" "=r")
5127 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5128 (match_operand:SI 2 "s_register_operand" "r")))]
5129 "TARGET_INT_SIMD"
5130 "uxtah%?\\t%0, %2, %1"
5131 [(set_attr "type" "alu_shift_reg")
5132 (set_attr "predicable" "yes")]
5133 )
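
;; As an illustration, on targets with TARGET_INT_SIMD the fused
;; extend-and-add form above may be used for code such as (registers
;; illustrative only; the compiler may pick another sequence):
;;   unsigned int f (unsigned int a, unsigned int b)
;;   { return a + (unsigned short) b; }   /* uxtah r0, r0, r1 */
;; The uxtab, sxtah and sxtab patterns below cover the byte and signed
;; variants in the same way.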
5134
5135 (define_expand "zero_extendqisi2"
5136 [(set (match_operand:SI 0 "s_register_operand")
5137 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
5138 "TARGET_EITHER"
5139 {
5140 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
5141 {
5142 emit_insn (gen_andsi3 (operands[0],
5143 gen_lowpart (SImode, operands[1]),
5144 GEN_INT (255)));
5145 DONE;
5146 }
5147 if (!arm_arch6 && !MEM_P (operands[1]))
5148 {
5149 rtx t = gen_lowpart (SImode, operands[1]);
5150 rtx tmp = gen_reg_rtx (SImode);
5151 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5152 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
5153 DONE;
5154 }
5155 })
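
;; As an illustration (exact code depends on the architecture level):
;;   unsigned int f (unsigned int x) { return (unsigned char) x; }
;;     /* pre-ARMv6 ARM: and r0, r0, #255;   ARMv6+: uxtb r0, r0 */
;;   unsigned int g (unsigned char *p) { return *p; }
;;     /* ldrb r0, [r0] on all supported architecture levels */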
5156
5157 (define_split
5158 [(set (match_operand:SI 0 "s_register_operand" "")
5159 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
5160 "!arm_arch6"
5161 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5162 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
5163 {
5164 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5165 if (TARGET_ARM)
5166 {
5167 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
5168 DONE;
5169 }
5170 })
5171
5172 (define_insn "*arm_zero_extendqisi2"
5173 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5174 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
5175 "TARGET_ARM && !arm_arch6"
5176 "@
5177 #
5178 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5179 [(set_attr "length" "8,4")
5180 (set_attr "type" "alu_shift_reg,load_byte")
5181 (set_attr "predicable" "yes")]
5182 )
5183
5184 (define_insn "*arm_zero_extendqisi2_v6"
5185 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5186 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
5187 "TARGET_ARM && arm_arch6"
5188 "@
5189 uxtb%?\\t%0, %1
5190 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5191 [(set_attr "type" "extend,load_byte")
5192 (set_attr "predicable" "yes")]
5193 )
5194
5195 (define_insn "*arm_zero_extendqisi2addsi"
5196 [(set (match_operand:SI 0 "s_register_operand" "=r")
5197 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5198 (match_operand:SI 2 "s_register_operand" "r")))]
5199 "TARGET_INT_SIMD"
5200 "uxtab%?\\t%0, %2, %1"
5201 [(set_attr "predicable" "yes")
5202 (set_attr "type" "alu_shift_reg")]
5203 )
5204
5205 (define_split
5206 [(set (match_operand:SI 0 "s_register_operand" "")
5207 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
5208 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5209 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
5210 [(set (match_dup 2) (match_dup 1))
5211 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5212 ""
5213 )
5214
5215 (define_split
5216 [(set (match_operand:SI 0 "s_register_operand" "")
5217 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
5218 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5219 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
5220 [(set (match_dup 2) (match_dup 1))
5221 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5222 ""
5223 )
5224
5225
5226 (define_split
5227 [(set (match_operand:SI 0 "s_register_operand" "")
5228 (IOR_XOR:SI (and:SI (ashift:SI
5229 (match_operand:SI 1 "s_register_operand" "")
5230 (match_operand:SI 2 "const_int_operand" ""))
5231 (match_operand:SI 3 "const_int_operand" ""))
5232 (zero_extend:SI
5233 (match_operator 5 "subreg_lowpart_operator"
5234 [(match_operand:SI 4 "s_register_operand" "")]))))]
5235 "TARGET_32BIT
5236 && (UINTVAL (operands[3])
5237 == (GET_MODE_MASK (GET_MODE (operands[5]))
5238 & (GET_MODE_MASK (GET_MODE (operands[5]))
5239 << (INTVAL (operands[2])))))"
5240 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
5241 (match_dup 4)))
5242 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
5243 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
5244 )
5245
5246 (define_insn "*compareqi_eq0"
5247 [(set (reg:CC_Z CC_REGNUM)
5248 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
5249 (const_int 0)))]
5250 "TARGET_32BIT"
5251 "tst%?\\t%0, #255"
5252 [(set_attr "conds" "set")
5253 (set_attr "predicable" "yes")
5254 (set_attr "type" "logic_imm")]
5255 )
5256
5257 (define_expand "extendhisi2"
5258 [(set (match_operand:SI 0 "s_register_operand")
5259 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5260 "TARGET_EITHER"
5261 {
5262 if (TARGET_THUMB1)
5263 {
5264 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
5265 DONE;
5266 }
5267 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
5268 {
5269 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
5270 DONE;
5271 }
5272
5273 if (!arm_arch6 && !MEM_P (operands[1]))
5274 {
5275 rtx t = gen_lowpart (SImode, operands[1]);
5276 rtx tmp = gen_reg_rtx (SImode);
5277 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5278 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
5279 DONE;
5280 }
5281 })
5282
5283 (define_split
5284 [(parallel
5285 [(set (match_operand:SI 0 "register_operand" "")
5286 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
5287 (clobber (match_scratch:SI 2 ""))])]
5288 "!arm_arch6"
5289 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5290 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5291 {
5292 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5293 })
5294
5295 ;; This pattern is only used when ldrsh is not available.
5296 (define_expand "extendhisi2_mem"
5297 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5298 (set (match_dup 3)
5299 (zero_extend:SI (match_dup 7)))
5300 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
5301 (set (match_operand:SI 0 "" "")
5302 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
5303 "TARGET_ARM"
5304 "
5305 {
5306 rtx mem1, mem2;
5307 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5308
5309 mem1 = change_address (operands[1], QImode, addr);
5310 mem2 = change_address (operands[1], QImode,
5311 plus_constant (Pmode, addr, 1));
5312 operands[0] = gen_lowpart (SImode, operands[0]);
5313 operands[1] = mem1;
5314 operands[2] = gen_reg_rtx (SImode);
5315 operands[3] = gen_reg_rtx (SImode);
5316 operands[6] = gen_reg_rtx (SImode);
5317 operands[7] = mem2;
5318
5319 if (BYTES_BIG_ENDIAN)
5320 {
5321 operands[4] = operands[2];
5322 operands[5] = operands[3];
5323 }
5324 else
5325 {
5326 operands[4] = operands[3];
5327 operands[5] = operands[2];
5328 }
5329 }"
5330 )
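
;; As an illustration, on a little-endian target the expander above
;; builds roughly (int) (short) (p[0] | (p[1] << 8)) from two byte
;; loads: the byte holding the sign is shifted left by 24 and then
;; arithmetically shifted right by 16 as it is ORed in.  Roughly
;; (registers illustrative only):
;;   ldrb  r2, [r3]              @ low byte
;;   ldrb  r1, [r3, #1]          @ high byte
;;   mov   r1, r1, lsl #24
;;   orr   r0, r2, r1, asr #16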
5331
5332 (define_split
5333 [(set (match_operand:SI 0 "register_operand" "")
5334 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
5335 "!arm_arch6"
5336 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5337 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5338 {
5339 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5340 })
5341
5342 (define_insn "*arm_extendhisi2"
5343 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5344 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5345 "TARGET_ARM && arm_arch4 && !arm_arch6"
5346 "@
5347 #
5348 ldrsh%?\\t%0, %1"
5349 [(set_attr "length" "8,4")
5350 (set_attr "type" "alu_shift_reg,load_byte")
5351 (set_attr "predicable" "yes")]
5352 )
5353
5354 ;; ??? Check Thumb-2 pool range
5355 (define_insn "*arm_extendhisi2_v6"
5356 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5357 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5358 "TARGET_32BIT && arm_arch6"
5359 "@
5360 sxth%?\\t%0, %1
5361 ldrsh%?\\t%0, %1"
5362 [(set_attr "type" "extend,load_byte")
5363 (set_attr "predicable" "yes")]
5364 )
5365
5366 (define_insn "*arm_extendhisi2addsi"
5367 [(set (match_operand:SI 0 "s_register_operand" "=r")
5368 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5369 (match_operand:SI 2 "s_register_operand" "r")))]
5370 "TARGET_INT_SIMD"
5371 "sxtah%?\\t%0, %2, %1"
5372 [(set_attr "type" "alu_shift_reg")]
5373 )
5374
5375 (define_expand "extendqihi2"
5376 [(set (match_dup 2)
5377 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
5378 (const_int 24)))
5379 (set (match_operand:HI 0 "s_register_operand")
5380 (ashiftrt:SI (match_dup 2)
5381 (const_int 24)))]
5382 "TARGET_ARM"
5383 "
5384 {
5385 if (arm_arch4 && MEM_P (operands[1]))
5386 {
5387 emit_insn (gen_rtx_SET (operands[0],
5388 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
5389 DONE;
5390 }
5391 if (!s_register_operand (operands[1], QImode))
5392 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5393 operands[0] = gen_lowpart (SImode, operands[0]);
5394 operands[1] = gen_lowpart (SImode, operands[1]);
5395 operands[2] = gen_reg_rtx (SImode);
5396 }"
5397 )
5398
5399 (define_insn "*arm_extendqihi_insn"
5400 [(set (match_operand:HI 0 "s_register_operand" "=r")
5401 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
5402 "TARGET_ARM && arm_arch4"
5403 "ldrsb%?\\t%0, %1"
5404 [(set_attr "type" "load_byte")
5405 (set_attr "predicable" "yes")]
5406 )
5407
5408 (define_expand "extendqisi2"
5409 [(set (match_operand:SI 0 "s_register_operand")
5410 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
5411 "TARGET_EITHER"
5412 {
5413 if (!arm_arch4 && MEM_P (operands[1]))
5414 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5415
5416 if (!arm_arch6 && !MEM_P (operands[1]))
5417 {
5418 rtx t = gen_lowpart (SImode, operands[1]);
5419 rtx tmp = gen_reg_rtx (SImode);
5420 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5421 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
5422 DONE;
5423 }
5424 })
5425
5426 (define_split
5427 [(set (match_operand:SI 0 "register_operand" "")
5428 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
5429 "!arm_arch6"
5430 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5431 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
5432 {
5433 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5434 })
5435
5436 (define_insn "*arm_extendqisi"
5437 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5438 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5439 "TARGET_ARM && arm_arch4 && !arm_arch6"
5440 "@
5441 #
5442 ldrsb%?\\t%0, %1"
5443 [(set_attr "length" "8,4")
5444 (set_attr "type" "alu_shift_reg,load_byte")
5445 (set_attr "predicable" "yes")]
5446 )
5447
5448 (define_insn "*arm_extendqisi_v6"
5449 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5450 (sign_extend:SI
5451 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5452 "TARGET_ARM && arm_arch6"
5453 "@
5454 sxtb%?\\t%0, %1
5455 ldrsb%?\\t%0, %1"
5456 [(set_attr "type" "extend,load_byte")
5457 (set_attr "predicable" "yes")]
5458 )
5459
5460 (define_insn "*arm_extendqisi2addsi"
5461 [(set (match_operand:SI 0 "s_register_operand" "=r")
5462 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5463 (match_operand:SI 2 "s_register_operand" "r")))]
5464 "TARGET_INT_SIMD"
5465 "sxtab%?\\t%0, %2, %1"
5466 [(set_attr "type" "alu_shift_reg")
5467 (set_attr "predicable" "yes")]
5468 )
5469
5470 (define_insn "arm_<sup>xtb16"
5471 [(set (match_operand:SI 0 "s_register_operand" "=r")
5472 (unspec:SI
5473 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
5474 "TARGET_INT_SIMD"
5475 "<sup>xtb16%?\\t%0, %1"
5476 [(set_attr "predicable" "yes")
5477 (set_attr "type" "alu_dsp_reg")])
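
;; As an illustration, uxtb16 extracts bytes 0 and 2 of its operand and
;; zero-extends each into the corresponding 16-bit lane (sxtb16
;; sign-extends instead).  In C terms, the unsigned form is roughly:
;;   unsigned int uxtb16_like (unsigned int x)
;;   { return (x & 0xffu) | (x & 0x00ff0000u); }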
5478
5479 (define_insn "arm_<simd32_op>"
5480 [(set (match_operand:SI 0 "s_register_operand" "=r")
5481 (unspec:SI
5482 [(match_operand:SI 1 "s_register_operand" "r")
5483 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
5484 "TARGET_INT_SIMD"
5485 "<simd32_op>%?\\t%0, %1, %2"
5486 [(set_attr "predicable" "yes")
5487 (set_attr "type" "alu_dsp_reg")])
5488
5489 (define_insn "arm_usada8"
5490 [(set (match_operand:SI 0 "s_register_operand" "=r")
5491 (unspec:SI
5492 [(match_operand:SI 1 "s_register_operand" "r")
5493 (match_operand:SI 2 "s_register_operand" "r")
5494 (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
5495 "TARGET_INT_SIMD"
5496 "usada8%?\\t%0, %1, %2, %3"
5497 [(set_attr "predicable" "yes")
5498 (set_attr "type" "alu_dsp_reg")])
5499
5500 (define_insn "arm_<simd32_op>"
5501 [(set (match_operand:DI 0 "s_register_operand" "=r")
5502 (unspec:DI
5503 [(match_operand:SI 1 "s_register_operand" "r")
5504 (match_operand:SI 2 "s_register_operand" "r")
5505 (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
5506 "TARGET_INT_SIMD"
5507 "<simd32_op>%?\\t%Q0, %R0, %1, %2"
5508 [(set_attr "predicable" "yes")
5509 (set_attr "type" "smlald")])
5510
5511 (define_expand "extendsfdf2"
5512 [(set (match_operand:DF 0 "s_register_operand")
5513 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
5514 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5515 ""
5516 )
5517
5518 ;; HFmode -> DFmode conversions for which we don't have a direct instruction
5519 ;; must go through SFmode.
5520 ;;
5521 ;; This is always safe for an extend.
5522
5523 (define_expand "extendhfdf2"
5524 [(set (match_operand:DF 0 "s_register_operand")
5525 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
5526 "TARGET_EITHER"
5527 {
5528 /* We don't have a direct instruction for this, so go via SFmode. */
5529 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
5530 {
5531 rtx op1;
5532 op1 = convert_to_mode (SFmode, operands[1], 0);
5533 op1 = convert_to_mode (DFmode, op1, 0);
5534 emit_insn (gen_movdf (operands[0], op1));
5535 DONE;
5536 }
5537 /* Otherwise, we're done producing RTL and will pick up the correct
5538 pattern to do this with one rounding-step in a single instruction. */
5539 }
5540 )
5541 \f
5542 ;; Move insns (including loads and stores)
5543
5544 ;; XXX Just some ideas about movti.
5545 ;; I don't think these are a good idea on the ARM; there just aren't enough
5546 ;; registers.
5547 ;;(define_expand "loadti"
5548 ;; [(set (match_operand:TI 0 "s_register_operand")
5549 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
5550 ;; "" "")
5551
5552 ;;(define_expand "storeti"
5553 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
5554 ;; (match_operand:TI 1 "s_register_operand"))]
5555 ;; "" "")
5556
5557 ;;(define_expand "movti"
5558 ;; [(set (match_operand:TI 0 "general_operand")
5559 ;; (match_operand:TI 1 "general_operand"))]
5560 ;; ""
5561 ;; "
5562 ;;{
5563 ;; rtx insn;
5564 ;;
5565 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
5566 ;; operands[1] = copy_to_reg (operands[1]);
5567 ;; if (MEM_P (operands[0]))
5568 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
5569 ;; else if (MEM_P (operands[1]))
5570 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
5571 ;; else
5572 ;; FAIL;
5573 ;;
5574 ;; emit_insn (insn);
5575 ;; DONE;
5576 ;;}")
5577
5578 ;; Recognize garbage generated above.
5579
5580 ;;(define_insn ""
5581 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
5582 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
5583 ;; ""
5584 ;; "*
5585 ;; {
5586 ;; register mem = (which_alternative < 3);
5587 ;; register const char *template;
5588 ;;
5589 ;; operands[mem] = XEXP (operands[mem], 0);
5590 ;; switch (which_alternative)
5591 ;; {
5592 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
5593 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
5594 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
5595 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
5596 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
5597 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
5598 ;; }
5599 ;; output_asm_insn (template, operands);
5600 ;; return \"\";
5601 ;; }")
5602
5603 (define_expand "movdi"
5604 [(set (match_operand:DI 0 "general_operand")
5605 (match_operand:DI 1 "general_operand"))]
5606 "TARGET_EITHER"
5607 "
5608 gcc_checking_assert (aligned_operand (operands[0], DImode));
5609 gcc_checking_assert (aligned_operand (operands[1], DImode));
5610 if (can_create_pseudo_p ())
5611 {
5612 if (!REG_P (operands[0]))
5613 operands[1] = force_reg (DImode, operands[1]);
5614 }
5615 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
5616 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
5617 {
5618 /* Avoid LDRD's into an odd-numbered register pair in ARM state
5619 when expanding function calls. */
5620 gcc_assert (can_create_pseudo_p ());
5621 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
5622 {
5623 /* Perform load into legal reg pair first, then move. */
5624 rtx reg = gen_reg_rtx (DImode);
5625 emit_insn (gen_movdi (reg, operands[1]));
5626 operands[1] = reg;
5627 }
5628 emit_move_insn (gen_lowpart (SImode, operands[0]),
5629 gen_lowpart (SImode, operands[1]));
5630 emit_move_insn (gen_highpart (SImode, operands[0]),
5631 gen_highpart (SImode, operands[1]));
5632 DONE;
5633 }
5634 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
5635 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
5636 {
5637 /* Avoid STRD's from an odd-numbered register pair in ARM state
5638 when expanding function prologue. */
5639 gcc_assert (can_create_pseudo_p ());
5640 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
5641 ? gen_reg_rtx (DImode)
5642 : operands[0];
5643 emit_move_insn (gen_lowpart (SImode, split_dest),
5644 gen_lowpart (SImode, operands[1]));
5645 emit_move_insn (gen_highpart (SImode, split_dest),
5646 gen_highpart (SImode, operands[1]));
5647 if (split_dest != operands[0])
5648 emit_insn (gen_movdi (operands[0], split_dest));
5649 DONE;
5650 }
5651 "
5652 )
5653
5654 (define_insn "*arm_movdi"
5655 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
5656 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
5657 "TARGET_32BIT
5658 && !(TARGET_HARD_FLOAT)
5659 && !TARGET_IWMMXT
5660 && ( register_operand (operands[0], DImode)
5661 || register_operand (operands[1], DImode))"
5662 "*
5663 switch (which_alternative)
5664 {
5665 case 0:
5666 case 1:
5667 case 2:
5668 return \"#\";
5669 case 3:
5670 /* Cannot load it directly, split to load it via MOV / MOVT. */
5671 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
5672 return \"#\";
5673 /* Fall through. */
5674 default:
5675 return output_move_double (operands, true, NULL);
5676 }
5677 "
5678 [(set_attr "length" "8,12,16,8,8")
5679 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
5680 (set_attr "arm_pool_range" "*,*,*,1020,*")
5681 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
5682 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
5683 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
5684 )
5685
5686 (define_split
5687 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5688 (match_operand:ANY64 1 "immediate_operand" ""))]
5689 "TARGET_32BIT
5690 && reload_completed
5691 && (arm_disable_literal_pool
5692 || (arm_const_double_inline_cost (operands[1])
5693 <= arm_max_const_double_inline_cost ()))"
5694 [(const_int 0)]
5695 "
5696 arm_split_constant (SET, SImode, curr_insn,
5697 INTVAL (gen_lowpart (SImode, operands[1])),
5698 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
5699 arm_split_constant (SET, SImode, curr_insn,
5700 INTVAL (gen_highpart_mode (SImode,
5701 GET_MODE (operands[0]),
5702 operands[1])),
5703 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
5704 DONE;
5705 "
5706 )
5707
5708 ; If optimizing for size, or if we have load delay slots, then
5709 ; we want to split the constant into two separate operations.
5710 ; In both cases this may split a trivial part into a single data op
5711 ; leaving a single complex constant to load. We can also get longer
5712 ; offsets in an LDR, which means we get better chances of sharing the pool
5713 ; entries. Finally, we can normally do a better job of scheduling
5714 ; LDR instructions than we can with LDM.
5715 ; This pattern will only match if the one above did not.
5716 (define_split
5717 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5718 (match_operand:ANY64 1 "const_double_operand" ""))]
5719 "TARGET_ARM && reload_completed
5720 && arm_const_double_by_parts (operands[1])"
5721 [(set (match_dup 0) (match_dup 1))
5722 (set (match_dup 2) (match_dup 3))]
5723 "
5724 operands[2] = gen_highpart (SImode, operands[0]);
5725 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
5726 operands[1]);
5727 operands[0] = gen_lowpart (SImode, operands[0]);
5728 operands[1] = gen_lowpart (SImode, operands[1]);
5729 "
5730 )
5731
5732 (define_split
5733 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5734 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
5735 "TARGET_EITHER && reload_completed"
5736 [(set (match_dup 0) (match_dup 1))
5737 (set (match_dup 2) (match_dup 3))]
5738 "
5739 operands[2] = gen_highpart (SImode, operands[0]);
5740 operands[3] = gen_highpart (SImode, operands[1]);
5741 operands[0] = gen_lowpart (SImode, operands[0]);
5742 operands[1] = gen_lowpart (SImode, operands[1]);
5743
5744 /* Handle a partial overlap. */
5745 if (rtx_equal_p (operands[0], operands[3]))
5746 {
5747 rtx tmp0 = operands[0];
5748 rtx tmp1 = operands[1];
5749
5750 operands[0] = operands[2];
5751 operands[1] = operands[3];
5752 operands[2] = tmp0;
5753 operands[3] = tmp1;
5754 }
5755 "
5756 )
5757
5758 ;; We can't actually do base+index doubleword loads if the index and
5759 ;; destination overlap.  Split here so that we at least have a chance to
5760 ;; schedule.
5761 (define_split
5762 [(set (match_operand:DI 0 "s_register_operand" "")
5763 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
5764 (match_operand:SI 2 "s_register_operand" ""))))]
5765 "TARGET_LDRD
5766 && reg_overlap_mentioned_p (operands[0], operands[1])
5767 && reg_overlap_mentioned_p (operands[0], operands[2])"
5768 [(set (match_dup 4)
5769 (plus:SI (match_dup 1)
5770 (match_dup 2)))
5771 (set (match_dup 0)
5772 (mem:DI (match_dup 4)))]
5773 "
5774 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
5775 "
5776 )
5777
5778 (define_expand "movsi"
5779 [(set (match_operand:SI 0 "general_operand")
5780 (match_operand:SI 1 "general_operand"))]
5781 "TARGET_EITHER"
5782 "
5783 {
5784 rtx base, offset, tmp;
5785
5786 gcc_checking_assert (aligned_operand (operands[0], SImode));
5787 gcc_checking_assert (aligned_operand (operands[1], SImode));
5788 if (TARGET_32BIT || TARGET_HAVE_MOVT)
5789 {
5790 /* Everything except mem = const or mem = mem can be done easily. */
5791 if (MEM_P (operands[0]))
5792 operands[1] = force_reg (SImode, operands[1]);
5793 if (arm_general_register_operand (operands[0], SImode)
5794 && CONST_INT_P (operands[1])
5795 && !(const_ok_for_arm (INTVAL (operands[1]))
5796 || const_ok_for_arm (~INTVAL (operands[1]))))
5797 {
5798 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
5799 {
5800 emit_insn (gen_rtx_SET (operands[0], operands[1]));
5801 DONE;
5802 }
5803 else
5804 {
5805 arm_split_constant (SET, SImode, NULL_RTX,
5806 INTVAL (operands[1]), operands[0], NULL_RTX,
5807 optimize && can_create_pseudo_p ());
5808 DONE;
5809 }
5810 }
5811 }
5812 else /* Target doesn't have MOVT... */
5813 {
5814 if (can_create_pseudo_p ())
5815 {
5816 if (!REG_P (operands[0]))
5817 operands[1] = force_reg (SImode, operands[1]);
5818 }
5819 }
5820
5821 split_const (operands[1], &base, &offset);
5822 if (INTVAL (offset) != 0
5823 && targetm.cannot_force_const_mem (SImode, operands[1]))
5824 {
5825 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5826 emit_move_insn (tmp, base);
5827 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5828 DONE;
5829 }
5830
5831 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
5832
5833 /* Recognize the case where operand[1] is a reference to thread-local
5834 data and load its address to a register. Offsets have been split off
5835 already. */
5836 if (arm_tls_referenced_p (operands[1]))
5837 operands[1] = legitimize_tls_address (operands[1], tmp);
5838 else if (flag_pic
5839 && (CONSTANT_P (operands[1])
5840 || symbol_mentioned_p (operands[1])
5841 || label_mentioned_p (operands[1])))
5842 operands[1] =
5843 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
5844 }
5845 "
5846 )
5847
5848 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5849 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5850 ;; so this does not matter.
5851 (define_insn "*arm_movt"
5852 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
5853 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
5854 (match_operand:SI 2 "general_operand" "i,i")))]
5855 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
5856 "@
5857 movt%?\t%0, #:upper16:%c2
5858 movt\t%0, #:upper16:%c2"
5859 [(set_attr "arch" "32,v8mb")
5860 (set_attr "predicable" "yes")
5861 (set_attr "length" "4")
5862 (set_attr "type" "alu_sreg")]
5863 )
5864
5865 (define_insn "*arm_movsi_insn"
5866 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5867 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5868 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
5869 && ( register_operand (operands[0], SImode)
5870 || register_operand (operands[1], SImode))"
5871 "@
5872 mov%?\\t%0, %1
5873 mov%?\\t%0, %1
5874 mvn%?\\t%0, #%B1
5875 movw%?\\t%0, %1
5876 ldr%?\\t%0, %1
5877 str%?\\t%1, %0"
5878 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
5879 (set_attr "predicable" "yes")
5880 (set_attr "arch" "*,*,*,v6t2,*,*")
5881 (set_attr "pool_range" "*,*,*,*,4096,*")
5882 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5883 )
5884
5885 (define_split
5886 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5887 (match_operand:SI 1 "const_int_operand" ""))]
5888 "(TARGET_32BIT || TARGET_HAVE_MOVT)
5889 && (!(const_ok_for_arm (INTVAL (operands[1]))
5890 || const_ok_for_arm (~INTVAL (operands[1]))))"
5891 [(clobber (const_int 0))]
5892 "
5893 arm_split_constant (SET, SImode, NULL_RTX,
5894 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5895 DONE;
5896 "
5897 )
5898
5899 ;; A normal way to do (symbol + offset) requires at least three instructions
5900 ;; (depending on how big the offset is), as below:
5901 ;; movw r0, #:lower16:g
5902 ;; movt r0, #:upper16:g
5903 ;; adds r0, #4
5904 ;;
5905 ;; A better way would be:
5906 ;; movw r0, #:lower16:g+4
5907 ;; movt r0, #:upper16:g+4
5908 ;;
5909 ;; The limitation of this way is that the offset must be a 16-bit signed
5910 ;; value, because the current assembler only supports REL-type relocations
5911 ;; for such a case.  If the more powerful RELA type is supported in the
5912 ;; future, we should update this pattern to use the better sequence.
5913 (define_split
5914 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5915 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
5916 (match_operand:SI 2 "const_int_operand" ""))))]
5917 "TARGET_THUMB
5918 && TARGET_HAVE_MOVT
5919 && arm_disable_literal_pool
5920 && reload_completed
5921 && GET_CODE (operands[1]) == SYMBOL_REF"
5922 [(clobber (const_int 0))]
5923 "
5924 int offset = INTVAL (operands[2]);
5925
5926 if (offset < -0x8000 || offset > 0x7fff)
5927 {
5928 arm_emit_movpair (operands[0], operands[1]);
5929 emit_insn (gen_rtx_SET (operands[0],
5930 gen_rtx_PLUS (SImode, operands[0], operands[2])));
5931 }
5932 else
5933 {
5934 rtx op = gen_rtx_CONST (SImode,
5935 gen_rtx_PLUS (SImode, operands[1], operands[2]));
5936 arm_emit_movpair (operands[0], op);
5937 }
5938 "
5939 )
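
;; As an illustration, on a Thumb target with MOVW/MOVT and the literal
;; pool disabled, code such as the following may be split this way
;; ("g" is just an example symbol):
;;   extern int g[10];
;;   int *f (void) { return &g[1]; }
;;     /* movw r0, #:lower16:g+4 ; movt r0, #:upper16:g+4 */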
5940
5941 ;; Split symbol_refs at a later stage (after cprop), instead of generating
5942 ;; a movw/movt pair directly at expand time; otherwise the corresponding
5943 ;; high and lo_sum would be merged back into a memory load at cprop.  When
5944 ;; the default is to prefer movw/movt rather than a load from the constant
5945 ;; pool, this gives better performance.
5946 (define_split
5947 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5948 (match_operand:SI 1 "general_operand" ""))]
5949 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
5950 && !target_word_relocations
5951 && !arm_tls_referenced_p (operands[1])"
5952 [(clobber (const_int 0))]
5953 {
5954 arm_emit_movpair (operands[0], operands[1]);
5955 DONE;
5956 })
5957
5958 ;; When generating pic, we need to load the symbol offset into a register.
5959 ;; So that the optimizer does not confuse this with a normal symbol load
5960 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5961 ;; since that is the only type of relocation we can use.
5962
5963 ;; Wrap calculation of the whole PIC address in a single pattern for the
5964 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5965 ;; a PIC address involves two loads from memory, so we want to CSE it
5966 ;; as often as possible.
5967 ;; This pattern will be split into one of the pic_load_addr_* patterns
5968 ;; and a move after GCSE optimizations.
5969 ;;
5970 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
5971 (define_expand "calculate_pic_address"
5972 [(set (match_operand:SI 0 "register_operand")
5973 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
5974 (unspec:SI [(match_operand:SI 2 "" "")]
5975 UNSPEC_PIC_SYM))))]
5976 "flag_pic"
5977 )
5978
5979 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5980 (define_split
5981 [(set (match_operand:SI 0 "register_operand" "")
5982 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5983 (unspec:SI [(match_operand:SI 2 "" "")]
5984 UNSPEC_PIC_SYM))))]
5985 "flag_pic"
5986 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5987 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5988 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5989 )
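
;; As an illustration, after the split above the PIC address calculation
;; typically becomes a literal-pool load of the symbol's GOT offset
;; followed by a load from the GOT (registers and label illustrative
;; only; rPIC is the PIC base register):
;;   ldr r3, .LCn            @ GOT offset of the symbol
;;   ldr r0, [rPIC, r3]      @ load the symbol's address from the GOT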
5990
5991 ;; operand1 is the memory address to go into
5992 ;; pic_load_addr_32bit.
5993 ;; operand2 is the PIC label to be emitted
5994 ;; from pic_add_dot_plus_eight.
5995 ;; We do this to allow hoisting of the entire insn.
5996 (define_insn_and_split "pic_load_addr_unified"
5997 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
5998 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
5999 (match_operand:SI 2 "" "")]
6000 UNSPEC_PIC_UNIFIED))]
6001 "flag_pic"
6002 "#"
6003 "&& reload_completed"
6004 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
6005 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
6006 (match_dup 2)] UNSPEC_PIC_BASE))]
6007 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
6008 [(set_attr "type" "load_4,load_4,load_4")
6009 (set_attr "pool_range" "4096,4094,1022")
6010 (set_attr "neg_pool_range" "4084,0,0")
6011 (set_attr "arch" "a,t2,t1")
6012 (set_attr "length" "8,6,4")]
6013 )
6014
6015 ;; The rather odd constraints on the following are to force reload to leave
6016 ;; the insn alone, and to force the minipool generation pass to then move
6017 ;; the GOT symbol to memory.
6018
6019 (define_insn "pic_load_addr_32bit"
6020 [(set (match_operand:SI 0 "s_register_operand" "=r")
6021 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6022 "TARGET_32BIT && flag_pic"
6023 "ldr%?\\t%0, %1"
6024 [(set_attr "type" "load_4")
6025 (set (attr "pool_range")
6026 (if_then_else (eq_attr "is_thumb" "no")
6027 (const_int 4096)
6028 (const_int 4094)))
6029 (set (attr "neg_pool_range")
6030 (if_then_else (eq_attr "is_thumb" "no")
6031 (const_int 4084)
6032 (const_int 0)))]
6033 )
6034
6035 (define_insn "pic_load_addr_thumb1"
6036 [(set (match_operand:SI 0 "s_register_operand" "=l")
6037 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6038 "TARGET_THUMB1 && flag_pic"
6039 "ldr\\t%0, %1"
6040 [(set_attr "type" "load_4")
6041 (set (attr "pool_range") (const_int 1018))]
6042 )
6043
6044 (define_insn "pic_add_dot_plus_four"
6045 [(set (match_operand:SI 0 "register_operand" "=r")
6046 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
6047 (const_int 4)
6048 (match_operand 2 "" "")]
6049 UNSPEC_PIC_BASE))]
6050 "TARGET_THUMB"
6051 "*
6052 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6053 INTVAL (operands[2]));
6054 return \"add\\t%0, %|pc\";
6055 "
6056 [(set_attr "length" "2")
6057 (set_attr "type" "alu_sreg")]
6058 )
6059
6060 (define_insn "pic_add_dot_plus_eight"
6061 [(set (match_operand:SI 0 "register_operand" "=r")
6062 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6063 (const_int 8)
6064 (match_operand 2 "" "")]
6065 UNSPEC_PIC_BASE))]
6066 "TARGET_ARM"
6067 "*
6068 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6069 INTVAL (operands[2]));
6070 return \"add%?\\t%0, %|pc, %1\";
6071 "
6072 [(set_attr "predicable" "yes")
6073 (set_attr "type" "alu_sreg")]
6074 )
6075
6076 (define_insn "tls_load_dot_plus_eight"
6077 [(set (match_operand:SI 0 "register_operand" "=r")
6078 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6079 (const_int 8)
6080 (match_operand 2 "" "")]
6081 UNSPEC_PIC_BASE)))]
6082 "TARGET_ARM"
6083 "*
6084 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6085 INTVAL (operands[2]));
6086 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
6087 "
6088 [(set_attr "predicable" "yes")
6089 (set_attr "type" "load_4")]
6090 )
6091
6092 ;; PIC references to local variables can generate pic_add_dot_plus_eight
6093 ;; followed by a load. These sequences can be crunched down to
6094 ;; tls_load_dot_plus_eight by a peephole.
6095
6096 (define_peephole2
6097 [(set (match_operand:SI 0 "register_operand" "")
6098 (unspec:SI [(match_operand:SI 3 "register_operand" "")
6099 (const_int 8)
6100 (match_operand 1 "" "")]
6101 UNSPEC_PIC_BASE))
6102 (set (match_operand:SI 2 "arm_general_register_operand" "")
6103 (mem:SI (match_dup 0)))]
6104 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
6105 [(set (match_dup 2)
6106 (mem:SI (unspec:SI [(match_dup 3)
6107 (const_int 8)
6108 (match_dup 1)]
6109 UNSPEC_PIC_BASE)))]
6110 ""
6111 )
6112
6113 (define_insn "pic_offset_arm"
6114 [(set (match_operand:SI 0 "register_operand" "=r")
6115 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
6116 (unspec:SI [(match_operand:SI 2 "" "X")]
6117 UNSPEC_PIC_OFFSET))))]
6118 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
6119 "ldr%?\\t%0, [%1,%2]"
6120 [(set_attr "type" "load_4")]
6121 )
6122
6123 (define_expand "builtin_setjmp_receiver"
6124 [(label_ref (match_operand 0 "" ""))]
6125 "flag_pic"
6126 "
6127 {
6128 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
6129 register. */
6130 if (arm_pic_register != INVALID_REGNUM)
6131 arm_load_pic_register (1UL << 3, NULL_RTX);
6132 DONE;
6133 }")
6134
6135 ;; If copying one reg to another we can set the condition codes according to
6136 ;; its value.  Such a move is common after a return from a subroutine when the
6137 ;; result is being tested against zero.
6138
6139 (define_insn "*movsi_compare0"
6140 [(set (reg:CC CC_REGNUM)
6141 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
6142 (const_int 0)))
6143 (set (match_operand:SI 0 "s_register_operand" "=r,r")
6144 (match_dup 1))]
6145 "TARGET_32BIT"
6146 "@
6147 cmp%?\\t%0, #0
6148 subs%?\\t%0, %1, #0"
6149 [(set_attr "conds" "set")
6150 (set_attr "type" "alus_imm,alus_imm")]
6151 )
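
;; As an illustration, this lets a copy and a test against zero be
;; merged, e.g. after a call whose result is immediately compared with
;; zero (registers illustrative only):
;;   bl    f
;;   subs  r4, r0, #0        @ copy r0 to r4 and set the flags
;;   beq   .Lzero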
6152
6153 ;; Subroutine to store a half word from a register into memory.
6154 ;; Operand 0 is the source register (HImode)
6155 ;; Operand 1 is the destination address in a register (SImode)
6156
6157 ;; In both this routine and the next, we must be careful not to spill
6158 ;; a memory address of reg+large_const into a separate PLUS insn, since this
6159 ;; can generate unrecognizable rtl.
6160
6161 (define_expand "storehi"
6162 [;; store the low byte
6163 (set (match_operand 1 "" "") (match_dup 3))
6164 ;; extract the high byte
6165 (set (match_dup 2)
6166 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6167 ;; store the high byte
6168 (set (match_dup 4) (match_dup 5))]
6169 "TARGET_ARM"
6170 "
6171 {
6172 rtx op1 = operands[1];
6173 rtx addr = XEXP (op1, 0);
6174 enum rtx_code code = GET_CODE (addr);
6175
6176 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6177 || code == MINUS)
6178 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
6179
6180 operands[4] = adjust_address (op1, QImode, 1);
6181 operands[1] = adjust_address (operands[1], QImode, 0);
6182 operands[3] = gen_lowpart (QImode, operands[0]);
6183 operands[0] = gen_lowpart (SImode, operands[0]);
6184 operands[2] = gen_reg_rtx (SImode);
6185 operands[5] = gen_lowpart (QImode, operands[2]);
6186 }"
6187 )
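
;; As an illustration, on a little-endian pre-ARMv4 target the expander
;; above stores a halfword as two byte stores (registers illustrative
;; only):
;;   strb  r1, [r3]          @ low byte
;;   mov   r2, r1, asr #8
;;   strb  r2, [r3, #1]      @ high byte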
6188
6189 (define_expand "storehi_bigend"
6190 [(set (match_dup 4) (match_dup 3))
6191 (set (match_dup 2)
6192 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6193 (set (match_operand 1 "" "") (match_dup 5))]
6194 "TARGET_ARM"
6195 "
6196 {
6197 rtx op1 = operands[1];
6198 rtx addr = XEXP (op1, 0);
6199 enum rtx_code code = GET_CODE (addr);
6200
6201 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6202 || code == MINUS)
6203 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
6204
6205 operands[4] = adjust_address (op1, QImode, 1);
6206 operands[1] = adjust_address (operands[1], QImode, 0);
6207 operands[3] = gen_lowpart (QImode, operands[0]);
6208 operands[0] = gen_lowpart (SImode, operands[0]);
6209 operands[2] = gen_reg_rtx (SImode);
6210 operands[5] = gen_lowpart (QImode, operands[2]);
6211 }"
6212 )
6213
6214 ;; Subroutine to store a half word integer constant into memory.
6215 (define_expand "storeinthi"
6216 [(set (match_operand 0 "" "")
6217 (match_operand 1 "" ""))
6218 (set (match_dup 3) (match_dup 2))]
6219 "TARGET_ARM"
6220 "
6221 {
6222 HOST_WIDE_INT value = INTVAL (operands[1]);
6223 rtx addr = XEXP (operands[0], 0);
6224 rtx op0 = operands[0];
6225 enum rtx_code code = GET_CODE (addr);
6226
6227 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6228 || code == MINUS)
6229 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
6230
6231 operands[1] = gen_reg_rtx (SImode);
6232 if (BYTES_BIG_ENDIAN)
6233 {
6234 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
6235 if ((value & 255) == ((value >> 8) & 255))
6236 operands[2] = operands[1];
6237 else
6238 {
6239 operands[2] = gen_reg_rtx (SImode);
6240 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
6241 }
6242 }
6243 else
6244 {
6245 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
6246 if ((value & 255) == ((value >> 8) & 255))
6247 operands[2] = operands[1];
6248 else
6249 {
6250 operands[2] = gen_reg_rtx (SImode);
6251 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
6252 }
6253 }
6254
6255 operands[3] = adjust_address (op0, QImode, 1);
6256 operands[0] = adjust_address (operands[0], QImode, 0);
6257 operands[2] = gen_lowpart (QImode, operands[2]);
6258 operands[1] = gen_lowpart (QImode, operands[1]);
6259 }"
6260 )
6261
6262 (define_expand "storehi_single_op"
6263 [(set (match_operand:HI 0 "memory_operand")
6264 (match_operand:HI 1 "general_operand"))]
6265 "TARGET_32BIT && arm_arch4"
6266 "
6267 if (!s_register_operand (operands[1], HImode))
6268 operands[1] = copy_to_mode_reg (HImode, operands[1]);
6269 "
6270 )
6271
6272 (define_expand "movhi"
6273 [(set (match_operand:HI 0 "general_operand")
6274 (match_operand:HI 1 "general_operand"))]
6275 "TARGET_EITHER"
6276 "
6277 gcc_checking_assert (aligned_operand (operands[0], HImode));
6278 gcc_checking_assert (aligned_operand (operands[1], HImode));
6279 if (TARGET_ARM)
6280 {
6281 if (can_create_pseudo_p ())
6282 {
6283 if (MEM_P (operands[0]))
6284 {
6285 if (arm_arch4)
6286 {
6287 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
6288 DONE;
6289 }
6290 if (CONST_INT_P (operands[1]))
6291 emit_insn (gen_storeinthi (operands[0], operands[1]));
6292 else
6293 {
6294 if (MEM_P (operands[1]))
6295 operands[1] = force_reg (HImode, operands[1]);
6296 if (BYTES_BIG_ENDIAN)
6297 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
6298 else
6299 emit_insn (gen_storehi (operands[1], operands[0]));
6300 }
6301 DONE;
6302 }
6303 /* Sign extend a constant, and keep it in an SImode reg. */
6304 else if (CONST_INT_P (operands[1]))
6305 {
6306 rtx reg = gen_reg_rtx (SImode);
6307 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6308
6309 /* If the constant is already valid, leave it alone. */
6310 if (!const_ok_for_arm (val))
6311 {
6312 /* If setting all the top bits will make the constant
6313 loadable in a single instruction, then set them.
6314 Otherwise, sign extend the number. */
6315
6316 if (const_ok_for_arm (~(val | ~0xffff)))
6317 val |= ~0xffff;
6318 else if (val & 0x8000)
6319 val |= ~0xffff;
6320 }
6321
6322 emit_insn (gen_movsi (reg, GEN_INT (val)));
6323 operands[1] = gen_lowpart (HImode, reg);
6324 }
6325 else if (arm_arch4 && optimize && can_create_pseudo_p ()
6326 && MEM_P (operands[1]))
6327 {
6328 rtx reg = gen_reg_rtx (SImode);
6329
6330 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6331 operands[1] = gen_lowpart (HImode, reg);
6332 }
6333 else if (!arm_arch4)
6334 {
6335 if (MEM_P (operands[1]))
6336 {
6337 rtx base;
6338 rtx offset = const0_rtx;
6339 rtx reg = gen_reg_rtx (SImode);
6340
6341 if ((REG_P (base = XEXP (operands[1], 0))
6342 || (GET_CODE (base) == PLUS
6343 && (CONST_INT_P (offset = XEXP (base, 1)))
6344 && ((INTVAL(offset) & 1) != 1)
6345 && REG_P (base = XEXP (base, 0))))
6346 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
6347 {
6348 rtx new_rtx;
6349
6350 new_rtx = widen_memory_access (operands[1], SImode,
6351 ((INTVAL (offset) & ~3)
6352 - INTVAL (offset)));
6353 emit_insn (gen_movsi (reg, new_rtx));
6354 if (((INTVAL (offset) & 2) != 0)
6355 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
6356 {
6357 rtx reg2 = gen_reg_rtx (SImode);
6358
6359 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
6360 reg = reg2;
6361 }
6362 }
6363 else
6364 emit_insn (gen_movhi_bytes (reg, operands[1]));
6365
6366 operands[1] = gen_lowpart (HImode, reg);
6367 }
6368 }
6369 }
6370 /* Handle loading a large integer during reload. */
6371 else if (CONST_INT_P (operands[1])
6372 && !const_ok_for_arm (INTVAL (operands[1]))
6373 && !const_ok_for_arm (~INTVAL (operands[1])))
6374 {
6375 /* Writing a constant to memory needs a scratch, which should
6376 be handled with SECONDARY_RELOADs. */
6377 gcc_assert (REG_P (operands[0]));
6378
6379 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6380 emit_insn (gen_movsi (operands[0], operands[1]));
6381 DONE;
6382 }
6383 }
6384 else if (TARGET_THUMB2)
6385 {
6386 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
6387 if (can_create_pseudo_p ())
6388 {
6389 if (!REG_P (operands[0]))
6390 operands[1] = force_reg (HImode, operands[1]);
6391 /* Zero extend a constant, and keep it in an SImode reg. */
6392 else if (CONST_INT_P (operands[1]))
6393 {
6394 rtx reg = gen_reg_rtx (SImode);
6395 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6396
6397 emit_insn (gen_movsi (reg, GEN_INT (val)));
6398 operands[1] = gen_lowpart (HImode, reg);
6399 }
6400 }
6401 }
6402 else /* TARGET_THUMB1 */
6403 {
6404 if (can_create_pseudo_p ())
6405 {
6406 if (CONST_INT_P (operands[1]))
6407 {
6408 rtx reg = gen_reg_rtx (SImode);
6409
6410 emit_insn (gen_movsi (reg, operands[1]));
6411 operands[1] = gen_lowpart (HImode, reg);
6412 }
6413
6414 /* ??? We shouldn't really get invalid addresses here, but this can
6415 happen if we are passed a SP (never OK for HImode/QImode) or
6416 virtual register (also rejected as illegitimate for HImode/QImode)
6417 relative address. */
6418 /* ??? This should perhaps be fixed elsewhere, for instance, in
6419 fixup_stack_1, by checking for other kinds of invalid addresses,
6420 e.g. a bare reference to a virtual register. This may confuse the
6421 alpha though, which must handle this case differently. */
6422 if (MEM_P (operands[0])
6423 && !memory_address_p (GET_MODE (operands[0]),
6424 XEXP (operands[0], 0)))
6425 operands[0]
6426 = replace_equiv_address (operands[0],
6427 copy_to_reg (XEXP (operands[0], 0)));
6428
6429 if (MEM_P (operands[1])
6430 && !memory_address_p (GET_MODE (operands[1]),
6431 XEXP (operands[1], 0)))
6432 operands[1]
6433 = replace_equiv_address (operands[1],
6434 copy_to_reg (XEXP (operands[1], 0)));
6435
6436 if (MEM_P (operands[1]) && optimize > 0)
6437 {
6438 rtx reg = gen_reg_rtx (SImode);
6439
6440 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6441 operands[1] = gen_lowpart (HImode, reg);
6442 }
6443
6444 if (MEM_P (operands[0]))
6445 operands[1] = force_reg (HImode, operands[1]);
6446 }
6447 else if (CONST_INT_P (operands[1])
6448 && !satisfies_constraint_I (operands[1]))
6449 {
6450 /* Handle loading a large integer during reload. */
6451
6452 /* Writing a constant to memory needs a scratch, which should
6453 be handled with SECONDARY_RELOADs. */
6454 gcc_assert (REG_P (operands[0]));
6455
6456 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6457 emit_insn (gen_movsi (operands[0], operands[1]));
6458 DONE;
6459 }
6460 }
6461 "
6462 )
6463
6464 (define_expand "movhi_bytes"
6465 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
6466 (set (match_dup 3)
6467 (zero_extend:SI (match_dup 6)))
6468 (set (match_operand:SI 0 "" "")
6469 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
6470 "TARGET_ARM"
6471 "
6472 {
6473 rtx mem1, mem2;
6474 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
6475
6476 mem1 = change_address (operands[1], QImode, addr);
6477 mem2 = change_address (operands[1], QImode,
6478 plus_constant (Pmode, addr, 1));
6479 operands[0] = gen_lowpart (SImode, operands[0]);
6480 operands[1] = mem1;
6481 operands[2] = gen_reg_rtx (SImode);
6482 operands[3] = gen_reg_rtx (SImode);
6483 operands[6] = mem2;
6484
6485 if (BYTES_BIG_ENDIAN)
6486 {
6487 operands[4] = operands[2];
6488 operands[5] = operands[3];
6489 }
6490 else
6491 {
6492 operands[4] = operands[3];
6493 operands[5] = operands[2];
6494 }
6495 }"
6496 )
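
;; As an illustration, on a little-endian target the expander above
;; loads a halfword as two byte loads combined with an ORR (registers
;; illustrative only):
;;   ldrb  r2, [r3]          @ low byte
;;   ldrb  r1, [r3, #1]      @ high byte
;;   orr   r0, r2, r1, lsl #8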
6497
6498 (define_expand "movhi_bigend"
6499 [(set (match_dup 2)
6500 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
6501 (const_int 16)))
6502 (set (match_dup 3)
6503 (ashiftrt:SI (match_dup 2) (const_int 16)))
6504 (set (match_operand:HI 0 "s_register_operand")
6505 (match_dup 4))]
6506 "TARGET_ARM"
6507 "
6508 operands[2] = gen_reg_rtx (SImode);
6509 operands[3] = gen_reg_rtx (SImode);
6510 operands[4] = gen_lowpart (HImode, operands[3]);
6511 "
6512 )
6513
6514 ;; Pattern to recognize the insn generated by the default case above.
6515 (define_insn "*movhi_insn_arch4"
6516 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
6517 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))]
6518 "TARGET_ARM
6519 && arm_arch4 && !TARGET_HARD_FLOAT
6520 && (register_operand (operands[0], HImode)
6521 || register_operand (operands[1], HImode))"
6522 "@
6523 mov%?\\t%0, %1\\t%@ movhi
6524 mvn%?\\t%0, #%B1\\t%@ movhi
6525 movw%?\\t%0, %L1\\t%@ movhi
6526 strh%?\\t%1, %0\\t%@ movhi
6527 ldrh%?\\t%0, %1\\t%@ movhi"
6528 [(set_attr "predicable" "yes")
6529 (set_attr "pool_range" "*,*,*,*,256")
6530 (set_attr "neg_pool_range" "*,*,*,*,244")
6531 (set_attr "arch" "*,*,v6t2,*,*")
6532 (set_attr_alternative "type"
6533 [(if_then_else (match_operand 1 "const_int_operand" "")
6534 (const_string "mov_imm" )
6535 (const_string "mov_reg"))
6536 (const_string "mvn_imm")
6537 (const_string "mov_imm")
6538 (const_string "store_4")
6539 (const_string "load_4")])]
6540 )
6541
6542 (define_insn "*movhi_bytes"
6543 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
6544 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))]
6545 "TARGET_ARM && !TARGET_HARD_FLOAT"
6546 "@
6547 mov%?\\t%0, %1\\t%@ movhi
6548 mov%?\\t%0, %1\\t%@ movhi
6549 mvn%?\\t%0, #%B1\\t%@ movhi"
6550 [(set_attr "predicable" "yes")
6551 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
6552 )
6553
6554 ;; We use a DImode scratch because we may occasionally need an additional
6555 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
6556 ;; to take any notice of the "o" constraint on the reload_memory_operand.
6557 ;; The reload_in<m> and reload_out<m> patterns require special constraints
6558 ;; to be correctly handled in default_secondary_reload function.
6559 (define_expand "reload_outhi"
6560 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
6561 (match_operand:HI 1 "s_register_operand" "r")
6562 (match_operand:DI 2 "s_register_operand" "=&l")])]
6563 "TARGET_EITHER"
6564 "if (TARGET_ARM)
6565 arm_reload_out_hi (operands);
6566 else
6567 thumb_reload_out_hi (operands);
6568 DONE;
6569 "
6570 )
6571
6572 (define_expand "reload_inhi"
6573 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
6574 (match_operand:HI 1 "arm_reload_memory_operand" "o")
6575 (match_operand:DI 2 "s_register_operand" "=&r")])]
6576 "TARGET_EITHER"
6577 "
6578 if (TARGET_ARM)
6579 arm_reload_in_hi (operands);
6580 else
6581 thumb_reload_out_hi (operands);
6582 DONE;
6583 ")
6584
6585 (define_expand "movqi"
6586 [(set (match_operand:QI 0 "general_operand")
6587 (match_operand:QI 1 "general_operand"))]
6588 "TARGET_EITHER"
6589 "
6590 /* Everything except mem = const or mem = mem can be done easily */
6591
6592 if (can_create_pseudo_p ())
6593 {
6594 if (CONST_INT_P (operands[1]))
6595 {
6596 rtx reg = gen_reg_rtx (SImode);
6597
6598 /* For Thumb we want an unsigned immediate, so that we are more likely
6599 to be able to use a movs insn. */
6600 if (TARGET_THUMB)
6601 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
6602
6603 emit_insn (gen_movsi (reg, operands[1]));
6604 operands[1] = gen_lowpart (QImode, reg);
6605 }
6606
6607 if (TARGET_THUMB)
6608 {
6609 /* ??? We shouldn't really get invalid addresses here, but this can
6610 happen if we are passed a SP (never OK for HImode/QImode) or
6611 virtual register (also rejected as illegitimate for HImode/QImode)
6612 relative address. */
6613 /* ??? This should perhaps be fixed elsewhere, for instance, in
6614 fixup_stack_1, by checking for other kinds of invalid addresses,
6615 e.g. a bare reference to a virtual register. This may confuse the
6616 alpha though, which must handle this case differently. */
6617 if (MEM_P (operands[0])
6618 && !memory_address_p (GET_MODE (operands[0]),
6619 XEXP (operands[0], 0)))
6620 operands[0]
6621 = replace_equiv_address (operands[0],
6622 copy_to_reg (XEXP (operands[0], 0)));
6623 if (MEM_P (operands[1])
6624 && !memory_address_p (GET_MODE (operands[1]),
6625 XEXP (operands[1], 0)))
6626 operands[1]
6627 = replace_equiv_address (operands[1],
6628 copy_to_reg (XEXP (operands[1], 0)));
6629 }
6630
6631 if (MEM_P (operands[1]) && optimize > 0)
6632 {
6633 rtx reg = gen_reg_rtx (SImode);
6634
6635 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
6636 operands[1] = gen_lowpart (QImode, reg);
6637 }
6638
6639 if (MEM_P (operands[0]))
6640 operands[1] = force_reg (QImode, operands[1]);
6641 }
6642 else if (TARGET_THUMB
6643 && CONST_INT_P (operands[1])
6644 && !satisfies_constraint_I (operands[1]))
6645 {
6646 /* Handle loading a large integer during reload. */
6647
6648 /* Writing a constant to memory needs a scratch, which should
6649 be handled with SECONDARY_RELOADs. */
6650 gcc_assert (REG_P (operands[0]));
6651
6652 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6653 emit_insn (gen_movsi (operands[0], operands[1]));
6654 DONE;
6655 }
6656 "
6657 )
6658
6659 (define_insn "*arm_movqi_insn"
6660 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
6661 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
6662 "TARGET_32BIT
6663 && ( register_operand (operands[0], QImode)
6664 || register_operand (operands[1], QImode))"
6665 "@
6666 mov%?\\t%0, %1
6667 mov%?\\t%0, %1
6668 mov%?\\t%0, %1
6669 mov%?\\t%0, %1
6670 mvn%?\\t%0, #%B1
6671 ldrb%?\\t%0, %1
6672 strb%?\\t%1, %0
6673 ldrb%?\\t%0, %1
6674 strb%?\\t%1, %0"
6675 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
6676 (set_attr "predicable" "yes")
6677 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
6678 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
6679 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
6680 )
6681
6682 ;; HFmode moves
6683 (define_expand "movhf"
6684 [(set (match_operand:HF 0 "general_operand")
6685 (match_operand:HF 1 "general_operand"))]
6686 "TARGET_EITHER"
6687 "
6688 gcc_checking_assert (aligned_operand (operands[0], HFmode));
6689 gcc_checking_assert (aligned_operand (operands[1], HFmode));
6690 if (TARGET_32BIT)
6691 {
6692 if (MEM_P (operands[0]))
6693 operands[1] = force_reg (HFmode, operands[1]);
6694 }
6695 else /* TARGET_THUMB1 */
6696 {
6697 if (can_create_pseudo_p ())
6698 {
6699 if (!REG_P (operands[0]))
6700 operands[1] = force_reg (HFmode, operands[1]);
6701 }
6702 }
6703 "
6704 )
6705
6706 (define_insn "*arm32_movhf"
6707 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
6708 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
6709 "TARGET_32BIT && !TARGET_HARD_FLOAT
6710 && ( s_register_operand (operands[0], HFmode)
6711 || s_register_operand (operands[1], HFmode))"
6712 "*
6713 switch (which_alternative)
6714 {
6715 case 0: /* ARM register from memory */
6716 return \"ldrh%?\\t%0, %1\\t%@ __fp16\";
6717 case 1: /* memory from ARM register */
6718 return \"strh%?\\t%1, %0\\t%@ __fp16\";
6719 case 2: /* ARM register from ARM register */
6720 return \"mov%?\\t%0, %1\\t%@ __fp16\";
6721 case 3: /* ARM register from constant */
6722 {
6723 long bits;
6724 rtx ops[4];
6725
6726 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
6727 HFmode);
6728 ops[0] = operands[0];
6729 ops[1] = GEN_INT (bits);
6730 ops[2] = GEN_INT (bits & 0xff00);
6731 ops[3] = GEN_INT (bits & 0x00ff);
6732
6733 if (arm_arch_thumb2)
6734 output_asm_insn (\"movw%?\\t%0, %1\", ops);
6735 else
6736 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6737 return \"\";
6738 }
6739 default:
6740 gcc_unreachable ();
6741 }
6742 "
6743 [(set_attr "conds" "unconditional")
6744 (set_attr "type" "load_4,store_4,mov_reg,multiple")
6745 (set_attr "length" "4,4,4,8")
6746 (set_attr "predicable" "yes")]
6747 )
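
;; As an illustration of the constant alternative above: IEEE
;; half-precision 1.0 has the bit pattern 0x3c00, so (register
;; illustrative only)
;;   __fp16 one (void) { return 1.0; }
;; may become "movw r0, #15360" on v6t2 and later, or
;; "mov r0, #0x3c00; orr r0, r0, #0" otherwise.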
6748
6749 (define_expand "movsf"
6750 [(set (match_operand:SF 0 "general_operand")
6751 (match_operand:SF 1 "general_operand"))]
6752 "TARGET_EITHER"
6753 "
6754 gcc_checking_assert (aligned_operand (operands[0], SFmode));
6755 gcc_checking_assert (aligned_operand (operands[1], SFmode));
6756 if (TARGET_32BIT)
6757 {
6758 if (MEM_P (operands[0]))
6759 operands[1] = force_reg (SFmode, operands[1]);
6760 }
6761 else /* TARGET_THUMB1 */
6762 {
6763 if (can_create_pseudo_p ())
6764 {
6765 if (!REG_P (operands[0]))
6766 operands[1] = force_reg (SFmode, operands[1]);
6767 }
6768 }
6769
6770 /* Cannot load it directly; generate a load with a clobber so that it can be
6771 loaded via a GPR with MOV / MOVT. */
6772 if (arm_disable_literal_pool
6773 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6774 && CONST_DOUBLE_P (operands[1])
6775 && TARGET_HARD_FLOAT
6776 && !vfp3_const_double_rtx (operands[1]))
6777 {
6778 rtx clobreg = gen_reg_rtx (SFmode);
6779 emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
6780 clobreg));
6781 DONE;
6782 }
6783 "
6784 )
6785
6786 ;; Transform a move of a floating-point constant into a core register into
6787 ;; the equivalent SImode operation.
6788 (define_split
6789 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6790 (match_operand:SF 1 "immediate_operand" ""))]
6791 "TARGET_EITHER
6792 && reload_completed
6793 && CONST_DOUBLE_P (operands[1])"
6794 [(set (match_dup 2) (match_dup 3))]
6795 "
6796 operands[2] = gen_lowpart (SImode, operands[0]);
6797 operands[3] = gen_lowpart (SImode, operands[1]);
6798 if (operands[2] == 0 || operands[3] == 0)
6799 FAIL;
6800 "
6801 )
6802
6803 (define_insn "*arm_movsf_soft_insn"
6804 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6805 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6806 "TARGET_32BIT
6807 && TARGET_SOFT_FLOAT
6808 && (!MEM_P (operands[0])
6809 || register_operand (operands[1], SFmode))"
6810 {
6811 switch (which_alternative)
6812 {
6813 case 0: return \"mov%?\\t%0, %1\";
6814 case 1:
6815 /* Cannot load it directly; split to load it via MOV / MOVT. */
6816 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6817 return \"#\";
6818 return \"ldr%?\\t%0, %1\\t%@ float\";
6819 case 2: return \"str%?\\t%1, %0\\t%@ float\";
6820 default: gcc_unreachable ();
6821 }
6822 }
6823 [(set_attr "predicable" "yes")
6824 (set_attr "type" "mov_reg,load_4,store_4")
6825 (set_attr "arm_pool_range" "*,4096,*")
6826 (set_attr "thumb2_pool_range" "*,4094,*")
6827 (set_attr "arm_neg_pool_range" "*,4084,*")
6828 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6829 )
6830
6831 ;; Splitter for the above.
6832 (define_split
6833 [(set (match_operand:SF 0 "s_register_operand")
6834 (match_operand:SF 1 "const_double_operand"))]
6835 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6836 [(const_int 0)]
6837 {
6838 long buf;
6839 real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
6840 rtx cst = gen_int_mode (buf, SImode);
6841 emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
6842 DONE;
6843 }
6844 )
6845
6846 (define_expand "movdf"
6847 [(set (match_operand:DF 0 "general_operand")
6848 (match_operand:DF 1 "general_operand"))]
6849 "TARGET_EITHER"
6850 "
6851 gcc_checking_assert (aligned_operand (operands[0], DFmode));
6852 gcc_checking_assert (aligned_operand (operands[1], DFmode));
6853 if (TARGET_32BIT)
6854 {
6855 if (MEM_P (operands[0]))
6856 operands[1] = force_reg (DFmode, operands[1]);
6857 }
6858 else /* TARGET_THUMB */
6859 {
6860 if (can_create_pseudo_p ())
6861 {
6862 if (!REG_P (operands[0]))
6863 operands[1] = force_reg (DFmode, operands[1]);
6864 }
6865 }
6866
6867 /* Cannot load it directly; generate a load with a clobber so that it can be
6868 loaded via a GPR with MOV / MOVT. */
6869 if (arm_disable_literal_pool
6870 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6871 && CONSTANT_P (operands[1])
6872 && TARGET_HARD_FLOAT
6873 && !arm_const_double_rtx (operands[1])
6874 && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
6875 {
6876 rtx clobreg = gen_reg_rtx (DFmode);
6877 emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
6878 clobreg));
6879 DONE;
6880 }
6881 "
6882 )
6883
6884 ;; Reloading a df mode value stored in integer regs to memory can require a
6885 ;; scratch reg.
6886 ;; Another reload_out<m> pattern that requires special constraints.
6887 (define_expand "reload_outdf"
6888 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6889 (match_operand:DF 1 "s_register_operand" "r")
6890 (match_operand:SI 2 "s_register_operand" "=&r")]
6891 "TARGET_THUMB2"
6892 "
6893 {
6894 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6895
6896 if (code == REG)
6897 operands[2] = XEXP (operands[0], 0);
6898 else if (code == POST_INC || code == PRE_DEC)
6899 {
6900 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6901 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6902 emit_insn (gen_movdi (operands[0], operands[1]));
6903 DONE;
6904 }
6905 else if (code == PRE_INC)
6906 {
6907 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6908
6909 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6910 operands[2] = reg;
6911 }
6912 else if (code == POST_DEC)
6913 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6914 else
6915 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6916 XEXP (XEXP (operands[0], 0), 1)));
6917
6918 emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
6919 operands[1]));
6920
6921 if (code == POST_DEC)
6922 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6923
6924 DONE;
6925 }"
6926 )
6927
6928 (define_insn "*movdf_soft_insn"
6929 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6930 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6931 "TARGET_32BIT && TARGET_SOFT_FLOAT
6932 && ( register_operand (operands[0], DFmode)
6933 || register_operand (operands[1], DFmode))"
6934 "*
6935 switch (which_alternative)
6936 {
6937 case 0:
6938 case 1:
6939 case 2:
6940 return \"#\";
6941 case 3:
6942 /* Cannot load it directly; split to load it via MOV / MOVT. */
6943 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6944 return \"#\";
6945 /* Fall through. */
6946 default:
6947 return output_move_double (operands, true, NULL);
6948 }
6949 "
6950 [(set_attr "length" "8,12,16,8,8")
6951 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6952 (set_attr "arm_pool_range" "*,*,*,1020,*")
6953 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
6954 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6955 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6956 )
6957
6958 ;; Splitter for the above.
6959 (define_split
6960 [(set (match_operand:DF 0 "s_register_operand")
6961 (match_operand:DF 1 "const_double_operand"))]
6962 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6963 [(const_int 0)]
6964 {
6965 long buf[2];
6966 int order = BYTES_BIG_ENDIAN ? 1 : 0;
6967 real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
6968 unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
6969 ival |= (zext_hwi (buf[1 - order], 32) << 32);
6970 rtx cst = gen_int_mode (ival, DImode);
6971 emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
6972 DONE;
6973 }
6974 )
6975 \f
6976
6977 ;; load- and store-multiple insns
6978 ;; The arm can load/store any set of registers, provided that they are in
6979 ;; ascending order, but these expanders assume a contiguous set.
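;; For example, a three-word block load from the address in r0 could be
;; emitted as something like
;;	ldmia	r0, {r4, r5, r6}	@ r4 <- [r0], r5 <- [r0+4], r6 <- [r0+8]
;; (an illustrative sketch only; the register choice is arbitrary).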
6980
6981 (define_expand "load_multiple"
6982 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6983 (match_operand:SI 1 "" ""))
6984 (use (match_operand:SI 2 "" ""))])]
6985 "TARGET_32BIT"
6986 {
6987 HOST_WIDE_INT offset = 0;
6988
6989 /* Support only fixed-point (core) registers. */
6990 if (!CONST_INT_P (operands[2])
6991 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6992 || INTVAL (operands[2]) < 2
6993 || !MEM_P (operands[1])
6994 || !REG_P (operands[0])
6995 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6996 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6997 FAIL;
6998
6999 operands[3]
7000 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
7001 INTVAL (operands[2]),
7002 force_reg (SImode, XEXP (operands[1], 0)),
7003 FALSE, operands[1], &offset);
7004 })
7005
7006 (define_expand "store_multiple"
7007 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7008 (match_operand:SI 1 "" ""))
7009 (use (match_operand:SI 2 "" ""))])]
7010 "TARGET_32BIT"
7011 {
7012 HOST_WIDE_INT offset = 0;
7013
7014 /* Support only fixed-point (core) registers. */
7015 if (!CONST_INT_P (operands[2])
7016 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
7017 || INTVAL (operands[2]) < 2
7018 || !REG_P (operands[1])
7019 || !MEM_P (operands[0])
7020 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
7021 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7022 FAIL;
7023
7024 operands[3]
7025 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
7026 INTVAL (operands[2]),
7027 force_reg (SImode, XEXP (operands[0], 0)),
7028 FALSE, operands[0], &offset);
7029 })
7030
7031
7032 (define_expand "setmemsi"
7033 [(match_operand:BLK 0 "general_operand")
7034 (match_operand:SI 1 "const_int_operand")
7035 (match_operand:SI 2 "const_int_operand")
7036 (match_operand:SI 3 "const_int_operand")]
7037 "TARGET_32BIT"
7038 {
7039 if (arm_gen_setmem (operands))
7040 DONE;
7041
7042 FAIL;
7043 })
7044
7045
7046 ;; Move a block of memory if it is word-aligned and MORE than 2 words long.
7047 ;; We could let this apply to smaller blocks as well, but it clobbers so
7048 ;; many registers that there is probably a better way for those.
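;; As an illustrative C-level sketch (assumptions: word alignment and a
;; compile-time size; the names are invented for the example):
;;	struct blk { int w[4]; };
;;	void copy (struct blk *d, const struct blk *s) { *d = *s; }
;; is a 16-byte word-aligned copy and therefore a candidate for the
;; expansion below.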
7049
7050 (define_expand "cpymemqi"
7051 [(match_operand:BLK 0 "general_operand")
7052 (match_operand:BLK 1 "general_operand")
7053 (match_operand:SI 2 "const_int_operand")
7054 (match_operand:SI 3 "const_int_operand")]
7055 ""
7056 "
7057 if (TARGET_32BIT)
7058 {
7059 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
7060 && !optimize_function_for_size_p (cfun))
7061 {
7062 if (gen_cpymem_ldrd_strd (operands))
7063 DONE;
7064 FAIL;
7065 }
7066
7067 if (arm_gen_cpymemqi (operands))
7068 DONE;
7069 FAIL;
7070 }
7071 else /* TARGET_THUMB1 */
7072 {
7073 if ( INTVAL (operands[3]) != 4
7074 || INTVAL (operands[2]) > 48)
7075 FAIL;
7076
7077 thumb_expand_cpymemqi (operands);
7078 DONE;
7079 }
7080 "
7081 )
7082 \f
7083
7084 ;; Compare & branch insns
7085 ;; The range calculations are as follows:
7086 ;; For forward branches, the address calculation returns the address of
7087 ;; the next instruction. This is 2 beyond the branch instruction.
7088 ;; For backward branches, the address calculation returns the address of
7089 ;; the first instruction in this pattern (cmp). This is 2 before the branch
7090 ;; instruction for the shortest sequence, and 4 before the branch instruction
7091 ;; if we have to jump around an unconditional branch.
7092 ;; To the basic branch range the PC offset must be added (this is +4).
7093 ;; So for forward branches we have
7094 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
7095 ;; And for backward branches we have
7096 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
7097 ;;
7098 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
7099 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
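;; Worked through for the two cases above, using the -4 base offset for 'b'
;; (the jump-around-an-unconditional-branch case) and -2 for 'b<cond>':
;; for 'b' the forward reach is 2046 - 2 + 4 = 2048 and the backward reach is
;; -2048 - (-4) + 4 = -2040; for 'b<cond>' the forward reach is
;; 254 - 2 + 4 = 256 and the backward reach is -256 - (-2) + 4 = -250.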
7100
7101 (define_expand "cbranchsi4"
7102 [(set (pc) (if_then_else
7103 (match_operator 0 "expandable_comparison_operator"
7104 [(match_operand:SI 1 "s_register_operand")
7105 (match_operand:SI 2 "nonmemory_operand")])
7106 (label_ref (match_operand 3 "" ""))
7107 (pc)))]
7108 "TARGET_EITHER"
7109 "
7110 if (!TARGET_THUMB1)
7111 {
7112 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7113 FAIL;
7114 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7115 operands[3]));
7116 DONE;
7117 }
7118 if (thumb1_cmpneg_operand (operands[2], SImode))
7119 {
7120 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
7121 operands[3], operands[0]));
7122 DONE;
7123 }
7124 if (!thumb1_cmp_operand (operands[2], SImode))
7125 operands[2] = force_reg (SImode, operands[2]);
7126 ")
7127
7128 (define_expand "cbranchsf4"
7129 [(set (pc) (if_then_else
7130 (match_operator 0 "expandable_comparison_operator"
7131 [(match_operand:SF 1 "s_register_operand")
7132 (match_operand:SF 2 "vfp_compare_operand")])
7133 (label_ref (match_operand 3 "" ""))
7134 (pc)))]
7135 "TARGET_32BIT && TARGET_HARD_FLOAT"
7136 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7137 operands[3])); DONE;"
7138 )
7139
7140 (define_expand "cbranchdf4"
7141 [(set (pc) (if_then_else
7142 (match_operator 0 "expandable_comparison_operator"
7143 [(match_operand:DF 1 "s_register_operand")
7144 (match_operand:DF 2 "vfp_compare_operand")])
7145 (label_ref (match_operand 3 "" ""))
7146 (pc)))]
7147 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7148 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7149 operands[3])); DONE;"
7150 )
7151
7152 (define_expand "cbranchdi4"
7153 [(set (pc) (if_then_else
7154 (match_operator 0 "expandable_comparison_operator"
7155 [(match_operand:DI 1 "s_register_operand")
7156 (match_operand:DI 2 "reg_or_int_operand")])
7157 (label_ref (match_operand 3 "" ""))
7158 (pc)))]
7159 "TARGET_32BIT"
7160 "{
7161 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7162 FAIL;
7163 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7164 operands[3]));
7165 DONE;
7166 }"
7167 )
7168
7169 ;; Comparison and test insns
7170
7171 (define_insn "*arm_cmpsi_insn"
7172 [(set (reg:CC CC_REGNUM)
7173 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
7174 (match_operand:SI 1 "arm_add_operand" "Py,r,r,I,L")))]
7175 "TARGET_32BIT"
7176 "@
7177 cmp%?\\t%0, %1
7178 cmp%?\\t%0, %1
7179 cmp%?\\t%0, %1
7180 cmp%?\\t%0, %1
7181 cmn%?\\t%0, #%n1"
7182 [(set_attr "conds" "set")
7183 (set_attr "arch" "t2,t2,any,any,any")
7184 (set_attr "length" "2,2,4,4,4")
7185 (set_attr "predicable" "yes")
7186 (set_attr "predicable_short_it" "yes,yes,yes,no,no")
7187 (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
7188 )
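;; A note on the final alternative above (informal): %n1 prints the negated
;; constant, so on ARM a comparison against -1, which has no valid immediate
;; encoding for cmp, can be output as "cmn r0, #1" -- r0 + 1 sets the same
;; flags as r0 - (-1).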
7189
7190 (define_insn "*cmpsi_shiftsi"
7191 [(set (reg:CC CC_REGNUM)
7192 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7193 (match_operator:SI 3 "shift_operator"
7194 [(match_operand:SI 1 "s_register_operand" "r,r")
7195 (match_operand:SI 2 "shift_amount_operand" "M,r")])))]
7196 "TARGET_32BIT"
7197 "cmp\\t%0, %1%S3"
7198 [(set_attr "conds" "set")
7199 (set_attr "shift" "1")
7200 (set_attr "arch" "32,a")
7201 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
7202
7203 (define_insn "*cmpsi_shiftsi_swp"
7204 [(set (reg:CC_SWP CC_REGNUM)
7205 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7206 [(match_operand:SI 1 "s_register_operand" "r,r")
7207 (match_operand:SI 2 "shift_amount_operand" "M,r")])
7208 (match_operand:SI 0 "s_register_operand" "r,r")))]
7209 "TARGET_32BIT"
7210 "cmp%?\\t%0, %1%S3"
7211 [(set_attr "conds" "set")
7212 (set_attr "shift" "1")
7213 (set_attr "arch" "32,a")
7214 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
7215
7216 (define_insn "*arm_cmpsi_negshiftsi_si"
7217 [(set (reg:CC_Z CC_REGNUM)
7218 (compare:CC_Z
7219 (neg:SI (match_operator:SI 1 "shift_operator"
7220 [(match_operand:SI 2 "s_register_operand" "r,r")
7221 (match_operand:SI 3 "shift_amount_operand" "M,r")]))
7222 (match_operand:SI 0 "s_register_operand" "r,r")))]
7223 "TARGET_32BIT"
7224 "cmn%?\\t%0, %2%S1"
7225 [(set_attr "conds" "set")
7226 (set_attr "arch" "32,a")
7227 (set_attr "shift" "2")
7228 (set_attr "type" "alus_shift_imm,alus_shift_reg")
7229 (set_attr "predicable" "yes")]
7230 )
7231
7232 ; This insn allows redundant compares to be removed by cse; nothing should
7233 ; ever appear in the output file, since (set (reg x) (reg x)) is a no-op that
7234 ; is deleted later on. The match_dup will match the mode here, so that
7235 ; mode changes of the condition codes aren't lost by this even though we don't
7236 ; specify what they are.
7237
7238 (define_insn "*deleted_compare"
7239 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7240 "TARGET_32BIT"
7241 "\\t%@ deleted compare"
7242 [(set_attr "conds" "set")
7243 (set_attr "length" "0")
7244 (set_attr "type" "no_insn")]
7245 )
7246
7247 \f
7248 ;; Conditional branch insns
7249
7250 (define_expand "cbranch_cc"
7251 [(set (pc)
7252 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7253 (match_operand 2 "" "")])
7254 (label_ref (match_operand 3 "" ""))
7255 (pc)))]
7256 "TARGET_32BIT"
7257 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7258 operands[1], operands[2], NULL_RTX);
7259 operands[2] = const0_rtx;"
7260 )
7261
7262 ;;
7263 ;; Patterns to match conditional branch insns.
7264 ;;
7265
7266 (define_insn "arm_cond_branch"
7267 [(set (pc)
7268 (if_then_else (match_operator 1 "arm_comparison_operator"
7269 [(match_operand 2 "cc_register" "") (const_int 0)])
7270 (label_ref (match_operand 0 "" ""))
7271 (pc)))]
7272 "TARGET_32BIT"
7273 "*
7274 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7275 {
7276 arm_ccfsm_state += 2;
7277 return \"\";
7278 }
7279 return \"b%d1\\t%l0\";
7280 "
7281 [(set_attr "conds" "use")
7282 (set_attr "type" "branch")
7283 (set (attr "length")
7284 (if_then_else
7285 (and (match_test "TARGET_THUMB2")
7286 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7287 (le (minus (match_dup 0) (pc)) (const_int 256))))
7288 (const_int 2)
7289 (const_int 4)))]
7290 )
7291
7292 (define_insn "*arm_cond_branch_reversed"
7293 [(set (pc)
7294 (if_then_else (match_operator 1 "arm_comparison_operator"
7295 [(match_operand 2 "cc_register" "") (const_int 0)])
7296 (pc)
7297 (label_ref (match_operand 0 "" ""))))]
7298 "TARGET_32BIT"
7299 "*
7300 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7301 {
7302 arm_ccfsm_state += 2;
7303 return \"\";
7304 }
7305 return \"b%D1\\t%l0\";
7306 "
7307 [(set_attr "conds" "use")
7308 (set_attr "type" "branch")
7309 (set (attr "length")
7310 (if_then_else
7311 (and (match_test "TARGET_THUMB2")
7312 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7313 (le (minus (match_dup 0) (pc)) (const_int 256))))
7314 (const_int 2)
7315 (const_int 4)))]
7316 )
7317
7318 \f
7319
7320 ; scc insns
7321
7322 (define_expand "cstore_cc"
7323 [(set (match_operand:SI 0 "s_register_operand")
7324 (match_operator:SI 1 "" [(match_operand 2 "" "")
7325 (match_operand 3 "" "")]))]
7326 "TARGET_32BIT"
7327 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7328 operands[2], operands[3], NULL_RTX);
7329 operands[3] = const0_rtx;"
7330 )
7331
7332 (define_insn_and_split "*mov_scc"
7333 [(set (match_operand:SI 0 "s_register_operand" "=r")
7334 (match_operator:SI 1 "arm_comparison_operator_mode"
7335 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7336 "TARGET_ARM"
7337 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7338 "TARGET_ARM"
7339 [(set (match_dup 0)
7340 (if_then_else:SI (match_dup 1)
7341 (const_int 1)
7342 (const_int 0)))]
7343 ""
7344 [(set_attr "conds" "use")
7345 (set_attr "length" "8")
7346 (set_attr "type" "multiple")]
7347 )
7348
7349 (define_insn "*negscc_borrow"
7350 [(set (match_operand:SI 0 "s_register_operand" "=r")
7351 (neg:SI (match_operand:SI 1 "arm_borrow_operation" "")))]
7352 "TARGET_32BIT"
7353 "sbc\\t%0, %0, %0"
7354 [(set_attr "conds" "use")
7355 (set_attr "length" "4")
7356 (set_attr "type" "adc_reg")]
7357 )
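;; Informal reasoning for the pattern above: SBC computes %0 - %0 - NOT(C),
;; which is 0 when the carry flag is set (no borrow) and -1 when it is clear
;; (borrow), i.e. exactly the negation of the borrow value.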
7358
7359 (define_insn_and_split "*mov_negscc"
7360 [(set (match_operand:SI 0 "s_register_operand" "=r")
7361 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
7362 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7363 "TARGET_ARM && !arm_borrow_operation (operands[1], SImode)"
7364 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7365 "&& true"
7366 [(set (match_dup 0)
7367 (if_then_else:SI (match_dup 1)
7368 (match_dup 3)
7369 (const_int 0)))]
7370 {
7371 operands[3] = GEN_INT (~0);
7372 }
7373 [(set_attr "conds" "use")
7374 (set_attr "length" "8")
7375 (set_attr "type" "multiple")]
7376 )
7377
7378 (define_insn_and_split "*mov_notscc"
7379 [(set (match_operand:SI 0 "s_register_operand" "=r")
7380 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7381 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7382 "TARGET_ARM"
7383 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7384 "TARGET_ARM"
7385 [(set (match_dup 0)
7386 (if_then_else:SI (match_dup 1)
7387 (match_dup 3)
7388 (match_dup 4)))]
7389 {
7390 operands[3] = GEN_INT (~1);
7391 operands[4] = GEN_INT (~0);
7392 }
7393 [(set_attr "conds" "use")
7394 (set_attr "length" "8")
7395 (set_attr "type" "multiple")]
7396 )
7397
7398 (define_expand "cstoresi4"
7399 [(set (match_operand:SI 0 "s_register_operand")
7400 (match_operator:SI 1 "expandable_comparison_operator"
7401 [(match_operand:SI 2 "s_register_operand")
7402 (match_operand:SI 3 "reg_or_int_operand")]))]
7403 "TARGET_32BIT || TARGET_THUMB1"
7404 "{
7405 rtx op3, scratch, scratch2;
7406
7407 if (!TARGET_THUMB1)
7408 {
7409 if (!arm_add_operand (operands[3], SImode))
7410 operands[3] = force_reg (SImode, operands[3]);
7411 emit_insn (gen_cstore_cc (operands[0], operands[1],
7412 operands[2], operands[3]));
7413 DONE;
7414 }
7415
7416 if (operands[3] == const0_rtx)
7417 {
7418 switch (GET_CODE (operands[1]))
7419 {
7420 case EQ:
7421 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7422 break;
7423
7424 case NE:
7425 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7426 break;
7427
7428 case LE:
7429 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7430 NULL_RTX, 0, OPTAB_WIDEN);
7431 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7432 NULL_RTX, 0, OPTAB_WIDEN);
7433 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7434 operands[0], 1, OPTAB_WIDEN);
7435 break;
7436
7437 case GE:
7438 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7439 NULL_RTX, 1);
7440 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7441 operands[0], 1, OPTAB_WIDEN);
7442 break;
7443
7444 case GT:
7445 scratch = expand_binop (SImode, ashr_optab, operands[2],
7446 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7447 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7448 NULL_RTX, 0, OPTAB_WIDEN);
7449 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7450 0, OPTAB_WIDEN);
7451 break;
7452
7453 /* LT is handled by generic code. No need for unsigned with 0. */
7454 default:
7455 FAIL;
7456 }
7457 DONE;
7458 }
7459
7460 switch (GET_CODE (operands[1]))
7461 {
7462 case EQ:
7463 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7464 NULL_RTX, 0, OPTAB_WIDEN);
7465 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7466 break;
7467
7468 case NE:
7469 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7470 NULL_RTX, 0, OPTAB_WIDEN);
7471 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7472 break;
7473
7474 case LE:
7475 op3 = force_reg (SImode, operands[3]);
7476
7477 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7478 NULL_RTX, 1, OPTAB_WIDEN);
7479 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7480 NULL_RTX, 0, OPTAB_WIDEN);
7481 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7482 op3, operands[2]));
7483 break;
7484
7485 case GE:
7486 op3 = operands[3];
7487 if (!thumb1_cmp_operand (op3, SImode))
7488 op3 = force_reg (SImode, op3);
7489 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7490 NULL_RTX, 0, OPTAB_WIDEN);
7491 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7492 NULL_RTX, 1, OPTAB_WIDEN);
7493 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7494 operands[2], op3));
7495 break;
7496
7497 case LEU:
7498 op3 = force_reg (SImode, operands[3]);
7499 scratch = force_reg (SImode, const0_rtx);
7500 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7501 op3, operands[2]));
7502 break;
7503
7504 case GEU:
7505 op3 = operands[3];
7506 if (!thumb1_cmp_operand (op3, SImode))
7507 op3 = force_reg (SImode, op3);
7508 scratch = force_reg (SImode, const0_rtx);
7509 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7510 operands[2], op3));
7511 break;
7512
7513 case LTU:
7514 op3 = operands[3];
7515 if (!thumb1_cmp_operand (op3, SImode))
7516 op3 = force_reg (SImode, op3);
7517 scratch = gen_reg_rtx (SImode);
7518 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
7519 break;
7520
7521 case GTU:
7522 op3 = force_reg (SImode, operands[3]);
7523 scratch = gen_reg_rtx (SImode);
7524 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
7525 break;
7526
7527 /* No good sequences for GT, LT. */
7528 default:
7529 FAIL;
7530 }
7531 DONE;
7532 }")
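;; A worked note on the Thumb-1 LE-against-zero sequence above (informal,
;; two's complement x): x | (x - 1) has its sign bit set exactly when x <= 0,
;; since either x itself is negative, or x == 0 and x - 1 == -1; the final
;; logical shift right by 31 therefore leaves 1 for x <= 0 and 0 otherwise.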
7533
7534 (define_expand "cstorehf4"
7535 [(set (match_operand:SI 0 "s_register_operand")
7536 (match_operator:SI 1 "expandable_comparison_operator"
7537 [(match_operand:HF 2 "s_register_operand")
7538 (match_operand:HF 3 "vfp_compare_operand")]))]
7539 "TARGET_VFP_FP16INST"
7540 {
7541 if (!arm_validize_comparison (&operands[1],
7542 &operands[2],
7543 &operands[3]))
7544 FAIL;
7545
7546 emit_insn (gen_cstore_cc (operands[0], operands[1],
7547 operands[2], operands[3]));
7548 DONE;
7549 }
7550 )
7551
7552 (define_expand "cstoresf4"
7553 [(set (match_operand:SI 0 "s_register_operand")
7554 (match_operator:SI 1 "expandable_comparison_operator"
7555 [(match_operand:SF 2 "s_register_operand")
7556 (match_operand:SF 3 "vfp_compare_operand")]))]
7557 "TARGET_32BIT && TARGET_HARD_FLOAT"
7558 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7559 operands[2], operands[3])); DONE;"
7560 )
7561
7562 (define_expand "cstoredf4"
7563 [(set (match_operand:SI 0 "s_register_operand")
7564 (match_operator:SI 1 "expandable_comparison_operator"
7565 [(match_operand:DF 2 "s_register_operand")
7566 (match_operand:DF 3 "vfp_compare_operand")]))]
7567 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7568 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7569 operands[2], operands[3])); DONE;"
7570 )
7571
7572 (define_expand "cstoredi4"
7573 [(set (match_operand:SI 0 "s_register_operand")
7574 (match_operator:SI 1 "expandable_comparison_operator"
7575 [(match_operand:DI 2 "s_register_operand")
7576 (match_operand:DI 3 "reg_or_int_operand")]))]
7577 "TARGET_32BIT"
7578 "{
7579 if (!arm_validize_comparison (&operands[1],
7580 &operands[2],
7581 &operands[3]))
7582 FAIL;
7583 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
7584 operands[3]));
7585 DONE;
7586 }"
7587 )
7588
7589 \f
7590 ;; Conditional move insns
7591
7592 (define_expand "movsicc"
7593 [(set (match_operand:SI 0 "s_register_operand")
7594 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
7595 (match_operand:SI 2 "arm_not_operand")
7596 (match_operand:SI 3 "arm_not_operand")))]
7597 "TARGET_32BIT"
7598 "
7599 {
7600 enum rtx_code code;
7601 rtx ccreg;
7602
7603 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7604 &XEXP (operands[1], 1)))
7605 FAIL;
7606
7607 code = GET_CODE (operands[1]);
7608 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7609 XEXP (operands[1], 1), NULL_RTX);
7610 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7611 }"
7612 )
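;; An illustrative C-level sketch (names invented for the example):
;;	int pick (int a, int b, int c, int d) { return a < b ? c : d; }
;; may be expanded through movsicc and end up as a compare followed by
;; conditionally executed moves (cmp / movlt / movge) on ARM.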
7613
7614 (define_expand "movhfcc"
7615 [(set (match_operand:HF 0 "s_register_operand")
7616 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
7617 (match_operand:HF 2 "s_register_operand")
7618 (match_operand:HF 3 "s_register_operand")))]
7619 "TARGET_VFP_FP16INST"
7620 "
7621 {
7622 enum rtx_code code = GET_CODE (operands[1]);
7623 rtx ccreg;
7624
7625 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7626 &XEXP (operands[1], 1)))
7627 FAIL;
7628
7629 code = GET_CODE (operands[1]);
7630 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7631 XEXP (operands[1], 1), NULL_RTX);
7632 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7633 }"
7634 )
7635
7636 (define_expand "movsfcc"
7637 [(set (match_operand:SF 0 "s_register_operand")
7638 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
7639 (match_operand:SF 2 "s_register_operand")
7640 (match_operand:SF 3 "s_register_operand")))]
7641 "TARGET_32BIT && TARGET_HARD_FLOAT"
7642 "
7643 {
7644 enum rtx_code code = GET_CODE (operands[1]);
7645 rtx ccreg;
7646
7647 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7648 &XEXP (operands[1], 1)))
7649 FAIL;
7650
7651 code = GET_CODE (operands[1]);
7652 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7653 XEXP (operands[1], 1), NULL_RTX);
7654 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7655 }"
7656 )
7657
7658 (define_expand "movdfcc"
7659 [(set (match_operand:DF 0 "s_register_operand")
7660 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
7661 (match_operand:DF 2 "s_register_operand")
7662 (match_operand:DF 3 "s_register_operand")))]
7663 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
7664 "
7665 {
7666 enum rtx_code code = GET_CODE (operands[1]);
7667 rtx ccreg;
7668
7669 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7670 &XEXP (operands[1], 1)))
7671 FAIL;
7672 code = GET_CODE (operands[1]);
7673 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7674 XEXP (operands[1], 1), NULL_RTX);
7675 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7676 }"
7677 )
7678
7679 (define_insn "*cmov<mode>"
7680 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
7681 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
7682 [(match_operand 2 "cc_register" "") (const_int 0)])
7683 (match_operand:SDF 3 "s_register_operand"
7684 "<F_constraint>")
7685 (match_operand:SDF 4 "s_register_operand"
7686 "<F_constraint>")))]
7687 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
7688 "*
7689 {
7690 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7691 switch (code)
7692 {
7693 case ARM_GE:
7694 case ARM_GT:
7695 case ARM_EQ:
7696 case ARM_VS:
7697 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
7698 case ARM_LT:
7699 case ARM_LE:
7700 case ARM_NE:
7701 case ARM_VC:
7702 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
7703 default:
7704 gcc_unreachable ();
7705 }
7706 return \"\";
7707 }"
7708 [(set_attr "conds" "use")
7709 (set_attr "type" "fcsel")]
7710 )
7711
7712 (define_insn "*cmovhf"
7713 [(set (match_operand:HF 0 "s_register_operand" "=t")
7714 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
7715 [(match_operand 2 "cc_register" "") (const_int 0)])
7716 (match_operand:HF 3 "s_register_operand" "t")
7717 (match_operand:HF 4 "s_register_operand" "t")))]
7718 "TARGET_VFP_FP16INST"
7719 "*
7720 {
7721 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7722 switch (code)
7723 {
7724 case ARM_GE:
7725 case ARM_GT:
7726 case ARM_EQ:
7727 case ARM_VS:
7728 return \"vsel%d1.f16\\t%0, %3, %4\";
7729 case ARM_LT:
7730 case ARM_LE:
7731 case ARM_NE:
7732 case ARM_VC:
7733 return \"vsel%D1.f16\\t%0, %4, %3\";
7734 default:
7735 gcc_unreachable ();
7736 }
7737 return \"\";
7738 }"
7739 [(set_attr "conds" "use")
7740 (set_attr "type" "fcsel")]
7741 )
7742
7743 (define_insn_and_split "*movsicc_insn"
7744 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7745 (if_then_else:SI
7746 (match_operator 3 "arm_comparison_operator"
7747 [(match_operand 4 "cc_register" "") (const_int 0)])
7748 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7749 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7750 "TARGET_ARM"
7751 "@
7752 mov%D3\\t%0, %2
7753 mvn%D3\\t%0, #%B2
7754 mov%d3\\t%0, %1
7755 mvn%d3\\t%0, #%B1
7756 #
7757 #
7758 #
7759 #"
7760 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7761 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7762 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7763 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7764 "&& reload_completed"
7765 [(const_int 0)]
7766 {
7767 enum rtx_code rev_code;
7768 machine_mode mode;
7769 rtx rev_cond;
7770
7771 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7772 operands[3],
7773 gen_rtx_SET (operands[0], operands[1])));
7774
7775 rev_code = GET_CODE (operands[3]);
7776 mode = GET_MODE (operands[4]);
7777 if (mode == CCFPmode || mode == CCFPEmode)
7778 rev_code = reverse_condition_maybe_unordered (rev_code);
7779 else
7780 rev_code = reverse_condition (rev_code);
7781
7782 rev_cond = gen_rtx_fmt_ee (rev_code,
7783 VOIDmode,
7784 operands[4],
7785 const0_rtx);
7786 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7787 rev_cond,
7788 gen_rtx_SET (operands[0], operands[2])));
7789 DONE;
7790 }
7791 [(set_attr "length" "4,4,4,4,8,8,8,8")
7792 (set_attr "conds" "use")
7793 (set_attr_alternative "type"
7794 [(if_then_else (match_operand 2 "const_int_operand" "")
7795 (const_string "mov_imm")
7796 (const_string "mov_reg"))
7797 (const_string "mvn_imm")
7798 (if_then_else (match_operand 1 "const_int_operand" "")
7799 (const_string "mov_imm")
7800 (const_string "mov_reg"))
7801 (const_string "mvn_imm")
7802 (const_string "multiple")
7803 (const_string "multiple")
7804 (const_string "multiple")
7805 (const_string "multiple")])]
7806 )
7807
7808 (define_insn "*movsfcc_soft_insn"
7809 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7810 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7811 [(match_operand 4 "cc_register" "") (const_int 0)])
7812 (match_operand:SF 1 "s_register_operand" "0,r")
7813 (match_operand:SF 2 "s_register_operand" "r,0")))]
7814 "TARGET_ARM && TARGET_SOFT_FLOAT"
7815 "@
7816 mov%D3\\t%0, %2
7817 mov%d3\\t%0, %1"
7818 [(set_attr "conds" "use")
7819 (set_attr "type" "mov_reg")]
7820 )
7821
7822 \f
7823 ;; Jump and linkage insns
7824
7825 (define_expand "jump"
7826 [(set (pc)
7827 (label_ref (match_operand 0 "" "")))]
7828 "TARGET_EITHER"
7829 ""
7830 )
7831
7832 (define_insn "*arm_jump"
7833 [(set (pc)
7834 (label_ref (match_operand 0 "" "")))]
7835 "TARGET_32BIT"
7836 "*
7837 {
7838 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7839 {
7840 arm_ccfsm_state += 2;
7841 return \"\";
7842 }
7843 return \"b%?\\t%l0\";
7844 }
7845 "
7846 [(set_attr "predicable" "yes")
7847 (set (attr "length")
7848 (if_then_else
7849 (and (match_test "TARGET_THUMB2")
7850 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7851 (le (minus (match_dup 0) (pc)) (const_int 2048))))
7852 (const_int 2)
7853 (const_int 4)))
7854 (set_attr "type" "branch")]
7855 )
7856
7857 (define_expand "call"
7858 [(parallel [(call (match_operand 0 "memory_operand")
7859 (match_operand 1 "general_operand"))
7860 (use (match_operand 2 "" ""))
7861 (clobber (reg:SI LR_REGNUM))])]
7862 "TARGET_EITHER"
7863 "
7864 {
7865 rtx callee, pat;
7866 tree addr = MEM_EXPR (operands[0]);
7867
7868 /* In an untyped call, we can get NULL for operand 2. */
7869 if (operands[2] == NULL_RTX)
7870 operands[2] = const0_rtx;
7871
7872 /* Decide if we should generate indirect calls by loading the
7873 32-bit address of the callee into a register before performing the
7874 branch and link. */
7875 callee = XEXP (operands[0], 0);
7876 if (GET_CODE (callee) == SYMBOL_REF
7877 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7878 : !REG_P (callee))
7879 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7880
7881 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
7882 /* Indirect call: set r9 with FDPIC value of callee. */
7883 XEXP (operands[0], 0)
7884 = arm_load_function_descriptor (XEXP (operands[0], 0));
7885
7886 if (detect_cmse_nonsecure_call (addr))
7887 {
7888 pat = gen_nonsecure_call_internal (operands[0], operands[1],
7889 operands[2]);
7890 emit_call_insn (pat);
7891 }
7892 else
7893 {
7894 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7895 arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
7896 }
7897
7898 /* Restore FDPIC register (r9) after call. */
7899 if (TARGET_FDPIC)
7900 {
7901 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7902 rtx initial_fdpic_reg
7903 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7904
7905 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7906 initial_fdpic_reg));
7907 }
7908
7909 DONE;
7910 }"
7911 )
7912
7913 (define_insn "restore_pic_register_after_call"
7914 [(set (match_operand:SI 0 "s_register_operand" "+r,r")
7915 (unspec:SI [(match_dup 0)
7916 (match_operand:SI 1 "nonimmediate_operand" "r,m")]
7917 UNSPEC_PIC_RESTORE))]
7918 ""
7919 "@
7920 mov\t%0, %1
7921 ldr\t%0, %1"
7922 )
7923
7924 (define_expand "call_internal"
7925 [(parallel [(call (match_operand 0 "memory_operand")
7926 (match_operand 1 "general_operand"))
7927 (use (match_operand 2 "" ""))
7928 (clobber (reg:SI LR_REGNUM))])])
7929
7930 (define_expand "nonsecure_call_internal"
7931 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
7932 UNSPEC_NONSECURE_MEM)
7933 (match_operand 1 "general_operand"))
7934 (use (match_operand 2 "" ""))
7935 (clobber (reg:SI LR_REGNUM))])]
7936 "use_cmse"
7937 "
7938 {
7939 rtx tmp;
7940 tmp = copy_to_suggested_reg (XEXP (operands[0], 0),
7941 gen_rtx_REG (SImode, R4_REGNUM),
7942 SImode);
7943
7944 operands[0] = replace_equiv_address (operands[0], tmp);
7945 }")
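;; Note: the callee address is deliberately copied into r4 (R4_REGNUM) above
;; so that the remainder of the non-secure call sequence has the target in a
;; known register.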
7946
7947 (define_insn "*call_reg_armv5"
7948 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7949 (match_operand 1 "" ""))
7950 (use (match_operand 2 "" ""))
7951 (clobber (reg:SI LR_REGNUM))]
7952 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7953 "blx%?\\t%0"
7954 [(set_attr "type" "call")]
7955 )
7956
7957 (define_insn "*call_reg_arm"
7958 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7959 (match_operand 1 "" ""))
7960 (use (match_operand 2 "" ""))
7961 (clobber (reg:SI LR_REGNUM))]
7962 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7963 "*
7964 return output_call (operands);
7965 "
7966 ;; The length is the worst case; normally it is only two instructions.
7967 [(set_attr "length" "12")
7968 (set_attr "type" "call")]
7969 )
7970
7971
7972 (define_expand "call_value"
7973 [(parallel [(set (match_operand 0 "" "")
7974 (call (match_operand 1 "memory_operand")
7975 (match_operand 2 "general_operand")))
7976 (use (match_operand 3 "" ""))
7977 (clobber (reg:SI LR_REGNUM))])]
7978 "TARGET_EITHER"
7979 "
7980 {
7981 rtx pat, callee;
7982 tree addr = MEM_EXPR (operands[1]);
7983
7984 /* In an untyped call, we can get NULL for operand 3. */
7985 if (operands[3] == 0)
7986 operands[3] = const0_rtx;
7987
7988 /* Decide if we should generate indirect calls by loading the
7989 32-bit address of the callee into a register before performing the
7990 branch and link. */
7991 callee = XEXP (operands[1], 0);
7992 if (GET_CODE (callee) == SYMBOL_REF
7993 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7994 : !REG_P (callee))
7995 XEXP (operands[1], 0) = force_reg (Pmode, callee);
7996
7997 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
7998 /* Indirect call: set r9 with FDPIC value of callee. */
7999 XEXP (operands[1], 0)
8000 = arm_load_function_descriptor (XEXP (operands[1], 0));
8001
8002 if (detect_cmse_nonsecure_call (addr))
8003 {
8004 pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
8005 operands[2], operands[3]);
8006 emit_call_insn (pat);
8007 }
8008 else
8009 {
8010 pat = gen_call_value_internal (operands[0], operands[1],
8011 operands[2], operands[3]);
8012 arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
8013 }
8014
8015 /* Restore FDPIC register (r9) after call. */
8016 if (TARGET_FDPIC)
8017 {
8018 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8019 rtx initial_fdpic_reg
8020 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
8021
8022 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
8023 initial_fdpic_reg));
8024 }
8025
8026 DONE;
8027 }"
8028 )
8029
8030 (define_expand "call_value_internal"
8031 [(parallel [(set (match_operand 0 "" "")
8032 (call (match_operand 1 "memory_operand")
8033 (match_operand 2 "general_operand")))
8034 (use (match_operand 3 "" ""))
8035 (clobber (reg:SI LR_REGNUM))])])
8036
8037 (define_expand "nonsecure_call_value_internal"
8038 [(parallel [(set (match_operand 0 "" "")
8039 (call (unspec:SI [(match_operand 1 "memory_operand")]
8040 UNSPEC_NONSECURE_MEM)
8041 (match_operand 2 "general_operand")))
8042 (use (match_operand 3 "" ""))
8043 (clobber (reg:SI LR_REGNUM))])]
8044 "use_cmse"
8045 "
8046 {
8047 rtx tmp;
8048 tmp = copy_to_suggested_reg (XEXP (operands[1], 0),
8049 gen_rtx_REG (SImode, R4_REGNUM),
8050 SImode);
8051
8052 operands[1] = replace_equiv_address (operands[1], tmp);
8053 }")
8054
8055 (define_insn "*call_value_reg_armv5"
8056 [(set (match_operand 0 "" "")
8057 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8058 (match_operand 2 "" "")))
8059 (use (match_operand 3 "" ""))
8060 (clobber (reg:SI LR_REGNUM))]
8061 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
8062 "blx%?\\t%1"
8063 [(set_attr "type" "call")]
8064 )
8065
8066 (define_insn "*call_value_reg_arm"
8067 [(set (match_operand 0 "" "")
8068 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8069 (match_operand 2 "" "")))
8070 (use (match_operand 3 "" ""))
8071 (clobber (reg:SI LR_REGNUM))]
8072 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
8073 "*
8074 return output_call (&operands[1]);
8075 "
8076 [(set_attr "length" "12")
8077 (set_attr "type" "call")]
8078 )
8079
8080 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8081 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8082
8083 (define_insn "*call_symbol"
8084 [(call (mem:SI (match_operand:SI 0 "" ""))
8085 (match_operand 1 "" ""))
8086 (use (match_operand 2 "" ""))
8087 (clobber (reg:SI LR_REGNUM))]
8088 "TARGET_32BIT
8089 && !SIBLING_CALL_P (insn)
8090 && (GET_CODE (operands[0]) == SYMBOL_REF)
8091 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8092 "*
8093 {
8094 rtx op = operands[0];
8095
8096 /* Switch mode now when possible. */
8097 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8098 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8099 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
8100
8101 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8102 }"
8103 [(set_attr "type" "call")]
8104 )
8105
8106 (define_insn "*call_value_symbol"
8107 [(set (match_operand 0 "" "")
8108 (call (mem:SI (match_operand:SI 1 "" ""))
8109 (match_operand:SI 2 "" "")))
8110 (use (match_operand 3 "" ""))
8111 (clobber (reg:SI LR_REGNUM))]
8112 "TARGET_32BIT
8113 && !SIBLING_CALL_P (insn)
8114 && (GET_CODE (operands[1]) == SYMBOL_REF)
8115 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8116 "*
8117 {
8118 rtx op = operands[1];
8119
8120 /* Switch mode now when possible. */
8121 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8122 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8123 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
8124
8125 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8126 }"
8127 [(set_attr "type" "call")]
8128 )
8129
8130 (define_expand "sibcall_internal"
8131 [(parallel [(call (match_operand 0 "memory_operand")
8132 (match_operand 1 "general_operand"))
8133 (return)
8134 (use (match_operand 2 "" ""))])])
8135
8136 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8137 (define_expand "sibcall"
8138 [(parallel [(call (match_operand 0 "memory_operand")
8139 (match_operand 1 "general_operand"))
8140 (return)
8141 (use (match_operand 2 "" ""))])]
8142 "TARGET_32BIT"
8143 "
8144 {
8145 rtx pat;
8146
8147 if ((!REG_P (XEXP (operands[0], 0))
8148 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
8149 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
8150 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
8151 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
8152
8153 if (operands[2] == NULL_RTX)
8154 operands[2] = const0_rtx;
8155
8156 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
8157 arm_emit_call_insn (pat, operands[0], true);
8158 DONE;
8159 }"
8160 )
8161
8162 (define_expand "sibcall_value_internal"
8163 [(parallel [(set (match_operand 0 "" "")
8164 (call (match_operand 1 "memory_operand")
8165 (match_operand 2 "general_operand")))
8166 (return)
8167 (use (match_operand 3 "" ""))])])
8168
8169 (define_expand "sibcall_value"
8170 [(parallel [(set (match_operand 0 "" "")
8171 (call (match_operand 1 "memory_operand")
8172 (match_operand 2 "general_operand")))
8173 (return)
8174 (use (match_operand 3 "" ""))])]
8175 "TARGET_32BIT"
8176 "
8177 {
8178 rtx pat;
8179
8180 if ((!REG_P (XEXP (operands[1], 0))
8181 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
8182 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
8183 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
8184 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
8185
8186 if (operands[3] == NULL_RTX)
8187 operands[3] = const0_rtx;
8188
8189 pat = gen_sibcall_value_internal (operands[0], operands[1],
8190 operands[2], operands[3]);
8191 arm_emit_call_insn (pat, operands[1], true);
8192 DONE;
8193 }"
8194 )
8195
8196 (define_insn "*sibcall_insn"
8197 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
8198 (match_operand 1 "" ""))
8199 (return)
8200 (use (match_operand 2 "" ""))]
8201 "TARGET_32BIT && SIBLING_CALL_P (insn)"
8202 "*
8203 if (which_alternative == 1)
8204 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8205 else
8206 {
8207 if (arm_arch5t || arm_arch4t)
8208 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
8209 else
8210 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
8211 }
8212 "
8213 [(set_attr "type" "call")]
8214 )
8215
8216 (define_insn "*sibcall_value_insn"
8217 [(set (match_operand 0 "" "")
8218 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
8219 (match_operand 2 "" "")))
8220 (return)
8221 (use (match_operand 3 "" ""))]
8222 "TARGET_32BIT && SIBLING_CALL_P (insn)"
8223 "*
8224 if (which_alternative == 1)
8225 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8226 else
8227 {
8228 if (arm_arch5t || arm_arch4t)
8229 return \"bx%?\\t%1\";
8230 else
8231 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
8232 }
8233 "
8234 [(set_attr "type" "call")]
8235 )
8236
8237 (define_expand "<return_str>return"
8238 [(RETURNS)]
8239 "(TARGET_ARM || (TARGET_THUMB2
8240 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
8241 && !IS_STACKALIGN (arm_current_func_type ())))
8242 <return_cond_false>"
8243 "
8244 {
8245 if (TARGET_THUMB2)
8246 {
8247 thumb2_expand_return (<return_simple_p>);
8248 DONE;
8249 }
8250 }
8251 "
8252 )
8253
8254 ;; Often the return insn will be the same as loading from memory, so set the type attribute accordingly.
8255 (define_insn "*arm_return"
8256 [(return)]
8257 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8258 "*
8259 {
8260 if (arm_ccfsm_state == 2)
8261 {
8262 arm_ccfsm_state += 2;
8263 return \"\";
8264 }
8265 return output_return_instruction (const_true_rtx, true, false, false);
8266 }"
8267 [(set_attr "type" "load_4")
8268 (set_attr "length" "12")
8269 (set_attr "predicable" "yes")]
8270 )
8271
8272 (define_insn "*cond_<return_str>return"
8273 [(set (pc)
8274 (if_then_else (match_operator 0 "arm_comparison_operator"
8275 [(match_operand 1 "cc_register" "") (const_int 0)])
8276 (RETURNS)
8277 (pc)))]
8278 "TARGET_ARM <return_cond_true>"
8279 "*
8280 {
8281 if (arm_ccfsm_state == 2)
8282 {
8283 arm_ccfsm_state += 2;
8284 return \"\";
8285 }
8286 return output_return_instruction (operands[0], true, false,
8287 <return_simple_p>);
8288 }"
8289 [(set_attr "conds" "use")
8290 (set_attr "length" "12")
8291 (set_attr "type" "load_4")]
8292 )
8293
8294 (define_insn "*cond_<return_str>return_inverted"
8295 [(set (pc)
8296 (if_then_else (match_operator 0 "arm_comparison_operator"
8297 [(match_operand 1 "cc_register" "") (const_int 0)])
8298 (pc)
8299 (RETURNS)))]
8300 "TARGET_ARM <return_cond_true>"
8301 "*
8302 {
8303 if (arm_ccfsm_state == 2)
8304 {
8305 arm_ccfsm_state += 2;
8306 return \"\";
8307 }
8308 return output_return_instruction (operands[0], true, true,
8309 <return_simple_p>);
8310 }"
8311 [(set_attr "conds" "use")
8312 (set_attr "length" "12")
8313 (set_attr "type" "load_4")]
8314 )
8315
8316 (define_insn "*arm_simple_return"
8317 [(simple_return)]
8318 "TARGET_ARM"
8319 "*
8320 {
8321 if (arm_ccfsm_state == 2)
8322 {
8323 arm_ccfsm_state += 2;
8324 return \"\";
8325 }
8326 return output_return_instruction (const_true_rtx, true, false, true);
8327 }"
8328 [(set_attr "type" "branch")
8329 (set_attr "length" "4")
8330 (set_attr "predicable" "yes")]
8331 )
8332
8333 ;; Generate a sequence of instructions to determine if the processor is
8334 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8335 ;; mask.
8336
8337 (define_expand "return_addr_mask"
8338 [(set (match_dup 1)
8339 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8340 (const_int 0)))
8341 (set (match_operand:SI 0 "s_register_operand")
8342 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8343 (const_int -1)
8344 (const_int 67108860)))] ; 0x03fffffc
8345 "TARGET_ARM"
8346 "
8347 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8348 ")
8349
8350 (define_insn "*check_arch2"
8351 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8352 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8353 (const_int 0)))]
8354 "TARGET_ARM"
8355 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8356 [(set_attr "length" "8")
8357 (set_attr "conds" "set")
8358 (set_attr "type" "multiple")]
8359 )
8360
8361 ;; Call subroutine returning any type.
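;; (The untyped_call and untyped_return patterns below are what back GCC's
;; __builtin_apply and __builtin_return; a minimal C-level sketch, purely
;; illustrative -- the callee and the 64-byte argument-block size are
;; invented:
;;	void *args = __builtin_apply_args ();
;;	void *ret = __builtin_apply ((void (*) ()) callee, args, 64);
;;	__builtin_return (ret);
;; )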
8362
8363 (define_expand "untyped_call"
8364 [(parallel [(call (match_operand 0 "" "")
8365 (const_int 0))
8366 (match_operand 1 "" "")
8367 (match_operand 2 "" "")])]
8368 "TARGET_EITHER && !TARGET_FDPIC"
8369 "
8370 {
8371 int i;
8372 rtx par = gen_rtx_PARALLEL (VOIDmode,
8373 rtvec_alloc (XVECLEN (operands[2], 0)));
8374 rtx addr = gen_reg_rtx (Pmode);
8375 rtx mem;
8376 int size = 0;
8377
8378 emit_move_insn (addr, XEXP (operands[1], 0));
8379 mem = change_address (operands[1], BLKmode, addr);
8380
8381 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8382 {
8383 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8384
8385 /* Default code only uses r0 as a return value, but we could
8386 be using anything up to 4 registers. */
8387 if (REGNO (src) == R0_REGNUM)
8388 src = gen_rtx_REG (TImode, R0_REGNUM);
8389
8390 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8391 GEN_INT (size));
8392 size += GET_MODE_SIZE (GET_MODE (src));
8393 }
8394
8395 emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
8396
8397 size = 0;
8398
8399 for (i = 0; i < XVECLEN (par, 0); i++)
8400 {
8401 HOST_WIDE_INT offset = 0;
8402 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8403
8404 if (size != 0)
8405 emit_move_insn (addr, plus_constant (Pmode, addr, size));
8406
8407 mem = change_address (mem, GET_MODE (reg), NULL);
8408 if (REGNO (reg) == R0_REGNUM)
8409 {
8410 /* On thumb we have to use a write-back instruction. */
8411 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
8412 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8413 size = TARGET_ARM ? 16 : 0;
8414 }
8415 else
8416 {
8417 emit_move_insn (mem, reg);
8418 size = GET_MODE_SIZE (GET_MODE (reg));
8419 }
8420 }
8421
8422 /* The optimizer does not know that the call sets the function value
8423 registers we stored in the result block. We avoid problems by
8424 claiming that all hard registers are used and clobbered at this
8425 point. */
8426 emit_insn (gen_blockage ());
8427
8428 DONE;
8429 }"
8430 )
8431
8432 (define_expand "untyped_return"
8433 [(match_operand:BLK 0 "memory_operand")
8434 (match_operand 1 "" "")]
8435 "TARGET_EITHER && !TARGET_FDPIC"
8436 "
8437 {
8438 int i;
8439 rtx addr = gen_reg_rtx (Pmode);
8440 rtx mem;
8441 int size = 0;
8442
8443 emit_move_insn (addr, XEXP (operands[0], 0));
8444 mem = change_address (operands[0], BLKmode, addr);
8445
8446 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8447 {
8448 HOST_WIDE_INT offset = 0;
8449 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8450
8451 if (size != 0)
8452 emit_move_insn (addr, plus_constant (Pmode, addr, size));
8453
8454 mem = change_address (mem, GET_MODE (reg), NULL);
8455 if (REGNO (reg) == R0_REGNUM)
8456 {
8457 /* On thumb we have to use a write-back instruction. */
8458 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
8459 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8460 size = TARGET_ARM ? 16 : 0;
8461 }
8462 else
8463 {
8464 emit_move_insn (reg, mem);
8465 size = GET_MODE_SIZE (GET_MODE (reg));
8466 }
8467 }
8468
8469 /* Emit USE insns before the return. */
8470 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8471 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8472
8473 /* Construct the return. */
8474 expand_naked_return ();
8475
8476 DONE;
8477 }"
8478 )
8479
8480 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8481 ;; all of memory. This blocks insns from being moved across this point.
8482
8483 (define_insn "blockage"
8484 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8485 "TARGET_EITHER"
8486 ""
8487 [(set_attr "length" "0")
8488 (set_attr "type" "block")]
8489 )
8490
8491 ;; Since we hard-code r0 here, use the 'o' constraint to avoid provoking
8492 ;; undefined behaviour in the hardware by emitting auto-increment
8493 ;; operations that could end up with r0 as the base register.
8494 (define_insn "probe_stack"
8495 [(set (match_operand:SI 0 "memory_operand" "=o")
8496 (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
8497 "TARGET_32BIT"
8498 "str%?\\tr0, %0"
8499 [(set_attr "type" "store_4")
8500 (set_attr "predicable" "yes")]
8501 )
8502
8503 (define_insn "probe_stack_range"
8504 [(set (match_operand:SI 0 "register_operand" "=r")
8505 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
8506 (match_operand:SI 2 "register_operand" "r")]
8507 VUNSPEC_PROBE_STACK_RANGE))]
8508 "TARGET_32BIT"
8509 {
8510 return output_probe_stack_range (operands[0], operands[2]);
8511 }
8512 [(set_attr "type" "multiple")
8513 (set_attr "conds" "clob")]
8514 )
8515
8516 ;; Named patterns for stack smashing protection.
8517 (define_expand "stack_protect_combined_set"
8518 [(parallel
8519 [(set (match_operand:SI 0 "memory_operand")
8520 (unspec:SI [(match_operand:SI 1 "guard_operand")]
8521 UNSPEC_SP_SET))
8522 (clobber (match_scratch:SI 2 ""))
8523 (clobber (match_scratch:SI 3 ""))])]
8524 ""
8525 ""
8526 )
8527
8528 ;; Use a separate insn from the above expand so that the mem can stay outside
8529 ;; operand #1 when register allocation happens. This is needed to stop LRA from
8530 ;; trying to reload the guard, since we need to control how PIC access is done in
8531 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
8532 ;; legitimize_pic_address ()).
8533 (define_insn_and_split "*stack_protect_combined_set_insn"
8534 [(set (match_operand:SI 0 "memory_operand" "=m,m")
8535 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
8536 UNSPEC_SP_SET))
8537 (clobber (match_scratch:SI 2 "=&l,&r"))
8538 (clobber (match_scratch:SI 3 "=&l,&r"))]
8539 ""
8540 "#"
8541 "reload_completed"
8542 [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
8543 UNSPEC_SP_SET))
8544 (clobber (match_dup 2))])]
8545 "
8546 {
8547 if (flag_pic)
8548 {
8549 rtx pic_reg;
8550
8551 if (TARGET_FDPIC)
8552 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8553 else
8554 pic_reg = operands[3];
8555
8556 /* Forces recomputing of GOT base now. */
8557 legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
8558 true /*compute_now*/);
8559 }
8560 else
8561 {
8562 if (address_operand (operands[1], SImode))
8563 operands[2] = operands[1];
8564 else
8565 {
8566 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
8567 emit_move_insn (operands[2], mem);
8568 }
8569 }
8570 }"
8571 [(set_attr "arch" "t1,32")]
8572 )
8573
8574 ;; DO NOT SPLIT THIS INSN. It's important for security reasons that the
8575 ;; canary value does not live beyond the life of this sequence.
8576 (define_insn "*stack_protect_set_insn"
8577 [(set (match_operand:SI 0 "memory_operand" "=m,m")
8578 (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
8579 UNSPEC_SP_SET))
8580 (clobber (match_dup 1))]
8581 ""
8582 "@
8583 ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
8584 ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
8585 [(set_attr "length" "8,12")
8586 (set_attr "conds" "clob,nocond")
8587 (set_attr "type" "multiple")
8588 (set_attr "arch" "t1,32")]
8589 )
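;; Note on the sequence above: the final mov/movs #0 clears the register that
;; held the canary, so the value is not left live in a register after the
;; store.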
8590
8591 (define_expand "stack_protect_combined_test"
8592 [(parallel
8593 [(set (pc)
8594 (if_then_else
8595 (eq (match_operand:SI 0 "memory_operand")
8596 (unspec:SI [(match_operand:SI 1 "guard_operand")]
8597 UNSPEC_SP_TEST))
8598 (label_ref (match_operand 2))
8599 (pc)))
8600 (clobber (match_scratch:SI 3 ""))
8601 (clobber (match_scratch:SI 4 ""))
8602 (clobber (reg:CC CC_REGNUM))])]
8603 ""
8604 ""
8605 )
8606
8607 ;; Use a separate insn from the above expand so that the mem can stay outside
8608 ;; operand #1 when register allocation happens. This is needed to stop LRA from
8609 ;; trying to reload the guard, since we need to control how PIC access is done in
8610 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
8611 ;; legitimize_pic_address ()).
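;; Illustrative sketch only (non-PIC case; register names and the frame offset
;; are made up): the canary test side typically ends up as
;;      ldr     r3, .LC1            @ address of the guard
;;      ldr     r2, [r3]            @ current guard value
;;      ldr     r3, [sp, #N]        @ reload the stored canary (the guard
;;                                  @ address register is deliberately reused)
;;      eors    r2, r3, r2          @ Z is set iff the canary is intact
;;      beq     <continue>          @ on mismatch, fall through to the
;;                                  @ __stack_chk_fail call emitted by the
;;                                  @ generic stack-protector code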
8612 (define_insn_and_split "*stack_protect_combined_test_insn"
8613 [(set (pc)
8614 (if_then_else
8615 (eq (match_operand:SI 0 "memory_operand" "m,m")
8616 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
8617 UNSPEC_SP_TEST))
8618 (label_ref (match_operand 2))
8619 (pc)))
8620 (clobber (match_scratch:SI 3 "=&l,&r"))
8621 (clobber (match_scratch:SI 4 "=&l,&r"))
8622 (clobber (reg:CC CC_REGNUM))]
8623 ""
8624 "#"
8625 "reload_completed"
8626 [(const_int 0)]
8627 {
8628 rtx eq;
8629
8630 if (flag_pic)
8631 {
8632 rtx pic_reg;
8633
8634 if (TARGET_FDPIC)
8635 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8636 else
8637 pic_reg = operands[4];
8638
8639 /* Forces recomputing of GOT base now. */
8640 legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
8641 true /*compute_now*/);
8642 }
8643 else
8644 {
8645 if (address_operand (operands[1], SImode))
8646 operands[3] = operands[1];
8647 else
8648 {
8649 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
8650 emit_move_insn (operands[3], mem);
8651 }
8652 }
8653 if (TARGET_32BIT)
8654 {
8655 emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
8656 operands[3]));
8657 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
8658 eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
8659 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
8660 }
8661 else
8662 {
8663 emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
8664 operands[3]));
8665 eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
8666 emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
8667 operands[2]));
8668 }
8669 DONE;
8670 }
8671 [(set_attr "arch" "t1,32")]
8672 )
8673
8674 (define_insn "arm_stack_protect_test_insn"
8675 [(set (reg:CC_Z CC_REGNUM)
8676 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
8677 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
8678 UNSPEC_SP_TEST)
8679 (const_int 0)))
8680 (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
8681 (clobber (match_dup 2))]
8682 "TARGET_32BIT"
8683 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0"
8684 [(set_attr "length" "8,12")
8685 (set_attr "conds" "set")
8686 (set_attr "type" "multiple")
8687 (set_attr "arch" "t,32")]
8688 )
8689
8690 (define_expand "casesi"
8691 [(match_operand:SI 0 "s_register_operand") ; index to jump on
8692 (match_operand:SI 1 "const_int_operand") ; lower bound
8693 (match_operand:SI 2 "const_int_operand") ; total range
8694 (match_operand:SI 3 "" "") ; table label
8695 (match_operand:SI 4 "" "")] ; Out of range label
8696 "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
8697 "
8698 {
8699 enum insn_code code;
8700 if (operands[1] != const0_rtx)
8701 {
8702 rtx reg = gen_reg_rtx (SImode);
8703
8704 emit_insn (gen_addsi3 (reg, operands[0],
8705 gen_int_mode (-INTVAL (operands[1]),
8706 SImode)));
8707 operands[0] = reg;
8708 }
8709
8710 if (TARGET_ARM)
8711 code = CODE_FOR_arm_casesi_internal;
8712 else if (TARGET_THUMB1)
8713 code = CODE_FOR_thumb1_casesi_internal_pic;
8714 else if (flag_pic)
8715 code = CODE_FOR_thumb2_casesi_internal_pic;
8716 else
8717 code = CODE_FOR_thumb2_casesi_internal;
8718
8719 if (!insn_data[(int) code].operand[1].predicate (operands[2], SImode))
8720 operands[2] = force_reg (SImode, operands[2]);
8721
8722 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8723 operands[3], operands[4]));
8724 DONE;
8725 }"
8726 )
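;; Illustrative sketch only: for a dense switch whose cases run from 5 to 12,
;; the expander above first rebases the index, and the (non-PIC, ARM state)
;; internal pattern further down then produces roughly
;;      sub     r0, r0, #5
;;      cmp     r0, #7
;;      ldrls   pc, [pc, r0, asl #2]
;;      b       .Ldefault
;; followed by the 8-entry dispatch table.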
8727
8728 ;; The USE in this pattern is needed to tell flow analysis that this is
8729 ;; a CASESI insn. It has no other purpose.
8730 (define_expand "arm_casesi_internal"
8731 [(parallel [(set (pc)
8732 (if_then_else
8733 (leu (match_operand:SI 0 "s_register_operand")
8734 (match_operand:SI 1 "arm_rhs_operand"))
8735 (match_dup 4)
8736 (label_ref:SI (match_operand 3 ""))))
8737 (clobber (reg:CC CC_REGNUM))
8738 (use (label_ref:SI (match_operand 2 "")))])]
8739 "TARGET_ARM"
8740 {
8741 operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
8742 operands[4] = gen_rtx_PLUS (SImode, operands[4],
8743 gen_rtx_LABEL_REF (SImode, operands[2]));
8744 operands[4] = gen_rtx_MEM (SImode, operands[4]);
8745 MEM_READONLY_P (operands[4]) = 1;
8746 MEM_NOTRAP_P (operands[4]) = 1;
8747 })
8748
8749 (define_insn "*arm_casesi_internal"
8750 [(parallel [(set (pc)
8751 (if_then_else
8752 (leu (match_operand:SI 0 "s_register_operand" "r")
8753 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8754 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8755 (label_ref:SI (match_operand 2 "" ""))))
8756 (label_ref:SI (match_operand 3 "" ""))))
8757 (clobber (reg:CC CC_REGNUM))
8758 (use (label_ref:SI (match_dup 2)))])]
8759 "TARGET_ARM"
8760 "*
8761 if (flag_pic)
8762 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8763 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8764 "
8765 [(set_attr "conds" "clob")
8766 (set_attr "length" "12")
8767 (set_attr "type" "multiple")]
8768 )
8769
8770 (define_expand "indirect_jump"
8771 [(set (pc)
8772 (match_operand:SI 0 "s_register_operand"))]
8773 "TARGET_EITHER"
8774 "
8775 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8776 address and use bx. */
8777 if (TARGET_THUMB2)
8778 {
8779 rtx tmp;
8780 tmp = gen_reg_rtx (SImode);
8781 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT (1)));
8782 operands[0] = tmp;
8783 }
8784 "
8785 )
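;; For Thumb-2 the expander above therefore generates (sketch only, register
;; numbers made up)
;;      orr     r3, r0, #1          @ force the Thumb state bit
;;      bx      r3
;; while the ARM-state pattern below moves the register straight into the PC.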
8786
8787 ;; NB Never uses BX.
8788 (define_insn "*arm_indirect_jump"
8789 [(set (pc)
8790 (match_operand:SI 0 "s_register_operand" "r"))]
8791 "TARGET_ARM"
8792 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8793 [(set_attr "predicable" "yes")
8794 (set_attr "type" "branch")]
8795 )
8796
8797 (define_insn "*load_indirect_jump"
8798 [(set (pc)
8799 (match_operand:SI 0 "memory_operand" "m"))]
8800 "TARGET_ARM"
8801 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8802 [(set_attr "type" "load_4")
8803 (set_attr "pool_range" "4096")
8804 (set_attr "neg_pool_range" "4084")
8805 (set_attr "predicable" "yes")]
8806 )
8807
8808 \f
8809 ;; Misc insns
8810
8811 (define_insn "nop"
8812 [(const_int 0)]
8813 "TARGET_EITHER"
8814 "nop"
8815 [(set (attr "length")
8816 (if_then_else (eq_attr "is_thumb" "yes")
8817 (const_int 2)
8818 (const_int 4)))
8819 (set_attr "type" "mov_reg")]
8820 )
8821
8822 (define_insn "trap"
8823 [(trap_if (const_int 1) (const_int 0))]
8824 ""
8825 "*
8826 if (TARGET_ARM)
8827 return \".inst\\t0xe7f000f0\";
8828 else
8829 return \".inst\\t0xdeff\";
8830 "
8831 [(set (attr "length")
8832 (if_then_else (eq_attr "is_thumb" "yes")
8833 (const_int 2)
8834 (const_int 4)))
8835 (set_attr "type" "trap")
8836 (set_attr "conds" "unconditional")]
8837 )
8838
8839 \f
8840 ;; Patterns to allow combination of arithmetic, cond code and shifts
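;; For instance (illustrative only), the first pattern below allows
;;   a + b * 4   (equivalently  a + (b << 2))
;; to be emitted as a single ALU-with-shift instruction:
;;      add     r0, r1, r2, lsl #2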
8841
8842 (define_insn "*<arith_shift_insn>_multsi"
8843 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8844 (SHIFTABLE_OPS:SI
8845 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
8846 (match_operand:SI 3 "power_of_two_operand" ""))
8847 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
8848 "TARGET_32BIT"
8849 "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
8850 [(set_attr "predicable" "yes")
8851 (set_attr "shift" "2")
8852 (set_attr "arch" "a,t2")
8853 (set_attr "type" "alu_shift_imm")])
8854
8855 (define_insn "*<arith_shift_insn>_shiftsi"
8856 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8857 (SHIFTABLE_OPS:SI
8858 (match_operator:SI 2 "shift_nomul_operator"
8859 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8860 (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
8861 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
8862 "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
8863 "<arith_shift_insn>%?\\t%0, %1, %3%S2"
8864 [(set_attr "predicable" "yes")
8865 (set_attr "shift" "3")
8866 (set_attr "arch" "a,t2,a")
8867 (set_attr "type" "alu_shift_imm,alu_shift_imm,alu_shift_reg")])
8868
8869 (define_split
8870 [(set (match_operand:SI 0 "s_register_operand" "")
8871 (match_operator:SI 1 "shiftable_operator"
8872 [(match_operator:SI 2 "shiftable_operator"
8873 [(match_operator:SI 3 "shift_operator"
8874 [(match_operand:SI 4 "s_register_operand" "")
8875 (match_operand:SI 5 "reg_or_int_operand" "")])
8876 (match_operand:SI 6 "s_register_operand" "")])
8877 (match_operand:SI 7 "arm_rhs_operand" "")]))
8878 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8879 "TARGET_32BIT"
8880 [(set (match_dup 8)
8881 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8882 (match_dup 6)]))
8883 (set (match_dup 0)
8884 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
8885 "")
8886
8887 (define_insn "*arith_shiftsi_compare0"
8888 [(set (reg:CC_NOOV CC_REGNUM)
8889 (compare:CC_NOOV
8890 (match_operator:SI 1 "shiftable_operator"
8891 [(match_operator:SI 3 "shift_operator"
8892 [(match_operand:SI 4 "s_register_operand" "r,r")
8893 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8894 (match_operand:SI 2 "s_register_operand" "r,r")])
8895 (const_int 0)))
8896 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8897 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8898 (match_dup 2)]))]
8899 "TARGET_32BIT"
8900 "%i1s%?\\t%0, %2, %4%S3"
8901 [(set_attr "conds" "set")
8902 (set_attr "shift" "4")
8903 (set_attr "arch" "32,a")
8904 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8905
8906 (define_insn "*arith_shiftsi_compare0_scratch"
8907 [(set (reg:CC_NOOV CC_REGNUM)
8908 (compare:CC_NOOV
8909 (match_operator:SI 1 "shiftable_operator"
8910 [(match_operator:SI 3 "shift_operator"
8911 [(match_operand:SI 4 "s_register_operand" "r,r")
8912 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8913 (match_operand:SI 2 "s_register_operand" "r,r")])
8914 (const_int 0)))
8915 (clobber (match_scratch:SI 0 "=r,r"))]
8916 "TARGET_32BIT"
8917 "%i1s%?\\t%0, %2, %4%S3"
8918 [(set_attr "conds" "set")
8919 (set_attr "shift" "4")
8920 (set_attr "arch" "32,a")
8921 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8922
8923 (define_insn "*sub_shiftsi"
8924 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8925 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8926 (match_operator:SI 2 "shift_operator"
8927 [(match_operand:SI 3 "s_register_operand" "r,r")
8928 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8929 "TARGET_32BIT"
8930 "sub%?\\t%0, %1, %3%S2"
8931 [(set_attr "predicable" "yes")
8932 (set_attr "predicable_short_it" "no")
8933 (set_attr "shift" "3")
8934 (set_attr "arch" "32,a")
8935 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8936
8937 (define_insn "*sub_shiftsi_compare0"
8938 [(set (reg:CC_NOOV CC_REGNUM)
8939 (compare:CC_NOOV
8940 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8941 (match_operator:SI 2 "shift_operator"
8942 [(match_operand:SI 3 "s_register_operand" "r,r")
8943 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
8944 (const_int 0)))
8945 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8946 (minus:SI (match_dup 1)
8947 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8948 "TARGET_32BIT"
8949 "subs%?\\t%0, %1, %3%S2"
8950 [(set_attr "conds" "set")
8951 (set_attr "shift" "3")
8952 (set_attr "arch" "32,a")
8953 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8954
8955 (define_insn "*sub_shiftsi_compare0_scratch"
8956 [(set (reg:CC_NOOV CC_REGNUM)
8957 (compare:CC_NOOV
8958 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8959 (match_operator:SI 2 "shift_operator"
8960 [(match_operand:SI 3 "s_register_operand" "r,r")
8961 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
8962 (const_int 0)))
8963 (clobber (match_scratch:SI 0 "=r,r"))]
8964 "TARGET_32BIT"
8965 "subs%?\\t%0, %1, %3%S2"
8966 [(set_attr "conds" "set")
8967 (set_attr "shift" "3")
8968 (set_attr "arch" "32,a")
8969 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8970 \f
8971
8972 (define_insn_and_split "*and_scc"
8973 [(set (match_operand:SI 0 "s_register_operand" "=r")
8974 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8975 [(match_operand 2 "cc_register" "") (const_int 0)])
8976 (match_operand:SI 3 "s_register_operand" "r")))]
8977 "TARGET_ARM"
8978 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
8979 "&& reload_completed"
8980 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
8981 (cond_exec (match_dup 4) (set (match_dup 0)
8982 (and:SI (match_dup 3) (const_int 1))))]
8983 {
8984 machine_mode mode = GET_MODE (operands[2]);
8985 enum rtx_code rc = GET_CODE (operands[1]);
8986
8987 /* Note that operands[4] is the same as operands[1],
8988 but with VOIDmode as the result. */
8989 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8990 if (mode == CCFPmode || mode == CCFPEmode)
8991 rc = reverse_condition_maybe_unordered (rc);
8992 else
8993 rc = reverse_condition (rc);
8994 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8995 }
8996 [(set_attr "conds" "use")
8997 (set_attr "type" "multiple")
8998 (set_attr "length" "8")]
8999 )
9000
9001 (define_insn_and_split "*ior_scc"
9002 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9003 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
9004 [(match_operand 2 "cc_register" "") (const_int 0)])
9005 (match_operand:SI 3 "s_register_operand" "0,?r")))]
9006 "TARGET_ARM"
9007 "@
9008 orr%d1\\t%0, %3, #1
9009 #"
9010 "&& reload_completed
9011 && REGNO (operands [0]) != REGNO (operands[3])"
9012 ;; && which_alternative == 1
9013 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
9014 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
9015 (cond_exec (match_dup 4) (set (match_dup 0)
9016 (ior:SI (match_dup 3) (const_int 1))))]
9017 {
9018 machine_mode mode = GET_MODE (operands[2]);
9019 enum rtx_code rc = GET_CODE (operands[1]);
9020
9021 /* Note that operands[4] is the same as operands[1],
9022 but with VOIDmode as the result. */
9023 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9024 if (mode == CCFPmode || mode == CCFPEmode)
9025 rc = reverse_condition_maybe_unordered (rc);
9026 else
9027 rc = reverse_condition (rc);
9028 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9029 }
9030 [(set_attr "conds" "use")
9031 (set_attr "length" "4,8")
9032 (set_attr "type" "logic_imm,multiple")]
9033 )
9034
9035 ; A series of splitters for the compare_scc pattern below. Note that
9036 ; order is important.
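; For example, the first two splitters rewrite (sketch only):
;   Rd = (reg1 < 0)    ->   lsr Rd, reg1, #31
;   Rd = (reg1 >= 0)   ->   mvn Rd, reg1 ; lsr Rd, Rd, #31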
9037 (define_split
9038 [(set (match_operand:SI 0 "s_register_operand" "")
9039 (lt:SI (match_operand:SI 1 "s_register_operand" "")
9040 (const_int 0)))
9041 (clobber (reg:CC CC_REGNUM))]
9042 "TARGET_32BIT && reload_completed"
9043 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
9044
9045 (define_split
9046 [(set (match_operand:SI 0 "s_register_operand" "")
9047 (ge:SI (match_operand:SI 1 "s_register_operand" "")
9048 (const_int 0)))
9049 (clobber (reg:CC CC_REGNUM))]
9050 "TARGET_32BIT && reload_completed"
9051 [(set (match_dup 0) (not:SI (match_dup 1)))
9052 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
9053
9054 (define_split
9055 [(set (match_operand:SI 0 "s_register_operand" "")
9056 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9057 (const_int 0)))
9058 (clobber (reg:CC CC_REGNUM))]
9059 "arm_arch5t && TARGET_32BIT"
9060 [(set (match_dup 0) (clz:SI (match_dup 1)))
9061 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9062 )
9063
9064 (define_split
9065 [(set (match_operand:SI 0 "s_register_operand" "")
9066 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9067 (const_int 0)))
9068 (clobber (reg:CC CC_REGNUM))]
9069 "TARGET_32BIT && reload_completed"
9070 [(parallel
9071 [(set (reg:CC CC_REGNUM)
9072 (compare:CC (const_int 1) (match_dup 1)))
9073 (set (match_dup 0)
9074 (minus:SI (const_int 1) (match_dup 1)))])
9075 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
9076 (set (match_dup 0) (const_int 0)))])
9077
9078 (define_split
9079 [(set (match_operand:SI 0 "s_register_operand" "")
9080 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9081 (match_operand:SI 2 "const_int_operand" "")))
9082 (clobber (reg:CC CC_REGNUM))]
9083 "TARGET_32BIT && reload_completed"
9084 [(parallel
9085 [(set (reg:CC CC_REGNUM)
9086 (compare:CC (match_dup 1) (match_dup 2)))
9087 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
9088 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
9089 (set (match_dup 0) (const_int 1)))]
9090 {
9091 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
9092 })
9093
9094 (define_split
9095 [(set (match_operand:SI 0 "s_register_operand" "")
9096 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9097 (match_operand:SI 2 "arm_add_operand" "")))
9098 (clobber (reg:CC CC_REGNUM))]
9099 "TARGET_32BIT && reload_completed"
9100 [(parallel
9101 [(set (reg:CC_NOOV CC_REGNUM)
9102 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
9103 (const_int 0)))
9104 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
9105 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
9106 (set (match_dup 0) (const_int 1)))])
9107
9108 (define_insn_and_split "*compare_scc"
9109 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9110 (match_operator:SI 1 "arm_comparison_operator"
9111 [(match_operand:SI 2 "s_register_operand" "r,r")
9112 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9113 (clobber (reg:CC CC_REGNUM))]
9114 "TARGET_32BIT"
9115 "#"
9116 "&& reload_completed"
9117 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
9118 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
9119 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
9120 {
9121 rtx tmp1;
9122 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
9123 operands[2], operands[3]);
9124 enum rtx_code rc = GET_CODE (operands[1]);
9125
9126 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
9127
9128 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9129 if (mode == CCFPmode || mode == CCFPEmode)
9130 rc = reverse_condition_maybe_unordered (rc);
9131 else
9132 rc = reverse_condition (rc);
9133 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9134 }
9135 [(set_attr "type" "multiple")]
9136 )
9137
9138 ;; Attempt to improve the sequence generated by the compare_scc splitters
9139 ;; so that it does not need conditional execution.
9140
9141 ;; Rd = (eq (reg1) (const_int 0)) // ARMv5
9142 ;; clz Rd, reg1
9143 ;; lsr Rd, Rd, #5
9144 (define_peephole2
9145 [(set (reg:CC CC_REGNUM)
9146 (compare:CC (match_operand:SI 1 "register_operand" "")
9147 (const_int 0)))
9148 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9149 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9150 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9151 (set (match_dup 0) (const_int 1)))]
9152 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9153 [(set (match_dup 0) (clz:SI (match_dup 1)))
9154 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9155 )
9156
9157 ;; Rd = (eq (reg1) (const_int 0)) // !ARMv5
9158 ;; negs Rd, reg1
9159 ;; adc Rd, Rd, reg1
9160 (define_peephole2
9161 [(set (reg:CC CC_REGNUM)
9162 (compare:CC (match_operand:SI 1 "register_operand" "")
9163 (const_int 0)))
9164 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9165 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9166 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9167 (set (match_dup 0) (const_int 1)))
9168 (match_scratch:SI 2 "r")]
9169 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9170 [(parallel
9171 [(set (reg:CC CC_REGNUM)
9172 (compare:CC (const_int 0) (match_dup 1)))
9173 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
9174 (set (match_dup 0)
9175 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
9176 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
9177 )
9178
9179 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
9180 ;; sub Rd, Reg1, reg2
9181 ;; clz Rd, Rd
9182 ;; lsr Rd, Rd, #5
9183 (define_peephole2
9184 [(set (reg:CC CC_REGNUM)
9185 (compare:CC (match_operand:SI 1 "register_operand" "")
9186 (match_operand:SI 2 "arm_rhs_operand" "")))
9187 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9188 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9189 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9190 (set (match_dup 0) (const_int 1)))]
9191 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
9192 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
9193 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
9194 (set (match_dup 0) (clz:SI (match_dup 0)))
9195 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9196 )
9197
9198
9199 ;; Rd = (eq (reg1) (reg2)) // !ARMv5 or optimising for size.
9200 ;; sub T1, Reg1, reg2
9201 ;; negs Rd, T1
9202 ;; adc Rd, Rd, T1
9203 (define_peephole2
9204 [(set (reg:CC CC_REGNUM)
9205 (compare:CC (match_operand:SI 1 "register_operand" "")
9206 (match_operand:SI 2 "arm_rhs_operand" "")))
9207 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9208 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9209 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9210 (set (match_dup 0) (const_int 1)))
9211 (match_scratch:SI 3 "r")]
9212 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9213 [(set (match_dup 3) (match_dup 4))
9214 (parallel
9215 [(set (reg:CC CC_REGNUM)
9216 (compare:CC (const_int 0) (match_dup 3)))
9217 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
9218 (set (match_dup 0)
9219 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
9220 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
9221 "
9222 if (CONST_INT_P (operands[2]))
9223 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
9224 else
9225 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
9226 ")
9227
9228 (define_insn "*cond_move"
9229 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9230 (if_then_else:SI (match_operator 3 "equality_operator"
9231 [(match_operator 4 "arm_comparison_operator"
9232 [(match_operand 5 "cc_register" "") (const_int 0)])
9233 (const_int 0)])
9234 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9235 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9236 "TARGET_ARM"
9237 "*
9238 if (GET_CODE (operands[3]) == NE)
9239 {
9240 if (which_alternative != 1)
9241 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9242 if (which_alternative != 0)
9243 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9244 return \"\";
9245 }
9246 if (which_alternative != 0)
9247 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9248 if (which_alternative != 1)
9249 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9250 return \"\";
9251 "
9252 [(set_attr "conds" "use")
9253 (set_attr_alternative "type"
9254 [(if_then_else (match_operand 2 "const_int_operand" "")
9255 (const_string "mov_imm")
9256 (const_string "mov_reg"))
9257 (if_then_else (match_operand 1 "const_int_operand" "")
9258 (const_string "mov_imm")
9259 (const_string "mov_reg"))
9260 (const_string "multiple")])
9261 (set_attr "length" "4,4,8")]
9262 )
9263
9264 (define_insn "*cond_arith"
9265 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9266 (match_operator:SI 5 "shiftable_operator"
9267 [(match_operator:SI 4 "arm_comparison_operator"
9268 [(match_operand:SI 2 "s_register_operand" "r,r")
9269 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9270 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9271 (clobber (reg:CC CC_REGNUM))]
9272 "TARGET_ARM"
9273 "*
9274 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9275 return \"%i5\\t%0, %1, %2, lsr #31\";
9276
9277 output_asm_insn (\"cmp\\t%2, %3\", operands);
9278 if (GET_CODE (operands[5]) == AND)
9279 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9280 else if (GET_CODE (operands[5]) == MINUS)
9281 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9282 else if (which_alternative != 0)
9283 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9284 return \"%i5%d4\\t%0, %1, #1\";
9285 "
9286 [(set_attr "conds" "clob")
9287 (set_attr "length" "12")
9288 (set_attr "type" "multiple")]
9289 )
9290
9291 (define_insn "*cond_sub"
9292 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9293 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9294 (match_operator:SI 4 "arm_comparison_operator"
9295 [(match_operand:SI 2 "s_register_operand" "r,r")
9296 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9297 (clobber (reg:CC CC_REGNUM))]
9298 "TARGET_ARM"
9299 "*
9300 output_asm_insn (\"cmp\\t%2, %3\", operands);
9301 if (which_alternative != 0)
9302 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9303 return \"sub%d4\\t%0, %1, #1\";
9304 "
9305 [(set_attr "conds" "clob")
9306 (set_attr "length" "8,12")
9307 (set_attr "type" "multiple")]
9308 )
9309
9310 (define_insn "*cmp_ite0"
9311 [(set (match_operand 6 "dominant_cc_register" "")
9312 (compare
9313 (if_then_else:SI
9314 (match_operator 4 "arm_comparison_operator"
9315 [(match_operand:SI 0 "s_register_operand"
9316 "l,l,l,r,r,r,r,r,r")
9317 (match_operand:SI 1 "arm_add_operand"
9318 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9319 (match_operator:SI 5 "arm_comparison_operator"
9320 [(match_operand:SI 2 "s_register_operand"
9321 "l,r,r,l,l,r,r,r,r")
9322 (match_operand:SI 3 "arm_add_operand"
9323 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9324 (const_int 0))
9325 (const_int 0)))]
9326 "TARGET_32BIT"
9327 "*
9328 {
9329 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9330 {
9331 {\"cmp%d5\\t%0, %1\",
9332 \"cmp%d4\\t%2, %3\"},
9333 {\"cmn%d5\\t%0, #%n1\",
9334 \"cmp%d4\\t%2, %3\"},
9335 {\"cmp%d5\\t%0, %1\",
9336 \"cmn%d4\\t%2, #%n3\"},
9337 {\"cmn%d5\\t%0, #%n1\",
9338 \"cmn%d4\\t%2, #%n3\"}
9339 };
9340 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9341 {
9342 {\"cmp\\t%2, %3\",
9343 \"cmp\\t%0, %1\"},
9344 {\"cmp\\t%2, %3\",
9345 \"cmn\\t%0, #%n1\"},
9346 {\"cmn\\t%2, #%n3\",
9347 \"cmp\\t%0, %1\"},
9348 {\"cmn\\t%2, #%n3\",
9349 \"cmn\\t%0, #%n1\"}
9350 };
9351 static const char * const ite[2] =
9352 {
9353 \"it\\t%d5\",
9354 \"it\\t%d4\"
9355 };
9356 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9357 CMP_CMP, CMN_CMP, CMP_CMP,
9358 CMN_CMP, CMP_CMN, CMN_CMN};
9359 int swap =
9360 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9361
9362 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9363 if (TARGET_THUMB2) {
9364 output_asm_insn (ite[swap], operands);
9365 }
9366 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9367 return \"\";
9368 }"
9369 [(set_attr "conds" "set")
9370 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9371 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
9372 (set_attr "type" "multiple")
9373 (set_attr_alternative "length"
9374 [(const_int 6)
9375 (const_int 8)
9376 (const_int 8)
9377 (const_int 8)
9378 (const_int 8)
9379 (if_then_else (eq_attr "is_thumb" "no")
9380 (const_int 8)
9381 (const_int 10))
9382 (if_then_else (eq_attr "is_thumb" "no")
9383 (const_int 8)
9384 (const_int 10))
9385 (if_then_else (eq_attr "is_thumb" "no")
9386 (const_int 8)
9387 (const_int 10))
9388 (if_then_else (eq_attr "is_thumb" "no")
9389 (const_int 8)
9390 (const_int 10))])]
9391 )
9392
9393 (define_insn "*cmp_ite1"
9394 [(set (match_operand 6 "dominant_cc_register" "")
9395 (compare
9396 (if_then_else:SI
9397 (match_operator 4 "arm_comparison_operator"
9398 [(match_operand:SI 0 "s_register_operand"
9399 "l,l,l,r,r,r,r,r,r")
9400 (match_operand:SI 1 "arm_add_operand"
9401 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9402 (match_operator:SI 5 "arm_comparison_operator"
9403 [(match_operand:SI 2 "s_register_operand"
9404 "l,r,r,l,l,r,r,r,r")
9405 (match_operand:SI 3 "arm_add_operand"
9406 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9407 (const_int 1))
9408 (const_int 0)))]
9409 "TARGET_32BIT"
9410 "*
9411 {
9412 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9413 {
9414 {\"cmp\\t%0, %1\",
9415 \"cmp\\t%2, %3\"},
9416 {\"cmn\\t%0, #%n1\",
9417 \"cmp\\t%2, %3\"},
9418 {\"cmp\\t%0, %1\",
9419 \"cmn\\t%2, #%n3\"},
9420 {\"cmn\\t%0, #%n1\",
9421 \"cmn\\t%2, #%n3\"}
9422 };
9423 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9424 {
9425 {\"cmp%d4\\t%2, %3\",
9426 \"cmp%D5\\t%0, %1\"},
9427 {\"cmp%d4\\t%2, %3\",
9428 \"cmn%D5\\t%0, #%n1\"},
9429 {\"cmn%d4\\t%2, #%n3\",
9430 \"cmp%D5\\t%0, %1\"},
9431 {\"cmn%d4\\t%2, #%n3\",
9432 \"cmn%D5\\t%0, #%n1\"}
9433 };
9434 static const char * const ite[2] =
9435 {
9436 \"it\\t%d4\",
9437 \"it\\t%D5\"
9438 };
9439 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9440 CMP_CMP, CMN_CMP, CMP_CMP,
9441 CMN_CMP, CMP_CMN, CMN_CMN};
9442 int swap =
9443 comparison_dominates_p (GET_CODE (operands[5]),
9444 reverse_condition (GET_CODE (operands[4])));
9445
9446 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9447 if (TARGET_THUMB2) {
9448 output_asm_insn (ite[swap], operands);
9449 }
9450 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9451 return \"\";
9452 }"
9453 [(set_attr "conds" "set")
9454 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9455 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
9456 (set_attr_alternative "length"
9457 [(const_int 6)
9458 (const_int 8)
9459 (const_int 8)
9460 (const_int 8)
9461 (const_int 8)
9462 (if_then_else (eq_attr "is_thumb" "no")
9463 (const_int 8)
9464 (const_int 10))
9465 (if_then_else (eq_attr "is_thumb" "no")
9466 (const_int 8)
9467 (const_int 10))
9468 (if_then_else (eq_attr "is_thumb" "no")
9469 (const_int 8)
9470 (const_int 10))
9471 (if_then_else (eq_attr "is_thumb" "no")
9472 (const_int 8)
9473 (const_int 10))])
9474 (set_attr "type" "multiple")]
9475 )
9476
9477 (define_insn "*cmp_and"
9478 [(set (match_operand 6 "dominant_cc_register" "")
9479 (compare
9480 (and:SI
9481 (match_operator 4 "arm_comparison_operator"
9482 [(match_operand:SI 0 "s_register_operand"
9483 "l,l,l,r,r,r,r,r,r,r")
9484 (match_operand:SI 1 "arm_add_operand"
9485 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
9486 (match_operator:SI 5 "arm_comparison_operator"
9487 [(match_operand:SI 2 "s_register_operand"
9488 "l,r,r,l,l,r,r,r,r,r")
9489 (match_operand:SI 3 "arm_add_operand"
9490 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
9491 (const_int 0)))]
9492 "TARGET_32BIT"
9493 "*
9494 {
9495 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9496 {
9497 {\"cmp%d5\\t%0, %1\",
9498 \"cmp%d4\\t%2, %3\"},
9499 {\"cmn%d5\\t%0, #%n1\",
9500 \"cmp%d4\\t%2, %3\"},
9501 {\"cmp%d5\\t%0, %1\",
9502 \"cmn%d4\\t%2, #%n3\"},
9503 {\"cmn%d5\\t%0, #%n1\",
9504 \"cmn%d4\\t%2, #%n3\"}
9505 };
9506 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9507 {
9508 {\"cmp\\t%2, %3\",
9509 \"cmp\\t%0, %1\"},
9510 {\"cmp\\t%2, %3\",
9511 \"cmn\\t%0, #%n1\"},
9512 {\"cmn\\t%2, #%n3\",
9513 \"cmp\\t%0, %1\"},
9514 {\"cmn\\t%2, #%n3\",
9515 \"cmn\\t%0, #%n1\"}
9516 };
9517 static const char *const ite[2] =
9518 {
9519 \"it\\t%d5\",
9520 \"it\\t%d4\"
9521 };
9522 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
9523 CMP_CMP, CMN_CMP, CMP_CMP,
9524 CMP_CMP, CMN_CMP, CMP_CMN,
9525 CMN_CMN};
9526 int swap =
9527 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9528
9529 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9530 if (TARGET_THUMB2) {
9531 output_asm_insn (ite[swap], operands);
9532 }
9533 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9534 return \"\";
9535 }"
9536 [(set_attr "conds" "set")
9537 (set_attr "predicable" "no")
9538 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
9539 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
9540 (set_attr_alternative "length"
9541 [(const_int 6)
9542 (const_int 8)
9543 (const_int 8)
9544 (const_int 8)
9545 (const_int 8)
9546 (const_int 6)
9547 (if_then_else (eq_attr "is_thumb" "no")
9548 (const_int 8)
9549 (const_int 10))
9550 (if_then_else (eq_attr "is_thumb" "no")
9551 (const_int 8)
9552 (const_int 10))
9553 (if_then_else (eq_attr "is_thumb" "no")
9554 (const_int 8)
9555 (const_int 10))
9556 (if_then_else (eq_attr "is_thumb" "no")
9557 (const_int 8)
9558 (const_int 10))])
9559 (set_attr "type" "multiple")]
9560 )
9561
9562 (define_insn "*cmp_ior"
9563 [(set (match_operand 6 "dominant_cc_register" "")
9564 (compare
9565 (ior:SI
9566 (match_operator 4 "arm_comparison_operator"
9567 [(match_operand:SI 0 "s_register_operand"
9568 "l,l,l,r,r,r,r,r,r,r")
9569 (match_operand:SI 1 "arm_add_operand"
9570 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
9571 (match_operator:SI 5 "arm_comparison_operator"
9572 [(match_operand:SI 2 "s_register_operand"
9573 "l,r,r,l,l,r,r,r,r,r")
9574 (match_operand:SI 3 "arm_add_operand"
9575 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
9576 (const_int 0)))]
9577 "TARGET_32BIT"
9578 "*
9579 {
9580 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9581 {
9582 {\"cmp\\t%0, %1\",
9583 \"cmp\\t%2, %3\"},
9584 {\"cmn\\t%0, #%n1\",
9585 \"cmp\\t%2, %3\"},
9586 {\"cmp\\t%0, %1\",
9587 \"cmn\\t%2, #%n3\"},
9588 {\"cmn\\t%0, #%n1\",
9589 \"cmn\\t%2, #%n3\"}
9590 };
9591 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9592 {
9593 {\"cmp%D4\\t%2, %3\",
9594 \"cmp%D5\\t%0, %1\"},
9595 {\"cmp%D4\\t%2, %3\",
9596 \"cmn%D5\\t%0, #%n1\"},
9597 {\"cmn%D4\\t%2, #%n3\",
9598 \"cmp%D5\\t%0, %1\"},
9599 {\"cmn%D4\\t%2, #%n3\",
9600 \"cmn%D5\\t%0, #%n1\"}
9601 };
9602 static const char *const ite[2] =
9603 {
9604 \"it\\t%D4\",
9605 \"it\\t%D5\"
9606 };
9607 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
9608 CMP_CMP, CMN_CMP, CMP_CMP,
9609 CMP_CMP, CMN_CMP, CMP_CMN,
9610 CMN_CMN};
9611 int swap =
9612 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9613
9614 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9615 if (TARGET_THUMB2) {
9616 output_asm_insn (ite[swap], operands);
9617 }
9618 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9619 return \"\";
9620 }
9621 "
9622 [(set_attr "conds" "set")
9623 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
9624 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
9625 (set_attr_alternative "length"
9626 [(const_int 6)
9627 (const_int 8)
9628 (const_int 8)
9629 (const_int 8)
9630 (const_int 8)
9631 (const_int 6)
9632 (if_then_else (eq_attr "is_thumb" "no")
9633 (const_int 8)
9634 (const_int 10))
9635 (if_then_else (eq_attr "is_thumb" "no")
9636 (const_int 8)
9637 (const_int 10))
9638 (if_then_else (eq_attr "is_thumb" "no")
9639 (const_int 8)
9640 (const_int 10))
9641 (if_then_else (eq_attr "is_thumb" "no")
9642 (const_int 8)
9643 (const_int 10))])
9644 (set_attr "type" "multiple")]
9645 )
9646
9647 (define_insn_and_split "*ior_scc_scc"
9648 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9649 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9650 [(match_operand:SI 1 "s_register_operand" "l,r")
9651 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9652 (match_operator:SI 6 "arm_comparison_operator"
9653 [(match_operand:SI 4 "s_register_operand" "l,r")
9654 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
9655 (clobber (reg:CC CC_REGNUM))]
9656 "TARGET_32BIT
9657 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9658 != CCmode)"
9659 "#"
9660 "TARGET_32BIT && reload_completed"
9661 [(set (match_dup 7)
9662 (compare
9663 (ior:SI
9664 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9665 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9666 (const_int 0)))
9667 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9668 "operands[7]
9669 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9670 DOM_CC_X_OR_Y),
9671 CC_REGNUM);"
9672 [(set_attr "conds" "clob")
9673 (set_attr "enabled_for_short_it" "yes,no")
9674 (set_attr "length" "16")
9675 (set_attr "type" "multiple")]
9676 )
9677
9678 ; If the above pattern is followed by a CMP insn, then the compare is
9679 ; redundant, since we can rework the conditional instruction that follows.
9680 (define_insn_and_split "*ior_scc_scc_cmp"
9681 [(set (match_operand 0 "dominant_cc_register" "")
9682 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9683 [(match_operand:SI 1 "s_register_operand" "l,r")
9684 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9685 (match_operator:SI 6 "arm_comparison_operator"
9686 [(match_operand:SI 4 "s_register_operand" "l,r")
9687 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9688 (const_int 0)))
9689 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9690 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9691 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9692 "TARGET_32BIT"
9693 "#"
9694 "TARGET_32BIT && reload_completed"
9695 [(set (match_dup 0)
9696 (compare
9697 (ior:SI
9698 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9699 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9700 (const_int 0)))
9701 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9702 ""
9703 [(set_attr "conds" "set")
9704 (set_attr "enabled_for_short_it" "yes,no")
9705 (set_attr "length" "16")
9706 (set_attr "type" "multiple")]
9707 )
9708
9709 (define_insn_and_split "*and_scc_scc"
9710 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9711 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9712 [(match_operand:SI 1 "s_register_operand" "l,r")
9713 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9714 (match_operator:SI 6 "arm_comparison_operator"
9715 [(match_operand:SI 4 "s_register_operand" "l,r")
9716 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
9717 (clobber (reg:CC CC_REGNUM))]
9718 "TARGET_32BIT
9719 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9720 != CCmode)"
9721 "#"
9722 "TARGET_32BIT && reload_completed
9723 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9724 != CCmode)"
9725 [(set (match_dup 7)
9726 (compare
9727 (and:SI
9728 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9729 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9730 (const_int 0)))
9731 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9732 "operands[7]
9733 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9734 DOM_CC_X_AND_Y),
9735 CC_REGNUM);"
9736 [(set_attr "conds" "clob")
9737 (set_attr "enabled_for_short_it" "yes,no")
9738 (set_attr "length" "16")
9739 (set_attr "type" "multiple")]
9740 )
9741
9742 ; If the above pattern is followed by a CMP insn, then the compare is
9743 ; redundant, since we can rework the conditional instruction that follows.
9744 (define_insn_and_split "*and_scc_scc_cmp"
9745 [(set (match_operand 0 "dominant_cc_register" "")
9746 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9747 [(match_operand:SI 1 "s_register_operand" "l,r")
9748 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9749 (match_operator:SI 6 "arm_comparison_operator"
9750 [(match_operand:SI 4 "s_register_operand" "l,r")
9751 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9752 (const_int 0)))
9753 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9754 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9755 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9756 "TARGET_32BIT"
9757 "#"
9758 "TARGET_32BIT && reload_completed"
9759 [(set (match_dup 0)
9760 (compare
9761 (and:SI
9762 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9763 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9764 (const_int 0)))
9765 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9766 ""
9767 [(set_attr "conds" "set")
9768 (set_attr "enabled_for_short_it" "yes,no")
9769 (set_attr "length" "16")
9770 (set_attr "type" "multiple")]
9771 )
9772
9773 ;; If there is no dominance in the comparison, then we can still save an
9774 ;; instruction in the AND case, since we know that the second compare
9775 ;; need only zero the value if it is false (if it is true, then the value
9776 ;; is already correct).
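;; A sketch (illustrative only) of what the split below produces for
;; Rd = (a == b) && (c > d):
;;   <Rd = (a == b)>            @ via one of the compare_scc sequences above
;;   cmp     c, d
;;   movle   Rd, #0             @ zero the result when the second test fails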
9777 (define_insn_and_split "*and_scc_scc_nodom"
9778 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
9779 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9780 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9781 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9782 (match_operator:SI 6 "arm_comparison_operator"
9783 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9784 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9785 (clobber (reg:CC CC_REGNUM))]
9786 "TARGET_32BIT
9787 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9788 == CCmode)"
9789 "#"
9790 "TARGET_32BIT && reload_completed"
9791 [(parallel [(set (match_dup 0)
9792 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9793 (clobber (reg:CC CC_REGNUM))])
9794 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9795 (set (match_dup 0)
9796 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9797 (match_dup 0)
9798 (const_int 0)))]
9799 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9800 operands[4], operands[5]),
9801 CC_REGNUM);
9802 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9803 operands[5]);"
9804 [(set_attr "conds" "clob")
9805 (set_attr "length" "20")
9806 (set_attr "type" "multiple")]
9807 )
9808
9809 (define_split
9810 [(set (reg:CC_NOOV CC_REGNUM)
9811 (compare:CC_NOOV (ior:SI
9812 (and:SI (match_operand:SI 0 "s_register_operand" "")
9813 (const_int 1))
9814 (match_operator:SI 1 "arm_comparison_operator"
9815 [(match_operand:SI 2 "s_register_operand" "")
9816 (match_operand:SI 3 "arm_add_operand" "")]))
9817 (const_int 0)))
9818 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9819 "TARGET_ARM"
9820 [(set (match_dup 4)
9821 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9822 (match_dup 0)))
9823 (set (reg:CC_NOOV CC_REGNUM)
9824 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9825 (const_int 0)))]
9826 "")
9827
9828 (define_split
9829 [(set (reg:CC_NOOV CC_REGNUM)
9830 (compare:CC_NOOV (ior:SI
9831 (match_operator:SI 1 "arm_comparison_operator"
9832 [(match_operand:SI 2 "s_register_operand" "")
9833 (match_operand:SI 3 "arm_add_operand" "")])
9834 (and:SI (match_operand:SI 0 "s_register_operand" "")
9835 (const_int 1)))
9836 (const_int 0)))
9837 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9838 "TARGET_ARM"
9839 [(set (match_dup 4)
9840 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9841 (match_dup 0)))
9842 (set (reg:CC_NOOV CC_REGNUM)
9843 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9844 (const_int 0)))]
9845 "")
9846 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9847
9848 (define_insn_and_split "*negscc"
9849 [(set (match_operand:SI 0 "s_register_operand" "=r")
9850 (neg:SI (match_operator 3 "arm_comparison_operator"
9851 [(match_operand:SI 1 "s_register_operand" "r")
9852 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9853 (clobber (reg:CC CC_REGNUM))]
9854 "TARGET_ARM"
9855 "#"
9856 "&& reload_completed"
9857 [(const_int 0)]
9858 {
9859 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
9860
9861 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9862 {
9863 /* Emit mov\\t%0, %1, asr #31 */
9864 emit_insn (gen_rtx_SET (operands[0],
9865 gen_rtx_ASHIFTRT (SImode,
9866 operands[1],
9867 GEN_INT (31))));
9868 DONE;
9869 }
9870 else if (GET_CODE (operands[3]) == NE)
9871 {
9872 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
9873 if (CONST_INT_P (operands[2]))
9874 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
9875 gen_int_mode (-INTVAL (operands[2]),
9876 SImode)));
9877 else
9878 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
9879
9880 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9881 gen_rtx_NE (SImode,
9882 cc_reg,
9883 const0_rtx),
9884 gen_rtx_SET (operands[0],
9885 GEN_INT (~0))));
9886 DONE;
9887 }
9888 else
9889 {
9890 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
9891 emit_insn (gen_rtx_SET (cc_reg,
9892 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
9893 enum rtx_code rc = GET_CODE (operands[3]);
9894
9895 rc = reverse_condition (rc);
9896 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9897 gen_rtx_fmt_ee (rc,
9898 VOIDmode,
9899 cc_reg,
9900 const0_rtx),
9901 gen_rtx_SET (operands[0], const0_rtx)));
9902 rc = GET_CODE (operands[3]);
9903 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9904 gen_rtx_fmt_ee (rc,
9905 VOIDmode,
9906 cc_reg,
9907 const0_rtx),
9908 gen_rtx_SET (operands[0],
9909 GEN_INT (~0))));
9910 DONE;
9911 }
9912 FAIL;
9913 }
9914 [(set_attr "conds" "clob")
9915 (set_attr "length" "12")
9916 (set_attr "type" "multiple")]
9917 )
9918
9919 (define_insn_and_split "movcond_addsi"
9920 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
9921 (if_then_else:SI
9922 (match_operator 5 "comparison_operator"
9923 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
9924 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
9925 (const_int 0)])
9926 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
9927 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
9928 (clobber (reg:CC CC_REGNUM))]
9929 "TARGET_32BIT"
9930 "#"
9931 "&& reload_completed"
9932 [(set (reg:CC_NOOV CC_REGNUM)
9933 (compare:CC_NOOV
9934 (plus:SI (match_dup 3)
9935 (match_dup 4))
9936 (const_int 0)))
9937 (set (match_dup 0) (match_dup 1))
9938 (cond_exec (match_dup 6)
9939 (set (match_dup 0) (match_dup 2)))]
9940 "
9941 {
9942 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
9943 operands[3], operands[4]);
9944 enum rtx_code rc = GET_CODE (operands[5]);
9945 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
9946 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
9947 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
9948 rc = reverse_condition (rc);
9949 else
9950 std::swap (operands[1], operands[2]);
9951
9952 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
9953 }
9954 "
9955 [(set_attr "conds" "clob")
9956 (set_attr "enabled_for_short_it" "no,yes,yes")
9957 (set_attr "type" "multiple")]
9958 )
9959
9960 (define_insn "movcond"
9961 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9962 (if_then_else:SI
9963 (match_operator 5 "arm_comparison_operator"
9964 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9965 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9966 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9967 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9968 (clobber (reg:CC CC_REGNUM))]
9969 "TARGET_ARM"
9970 "*
9971 if (GET_CODE (operands[5]) == LT
9972 && (operands[4] == const0_rtx))
9973 {
9974 if (which_alternative != 1 && REG_P (operands[1]))
9975 {
9976 if (operands[2] == const0_rtx)
9977 return \"and\\t%0, %1, %3, asr #31\";
9978 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9979 }
9980 else if (which_alternative != 0 && REG_P (operands[2]))
9981 {
9982 if (operands[1] == const0_rtx)
9983 return \"bic\\t%0, %2, %3, asr #31\";
9984 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9985 }
9986 /* The only case that falls through to here is when both ops 1 & 2
9987 are constants. */
9988 }
9989
9990 if (GET_CODE (operands[5]) == GE
9991 && (operands[4] == const0_rtx))
9992 {
9993 if (which_alternative != 1 && REG_P (operands[1]))
9994 {
9995 if (operands[2] == const0_rtx)
9996 return \"bic\\t%0, %1, %3, asr #31\";
9997 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9998 }
9999 else if (which_alternative != 0 && REG_P (operands[2]))
10000 {
10001 if (operands[1] == const0_rtx)
10002 return \"and\\t%0, %2, %3, asr #31\";
10003 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
10004 }
10005 /* The only case that falls through to here is when both ops 1 & 2
10006 are constants. */
10007 }
10008 if (CONST_INT_P (operands[4])
10009 && !const_ok_for_arm (INTVAL (operands[4])))
10010 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
10011 else
10012 output_asm_insn (\"cmp\\t%3, %4\", operands);
10013 if (which_alternative != 0)
10014 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
10015 if (which_alternative != 1)
10016 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
10017 return \"\";
10018 "
10019 [(set_attr "conds" "clob")
10020 (set_attr "length" "8,8,12")
10021 (set_attr "type" "multiple")]
10022 )
10023
10024 ;; ??? The patterns below need checking for Thumb-2 usefulness.
10025
10026 (define_insn "*ifcompare_plus_move"
10027 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10028 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10029 [(match_operand:SI 4 "s_register_operand" "r,r")
10030 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10031 (plus:SI
10032 (match_operand:SI 2 "s_register_operand" "r,r")
10033 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
10034 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10035 (clobber (reg:CC CC_REGNUM))]
10036 "TARGET_ARM"
10037 "#"
10038 [(set_attr "conds" "clob")
10039 (set_attr "length" "8,12")
10040 (set_attr "type" "multiple")]
10041 )
10042
10043 (define_insn "*if_plus_move"
10044 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10045 (if_then_else:SI
10046 (match_operator 4 "arm_comparison_operator"
10047 [(match_operand 5 "cc_register" "") (const_int 0)])
10048 (plus:SI
10049 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10050 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
10051 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
10052 "TARGET_ARM"
10053 "@
10054 add%d4\\t%0, %2, %3
10055 sub%d4\\t%0, %2, #%n3
10056 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
10057 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
10058 [(set_attr "conds" "use")
10059 (set_attr "length" "4,4,8,8")
10060 (set_attr_alternative "type"
10061 [(if_then_else (match_operand 3 "const_int_operand" "")
10062 (const_string "alu_imm" )
10063 (const_string "alu_sreg"))
10064 (const_string "alu_imm")
10065 (const_string "multiple")
10066 (const_string "multiple")])]
10067 )
10068
10069 (define_insn "*ifcompare_move_plus"
10070 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10071 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10072 [(match_operand:SI 4 "s_register_operand" "r,r")
10073 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10074 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10075 (plus:SI
10076 (match_operand:SI 2 "s_register_operand" "r,r")
10077 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
10078 (clobber (reg:CC CC_REGNUM))]
10079 "TARGET_ARM"
10080 "#"
10081 [(set_attr "conds" "clob")
10082 (set_attr "length" "8,12")
10083 (set_attr "type" "multiple")]
10084 )
10085
10086 (define_insn "*if_move_plus"
10087 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10088 (if_then_else:SI
10089 (match_operator 4 "arm_comparison_operator"
10090 [(match_operand 5 "cc_register" "") (const_int 0)])
10091 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
10092 (plus:SI
10093 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10094 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
10095 "TARGET_ARM"
10096 "@
10097 add%D4\\t%0, %2, %3
10098 sub%D4\\t%0, %2, #%n3
10099 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
10100 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
10101 [(set_attr "conds" "use")
10102 (set_attr "length" "4,4,8,8")
10103 (set_attr_alternative "type"
10104 [(if_then_else (match_operand 3 "const_int_operand" "")
10105 (const_string "alu_imm" )
10106 (const_string "alu_sreg"))
10107 (const_string "alu_imm")
10108 (const_string "multiple")
10109 (const_string "multiple")])]
10110 )
10111
10112 (define_insn "*ifcompare_arith_arith"
10113 [(set (match_operand:SI 0 "s_register_operand" "=r")
10114 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
10115 [(match_operand:SI 5 "s_register_operand" "r")
10116 (match_operand:SI 6 "arm_add_operand" "rIL")])
10117 (match_operator:SI 8 "shiftable_operator"
10118 [(match_operand:SI 1 "s_register_operand" "r")
10119 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10120 (match_operator:SI 7 "shiftable_operator"
10121 [(match_operand:SI 3 "s_register_operand" "r")
10122 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
10123 (clobber (reg:CC CC_REGNUM))]
10124 "TARGET_ARM"
10125 "#"
10126 [(set_attr "conds" "clob")
10127 (set_attr "length" "12")
10128 (set_attr "type" "multiple")]
10129 )
10130
10131 (define_insn "*if_arith_arith"
10132 [(set (match_operand:SI 0 "s_register_operand" "=r")
10133 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
10134 [(match_operand 8 "cc_register" "") (const_int 0)])
10135 (match_operator:SI 6 "shiftable_operator"
10136 [(match_operand:SI 1 "s_register_operand" "r")
10137 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10138 (match_operator:SI 7 "shiftable_operator"
10139 [(match_operand:SI 3 "s_register_operand" "r")
10140 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
10141 "TARGET_ARM"
10142 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
10143 [(set_attr "conds" "use")
10144 (set_attr "length" "8")
10145 (set_attr "type" "multiple")]
10146 )
10147
10148 (define_insn "*ifcompare_arith_move"
10149 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10150 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10151 [(match_operand:SI 2 "s_register_operand" "r,r")
10152 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
10153 (match_operator:SI 7 "shiftable_operator"
10154 [(match_operand:SI 4 "s_register_operand" "r,r")
10155 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
10156 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10157 (clobber (reg:CC CC_REGNUM))]
10158 "TARGET_ARM"
10159 "*
10160 /* If (op x 0) is the identity operation, the condition is LT or GE, we
10161 compare against zero and everything is in registers, then we can do this
10162 in two instructions (see the example after this pattern).  */
10163 if (operands[3] == const0_rtx
10164 && GET_CODE (operands[7]) != AND
10165 && REG_P (operands[5])
10166 && REG_P (operands[1])
10167 && REGNO (operands[1]) == REGNO (operands[4])
10168 && REGNO (operands[4]) != REGNO (operands[0]))
10169 {
10170 if (GET_CODE (operands[6]) == LT)
10171 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10172 else if (GET_CODE (operands[6]) == GE)
10173 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10174 }
10175 if (CONST_INT_P (operands[3])
10176 && !const_ok_for_arm (INTVAL (operands[3])))
10177 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
10178 else
10179 output_asm_insn (\"cmp\\t%2, %3\", operands);
10180 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
10181 if (which_alternative != 0)
10182 return \"mov%D6\\t%0, %1\";
10183 return \"\";
10184 "
10185 [(set_attr "conds" "clob")
10186 (set_attr "length" "8,12")
10187 (set_attr "type" "multiple")]
10188 )
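;; Illustrative sketch of the two-instruction identity trick used above: for
;; r0 = (r2 < 0) ? (r4 + r5) : r4 (with operand 1 tied to r4) the output is
;;      and     r0, r5, r2, asr #31
;;      add     r0, r4, r0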
10189
10190 (define_insn "*if_arith_move"
10191 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10192 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10193 [(match_operand 6 "cc_register" "") (const_int 0)])
10194 (match_operator:SI 5 "shiftable_operator"
10195 [(match_operand:SI 2 "s_register_operand" "r,r")
10196 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10197 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
10198 "TARGET_ARM"
10199 "@
10200 %I5%d4\\t%0, %2, %3
10201 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
10202 [(set_attr "conds" "use")
10203 (set_attr "length" "4,8")
10204 (set_attr_alternative "type"
10205 [(if_then_else (match_operand 3 "const_int_operand" "")
10206 (const_string "alu_shift_imm" )
10207 (const_string "alu_shift_reg"))
10208 (const_string "multiple")])]
10209 )
10210
10211 (define_insn "*ifcompare_move_arith"
10212 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10213 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10214 [(match_operand:SI 4 "s_register_operand" "r,r")
10215 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10216 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10217 (match_operator:SI 7 "shiftable_operator"
10218 [(match_operand:SI 2 "s_register_operand" "r,r")
10219 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10220 (clobber (reg:CC CC_REGNUM))]
10221 "TARGET_ARM"
10222 "*
10223 /* If (op x 0) is the identity operation, the condition is LT or GE, we
10224 compare against zero and everything is in registers, then we can do this
10225 in two instructions.  */
10226 if (operands[5] == const0_rtx
10227 && GET_CODE (operands[7]) != AND
10228 && REG_P (operands[3])
10229 && REG_P (operands[1])
10230 && REGNO (operands[1]) == REGNO (operands[2])
10231 && REGNO (operands[2]) != REGNO (operands[0]))
10232 {
10233 if (GET_CODE (operands[6]) == GE)
10234 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10235 else if (GET_CODE (operands[6]) == LT)
10236 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10237 }
10238
10239 if (CONST_INT_P (operands[5])
10240 && !const_ok_for_arm (INTVAL (operands[5])))
10241 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
10242 else
10243 output_asm_insn (\"cmp\\t%4, %5\", operands);
10244
10245 if (which_alternative != 0)
10246 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
10247 return \"%I7%D6\\t%0, %2, %3\";
10248 "
10249 [(set_attr "conds" "clob")
10250 (set_attr "length" "8,12")
10251 (set_attr "type" "multiple")]
10252 )
10253
10254 (define_insn "*if_move_arith"
10255 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10256 (if_then_else:SI
10257 (match_operator 4 "arm_comparison_operator"
10258 [(match_operand 6 "cc_register" "") (const_int 0)])
10259 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10260 (match_operator:SI 5 "shiftable_operator"
10261 [(match_operand:SI 2 "s_register_operand" "r,r")
10262 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
10263 "TARGET_ARM"
10264 "@
10265 %I5%D4\\t%0, %2, %3
10266 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
10267 [(set_attr "conds" "use")
10268 (set_attr "length" "4,8")
10269 (set_attr_alternative "type"
10270 [(if_then_else (match_operand 3 "const_int_operand" "")
10271 (const_string "alu_shift_imm" )
10272 (const_string "alu_shift_reg"))
10273 (const_string "multiple")])]
10274 )
10275
10276 (define_insn "*ifcompare_move_not"
10277 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10278 (if_then_else:SI
10279 (match_operator 5 "arm_comparison_operator"
10280 [(match_operand:SI 3 "s_register_operand" "r,r")
10281 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10282 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10283 (not:SI
10284 (match_operand:SI 2 "s_register_operand" "r,r"))))
10285 (clobber (reg:CC CC_REGNUM))]
10286 "TARGET_ARM"
10287 "#"
10288 [(set_attr "conds" "clob")
10289 (set_attr "length" "8,12")
10290 (set_attr "type" "multiple")]
10291 )
10292
10293 (define_insn "*if_move_not"
10294 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10295 (if_then_else:SI
10296 (match_operator 4 "arm_comparison_operator"
10297 [(match_operand 3 "cc_register" "") (const_int 0)])
10298 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10299 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10300 "TARGET_ARM"
10301 "@
10302 mvn%D4\\t%0, %2
10303 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
10304 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
10305 [(set_attr "conds" "use")
10306 (set_attr "type" "mvn_reg,multiple,multiple")
10307 (set_attr "length" "4,8,8")]
10309 )
10310
10311 (define_insn "*ifcompare_not_move"
10312 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10313 (if_then_else:SI
10314 (match_operator 5 "arm_comparison_operator"
10315 [(match_operand:SI 3 "s_register_operand" "r,r")
10316 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10317 (not:SI
10318 (match_operand:SI 2 "s_register_operand" "r,r"))
10319 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10320 (clobber (reg:CC CC_REGNUM))]
10321 "TARGET_ARM"
10322 "#"
10323 [(set_attr "conds" "clob")
10324 (set_attr "length" "8,12")
10325 (set_attr "type" "multiple")]
10326 )
10327
10328 (define_insn "*if_not_move"
10329 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10330 (if_then_else:SI
10331 (match_operator 4 "arm_comparison_operator"
10332 [(match_operand 3 "cc_register" "") (const_int 0)])
10333 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10334 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10335 "TARGET_ARM"
10336 "@
10337 mvn%d4\\t%0, %2
10338 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
10339 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
10340 [(set_attr "conds" "use")
10341 (set_attr "type" "mvn_reg,multiple,multiple")
10342 (set_attr "length" "4,8,8")]
10343 )
10344
10345 (define_insn "*ifcompare_shift_move"
10346 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10347 (if_then_else:SI
10348 (match_operator 6 "arm_comparison_operator"
10349 [(match_operand:SI 4 "s_register_operand" "r,r")
10350 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10351 (match_operator:SI 7 "shift_operator"
10352 [(match_operand:SI 2 "s_register_operand" "r,r")
10353 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
10354 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10355 (clobber (reg:CC CC_REGNUM))]
10356 "TARGET_ARM"
10357 "#"
10358 [(set_attr "conds" "clob")
10359 (set_attr "length" "8,12")
10360 (set_attr "type" "multiple")]
10361 )
10362
10363 (define_insn "*if_shift_move"
10364 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10365 (if_then_else:SI
10366 (match_operator 5 "arm_comparison_operator"
10367 [(match_operand 6 "cc_register" "") (const_int 0)])
10368 (match_operator:SI 4 "shift_operator"
10369 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10370 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
10371 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10372 "TARGET_ARM"
10373 "@
10374 mov%d5\\t%0, %2%S4
10375 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
10376 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
10377 [(set_attr "conds" "use")
10378 (set_attr "shift" "2")
10379 (set_attr "length" "4,8,8")
10380 (set_attr_alternative "type"
10381 [(if_then_else (match_operand 3 "const_int_operand" "")
10382 (const_string "mov_shift" )
10383 (const_string "mov_shift_reg"))
10384 (const_string "multiple")
10385 (const_string "multiple")])]
10386 )
10387
10388 (define_insn "*ifcompare_move_shift"
10389 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10390 (if_then_else:SI
10391 (match_operator 6 "arm_comparison_operator"
10392 [(match_operand:SI 4 "s_register_operand" "r,r")
10393 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10394 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10395 (match_operator:SI 7 "shift_operator"
10396 [(match_operand:SI 2 "s_register_operand" "r,r")
10397 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
10398 (clobber (reg:CC CC_REGNUM))]
10399 "TARGET_ARM"
10400 "#"
10401 [(set_attr "conds" "clob")
10402 (set_attr "length" "8,12")
10403 (set_attr "type" "multiple")]
10404 )
10405
10406 (define_insn "*if_move_shift"
10407 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10408 (if_then_else:SI
10409 (match_operator 5 "arm_comparison_operator"
10410 [(match_operand 6 "cc_register" "") (const_int 0)])
10411 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10412 (match_operator:SI 4 "shift_operator"
10413 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10414 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10415 "TARGET_ARM"
10416 "@
10417 mov%D5\\t%0, %2%S4
10418 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10419 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10420 [(set_attr "conds" "use")
10421 (set_attr "shift" "2")
10422 (set_attr "length" "4,8,8")
10423 (set_attr_alternative "type"
10424 [(if_then_else (match_operand 3 "const_int_operand" "")
10425 (const_string "mov_shift" )
10426 (const_string "mov_shift_reg"))
10427 (const_string "multiple")
10428 (const_string "multiple")])]
10429 )
10430
10431 (define_insn "*ifcompare_shift_shift"
10432 [(set (match_operand:SI 0 "s_register_operand" "=r")
10433 (if_then_else:SI
10434 (match_operator 7 "arm_comparison_operator"
10435 [(match_operand:SI 5 "s_register_operand" "r")
10436 (match_operand:SI 6 "arm_add_operand" "rIL")])
10437 (match_operator:SI 8 "shift_operator"
10438 [(match_operand:SI 1 "s_register_operand" "r")
10439 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10440 (match_operator:SI 9 "shift_operator"
10441 [(match_operand:SI 3 "s_register_operand" "r")
10442 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10443 (clobber (reg:CC CC_REGNUM))]
10444 "TARGET_ARM"
10445 "#"
10446 [(set_attr "conds" "clob")
10447 (set_attr "length" "12")
10448 (set_attr "type" "multiple")]
10449 )
10450
10451 (define_insn "*if_shift_shift"
10452 [(set (match_operand:SI 0 "s_register_operand" "=r")
10453 (if_then_else:SI
10454 (match_operator 5 "arm_comparison_operator"
10455 [(match_operand 8 "cc_register" "") (const_int 0)])
10456 (match_operator:SI 6 "shift_operator"
10457 [(match_operand:SI 1 "s_register_operand" "r")
10458 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10459 (match_operator:SI 7 "shift_operator"
10460 [(match_operand:SI 3 "s_register_operand" "r")
10461 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10462 "TARGET_ARM"
10463 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10464 [(set_attr "conds" "use")
10465 (set_attr "shift" "1")
10466 (set_attr "length" "8")
10467 (set (attr "type") (if_then_else
10468 (and (match_operand 2 "const_int_operand" "")
10469 (match_operand 4 "const_int_operand" ""))
10470 (const_string "mov_shift")
10471 (const_string "mov_shift_reg")))]
10472 )
10473
10474 (define_insn "*ifcompare_not_arith"
10475 [(set (match_operand:SI 0 "s_register_operand" "=r")
10476 (if_then_else:SI
10477 (match_operator 6 "arm_comparison_operator"
10478 [(match_operand:SI 4 "s_register_operand" "r")
10479 (match_operand:SI 5 "arm_add_operand" "rIL")])
10480 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10481 (match_operator:SI 7 "shiftable_operator"
10482 [(match_operand:SI 2 "s_register_operand" "r")
10483 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10484 (clobber (reg:CC CC_REGNUM))]
10485 "TARGET_ARM"
10486 "#"
10487 [(set_attr "conds" "clob")
10488 (set_attr "length" "12")
10489 (set_attr "type" "multiple")]
10490 )
10491
10492 (define_insn "*if_not_arith"
10493 [(set (match_operand:SI 0 "s_register_operand" "=r")
10494 (if_then_else:SI
10495 (match_operator 5 "arm_comparison_operator"
10496 [(match_operand 4 "cc_register" "") (const_int 0)])
10497 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10498 (match_operator:SI 6 "shiftable_operator"
10499 [(match_operand:SI 2 "s_register_operand" "r")
10500 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10501 "TARGET_ARM"
10502 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10503 [(set_attr "conds" "use")
10504 (set_attr "type" "mvn_reg")
10505 (set_attr "length" "8")]
10506 )
10507
10508 (define_insn "*ifcompare_arith_not"
10509 [(set (match_operand:SI 0 "s_register_operand" "=r")
10510 (if_then_else:SI
10511 (match_operator 6 "arm_comparison_operator"
10512 [(match_operand:SI 4 "s_register_operand" "r")
10513 (match_operand:SI 5 "arm_add_operand" "rIL")])
10514 (match_operator:SI 7 "shiftable_operator"
10515 [(match_operand:SI 2 "s_register_operand" "r")
10516 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10517 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10518 (clobber (reg:CC CC_REGNUM))]
10519 "TARGET_ARM"
10520 "#"
10521 [(set_attr "conds" "clob")
10522 (set_attr "length" "12")
10523 (set_attr "type" "multiple")]
10524 )
10525
10526 (define_insn "*if_arith_not"
10527 [(set (match_operand:SI 0 "s_register_operand" "=r")
10528 (if_then_else:SI
10529 (match_operator 5 "arm_comparison_operator"
10530 [(match_operand 4 "cc_register" "") (const_int 0)])
10531 (match_operator:SI 6 "shiftable_operator"
10532 [(match_operand:SI 2 "s_register_operand" "r")
10533 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10534 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10535 "TARGET_ARM"
10536 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10537 [(set_attr "conds" "use")
10538 (set_attr "type" "multiple")
10539 (set_attr "length" "8")]
10540 )
10541
10542 (define_insn "*ifcompare_neg_move"
10543 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10544 (if_then_else:SI
10545 (match_operator 5 "arm_comparison_operator"
10546 [(match_operand:SI 3 "s_register_operand" "r,r")
10547 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10548 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10549 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10550 (clobber (reg:CC CC_REGNUM))]
10551 "TARGET_ARM"
10552 "#"
10553 [(set_attr "conds" "clob")
10554 (set_attr "length" "8,12")
10555 (set_attr "type" "multiple")]
10556 )
10557
10558 (define_insn_and_split "*if_neg_move"
10559 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
10560 (if_then_else:SI
10561 (match_operator 4 "arm_comparison_operator"
10562 [(match_operand 3 "cc_register" "") (const_int 0)])
10563 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
10564 (match_operand:SI 1 "s_register_operand" "0,0")))]
10565 "TARGET_32BIT"
10566 "#"
10567 "&& reload_completed"
10568 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
10569 (set (match_dup 0) (neg:SI (match_dup 2))))]
10570 ""
10571 [(set_attr "conds" "use")
10572 (set_attr "length" "4")
10573 (set_attr "arch" "t2,32")
10574 (set_attr "enabled_for_short_it" "yes,no")
10575 (set_attr "type" "logic_shift_imm")]
10576 )
10577
10578 (define_insn "*ifcompare_move_neg"
10579 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10580 (if_then_else:SI
10581 (match_operator 5 "arm_comparison_operator"
10582 [(match_operand:SI 3 "s_register_operand" "r,r")
10583 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10584 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10585 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10586 (clobber (reg:CC CC_REGNUM))]
10587 "TARGET_ARM"
10588 "#"
10589 [(set_attr "conds" "clob")
10590 (set_attr "length" "8,12")
10591 (set_attr "type" "multiple")]
10592 )
10593
10594 (define_insn_and_split "*if_move_neg"
10595 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
10596 (if_then_else:SI
10597 (match_operator 4 "arm_comparison_operator"
10598 [(match_operand 3 "cc_register" "") (const_int 0)])
10599 (match_operand:SI 1 "s_register_operand" "0,0")
10600 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
10601 "TARGET_32BIT"
10602 "#"
10603 "&& reload_completed"
10604 [(cond_exec (match_dup 5)
10605 (set (match_dup 0) (neg:SI (match_dup 2))))]
10606 {
10607 machine_mode mode = GET_MODE (operands[3]);
10608 rtx_code rc = GET_CODE (operands[4]);
10609
10610 if (mode == CCFPmode || mode == CCFPEmode)
10611 rc = reverse_condition_maybe_unordered (rc);
10612 else
10613 rc = reverse_condition (rc);
10614
10615 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
10616 }
10617 [(set_attr "conds" "use")
10618 (set_attr "length" "4")
10619 (set_attr "arch" "t2,32")
10620 (set_attr "enabled_for_short_it" "yes,no")
10621 (set_attr "type" "logic_shift_imm")]
10622 )
10623
10624 (define_insn "*arith_adjacentmem"
10625 [(set (match_operand:SI 0 "s_register_operand" "=r")
10626 (match_operator:SI 1 "shiftable_operator"
10627 [(match_operand:SI 2 "memory_operand" "m")
10628 (match_operand:SI 3 "memory_operand" "m")]))
10629 (clobber (match_scratch:SI 4 "=r"))]
10630 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10631 "*
10632 {
10633 rtx ldm[3];
10634 rtx arith[4];
10635 rtx base_reg;
10636 HOST_WIDE_INT val1 = 0, val2 = 0;
10637
10638 if (REGNO (operands[0]) > REGNO (operands[4]))
10639 {
10640 ldm[1] = operands[4];
10641 ldm[2] = operands[0];
10642 }
10643 else
10644 {
10645 ldm[1] = operands[0];
10646 ldm[2] = operands[4];
10647 }
10648
10649 base_reg = XEXP (operands[2], 0);
10650
10651 if (!REG_P (base_reg))
10652 {
10653 val1 = INTVAL (XEXP (base_reg, 1));
10654 base_reg = XEXP (base_reg, 0);
10655 }
10656
10657 if (!REG_P (XEXP (operands[3], 0)))
10658 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10659
10660 arith[0] = operands[0];
10661 arith[3] = operands[1];
10662
10663 if (val1 < val2)
10664 {
10665 arith[1] = ldm[1];
10666 arith[2] = ldm[2];
10667 }
10668 else
10669 {
10670 arith[1] = ldm[2];
10671 arith[2] = ldm[1];
10672 }
10673
10674 ldm[0] = base_reg;
10675 if (val1 != 0 && val2 != 0)
10676 {
10677 rtx ops[3];
10678
10679 if (val1 == 4 || val2 == 4)
10680 /* Other val must be 8, since we know they are adjacent and neither
10681 is zero. */
10682 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
10683 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10684 {
10685 ldm[0] = ops[0] = operands[4];
10686 ops[1] = base_reg;
10687 ops[2] = GEN_INT (val1);
10688 output_add_immediate (ops);
10689 if (val1 < val2)
10690 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10691 else
10692 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10693 }
10694 else
10695 {
10696 /* Offset is out of range for a single add, so use two ldr. */
10697 ops[0] = ldm[1];
10698 ops[1] = base_reg;
10699 ops[2] = GEN_INT (val1);
10700 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10701 ops[0] = ldm[2];
10702 ops[2] = GEN_INT (val2);
10703 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10704 }
10705 }
10706 else if (val1 != 0)
10707 {
10708 if (val1 < val2)
10709 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10710 else
10711 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10712 }
10713 else
10714 {
10715 if (val1 < val2)
10716 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10717 else
10718 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10719 }
10720 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10721 return \"\";
10722 }"
10723 [(set_attr "length" "12")
10724 (set_attr "predicable" "yes")
10725 (set_attr "type" "load_4")]
10726 )
10727
10728 ; This pattern is never tried by combine, so do it as a peephole
10729
10730 (define_peephole2
10731 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10732 (match_operand:SI 1 "arm_general_register_operand" ""))
10733 (set (reg:CC CC_REGNUM)
10734 (compare:CC (match_dup 1) (const_int 0)))]
10735 "TARGET_ARM"
10736 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10737 (set (match_dup 0) (match_dup 1))])]
10738 ""
10739 )
10740
10741 (define_split
10742 [(set (match_operand:SI 0 "s_register_operand" "")
10743 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10744 (const_int 0))
10745 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10746 [(match_operand:SI 3 "s_register_operand" "")
10747 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10748 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10749 "TARGET_ARM"
10750 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10751 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10752 (match_dup 5)))]
10753 ""
10754 )
10755
10756 ;; This split can be used because CC_Z mode implies that the following
10757 ;; branch will be an equality, or an unsigned inequality, so the sign
10758 ;; extension is not needed.
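;; For illustration (a sketch of what this split produces): a comparison like
;;   (compare:CC_Z (ashift:SI (subreg:SI (mem:QI ...) 0) (const_int 24))
;;                 (const_int 0x2a000000))
;; becomes a zero-extending load of the QImode value into the scratch register
;; followed by (compare:CC (scratch) (const_int 0x2a)); operand 1 is shifted
;; right by 24 to match.  The constant 0x2a000000 is only an example value
;; whose low 24 bits are zero, as the split condition requires.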
10759
10760 (define_split
10761 [(set (reg:CC_Z CC_REGNUM)
10762 (compare:CC_Z
10763 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10764 (const_int 24))
10765 (match_operand 1 "const_int_operand" "")))
10766 (clobber (match_scratch:SI 2 ""))]
10767 "TARGET_ARM
10768 && ((UINTVAL (operands[1]))
10769 == ((UINTVAL (operands[1])) >> 24) << 24)"
10770 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10771 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10772 "
10773 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10774 "
10775 )
10776 ;; ??? Check the patterns above for Thumb-2 usefulness
10777
10778 (define_expand "prologue"
10779 [(clobber (const_int 0))]
10780 "TARGET_EITHER"
10781 "if (TARGET_32BIT)
10782 arm_expand_prologue ();
10783 else
10784 thumb1_expand_prologue ();
10785 DONE;
10786 "
10787 )
10788
10789 (define_expand "epilogue"
10790 [(clobber (const_int 0))]
10791 "TARGET_EITHER"
10792 "
10793 if (crtl->calls_eh_return)
10794 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
10795 if (TARGET_THUMB1)
10796 {
10797 thumb1_expand_epilogue ();
10798 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10799 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10800 }
10801 else if (HAVE_return)
10802 {
10803 /* HAVE_return is testing for USE_RETURN_INSN (FALSE). Hence,
10804 no need for explicit testing again. */
10805 emit_jump_insn (gen_return ());
10806 }
10807 else if (TARGET_32BIT)
10808 {
10809 arm_expand_epilogue (true);
10810 }
10811 DONE;
10812 "
10813 )
10814
10815 ;; Note - although unspec_volatiles USE all hard registers,
10816 ;; USEs are ignored after reload has completed. Thus we need
10817 ;; to add an unspec of the link register to ensure that flow
10818 ;; does not think that it is unused by the sibcall branch that
10819 ;; will replace the standard function epilogue.
10820 (define_expand "sibcall_epilogue"
10821 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
10822 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10823 "TARGET_32BIT"
10824 "
10825 arm_expand_epilogue (false);
10826 DONE;
10827 "
10828 )
10829
10830 (define_expand "eh_epilogue"
10831 [(use (match_operand:SI 0 "register_operand"))
10832 (use (match_operand:SI 1 "register_operand"))
10833 (use (match_operand:SI 2 "register_operand"))]
10834 "TARGET_EITHER"
10835 "
10836 {
10837 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10838 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
10839 {
10840 rtx ra = gen_rtx_REG (Pmode, 2);
10841
10842 emit_move_insn (ra, operands[2]);
10843 operands[2] = ra;
10844 }
10845 /* This is a hack -- we may have crystallized the function type too
10846 early. */
10847 cfun->machine->func_type = 0;
10848 }"
10849 )
10850
10851 ;; This split is only used during output to reduce the number of patterns
10852 ;; that need assembler instructions added to them. We allowed the setting
10853 ;; of the conditions to be implicit during rtl generation so that
10854 ;; the conditional compare patterns would work. However, this conflicts to
10855 ;; some extent with the conditional data operations, so we have to split them
10856 ;; up again here.
10857
10858 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10859 ;; conditional execution sufficient?
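;; As a sketch of what the first splitter below does: an insn of the form
;;   (set (reg X) (if_then_else (LT (reg A) (reg B)) (reg X) (reg C)))
;; with a clobber of CC_REGNUM is rewritten after reload as an explicit
;; compare that sets CC_REGNUM followed by a conditional move, roughly
;;   (set (reg:CC CC_REGNUM) (compare:CC (reg A) (reg B)))
;;   (cond_exec (GE (reg:CC CC_REGNUM) (const_int 0))
;;              (set (reg X) (reg C)))
;; The condition is reversed (LT -> GE) because the "then" arm is already the
;; destination; the register names X, A, B and C are placeholders.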
10860
10861 (define_split
10862 [(set (match_operand:SI 0 "s_register_operand" "")
10863 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10864 [(match_operand 2 "" "") (match_operand 3 "" "")])
10865 (match_dup 0)
10866 (match_operand 4 "" "")))
10867 (clobber (reg:CC CC_REGNUM))]
10868 "TARGET_ARM && reload_completed"
10869 [(set (match_dup 5) (match_dup 6))
10870 (cond_exec (match_dup 7)
10871 (set (match_dup 0) (match_dup 4)))]
10872 "
10873 {
10874 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10875 operands[2], operands[3]);
10876 enum rtx_code rc = GET_CODE (operands[1]);
10877
10878 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10879 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10880 if (mode == CCFPmode || mode == CCFPEmode)
10881 rc = reverse_condition_maybe_unordered (rc);
10882 else
10883 rc = reverse_condition (rc);
10884
10885 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10886 }"
10887 )
10888
10889 (define_split
10890 [(set (match_operand:SI 0 "s_register_operand" "")
10891 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10892 [(match_operand 2 "" "") (match_operand 3 "" "")])
10893 (match_operand 4 "" "")
10894 (match_dup 0)))
10895 (clobber (reg:CC CC_REGNUM))]
10896 "TARGET_ARM && reload_completed"
10897 [(set (match_dup 5) (match_dup 6))
10898 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10899 (set (match_dup 0) (match_dup 4)))]
10900 "
10901 {
10902 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10903 operands[2], operands[3]);
10904
10905 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10906 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10907 }"
10908 )
10909
10910 (define_split
10911 [(set (match_operand:SI 0 "s_register_operand" "")
10912 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10913 [(match_operand 2 "" "") (match_operand 3 "" "")])
10914 (match_operand 4 "" "")
10915 (match_operand 5 "" "")))
10916 (clobber (reg:CC CC_REGNUM))]
10917 "TARGET_ARM && reload_completed"
10918 [(set (match_dup 6) (match_dup 7))
10919 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10920 (set (match_dup 0) (match_dup 4)))
10921 (cond_exec (match_dup 8)
10922 (set (match_dup 0) (match_dup 5)))]
10923 "
10924 {
10925 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10926 operands[2], operands[3]);
10927 enum rtx_code rc = GET_CODE (operands[1]);
10928
10929 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10930 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10931 if (mode == CCFPmode || mode == CCFPEmode)
10932 rc = reverse_condition_maybe_unordered (rc);
10933 else
10934 rc = reverse_condition (rc);
10935
10936 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10937 }"
10938 )
10939
10940 (define_split
10941 [(set (match_operand:SI 0 "s_register_operand" "")
10942 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10943 [(match_operand:SI 2 "s_register_operand" "")
10944 (match_operand:SI 3 "arm_add_operand" "")])
10945 (match_operand:SI 4 "arm_rhs_operand" "")
10946 (not:SI
10947 (match_operand:SI 5 "s_register_operand" ""))))
10948 (clobber (reg:CC CC_REGNUM))]
10949 "TARGET_ARM && reload_completed"
10950 [(set (match_dup 6) (match_dup 7))
10951 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10952 (set (match_dup 0) (match_dup 4)))
10953 (cond_exec (match_dup 8)
10954 (set (match_dup 0) (not:SI (match_dup 5))))]
10955 "
10956 {
10957 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10958 operands[2], operands[3]);
10959 enum rtx_code rc = GET_CODE (operands[1]);
10960
10961 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10962 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10963 if (mode == CCFPmode || mode == CCFPEmode)
10964 rc = reverse_condition_maybe_unordered (rc);
10965 else
10966 rc = reverse_condition (rc);
10967
10968 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10969 }"
10970 )
10971
10972 (define_insn "*cond_move_not"
10973 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10974 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10975 [(match_operand 3 "cc_register" "") (const_int 0)])
10976 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10977 (not:SI
10978 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10979 "TARGET_ARM"
10980 "@
10981 mvn%D4\\t%0, %2
10982 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10983 [(set_attr "conds" "use")
10984 (set_attr "type" "mvn_reg,multiple")
10985 (set_attr "length" "4,8")]
10986 )
10987
10988 ;; The next two patterns occur when an AND operation is followed by a
10989 ;; scc insn sequence.
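;; For example (an illustrative sketch), C source along the lines of
;; "r = (x & 4) ? -1 : 0" can match the first pattern and be emitted as
;;   ands    r0, r1, #4
;;   mvnne   r0, #0
;; i.e. a one-bit sign_extract yields 0 or -1 depending on the selected bit;
;; the register numbers here are placeholders.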
10990
10991 (define_insn "*sign_extract_onebit"
10992 [(set (match_operand:SI 0 "s_register_operand" "=r")
10993 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10994 (const_int 1)
10995 (match_operand:SI 2 "const_int_operand" "n")))
10996 (clobber (reg:CC CC_REGNUM))]
10997 "TARGET_ARM"
10998 "*
10999 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11000 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
11001 return \"mvnne\\t%0, #0\";
11002 "
11003 [(set_attr "conds" "clob")
11004 (set_attr "length" "8")
11005 (set_attr "type" "multiple")]
11006 )
11007
11008 (define_insn "*not_signextract_onebit"
11009 [(set (match_operand:SI 0 "s_register_operand" "=r")
11010 (not:SI
11011 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11012 (const_int 1)
11013 (match_operand:SI 2 "const_int_operand" "n"))))
11014 (clobber (reg:CC CC_REGNUM))]
11015 "TARGET_ARM"
11016 "*
11017 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11018 output_asm_insn (\"tst\\t%1, %2\", operands);
11019 output_asm_insn (\"mvneq\\t%0, #0\", operands);
11020 return \"movne\\t%0, #0\";
11021 "
11022 [(set_attr "conds" "clob")
11023 (set_attr "length" "12")
11024 (set_attr "type" "multiple")]
11025 )
11026 ;; ??? The above patterns need auditing for Thumb-2
11027
11028 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
11029 ;; expressions. For simplicity, the first register is also in the unspec
11030 ;; part.
11031 ;; To avoid the use of a GNU extension, the length attribute is computed
11032 ;; by the C function arm_attr_length_push_multi.
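;; For illustration (a sketch based on the output code below): a single
;; register saved in ARM state is emitted as "str r4, [sp, #-4]!", while
;; several registers become e.g. "push {r4, r5, lr}"; the register names are
;; placeholders.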
11033 (define_insn "*push_multi"
11034 [(match_parallel 2 "multi_register_push"
11035 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
11036 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
11037 UNSPEC_PUSH_MULT))])]
11038 ""
11039 "*
11040 {
11041 int num_saves = XVECLEN (operands[2], 0);
11042
11043 /* For the StrongARM at least it is faster to
11044 use STR to store only a single register.
11045 In Thumb mode always use push, and the assembler will pick
11046 something appropriate. */
11047 if (num_saves == 1 && TARGET_ARM)
11048 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
11049 else
11050 {
11051 int i;
11052 char pattern[100];
11053
11054 if (TARGET_32BIT)
11055 strcpy (pattern, \"push%?\\t{%1\");
11056 else
11057 strcpy (pattern, \"push\\t{%1\");
11058
11059 for (i = 1; i < num_saves; i++)
11060 {
11061 strcat (pattern, \", %|\");
11062 strcat (pattern,
11063 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
11064 }
11065
11066 strcat (pattern, \"}\");
11067 output_asm_insn (pattern, operands);
11068 }
11069
11070 return \"\";
11071 }"
11072 [(set_attr "type" "store_16")
11073 (set (attr "length")
11074 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
11075 )
11076
11077 (define_insn "stack_tie"
11078 [(set (mem:BLK (scratch))
11079 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
11080 (match_operand:SI 1 "s_register_operand" "rk")]
11081 UNSPEC_PRLG_STK))]
11082 ""
11083 ""
11084 [(set_attr "length" "0")
11085 (set_attr "type" "block")]
11086 )
11087
11088 ;; Pop (as used in epilogue RTL)
11089 ;;
11090 (define_insn "*load_multiple_with_writeback"
11091 [(match_parallel 0 "load_multiple_operation"
11092 [(set (match_operand:SI 1 "s_register_operand" "+rk")
11093 (plus:SI (match_dup 1)
11094 (match_operand:SI 2 "const_int_I_operand" "I")))
11095 (set (match_operand:SI 3 "s_register_operand" "=rk")
11096 (mem:SI (match_dup 1)))
11097 ])]
11098 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11099 "*
11100 {
11101 arm_output_multireg_pop (operands, /*return_pc=*/false,
11102 /*cond=*/const_true_rtx,
11103 /*reverse=*/false,
11104 /*update=*/true);
11105 return \"\";
11106 }
11107 "
11108 [(set_attr "type" "load_16")
11109 (set_attr "predicable" "yes")
11110 (set (attr "length")
11111 (symbol_ref "arm_attr_length_pop_multi (operands,
11112 /*return_pc=*/false,
11113 /*write_back_p=*/true)"))]
11114 )
11115
11116 ;; Pop with return (as used in epilogue RTL)
11117 ;;
11118 ;; This instruction is generated when registers are popped at the end of the
11119 ;; epilogue. Instead of popping the value into LR and then generating a jump
11120 ;; to LR, the value is popped directly into PC. Hence, the pattern is combined
11121 ;; with (return).
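;; For example (an illustrative sketch), instead of
;;   pop     {r4, r5, lr}
;;   bx      lr
;; the epilogue emits
;;   pop     {r4, r5, pc}
;; where the register list is just a placeholder.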
11122 (define_insn "*pop_multiple_with_writeback_and_return"
11123 [(match_parallel 0 "pop_multiple_return"
11124 [(return)
11125 (set (match_operand:SI 1 "s_register_operand" "+rk")
11126 (plus:SI (match_dup 1)
11127 (match_operand:SI 2 "const_int_I_operand" "I")))
11128 (set (match_operand:SI 3 "s_register_operand" "=rk")
11129 (mem:SI (match_dup 1)))
11130 ])]
11131 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11132 "*
11133 {
11134 arm_output_multireg_pop (operands, /*return_pc=*/true,
11135 /*cond=*/const_true_rtx,
11136 /*reverse=*/false,
11137 /*update=*/true);
11138 return \"\";
11139 }
11140 "
11141 [(set_attr "type" "load_16")
11142 (set_attr "predicable" "yes")
11143 (set (attr "length")
11144 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11145 /*write_back_p=*/true)"))]
11146 )
11147
11148 (define_insn "*pop_multiple_with_return"
11149 [(match_parallel 0 "pop_multiple_return"
11150 [(return)
11151 (set (match_operand:SI 2 "s_register_operand" "=rk")
11152 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11153 ])]
11154 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11155 "*
11156 {
11157 arm_output_multireg_pop (operands, /*return_pc=*/true,
11158 /*cond=*/const_true_rtx,
11159 /*reverse=*/false,
11160 /*update=*/false);
11161 return \"\";
11162 }
11163 "
11164 [(set_attr "type" "load_16")
11165 (set_attr "predicable" "yes")
11166 (set (attr "length")
11167 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11168 /*write_back_p=*/false)"))]
11169 )
11170
11171 ;; Load into PC and return
11172 (define_insn "*ldr_with_return"
11173 [(return)
11174 (set (reg:SI PC_REGNUM)
11175 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
11176 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11177 "ldr%?\t%|pc, [%0], #4"
11178 [(set_attr "type" "load_4")
11179 (set_attr "predicable" "yes")]
11180 )
11181 ;; Pop for floating point registers (as used in epilogue RTL)
11182 (define_insn "*vfp_pop_multiple_with_writeback"
11183 [(match_parallel 0 "pop_multiple_fp"
11184 [(set (match_operand:SI 1 "s_register_operand" "+rk")
11185 (plus:SI (match_dup 1)
11186 (match_operand:SI 2 "const_int_I_operand" "I")))
11187 (set (match_operand:DF 3 "vfp_hard_register_operand" "")
11188 (mem:DF (match_dup 1)))])]
11189 "TARGET_32BIT && TARGET_HARD_FLOAT"
11190 "*
11191 {
11192 int num_regs = XVECLEN (operands[0], 0);
11193 char pattern[100];
11194 rtx op_list[2];
11195 strcpy (pattern, \"vldm\\t\");
11196 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
11197 strcat (pattern, \"!, {\");
11198 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
11199 strcat (pattern, \"%P0\");
11200 if ((num_regs - 1) > 1)
11201 {
11202 strcat (pattern, \"-%P1\");
11203 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
11204 }
11205
11206 strcat (pattern, \"}\");
11207 output_asm_insn (pattern, op_list);
11208 return \"\";
11209 }
11210 "
11211 [(set_attr "type" "load_16")
11212 (set_attr "conds" "unconditional")
11213 (set_attr "predicable" "no")]
11214 )
11215
11216 ;; Special patterns for dealing with the constant pool
11217
11218 (define_insn "align_4"
11219 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
11220 "TARGET_EITHER"
11221 "*
11222 assemble_align (32);
11223 return \"\";
11224 "
11225 [(set_attr "type" "no_insn")]
11226 )
11227
11228 (define_insn "align_8"
11229 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
11230 "TARGET_EITHER"
11231 "*
11232 assemble_align (64);
11233 return \"\";
11234 "
11235 [(set_attr "type" "no_insn")]
11236 )
11237
11238 (define_insn "consttable_end"
11239 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
11240 "TARGET_EITHER"
11241 "*
11242 making_const_table = FALSE;
11243 return \"\";
11244 "
11245 [(set_attr "type" "no_insn")]
11246 )
11247
11248 (define_insn "consttable_1"
11249 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
11250 "TARGET_EITHER"
11251 "*
11252 making_const_table = TRUE;
11253 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
11254 assemble_zeros (3);
11255 return \"\";
11256 "
11257 [(set_attr "length" "4")
11258 (set_attr "type" "no_insn")]
11259 )
11260
11261 (define_insn "consttable_2"
11262 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
11263 "TARGET_EITHER"
11264 "*
11265 {
11266 rtx x = operands[0];
11267 making_const_table = TRUE;
11268 switch (GET_MODE_CLASS (GET_MODE (x)))
11269 {
11270 case MODE_FLOAT:
11271 arm_emit_fp16_const (x);
11272 break;
11273 default:
11274 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
11275 assemble_zeros (2);
11276 break;
11277 }
11278 return \"\";
11279 }"
11280 [(set_attr "length" "4")
11281 (set_attr "type" "no_insn")]
11282 )
11283
11284 (define_insn "consttable_4"
11285 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
11286 "TARGET_EITHER"
11287 "*
11288 {
11289 rtx x = operands[0];
11290 making_const_table = TRUE;
11291 scalar_float_mode float_mode;
11292 if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
11293 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
11294 else
11295 {
11296 /* XXX: Sometimes gcc does something really dumb and ends up with
11297 a HIGH in a constant pool entry, usually because it's trying to
11298 load into a VFP register. We know this will always be used in
11299 combination with a LO_SUM which ignores the high bits, so just
11300 strip off the HIGH. */
11301 if (GET_CODE (x) == HIGH)
11302 x = XEXP (x, 0);
11303 assemble_integer (x, 4, BITS_PER_WORD, 1);
11304 mark_symbol_refs_as_used (x);
11305 }
11306 return \"\";
11307 }"
11308 [(set_attr "length" "4")
11309 (set_attr "type" "no_insn")]
11310 )
11311
11312 (define_insn "consttable_8"
11313 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
11314 "TARGET_EITHER"
11315 "*
11316 {
11317 making_const_table = TRUE;
11318 scalar_float_mode float_mode;
11319 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
11320 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
11321 float_mode, BITS_PER_WORD);
11322 else
11323 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
11324 return \"\";
11325 }"
11326 [(set_attr "length" "8")
11327 (set_attr "type" "no_insn")]
11328 )
11329
11330 (define_insn "consttable_16"
11331 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
11332 "TARGET_EITHER"
11333 "*
11334 {
11335 making_const_table = TRUE;
11336 scalar_float_mode float_mode;
11337 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
11338 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
11339 float_mode, BITS_PER_WORD);
11340 else
11341 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
11342 return \"\";
11343 }"
11344 [(set_attr "length" "16")
11345 (set_attr "type" "no_insn")]
11346 )
11347
11348 ;; V5 instructions.
11349
11350 (define_insn "clzsi2"
11351 [(set (match_operand:SI 0 "s_register_operand" "=r")
11352 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11353 "TARGET_32BIT && arm_arch5t"
11354 "clz%?\\t%0, %1"
11355 [(set_attr "predicable" "yes")
11356 (set_attr "type" "clz")])
11357
11358 (define_insn "rbitsi2"
11359 [(set (match_operand:SI 0 "s_register_operand" "=r")
11360 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
11361 "TARGET_32BIT && arm_arch_thumb2"
11362 "rbit%?\\t%0, %1"
11363 [(set_attr "predicable" "yes")
11364 (set_attr "type" "clz")])
11365
11366 ;; Keep this as a CTZ expression until after reload and then split
11367 ;; into RBIT + CLZ. Since RBIT is represented as an UNSPEC it is unlikely
11368 ;; to fold with any other expression.
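;; After the split, __builtin_ctz (x) is computed as (a sketch):
;;   rbit    r0, r0
;;   clz     r0, r0
;; since bit-reversing the operand turns a count of trailing zeros into a
;; count of leading zeros.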
11369
11370 (define_insn_and_split "ctzsi2"
11371 [(set (match_operand:SI 0 "s_register_operand" "=r")
11372 (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11373 "TARGET_32BIT && arm_arch_thumb2"
11374 "#"
11375 "&& reload_completed"
11376 [(const_int 0)]
11377 "
11378 emit_insn (gen_rbitsi2 (operands[0], operands[1]));
11379 emit_insn (gen_clzsi2 (operands[0], operands[0]));
11380 DONE;
11381 ")
11382
11383 ;; V5E instructions.
11384
11385 (define_insn "prefetch"
11386 [(prefetch (match_operand:SI 0 "address_operand" "p")
11387 (match_operand:SI 1 "" "")
11388 (match_operand:SI 2 "" ""))]
11389 "TARGET_32BIT && arm_arch5te"
11390 "pld\\t%a0"
11391 [(set_attr "type" "load_4")]
11392 )
11393
11394 ;; General predication pattern
11395
11396 (define_cond_exec
11397 [(match_operator 0 "arm_comparison_operator"
11398 [(match_operand 1 "cc_register" "")
11399 (const_int 0)])]
11400 "TARGET_32BIT
11401 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
11402 ""
11403 [(set_attr "predicated" "yes")]
11404 )
11405
11406 (define_insn "force_register_use"
11407 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
11408 ""
11409 "%@ %0 needed"
11410 [(set_attr "length" "0")
11411 (set_attr "type" "no_insn")]
11412 )
11413
11414
11415 ;; Patterns for exception handling
11416
11417 (define_expand "eh_return"
11418 [(use (match_operand 0 "general_operand"))]
11419 "TARGET_EITHER"
11420 "
11421 {
11422 if (TARGET_32BIT)
11423 emit_insn (gen_arm_eh_return (operands[0]));
11424 else
11425 emit_insn (gen_thumb_eh_return (operands[0]));
11426 DONE;
11427 }"
11428 )
11429
11430 ;; We can't expand this before we know where the link register is stored.
11431 (define_insn_and_split "arm_eh_return"
11432 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
11433 VUNSPEC_EH_RETURN)
11434 (clobber (match_scratch:SI 1 "=&r"))]
11435 "TARGET_ARM"
11436 "#"
11437 "&& reload_completed"
11438 [(const_int 0)]
11439 "
11440 {
11441 arm_set_return_address (operands[0], operands[1]);
11442 DONE;
11443 }"
11444 )
11445
11446 \f
11447 ;; TLS support
11448
11449 (define_insn "load_tp_hard"
11450 [(set (match_operand:SI 0 "register_operand" "=r")
11451 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
11452 "TARGET_HARD_TP"
11453 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
11454 [(set_attr "predicable" "yes")
11455 (set_attr "type" "mrs")]
11456 )
11457
11458 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11459 (define_insn "load_tp_soft_fdpic"
11460 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11461 (clobber (reg:SI FDPIC_REGNUM))
11462 (clobber (reg:SI LR_REGNUM))
11463 (clobber (reg:SI IP_REGNUM))
11464 (clobber (reg:CC CC_REGNUM))]
11465 "TARGET_SOFT_TP && TARGET_FDPIC"
11466 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11467 [(set_attr "conds" "clob")
11468 (set_attr "type" "branch")]
11469 )
11470
11471 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11472 (define_insn "load_tp_soft"
11473 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11474 (clobber (reg:SI LR_REGNUM))
11475 (clobber (reg:SI IP_REGNUM))
11476 (clobber (reg:CC CC_REGNUM))]
11477 "TARGET_SOFT_TP && !TARGET_FDPIC"
11478 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11479 [(set_attr "conds" "clob")
11480 (set_attr "type" "branch")]
11481 )
11482
11483 ;; tls descriptor call
11484 (define_insn "tlscall"
11485 [(set (reg:SI R0_REGNUM)
11486 (unspec:SI [(reg:SI R0_REGNUM)
11487 (match_operand:SI 0 "" "X")
11488 (match_operand 1 "" "")] UNSPEC_TLS))
11489 (clobber (reg:SI R1_REGNUM))
11490 (clobber (reg:SI LR_REGNUM))
11491 (clobber (reg:SI CC_REGNUM))]
11492 "TARGET_GNU2_TLS"
11493 {
11494 targetm.asm_out.internal_label (asm_out_file, "LPIC",
11495 INTVAL (operands[1]));
11496 return "bl\\t%c0(tlscall)";
11497 }
11498 [(set_attr "conds" "clob")
11499 (set_attr "length" "4")
11500 (set_attr "type" "branch")]
11501 )
11502
11503 ;; For thread pointer builtin
11504 (define_expand "get_thread_pointersi"
11505 [(match_operand:SI 0 "s_register_operand")]
11506 ""
11507 "
11508 {
11509 arm_load_tp (operands[0]);
11510 DONE;
11511 }")
11512
11513 ;;
11514
11515 ;; We only care about the lower 16 bits of the constant
11516 ;; being inserted into the upper 16 bits of the register.
11517 (define_insn "*arm_movtas_ze"
11518 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
11519 (const_int 16)
11520 (const_int 16))
11521 (match_operand:SI 1 "const_int_operand" ""))]
11522 "TARGET_HAVE_MOVT"
11523 "@
11524 movt%?\t%0, %L1
11525 movt\t%0, %L1"
11526 [(set_attr "arch" "32,v8mb")
11527 (set_attr "predicable" "yes")
11528 (set_attr "length" "4")
11529 (set_attr "type" "alu_sreg")]
11530 )
11531
11532 (define_insn "*arm_rev"
11533 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
11534 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
11535 "arm_arch6"
11536 "@
11537 rev\t%0, %1
11538 rev%?\t%0, %1
11539 rev%?\t%0, %1"
11540 [(set_attr "arch" "t1,t2,32")
11541 (set_attr "length" "2,2,4")
11542 (set_attr "predicable" "no,yes,yes")
11543 (set_attr "type" "rev")]
11544 )
11545
11546 (define_expand "arm_legacy_rev"
11547 [(set (match_operand:SI 2 "s_register_operand")
11548 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
11549 (const_int 16))
11550 (match_dup 1)))
11551 (set (match_dup 2)
11552 (lshiftrt:SI (match_dup 2)
11553 (const_int 8)))
11554 (set (match_operand:SI 3 "s_register_operand")
11555 (rotatert:SI (match_dup 1)
11556 (const_int 8)))
11557 (set (match_dup 2)
11558 (and:SI (match_dup 2)
11559 (const_int -65281)))
11560 (set (match_operand:SI 0 "s_register_operand")
11561 (xor:SI (match_dup 3)
11562 (match_dup 2)))]
11563 "TARGET_32BIT"
11564 ""
11565 )
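;; Conceptually (a sketch; combine may fold the shifts differently), the
;; expansion above computes
;;   tmp  = (x ^ ror (x, 16)) >> 8;
;;   tmp &= 0xffff00ff;
;;   result = ror (x, 8) ^ tmp;
;; which reverses the byte order of x without needing the REV instruction.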
11566
11567 ;; Reuse temporaries to keep register pressure down.
11568 (define_expand "thumb_legacy_rev"
11569 [(set (match_operand:SI 2 "s_register_operand")
11570 (ashift:SI (match_operand:SI 1 "s_register_operand")
11571 (const_int 24)))
11572 (set (match_operand:SI 3 "s_register_operand")
11573 (lshiftrt:SI (match_dup 1)
11574 (const_int 24)))
11575 (set (match_dup 3)
11576 (ior:SI (match_dup 3)
11577 (match_dup 2)))
11578 (set (match_operand:SI 4 "s_register_operand")
11579 (const_int 16))
11580 (set (match_operand:SI 5 "s_register_operand")
11581 (rotatert:SI (match_dup 1)
11582 (match_dup 4)))
11583 (set (match_dup 2)
11584 (ashift:SI (match_dup 5)
11585 (const_int 24)))
11586 (set (match_dup 5)
11587 (lshiftrt:SI (match_dup 5)
11588 (const_int 24)))
11589 (set (match_dup 5)
11590 (ior:SI (match_dup 5)
11591 (match_dup 2)))
11592 (set (match_dup 5)
11593 (rotatert:SI (match_dup 5)
11594 (match_dup 4)))
11595 (set (match_operand:SI 0 "s_register_operand")
11596 (ior:SI (match_dup 5)
11597 (match_dup 3)))]
11598 "TARGET_THUMB"
11599 ""
11600 )
11601
11602 ;; ARM-specific expansion of signed mod by power of 2
11603 ;; using conditional negate.
11604 ;; For r0 % n where n is a power of 2 produce:
11605 ;; rsbs r1, r0, #0
11606 ;; and r0, r0, #(n - 1)
11607 ;; and r1, r1, #(n - 1)
11608 ;; rsbpl r0, r1, #0
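;; For example (illustrative only), "x % 16" with x in r0 expands to the
;; sequence above with (n - 1) == 15, while "x % 2" uses the shorter
;; cmp/and/rsblt form described inside the expander below.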
11609
11610 (define_expand "modsi3"
11611 [(match_operand:SI 0 "register_operand")
11612 (match_operand:SI 1 "register_operand")
11613 (match_operand:SI 2 "const_int_operand")]
11614 "TARGET_32BIT"
11615 {
11616 HOST_WIDE_INT val = INTVAL (operands[2]);
11617
11618 if (val <= 0
11619 || exact_log2 (val) <= 0)
11620 FAIL;
11621
11622 rtx mask = GEN_INT (val - 1);
11623
11624 /* In the special case of x0 % 2 we can do the even shorter:
11625 cmp r0, #0
11626 and r0, r0, #1
11627 rsblt r0, r0, #0. */
11628
11629 if (val == 2)
11630 {
11631 rtx cc_reg = arm_gen_compare_reg (LT,
11632 operands[1], const0_rtx, NULL_RTX);
11633 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
11634 rtx masked = gen_reg_rtx (SImode);
11635
11636 emit_insn (gen_andsi3 (masked, operands[1], mask));
11637 emit_move_insn (operands[0],
11638 gen_rtx_IF_THEN_ELSE (SImode, cond,
11639 gen_rtx_NEG (SImode,
11640 masked),
11641 masked));
11642 DONE;
11643 }
11644
11645 rtx neg_op = gen_reg_rtx (SImode);
11646 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
11647 operands[1]));
11648
11649 /* Extract the condition register and mode. */
11650 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
11651 rtx cc_reg = SET_DEST (cmp);
11652 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
11653
11654 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
11655
11656 rtx masked_neg = gen_reg_rtx (SImode);
11657 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
11658
11659 /* We want a conditional negate here, but emitting COND_EXEC rtxes
11660 during expand does not always work. Do an IF_THEN_ELSE instead. */
11661 emit_move_insn (operands[0],
11662 gen_rtx_IF_THEN_ELSE (SImode, cond,
11663 gen_rtx_NEG (SImode, masked_neg),
11664 operands[0]));
11665
11666
11667 DONE;
11668 }
11669 )
11670
11671 (define_expand "bswapsi2"
11672 [(set (match_operand:SI 0 "s_register_operand")
11673 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
11674 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
11675 "
11676 if (!arm_arch6)
11677 {
11678 rtx op2 = gen_reg_rtx (SImode);
11679 rtx op3 = gen_reg_rtx (SImode);
11680
11681 if (TARGET_THUMB)
11682 {
11683 rtx op4 = gen_reg_rtx (SImode);
11684 rtx op5 = gen_reg_rtx (SImode);
11685
11686 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
11687 op2, op3, op4, op5));
11688 }
11689 else
11690 {
11691 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
11692 op2, op3));
11693 }
11694
11695 DONE;
11696 }
11697 "
11698 )
11699
11700 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
11701 ;; and unsigned variants, respectively. For rev16, expose
11702 ;; byte-swapping in the lower 16 bits only.
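;; As a rough illustration: for the halfword value 0x1234, revsh produces the
;; sign-extended SImode value 0x3412, while rev16 swaps the bytes within each
;; halfword of the register (e.g. 0xaabbccdd -> 0xbbaaddcc), so only the low
;; 16 bits carry the HImode bswap result here.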
11703 (define_insn "*arm_revsh"
11704 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
11705 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
11706 "arm_arch6"
11707 "@
11708 revsh\t%0, %1
11709 revsh%?\t%0, %1
11710 revsh%?\t%0, %1"
11711 [(set_attr "arch" "t1,t2,32")
11712 (set_attr "length" "2,2,4")
11713 (set_attr "type" "rev")]
11714 )
11715
11716 (define_insn "*arm_rev16"
11717 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
11718 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
11719 "arm_arch6"
11720 "@
11721 rev16\t%0, %1
11722 rev16%?\t%0, %1
11723 rev16%?\t%0, %1"
11724 [(set_attr "arch" "t1,t2,32")
11725 (set_attr "length" "2,2,4")
11726 (set_attr "type" "rev")]
11727 )
11728
11729 ;; There are no canonicalisation rules for the positions of the lshiftrt and
11730 ;; ashift operations within an IOR/AND RTX, so we need two patterns, one for
11731 ;; each valid permutation.
11732
11733 (define_insn "arm_rev16si2"
11734 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11735 (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
11736 (const_int 8))
11737 (match_operand:SI 3 "const_int_operand" "n,n,n"))
11738 (and:SI (lshiftrt:SI (match_dup 1)
11739 (const_int 8))
11740 (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
11741 "arm_arch6
11742 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11743 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11744 "rev16\\t%0, %1"
11745 [(set_attr "arch" "t1,t2,32")
11746 (set_attr "length" "2,2,4")
11747 (set_attr "type" "rev")]
11748 )
11749
11750 (define_insn "arm_rev16si2_alt"
11751 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11752 (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
11753 (const_int 8))
11754 (match_operand:SI 2 "const_int_operand" "n,n,n"))
11755 (and:SI (ashift:SI (match_dup 1)
11756 (const_int 8))
11757 (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
11758 "arm_arch6
11759 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11760 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11761 "rev16\\t%0, %1"
11762 [(set_attr "arch" "t1,t2,32")
11763 (set_attr "length" "2,2,4")
11764 (set_attr "type" "rev")]
11765 )
11766
11767 (define_expand "bswaphi2"
11768 [(set (match_operand:HI 0 "s_register_operand")
11769 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
11770 "arm_arch6"
11771 ""
11772 )
11773
11774 ;; Patterns for LDRD/STRD in Thumb2 mode
11775
11776 (define_insn "*thumb2_ldrd"
11777 [(set (match_operand:SI 0 "s_register_operand" "=r")
11778 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11779 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
11780 (set (match_operand:SI 3 "s_register_operand" "=r")
11781 (mem:SI (plus:SI (match_dup 1)
11782 (match_operand:SI 4 "const_int_operand" ""))))]
11783 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11784 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
11785 && (operands_ok_ldrd_strd (operands[0], operands[3],
11786 operands[1], INTVAL (operands[2]),
11787 false, true))"
11788 "ldrd%?\t%0, %3, [%1, %2]"
11789 [(set_attr "type" "load_8")
11790 (set_attr "predicable" "yes")])
11791
11792 (define_insn "*thumb2_ldrd_base"
11793 [(set (match_operand:SI 0 "s_register_operand" "=r")
11794 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11795 (set (match_operand:SI 2 "s_register_operand" "=r")
11796 (mem:SI (plus:SI (match_dup 1)
11797 (const_int 4))))]
11798 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11799 && (operands_ok_ldrd_strd (operands[0], operands[2],
11800 operands[1], 0, false, true))"
11801 "ldrd%?\t%0, %2, [%1]"
11802 [(set_attr "type" "load_8")
11803 (set_attr "predicable" "yes")])
11804
11805 (define_insn "*thumb2_ldrd_base_neg"
11806 [(set (match_operand:SI 0 "s_register_operand" "=r")
11807 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11808 (const_int -4))))
11809 (set (match_operand:SI 2 "s_register_operand" "=r")
11810 (mem:SI (match_dup 1)))]
11811 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11812 && (operands_ok_ldrd_strd (operands[0], operands[2],
11813 operands[1], -4, false, true))"
11814 "ldrd%?\t%0, %2, [%1, #-4]"
11815 [(set_attr "type" "load_8")
11816 (set_attr "predicable" "yes")])
11817
11818 (define_insn "*thumb2_strd"
11819 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11820 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
11821 (match_operand:SI 2 "s_register_operand" "r"))
11822 (set (mem:SI (plus:SI (match_dup 0)
11823 (match_operand:SI 3 "const_int_operand" "")))
11824 (match_operand:SI 4 "s_register_operand" "r"))]
11825 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11826 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
11827 && (operands_ok_ldrd_strd (operands[2], operands[4],
11828 operands[0], INTVAL (operands[1]),
11829 false, false))"
11830 "strd%?\t%2, %4, [%0, %1]"
11831 [(set_attr "type" "store_8")
11832 (set_attr "predicable" "yes")])
11833
11834 (define_insn "*thumb2_strd_base"
11835 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
11836 (match_operand:SI 1 "s_register_operand" "r"))
11837 (set (mem:SI (plus:SI (match_dup 0)
11838 (const_int 4)))
11839 (match_operand:SI 2 "s_register_operand" "r"))]
11840 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11841 && (operands_ok_ldrd_strd (operands[1], operands[2],
11842 operands[0], 0, false, false))"
11843 "strd%?\t%1, %2, [%0]"
11844 [(set_attr "type" "store_8")
11845 (set_attr "predicable" "yes")])
11846
11847 (define_insn "*thumb2_strd_base_neg"
11848 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11849 (const_int -4)))
11850 (match_operand:SI 1 "s_register_operand" "r"))
11851 (set (mem:SI (match_dup 0))
11852 (match_operand:SI 2 "s_register_operand" "r"))]
11853 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11854 && (operands_ok_ldrd_strd (operands[1], operands[2],
11855 operands[0], -4, false, false))"
11856 "strd%?\t%1, %2, [%0, #-4]"
11857 [(set_attr "type" "store_8")
11858 (set_attr "predicable" "yes")])
11859
11860 ;; ARMv8 CRC32 instructions.
11861 (define_insn "arm_<crc_variant>"
11862 [(set (match_operand:SI 0 "s_register_operand" "=r")
11863 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
11864 (match_operand:<crc_mode> 2 "s_register_operand" "r")]
11865 CRC))]
11866 "TARGET_CRC32"
11867 "<crc_variant>\\t%0, %1, %2"
11868 [(set_attr "type" "crc")
11869 (set_attr "conds" "unconditional")]
11870 )
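;; For reference (an assumption about typical use, not something this file
;; guarantees): the ACLE CRC intrinsics such as __crc32w and __crc32cw,
;; declared in arm_acle.h, are expected to expand to these patterns when the
;; CRC32 extension is available.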
11871
11872 ;; Load the load/store double peephole optimizations.
11873 (include "ldrdstrd.md")
11874
11875 ;; Load the load/store multiple patterns
11876 (include "ldmstm.md")
11877
11878 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
11879 ;; large lists without explicit writeback generated for the APCS_FRAME epilogue.
11880 ;; The operands are validated through the load_multiple_operation
11881 ;; match_parallel predicate rather than through constraints, so enable it only
11882 ;; after reload.
11883 (define_insn "*load_multiple"
11884 [(match_parallel 0 "load_multiple_operation"
11885 [(set (match_operand:SI 2 "s_register_operand" "=rk")
11886 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11887 ])]
11888 "TARGET_32BIT && reload_completed"
11889 "*
11890 {
11891 arm_output_multireg_pop (operands, /*return_pc=*/false,
11892 /*cond=*/const_true_rtx,
11893 /*reverse=*/false,
11894 /*update=*/false);
11895 return \"\";
11896 }
11897 "
11898 [(set_attr "predicable" "yes")]
11899 )
11900
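;; Soft-float copysign expanders.  As a sketch of the approach used below:
;; for SFmode the result starts as a copy of the sign source (operand 2) and
;; a Thumb-2 bitfield insert (insv) then overwrites bits 0-30 with operand
;; 1's magnitude, leaving operand 2's sign bit in place; the DFmode variant
;; instead extracts operand 2's sign bit (logical shift right by 31), inserts
;; it into bit 31 of operand 1's high word and copies the low word unchanged.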
11901 (define_expand "copysignsf3"
11902 [(match_operand:SF 0 "register_operand")
11903 (match_operand:SF 1 "register_operand")
11904 (match_operand:SF 2 "register_operand")]
11905 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11906 "{
11907 emit_move_insn (operands[0], operands[2]);
11908 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
11909 GEN_INT (31), GEN_INT (0),
11910 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
11911 DONE;
11912 }"
11913 )
11914
11915 (define_expand "copysigndf3"
11916 [(match_operand:DF 0 "register_operand")
11917 (match_operand:DF 1 "register_operand")
11918 (match_operand:DF 2 "register_operand")]
11919 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11920 "{
11921 rtx op0_low = gen_lowpart (SImode, operands[0]);
11922 rtx op0_high = gen_highpart (SImode, operands[0]);
11923 rtx op1_low = gen_lowpart (SImode, operands[1]);
11924 rtx op1_high = gen_highpart (SImode, operands[1]);
11925 rtx op2_high = gen_highpart (SImode, operands[2]);
11926
11927 rtx scratch1 = gen_reg_rtx (SImode);
11928 rtx scratch2 = gen_reg_rtx (SImode);
11929 emit_move_insn (scratch1, op2_high);
11930 emit_move_insn (scratch2, op1_high);
11931
11932 emit_insn(gen_rtx_SET(scratch1,
11933 gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT(31))));
11934 emit_insn(gen_insv_t2(scratch2, GEN_INT(1), GEN_INT(31), scratch1));
11935 emit_move_insn (op0_low, op1_low);
11936 emit_move_insn (op0_high, scratch2);
11937
11938 DONE;
11939 }"
11940 )
11941
11942 ;; movmisalign patterns for HImode and SImode.
11943 (define_expand "movmisalign<mode>"
11944 [(match_operand:HSI 0 "general_operand")
11945 (match_operand:HSI 1 "general_operand")]
11946 "unaligned_access"
11947 {
11948 /* This pattern is not permitted to fail during expansion: if both arguments
11949 are non-registers (e.g. memory := constant), force operand 1 into a
11950 register. */
11951 rtx (* gen_unaligned_load)(rtx, rtx);
11952 rtx tmp_dest = operands[0];
11953 if (!s_register_operand (operands[0], <MODE>mode)
11954 && !s_register_operand (operands[1], <MODE>mode))
11955 operands[1] = force_reg (<MODE>mode, operands[1]);
11956
11957 if (<MODE>mode == HImode)
11958 {
11959 gen_unaligned_load = gen_unaligned_loadhiu;
11960 tmp_dest = gen_reg_rtx (SImode);
11961 }
11962 else
11963 gen_unaligned_load = gen_unaligned_loadsi;
11964
11965 if (MEM_P (operands[1]))
11966 {
11967 emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
11968 if (<MODE>mode == HImode)
11969 emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
11970 }
11971 else
11972 emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
11973
11974 DONE;
11975 })
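
;; Illustrative example: a packed struct member is one common source of the
;; misaligned accesses handled above when unaligned_access is enabled; the
;; struct layout below is only an assumption made for the sketch.
;;
;;   #include <stdint.h>
;;
;;   struct __attribute__ ((packed)) rec { uint8_t tag; uint32_t val; };
;;
;;   /* 'val' sits at offset 1, so loads and stores of it are expected to go
;;      through movmisalignsi (unaligned LDR/STR) rather than a normal move.  */
;;   uint32_t get_val (const struct rec *r)       { return r->val; }
;;   void     set_val (struct rec *r, uint32_t v) { r->val = v; }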
11976
11977 (define_insn "arm_<cdp>"
11978 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11979 (match_operand:SI 1 "immediate_operand" "n")
11980 (match_operand:SI 2 "immediate_operand" "n")
11981 (match_operand:SI 3 "immediate_operand" "n")
11982 (match_operand:SI 4 "immediate_operand" "n")
11983 (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
11984 "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
11985 {
11986 arm_const_bounds (operands[0], 0, 16);
11987 arm_const_bounds (operands[1], 0, 16);
11988 arm_const_bounds (operands[2], 0, (1 << 5));
11989 arm_const_bounds (operands[3], 0, (1 << 5));
11990 arm_const_bounds (operands[4], 0, (1 << 5));
11991 arm_const_bounds (operands[5], 0, 8);
11992 return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
11993 }
11994 [(set_attr "length" "4")
11995 (set_attr "type" "coproc")])
11996
11997 (define_insn "*ldc"
11998 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11999 (match_operand:SI 1 "immediate_operand" "n")
12000 (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
12001 "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
12002 {
12003 arm_const_bounds (operands[0], 0, 16);
12004 arm_const_bounds (operands[1], 0, (1 << 5));
12005 return "<ldc>\\tp%c0, CR%c1, %2";
12006 }
12007 [(set_attr "length" "4")
12008 (set_attr "type" "coproc")])
12009
12010 (define_insn "*stc"
12011 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12012 (match_operand:SI 1 "immediate_operand" "n")
12013 (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
12014 "arm_coproc_builtin_available (VUNSPEC_<STC>)"
12015 {
12016 arm_const_bounds (operands[0], 0, 16);
12017 arm_const_bounds (operands[1], 0, (1 << 5));
12018 return "<stc>\\tp%c0, CR%c1, %2";
12019 }
12020 [(set_attr "length" "4")
12021 (set_attr "type" "coproc")])
12022
12023 (define_expand "arm_<ldc>"
12024 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
12025 (match_operand:SI 1 "immediate_operand")
12026 (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
12027 "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
12028
12029 (define_expand "arm_<stc>"
12030 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
12031 (match_operand:SI 1 "immediate_operand")
12032 (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
12033 "arm_coproc_builtin_available (VUNSPEC_<STC>)")
12034
12035 (define_insn "arm_<mcr>"
12036 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12037 (match_operand:SI 1 "immediate_operand" "n")
12038 (match_operand:SI 2 "s_register_operand" "r")
12039 (match_operand:SI 3 "immediate_operand" "n")
12040 (match_operand:SI 4 "immediate_operand" "n")
12041 (match_operand:SI 5 "immediate_operand" "n")] MCRI)
12042 (use (match_dup 2))]
12043 "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
12044 {
12045 arm_const_bounds (operands[0], 0, 16);
12046 arm_const_bounds (operands[1], 0, 8);
12047 arm_const_bounds (operands[3], 0, (1 << 5));
12048 arm_const_bounds (operands[4], 0, (1 << 5));
12049 arm_const_bounds (operands[5], 0, 8);
12050 return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
12051 }
12052 [(set_attr "length" "4")
12053 (set_attr "type" "coproc")])
12054
12055 (define_insn "arm_<mrc>"
12056 [(set (match_operand:SI 0 "s_register_operand" "=r")
12057 (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
12058 (match_operand:SI 2 "immediate_operand" "n")
12059 (match_operand:SI 3 "immediate_operand" "n")
12060 (match_operand:SI 4 "immediate_operand" "n")
12061 (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
12062 "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
12063 {
12064 arm_const_bounds (operands[1], 0, 16);
12065 arm_const_bounds (operands[2], 0, 8);
12066 arm_const_bounds (operands[3], 0, (1 << 5));
12067 arm_const_bounds (operands[4], 0, (1 << 5));
12068 arm_const_bounds (operands[5], 0, 8);
12069 return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
12070 }
12071 [(set_attr "length" "4")
12072 (set_attr "type" "coproc")])
12073
12074 (define_insn "arm_<mcrr>"
12075 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12076 (match_operand:SI 1 "immediate_operand" "n")
12077 (match_operand:DI 2 "s_register_operand" "r")
12078 (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
12079 (use (match_dup 2))]
12080 "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
12081 {
12082 arm_const_bounds (operands[0], 0, 16);
12083 arm_const_bounds (operands[1], 0, 8);
12084 arm_const_bounds (operands[3], 0, (1 << 5));
12085 return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
12086 }
12087 [(set_attr "length" "4")
12088 (set_attr "type" "coproc")])
12089
12090 (define_insn "arm_<mrrc>"
12091 [(set (match_operand:DI 0 "s_register_operand" "=r")
12092 (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
12093 (match_operand:SI 2 "immediate_operand" "n")
12094 (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
12095 "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
12096 {
12097 arm_const_bounds (operands[1], 0, 16);
12098 arm_const_bounds (operands[2], 0, 8);
12099 arm_const_bounds (operands[3], 0, (1 << 5));
12100 return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
12101 }
12102 [(set_attr "length" "4")
12103 (set_attr "type" "coproc")])
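
;; Illustrative example: the single-register coprocessor moves above
;; correspond to the ACLE intrinsics __arm_mrc/__arm_mcr (assumed to be
;; available via <arm_acle.h> on this target).  The CP15 encodings below
;; (TPIDRURO read, TPIDRURW write) are just concrete placeholders.
;;
;;   #include <arm_acle.h>
;;   #include <stdint.h>
;;
;;   uint32_t read_user_ro_tid (void)
;;   {
;;     /* MRC p15, 0, <Rt>, c13, c0, 3 -- matched by arm_<mrc>.  */
;;     return __arm_mrc (15, 0, 13, 0, 3);
;;   }
;;
;;   void write_user_rw_tid (uint32_t value)
;;   {
;;     /* MCR p15, 0, <Rt>, c13, c0, 2 -- matched by arm_<mcr>.  */
;;     __arm_mcr (15, 0, value, 13, 0, 2);
;;   }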
12104
12105 (define_expand "speculation_barrier"
12106 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
12107 "TARGET_EITHER"
12108 "
12109 /* For Thumb-1 (except Armv8 derivatives) and for pre-Armv7 targets we
12110 don't have a usable barrier (and probably don't need one in practice).
12111 But to be safe if such code is run on later architectures, call a
12112 helper function in libgcc that emits the appropriate barrier for the
12113 run-time system. */
12114 if (!(arm_arch7 || arm_arch8))
12115 {
12116 arm_emit_speculation_barrier_function ();
12117 DONE;
12118 }
12119 "
12120 )
12121
12122 ;; Generate a hard speculation barrier when we have not enabled speculation
12123 ;; tracking.
12124 (define_insn "*speculation_barrier_insn"
12125 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
12126 "arm_arch7 || arm_arch8"
12127 "isb\;dsb\\tsy"
12128 [(set_attr "type" "block")
12129 (set_attr "length" "8")]
12130 )
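
;; Illustrative example: this barrier is normally reached through
;; __builtin_speculation_safe_value (available from GCC 9); a sketch,
;; assuming the default expansion of that builtin on this target:
;;
;;   #include <stddef.h>
;;
;;   extern int table[256];
;;
;;   /* The builtin returns 'i' but prevents its value being used under
;;      speculation past a mispredicted bounds check.  On Armv7/Armv8 the
;;      hard barrier above can be emitted; earlier targets call into a
;;      libgcc helper instead.  */
;;   int load_checked (size_t i, size_t bound)
;;   {
;;     if (i < bound)
;;       return table[__builtin_speculation_safe_value (i)];
;;     return 0;
;;   }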
12131
12132 ;; Vector bits common to IWMMXT and Neon
12133 (include "vec-common.md")
12134 ;; Load the Intel Wireless Multimedia Extension patterns
12135 (include "iwmmxt.md")
12136 ;; Load the VFP co-processor patterns
12137 (include "vfp.md")
12138 ;; Thumb-1 patterns
12139 (include "thumb1.md")
12140 ;; Thumb-2 patterns
12141 (include "thumb2.md")
12142 ;; Neon patterns
12143 (include "neon.md")
12144 ;; Crypto patterns
12145 (include "crypto.md")
12146 ;; Synchronization Primitives
12147 (include "sync.md")
12148 ;; Fixed-point patterns
12149 (include "arm-fixed.md")