1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2019 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
6
7 ;; This file is part of GCC.
8
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
13
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
18
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
22
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
24
25 \f
26 ;;---------------------------------------------------------------------------
27 ;; Constants
28
29 ;; Register numbers -- All machine registers should be defined here
30 (define_constants
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (R4_REGNUM 4) ; Fifth CORE register
34 (FDPIC_REGNUM 9) ; FDPIC register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (LAST_ARM_REGNUM 15) ;
40 (CC_REGNUM 100) ; Condition code pseudo register
41 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
42 ]
43 )
44 ;; 3rd operand to select_dominance_cc_mode
45 (define_constants
46 [(DOM_CC_X_AND_Y 0)
47 (DOM_CC_NX_OR_Y 1)
48 (DOM_CC_X_OR_Y 2)
49 ]
50 )
51 ;; conditional compare combination
52 (define_constants
53 [(CMP_CMP 0)
54 (CMN_CMP 1)
55 (CMP_CMN 2)
56 (CMN_CMN 3)
57 (NUM_OF_COND_CMP 4)
58 ]
59 )
60
61 \f
62 ;;---------------------------------------------------------------------------
63 ;; Attributes
64
65 ;; Processor type. This is created automatically from arm-cores.def.
66 (include "arm-tune.md")
67
68 ;; Instruction classification types
69 (include "types.md")
70
71 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
72 ; generating ARM code. This is used to control the length of some insn
73 ; patterns that share the same RTL in both ARM and Thumb code.
74 (define_attr "is_thumb" "yes,no"
75 (const (if_then_else (symbol_ref "TARGET_THUMB")
76 (const_string "yes") (const_string "no"))))
77
78 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
79 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
80
81 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
82 (define_attr "is_thumb1" "yes,no"
83 (const (if_then_else (symbol_ref "TARGET_THUMB1")
84 (const_string "yes") (const_string "no"))))
85
86 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
87 ; The arm_restrict_it flag enables the "short IT" feature which
88 ; restricts IT blocks to a single 16-bit instruction.
89 ; This attribute should only be used on 16-bit Thumb-2 instructions
90 ; which may be predicated (the "predicable" attribute must be set).
91 (define_attr "predicable_short_it" "no,yes" (const_string "no"))
92
93 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
94 ; This attribute should only be used on instructions whose expansion
95 ; may emit an IT block that is not a short IT.
96 (define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
97
98 ;; Operand number of an input operand that is shifted. Zero if the
99 ;; given instruction does not shift one of its input operands.
100 (define_attr "shift" "" (const_int 0))
101
102 ;; [For compatibility with AArch64 in pipeline models]
103 ;; Attribute that specifies whether or not the instruction touches fp
104 ;; registers.
105 (define_attr "fp" "no,yes" (const_string "no"))
106
107 ; Floating Point Unit. If we only have floating point emulation, then there
108 ; is no point in scheduling the floating point insns. (Well, for best
109 ; performance we should try and group them together).
110 (define_attr "fpu" "none,vfp"
111 (const (symbol_ref "arm_fpu_attr")))
112
113 ; Predicated means that the insn form is conditionally executed based on a
114 ; predicate. We default to 'no' because no Thumb patterns match this rule
115 ; and not all ARM insns do.
116 (define_attr "predicated" "yes,no" (const_string "no"))
117
118 ; LENGTH of an instruction (in bytes)
119 (define_attr "length" ""
120 (const_int 4))
121
122 ; The architecture which supports the instruction (or alternative).
123 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
124 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
125 ; for ARM or Thumb-2 with arm_arch6, and "nov6" for ARM without
126 ; arm_arch6. "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
127 ; Baseline. This attribute is used to compute attribute "enabled";
128 ; use value "any" to enable an alternative in all cases.
129 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,v8mb,iwmmxt,iwmmxt2,armv6_or_vfpv3,neon"
130 (const_string "any"))
131
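;; As an illustrative sketch (not a pattern taken from this file): an insn
;; with two alternatives, the first valid only for Thumb-2 and the second
;; valid everywhere, would carry
;;   (set_attr "arch" "t2,any")
;; and "arch_enabled" below then disables the first alternative unless
;; TARGET_THUMB2 holds.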
132 (define_attr "arch_enabled" "no,yes"
133 (cond [(eq_attr "arch" "any")
134 (const_string "yes")
135
136 (and (eq_attr "arch" "a")
137 (match_test "TARGET_ARM"))
138 (const_string "yes")
139
140 (and (eq_attr "arch" "t")
141 (match_test "TARGET_THUMB"))
142 (const_string "yes")
143
144 (and (eq_attr "arch" "t1")
145 (match_test "TARGET_THUMB1"))
146 (const_string "yes")
147
148 (and (eq_attr "arch" "t2")
149 (match_test "TARGET_THUMB2"))
150 (const_string "yes")
151
152 (and (eq_attr "arch" "32")
153 (match_test "TARGET_32BIT"))
154 (const_string "yes")
155
156 (and (eq_attr "arch" "v6")
157 (match_test "TARGET_32BIT && arm_arch6"))
158 (const_string "yes")
159
160 (and (eq_attr "arch" "nov6")
161 (match_test "TARGET_32BIT && !arm_arch6"))
162 (const_string "yes")
163
164 (and (eq_attr "arch" "v6t2")
165 (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
166 (const_string "yes")
167
168 (and (eq_attr "arch" "v8mb")
169 (match_test "TARGET_THUMB1 && arm_arch8"))
170 (const_string "yes")
171
172 (and (eq_attr "arch" "iwmmxt2")
173 (match_test "TARGET_REALLY_IWMMXT2"))
174 (const_string "yes")
175
176 (and (eq_attr "arch" "armv6_or_vfpv3")
177 (match_test "arm_arch6 || TARGET_VFP3"))
178 (const_string "yes")
179
180 (and (eq_attr "arch" "neon")
181 (match_test "TARGET_NEON"))
182 (const_string "yes")
183 ]
184
185 (const_string "no")))
186
187 (define_attr "opt" "any,speed,size"
188 (const_string "any"))
189
190 (define_attr "opt_enabled" "no,yes"
191 (cond [(eq_attr "opt" "any")
192 (const_string "yes")
193
194 (and (eq_attr "opt" "speed")
195 (match_test "optimize_function_for_speed_p (cfun)"))
196 (const_string "yes")
197
198 (and (eq_attr "opt" "size")
199 (match_test "optimize_function_for_size_p (cfun)"))
200 (const_string "yes")]
201 (const_string "no")))
202
203 (define_attr "use_literal_pool" "no,yes"
204 (cond [(and (eq_attr "type" "f_loads,f_loadd")
205 (match_test "CONSTANT_P (operands[1])"))
206 (const_string "yes")]
207 (const_string "no")))
208
209 ; Enable all alternatives that are both arch_enabled and insn_enabled.
210 ; FIXME: opt_enabled has been temporarily removed until we have
211 ; an attribute that allows the use of such alternatives.
212 ; This depends on caching of speed_p, size_p on a per-alternative
213 ; basis. The problem is that the enabled attribute
214 ; cannot depend on any state that is not cached or is not constant
215 ; for a compilation unit. We probably need a generic "hot/cold"
216 ; alternative which, if implemented, could help with this. We disable this
217 ; until that is implemented and/or the improvements or
218 ; regressions from removing this attribute are double-checked.
219 ; See ashldi3_neon and <shift>di3_neon in neon.md.
220
221 (define_attr "enabled" "no,yes"
222 (cond [(and (eq_attr "predicable_short_it" "no")
223 (and (eq_attr "predicated" "yes")
224 (match_test "arm_restrict_it")))
225 (const_string "no")
226
227 (and (eq_attr "enabled_for_short_it" "no")
228 (match_test "arm_restrict_it"))
229 (const_string "no")
230
231 (eq_attr "arch_enabled" "no")
232 (const_string "no")]
233 (const_string "yes")))
234
235 ; POOL_RANGE is how far away from a constant pool entry this insn
236 ; can be placed. If the distance is zero, then this insn will never
237 ; reference the pool.
238 ; Note that for Thumb constant pools the PC value is rounded down to the
239 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
240 ; Thumb insns) should be set to <max_range> - 2.
241 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
242 ; before its address. It is set to <max_range> - (8 + <data_size>).
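;; An illustrative calculation (the numbers are examples only, not taken
;; from a particular insn): for a Thumb-2 load with a maximum offset of
;; 4096 bytes transferring 4 bytes of data, the formulas above give
;;   thumb2_pool_range     = 4096 - 2       = 4094
;;   thumb2_neg_pool_range = 4096 - (8 + 4) = 4084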
243 (define_attr "arm_pool_range" "" (const_int 0))
244 (define_attr "thumb2_pool_range" "" (const_int 0))
245 (define_attr "arm_neg_pool_range" "" (const_int 0))
246 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
247
248 (define_attr "pool_range" ""
249 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
250 (attr "arm_pool_range")))
251 (define_attr "neg_pool_range" ""
252 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
253 (attr "arm_neg_pool_range")))
254
255 ; An assembler sequence may clobber the condition codes without us knowing.
256 ; If such an insn references the pool, then we have no way of knowing how,
257 ; so use the most conservative value for pool_range.
258 (define_asm_attributes
259 [(set_attr "conds" "clob")
260 (set_attr "length" "4")
261 (set_attr "pool_range" "250")])
262
263 ; Load scheduling, set from the arm_ld_sched variable
264 ; initialized by arm_option_override()
265 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
266
267 ; condition codes: this one is used by final_prescan_insn to speed up
268 ; conditionalizing instructions. It saves having to scan the rtl to see if
269 ; it uses or alters the condition codes.
270 ;
271 ; USE means that the condition codes are used by the insn in the process of
272 ; outputting code; this means (at present) that we can't use the insn in
273 ; inlined branches.
274 ;
275 ; SET means that the purpose of the insn is to set the condition codes in a
276 ; well defined manner.
277 ;
278 ; CLOB means that the condition codes are altered in an undefined manner, if
279 ; they are altered at all
280 ;
281 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
282 ; that the instruction does not use or alter the condition codes.
283 ;
284 ; NOCOND means that the instruction does not use or alter the condition
285 ; codes but can be converted into a conditionally executed instruction.
286
287 (define_attr "conds" "use,set,clob,unconditional,nocond"
288 (if_then_else
289 (ior (eq_attr "is_thumb1" "yes")
290 (eq_attr "type" "call"))
291 (const_string "clob")
292 (if_then_else (eq_attr "is_neon_type" "no")
293 (const_string "nocond")
294 (const_string "unconditional"))))
295
296 ; Predicable means that the insn can be conditionally executed based on
297 ; an automatically added predicate (additional patterns are generated by
298 ; gen...). We default to 'no' because no Thumb patterns match this rule
299 ; and not all ARM patterns do.
300 (define_attr "predicable" "no,yes" (const_string "no"))
301
302 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
303 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
304 ; suffer blockages enough to warrant modelling this (and it can adversely
305 ; affect the schedule).
306 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
307
308 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
309 ; to stall the processor. Used with model_wbuf above.
310 (define_attr "write_conflict" "no,yes"
311 (if_then_else (eq_attr "type"
312 "block,call,load_4")
313 (const_string "yes")
314 (const_string "no")))
315
316 ; Classify the insns into those that take one cycle and those that take more
317 ; than one on the main cpu execution unit.
318 (define_attr "core_cycles" "single,multi"
319 (if_then_else (eq_attr "type"
320 "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
321 alu_shift_imm, alu_shift_reg, alu_dsp_reg, alus_ext, alus_imm, alus_sreg,\
322 alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
323 logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
324 logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
325 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
326 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
327 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
328 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
329 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
330 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
331 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
332 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
333 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
334 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
335 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
336 (const_string "single")
337 (const_string "multi")))
338
339 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
340 ;; distant label. Only applicable to Thumb code.
341 (define_attr "far_jump" "yes,no" (const_string "no"))
342
343
344 ;; The number of machine instructions this pattern expands to.
345 ;; Used for Thumb-2 conditional execution.
346 (define_attr "ce_count" "" (const_int 1))
347
348 ;;---------------------------------------------------------------------------
349 ;; Unspecs
350
351 (include "unspecs.md")
352
353 ;;---------------------------------------------------------------------------
354 ;; Mode iterators
355
356 (include "iterators.md")
357
358 ;;---------------------------------------------------------------------------
359 ;; Predicates
360
361 (include "predicates.md")
362 (include "constraints.md")
363
364 ;;---------------------------------------------------------------------------
365 ;; Pipeline descriptions
366
367 (define_attr "tune_cortexr4" "yes,no"
368 (const (if_then_else
369 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
370 (const_string "yes")
371 (const_string "no"))))
372
373 ;; True if the generic scheduling description should be used.
374
375 (define_attr "generic_sched" "yes,no"
376 (const (if_then_else
377 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
378 arm926ejs,arm10e,arm1026ejs,arm1136js,\
379 arm1136jfs,cortexa5,cortexa7,cortexa8,\
380 cortexa9,cortexa12,cortexa15,cortexa17,\
381 cortexa53,cortexa57,cortexm4,cortexm7,\
382 exynosm1,marvell_pj4,xgene1")
383 (eq_attr "tune_cortexr4" "yes"))
384 (const_string "no")
385 (const_string "yes"))))
386
387 (define_attr "generic_vfp" "yes,no"
388 (const (if_then_else
389 (and (eq_attr "fpu" "vfp")
390 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
391 cortexa8,cortexa9,cortexa53,cortexm4,\
392 cortexm7,marvell_pj4,xgene1")
393 (eq_attr "tune_cortexr4" "no"))
394 (const_string "yes")
395 (const_string "no"))))
396
397 (include "marvell-f-iwmmxt.md")
398 (include "arm-generic.md")
399 (include "arm926ejs.md")
400 (include "arm1020e.md")
401 (include "arm1026ejs.md")
402 (include "arm1136jfs.md")
403 (include "fa526.md")
404 (include "fa606te.md")
405 (include "fa626te.md")
406 (include "fmp626.md")
407 (include "fa726te.md")
408 (include "cortex-a5.md")
409 (include "cortex-a7.md")
410 (include "cortex-a8.md")
411 (include "cortex-a9.md")
412 (include "cortex-a15.md")
413 (include "cortex-a17.md")
414 (include "cortex-a53.md")
415 (include "cortex-a57.md")
416 (include "cortex-r4.md")
417 (include "cortex-r4f.md")
418 (include "cortex-m7.md")
419 (include "cortex-m4.md")
420 (include "cortex-m4-fpu.md")
421 (include "exynos-m1.md")
422 (include "vfp11.md")
423 (include "marvell-pj4.md")
424 (include "xgene1.md")
425
426 \f
427 ;;---------------------------------------------------------------------------
428 ;; Insn patterns
429 ;;
430 ;; Addition insns.
431
432 ;; Note: For DImode insns, there is normally no reason why operands should
433 ;; not be in the same register; what we don't want is for something being
434 ;; written to partially overlap something that is an input.
435
436 (define_expand "adddi3"
437 [(parallel
438 [(set (match_operand:DI 0 "s_register_operand")
439 (plus:DI (match_operand:DI 1 "s_register_operand")
440 (match_operand:DI 2 "reg_or_int_operand")))
441 (clobber (reg:CC CC_REGNUM))])]
442 "TARGET_EITHER"
443 "
444 if (TARGET_THUMB1)
445 {
446 if (!REG_P (operands[2]))
447 operands[2] = force_reg (DImode, operands[2]);
448 }
449 else
450 {
451 rtx lo_result, hi_result, lo_dest, hi_dest;
452 rtx lo_op1, hi_op1, lo_op2, hi_op2;
453 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
454 &lo_op2, &hi_op2);
455 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
456 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
457
458 if (lo_op2 == const0_rtx)
459 {
460 lo_dest = lo_op1;
461 if (!arm_add_operand (hi_op2, SImode))
462 hi_op2 = force_reg (SImode, hi_op2);
463 /* Assume hi_op2 won't also be zero. */
464 emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
465 }
466 else
467 {
468 if (!arm_add_operand (lo_op2, SImode))
469 lo_op2 = force_reg (SImode, lo_op2);
470 if (!arm_not_operand (hi_op2, SImode))
471 hi_op2 = force_reg (SImode, hi_op2);
472
473 emit_insn (gen_addsi3_compareC (lo_dest, lo_op1, lo_op2));
474 if (hi_op2 == const0_rtx)
475 emit_insn (gen_add0si3_carryin_ltu (hi_dest, hi_op1));
476 else
477 emit_insn (gen_addsi3_carryin_ltu (hi_dest, hi_op1, hi_op2));
478 }
479
480 if (lo_result != lo_dest)
481 emit_move_insn (lo_result, lo_dest);
482 if (hi_result != hi_dest)
483 emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
484 DONE;
485 }
486 "
487 )
488
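;; A sketch of what the non-Thumb-1 path above generates for the usual
;; all-register case (derived from the output templates of the patterns it
;; calls):
;;   adds  lo_dest, lo_op1, lo_op2   @ addsi3_compareC sets the carry flag
;;   adc   hi_dest, hi_op1, hi_op2   @ addsi3_carryin_<optab> consumes it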
489 (define_expand "addv<mode>4"
490 [(match_operand:SIDI 0 "register_operand")
491 (match_operand:SIDI 1 "register_operand")
492 (match_operand:SIDI 2 "register_operand")
493 (match_operand 3 "")]
494 "TARGET_32BIT"
495 {
496 emit_insn (gen_add<mode>3_compareV (operands[0], operands[1], operands[2]));
497 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
498
499 DONE;
500 })
501
502 (define_expand "uaddv<mode>4"
503 [(match_operand:SIDI 0 "register_operand")
504 (match_operand:SIDI 1 "register_operand")
505 (match_operand:SIDI 2 "register_operand")
506 (match_operand 3 "")]
507 "TARGET_32BIT"
508 {
509 emit_insn (gen_add<mode>3_compareC (operands[0], operands[1], operands[2]));
510 arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
511
512 DONE;
513 })
514
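;; A rough sketch of what addv<mode>4 / uaddv<mode>4 above produce for
;; SImode register operands (assuming the usual flag-to-branch mappings:
;; V flag for signed overflow, carry flag for unsigned overflow):
;;   addvsi4:   adds  r0, r1, r2      uaddvsi4:  adds  r0, r1, r2
;;              bvs   <overflow>                 bcs   <overflow>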
515 (define_expand "addsi3"
516 [(set (match_operand:SI 0 "s_register_operand")
517 (plus:SI (match_operand:SI 1 "s_register_operand")
518 (match_operand:SI 2 "reg_or_int_operand")))]
519 "TARGET_EITHER"
520 "
521 if (TARGET_32BIT && CONST_INT_P (operands[2]))
522 {
523 arm_split_constant (PLUS, SImode, NULL_RTX,
524 INTVAL (operands[2]), operands[0], operands[1],
525 optimize && can_create_pseudo_p ());
526 DONE;
527 }
528 "
529 )
530
531 ; If there is a scratch available, this will be faster than synthesizing the
532 ; addition.
533 (define_peephole2
534 [(match_scratch:SI 3 "r")
535 (set (match_operand:SI 0 "arm_general_register_operand" "")
536 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
537 (match_operand:SI 2 "const_int_operand" "")))]
538 "TARGET_32BIT &&
539 !(const_ok_for_arm (INTVAL (operands[2]))
540 || const_ok_for_arm (-INTVAL (operands[2])))
541 && const_ok_for_arm (~INTVAL (operands[2]))"
542 [(set (match_dup 3) (match_dup 2))
543 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
544 ""
545 )
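;; A hypothetical example of a constant this peephole helps with: adding
;; #0xffff00ff (neither it nor its negation is a valid immediate, but its
;; complement 0x0000ff00 is) can then be emitted as, roughly,
;;   mvn   r3, #0x0000ff00   @ scratch := 0xffff00ff
;;   add   r0, r1, r3
;; rather than a longer synthesized constant sequence.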
546
547 ;; The r/r/k alternative is required when reloading the address
548 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
549 ;; put the duplicated register first, and not try the commutative version.
550 (define_insn_and_split "*arm_addsi3"
551 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
552 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
553 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
554 "TARGET_32BIT"
555 "@
556 add%?\\t%0, %0, %2
557 add%?\\t%0, %1, %2
558 add%?\\t%0, %1, %2
559 add%?\\t%0, %1, %2
560 add%?\\t%0, %1, %2
561 add%?\\t%0, %1, %2
562 add%?\\t%0, %2, %1
563 add%?\\t%0, %1, %2
564 addw%?\\t%0, %1, %2
565 addw%?\\t%0, %1, %2
566 sub%?\\t%0, %1, #%n2
567 sub%?\\t%0, %1, #%n2
568 sub%?\\t%0, %1, #%n2
569 subw%?\\t%0, %1, #%n2
570 subw%?\\t%0, %1, #%n2
571 #"
572 "TARGET_32BIT
573 && CONST_INT_P (operands[2])
574 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
575 && (reload_completed || !arm_eliminable_register (operands[1]))"
576 [(clobber (const_int 0))]
577 "
578 arm_split_constant (PLUS, SImode, curr_insn,
579 INTVAL (operands[2]), operands[0],
580 operands[1], 0);
581 DONE;
582 "
583 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
584 (set_attr "predicable" "yes")
585 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
586 (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
587 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
588 (const_string "alu_imm")
589 (const_string "alu_sreg")))
590 ]
591 )
592
593 (define_insn "adddi3_compareV"
594 [(set (reg:CC_V CC_REGNUM)
595 (ne:CC_V
596 (plus:TI
597 (sign_extend:TI (match_operand:DI 1 "s_register_operand" "r"))
598 (sign_extend:TI (match_operand:DI 2 "s_register_operand" "r")))
599 (sign_extend:TI (plus:DI (match_dup 1) (match_dup 2)))))
600 (set (match_operand:DI 0 "s_register_operand" "=&r")
601 (plus:DI (match_dup 1) (match_dup 2)))]
602 "TARGET_32BIT"
603 "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
604 [(set_attr "conds" "set")
605 (set_attr "length" "8")
606 (set_attr "type" "multiple")]
607 )
608
609 (define_insn "addsi3_compareV"
610 [(set (reg:CC_V CC_REGNUM)
611 (ne:CC_V
612 (plus:DI
613 (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
614 (sign_extend:DI (match_operand:SI 2 "register_operand" "r")))
615 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
616 (set (match_operand:SI 0 "register_operand" "=r")
617 (plus:SI (match_dup 1) (match_dup 2)))]
618 "TARGET_32BIT"
619 "adds%?\\t%0, %1, %2"
620 [(set_attr "conds" "set")
621 (set_attr "type" "alus_sreg")]
622 )
623
624 (define_insn "adddi3_compareC"
625 [(set (reg:CC_C CC_REGNUM)
626 (compare:CC_C
627 (plus:DI
628 (match_operand:DI 1 "register_operand" "r")
629 (match_operand:DI 2 "register_operand" "r"))
630 (match_dup 1)))
631 (set (match_operand:DI 0 "register_operand" "=&r")
632 (plus:DI (match_dup 1) (match_dup 2)))]
633 "TARGET_32BIT"
634 "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
635 [(set_attr "conds" "set")
636 (set_attr "length" "8")
637 (set_attr "type" "multiple")]
638 )
639
640 (define_insn "addsi3_compareC"
641 [(set (reg:CC_C CC_REGNUM)
642 (compare:CC_C (plus:SI (match_operand:SI 1 "register_operand" "r")
643 (match_operand:SI 2 "register_operand" "r"))
644 (match_dup 1)))
645 (set (match_operand:SI 0 "register_operand" "=r")
646 (plus:SI (match_dup 1) (match_dup 2)))]
647 "TARGET_32BIT"
648 "adds%?\\t%0, %1, %2"
649 [(set_attr "conds" "set")
650 (set_attr "type" "alus_sreg")]
651 )
652
653 (define_insn "addsi3_compare0"
654 [(set (reg:CC_NOOV CC_REGNUM)
655 (compare:CC_NOOV
656 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
657 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
658 (const_int 0)))
659 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
660 (plus:SI (match_dup 1) (match_dup 2)))]
661 "TARGET_ARM"
662 "@
663 adds%?\\t%0, %1, %2
664 subs%?\\t%0, %1, #%n2
665 adds%?\\t%0, %1, %2"
666 [(set_attr "conds" "set")
667 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
668 )
669
670 (define_insn "*addsi3_compare0_scratch"
671 [(set (reg:CC_NOOV CC_REGNUM)
672 (compare:CC_NOOV
673 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
674 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
675 (const_int 0)))]
676 "TARGET_ARM"
677 "@
678 cmn%?\\t%0, %1
679 cmp%?\\t%0, #%n1
680 cmn%?\\t%0, %1"
681 [(set_attr "conds" "set")
682 (set_attr "predicable" "yes")
683 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
684 )
685
686 (define_insn "*compare_negsi_si"
687 [(set (reg:CC_Z CC_REGNUM)
688 (compare:CC_Z
689 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
690 (match_operand:SI 1 "s_register_operand" "l,r")))]
691 "TARGET_32BIT"
692 "cmn%?\\t%1, %0"
693 [(set_attr "conds" "set")
694 (set_attr "predicable" "yes")
695 (set_attr "arch" "t2,*")
696 (set_attr "length" "2,4")
697 (set_attr "predicable_short_it" "yes,no")
698 (set_attr "type" "alus_sreg")]
699 )
700
701 ;; This is the canonicalization of subsi3_compare when the
702 ;; addend is a constant.
703 (define_insn "cmpsi2_addneg"
704 [(set (reg:CC CC_REGNUM)
705 (compare:CC
706 (match_operand:SI 1 "s_register_operand" "r,r")
707 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
708 (set (match_operand:SI 0 "s_register_operand" "=r,r")
709 (plus:SI (match_dup 1)
710 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
711 "TARGET_32BIT
712 && (INTVAL (operands[2])
713 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
714 {
715 /* For 0 and INT_MIN it is essential that we use subs, as adds will result
716 in different condition codes (like cmn rather than like cmp), so that
717 alternative comes first. Both alternatives can match for any 0x??000000
718 value, where except for 0 and INT_MIN it doesn't matter which we choose,
719 and also for -1 and 1 with TARGET_THUMB2; in that case prefer the
720 instruction with #1 as it is shorter. */
721 if (which_alternative == 0 && operands[3] != const1_rtx)
722 return "subs%?\\t%0, %1, #%n3";
723 else
724 return "adds%?\\t%0, %1, %3";
725 }
726 [(set_attr "conds" "set")
727 (set_attr "type" "alus_sreg")]
728 )
729
730 ;; Convert the sequence
731 ;; sub rd, rn, #1
732 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
733 ;; bne dest
734 ;; into
735 ;; subs rd, rn, #1
736 ;; bcs dest ((unsigned)rn >= 1)
737 ;; and similarly for the beq variant, using bcc.
738 ;; This is a common looping idiom (while (n--)).
739 (define_peephole2
740 [(set (match_operand:SI 0 "arm_general_register_operand" "")
741 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
742 (const_int -1)))
743 (set (match_operand 2 "cc_register" "")
744 (compare (match_dup 0) (const_int -1)))
745 (set (pc)
746 (if_then_else (match_operator 3 "equality_operator"
747 [(match_dup 2) (const_int 0)])
748 (match_operand 4 "" "")
749 (match_operand 5 "" "")))]
750 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
751 [(parallel[
752 (set (match_dup 2)
753 (compare:CC
754 (match_dup 1) (const_int 1)))
755 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
756 (set (pc)
757 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
758 (match_dup 4)
759 (match_dup 5)))]
760 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
761 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
762 ? GEU : LTU),
763 VOIDmode,
764 operands[2], const0_rtx);"
765 )
766
767 ;; The next four insns work because they compare the result with one of
768 ;; the operands, and we know that the use of the condition code is
769 ;; either GEU or LTU, so we can use the carry flag from the addition
770 ;; instead of doing the compare a second time.
771 (define_insn "*addsi3_compare_op1"
772 [(set (reg:CC_C CC_REGNUM)
773 (compare:CC_C
774 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
775 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
776 (match_dup 1)))
777 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
778 (plus:SI (match_dup 1) (match_dup 2)))]
779 "TARGET_32BIT"
780 "@
781 adds%?\\t%0, %1, %2
782 adds%?\\t%0, %0, %2
783 subs%?\\t%0, %1, #%n2
784 subs%?\\t%0, %0, #%n2
785 adds%?\\t%0, %1, %2
786 subs%?\\t%0, %1, #%n2
787 adds%?\\t%0, %1, %2"
788 [(set_attr "conds" "set")
789 (set_attr "arch" "t2,t2,t2,t2,*,*,*")
790 (set_attr "length" "2,2,2,2,4,4,4")
791 (set_attr "type"
792 "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
793 )
794
795 (define_insn "*addsi3_compare_op2"
796 [(set (reg:CC_C CC_REGNUM)
797 (compare:CC_C
798 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
799 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
800 (match_dup 2)))
801 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
802 (plus:SI (match_dup 1) (match_dup 2)))]
803 "TARGET_32BIT"
804 "@
805 adds%?\\t%0, %1, %2
806 adds%?\\t%0, %0, %2
807 subs%?\\t%0, %1, #%n2
808 subs%?\\t%0, %0, #%n2
809 adds%?\\t%0, %1, %2
810 subs%?\\t%0, %1, #%n2
811 adds%?\\t%0, %1, %2"
812 [(set_attr "conds" "set")
813 (set_attr "arch" "t2,t2,t2,t2,*,*,*")
814 (set_attr "length" "2,2,2,2,4,4,4")
815 (set_attr "type"
816 "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
817 )
818
819 (define_insn "*compare_addsi2_op0"
820 [(set (reg:CC_C CC_REGNUM)
821 (compare:CC_C
822 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
823 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
824 (match_dup 0)))]
825 "TARGET_32BIT"
826 "@
827 cmp%?\\t%0, #%n1
828 cmn%?\\t%0, %1
829 cmn%?\\t%0, %1
830 cmp%?\\t%0, #%n1
831 cmn%?\\t%0, %1"
832 [(set_attr "conds" "set")
833 (set_attr "predicable" "yes")
834 (set_attr "arch" "t2,t2,*,*,*")
835 (set_attr "predicable_short_it" "yes,yes,no,no,no")
836 (set_attr "length" "2,2,4,4,4")
837 (set_attr "type" "alus_imm,alus_sreg,alus_imm,alus_imm,alus_sreg")]
838 )
839
840 (define_insn "*compare_addsi2_op1"
841 [(set (reg:CC_C CC_REGNUM)
842 (compare:CC_C
843 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
844 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
845 (match_dup 1)))]
846 "TARGET_32BIT"
847 "@
848 cmp%?\\t%0, #%n1
849 cmn%?\\t%0, %1
850 cmn%?\\t%0, %1
851 cmp%?\\t%0, #%n1
852 cmn%?\\t%0, %1"
853 [(set_attr "conds" "set")
854 (set_attr "predicable" "yes")
855 (set_attr "arch" "t2,t2,*,*,*")
856 (set_attr "predicable_short_it" "yes,yes,no,no,no")
857 (set_attr "length" "2,2,4,4,4")
858 (set_attr "type" "alus_imm,alus_sreg,alus_imm,alus_imm,alus_sreg")]
859 )
860
861 (define_insn "addsi3_carryin_<optab>"
862 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
863 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
864 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
865 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
866 "TARGET_32BIT"
867 "@
868 adc%?\\t%0, %1, %2
869 adc%?\\t%0, %1, %2
870 sbc%?\\t%0, %1, #%B2"
871 [(set_attr "conds" "use")
872 (set_attr "predicable" "yes")
873 (set_attr "arch" "t2,*,*")
874 (set_attr "length" "4")
875 (set_attr "predicable_short_it" "yes,no,no")
876 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
877 )
878
879 ;; Canonicalization of the above when the immediate is zero.
880 (define_insn "add0si3_carryin_<optab>"
881 [(set (match_operand:SI 0 "s_register_operand" "=r")
882 (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
883 (match_operand:SI 1 "arm_not_operand" "r")))]
884 "TARGET_32BIT"
885 "adc%?\\t%0, %1, #0"
886 [(set_attr "conds" "use")
887 (set_attr "predicable" "yes")
888 (set_attr "length" "4")
889 (set_attr "type" "adc_imm")]
890 )
891
892 (define_insn "*addsi3_carryin_alt2_<optab>"
893 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
894 (plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
895 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
896 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
897 "TARGET_32BIT"
898 "@
899 adc%?\\t%0, %1, %2
900 adc%?\\t%0, %1, %2
901 sbc%?\\t%0, %1, #%B2"
902 [(set_attr "conds" "use")
903 (set_attr "predicable" "yes")
904 (set_attr "arch" "t2,*,*")
905 (set_attr "length" "4")
906 (set_attr "predicable_short_it" "yes,no,no")
907 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
908 )
909
910 (define_insn "*addsi3_carryin_shift_<optab>"
911 [(set (match_operand:SI 0 "s_register_operand" "=r")
912 (plus:SI (plus:SI
913 (match_operator:SI 2 "shift_operator"
914 [(match_operand:SI 3 "s_register_operand" "r")
915 (match_operand:SI 4 "reg_or_int_operand" "rM")])
916 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0)))
917 (match_operand:SI 1 "s_register_operand" "r")))]
918 "TARGET_32BIT"
919 "adc%?\\t%0, %1, %3%S2"
920 [(set_attr "conds" "use")
921 (set_attr "predicable" "yes")
922 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
923 (const_string "alu_shift_imm")
924 (const_string "alu_shift_reg")))]
925 )
926
927 (define_insn "*addsi3_carryin_clobercc_<optab>"
928 [(set (match_operand:SI 0 "s_register_operand" "=r")
929 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
930 (match_operand:SI 2 "arm_rhs_operand" "rI"))
931 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))
932 (clobber (reg:CC CC_REGNUM))]
933 "TARGET_32BIT"
934 "adcs%?\\t%0, %1, %2"
935 [(set_attr "conds" "set")
936 (set_attr "type" "adcs_reg")]
937 )
938
939 (define_expand "subv<mode>4"
940 [(match_operand:SIDI 0 "register_operand")
941 (match_operand:SIDI 1 "register_operand")
942 (match_operand:SIDI 2 "register_operand")
943 (match_operand 3 "")]
944 "TARGET_32BIT"
945 {
946 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
947 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
948
949 DONE;
950 })
951
952 (define_expand "usubv<mode>4"
953 [(match_operand:SIDI 0 "register_operand")
954 (match_operand:SIDI 1 "register_operand")
955 (match_operand:SIDI 2 "register_operand")
956 (match_operand 3 "")]
957 "TARGET_32BIT"
958 {
959 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
960 arm_gen_unlikely_cbranch (LTU, CCmode, operands[3]);
961
962 DONE;
963 })
964
965 (define_insn "subdi3_compare1"
966 [(set (reg:CC CC_REGNUM)
967 (compare:CC
968 (match_operand:DI 1 "s_register_operand" "r")
969 (match_operand:DI 2 "s_register_operand" "r")))
970 (set (match_operand:DI 0 "s_register_operand" "=&r")
971 (minus:DI (match_dup 1) (match_dup 2)))]
972 "TARGET_32BIT"
973 "subs\\t%Q0, %Q1, %Q2;sbcs\\t%R0, %R1, %R2"
974 [(set_attr "conds" "set")
975 (set_attr "length" "8")
976 (set_attr "type" "multiple")]
977 )
978
979 (define_insn "subsi3_compare1"
980 [(set (reg:CC CC_REGNUM)
981 (compare:CC
982 (match_operand:SI 1 "register_operand" "r")
983 (match_operand:SI 2 "register_operand" "r")))
984 (set (match_operand:SI 0 "register_operand" "=r")
985 (minus:SI (match_dup 1) (match_dup 2)))]
986 "TARGET_32BIT"
987 "subs%?\\t%0, %1, %2"
988 [(set_attr "conds" "set")
989 (set_attr "type" "alus_sreg")]
990 )
991
992 (define_insn "subsi3_carryin"
993 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
994 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
995 (match_operand:SI 2 "s_register_operand" "r,r,r"))
996 (match_operand:SI 3 "arm_borrow_operation" "")))]
997 "TARGET_32BIT"
998 "@
999 sbc%?\\t%0, %1, %2
1000 rsc%?\\t%0, %2, %1
1001 sbc%?\\t%0, %2, %2, lsl #1"
1002 [(set_attr "conds" "use")
1003 (set_attr "arch" "*,a,t2")
1004 (set_attr "predicable" "yes")
1005 (set_attr "type" "adc_reg,adc_imm,alu_shift_imm")]
1006 )
1007
1008 (define_insn "*subsi3_carryin_const"
1009 [(set (match_operand:SI 0 "s_register_operand" "=r")
1010 (minus:SI (plus:SI
1011 (match_operand:SI 1 "s_register_operand" "r")
1012 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1013 (match_operand:SI 3 "arm_borrow_operation" "")))]
1014 "TARGET_32BIT"
1015 "sbc\\t%0, %1, #%n2"
1016 [(set_attr "conds" "use")
1017 (set_attr "type" "adc_imm")]
1018 )
1019
1020 (define_insn "*subsi3_carryin_const0"
1021 [(set (match_operand:SI 0 "s_register_operand" "=r")
1022 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1023 (match_operand:SI 2 "arm_borrow_operation" "")))]
1024 "TARGET_32BIT"
1025 "sbc\\t%0, %1, #0"
1026 [(set_attr "conds" "use")
1027 (set_attr "type" "adc_imm")]
1028 )
1029
1030 (define_insn "*subsi3_carryin_shift"
1031 [(set (match_operand:SI 0 "s_register_operand" "=r")
1032 (minus:SI (minus:SI
1033 (match_operand:SI 1 "s_register_operand" "r")
1034 (match_operator:SI 2 "shift_operator"
1035 [(match_operand:SI 3 "s_register_operand" "r")
1036 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
1037 (match_operand:SI 5 "arm_borrow_operation" "")))]
1038 "TARGET_32BIT"
1039 "sbc%?\\t%0, %1, %3%S2"
1040 [(set_attr "conds" "use")
1041 (set_attr "predicable" "yes")
1042 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1043 (const_string "alu_shift_imm")
1044 (const_string "alu_shift_reg")))]
1045 )
1046
1047 (define_insn "*rsbsi3_carryin_shift"
1048 [(set (match_operand:SI 0 "s_register_operand" "=r")
1049 (minus:SI (minus:SI
1050 (match_operator:SI 2 "shift_operator"
1051 [(match_operand:SI 3 "s_register_operand" "r")
1052 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1053 (match_operand:SI 1 "s_register_operand" "r"))
1054 (match_operand:SI 5 "arm_borrow_operation" "")))]
1055 "TARGET_ARM"
1056 "rsc%?\\t%0, %1, %3%S2"
1057 [(set_attr "conds" "use")
1058 (set_attr "predicable" "yes")
1059 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1060 (const_string "alu_shift_imm")
1061 (const_string "alu_shift_reg")))]
1062 )
1063
1064 ; Transform ((x << y) - 1) to ~(~(x - 1) << y), where x is a constant.
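; A worked example of the identity, with x = 4 and y = 3:
;   (4 << 3) - 1                  = 31
;   ~(~(4 - 1) << 3) = ~(-4 << 3) = ~(-32) = 31
; so the split loads the precomputed constant ~(x - 1) into the scratch and
; finishes with a single MVN of a shifted operand.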
1065 (define_split
1066 [(set (match_operand:SI 0 "s_register_operand" "")
1067 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1068 (match_operand:SI 2 "s_register_operand" ""))
1069 (const_int -1)))
1070 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1071 "TARGET_32BIT"
1072 [(set (match_dup 3) (match_dup 1))
1073 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1074 "
1075 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1076 ")
1077
1078 (define_expand "addsf3"
1079 [(set (match_operand:SF 0 "s_register_operand")
1080 (plus:SF (match_operand:SF 1 "s_register_operand")
1081 (match_operand:SF 2 "s_register_operand")))]
1082 "TARGET_32BIT && TARGET_HARD_FLOAT"
1083 "
1084 ")
1085
1086 (define_expand "adddf3"
1087 [(set (match_operand:DF 0 "s_register_operand")
1088 (plus:DF (match_operand:DF 1 "s_register_operand")
1089 (match_operand:DF 2 "s_register_operand")))]
1090 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1091 "
1092 ")
1093
1094 (define_expand "subdi3"
1095 [(parallel
1096 [(set (match_operand:DI 0 "s_register_operand")
1097 (minus:DI (match_operand:DI 1 "reg_or_int_operand")
1098 (match_operand:DI 2 "s_register_operand")))
1099 (clobber (reg:CC CC_REGNUM))])]
1100 "TARGET_EITHER"
1101 "
1102 if (TARGET_THUMB1)
1103 {
1104 if (!REG_P (operands[1]))
1105 operands[1] = force_reg (DImode, operands[1]);
1106 }
1107 else
1108 {
1109 rtx lo_result, hi_result, lo_dest, hi_dest;
1110 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1111 rtx condition;
1112
1113 /* Since operands[1] may be an integer, pass it second, so that
1114 any necessary simplifications will be done on the decomposed
1115 constant. */
1116 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1117 &lo_op1, &hi_op1);
1118 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
1119 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
1120
1121 if (!arm_rhs_operand (lo_op1, SImode))
1122 lo_op1 = force_reg (SImode, lo_op1);
1123
1124 if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
1125 || !arm_rhs_operand (hi_op1, SImode))
1126 hi_op1 = force_reg (SImode, hi_op1);
1127
1128 rtx cc_reg;
1129 if (lo_op1 == const0_rtx)
1130 {
1131 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1132 emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
1133 }
1134 else if (CONST_INT_P (lo_op1))
1135 {
1136 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1137 emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
1138 GEN_INT (~UINTVAL (lo_op1))));
1139 }
1140 else
1141 {
1142 cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
1143 emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
1144 }
1145
1146 condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);
1147
1148 if (hi_op1 == const0_rtx)
1149 emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
1150 else
1151 emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));
1152
1153 if (lo_result != lo_dest)
1154 emit_move_insn (lo_result, lo_dest);
1155
1156 if (hi_result != hi_dest)
1157 emit_move_insn (hi_result, hi_dest);
1158
1159 DONE;
1160 }
1161 "
1162 )
1163
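;; As with adddi3 above, a sketch of the code for the common all-register
;; case (from the output templates of subsi3_compare and subsi3_carryin):
;;   subs  lo_dest, lo_op1, lo_op2   @ C flag is set to "no borrow"
;;   sbc   hi_dest, hi_op1, hi_op2   @ the borrow is consumed here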
1164 (define_expand "subsi3"
1165 [(set (match_operand:SI 0 "s_register_operand")
1166 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1167 (match_operand:SI 2 "s_register_operand")))]
1168 "TARGET_EITHER"
1169 "
1170 if (CONST_INT_P (operands[1]))
1171 {
1172 if (TARGET_32BIT)
1173 {
1174 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1175 operands[1] = force_reg (SImode, operands[1]);
1176 else
1177 {
1178 arm_split_constant (MINUS, SImode, NULL_RTX,
1179 INTVAL (operands[1]), operands[0],
1180 operands[2],
1181 optimize && can_create_pseudo_p ());
1182 DONE;
1183 }
1184 }
1185 else /* TARGET_THUMB1 */
1186 operands[1] = force_reg (SImode, operands[1]);
1187 }
1188 "
1189 )
1190
1191 ; ??? Check Thumb-2 split length
1192 (define_insn_and_split "*arm_subsi3_insn"
1193 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
1194 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
1195 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
1196 "TARGET_32BIT"
1197 "@
1198 sub%?\\t%0, %1, %2
1199 sub%?\\t%0, %2
1200 sub%?\\t%0, %1, %2
1201 rsb%?\\t%0, %2, %1
1202 rsb%?\\t%0, %2, %1
1203 sub%?\\t%0, %1, %2
1204 sub%?\\t%0, %1, %2
1205 sub%?\\t%0, %1, %2
1206 #"
1207 "&& (CONST_INT_P (operands[1])
1208 && !const_ok_for_arm (INTVAL (operands[1])))"
1209 [(clobber (const_int 0))]
1210 "
1211 arm_split_constant (MINUS, SImode, curr_insn,
1212 INTVAL (operands[1]), operands[0], operands[2], 0);
1213 DONE;
1214 "
1215 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
1216 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
1217 (set_attr "predicable" "yes")
1218 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
1219 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
1220 )
1221
1222 (define_peephole2
1223 [(match_scratch:SI 3 "r")
1224 (set (match_operand:SI 0 "arm_general_register_operand" "")
1225 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1226 (match_operand:SI 2 "arm_general_register_operand" "")))]
1227 "TARGET_32BIT
1228 && !const_ok_for_arm (INTVAL (operands[1]))
1229 && const_ok_for_arm (~INTVAL (operands[1]))"
1230 [(set (match_dup 3) (match_dup 1))
1231 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1232 ""
1233 )
1234
1235 (define_insn "subsi3_compare0"
1236 [(set (reg:CC_NOOV CC_REGNUM)
1237 (compare:CC_NOOV
1238 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1239 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1240 (const_int 0)))
1241 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1242 (minus:SI (match_dup 1) (match_dup 2)))]
1243 "TARGET_32BIT"
1244 "@
1245 subs%?\\t%0, %1, %2
1246 subs%?\\t%0, %1, %2
1247 rsbs%?\\t%0, %2, %1"
1248 [(set_attr "conds" "set")
1249 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
1250 )
1251
1252 (define_insn "subsi3_compare"
1253 [(set (reg:CC CC_REGNUM)
1254 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1255 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1256 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1257 (minus:SI (match_dup 1) (match_dup 2)))]
1258 "TARGET_32BIT"
1259 "@
1260 subs%?\\t%0, %1, %2
1261 subs%?\\t%0, %1, %2
1262 rsbs%?\\t%0, %2, %1"
1263 [(set_attr "conds" "set")
1264 (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
1265 )
1266
1267 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
1268 ;; rather than (0 cmp reg). This gives the same results for unsigned
1269 ;; and equality compares, which is what we mostly need here.
1270 (define_insn "rsb_imm_compare"
1271 [(set (reg:CC_RSB CC_REGNUM)
1272 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
1273 (match_operand 3 "const_int_operand" "")))
1274 (set (match_operand:SI 0 "s_register_operand" "=r")
1275 (minus:SI (match_operand 1 "arm_immediate_operand" "I")
1276 (match_dup 2)))]
1277 "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
1278 "rsbs\\t%0, %2, %1"
1279 [(set_attr "conds" "set")
1280 (set_attr "type" "alus_imm")]
1281 )
1282
1283 (define_expand "subsf3"
1284 [(set (match_operand:SF 0 "s_register_operand")
1285 (minus:SF (match_operand:SF 1 "s_register_operand")
1286 (match_operand:SF 2 "s_register_operand")))]
1287 "TARGET_32BIT && TARGET_HARD_FLOAT"
1288 "
1289 ")
1290
1291 (define_expand "subdf3"
1292 [(set (match_operand:DF 0 "s_register_operand")
1293 (minus:DF (match_operand:DF 1 "s_register_operand")
1294 (match_operand:DF 2 "s_register_operand")))]
1295 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1296 "
1297 ")
1298
1299 \f
1300 ;; Multiplication insns
1301
1302 (define_expand "mulhi3"
1303 [(set (match_operand:HI 0 "s_register_operand")
1304 (mult:HI (match_operand:HI 1 "s_register_operand")
1305 (match_operand:HI 2 "s_register_operand")))]
1306 "TARGET_DSP_MULTIPLY"
1307 "
1308 {
1309 rtx result = gen_reg_rtx (SImode);
1310 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
1311 emit_move_insn (operands[0], gen_lowpart (HImode, result));
1312 DONE;
1313 }"
1314 )
1315
1316 (define_expand "mulsi3"
1317 [(set (match_operand:SI 0 "s_register_operand")
1318 (mult:SI (match_operand:SI 2 "s_register_operand")
1319 (match_operand:SI 1 "s_register_operand")))]
1320 "TARGET_EITHER"
1321 ""
1322 )
1323
1324 ;; Use `&' and then `0' to prevent operands 0 and 2 from being the same
1325 (define_insn "*mul"
1326 [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
1327 (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
1328 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
1329 "TARGET_32BIT"
1330 "mul%?\\t%0, %2, %1"
1331 [(set_attr "type" "mul")
1332 (set_attr "predicable" "yes")
1333 (set_attr "arch" "t2,v6,nov6,nov6")
1334 (set_attr "length" "4")
1335 (set_attr "predicable_short_it" "yes,no,*,*")]
1336 )
1337
1338 ;; MLA and MLS instructions. Use operand 1 for the accumulator to prefer
1339 ;; reusing the same register.
1340
1341 (define_insn "*mla"
1342 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
1343 (plus:SI
1344 (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
1345 (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
1346 (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
1347 "TARGET_32BIT"
1348 "mla%?\\t%0, %3, %2, %1"
1349 [(set_attr "type" "mla")
1350 (set_attr "predicable" "yes")
1351 (set_attr "arch" "v6,nov6,nov6,nov6")]
1352 )
1353
1354 (define_insn "*mls"
1355 [(set (match_operand:SI 0 "s_register_operand" "=r")
1356 (minus:SI
1357 (match_operand:SI 1 "s_register_operand" "r")
1358 (mult:SI (match_operand:SI 3 "s_register_operand" "r")
1359 (match_operand:SI 2 "s_register_operand" "r"))))]
1360 "TARGET_32BIT && arm_arch_thumb2"
1361 "mls%?\\t%0, %3, %2, %1"
1362 [(set_attr "type" "mla")
1363 (set_attr "predicable" "yes")]
1364 )
1365
1366 (define_insn "*mulsi3_compare0"
1367 [(set (reg:CC_NOOV CC_REGNUM)
1368 (compare:CC_NOOV (mult:SI
1369 (match_operand:SI 2 "s_register_operand" "r,r")
1370 (match_operand:SI 1 "s_register_operand" "%0,r"))
1371 (const_int 0)))
1372 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1373 (mult:SI (match_dup 2) (match_dup 1)))]
1374 "TARGET_ARM && !arm_arch6"
1375 "muls%?\\t%0, %2, %1"
1376 [(set_attr "conds" "set")
1377 (set_attr "type" "muls")]
1378 )
1379
1380 (define_insn "*mulsi3_compare0_v6"
1381 [(set (reg:CC_NOOV CC_REGNUM)
1382 (compare:CC_NOOV (mult:SI
1383 (match_operand:SI 2 "s_register_operand" "r")
1384 (match_operand:SI 1 "s_register_operand" "r"))
1385 (const_int 0)))
1386 (set (match_operand:SI 0 "s_register_operand" "=r")
1387 (mult:SI (match_dup 2) (match_dup 1)))]
1388 "TARGET_ARM && arm_arch6 && optimize_size"
1389 "muls%?\\t%0, %2, %1"
1390 [(set_attr "conds" "set")
1391 (set_attr "type" "muls")]
1392 )
1393
1394 (define_insn "*mulsi_compare0_scratch"
1395 [(set (reg:CC_NOOV CC_REGNUM)
1396 (compare:CC_NOOV (mult:SI
1397 (match_operand:SI 2 "s_register_operand" "r,r")
1398 (match_operand:SI 1 "s_register_operand" "%0,r"))
1399 (const_int 0)))
1400 (clobber (match_scratch:SI 0 "=&r,&r"))]
1401 "TARGET_ARM && !arm_arch6"
1402 "muls%?\\t%0, %2, %1"
1403 [(set_attr "conds" "set")
1404 (set_attr "type" "muls")]
1405 )
1406
1407 (define_insn "*mulsi_compare0_scratch_v6"
1408 [(set (reg:CC_NOOV CC_REGNUM)
1409 (compare:CC_NOOV (mult:SI
1410 (match_operand:SI 2 "s_register_operand" "r")
1411 (match_operand:SI 1 "s_register_operand" "r"))
1412 (const_int 0)))
1413 (clobber (match_scratch:SI 0 "=r"))]
1414 "TARGET_ARM && arm_arch6 && optimize_size"
1415 "muls%?\\t%0, %2, %1"
1416 [(set_attr "conds" "set")
1417 (set_attr "type" "muls")]
1418 )
1419
1420 (define_insn "*mulsi3addsi_compare0"
1421 [(set (reg:CC_NOOV CC_REGNUM)
1422 (compare:CC_NOOV
1423 (plus:SI (mult:SI
1424 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1425 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1426 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1427 (const_int 0)))
1428 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1429 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1430 (match_dup 3)))]
1431 "TARGET_ARM && arm_arch6"
1432 "mlas%?\\t%0, %2, %1, %3"
1433 [(set_attr "conds" "set")
1434 (set_attr "type" "mlas")]
1435 )
1436
1437 (define_insn "*mulsi3addsi_compare0_v6"
1438 [(set (reg:CC_NOOV CC_REGNUM)
1439 (compare:CC_NOOV
1440 (plus:SI (mult:SI
1441 (match_operand:SI 2 "s_register_operand" "r")
1442 (match_operand:SI 1 "s_register_operand" "r"))
1443 (match_operand:SI 3 "s_register_operand" "r"))
1444 (const_int 0)))
1445 (set (match_operand:SI 0 "s_register_operand" "=r")
1446 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1447 (match_dup 3)))]
1448 "TARGET_ARM && arm_arch6 && optimize_size"
1449 "mlas%?\\t%0, %2, %1, %3"
1450 [(set_attr "conds" "set")
1451 (set_attr "type" "mlas")]
1452 )
1453
1454 (define_insn "*mulsi3addsi_compare0_scratch"
1455 [(set (reg:CC_NOOV CC_REGNUM)
1456 (compare:CC_NOOV
1457 (plus:SI (mult:SI
1458 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1459 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1460 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1461 (const_int 0)))
1462 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1463 "TARGET_ARM && !arm_arch6"
1464 "mlas%?\\t%0, %2, %1, %3"
1465 [(set_attr "conds" "set")
1466 (set_attr "type" "mlas")]
1467 )
1468
1469 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1470 [(set (reg:CC_NOOV CC_REGNUM)
1471 (compare:CC_NOOV
1472 (plus:SI (mult:SI
1473 (match_operand:SI 2 "s_register_operand" "r")
1474 (match_operand:SI 1 "s_register_operand" "r"))
1475 (match_operand:SI 3 "s_register_operand" "r"))
1476 (const_int 0)))
1477 (clobber (match_scratch:SI 0 "=r"))]
1478 "TARGET_ARM && arm_arch6 && optimize_size"
1479 "mlas%?\\t%0, %2, %1, %3"
1480 [(set_attr "conds" "set")
1481 (set_attr "type" "mlas")]
1482 )
1483
1484 ;; 32x32->64 widening multiply.
1485 ;; The only difference between the v3-5 and v6+ versions is the requirement
1486 ;; that the output does not overlap with either input.
1487
1488 (define_expand "<Us>mulsidi3"
1489 [(set (match_operand:DI 0 "s_register_operand")
1490 (mult:DI
1491 (SE:DI (match_operand:SI 1 "s_register_operand"))
1492 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
1493 "TARGET_32BIT"
1494 {
1495 emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
1496 gen_highpart (SImode, operands[0]),
1497 operands[1], operands[2]));
1498 DONE;
1499 }
1500 )
1501
1502 (define_insn "<US>mull"
1503 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
1504 (mult:SI
1505 (match_operand:SI 2 "s_register_operand" "%r,r")
1506 (match_operand:SI 3 "s_register_operand" "r,r")))
1507 (set (match_operand:SI 1 "s_register_operand" "=r,&r")
1508 (truncate:SI
1509 (lshiftrt:DI
1510 (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
1511 (const_int 32))))]
1512 "TARGET_32BIT"
1513 "<US>mull%?\\t%0, %1, %2, %3"
1514 [(set_attr "type" "umull")
1515 (set_attr "predicable" "yes")
1516 (set_attr "arch" "v6,nov6")]
1517 )
1518
1519 (define_expand "<Us>maddsidi4"
1520 [(set (match_operand:DI 0 "s_register_operand")
1521 (plus:DI
1522 (mult:DI
1523 (SE:DI (match_operand:SI 1 "s_register_operand"))
1524 (SE:DI (match_operand:SI 2 "s_register_operand")))
1525 (match_operand:DI 3 "s_register_operand")))]
1526 "TARGET_32BIT"
1527 {
1528 emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
1529 gen_lowpart (SImode, operands[3]),
1530 gen_highpart (SImode, operands[0]),
1531 gen_highpart (SImode, operands[3]),
1532 operands[1], operands[2]));
1533 DONE;
1534 }
1535 )
1536
1537 (define_insn "<US>mlal"
1538 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
1539 (plus:SI
1540 (mult:SI
1541 (match_operand:SI 4 "s_register_operand" "%r,r")
1542 (match_operand:SI 5 "s_register_operand" "r,r"))
1543 (match_operand:SI 1 "s_register_operand" "0,0")))
1544 (set (match_operand:SI 2 "s_register_operand" "=r,&r")
1545 (plus:SI
1546 (truncate:SI
1547 (lshiftrt:DI
1548 (plus:DI
1549 (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
1550 (zero_extend:DI (match_dup 1)))
1551 (const_int 32)))
1552 (match_operand:SI 3 "s_register_operand" "2,2")))]
1553 "TARGET_32BIT"
1554 "<US>mlal%?\\t%0, %2, %4, %5"
1555 [(set_attr "type" "umlal")
1556 (set_attr "predicable" "yes")
1557 (set_attr "arch" "v6,nov6")]
1558 )
1559
1560 (define_expand "<US>mulsi3_highpart"
1561 [(parallel
1562 [(set (match_operand:SI 0 "s_register_operand")
1563 (truncate:SI
1564 (lshiftrt:DI
1565 (mult:DI
1566 (SE:DI (match_operand:SI 1 "s_register_operand"))
1567 (SE:DI (match_operand:SI 2 "s_register_operand")))
1568 (const_int 32))))
1569 (clobber (match_scratch:SI 3 ""))])]
1570 "TARGET_32BIT"
1571 ""
1572 )
1573
1574 (define_insn "*<US>mull_high"
1575 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
1576 (truncate:SI
1577 (lshiftrt:DI
1578 (mult:DI
1579 (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
1580 (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
1581 (const_int 32))))
1582 (clobber (match_scratch:SI 3 "=r,&r,&r"))]
1583 "TARGET_32BIT"
1584 "<US>mull%?\\t%3, %0, %2, %1"
1585 [(set_attr "type" "umull")
1586 (set_attr "predicable" "yes")
1587 (set_attr "arch" "v6,nov6,nov6")]
1588 )
1589
1590 (define_insn "mulhisi3"
1591 [(set (match_operand:SI 0 "s_register_operand" "=r")
1592 (mult:SI (sign_extend:SI
1593 (match_operand:HI 1 "s_register_operand" "%r"))
1594 (sign_extend:SI
1595 (match_operand:HI 2 "s_register_operand" "r"))))]
1596 "TARGET_DSP_MULTIPLY"
1597 "smulbb%?\\t%0, %1, %2"
1598 [(set_attr "type" "smulxy")
1599 (set_attr "predicable" "yes")]
1600 )
1601
1602 (define_insn "*mulhisi3tb"
1603 [(set (match_operand:SI 0 "s_register_operand" "=r")
1604 (mult:SI (ashiftrt:SI
1605 (match_operand:SI 1 "s_register_operand" "r")
1606 (const_int 16))
1607 (sign_extend:SI
1608 (match_operand:HI 2 "s_register_operand" "r"))))]
1609 "TARGET_DSP_MULTIPLY"
1610 "smultb%?\\t%0, %1, %2"
1611 [(set_attr "type" "smulxy")
1612 (set_attr "predicable" "yes")]
1613 )
1614
1615 (define_insn "*mulhisi3bt"
1616 [(set (match_operand:SI 0 "s_register_operand" "=r")
1617 (mult:SI (sign_extend:SI
1618 (match_operand:HI 1 "s_register_operand" "r"))
1619 (ashiftrt:SI
1620 (match_operand:SI 2 "s_register_operand" "r")
1621 (const_int 16))))]
1622 "TARGET_DSP_MULTIPLY"
1623 "smulbt%?\\t%0, %1, %2"
1624 [(set_attr "type" "smulxy")
1625 (set_attr "predicable" "yes")]
1626 )
1627
1628 (define_insn "*mulhisi3tt"
1629 [(set (match_operand:SI 0 "s_register_operand" "=r")
1630 (mult:SI (ashiftrt:SI
1631 (match_operand:SI 1 "s_register_operand" "r")
1632 (const_int 16))
1633 (ashiftrt:SI
1634 (match_operand:SI 2 "s_register_operand" "r")
1635 (const_int 16))))]
1636 "TARGET_DSP_MULTIPLY"
1637 "smultt%?\\t%0, %1, %2"
1638 [(set_attr "type" "smulxy")
1639 (set_attr "predicable" "yes")]
1640 )
1641
1642 (define_insn "maddhisi4"
1643 [(set (match_operand:SI 0 "s_register_operand" "=r")
1644 (plus:SI (mult:SI (sign_extend:SI
1645 (match_operand:HI 1 "s_register_operand" "r"))
1646 (sign_extend:SI
1647 (match_operand:HI 2 "s_register_operand" "r")))
1648 (match_operand:SI 3 "s_register_operand" "r")))]
1649 "TARGET_DSP_MULTIPLY"
1650 "smlabb%?\\t%0, %1, %2, %3"
1651 [(set_attr "type" "smlaxy")
1652 (set_attr "predicable" "yes")]
1653 )
1654
1655 ;; Note: there is no maddhisi4ibt pattern because this is the canonical form
1656 (define_insn "*maddhisi4tb"
1657 [(set (match_operand:SI 0 "s_register_operand" "=r")
1658 (plus:SI (mult:SI (ashiftrt:SI
1659 (match_operand:SI 1 "s_register_operand" "r")
1660 (const_int 16))
1661 (sign_extend:SI
1662 (match_operand:HI 2 "s_register_operand" "r")))
1663 (match_operand:SI 3 "s_register_operand" "r")))]
1664 "TARGET_DSP_MULTIPLY"
1665 "smlatb%?\\t%0, %1, %2, %3"
1666 [(set_attr "type" "smlaxy")
1667 (set_attr "predicable" "yes")]
1668 )
1669
1670 (define_insn "*maddhisi4tt"
1671 [(set (match_operand:SI 0 "s_register_operand" "=r")
1672 (plus:SI (mult:SI (ashiftrt:SI
1673 (match_operand:SI 1 "s_register_operand" "r")
1674 (const_int 16))
1675 (ashiftrt:SI
1676 (match_operand:SI 2 "s_register_operand" "r")
1677 (const_int 16)))
1678 (match_operand:SI 3 "s_register_operand" "r")))]
1679 "TARGET_DSP_MULTIPLY"
1680 "smlatt%?\\t%0, %1, %2, %3"
1681 [(set_attr "type" "smlaxy")
1682 (set_attr "predicable" "yes")]
1683 )
1684
1685 (define_insn "maddhidi4"
1686 [(set (match_operand:DI 0 "s_register_operand" "=r")
1687 (plus:DI
1688 (mult:DI (sign_extend:DI
1689 (match_operand:HI 1 "s_register_operand" "r"))
1690 (sign_extend:DI
1691 (match_operand:HI 2 "s_register_operand" "r")))
1692 (match_operand:DI 3 "s_register_operand" "0")))]
1693 "TARGET_DSP_MULTIPLY"
1694 "smlalbb%?\\t%Q0, %R0, %1, %2"
1695 [(set_attr "type" "smlalxy")
1696 (set_attr "predicable" "yes")])
1697
1698 ;; Note: there is no maddhidi4ibt pattern because this is the canonical form
1699 (define_insn "*maddhidi4tb"
1700 [(set (match_operand:DI 0 "s_register_operand" "=r")
1701 (plus:DI
1702 (mult:DI (sign_extend:DI
1703 (ashiftrt:SI
1704 (match_operand:SI 1 "s_register_operand" "r")
1705 (const_int 16)))
1706 (sign_extend:DI
1707 (match_operand:HI 2 "s_register_operand" "r")))
1708 (match_operand:DI 3 "s_register_operand" "0")))]
1709 "TARGET_DSP_MULTIPLY"
1710 "smlaltb%?\\t%Q0, %R0, %1, %2"
1711 [(set_attr "type" "smlalxy")
1712 (set_attr "predicable" "yes")])
1713
1714 (define_insn "*maddhidi4tt"
1715 [(set (match_operand:DI 0 "s_register_operand" "=r")
1716 (plus:DI
1717 (mult:DI (sign_extend:DI
1718 (ashiftrt:SI
1719 (match_operand:SI 1 "s_register_operand" "r")
1720 (const_int 16)))
1721 (sign_extend:DI
1722 (ashiftrt:SI
1723 (match_operand:SI 2 "s_register_operand" "r")
1724 (const_int 16))))
1725 (match_operand:DI 3 "s_register_operand" "0")))]
1726 "TARGET_DSP_MULTIPLY"
1727 "smlaltt%?\\t%Q0, %R0, %1, %2"
1728 [(set_attr "type" "smlalxy")
1729 (set_attr "predicable" "yes")])
1730
1731 (define_expand "mulsf3"
1732 [(set (match_operand:SF 0 "s_register_operand")
1733 (mult:SF (match_operand:SF 1 "s_register_operand")
1734 (match_operand:SF 2 "s_register_operand")))]
1735 "TARGET_32BIT && TARGET_HARD_FLOAT"
1736 "
1737 ")
1738
1739 (define_expand "muldf3"
1740 [(set (match_operand:DF 0 "s_register_operand")
1741 (mult:DF (match_operand:DF 1 "s_register_operand")
1742 (match_operand:DF 2 "s_register_operand")))]
1743 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1744 "
1745 ")
1746 \f
1747 ;; Division insns
1748
1749 (define_expand "divsf3"
1750 [(set (match_operand:SF 0 "s_register_operand")
1751 (div:SF (match_operand:SF 1 "s_register_operand")
1752 (match_operand:SF 2 "s_register_operand")))]
1753 "TARGET_32BIT && TARGET_HARD_FLOAT"
1754 "")
1755
1756 (define_expand "divdf3"
1757 [(set (match_operand:DF 0 "s_register_operand")
1758 (div:DF (match_operand:DF 1 "s_register_operand")
1759 (match_operand:DF 2 "s_register_operand")))]
1760 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
1761 "")
1762 \f
1763
1764 ; Expand logical operations. The mid-end expander does not split off memory
1765 ; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
1766 ; So an explicit expander is needed to generate better code.
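; As a rough illustration: a 64-bit AND such as
;   unsigned long long f (unsigned long long a, unsigned long long b)
;   { return a & b; }
; is expanded here as two SImode operations on the low and high words, which
; under the usual AAPCS register assignment would come out as something like
;   and r0, r0, r2
;   and r1, r1, r3
; while any DImode loads/stores are left intact so they can still become
; LDRD/STRD.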
1767
1768 (define_expand "<LOGICAL:optab>di3"
1769 [(set (match_operand:DI 0 "s_register_operand")
1770 (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
1771 (match_operand:DI 2 "arm_<optab>di_operand")))]
1772 "TARGET_32BIT"
1773 {
1774 rtx low = simplify_gen_binary (<CODE>, SImode,
1775 gen_lowpart (SImode, operands[1]),
1776 gen_lowpart (SImode, operands[2]));
1777 rtx high = simplify_gen_binary (<CODE>, SImode,
1778 gen_highpart (SImode, operands[1]),
1779 gen_highpart_mode (SImode, DImode,
1780 operands[2]));
1781
1782 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
1783 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
1784 DONE;
1785 }
1786 )
1787
1788 (define_expand "one_cmpldi2"
1789 [(set (match_operand:DI 0 "s_register_operand")
1790 (not:DI (match_operand:DI 1 "s_register_operand")))]
1791 "TARGET_32BIT"
1792 {
1793 rtx low = simplify_gen_unary (NOT, SImode,
1794 gen_lowpart (SImode, operands[1]),
1795 SImode);
1796 rtx high = simplify_gen_unary (NOT, SImode,
1797 gen_highpart_mode (SImode, DImode,
1798 operands[1]),
1799 SImode);
1800
1801 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
1802 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
1803 DONE;
1804 }
1805 )
1806
1807 ;; Split DImode and, ior, xor operations. Simply perform the logical
1808 ;; operation on the upper and lower halves of the registers.
1809 ;; This is needed for atomic operations in arm_split_atomic_op.
1810 ;; Avoid splitting IWMMXT instructions.
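;; As a rough sketch of a typical case: after reload a DImode XOR of two
;; register pairs, say r0:r1 ^= r2:r3, is rewritten by this split into two
;; independent SImode sets, eor r0, r0, r2 and eor r1, r1, r3, which is the
;; word-by-word shape used by arm_split_atomic_op for the DImode atomic
;; logical operations.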
1811 (define_split
1812 [(set (match_operand:DI 0 "s_register_operand" "")
1813 (match_operator:DI 6 "logical_binary_operator"
1814 [(match_operand:DI 1 "s_register_operand" "")
1815 (match_operand:DI 2 "s_register_operand" "")]))]
1816 "TARGET_32BIT && reload_completed
1817 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1818 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1819 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1820 "
1821 {
1822 operands[3] = gen_highpart (SImode, operands[0]);
1823 operands[0] = gen_lowpart (SImode, operands[0]);
1824 operands[4] = gen_highpart (SImode, operands[1]);
1825 operands[1] = gen_lowpart (SImode, operands[1]);
1826 operands[5] = gen_highpart (SImode, operands[2]);
1827 operands[2] = gen_lowpart (SImode, operands[2]);
1828 }"
1829 )
1830
1831 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
1832 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
1833 (define_split
1834 [(set (match_operand:DI 0 "s_register_operand")
1835 (not:DI (match_operand:DI 1 "s_register_operand")))]
1836 "TARGET_32BIT"
1837 [(set (match_dup 0) (not:SI (match_dup 1)))
1838 (set (match_dup 2) (not:SI (match_dup 3)))]
1839 "
1840 {
1841 operands[2] = gen_highpart (SImode, operands[0]);
1842 operands[0] = gen_lowpart (SImode, operands[0]);
1843 operands[3] = gen_highpart (SImode, operands[1]);
1844 operands[1] = gen_lowpart (SImode, operands[1]);
1845 }"
1846 )
1847
1848 (define_expand "andsi3"
1849 [(set (match_operand:SI 0 "s_register_operand")
1850 (and:SI (match_operand:SI 1 "s_register_operand")
1851 (match_operand:SI 2 "reg_or_int_operand")))]
1852 "TARGET_EITHER"
1853 "
1854 if (TARGET_32BIT)
1855 {
1856 if (CONST_INT_P (operands[2]))
1857 {
1858 if (INTVAL (operands[2]) == 255 && arm_arch6)
1859 {
1860 operands[1] = convert_to_mode (QImode, operands[1], 1);
1861 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
1862 operands[1]));
1863 DONE;
1864 }
1865 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
1866 operands[2] = force_reg (SImode, operands[2]);
1867 else
1868 {
1869 arm_split_constant (AND, SImode, NULL_RTX,
1870 INTVAL (operands[2]), operands[0],
1871 operands[1],
1872 optimize && can_create_pseudo_p ());
1873
1874 DONE;
1875 }
1876 }
1877 }
1878 else /* TARGET_THUMB1 */
1879 {
1880 if (!CONST_INT_P (operands[2]))
1881 {
1882 rtx tmp = force_reg (SImode, operands[2]);
1883 if (rtx_equal_p (operands[0], operands[1]))
1884 operands[2] = tmp;
1885 else
1886 {
1887 operands[2] = operands[1];
1888 operands[1] = tmp;
1889 }
1890 }
1891 else
1892 {
1893 int i;
1894
1895 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1896 {
1897 operands[2] = force_reg (SImode,
1898 GEN_INT (~INTVAL (operands[2])));
1899
1900 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
1901
1902 DONE;
1903 }
1904
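	  /* The loop below checks whether the constant is a low-bit mask
	     (2^i - 1), in which case the AND can become a zero-extract of
	     the low i bits, or the complement of such a mask, in which case
	     the low i bits can be cleared with a logical right shift
	     followed by a left shift.  */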
1905 for (i = 9; i <= 31; i++)
1906 {
1907 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
1908 {
1909 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1910 const0_rtx));
1911 DONE;
1912 }
1913 else if ((HOST_WIDE_INT_1 << i) - 1
1914 == ~INTVAL (operands[2]))
1915 {
1916 rtx shift = GEN_INT (i);
1917 rtx reg = gen_reg_rtx (SImode);
1918
1919 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1920 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1921
1922 DONE;
1923 }
1924 }
1925
1926 operands[2] = force_reg (SImode, operands[2]);
1927 }
1928 }
1929 "
1930 )
1931
1932 ; ??? Check split length for Thumb-2
1933 (define_insn_and_split "*arm_andsi3_insn"
1934 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
1935 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
1936 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
1937 "TARGET_32BIT"
1938 "@
1939 and%?\\t%0, %1, %2
1940 and%?\\t%0, %1, %2
1941 bic%?\\t%0, %1, #%B2
1942 and%?\\t%0, %1, %2
1943 #"
1944 "TARGET_32BIT
1945 && CONST_INT_P (operands[2])
1946 && !(const_ok_for_arm (INTVAL (operands[2]))
1947 || const_ok_for_arm (~INTVAL (operands[2])))"
1948 [(clobber (const_int 0))]
1949 "
1950 arm_split_constant (AND, SImode, curr_insn,
1951 INTVAL (operands[2]), operands[0], operands[1], 0);
1952 DONE;
1953 "
1954 [(set_attr "length" "4,4,4,4,16")
1955 (set_attr "predicable" "yes")
1956 (set_attr "predicable_short_it" "no,yes,no,no,no")
1957 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
1958 )
1959
1960 (define_insn "*andsi3_compare0"
1961 [(set (reg:CC_NOOV CC_REGNUM)
1962 (compare:CC_NOOV
1963 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1964 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
1965 (const_int 0)))
1966 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1967 (and:SI (match_dup 1) (match_dup 2)))]
1968 "TARGET_32BIT"
1969 "@
1970 ands%?\\t%0, %1, %2
1971 bics%?\\t%0, %1, #%B2
1972 ands%?\\t%0, %1, %2"
1973 [(set_attr "conds" "set")
1974 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
1975 )
1976
1977 (define_insn "*andsi3_compare0_scratch"
1978 [(set (reg:CC_NOOV CC_REGNUM)
1979 (compare:CC_NOOV
1980 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
1981 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
1982 (const_int 0)))
1983 (clobber (match_scratch:SI 2 "=X,r,X"))]
1984 "TARGET_32BIT"
1985 "@
1986 tst%?\\t%0, %1
1987 bics%?\\t%2, %0, #%B1
1988 tst%?\\t%0, %1"
1989 [(set_attr "conds" "set")
1990 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
1991 )
1992
1993 (define_insn "*zeroextractsi_compare0_scratch"
1994 [(set (reg:CC_NOOV CC_REGNUM)
1995 (compare:CC_NOOV (zero_extract:SI
1996 (match_operand:SI 0 "s_register_operand" "r")
1997 (match_operand 1 "const_int_operand" "n")
1998 (match_operand 2 "const_int_operand" "n"))
1999 (const_int 0)))]
2000 "TARGET_32BIT
2001 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2002 && INTVAL (operands[1]) > 0
2003 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2004 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2005 "*
2006 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2007 << INTVAL (operands[2]));
2008 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2009 return \"\";
2010 "
2011 [(set_attr "conds" "set")
2012 (set_attr "predicable" "yes")
2013 (set_attr "type" "logics_imm")]
2014 )
2015
2016 (define_insn_and_split "*ne_zeroextractsi"
2017 [(set (match_operand:SI 0 "s_register_operand" "=r")
2018 (ne:SI (zero_extract:SI
2019 (match_operand:SI 1 "s_register_operand" "r")
2020 (match_operand:SI 2 "const_int_operand" "n")
2021 (match_operand:SI 3 "const_int_operand" "n"))
2022 (const_int 0)))
2023 (clobber (reg:CC CC_REGNUM))]
2024 "TARGET_32BIT
2025 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2026 && INTVAL (operands[2]) > 0
2027 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2028 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2029 "#"
2030 "TARGET_32BIT
2031 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2032 && INTVAL (operands[2]) > 0
2033 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2034 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2035 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2036 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2037 (const_int 0)))
2038 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2039 (set (match_dup 0)
2040 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2041 (match_dup 0) (const_int 1)))]
2042 "
2043 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2044 << INTVAL (operands[3]));
2045 "
2046 [(set_attr "conds" "clob")
2047 (set (attr "length")
2048 (if_then_else (eq_attr "is_thumb" "yes")
2049 (const_int 12)
2050 (const_int 8)))
2051 (set_attr "type" "multiple")]
2052 )
2053
2054 (define_insn_and_split "*ne_zeroextractsi_shifted"
2055 [(set (match_operand:SI 0 "s_register_operand" "=r")
2056 (ne:SI (zero_extract:SI
2057 (match_operand:SI 1 "s_register_operand" "r")
2058 (match_operand:SI 2 "const_int_operand" "n")
2059 (const_int 0))
2060 (const_int 0)))
2061 (clobber (reg:CC CC_REGNUM))]
2062 "TARGET_ARM"
2063 "#"
2064 "TARGET_ARM"
2065 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2066 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2067 (const_int 0)))
2068 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2069 (set (match_dup 0)
2070 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2071 (match_dup 0) (const_int 1)))]
2072 "
2073 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2074 "
2075 [(set_attr "conds" "clob")
2076 (set_attr "length" "8")
2077 (set_attr "type" "multiple")]
2078 )
2079
2080 (define_insn_and_split "*ite_ne_zeroextractsi"
2081 [(set (match_operand:SI 0 "s_register_operand" "=r")
2082 (if_then_else:SI (ne (zero_extract:SI
2083 (match_operand:SI 1 "s_register_operand" "r")
2084 (match_operand:SI 2 "const_int_operand" "n")
2085 (match_operand:SI 3 "const_int_operand" "n"))
2086 (const_int 0))
2087 (match_operand:SI 4 "arm_not_operand" "rIK")
2088 (const_int 0)))
2089 (clobber (reg:CC CC_REGNUM))]
2090 "TARGET_ARM
2091 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2092 && INTVAL (operands[2]) > 0
2093 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2094 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2095 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2096 "#"
2097 "TARGET_ARM
2098 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2099 && INTVAL (operands[2]) > 0
2100 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2101 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2102 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2103 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2104 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2105 (const_int 0)))
2106 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2107 (set (match_dup 0)
2108 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2109 (match_dup 0) (match_dup 4)))]
2110 "
2111 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2112 << INTVAL (operands[3]));
2113 "
2114 [(set_attr "conds" "clob")
2115 (set_attr "length" "8")
2116 (set_attr "type" "multiple")]
2117 )
2118
2119 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2120 [(set (match_operand:SI 0 "s_register_operand" "=r")
2121 (if_then_else:SI (ne (zero_extract:SI
2122 (match_operand:SI 1 "s_register_operand" "r")
2123 (match_operand:SI 2 "const_int_operand" "n")
2124 (const_int 0))
2125 (const_int 0))
2126 (match_operand:SI 3 "arm_not_operand" "rIK")
2127 (const_int 0)))
2128 (clobber (reg:CC CC_REGNUM))]
2129 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2130 "#"
2131 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2132 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2133 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2134 (const_int 0)))
2135 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2136 (set (match_dup 0)
2137 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2138 (match_dup 0) (match_dup 3)))]
2139 "
2140 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2141 "
2142 [(set_attr "conds" "clob")
2143 (set_attr "length" "8")
2144 (set_attr "type" "multiple")]
2145 )
2146
2147 ;; ??? Thumb-2 has bitfield insert/extract instructions; consider using them here.
2148 (define_split
2149 [(set (match_operand:SI 0 "s_register_operand" "")
2150 (match_operator:SI 1 "shiftable_operator"
2151 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2152 (match_operand:SI 3 "const_int_operand" "")
2153 (match_operand:SI 4 "const_int_operand" ""))
2154 (match_operand:SI 5 "s_register_operand" "")]))
2155 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2156 "TARGET_ARM"
2157 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2158 (set (match_dup 0)
2159 (match_op_dup 1
2160 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2161 (match_dup 5)]))]
2162 "{
2163 HOST_WIDE_INT temp = INTVAL (operands[3]);
2164
2165 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2166 operands[4] = GEN_INT (32 - temp);
2167 }"
2168 )
2169
2170 (define_split
2171 [(set (match_operand:SI 0 "s_register_operand" "")
2172 (match_operator:SI 1 "shiftable_operator"
2173 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2174 (match_operand:SI 3 "const_int_operand" "")
2175 (match_operand:SI 4 "const_int_operand" ""))
2176 (match_operand:SI 5 "s_register_operand" "")]))
2177 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2178 "TARGET_ARM"
2179 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2180 (set (match_dup 0)
2181 (match_op_dup 1
2182 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2183 (match_dup 5)]))]
2184 "{
2185 HOST_WIDE_INT temp = INTVAL (operands[3]);
2186
2187 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2188 operands[4] = GEN_INT (32 - temp);
2189 }"
2190 )
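;; Worked example for the two splitters above: with a field of width 8
;; starting at bit 4, operand 3 is 8 and operand 4 is 4, so the shift counts
;; become 32 - 8 - 4 = 20 and 32 - 8 = 24; the word is shifted left by 20 to
;; drop the bits above the field and then shifted right by 24 (logically for
;; zero_extract, arithmetically for sign_extract) to place the field at bit 0
;; with the appropriate extension.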
2191
2192 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2193 ;;; represented by the bitfield, then this will produce incorrect results.
2194 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2195 ;;; which have a real bit-field insert instruction, the truncation happens
2196 ;;; in the bit-field insert instruction itself. Since arm does not have a
2197 ;;; bit-field insert instruction, we would have to emit code here to truncate
2198 ;;; the value before we insert. This loses some of the advantage of having
2199 ;;; this insv pattern, so this pattern needs to be reevaluated.
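;;; To make that concrete (illustrative sketch only): inserting the value
;;; 0x1ff into an 8-bit field at bit 0 should store 0xff, but without an
;;; explicit truncation the stray ninth bit of the source value could leak
;;; into the bit just above the field.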
2200
2201 (define_expand "insv"
2202 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
2203 (match_operand 1 "general_operand")
2204 (match_operand 2 "general_operand"))
2205 (match_operand 3 "reg_or_int_operand"))]
2206 "TARGET_ARM || arm_arch_thumb2"
2207 "
2208 {
2209 int start_bit = INTVAL (operands[2]);
2210 int width = INTVAL (operands[1]);
2211 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
2212 rtx target, subtarget;
2213
2214 if (arm_arch_thumb2)
2215 {
2216 if (unaligned_access && MEM_P (operands[0])
2217 && s_register_operand (operands[3], GET_MODE (operands[3]))
2218 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2219 {
2220 rtx base_addr;
2221
2222 if (BYTES_BIG_ENDIAN)
2223 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2224 - start_bit;
2225
2226 if (width == 32)
2227 {
2228 base_addr = adjust_address (operands[0], SImode,
2229 start_bit / BITS_PER_UNIT);
2230 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2231 }
2232 else
2233 {
2234 rtx tmp = gen_reg_rtx (HImode);
2235
2236 base_addr = adjust_address (operands[0], HImode,
2237 start_bit / BITS_PER_UNIT);
2238 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2239 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2240 }
2241 DONE;
2242 }
2243 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2244 {
2245 bool use_bfi = TRUE;
2246
2247 if (CONST_INT_P (operands[3]))
2248 {
2249 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2250
2251 if (val == 0)
2252 {
2253 emit_insn (gen_insv_zero (operands[0], operands[1],
2254 operands[2]));
2255 DONE;
2256 }
2257
2258 /* See if the set can be done with a single orr instruction. */
2259 if (val == mask && const_ok_for_arm (val << start_bit))
2260 use_bfi = FALSE;
2261 }
2262
2263 if (use_bfi)
2264 {
2265 if (!REG_P (operands[3]))
2266 operands[3] = force_reg (SImode, operands[3]);
2267
2268 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2269 operands[3]));
2270 DONE;
2271 }
2272 }
2273 else
2274 FAIL;
2275 }
2276
2277 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
2278 FAIL;
2279
2280 target = copy_rtx (operands[0]);
2281 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2282 subreg as the final target. */
2283 if (GET_CODE (target) == SUBREG)
2284 {
2285 subtarget = gen_reg_rtx (SImode);
2286 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2287 < GET_MODE_SIZE (SImode))
2288 target = SUBREG_REG (target);
2289 }
2290 else
2291 subtarget = target;
2292
2293 if (CONST_INT_P (operands[3]))
2294 {
2295 /* Since we are inserting a known constant, we may be able to
2296 reduce the number of bits that we have to clear so that
2297 the mask becomes simple. */
2298 /* ??? This code does not check to see if the new mask is actually
2299 simpler. It may not be. */
2300 rtx op1 = gen_reg_rtx (SImode);
2301 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2302 start of this pattern. */
2303 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2304 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2305
2306 emit_insn (gen_andsi3 (op1, operands[0],
2307 gen_int_mode (~mask2, SImode)));
2308 emit_insn (gen_iorsi3 (subtarget, op1,
2309 gen_int_mode (op3_value << start_bit, SImode)));
2310 }
2311 else if (start_bit == 0
2312 && !(const_ok_for_arm (mask)
2313 || const_ok_for_arm (~mask)))
2314 {
2315 /* A Trick, since we are setting the bottom bits in the word,
2316 we can shift operand[3] up, operand[0] down, OR them together
2317 and rotate the result back again. This takes 3 insns, and
2318 the third might be mergeable into another op. */
2319 /* The shift up copes with the possibility that operand[3] is
2320 wider than the bitfield. */
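	  /* Worked example: for a 12-bit field at bit 0, the new value is
	     shifted left by 20 (which also truncates it to 12 bits), the old
	     word is shifted right by 12 to drop the field, the two are ORed
	     so the field sits in the top 12 bits, and the final rotate left
	     by 12 puts everything back in place.  */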
2321 rtx op0 = gen_reg_rtx (SImode);
2322 rtx op1 = gen_reg_rtx (SImode);
2323
2324 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2325 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2326 emit_insn (gen_iorsi3 (op1, op1, op0));
2327 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2328 }
2329 else if ((width + start_bit == 32)
2330 && !(const_ok_for_arm (mask)
2331 || const_ok_for_arm (~mask)))
2332 {
2333 /* Similar trick, but slightly less efficient. */
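	  /* Here the field occupies the top bits of the word
	     (width + start_bit == 32), so the new value is shifted into the
	     top bits, the old word is shifted left and then right by the
	     width to clear them, and a final OR combines the two; no closing
	     rotate is needed.  */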
2334
2335 rtx op0 = gen_reg_rtx (SImode);
2336 rtx op1 = gen_reg_rtx (SImode);
2337
2338 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2339 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2340 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2341 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2342 }
2343 else
2344 {
2345 rtx op0 = gen_int_mode (mask, SImode);
2346 rtx op1 = gen_reg_rtx (SImode);
2347 rtx op2 = gen_reg_rtx (SImode);
2348
2349 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2350 {
2351 rtx tmp = gen_reg_rtx (SImode);
2352
2353 emit_insn (gen_movsi (tmp, op0));
2354 op0 = tmp;
2355 }
2356
2357 /* Mask out any bits in operand[3] that are not needed. */
2358 emit_insn (gen_andsi3 (op1, operands[3], op0));
2359
2360 if (CONST_INT_P (op0)
2361 && (const_ok_for_arm (mask << start_bit)
2362 || const_ok_for_arm (~(mask << start_bit))))
2363 {
2364 op0 = gen_int_mode (~(mask << start_bit), SImode);
2365 emit_insn (gen_andsi3 (op2, operands[0], op0));
2366 }
2367 else
2368 {
2369 if (CONST_INT_P (op0))
2370 {
2371 rtx tmp = gen_reg_rtx (SImode);
2372
2373 emit_insn (gen_movsi (tmp, op0));
2374 op0 = tmp;
2375 }
2376
2377 if (start_bit != 0)
2378 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2379
2380 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2381 }
2382
2383 if (start_bit != 0)
2384 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2385
2386 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2387 }
2388
2389 if (subtarget != target)
2390 {
2391 /* If TARGET is still a SUBREG, then it must be wider than a word,
2392 so we must be careful only to set the subword we were asked to. */
2393 if (GET_CODE (target) == SUBREG)
2394 emit_move_insn (target, subtarget);
2395 else
2396 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2397 }
2398
2399 DONE;
2400 }"
2401 )
2402
2403 (define_insn "insv_zero"
2404 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2405 (match_operand:SI 1 "const_int_M_operand" "M")
2406 (match_operand:SI 2 "const_int_M_operand" "M"))
2407 (const_int 0))]
2408 "arm_arch_thumb2"
2409 "bfc%?\t%0, %2, %1"
2410 [(set_attr "length" "4")
2411 (set_attr "predicable" "yes")
2412 (set_attr "type" "bfm")]
2413 )
2414
2415 (define_insn "insv_t2"
2416 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2417 (match_operand:SI 1 "const_int_M_operand" "M")
2418 (match_operand:SI 2 "const_int_M_operand" "M"))
2419 (match_operand:SI 3 "s_register_operand" "r"))]
2420 "arm_arch_thumb2"
2421 "bfi%?\t%0, %3, %2, %1"
2422 [(set_attr "length" "4")
2423 (set_attr "predicable" "yes")
2424 (set_attr "type" "bfm")]
2425 )
2426
2427 (define_insn "andsi_notsi_si"
2428 [(set (match_operand:SI 0 "s_register_operand" "=r")
2429 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2430 (match_operand:SI 1 "s_register_operand" "r")))]
2431 "TARGET_32BIT"
2432 "bic%?\\t%0, %1, %2"
2433 [(set_attr "predicable" "yes")
2434 (set_attr "type" "logic_reg")]
2435 )
2436
2437 (define_insn "andsi_not_shiftsi_si"
2438 [(set (match_operand:SI 0 "s_register_operand" "=r")
2439 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2440 [(match_operand:SI 2 "s_register_operand" "r")
2441 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2442 (match_operand:SI 1 "s_register_operand" "r")))]
2443 "TARGET_ARM"
2444 "bic%?\\t%0, %1, %2%S4"
2445 [(set_attr "predicable" "yes")
2446 (set_attr "shift" "2")
2447 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2448 (const_string "logic_shift_imm")
2449 (const_string "logic_shift_reg")))]
2450 )
2451
2452 ;; Shifted bics pattern used to set up the CC status register without reusing
2453 ;; the bics output.  The pattern restricts the shift operand to a constant on
2454 ;; Thumb-2, since Thumb-2 bics does not support a register-specified shift.
2455 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
2456 [(set (reg:CC_NOOV CC_REGNUM)
2457 (compare:CC_NOOV
2458 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2459 [(match_operand:SI 1 "s_register_operand" "r")
2460 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2461 (match_operand:SI 3 "s_register_operand" "r"))
2462 (const_int 0)))
2463 (clobber (match_scratch:SI 4 "=r"))]
2464 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2465 "bics%?\\t%4, %3, %1%S0"
2466 [(set_attr "predicable" "yes")
2467 (set_attr "conds" "set")
2468 (set_attr "shift" "1")
2469 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2470 (const_string "logic_shift_imm")
2471 (const_string "logic_shift_reg")))]
2472 )
2473
2474 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the bics result is also
2475 ;; reused later.
2476 (define_insn "andsi_not_shiftsi_si_scc"
2477 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2478 (compare:CC_NOOV
2479 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2480 [(match_operand:SI 1 "s_register_operand" "r")
2481 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2482 (match_operand:SI 3 "s_register_operand" "r"))
2483 (const_int 0)))
2484 (set (match_operand:SI 4 "s_register_operand" "=r")
2485 (and:SI (not:SI (match_op_dup 0
2486 [(match_dup 1)
2487 (match_dup 2)]))
2488 (match_dup 3)))])]
2489 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2490 "bics%?\\t%4, %3, %1%S0"
2491 [(set_attr "predicable" "yes")
2492 (set_attr "conds" "set")
2493 (set_attr "shift" "1")
2494 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2495 (const_string "logic_shift_imm")
2496 (const_string "logic_shift_reg")))]
2497 )
2498
2499 (define_insn "*andsi_notsi_si_compare0"
2500 [(set (reg:CC_NOOV CC_REGNUM)
2501 (compare:CC_NOOV
2502 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2503 (match_operand:SI 1 "s_register_operand" "r"))
2504 (const_int 0)))
2505 (set (match_operand:SI 0 "s_register_operand" "=r")
2506 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2507 "TARGET_32BIT"
2508 "bics\\t%0, %1, %2"
2509 [(set_attr "conds" "set")
2510 (set_attr "type" "logics_shift_reg")]
2511 )
2512
2513 (define_insn "*andsi_notsi_si_compare0_scratch"
2514 [(set (reg:CC_NOOV CC_REGNUM)
2515 (compare:CC_NOOV
2516 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2517 (match_operand:SI 1 "s_register_operand" "r"))
2518 (const_int 0)))
2519 (clobber (match_scratch:SI 0 "=r"))]
2520 "TARGET_32BIT"
2521 "bics\\t%0, %1, %2"
2522 [(set_attr "conds" "set")
2523 (set_attr "type" "logics_shift_reg")]
2524 )
2525
2526 (define_expand "iorsi3"
2527 [(set (match_operand:SI 0 "s_register_operand")
2528 (ior:SI (match_operand:SI 1 "s_register_operand")
2529 (match_operand:SI 2 "reg_or_int_operand")))]
2530 "TARGET_EITHER"
2531 "
2532 if (CONST_INT_P (operands[2]))
2533 {
2534 if (TARGET_32BIT)
2535 {
2536 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
2537 operands[2] = force_reg (SImode, operands[2]);
2538 else
2539 {
2540 arm_split_constant (IOR, SImode, NULL_RTX,
2541 INTVAL (operands[2]), operands[0],
2542 operands[1],
2543 optimize && can_create_pseudo_p ());
2544 DONE;
2545 }
2546 }
2547 else /* TARGET_THUMB1 */
2548 {
2549 rtx tmp = force_reg (SImode, operands[2]);
2550 if (rtx_equal_p (operands[0], operands[1]))
2551 operands[2] = tmp;
2552 else
2553 {
2554 operands[2] = operands[1];
2555 operands[1] = tmp;
2556 }
2557 }
2558 }
2559 "
2560 )
2561
2562 (define_insn_and_split "*iorsi3_insn"
2563 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2564 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2565 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2566 "TARGET_32BIT"
2567 "@
2568 orr%?\\t%0, %1, %2
2569 orr%?\\t%0, %1, %2
2570 orn%?\\t%0, %1, #%B2
2571 orr%?\\t%0, %1, %2
2572 #"
2573 "TARGET_32BIT
2574 && CONST_INT_P (operands[2])
2575 && !(const_ok_for_arm (INTVAL (operands[2]))
2576 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2577 [(clobber (const_int 0))]
2578 {
2579 arm_split_constant (IOR, SImode, curr_insn,
2580 INTVAL (operands[2]), operands[0], operands[1], 0);
2581 DONE;
2582 }
2583 [(set_attr "length" "4,4,4,4,16")
2584 (set_attr "arch" "32,t2,t2,32,32")
2585 (set_attr "predicable" "yes")
2586 (set_attr "predicable_short_it" "no,yes,no,no,no")
2587 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
2588 )
2589
2590 (define_peephole2
2591 [(match_scratch:SI 3 "r")
2592 (set (match_operand:SI 0 "arm_general_register_operand" "")
2593 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2594 (match_operand:SI 2 "const_int_operand" "")))]
2595 "TARGET_ARM
2596 && !const_ok_for_arm (INTVAL (operands[2]))
2597 && const_ok_for_arm (~INTVAL (operands[2]))"
2598 [(set (match_dup 3) (match_dup 2))
2599 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2600 ""
2601 )
2602
2603 (define_insn "*iorsi3_compare0"
2604 [(set (reg:CC_NOOV CC_REGNUM)
2605 (compare:CC_NOOV
2606 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2607 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2608 (const_int 0)))
2609 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
2610 (ior:SI (match_dup 1) (match_dup 2)))]
2611 "TARGET_32BIT"
2612 "orrs%?\\t%0, %1, %2"
2613 [(set_attr "conds" "set")
2614 (set_attr "arch" "*,t2,*")
2615 (set_attr "length" "4,2,4")
2616 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
2617 )
2618
2619 (define_insn "*iorsi3_compare0_scratch"
2620 [(set (reg:CC_NOOV CC_REGNUM)
2621 (compare:CC_NOOV
2622 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2623 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2624 (const_int 0)))
2625 (clobber (match_scratch:SI 0 "=r,l,r"))]
2626 "TARGET_32BIT"
2627 "orrs%?\\t%0, %1, %2"
2628 [(set_attr "conds" "set")
2629 (set_attr "arch" "*,t2,*")
2630 (set_attr "length" "4,2,4")
2631 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
2632 )
2633
2634 (define_expand "xorsi3"
2635 [(set (match_operand:SI 0 "s_register_operand")
2636 (xor:SI (match_operand:SI 1 "s_register_operand")
2637 (match_operand:SI 2 "reg_or_int_operand")))]
2638 "TARGET_EITHER"
2639 "if (CONST_INT_P (operands[2]))
2640 {
2641 if (TARGET_32BIT)
2642 {
2643 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
2644 operands[2] = force_reg (SImode, operands[2]);
2645 else
2646 {
2647 arm_split_constant (XOR, SImode, NULL_RTX,
2648 INTVAL (operands[2]), operands[0],
2649 operands[1],
2650 optimize && can_create_pseudo_p ());
2651 DONE;
2652 }
2653 }
2654 else /* TARGET_THUMB1 */
2655 {
2656 rtx tmp = force_reg (SImode, operands[2]);
2657 if (rtx_equal_p (operands[0], operands[1]))
2658 operands[2] = tmp;
2659 else
2660 {
2661 operands[2] = operands[1];
2662 operands[1] = tmp;
2663 }
2664 }
2665 }"
2666 )
2667
2668 (define_insn_and_split "*arm_xorsi3"
2669 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
2670 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
2671 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
2672 "TARGET_32BIT"
2673 "@
2674 eor%?\\t%0, %1, %2
2675 eor%?\\t%0, %1, %2
2676 eor%?\\t%0, %1, %2
2677 #"
2678 "TARGET_32BIT
2679 && CONST_INT_P (operands[2])
2680 && !const_ok_for_arm (INTVAL (operands[2]))"
2681 [(clobber (const_int 0))]
2682 {
2683 arm_split_constant (XOR, SImode, curr_insn,
2684 INTVAL (operands[2]), operands[0], operands[1], 0);
2685 DONE;
2686 }
2687 [(set_attr "length" "4,4,4,16")
2688 (set_attr "predicable" "yes")
2689 (set_attr "predicable_short_it" "no,yes,no,no")
2690 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
2691 )
2692
2693 (define_insn "*xorsi3_compare0"
2694 [(set (reg:CC_NOOV CC_REGNUM)
2695 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
2696 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
2697 (const_int 0)))
2698 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2699 (xor:SI (match_dup 1) (match_dup 2)))]
2700 "TARGET_32BIT"
2701 "eors%?\\t%0, %1, %2"
2702 [(set_attr "conds" "set")
2703 (set_attr "type" "logics_imm,logics_reg")]
2704 )
2705
2706 (define_insn "*xorsi3_compare0_scratch"
2707 [(set (reg:CC_NOOV CC_REGNUM)
2708 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
2709 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
2710 (const_int 0)))]
2711 "TARGET_32BIT"
2712 "teq%?\\t%0, %1"
2713 [(set_attr "conds" "set")
2714 (set_attr "type" "logics_imm,logics_reg")]
2715 )
2716
2717 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C)
2718 ; followed by (NOT D), we can sometimes merge the final NOT into one of the
2719 ; following insns.
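; (Equivalently, by De Morgan: (~A & ~B) | C = ~(A | B) | C = ~((A | B) & ~C),
;  so computing D = (A | B) & ~C and then negating D yields the same value.)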
2720
2721 (define_split
2722 [(set (match_operand:SI 0 "s_register_operand" "")
2723 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2724 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2725 (match_operand:SI 3 "arm_rhs_operand" "")))
2726 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2727 "TARGET_32BIT"
2728 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2729 (not:SI (match_dup 3))))
2730 (set (match_dup 0) (not:SI (match_dup 4)))]
2731 ""
2732 )
2733
2734 (define_insn_and_split "*andsi_iorsi3_notsi"
2735 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2736 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
2737 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2738 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2739 "TARGET_32BIT"
2740 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2741 "&& reload_completed"
2742 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
2743 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
2744 {
2745 /* If operands[3] is a constant make sure to fold the NOT into it
2746 to avoid creating a NOT of a CONST_INT. */
2747 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
2748 if (CONST_INT_P (not_rtx))
2749 {
2750 operands[4] = operands[0];
2751 operands[5] = not_rtx;
2752 }
2753 else
2754 {
2755 operands[5] = operands[0];
2756 operands[4] = not_rtx;
2757 }
2758 }
2759 [(set_attr "length" "8")
2760 (set_attr "ce_count" "2")
2761 (set_attr "predicable" "yes")
2762 (set_attr "type" "multiple")]
2763 )
2764
2765 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2766 ; insns are available?
2767 (define_split
2768 [(set (match_operand:SI 0 "s_register_operand" "")
2769 (match_operator:SI 1 "logical_binary_operator"
2770 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2771 (match_operand:SI 3 "const_int_operand" "")
2772 (match_operand:SI 4 "const_int_operand" ""))
2773 (match_operator:SI 9 "logical_binary_operator"
2774 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2775 (match_operand:SI 6 "const_int_operand" ""))
2776 (match_operand:SI 7 "s_register_operand" "")])]))
2777 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2778 "TARGET_32BIT
2779 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2780 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2781 [(set (match_dup 8)
2782 (match_op_dup 1
2783 [(ashift:SI (match_dup 2) (match_dup 4))
2784 (match_dup 5)]))
2785 (set (match_dup 0)
2786 (match_op_dup 1
2787 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2788 (match_dup 7)]))]
2789 "
2790 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2791 ")
2792
2793 (define_split
2794 [(set (match_operand:SI 0 "s_register_operand" "")
2795 (match_operator:SI 1 "logical_binary_operator"
2796 [(match_operator:SI 9 "logical_binary_operator"
2797 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2798 (match_operand:SI 6 "const_int_operand" ""))
2799 (match_operand:SI 7 "s_register_operand" "")])
2800 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2801 (match_operand:SI 3 "const_int_operand" "")
2802 (match_operand:SI 4 "const_int_operand" ""))]))
2803 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2804 "TARGET_32BIT
2805 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2806 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2807 [(set (match_dup 8)
2808 (match_op_dup 1
2809 [(ashift:SI (match_dup 2) (match_dup 4))
2810 (match_dup 5)]))
2811 (set (match_dup 0)
2812 (match_op_dup 1
2813 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2814 (match_dup 7)]))]
2815 "
2816 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2817 ")
2818
2819 (define_split
2820 [(set (match_operand:SI 0 "s_register_operand" "")
2821 (match_operator:SI 1 "logical_binary_operator"
2822 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2823 (match_operand:SI 3 "const_int_operand" "")
2824 (match_operand:SI 4 "const_int_operand" ""))
2825 (match_operator:SI 9 "logical_binary_operator"
2826 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2827 (match_operand:SI 6 "const_int_operand" ""))
2828 (match_operand:SI 7 "s_register_operand" "")])]))
2829 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2830 "TARGET_32BIT
2831 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2832 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2833 [(set (match_dup 8)
2834 (match_op_dup 1
2835 [(ashift:SI (match_dup 2) (match_dup 4))
2836 (match_dup 5)]))
2837 (set (match_dup 0)
2838 (match_op_dup 1
2839 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2840 (match_dup 7)]))]
2841 "
2842 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2843 ")
2844
2845 (define_split
2846 [(set (match_operand:SI 0 "s_register_operand" "")
2847 (match_operator:SI 1 "logical_binary_operator"
2848 [(match_operator:SI 9 "logical_binary_operator"
2849 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2850 (match_operand:SI 6 "const_int_operand" ""))
2851 (match_operand:SI 7 "s_register_operand" "")])
2852 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2853 (match_operand:SI 3 "const_int_operand" "")
2854 (match_operand:SI 4 "const_int_operand" ""))]))
2855 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2856 "TARGET_32BIT
2857 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2858 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2859 [(set (match_dup 8)
2860 (match_op_dup 1
2861 [(ashift:SI (match_dup 2) (match_dup 4))
2862 (match_dup 5)]))
2863 (set (match_dup 0)
2864 (match_op_dup 1
2865 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2866 (match_dup 7)]))]
2867 "
2868 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2869 ")
2870 \f
2871
2872 ;; Minimum and maximum insns
2873
2874 (define_expand "smaxsi3"
2875 [(parallel [
2876 (set (match_operand:SI 0 "s_register_operand")
2877 (smax:SI (match_operand:SI 1 "s_register_operand")
2878 (match_operand:SI 2 "arm_rhs_operand")))
2879 (clobber (reg:CC CC_REGNUM))])]
2880 "TARGET_32BIT"
2881 "
2882 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2883 {
2884 /* No need for a clobber of the condition code register here. */
2885 emit_insn (gen_rtx_SET (operands[0],
2886 gen_rtx_SMAX (SImode, operands[1],
2887 operands[2])));
2888 DONE;
2889 }
2890 ")
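;; Informal note (the patterns below are authoritative): smax against 0 or -1
;; needs no condition-code clobber because each maps onto a single
;; flag-preserving instruction.  "bic %0, %1, %1, asr #31" ANDs the value with
;; the complement of its sign bit replicated across the word, so a negative
;; value becomes 0 and a non-negative one is unchanged; the "orr" form with
;; the replicated sign bit gives smax against -1 in the same way.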
2891
2892 (define_insn "*smax_0"
2893 [(set (match_operand:SI 0 "s_register_operand" "=r")
2894 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2895 (const_int 0)))]
2896 "TARGET_32BIT"
2897 "bic%?\\t%0, %1, %1, asr #31"
2898 [(set_attr "predicable" "yes")
2899 (set_attr "type" "logic_shift_reg")]
2900 )
2901
2902 (define_insn "*smax_m1"
2903 [(set (match_operand:SI 0 "s_register_operand" "=r")
2904 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2905 (const_int -1)))]
2906 "TARGET_32BIT"
2907 "orr%?\\t%0, %1, %1, asr #31"
2908 [(set_attr "predicable" "yes")
2909 (set_attr "type" "logic_shift_reg")]
2910 )
2911
2912 (define_insn_and_split "*arm_smax_insn"
2913 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2914 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2915 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2916 (clobber (reg:CC CC_REGNUM))]
2917 "TARGET_ARM"
2918 "#"
2919 ; cmp\\t%1, %2\;movlt\\t%0, %2
2920 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
2921 "TARGET_ARM"
2922 [(set (reg:CC CC_REGNUM)
2923 (compare:CC (match_dup 1) (match_dup 2)))
2924 (set (match_dup 0)
2925 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
2926 (match_dup 1)
2927 (match_dup 2)))]
2928 ""
2929 [(set_attr "conds" "clob")
2930 (set_attr "length" "8,12")
2931 (set_attr "type" "multiple")]
2932 )
2933
2934 (define_expand "sminsi3"
2935 [(parallel [
2936 (set (match_operand:SI 0 "s_register_operand")
2937 (smin:SI (match_operand:SI 1 "s_register_operand")
2938 (match_operand:SI 2 "arm_rhs_operand")))
2939 (clobber (reg:CC CC_REGNUM))])]
2940 "TARGET_32BIT"
2941 "
2942 if (operands[2] == const0_rtx)
2943 {
2944 /* No need for a clobber of the condition code register here. */
2945 emit_insn (gen_rtx_SET (operands[0],
2946 gen_rtx_SMIN (SImode, operands[1],
2947 operands[2])));
2948 DONE;
2949 }
2950 ")
2951
2952 (define_insn "*smin_0"
2953 [(set (match_operand:SI 0 "s_register_operand" "=r")
2954 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
2955 (const_int 0)))]
2956 "TARGET_32BIT"
2957 "and%?\\t%0, %1, %1, asr #31"
2958 [(set_attr "predicable" "yes")
2959 (set_attr "type" "logic_shift_reg")]
2960 )
2961
2962 (define_insn_and_split "*arm_smin_insn"
2963 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2964 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2965 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2966 (clobber (reg:CC CC_REGNUM))]
2967 "TARGET_ARM"
2968 "#"
2969 ; cmp\\t%1, %2\;movge\\t%0, %2
2970 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
2971 "TARGET_ARM"
2972 [(set (reg:CC CC_REGNUM)
2973 (compare:CC (match_dup 1) (match_dup 2)))
2974 (set (match_dup 0)
2975 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
2976 (match_dup 1)
2977 (match_dup 2)))]
2978 ""
2979 [(set_attr "conds" "clob")
2980 (set_attr "length" "8,12")
2981 (set_attr "type" "multiple,multiple")]
2982 )
2983
2984 (define_expand "umaxsi3"
2985 [(parallel [
2986 (set (match_operand:SI 0 "s_register_operand")
2987 (umax:SI (match_operand:SI 1 "s_register_operand")
2988 (match_operand:SI 2 "arm_rhs_operand")))
2989 (clobber (reg:CC CC_REGNUM))])]
2990 "TARGET_32BIT"
2991 ""
2992 )
2993
2994 (define_insn_and_split "*arm_umaxsi3"
2995 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2996 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
2997 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
2998 (clobber (reg:CC CC_REGNUM))]
2999 "TARGET_ARM"
3000 "#"
3001 ; cmp\\t%1, %2\;movcc\\t%0, %2
3002 ; cmp\\t%1, %2\;movcs\\t%0, %1
3003 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3004 "TARGET_ARM"
3005 [(set (reg:CC CC_REGNUM)
3006 (compare:CC (match_dup 1) (match_dup 2)))
3007 (set (match_dup 0)
3008 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
3009 (match_dup 1)
3010 (match_dup 2)))]
3011 ""
3012 [(set_attr "conds" "clob")
3013 (set_attr "length" "8,8,12")
3014 (set_attr "type" "store_4")]
3015 )
3016
3017 (define_expand "uminsi3"
3018 [(parallel [
3019 (set (match_operand:SI 0 "s_register_operand")
3020 (umin:SI (match_operand:SI 1 "s_register_operand")
3021 (match_operand:SI 2 "arm_rhs_operand")))
3022 (clobber (reg:CC CC_REGNUM))])]
3023 "TARGET_32BIT"
3024 ""
3025 )
3026
3027 (define_insn_and_split "*arm_uminsi3"
3028 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3029 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3030 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3031 (clobber (reg:CC CC_REGNUM))]
3032 "TARGET_ARM"
3033 "#"
3034 ; cmp\\t%1, %2\;movcs\\t%0, %2
3035 ; cmp\\t%1, %2\;movcc\\t%0, %1
3036 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3037 "TARGET_ARM"
3038 [(set (reg:CC CC_REGNUM)
3039 (compare:CC (match_dup 1) (match_dup 2)))
3040 (set (match_dup 0)
3041 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
3042 (match_dup 1)
3043 (match_dup 2)))]
3044 ""
3045 [(set_attr "conds" "clob")
3046 (set_attr "length" "8,8,12")
3047 (set_attr "type" "store_4")]
3048 )
3049
3050 (define_insn "*store_minmaxsi"
3051 [(set (match_operand:SI 0 "memory_operand" "=m")
3052 (match_operator:SI 3 "minmax_operator"
3053 [(match_operand:SI 1 "s_register_operand" "r")
3054 (match_operand:SI 2 "s_register_operand" "r")]))
3055 (clobber (reg:CC CC_REGNUM))]
3056 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
3057 "*
3058 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3059 operands[1], operands[2]);
3060 output_asm_insn (\"cmp\\t%1, %2\", operands);
3061 if (TARGET_THUMB2)
3062 output_asm_insn (\"ite\t%d3\", operands);
3063 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3064 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3065 return \"\";
3066 "
3067 [(set_attr "conds" "clob")
3068 (set (attr "length")
3069 (if_then_else (eq_attr "is_thumb" "yes")
3070 (const_int 14)
3071 (const_int 12)))
3072 (set_attr "type" "store_4")]
3073 )
3074
3075 ; Reject the frame pointer in operand[1], since reloading this after
3076 ; it has been eliminated can cause carnage.
3077 (define_insn "*minmax_arithsi"
3078 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3079 (match_operator:SI 4 "shiftable_operator"
3080 [(match_operator:SI 5 "minmax_operator"
3081 [(match_operand:SI 2 "s_register_operand" "r,r")
3082 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3083 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3084 (clobber (reg:CC CC_REGNUM))]
3085 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
3086 "*
3087 {
3088 enum rtx_code code = GET_CODE (operands[4]);
3089 bool need_else;
3090
3091 if (which_alternative != 0 || operands[3] != const0_rtx
3092 || (code != PLUS && code != IOR && code != XOR))
3093 need_else = true;
3094 else
3095 need_else = false;
3096
3097 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3098 operands[2], operands[3]);
3099 output_asm_insn (\"cmp\\t%2, %3\", operands);
3100 if (TARGET_THUMB2)
3101 {
3102 if (need_else)
3103 output_asm_insn (\"ite\\t%d5\", operands);
3104 else
3105 output_asm_insn (\"it\\t%d5\", operands);
3106 }
3107 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3108 if (need_else)
3109 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3110 return \"\";
3111 }"
3112 [(set_attr "conds" "clob")
3113 (set (attr "length")
3114 (if_then_else (eq_attr "is_thumb" "yes")
3115 (const_int 14)
3116 (const_int 12)))
3117 (set_attr "type" "multiple")]
3118 )
3119
3120 ; Reject the frame pointer in operand[1], since reloading this after
3121 ; it has been eliminated can cause carnage.
3122 (define_insn_and_split "*minmax_arithsi_non_canon"
3123 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
3124 (minus:SI
3125 (match_operand:SI 1 "s_register_operand" "0,?Ts")
3126 (match_operator:SI 4 "minmax_operator"
3127 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
3128 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
3129 (clobber (reg:CC CC_REGNUM))]
3130 "TARGET_32BIT && !arm_eliminable_register (operands[1])
3131 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
3132 "#"
3133 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
3134 [(set (reg:CC CC_REGNUM)
3135 (compare:CC (match_dup 2) (match_dup 3)))
3136
3137 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
3138 (set (match_dup 0)
3139 (minus:SI (match_dup 1)
3140 (match_dup 2))))
3141 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
3142 (set (match_dup 0)
3143 (match_dup 6)))]
3144 {
3145 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
3146 operands[2], operands[3]);
3147 enum rtx_code rc = minmax_code (operands[4]);
3148 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
3149 operands[2], operands[3]);
3150
3151 if (mode == CCFPmode || mode == CCFPEmode)
3152 rc = reverse_condition_maybe_unordered (rc);
3153 else
3154 rc = reverse_condition (rc);
3155 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
3156 if (CONST_INT_P (operands[3]))
3157 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
3158 else
3159 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
3160 }
3161 [(set_attr "conds" "clob")
3162 (set (attr "length")
3163 (if_then_else (eq_attr "is_thumb" "yes")
3164 (const_int 14)
3165 (const_int 12)))
3166 (set_attr "type" "multiple")]
3167 )
3168
3169 (define_code_iterator SAT [smin smax])
3170 (define_code_attr SATrev [(smin "smax") (smax "smin")])
3171 (define_code_attr SATlo [(smin "1") (smax "2")])
3172 (define_code_attr SAThi [(smin "2") (smax "1")])
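;; Here SAT iterates over smin and smax, <SATrev> names the opposite code, and
;; <SATlo>/<SAThi> select which of operands 1 and 2 holds the lower and upper
;; bound for arm_sat_operator_match, so one pattern covers both the
;; smin (smax (x, lo), hi) and smax (smin (x, hi), lo) orderings.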
3173
3174 (define_insn "*satsi_<SAT:code>"
3175 [(set (match_operand:SI 0 "s_register_operand" "=r")
3176 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
3177 (match_operand:SI 1 "const_int_operand" "i"))
3178 (match_operand:SI 2 "const_int_operand" "i")))]
3179 "TARGET_32BIT && arm_arch6
3180 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3181 {
3182 int mask;
3183 bool signed_sat;
3184 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3185 &mask, &signed_sat))
3186 gcc_unreachable ();
3187
3188 operands[1] = GEN_INT (mask);
3189 if (signed_sat)
3190 return "ssat%?\t%0, %1, %3";
3191 else
3192 return "usat%?\t%0, %1, %3";
3193 }
3194 [(set_attr "predicable" "yes")
3195 (set_attr "type" "alus_imm")]
3196 )
3197
3198 (define_insn "*satsi_<SAT:code>_shift"
3199 [(set (match_operand:SI 0 "s_register_operand" "=r")
3200 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
3201 [(match_operand:SI 4 "s_register_operand" "r")
3202 (match_operand:SI 5 "const_int_operand" "i")])
3203 (match_operand:SI 1 "const_int_operand" "i"))
3204 (match_operand:SI 2 "const_int_operand" "i")))]
3205 "TARGET_32BIT && arm_arch6
3206 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3207 {
3208 int mask;
3209 bool signed_sat;
3210 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3211 &mask, &signed_sat))
3212 gcc_unreachable ();
3213
3214 operands[1] = GEN_INT (mask);
3215 if (signed_sat)
3216 return "ssat%?\t%0, %1, %4%S3";
3217 else
3218 return "usat%?\t%0, %1, %4%S3";
3219 }
3220 [(set_attr "predicable" "yes")
3221 (set_attr "shift" "3")
3222 (set_attr "type" "logic_shift_reg")])
3223 \f
3224 ;; Shift and rotation insns
3225
3226 (define_expand "ashldi3"
3227 [(set (match_operand:DI 0 "s_register_operand")
3228 (ashift:DI (match_operand:DI 1 "s_register_operand")
3229 (match_operand:SI 2 "reg_or_int_operand")))]
3230 "TARGET_32BIT"
3231 "
3232 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
3233 operands[2], gen_reg_rtx (SImode),
3234 gen_reg_rtx (SImode));
3235 DONE;
3236 ")
3237
3238 (define_expand "ashlsi3"
3239 [(set (match_operand:SI 0 "s_register_operand")
3240 (ashift:SI (match_operand:SI 1 "s_register_operand")
3241 (match_operand:SI 2 "arm_rhs_operand")))]
3242 "TARGET_EITHER"
3243 "
3244 if (CONST_INT_P (operands[2])
3245 && (UINTVAL (operands[2])) > 31)
3246 {
3247 emit_insn (gen_movsi (operands[0], const0_rtx));
3248 DONE;
3249 }
3250 "
3251 )
3252
3253 (define_expand "ashrdi3"
3254 [(set (match_operand:DI 0 "s_register_operand")
3255 (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
3256 (match_operand:SI 2 "reg_or_int_operand")))]
3257 "TARGET_32BIT"
3258 "
3259 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
3260 operands[2], gen_reg_rtx (SImode),
3261 gen_reg_rtx (SImode));
3262 DONE;
3263 ")
3264
3265 (define_expand "ashrsi3"
3266 [(set (match_operand:SI 0 "s_register_operand")
3267 (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
3268 (match_operand:SI 2 "arm_rhs_operand")))]
3269 "TARGET_EITHER"
3270 "
3271 if (CONST_INT_P (operands[2])
3272 && UINTVAL (operands[2]) > 31)
3273 operands[2] = GEN_INT (31);
3274 "
3275 )
3276
3277 (define_expand "lshrdi3"
3278 [(set (match_operand:DI 0 "s_register_operand")
3279 (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
3280 (match_operand:SI 2 "reg_or_int_operand")))]
3281 "TARGET_32BIT"
3282 "
3283 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
3284 operands[2], gen_reg_rtx (SImode),
3285 gen_reg_rtx (SImode));
3286 DONE;
3287 ")
3288
3289 (define_expand "lshrsi3"
3290 [(set (match_operand:SI 0 "s_register_operand")
3291 (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
3292 (match_operand:SI 2 "arm_rhs_operand")))]
3293 "TARGET_EITHER"
3294 "
3295 if (CONST_INT_P (operands[2])
3296 && (UINTVAL (operands[2])) > 31)
3297 {
3298 emit_insn (gen_movsi (operands[0], const0_rtx));
3299 DONE;
3300 }
3301 "
3302 )
3303
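;; ARM has no rotate-left instruction, so rotlsi3 below is implemented as a
;; rotate right: a constant count n becomes a rotate by (32 - n) % 32, and a
;; variable count is handled by first computing 32 - n into a scratch
;; register.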
3304 (define_expand "rotlsi3"
3305 [(set (match_operand:SI 0 "s_register_operand")
3306 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3307 (match_operand:SI 2 "reg_or_int_operand")))]
3308 "TARGET_32BIT"
3309 "
3310 if (CONST_INT_P (operands[2]))
3311 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3312 else
3313 {
3314 rtx reg = gen_reg_rtx (SImode);
3315 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3316 operands[2] = reg;
3317 }
3318 "
3319 )
3320
3321 (define_expand "rotrsi3"
3322 [(set (match_operand:SI 0 "s_register_operand")
3323 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3324 (match_operand:SI 2 "arm_rhs_operand")))]
3325 "TARGET_EITHER"
3326 "
3327 if (TARGET_32BIT)
3328 {
3329 if (CONST_INT_P (operands[2])
3330 && UINTVAL (operands[2]) > 31)
3331 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3332 }
3333 else /* TARGET_THUMB1 */
3334 {
3335 if (CONST_INT_P (operands[2]))
3336 operands[2] = force_reg (SImode, operands[2]);
3337 }
3338 "
3339 )
3340
3341 (define_insn "*arm_shiftsi3"
3342 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
3343 (match_operator:SI 3 "shift_operator"
3344 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
3345 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
3346 "TARGET_32BIT"
3347 "* return arm_output_shift(operands, 0);"
3348 [(set_attr "predicable" "yes")
3349 (set_attr "arch" "t2,t2,*,*")
3350 (set_attr "predicable_short_it" "yes,yes,no,no")
3351 (set_attr "length" "4")
3352 (set_attr "shift" "1")
3353 (set_attr "type" "alu_shift_reg,alu_shift_imm,alu_shift_imm,alu_shift_reg")]
3354 )
3355
3356 (define_insn "*shiftsi3_compare0"
3357 [(set (reg:CC_NOOV CC_REGNUM)
3358 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3359 [(match_operand:SI 1 "s_register_operand" "r,r")
3360 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3361 (const_int 0)))
3362 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3363 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3364 "TARGET_32BIT"
3365 "* return arm_output_shift(operands, 1);"
3366 [(set_attr "conds" "set")
3367 (set_attr "shift" "1")
3368 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
3369 )
3370
3371 (define_insn "*shiftsi3_compare0_scratch"
3372 [(set (reg:CC_NOOV CC_REGNUM)
3373 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3374 [(match_operand:SI 1 "s_register_operand" "r,r")
3375 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3376 (const_int 0)))
3377 (clobber (match_scratch:SI 0 "=r,r"))]
3378 "TARGET_32BIT"
3379 "* return arm_output_shift(operands, 1);"
3380 [(set_attr "conds" "set")
3381 (set_attr "shift" "1")
3382 (set_attr "type" "shift_imm,shift_reg")]
3383 )
3384
3385 (define_insn "*not_shiftsi"
3386 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3387 (not:SI (match_operator:SI 3 "shift_operator"
3388 [(match_operand:SI 1 "s_register_operand" "r,r")
3389 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3390 "TARGET_32BIT"
3391 "mvn%?\\t%0, %1%S3"
3392 [(set_attr "predicable" "yes")
3393 (set_attr "shift" "1")
3394 (set_attr "arch" "32,a")
3395 (set_attr "type" "mvn_shift,mvn_shift_reg")])
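;; For illustration (a sketch, not from the sources): this pattern folds a
;; shift and a bitwise NOT into a single MVN, so C code such as
;;   int f (int x) { return ~(x << 3); }
;; can be emitted as "mvn r0, r0, lsl #3".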
3396
3397 (define_insn "*not_shiftsi_compare0"
3398 [(set (reg:CC_NOOV CC_REGNUM)
3399 (compare:CC_NOOV
3400 (not:SI (match_operator:SI 3 "shift_operator"
3401 [(match_operand:SI 1 "s_register_operand" "r,r")
3402 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3403 (const_int 0)))
3404 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3405 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3406 "TARGET_32BIT"
3407 "mvns%?\\t%0, %1%S3"
3408 [(set_attr "conds" "set")
3409 (set_attr "shift" "1")
3410 (set_attr "arch" "32,a")
3411 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3412
3413 (define_insn "*not_shiftsi_compare0_scratch"
3414 [(set (reg:CC_NOOV CC_REGNUM)
3415 (compare:CC_NOOV
3416 (not:SI (match_operator:SI 3 "shift_operator"
3417 [(match_operand:SI 1 "s_register_operand" "r,r")
3418 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3419 (const_int 0)))
3420 (clobber (match_scratch:SI 0 "=r,r"))]
3421 "TARGET_32BIT"
3422 "mvns%?\\t%0, %1%S3"
3423 [(set_attr "conds" "set")
3424 (set_attr "shift" "1")
3425 (set_attr "arch" "32,a")
3426 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3427
3428 ;; We don't really have extzv, but defining this using shifts helps
3429 ;; to reduce register pressure later on.
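;; As a rough worked example: extracting WIDTH bits starting at bit POS with
;; a shift pair is
;;   (x << (32 - WIDTH - POS)) >> (32 - WIDTH)   /* x unsigned */
;; so an 8-bit field at bit 4 becomes (x << 20) >> 24, matching the lshift
;; and rshift values computed in the expander below.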
3430
3431 (define_expand "extzv"
3432 [(set (match_operand 0 "s_register_operand")
3433 (zero_extract (match_operand 1 "nonimmediate_operand")
3434 (match_operand 2 "const_int_operand")
3435 (match_operand 3 "const_int_operand")))]
3436 "TARGET_THUMB1 || arm_arch_thumb2"
3437 "
3438 {
3439 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3440 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3441
3442 if (arm_arch_thumb2)
3443 {
3444 HOST_WIDE_INT width = INTVAL (operands[2]);
3445 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3446
3447 if (unaligned_access && MEM_P (operands[1])
3448 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
3449 {
3450 rtx base_addr;
3451
3452 if (BYTES_BIG_ENDIAN)
3453 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
3454 - bitpos;
3455
3456 if (width == 32)
3457 {
3458 base_addr = adjust_address (operands[1], SImode,
3459 bitpos / BITS_PER_UNIT);
3460 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3461 }
3462 else
3463 {
3464 rtx dest = operands[0];
3465 rtx tmp = gen_reg_rtx (SImode);
3466
3467 /* We may get a paradoxical subreg here. Strip it off. */
3468 if (GET_CODE (dest) == SUBREG
3469 && GET_MODE (dest) == SImode
3470 && GET_MODE (SUBREG_REG (dest)) == HImode)
3471 dest = SUBREG_REG (dest);
3472
3473 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3474 FAIL;
3475
3476 base_addr = adjust_address (operands[1], HImode,
3477 bitpos / BITS_PER_UNIT);
3478 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
3479 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3480 }
3481 DONE;
3482 }
3483 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
3484 {
3485 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3486 operands[3]));
3487 DONE;
3488 }
3489 else
3490 FAIL;
3491 }
3492
3493 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3494 FAIL;
3495
3496 operands[3] = GEN_INT (rshift);
3497
3498 if (lshift == 0)
3499 {
3500 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3501 DONE;
3502 }
3503
3504 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
3505 operands[3], gen_reg_rtx (SImode)));
3506 DONE;
3507 }"
3508 )
3509
3510 ;; Helper for extzv, for the Thumb-1 register-shifts case.
3511
3512 (define_expand "extzv_t1"
3513 [(set (match_operand:SI 4 "s_register_operand")
3514 (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
3515 (match_operand:SI 2 "const_int_operand")))
3516 (set (match_operand:SI 0 "s_register_operand")
3517 (lshiftrt:SI (match_dup 4)
3518 (match_operand:SI 3 "const_int_operand")))]
3519 "TARGET_THUMB1"
3520 "")
3521
3522 (define_expand "extv"
3523 [(set (match_operand 0 "s_register_operand")
3524 (sign_extract (match_operand 1 "nonimmediate_operand")
3525 (match_operand 2 "const_int_operand")
3526 (match_operand 3 "const_int_operand")))]
3527 "arm_arch_thumb2"
3528 {
3529 HOST_WIDE_INT width = INTVAL (operands[2]);
3530 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3531
3532 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
3533 && (bitpos % BITS_PER_UNIT) == 0)
3534 {
3535 rtx base_addr;
3536
3537 if (BYTES_BIG_ENDIAN)
3538 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
3539
3540 if (width == 32)
3541 {
3542 base_addr = adjust_address (operands[1], SImode,
3543 bitpos / BITS_PER_UNIT);
3544 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3545 }
3546 else
3547 {
3548 rtx dest = operands[0];
3549 rtx tmp = gen_reg_rtx (SImode);
3550
3551 /* We may get a paradoxical subreg here. Strip it off. */
3552 if (GET_CODE (dest) == SUBREG
3553 && GET_MODE (dest) == SImode
3554 && GET_MODE (SUBREG_REG (dest)) == HImode)
3555 dest = SUBREG_REG (dest);
3556
3557 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3558 FAIL;
3559
3560 base_addr = adjust_address (operands[1], HImode,
3561 bitpos / BITS_PER_UNIT);
3562 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
3563 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3564 }
3565
3566 DONE;
3567 }
3568 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3569 FAIL;
3570 else if (GET_MODE (operands[0]) == SImode
3571 && GET_MODE (operands[1]) == SImode)
3572 {
3573 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
3574 operands[3]));
3575 DONE;
3576 }
3577
3578 FAIL;
3579 })
3580
3581 ; Helper to expand register forms of extv with the proper modes.
3582
3583 (define_expand "extv_regsi"
3584 [(set (match_operand:SI 0 "s_register_operand")
3585 (sign_extract:SI (match_operand:SI 1 "s_register_operand")
3586 (match_operand 2 "const_int_operand")
3587 (match_operand 3 "const_int_operand")))]
3588 ""
3589 {
3590 })
3591
3592 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
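;; For example (an illustrative sketch only): with unaligned access enabled,
;; a packed-structure field access such as
;;   struct __attribute__((packed)) s { char c; int i; };
;;   int get_i (struct s *p) { return p->i; }
;; may be expanded through these unaligned-load patterns rather than as a
;; byte-by-byte sequence.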
3593
3594 (define_insn "unaligned_loaddi"
3595 [(set (match_operand:DI 0 "s_register_operand" "=r")
3596 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
3597 UNSPEC_UNALIGNED_LOAD))]
3598 "TARGET_32BIT && TARGET_LDRD"
3599 "*
3600 return output_move_double (operands, true, NULL);
3601 "
3602 [(set_attr "length" "8")
3603 (set_attr "type" "load_8")])
3604
3605 (define_insn "unaligned_loadsi"
3606 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
3607 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
3608 UNSPEC_UNALIGNED_LOAD))]
3609 "unaligned_access"
3610 "@
3611 ldr\t%0, %1\t@ unaligned
3612 ldr%?\t%0, %1\t@ unaligned
3613 ldr%?\t%0, %1\t@ unaligned"
3614 [(set_attr "arch" "t1,t2,32")
3615 (set_attr "length" "2,2,4")
3616 (set_attr "predicable" "no,yes,yes")
3617 (set_attr "predicable_short_it" "no,yes,no")
3618 (set_attr "type" "load_4")])
3619
3620 ;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
3621 ;; address (there's no immediate format). That's tricky to support
3622 ;; here and we don't really need this pattern for that case, so only
3623 ;; enable for 32-bit ISAs.
3624 (define_insn "unaligned_loadhis"
3625 [(set (match_operand:SI 0 "s_register_operand" "=r")
3626 (sign_extend:SI
3627 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
3628 UNSPEC_UNALIGNED_LOAD)))]
3629 "unaligned_access && TARGET_32BIT"
3630 "ldrsh%?\t%0, %1\t@ unaligned"
3631 [(set_attr "predicable" "yes")
3632 (set_attr "type" "load_byte")])
3633
3634 (define_insn "unaligned_loadhiu"
3635 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
3636 (zero_extend:SI
3637 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
3638 UNSPEC_UNALIGNED_LOAD)))]
3639 "unaligned_access"
3640 "@
3641 ldrh\t%0, %1\t@ unaligned
3642 ldrh%?\t%0, %1\t@ unaligned
3643 ldrh%?\t%0, %1\t@ unaligned"
3644 [(set_attr "arch" "t1,t2,32")
3645 (set_attr "length" "2,2,4")
3646 (set_attr "predicable" "no,yes,yes")
3647 (set_attr "predicable_short_it" "no,yes,no")
3648 (set_attr "type" "load_byte")])
3649
3650 (define_insn "unaligned_storedi"
3651 [(set (match_operand:DI 0 "memory_operand" "=m")
3652 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
3653 UNSPEC_UNALIGNED_STORE))]
3654 "TARGET_32BIT && TARGET_LDRD"
3655 "*
3656 return output_move_double (operands, true, NULL);
3657 "
3658 [(set_attr "length" "8")
3659 (set_attr "type" "store_8")])
3660
3661 (define_insn "unaligned_storesi"
3662 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
3663 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
3664 UNSPEC_UNALIGNED_STORE))]
3665 "unaligned_access"
3666 "@
3667 str\t%1, %0\t@ unaligned
3668 str%?\t%1, %0\t@ unaligned
3669 str%?\t%1, %0\t@ unaligned"
3670 [(set_attr "arch" "t1,t2,32")
3671 (set_attr "length" "2,2,4")
3672 (set_attr "predicable" "no,yes,yes")
3673 (set_attr "predicable_short_it" "no,yes,no")
3674 (set_attr "type" "store_4")])
3675
3676 (define_insn "unaligned_storehi"
3677 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
3678 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
3679 UNSPEC_UNALIGNED_STORE))]
3680 "unaligned_access"
3681 "@
3682 strh\t%1, %0\t@ unaligned
3683 strh%?\t%1, %0\t@ unaligned
3684 strh%?\t%1, %0\t@ unaligned"
3685 [(set_attr "arch" "t1,t2,32")
3686 (set_attr "length" "2,2,4")
3687 (set_attr "predicable" "no,yes,yes")
3688 (set_attr "predicable_short_it" "no,yes,no")
3689 (set_attr "type" "store_4")])
3690
3691
3692 (define_insn "*extv_reg"
3693 [(set (match_operand:SI 0 "s_register_operand" "=r")
3694 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3695 (match_operand:SI 2 "const_int_operand" "n")
3696 (match_operand:SI 3 "const_int_operand" "n")))]
3697 "arm_arch_thumb2
3698 && IN_RANGE (INTVAL (operands[3]), 0, 31)
3699 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
3700 "sbfx%?\t%0, %1, %3, %2"
3701 [(set_attr "length" "4")
3702 (set_attr "predicable" "yes")
3703 (set_attr "type" "bfm")]
3704 )
3705
3706 (define_insn "extzv_t2"
3707 [(set (match_operand:SI 0 "s_register_operand" "=r")
3708 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3709 (match_operand:SI 2 "const_int_operand" "n")
3710 (match_operand:SI 3 "const_int_operand" "n")))]
3711 "arm_arch_thumb2
3712 && IN_RANGE (INTVAL (operands[3]), 0, 31)
3713 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
3714 "ubfx%?\t%0, %1, %3, %2"
3715 [(set_attr "length" "4")
3716 (set_attr "predicable" "yes")
3717 (set_attr "type" "bfm")]
3718 )
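;; A hedged illustration (not from the sources): SBFX/UBFX take the LSB and
;; width as operands 3 and 2 above, so for example
;;   ubfx r0, r1, #3, #8   is roughly   (r1 >> 3) & 0xff
;;   sbfx r0, r1, #3, #8   is roughly   (int) (r1 << 21) >> 24
;; i.e. an 8-bit field at bit 3, zero- or sign-extended to 32 bits.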
3719
3720
3721 ;; Division instructions
3722 (define_insn "divsi3"
3723 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3724 (div:SI (match_operand:SI 1 "s_register_operand" "r,r")
3725 (match_operand:SI 2 "s_register_operand" "r,r")))]
3726 "TARGET_IDIV"
3727 "@
3728 sdiv%?\t%0, %1, %2
3729 sdiv\t%0, %1, %2"
3730 [(set_attr "arch" "32,v8mb")
3731 (set_attr "predicable" "yes")
3732 (set_attr "type" "sdiv")]
3733 )
3734
3735 (define_insn "udivsi3"
3736 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3737 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
3738 (match_operand:SI 2 "s_register_operand" "r,r")))]
3739 "TARGET_IDIV"
3740 "@
3741 udiv%?\t%0, %1, %2
3742 udiv\t%0, %1, %2"
3743 [(set_attr "arch" "32,v8mb")
3744 (set_attr "predicable" "yes")
3745 (set_attr "type" "udiv")]
3746 )
3747
3748 \f
3749 ;; Unary arithmetic insns
3750
3751 (define_expand "negvsi3"
3752 [(match_operand:SI 0 "register_operand")
3753 (match_operand:SI 1 "register_operand")
3754 (match_operand 2 "")]
3755 "TARGET_32BIT"
3756 {
3757 emit_insn (gen_subsi3_compare (operands[0], const0_rtx, operands[1]));
3758 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
3759
3760 DONE;
3761 })
3762
3763 (define_expand "negvdi3"
3764 [(match_operand:DI 0 "s_register_operand")
3765 (match_operand:DI 1 "s_register_operand")
3766 (match_operand 2 "")]
3767 "TARGET_ARM"
3768 {
3769 emit_insn (gen_negdi2_compare (operands[0], operands[1]));
3770 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
3771
3772 DONE;
3773 })
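;; A brief note (an illustrative assumption, not from the sources): signed
;; negation only overflows for the most negative value, so operand 2 above is
;; the label taken when the subtraction from zero sets the V flag.  C along
;; the lines of
;;   int r; if (__builtin_sub_overflow (0, x, &r)) goto overflow;
;; may be expanded through these patterns.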
3774
3775
3776 (define_insn "negdi2_compare"
3777 [(set (reg:CC CC_REGNUM)
3778 (compare:CC
3779 (const_int 0)
3780 (match_operand:DI 1 "register_operand" "r,r")))
3781 (set (match_operand:DI 0 "register_operand" "=&r,&r")
3782 (minus:DI (const_int 0) (match_dup 1)))]
3783 "TARGET_ARM"
3784 "@
3785 rsbs\\t%Q0, %Q1, #0;rscs\\t%R0, %R1, #0
3786 rsbs\\t%Q0, %Q1, #0;sbcs\\t%R0, %R1, %R1, lsl #1"
3787 [(set_attr "conds" "set")
3788 (set_attr "arch" "a,t2")
3789 (set_attr "length" "8")
3790 (set_attr "type" "multiple")]
3791 )
3792
3793 (define_expand "negsi2"
3794 [(set (match_operand:SI 0 "s_register_operand")
3795 (neg:SI (match_operand:SI 1 "s_register_operand")))]
3796 "TARGET_EITHER"
3797 ""
3798 )
3799
3800 (define_insn "*arm_negsi2"
3801 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
3802 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
3803 "TARGET_32BIT"
3804 "rsb%?\\t%0, %1, #0"
3805 [(set_attr "predicable" "yes")
3806 (set_attr "predicable_short_it" "yes,no")
3807 (set_attr "arch" "t2,*")
3808 (set_attr "length" "4")
3809 (set_attr "type" "alu_imm")]
3810 )
3811
3812 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
3813 ;; rather than (0 cmp reg). This gives the same results for unsigned
3814 ;; and equality compares, which is what we mostly need here.
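;; (For illustration: ~x = -1 - x, so the subtraction ~x - (-1) computes
;; exactly 0 - x; the borrow and zero flags therefore match those of a plain
;; NEGS/RSBS.)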
3815 (define_insn "negsi2_0compare"
3816 [(set (reg:CC_RSB CC_REGNUM)
3817 (compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r"))
3818 (const_int -1)))
3819 (set (match_operand:SI 0 "s_register_operand" "=l,r")
3820 (neg:SI (match_dup 1)))]
3821 "TARGET_32BIT"
3822 "@
3823 negs\\t%0, %1
3824 rsbs\\t%0, %1, #0"
3825 [(set_attr "conds" "set")
3826 (set_attr "arch" "t2,*")
3827 (set_attr "length" "2,*")
3828 (set_attr "type" "alus_imm")]
3829 )
3830
3831 (define_insn "negsi2_carryin"
3832 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3833 (minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r"))
3834 (match_operand:SI 2 "arm_borrow_operation" "")))]
3835 "TARGET_32BIT"
3836 "@
3837 rsc\\t%0, %1, #0
3838 sbc\\t%0, %1, %1, lsl #1"
3839 [(set_attr "conds" "use")
3840 (set_attr "arch" "a,t2")
3841 (set_attr "type" "adc_imm,adc_reg")]
3842 )
3843
3844 (define_expand "negsf2"
3845 [(set (match_operand:SF 0 "s_register_operand")
3846 (neg:SF (match_operand:SF 1 "s_register_operand")))]
3847 "TARGET_32BIT && TARGET_HARD_FLOAT"
3848 ""
3849 )
3850
3851 (define_expand "negdf2"
3852 [(set (match_operand:DF 0 "s_register_operand")
3853 (neg:DF (match_operand:DF 1 "s_register_operand")))]
3854 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
3855 "")
3856
3857 ;; abssi2 doesn't really clobber the condition codes if a different register
3858 ;; is being set. To keep things simple, assume during rtl manipulations that
3859 ;; it does, but tell the final scan operator the truth. Similarly for
3860 ;; (neg (abs...))
3861
3862 (define_expand "abssi2"
3863 [(parallel
3864 [(set (match_operand:SI 0 "s_register_operand")
3865 (abs:SI (match_operand:SI 1 "s_register_operand")))
3866 (clobber (match_dup 2))])]
3867 "TARGET_EITHER"
3868 "
3869 if (TARGET_THUMB1)
3870 operands[2] = gen_rtx_SCRATCH (SImode);
3871 else
3872 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3873 ")
3874
3875 (define_insn_and_split "*arm_abssi2"
3876 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3877 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3878 (clobber (reg:CC CC_REGNUM))]
3879 "TARGET_ARM"
3880 "#"
3881 "&& reload_completed"
3882 [(const_int 0)]
3883 {
3884 /* if (which_alternative == 0) */
3885 if (REGNO(operands[0]) == REGNO(operands[1]))
3886 {
3887 /* Emit the pattern:
3888 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3889 [(set (reg:CC CC_REGNUM)
3890 (compare:CC (match_dup 0) (const_int 0)))
3891 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
3892 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
3893 */
3894 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
3895 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
3896 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
3897 (gen_rtx_LT (SImode,
3898 gen_rtx_REG (CCmode, CC_REGNUM),
3899 const0_rtx)),
3900 (gen_rtx_SET (operands[0],
3901 (gen_rtx_MINUS (SImode,
3902 const0_rtx,
3903 operands[1]))))));
3904 DONE;
3905 }
3906 else
3907 {
3908 /* Emit the pattern:
3909 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
3910 [(set (match_dup 0)
3911 (xor:SI (match_dup 1)
3912 (ashiftrt:SI (match_dup 1) (const_int 31))))
3913 (set (match_dup 0)
3914 (minus:SI (match_dup 0)
3915 (ashiftrt:SI (match_dup 1) (const_int 31))))]
3916 */
3917 emit_insn (gen_rtx_SET (operands[0],
3918 gen_rtx_XOR (SImode,
3919 gen_rtx_ASHIFTRT (SImode,
3920 operands[1],
3921 GEN_INT (31)),
3922 operands[1])));
3923 emit_insn (gen_rtx_SET (operands[0],
3924 gen_rtx_MINUS (SImode,
3925 operands[0],
3926 gen_rtx_ASHIFTRT (SImode,
3927 operands[1],
3928 GEN_INT (31)))));
3929 DONE;
3930 }
3931 }
3932 [(set_attr "conds" "clob,*")
3933 (set_attr "shift" "1")
3934 (set_attr "predicable" "no, yes")
3935 (set_attr "length" "8")
3936 (set_attr "type" "multiple")]
3937 )
3938
3939 (define_insn_and_split "*arm_neg_abssi2"
3940 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3941 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3942 (clobber (reg:CC CC_REGNUM))]
3943 "TARGET_ARM"
3944 "#"
3945 "&& reload_completed"
3946 [(const_int 0)]
3947 {
3948 /* if (which_alternative == 0) */
3949 if (REGNO (operands[0]) == REGNO (operands[1]))
3950 {
3951 /* Emit the pattern:
3952 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3953 */
3954 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
3955 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
3956 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
3957 gen_rtx_GT (SImode,
3958 gen_rtx_REG (CCmode, CC_REGNUM),
3959 const0_rtx),
3960 gen_rtx_SET (operands[0],
3961 (gen_rtx_MINUS (SImode,
3962 const0_rtx,
3963 operands[1])))));
3964 }
3965 else
3966 {
3967 /* Emit the pattern:
3968 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
3969 */
3970 emit_insn (gen_rtx_SET (operands[0],
3971 gen_rtx_XOR (SImode,
3972 gen_rtx_ASHIFTRT (SImode,
3973 operands[1],
3974 GEN_INT (31)),
3975 operands[1])));
3976 emit_insn (gen_rtx_SET (operands[0],
3977 gen_rtx_MINUS (SImode,
3978 gen_rtx_ASHIFTRT (SImode,
3979 operands[1],
3980 GEN_INT (31)),
3981 operands[0])));
3982 }
3983 DONE;
3984 }
3985 [(set_attr "conds" "clob,*")
3986 (set_attr "shift" "1")
3987 (set_attr "predicable" "no, yes")
3988 (set_attr "length" "8")
3989 (set_attr "type" "multiple")]
3990 )
3991
3992 (define_expand "abssf2"
3993 [(set (match_operand:SF 0 "s_register_operand")
3994 (abs:SF (match_operand:SF 1 "s_register_operand")))]
3995 "TARGET_32BIT && TARGET_HARD_FLOAT"
3996 "")
3997
3998 (define_expand "absdf2"
3999 [(set (match_operand:DF 0 "s_register_operand")
4000 (abs:DF (match_operand:DF 1 "s_register_operand")))]
4001 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4002 "")
4003
4004 (define_expand "sqrtsf2"
4005 [(set (match_operand:SF 0 "s_register_operand")
4006 (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
4007 "TARGET_32BIT && TARGET_HARD_FLOAT"
4008 "")
4009
4010 (define_expand "sqrtdf2"
4011 [(set (match_operand:DF 0 "s_register_operand")
4012 (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
4013 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4014 "")
4015
4016 (define_expand "one_cmplsi2"
4017 [(set (match_operand:SI 0 "s_register_operand")
4018 (not:SI (match_operand:SI 1 "s_register_operand")))]
4019 "TARGET_EITHER"
4020 ""
4021 )
4022
4023 (define_insn "*arm_one_cmplsi2"
4024 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4025 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4026 "TARGET_32BIT"
4027 "mvn%?\\t%0, %1"
4028 [(set_attr "predicable" "yes")
4029 (set_attr "predicable_short_it" "yes,no")
4030 (set_attr "arch" "t2,*")
4031 (set_attr "length" "4")
4032 (set_attr "type" "mvn_reg")]
4033 )
4034
4035 (define_insn "*notsi_compare0"
4036 [(set (reg:CC_NOOV CC_REGNUM)
4037 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4038 (const_int 0)))
4039 (set (match_operand:SI 0 "s_register_operand" "=r")
4040 (not:SI (match_dup 1)))]
4041 "TARGET_32BIT"
4042 "mvns%?\\t%0, %1"
4043 [(set_attr "conds" "set")
4044 (set_attr "type" "mvn_reg")]
4045 )
4046
4047 (define_insn "*notsi_compare0_scratch"
4048 [(set (reg:CC_NOOV CC_REGNUM)
4049 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4050 (const_int 0)))
4051 (clobber (match_scratch:SI 0 "=r"))]
4052 "TARGET_32BIT"
4053 "mvns%?\\t%0, %1"
4054 [(set_attr "conds" "set")
4055 (set_attr "type" "mvn_reg")]
4056 )
4057 \f
4058 ;; Fixed <--> Floating conversion insns
4059
4060 (define_expand "floatsihf2"
4061 [(set (match_operand:HF 0 "general_operand")
4062 (float:HF (match_operand:SI 1 "general_operand")))]
4063 "TARGET_EITHER"
4064 "
4065 {
4066 rtx op1 = gen_reg_rtx (SFmode);
4067 expand_float (op1, operands[1], 0);
4068 op1 = convert_to_mode (HFmode, op1, 0);
4069 emit_move_insn (operands[0], op1);
4070 DONE;
4071 }"
4072 )
4073
4074 (define_expand "floatdihf2"
4075 [(set (match_operand:HF 0 "general_operand")
4076 (float:HF (match_operand:DI 1 "general_operand")))]
4077 "TARGET_EITHER"
4078 "
4079 {
4080 rtx op1 = gen_reg_rtx (SFmode);
4081 expand_float (op1, operands[1], 0);
4082 op1 = convert_to_mode (HFmode, op1, 0);
4083 emit_move_insn (operands[0], op1);
4084 DONE;
4085 }"
4086 )
4087
4088 (define_expand "floatsisf2"
4089 [(set (match_operand:SF 0 "s_register_operand")
4090 (float:SF (match_operand:SI 1 "s_register_operand")))]
4091 "TARGET_32BIT && TARGET_HARD_FLOAT"
4092 "
4093 ")
4094
4095 (define_expand "floatsidf2"
4096 [(set (match_operand:DF 0 "s_register_operand")
4097 (float:DF (match_operand:SI 1 "s_register_operand")))]
4098 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4099 "
4100 ")
4101
4102 (define_expand "fix_trunchfsi2"
4103 [(set (match_operand:SI 0 "general_operand")
4104 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
4105 "TARGET_EITHER"
4106 "
4107 {
4108 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4109 expand_fix (operands[0], op1, 0);
4110 DONE;
4111 }"
4112 )
4113
4114 (define_expand "fix_trunchfdi2"
4115 [(set (match_operand:DI 0 "general_operand")
4116 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
4117 "TARGET_EITHER"
4118 "
4119 {
4120 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4121 expand_fix (operands[0], op1, 0);
4122 DONE;
4123 }"
4124 )
4125
4126 (define_expand "fix_truncsfsi2"
4127 [(set (match_operand:SI 0 "s_register_operand")
4128 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
4129 "TARGET_32BIT && TARGET_HARD_FLOAT"
4130 "
4131 ")
4132
4133 (define_expand "fix_truncdfsi2"
4134 [(set (match_operand:SI 0 "s_register_operand")
4135 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
4136 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4137 "
4138 ")
4139
4140 ;; Truncation insns
4141
4142 (define_expand "truncdfsf2"
4143 [(set (match_operand:SF 0 "s_register_operand")
4144 (float_truncate:SF
4145 (match_operand:DF 1 "s_register_operand")))]
4146 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4147 ""
4148 )
4149
4150 ;; DFmode to HFmode conversions on targets without a single-step hardware
4151 ;; instruction for it would have to go through SFmode. This is dangerous
4152 ;; as it introduces double rounding.
4153 ;;
4154 ;; Disable this pattern unless we are in an unsafe math mode, or we have
4155 ;; a single-step instruction.
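;; A worked example of the hazard (illustrative only): the double value
;; 1 + 2^-11 + 2^-25 rounds directly to HFmode as 1 + 2^-10, but rounding it
;; to SFmode first gives exactly 1 + 2^-11, which then ties-to-even down to
;; 1.0 in HFmode -- a different result.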
4156
4157 (define_expand "truncdfhf2"
4158 [(set (match_operand:HF 0 "s_register_operand")
4159 (float_truncate:HF
4160 (match_operand:DF 1 "s_register_operand")))]
4161 "(TARGET_EITHER && flag_unsafe_math_optimizations)
4162 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
4163 {
4164 /* We don't have a direct instruction for this, so we must be in
4165 an unsafe math mode, and going via SFmode. */
4166
4167 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4168 {
4169 rtx op1;
4170 op1 = convert_to_mode (SFmode, operands[1], 0);
4171 op1 = convert_to_mode (HFmode, op1, 0);
4172 emit_move_insn (operands[0], op1);
4173 DONE;
4174 }
4175 /* Otherwise, we will pick this up as a single instruction with
4176 no intermediary rounding. */
4177 }
4178 )
4179 \f
4180 ;; Zero and sign extension instructions.
4181
4182 (define_expand "zero_extend<mode>di2"
4183 [(set (match_operand:DI 0 "s_register_operand" "")
4184 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
4185 "TARGET_32BIT <qhs_zextenddi_cond>"
4186 {
4187 rtx res_lo, res_hi, op0_lo, op0_hi;
4188 res_lo = gen_lowpart (SImode, operands[0]);
4189 res_hi = gen_highpart (SImode, operands[0]);
4190 if (can_create_pseudo_p ())
4191 {
4192 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4193 op0_hi = gen_reg_rtx (SImode);
4194 }
4195 else
4196 {
4197 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4198 op0_hi = res_hi;
4199 }
4200 if (<MODE>mode != SImode)
4201 emit_insn (gen_rtx_SET (op0_lo,
4202 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4203 emit_insn (gen_movsi (op0_hi, const0_rtx));
4204 if (res_lo != op0_lo)
4205 emit_move_insn (res_lo, op0_lo);
4206 if (res_hi != op0_hi)
4207 emit_move_insn (res_hi, op0_hi);
4208 DONE;
4209 }
4210 )
4211
4212 (define_expand "extend<mode>di2"
4213 [(set (match_operand:DI 0 "s_register_operand" "")
4214 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
4215 "TARGET_32BIT <qhs_sextenddi_cond>"
4216 {
4217 rtx res_lo, res_hi, op0_lo, op0_hi;
4218 res_lo = gen_lowpart (SImode, operands[0]);
4219 res_hi = gen_highpart (SImode, operands[0]);
4220 if (can_create_pseudo_p ())
4221 {
4222 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4223 op0_hi = gen_reg_rtx (SImode);
4224 }
4225 else
4226 {
4227 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4228 op0_hi = res_hi;
4229 }
4230 if (<MODE>mode != SImode)
4231 emit_insn (gen_rtx_SET (op0_lo,
4232 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4233 emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
4234 if (res_lo != op0_lo)
4235 emit_move_insn (res_lo, op0_lo);
4236 if (res_hi != op0_hi)
4237 emit_move_insn (res_hi, op0_hi);
4238 DONE;
4239 }
4240 )
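;; As an illustrative sketch (not from the sources), on a little-endian target
;; widening a signed value to DImode splits into an SImode extension for the
;; low word plus an arithmetic shift for the high word, so
;;   long long f (short x) { return x; }
;; can become roughly "sxth r0, r0; asr r1, r0, #31".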
4241
4242 ;; Splits for all extensions to DImode
4243 (define_split
4244 [(set (match_operand:DI 0 "s_register_operand" "")
4245 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4246 "TARGET_32BIT"
4247 [(set (match_dup 0) (match_dup 1))]
4248 {
4249 rtx lo_part = gen_lowpart (SImode, operands[0]);
4250 machine_mode src_mode = GET_MODE (operands[1]);
4251
4252 if (src_mode == SImode)
4253 emit_move_insn (lo_part, operands[1]);
4254 else
4255 emit_insn (gen_rtx_SET (lo_part,
4256 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4257 operands[0] = gen_highpart (SImode, operands[0]);
4258 operands[1] = const0_rtx;
4259 })
4260
4261 (define_split
4262 [(set (match_operand:DI 0 "s_register_operand" "")
4263 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4264 "TARGET_32BIT"
4265 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4266 {
4267 rtx lo_part = gen_lowpart (SImode, operands[0]);
4268 machine_mode src_mode = GET_MODE (operands[1]);
4269
4270 if (src_mode == SImode)
4271 emit_move_insn (lo_part, operands[1]);
4272 else
4273 emit_insn (gen_rtx_SET (lo_part,
4274 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4275 operands[1] = lo_part;
4276 operands[0] = gen_highpart (SImode, operands[0]);
4277 })
4278
4279 (define_expand "zero_extendhisi2"
4280 [(set (match_operand:SI 0 "s_register_operand")
4281 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4282 "TARGET_EITHER"
4283 {
4284 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4285 {
4286 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4287 DONE;
4288 }
4289 if (!arm_arch6 && !MEM_P (operands[1]))
4290 {
4291 rtx t = gen_lowpart (SImode, operands[1]);
4292 rtx tmp = gen_reg_rtx (SImode);
4293 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4294 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
4295 DONE;
4296 }
4297 })
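;; For example (a sketch, assuming a pre-ARMv6 core without UXTH): a register
;; zero-extension is done with the shift pair above, much like
;;   unsigned int zext16 (unsigned int x) { return (x << 16) >> 16; }
;; i.e. a logical left shift by 16 followed by a logical right shift by 16.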
4298
4299 (define_split
4300 [(set (match_operand:SI 0 "s_register_operand" "")
4301 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
4302 "!TARGET_THUMB2 && !arm_arch6"
4303 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4304 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4305 {
4306 operands[2] = gen_lowpart (SImode, operands[1]);
4307 })
4308
4309 (define_insn "*arm_zero_extendhisi2"
4310 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4311 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4312 "TARGET_ARM && arm_arch4 && !arm_arch6"
4313 "@
4314 #
4315 ldrh%?\\t%0, %1"
4316 [(set_attr "type" "alu_shift_reg,load_byte")
4317 (set_attr "predicable" "yes")]
4318 )
4319
4320 (define_insn "*arm_zero_extendhisi2_v6"
4321 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4322 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4323 "TARGET_ARM && arm_arch6"
4324 "@
4325 uxth%?\\t%0, %1
4326 ldrh%?\\t%0, %1"
4327 [(set_attr "predicable" "yes")
4328 (set_attr "type" "extend,load_byte")]
4329 )
4330
4331 (define_insn "*arm_zero_extendhisi2addsi"
4332 [(set (match_operand:SI 0 "s_register_operand" "=r")
4333 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4334 (match_operand:SI 2 "s_register_operand" "r")))]
4335 "TARGET_INT_SIMD"
4336 "uxtah%?\\t%0, %2, %1"
4337 [(set_attr "type" "alu_shift_reg")
4338 (set_attr "predicable" "yes")]
4339 )
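;; For illustration (hedged, not from the sources): UXTAH adds a zero-extended
;; halfword to a register in one step, so C such as
;;   int f (int a, int b) { return a + (unsigned short) b; }
;; may be matched by the pattern above (e.g. "uxtah r0, r0, r1").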
4340
4341 (define_expand "zero_extendqisi2"
4342 [(set (match_operand:SI 0 "s_register_operand")
4343 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
4344 "TARGET_EITHER"
4345 {
4346 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
4347 {
4348 emit_insn (gen_andsi3 (operands[0],
4349 gen_lowpart (SImode, operands[1]),
4350 GEN_INT (255)));
4351 DONE;
4352 }
4353 if (!arm_arch6 && !MEM_P (operands[1]))
4354 {
4355 rtx t = gen_lowpart (SImode, operands[1]);
4356 rtx tmp = gen_reg_rtx (SImode);
4357 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4358 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
4359 DONE;
4360 }
4361 })
4362
4363 (define_split
4364 [(set (match_operand:SI 0 "s_register_operand" "")
4365 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
4366 "!arm_arch6"
4367 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4368 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4369 {
4370 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4371 if (TARGET_ARM)
4372 {
4373 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
4374 DONE;
4375 }
4376 })
4377
4378 (define_insn "*arm_zero_extendqisi2"
4379 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4380 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4381 "TARGET_ARM && !arm_arch6"
4382 "@
4383 #
4384 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4385 [(set_attr "length" "8,4")
4386 (set_attr "type" "alu_shift_reg,load_byte")
4387 (set_attr "predicable" "yes")]
4388 )
4389
4390 (define_insn "*arm_zero_extendqisi2_v6"
4391 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4392 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
4393 "TARGET_ARM && arm_arch6"
4394 "@
4395 uxtb%?\\t%0, %1
4396 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4397 [(set_attr "type" "extend,load_byte")
4398 (set_attr "predicable" "yes")]
4399 )
4400
4401 (define_insn "*arm_zero_extendqisi2addsi"
4402 [(set (match_operand:SI 0 "s_register_operand" "=r")
4403 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4404 (match_operand:SI 2 "s_register_operand" "r")))]
4405 "TARGET_INT_SIMD"
4406 "uxtab%?\\t%0, %2, %1"
4407 [(set_attr "predicable" "yes")
4408 (set_attr "type" "alu_shift_reg")]
4409 )
4410
4411 (define_split
4412 [(set (match_operand:SI 0 "s_register_operand" "")
4413 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4414 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4415 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
4416 [(set (match_dup 2) (match_dup 1))
4417 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4418 ""
4419 )
4420
4421 (define_split
4422 [(set (match_operand:SI 0 "s_register_operand" "")
4423 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4424 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4425 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
4426 [(set (match_dup 2) (match_dup 1))
4427 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4428 ""
4429 )
4430
4431
4432 (define_split
4433 [(set (match_operand:SI 0 "s_register_operand" "")
4434 (IOR_XOR:SI (and:SI (ashift:SI
4435 (match_operand:SI 1 "s_register_operand" "")
4436 (match_operand:SI 2 "const_int_operand" ""))
4437 (match_operand:SI 3 "const_int_operand" ""))
4438 (zero_extend:SI
4439 (match_operator 5 "subreg_lowpart_operator"
4440 [(match_operand:SI 4 "s_register_operand" "")]))))]
4441 "TARGET_32BIT
4442 && (UINTVAL (operands[3])
4443 == (GET_MODE_MASK (GET_MODE (operands[5]))
4444 & (GET_MODE_MASK (GET_MODE (operands[5]))
4445 << (INTVAL (operands[2])))))"
4446 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
4447 (match_dup 4)))
4448 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4449 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4450 )
4451
4452 (define_insn "*compareqi_eq0"
4453 [(set (reg:CC_Z CC_REGNUM)
4454 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4455 (const_int 0)))]
4456 "TARGET_32BIT"
4457 "tst%?\\t%0, #255"
4458 [(set_attr "conds" "set")
4459 (set_attr "predicable" "yes")
4460 (set_attr "type" "logic_imm")]
4461 )
4462
4463 (define_expand "extendhisi2"
4464 [(set (match_operand:SI 0 "s_register_operand")
4465 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4466 "TARGET_EITHER"
4467 {
4468 if (TARGET_THUMB1)
4469 {
4470 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4471 DONE;
4472 }
4473 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4474 {
4475 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4476 DONE;
4477 }
4478
4479 if (!arm_arch6 && !MEM_P (operands[1]))
4480 {
4481 rtx t = gen_lowpart (SImode, operands[1]);
4482 rtx tmp = gen_reg_rtx (SImode);
4483 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4484 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
4485 DONE;
4486 }
4487 })
4488
4489 (define_split
4490 [(parallel
4491 [(set (match_operand:SI 0 "register_operand" "")
4492 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4493 (clobber (match_scratch:SI 2 ""))])]
4494 "!arm_arch6"
4495 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4496 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4497 {
4498 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4499 })
4500
4501 ;; This pattern will only be used when ldrsh is not available
4502 (define_expand "extendhisi2_mem"
4503 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4504 (set (match_dup 3)
4505 (zero_extend:SI (match_dup 7)))
4506 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4507 (set (match_operand:SI 0 "" "")
4508 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4509 "TARGET_ARM"
4510 "
4511 {
4512 rtx mem1, mem2;
4513 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4514
4515 mem1 = change_address (operands[1], QImode, addr);
4516 mem2 = change_address (operands[1], QImode,
4517 plus_constant (Pmode, addr, 1));
4518 operands[0] = gen_lowpart (SImode, operands[0]);
4519 operands[1] = mem1;
4520 operands[2] = gen_reg_rtx (SImode);
4521 operands[3] = gen_reg_rtx (SImode);
4522 operands[6] = gen_reg_rtx (SImode);
4523 operands[7] = mem2;
4524
4525 if (BYTES_BIG_ENDIAN)
4526 {
4527 operands[4] = operands[2];
4528 operands[5] = operands[3];
4529 }
4530 else
4531 {
4532 operands[4] = operands[3];
4533 operands[5] = operands[2];
4534 }
4535 }"
4536 )
4537
4538 (define_split
4539 [(set (match_operand:SI 0 "register_operand" "")
4540 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4541 "!arm_arch6"
4542 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4543 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4544 {
4545 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4546 })
4547
4548 (define_insn "*arm_extendhisi2"
4549 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4550 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4551 "TARGET_ARM && arm_arch4 && !arm_arch6"
4552 "@
4553 #
4554 ldrsh%?\\t%0, %1"
4555 [(set_attr "length" "8,4")
4556 (set_attr "type" "alu_shift_reg,load_byte")
4557 (set_attr "predicable" "yes")]
4558 )
4559
4560 ;; ??? Check Thumb-2 pool range
4561 (define_insn "*arm_extendhisi2_v6"
4562 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4563 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4564 "TARGET_32BIT && arm_arch6"
4565 "@
4566 sxth%?\\t%0, %1
4567 ldrsh%?\\t%0, %1"
4568 [(set_attr "type" "extend,load_byte")
4569 (set_attr "predicable" "yes")]
4570 )
4571
4572 (define_insn "*arm_extendhisi2addsi"
4573 [(set (match_operand:SI 0 "s_register_operand" "=r")
4574 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4575 (match_operand:SI 2 "s_register_operand" "r")))]
4576 "TARGET_INT_SIMD"
4577 "sxtah%?\\t%0, %2, %1"
4578 [(set_attr "type" "alu_shift_reg")]
4579 )
4580
4581 (define_expand "extendqihi2"
4582 [(set (match_dup 2)
4583 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
4584 (const_int 24)))
4585 (set (match_operand:HI 0 "s_register_operand")
4586 (ashiftrt:SI (match_dup 2)
4587 (const_int 24)))]
4588 "TARGET_ARM"
4589 "
4590 {
4591 if (arm_arch4 && MEM_P (operands[1]))
4592 {
4593 emit_insn (gen_rtx_SET (operands[0],
4594 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4595 DONE;
4596 }
4597 if (!s_register_operand (operands[1], QImode))
4598 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4599 operands[0] = gen_lowpart (SImode, operands[0]);
4600 operands[1] = gen_lowpart (SImode, operands[1]);
4601 operands[2] = gen_reg_rtx (SImode);
4602 }"
4603 )
4604
4605 (define_insn "*arm_extendqihi_insn"
4606 [(set (match_operand:HI 0 "s_register_operand" "=r")
4607 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4608 "TARGET_ARM && arm_arch4"
4609 "ldrsb%?\\t%0, %1"
4610 [(set_attr "type" "load_byte")
4611 (set_attr "predicable" "yes")]
4612 )
4613
4614 (define_expand "extendqisi2"
4615 [(set (match_operand:SI 0 "s_register_operand")
4616 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
4617 "TARGET_EITHER"
4618 {
4619 if (!arm_arch4 && MEM_P (operands[1]))
4620 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4621
4622 if (!arm_arch6 && !MEM_P (operands[1]))
4623 {
4624 rtx t = gen_lowpart (SImode, operands[1]);
4625 rtx tmp = gen_reg_rtx (SImode);
4626 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4627 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
4628 DONE;
4629 }
4630 })
4631
4632 (define_split
4633 [(set (match_operand:SI 0 "register_operand" "")
4634 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
4635 "!arm_arch6"
4636 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4637 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
4638 {
4639 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4640 })
4641
4642 (define_insn "*arm_extendqisi"
4643 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4644 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4645 "TARGET_ARM && arm_arch4 && !arm_arch6"
4646 "@
4647 #
4648 ldrsb%?\\t%0, %1"
4649 [(set_attr "length" "8,4")
4650 (set_attr "type" "alu_shift_reg,load_byte")
4651 (set_attr "predicable" "yes")]
4652 )
4653
4654 (define_insn "*arm_extendqisi_v6"
4655 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4656 (sign_extend:SI
4657 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4658 "TARGET_ARM && arm_arch6"
4659 "@
4660 sxtb%?\\t%0, %1
4661 ldrsb%?\\t%0, %1"
4662 [(set_attr "type" "extend,load_byte")
4663 (set_attr "predicable" "yes")]
4664 )
4665
4666 (define_insn "*arm_extendqisi2addsi"
4667 [(set (match_operand:SI 0 "s_register_operand" "=r")
4668 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4669 (match_operand:SI 2 "s_register_operand" "r")))]
4670 "TARGET_INT_SIMD"
4671 "sxtab%?\\t%0, %2, %1"
4672 [(set_attr "type" "alu_shift_reg")
4673 (set_attr "predicable" "yes")]
4674 )
4675
4676 (define_insn "arm_<sup>xtb16"
4677 [(set (match_operand:SI 0 "s_register_operand" "=r")
4678 (unspec:SI
4679 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
4680 "TARGET_INT_SIMD"
4681 "<sup>xtb16%?\\t%0, %1"
4682 [(set_attr "predicable" "yes")
4683 (set_attr "type" "alu_dsp_reg")])
4684
4685 (define_insn "arm_<simd32_op>"
4686 [(set (match_operand:SI 0 "s_register_operand" "=r")
4687 (unspec:SI
4688 [(match_operand:SI 1 "s_register_operand" "r")
4689 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
4690 "TARGET_INT_SIMD"
4691 "<simd32_op>%?\\t%0, %1, %2"
4692 [(set_attr "predicable" "yes")
4693 (set_attr "type" "alu_dsp_reg")])
4694
4695 (define_insn "arm_usada8"
4696 [(set (match_operand:SI 0 "s_register_operand" "=r")
4697 (unspec:SI
4698 [(match_operand:SI 1 "s_register_operand" "r")
4699 (match_operand:SI 2 "s_register_operand" "r")
4700 (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
4701 "TARGET_INT_SIMD"
4702 "usada8%?\\t%0, %1, %2, %3"
4703 [(set_attr "predicable" "yes")
4704 (set_attr "type" "alu_dsp_reg")])
4705
4706 (define_insn "arm_<simd32_op>"
4707 [(set (match_operand:DI 0 "s_register_operand" "=r")
4708 (unspec:DI
4709 [(match_operand:SI 1 "s_register_operand" "r")
4710 (match_operand:SI 2 "s_register_operand" "r")
4711 (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
4712 "TARGET_INT_SIMD"
4713 "<simd32_op>%?\\t%Q0, %R0, %1, %2"
4714 [(set_attr "predicable" "yes")
4715 (set_attr "type" "smlald")])
4716
4717 (define_expand "extendsfdf2"
4718 [(set (match_operand:DF 0 "s_register_operand")
4719 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
4720 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4721 ""
4722 )
4723
4724 ;; HFmode -> DFmode conversions for which we don't have a single instruction
4725 ;; must go through SFmode.  This is always safe for an extend, since every
4726 ;; HFmode value is exactly representable in SFmode, so no intermediate
4727 ;; rounding occurs.
4728
4729 (define_expand "extendhfdf2"
4730 [(set (match_operand:DF 0 "s_register_operand")
4731 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
4732 "TARGET_EITHER"
4733 {
4734 /* We don't have a direct instruction for this, so go via SFmode. */
4735 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4736 {
4737 rtx op1;
4738 op1 = convert_to_mode (SFmode, operands[1], 0);
4739 op1 = convert_to_mode (DFmode, op1, 0);
4740 emit_insn (gen_movdf (operands[0], op1));
4741 DONE;
4742 }
4743 /* Otherwise, we're done producing RTL and will pick up the correct
4744 pattern to do this with one rounding-step in a single instruction. */
4745 }
4746 )
4747 \f
4748 ;; Move insns (including loads and stores)
4749
4750 ;; XXX Just some ideas about movti.
4751 ;; I don't think these are a good idea on the ARM; there just aren't enough
4752 ;; registers.
4753 ;;(define_expand "loadti"
4754 ;; [(set (match_operand:TI 0 "s_register_operand")
4755 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
4756 ;; "" "")
4757
4758 ;;(define_expand "storeti"
4759 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
4760 ;; (match_operand:TI 1 "s_register_operand"))]
4761 ;; "" "")
4762
4763 ;;(define_expand "movti"
4764 ;; [(set (match_operand:TI 0 "general_operand")
4765 ;; (match_operand:TI 1 "general_operand"))]
4766 ;; ""
4767 ;; "
4768 ;;{
4769 ;; rtx insn;
4770 ;;
4771 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
4772 ;; operands[1] = copy_to_reg (operands[1]);
4773 ;; if (MEM_P (operands[0]))
4774 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4775 ;; else if (MEM_P (operands[1]))
4776 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4777 ;; else
4778 ;; FAIL;
4779 ;;
4780 ;; emit_insn (insn);
4781 ;; DONE;
4782 ;;}")
4783
4784 ;; Recognize garbage generated above.
4785
4786 ;;(define_insn ""
4787 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4788 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4789 ;; ""
4790 ;; "*
4791 ;; {
4792 ;; register mem = (which_alternative < 3);
4793 ;; register const char *template;
4794 ;;
4795 ;; operands[mem] = XEXP (operands[mem], 0);
4796 ;; switch (which_alternative)
4797 ;; {
4798 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4799 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4800 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4801 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4802 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4803 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4804 ;; }
4805 ;; output_asm_insn (template, operands);
4806 ;; return \"\";
4807 ;; }")
4808
4809 (define_expand "movdi"
4810 [(set (match_operand:DI 0 "general_operand")
4811 (match_operand:DI 1 "general_operand"))]
4812 "TARGET_EITHER"
4813 "
4814 gcc_checking_assert (aligned_operand (operands[0], DImode));
4815 gcc_checking_assert (aligned_operand (operands[1], DImode));
4816 if (can_create_pseudo_p ())
4817 {
4818 if (!REG_P (operands[0]))
4819 operands[1] = force_reg (DImode, operands[1]);
4820 }
4821 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
4822 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
4823 {
4824 /* Avoid LDRD's into an odd-numbered register pair in ARM state
4825 when expanding function calls. */
4826 gcc_assert (can_create_pseudo_p ());
4827 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
4828 {
4829 /* Perform load into legal reg pair first, then move. */
4830 rtx reg = gen_reg_rtx (DImode);
4831 emit_insn (gen_movdi (reg, operands[1]));
4832 operands[1] = reg;
4833 }
4834 emit_move_insn (gen_lowpart (SImode, operands[0]),
4835 gen_lowpart (SImode, operands[1]));
4836 emit_move_insn (gen_highpart (SImode, operands[0]),
4837 gen_highpart (SImode, operands[1]));
4838 DONE;
4839 }
4840 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
4841 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
4842 {
4843 /* Avoid STRD's from an odd-numbered register pair in ARM state
4844 when expanding function prologue. */
4845 gcc_assert (can_create_pseudo_p ());
4846 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
4847 ? gen_reg_rtx (DImode)
4848 : operands[0];
4849 emit_move_insn (gen_lowpart (SImode, split_dest),
4850 gen_lowpart (SImode, operands[1]));
4851 emit_move_insn (gen_highpart (SImode, split_dest),
4852 gen_highpart (SImode, operands[1]));
4853 if (split_dest != operands[0])
4854 emit_insn (gen_movdi (operands[0], split_dest));
4855 DONE;
4856 }
4857 "
4858 )
4859
4860 (define_insn "*arm_movdi"
4861 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4862 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4863 "TARGET_32BIT
4864 && !(TARGET_HARD_FLOAT)
4865 && !TARGET_IWMMXT
4866 && ( register_operand (operands[0], DImode)
4867 || register_operand (operands[1], DImode))"
4868 "*
4869 switch (which_alternative)
4870 {
4871 case 0:
4872 case 1:
4873 case 2:
4874 return \"#\";
4875 case 3:
4876 /* Cannot load it directly, split to load it via MOV / MOVT. */
4877 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
4878 return \"#\";
4879 /* Fall through. */
4880 default:
4881 return output_move_double (operands, true, NULL);
4882 }
4883 "
4884 [(set_attr "length" "8,12,16,8,8")
4885 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
4886 (set_attr "arm_pool_range" "*,*,*,1020,*")
4887 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
4888 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
4889 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
4890 )
4891
4892 (define_split
4893 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4894 (match_operand:ANY64 1 "immediate_operand" ""))]
4895 "TARGET_32BIT
4896 && reload_completed
4897 && (arm_disable_literal_pool
4898 || (arm_const_double_inline_cost (operands[1])
4899 <= arm_max_const_double_inline_cost ()))"
4900 [(const_int 0)]
4901 "
4902 arm_split_constant (SET, SImode, curr_insn,
4903 INTVAL (gen_lowpart (SImode, operands[1])),
4904 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4905 arm_split_constant (SET, SImode, curr_insn,
4906 INTVAL (gen_highpart_mode (SImode,
4907 GET_MODE (operands[0]),
4908 operands[1])),
4909 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4910 DONE;
4911 "
4912 )
4913
4914 ; If optimizing for size, or if we have load delay slots, then
4915 ; we want to split the constant into two separate operations.
4916 ; In both cases this may split a trivial part into a single data op
4917 ; leaving a single complex constant to load. We can also get longer
4918 ; offsets in an LDR, which means we get better chances of sharing the pool
4919 ; entries. Finally, we can normally do a better job of scheduling
4920 ; LDR instructions than we can with LDM.
4921 ; This pattern will only match if the one above did not.
4922 (define_split
4923 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4924 (match_operand:ANY64 1 "const_double_operand" ""))]
4925 "TARGET_ARM && reload_completed
4926 && arm_const_double_by_parts (operands[1])"
4927 [(set (match_dup 0) (match_dup 1))
4928 (set (match_dup 2) (match_dup 3))]
4929 "
4930 operands[2] = gen_highpart (SImode, operands[0]);
4931 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4932 operands[1]);
4933 operands[0] = gen_lowpart (SImode, operands[0]);
4934 operands[1] = gen_lowpart (SImode, operands[1]);
4935 "
4936 )
4937
4938 (define_split
4939 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4940 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4941 "TARGET_EITHER && reload_completed"
4942 [(set (match_dup 0) (match_dup 1))
4943 (set (match_dup 2) (match_dup 3))]
4944 "
4945 operands[2] = gen_highpart (SImode, operands[0]);
4946 operands[3] = gen_highpart (SImode, operands[1]);
4947 operands[0] = gen_lowpart (SImode, operands[0]);
4948 operands[1] = gen_lowpart (SImode, operands[1]);
4949
4950 /* Handle a partial overlap. */
4951 if (rtx_equal_p (operands[0], operands[3]))
4952 {
4953 rtx tmp0 = operands[0];
4954 rtx tmp1 = operands[1];
4955
4956 operands[0] = operands[2];
4957 operands[1] = operands[3];
4958 operands[2] = tmp0;
4959 operands[3] = tmp1;
4960 }
4961 "
4962 )
4963
4964 ;; We can't actually do base+index doubleword loads if the index and
4965 ;; destination overlap.  Split here so that we at least have a chance to
4966 ;; schedule.
4967 (define_split
4968 [(set (match_operand:DI 0 "s_register_operand" "")
4969 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4970 (match_operand:SI 2 "s_register_operand" ""))))]
4971 "TARGET_LDRD
4972 && reg_overlap_mentioned_p (operands[0], operands[1])
4973 && reg_overlap_mentioned_p (operands[0], operands[2])"
4974 [(set (match_dup 4)
4975 (plus:SI (match_dup 1)
4976 (match_dup 2)))
4977 (set (match_dup 0)
4978 (mem:DI (match_dup 4)))]
4979 "
4980 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
4981 "
4982 )
4983
4984 (define_expand "movsi"
4985 [(set (match_operand:SI 0 "general_operand")
4986 (match_operand:SI 1 "general_operand"))]
4987 "TARGET_EITHER"
4988 "
4989 {
4990 rtx base, offset, tmp;
4991
4992 gcc_checking_assert (aligned_operand (operands[0], SImode));
4993 gcc_checking_assert (aligned_operand (operands[1], SImode));
4994 if (TARGET_32BIT || TARGET_HAVE_MOVT)
4995 {
4996 /* Everything except mem = const or mem = mem can be done easily. */
4997 if (MEM_P (operands[0]))
4998 operands[1] = force_reg (SImode, operands[1]);
4999 if (arm_general_register_operand (operands[0], SImode)
5000 && CONST_INT_P (operands[1])
5001 && !(const_ok_for_arm (INTVAL (operands[1]))
5002 || const_ok_for_arm (~INTVAL (operands[1]))))
5003 {
5004 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
5005 {
5006 emit_insn (gen_rtx_SET (operands[0], operands[1]));
5007 DONE;
5008 }
5009 else
5010 {
5011 arm_split_constant (SET, SImode, NULL_RTX,
5012 INTVAL (operands[1]), operands[0], NULL_RTX,
5013 optimize && can_create_pseudo_p ());
5014 DONE;
5015 }
5016 }
5017 }
5018 else /* Target doesn't have MOVT... */
5019 {
5020 if (can_create_pseudo_p ())
5021 {
5022 if (!REG_P (operands[0]))
5023 operands[1] = force_reg (SImode, operands[1]);
5024 }
5025 }
5026
5027 split_const (operands[1], &base, &offset);
5028 if (INTVAL (offset) != 0
5029 && targetm.cannot_force_const_mem (SImode, operands[1]))
5030 {
5031 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5032 emit_move_insn (tmp, base);
5033 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5034 DONE;
5035 }
5036
5037 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
5038
5039 /* Recognize the case where operand[1] is a reference to thread-local
5040 data and load its address to a register. Offsets have been split off
5041 already. */
5042 if (arm_tls_referenced_p (operands[1]))
5043 operands[1] = legitimize_tls_address (operands[1], tmp);
5044 else if (flag_pic
5045 && (CONSTANT_P (operands[1])
5046 || symbol_mentioned_p (operands[1])
5047 || label_mentioned_p (operands[1])))
5048 operands[1] =
5049 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
5050 }
5051 "
5052 )
5053
5054 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5055 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5056 ;; so this does not matter.
5057 (define_insn "*arm_movt"
5058 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
5059 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
5060 (match_operand:SI 2 "general_operand" "i,i")))]
5061 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
5062 "@
5063 movt%?\t%0, #:upper16:%c2
5064 movt\t%0, #:upper16:%c2"
5065 [(set_attr "arch" "32,v8mb")
5066 (set_attr "predicable" "yes")
5067 (set_attr "length" "4")
5068 (set_attr "type" "alu_sreg")]
5069 )
5070
5071 (define_insn "*arm_movsi_insn"
5072 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5073 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5074 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
5075 && ( register_operand (operands[0], SImode)
5076 || register_operand (operands[1], SImode))"
5077 "@
5078 mov%?\\t%0, %1
5079 mov%?\\t%0, %1
5080 mvn%?\\t%0, #%B1
5081 movw%?\\t%0, %1
5082 ldr%?\\t%0, %1
5083 str%?\\t%1, %0"
5084 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
5085 (set_attr "predicable" "yes")
5086 (set_attr "arch" "*,*,*,v6t2,*,*")
5087 (set_attr "pool_range" "*,*,*,*,4096,*")
5088 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5089 )
5090
5091 (define_split
5092 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5093 (match_operand:SI 1 "const_int_operand" ""))]
5094 "(TARGET_32BIT || TARGET_HAVE_MOVT)
5095 && (!(const_ok_for_arm (INTVAL (operands[1]))
5096 || const_ok_for_arm (~INTVAL (operands[1]))))"
5097 [(clobber (const_int 0))]
5098 "
5099 arm_split_constant (SET, SImode, NULL_RTX,
5100 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5101 DONE;
5102 "
5103 )
5104
5105 ;; A normal way to do (symbol + offset) requires at least three instructions
5106 ;; (depending on how big the offset is), as below:
5107 ;; movw r0, #:lower16:g
5108 ;; movt r0, #:upper16:g
5109 ;; adds r0, #4
5110 ;;
5111 ;; A better way would be:
5112 ;; movw r0, #:lower16:g+4
5113 ;; movt r0, #:upper16:g+4
5114 ;;
5115 ;; The limitation of this approach is that the offset must be a 16-bit signed
5116 ;; value, because the current assembler only supports REL-type relocations for
5117 ;; such a case. If the more powerful RELA type is supported in the future, we
5118 ;; should update this pattern to use the better sequence.
5119 (define_split
5120 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5121 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
5122 (match_operand:SI 2 "const_int_operand" ""))))]
5123 "TARGET_THUMB
5124 && TARGET_HAVE_MOVT
5125 && arm_disable_literal_pool
5126 && reload_completed
5127 && GET_CODE (operands[1]) == SYMBOL_REF"
5128 [(clobber (const_int 0))]
5129 "
5130 int offset = INTVAL (operands[2]);
5131
5132 if (offset < -0x8000 || offset > 0x7fff)
5133 {
5134 arm_emit_movpair (operands[0], operands[1]);
5135 emit_insn (gen_rtx_SET (operands[0],
5136 gen_rtx_PLUS (SImode, operands[0], operands[2])));
5137 }
5138 else
5139 {
5140 rtx op = gen_rtx_CONST (SImode,
5141 gen_rtx_PLUS (SImode, operands[1], operands[2]));
5142 arm_emit_movpair (operands[0], op);
5143 }
5144 "
5145 )
5146
5147 ;; Split symbol_refs at a later stage (after cprop), instead of generating
5148 ;; the movt/movw pair directly at expand time; otherwise the corresponding
5149 ;; high and lo_sum would be merged back into a memory load by cprop. When
5150 ;; the default is to prefer movt/movw over a load from the constant pool,
5151 ;; this gives better performance.
5152 (define_split
5153 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5154 (match_operand:SI 1 "general_operand" ""))]
5155 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
5156 && !target_word_relocations
5157 && !arm_tls_referenced_p (operands[1])"
5158 [(clobber (const_int 0))]
5159 {
5160 arm_emit_movpair (operands[0], operands[1]);
5161 DONE;
5162 })
5163
5164 ;; When generating pic, we need to load the symbol offset into a register.
5165 ;; So that the optimizer does not confuse this with a normal symbol load
5166 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5167 ;; since that is the only type of relocation we can use.
5168
5169 ;; Wrap calculation of the whole PIC address in a single pattern for the
5170 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5171 ;; a PIC address involves two loads from memory, so we want to CSE it
5172 ;; as often as possible.
5173 ;; This pattern will be split into one of the pic_load_addr_* patterns
5174 ;; and a move after GCSE optimizations.
5175 ;;
5176 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
5177 (define_expand "calculate_pic_address"
5178 [(set (match_operand:SI 0 "register_operand")
5179 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
5180 (unspec:SI [(match_operand:SI 2 "" "")]
5181 UNSPEC_PIC_SYM))))]
5182 "flag_pic"
5183 )
5184
5185 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5186 (define_split
5187 [(set (match_operand:SI 0 "register_operand" "")
5188 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5189 (unspec:SI [(match_operand:SI 2 "" "")]
5190 UNSPEC_PIC_SYM))))]
5191 "flag_pic"
5192 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5193 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5194 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5195 )
5196
5197 ;; operand1 is the memory address to go into
5198 ;; pic_load_addr_32bit.
5199 ;; operand2 is the PIC label to be emitted
5200 ;; from pic_add_dot_plus_eight.
5201 ;; We do this to allow hoisting of the entire insn.
5202 (define_insn_and_split "pic_load_addr_unified"
5203 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
5204 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
5205 (match_operand:SI 2 "" "")]
5206 UNSPEC_PIC_UNIFIED))]
5207 "flag_pic"
5208 "#"
5209 "&& reload_completed"
5210 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
5211 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
5212 (match_dup 2)] UNSPEC_PIC_BASE))]
5213 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
5214 [(set_attr "type" "load_4,load_4,load_4")
5215 (set_attr "pool_range" "4096,4094,1022")
5216 (set_attr "neg_pool_range" "4084,0,0")
5217 (set_attr "arch" "a,t2,t1")
5218 (set_attr "length" "8,6,4")]
5219 )
5220
5221 ;; The rather odd constraints on the following are to force reload to leave
5222 ;; the insn alone, and to force the minipool generation pass to then move
5223 ;; the GOT symbol to memory.
5224
5225 (define_insn "pic_load_addr_32bit"
5226 [(set (match_operand:SI 0 "s_register_operand" "=r")
5227 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5228 "TARGET_32BIT && flag_pic"
5229 "ldr%?\\t%0, %1"
5230 [(set_attr "type" "load_4")
5231 (set (attr "pool_range")
5232 (if_then_else (eq_attr "is_thumb" "no")
5233 (const_int 4096)
5234 (const_int 4094)))
5235 (set (attr "neg_pool_range")
5236 (if_then_else (eq_attr "is_thumb" "no")
5237 (const_int 4084)
5238 (const_int 0)))]
5239 )
5240
5241 (define_insn "pic_load_addr_thumb1"
5242 [(set (match_operand:SI 0 "s_register_operand" "=l")
5243 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5244 "TARGET_THUMB1 && flag_pic"
5245 "ldr\\t%0, %1"
5246 [(set_attr "type" "load_4")
5247 (set (attr "pool_range") (const_int 1018))]
5248 )
5249
5250 (define_insn "pic_add_dot_plus_four"
5251 [(set (match_operand:SI 0 "register_operand" "=r")
5252 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5253 (const_int 4)
5254 (match_operand 2 "" "")]
5255 UNSPEC_PIC_BASE))]
5256 "TARGET_THUMB"
5257 "*
5258 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5259 INTVAL (operands[2]));
5260 return \"add\\t%0, %|pc\";
5261 "
5262 [(set_attr "length" "2")
5263 (set_attr "type" "alu_sreg")]
5264 )
5265
5266 (define_insn "pic_add_dot_plus_eight"
5267 [(set (match_operand:SI 0 "register_operand" "=r")
5268 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5269 (const_int 8)
5270 (match_operand 2 "" "")]
5271 UNSPEC_PIC_BASE))]
5272 "TARGET_ARM"
5273 "*
5274 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5275 INTVAL (operands[2]));
5276 return \"add%?\\t%0, %|pc, %1\";
5277 "
5278 [(set_attr "predicable" "yes")
5279 (set_attr "type" "alu_sreg")]
5280 )
5281
5282 (define_insn "tls_load_dot_plus_eight"
5283 [(set (match_operand:SI 0 "register_operand" "=r")
5284 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5285 (const_int 8)
5286 (match_operand 2 "" "")]
5287 UNSPEC_PIC_BASE)))]
5288 "TARGET_ARM"
5289 "*
5290 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5291 INTVAL (operands[2]));
5292 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5293 "
5294 [(set_attr "predicable" "yes")
5295 (set_attr "type" "load_4")]
5296 )
5297
5298 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5299 ;; followed by a load. These sequences can be crunched down to
5300 ;; tls_load_dot_plus_eight by a peephole.
5301
5302 (define_peephole2
5303 [(set (match_operand:SI 0 "register_operand" "")
5304 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5305 (const_int 8)
5306 (match_operand 1 "" "")]
5307 UNSPEC_PIC_BASE))
5308 (set (match_operand:SI 2 "arm_general_register_operand" "")
5309 (mem:SI (match_dup 0)))]
5310 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5311 [(set (match_dup 2)
5312 (mem:SI (unspec:SI [(match_dup 3)
5313 (const_int 8)
5314 (match_dup 1)]
5315 UNSPEC_PIC_BASE)))]
5316 ""
5317 )
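;; As an illustration (hypothetical registers), the peephole turns
;;   add r3, pc, r2          @ pic_add_dot_plus_eight
;;   ldr r0, [r3]
;; into the single
;;   ldr r0, [pc, r2]        @ tls_load_dot_plus_eight
;; provided r3 is dead after the load.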
5318
5319 (define_insn "pic_offset_arm"
5320 [(set (match_operand:SI 0 "register_operand" "=r")
5321 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5322 (unspec:SI [(match_operand:SI 2 "" "X")]
5323 UNSPEC_PIC_OFFSET))))]
5324 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5325 "ldr%?\\t%0, [%1,%2]"
5326 [(set_attr "type" "load_4")]
5327 )
5328
5329 (define_expand "builtin_setjmp_receiver"
5330 [(label_ref (match_operand 0 "" ""))]
5331 "flag_pic"
5332 "
5333 {
5334 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5335 register. */
5336 if (arm_pic_register != INVALID_REGNUM)
5337 arm_load_pic_register (1UL << 3, NULL_RTX);
5338 DONE;
5339 }")
5340
5341 ;; If copying one reg to another we can set the condition codes according to
5342 ;; its value. Such a move is common after a return from a subroutine, when
5343 ;; the result is being tested against zero.
5344
5345 (define_insn "*movsi_compare0"
5346 [(set (reg:CC CC_REGNUM)
5347 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5348 (const_int 0)))
5349 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5350 (match_dup 1))]
5351 "TARGET_32BIT"
5352 "@
5353 cmp%?\\t%0, #0
5354 subs%?\\t%0, %1, #0"
5355 [(set_attr "conds" "set")
5356 (set_attr "type" "alus_imm,alus_imm")]
5357 )
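;; For instance (illustrative registers), copying r1 to r0 while testing the
;; value against zero uses the second alternative and emits
;;   subs r0, r1, #0
;; instead of a separate mov and cmp.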
5358
5359 ;; Subroutine to store a half word from a register into memory.
5360 ;; Operand 0 is the source register (HImode)
5361 ;; Operand 1 is the destination address in a register (SImode)
5362
5363 ;; In both this routine and the next, we must be careful not to spill
5364 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5365 ;; can generate unrecognizable rtl.
5366
5367 (define_expand "storehi"
5368 [;; store the low byte
5369 (set (match_operand 1 "" "") (match_dup 3))
5370 ;; extract the high byte
5371 (set (match_dup 2)
5372 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5373 ;; store the high byte
5374 (set (match_dup 4) (match_dup 5))]
5375 "TARGET_ARM"
5376 "
5377 {
5378 rtx op1 = operands[1];
5379 rtx addr = XEXP (op1, 0);
5380 enum rtx_code code = GET_CODE (addr);
5381
5382 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5383 || code == MINUS)
5384 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5385
5386 operands[4] = adjust_address (op1, QImode, 1);
5387 operands[1] = adjust_address (operands[1], QImode, 0);
5388 operands[3] = gen_lowpart (QImode, operands[0]);
5389 operands[0] = gen_lowpart (SImode, operands[0]);
5390 operands[2] = gen_reg_rtx (SImode);
5391 operands[5] = gen_lowpart (QImode, operands[2]);
5392 }"
5393 )
5394
5395 (define_expand "storehi_bigend"
5396 [(set (match_dup 4) (match_dup 3))
5397 (set (match_dup 2)
5398 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5399 (set (match_operand 1 "" "") (match_dup 5))]
5400 "TARGET_ARM"
5401 "
5402 {
5403 rtx op1 = operands[1];
5404 rtx addr = XEXP (op1, 0);
5405 enum rtx_code code = GET_CODE (addr);
5406
5407 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5408 || code == MINUS)
5409 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5410
5411 operands[4] = adjust_address (op1, QImode, 1);
5412 operands[1] = adjust_address (operands[1], QImode, 0);
5413 operands[3] = gen_lowpart (QImode, operands[0]);
5414 operands[0] = gen_lowpart (SImode, operands[0]);
5415 operands[2] = gen_reg_rtx (SImode);
5416 operands[5] = gen_lowpart (QImode, operands[2]);
5417 }"
5418 )
5419
5420 ;; Subroutine to store a half word integer constant into memory.
5421 (define_expand "storeinthi"
5422 [(set (match_operand 0 "" "")
5423 (match_operand 1 "" ""))
5424 (set (match_dup 3) (match_dup 2))]
5425 "TARGET_ARM"
5426 "
5427 {
5428 HOST_WIDE_INT value = INTVAL (operands[1]);
5429 rtx addr = XEXP (operands[0], 0);
5430 rtx op0 = operands[0];
5431 enum rtx_code code = GET_CODE (addr);
5432
5433 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5434 || code == MINUS)
5435 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5436
5437 operands[1] = gen_reg_rtx (SImode);
5438 if (BYTES_BIG_ENDIAN)
5439 {
5440 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5441 if ((value & 255) == ((value >> 8) & 255))
5442 operands[2] = operands[1];
5443 else
5444 {
5445 operands[2] = gen_reg_rtx (SImode);
5446 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5447 }
5448 }
5449 else
5450 {
5451 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5452 if ((value & 255) == ((value >> 8) & 255))
5453 operands[2] = operands[1];
5454 else
5455 {
5456 operands[2] = gen_reg_rtx (SImode);
5457 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5458 }
5459 }
5460
5461 operands[3] = adjust_address (op0, QImode, 1);
5462 operands[0] = adjust_address (operands[0], QImode, 0);
5463 operands[2] = gen_lowpart (QImode, operands[2]);
5464 operands[1] = gen_lowpart (QImode, operands[1]);
5465 }"
5466 )
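;; Worked example (little-endian, illustrative): storeinthi with the constant
;; 0x1234 loads 0x34 and 0x12 into registers and emits two strb instructions,
;; storing 0x34 at offset 0 and 0x12 at offset 1; when both bytes of the
;; constant are equal, a single temporary register is reused for both stores.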
5467
5468 (define_expand "storehi_single_op"
5469 [(set (match_operand:HI 0 "memory_operand")
5470 (match_operand:HI 1 "general_operand"))]
5471 "TARGET_32BIT && arm_arch4"
5472 "
5473 if (!s_register_operand (operands[1], HImode))
5474 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5475 "
5476 )
5477
5478 (define_expand "movhi"
5479 [(set (match_operand:HI 0 "general_operand")
5480 (match_operand:HI 1 "general_operand"))]
5481 "TARGET_EITHER"
5482 "
5483 gcc_checking_assert (aligned_operand (operands[0], HImode));
5484 gcc_checking_assert (aligned_operand (operands[1], HImode));
5485 if (TARGET_ARM)
5486 {
5487 if (can_create_pseudo_p ())
5488 {
5489 if (MEM_P (operands[0]))
5490 {
5491 if (arm_arch4)
5492 {
5493 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5494 DONE;
5495 }
5496 if (CONST_INT_P (operands[1]))
5497 emit_insn (gen_storeinthi (operands[0], operands[1]));
5498 else
5499 {
5500 if (MEM_P (operands[1]))
5501 operands[1] = force_reg (HImode, operands[1]);
5502 if (BYTES_BIG_ENDIAN)
5503 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5504 else
5505 emit_insn (gen_storehi (operands[1], operands[0]));
5506 }
5507 DONE;
5508 }
5509 /* Sign extend a constant, and keep it in an SImode reg. */
5510 else if (CONST_INT_P (operands[1]))
5511 {
5512 rtx reg = gen_reg_rtx (SImode);
5513 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5514
5515 /* If the constant is already valid, leave it alone. */
5516 if (!const_ok_for_arm (val))
5517 {
5518 /* If setting all the top bits will make the constant
5519 loadable in a single instruction, then set them.
5520 Otherwise, sign extend the number. */
5521
5522 if (const_ok_for_arm (~(val | ~0xffff)))
5523 val |= ~0xffff;
5524 else if (val & 0x8000)
5525 val |= ~0xffff;
5526 }
5527
5528 emit_insn (gen_movsi (reg, GEN_INT (val)));
5529 operands[1] = gen_lowpart (HImode, reg);
5530 }
5531 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5532 && MEM_P (operands[1]))
5533 {
5534 rtx reg = gen_reg_rtx (SImode);
5535
5536 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5537 operands[1] = gen_lowpart (HImode, reg);
5538 }
5539 else if (!arm_arch4)
5540 {
5541 if (MEM_P (operands[1]))
5542 {
5543 rtx base;
5544 rtx offset = const0_rtx;
5545 rtx reg = gen_reg_rtx (SImode);
5546
5547 if ((REG_P (base = XEXP (operands[1], 0))
5548 || (GET_CODE (base) == PLUS
5549 && (CONST_INT_P (offset = XEXP (base, 1)))
5550 && ((INTVAL (offset) & 1) != 1)
5551 && REG_P (base = XEXP (base, 0))))
5552 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5553 {
5554 rtx new_rtx;
5555
5556 new_rtx = widen_memory_access (operands[1], SImode,
5557 ((INTVAL (offset) & ~3)
5558 - INTVAL (offset)));
5559 emit_insn (gen_movsi (reg, new_rtx));
5560 if (((INTVAL (offset) & 2) != 0)
5561 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5562 {
5563 rtx reg2 = gen_reg_rtx (SImode);
5564
5565 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5566 reg = reg2;
5567 }
5568 }
5569 else
5570 emit_insn (gen_movhi_bytes (reg, operands[1]));
5571
5572 operands[1] = gen_lowpart (HImode, reg);
5573 }
5574 }
5575 }
5576 /* Handle loading a large integer during reload. */
5577 else if (CONST_INT_P (operands[1])
5578 && !const_ok_for_arm (INTVAL (operands[1]))
5579 && !const_ok_for_arm (~INTVAL (operands[1])))
5580 {
5581 /* Writing a constant to memory needs a scratch, which should
5582 be handled with SECONDARY_RELOADs. */
5583 gcc_assert (REG_P (operands[0]));
5584
5585 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5586 emit_insn (gen_movsi (operands[0], operands[1]));
5587 DONE;
5588 }
5589 }
5590 else if (TARGET_THUMB2)
5591 {
5592 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5593 if (can_create_pseudo_p ())
5594 {
5595 if (!REG_P (operands[0]))
5596 operands[1] = force_reg (HImode, operands[1]);
5597 /* Zero extend a constant, and keep it in an SImode reg. */
5598 else if (CONST_INT_P (operands[1]))
5599 {
5600 rtx reg = gen_reg_rtx (SImode);
5601 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5602
5603 emit_insn (gen_movsi (reg, GEN_INT (val)));
5604 operands[1] = gen_lowpart (HImode, reg);
5605 }
5606 }
5607 }
5608 else /* TARGET_THUMB1 */
5609 {
5610 if (can_create_pseudo_p ())
5611 {
5612 if (CONST_INT_P (operands[1]))
5613 {
5614 rtx reg = gen_reg_rtx (SImode);
5615
5616 emit_insn (gen_movsi (reg, operands[1]));
5617 operands[1] = gen_lowpart (HImode, reg);
5618 }
5619
5620 /* ??? We shouldn't really get invalid addresses here, but this can
5621 happen if we are passed an SP (never OK for HImode/QImode) or
5622 virtual register (also rejected as illegitimate for HImode/QImode)
5623 relative address. */
5624 /* ??? This should perhaps be fixed elsewhere, for instance, in
5625 fixup_stack_1, by checking for other kinds of invalid addresses,
5626 e.g. a bare reference to a virtual register. This may confuse the
5627 alpha though, which must handle this case differently. */
5628 if (MEM_P (operands[0])
5629 && !memory_address_p (GET_MODE (operands[0]),
5630 XEXP (operands[0], 0)))
5631 operands[0]
5632 = replace_equiv_address (operands[0],
5633 copy_to_reg (XEXP (operands[0], 0)));
5634
5635 if (MEM_P (operands[1])
5636 && !memory_address_p (GET_MODE (operands[1]),
5637 XEXP (operands[1], 0)))
5638 operands[1]
5639 = replace_equiv_address (operands[1],
5640 copy_to_reg (XEXP (operands[1], 0)));
5641
5642 if (MEM_P (operands[1]) && optimize > 0)
5643 {
5644 rtx reg = gen_reg_rtx (SImode);
5645
5646 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5647 operands[1] = gen_lowpart (HImode, reg);
5648 }
5649
5650 if (MEM_P (operands[0]))
5651 operands[1] = force_reg (HImode, operands[1]);
5652 }
5653 else if (CONST_INT_P (operands[1])
5654 && !satisfies_constraint_I (operands[1]))
5655 {
5656 /* Handle loading a large integer during reload. */
5657
5658 /* Writing a constant to memory needs a scratch, which should
5659 be handled with SECONDARY_RELOADs. */
5660 gcc_assert (REG_P (operands[0]));
5661
5662 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5663 emit_insn (gen_movsi (operands[0], operands[1]));
5664 DONE;
5665 }
5666 }
5667 "
5668 )
5669
5670 (define_expand "movhi_bytes"
5671 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5672 (set (match_dup 3)
5673 (zero_extend:SI (match_dup 6)))
5674 (set (match_operand:SI 0 "" "")
5675 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5676 "TARGET_ARM"
5677 "
5678 {
5679 rtx mem1, mem2;
5680 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5681
5682 mem1 = change_address (operands[1], QImode, addr);
5683 mem2 = change_address (operands[1], QImode,
5684 plus_constant (Pmode, addr, 1));
5685 operands[0] = gen_lowpart (SImode, operands[0]);
5686 operands[1] = mem1;
5687 operands[2] = gen_reg_rtx (SImode);
5688 operands[3] = gen_reg_rtx (SImode);
5689 operands[6] = mem2;
5690
5691 if (BYTES_BIG_ENDIAN)
5692 {
5693 operands[4] = operands[2];
5694 operands[5] = operands[3];
5695 }
5696 else
5697 {
5698 operands[4] = operands[3];
5699 operands[5] = operands[2];
5700 }
5701 }"
5702 )
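;; Illustrative expansion (little-endian, hypothetical registers): a halfword
;; load without ldrh (pre-ARMv4) becomes roughly
;;   ldrb r2, [r1]            @ low byte
;;   ldrb r3, [r1, #1]        @ high byte
;;   orr  r0, r2, r3, lsl #8
;; with the byte roles swapped on big-endian targets.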
5703
5704 (define_expand "movhi_bigend"
5705 [(set (match_dup 2)
5706 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
5707 (const_int 16)))
5708 (set (match_dup 3)
5709 (ashiftrt:SI (match_dup 2) (const_int 16)))
5710 (set (match_operand:HI 0 "s_register_operand")
5711 (match_dup 4))]
5712 "TARGET_ARM"
5713 "
5714 operands[2] = gen_reg_rtx (SImode);
5715 operands[3] = gen_reg_rtx (SImode);
5716 operands[4] = gen_lowpart (HImode, operands[3]);
5717 "
5718 )
5719
5720 ;; Pattern to recognize the insn generated by the default case above.
5721 (define_insn "*movhi_insn_arch4"
5722 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
5723 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))]
5724 "TARGET_ARM
5725 && arm_arch4 && !TARGET_HARD_FLOAT
5726 && (register_operand (operands[0], HImode)
5727 || register_operand (operands[1], HImode))"
5728 "@
5729 mov%?\\t%0, %1\\t%@ movhi
5730 mvn%?\\t%0, #%B1\\t%@ movhi
5731 movw%?\\t%0, %L1\\t%@ movhi
5732 strh%?\\t%1, %0\\t%@ movhi
5733 ldrh%?\\t%0, %1\\t%@ movhi"
5734 [(set_attr "predicable" "yes")
5735 (set_attr "pool_range" "*,*,*,*,256")
5736 (set_attr "neg_pool_range" "*,*,*,*,244")
5737 (set_attr "arch" "*,*,v6t2,*,*")
5738 (set_attr_alternative "type"
5739 [(if_then_else (match_operand 1 "const_int_operand" "")
5740 (const_string "mov_imm" )
5741 (const_string "mov_reg"))
5742 (const_string "mvn_imm")
5743 (const_string "mov_imm")
5744 (const_string "store_4")
5745 (const_string "load_4")])]
5746 )
5747
5748 (define_insn "*movhi_bytes"
5749 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
5750 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))]
5751 "TARGET_ARM && !TARGET_HARD_FLOAT"
5752 "@
5753 mov%?\\t%0, %1\\t%@ movhi
5754 mov%?\\t%0, %1\\t%@ movhi
5755 mvn%?\\t%0, #%B1\\t%@ movhi"
5756 [(set_attr "predicable" "yes")
5757 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
5758 )
5759
5760 ;; We use a DImode scratch because we may occasionally need an additional
5761 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5762 ;; to take any notice of the "o" constraint on the reload_memory_operand.
5763 ;; The reload_in<m> and reload_out<m> patterns require special constraints
5764 ;; to be handled correctly by the default_secondary_reload function.
5765 (define_expand "reload_outhi"
5766 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5767 (match_operand:HI 1 "s_register_operand" "r")
5768 (match_operand:DI 2 "s_register_operand" "=&l")])]
5769 "TARGET_EITHER"
5770 "if (TARGET_ARM)
5771 arm_reload_out_hi (operands);
5772 else
5773 thumb_reload_out_hi (operands);
5774 DONE;
5775 "
5776 )
5777
5778 (define_expand "reload_inhi"
5779 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5780 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5781 (match_operand:DI 2 "s_register_operand" "=&r")])]
5782 "TARGET_EITHER"
5783 "
5784 if (TARGET_ARM)
5785 arm_reload_in_hi (operands);
5786 else
5787 thumb_reload_out_hi (operands);
5788 DONE;
5789 ")
5790
5791 (define_expand "movqi"
5792 [(set (match_operand:QI 0 "general_operand")
5793 (match_operand:QI 1 "general_operand"))]
5794 "TARGET_EITHER"
5795 "
5796 /* Everything except mem = const or mem = mem can be done easily. */
5797
5798 if (can_create_pseudo_p ())
5799 {
5800 if (CONST_INT_P (operands[1]))
5801 {
5802 rtx reg = gen_reg_rtx (SImode);
5803
5804 /* For Thumb we want an unsigned immediate, so that we are more likely
5805 to be able to use a movs insn. */
5806 if (TARGET_THUMB)
5807 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
5808
5809 emit_insn (gen_movsi (reg, operands[1]));
5810 operands[1] = gen_lowpart (QImode, reg);
5811 }
5812
5813 if (TARGET_THUMB)
5814 {
5815 /* ??? We shouldn't really get invalid addresses here, but this can
5816 happen if we are passed an SP (never OK for HImode/QImode) or
5817 virtual register (also rejected as illegitimate for HImode/QImode)
5818 relative address. */
5819 /* ??? This should perhaps be fixed elsewhere, for instance, in
5820 fixup_stack_1, by checking for other kinds of invalid addresses,
5821 e.g. a bare reference to a virtual register. This may confuse the
5822 alpha though, which must handle this case differently. */
5823 if (MEM_P (operands[0])
5824 && !memory_address_p (GET_MODE (operands[0]),
5825 XEXP (operands[0], 0)))
5826 operands[0]
5827 = replace_equiv_address (operands[0],
5828 copy_to_reg (XEXP (operands[0], 0)));
5829 if (MEM_P (operands[1])
5830 && !memory_address_p (GET_MODE (operands[1]),
5831 XEXP (operands[1], 0)))
5832 operands[1]
5833 = replace_equiv_address (operands[1],
5834 copy_to_reg (XEXP (operands[1], 0)));
5835 }
5836
5837 if (MEM_P (operands[1]) && optimize > 0)
5838 {
5839 rtx reg = gen_reg_rtx (SImode);
5840
5841 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5842 operands[1] = gen_lowpart (QImode, reg);
5843 }
5844
5845 if (MEM_P (operands[0]))
5846 operands[1] = force_reg (QImode, operands[1]);
5847 }
5848 else if (TARGET_THUMB
5849 && CONST_INT_P (operands[1])
5850 && !satisfies_constraint_I (operands[1]))
5851 {
5852 /* Handle loading a large integer during reload. */
5853
5854 /* Writing a constant to memory needs a scratch, which should
5855 be handled with SECONDARY_RELOADs. */
5856 gcc_assert (REG_P (operands[0]));
5857
5858 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5859 emit_insn (gen_movsi (operands[0], operands[1]));
5860 DONE;
5861 }
5862 "
5863 )
5864
5865 (define_insn "*arm_movqi_insn"
5866 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
5867 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
5868 "TARGET_32BIT
5869 && ( register_operand (operands[0], QImode)
5870 || register_operand (operands[1], QImode))"
5871 "@
5872 mov%?\\t%0, %1
5873 mov%?\\t%0, %1
5874 mov%?\\t%0, %1
5875 mov%?\\t%0, %1
5876 mvn%?\\t%0, #%B1
5877 ldrb%?\\t%0, %1
5878 strb%?\\t%1, %0
5879 ldrb%?\\t%0, %1
5880 strb%?\\t%1, %0"
5881 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
5882 (set_attr "predicable" "yes")
5883 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
5884 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
5885 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
5886 )
5887
5888 ;; HFmode moves
5889 (define_expand "movhf"
5890 [(set (match_operand:HF 0 "general_operand")
5891 (match_operand:HF 1 "general_operand"))]
5892 "TARGET_EITHER"
5893 "
5894 gcc_checking_assert (aligned_operand (operands[0], HFmode));
5895 gcc_checking_assert (aligned_operand (operands[1], HFmode));
5896 if (TARGET_32BIT)
5897 {
5898 if (MEM_P (operands[0]))
5899 operands[1] = force_reg (HFmode, operands[1]);
5900 }
5901 else /* TARGET_THUMB1 */
5902 {
5903 if (can_create_pseudo_p ())
5904 {
5905 if (!REG_P (operands[0]))
5906 operands[1] = force_reg (HFmode, operands[1]);
5907 }
5908 }
5909 "
5910 )
5911
5912 (define_insn "*arm32_movhf"
5913 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
5914 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
5915 "TARGET_32BIT && !TARGET_HARD_FLOAT
5916 && ( s_register_operand (operands[0], HFmode)
5917 || s_register_operand (operands[1], HFmode))"
5918 "*
5919 switch (which_alternative)
5920 {
5921 case 0: /* ARM register from memory */
5922 return \"ldrh%?\\t%0, %1\\t%@ __fp16\";
5923 case 1: /* memory from ARM register */
5924 return \"strh%?\\t%1, %0\\t%@ __fp16\";
5925 case 2: /* ARM register from ARM register */
5926 return \"mov%?\\t%0, %1\\t%@ __fp16\";
5927 case 3: /* ARM register from constant */
5928 {
5929 long bits;
5930 rtx ops[4];
5931
5932 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
5933 HFmode);
5934 ops[0] = operands[0];
5935 ops[1] = GEN_INT (bits);
5936 ops[2] = GEN_INT (bits & 0xff00);
5937 ops[3] = GEN_INT (bits & 0x00ff);
5938
5939 if (arm_arch_thumb2)
5940 output_asm_insn (\"movw%?\\t%0, %1\", ops);
5941 else
5942 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
5943 return \"\";
5944 }
5945 default:
5946 gcc_unreachable ();
5947 }
5948 "
5949 [(set_attr "conds" "unconditional")
5950 (set_attr "type" "load_4,store_4,mov_reg,multiple")
5951 (set_attr "length" "4,4,4,8")
5952 (set_attr "predicable" "yes")]
5953 )
5954
5955 (define_expand "movsf"
5956 [(set (match_operand:SF 0 "general_operand")
5957 (match_operand:SF 1 "general_operand"))]
5958 "TARGET_EITHER"
5959 "
5960 gcc_checking_assert (aligned_operand (operands[0], SFmode));
5961 gcc_checking_assert (aligned_operand (operands[1], SFmode));
5962 if (TARGET_32BIT)
5963 {
5964 if (MEM_P (operands[0]))
5965 operands[1] = force_reg (SFmode, operands[1]);
5966 }
5967 else /* TARGET_THUMB1 */
5968 {
5969 if (can_create_pseudo_p ())
5970 {
5971 if (!REG_P (operands[0]))
5972 operands[1] = force_reg (SFmode, operands[1]);
5973 }
5974 }
5975
5976 /* Cannot load it directly; generate a load with a clobber so that it can
5977 be loaded via a GPR with MOV / MOVT. */
5978 if (arm_disable_literal_pool
5979 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
5980 && CONST_DOUBLE_P (operands[1])
5981 && TARGET_HARD_FLOAT
5982 && !vfp3_const_double_rtx (operands[1]))
5983 {
5984 rtx clobreg = gen_reg_rtx (SFmode);
5985 emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
5986 clobreg));
5987 DONE;
5988 }
5989 "
5990 )
5991
5992 ;; Transform a floating-point move of a constant into a core register into
5993 ;; an SImode operation.
5994 (define_split
5995 [(set (match_operand:SF 0 "arm_general_register_operand" "")
5996 (match_operand:SF 1 "immediate_operand" ""))]
5997 "TARGET_EITHER
5998 && reload_completed
5999 && CONST_DOUBLE_P (operands[1])"
6000 [(set (match_dup 2) (match_dup 3))]
6001 "
6002 operands[2] = gen_lowpart (SImode, operands[0]);
6003 operands[3] = gen_lowpart (SImode, operands[1]);
6004 if (operands[2] == 0 || operands[3] == 0)
6005 FAIL;
6006 "
6007 )
6008
6009 (define_insn "*arm_movsf_soft_insn"
6010 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6011 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6012 "TARGET_32BIT
6013 && TARGET_SOFT_FLOAT
6014 && (!MEM_P (operands[0])
6015 || register_operand (operands[1], SFmode))"
6016 {
6017 switch (which_alternative)
6018 {
6019 case 0: return \"mov%?\\t%0, %1\";
6020 case 1:
6021 /* Cannot load it directly; split to load it via MOV / MOVT. */
6022 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6023 return \"#\";
6024 return \"ldr%?\\t%0, %1\\t%@ float\";
6025 case 2: return \"str%?\\t%1, %0\\t%@ float\";
6026 default: gcc_unreachable ();
6027 }
6028 }
6029 [(set_attr "predicable" "yes")
6030 (set_attr "type" "mov_reg,load_4,store_4")
6031 (set_attr "arm_pool_range" "*,4096,*")
6032 (set_attr "thumb2_pool_range" "*,4094,*")
6033 (set_attr "arm_neg_pool_range" "*,4084,*")
6034 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6035 )
6036
6037 ;; Splitter for the above.
6038 (define_split
6039 [(set (match_operand:SF 0 "s_register_operand")
6040 (match_operand:SF 1 "const_double_operand"))]
6041 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6042 [(const_int 0)]
6043 {
6044 long buf;
6045 real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
6046 rtx cst = gen_int_mode (buf, SImode);
6047 emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
6048 DONE;
6049 }
6050 )
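;; For example: under soft-float with the literal pool disabled, an SFmode
;; constant such as 1.0 (IEEE-754 single-precision image 0x3f800000) is
;; rewritten by the splitter above into an ordinary SImode move of the
;; constant 0x3f800000 into the same register.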
6051
6052 (define_expand "movdf"
6053 [(set (match_operand:DF 0 "general_operand")
6054 (match_operand:DF 1 "general_operand"))]
6055 "TARGET_EITHER"
6056 "
6057 gcc_checking_assert (aligned_operand (operands[0], DFmode));
6058 gcc_checking_assert (aligned_operand (operands[1], DFmode));
6059 if (TARGET_32BIT)
6060 {
6061 if (MEM_P (operands[0]))
6062 operands[1] = force_reg (DFmode, operands[1]);
6063 }
6064 else /* TARGET_THUMB */
6065 {
6066 if (can_create_pseudo_p ())
6067 {
6068 if (!REG_P (operands[0]))
6069 operands[1] = force_reg (DFmode, operands[1]);
6070 }
6071 }
6072
6073 /* Cannot load it directly; generate a load with a clobber so that it can
6074 be loaded via a GPR with MOV / MOVT. */
6075 if (arm_disable_literal_pool
6076 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6077 && CONSTANT_P (operands[1])
6078 && TARGET_HARD_FLOAT
6079 && !arm_const_double_rtx (operands[1])
6080 && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
6081 {
6082 rtx clobreg = gen_reg_rtx (DFmode);
6083 emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
6084 clobreg));
6085 DONE;
6086 }
6087 "
6088 )
6089
6090 ;; Reloading a DFmode value stored in integer regs to memory can require a
6091 ;; scratch reg.
6092 ;; Another reload_out<m> pattern that requires special constraints.
6093 (define_expand "reload_outdf"
6094 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6095 (match_operand:DF 1 "s_register_operand" "r")
6096 (match_operand:SI 2 "s_register_operand" "=&r")]
6097 "TARGET_THUMB2"
6098 "
6099 {
6100 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6101
6102 if (code == REG)
6103 operands[2] = XEXP (operands[0], 0);
6104 else if (code == POST_INC || code == PRE_DEC)
6105 {
6106 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6107 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6108 emit_insn (gen_movdi (operands[0], operands[1]));
6109 DONE;
6110 }
6111 else if (code == PRE_INC)
6112 {
6113 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6114
6115 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6116 operands[2] = reg;
6117 }
6118 else if (code == POST_DEC)
6119 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6120 else
6121 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6122 XEXP (XEXP (operands[0], 0), 1)));
6123
6124 emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
6125 operands[1]));
6126
6127 if (code == POST_DEC)
6128 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6129
6130 DONE;
6131 }"
6132 )
6133
6134 (define_insn "*movdf_soft_insn"
6135 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6136 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6137 "TARGET_32BIT && TARGET_SOFT_FLOAT
6138 && ( register_operand (operands[0], DFmode)
6139 || register_operand (operands[1], DFmode))"
6140 "*
6141 switch (which_alternative)
6142 {
6143 case 0:
6144 case 1:
6145 case 2:
6146 return \"#\";
6147 case 3:
6148 /* Cannot load it directly; split to load it via MOV / MOVT. */
6149 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6150 return \"#\";
6151 /* Fall through. */
6152 default:
6153 return output_move_double (operands, true, NULL);
6154 }
6155 "
6156 [(set_attr "length" "8,12,16,8,8")
6157 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6158 (set_attr "arm_pool_range" "*,*,*,1020,*")
6159 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
6160 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6161 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6162 )
6163
6164 ;; Splitter for the above.
6165 (define_split
6166 [(set (match_operand:DF 0 "s_register_operand")
6167 (match_operand:DF 1 "const_double_operand"))]
6168 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6169 [(const_int 0)]
6170 {
6171 long buf[2];
6172 int order = BYTES_BIG_ENDIAN ? 1 : 0;
6173 real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
6174 unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
6175 ival |= (zext_hwi (buf[1 - order], 32) << 32);
6176 rtx cst = gen_int_mode (ival, DImode);
6177 emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
6178 DONE;
6179 }
6180 )
6181 \f
6182
6183 ;; load- and store-multiple insns
6184 ;; The ARM can load/store any set of registers, provided that they are in
6185 ;; ascending order, but these expanders assume a contiguous set.
6186
6187 (define_expand "load_multiple"
6188 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6189 (match_operand:SI 1 "" ""))
6190 (use (match_operand:SI 2 "" ""))])]
6191 "TARGET_32BIT"
6192 {
6193 HOST_WIDE_INT offset = 0;
6194
6195 /* Support only fixed-point (core) registers. */
6196 if (!CONST_INT_P (operands[2])
6197 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6198 || INTVAL (operands[2]) < 2
6199 || !MEM_P (operands[1])
6200 || !REG_P (operands[0])
6201 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6202 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6203 FAIL;
6204
6205 operands[3]
6206 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6207 INTVAL (operands[2]),
6208 force_reg (SImode, XEXP (operands[1], 0)),
6209 FALSE, operands[1], &offset);
6210 })
6211
6212 (define_expand "store_multiple"
6213 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6214 (match_operand:SI 1 "" ""))
6215 (use (match_operand:SI 2 "" ""))])]
6216 "TARGET_32BIT"
6217 {
6218 HOST_WIDE_INT offset = 0;
6219
6220 /* Support only fixed-point (core) registers. */
6221 if (!CONST_INT_P (operands[2])
6222 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6223 || INTVAL (operands[2]) < 2
6224 || !REG_P (operands[1])
6225 || !MEM_P (operands[0])
6226 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6227 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6228 FAIL;
6229
6230 operands[3]
6231 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6232 INTVAL (operands[2]),
6233 force_reg (SImode, XEXP (operands[0], 0)),
6234 FALSE, operands[0], &offset);
6235 })
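;; As an illustration (hypothetical operands): a load_multiple of three
;; registers starting at r4 from the address held in r0 is expanded by
;; arm_gen_load_multiple into a single block transfer, roughly
;;   ldm r0, {r4, r5, r6}
;; and store_multiple produces the corresponding stm.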
6236
6237
6238 (define_expand "setmemsi"
6239 [(match_operand:BLK 0 "general_operand")
6240 (match_operand:SI 1 "const_int_operand")
6241 (match_operand:SI 2 "const_int_operand")
6242 (match_operand:SI 3 "const_int_operand")]
6243 "TARGET_32BIT"
6244 {
6245 if (arm_gen_setmem (operands))
6246 DONE;
6247
6248 FAIL;
6249 })
6250
6251
6252 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6253 ;; We could let this apply for blocks of less than this, but it clobbers so
6254 ;; many registers that there is then probably a better way.
6255
6256 (define_expand "cpymemqi"
6257 [(match_operand:BLK 0 "general_operand")
6258 (match_operand:BLK 1 "general_operand")
6259 (match_operand:SI 2 "const_int_operand")
6260 (match_operand:SI 3 "const_int_operand")]
6261 ""
6262 "
6263 if (TARGET_32BIT)
6264 {
6265 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
6266 && !optimize_function_for_size_p (cfun))
6267 {
6268 if (gen_cpymem_ldrd_strd (operands))
6269 DONE;
6270 FAIL;
6271 }
6272
6273 if (arm_gen_cpymemqi (operands))
6274 DONE;
6275 FAIL;
6276 }
6277 else /* TARGET_THUMB1 */
6278 {
6279 if ( INTVAL (operands[3]) != 4
6280 || INTVAL (operands[2]) > 48)
6281 FAIL;
6282
6283 thumb_expand_cpymemqi (operands);
6284 DONE;
6285 }
6286 "
6287 )
6288 \f
6289
6290 ;; Compare & branch insns
6291 ;; The range calculations are done as follows:
6292 ;; For forward branches, the address calculation returns the address of
6293 ;; the next instruction. This is 2 beyond the branch instruction.
6294 ;; For backward branches, the address calculation returns the address of
6295 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6296 ;; instruction for the shortest sequence, and 4 before the branch instruction
6297 ;; if we have to jump around an unconditional branch.
6298 ;; To the basic branch range the PC offset must be added (this is +4).
6299 ;; So for forward branches we have
6300 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6301 ;; And for backward branches we have
6302 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6303 ;;
6304 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6305 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
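;; Worked example for 'b': forwards, 2046 - 2 + 4 = 2048; backwards, in the
;; longer sequence that jumps around an unconditional branch,
;; -2048 - (-4) + 4 = -2040, which gives the (-2040 -> 2048) range above.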
6306
6307 (define_expand "cbranchsi4"
6308 [(set (pc) (if_then_else
6309 (match_operator 0 "expandable_comparison_operator"
6310 [(match_operand:SI 1 "s_register_operand")
6311 (match_operand:SI 2 "nonmemory_operand")])
6312 (label_ref (match_operand 3 "" ""))
6313 (pc)))]
6314 "TARGET_EITHER"
6315 "
6316 if (!TARGET_THUMB1)
6317 {
6318 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6319 FAIL;
6320 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6321 operands[3]));
6322 DONE;
6323 }
6324 if (thumb1_cmpneg_operand (operands[2], SImode))
6325 {
6326 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6327 operands[3], operands[0]));
6328 DONE;
6329 }
6330 if (!thumb1_cmp_operand (operands[2], SImode))
6331 operands[2] = force_reg (SImode, operands[2]);
6332 ")
6333
6334 (define_expand "cbranchsf4"
6335 [(set (pc) (if_then_else
6336 (match_operator 0 "expandable_comparison_operator"
6337 [(match_operand:SF 1 "s_register_operand")
6338 (match_operand:SF 2 "vfp_compare_operand")])
6339 (label_ref (match_operand 3 "" ""))
6340 (pc)))]
6341 "TARGET_32BIT && TARGET_HARD_FLOAT"
6342 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6343 operands[3])); DONE;"
6344 )
6345
6346 (define_expand "cbranchdf4"
6347 [(set (pc) (if_then_else
6348 (match_operator 0 "expandable_comparison_operator"
6349 [(match_operand:DF 1 "s_register_operand")
6350 (match_operand:DF 2 "vfp_compare_operand")])
6351 (label_ref (match_operand 3 "" ""))
6352 (pc)))]
6353 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6354 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6355 operands[3])); DONE;"
6356 )
6357
6358 (define_expand "cbranchdi4"
6359 [(set (pc) (if_then_else
6360 (match_operator 0 "expandable_comparison_operator"
6361 [(match_operand:DI 1 "s_register_operand")
6362 (match_operand:DI 2 "cmpdi_operand")])
6363 (label_ref (match_operand 3 "" ""))
6364 (pc)))]
6365 "TARGET_32BIT"
6366 "{
6367 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6368 FAIL;
6369 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6370 operands[3]));
6371 DONE;
6372 }"
6373 )
6374
6375 ;; Comparison and test insns
6376
6377 (define_insn "*arm_cmpsi_insn"
6378 [(set (reg:CC CC_REGNUM)
6379 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
6380 (match_operand:SI 1 "arm_add_operand" "Py,r,r,I,L")))]
6381 "TARGET_32BIT"
6382 "@
6383 cmp%?\\t%0, %1
6384 cmp%?\\t%0, %1
6385 cmp%?\\t%0, %1
6386 cmp%?\\t%0, %1
6387 cmn%?\\t%0, #%n1"
6388 [(set_attr "conds" "set")
6389 (set_attr "arch" "t2,t2,any,any,any")
6390 (set_attr "length" "2,2,4,4,4")
6391 (set_attr "predicable" "yes")
6392 (set_attr "predicable_short_it" "yes,yes,yes,no,no")
6393 (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
6394 )
6395
6396 (define_insn "*cmpsi_shiftsi"
6397 [(set (reg:CC CC_REGNUM)
6398 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r,r")
6399 (match_operator:SI 3 "shift_operator"
6400 [(match_operand:SI 1 "s_register_operand" "r,r,r")
6401 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])))]
6402 "TARGET_32BIT"
6403 "cmp\\t%0, %1%S3"
6404 [(set_attr "conds" "set")
6405 (set_attr "shift" "1")
6406 (set_attr "arch" "32,a,a")
6407 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
6408
6409 (define_insn "*cmpsi_shiftsi_swp"
6410 [(set (reg:CC_SWP CC_REGNUM)
6411 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
6412 [(match_operand:SI 1 "s_register_operand" "r,r,r")
6413 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])
6414 (match_operand:SI 0 "s_register_operand" "r,r,r")))]
6415 "TARGET_32BIT"
6416 "cmp%?\\t%0, %1%S3"
6417 [(set_attr "conds" "set")
6418 (set_attr "shift" "1")
6419 (set_attr "arch" "32,a,a")
6420 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
6421
6422 (define_insn "*arm_cmpsi_negshiftsi_si"
6423 [(set (reg:CC_Z CC_REGNUM)
6424 (compare:CC_Z
6425 (neg:SI (match_operator:SI 1 "shift_operator"
6426 [(match_operand:SI 2 "s_register_operand" "r")
6427 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
6428 (match_operand:SI 0 "s_register_operand" "r")))]
6429 "TARGET_ARM"
6430 "cmn%?\\t%0, %2%S1"
6431 [(set_attr "conds" "set")
6432 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
6433 (const_string "alus_shift_imm")
6434 (const_string "alus_shift_reg")))
6435 (set_attr "predicable" "yes")]
6436 )
6437
6438 ;; DImode comparisons. The generic code generates branches that
6439 ;; if-conversion cannot reduce to a conditional compare, so we do
6440 ;; that directly.
6441
6442 (define_insn "*arm_cmpdi_insn"
6443 [(set (reg:CC_NCV CC_REGNUM)
6444 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
6445 (match_operand:DI 1 "arm_di_operand" "rDi")))
6446 (clobber (match_scratch:SI 2 "=r"))]
6447 "TARGET_32BIT"
6448 "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
6449 [(set_attr "conds" "set")
6450 (set_attr "length" "8")
6451 (set_attr "type" "multiple")]
6452 )
6453
6454 (define_insn_and_split "*arm_cmpdi_unsigned"
6455 [(set (reg:CC_CZ CC_REGNUM)
6456 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "l,r,r,r")
6457 (match_operand:DI 1 "arm_di_operand" "Py,r,Di,rDi")))]
6458
6459 "TARGET_32BIT"
6460 "#" ; "cmp\\t%R0, %R1\;it eq\;cmpeq\\t%Q0, %Q1"
6461 "&& reload_completed"
6462 [(set (reg:CC CC_REGNUM)
6463 (compare:CC (match_dup 2) (match_dup 3)))
6464 (cond_exec (eq:SI (reg:CC CC_REGNUM) (const_int 0))
6465 (set (reg:CC CC_REGNUM)
6466 (compare:CC (match_dup 0) (match_dup 1))))]
6467 {
6468 operands[2] = gen_highpart (SImode, operands[0]);
6469 operands[0] = gen_lowpart (SImode, operands[0]);
6470 if (CONST_INT_P (operands[1]))
6471 operands[3] = gen_highpart_mode (SImode, DImode, operands[1]);
6472 else
6473 operands[3] = gen_highpart (SImode, operands[1]);
6474 operands[1] = gen_lowpart (SImode, operands[1]);
6475 }
6476 [(set_attr "conds" "set")
6477 (set_attr "enabled_for_short_it" "yes,yes,no,*")
6478 (set_attr "arch" "t2,t2,t2,a")
6479 (set_attr "length" "6,6,10,8")
6480 (set_attr "type" "multiple")]
6481 )
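;; Illustrative Thumb-2 output of the split above (hypothetical allocation,
;; with operand 0 in r0/r1 and operand 1 in r2/r3, low word first):
;;   cmp   r1, r3        @ compare the high words
;;   it    eq
;;   cmpeq r0, r2        @ compare the low words only if the high words match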
6482
6483 (define_insn "*arm_cmpdi_zero"
6484 [(set (reg:CC_Z CC_REGNUM)
6485 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
6486 (const_int 0)))
6487 (clobber (match_scratch:SI 1 "=r"))]
6488 "TARGET_32BIT"
6489 "orrs%?\\t%1, %Q0, %R0"
6490 [(set_attr "conds" "set")
6491 (set_attr "type" "logics_reg")]
6492 )
6493
6494 ; This insn allows redundant compares to be removed by cse; nothing should
6495 ; ever appear in the output file, since (set (reg x) (reg x)) is a no-op that
6496 ; is deleted later on. The match_dup will match the mode here, so that
6497 ; mode changes of the condition codes aren't lost by this even though we don't
6498 ; specify what they are.
6499
6500 (define_insn "*deleted_compare"
6501 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
6502 "TARGET_32BIT"
6503 "\\t%@ deleted compare"
6504 [(set_attr "conds" "set")
6505 (set_attr "length" "0")
6506 (set_attr "type" "no_insn")]
6507 )
6508
6509 \f
6510 ;; Conditional branch insns
6511
6512 (define_expand "cbranch_cc"
6513 [(set (pc)
6514 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
6515 (match_operand 2 "" "")])
6516 (label_ref (match_operand 3 "" ""))
6517 (pc)))]
6518 "TARGET_32BIT"
6519 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
6520 operands[1], operands[2], NULL_RTX);
6521 operands[2] = const0_rtx;"
6522 )
6523
6524 ;;
6525 ;; Patterns to match conditional branch insns.
6526 ;;
6527
6528 (define_insn "arm_cond_branch"
6529 [(set (pc)
6530 (if_then_else (match_operator 1 "arm_comparison_operator"
6531 [(match_operand 2 "cc_register" "") (const_int 0)])
6532 (label_ref (match_operand 0 "" ""))
6533 (pc)))]
6534 "TARGET_32BIT"
6535 "*
6536 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
6537 {
6538 arm_ccfsm_state += 2;
6539 return \"\";
6540 }
6541 return \"b%d1\\t%l0\";
6542 "
6543 [(set_attr "conds" "use")
6544 (set_attr "type" "branch")
6545 (set (attr "length")
6546 (if_then_else
6547 (and (match_test "TARGET_THUMB2")
6548 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
6549 (le (minus (match_dup 0) (pc)) (const_int 256))))
6550 (const_int 2)
6551 (const_int 4)))]
6552 )
6553
6554 (define_insn "*arm_cond_branch_reversed"
6555 [(set (pc)
6556 (if_then_else (match_operator 1 "arm_comparison_operator"
6557 [(match_operand 2 "cc_register" "") (const_int 0)])
6558 (pc)
6559 (label_ref (match_operand 0 "" ""))))]
6560 "TARGET_32BIT"
6561 "*
6562 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
6563 {
6564 arm_ccfsm_state += 2;
6565 return \"\";
6566 }
6567 return \"b%D1\\t%l0\";
6568 "
6569 [(set_attr "conds" "use")
6570 (set_attr "type" "branch")
6571 (set (attr "length")
6572 (if_then_else
6573 (and (match_test "TARGET_THUMB2")
6574 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
6575 (le (minus (match_dup 0) (pc)) (const_int 256))))
6576 (const_int 2)
6577 (const_int 4)))]
6578 )
6579
6580 \f
6581
6582 ; scc insns
6583
6584 (define_expand "cstore_cc"
6585 [(set (match_operand:SI 0 "s_register_operand")
6586 (match_operator:SI 1 "" [(match_operand 2 "" "")
6587 (match_operand 3 "" "")]))]
6588 "TARGET_32BIT"
6589 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
6590 operands[2], operands[3], NULL_RTX);
6591 operands[3] = const0_rtx;"
6592 )
6593
6594 (define_insn_and_split "*mov_scc"
6595 [(set (match_operand:SI 0 "s_register_operand" "=r")
6596 (match_operator:SI 1 "arm_comparison_operator_mode"
6597 [(match_operand 2 "cc_register" "") (const_int 0)]))]
6598 "TARGET_ARM"
6599 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
6600 "TARGET_ARM"
6601 [(set (match_dup 0)
6602 (if_then_else:SI (match_dup 1)
6603 (const_int 1)
6604 (const_int 0)))]
6605 ""
6606 [(set_attr "conds" "use")
6607 (set_attr "length" "8")
6608 (set_attr "type" "multiple")]
6609 )
6610
6611 (define_insn_and_split "*mov_negscc"
6612 [(set (match_operand:SI 0 "s_register_operand" "=r")
6613 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
6614 [(match_operand 2 "cc_register" "") (const_int 0)])))]
6615 "TARGET_ARM"
6616 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
6617 "TARGET_ARM"
6618 [(set (match_dup 0)
6619 (if_then_else:SI (match_dup 1)
6620 (match_dup 3)
6621 (const_int 0)))]
6622 {
6623 operands[3] = GEN_INT (~0);
6624 }
6625 [(set_attr "conds" "use")
6626 (set_attr "length" "8")
6627 (set_attr "type" "multiple")]
6628 )
6629
6630 (define_insn_and_split "*mov_notscc"
6631 [(set (match_operand:SI 0 "s_register_operand" "=r")
6632 (not:SI (match_operator:SI 1 "arm_comparison_operator"
6633 [(match_operand 2 "cc_register" "") (const_int 0)])))]
6634 "TARGET_ARM"
6635 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
6636 "TARGET_ARM"
6637 [(set (match_dup 0)
6638 (if_then_else:SI (match_dup 1)
6639 (match_dup 3)
6640 (match_dup 4)))]
6641 {
6642 operands[3] = GEN_INT (~1);
6643 operands[4] = GEN_INT (~0);
6644 }
6645 [(set_attr "conds" "use")
6646 (set_attr "length" "8")
6647 (set_attr "type" "multiple")]
6648 )
6649
6650 (define_expand "cstoresi4"
6651 [(set (match_operand:SI 0 "s_register_operand")
6652 (match_operator:SI 1 "expandable_comparison_operator"
6653 [(match_operand:SI 2 "s_register_operand")
6654 (match_operand:SI 3 "reg_or_int_operand")]))]
6655 "TARGET_32BIT || TARGET_THUMB1"
6656 "{
6657 rtx op3, scratch, scratch2;
6658
6659 if (!TARGET_THUMB1)
6660 {
6661 if (!arm_add_operand (operands[3], SImode))
6662 operands[3] = force_reg (SImode, operands[3]);
6663 emit_insn (gen_cstore_cc (operands[0], operands[1],
6664 operands[2], operands[3]));
6665 DONE;
6666 }
6667
6668 if (operands[3] == const0_rtx)
6669 {
6670 switch (GET_CODE (operands[1]))
6671 {
6672 case EQ:
6673 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
6674 break;
6675
6676 case NE:
6677 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
6678 break;
6679
6680 case LE:
6681 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
6682 NULL_RTX, 0, OPTAB_WIDEN);
6683 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
6684 NULL_RTX, 0, OPTAB_WIDEN);
6685 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
6686 operands[0], 1, OPTAB_WIDEN);
6687 break;
6688
6689 case GE:
6690 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
6691 NULL_RTX, 1);
6692 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
6693 operands[0], 1, OPTAB_WIDEN);
6694 break;
6695
6696 case GT:
6697 scratch = expand_binop (SImode, ashr_optab, operands[2],
6698 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
6699 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
6700 NULL_RTX, 0, OPTAB_WIDEN);
6701 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
6702 0, OPTAB_WIDEN);
6703 break;
6704
6705 /* LT is handled by generic code. No need for unsigned with 0. */
6706 default:
6707 FAIL;
6708 }
6709 DONE;
6710 }
6711
6712 switch (GET_CODE (operands[1]))
6713 {
6714 case EQ:
6715 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
6716 NULL_RTX, 0, OPTAB_WIDEN);
6717 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
6718 break;
6719
6720 case NE:
6721 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
6722 NULL_RTX, 0, OPTAB_WIDEN);
6723 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
6724 break;
6725
6726 case LE:
6727 op3 = force_reg (SImode, operands[3]);
6728
6729 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
6730 NULL_RTX, 1, OPTAB_WIDEN);
6731 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
6732 NULL_RTX, 0, OPTAB_WIDEN);
6733 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
6734 op3, operands[2]));
6735 break;
6736
6737 case GE:
6738 op3 = operands[3];
6739 if (!thumb1_cmp_operand (op3, SImode))
6740 op3 = force_reg (SImode, op3);
6741 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
6742 NULL_RTX, 0, OPTAB_WIDEN);
6743 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
6744 NULL_RTX, 1, OPTAB_WIDEN);
6745 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
6746 operands[2], op3));
6747 break;
6748
6749 case LEU:
6750 op3 = force_reg (SImode, operands[3]);
6751 scratch = force_reg (SImode, const0_rtx);
6752 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
6753 op3, operands[2]));
6754 break;
6755
6756 case GEU:
6757 op3 = operands[3];
6758 if (!thumb1_cmp_operand (op3, SImode))
6759 op3 = force_reg (SImode, op3);
6760 scratch = force_reg (SImode, const0_rtx);
6761 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
6762 operands[2], op3));
6763 break;
6764
6765 case LTU:
6766 op3 = operands[3];
6767 if (!thumb1_cmp_operand (op3, SImode))
6768 op3 = force_reg (SImode, op3);
6769 scratch = gen_reg_rtx (SImode);
6770 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
6771 break;
6772
6773 case GTU:
6774 op3 = force_reg (SImode, operands[3]);
6775 scratch = gen_reg_rtx (SImode);
6776 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
6777 break;
6778
6779 /* No good sequences for GT, LT. */
6780 default:
6781 FAIL;
6782 }
6783 DONE;
6784 }")
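;; Worked example for the Thumb-1 LE-against-zero sequence above: the value
;; stored is (x | (x - 1)) >> 31 using a logical shift.  If x == 0 then x - 1
;; is -1 and the OR is negative; if x < 0 the sign bit is already set; if
;; x > 0 both x and x - 1 are non-negative, so the result is 0 as required.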
6785
6786 (define_expand "cstorehf4"
6787 [(set (match_operand:SI 0 "s_register_operand")
6788 (match_operator:SI 1 "expandable_comparison_operator"
6789 [(match_operand:HF 2 "s_register_operand")
6790 (match_operand:HF 3 "vfp_compare_operand")]))]
6791 "TARGET_VFP_FP16INST"
6792 {
6793 if (!arm_validize_comparison (&operands[1],
6794 &operands[2],
6795 &operands[3]))
6796 FAIL;
6797
6798 emit_insn (gen_cstore_cc (operands[0], operands[1],
6799 operands[2], operands[3]));
6800 DONE;
6801 }
6802 )
6803
6804 (define_expand "cstoresf4"
6805 [(set (match_operand:SI 0 "s_register_operand")
6806 (match_operator:SI 1 "expandable_comparison_operator"
6807 [(match_operand:SF 2 "s_register_operand")
6808 (match_operand:SF 3 "vfp_compare_operand")]))]
6809 "TARGET_32BIT && TARGET_HARD_FLOAT"
6810 "emit_insn (gen_cstore_cc (operands[0], operands[1],
6811 operands[2], operands[3])); DONE;"
6812 )
6813
6814 (define_expand "cstoredf4"
6815 [(set (match_operand:SI 0 "s_register_operand")
6816 (match_operator:SI 1 "expandable_comparison_operator"
6817 [(match_operand:DF 2 "s_register_operand")
6818 (match_operand:DF 3 "vfp_compare_operand")]))]
6819 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6820 "emit_insn (gen_cstore_cc (operands[0], operands[1],
6821 operands[2], operands[3])); DONE;"
6822 )
6823
6824 (define_expand "cstoredi4"
6825 [(set (match_operand:SI 0 "s_register_operand")
6826 (match_operator:SI 1 "expandable_comparison_operator"
6827 [(match_operand:DI 2 "s_register_operand")
6828 (match_operand:DI 3 "cmpdi_operand")]))]
6829 "TARGET_32BIT"
6830 "{
6831 if (!arm_validize_comparison (&operands[1],
6832 &operands[2],
6833 &operands[3]))
6834 FAIL;
6835 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
6836 operands[3]));
6837 DONE;
6838 }"
6839 )
6840
6841 \f
6842 ;; Conditional move insns
6843
6844 (define_expand "movsicc"
6845 [(set (match_operand:SI 0 "s_register_operand")
6846 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
6847 (match_operand:SI 2 "arm_not_operand")
6848 (match_operand:SI 3 "arm_not_operand")))]
6849 "TARGET_32BIT"
6850 "
6851 {
6852 enum rtx_code code;
6853 rtx ccreg;
6854
6855 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6856 &XEXP (operands[1], 1)))
6857 FAIL;
6858
6859 code = GET_CODE (operands[1]);
6860 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6861 XEXP (operands[1], 1), NULL_RTX);
6862 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6863 }"
6864 )
6865
6866 (define_expand "movhfcc"
6867 [(set (match_operand:HF 0 "s_register_operand")
6868 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
6869 (match_operand:HF 2 "s_register_operand")
6870 (match_operand:HF 3 "s_register_operand")))]
6871 "TARGET_VFP_FP16INST"
6872 "
6873 {
6874 enum rtx_code code = GET_CODE (operands[1]);
6875 rtx ccreg;
6876
6877 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6878 &XEXP (operands[1], 1)))
6879 FAIL;
6880
6881 code = GET_CODE (operands[1]);
6882 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6883 XEXP (operands[1], 1), NULL_RTX);
6884 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6885 }"
6886 )
6887
6888 (define_expand "movsfcc"
6889 [(set (match_operand:SF 0 "s_register_operand")
6890 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
6891 (match_operand:SF 2 "s_register_operand")
6892 (match_operand:SF 3 "s_register_operand")))]
6893 "TARGET_32BIT && TARGET_HARD_FLOAT"
6894 "
6895 {
6896 enum rtx_code code = GET_CODE (operands[1]);
6897 rtx ccreg;
6898
6899 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6900 &XEXP (operands[1], 1)))
6901 FAIL;
6902
6903 code = GET_CODE (operands[1]);
6904 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6905 XEXP (operands[1], 1), NULL_RTX);
6906 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6907 }"
6908 )
6909
6910 (define_expand "movdfcc"
6911 [(set (match_operand:DF 0 "s_register_operand")
6912 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
6913 (match_operand:DF 2 "s_register_operand")
6914 (match_operand:DF 3 "s_register_operand")))]
6915 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
6916 "
6917 {
6918 enum rtx_code code = GET_CODE (operands[1]);
6919 rtx ccreg;
6920
6921 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6922 &XEXP (operands[1], 1)))
6923 FAIL;
6924 code = GET_CODE (operands[1]);
6925 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6926 XEXP (operands[1], 1), NULL_RTX);
6927 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6928 }"
6929 )
6930
6931 (define_insn "*cmov<mode>"
6932 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
6933 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
6934 [(match_operand 2 "cc_register" "") (const_int 0)])
6935 (match_operand:SDF 3 "s_register_operand"
6936 "<F_constraint>")
6937 (match_operand:SDF 4 "s_register_operand"
6938 "<F_constraint>")))]
6939 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
6940 "*
6941 {
6942 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
6943 switch (code)
6944 {
6945 case ARM_GE:
6946 case ARM_GT:
6947 case ARM_EQ:
6948 case ARM_VS:
6949 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
6950 case ARM_LT:
6951 case ARM_LE:
6952 case ARM_NE:
6953 case ARM_VC:
6954 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
6955 default:
6956 gcc_unreachable ();
6957 }
6958 return \"\";
6959 }"
6960 [(set_attr "conds" "use")
6961 (set_attr "type" "fcsel")]
6962 )
6963
6964 (define_insn "*cmovhf"
6965 [(set (match_operand:HF 0 "s_register_operand" "=t")
6966 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
6967 [(match_operand 2 "cc_register" "") (const_int 0)])
6968 (match_operand:HF 3 "s_register_operand" "t")
6969 (match_operand:HF 4 "s_register_operand" "t")))]
6970 "TARGET_VFP_FP16INST"
6971 "*
6972 {
6973 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
6974 switch (code)
6975 {
6976 case ARM_GE:
6977 case ARM_GT:
6978 case ARM_EQ:
6979 case ARM_VS:
6980 return \"vsel%d1.f16\\t%0, %3, %4\";
6981 case ARM_LT:
6982 case ARM_LE:
6983 case ARM_NE:
6984 case ARM_VC:
6985 return \"vsel%D1.f16\\t%0, %4, %3\";
6986 default:
6987 gcc_unreachable ();
6988 }
6989 return \"\";
6990 }"
6991 [(set_attr "conds" "use")
6992 (set_attr "type" "fcsel")]
6993 )
6994
6995 (define_insn_and_split "*movsicc_insn"
6996 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
6997 (if_then_else:SI
6998 (match_operator 3 "arm_comparison_operator"
6999 [(match_operand 4 "cc_register" "") (const_int 0)])
7000 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7001 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7002 "TARGET_ARM"
7003 "@
7004 mov%D3\\t%0, %2
7005 mvn%D3\\t%0, #%B2
7006 mov%d3\\t%0, %1
7007 mvn%d3\\t%0, #%B1
7008 #
7009 #
7010 #
7011 #"
7012 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7013 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7014 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7015 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7016 "&& reload_completed"
7017 [(const_int 0)]
7018 {
7019 enum rtx_code rev_code;
7020 machine_mode mode;
7021 rtx rev_cond;
7022
7023 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7024 operands[3],
7025 gen_rtx_SET (operands[0], operands[1])));
7026
7027 rev_code = GET_CODE (operands[3]);
7028 mode = GET_MODE (operands[4]);
7029 if (mode == CCFPmode || mode == CCFPEmode)
7030 rev_code = reverse_condition_maybe_unordered (rev_code);
7031 else
7032 rev_code = reverse_condition (rev_code);
7033
7034 rev_cond = gen_rtx_fmt_ee (rev_code,
7035 VOIDmode,
7036 operands[4],
7037 const0_rtx);
7038 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7039 rev_cond,
7040 gen_rtx_SET (operands[0], operands[2])));
7041 DONE;
7042 }
7043 [(set_attr "length" "4,4,4,4,8,8,8,8")
7044 (set_attr "conds" "use")
7045 (set_attr_alternative "type"
7046 [(if_then_else (match_operand 2 "const_int_operand" "")
7047 (const_string "mov_imm")
7048 (const_string "mov_reg"))
7049 (const_string "mvn_imm")
7050 (if_then_else (match_operand 1 "const_int_operand" "")
7051 (const_string "mov_imm")
7052 (const_string "mov_reg"))
7053 (const_string "mvn_imm")
7054 (const_string "multiple")
7055 (const_string "multiple")
7056 (const_string "multiple")
7057 (const_string "multiple")])]
7058 )
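
;; An illustrative sketch (registers arbitrary): once a comparison has set the
;; condition codes, "x = (a == b) ? c : 7" can be emitted through this pattern
;; as two predicated moves, roughly
;;   cmp   r1, r2
;;   moveq r0, r3
;;   movne r0, #7
;; The single-instruction alternatives cover the cases where the destination
;; already holds one of the two source values.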
7059
7060 (define_insn "*movsfcc_soft_insn"
7061 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7062 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7063 [(match_operand 4 "cc_register" "") (const_int 0)])
7064 (match_operand:SF 1 "s_register_operand" "0,r")
7065 (match_operand:SF 2 "s_register_operand" "r,0")))]
7066 "TARGET_ARM && TARGET_SOFT_FLOAT"
7067 "@
7068 mov%D3\\t%0, %2
7069 mov%d3\\t%0, %1"
7070 [(set_attr "conds" "use")
7071 (set_attr "type" "mov_reg")]
7072 )
7073
7074 \f
7075 ;; Jump and linkage insns
7076
7077 (define_expand "jump"
7078 [(set (pc)
7079 (label_ref (match_operand 0 "" "")))]
7080 "TARGET_EITHER"
7081 ""
7082 )
7083
7084 (define_insn "*arm_jump"
7085 [(set (pc)
7086 (label_ref (match_operand 0 "" "")))]
7087 "TARGET_32BIT"
7088 "*
7089 {
7090 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7091 {
7092 arm_ccfsm_state += 2;
7093 return \"\";
7094 }
7095 return \"b%?\\t%l0\";
7096 }
7097 "
7098 [(set_attr "predicable" "yes")
7099 (set (attr "length")
7100 (if_then_else
7101 (and (match_test "TARGET_THUMB2")
7102 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7103 (le (minus (match_dup 0) (pc)) (const_int 2048))))
7104 (const_int 2)
7105 (const_int 4)))
7106 (set_attr "type" "branch")]
7107 )
7108
7109 (define_expand "call"
7110 [(parallel [(call (match_operand 0 "memory_operand")
7111 (match_operand 1 "general_operand"))
7112 (use (match_operand 2 "" ""))
7113 (clobber (reg:SI LR_REGNUM))])]
7114 "TARGET_EITHER"
7115 "
7116 {
7117 rtx callee, pat;
7118 tree addr = MEM_EXPR (operands[0]);
7119
7120 /* In an untyped call, we can get NULL for operand 2. */
7121 if (operands[2] == NULL_RTX)
7122 operands[2] = const0_rtx;
7123
7124 /* Decide if we should generate indirect calls by loading the
7125 32-bit address of the callee into a register before performing the
7126 branch and link. */
7127 callee = XEXP (operands[0], 0);
7128 if (GET_CODE (callee) == SYMBOL_REF
7129 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7130 : !REG_P (callee))
7131 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7132
7133 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
7134 /* Indirect call: set r9 with FDPIC value of callee. */
7135 XEXP (operands[0], 0)
7136 = arm_load_function_descriptor (XEXP (operands[0], 0));
7137
7138 if (detect_cmse_nonsecure_call (addr))
7139 {
7140 pat = gen_nonsecure_call_internal (operands[0], operands[1],
7141 operands[2]);
7142 emit_call_insn (pat);
7143 }
7144 else
7145 {
7146 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7147 arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
7148 }
7149
7150 /* Restore FDPIC register (r9) after call. */
7151 if (TARGET_FDPIC)
7152 {
7153 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7154 rtx initial_fdpic_reg
7155 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7156
7157 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7158 initial_fdpic_reg));
7159 }
7160
7161 DONE;
7162 }"
7163 )
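
;; Illustrative only: a short direct call is a single "bl callee" (or
;; "bl callee(PLT)" when a PLT relocation is needed), whereas a long or
;; register-indirect call first materialises the address and branches through
;; a register, e.g. roughly
;;   ldr r3, =callee      @ or any other way of loading the address
;;   blx r3               @ a "mov lr, pc; mov pc, r3" pair before ARMv5T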
7164
7165 (define_insn "restore_pic_register_after_call"
7166 [(set (match_operand:SI 0 "s_register_operand" "+r,r")
7167 (unspec:SI [(match_dup 0)
7168 (match_operand:SI 1 "nonimmediate_operand" "r,m")]
7169 UNSPEC_PIC_RESTORE))]
7170 ""
7171 "@
7172 mov\t%0, %1
7173 ldr\t%0, %1"
7174 )
7175
7176 (define_expand "call_internal"
7177 [(parallel [(call (match_operand 0 "memory_operand")
7178 (match_operand 1 "general_operand"))
7179 (use (match_operand 2 "" ""))
7180 (clobber (reg:SI LR_REGNUM))])])
7181
7182 (define_expand "nonsecure_call_internal"
7183 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
7184 UNSPEC_NONSECURE_MEM)
7185 (match_operand 1 "general_operand"))
7186 (use (match_operand 2 "" ""))
7187 (clobber (reg:SI LR_REGNUM))])]
7188 "use_cmse"
7189 "
7190 {
7191 rtx tmp;
7192 tmp = copy_to_suggested_reg (XEXP (operands[0], 0),
7193 gen_rtx_REG (SImode, R4_REGNUM),
7194 SImode);
7195
7196 operands[0] = replace_equiv_address (operands[0], tmp);
7197 }")
7198
7199 (define_insn "*call_reg_armv5"
7200 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7201 (match_operand 1 "" ""))
7202 (use (match_operand 2 "" ""))
7203 (clobber (reg:SI LR_REGNUM))]
7204 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7205 "blx%?\\t%0"
7206 [(set_attr "type" "call")]
7207 )
7208
7209 (define_insn "*call_reg_arm"
7210 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7211 (match_operand 1 "" ""))
7212 (use (match_operand 2 "" ""))
7213 (clobber (reg:SI LR_REGNUM))]
7214 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7215 "*
7216 return output_call (operands);
7217 "
7218 ;; Length is the worst case; normally only two instructions are emitted.
7219 [(set_attr "length" "12")
7220 (set_attr "type" "call")]
7221 )
7222
7223
7224 (define_expand "call_value"
7225 [(parallel [(set (match_operand 0 "" "")
7226 (call (match_operand 1 "memory_operand")
7227 (match_operand 2 "general_operand")))
7228 (use (match_operand 3 "" ""))
7229 (clobber (reg:SI LR_REGNUM))])]
7230 "TARGET_EITHER"
7231 "
7232 {
7233 rtx pat, callee;
7234 tree addr = MEM_EXPR (operands[1]);
7235
7236 /* In an untyped call, we can get NULL for operand 3. */
7237 if (operands[3] == 0)
7238 operands[3] = const0_rtx;
7239
7240 /* Decide if we should generate indirect calls by loading the
7241 32-bit address of the callee into a register before performing the
7242 branch and link. */
7243 callee = XEXP (operands[1], 0);
7244 if (GET_CODE (callee) == SYMBOL_REF
7245 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7246 : !REG_P (callee))
7247 XEXP (operands[1], 0) = force_reg (Pmode, callee);
7248
7249 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
7250 /* Indirect call: set r9 with FDPIC value of callee. */
7251 XEXP (operands[1], 0)
7252 = arm_load_function_descriptor (XEXP (operands[1], 0));
7253
7254 if (detect_cmse_nonsecure_call (addr))
7255 {
7256 pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
7257 operands[2], operands[3]);
7258 emit_call_insn (pat);
7259 }
7260 else
7261 {
7262 pat = gen_call_value_internal (operands[0], operands[1],
7263 operands[2], operands[3]);
7264 arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
7265 }
7266
7267 /* Restore FDPIC register (r9) after call. */
7268 if (TARGET_FDPIC)
7269 {
7270 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7271 rtx initial_fdpic_reg
7272 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7273
7274 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7275 initial_fdpic_reg));
7276 }
7277
7278 DONE;
7279 }"
7280 )
7281
7282 (define_expand "call_value_internal"
7283 [(parallel [(set (match_operand 0 "" "")
7284 (call (match_operand 1 "memory_operand")
7285 (match_operand 2 "general_operand")))
7286 (use (match_operand 3 "" ""))
7287 (clobber (reg:SI LR_REGNUM))])])
7288
7289 (define_expand "nonsecure_call_value_internal"
7290 [(parallel [(set (match_operand 0 "" "")
7291 (call (unspec:SI [(match_operand 1 "memory_operand")]
7292 UNSPEC_NONSECURE_MEM)
7293 (match_operand 2 "general_operand")))
7294 (use (match_operand 3 "" ""))
7295 (clobber (reg:SI LR_REGNUM))])]
7296 "use_cmse"
7297 "
7298 {
7299 rtx tmp;
7300 tmp = copy_to_suggested_reg (XEXP (operands[1], 0),
7301 gen_rtx_REG (SImode, R4_REGNUM),
7302 SImode);
7303
7304 operands[1] = replace_equiv_address (operands[1], tmp);
7305 }")
7306
7307 (define_insn "*call_value_reg_armv5"
7308 [(set (match_operand 0 "" "")
7309 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7310 (match_operand 2 "" "")))
7311 (use (match_operand 3 "" ""))
7312 (clobber (reg:SI LR_REGNUM))]
7313 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7314 "blx%?\\t%1"
7315 [(set_attr "type" "call")]
7316 )
7317
7318 (define_insn "*call_value_reg_arm"
7319 [(set (match_operand 0 "" "")
7320 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7321 (match_operand 2 "" "")))
7322 (use (match_operand 3 "" ""))
7323 (clobber (reg:SI LR_REGNUM))]
7324 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7325 "*
7326 return output_call (&operands[1]);
7327 "
7328 [(set_attr "length" "12")
7329 (set_attr "type" "call")]
7330 )
7331
7332 ;; Allow calls to SYMBOL_REFs specially, as they are not valid general addresses.
7333 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
7334
7335 (define_insn "*call_symbol"
7336 [(call (mem:SI (match_operand:SI 0 "" ""))
7337 (match_operand 1 "" ""))
7338 (use (match_operand 2 "" ""))
7339 (clobber (reg:SI LR_REGNUM))]
7340 "TARGET_32BIT
7341 && !SIBLING_CALL_P (insn)
7342 && (GET_CODE (operands[0]) == SYMBOL_REF)
7343 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
7344 "*
7345 {
7346 rtx op = operands[0];
7347
7348 /* Switch mode now when possible. */
7349 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7350 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7351 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
7352
7353 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
7354 }"
7355 [(set_attr "type" "call")]
7356 )
7357
7358 (define_insn "*call_value_symbol"
7359 [(set (match_operand 0 "" "")
7360 (call (mem:SI (match_operand:SI 1 "" ""))
7361 (match_operand:SI 2 "" "")))
7362 (use (match_operand 3 "" ""))
7363 (clobber (reg:SI LR_REGNUM))]
7364 "TARGET_32BIT
7365 && !SIBLING_CALL_P (insn)
7366 && (GET_CODE (operands[1]) == SYMBOL_REF)
7367 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
7368 "*
7369 {
7370 rtx op = operands[1];
7371
7372 /* Switch mode now when possible. */
7373 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7374 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7375 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
7376
7377 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
7378 }"
7379 [(set_attr "type" "call")]
7380 )
7381
7382 (define_expand "sibcall_internal"
7383 [(parallel [(call (match_operand 0 "memory_operand")
7384 (match_operand 1 "general_operand"))
7385 (return)
7386 (use (match_operand 2 "" ""))])])
7387
7388 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
7389 (define_expand "sibcall"
7390 [(parallel [(call (match_operand 0 "memory_operand")
7391 (match_operand 1 "general_operand"))
7392 (return)
7393 (use (match_operand 2 "" ""))])]
7394 "TARGET_32BIT"
7395 "
7396 {
7397 rtx pat;
7398
7399 if ((!REG_P (XEXP (operands[0], 0))
7400 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
7401 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
7402 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
7403 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
7404
7405 if (operands[2] == NULL_RTX)
7406 operands[2] = const0_rtx;
7407
7408 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
7409 arm_emit_call_insn (pat, operands[0], true);
7410 DONE;
7411 }"
7412 )
7413
7414 (define_expand "sibcall_value_internal"
7415 [(parallel [(set (match_operand 0 "" "")
7416 (call (match_operand 1 "memory_operand")
7417 (match_operand 2 "general_operand")))
7418 (return)
7419 (use (match_operand 3 "" ""))])])
7420
7421 (define_expand "sibcall_value"
7422 [(parallel [(set (match_operand 0 "" "")
7423 (call (match_operand 1 "memory_operand")
7424 (match_operand 2 "general_operand")))
7425 (return)
7426 (use (match_operand 3 "" ""))])]
7427 "TARGET_32BIT"
7428 "
7429 {
7430 rtx pat;
7431
7432 if ((!REG_P (XEXP (operands[1], 0))
7433 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
7434 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
7435 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
7436 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
7437
7438 if (operands[3] == NULL_RTX)
7439 operands[3] = const0_rtx;
7440
7441 pat = gen_sibcall_value_internal (operands[0], operands[1],
7442 operands[2], operands[3]);
7443 arm_emit_call_insn (pat, operands[1], true);
7444 DONE;
7445 }"
7446 )
7447
7448 (define_insn "*sibcall_insn"
7449 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
7450 (match_operand 1 "" ""))
7451 (return)
7452 (use (match_operand 2 "" ""))]
7453 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7454 "*
7455 if (which_alternative == 1)
7456 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
7457 else
7458 {
7459 if (arm_arch5t || arm_arch4t)
7460 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
7461 else
7462 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
7463 }
7464 "
7465 [(set_attr "type" "call")]
7466 )
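
;; For illustration: a sibling call emitted through this pattern is just a
;; branch that reuses the caller's return address, roughly
;;   b  callee            @ direct form
;;   bx r3                @ register-indirect form (ARMv4T/ARMv5T and later)
;; with "mov pc, r3" used instead of bx on older architectures.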
7467
7468 (define_insn "*sibcall_value_insn"
7469 [(set (match_operand 0 "" "")
7470 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
7471 (match_operand 2 "" "")))
7472 (return)
7473 (use (match_operand 3 "" ""))]
7474 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7475 "*
7476 if (which_alternative == 1)
7477 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
7478 else
7479 {
7480 if (arm_arch5t || arm_arch4t)
7481 return \"bx%?\\t%1\";
7482 else
7483 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
7484 }
7485 "
7486 [(set_attr "type" "call")]
7487 )
7488
7489 (define_expand "<return_str>return"
7490 [(RETURNS)]
7491 "(TARGET_ARM || (TARGET_THUMB2
7492 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
7493 && !IS_STACKALIGN (arm_current_func_type ())))
7494 <return_cond_false>"
7495 "
7496 {
7497 if (TARGET_THUMB2)
7498 {
7499 thumb2_expand_return (<return_simple_p>);
7500 DONE;
7501 }
7502 }
7503 "
7504 )
7505
7506 ;; Often the return insn will be the same as loading from memory, so set the type attribute accordingly.
7507 (define_insn "*arm_return"
7508 [(return)]
7509 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
7510 "*
7511 {
7512 if (arm_ccfsm_state == 2)
7513 {
7514 arm_ccfsm_state += 2;
7515 return \"\";
7516 }
7517 return output_return_instruction (const_true_rtx, true, false, false);
7518 }"
7519 [(set_attr "type" "load_4")
7520 (set_attr "length" "12")
7521 (set_attr "predicable" "yes")]
7522 )
7523
7524 (define_insn "*cond_<return_str>return"
7525 [(set (pc)
7526 (if_then_else (match_operator 0 "arm_comparison_operator"
7527 [(match_operand 1 "cc_register" "") (const_int 0)])
7528 (RETURNS)
7529 (pc)))]
7530 "TARGET_ARM <return_cond_true>"
7531 "*
7532 {
7533 if (arm_ccfsm_state == 2)
7534 {
7535 arm_ccfsm_state += 2;
7536 return \"\";
7537 }
7538 return output_return_instruction (operands[0], true, false,
7539 <return_simple_p>);
7540 }"
7541 [(set_attr "conds" "use")
7542 (set_attr "length" "12")
7543 (set_attr "type" "load_4")]
7544 )
7545
7546 (define_insn "*cond_<return_str>return_inverted"
7547 [(set (pc)
7548 (if_then_else (match_operator 0 "arm_comparison_operator"
7549 [(match_operand 1 "cc_register" "") (const_int 0)])
7550 (pc)
7551 (RETURNS)))]
7552 "TARGET_ARM <return_cond_true>"
7553 "*
7554 {
7555 if (arm_ccfsm_state == 2)
7556 {
7557 arm_ccfsm_state += 2;
7558 return \"\";
7559 }
7560 return output_return_instruction (operands[0], true, true,
7561 <return_simple_p>);
7562 }"
7563 [(set_attr "conds" "use")
7564 (set_attr "length" "12")
7565 (set_attr "type" "load_4")]
7566 )
7567
7568 (define_insn "*arm_simple_return"
7569 [(simple_return)]
7570 "TARGET_ARM"
7571 "*
7572 {
7573 if (arm_ccfsm_state == 2)
7574 {
7575 arm_ccfsm_state += 2;
7576 return \"\";
7577 }
7578 return output_return_instruction (const_true_rtx, true, false, true);
7579 }"
7580 [(set_attr "type" "branch")
7581 (set_attr "length" "4")
7582 (set_attr "predicable" "yes")]
7583 )
7584
7585 ;; Generate a sequence of instructions to determine if the processor is
7586 ;; in 26-bit or 32-bit mode, and return the appropriate return address
7587 ;; mask.
7588
7589 (define_expand "return_addr_mask"
7590 [(set (match_dup 1)
7591 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7592 (const_int 0)))
7593 (set (match_operand:SI 0 "s_register_operand")
7594 (if_then_else:SI (eq (match_dup 1) (const_int 0))
7595 (const_int -1)
7596 (const_int 67108860)))] ; 0x03fffffc
7597 "TARGET_ARM"
7598 "
7599 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
7600 ")
7601
7602 (define_insn "*check_arch2"
7603 [(set (match_operand:CC_NOOV 0 "cc_register" "")
7604 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7605 (const_int 0)))]
7606 "TARGET_ARM"
7607 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
7608 [(set_attr "length" "8")
7609 (set_attr "conds" "set")
7610 (set_attr "type" "multiple")]
7611 )
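
;; Illustrative use of the mask (register choices hypothetical): the value
;; computed by return_addr_mask is simply ANDed with a saved return address,
;;   and r0, lr, r2       @ r2 = -1 (32-bit mode) or 0x03fffffc (26-bit mode)
;; so that the PSR bits packed into r14 on 26-bit processors are stripped
;; before the value is used as an address.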
7612
7613 ;; Call subroutine returning any type.
7614
7615 (define_expand "untyped_call"
7616 [(parallel [(call (match_operand 0 "" "")
7617 (const_int 0))
7618 (match_operand 1 "" "")
7619 (match_operand 2 "" "")])]
7620 "TARGET_EITHER && !TARGET_FDPIC"
7621 "
7622 {
7623 int i;
7624 rtx par = gen_rtx_PARALLEL (VOIDmode,
7625 rtvec_alloc (XVECLEN (operands[2], 0)));
7626 rtx addr = gen_reg_rtx (Pmode);
7627 rtx mem;
7628 int size = 0;
7629
7630 emit_move_insn (addr, XEXP (operands[1], 0));
7631 mem = change_address (operands[1], BLKmode, addr);
7632
7633 for (i = 0; i < XVECLEN (operands[2], 0); i++)
7634 {
7635 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
7636
7637 /* Default code only uses r0 as a return value, but we could
7638 be using anything up to 4 registers. */
7639 if (REGNO (src) == R0_REGNUM)
7640 src = gen_rtx_REG (TImode, R0_REGNUM);
7641
7642 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
7643 GEN_INT (size));
7644 size += GET_MODE_SIZE (GET_MODE (src));
7645 }
7646
7647 emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
7648
7649 size = 0;
7650
7651 for (i = 0; i < XVECLEN (par, 0); i++)
7652 {
7653 HOST_WIDE_INT offset = 0;
7654 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
7655
7656 if (size != 0)
7657 emit_move_insn (addr, plus_constant (Pmode, addr, size));
7658
7659 mem = change_address (mem, GET_MODE (reg), NULL);
7660 if (REGNO (reg) == R0_REGNUM)
7661 {
7662 /* On thumb we have to use a write-back instruction. */
7663 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
7664 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7665 size = TARGET_ARM ? 16 : 0;
7666 }
7667 else
7668 {
7669 emit_move_insn (mem, reg);
7670 size = GET_MODE_SIZE (GET_MODE (reg));
7671 }
7672 }
7673
7674 /* The optimizer does not know that the call sets the function value
7675 registers we stored in the result block. We avoid problems by
7676 claiming that all hard registers are used and clobbered at this
7677 point. */
7678 emit_insn (gen_blockage ());
7679
7680 DONE;
7681 }"
7682 )
7683
7684 (define_expand "untyped_return"
7685 [(match_operand:BLK 0 "memory_operand")
7686 (match_operand 1 "" "")]
7687 "TARGET_EITHER && !TARGET_FDPIC"
7688 "
7689 {
7690 int i;
7691 rtx addr = gen_reg_rtx (Pmode);
7692 rtx mem;
7693 int size = 0;
7694
7695 emit_move_insn (addr, XEXP (operands[0], 0));
7696 mem = change_address (operands[0], BLKmode, addr);
7697
7698 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7699 {
7700 HOST_WIDE_INT offset = 0;
7701 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
7702
7703 if (size != 0)
7704 emit_move_insn (addr, plus_constant (Pmode, addr, size));
7705
7706 mem = change_address (mem, GET_MODE (reg), NULL);
7707 if (REGNO (reg) == R0_REGNUM)
7708 {
7709 /* On thumb we have to use a write-back instruction. */
7710 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
7711 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7712 size = TARGET_ARM ? 16 : 0;
7713 }
7714 else
7715 {
7716 emit_move_insn (reg, mem);
7717 size = GET_MODE_SIZE (GET_MODE (reg));
7718 }
7719 }
7720
7721 /* Emit USE insns before the return. */
7722 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7723 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
7724
7725 /* Construct the return. */
7726 expand_naked_return ();
7727
7728 DONE;
7729 }"
7730 )
7731
7732 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
7733 ;; all of memory. This blocks insns from being moved across this point.
7734
7735 (define_insn "blockage"
7736 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
7737 "TARGET_EITHER"
7738 ""
7739 [(set_attr "length" "0")
7740 (set_attr "type" "block")]
7741 )
7742
7743 ;; Since we hard-code r0 here, use the 'o' constraint to stop auto-increment
7744 ;; addressing modes from being generated; with r0 potentially also the base
7745 ;; register, those would provoke undefined behaviour in the hardware.
7746 (define_insn "probe_stack"
7747 [(set (match_operand:SI 0 "memory_operand" "=o")
7748 (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
7749 "TARGET_32BIT"
7750 "str%?\\tr0, %0"
7751 [(set_attr "type" "store_4")
7752 (set_attr "predicable" "yes")]
7753 )
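
;; For example (offset purely illustrative), a single probe emitted through
;; this pattern is just a store that touches the page, roughly
;;   str r0, [sp, #-4080]
;; the value stored (whatever happens to be in r0) is irrelevant.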
7754
7755 (define_insn "probe_stack_range"
7756 [(set (match_operand:SI 0 "register_operand" "=r")
7757 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
7758 (match_operand:SI 2 "register_operand" "r")]
7759 VUNSPEC_PROBE_STACK_RANGE))]
7760 "TARGET_32BIT"
7761 {
7762 return output_probe_stack_range (operands[0], operands[2]);
7763 }
7764 [(set_attr "type" "multiple")
7765 (set_attr "conds" "clob")]
7766 )
7767
7768 ;; Named patterns for stack smashing protection.
7769 (define_expand "stack_protect_combined_set"
7770 [(parallel
7771 [(set (match_operand:SI 0 "memory_operand")
7772 (unspec:SI [(match_operand:SI 1 "guard_operand")]
7773 UNSPEC_SP_SET))
7774 (clobber (match_scratch:SI 2 ""))
7775 (clobber (match_scratch:SI 3 ""))])]
7776 ""
7777 ""
7778 )
7779
7780 ;; Use a separate insn from the above expand so that the mem can be kept
7781 ;; outside operand #1 when register allocation happens. This is needed to
7782 ;; stop LRA from trying to reload the guard, since we need to control how PIC
7783 ;; access is done in the -fpic/-fPIC case (see the COMPUTE_NOW parameter
7784 ;; passed to legitimize_pic_address ()).
7785 (define_insn_and_split "*stack_protect_combined_set_insn"
7786 [(set (match_operand:SI 0 "memory_operand" "=m,m")
7787 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
7788 UNSPEC_SP_SET))
7789 (clobber (match_scratch:SI 2 "=&l,&r"))
7790 (clobber (match_scratch:SI 3 "=&l,&r"))]
7791 ""
7792 "#"
7793 "reload_completed"
7794 [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
7795 UNSPEC_SP_SET))
7796 (clobber (match_dup 2))])]
7797 "
7798 {
7799 if (flag_pic)
7800 {
7801 rtx pic_reg;
7802
7803 if (TARGET_FDPIC)
7804 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7805 else
7806 pic_reg = operands[3];
7807
7808 /* Forces recomputing of GOT base now. */
7809 legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
7810 true /*compute_now*/);
7811 }
7812 else
7813 {
7814 if (address_operand (operands[1], SImode))
7815 operands[2] = operands[1];
7816 else
7817 {
7818 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
7819 emit_move_insn (operands[2], mem);
7820 }
7821 }
7822 }"
7823 [(set_attr "arch" "t1,32")]
7824 )
7825
7826 ;; DO NOT SPLIT THIS INSN. It's important for security reasons that the
7827 ;; canary value does not live beyond the life of this sequence.
7828 (define_insn "*stack_protect_set_insn"
7829 [(set (match_operand:SI 0 "memory_operand" "=m,m")
7830 (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
7831 UNSPEC_SP_SET))
7832 (clobber (match_dup 1))]
7833 ""
7834 "@
7835 ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
7836 ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
7837 [(set_attr "length" "8,12")
7838 (set_attr "conds" "clob,nocond")
7839 (set_attr "type" "multiple")
7840 (set_attr "arch" "t1,32")]
7841 )
7842
7843 (define_expand "stack_protect_combined_test"
7844 [(parallel
7845 [(set (pc)
7846 (if_then_else
7847 (eq (match_operand:SI 0 "memory_operand")
7848 (unspec:SI [(match_operand:SI 1 "guard_operand")]
7849 UNSPEC_SP_TEST))
7850 (label_ref (match_operand 2))
7851 (pc)))
7852 (clobber (match_scratch:SI 3 ""))
7853 (clobber (match_scratch:SI 4 ""))
7854 (clobber (reg:CC CC_REGNUM))])]
7855 ""
7856 ""
7857 )
7858
7859 ;; Use a separate insn from the above expand so that the mem can be kept
7860 ;; outside operand #1 when register allocation happens. This is needed to
7861 ;; stop LRA from trying to reload the guard, since we need to control how PIC
7862 ;; access is done in the -fpic/-fPIC case (see the COMPUTE_NOW parameter
7863 ;; passed to legitimize_pic_address ()).
7864 (define_insn_and_split "*stack_protect_combined_test_insn"
7865 [(set (pc)
7866 (if_then_else
7867 (eq (match_operand:SI 0 "memory_operand" "m,m")
7868 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
7869 UNSPEC_SP_TEST))
7870 (label_ref (match_operand 2))
7871 (pc)))
7872 (clobber (match_scratch:SI 3 "=&l,&r"))
7873 (clobber (match_scratch:SI 4 "=&l,&r"))
7874 (clobber (reg:CC CC_REGNUM))]
7875 ""
7876 "#"
7877 "reload_completed"
7878 [(const_int 0)]
7879 {
7880 rtx eq;
7881
7882 if (flag_pic)
7883 {
7884 rtx pic_reg;
7885
7886 if (TARGET_FDPIC)
7887 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7888 else
7889 pic_reg = operands[4];
7890
7891 /* Forces recomputing of GOT base now. */
7892 legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
7893 true /*compute_now*/);
7894 }
7895 else
7896 {
7897 if (address_operand (operands[1], SImode))
7898 operands[3] = operands[1];
7899 else
7900 {
7901 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
7902 emit_move_insn (operands[3], mem);
7903 }
7904 }
7905 if (TARGET_32BIT)
7906 {
7907 emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
7908 operands[3]));
7909 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
7910 eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
7911 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
7912 }
7913 else
7914 {
7915 emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
7916 operands[3]));
7917 eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
7918 emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
7919 operands[2]));
7920 }
7921 DONE;
7922 }
7923 [(set_attr "arch" "t1,32")]
7924 )
7925
7926 (define_insn "arm_stack_protect_test_insn"
7927 [(set (reg:CC_Z CC_REGNUM)
7928 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
7929 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
7930 UNSPEC_SP_TEST)
7931 (const_int 0)))
7932 (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
7933 (clobber (match_dup 2))]
7934 "TARGET_32BIT"
7935 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0"
7936 [(set_attr "length" "8,12")
7937 (set_attr "conds" "set")
7938 (set_attr "type" "multiple")
7939 (set_attr "arch" "t,32")]
7940 )
7941
7942 (define_expand "casesi"
7943 [(match_operand:SI 0 "s_register_operand") ; index to jump on
7944 (match_operand:SI 1 "const_int_operand") ; lower bound
7945 (match_operand:SI 2 "const_int_operand") ; total range
7946 (match_operand:SI 3 "" "") ; table label
7947 (match_operand:SI 4 "" "")] ; Out of range label
7948 "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
7949 "
7950 {
7951 enum insn_code code;
7952 if (operands[1] != const0_rtx)
7953 {
7954 rtx reg = gen_reg_rtx (SImode);
7955
7956 emit_insn (gen_addsi3 (reg, operands[0],
7957 gen_int_mode (-INTVAL (operands[1]),
7958 SImode)));
7959 operands[0] = reg;
7960 }
7961
7962 if (TARGET_ARM)
7963 code = CODE_FOR_arm_casesi_internal;
7964 else if (TARGET_THUMB1)
7965 code = CODE_FOR_thumb1_casesi_internal_pic;
7966 else if (flag_pic)
7967 code = CODE_FOR_thumb2_casesi_internal_pic;
7968 else
7969 code = CODE_FOR_thumb2_casesi_internal;
7970
7971 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
7972 operands[2] = force_reg (SImode, operands[2]);
7973
7974 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
7975 operands[3], operands[4]));
7976 DONE;
7977 }"
7978 )
7979
7980 ;; The USE in this pattern is needed to tell flow analysis that this is
7981 ;; a CASESI insn. It has no other purpose.
7982 (define_expand "arm_casesi_internal"
7983 [(parallel [(set (pc)
7984 (if_then_else
7985 (leu (match_operand:SI 0 "s_register_operand")
7986 (match_operand:SI 1 "arm_rhs_operand"))
7987 (match_dup 4)
7988 (label_ref:SI (match_operand 3 ""))))
7989 (clobber (reg:CC CC_REGNUM))
7990 (use (label_ref:SI (match_operand 2 "")))])]
7991 "TARGET_ARM"
7992 {
7993 operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
7994 operands[4] = gen_rtx_PLUS (SImode, operands[4],
7995 gen_rtx_LABEL_REF (SImode, operands[2]));
7996 operands[4] = gen_rtx_MEM (SImode, operands[4]);
7997 MEM_READONLY_P (operands[4]) = 1;
7998 MEM_NOTRAP_P (operands[4]) = 1;
7999 })
8000
8001 (define_insn "*arm_casesi_internal"
8002 [(parallel [(set (pc)
8003 (if_then_else
8004 (leu (match_operand:SI 0 "s_register_operand" "r")
8005 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8006 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8007 (label_ref:SI (match_operand 2 "" ""))))
8008 (label_ref:SI (match_operand 3 "" ""))))
8009 (clobber (reg:CC CC_REGNUM))
8010 (use (label_ref:SI (match_dup 2)))])]
8011 "TARGET_ARM"
8012 "*
8013 if (flag_pic)
8014 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8015 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8016 "
8017 [(set_attr "conds" "clob")
8018 (set_attr "length" "12")
8019 (set_attr "type" "multiple")]
8020 )
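
;; As a concrete sketch (labels and bound illustrative), a dense switch with
;; five cases dispatches through this pattern, without -fpic, roughly as
;;   cmp   r0, #4
;;   ldrls pc, [pc, r0, asl #2]
;;   b     .Ldefault
;;   .word .Lcase0
;;   ...                  @ the table of case labels follows immediately,
;;                        @ 8 bytes past the ldrls because pc reads as .+8
;; With -fpic, "addls pc, pc, r0, asl #2" is used instead, branching into a
;; table of b instructions.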
8021
8022 (define_expand "indirect_jump"
8023 [(set (pc)
8024 (match_operand:SI 0 "s_register_operand"))]
8025 "TARGET_EITHER"
8026 "
8027 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8028 address and use bx. */
8029 if (TARGET_THUMB2)
8030 {
8031 rtx tmp;
8032 tmp = gen_reg_rtx (SImode);
8033 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8034 operands[0] = tmp;
8035 }
8036 "
8037 )
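
;; Illustrative Thumb-2 expansion (scratch register arbitrary): since a plain
;; "mov pc, rN" cannot be used here, the jump becomes roughly
;;   orr r3, r0, #1       @ force the Thumb bit
;;   bx  r3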
8038
8039 ;; NB Never uses BX.
8040 (define_insn "*arm_indirect_jump"
8041 [(set (pc)
8042 (match_operand:SI 0 "s_register_operand" "r"))]
8043 "TARGET_ARM"
8044 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8045 [(set_attr "predicable" "yes")
8046 (set_attr "type" "branch")]
8047 )
8048
8049 (define_insn "*load_indirect_jump"
8050 [(set (pc)
8051 (match_operand:SI 0 "memory_operand" "m"))]
8052 "TARGET_ARM"
8053 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8054 [(set_attr "type" "load_4")
8055 (set_attr "pool_range" "4096")
8056 (set_attr "neg_pool_range" "4084")
8057 (set_attr "predicable" "yes")]
8058 )
8059
8060 \f
8061 ;; Misc insns
8062
8063 (define_insn "nop"
8064 [(const_int 0)]
8065 "TARGET_EITHER"
8066 "nop"
8067 [(set (attr "length")
8068 (if_then_else (eq_attr "is_thumb" "yes")
8069 (const_int 2)
8070 (const_int 4)))
8071 (set_attr "type" "mov_reg")]
8072 )
8073
8074 (define_insn "trap"
8075 [(trap_if (const_int 1) (const_int 0))]
8076 ""
8077 "*
8078 if (TARGET_ARM)
8079 return \".inst\\t0xe7f000f0\";
8080 else
8081 return \".inst\\t0xdeff\";
8082 "
8083 [(set (attr "length")
8084 (if_then_else (eq_attr "is_thumb" "yes")
8085 (const_int 2)
8086 (const_int 4)))
8087 (set_attr "type" "trap")
8088 (set_attr "conds" "unconditional")]
8089 )
8090
8091 \f
8092 ;; Patterns to allow combination of arithmetic, cond code and shifts
8093
8094 (define_insn "*<arith_shift_insn>_multsi"
8095 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8096 (SHIFTABLE_OPS:SI
8097 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
8098 (match_operand:SI 3 "power_of_two_operand" ""))
8099 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
8100 "TARGET_32BIT"
8101 "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
8102 [(set_attr "predicable" "yes")
8103 (set_attr "shift" "2")
8104 (set_attr "arch" "a,t2")
8105 (set_attr "type" "alu_shift_imm")])
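
;; For example (illustrative only): an addition of a value scaled by a power
;; of two, such as "a + b * 8", becomes a single shifted-operand instruction:
;;   add r0, r1, r2, lsl #3
;; and likewise for the other SHIFTABLE_OPS (sub, and, orr, eor).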
8106
8107 (define_insn "*<arith_shift_insn>_shiftsi"
8108 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8109 (SHIFTABLE_OPS:SI
8110 (match_operator:SI 2 "shift_nomul_operator"
8111 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8112 (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
8113 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
8114 "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
8115 "<arith_shift_insn>%?\\t%0, %1, %3%S2"
8116 [(set_attr "predicable" "yes")
8117 (set_attr "shift" "3")
8118 (set_attr "arch" "a,t2,a")
8119 (set_attr "type" "alu_shift_imm,alu_shift_imm,alu_shift_reg")])
8120
8121 (define_split
8122 [(set (match_operand:SI 0 "s_register_operand" "")
8123 (match_operator:SI 1 "shiftable_operator"
8124 [(match_operator:SI 2 "shiftable_operator"
8125 [(match_operator:SI 3 "shift_operator"
8126 [(match_operand:SI 4 "s_register_operand" "")
8127 (match_operand:SI 5 "reg_or_int_operand" "")])
8128 (match_operand:SI 6 "s_register_operand" "")])
8129 (match_operand:SI 7 "arm_rhs_operand" "")]))
8130 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8131 "TARGET_32BIT"
8132 [(set (match_dup 8)
8133 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8134 (match_dup 6)]))
8135 (set (match_dup 0)
8136 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
8137 "")
8138
8139 (define_insn "*arith_shiftsi_compare0"
8140 [(set (reg:CC_NOOV CC_REGNUM)
8141 (compare:CC_NOOV
8142 (match_operator:SI 1 "shiftable_operator"
8143 [(match_operator:SI 3 "shift_operator"
8144 [(match_operand:SI 4 "s_register_operand" "r,r")
8145 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8146 (match_operand:SI 2 "s_register_operand" "r,r")])
8147 (const_int 0)))
8148 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8149 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8150 (match_dup 2)]))]
8151 "TARGET_32BIT"
8152 "%i1s%?\\t%0, %2, %4%S3"
8153 [(set_attr "conds" "set")
8154 (set_attr "shift" "4")
8155 (set_attr "arch" "32,a")
8156 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8157
8158 (define_insn "*arith_shiftsi_compare0_scratch"
8159 [(set (reg:CC_NOOV CC_REGNUM)
8160 (compare:CC_NOOV
8161 (match_operator:SI 1 "shiftable_operator"
8162 [(match_operator:SI 3 "shift_operator"
8163 [(match_operand:SI 4 "s_register_operand" "r,r")
8164 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8165 (match_operand:SI 2 "s_register_operand" "r,r")])
8166 (const_int 0)))
8167 (clobber (match_scratch:SI 0 "=r,r"))]
8168 "TARGET_32BIT"
8169 "%i1s%?\\t%0, %2, %4%S3"
8170 [(set_attr "conds" "set")
8171 (set_attr "shift" "4")
8172 (set_attr "arch" "32,a")
8173 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8174
8175 (define_insn "*sub_shiftsi"
8176 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8177 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8178 (match_operator:SI 2 "shift_operator"
8179 [(match_operand:SI 3 "s_register_operand" "r,r")
8180 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8181 "TARGET_32BIT"
8182 "sub%?\\t%0, %1, %3%S2"
8183 [(set_attr "predicable" "yes")
8184 (set_attr "predicable_short_it" "no")
8185 (set_attr "shift" "3")
8186 (set_attr "arch" "32,a")
8187 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8188
8189 (define_insn "*sub_shiftsi_compare0"
8190 [(set (reg:CC_NOOV CC_REGNUM)
8191 (compare:CC_NOOV
8192 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8193 (match_operator:SI 2 "shift_operator"
8194 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8195 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8196 (const_int 0)))
8197 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8198 (minus:SI (match_dup 1)
8199 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8200 "TARGET_32BIT"
8201 "subs%?\\t%0, %1, %3%S2"
8202 [(set_attr "conds" "set")
8203 (set_attr "shift" "3")
8204 (set_attr "arch" "32,a,a")
8205 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
8206
8207 (define_insn "*sub_shiftsi_compare0_scratch"
8208 [(set (reg:CC_NOOV CC_REGNUM)
8209 (compare:CC_NOOV
8210 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8211 (match_operator:SI 2 "shift_operator"
8212 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8213 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8214 (const_int 0)))
8215 (clobber (match_scratch:SI 0 "=r,r,r"))]
8216 "TARGET_32BIT"
8217 "subs%?\\t%0, %1, %3%S2"
8218 [(set_attr "conds" "set")
8219 (set_attr "shift" "3")
8220 (set_attr "arch" "32,a,a")
8221 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
8222 \f
8223
8224 (define_insn_and_split "*and_scc"
8225 [(set (match_operand:SI 0 "s_register_operand" "=r")
8226 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8227 [(match_operand 2 "cc_register" "") (const_int 0)])
8228 (match_operand:SI 3 "s_register_operand" "r")))]
8229 "TARGET_ARM"
8230 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
8231 "&& reload_completed"
8232 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
8233 (cond_exec (match_dup 4) (set (match_dup 0)
8234 (and:SI (match_dup 3) (const_int 1))))]
8235 {
8236 machine_mode mode = GET_MODE (operands[2]);
8237 enum rtx_code rc = GET_CODE (operands[1]);
8238
8239 /* Note that operands[4] is the same as operands[1],
8240 but with VOIDmode as the result. */
8241 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8242 if (mode == CCFPmode || mode == CCFPEmode)
8243 rc = reverse_condition_maybe_unordered (rc);
8244 else
8245 rc = reverse_condition (rc);
8246 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8247 }
8248 [(set_attr "conds" "use")
8249 (set_attr "type" "multiple")
8250 (set_attr "length" "8")]
8251 )
8252
8253 (define_insn_and_split "*ior_scc"
8254 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8255 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
8256 [(match_operand 2 "cc_register" "") (const_int 0)])
8257 (match_operand:SI 3 "s_register_operand" "0,?r")))]
8258 "TARGET_ARM"
8259 "@
8260 orr%d1\\t%0, %3, #1
8261 #"
8262 "&& reload_completed
8263 && REGNO (operands [0]) != REGNO (operands[3])"
8264 ;; && which_alternative == 1
8265 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
8266 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
8267 (cond_exec (match_dup 4) (set (match_dup 0)
8268 (ior:SI (match_dup 3) (const_int 1))))]
8269 {
8270 machine_mode mode = GET_MODE (operands[2]);
8271 enum rtx_code rc = GET_CODE (operands[1]);
8272
8273 /* Note that operands[4] is the same as operands[1],
8274 but with VOIDmode as the result. */
8275 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8276 if (mode == CCFPmode || mode == CCFPEmode)
8277 rc = reverse_condition_maybe_unordered (rc);
8278 else
8279 rc = reverse_condition (rc);
8280 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8281 }
8282 [(set_attr "conds" "use")
8283 (set_attr "length" "4,8")
8284 (set_attr "type" "logic_imm,multiple")]
8285 )
8286
8287 ; A series of splitters for the compare_scc pattern below. Note that
8288 ; order is important.
8289 (define_split
8290 [(set (match_operand:SI 0 "s_register_operand" "")
8291 (lt:SI (match_operand:SI 1 "s_register_operand" "")
8292 (const_int 0)))
8293 (clobber (reg:CC CC_REGNUM))]
8294 "TARGET_32BIT && reload_completed"
8295 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
8296
8297 (define_split
8298 [(set (match_operand:SI 0 "s_register_operand" "")
8299 (ge:SI (match_operand:SI 1 "s_register_operand" "")
8300 (const_int 0)))
8301 (clobber (reg:CC CC_REGNUM))]
8302 "TARGET_32BIT && reload_completed"
8303 [(set (match_dup 0) (not:SI (match_dup 1)))
8304 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
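
;; Illustrative results of the two splitters above (registers arbitrary):
;;   r0 = (r1 < 0)   ->  lsr r0, r1, #31                    @ the sign bit
;;   r0 = (r1 >= 0)  ->  mvn r0, r1 ; lsr r0, r0, #31       @ inverted sign bit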
8305
8306 (define_split
8307 [(set (match_operand:SI 0 "s_register_operand" "")
8308 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8309 (const_int 0)))
8310 (clobber (reg:CC CC_REGNUM))]
8311 "arm_arch5t && TARGET_32BIT"
8312 [(set (match_dup 0) (clz:SI (match_dup 1)))
8313 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8314 )
8315
8316 (define_split
8317 [(set (match_operand:SI 0 "s_register_operand" "")
8318 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8319 (const_int 0)))
8320 (clobber (reg:CC CC_REGNUM))]
8321 "TARGET_32BIT && reload_completed"
8322 [(parallel
8323 [(set (reg:CC CC_REGNUM)
8324 (compare:CC (const_int 1) (match_dup 1)))
8325 (set (match_dup 0)
8326 (minus:SI (const_int 1) (match_dup 1)))])
8327 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
8328 (set (match_dup 0) (const_int 0)))])
8329
8330 (define_split
8331 [(set (match_operand:SI 0 "s_register_operand" "")
8332 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8333 (match_operand:SI 2 "const_int_operand" "")))
8334 (clobber (reg:CC CC_REGNUM))]
8335 "TARGET_32BIT && reload_completed"
8336 [(parallel
8337 [(set (reg:CC CC_REGNUM)
8338 (compare:CC (match_dup 1) (match_dup 2)))
8339 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
8340 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
8341 (set (match_dup 0) (const_int 1)))]
8342 {
8343 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
8344 })
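
;; For illustration, with a small constant the splitter above gives roughly
;;   r0 = (r1 != 5)  ->  subs  r0, r1, #5
;;                       movne r0, #1
;; i.e. the subtraction provides both the flags and the zero result for the
;; equal case, and the predicated move normalises everything else to 1.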
8345
8346 (define_split
8347 [(set (match_operand:SI 0 "s_register_operand" "")
8348 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8349 (match_operand:SI 2 "arm_add_operand" "")))
8350 (clobber (reg:CC CC_REGNUM))]
8351 "TARGET_32BIT && reload_completed"
8352 [(parallel
8353 [(set (reg:CC_NOOV CC_REGNUM)
8354 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
8355 (const_int 0)))
8356 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
8357 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
8358 (set (match_dup 0) (const_int 1)))])
8359
8360 (define_insn_and_split "*compare_scc"
8361 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8362 (match_operator:SI 1 "arm_comparison_operator"
8363 [(match_operand:SI 2 "s_register_operand" "r,r")
8364 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8365 (clobber (reg:CC CC_REGNUM))]
8366 "TARGET_32BIT"
8367 "#"
8368 "&& reload_completed"
8369 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
8370 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
8371 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
8372 {
8373 rtx tmp1;
8374 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
8375 operands[2], operands[3]);
8376 enum rtx_code rc = GET_CODE (operands[1]);
8377
8378 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
8379
8380 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8381 if (mode == CCFPmode || mode == CCFPEmode)
8382 rc = reverse_condition_maybe_unordered (rc);
8383 else
8384 rc = reverse_condition (rc);
8385 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8386 }
8387 [(set_attr "type" "multiple")]
8388 )
8389
8390 ;; Attempt to improve the sequences generated by the compare_scc splitters
8391 ;; so that they do not use conditional execution.
8392
8393 ;; Rd = (eq (reg1) (const_int 0)) // ARMv5
8394 ;; clz Rd, reg1
8395 ;; lsr Rd, Rd, #5
8396 (define_peephole2
8397 [(set (reg:CC CC_REGNUM)
8398 (compare:CC (match_operand:SI 1 "register_operand" "")
8399 (const_int 0)))
8400 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8401 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8402 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8403 (set (match_dup 0) (const_int 1)))]
8404 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8405 [(set (match_dup 0) (clz:SI (match_dup 1)))
8406 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8407 )
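
;; Why this works (sketch): clz of zero is 32 and clz of any non-zero value is
;; at most 31, so the logical shift right by 5 leaves exactly 1 for a zero
;; input and 0 otherwise.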
8408
8409 ;; Rd = (eq (reg1) (const_int 0)) // !ARMv5
8410 ;; negs Rd, reg1
8411 ;; adc Rd, Rd, reg1
8412 (define_peephole2
8413 [(set (reg:CC CC_REGNUM)
8414 (compare:CC (match_operand:SI 1 "register_operand" "")
8415 (const_int 0)))
8416 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8417 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8418 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8419 (set (match_dup 0) (const_int 1)))
8420 (match_scratch:SI 2 "r")]
8421 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8422 [(parallel
8423 [(set (reg:CC CC_REGNUM)
8424 (compare:CC (const_int 0) (match_dup 1)))
8425 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
8426 (set (match_dup 0)
8427 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
8428 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
8429 )
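
;; Why this works (sketch): "negs Rd, reg1" computes 0 - reg1 and leaves the
;; carry set only when reg1 is zero (no borrow), so the following
;; "adc Rd, Rd, reg1" yields (-reg1) + reg1 + C = C, i.e. (reg1 == 0).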
8430
8431 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
8432 ;; sub Rd, Reg1, reg2
8433 ;; clz Rd, Rd
8434 ;; lsr Rd, Rd, #5
8435 (define_peephole2
8436 [(set (reg:CC CC_REGNUM)
8437 (compare:CC (match_operand:SI 1 "register_operand" "")
8438 (match_operand:SI 2 "arm_rhs_operand" "")))
8439 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8440 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8441 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8442 (set (match_dup 0) (const_int 1)))]
8443 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
8444 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
8445 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
8446 (set (match_dup 0) (clz:SI (match_dup 0)))
8447 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8448 )
8449
8450
8451 ;; Rd = (eq (reg1) (reg2)) // ! ARMv5 or optimising for size.
8452 ;; sub T1, Reg1, reg2
8453 ;; negs Rd, T1
8454 ;; adc Rd, Rd, T1
8455 (define_peephole2
8456 [(set (reg:CC CC_REGNUM)
8457 (compare:CC (match_operand:SI 1 "register_operand" "")
8458 (match_operand:SI 2 "arm_rhs_operand" "")))
8459 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8460 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8461 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8462 (set (match_dup 0) (const_int 1)))
8463 (match_scratch:SI 3 "r")]
8464 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8465 [(set (match_dup 3) (match_dup 4))
8466 (parallel
8467 [(set (reg:CC CC_REGNUM)
8468 (compare:CC (const_int 0) (match_dup 3)))
8469 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
8470 (set (match_dup 0)
8471 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
8472 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
8473 "
8474 if (CONST_INT_P (operands[2]))
8475 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
8476 else
8477 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
8478 ")
8479
8480 (define_insn "*cond_move"
8481 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8482 (if_then_else:SI (match_operator 3 "equality_operator"
8483 [(match_operator 4 "arm_comparison_operator"
8484 [(match_operand 5 "cc_register" "") (const_int 0)])
8485 (const_int 0)])
8486 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
8487 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
8488 "TARGET_ARM"
8489 "*
8490 if (GET_CODE (operands[3]) == NE)
8491 {
8492 if (which_alternative != 1)
8493 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
8494 if (which_alternative != 0)
8495 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
8496 return \"\";
8497 }
8498 if (which_alternative != 0)
8499 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8500 if (which_alternative != 1)
8501 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
8502 return \"\";
8503 "
8504 [(set_attr "conds" "use")
8505 (set_attr_alternative "type"
8506 [(if_then_else (match_operand 2 "const_int_operand" "")
8507 (const_string "mov_imm")
8508 (const_string "mov_reg"))
8509 (if_then_else (match_operand 1 "const_int_operand" "")
8510 (const_string "mov_imm")
8511 (const_string "mov_reg"))
8512 (const_string "multiple")])
8513 (set_attr "length" "4,4,8")]
8514 )
8515
8516 (define_insn "*cond_arith"
8517 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8518 (match_operator:SI 5 "shiftable_operator"
8519 [(match_operator:SI 4 "arm_comparison_operator"
8520 [(match_operand:SI 2 "s_register_operand" "r,r")
8521 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
8522 (match_operand:SI 1 "s_register_operand" "0,?r")]))
8523 (clobber (reg:CC CC_REGNUM))]
8524 "TARGET_ARM"
8525 "*
8526 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
8527 return \"%i5\\t%0, %1, %2, lsr #31\";
8528
8529 output_asm_insn (\"cmp\\t%2, %3\", operands);
8530 if (GET_CODE (operands[5]) == AND)
8531 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
8532 else if (GET_CODE (operands[5]) == MINUS)
8533 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
8534 else if (which_alternative != 0)
8535 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8536 return \"%i5%d4\\t%0, %1, #1\";
8537 "
8538 [(set_attr "conds" "clob")
8539 (set_attr "length" "12")
8540 (set_attr "type" "multiple")]
8541 )
8542
8543 (define_insn "*cond_sub"
8544 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8545 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
8546 (match_operator:SI 4 "arm_comparison_operator"
8547 [(match_operand:SI 2 "s_register_operand" "r,r")
8548 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
8549 (clobber (reg:CC CC_REGNUM))]
8550 "TARGET_ARM"
8551 "*
8552 output_asm_insn (\"cmp\\t%2, %3\", operands);
8553 if (which_alternative != 0)
8554 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8555 return \"sub%d4\\t%0, %1, #1\";
8556 "
8557 [(set_attr "conds" "clob")
8558 (set_attr "length" "8,12")
8559 (set_attr "type" "multiple")]
8560 )
8561
8562 (define_insn "*cmp_ite0"
8563 [(set (match_operand 6 "dominant_cc_register" "")
8564 (compare
8565 (if_then_else:SI
8566 (match_operator 4 "arm_comparison_operator"
8567 [(match_operand:SI 0 "s_register_operand"
8568 "l,l,l,r,r,r,r,r,r")
8569 (match_operand:SI 1 "arm_add_operand"
8570 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
8571 (match_operator:SI 5 "arm_comparison_operator"
8572 [(match_operand:SI 2 "s_register_operand"
8573 "l,r,r,l,l,r,r,r,r")
8574 (match_operand:SI 3 "arm_add_operand"
8575 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
8576 (const_int 0))
8577 (const_int 0)))]
8578 "TARGET_32BIT"
8579 "*
8580 {
8581 static const char * const cmp1[NUM_OF_COND_CMP][2] =
8582 {
8583 {\"cmp%d5\\t%0, %1\",
8584 \"cmp%d4\\t%2, %3\"},
8585 {\"cmn%d5\\t%0, #%n1\",
8586 \"cmp%d4\\t%2, %3\"},
8587 {\"cmp%d5\\t%0, %1\",
8588 \"cmn%d4\\t%2, #%n3\"},
8589 {\"cmn%d5\\t%0, #%n1\",
8590 \"cmn%d4\\t%2, #%n3\"}
8591 };
8592 static const char * const cmp2[NUM_OF_COND_CMP][2] =
8593 {
8594 {\"cmp\\t%2, %3\",
8595 \"cmp\\t%0, %1\"},
8596 {\"cmp\\t%2, %3\",
8597 \"cmn\\t%0, #%n1\"},
8598 {\"cmn\\t%2, #%n3\",
8599 \"cmp\\t%0, %1\"},
8600 {\"cmn\\t%2, #%n3\",
8601 \"cmn\\t%0, #%n1\"}
8602 };
8603 static const char * const ite[2] =
8604 {
8605 \"it\\t%d5\",
8606 \"it\\t%d4\"
8607 };
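  /* cmp_idx maps which_alternative onto the CMP/CMN pairing implied by the
     constraint strings above: a comparison whose constant operand matched
     the 'L' constraint (a constant whose negation is a valid immediate) is
     done with CMN against the negated value (#%n1 or #%n3) rather than
     with CMP.  */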
8608 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
8609 CMP_CMP, CMN_CMP, CMP_CMP,
8610 CMN_CMP, CMP_CMN, CMN_CMN};
8611 int swap =
8612 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8613
8614 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8615 if (TARGET_THUMB2) {
8616 output_asm_insn (ite[swap], operands);
8617 }
8618 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8619 return \"\";
8620 }"
8621 [(set_attr "conds" "set")
8622 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
8623 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
8624 (set_attr "type" "multiple")
8625 (set_attr_alternative "length"
8626 [(const_int 6)
8627 (const_int 8)
8628 (const_int 8)
8629 (const_int 8)
8630 (const_int 8)
8631 (if_then_else (eq_attr "is_thumb" "no")
8632 (const_int 8)
8633 (const_int 10))
8634 (if_then_else (eq_attr "is_thumb" "no")
8635 (const_int 8)
8636 (const_int 10))
8637 (if_then_else (eq_attr "is_thumb" "no")
8638 (const_int 8)
8639 (const_int 10))
8640 (if_then_else (eq_attr "is_thumb" "no")
8641 (const_int 8)
8642 (const_int 10))])]
8643 )
8644
8645 (define_insn "*cmp_ite1"
8646 [(set (match_operand 6 "dominant_cc_register" "")
8647 (compare
8648 (if_then_else:SI
8649 (match_operator 4 "arm_comparison_operator"
8650 [(match_operand:SI 0 "s_register_operand"
8651 "l,l,l,r,r,r,r,r,r")
8652 (match_operand:SI 1 "arm_add_operand"
8653 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
8654 (match_operator:SI 5 "arm_comparison_operator"
8655 [(match_operand:SI 2 "s_register_operand"
8656 "l,r,r,l,l,r,r,r,r")
8657 (match_operand:SI 3 "arm_add_operand"
8658 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
8659 (const_int 1))
8660 (const_int 0)))]
8661 "TARGET_32BIT"
8662 "*
8663 {
8664 static const char * const cmp1[NUM_OF_COND_CMP][2] =
8665 {
8666 {\"cmp\\t%0, %1\",
8667 \"cmp\\t%2, %3\"},
8668 {\"cmn\\t%0, #%n1\",
8669 \"cmp\\t%2, %3\"},
8670 {\"cmp\\t%0, %1\",
8671 \"cmn\\t%2, #%n3\"},
8672 {\"cmn\\t%0, #%n1\",
8673 \"cmn\\t%2, #%n3\"}
8674 };
8675 static const char * const cmp2[NUM_OF_COND_CMP][2] =
8676 {
8677 {\"cmp%d4\\t%2, %3\",
8678 \"cmp%D5\\t%0, %1\"},
8679 {\"cmp%d4\\t%2, %3\",
8680 \"cmn%D5\\t%0, #%n1\"},
8681 {\"cmn%d4\\t%2, #%n3\",
8682 \"cmp%D5\\t%0, %1\"},
8683 {\"cmn%d4\\t%2, #%n3\",
8684 \"cmn%D5\\t%0, #%n1\"}
8685 };
8686 static const char * const ite[2] =
8687 {
8688 \"it\\t%d4\",
8689 \"it\\t%D5\"
8690 };
8691 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
8692 CMP_CMP, CMN_CMP, CMP_CMP,
8693 CMN_CMP, CMP_CMN, CMN_CMN};
8694 int swap =
8695 comparison_dominates_p (GET_CODE (operands[5]),
8696 reverse_condition (GET_CODE (operands[4])));
8697
8698 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8699 if (TARGET_THUMB2) {
8700 output_asm_insn (ite[swap], operands);
8701 }
8702 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8703 return \"\";
8704 }"
8705 [(set_attr "conds" "set")
8706 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
8707 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
8708 (set_attr_alternative "length"
8709 [(const_int 6)
8710 (const_int 8)
8711 (const_int 8)
8712 (const_int 8)
8713 (const_int 8)
8714 (if_then_else (eq_attr "is_thumb" "no")
8715 (const_int 8)
8716 (const_int 10))
8717 (if_then_else (eq_attr "is_thumb" "no")
8718 (const_int 8)
8719 (const_int 10))
8720 (if_then_else (eq_attr "is_thumb" "no")
8721 (const_int 8)
8722 (const_int 10))
8723 (if_then_else (eq_attr "is_thumb" "no")
8724 (const_int 8)
8725 (const_int 10))])
8726 (set_attr "type" "multiple")]
8727 )
8728
8729 (define_insn "*cmp_and"
8730 [(set (match_operand 6 "dominant_cc_register" "")
8731 (compare
8732 (and:SI
8733 (match_operator 4 "arm_comparison_operator"
8734 [(match_operand:SI 0 "s_register_operand"
8735 "l,l,l,r,r,r,r,r,r,r")
8736 (match_operand:SI 1 "arm_add_operand"
8737 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
8738 (match_operator:SI 5 "arm_comparison_operator"
8739 [(match_operand:SI 2 "s_register_operand"
8740 "l,r,r,l,l,r,r,r,r,r")
8741 (match_operand:SI 3 "arm_add_operand"
8742 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
8743 (const_int 0)))]
8744 "TARGET_32BIT"
8745 "*
8746 {
8747 static const char *const cmp1[NUM_OF_COND_CMP][2] =
8748 {
8749 {\"cmp%d5\\t%0, %1\",
8750 \"cmp%d4\\t%2, %3\"},
8751 {\"cmn%d5\\t%0, #%n1\",
8752 \"cmp%d4\\t%2, %3\"},
8753 {\"cmp%d5\\t%0, %1\",
8754 \"cmn%d4\\t%2, #%n3\"},
8755 {\"cmn%d5\\t%0, #%n1\",
8756 \"cmn%d4\\t%2, #%n3\"}
8757 };
8758 static const char *const cmp2[NUM_OF_COND_CMP][2] =
8759 {
8760 {\"cmp\\t%2, %3\",
8761 \"cmp\\t%0, %1\"},
8762 {\"cmp\\t%2, %3\",
8763 \"cmn\\t%0, #%n1\"},
8764 {\"cmn\\t%2, #%n3\",
8765 \"cmp\\t%0, %1\"},
8766 {\"cmn\\t%2, #%n3\",
8767 \"cmn\\t%0, #%n1\"}
8768 };
8769 static const char *const ite[2] =
8770 {
8771 \"it\\t%d5\",
8772 \"it\\t%d4\"
8773 };
8774 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
8775 CMP_CMP, CMN_CMP, CMP_CMP,
8776 CMP_CMP, CMN_CMP, CMP_CMN,
8777 CMN_CMN};
8778 int swap =
8779 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8780
8781 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8782 if (TARGET_THUMB2) {
8783 output_asm_insn (ite[swap], operands);
8784 }
8785 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8786 return \"\";
8787 }"
8788 [(set_attr "conds" "set")
8789 (set_attr "predicable" "no")
8790 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
8791 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
8792 (set_attr_alternative "length"
8793 [(const_int 6)
8794 (const_int 8)
8795 (const_int 8)
8796 (const_int 8)
8797 (const_int 8)
8798 (const_int 6)
8799 (if_then_else (eq_attr "is_thumb" "no")
8800 (const_int 8)
8801 (const_int 10))
8802 (if_then_else (eq_attr "is_thumb" "no")
8803 (const_int 8)
8804 (const_int 10))
8805 (if_then_else (eq_attr "is_thumb" "no")
8806 (const_int 8)
8807 (const_int 10))
8808 (if_then_else (eq_attr "is_thumb" "no")
8809 (const_int 8)
8810 (const_int 10))])
8811 (set_attr "type" "multiple")]
8812 )
8813
8814 (define_insn "*cmp_ior"
8815 [(set (match_operand 6 "dominant_cc_register" "")
8816 (compare
8817 (ior:SI
8818 (match_operator 4 "arm_comparison_operator"
8819 [(match_operand:SI 0 "s_register_operand"
8820 "l,l,l,r,r,r,r,r,r,r")
8821 (match_operand:SI 1 "arm_add_operand"
8822 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
8823 (match_operator:SI 5 "arm_comparison_operator"
8824 [(match_operand:SI 2 "s_register_operand"
8825 "l,r,r,l,l,r,r,r,r,r")
8826 (match_operand:SI 3 "arm_add_operand"
8827 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
8828 (const_int 0)))]
8829 "TARGET_32BIT"
8830 "*
8831 {
8832 static const char *const cmp1[NUM_OF_COND_CMP][2] =
8833 {
8834 {\"cmp\\t%0, %1\",
8835 \"cmp\\t%2, %3\"},
8836 {\"cmn\\t%0, #%n1\",
8837 \"cmp\\t%2, %3\"},
8838 {\"cmp\\t%0, %1\",
8839 \"cmn\\t%2, #%n3\"},
8840 {\"cmn\\t%0, #%n1\",
8841 \"cmn\\t%2, #%n3\"}
8842 };
8843 static const char *const cmp2[NUM_OF_COND_CMP][2] =
8844 {
8845 {\"cmp%D4\\t%2, %3\",
8846 \"cmp%D5\\t%0, %1\"},
8847 {\"cmp%D4\\t%2, %3\",
8848 \"cmn%D5\\t%0, #%n1\"},
8849 {\"cmn%D4\\t%2, #%n3\",
8850 \"cmp%D5\\t%0, %1\"},
8851 {\"cmn%D4\\t%2, #%n3\",
8852 \"cmn%D5\\t%0, #%n1\"}
8853 };
8854 static const char *const ite[2] =
8855 {
8856 \"it\\t%D4\",
8857 \"it\\t%D5\"
8858 };
8859 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
8860 CMP_CMP, CMN_CMP, CMP_CMP,
8861 CMP_CMP, CMN_CMP, CMP_CMN,
8862 CMN_CMN};
8863 int swap =
8864 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8865
8866 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8867 if (TARGET_THUMB2) {
8868 output_asm_insn (ite[swap], operands);
8869 }
8870 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8871 return \"\";
8872 }
8873 "
8874 [(set_attr "conds" "set")
8875 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
8876 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
8877 (set_attr_alternative "length"
8878 [(const_int 6)
8879 (const_int 8)
8880 (const_int 8)
8881 (const_int 8)
8882 (const_int 8)
8883 (const_int 6)
8884 (if_then_else (eq_attr "is_thumb" "no")
8885 (const_int 8)
8886 (const_int 10))
8887 (if_then_else (eq_attr "is_thumb" "no")
8888 (const_int 8)
8889 (const_int 10))
8890 (if_then_else (eq_attr "is_thumb" "no")
8891 (const_int 8)
8892 (const_int 10))
8893 (if_then_else (eq_attr "is_thumb" "no")
8894 (const_int 8)
8895 (const_int 10))])
8896 (set_attr "type" "multiple")]
8897 )
8898
8899 (define_insn_and_split "*ior_scc_scc"
8900 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8901 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
8902 [(match_operand:SI 1 "s_register_operand" "l,r")
8903 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
8904 (match_operator:SI 6 "arm_comparison_operator"
8905 [(match_operand:SI 4 "s_register_operand" "l,r")
8906 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
8907 (clobber (reg:CC CC_REGNUM))]
8908 "TARGET_32BIT
8909 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
8910 != CCmode)"
8911 "#"
8912 "TARGET_32BIT && reload_completed"
8913 [(set (match_dup 7)
8914 (compare
8915 (ior:SI
8916 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8917 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8918 (const_int 0)))
8919 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
8920 "operands[7]
8921 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
8922 DOM_CC_X_OR_Y),
8923 CC_REGNUM);"
8924 [(set_attr "conds" "clob")
8925 (set_attr "enabled_for_short_it" "yes,no")
8926 (set_attr "length" "16")
8927 (set_attr "type" "multiple")]
8928 )
8929
8930 ; If the above pattern is followed by a CMP insn, then the compare is
8931 ; redundant, since we can rework the conditional instruction that follows.
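; For instance (an illustrative sketch, not generated output): for
;     t = (a == b) | (c < d);  if (t) ...
; the dominance-mode compare produced for the IOR already leaves the branch
; condition in the CC register, so the explicit comparison of t against zero
; can be dropped and the conditional insn that follows reworked to use it.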
8932 (define_insn_and_split "*ior_scc_scc_cmp"
8933 [(set (match_operand 0 "dominant_cc_register" "")
8934 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
8935 [(match_operand:SI 1 "s_register_operand" "l,r")
8936 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
8937 (match_operator:SI 6 "arm_comparison_operator"
8938 [(match_operand:SI 4 "s_register_operand" "l,r")
8939 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
8940 (const_int 0)))
8941 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
8942 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8943 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
8944 "TARGET_32BIT"
8945 "#"
8946 "TARGET_32BIT && reload_completed"
8947 [(set (match_dup 0)
8948 (compare
8949 (ior:SI
8950 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8951 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8952 (const_int 0)))
8953 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
8954 ""
8955 [(set_attr "conds" "set")
8956 (set_attr "enabled_for_short_it" "yes,no")
8957 (set_attr "length" "16")
8958 (set_attr "type" "multiple")]
8959 )
8960
8961 (define_insn_and_split "*and_scc_scc"
8962 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8963 (and:SI (match_operator:SI 3 "arm_comparison_operator"
8964 [(match_operand:SI 1 "s_register_operand" "l,r")
8965 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
8966 (match_operator:SI 6 "arm_comparison_operator"
8967 [(match_operand:SI 4 "s_register_operand" "l,r")
8968 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
8969 (clobber (reg:CC CC_REGNUM))]
8970 "TARGET_32BIT
8971 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
8972 != CCmode)"
8973 "#"
8974 "TARGET_32BIT && reload_completed
8975 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
8976 != CCmode)"
8977 [(set (match_dup 7)
8978 (compare
8979 (and:SI
8980 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8981 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8982 (const_int 0)))
8983 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
8984 "operands[7]
8985 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
8986 DOM_CC_X_AND_Y),
8987 CC_REGNUM);"
8988 [(set_attr "conds" "clob")
8989 (set_attr "enabled_for_short_it" "yes,no")
8990 (set_attr "length" "16")
8991 (set_attr "type" "multiple")]
8992 )
8993
8994 ; If the above pattern is followed by a CMP insn, then the compare is
8995 ; redundant, since we can rework the conditional instruction that follows.
8996 (define_insn_and_split "*and_scc_scc_cmp"
8997 [(set (match_operand 0 "dominant_cc_register" "")
8998 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
8999 [(match_operand:SI 1 "s_register_operand" "l,r")
9000 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9001 (match_operator:SI 6 "arm_comparison_operator"
9002 [(match_operand:SI 4 "s_register_operand" "l,r")
9003 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9004 (const_int 0)))
9005 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9006 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9007 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9008 "TARGET_32BIT"
9009 "#"
9010 "TARGET_32BIT && reload_completed"
9011 [(set (match_dup 0)
9012 (compare
9013 (and:SI
9014 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9015 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9016 (const_int 0)))
9017 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9018 ""
9019 [(set_attr "conds" "set")
9020 (set_attr "enabled_for_short_it" "yes,no")
9021 (set_attr "length" "16")
9022 (set_attr "type" "multiple")]
9023 )
9024
9025 ;; If there is no dominance in the comparison, then we can still save an
9026 ;; instruction in the AND case, since we can know that the second compare
9027 ;; need only zero the value if false (if true, then the value is already
9028 ;; correct).
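;; For example (a rough sketch of the split below, register names arbitrary):
;; for r0 = (a < b) & (c == d) with no dominating CC mode available, we emit
;; the first comparison as an ordinary scc into r0, then "cmp c, d", and
;; finally a conditional "movne r0, #0" that zeroes the result only when the
;; second test fails, instead of materialising a second scc value and ANDing
;; the two results together.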
9029 (define_insn_and_split "*and_scc_scc_nodom"
9030 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
9031 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9032 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9033 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9034 (match_operator:SI 6 "arm_comparison_operator"
9035 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9036 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9037 (clobber (reg:CC CC_REGNUM))]
9038 "TARGET_32BIT
9039 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9040 == CCmode)"
9041 "#"
9042 "TARGET_32BIT && reload_completed"
9043 [(parallel [(set (match_dup 0)
9044 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9045 (clobber (reg:CC CC_REGNUM))])
9046 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9047 (set (match_dup 0)
9048 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9049 (match_dup 0)
9050 (const_int 0)))]
9051 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9052 operands[4], operands[5]),
9053 CC_REGNUM);
9054 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9055 operands[5]);"
9056 [(set_attr "conds" "clob")
9057 (set_attr "length" "20")
9058 (set_attr "type" "multiple")]
9059 )
9060
9061 (define_split
9062 [(set (reg:CC_NOOV CC_REGNUM)
9063 (compare:CC_NOOV (ior:SI
9064 (and:SI (match_operand:SI 0 "s_register_operand" "")
9065 (const_int 1))
9066 (match_operator:SI 1 "arm_comparison_operator"
9067 [(match_operand:SI 2 "s_register_operand" "")
9068 (match_operand:SI 3 "arm_add_operand" "")]))
9069 (const_int 0)))
9070 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9071 "TARGET_ARM"
9072 [(set (match_dup 4)
9073 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9074 (match_dup 0)))
9075 (set (reg:CC_NOOV CC_REGNUM)
9076 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9077 (const_int 0)))]
9078 "")
9079
9080 (define_split
9081 [(set (reg:CC_NOOV CC_REGNUM)
9082 (compare:CC_NOOV (ior:SI
9083 (match_operator:SI 1 "arm_comparison_operator"
9084 [(match_operand:SI 2 "s_register_operand" "")
9085 (match_operand:SI 3 "arm_add_operand" "")])
9086 (and:SI (match_operand:SI 0 "s_register_operand" "")
9087 (const_int 1)))
9088 (const_int 0)))
9089 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9090 "TARGET_ARM"
9091 [(set (match_dup 4)
9092 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9093 (match_dup 0)))
9094 (set (reg:CC_NOOV CC_REGNUM)
9095 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9096 (const_int 0)))]
9097 "")
9098 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9099
9100 (define_insn_and_split "*negscc"
9101 [(set (match_operand:SI 0 "s_register_operand" "=r")
9102 (neg:SI (match_operator 3 "arm_comparison_operator"
9103 [(match_operand:SI 1 "s_register_operand" "r")
9104 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9105 (clobber (reg:CC CC_REGNUM))]
9106 "TARGET_ARM"
9107 "#"
9108 "&& reload_completed"
9109 [(const_int 0)]
9110 {
9111 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
9112
9113 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9114 {
9115 /* Emit mov\\t%0, %1, asr #31 */
9116 emit_insn (gen_rtx_SET (operands[0],
9117 gen_rtx_ASHIFTRT (SImode,
9118 operands[1],
9119 GEN_INT (31))));
9120 DONE;
9121 }
9122 else if (GET_CODE (operands[3]) == NE)
9123 {
9124 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
9125 if (CONST_INT_P (operands[2]))
9126 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
9127 gen_int_mode (-INTVAL (operands[2]),
9128 SImode)));
9129 else
9130 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
9131
9132 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9133 gen_rtx_NE (SImode,
9134 cc_reg,
9135 const0_rtx),
9136 gen_rtx_SET (operands[0],
9137 GEN_INT (~0))));
9138 DONE;
9139 }
9140 else
9141 {
9142 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
9143 emit_insn (gen_rtx_SET (cc_reg,
9144 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
9145 enum rtx_code rc = GET_CODE (operands[3]);
9146
9147 rc = reverse_condition (rc);
9148 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9149 gen_rtx_fmt_ee (rc,
9150 VOIDmode,
9151 cc_reg,
9152 const0_rtx),
9153 gen_rtx_SET (operands[0], const0_rtx)));
9154 rc = GET_CODE (operands[3]);
9155 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9156 gen_rtx_fmt_ee (rc,
9157 VOIDmode,
9158 cc_reg,
9159 const0_rtx),
9160 gen_rtx_SET (operands[0],
9161 GEN_INT (~0))));
9162 DONE;
9163 }
9164 FAIL;
9165 }
9166 [(set_attr "conds" "clob")
9167 (set_attr "length" "12")
9168 (set_attr "type" "multiple")]
9169 )
9170
9171 (define_insn_and_split "movcond_addsi"
9172 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
9173 (if_then_else:SI
9174 (match_operator 5 "comparison_operator"
9175 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
9176 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
9177 (const_int 0)])
9178 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
9179 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
9180 (clobber (reg:CC CC_REGNUM))]
9181 "TARGET_32BIT"
9182 "#"
9183 "&& reload_completed"
9184 [(set (reg:CC_NOOV CC_REGNUM)
9185 (compare:CC_NOOV
9186 (plus:SI (match_dup 3)
9187 (match_dup 4))
9188 (const_int 0)))
9189 (set (match_dup 0) (match_dup 1))
9190 (cond_exec (match_dup 6)
9191 (set (match_dup 0) (match_dup 2)))]
9192 "
9193 {
9194 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
9195 operands[3], operands[4]);
9196 enum rtx_code rc = GET_CODE (operands[5]);
9197 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
9198 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
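  /* The split below copies operand 1 into the destination unconditionally
     and then conditionally overwrites it with operand 2, so the guard must
     normally be the reverse of the original condition.  If operand 2 is
     already the destination register we must not clobber it first; swap the
     arms instead and keep the original condition.  */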
9199 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
9200 rc = reverse_condition (rc);
9201 else
9202 std::swap (operands[1], operands[2]);
9203
9204 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
9205 }
9206 "
9207 [(set_attr "conds" "clob")
9208 (set_attr "enabled_for_short_it" "no,yes,yes")
9209 (set_attr "type" "multiple")]
9210 )
9211
9212 (define_insn "movcond"
9213 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9214 (if_then_else:SI
9215 (match_operator 5 "arm_comparison_operator"
9216 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9217 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9218 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9219 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9220 (clobber (reg:CC CC_REGNUM))]
9221 "TARGET_ARM"
9222 "*
9223 if (GET_CODE (operands[5]) == LT
9224 && (operands[4] == const0_rtx))
9225 {
9226 if (which_alternative != 1 && REG_P (operands[1]))
9227 {
9228 if (operands[2] == const0_rtx)
9229 return \"and\\t%0, %1, %3, asr #31\";
9230 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9231 }
9232 else if (which_alternative != 0 && REG_P (operands[2]))
9233 {
9234 if (operands[1] == const0_rtx)
9235 return \"bic\\t%0, %2, %3, asr #31\";
9236 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9237 }
9238 /* The only case that falls through to here is when both ops 1 & 2
9239 are constants. */
9240 }
9241
9242 if (GET_CODE (operands[5]) == GE
9243 && (operands[4] == const0_rtx))
9244 {
9245 if (which_alternative != 1 && REG_P (operands[1]))
9246 {
9247 if (operands[2] == const0_rtx)
9248 return \"bic\\t%0, %1, %3, asr #31\";
9249 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9250 }
9251 else if (which_alternative != 0 && REG_P (operands[2]))
9252 {
9253 if (operands[1] == const0_rtx)
9254 return \"and\\t%0, %2, %3, asr #31\";
9255 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9256 }
9257 /* The only case that falls through to here is when both ops 1 & 2
9258 are constants. */
9259 }
9260 if (CONST_INT_P (operands[4])
9261 && !const_ok_for_arm (INTVAL (operands[4])))
9262 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9263 else
9264 output_asm_insn (\"cmp\\t%3, %4\", operands);
9265 if (which_alternative != 0)
9266 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9267 if (which_alternative != 1)
9268 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9269 return \"\";
9270 "
9271 [(set_attr "conds" "clob")
9272 (set_attr "length" "8,8,12")
9273 (set_attr "type" "multiple")]
9274 )
9275
9276 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9277
9278 (define_insn "*ifcompare_plus_move"
9279 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9280 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9281 [(match_operand:SI 4 "s_register_operand" "r,r")
9282 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9283 (plus:SI
9284 (match_operand:SI 2 "s_register_operand" "r,r")
9285 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9286 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9287 (clobber (reg:CC CC_REGNUM))]
9288 "TARGET_ARM"
9289 "#"
9290 [(set_attr "conds" "clob")
9291 (set_attr "length" "8,12")
9292 (set_attr "type" "multiple")]
9293 )
9294
9295 (define_insn "*if_plus_move"
9296 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9297 (if_then_else:SI
9298 (match_operator 4 "arm_comparison_operator"
9299 [(match_operand 5 "cc_register" "") (const_int 0)])
9300 (plus:SI
9301 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9302 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9303 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9304 "TARGET_ARM"
9305 "@
9306 add%d4\\t%0, %2, %3
9307 sub%d4\\t%0, %2, #%n3
9308 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9309 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9310 [(set_attr "conds" "use")
9311 (set_attr "length" "4,4,8,8")
9312 (set_attr_alternative "type"
9313 [(if_then_else (match_operand 3 "const_int_operand" "")
9314 (const_string "alu_imm" )
9315 (const_string "alu_sreg"))
9316 (const_string "alu_imm")
9317 (const_string "multiple")
9318 (const_string "multiple")])]
9319 )
9320
9321 (define_insn "*ifcompare_move_plus"
9322 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9323 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9324 [(match_operand:SI 4 "s_register_operand" "r,r")
9325 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9326 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9327 (plus:SI
9328 (match_operand:SI 2 "s_register_operand" "r,r")
9329 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9330 (clobber (reg:CC CC_REGNUM))]
9331 "TARGET_ARM"
9332 "#"
9333 [(set_attr "conds" "clob")
9334 (set_attr "length" "8,12")
9335 (set_attr "type" "multiple")]
9336 )
9337
9338 (define_insn "*if_move_plus"
9339 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9340 (if_then_else:SI
9341 (match_operator 4 "arm_comparison_operator"
9342 [(match_operand 5 "cc_register" "") (const_int 0)])
9343 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9344 (plus:SI
9345 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9346 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9347 "TARGET_ARM"
9348 "@
9349 add%D4\\t%0, %2, %3
9350 sub%D4\\t%0, %2, #%n3
9351 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9352 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9353 [(set_attr "conds" "use")
9354 (set_attr "length" "4,4,8,8")
9355 (set_attr_alternative "type"
9356 [(if_then_else (match_operand 3 "const_int_operand" "")
9357 (const_string "alu_imm" )
9358 (const_string "alu_sreg"))
9359 (const_string "alu_imm")
9360 (const_string "multiple")
9361 (const_string "multiple")])]
9362 )
9363
9364 (define_insn "*ifcompare_arith_arith"
9365 [(set (match_operand:SI 0 "s_register_operand" "=r")
9366 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9367 [(match_operand:SI 5 "s_register_operand" "r")
9368 (match_operand:SI 6 "arm_add_operand" "rIL")])
9369 (match_operator:SI 8 "shiftable_operator"
9370 [(match_operand:SI 1 "s_register_operand" "r")
9371 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9372 (match_operator:SI 7 "shiftable_operator"
9373 [(match_operand:SI 3 "s_register_operand" "r")
9374 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9375 (clobber (reg:CC CC_REGNUM))]
9376 "TARGET_ARM"
9377 "#"
9378 [(set_attr "conds" "clob")
9379 (set_attr "length" "12")
9380 (set_attr "type" "multiple")]
9381 )
9382
9383 (define_insn "*if_arith_arith"
9384 [(set (match_operand:SI 0 "s_register_operand" "=r")
9385 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9386 [(match_operand 8 "cc_register" "") (const_int 0)])
9387 (match_operator:SI 6 "shiftable_operator"
9388 [(match_operand:SI 1 "s_register_operand" "r")
9389 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9390 (match_operator:SI 7 "shiftable_operator"
9391 [(match_operand:SI 3 "s_register_operand" "r")
9392 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9393 "TARGET_ARM"
9394 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9395 [(set_attr "conds" "use")
9396 (set_attr "length" "8")
9397 (set_attr "type" "multiple")]
9398 )
9399
9400 (define_insn "*ifcompare_arith_move"
9401 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9402 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9403 [(match_operand:SI 2 "s_register_operand" "r,r")
9404 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9405 (match_operator:SI 7 "shiftable_operator"
9406 [(match_operand:SI 4 "s_register_operand" "r,r")
9407 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9408 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9409 (clobber (reg:CC CC_REGNUM))]
9410 "TARGET_ARM"
9411 "*
9412 /* If we have an operation where (op x 0) is the identity operation, the
9413 conditional operator is LT or GE, we are comparing against zero, and
9414 everything is in registers, then we can do this in two instructions.  */
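  /* E.g. (illustrative, register names arbitrary): for
         r0 = (a < 0 ? x + y : x)
     with x in a register distinct from r0, the code below emits
         and  r0, y, a, asr #31    @ r0 = (a < 0) ? y : 0
         add  r0, x, r0
     rather than a compare followed by conditional moves.  */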
9415 if (operands[3] == const0_rtx
9416 && GET_CODE (operands[7]) != AND
9417 && REG_P (operands[5])
9418 && REG_P (operands[1])
9419 && REGNO (operands[1]) == REGNO (operands[4])
9420 && REGNO (operands[4]) != REGNO (operands[0]))
9421 {
9422 if (GET_CODE (operands[6]) == LT)
9423 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9424 else if (GET_CODE (operands[6]) == GE)
9425 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9426 }
9427 if (CONST_INT_P (operands[3])
9428 && !const_ok_for_arm (INTVAL (operands[3])))
9429 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9430 else
9431 output_asm_insn (\"cmp\\t%2, %3\", operands);
9432 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9433 if (which_alternative != 0)
9434 return \"mov%D6\\t%0, %1\";
9435 return \"\";
9436 "
9437 [(set_attr "conds" "clob")
9438 (set_attr "length" "8,12")
9439 (set_attr "type" "multiple")]
9440 )
9441
9442 (define_insn "*if_arith_move"
9443 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9444 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9445 [(match_operand 6 "cc_register" "") (const_int 0)])
9446 (match_operator:SI 5 "shiftable_operator"
9447 [(match_operand:SI 2 "s_register_operand" "r,r")
9448 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9449 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9450 "TARGET_ARM"
9451 "@
9452 %I5%d4\\t%0, %2, %3
9453 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9454 [(set_attr "conds" "use")
9455 (set_attr "length" "4,8")
9456 (set_attr_alternative "type"
9457 [(if_then_else (match_operand 3 "const_int_operand" "")
9458 (const_string "alu_shift_imm" )
9459 (const_string "alu_shift_reg"))
9460 (const_string "multiple")])]
9461 )
9462
9463 (define_insn "*ifcompare_move_arith"
9464 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9465 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9466 [(match_operand:SI 4 "s_register_operand" "r,r")
9467 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9468 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9469 (match_operator:SI 7 "shiftable_operator"
9470 [(match_operand:SI 2 "s_register_operand" "r,r")
9471 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9472 (clobber (reg:CC CC_REGNUM))]
9473 "TARGET_ARM"
9474 "*
9475 /* If we have an operation where (op x 0) is the identity operation, the
9476 conditional operator is LT or GE, we are comparing against zero, and
9477 everything is in registers, then we can do this in two instructions.  */
9478 if (operands[5] == const0_rtx
9479 && GET_CODE (operands[7]) != AND
9480 && REG_P (operands[3])
9481 && REG_P (operands[1])
9482 && REGNO (operands[1]) == REGNO (operands[2])
9483 && REGNO (operands[2]) != REGNO (operands[0]))
9484 {
9485 if (GET_CODE (operands[6]) == GE)
9486 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9487 else if (GET_CODE (operands[6]) == LT)
9488 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9489 }
9490
9491 if (CONST_INT_P (operands[5])
9492 && !const_ok_for_arm (INTVAL (operands[5])))
9493 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9494 else
9495 output_asm_insn (\"cmp\\t%4, %5\", operands);
9496
9497 if (which_alternative != 0)
9498 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9499 return \"%I7%D6\\t%0, %2, %3\";
9500 "
9501 [(set_attr "conds" "clob")
9502 (set_attr "length" "8,12")
9503 (set_attr "type" "multiple")]
9504 )
9505
9506 (define_insn "*if_move_arith"
9507 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9508 (if_then_else:SI
9509 (match_operator 4 "arm_comparison_operator"
9510 [(match_operand 6 "cc_register" "") (const_int 0)])
9511 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9512 (match_operator:SI 5 "shiftable_operator"
9513 [(match_operand:SI 2 "s_register_operand" "r,r")
9514 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9515 "TARGET_ARM"
9516 "@
9517 %I5%D4\\t%0, %2, %3
9518 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9519 [(set_attr "conds" "use")
9520 (set_attr "length" "4,8")
9521 (set_attr_alternative "type"
9522 [(if_then_else (match_operand 3 "const_int_operand" "")
9523 (const_string "alu_shift_imm" )
9524 (const_string "alu_shift_reg"))
9525 (const_string "multiple")])]
9526 )
9527
9528 (define_insn "*ifcompare_move_not"
9529 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9530 (if_then_else:SI
9531 (match_operator 5 "arm_comparison_operator"
9532 [(match_operand:SI 3 "s_register_operand" "r,r")
9533 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9534 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9535 (not:SI
9536 (match_operand:SI 2 "s_register_operand" "r,r"))))
9537 (clobber (reg:CC CC_REGNUM))]
9538 "TARGET_ARM"
9539 "#"
9540 [(set_attr "conds" "clob")
9541 (set_attr "length" "8,12")
9542 (set_attr "type" "multiple")]
9543 )
9544
9545 (define_insn "*if_move_not"
9546 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9547 (if_then_else:SI
9548 (match_operator 4 "arm_comparison_operator"
9549 [(match_operand 3 "cc_register" "") (const_int 0)])
9550 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9551 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9552 "TARGET_ARM"
9553 "@
9554 mvn%D4\\t%0, %2
9555 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9556 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9557 [(set_attr "conds" "use")
9559 (set_attr "length" "4,8,8")
9560 (set_attr "type" "mvn_reg,multiple,multiple")]
9561 )
9562
9563 (define_insn "*ifcompare_not_move"
9564 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9565 (if_then_else:SI
9566 (match_operator 5 "arm_comparison_operator"
9567 [(match_operand:SI 3 "s_register_operand" "r,r")
9568 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9569 (not:SI
9570 (match_operand:SI 2 "s_register_operand" "r,r"))
9571 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9572 (clobber (reg:CC CC_REGNUM))]
9573 "TARGET_ARM"
9574 "#"
9575 [(set_attr "conds" "clob")
9576 (set_attr "length" "8,12")
9577 (set_attr "type" "multiple")]
9578 )
9579
9580 (define_insn "*if_not_move"
9581 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9582 (if_then_else:SI
9583 (match_operator 4 "arm_comparison_operator"
9584 [(match_operand 3 "cc_register" "") (const_int 0)])
9585 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9586 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9587 "TARGET_ARM"
9588 "@
9589 mvn%d4\\t%0, %2
9590 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9591 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9592 [(set_attr "conds" "use")
9593 (set_attr "type" "mvn_reg,multiple,multiple")
9594 (set_attr "length" "4,8,8")]
9595 )
9596
9597 (define_insn "*ifcompare_shift_move"
9598 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9599 (if_then_else:SI
9600 (match_operator 6 "arm_comparison_operator"
9601 [(match_operand:SI 4 "s_register_operand" "r,r")
9602 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9603 (match_operator:SI 7 "shift_operator"
9604 [(match_operand:SI 2 "s_register_operand" "r,r")
9605 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9606 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9607 (clobber (reg:CC CC_REGNUM))]
9608 "TARGET_ARM"
9609 "#"
9610 [(set_attr "conds" "clob")
9611 (set_attr "length" "8,12")
9612 (set_attr "type" "multiple")]
9613 )
9614
9615 (define_insn "*if_shift_move"
9616 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9617 (if_then_else:SI
9618 (match_operator 5 "arm_comparison_operator"
9619 [(match_operand 6 "cc_register" "") (const_int 0)])
9620 (match_operator:SI 4 "shift_operator"
9621 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9622 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9623 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9624 "TARGET_ARM"
9625 "@
9626 mov%d5\\t%0, %2%S4
9627 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9628 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9629 [(set_attr "conds" "use")
9630 (set_attr "shift" "2")
9631 (set_attr "length" "4,8,8")
9632 (set_attr_alternative "type"
9633 [(if_then_else (match_operand 3 "const_int_operand" "")
9634 (const_string "mov_shift" )
9635 (const_string "mov_shift_reg"))
9636 (const_string "multiple")
9637 (const_string "multiple")])]
9638 )
9639
9640 (define_insn "*ifcompare_move_shift"
9641 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9642 (if_then_else:SI
9643 (match_operator 6 "arm_comparison_operator"
9644 [(match_operand:SI 4 "s_register_operand" "r,r")
9645 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9646 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9647 (match_operator:SI 7 "shift_operator"
9648 [(match_operand:SI 2 "s_register_operand" "r,r")
9649 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9650 (clobber (reg:CC CC_REGNUM))]
9651 "TARGET_ARM"
9652 "#"
9653 [(set_attr "conds" "clob")
9654 (set_attr "length" "8,12")
9655 (set_attr "type" "multiple")]
9656 )
9657
9658 (define_insn "*if_move_shift"
9659 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9660 (if_then_else:SI
9661 (match_operator 5 "arm_comparison_operator"
9662 [(match_operand 6 "cc_register" "") (const_int 0)])
9663 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9664 (match_operator:SI 4 "shift_operator"
9665 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9666 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9667 "TARGET_ARM"
9668 "@
9669 mov%D5\\t%0, %2%S4
9670 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9671 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9672 [(set_attr "conds" "use")
9673 (set_attr "shift" "2")
9674 (set_attr "length" "4,8,8")
9675 (set_attr_alternative "type"
9676 [(if_then_else (match_operand 3 "const_int_operand" "")
9677 (const_string "mov_shift" )
9678 (const_string "mov_shift_reg"))
9679 (const_string "multiple")
9680 (const_string "multiple")])]
9681 )
9682
9683 (define_insn "*ifcompare_shift_shift"
9684 [(set (match_operand:SI 0 "s_register_operand" "=r")
9685 (if_then_else:SI
9686 (match_operator 7 "arm_comparison_operator"
9687 [(match_operand:SI 5 "s_register_operand" "r")
9688 (match_operand:SI 6 "arm_add_operand" "rIL")])
9689 (match_operator:SI 8 "shift_operator"
9690 [(match_operand:SI 1 "s_register_operand" "r")
9691 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9692 (match_operator:SI 9 "shift_operator"
9693 [(match_operand:SI 3 "s_register_operand" "r")
9694 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9695 (clobber (reg:CC CC_REGNUM))]
9696 "TARGET_ARM"
9697 "#"
9698 [(set_attr "conds" "clob")
9699 (set_attr "length" "12")
9700 (set_attr "type" "multiple")]
9701 )
9702
9703 (define_insn "*if_shift_shift"
9704 [(set (match_operand:SI 0 "s_register_operand" "=r")
9705 (if_then_else:SI
9706 (match_operator 5 "arm_comparison_operator"
9707 [(match_operand 8 "cc_register" "") (const_int 0)])
9708 (match_operator:SI 6 "shift_operator"
9709 [(match_operand:SI 1 "s_register_operand" "r")
9710 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9711 (match_operator:SI 7 "shift_operator"
9712 [(match_operand:SI 3 "s_register_operand" "r")
9713 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
9714 "TARGET_ARM"
9715 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
9716 [(set_attr "conds" "use")
9717 (set_attr "shift" "1")
9718 (set_attr "length" "8")
9719 (set (attr "type") (if_then_else
9720 (and (match_operand 2 "const_int_operand" "")
9721 (match_operand 4 "const_int_operand" ""))
9722 (const_string "mov_shift")
9723 (const_string "mov_shift_reg")))]
9724 )
9725
9726 (define_insn "*ifcompare_not_arith"
9727 [(set (match_operand:SI 0 "s_register_operand" "=r")
9728 (if_then_else:SI
9729 (match_operator 6 "arm_comparison_operator"
9730 [(match_operand:SI 4 "s_register_operand" "r")
9731 (match_operand:SI 5 "arm_add_operand" "rIL")])
9732 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9733 (match_operator:SI 7 "shiftable_operator"
9734 [(match_operand:SI 2 "s_register_operand" "r")
9735 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
9736 (clobber (reg:CC CC_REGNUM))]
9737 "TARGET_ARM"
9738 "#"
9739 [(set_attr "conds" "clob")
9740 (set_attr "length" "12")
9741 (set_attr "type" "multiple")]
9742 )
9743
9744 (define_insn "*if_not_arith"
9745 [(set (match_operand:SI 0 "s_register_operand" "=r")
9746 (if_then_else:SI
9747 (match_operator 5 "arm_comparison_operator"
9748 [(match_operand 4 "cc_register" "") (const_int 0)])
9749 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9750 (match_operator:SI 6 "shiftable_operator"
9751 [(match_operand:SI 2 "s_register_operand" "r")
9752 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
9753 "TARGET_ARM"
9754 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
9755 [(set_attr "conds" "use")
9756 (set_attr "type" "mvn_reg")
9757 (set_attr "length" "8")]
9758 )
9759
9760 (define_insn "*ifcompare_arith_not"
9761 [(set (match_operand:SI 0 "s_register_operand" "=r")
9762 (if_then_else:SI
9763 (match_operator 6 "arm_comparison_operator"
9764 [(match_operand:SI 4 "s_register_operand" "r")
9765 (match_operand:SI 5 "arm_add_operand" "rIL")])
9766 (match_operator:SI 7 "shiftable_operator"
9767 [(match_operand:SI 2 "s_register_operand" "r")
9768 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9769 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
9770 (clobber (reg:CC CC_REGNUM))]
9771 "TARGET_ARM"
9772 "#"
9773 [(set_attr "conds" "clob")
9774 (set_attr "length" "12")
9775 (set_attr "type" "multiple")]
9776 )
9777
9778 (define_insn "*if_arith_not"
9779 [(set (match_operand:SI 0 "s_register_operand" "=r")
9780 (if_then_else:SI
9781 (match_operator 5 "arm_comparison_operator"
9782 [(match_operand 4 "cc_register" "") (const_int 0)])
9783 (match_operator:SI 6 "shiftable_operator"
9784 [(match_operand:SI 2 "s_register_operand" "r")
9785 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9786 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
9787 "TARGET_ARM"
9788 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
9789 [(set_attr "conds" "use")
9790 (set_attr "type" "multiple")
9791 (set_attr "length" "8")]
9792 )
9793
9794 (define_insn "*ifcompare_neg_move"
9795 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9796 (if_then_else:SI
9797 (match_operator 5 "arm_comparison_operator"
9798 [(match_operand:SI 3 "s_register_operand" "r,r")
9799 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9800 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
9801 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9802 (clobber (reg:CC CC_REGNUM))]
9803 "TARGET_ARM"
9804 "#"
9805 [(set_attr "conds" "clob")
9806 (set_attr "length" "8,12")
9807 (set_attr "type" "multiple")]
9808 )
9809
9810 (define_insn_and_split "*if_neg_move"
9811 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
9812 (if_then_else:SI
9813 (match_operator 4 "arm_comparison_operator"
9814 [(match_operand 3 "cc_register" "") (const_int 0)])
9815 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
9816 (match_operand:SI 1 "s_register_operand" "0,0")))]
9817 "TARGET_32BIT"
9818 "#"
9819 "&& reload_completed"
9820 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
9821 (set (match_dup 0) (neg:SI (match_dup 2))))]
9822 ""
9823 [(set_attr "conds" "use")
9824 (set_attr "length" "4")
9825 (set_attr "arch" "t2,32")
9826 (set_attr "enabled_for_short_it" "yes,no")
9827 (set_attr "type" "logic_shift_imm")]
9828 )
9829
9830 (define_insn "*ifcompare_move_neg"
9831 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9832 (if_then_else:SI
9833 (match_operator 5 "arm_comparison_operator"
9834 [(match_operand:SI 3 "s_register_operand" "r,r")
9835 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9836 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9837 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
9838 (clobber (reg:CC CC_REGNUM))]
9839 "TARGET_ARM"
9840 "#"
9841 [(set_attr "conds" "clob")
9842 (set_attr "length" "8,12")
9843 (set_attr "type" "multiple")]
9844 )
9845
9846 (define_insn_and_split "*if_move_neg"
9847 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
9848 (if_then_else:SI
9849 (match_operator 4 "arm_comparison_operator"
9850 [(match_operand 3 "cc_register" "") (const_int 0)])
9851 (match_operand:SI 1 "s_register_operand" "0,0")
9852 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
9853 "TARGET_32BIT"
9854 "#"
9855 "&& reload_completed"
9856 [(cond_exec (match_dup 5)
9857 (set (match_dup 0) (neg:SI (match_dup 2))))]
9858 {
9859 machine_mode mode = GET_MODE (operands[3]);
9860 rtx_code rc = GET_CODE (operands[4]);
9861
9862 if (mode == CCFPmode || mode == CCFPEmode)
9863 rc = reverse_condition_maybe_unordered (rc);
9864 else
9865 rc = reverse_condition (rc);
9866
9867 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
9868 }
9869 [(set_attr "conds" "use")
9870 (set_attr "length" "4")
9871 (set_attr "arch" "t2,32")
9872 (set_attr "enabled_for_short_it" "yes,no")
9873 (set_attr "type" "logic_shift_imm")]
9874 )
9875
9876 (define_insn "*arith_adjacentmem"
9877 [(set (match_operand:SI 0 "s_register_operand" "=r")
9878 (match_operator:SI 1 "shiftable_operator"
9879 [(match_operand:SI 2 "memory_operand" "m")
9880 (match_operand:SI 3 "memory_operand" "m")]))
9881 (clobber (match_scratch:SI 4 "=r"))]
9882 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
9883 "*
9884 {
9885 rtx ldm[3];
9886 rtx arith[4];
9887 rtx base_reg;
9888 HOST_WIDE_INT val1 = 0, val2 = 0;
9889
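  /* The two memory operands are known to be adjacent words.  Load both with
     a single LDM when the base offset allows it (adjusting the base with an
     ADD first if necessary), falling back to two LDRs when the offset is
     out of range, and then apply the arithmetic operation to the two loaded
     values.  */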
9890 if (REGNO (operands[0]) > REGNO (operands[4]))
9891 {
9892 ldm[1] = operands[4];
9893 ldm[2] = operands[0];
9894 }
9895 else
9896 {
9897 ldm[1] = operands[0];
9898 ldm[2] = operands[4];
9899 }
9900
9901 base_reg = XEXP (operands[2], 0);
9902
9903 if (!REG_P (base_reg))
9904 {
9905 val1 = INTVAL (XEXP (base_reg, 1));
9906 base_reg = XEXP (base_reg, 0);
9907 }
9908
9909 if (!REG_P (XEXP (operands[3], 0)))
9910 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
9911
9912 arith[0] = operands[0];
9913 arith[3] = operands[1];
9914
9915 if (val1 < val2)
9916 {
9917 arith[1] = ldm[1];
9918 arith[2] = ldm[2];
9919 }
9920 else
9921 {
9922 arith[1] = ldm[2];
9923 arith[2] = ldm[1];
9924 }
9925
9926 ldm[0] = base_reg;
9927 if (val1 != 0 && val2 != 0)
9928 {
9929 rtx ops[3];
9930
9931 if (val1 == 4 || val2 == 4)
9932 /* Other val must be 8, since we know they are adjacent and neither
9933 is zero. */
9934 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
9935 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
9936 {
9937 ldm[0] = ops[0] = operands[4];
9938 ops[1] = base_reg;
9939 ops[2] = GEN_INT (val1);
9940 output_add_immediate (ops);
9941 if (val1 < val2)
9942 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
9943 else
9944 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
9945 }
9946 else
9947 {
9948 /* Offset is out of range for a single add, so use two ldr. */
9949 ops[0] = ldm[1];
9950 ops[1] = base_reg;
9951 ops[2] = GEN_INT (val1);
9952 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9953 ops[0] = ldm[2];
9954 ops[2] = GEN_INT (val2);
9955 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9956 }
9957 }
9958 else if (val1 != 0)
9959 {
9960 if (val1 < val2)
9961 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
9962 else
9963 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
9964 }
9965 else
9966 {
9967 if (val1 < val2)
9968 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
9969 else
9970 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
9971 }
9972 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
9973 return \"\";
9974 }"
9975 [(set_attr "length" "12")
9976 (set_attr "predicable" "yes")
9977 (set_attr "type" "load_4")]
9978 )
9979
9980 ; This pattern is never tried by combine, so do it as a peephole
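; For instance (sketch): a "mov r0, r1" followed by "cmp r1, #0" is folded
; into one parallel set-and-compare, which a flag-setting move pattern can
; then emit as a single instruction.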
9981
9982 (define_peephole2
9983 [(set (match_operand:SI 0 "arm_general_register_operand" "")
9984 (match_operand:SI 1 "arm_general_register_operand" ""))
9985 (set (reg:CC CC_REGNUM)
9986 (compare:CC (match_dup 1) (const_int 0)))]
9987 "TARGET_ARM"
9988 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
9989 (set (match_dup 0) (match_dup 1))])]
9990 ""
9991 )
9992
9993 (define_split
9994 [(set (match_operand:SI 0 "s_register_operand" "")
9995 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
9996 (const_int 0))
9997 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
9998 [(match_operand:SI 3 "s_register_operand" "")
9999 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10000 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10001 "TARGET_ARM"
10002 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10003 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10004 (match_dup 5)))]
10005 ""
10006 )
10007
10008 ;; This split can be used because CC_Z mode implies that the following
10009 ;; branch will be an equality, or an unsigned inequality, so the sign
10010 ;; extension is not needed.
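;; For example (hand-written illustration): a byte test such as
;;     if (*(unsigned char *) p == 0x40) ...
;; may reach here as a CC_Z compare of the byte shifted left by 24 against
;; 0x40000000; the split turns it into a zero-extending load followed by
;;     cmp rT, #0x40
;; which is safe because the branch only needs an equality or unsigned test.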
10011
10012 (define_split
10013 [(set (reg:CC_Z CC_REGNUM)
10014 (compare:CC_Z
10015 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10016 (const_int 24))
10017 (match_operand 1 "const_int_operand" "")))
10018 (clobber (match_scratch:SI 2 ""))]
10019 "TARGET_ARM
10020 && ((UINTVAL (operands[1]))
10021 == ((UINTVAL (operands[1])) >> 24) << 24)"
10022 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10023 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10024 "
10025 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10026 "
10027 )
10028 ;; ??? Check the patterns above for Thumb-2 usefulness
10029
10030 (define_expand "prologue"
10031 [(clobber (const_int 0))]
10032 "TARGET_EITHER"
10033 "if (TARGET_32BIT)
10034 arm_expand_prologue ();
10035 else
10036 thumb1_expand_prologue ();
10037 DONE;
10038 "
10039 )
10040
10041 (define_expand "epilogue"
10042 [(clobber (const_int 0))]
10043 "TARGET_EITHER"
10044 "
10045 if (crtl->calls_eh_return)
10046 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
10047 if (TARGET_THUMB1)
10048 {
10049 thumb1_expand_epilogue ();
10050 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10051 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10052 }
10053 else if (HAVE_return)
10054 {
10055 /* HAVE_return already tests USE_RETURN_INSN (FALSE), so there is
10056 no need to test it explicitly again.  */
10057 emit_jump_insn (gen_return ());
10058 }
10059 else if (TARGET_32BIT)
10060 {
10061 arm_expand_epilogue (true);
10062 }
10063 DONE;
10064 "
10065 )
10066
10067 ;; Note - although unspec_volatiles USE all hard registers,
10068 ;; USEs are ignored after reload has completed.  Thus we need
10069 ;; to add an unspec of the link register to ensure that flow
10070 ;; does not think that it is unused by the sibcall branch that
10071 ;; will replace the standard function epilogue.
10072 (define_expand "sibcall_epilogue"
10073 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
10074 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10075 "TARGET_32BIT"
10076 "
10077 arm_expand_epilogue (false);
10078 DONE;
10079 "
10080 )
10081
10082 (define_expand "eh_epilogue"
10083 [(use (match_operand:SI 0 "register_operand"))
10084 (use (match_operand:SI 1 "register_operand"))
10085 (use (match_operand:SI 2 "register_operand"))]
10086 "TARGET_EITHER"
10087 "
10088 {
10089 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10090 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
10091 {
10092 rtx ra = gen_rtx_REG (Pmode, 2);
10093
10094 emit_move_insn (ra, operands[2]);
10095 operands[2] = ra;
10096 }
10097 /* This is a hack -- we may have crystallized the function type too
10098 early. */
10099 cfun->machine->func_type = 0;
10100 }"
10101 )
10102
10103 ;; This split is only used during output to reduce the number of patterns
10104 ;; that need assembler instructions added to them.  We allowed the setting
10105 ;; of the conditions to be implicit during rtl generation so that
10106 ;; the conditional compare patterns would work.  However, this conflicts to
10107 ;; some extent with the conditional data operations, so we have to split them
10108 ;; up again here.
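;; For instance (sketch): an insn such as
;;     (set r0 (if_then_else (lt r1 r2) r0 r3))    with a CC clobber
;; becomes, after the first split below,
;;     (set CC (compare r1 r2))
;;     (cond_exec (ge CC 0) (set r0 r3))
;; i.e. roughly "cmp r1, r2" followed by "movge r0, r3" in the output.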
10109
10110 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10111 ;; conditional execution sufficient?
10112
10113 (define_split
10114 [(set (match_operand:SI 0 "s_register_operand" "")
10115 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10116 [(match_operand 2 "" "") (match_operand 3 "" "")])
10117 (match_dup 0)
10118 (match_operand 4 "" "")))
10119 (clobber (reg:CC CC_REGNUM))]
10120 "TARGET_ARM && reload_completed"
10121 [(set (match_dup 5) (match_dup 6))
10122 (cond_exec (match_dup 7)
10123 (set (match_dup 0) (match_dup 4)))]
10124 "
10125 {
10126 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10127 operands[2], operands[3]);
10128 enum rtx_code rc = GET_CODE (operands[1]);
10129
10130 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10131 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10132 if (mode == CCFPmode || mode == CCFPEmode)
10133 rc = reverse_condition_maybe_unordered (rc);
10134 else
10135 rc = reverse_condition (rc);
10136
10137 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10138 }"
10139 )
10140
10141 (define_split
10142 [(set (match_operand:SI 0 "s_register_operand" "")
10143 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10144 [(match_operand 2 "" "") (match_operand 3 "" "")])
10145 (match_operand 4 "" "")
10146 (match_dup 0)))
10147 (clobber (reg:CC CC_REGNUM))]
10148 "TARGET_ARM && reload_completed"
10149 [(set (match_dup 5) (match_dup 6))
10150 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10151 (set (match_dup 0) (match_dup 4)))]
10152 "
10153 {
10154 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10155 operands[2], operands[3]);
10156
10157 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10158 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10159 }"
10160 )
10161
10162 (define_split
10163 [(set (match_operand:SI 0 "s_register_operand" "")
10164 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10165 [(match_operand 2 "" "") (match_operand 3 "" "")])
10166 (match_operand 4 "" "")
10167 (match_operand 5 "" "")))
10168 (clobber (reg:CC CC_REGNUM))]
10169 "TARGET_ARM && reload_completed"
10170 [(set (match_dup 6) (match_dup 7))
10171 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10172 (set (match_dup 0) (match_dup 4)))
10173 (cond_exec (match_dup 8)
10174 (set (match_dup 0) (match_dup 5)))]
10175 "
10176 {
10177 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10178 operands[2], operands[3]);
10179 enum rtx_code rc = GET_CODE (operands[1]);
10180
10181 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10182 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10183 if (mode == CCFPmode || mode == CCFPEmode)
10184 rc = reverse_condition_maybe_unordered (rc);
10185 else
10186 rc = reverse_condition (rc);
10187
10188 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10189 }"
10190 )
10191
10192 (define_split
10193 [(set (match_operand:SI 0 "s_register_operand" "")
10194 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10195 [(match_operand:SI 2 "s_register_operand" "")
10196 (match_operand:SI 3 "arm_add_operand" "")])
10197 (match_operand:SI 4 "arm_rhs_operand" "")
10198 (not:SI
10199 (match_operand:SI 5 "s_register_operand" ""))))
10200 (clobber (reg:CC CC_REGNUM))]
10201 "TARGET_ARM && reload_completed"
10202 [(set (match_dup 6) (match_dup 7))
10203 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10204 (set (match_dup 0) (match_dup 4)))
10205 (cond_exec (match_dup 8)
10206 (set (match_dup 0) (not:SI (match_dup 5))))]
10207 "
10208 {
10209 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10210 operands[2], operands[3]);
10211 enum rtx_code rc = GET_CODE (operands[1]);
10212
10213 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10214 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10215 if (mode == CCFPmode || mode == CCFPEmode)
10216 rc = reverse_condition_maybe_unordered (rc);
10217 else
10218 rc = reverse_condition (rc);
10219
10220 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10221 }"
10222 )
10223
10224 (define_insn "*cond_move_not"
10225 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10226 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10227 [(match_operand 3 "cc_register" "") (const_int 0)])
10228 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10229 (not:SI
10230 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10231 "TARGET_ARM"
10232 "@
10233 mvn%D4\\t%0, %2
10234 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10235 [(set_attr "conds" "use")
10236 (set_attr "type" "mvn_reg,multiple")
10237 (set_attr "length" "4,8")]
10238 )
10239
10240 ;; The next two patterns occur when an AND operation is followed by a
10241 ;; scc insn sequence
10242
10243 (define_insn "*sign_extract_onebit"
10244 [(set (match_operand:SI 0 "s_register_operand" "=r")
10245 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10246 (const_int 1)
10247 (match_operand:SI 2 "const_int_operand" "n")))
10248 (clobber (reg:CC CC_REGNUM))]
10249 "TARGET_ARM"
10250 "*
10251 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10252 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10253 return \"mvnne\\t%0, #0\";
10254 "
10255 [(set_attr "conds" "clob")
10256 (set_attr "length" "8")
10257 (set_attr "type" "multiple")]
10258 )
10259
10260 (define_insn "*not_signextract_onebit"
10261 [(set (match_operand:SI 0 "s_register_operand" "=r")
10262 (not:SI
10263 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10264 (const_int 1)
10265 (match_operand:SI 2 "const_int_operand" "n"))))
10266 (clobber (reg:CC CC_REGNUM))]
10267 "TARGET_ARM"
10268 "*
10269 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10270 output_asm_insn (\"tst\\t%1, %2\", operands);
10271 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10272 return \"movne\\t%0, #0\";
10273 "
10274 [(set_attr "conds" "clob")
10275 (set_attr "length" "12")
10276 (set_attr "type" "multiple")]
10277 )
10278 ;; ??? The above patterns need auditing for Thumb-2
10279
10280 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10281 ;; expressions. For simplicity, the first register is also in the unspec
10282 ;; part.
10283 ;; To avoid the use of a GNU extension, the length attribute is computed
10284 ;; by the C function arm_attr_length_push_multi.
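;; As an illustrative sketch: a prologue saving {r4, r5, lr} is emitted by
;; this pattern as "push {r4, r5, lr}", while a single-register save in ARM
;; mode uses "str r4, [sp, #-4]!" instead (see the StrongARM note in the
;; output template below).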
10285 (define_insn "*push_multi"
10286 [(match_parallel 2 "multi_register_push"
10287 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
10288 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10289 UNSPEC_PUSH_MULT))])]
10290 ""
10291 "*
10292 {
10293 int num_saves = XVECLEN (operands[2], 0);
10294
10295 /* For the StrongARM at least it is faster to
10296 use STR to store only a single register.
10297 In Thumb mode always use push, and the assembler will pick
10298 something appropriate. */
10299 if (num_saves == 1 && TARGET_ARM)
10300 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
10301 else
10302 {
10303 int i;
10304 char pattern[100];
10305
10306 if (TARGET_32BIT)
10307 strcpy (pattern, \"push%?\\t{%1\");
10308 else
10309 strcpy (pattern, \"push\\t{%1\");
10310
10311 for (i = 1; i < num_saves; i++)
10312 {
10313 strcat (pattern, \", %|\");
10314 strcat (pattern,
10315 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10316 }
10317
10318 strcat (pattern, \"}\");
10319 output_asm_insn (pattern, operands);
10320 }
10321
10322 return \"\";
10323 }"
10324 [(set_attr "type" "store_16")
10325 (set (attr "length")
10326 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
10327 )
10328
10329 (define_insn "stack_tie"
10330 [(set (mem:BLK (scratch))
10331 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10332 (match_operand:SI 1 "s_register_operand" "rk")]
10333 UNSPEC_PRLG_STK))]
10334 ""
10335 ""
10336 [(set_attr "length" "0")
10337 (set_attr "type" "block")]
10338 )
10339
10340 ;; Pop (as used in epilogue RTL)
10341 ;;
10342 (define_insn "*load_multiple_with_writeback"
10343 [(match_parallel 0 "load_multiple_operation"
10344 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10345 (plus:SI (match_dup 1)
10346 (match_operand:SI 2 "const_int_I_operand" "I")))
10347 (set (match_operand:SI 3 "s_register_operand" "=rk")
10348 (mem:SI (match_dup 1)))
10349 ])]
10350 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10351 "*
10352 {
10353 arm_output_multireg_pop (operands, /*return_pc=*/false,
10354 /*cond=*/const_true_rtx,
10355 /*reverse=*/false,
10356 /*update=*/true);
10357 return \"\";
10358 }
10359 "
10360 [(set_attr "type" "load_16")
10361 (set_attr "predicable" "yes")
10362 (set (attr "length")
10363 (symbol_ref "arm_attr_length_pop_multi (operands,
10364 /*return_pc=*/false,
10365 /*write_back_p=*/true)"))]
10366 )
10367
10368 ;; Pop with return (as used in epilogue RTL)
10369 ;;
10370 ;; This instruction is generated when the registers are popped at the end of
10371 ;; the epilogue. Here, instead of popping the value into LR and then
10372 ;; generating a jump to LR, the value is popped directly into PC. Hence,
10373 ;; the pattern is combined with (return).
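;; As an illustrative sketch, a function that pushed {r4, r5, lr} in its
;; prologue can return through this pattern with a single "pop {r4, r5, pc}"
;; instead of popping into LR and branching to it.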
10374 (define_insn "*pop_multiple_with_writeback_and_return"
10375 [(match_parallel 0 "pop_multiple_return"
10376 [(return)
10377 (set (match_operand:SI 1 "s_register_operand" "+rk")
10378 (plus:SI (match_dup 1)
10379 (match_operand:SI 2 "const_int_I_operand" "I")))
10380 (set (match_operand:SI 3 "s_register_operand" "=rk")
10381 (mem:SI (match_dup 1)))
10382 ])]
10383 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10384 "*
10385 {
10386 arm_output_multireg_pop (operands, /*return_pc=*/true,
10387 /*cond=*/const_true_rtx,
10388 /*reverse=*/false,
10389 /*update=*/true);
10390 return \"\";
10391 }
10392 "
10393 [(set_attr "type" "load_16")
10394 (set_attr "predicable" "yes")
10395 (set (attr "length")
10396 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10397 /*write_back_p=*/true)"))]
10398 )
10399
10400 (define_insn "*pop_multiple_with_return"
10401 [(match_parallel 0 "pop_multiple_return"
10402 [(return)
10403 (set (match_operand:SI 2 "s_register_operand" "=rk")
10404 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
10405 ])]
10406 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10407 "*
10408 {
10409 arm_output_multireg_pop (operands, /*return_pc=*/true,
10410 /*cond=*/const_true_rtx,
10411 /*reverse=*/false,
10412 /*update=*/false);
10413 return \"\";
10414 }
10415 "
10416 [(set_attr "type" "load_16")
10417 (set_attr "predicable" "yes")
10418 (set (attr "length")
10419 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10420 /*write_back_p=*/false)"))]
10421 )
10422
10423 ;; Load into PC and return
10424 (define_insn "*ldr_with_return"
10425 [(return)
10426 (set (reg:SI PC_REGNUM)
10427 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
10428 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10429 "ldr%?\t%|pc, [%0], #4"
10430 [(set_attr "type" "load_4")
10431 (set_attr "predicable" "yes")]
10432 )
10433 ;; Pop for floating point registers (as used in epilogue RTL)
10434 (define_insn "*vfp_pop_multiple_with_writeback"
10435 [(match_parallel 0 "pop_multiple_fp"
10436 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10437 (plus:SI (match_dup 1)
10438 (match_operand:SI 2 "const_int_I_operand" "I")))
10439 (set (match_operand:DF 3 "vfp_hard_register_operand" "")
10440 (mem:DF (match_dup 1)))])]
10441 "TARGET_32BIT && TARGET_HARD_FLOAT"
10442 "*
10443 {
10444 int num_regs = XVECLEN (operands[0], 0);
10445 char pattern[100];
10446 rtx op_list[2];
10447 strcpy (pattern, \"vldm\\t\");
10448 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
10449 strcat (pattern, \"!, {\");
10450 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
10451 strcat (pattern, \"%P0\");
10452 if ((num_regs - 1) > 1)
10453 {
10454 strcat (pattern, \"-%P1\");
10455 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
10456 }
10457
10458 strcat (pattern, \"}\");
10459 output_asm_insn (pattern, op_list);
10460 return \"\";
10461 }
10462 "
10463 [(set_attr "type" "load_16")
10464 (set_attr "conds" "unconditional")
10465 (set_attr "predicable" "no")]
10466 )
10467
10468 ;; Special patterns for dealing with the constant pool
10469
10470 (define_insn "align_4"
10471 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10472 "TARGET_EITHER"
10473 "*
10474 assemble_align (32);
10475 return \"\";
10476 "
10477 [(set_attr "type" "no_insn")]
10478 )
10479
10480 (define_insn "align_8"
10481 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10482 "TARGET_EITHER"
10483 "*
10484 assemble_align (64);
10485 return \"\";
10486 "
10487 [(set_attr "type" "no_insn")]
10488 )
10489
10490 (define_insn "consttable_end"
10491 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10492 "TARGET_EITHER"
10493 "*
10494 making_const_table = FALSE;
10495 return \"\";
10496 "
10497 [(set_attr "type" "no_insn")]
10498 )
10499
10500 (define_insn "consttable_1"
10501 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10502 "TARGET_EITHER"
10503 "*
10504 making_const_table = TRUE;
10505 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10506 assemble_zeros (3);
10507 return \"\";
10508 "
10509 [(set_attr "length" "4")
10510 (set_attr "type" "no_insn")]
10511 )
10512
10513 (define_insn "consttable_2"
10514 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10515 "TARGET_EITHER"
10516 "*
10517 {
10518 rtx x = operands[0];
10519 making_const_table = TRUE;
10520 switch (GET_MODE_CLASS (GET_MODE (x)))
10521 {
10522 case MODE_FLOAT:
10523 arm_emit_fp16_const (x);
10524 break;
10525 default:
10526 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10527 assemble_zeros (2);
10528 break;
10529 }
10530 return \"\";
10531 }"
10532 [(set_attr "length" "4")
10533 (set_attr "type" "no_insn")]
10534 )
10535
10536 (define_insn "consttable_4"
10537 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10538 "TARGET_EITHER"
10539 "*
10540 {
10541 rtx x = operands[0];
10542 making_const_table = TRUE;
10543 scalar_float_mode float_mode;
10544 if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
10545 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
10546 else
10547 {
10548 /* XXX: Sometimes gcc does something really dumb and ends up with
10549 a HIGH in a constant pool entry, usually because it's trying to
10550 load into a VFP register. We know this will always be used in
10551 combination with a LO_SUM which ignores the high bits, so just
10552 strip off the HIGH. */
10553 if (GET_CODE (x) == HIGH)
10554 x = XEXP (x, 0);
10555 assemble_integer (x, 4, BITS_PER_WORD, 1);
10556 mark_symbol_refs_as_used (x);
10557 }
10558 return \"\";
10559 }"
10560 [(set_attr "length" "4")
10561 (set_attr "type" "no_insn")]
10562 )
10563
10564 (define_insn "consttable_8"
10565 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10566 "TARGET_EITHER"
10567 "*
10568 {
10569 making_const_table = TRUE;
10570 scalar_float_mode float_mode;
10571 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
10572 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
10573 float_mode, BITS_PER_WORD);
10574 else
10575 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10576 return \"\";
10577 }"
10578 [(set_attr "length" "8")
10579 (set_attr "type" "no_insn")]
10580 )
10581
10582 (define_insn "consttable_16"
10583 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10584 "TARGET_EITHER"
10585 "*
10586 {
10587 making_const_table = TRUE;
10588 scalar_float_mode float_mode;
10589 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
10590 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
10591 float_mode, BITS_PER_WORD);
10592 else
10593 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10594 return \"\";
10595 }"
10596 [(set_attr "length" "16")
10597 (set_attr "type" "no_insn")]
10598 )
10599
10600 ;; V5 instructions.
10601
10602 (define_insn "clzsi2"
10603 [(set (match_operand:SI 0 "s_register_operand" "=r")
10604 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10605 "TARGET_32BIT && arm_arch5t"
10606 "clz%?\\t%0, %1"
10607 [(set_attr "predicable" "yes")
10608 (set_attr "type" "clz")])
10609
10610 (define_insn "rbitsi2"
10611 [(set (match_operand:SI 0 "s_register_operand" "=r")
10612 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
10613 "TARGET_32BIT && arm_arch_thumb2"
10614 "rbit%?\\t%0, %1"
10615 [(set_attr "predicable" "yes")
10616 (set_attr "type" "clz")])
10617
10618 ;; Keep this as a CTZ expression until after reload and then split
10619 ;; into RBIT + CLZ. Since RBIT is represented as an UNSPEC it is unlikely
10620 ;; to fold with any other expression.
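;; As an illustrative sketch, __builtin_ctz (x) goes through this pattern and
;; is split after reload into, e.g.:
;;   rbit  r0, r1
;;   clz   r0, r0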
10621
10622 (define_insn_and_split "ctzsi2"
10623 [(set (match_operand:SI 0 "s_register_operand" "=r")
10624 (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10625 "TARGET_32BIT && arm_arch_thumb2"
10626 "#"
10627 "&& reload_completed"
10628 [(const_int 0)]
10629 "
10630 emit_insn (gen_rbitsi2 (operands[0], operands[1]));
10631 emit_insn (gen_clzsi2 (operands[0], operands[0]));
10632 DONE;
10633 ")
10634
10635 ;; V5E instructions.
10636
10637 (define_insn "prefetch"
10638 [(prefetch (match_operand:SI 0 "address_operand" "p")
10639 (match_operand:SI 1 "" "")
10640 (match_operand:SI 2 "" ""))]
10641 "TARGET_32BIT && arm_arch5te"
10642 "pld\\t%a0"
10643 [(set_attr "type" "load_4")]
10644 )
10645
10646 ;; General predication pattern
10647
10648 (define_cond_exec
10649 [(match_operator 0 "arm_comparison_operator"
10650 [(match_operand 1 "cc_register" "")
10651 (const_int 0)])]
10652 "TARGET_32BIT
10653 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
10654 ""
10655 [(set_attr "predicated" "yes")]
10656 )
10657
10658 (define_insn "force_register_use"
10659 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
10660 ""
10661 "%@ %0 needed"
10662 [(set_attr "length" "0")
10663 (set_attr "type" "no_insn")]
10664 )
10665
10666
10667 ;; Patterns for exception handling
10668
10669 (define_expand "eh_return"
10670 [(use (match_operand 0 "general_operand"))]
10671 "TARGET_EITHER"
10672 "
10673 {
10674 if (TARGET_32BIT)
10675 emit_insn (gen_arm_eh_return (operands[0]));
10676 else
10677 emit_insn (gen_thumb_eh_return (operands[0]));
10678 DONE;
10679 }"
10680 )
10681
10682 ;; We can't expand this before we know where the link register is stored.
10683 (define_insn_and_split "arm_eh_return"
10684 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10685 VUNSPEC_EH_RETURN)
10686 (clobber (match_scratch:SI 1 "=&r"))]
10687 "TARGET_ARM"
10688 "#"
10689 "&& reload_completed"
10690 [(const_int 0)]
10691 "
10692 {
10693 arm_set_return_address (operands[0], operands[1]);
10694 DONE;
10695 }"
10696 )
10697
10698 \f
10699 ;; TLS support
10700
10701 (define_insn "load_tp_hard"
10702 [(set (match_operand:SI 0 "register_operand" "=r")
10703 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10704 "TARGET_HARD_TP"
10705 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10706 [(set_attr "predicable" "yes")
10707 (set_attr "type" "mrs")]
10708 )
10709
10710 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10711 (define_insn "load_tp_soft_fdpic"
10712 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10713 (clobber (reg:SI FDPIC_REGNUM))
10714 (clobber (reg:SI LR_REGNUM))
10715 (clobber (reg:SI IP_REGNUM))
10716 (clobber (reg:CC CC_REGNUM))]
10717 "TARGET_SOFT_TP && TARGET_FDPIC"
10718 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10719 [(set_attr "conds" "clob")
10720 (set_attr "type" "branch")]
10721 )
10722
10723 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10724 (define_insn "load_tp_soft"
10725 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10726 (clobber (reg:SI LR_REGNUM))
10727 (clobber (reg:SI IP_REGNUM))
10728 (clobber (reg:CC CC_REGNUM))]
10729 "TARGET_SOFT_TP && !TARGET_FDPIC"
10730 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10731 [(set_attr "conds" "clob")
10732 (set_attr "type" "branch")]
10733 )
10734
10735 ;; TLS descriptor call
10736 (define_insn "tlscall"
10737 [(set (reg:SI R0_REGNUM)
10738 (unspec:SI [(reg:SI R0_REGNUM)
10739 (match_operand:SI 0 "" "X")
10740 (match_operand 1 "" "")] UNSPEC_TLS))
10741 (clobber (reg:SI R1_REGNUM))
10742 (clobber (reg:SI LR_REGNUM))
10743 (clobber (reg:SI CC_REGNUM))]
10744 "TARGET_GNU2_TLS"
10745 {
10746 targetm.asm_out.internal_label (asm_out_file, "LPIC",
10747 INTVAL (operands[1]));
10748 return "bl\\t%c0(tlscall)";
10749 }
10750 [(set_attr "conds" "clob")
10751 (set_attr "length" "4")
10752 (set_attr "type" "branch")]
10753 )
10754
10755 ;; For thread pointer builtin
10756 (define_expand "get_thread_pointersi"
10757 [(match_operand:SI 0 "s_register_operand")]
10758 ""
10759 "
10760 {
10761 arm_load_tp (operands[0]);
10762 DONE;
10763 }")
10764
10765 ;;
10766
10767 ;; We only care about the lower 16 bits of the constant
10768 ;; being inserted into the upper 16 bits of the register.
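;; As an illustrative sketch (register allocation permitting), C that only
;; rewrites the top halfword, such as
;;   x = (x & 0xffff) | (0xabcd << 16);
;; can match this pattern and be emitted as "movt r0, #43981".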
10769 (define_insn "*arm_movtas_ze"
10770 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
10771 (const_int 16)
10772 (const_int 16))
10773 (match_operand:SI 1 "const_int_operand" ""))]
10774 "TARGET_HAVE_MOVT"
10775 "@
10776 movt%?\t%0, %L1
10777 movt\t%0, %L1"
10778 [(set_attr "arch" "32,v8mb")
10779 (set_attr "predicable" "yes")
10780 (set_attr "length" "4")
10781 (set_attr "type" "alu_sreg")]
10782 )
10783
10784 (define_insn "*arm_rev"
10785 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
10786 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
10787 "arm_arch6"
10788 "@
10789 rev\t%0, %1
10790 rev%?\t%0, %1
10791 rev%?\t%0, %1"
10792 [(set_attr "arch" "t1,t2,32")
10793 (set_attr "length" "2,2,4")
10794 (set_attr "predicable" "no,yes,yes")
10795 (set_attr "type" "rev")]
10796 )
10797
10798 (define_expand "arm_legacy_rev"
10799 [(set (match_operand:SI 2 "s_register_operand")
10800 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
10801 (const_int 16))
10802 (match_dup 1)))
10803 (set (match_dup 2)
10804 (lshiftrt:SI (match_dup 2)
10805 (const_int 8)))
10806 (set (match_operand:SI 3 "s_register_operand")
10807 (rotatert:SI (match_dup 1)
10808 (const_int 8)))
10809 (set (match_dup 2)
10810 (and:SI (match_dup 2)
10811 (const_int -65281)))
10812 (set (match_operand:SI 0 "s_register_operand")
10813 (xor:SI (match_dup 3)
10814 (match_dup 2)))]
10815 "TARGET_32BIT"
10816 ""
10817 )
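;; In C terms, the expansion above computes roughly:
;;   t = (x ^ rotr (x, 16)) >> 8;   /* rotr = rotate right by n bits.  */
;;   t &= 0xffff00ff;               /* Clear bits 15:8 of the temporary.  */
;;   result = rotr (x, 8) ^ t;
;; which older cores use in place of a single rev instruction.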
10818
10819 ;; Reuse temporaries to keep register pressure down.
10820 (define_expand "thumb_legacy_rev"
10821 [(set (match_operand:SI 2 "s_register_operand")
10822 (ashift:SI (match_operand:SI 1 "s_register_operand")
10823 (const_int 24)))
10824 (set (match_operand:SI 3 "s_register_operand")
10825 (lshiftrt:SI (match_dup 1)
10826 (const_int 24)))
10827 (set (match_dup 3)
10828 (ior:SI (match_dup 3)
10829 (match_dup 2)))
10830 (set (match_operand:SI 4 "s_register_operand")
10831 (const_int 16))
10832 (set (match_operand:SI 5 "s_register_operand")
10833 (rotatert:SI (match_dup 1)
10834 (match_dup 4)))
10835 (set (match_dup 2)
10836 (ashift:SI (match_dup 5)
10837 (const_int 24)))
10838 (set (match_dup 5)
10839 (lshiftrt:SI (match_dup 5)
10840 (const_int 24)))
10841 (set (match_dup 5)
10842 (ior:SI (match_dup 5)
10843 (match_dup 2)))
10844 (set (match_dup 5)
10845 (rotatert:SI (match_dup 5)
10846 (match_dup 4)))
10847 (set (match_operand:SI 0 "s_register_operand")
10848 (ior:SI (match_dup 5)
10849 (match_dup 3)))]
10850 "TARGET_THUMB"
10851 ""
10852 )
10853
10854 ;; ARM-specific expansion of signed mod by power of 2
10855 ;; using conditional negate.
10856 ;; For r0 % n where n is a power of 2, produce:
10857 ;; rsbs r1, r0, #0
10858 ;; and r0, r0, #(n - 1)
10859 ;; and r1, r1, #(n - 1)
10860 ;; rsbpl r0, r1, #0
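;; As an illustrative sketch, for
;;   int f (int x) { return x % 16; }
;; this expander produces (modulo register allocation):
;;   rsbs  r1, r0, #0
;;   and   r0, r0, #15
;;   and   r1, r1, #15
;;   rsbpl r0, r1, #0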
10861
10862 (define_expand "modsi3"
10863 [(match_operand:SI 0 "register_operand")
10864 (match_operand:SI 1 "register_operand")
10865 (match_operand:SI 2 "const_int_operand")]
10866 "TARGET_32BIT"
10867 {
10868 HOST_WIDE_INT val = INTVAL (operands[2]);
10869
10870 if (val <= 0
10871 || exact_log2 (val) <= 0)
10872 FAIL;
10873
10874 rtx mask = GEN_INT (val - 1);
10875
10876 /* In the special case of r0 % 2 we can do the even shorter:
10877 cmp r0, #0
10878 and r0, r0, #1
10879 rsblt r0, r0, #0. */
10880
10881 if (val == 2)
10882 {
10883 rtx cc_reg = arm_gen_compare_reg (LT,
10884 operands[1], const0_rtx, NULL_RTX);
10885 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
10886 rtx masked = gen_reg_rtx (SImode);
10887
10888 emit_insn (gen_andsi3 (masked, operands[1], mask));
10889 emit_move_insn (operands[0],
10890 gen_rtx_IF_THEN_ELSE (SImode, cond,
10891 gen_rtx_NEG (SImode,
10892 masked),
10893 masked));
10894 DONE;
10895 }
10896
10897 rtx neg_op = gen_reg_rtx (SImode);
10898 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
10899 operands[1]));
10900
10901 /* Extract the condition register and mode. */
10902 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
10903 rtx cc_reg = SET_DEST (cmp);
10904 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
10905
10906 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
10907
10908 rtx masked_neg = gen_reg_rtx (SImode);
10909 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
10910
10911 /* We want a conditional negate here, but emitting COND_EXEC rtxes
10912 during expand does not always work. Do an IF_THEN_ELSE instead. */
10913 emit_move_insn (operands[0],
10914 gen_rtx_IF_THEN_ELSE (SImode, cond,
10915 gen_rtx_NEG (SImode, masked_neg),
10916 operands[0]));
10917
10918
10919 DONE;
10920 }
10921 )
10922
10923 (define_expand "bswapsi2"
10924 [(set (match_operand:SI 0 "s_register_operand")
10925 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
10926 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
10927 "
10928 if (!arm_arch6)
10929 {
10930 rtx op2 = gen_reg_rtx (SImode);
10931 rtx op3 = gen_reg_rtx (SImode);
10932
10933 if (TARGET_THUMB)
10934 {
10935 rtx op4 = gen_reg_rtx (SImode);
10936 rtx op5 = gen_reg_rtx (SImode);
10937
10938 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
10939 op2, op3, op4, op5));
10940 }
10941 else
10942 {
10943 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
10944 op2, op3));
10945 }
10946
10947 DONE;
10948 }
10949 "
10950 )
10951
10952 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
10953 ;; and unsigned variants, respectively. For rev16, expose
10954 ;; byte-swapping in the lower 16 bits only.
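;; As an illustrative sketch (the exact choice depends on how the surrounding
;; extension is combined):
;;   short          f (short x)          { return __builtin_bswap16 (x); }
;;   unsigned short g (unsigned short x) { return __builtin_bswap16 (x); }
;; typically become a single revsh and rev16, respectively.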
10955 (define_insn "*arm_revsh"
10956 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
10957 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
10958 "arm_arch6"
10959 "@
10960 revsh\t%0, %1
10961 revsh%?\t%0, %1
10962 revsh%?\t%0, %1"
10963 [(set_attr "arch" "t1,t2,32")
10964 (set_attr "length" "2,2,4")
10965 (set_attr "type" "rev")]
10966 )
10967
10968 (define_insn "*arm_rev16"
10969 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
10970 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
10971 "arm_arch6"
10972 "@
10973 rev16\t%0, %1
10974 rev16%?\t%0, %1
10975 rev16%?\t%0, %1"
10976 [(set_attr "arch" "t1,t2,32")
10977 (set_attr "length" "2,2,4")
10978 (set_attr "type" "rev")]
10979 )
10980
10981 ;; There are no canonicalisation rules for the position of the lshiftrt, ashift
10982 ;; operations within an IOR/AND RTX; therefore we have two patterns matching
10983 ;; each valid permutation.
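;; As an illustrative sketch, both patterns recognise the usual open-coded
;; halfword byte swap, e.g.
;;   ((x << 8) & 0xff00ff00) | ((x >> 8) & 0x00ff00ff)
;; and emit a single "rev16".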
10984
10985 (define_insn "arm_rev16si2"
10986 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
10987 (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
10988 (const_int 8))
10989 (match_operand:SI 3 "const_int_operand" "n,n,n"))
10990 (and:SI (lshiftrt:SI (match_dup 1)
10991 (const_int 8))
10992 (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
10993 "arm_arch6
10994 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
10995 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
10996 "rev16\\t%0, %1"
10997 [(set_attr "arch" "t1,t2,32")
10998 (set_attr "length" "2,2,4")
10999 (set_attr "type" "rev")]
11000 )
11001
11002 (define_insn "arm_rev16si2_alt"
11003 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11004 (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
11005 (const_int 8))
11006 (match_operand:SI 2 "const_int_operand" "n,n,n"))
11007 (and:SI (ashift:SI (match_dup 1)
11008 (const_int 8))
11009 (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
11010 "arm_arch6
11011 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11012 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11013 "rev16\\t%0, %1"
11014 [(set_attr "arch" "t1,t2,32")
11015 (set_attr "length" "2,2,4")
11016 (set_attr "type" "rev")]
11017 )
11018
11019 (define_expand "bswaphi2"
11020 [(set (match_operand:HI 0 "s_register_operand")
11021 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
11022 "arm_arch6"
11023 ""
11024 )
11025
11026 ;; Patterns for LDRD/STRD in Thumb-2 mode
11027
11028 (define_insn "*thumb2_ldrd"
11029 [(set (match_operand:SI 0 "s_register_operand" "=r")
11030 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11031 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
11032 (set (match_operand:SI 3 "s_register_operand" "=r")
11033 (mem:SI (plus:SI (match_dup 1)
11034 (match_operand:SI 4 "const_int_operand" ""))))]
11035 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11036 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
11037 && (operands_ok_ldrd_strd (operands[0], operands[3],
11038 operands[1], INTVAL (operands[2]),
11039 false, true))"
11040 "ldrd%?\t%0, %3, [%1, %2]"
11041 [(set_attr "type" "load_8")
11042 (set_attr "predicable" "yes")])
11043
11044 (define_insn "*thumb2_ldrd_base"
11045 [(set (match_operand:SI 0 "s_register_operand" "=r")
11046 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11047 (set (match_operand:SI 2 "s_register_operand" "=r")
11048 (mem:SI (plus:SI (match_dup 1)
11049 (const_int 4))))]
11050 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11051 && (operands_ok_ldrd_strd (operands[0], operands[2],
11052 operands[1], 0, false, true))"
11053 "ldrd%?\t%0, %2, [%1]"
11054 [(set_attr "type" "load_8")
11055 (set_attr "predicable" "yes")])
11056
11057 (define_insn "*thumb2_ldrd_base_neg"
11058 [(set (match_operand:SI 0 "s_register_operand" "=r")
11059 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11060 (const_int -4))))
11061 (set (match_operand:SI 2 "s_register_operand" "=r")
11062 (mem:SI (match_dup 1)))]
11063 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11064 && (operands_ok_ldrd_strd (operands[0], operands[2],
11065 operands[1], -4, false, true))"
11066 "ldrd%?\t%0, %2, [%1, #-4]"
11067 [(set_attr "type" "load_8")
11068 (set_attr "predicable" "yes")])
11069
11070 (define_insn "*thumb2_strd"
11071 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11072 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
11073 (match_operand:SI 2 "s_register_operand" "r"))
11074 (set (mem:SI (plus:SI (match_dup 0)
11075 (match_operand:SI 3 "const_int_operand" "")))
11076 (match_operand:SI 4 "s_register_operand" "r"))]
11077 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11078 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
11079 && (operands_ok_ldrd_strd (operands[2], operands[4],
11080 operands[0], INTVAL (operands[1]),
11081 false, false))"
11082 "strd%?\t%2, %4, [%0, %1]"
11083 [(set_attr "type" "store_8")
11084 (set_attr "predicable" "yes")])
11085
11086 (define_insn "*thumb2_strd_base"
11087 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
11088 (match_operand:SI 1 "s_register_operand" "r"))
11089 (set (mem:SI (plus:SI (match_dup 0)
11090 (const_int 4)))
11091 (match_operand:SI 2 "s_register_operand" "r"))]
11092 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11093 && (operands_ok_ldrd_strd (operands[1], operands[2],
11094 operands[0], 0, false, false))"
11095 "strd%?\t%1, %2, [%0]"
11096 [(set_attr "type" "store_8")
11097 (set_attr "predicable" "yes")])
11098
11099 (define_insn "*thumb2_strd_base_neg"
11100 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11101 (const_int -4)))
11102 (match_operand:SI 1 "s_register_operand" "r"))
11103 (set (mem:SI (match_dup 0))
11104 (match_operand:SI 2 "s_register_operand" "r"))]
11105 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11106 && (operands_ok_ldrd_strd (operands[1], operands[2],
11107 operands[0], -4, false, false))"
11108 "strd%?\t%1, %2, [%0, #-4]"
11109 [(set_attr "type" "store_8")
11110 (set_attr "predicable" "yes")])
11111
11112 ;; ARMv8 CRC32 instructions.
11113 (define_insn "arm_<crc_variant>"
11114 [(set (match_operand:SI 0 "s_register_operand" "=r")
11115 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
11116 (match_operand:<crc_mode> 2 "s_register_operand" "r")]
11117 CRC))]
11118 "TARGET_CRC32"
11119 "<crc_variant>\\t%0, %1, %2"
11120 [(set_attr "type" "crc")
11121 (set_attr "conds" "unconditional")]
11122 )
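;; As an illustrative sketch, the ACLE intrinsics from arm_acle.h map onto
;; these patterns, e.g. __crc32b (crc, byte) becomes "crc32b r0, r0, r1"
;; (register names shown for illustration only).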
11123
11124 ;; Load the load/store double peephole optimizations.
11125 (include "ldrdstrd.md")
11126
11127 ;; Load the load/store multiple patterns
11128 (include "ldmstm.md")
11129
11130 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
11131 ;; large lists without explicit writeback, as generated for the APCS_FRAME
11132 ;; epilogue. The operands are validated through the load_multiple_operation
11133 ;; match_parallel predicate rather than through constraints, so enable it only
11134 ;; after reload.
11135 (define_insn "*load_multiple"
11136 [(match_parallel 0 "load_multiple_operation"
11137 [(set (match_operand:SI 2 "s_register_operand" "=rk")
11138 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11139 ])]
11140 "TARGET_32BIT && reload_completed"
11141 "*
11142 {
11143 arm_output_multireg_pop (operands, /*return_pc=*/false,
11144 /*cond=*/const_true_rtx,
11145 /*reverse=*/false,
11146 /*update=*/false);
11147 return \"\";
11148 }
11149 "
11150 [(set_attr "predicable" "yes")]
11151 )
11152
11153 (define_expand "copysignsf3"
11154 [(match_operand:SF 0 "register_operand")
11155 (match_operand:SF 1 "register_operand")
11156 (match_operand:SF 2 "register_operand")]
11157 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11158 "{
11159 emit_move_insn (operands[0], operands[2]);
11160 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
11161 GEN_INT (31), GEN_INT (0),
11162 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
11163 DONE;
11164 }"
11165 )
11166
11167 (define_expand "copysigndf3"
11168 [(match_operand:DF 0 "register_operand")
11169 (match_operand:DF 1 "register_operand")
11170 (match_operand:DF 2 "register_operand")]
11171 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11172 "{
11173 rtx op0_low = gen_lowpart (SImode, operands[0]);
11174 rtx op0_high = gen_highpart (SImode, operands[0]);
11175 rtx op1_low = gen_lowpart (SImode, operands[1]);
11176 rtx op1_high = gen_highpart (SImode, operands[1]);
11177 rtx op2_high = gen_highpart (SImode, operands[2]);
11178
11179 rtx scratch1 = gen_reg_rtx (SImode);
11180 rtx scratch2 = gen_reg_rtx (SImode);
11181 emit_move_insn (scratch1, op2_high);
11182 emit_move_insn (scratch2, op1_high);
11183
11184 emit_insn (gen_rtx_SET (scratch1,
11185 gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT (31))));
11186 emit_insn (gen_insv_t2 (scratch2, GEN_INT (1), GEN_INT (31), scratch1));
11187 emit_move_insn (op0_low, op1_low);
11188 emit_move_insn (op0_high, scratch2);
11189
11190 DONE;
11191 }"
11192 )
11193
11194 ;; movmisalign patterns for HImode and SImode.
11195 (define_expand "movmisalign<mode>"
11196 [(match_operand:HSI 0 "general_operand")
11197 (match_operand:HSI 1 "general_operand")]
11198 "unaligned_access"
11199 {
11200 /* This pattern is not permitted to fail during expansion: if both arguments
11201 are non-registers (e.g. memory := constant), force operand 1 into a
11202 register. */
11203 rtx (* gen_unaligned_load)(rtx, rtx);
11204 rtx tmp_dest = operands[0];
11205 if (!s_register_operand (operands[0], <MODE>mode)
11206 && !s_register_operand (operands[1], <MODE>mode))
11207 operands[1] = force_reg (<MODE>mode, operands[1]);
11208
11209 if (<MODE>mode == HImode)
11210 {
11211 gen_unaligned_load = gen_unaligned_loadhiu;
11212 tmp_dest = gen_reg_rtx (SImode);
11213 }
11214 else
11215 gen_unaligned_load = gen_unaligned_loadsi;
11216
11217 if (MEM_P (operands[1]))
11218 {
11219 emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
11220 if (<MODE>mode == HImode)
11221 emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
11222 }
11223 else
11224 emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
11225
11226 DONE;
11227 })
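;; As an illustrative sketch, a misaligned field access such as
;;   struct __attribute__ ((packed)) s { char c; int i; };
;;   int f (struct s *p) { return p->i; }
;; can go through this expander when unaligned_access is enabled and be
;; emitted as a single unaligned "ldr" instead of a byte-by-byte sequence.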
11228
11229 (define_insn "arm_<cdp>"
11230 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11231 (match_operand:SI 1 "immediate_operand" "n")
11232 (match_operand:SI 2 "immediate_operand" "n")
11233 (match_operand:SI 3 "immediate_operand" "n")
11234 (match_operand:SI 4 "immediate_operand" "n")
11235 (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
11236 "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
11237 {
11238 arm_const_bounds (operands[0], 0, 16);
11239 arm_const_bounds (operands[1], 0, 16);
11240 arm_const_bounds (operands[2], 0, (1 << 5));
11241 arm_const_bounds (operands[3], 0, (1 << 5));
11242 arm_const_bounds (operands[4], 0, (1 << 5));
11243 arm_const_bounds (operands[5], 0, 8);
11244 return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
11245 }
11246 [(set_attr "length" "4")
11247 (set_attr "type" "coproc")])
11248
11249 (define_insn "*ldc"
11250 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11251 (match_operand:SI 1 "immediate_operand" "n")
11252 (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
11253 "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
11254 {
11255 arm_const_bounds (operands[0], 0, 16);
11256 arm_const_bounds (operands[1], 0, (1 << 5));
11257 return "<ldc>\\tp%c0, CR%c1, %2";
11258 }
11259 [(set_attr "length" "4")
11260 (set_attr "type" "coproc")])
11261
11262 (define_insn "*stc"
11263 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11264 (match_operand:SI 1 "immediate_operand" "n")
11265 (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
11266 "arm_coproc_builtin_available (VUNSPEC_<STC>)"
11267 {
11268 arm_const_bounds (operands[0], 0, 16);
11269 arm_const_bounds (operands[1], 0, (1 << 5));
11270 return "<stc>\\tp%c0, CR%c1, %2";
11271 }
11272 [(set_attr "length" "4")
11273 (set_attr "type" "coproc")])
11274
11275 (define_expand "arm_<ldc>"
11276 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11277 (match_operand:SI 1 "immediate_operand")
11278 (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
11279 "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
11280
11281 (define_expand "arm_<stc>"
11282 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11283 (match_operand:SI 1 "immediate_operand")
11284 (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
11285 "arm_coproc_builtin_available (VUNSPEC_<STC>)")
11286
11287 (define_insn "arm_<mcr>"
11288 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11289 (match_operand:SI 1 "immediate_operand" "n")
11290 (match_operand:SI 2 "s_register_operand" "r")
11291 (match_operand:SI 3 "immediate_operand" "n")
11292 (match_operand:SI 4 "immediate_operand" "n")
11293 (match_operand:SI 5 "immediate_operand" "n")] MCRI)
11294 (use (match_dup 2))]
11295 "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
11296 {
11297 arm_const_bounds (operands[0], 0, 16);
11298 arm_const_bounds (operands[1], 0, 8);
11299 arm_const_bounds (operands[3], 0, (1 << 5));
11300 arm_const_bounds (operands[4], 0, (1 << 5));
11301 arm_const_bounds (operands[5], 0, 8);
11302 return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
11303 }
11304 [(set_attr "length" "4")
11305 (set_attr "type" "coproc")])
11306
11307 (define_insn "arm_<mrc>"
11308 [(set (match_operand:SI 0 "s_register_operand" "=r")
11309 (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
11310 (match_operand:SI 2 "immediate_operand" "n")
11311 (match_operand:SI 3 "immediate_operand" "n")
11312 (match_operand:SI 4 "immediate_operand" "n")
11313 (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
11314 "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
11315 {
11316 arm_const_bounds (operands[1], 0, 16);
11317 arm_const_bounds (operands[2], 0, 8);
11318 arm_const_bounds (operands[3], 0, (1 << 5));
11319 arm_const_bounds (operands[4], 0, (1 << 5));
11320 arm_const_bounds (operands[5], 0, 8);
11321 return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
11322 }
11323 [(set_attr "length" "4")
11324 (set_attr "type" "coproc")])
11325
11326 (define_insn "arm_<mcrr>"
11327 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11328 (match_operand:SI 1 "immediate_operand" "n")
11329 (match_operand:DI 2 "s_register_operand" "r")
11330 (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
11331 (use (match_dup 2))]
11332 "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
11333 {
11334 arm_const_bounds (operands[0], 0, 16);
11335 arm_const_bounds (operands[1], 0, 8);
11336 arm_const_bounds (operands[3], 0, (1 << 5));
11337 return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
11338 }
11339 [(set_attr "length" "4")
11340 (set_attr "type" "coproc")])
11341
11342 (define_insn "arm_<mrrc>"
11343 [(set (match_operand:DI 0 "s_register_operand" "=r")
11344 (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
11345 (match_operand:SI 2 "immediate_operand" "n")
11346 (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
11347 "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
11348 {
11349 arm_const_bounds (operands[1], 0, 16);
11350 arm_const_bounds (operands[2], 0, 8);
11351 arm_const_bounds (operands[3], 0, (1 << 5));
11352 return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
11353 }
11354 [(set_attr "length" "4")
11355 (set_attr "type" "coproc")])
11356
11357 (define_expand "speculation_barrier"
11358 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11359 "TARGET_EITHER"
11360 "
11361 /* For Thumb-1 (except Armv8 derivatives) and for pre-Armv7 cores we
11362 don't have a usable barrier (and probably don't need one in practice).
11363 But to be safe if such code is run on later architectures, call a
11364 helper function in libgcc that will do the right thing for the
11365 running system. */
11366 if (!(arm_arch7 || arm_arch8))
11367 {
11368 arm_emit_speculation_barrier_function ();
11369 DONE;
11370 }
11371 "
11372 )
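;; As an illustrative sketch, this named pattern is what GCC falls back on
;; when it needs an explicit speculation barrier, e.g. when expanding
;; __builtin_speculation_safe_value; on Armv7/Armv8 it emits the "isb; dsb sy"
;; pair below, while older cores call the libgcc helper mentioned above.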
11373
11374 ;; Generate a hard speculation barrier when we have not enabled speculation
11375 ;; tracking.
11376 (define_insn "*speculation_barrier_insn"
11377 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11378 "arm_arch7 || arm_arch8"
11379 "isb\;dsb\\tsy"
11380 [(set_attr "type" "block")
11381 (set_attr "length" "8")]
11382 )
11383
11384 ;; Vector bits common to IWMMXT and Neon
11385 (include "vec-common.md")
11386 ;; Load the Intel Wireless Multimedia Extension patterns
11387 (include "iwmmxt.md")
11388 ;; Load the VFP co-processor patterns
11389 (include "vfp.md")
11390 ;; Thumb-1 patterns
11391 (include "thumb1.md")
11392 ;; Thumb-2 patterns
11393 (include "thumb2.md")
11394 ;; Neon patterns
11395 (include "neon.md")
11396 ;; Crypto patterns
11397 (include "crypto.md")
11398 ;; Synchronization Primitives
11399 (include "sync.md")
11400 ;; Fixed-point patterns
11401 (include "arm-fixed.md")