1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2019 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
6
7 ;; This file is part of GCC.
8
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
13
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
18
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
22
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
24
25 \f
26 ;;---------------------------------------------------------------------------
27 ;; Constants
28
29 ;; Register numbers -- All machine registers should be defined here
30 (define_constants
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (R4_REGNUM 4) ; Fifth CORE register
34 (FDPIC_REGNUM 9) ; FDPIC register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (LAST_ARM_REGNUM 15) ;
40 (CC_REGNUM 100) ; Condition code pseudo register
41 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
42 ]
43 )
44 ;; 3rd operand to select_dominance_cc_mode
45 (define_constants
46 [(DOM_CC_X_AND_Y 0)
47 (DOM_CC_NX_OR_Y 1)
48 (DOM_CC_X_OR_Y 2)
49 ]
50 )
51 ;; conditional compare combination
52 (define_constants
53 [(CMP_CMP 0)
54 (CMN_CMP 1)
55 (CMP_CMN 2)
56 (CMN_CMN 3)
57 (NUM_OF_COND_CMP 4)
58 ]
59 )
60
61 \f
62 ;;---------------------------------------------------------------------------
63 ;; Attributes
64
65 ;; Processor type. This is created automatically from arm-cores.def.
66 (include "arm-tune.md")
67
68 ;; Instruction classification types
69 (include "types.md")
70
71 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
72 ; generating ARM code. This is used to control the length of some insn
73 ; patterns that share the same RTL in both ARM and Thumb code.
74 (define_attr "is_thumb" "yes,no"
75 (const (if_then_else (symbol_ref "TARGET_THUMB")
76 (const_string "yes") (const_string "no"))))
77
78 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
79 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
80
81 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
82 (define_attr "is_thumb1" "yes,no"
83 (const (if_then_else (symbol_ref "TARGET_THUMB1")
84 (const_string "yes") (const_string "no"))))
85
86 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
87 ; The arm_restrict_it flag enables the "short IT" feature which
88 ; restricts IT blocks to a single 16-bit instruction.
89 ; This attribute should only be used on 16-bit Thumb-2 instructions
90 ; which may be predicated (the "predicable" attribute must be set).
91 (define_attr "predicable_short_it" "no,yes" (const_string "no"))
92
93 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
94 ; This attribute should only be used on instructions which may emit
95 ; an IT block in their expansion which is not a short IT.
96 (define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
97
98 ;; Operand number of an input operand that is shifted. Zero if the
99 ;; given instruction does not shift one of its input operands.
100 (define_attr "shift" "" (const_int 0))
101
102 ;; [For compatibility with AArch64 in pipeline models]
103 ;; Attribute that specifies whether or not the instruction touches fp
104 ;; registers.
105 (define_attr "fp" "no,yes" (const_string "no"))
106
107 ; Floating Point Unit. If we only have floating point emulation, then there
108 ; is no point in scheduling the floating point insns. (Well, for best
109 ; performance we should try and group them together).
110 (define_attr "fpu" "none,vfp"
111 (const (symbol_ref "arm_fpu_attr")))
112
113 ; Predicated means that the insn form is conditionally executed based on a
114 ; predicate. We default to 'no' because no Thumb patterns match this rule
115 ; and not all ARM insns do.
116 (define_attr "predicated" "yes,no" (const_string "no"))
117
118 ; LENGTH of an instruction (in bytes)
119 (define_attr "length" ""
120 (const_int 4))
121
122 ; The architecture which supports the instruction (or alternative).
123 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
124 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
125 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
126 ; arm_arch6. "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
127 ; Baseline. This attribute is used to compute attribute "enabled";
128 ; use the value "any" to enable an alternative in all cases.
129 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,v8mb,iwmmxt,iwmmxt2,armv6_or_vfpv3,neon"
130 (const_string "any"))
131
132 (define_attr "arch_enabled" "no,yes"
133 (cond [(eq_attr "arch" "any")
134 (const_string "yes")
135
136 (and (eq_attr "arch" "a")
137 (match_test "TARGET_ARM"))
138 (const_string "yes")
139
140 (and (eq_attr "arch" "t")
141 (match_test "TARGET_THUMB"))
142 (const_string "yes")
143
144 (and (eq_attr "arch" "t1")
145 (match_test "TARGET_THUMB1"))
146 (const_string "yes")
147
148 (and (eq_attr "arch" "t2")
149 (match_test "TARGET_THUMB2"))
150 (const_string "yes")
151
152 (and (eq_attr "arch" "32")
153 (match_test "TARGET_32BIT"))
154 (const_string "yes")
155
156 (and (eq_attr "arch" "v6")
157 (match_test "TARGET_32BIT && arm_arch6"))
158 (const_string "yes")
159
160 (and (eq_attr "arch" "nov6")
161 (match_test "TARGET_32BIT && !arm_arch6"))
162 (const_string "yes")
163
164 (and (eq_attr "arch" "v6t2")
165 (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
166 (const_string "yes")
167
168 (and (eq_attr "arch" "v8mb")
169 (match_test "TARGET_THUMB1 && arm_arch8"))
170 (const_string "yes")
171
172 (and (eq_attr "arch" "iwmmxt2")
173 (match_test "TARGET_REALLY_IWMMXT2"))
174 (const_string "yes")
175
176 (and (eq_attr "arch" "armv6_or_vfpv3")
177 (match_test "arm_arch6 || TARGET_VFP3"))
178 (const_string "yes")
179
180 (and (eq_attr "arch" "neon")
181 (match_test "TARGET_NEON"))
182 (const_string "yes")
183 ]
184
185 (const_string "no")))
186
187 (define_attr "opt" "any,speed,size"
188 (const_string "any"))
189
190 (define_attr "opt_enabled" "no,yes"
191 (cond [(eq_attr "opt" "any")
192 (const_string "yes")
193
194 (and (eq_attr "opt" "speed")
195 (match_test "optimize_function_for_speed_p (cfun)"))
196 (const_string "yes")
197
198 (and (eq_attr "opt" "size")
199 (match_test "optimize_function_for_size_p (cfun)"))
200 (const_string "yes")]
201 (const_string "no")))
202
203 (define_attr "use_literal_pool" "no,yes"
204 (cond [(and (eq_attr "type" "f_loads,f_loadd")
205 (match_test "CONSTANT_P (operands[1])"))
206 (const_string "yes")]
207 (const_string "no")))
208
209 ; Enable all alternatives that are both arch_enabled and insn_enabled.
210 ; FIXME: opt_enabled has been temporarily removed until we have
211 ; an attribute that allows the use of such alternatives.
212 ; This depends on caching of speed_p, size_p on a per
213 ; alternative basis. The problem is that the enabled attribute
214 ; cannot depend on any state that is not cached or is not constant
215 ; for a compilation unit. We probably need a generic "hot/cold"
216 ; alternative which if implemented can help with this. We disable this
217 ; until such a time as this is implemented and / or the improvements or
218 ; regressions with removing this attribute are double checked.
219 ; See ashldi3_neon and <shift>di3_neon in neon.md.
220
221 (define_attr "enabled" "no,yes"
222 (cond [(and (eq_attr "predicable_short_it" "no")
223 (and (eq_attr "predicated" "yes")
224 (match_test "arm_restrict_it")))
225 (const_string "no")
226
227 (and (eq_attr "enabled_for_short_it" "no")
228 (match_test "arm_restrict_it"))
229 (const_string "no")
230
231 (eq_attr "arch_enabled" "no")
232 (const_string "no")]
233 (const_string "yes")))
234
235 ; POOL_RANGE is how far away from a constant pool entry that this insn
236 ; can be placed. If the distance is zero, then this insn will never
237 ; reference the pool.
238 ; Note that for Thumb constant pools the PC value is rounded down to the
239 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
240 ; Thumb insns) should be set to <max_range> - 2.
241 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
242 ; before its address. It is set to <max_range> - (8 + <data_size>).
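; As a purely illustrative example with made-up numbers (no real pattern is
; implied): for a load that can reach <max_range> = 1000 bytes ahead of the
; PC and that fetches 4 bytes of data, the Thumb POOL_RANGE would be
; 1000 - 2 = 998 and NEG_POOL_RANGE would be 1000 - (8 + 4) = 988.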
243 (define_attr "arm_pool_range" "" (const_int 0))
244 (define_attr "thumb2_pool_range" "" (const_int 0))
245 (define_attr "arm_neg_pool_range" "" (const_int 0))
246 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
247
248 (define_attr "pool_range" ""
249 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
250 (attr "arm_pool_range")))
251 (define_attr "neg_pool_range" ""
252 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
253 (attr "arm_neg_pool_range")))
254
255 ; An assembler sequence may clobber the condition codes without us knowing.
256 ; If such an insn references the pool, then we have no way of knowing how,
257 ; so use the most conservative value for pool_range.
258 (define_asm_attributes
259 [(set_attr "conds" "clob")
260 (set_attr "length" "4")
261 (set_attr "pool_range" "250")])
262
263 ; Load scheduling, set from the arm_ld_sched variable
264 ; initialized by arm_option_override()
265 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
266
267 ; condition codes: this one is used by final_prescan_insn to speed up
268 ; conditionalizing instructions. It saves having to scan the rtl to see if
269 ; it uses or alters the condition codes.
270 ;
271 ; USE means that the condition codes are used by the insn in the process of
272 ; outputting code; this means (at present) that we can't use the insn in
273 ; inlined branches.
274 ;
275 ; SET means that the purpose of the insn is to set the condition codes in a
276 ; well defined manner.
277 ;
278 ; CLOB means that the condition codes are altered in an undefined manner, if
279 ; they are altered at all
280 ;
281 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
282 ; that the instruction does not use or alter the condition codes.
283 ;
284 ; NOCOND means that the instruction does not use or alter the condition
285 ; codes but can be converted into a conditionally executed instruction.
286
287 (define_attr "conds" "use,set,clob,unconditional,nocond"
288 (if_then_else
289 (ior (eq_attr "is_thumb1" "yes")
290 (eq_attr "type" "call"))
291 (const_string "clob")
292 (if_then_else (eq_attr "is_neon_type" "no")
293 (const_string "nocond")
294 (const_string "unconditional"))))
295
296 ; Predicable means that the insn can be conditionally executed based on
297 ; an automatically added predicate (additional patterns are generated by
298 ; gen...). We default to 'no' because no Thumb patterns match this rule
299 ; and not all ARM patterns do.
300 (define_attr "predicable" "no,yes" (const_string "no"))
301
302 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
303 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
304 ; suffer blockages enough to warrant modelling this (and it can adversely
305 ; affect the schedule).
306 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
307
308 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
309 ; to stall the processor. Used with model_wbuf above.
310 (define_attr "write_conflict" "no,yes"
311 (if_then_else (eq_attr "type"
312 "block,call,load_4")
313 (const_string "yes")
314 (const_string "no")))
315
316 ; Classify the insns into those that take one cycle and those that take more
317 ; than one on the main cpu execution unit.
318 (define_attr "core_cycles" "single,multi"
319 (if_then_else (eq_attr "type"
320 "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
321 alu_shift_imm, alu_shift_reg, alu_dsp_reg, alus_ext, alus_imm, alus_sreg,\
322 alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
323 logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
324 logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
325 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
326 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
327 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
328 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
329 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
330 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
331 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
332 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
333 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
334 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
335 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
336 (const_string "single")
337 (const_string "multi")))
338
339 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
340 ;; distant label. Only applicable to Thumb code.
341 (define_attr "far_jump" "yes,no" (const_string "no"))
342
343
344 ;; The number of machine instructions this pattern expands to.
345 ;; Used for Thumb-2 conditional execution.
346 (define_attr "ce_count" "" (const_int 1))
347
348 ;;---------------------------------------------------------------------------
349 ;; Unspecs
350
351 (include "unspecs.md")
352
353 ;;---------------------------------------------------------------------------
354 ;; Mode iterators
355
356 (include "iterators.md")
357
358 ;;---------------------------------------------------------------------------
359 ;; Predicates
360
361 (include "predicates.md")
362 (include "constraints.md")
363
364 ;;---------------------------------------------------------------------------
365 ;; Pipeline descriptions
366
367 (define_attr "tune_cortexr4" "yes,no"
368 (const (if_then_else
369 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
370 (const_string "yes")
371 (const_string "no"))))
372
373 ;; True if the generic scheduling description should be used.
374
375 (define_attr "generic_sched" "yes,no"
376 (const (if_then_else
377 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
378 arm926ejs,arm10e,arm1026ejs,arm1136js,\
379 arm1136jfs,cortexa5,cortexa7,cortexa8,\
380 cortexa9,cortexa12,cortexa15,cortexa17,\
381 cortexa53,cortexa57,cortexm4,cortexm7,\
382 exynosm1,marvell_pj4,xgene1")
383 (eq_attr "tune_cortexr4" "yes"))
384 (const_string "no")
385 (const_string "yes"))))
386
387 (define_attr "generic_vfp" "yes,no"
388 (const (if_then_else
389 (and (eq_attr "fpu" "vfp")
390 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
391 cortexa8,cortexa9,cortexa53,cortexm4,\
392 cortexm7,marvell_pj4,xgene1")
393 (eq_attr "tune_cortexr4" "no"))
394 (const_string "yes")
395 (const_string "no"))))
396
397 (include "marvell-f-iwmmxt.md")
398 (include "arm-generic.md")
399 (include "arm926ejs.md")
400 (include "arm1020e.md")
401 (include "arm1026ejs.md")
402 (include "arm1136jfs.md")
403 (include "fa526.md")
404 (include "fa606te.md")
405 (include "fa626te.md")
406 (include "fmp626.md")
407 (include "fa726te.md")
408 (include "cortex-a5.md")
409 (include "cortex-a7.md")
410 (include "cortex-a8.md")
411 (include "cortex-a9.md")
412 (include "cortex-a15.md")
413 (include "cortex-a17.md")
414 (include "cortex-a53.md")
415 (include "cortex-a57.md")
416 (include "cortex-r4.md")
417 (include "cortex-r4f.md")
418 (include "cortex-m7.md")
419 (include "cortex-m4.md")
420 (include "cortex-m4-fpu.md")
421 (include "exynos-m1.md")
422 (include "vfp11.md")
423 (include "marvell-pj4.md")
424 (include "xgene1.md")
425
426 \f
427 ;;---------------------------------------------------------------------------
428 ;; Insn patterns
429 ;;
430 ;; Addition insns.
431
432 ;; Note: For DImode insns, there is normally no reason why operands should
433 ;; not be in the same register; what we don't want is for something being
434 ;; written to partially overlap something that is an input.
435
436 (define_expand "adddi3"
437 [(parallel
438 [(set (match_operand:DI 0 "s_register_operand")
439 (plus:DI (match_operand:DI 1 "s_register_operand")
440 (match_operand:DI 2 "reg_or_int_operand")))
441 (clobber (reg:CC CC_REGNUM))])]
442 "TARGET_EITHER"
443 "
444 if (TARGET_THUMB1)
445 {
446 if (!REG_P (operands[2]))
447 operands[2] = force_reg (DImode, operands[2]);
448 }
449 else
450 {
451 rtx lo_result, hi_result, lo_dest, hi_dest;
452 rtx lo_op1, hi_op1, lo_op2, hi_op2;
453 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
454 &lo_op2, &hi_op2);
455 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
456 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
457
458 if (lo_op2 == const0_rtx)
459 {
460 lo_dest = lo_op1;
461 if (!arm_add_operand (hi_op2, SImode))
462 hi_op2 = force_reg (SImode, hi_op2);
463 /* Assume hi_op2 won't also be zero. */
464 emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
465 }
466 else
467 {
468 if (!arm_add_operand (lo_op2, SImode))
469 lo_op2 = force_reg (SImode, lo_op2);
470 if (!arm_not_operand (hi_op2, SImode))
471 hi_op2 = force_reg (SImode, hi_op2);
472
473 emit_insn (gen_addsi3_compareC (lo_dest, lo_op1, lo_op2));
474 rtx carry = gen_rtx_LTU (SImode, gen_rtx_REG (CC_Cmode, CC_REGNUM),
475 const0_rtx);
476 if (hi_op2 == const0_rtx)
477 emit_insn (gen_add0si3_carryin (hi_dest, hi_op1, carry));
478 else
479 emit_insn (gen_addsi3_carryin (hi_dest, hi_op1, hi_op2, carry));
480 }
481
482 if (lo_result != lo_dest)
483 emit_move_insn (lo_result, lo_dest);
484 if (hi_result != hi_dest)
485 emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
486 DONE;
487 }
488 "
489 )
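;; For illustration only (a typical expansion, not a guarantee): when both
;; halves are already in registers, the expander above usually produces an
;; ADDS on the low words followed by an ADC on the high words so that the
;; carry from the low half is propagated, e.g.
;;	adds	r0, r2, r4
;;	adc	r1, r3, r5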
490
491 (define_expand "addv<mode>4"
492 [(match_operand:SIDI 0 "register_operand")
493 (match_operand:SIDI 1 "register_operand")
494 (match_operand:SIDI 2 "register_operand")
495 (match_operand 3 "")]
496 "TARGET_32BIT"
497 {
498 emit_insn (gen_add<mode>3_compareV (operands[0], operands[1], operands[2]));
499 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
500
501 DONE;
502 })
503
504 (define_expand "uaddv<mode>4"
505 [(match_operand:SIDI 0 "register_operand")
506 (match_operand:SIDI 1 "register_operand")
507 (match_operand:SIDI 2 "register_operand")
508 (match_operand 3 "")]
509 "TARGET_32BIT"
510 {
511 emit_insn (gen_add<mode>3_compareC (operands[0], operands[1], operands[2]));
512 arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
513
514 DONE;
515 })
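;; Illustrative sketch of the code these overflow-checking expansions lead to
;; (typical output, not a fixed rule): a flag-setting addition followed by a
;; conditional branch to the label in operand 3, taken on the overflow flag
;; for the signed case (addv) or on the carry flag for the unsigned case
;; (uaddv).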
516
517 (define_expand "addsi3"
518 [(set (match_operand:SI 0 "s_register_operand")
519 (plus:SI (match_operand:SI 1 "s_register_operand")
520 (match_operand:SI 2 "reg_or_int_operand")))]
521 "TARGET_EITHER"
522 "
523 if (TARGET_32BIT && CONST_INT_P (operands[2]))
524 {
525 arm_split_constant (PLUS, SImode, NULL_RTX,
526 INTVAL (operands[2]), operands[0], operands[1],
527 optimize && can_create_pseudo_p ());
528 DONE;
529 }
530 "
531 )
532
533 ; If there is a scratch available, this will be faster than synthesizing the
534 ; addition.
535 (define_peephole2
536 [(match_scratch:SI 3 "r")
537 (set (match_operand:SI 0 "arm_general_register_operand" "")
538 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
539 (match_operand:SI 2 "const_int_operand" "")))]
540 "TARGET_32BIT &&
541 !(const_ok_for_arm (INTVAL (operands[2]))
542 || const_ok_for_arm (-INTVAL (operands[2])))
543 && const_ok_for_arm (~INTVAL (operands[2]))"
544 [(set (match_dup 3) (match_dup 2))
545 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
546 ""
547 )
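;; Illustrative example with a hypothetical constant: 0xffff00ff is not a
;; valid immediate and neither is its negation, but its complement 0x0000ff00
;; is, so with a free scratch register the peephole gives roughly
;;	mvn	r3, #0x0000ff00		@ r3 = 0xffff00ff
;;	add	r0, r1, r3
;; instead of a longer synthesized-constant sequence.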
548
549 ;; The r/r/k alternative is required when reloading the address
550 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
551 ;; put the duplicated register first, and not try the commutative version.
552 (define_insn_and_split "*arm_addsi3"
553 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
554 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
555 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
556 "TARGET_32BIT"
557 "@
558 add%?\\t%0, %0, %2
559 add%?\\t%0, %1, %2
560 add%?\\t%0, %1, %2
561 add%?\\t%0, %1, %2
562 add%?\\t%0, %1, %2
563 add%?\\t%0, %1, %2
564 add%?\\t%0, %2, %1
565 add%?\\t%0, %1, %2
566 addw%?\\t%0, %1, %2
567 addw%?\\t%0, %1, %2
568 sub%?\\t%0, %1, #%n2
569 sub%?\\t%0, %1, #%n2
570 sub%?\\t%0, %1, #%n2
571 subw%?\\t%0, %1, #%n2
572 subw%?\\t%0, %1, #%n2
573 #"
574 "TARGET_32BIT
575 && CONST_INT_P (operands[2])
576 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
577 && (reload_completed || !arm_eliminable_register (operands[1]))"
578 [(clobber (const_int 0))]
579 "
580 arm_split_constant (PLUS, SImode, curr_insn,
581 INTVAL (operands[2]), operands[0],
582 operands[1], 0);
583 DONE;
584 "
585 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
586 (set_attr "predicable" "yes")
587 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
588 (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
589 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
590 (const_string "alu_imm")
591 (const_string "alu_sreg")))
592 ]
593 )
594
595 (define_insn "adddi3_compareV"
596 [(set (reg:CC_V CC_REGNUM)
597 (ne:CC_V
598 (plus:TI
599 (sign_extend:TI (match_operand:DI 1 "s_register_operand" "r"))
600 (sign_extend:TI (match_operand:DI 2 "s_register_operand" "r")))
601 (sign_extend:TI (plus:DI (match_dup 1) (match_dup 2)))))
602 (set (match_operand:DI 0 "s_register_operand" "=&r")
603 (plus:DI (match_dup 1) (match_dup 2)))]
604 "TARGET_32BIT"
605 "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
606 [(set_attr "conds" "set")
607 (set_attr "length" "8")
608 (set_attr "type" "multiple")]
609 )
610
611 (define_insn "addsi3_compareV"
612 [(set (reg:CC_V CC_REGNUM)
613 (ne:CC_V
614 (plus:DI
615 (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
616 (sign_extend:DI (match_operand:SI 2 "register_operand" "r")))
617 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
618 (set (match_operand:SI 0 "register_operand" "=r")
619 (plus:SI (match_dup 1) (match_dup 2)))]
620 "TARGET_32BIT"
621 "adds%?\\t%0, %1, %2"
622 [(set_attr "conds" "set")
623 (set_attr "type" "alus_sreg")]
624 )
625
626 (define_insn "adddi3_compareC"
627 [(set (reg:CC_C CC_REGNUM)
628 (compare:CC_C
629 (plus:DI
630 (match_operand:DI 1 "register_operand" "r")
631 (match_operand:DI 2 "register_operand" "r"))
632 (match_dup 1)))
633 (set (match_operand:DI 0 "register_operand" "=&r")
634 (plus:DI (match_dup 1) (match_dup 2)))]
635 "TARGET_32BIT"
636 "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
637 [(set_attr "conds" "set")
638 (set_attr "length" "8")
639 (set_attr "type" "multiple")]
640 )
641
642 (define_insn "addsi3_compareC"
643 [(set (reg:CC_C CC_REGNUM)
644 (compare:CC_C (plus:SI (match_operand:SI 1 "register_operand" "r")
645 (match_operand:SI 2 "register_operand" "r"))
646 (match_dup 1)))
647 (set (match_operand:SI 0 "register_operand" "=r")
648 (plus:SI (match_dup 1) (match_dup 2)))]
649 "TARGET_32BIT"
650 "adds%?\\t%0, %1, %2"
651 [(set_attr "conds" "set")
652 (set_attr "type" "alus_sreg")]
653 )
654
655 (define_insn "addsi3_compare0"
656 [(set (reg:CC_NOOV CC_REGNUM)
657 (compare:CC_NOOV
658 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
659 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
660 (const_int 0)))
661 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
662 (plus:SI (match_dup 1) (match_dup 2)))]
663 "TARGET_ARM"
664 "@
665 adds%?\\t%0, %1, %2
666 subs%?\\t%0, %1, #%n2
667 adds%?\\t%0, %1, %2"
668 [(set_attr "conds" "set")
669 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
670 )
671
672 (define_insn "*addsi3_compare0_scratch"
673 [(set (reg:CC_NOOV CC_REGNUM)
674 (compare:CC_NOOV
675 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
676 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
677 (const_int 0)))]
678 "TARGET_ARM"
679 "@
680 cmn%?\\t%0, %1
681 cmp%?\\t%0, #%n1
682 cmn%?\\t%0, %1"
683 [(set_attr "conds" "set")
684 (set_attr "predicable" "yes")
685 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
686 )
687
688 (define_insn "*compare_negsi_si"
689 [(set (reg:CC_Z CC_REGNUM)
690 (compare:CC_Z
691 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
692 (match_operand:SI 1 "s_register_operand" "l,r")))]
693 "TARGET_32BIT"
694 "cmn%?\\t%1, %0"
695 [(set_attr "conds" "set")
696 (set_attr "predicable" "yes")
697 (set_attr "arch" "t2,*")
698 (set_attr "length" "2,4")
699 (set_attr "predicable_short_it" "yes,no")
700 (set_attr "type" "alus_sreg")]
701 )
702
703 ;; This is the canonicalization of subsi3_compare when the
704 ;; addend is a constant.
705 (define_insn "cmpsi2_addneg"
706 [(set (reg:CC CC_REGNUM)
707 (compare:CC
708 (match_operand:SI 1 "s_register_operand" "r,r")
709 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
710 (set (match_operand:SI 0 "s_register_operand" "=r,r")
711 (plus:SI (match_dup 1)
712 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
713 "TARGET_32BIT
714 && (INTVAL (operands[2])
715 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
716 {
717 /* For 0 and INT_MIN it is essential that we use subs, as adds will result
718 in different condition codes (like cmn rather than like cmp), so that
719 alternative comes first.  Both alternatives can match for any value of the
720 form 0x??000000; except for 0 and INT_MIN it doesn't matter which we choose.
721 Both can also match -1 and 1 under TARGET_THUMB2; in that case prefer the
722 alternative using #1, as it is shorter.  */
723 if (which_alternative == 0 && operands[3] != const1_rtx)
724 return "subs%?\\t%0, %1, #%n3";
725 else
726 return "adds%?\\t%0, %1, %3";
727 }
728 [(set_attr "conds" "set")
729 (set_attr "type" "alus_sreg")]
730 )
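;; Illustrative example (not from the sources): comparing r1 with #4 while
;; also wanting r1 - 4 matches this pattern with operands[2] == 4 and
;; operands[3] == -4, and the first alternative emits the single instruction
;;	subs	r0, r1, #4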
731
732 ;; Convert the sequence
733 ;; sub rd, rn, #1
734 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
735 ;; bne dest
736 ;; into
737 ;; subs rd, rn, #1
738 ;; bcs dest ((unsigned)rn >= 1)
739 ;; similarly for the beq variant using bcc.
740 ;; This is a common looping idiom (while (n--))
741 (define_peephole2
742 [(set (match_operand:SI 0 "arm_general_register_operand" "")
743 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
744 (const_int -1)))
745 (set (match_operand 2 "cc_register" "")
746 (compare (match_dup 0) (const_int -1)))
747 (set (pc)
748 (if_then_else (match_operator 3 "equality_operator"
749 [(match_dup 2) (const_int 0)])
750 (match_operand 4 "" "")
751 (match_operand 5 "" "")))]
752 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
753 [(parallel[
754 (set (match_dup 2)
755 (compare:CC
756 (match_dup 1) (const_int 1)))
757 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
758 (set (pc)
759 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
760 (match_dup 4)
761 (match_dup 5)))]
762 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
763 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
764 ? GEU : LTU),
765 VOIDmode,
766 operands[2], const0_rtx);"
767 )
768
769 ;; The next four insns work because they compare the result with one of
770 ;; the operands, and we know that the use of the condition code is
771 ;; either GEU or LTU, so we can use the carry flag from the addition
772 ;; instead of doing the compare a second time.
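;; As an illustrative aside (usual ARM flag conventions assumed): after an
;; ADDS the carry flag is set exactly when the 32-bit result wrapped around,
;; which is the same as the result being unsigned-less-than either input; so
;; a GEU/LTU test of the sum against one of the operands can be read straight
;; from the carry flag with no extra CMP or CMN.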
773 (define_insn "*addsi3_compare_op1"
774 [(set (reg:CC_C CC_REGNUM)
775 (compare:CC_C
776 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
777 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
778 (match_dup 1)))
779 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
780 (plus:SI (match_dup 1) (match_dup 2)))]
781 "TARGET_32BIT"
782 "@
783 adds%?\\t%0, %1, %2
784 adds%?\\t%0, %0, %2
785 subs%?\\t%0, %1, #%n2
786 subs%?\\t%0, %0, #%n2
787 adds%?\\t%0, %1, %2
788 subs%?\\t%0, %1, #%n2
789 adds%?\\t%0, %1, %2"
790 [(set_attr "conds" "set")
791 (set_attr "arch" "t2,t2,t2,t2,*,*,*")
792 (set_attr "length" "2,2,2,2,4,4,4")
793 (set_attr "type"
794 "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
795 )
796
797 (define_insn "*addsi3_compare_op2"
798 [(set (reg:CC_C CC_REGNUM)
799 (compare:CC_C
800 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
801 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
802 (match_dup 2)))
803 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
804 (plus:SI (match_dup 1) (match_dup 2)))]
805 "TARGET_32BIT"
806 "@
807 adds%?\\t%0, %1, %2
808 adds%?\\t%0, %0, %2
809 subs%?\\t%0, %1, #%n2
810 subs%?\\t%0, %0, #%n2
811 adds%?\\t%0, %1, %2
812 subs%?\\t%0, %1, #%n2
813 adds%?\\t%0, %1, %2"
814 [(set_attr "conds" "set")
815 (set_attr "arch" "t2,t2,t2,t2,*,*,*")
816 (set_attr "length" "2,2,2,2,4,4,4")
817 (set_attr "type"
818 "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
819 )
820
821 (define_insn "*compare_addsi2_op0"
822 [(set (reg:CC_C CC_REGNUM)
823 (compare:CC_C
824 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
825 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
826 (match_dup 0)))]
827 "TARGET_32BIT"
828 "@
829 cmp%?\\t%0, #%n1
830 cmn%?\\t%0, %1
831 cmn%?\\t%0, %1
832 cmp%?\\t%0, #%n1
833 cmn%?\\t%0, %1"
834 [(set_attr "conds" "set")
835 (set_attr "predicable" "yes")
836 (set_attr "arch" "t2,t2,*,*,*")
837 (set_attr "predicable_short_it" "yes,yes,no,no,no")
838 (set_attr "length" "2,2,4,4,4")
839 (set_attr "type" "alus_imm,alus_sreg,alus_imm,alus_imm,alus_sreg")]
840 )
841
842 (define_insn "*compare_addsi2_op1"
843 [(set (reg:CC_C CC_REGNUM)
844 (compare:CC_C
845 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
846 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
847 (match_dup 1)))]
848 "TARGET_32BIT"
849 "@
850 cmp%?\\t%0, #%n1
851 cmn%?\\t%0, %1
852 cmn%?\\t%0, %1
853 cmp%?\\t%0, #%n1
854 cmn%?\\t%0, %1"
855 [(set_attr "conds" "set")
856 (set_attr "predicable" "yes")
857 (set_attr "arch" "t2,t2,*,*,*")
858 (set_attr "predicable_short_it" "yes,yes,no,no,no")
859 (set_attr "length" "2,2,4,4,4")
860 (set_attr "type" "alus_imm,alus_sreg,alus_imm,alus_imm,alus_sreg")]
861 )
862
863 (define_insn "addsi3_carryin"
864 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
865 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
866 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
867 (match_operand:SI 3 "arm_carry_operation" "")))]
868 "TARGET_32BIT"
869 "@
870 adc%?\\t%0, %1, %2
871 adc%?\\t%0, %1, %2
872 sbc%?\\t%0, %1, #%B2"
873 [(set_attr "conds" "use")
874 (set_attr "predicable" "yes")
875 (set_attr "arch" "t2,*,*")
876 (set_attr "length" "4")
877 (set_attr "predicable_short_it" "yes,no,no")
878 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
879 )
880
881 ;; Canonicalization of the above when the immediate is zero.
882 (define_insn "add0si3_carryin"
883 [(set (match_operand:SI 0 "s_register_operand" "=r")
884 (plus:SI (match_operand:SI 2 "arm_carry_operation" "")
885 (match_operand:SI 1 "arm_not_operand" "r")))]
886 "TARGET_32BIT"
887 "adc%?\\t%0, %1, #0"
888 [(set_attr "conds" "use")
889 (set_attr "predicable" "yes")
890 (set_attr "length" "4")
891 (set_attr "type" "adc_imm")]
892 )
893
894 (define_insn "*addsi3_carryin_alt2"
895 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
896 (plus:SI (plus:SI (match_operand:SI 3 "arm_carry_operation" "")
897 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
898 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
899 "TARGET_32BIT"
900 "@
901 adc%?\\t%0, %1, %2
902 adc%?\\t%0, %1, %2
903 sbc%?\\t%0, %1, #%B2"
904 [(set_attr "conds" "use")
905 (set_attr "predicable" "yes")
906 (set_attr "arch" "t2,*,*")
907 (set_attr "length" "4")
908 (set_attr "predicable_short_it" "yes,no,no")
909 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
910 )
911
912 (define_insn "*addsi3_carryin_shift"
913 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
914 (plus:SI (plus:SI
915 (match_operator:SI 2 "shift_operator"
916 [(match_operand:SI 3 "s_register_operand" "r,r")
917 (match_operand:SI 4 "shift_amount_operand" "M,r")])
918 (match_operand:SI 5 "arm_carry_operation" ""))
919 (match_operand:SI 1 "s_register_operand" "r,r")))]
920 "TARGET_32BIT"
921 "adc%?\\t%0, %1, %3%S2"
922 [(set_attr "conds" "use")
923 (set_attr "arch" "32,a")
924 (set_attr "shift" "3")
925 (set_attr "predicable" "yes")
926 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
927 (const_string "alu_shift_imm")
928 (const_string "alu_shift_reg")))]
929 )
930
931 (define_insn "*addsi3_carryin_clobercc"
932 [(set (match_operand:SI 0 "s_register_operand" "=r")
933 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
934 (match_operand:SI 2 "arm_rhs_operand" "rI"))
935 (match_operand:SI 3 "arm_carry_operation" "")))
936 (clobber (reg:CC CC_REGNUM))]
937 "TARGET_32BIT"
938 "adcs%?\\t%0, %1, %2"
939 [(set_attr "conds" "set")
940 (set_attr "type" "adcs_reg")]
941 )
942
943 (define_expand "subv<mode>4"
944 [(match_operand:SIDI 0 "register_operand")
945 (match_operand:SIDI 1 "register_operand")
946 (match_operand:SIDI 2 "register_operand")
947 (match_operand 3 "")]
948 "TARGET_32BIT"
949 {
950 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
951 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
952
953 DONE;
954 })
955
956 (define_expand "usubv<mode>4"
957 [(match_operand:SIDI 0 "register_operand")
958 (match_operand:SIDI 1 "register_operand")
959 (match_operand:SIDI 2 "register_operand")
960 (match_operand 3 "")]
961 "TARGET_32BIT"
962 {
963 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
964 arm_gen_unlikely_cbranch (LTU, CCmode, operands[3]);
965
966 DONE;
967 })
968
969 (define_insn "subdi3_compare1"
970 [(set (reg:CC CC_REGNUM)
971 (compare:CC
972 (match_operand:DI 1 "s_register_operand" "r")
973 (match_operand:DI 2 "s_register_operand" "r")))
974 (set (match_operand:DI 0 "s_register_operand" "=&r")
975 (minus:DI (match_dup 1) (match_dup 2)))]
976 "TARGET_32BIT"
977 "subs\\t%Q0, %Q1, %Q2;sbcs\\t%R0, %R1, %R2"
978 [(set_attr "conds" "set")
979 (set_attr "length" "8")
980 (set_attr "type" "multiple")]
981 )
982
983 (define_insn "subsi3_compare1"
984 [(set (reg:CC CC_REGNUM)
985 (compare:CC
986 (match_operand:SI 1 "register_operand" "r")
987 (match_operand:SI 2 "register_operand" "r")))
988 (set (match_operand:SI 0 "register_operand" "=r")
989 (minus:SI (match_dup 1) (match_dup 2)))]
990 "TARGET_32BIT"
991 "subs%?\\t%0, %1, %2"
992 [(set_attr "conds" "set")
993 (set_attr "type" "alus_sreg")]
994 )
995
996 (define_insn "subsi3_carryin"
997 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
998 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
999 (match_operand:SI 2 "s_register_operand" "r,r,r"))
1000 (match_operand:SI 3 "arm_borrow_operation" "")))]
1001 "TARGET_32BIT"
1002 "@
1003 sbc%?\\t%0, %1, %2
1004 rsc%?\\t%0, %2, %1
1005 sbc%?\\t%0, %2, %2, lsl #1"
1006 [(set_attr "conds" "use")
1007 (set_attr "arch" "*,a,t2")
1008 (set_attr "predicable" "yes")
1009 (set_attr "type" "adc_reg,adc_imm,alu_shift_imm")]
1010 )
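;; Aside on the third alternative above (illustration, assuming the usual
;; definition of SBC as Rn - shifted(Rm) - NOT(C)):
;;	sbc	r0, r2, r2, lsl #1
;; computes r2 - 2*r2 - NOT(C) = -r2 - borrow, i.e. 0 - r2 with the borrow
;; folded in, without needing to materialize the zero.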
1011
1012 (define_insn "*subsi3_carryin_const"
1013 [(set (match_operand:SI 0 "s_register_operand" "=r")
1014 (minus:SI (plus:SI
1015 (match_operand:SI 1 "s_register_operand" "r")
1016 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1017 (match_operand:SI 3 "arm_borrow_operation" "")))]
1018 "TARGET_32BIT"
1019 "sbc\\t%0, %1, #%n2"
1020 [(set_attr "conds" "use")
1021 (set_attr "type" "adc_imm")]
1022 )
1023
1024 (define_insn "*subsi3_carryin_const0"
1025 [(set (match_operand:SI 0 "s_register_operand" "=r")
1026 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1027 (match_operand:SI 2 "arm_borrow_operation" "")))]
1028 "TARGET_32BIT"
1029 "sbc\\t%0, %1, #0"
1030 [(set_attr "conds" "use")
1031 (set_attr "type" "adc_imm")]
1032 )
1033
1034 (define_insn "*subsi3_carryin_shift"
1035 [(set (match_operand:SI 0 "s_register_operand" "=r")
1036 (minus:SI (minus:SI
1037 (match_operand:SI 1 "s_register_operand" "r")
1038 (match_operator:SI 2 "shift_operator"
1039 [(match_operand:SI 3 "s_register_operand" "r")
1040 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
1041 (match_operand:SI 5 "arm_borrow_operation" "")))]
1042 "TARGET_32BIT"
1043 "sbc%?\\t%0, %1, %3%S2"
1044 [(set_attr "conds" "use")
1045 (set_attr "predicable" "yes")
1046 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1047 (const_string "alu_shift_imm")
1048 (const_string "alu_shift_reg")))]
1049 )
1050
1051 (define_insn "*rsbsi3_carryin_shift"
1052 [(set (match_operand:SI 0 "s_register_operand" "=r")
1053 (minus:SI (minus:SI
1054 (match_operator:SI 2 "shift_operator"
1055 [(match_operand:SI 3 "s_register_operand" "r")
1056 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1057 (match_operand:SI 1 "s_register_operand" "r"))
1058 (match_operand:SI 5 "arm_borrow_operation" "")))]
1059 "TARGET_ARM"
1060 "rsc%?\\t%0, %1, %3%S2"
1061 [(set_attr "conds" "use")
1062 (set_attr "predicable" "yes")
1063 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1064 (const_string "alu_shift_imm")
1065 (const_string "alu_shift_reg")))]
1066 )
1067
1068 ; Transform ((x << y) - 1) into ~(~(x - 1) << y), where x is a constant.
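; (Illustrative derivation: using ~z = -z - 1 and the fact that x is a
; compile-time constant, ~(x - 1) = -x, and therefore
; ~(~(x - 1) << y) = ~(-(x << y)) = (x << y) - 1.)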
1069 (define_split
1070 [(set (match_operand:SI 0 "s_register_operand" "")
1071 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1072 (match_operand:SI 2 "s_register_operand" ""))
1073 (const_int -1)))
1074 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1075 "TARGET_32BIT"
1076 [(set (match_dup 3) (match_dup 1))
1077 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1078 "
1079 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1080 ")
1081
1082 (define_expand "addsf3"
1083 [(set (match_operand:SF 0 "s_register_operand")
1084 (plus:SF (match_operand:SF 1 "s_register_operand")
1085 (match_operand:SF 2 "s_register_operand")))]
1086 "TARGET_32BIT && TARGET_HARD_FLOAT"
1087 "
1088 ")
1089
1090 (define_expand "adddf3"
1091 [(set (match_operand:DF 0 "s_register_operand")
1092 (plus:DF (match_operand:DF 1 "s_register_operand")
1093 (match_operand:DF 2 "s_register_operand")))]
1094 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1095 "
1096 ")
1097
1098 (define_expand "subdi3"
1099 [(parallel
1100 [(set (match_operand:DI 0 "s_register_operand")
1101 (minus:DI (match_operand:DI 1 "reg_or_int_operand")
1102 (match_operand:DI 2 "s_register_operand")))
1103 (clobber (reg:CC CC_REGNUM))])]
1104 "TARGET_EITHER"
1105 "
1106 if (TARGET_THUMB1)
1107 {
1108 if (!REG_P (operands[1]))
1109 operands[1] = force_reg (DImode, operands[1]);
1110 }
1111 else
1112 {
1113 rtx lo_result, hi_result, lo_dest, hi_dest;
1114 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1115 rtx condition;
1116
1117 /* Since operands[1] may be an integer, pass it second, so that
1118 any necessary simplifications will be done on the decomposed
1119 constant. */
1120 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1121 &lo_op1, &hi_op1);
1122 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
1123 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
1124
1125 if (!arm_rhs_operand (lo_op1, SImode))
1126 lo_op1 = force_reg (SImode, lo_op1);
1127
1128 if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
1129 || !arm_rhs_operand (hi_op1, SImode))
1130 hi_op1 = force_reg (SImode, hi_op1);
1131
1132 rtx cc_reg;
1133 if (lo_op1 == const0_rtx)
1134 {
1135 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1136 emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
1137 }
1138 else if (CONST_INT_P (lo_op1))
1139 {
1140 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1141 emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
1142 GEN_INT (~UINTVAL (lo_op1))));
1143 }
1144 else
1145 {
1146 cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
1147 emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
1148 }
1149
1150 condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);
1151
1152 if (hi_op1 == const0_rtx)
1153 emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
1154 else
1155 emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));
1156
1157 if (lo_result != lo_dest)
1158 emit_move_insn (lo_result, lo_dest);
1159
1160 if (hi_result != hi_dest)
1161 emit_move_insn (hi_result, hi_dest);
1162
1163 DONE;
1164 }
1165 "
1166 )
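;; For illustration only (typical expansions, not guarantees): with both
;; halves in registers this becomes a SUBS on the low words followed by an
;; SBC on the high words; when the first operand is a constant the low half
;; uses an RSBS (or a flag-setting negate for zero) instead, with the borrow
;; still carried into the high half.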
1167
1168 (define_expand "subsi3"
1169 [(set (match_operand:SI 0 "s_register_operand")
1170 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1171 (match_operand:SI 2 "s_register_operand")))]
1172 "TARGET_EITHER"
1173 "
1174 if (CONST_INT_P (operands[1]))
1175 {
1176 if (TARGET_32BIT)
1177 {
1178 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1179 operands[1] = force_reg (SImode, operands[1]);
1180 else
1181 {
1182 arm_split_constant (MINUS, SImode, NULL_RTX,
1183 INTVAL (operands[1]), operands[0],
1184 operands[2],
1185 optimize && can_create_pseudo_p ());
1186 DONE;
1187 }
1188 }
1189 else /* TARGET_THUMB1 */
1190 operands[1] = force_reg (SImode, operands[1]);
1191 }
1192 "
1193 )
1194
1195 ; ??? Check Thumb-2 split length
1196 (define_insn_and_split "*arm_subsi3_insn"
1197 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
1198 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
1199 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
1200 "TARGET_32BIT"
1201 "@
1202 sub%?\\t%0, %1, %2
1203 sub%?\\t%0, %2
1204 sub%?\\t%0, %1, %2
1205 rsb%?\\t%0, %2, %1
1206 rsb%?\\t%0, %2, %1
1207 sub%?\\t%0, %1, %2
1208 sub%?\\t%0, %1, %2
1209 sub%?\\t%0, %1, %2
1210 #"
1211 "&& (CONST_INT_P (operands[1])
1212 && !const_ok_for_arm (INTVAL (operands[1])))"
1213 [(clobber (const_int 0))]
1214 "
1215 arm_split_constant (MINUS, SImode, curr_insn,
1216 INTVAL (operands[1]), operands[0], operands[2], 0);
1217 DONE;
1218 "
1219 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
1220 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
1221 (set_attr "predicable" "yes")
1222 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
1223 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
1224 )
1225
1226 (define_peephole2
1227 [(match_scratch:SI 3 "r")
1228 (set (match_operand:SI 0 "arm_general_register_operand" "")
1229 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1230 (match_operand:SI 2 "arm_general_register_operand" "")))]
1231 "TARGET_32BIT
1232 && !const_ok_for_arm (INTVAL (operands[1]))
1233 && const_ok_for_arm (~INTVAL (operands[1]))"
1234 [(set (match_dup 3) (match_dup 1))
1235 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1236 ""
1237 )
1238
1239 (define_insn "subsi3_compare0"
1240 [(set (reg:CC_NOOV CC_REGNUM)
1241 (compare:CC_NOOV
1242 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1243 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1244 (const_int 0)))
1245 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1246 (minus:SI (match_dup 1) (match_dup 2)))]
1247 "TARGET_32BIT"
1248 "@
1249 subs%?\\t%0, %1, %2
1250 subs%?\\t%0, %1, %2
1251 rsbs%?\\t%0, %2, %1"
1252 [(set_attr "conds" "set")
1253 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
1254 )
1255
1256 (define_insn "subsi3_compare"
1257 [(set (reg:CC CC_REGNUM)
1258 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1259 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1260 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1261 (minus:SI (match_dup 1) (match_dup 2)))]
1262 "TARGET_32BIT"
1263 "@
1264 subs%?\\t%0, %1, %2
1265 subs%?\\t%0, %1, %2
1266 rsbs%?\\t%0, %2, %1"
1267 [(set_attr "conds" "set")
1268 (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
1269 )
1270
1271 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
1272 ;; rather than (0 cmp reg). This gives the same results for unsigned
1273 ;; and equality compares, which is what we mostly need here.
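;; (Illustrative aside: in two's complement a - b = ~b - ~a, so the flags set
;; by "rsbs Rd, Rm, #imm", which computes imm - Rm, can equally be read as a
;; comparison of ~Rm against ~imm; that is the form written below.)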
1274 (define_insn "rsb_imm_compare"
1275 [(set (reg:CC_RSB CC_REGNUM)
1276 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
1277 (match_operand 3 "const_int_operand" "")))
1278 (set (match_operand:SI 0 "s_register_operand" "=r")
1279 (minus:SI (match_operand 1 "arm_immediate_operand" "I")
1280 (match_dup 2)))]
1281 "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
1282 "rsbs\\t%0, %2, %1"
1283 [(set_attr "conds" "set")
1284 (set_attr "type" "alus_imm")]
1285 )
1286
1287 (define_expand "subsf3"
1288 [(set (match_operand:SF 0 "s_register_operand")
1289 (minus:SF (match_operand:SF 1 "s_register_operand")
1290 (match_operand:SF 2 "s_register_operand")))]
1291 "TARGET_32BIT && TARGET_HARD_FLOAT"
1292 "
1293 ")
1294
1295 (define_expand "subdf3"
1296 [(set (match_operand:DF 0 "s_register_operand")
1297 (minus:DF (match_operand:DF 1 "s_register_operand")
1298 (match_operand:DF 2 "s_register_operand")))]
1299 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1300 "
1301 ")
1302
1303 \f
1304 ;; Multiplication insns
1305
1306 (define_expand "mulhi3"
1307 [(set (match_operand:HI 0 "s_register_operand")
1308 (mult:HI (match_operand:HI 1 "s_register_operand")
1309 (match_operand:HI 2 "s_register_operand")))]
1310 "TARGET_DSP_MULTIPLY"
1311 "
1312 {
1313 rtx result = gen_reg_rtx (SImode);
1314 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
1315 emit_move_insn (operands[0], gen_lowpart (HImode, result));
1316 DONE;
1317 }"
1318 )
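;; (Illustrative note: the HImode product is formed here as a 16x16->32
;; SMULBB into a fresh SImode register and then truncated, i.e. roughly the C
;; expression (short) ((int) a * (int) b).)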
1319
1320 (define_expand "mulsi3"
1321 [(set (match_operand:SI 0 "s_register_operand")
1322 (mult:SI (match_operand:SI 2 "s_register_operand")
1323 (match_operand:SI 1 "s_register_operand")))]
1324 "TARGET_EITHER"
1325 ""
1326 )
1327
1328 ;; Use `&' and then `0' to prevent operands 0 and 2 from being the same
1329 (define_insn "*mul"
1330 [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
1331 (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
1332 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
1333 "TARGET_32BIT"
1334 "mul%?\\t%0, %2, %1"
1335 [(set_attr "type" "mul")
1336 (set_attr "predicable" "yes")
1337 (set_attr "arch" "t2,v6,nov6,nov6")
1338 (set_attr "length" "4")
1339 (set_attr "predicable_short_it" "yes,no,*,*")]
1340 )
1341
1342 ;; MLA and MLS instructions. Use operand 1 for the accumulator to prefer
1343 ;; reusing the same register.
1344
1345 (define_insn "*mla"
1346 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
1347 (plus:SI
1348 (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
1349 (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
1350 (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
1351 "TARGET_32BIT"
1352 "mla%?\\t%0, %3, %2, %1"
1353 [(set_attr "type" "mla")
1354 (set_attr "predicable" "yes")
1355 (set_attr "arch" "v6,nov6,nov6,nov6")]
1356 )
1357
1358 (define_insn "*mls"
1359 [(set (match_operand:SI 0 "s_register_operand" "=r")
1360 (minus:SI
1361 (match_operand:SI 1 "s_register_operand" "r")
1362 (mult:SI (match_operand:SI 3 "s_register_operand" "r")
1363 (match_operand:SI 2 "s_register_operand" "r"))))]
1364 "TARGET_32BIT && arm_arch_thumb2"
1365 "mls%?\\t%0, %3, %2, %1"
1366 [(set_attr "type" "mla")
1367 (set_attr "predicable" "yes")]
1368 )
1369
1370 (define_insn "*mulsi3_compare0"
1371 [(set (reg:CC_NOOV CC_REGNUM)
1372 (compare:CC_NOOV (mult:SI
1373 (match_operand:SI 2 "s_register_operand" "r,r")
1374 (match_operand:SI 1 "s_register_operand" "%0,r"))
1375 (const_int 0)))
1376 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1377 (mult:SI (match_dup 2) (match_dup 1)))]
1378 "TARGET_ARM && !arm_arch6"
1379 "muls%?\\t%0, %2, %1"
1380 [(set_attr "conds" "set")
1381 (set_attr "type" "muls")]
1382 )
1383
1384 (define_insn "*mulsi3_compare0_v6"
1385 [(set (reg:CC_NOOV CC_REGNUM)
1386 (compare:CC_NOOV (mult:SI
1387 (match_operand:SI 2 "s_register_operand" "r")
1388 (match_operand:SI 1 "s_register_operand" "r"))
1389 (const_int 0)))
1390 (set (match_operand:SI 0 "s_register_operand" "=r")
1391 (mult:SI (match_dup 2) (match_dup 1)))]
1392 "TARGET_ARM && arm_arch6 && optimize_size"
1393 "muls%?\\t%0, %2, %1"
1394 [(set_attr "conds" "set")
1395 (set_attr "type" "muls")]
1396 )
1397
1398 (define_insn "*mulsi_compare0_scratch"
1399 [(set (reg:CC_NOOV CC_REGNUM)
1400 (compare:CC_NOOV (mult:SI
1401 (match_operand:SI 2 "s_register_operand" "r,r")
1402 (match_operand:SI 1 "s_register_operand" "%0,r"))
1403 (const_int 0)))
1404 (clobber (match_scratch:SI 0 "=&r,&r"))]
1405 "TARGET_ARM && !arm_arch6"
1406 "muls%?\\t%0, %2, %1"
1407 [(set_attr "conds" "set")
1408 (set_attr "type" "muls")]
1409 )
1410
1411 (define_insn "*mulsi_compare0_scratch_v6"
1412 [(set (reg:CC_NOOV CC_REGNUM)
1413 (compare:CC_NOOV (mult:SI
1414 (match_operand:SI 2 "s_register_operand" "r")
1415 (match_operand:SI 1 "s_register_operand" "r"))
1416 (const_int 0)))
1417 (clobber (match_scratch:SI 0 "=r"))]
1418 "TARGET_ARM && arm_arch6 && optimize_size"
1419 "muls%?\\t%0, %2, %1"
1420 [(set_attr "conds" "set")
1421 (set_attr "type" "muls")]
1422 )
1423
1424 (define_insn "*mulsi3addsi_compare0"
1425 [(set (reg:CC_NOOV CC_REGNUM)
1426 (compare:CC_NOOV
1427 (plus:SI (mult:SI
1428 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1429 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1430 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1431 (const_int 0)))
1432 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1433 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1434 (match_dup 3)))]
1435 "TARGET_ARM && arm_arch6"
1436 "mlas%?\\t%0, %2, %1, %3"
1437 [(set_attr "conds" "set")
1438 (set_attr "type" "mlas")]
1439 )
1440
1441 (define_insn "*mulsi3addsi_compare0_v6"
1442 [(set (reg:CC_NOOV CC_REGNUM)
1443 (compare:CC_NOOV
1444 (plus:SI (mult:SI
1445 (match_operand:SI 2 "s_register_operand" "r")
1446 (match_operand:SI 1 "s_register_operand" "r"))
1447 (match_operand:SI 3 "s_register_operand" "r"))
1448 (const_int 0)))
1449 (set (match_operand:SI 0 "s_register_operand" "=r")
1450 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1451 (match_dup 3)))]
1452 "TARGET_ARM && arm_arch6 && optimize_size"
1453 "mlas%?\\t%0, %2, %1, %3"
1454 [(set_attr "conds" "set")
1455 (set_attr "type" "mlas")]
1456 )
1457
1458 (define_insn "*mulsi3addsi_compare0_scratch"
1459 [(set (reg:CC_NOOV CC_REGNUM)
1460 (compare:CC_NOOV
1461 (plus:SI (mult:SI
1462 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1463 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1464 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1465 (const_int 0)))
1466 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1467 "TARGET_ARM && !arm_arch6"
1468 "mlas%?\\t%0, %2, %1, %3"
1469 [(set_attr "conds" "set")
1470 (set_attr "type" "mlas")]
1471 )
1472
1473 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1474 [(set (reg:CC_NOOV CC_REGNUM)
1475 (compare:CC_NOOV
1476 (plus:SI (mult:SI
1477 (match_operand:SI 2 "s_register_operand" "r")
1478 (match_operand:SI 1 "s_register_operand" "r"))
1479 (match_operand:SI 3 "s_register_operand" "r"))
1480 (const_int 0)))
1481 (clobber (match_scratch:SI 0 "=r"))]
1482 "TARGET_ARM && arm_arch6 && optimize_size"
1483 "mlas%?\\t%0, %2, %1, %3"
1484 [(set_attr "conds" "set")
1485 (set_attr "type" "mlas")]
1486 )
1487
1488 ;; 32x32->64 widening multiply.
1489 ;; The only difference between the v3-5 and v6+ versions is the requirement
1490 ;; that the output does not overlap with either input.
1491
1492 (define_expand "<Us>mulsidi3"
1493 [(set (match_operand:DI 0 "s_register_operand")
1494 (mult:DI
1495 (SE:DI (match_operand:SI 1 "s_register_operand"))
1496 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
1497 "TARGET_32BIT"
1498 {
1499 emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
1500 gen_highpart (SImode, operands[0]),
1501 operands[1], operands[2]));
1502 DONE;
1503 }
1504 )
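;; For illustration: UMULL/SMULL write the full 64-bit product to a pair of
;; core registers (low word, high word), which is why the expander above
;; passes the low and high parts of operand 0 separately, e.g.
;;	smull	r0, r1, r2, r3		@ signed 32x32->64 multiply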
1505
1506 (define_insn "<US>mull"
1507 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
1508 (mult:SI
1509 (match_operand:SI 2 "s_register_operand" "%r,r")
1510 (match_operand:SI 3 "s_register_operand" "r,r")))
1511 (set (match_operand:SI 1 "s_register_operand" "=r,&r")
1512 (truncate:SI
1513 (lshiftrt:DI
1514 (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
1515 (const_int 32))))]
1516 "TARGET_32BIT"
1517 "<US>mull%?\\t%0, %1, %2, %3"
1518 [(set_attr "type" "umull")
1519 (set_attr "predicable" "yes")
1520 (set_attr "arch" "v6,nov6")]
1521 )
1522
1523 (define_expand "<Us>maddsidi4"
1524 [(set (match_operand:DI 0 "s_register_operand")
1525 (plus:DI
1526 (mult:DI
1527 (SE:DI (match_operand:SI 1 "s_register_operand"))
1528 (SE:DI (match_operand:SI 2 "s_register_operand")))
1529 (match_operand:DI 3 "s_register_operand")))]
1530 "TARGET_32BIT"
1531 {
1532 emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
1533 gen_lowpart (SImode, operands[3]),
1534 gen_highpart (SImode, operands[0]),
1535 gen_highpart (SImode, operands[3]),
1536 operands[1], operands[2]));
1537 DONE;
1538 }
1539 )
1540
1541 (define_insn "<US>mlal"
1542 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
1543 (plus:SI
1544 (mult:SI
1545 (match_operand:SI 4 "s_register_operand" "%r,r")
1546 (match_operand:SI 5 "s_register_operand" "r,r"))
1547 (match_operand:SI 1 "s_register_operand" "0,0")))
1548 (set (match_operand:SI 2 "s_register_operand" "=r,&r")
1549 (plus:SI
1550 (truncate:SI
1551 (lshiftrt:DI
1552 (plus:DI
1553 (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
1554 (zero_extend:DI (match_dup 1)))
1555 (const_int 32)))
1556 (match_operand:SI 3 "s_register_operand" "2,2")))]
1557 "TARGET_32BIT"
1558 "<US>mlal%?\\t%0, %2, %4, %5"
1559 [(set_attr "type" "umlal")
1560 (set_attr "predicable" "yes")
1561 (set_attr "arch" "v6,nov6")]
1562 )
1563
1564 (define_expand "<US>mulsi3_highpart"
1565 [(parallel
1566 [(set (match_operand:SI 0 "s_register_operand")
1567 (truncate:SI
1568 (lshiftrt:DI
1569 (mult:DI
1570 (SE:DI (match_operand:SI 1 "s_register_operand"))
1571 (SE:DI (match_operand:SI 2 "s_register_operand")))
1572 (const_int 32))))
1573 (clobber (match_scratch:SI 3 ""))])]
1574 "TARGET_32BIT"
1575 ""
1576 )
1577
1578 (define_insn "*<US>mull_high"
1579 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
1580 (truncate:SI
1581 (lshiftrt:DI
1582 (mult:DI
1583 (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
1584 (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
1585 (const_int 32))))
1586 (clobber (match_scratch:SI 3 "=r,&r,&r"))]
1587 "TARGET_32BIT"
1588 "<US>mull%?\\t%3, %0, %2, %1"
1589 [(set_attr "type" "umull")
1590 (set_attr "predicable" "yes")
1591 (set_attr "arch" "v6,nov6,nov6")]
1592 )
1593
1594 (define_insn "mulhisi3"
1595 [(set (match_operand:SI 0 "s_register_operand" "=r")
1596 (mult:SI (sign_extend:SI
1597 (match_operand:HI 1 "s_register_operand" "%r"))
1598 (sign_extend:SI
1599 (match_operand:HI 2 "s_register_operand" "r"))))]
1600 "TARGET_DSP_MULTIPLY"
1601 "smulbb%?\\t%0, %1, %2"
1602 [(set_attr "type" "smulxy")
1603 (set_attr "predicable" "yes")]
1604 )
1605
1606 (define_insn "*mulhisi3tb"
1607 [(set (match_operand:SI 0 "s_register_operand" "=r")
1608 (mult:SI (ashiftrt:SI
1609 (match_operand:SI 1 "s_register_operand" "r")
1610 (const_int 16))
1611 (sign_extend:SI
1612 (match_operand:HI 2 "s_register_operand" "r"))))]
1613 "TARGET_DSP_MULTIPLY"
1614 "smultb%?\\t%0, %1, %2"
1615 [(set_attr "type" "smulxy")
1616 (set_attr "predicable" "yes")]
1617 )
1618
1619 (define_insn "*mulhisi3bt"
1620 [(set (match_operand:SI 0 "s_register_operand" "=r")
1621 (mult:SI (sign_extend:SI
1622 (match_operand:HI 1 "s_register_operand" "r"))
1623 (ashiftrt:SI
1624 (match_operand:SI 2 "s_register_operand" "r")
1625 (const_int 16))))]
1626 "TARGET_DSP_MULTIPLY"
1627 "smulbt%?\\t%0, %1, %2"
1628 [(set_attr "type" "smulxy")
1629 (set_attr "predicable" "yes")]
1630 )
1631
1632 (define_insn "*mulhisi3tt"
1633 [(set (match_operand:SI 0 "s_register_operand" "=r")
1634 (mult:SI (ashiftrt:SI
1635 (match_operand:SI 1 "s_register_operand" "r")
1636 (const_int 16))
1637 (ashiftrt:SI
1638 (match_operand:SI 2 "s_register_operand" "r")
1639 (const_int 16))))]
1640 "TARGET_DSP_MULTIPLY"
1641 "smultt%?\\t%0, %1, %2"
1642 [(set_attr "type" "smulxy")
1643 (set_attr "predicable" "yes")]
1644 )
1645
1646 (define_insn "maddhisi4"
1647 [(set (match_operand:SI 0 "s_register_operand" "=r")
1648 (plus:SI (mult:SI (sign_extend:SI
1649 (match_operand:HI 1 "s_register_operand" "r"))
1650 (sign_extend:SI
1651 (match_operand:HI 2 "s_register_operand" "r")))
1652 (match_operand:SI 3 "s_register_operand" "r")))]
1653 "TARGET_DSP_MULTIPLY"
1654 "smlabb%?\\t%0, %1, %2, %3"
1655 [(set_attr "type" "smlaxy")
1656 (set_attr "predicable" "yes")]
1657 )
1658
1659 ;; Note: there is no maddhisi4bt because this one is the canonical form
1660 (define_insn "*maddhisi4tb"
1661 [(set (match_operand:SI 0 "s_register_operand" "=r")
1662 (plus:SI (mult:SI (ashiftrt:SI
1663 (match_operand:SI 1 "s_register_operand" "r")
1664 (const_int 16))
1665 (sign_extend:SI
1666 (match_operand:HI 2 "s_register_operand" "r")))
1667 (match_operand:SI 3 "s_register_operand" "r")))]
1668 "TARGET_DSP_MULTIPLY"
1669 "smlatb%?\\t%0, %1, %2, %3"
1670 [(set_attr "type" "smlaxy")
1671 (set_attr "predicable" "yes")]
1672 )
1673
1674 (define_insn "*maddhisi4tt"
1675 [(set (match_operand:SI 0 "s_register_operand" "=r")
1676 (plus:SI (mult:SI (ashiftrt:SI
1677 (match_operand:SI 1 "s_register_operand" "r")
1678 (const_int 16))
1679 (ashiftrt:SI
1680 (match_operand:SI 2 "s_register_operand" "r")
1681 (const_int 16)))
1682 (match_operand:SI 3 "s_register_operand" "r")))]
1683 "TARGET_DSP_MULTIPLY"
1684 "smlatt%?\\t%0, %1, %2, %3"
1685 [(set_attr "type" "smlaxy")
1686 (set_attr "predicable" "yes")]
1687 )
1688
1689 (define_insn "maddhidi4"
1690 [(set (match_operand:DI 0 "s_register_operand" "=r")
1691 (plus:DI
1692 (mult:DI (sign_extend:DI
1693 (match_operand:HI 1 "s_register_operand" "r"))
1694 (sign_extend:DI
1695 (match_operand:HI 2 "s_register_operand" "r")))
1696 (match_operand:DI 3 "s_register_operand" "0")))]
1697 "TARGET_DSP_MULTIPLY"
1698 "smlalbb%?\\t%Q0, %R0, %1, %2"
1699 [(set_attr "type" "smlalxy")
1700 (set_attr "predicable" "yes")])
1701
1702 ;; Note: there is no maddhidi4bt because this one is the canonical form
1703 (define_insn "*maddhidi4tb"
1704 [(set (match_operand:DI 0 "s_register_operand" "=r")
1705 (plus:DI
1706 (mult:DI (sign_extend:DI
1707 (ashiftrt:SI
1708 (match_operand:SI 1 "s_register_operand" "r")
1709 (const_int 16)))
1710 (sign_extend:DI
1711 (match_operand:HI 2 "s_register_operand" "r")))
1712 (match_operand:DI 3 "s_register_operand" "0")))]
1713 "TARGET_DSP_MULTIPLY"
1714 "smlaltb%?\\t%Q0, %R0, %1, %2"
1715 [(set_attr "type" "smlalxy")
1716 (set_attr "predicable" "yes")])
1717
1718 (define_insn "*maddhidi4tt"
1719 [(set (match_operand:DI 0 "s_register_operand" "=r")
1720 (plus:DI
1721 (mult:DI (sign_extend:DI
1722 (ashiftrt:SI
1723 (match_operand:SI 1 "s_register_operand" "r")
1724 (const_int 16)))
1725 (sign_extend:DI
1726 (ashiftrt:SI
1727 (match_operand:SI 2 "s_register_operand" "r")
1728 (const_int 16))))
1729 (match_operand:DI 3 "s_register_operand" "0")))]
1730 "TARGET_DSP_MULTIPLY"
1731 "smlaltt%?\\t%Q0, %R0, %1, %2"
1732 [(set_attr "type" "smlalxy")
1733 (set_attr "predicable" "yes")])
1734
1735 (define_expand "mulsf3"
1736 [(set (match_operand:SF 0 "s_register_operand")
1737 (mult:SF (match_operand:SF 1 "s_register_operand")
1738 (match_operand:SF 2 "s_register_operand")))]
1739 "TARGET_32BIT && TARGET_HARD_FLOAT"
1740 "
1741 ")
1742
1743 (define_expand "muldf3"
1744 [(set (match_operand:DF 0 "s_register_operand")
1745 (mult:DF (match_operand:DF 1 "s_register_operand")
1746 (match_operand:DF 2 "s_register_operand")))]
1747 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1748 "
1749 ")
1750 \f
1751 ;; Division insns
1752
1753 (define_expand "divsf3"
1754 [(set (match_operand:SF 0 "s_register_operand")
1755 (div:SF (match_operand:SF 1 "s_register_operand")
1756 (match_operand:SF 2 "s_register_operand")))]
1757 "TARGET_32BIT && TARGET_HARD_FLOAT"
1758 "")
1759
1760 (define_expand "divdf3"
1761 [(set (match_operand:DF 0 "s_register_operand")
1762 (div:DF (match_operand:DF 1 "s_register_operand")
1763 (match_operand:DF 2 "s_register_operand")))]
1764 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
1765 "")
1766 \f
1767
1768 ; Expand logical operations. The mid-end expander does not split off memory
1769 ; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
1770 ; So an explicit expander is needed to generate better code.
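; For instance (an illustrative sketch only; register numbers are arbitrary),
; an "iordi3" of two register pairs is expanded into two independent SImode
; ORRs on the low and high halves:
;   orr  r0, r2, r4   @ low words
;   orr  r1, r3, r5   @ high words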
1771
1772 (define_expand "<LOGICAL:optab>di3"
1773 [(set (match_operand:DI 0 "s_register_operand")
1774 (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
1775 (match_operand:DI 2 "arm_<optab>di_operand")))]
1776 "TARGET_32BIT"
1777 {
1778 rtx low = simplify_gen_binary (<CODE>, SImode,
1779 gen_lowpart (SImode, operands[1]),
1780 gen_lowpart (SImode, operands[2]));
1781 rtx high = simplify_gen_binary (<CODE>, SImode,
1782 gen_highpart (SImode, operands[1]),
1783 gen_highpart_mode (SImode, DImode,
1784 operands[2]));
1785
1786 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
1787 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
1788 DONE;
1789 }
1790 )
1791
1792 (define_expand "one_cmpldi2"
1793 [(set (match_operand:DI 0 "s_register_operand")
1794 (not:DI (match_operand:DI 1 "s_register_operand")))]
1795 "TARGET_32BIT"
1796 {
1797 rtx low = simplify_gen_unary (NOT, SImode,
1798 gen_lowpart (SImode, operands[1]),
1799 SImode);
1800 rtx high = simplify_gen_unary (NOT, SImode,
1801 gen_highpart_mode (SImode, DImode,
1802 operands[1]),
1803 SImode);
1804
1805 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
1806 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
1807 DONE;
1808 }
1809 )
1810
1811 ;; Split DImode and, ior, xor operations. Simply perform the logical
1812 ;; operation on the upper and lower halves of the registers.
1813 ;; This is needed for atomic operations in arm_split_atomic_op.
1814 ;; Avoid splitting IWMMXT instructions.
1815 (define_split
1816 [(set (match_operand:DI 0 "s_register_operand" "")
1817 (match_operator:DI 6 "logical_binary_operator"
1818 [(match_operand:DI 1 "s_register_operand" "")
1819 (match_operand:DI 2 "s_register_operand" "")]))]
1820 "TARGET_32BIT && reload_completed
1821 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1822 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1823 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1824 "
1825 {
1826 operands[3] = gen_highpart (SImode, operands[0]);
1827 operands[0] = gen_lowpart (SImode, operands[0]);
1828 operands[4] = gen_highpart (SImode, operands[1]);
1829 operands[1] = gen_lowpart (SImode, operands[1]);
1830 operands[5] = gen_highpart (SImode, operands[2]);
1831 operands[2] = gen_lowpart (SImode, operands[2]);
1832 }"
1833 )
1834
1835 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
1836 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
1837 (define_split
1838 [(set (match_operand:DI 0 "s_register_operand")
1839 (not:DI (match_operand:DI 1 "s_register_operand")))]
1840 "TARGET_32BIT"
1841 [(set (match_dup 0) (not:SI (match_dup 1)))
1842 (set (match_dup 2) (not:SI (match_dup 3)))]
1843 "
1844 {
1845 operands[2] = gen_highpart (SImode, operands[0]);
1846 operands[0] = gen_lowpart (SImode, operands[0]);
1847 operands[3] = gen_highpart (SImode, operands[1]);
1848 operands[1] = gen_lowpart (SImode, operands[1]);
1849 }"
1850 )
1851
1852 (define_expand "andsi3"
1853 [(set (match_operand:SI 0 "s_register_operand")
1854 (and:SI (match_operand:SI 1 "s_register_operand")
1855 (match_operand:SI 2 "reg_or_int_operand")))]
1856 "TARGET_EITHER"
1857 "
1858 if (TARGET_32BIT)
1859 {
1860 if (CONST_INT_P (operands[2]))
1861 {
1862 if (INTVAL (operands[2]) == 255 && arm_arch6)
1863 {
1864 operands[1] = convert_to_mode (QImode, operands[1], 1);
1865 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
1866 operands[1]));
1867 DONE;
1868 }
1869 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
1870 operands[2] = force_reg (SImode, operands[2]);
1871 else
1872 {
1873 arm_split_constant (AND, SImode, NULL_RTX,
1874 INTVAL (operands[2]), operands[0],
1875 operands[1],
1876 optimize && can_create_pseudo_p ());
1877
1878 DONE;
1879 }
1880 }
1881 }
1882 else /* TARGET_THUMB1 */
1883 {
1884 if (!CONST_INT_P (operands[2]))
1885 {
1886 rtx tmp = force_reg (SImode, operands[2]);
1887 if (rtx_equal_p (operands[0], operands[1]))
1888 operands[2] = tmp;
1889 else
1890 {
1891 operands[2] = operands[1];
1892 operands[1] = tmp;
1893 }
1894 }
1895 else
1896 {
1897 int i;
1898
1899 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1900 {
1901 operands[2] = force_reg (SImode,
1902 GEN_INT (~INTVAL (operands[2])));
1903
1904 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
1905
1906 DONE;
1907 }
1908
1909 for (i = 9; i <= 31; i++)
1910 {
1911 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
1912 {
1913 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1914 const0_rtx));
1915 DONE;
1916 }
1917 else if ((HOST_WIDE_INT_1 << i) - 1
1918 == ~INTVAL (operands[2]))
1919 {
1920 rtx shift = GEN_INT (i);
1921 rtx reg = gen_reg_rtx (SImode);
1922
1923 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1924 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1925
1926 DONE;
1927 }
1928 }
1929
1930 operands[2] = force_reg (SImode, operands[2]);
1931 }
1932 }
1933 "
1934 )
1935
1936 ; ??? Check split length for Thumb-2
1937 (define_insn_and_split "*arm_andsi3_insn"
1938 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
1939 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
1940 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
1941 "TARGET_32BIT"
1942 "@
1943 and%?\\t%0, %1, %2
1944 and%?\\t%0, %1, %2
1945 bic%?\\t%0, %1, #%B2
1946 and%?\\t%0, %1, %2
1947 #"
1948 "TARGET_32BIT
1949 && CONST_INT_P (operands[2])
1950 && !(const_ok_for_arm (INTVAL (operands[2]))
1951 || const_ok_for_arm (~INTVAL (operands[2])))"
1952 [(clobber (const_int 0))]
1953 "
1954 arm_split_constant (AND, SImode, curr_insn,
1955 INTVAL (operands[2]), operands[0], operands[1], 0);
1956 DONE;
1957 "
1958 [(set_attr "length" "4,4,4,4,16")
1959 (set_attr "predicable" "yes")
1960 (set_attr "predicable_short_it" "no,yes,no,no,no")
1961 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
1962 )
1963
1964 (define_insn "*andsi3_compare0"
1965 [(set (reg:CC_NOOV CC_REGNUM)
1966 (compare:CC_NOOV
1967 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1968 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
1969 (const_int 0)))
1970 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1971 (and:SI (match_dup 1) (match_dup 2)))]
1972 "TARGET_32BIT"
1973 "@
1974 ands%?\\t%0, %1, %2
1975 bics%?\\t%0, %1, #%B2
1976 ands%?\\t%0, %1, %2"
1977 [(set_attr "conds" "set")
1978 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
1979 )
1980
1981 (define_insn "*andsi3_compare0_scratch"
1982 [(set (reg:CC_NOOV CC_REGNUM)
1983 (compare:CC_NOOV
1984 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
1985 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
1986 (const_int 0)))
1987 (clobber (match_scratch:SI 2 "=X,r,X"))]
1988 "TARGET_32BIT"
1989 "@
1990 tst%?\\t%0, %1
1991 bics%?\\t%2, %0, #%B1
1992 tst%?\\t%0, %1"
1993 [(set_attr "conds" "set")
1994 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
1995 )
1996
1997 (define_insn "*zeroextractsi_compare0_scratch"
1998 [(set (reg:CC_NOOV CC_REGNUM)
1999 (compare:CC_NOOV (zero_extract:SI
2000 (match_operand:SI 0 "s_register_operand" "r")
2001 (match_operand 1 "const_int_operand" "n")
2002 (match_operand 2 "const_int_operand" "n"))
2003 (const_int 0)))]
2004 "TARGET_32BIT
2005 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2006 && INTVAL (operands[1]) > 0
2007 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2008 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2009 "*
2010 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2011 << INTVAL (operands[2]));
2012 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2013 return \"\";
2014 "
2015 [(set_attr "conds" "set")
2016 (set_attr "predicable" "yes")
2017 (set_attr "type" "logics_imm")]
2018 )
2019
2020 (define_insn_and_split "*ne_zeroextractsi"
2021 [(set (match_operand:SI 0 "s_register_operand" "=r")
2022 (ne:SI (zero_extract:SI
2023 (match_operand:SI 1 "s_register_operand" "r")
2024 (match_operand:SI 2 "const_int_operand" "n")
2025 (match_operand:SI 3 "const_int_operand" "n"))
2026 (const_int 0)))
2027 (clobber (reg:CC CC_REGNUM))]
2028 "TARGET_32BIT
2029 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2030 && INTVAL (operands[2]) > 0
2031 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2032 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2033 "#"
2034 "TARGET_32BIT
2035 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2036 && INTVAL (operands[2]) > 0
2037 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2038 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2039 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2040 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2041 (const_int 0)))
2042 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2043 (set (match_dup 0)
2044 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2045 (match_dup 0) (const_int 1)))]
2046 "
2047 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2048 << INTVAL (operands[3]));
2049 "
2050 [(set_attr "conds" "clob")
2051 (set (attr "length")
2052 (if_then_else (eq_attr "is_thumb" "yes")
2053 (const_int 12)
2054 (const_int 8)))
2055 (set_attr "type" "multiple")]
2056 )
2057
2058 (define_insn_and_split "*ne_zeroextractsi_shifted"
2059 [(set (match_operand:SI 0 "s_register_operand" "=r")
2060 (ne:SI (zero_extract:SI
2061 (match_operand:SI 1 "s_register_operand" "r")
2062 (match_operand:SI 2 "const_int_operand" "n")
2063 (const_int 0))
2064 (const_int 0)))
2065 (clobber (reg:CC CC_REGNUM))]
2066 "TARGET_ARM"
2067 "#"
2068 "TARGET_ARM"
2069 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2070 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2071 (const_int 0)))
2072 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2073 (set (match_dup 0)
2074 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2075 (match_dup 0) (const_int 1)))]
2076 "
2077 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2078 "
2079 [(set_attr "conds" "clob")
2080 (set_attr "length" "8")
2081 (set_attr "type" "multiple")]
2082 )
2083
2084 (define_insn_and_split "*ite_ne_zeroextractsi"
2085 [(set (match_operand:SI 0 "s_register_operand" "=r")
2086 (if_then_else:SI (ne (zero_extract:SI
2087 (match_operand:SI 1 "s_register_operand" "r")
2088 (match_operand:SI 2 "const_int_operand" "n")
2089 (match_operand:SI 3 "const_int_operand" "n"))
2090 (const_int 0))
2091 (match_operand:SI 4 "arm_not_operand" "rIK")
2092 (const_int 0)))
2093 (clobber (reg:CC CC_REGNUM))]
2094 "TARGET_ARM
2095 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2096 && INTVAL (operands[2]) > 0
2097 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2098 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2099 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2100 "#"
2101 "TARGET_ARM
2102 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2103 && INTVAL (operands[2]) > 0
2104 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2105 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2106 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2107 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2108 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2109 (const_int 0)))
2110 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2111 (set (match_dup 0)
2112 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2113 (match_dup 0) (match_dup 4)))]
2114 "
2115 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2116 << INTVAL (operands[3]));
2117 "
2118 [(set_attr "conds" "clob")
2119 (set_attr "length" "8")
2120 (set_attr "type" "multiple")]
2121 )
2122
2123 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2124 [(set (match_operand:SI 0 "s_register_operand" "=r")
2125 (if_then_else:SI (ne (zero_extract:SI
2126 (match_operand:SI 1 "s_register_operand" "r")
2127 (match_operand:SI 2 "const_int_operand" "n")
2128 (const_int 0))
2129 (const_int 0))
2130 (match_operand:SI 3 "arm_not_operand" "rIK")
2131 (const_int 0)))
2132 (clobber (reg:CC CC_REGNUM))]
2133 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2134 "#"
2135 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2136 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2137 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2138 (const_int 0)))
2139 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2140 (set (match_dup 0)
2141 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2142 (match_dup 0) (match_dup 3)))]
2143 "
2144 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2145 "
2146 [(set_attr "conds" "clob")
2147 (set_attr "length" "8")
2148 (set_attr "type" "multiple")]
2149 )
2150
2151 ;; ??? Thumb-2 has bitfield insert/extract instructions; consider using them here.
2152 (define_split
2153 [(set (match_operand:SI 0 "s_register_operand" "")
2154 (match_operator:SI 1 "shiftable_operator"
2155 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2156 (match_operand:SI 3 "const_int_operand" "")
2157 (match_operand:SI 4 "const_int_operand" ""))
2158 (match_operand:SI 5 "s_register_operand" "")]))
2159 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2160 "TARGET_ARM"
2161 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2162 (set (match_dup 0)
2163 (match_op_dup 1
2164 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2165 (match_dup 5)]))]
2166 "{
2167 HOST_WIDE_INT temp = INTVAL (operands[3]);
2168
2169 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2170 operands[4] = GEN_INT (32 - temp);
2171 }"
2172 )
2173
2174 (define_split
2175 [(set (match_operand:SI 0 "s_register_operand" "")
2176 (match_operator:SI 1 "shiftable_operator"
2177 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2178 (match_operand:SI 3 "const_int_operand" "")
2179 (match_operand:SI 4 "const_int_operand" ""))
2180 (match_operand:SI 5 "s_register_operand" "")]))
2181 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2182 "TARGET_ARM"
2183 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2184 (set (match_dup 0)
2185 (match_op_dup 1
2186 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2187 (match_dup 5)]))]
2188 "{
2189 HOST_WIDE_INT temp = INTVAL (operands[3]);
2190
2191 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2192 operands[4] = GEN_INT (32 - temp);
2193 }"
2194 )
2195
2196 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2197 ;;; represented by the bitfield, then this will produce incorrect results.
2198 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2199 ;;; which have a real bit-field insert instruction, the truncation happens
2200 ;;; in the bit-field insert instruction itself. Since arm does not have a
2201 ;;; bit-field insert instruction, we would have to emit code here to truncate
2202 ;;; the value before we insert. This loses some of the advantage of having
2203 ;;; this insv pattern, so this pattern needs to be reevaluated.
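;;; For example, inserting the value 0x1ff into an 8-bit field would only be
;;; correct if the value were first truncated to 0xff; as noted above, nothing
;;; in this pattern guarantees that truncation.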
2204
2205 (define_expand "insv"
2206 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
2207 (match_operand 1 "general_operand")
2208 (match_operand 2 "general_operand"))
2209 (match_operand 3 "reg_or_int_operand"))]
2210 "TARGET_ARM || arm_arch_thumb2"
2211 "
2212 {
2213 int start_bit = INTVAL (operands[2]);
2214 int width = INTVAL (operands[1]);
2215 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
2216 rtx target, subtarget;
2217
2218 if (arm_arch_thumb2)
2219 {
2220 if (unaligned_access && MEM_P (operands[0])
2221 && s_register_operand (operands[3], GET_MODE (operands[3]))
2222 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2223 {
2224 rtx base_addr;
2225
2226 if (BYTES_BIG_ENDIAN)
2227 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2228 - start_bit;
2229
2230 if (width == 32)
2231 {
2232 base_addr = adjust_address (operands[0], SImode,
2233 start_bit / BITS_PER_UNIT);
2234 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2235 }
2236 else
2237 {
2238 rtx tmp = gen_reg_rtx (HImode);
2239
2240 base_addr = adjust_address (operands[0], HImode,
2241 start_bit / BITS_PER_UNIT);
2242 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2243 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2244 }
2245 DONE;
2246 }
2247 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2248 {
2249 bool use_bfi = TRUE;
2250
2251 if (CONST_INT_P (operands[3]))
2252 {
2253 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2254
2255 if (val == 0)
2256 {
2257 emit_insn (gen_insv_zero (operands[0], operands[1],
2258 operands[2]));
2259 DONE;
2260 }
2261
2262 /* See if the set can be done with a single orr instruction. */
2263 if (val == mask && const_ok_for_arm (val << start_bit))
2264 use_bfi = FALSE;
2265 }
2266
2267 if (use_bfi)
2268 {
2269 if (!REG_P (operands[3]))
2270 operands[3] = force_reg (SImode, operands[3]);
2271
2272 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2273 operands[3]));
2274 DONE;
2275 }
2276 }
2277 else
2278 FAIL;
2279 }
2280
2281 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
2282 FAIL;
2283
2284 target = copy_rtx (operands[0]);
2285 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2286 subreg as the final target. */
2287 if (GET_CODE (target) == SUBREG)
2288 {
2289 subtarget = gen_reg_rtx (SImode);
2290 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2291 < GET_MODE_SIZE (SImode))
2292 target = SUBREG_REG (target);
2293 }
2294 else
2295 subtarget = target;
2296
2297 if (CONST_INT_P (operands[3]))
2298 {
2299 /* Since we are inserting a known constant, we may be able to
2300 reduce the number of bits that we have to clear so that
2301 the mask becomes simple. */
2302 /* ??? This code does not check to see if the new mask is actually
2303 simpler. It may not be. */
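      /* Illustrative example (values are arbitrary): inserting the constant
	 0xF0 into an 8-bit field at bit 0 only needs the low four bits of the
	 destination cleared, so the sequence can be an AND with ~0x0F followed
	 by an ORR with 0xF0, instead of clearing the whole 8-bit field.  */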
2304 rtx op1 = gen_reg_rtx (SImode);
2305 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2306 start of this pattern. */
2307 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2308 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2309
2310 emit_insn (gen_andsi3 (op1, operands[0],
2311 gen_int_mode (~mask2, SImode)));
2312 emit_insn (gen_iorsi3 (subtarget, op1,
2313 gen_int_mode (op3_value << start_bit, SImode)));
2314 }
2315 else if (start_bit == 0
2316 && !(const_ok_for_arm (mask)
2317 || const_ok_for_arm (~mask)))
2318 {
2319 /* A trick: since we are setting the bottom bits in the word,
2320 we can shift operand[3] up, operand[0] down, OR them together
2321 and rotate the result back again. This takes 3 insns, and
2322 the third might be mergeable into another op. */
2323 /* The shift up copes with the possibility that operand[3] is
2324 wider than the bitfield. */
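	  /* Illustrative sketch (register names arbitrary): for a 12-bit
	     field at bit 0 (mask 0xfff, which is not a valid ARM immediate)
	     the emitted sequence is roughly
		lsl	t0, op3, #20
		lsr	t1, op0, #12
		orr	t1, t1, t0
		ror	dst, t1, #20
	     and the shift feeding the ORR can usually be merged into it,
	     giving the three insns mentioned above.  */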
2325 rtx op0 = gen_reg_rtx (SImode);
2326 rtx op1 = gen_reg_rtx (SImode);
2327
2328 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2329 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2330 emit_insn (gen_iorsi3 (op1, op1, op0));
2331 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2332 }
2333 else if ((width + start_bit == 32)
2334 && !(const_ok_for_arm (mask)
2335 || const_ok_for_arm (~mask)))
2336 {
2337 /* Similar trick, but slightly less efficient. */
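	  /* Illustrative sketch: for a 12-bit field in bits 20..31
	     (width + start_bit == 32) this shifts op3 left by 20 so its field
	     lands in the top bits, clears the top 12 bits of the old
	     destination value with a shift-left/shift-right pair, and then
	     ORs the two halves together.  */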
2338
2339 rtx op0 = gen_reg_rtx (SImode);
2340 rtx op1 = gen_reg_rtx (SImode);
2341
2342 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2343 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2344 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2345 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2346 }
2347 else
2348 {
2349 rtx op0 = gen_int_mode (mask, SImode);
2350 rtx op1 = gen_reg_rtx (SImode);
2351 rtx op2 = gen_reg_rtx (SImode);
2352
2353 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2354 {
2355 rtx tmp = gen_reg_rtx (SImode);
2356
2357 emit_insn (gen_movsi (tmp, op0));
2358 op0 = tmp;
2359 }
2360
2361 /* Mask out any bits in operand[3] that are not needed. */
2362 emit_insn (gen_andsi3 (op1, operands[3], op0));
2363
2364 if (CONST_INT_P (op0)
2365 && (const_ok_for_arm (mask << start_bit)
2366 || const_ok_for_arm (~(mask << start_bit))))
2367 {
2368 op0 = gen_int_mode (~(mask << start_bit), SImode);
2369 emit_insn (gen_andsi3 (op2, operands[0], op0));
2370 }
2371 else
2372 {
2373 if (CONST_INT_P (op0))
2374 {
2375 rtx tmp = gen_reg_rtx (SImode);
2376
2377 emit_insn (gen_movsi (tmp, op0));
2378 op0 = tmp;
2379 }
2380
2381 if (start_bit != 0)
2382 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2383
2384 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2385 }
2386
2387 if (start_bit != 0)
2388 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2389
2390 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2391 }
2392
2393 if (subtarget != target)
2394 {
2395 /* If TARGET is still a SUBREG, then it must be wider than a word,
2396 so we must be careful only to set the subword we were asked to. */
2397 if (GET_CODE (target) == SUBREG)
2398 emit_move_insn (target, subtarget);
2399 else
2400 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2401 }
2402
2403 DONE;
2404 }"
2405 )
2406
2407 (define_insn "insv_zero"
2408 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2409 (match_operand:SI 1 "const_int_M_operand" "M")
2410 (match_operand:SI 2 "const_int_M_operand" "M"))
2411 (const_int 0))]
2412 "arm_arch_thumb2"
2413 "bfc%?\t%0, %2, %1"
2414 [(set_attr "length" "4")
2415 (set_attr "predicable" "yes")
2416 (set_attr "type" "bfm")]
2417 )
2418
2419 (define_insn "insv_t2"
2420 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2421 (match_operand:SI 1 "const_int_M_operand" "M")
2422 (match_operand:SI 2 "const_int_M_operand" "M"))
2423 (match_operand:SI 3 "s_register_operand" "r"))]
2424 "arm_arch_thumb2"
2425 "bfi%?\t%0, %3, %2, %1"
2426 [(set_attr "length" "4")
2427 (set_attr "predicable" "yes")
2428 (set_attr "type" "bfm")]
2429 )
2430
2431 (define_insn "andsi_notsi_si"
2432 [(set (match_operand:SI 0 "s_register_operand" "=r")
2433 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2434 (match_operand:SI 1 "s_register_operand" "r")))]
2435 "TARGET_32BIT"
2436 "bic%?\\t%0, %1, %2"
2437 [(set_attr "predicable" "yes")
2438 (set_attr "type" "logic_reg")]
2439 )
2440
2441 (define_insn "andsi_not_shiftsi_si"
2442 [(set (match_operand:SI 0 "s_register_operand" "=r")
2443 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2444 [(match_operand:SI 2 "s_register_operand" "r")
2445 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2446 (match_operand:SI 1 "s_register_operand" "r")))]
2447 "TARGET_ARM"
2448 "bic%?\\t%0, %1, %2%S4"
2449 [(set_attr "predicable" "yes")
2450 (set_attr "shift" "2")
2451 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2452 (const_string "logic_shift_imm")
2453 (const_string "logic_shift_reg")))]
2454 )
2455
2456 ;; Shifted bics pattern used to set up the CC status register without
2457 ;; reusing the bics output.  The pattern restricts the Thumb-2 shift operand
2458 ;; to a constant, since bics for Thumb-2 does not support a shift by register.
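;; For example (illustrative registers) this emits something like
;;   bics r4, r3, r1, lsl #2
;; i.e. r4 = r3 & ~(r1 << 2) with the flags set; on Thumb-2 only an immediate
;; shift amount is allowed here, hence the CONST_INT_P check below.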
2459 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
2460 [(set (reg:CC_NOOV CC_REGNUM)
2461 (compare:CC_NOOV
2462 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2463 [(match_operand:SI 1 "s_register_operand" "r")
2464 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2465 (match_operand:SI 3 "s_register_operand" "r"))
2466 (const_int 0)))
2467 (clobber (match_scratch:SI 4 "=r"))]
2468 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2469 "bics%?\\t%4, %3, %1%S0"
2470 [(set_attr "predicable" "yes")
2471 (set_attr "conds" "set")
2472 (set_attr "shift" "1")
2473 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2474 (const_string "logic_shift_imm")
2475 (const_string "logic_shift_reg")))]
2476 )
2477
2478 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the bics result is also
2479 ;; reused later.
2480 (define_insn "andsi_not_shiftsi_si_scc"
2481 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2482 (compare:CC_NOOV
2483 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2484 [(match_operand:SI 1 "s_register_operand" "r")
2485 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2486 (match_operand:SI 3 "s_register_operand" "r"))
2487 (const_int 0)))
2488 (set (match_operand:SI 4 "s_register_operand" "=r")
2489 (and:SI (not:SI (match_op_dup 0
2490 [(match_dup 1)
2491 (match_dup 2)]))
2492 (match_dup 3)))])]
2493 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2494 "bics%?\\t%4, %3, %1%S0"
2495 [(set_attr "predicable" "yes")
2496 (set_attr "conds" "set")
2497 (set_attr "shift" "1")
2498 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2499 (const_string "logic_shift_imm")
2500 (const_string "logic_shift_reg")))]
2501 )
2502
2503 (define_insn "*andsi_notsi_si_compare0"
2504 [(set (reg:CC_NOOV CC_REGNUM)
2505 (compare:CC_NOOV
2506 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2507 (match_operand:SI 1 "s_register_operand" "r"))
2508 (const_int 0)))
2509 (set (match_operand:SI 0 "s_register_operand" "=r")
2510 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2511 "TARGET_32BIT"
2512 "bics\\t%0, %1, %2"
2513 [(set_attr "conds" "set")
2514 (set_attr "type" "logics_shift_reg")]
2515 )
2516
2517 (define_insn "*andsi_notsi_si_compare0_scratch"
2518 [(set (reg:CC_NOOV CC_REGNUM)
2519 (compare:CC_NOOV
2520 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2521 (match_operand:SI 1 "s_register_operand" "r"))
2522 (const_int 0)))
2523 (clobber (match_scratch:SI 0 "=r"))]
2524 "TARGET_32BIT"
2525 "bics\\t%0, %1, %2"
2526 [(set_attr "conds" "set")
2527 (set_attr "type" "logics_shift_reg")]
2528 )
2529
2530 (define_expand "iorsi3"
2531 [(set (match_operand:SI 0 "s_register_operand")
2532 (ior:SI (match_operand:SI 1 "s_register_operand")
2533 (match_operand:SI 2 "reg_or_int_operand")))]
2534 "TARGET_EITHER"
2535 "
2536 if (CONST_INT_P (operands[2]))
2537 {
2538 if (TARGET_32BIT)
2539 {
2540 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
2541 operands[2] = force_reg (SImode, operands[2]);
2542 else
2543 {
2544 arm_split_constant (IOR, SImode, NULL_RTX,
2545 INTVAL (operands[2]), operands[0],
2546 operands[1],
2547 optimize && can_create_pseudo_p ());
2548 DONE;
2549 }
2550 }
2551 else /* TARGET_THUMB1 */
2552 {
2553 rtx tmp = force_reg (SImode, operands[2]);
2554 if (rtx_equal_p (operands[0], operands[1]))
2555 operands[2] = tmp;
2556 else
2557 {
2558 operands[2] = operands[1];
2559 operands[1] = tmp;
2560 }
2561 }
2562 }
2563 "
2564 )
2565
2566 (define_insn_and_split "*iorsi3_insn"
2567 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2568 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2569 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2570 "TARGET_32BIT"
2571 "@
2572 orr%?\\t%0, %1, %2
2573 orr%?\\t%0, %1, %2
2574 orn%?\\t%0, %1, #%B2
2575 orr%?\\t%0, %1, %2
2576 #"
2577 "TARGET_32BIT
2578 && CONST_INT_P (operands[2])
2579 && !(const_ok_for_arm (INTVAL (operands[2]))
2580 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2581 [(clobber (const_int 0))]
2582 {
2583 arm_split_constant (IOR, SImode, curr_insn,
2584 INTVAL (operands[2]), operands[0], operands[1], 0);
2585 DONE;
2586 }
2587 [(set_attr "length" "4,4,4,4,16")
2588 (set_attr "arch" "32,t2,t2,32,32")
2589 (set_attr "predicable" "yes")
2590 (set_attr "predicable_short_it" "no,yes,no,no,no")
2591 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
2592 )
2593
2594 (define_peephole2
2595 [(match_scratch:SI 3 "r")
2596 (set (match_operand:SI 0 "arm_general_register_operand" "")
2597 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2598 (match_operand:SI 2 "const_int_operand" "")))]
2599 "TARGET_ARM
2600 && !const_ok_for_arm (INTVAL (operands[2]))
2601 && const_ok_for_arm (~INTVAL (operands[2]))"
2602 [(set (match_dup 3) (match_dup 2))
2603 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2604 ""
2605 )
2606
2607 (define_insn "*iorsi3_compare0"
2608 [(set (reg:CC_NOOV CC_REGNUM)
2609 (compare:CC_NOOV
2610 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2611 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2612 (const_int 0)))
2613 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
2614 (ior:SI (match_dup 1) (match_dup 2)))]
2615 "TARGET_32BIT"
2616 "orrs%?\\t%0, %1, %2"
2617 [(set_attr "conds" "set")
2618 (set_attr "arch" "*,t2,*")
2619 (set_attr "length" "4,2,4")
2620 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
2621 )
2622
2623 (define_insn "*iorsi3_compare0_scratch"
2624 [(set (reg:CC_NOOV CC_REGNUM)
2625 (compare:CC_NOOV
2626 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2627 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2628 (const_int 0)))
2629 (clobber (match_scratch:SI 0 "=r,l,r"))]
2630 "TARGET_32BIT"
2631 "orrs%?\\t%0, %1, %2"
2632 [(set_attr "conds" "set")
2633 (set_attr "arch" "*,t2,*")
2634 (set_attr "length" "4,2,4")
2635 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
2636 )
2637
2638 (define_expand "xorsi3"
2639 [(set (match_operand:SI 0 "s_register_operand")
2640 (xor:SI (match_operand:SI 1 "s_register_operand")
2641 (match_operand:SI 2 "reg_or_int_operand")))]
2642 "TARGET_EITHER"
2643 "if (CONST_INT_P (operands[2]))
2644 {
2645 if (TARGET_32BIT)
2646 {
2647 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
2648 operands[2] = force_reg (SImode, operands[2]);
2649 else
2650 {
2651 arm_split_constant (XOR, SImode, NULL_RTX,
2652 INTVAL (operands[2]), operands[0],
2653 operands[1],
2654 optimize && can_create_pseudo_p ());
2655 DONE;
2656 }
2657 }
2658 else /* TARGET_THUMB1 */
2659 {
2660 rtx tmp = force_reg (SImode, operands[2]);
2661 if (rtx_equal_p (operands[0], operands[1]))
2662 operands[2] = tmp;
2663 else
2664 {
2665 operands[2] = operands[1];
2666 operands[1] = tmp;
2667 }
2668 }
2669 }"
2670 )
2671
2672 (define_insn_and_split "*arm_xorsi3"
2673 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
2674 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
2675 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
2676 "TARGET_32BIT"
2677 "@
2678 eor%?\\t%0, %1, %2
2679 eor%?\\t%0, %1, %2
2680 eor%?\\t%0, %1, %2
2681 #"
2682 "TARGET_32BIT
2683 && CONST_INT_P (operands[2])
2684 && !const_ok_for_arm (INTVAL (operands[2]))"
2685 [(clobber (const_int 0))]
2686 {
2687 arm_split_constant (XOR, SImode, curr_insn,
2688 INTVAL (operands[2]), operands[0], operands[1], 0);
2689 DONE;
2690 }
2691 [(set_attr "length" "4,4,4,16")
2692 (set_attr "predicable" "yes")
2693 (set_attr "predicable_short_it" "no,yes,no,no")
2694 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
2695 )
2696
2697 (define_insn "*xorsi3_compare0"
2698 [(set (reg:CC_NOOV CC_REGNUM)
2699 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
2700 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
2701 (const_int 0)))
2702 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2703 (xor:SI (match_dup 1) (match_dup 2)))]
2704 "TARGET_32BIT"
2705 "eors%?\\t%0, %1, %2"
2706 [(set_attr "conds" "set")
2707 (set_attr "type" "logics_imm,logics_reg")]
2708 )
2709
2710 (define_insn "*xorsi3_compare0_scratch"
2711 [(set (reg:CC_NOOV CC_REGNUM)
2712 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
2713 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
2714 (const_int 0)))]
2715 "TARGET_32BIT"
2716 "teq%?\\t%0, %1"
2717 [(set_attr "conds" "set")
2718 (set_attr "type" "logics_imm,logics_reg")]
2719 )
2720
2721 ; By splitting (IOR (AND (NOT A) (NOT B)) C) into D = AND (IOR A B) (NOT C)
2722 ; followed by (NOT D), we can sometimes merge the final NOT into one of the
2723 ; following insns.
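; (This is just De Morgan: (~A & ~B) | C == ~((A | B) & ~C), so computing
; D = (A | B) & ~C and then inverting D yields the required value.)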
2724
2725 (define_split
2726 [(set (match_operand:SI 0 "s_register_operand" "")
2727 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2728 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2729 (match_operand:SI 3 "arm_rhs_operand" "")))
2730 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2731 "TARGET_32BIT"
2732 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2733 (not:SI (match_dup 3))))
2734 (set (match_dup 0) (not:SI (match_dup 4)))]
2735 ""
2736 )
2737
2738 (define_insn_and_split "*andsi_iorsi3_notsi"
2739 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2740 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
2741 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2742 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2743 "TARGET_32BIT"
2744 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2745 "&& reload_completed"
2746 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
2747 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
2748 {
2749 /* If operands[3] is a constant make sure to fold the NOT into it
2750 to avoid creating a NOT of a CONST_INT. */
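    /* E.g. if operands[3] is (const_int 5) the second set becomes
       (and:SI (reg) (const_int -6)), rather than the non-canonical
       (not:SI (const_int 5)).  (Illustrative value.)  */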
2751 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
2752 if (CONST_INT_P (not_rtx))
2753 {
2754 operands[4] = operands[0];
2755 operands[5] = not_rtx;
2756 }
2757 else
2758 {
2759 operands[5] = operands[0];
2760 operands[4] = not_rtx;
2761 }
2762 }
2763 [(set_attr "length" "8")
2764 (set_attr "ce_count" "2")
2765 (set_attr "predicable" "yes")
2766 (set_attr "type" "multiple")]
2767 )
2768
2769 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2770 ; insns are available?
2771 (define_split
2772 [(set (match_operand:SI 0 "s_register_operand" "")
2773 (match_operator:SI 1 "logical_binary_operator"
2774 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2775 (match_operand:SI 3 "const_int_operand" "")
2776 (match_operand:SI 4 "const_int_operand" ""))
2777 (match_operator:SI 9 "logical_binary_operator"
2778 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2779 (match_operand:SI 6 "const_int_operand" ""))
2780 (match_operand:SI 7 "s_register_operand" "")])]))
2781 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2782 "TARGET_32BIT
2783 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2784 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2785 [(set (match_dup 8)
2786 (match_op_dup 1
2787 [(ashift:SI (match_dup 2) (match_dup 4))
2788 (match_dup 5)]))
2789 (set (match_dup 0)
2790 (match_op_dup 1
2791 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2792 (match_dup 7)]))]
2793 "
2794 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2795 ")
2796
2797 (define_split
2798 [(set (match_operand:SI 0 "s_register_operand" "")
2799 (match_operator:SI 1 "logical_binary_operator"
2800 [(match_operator:SI 9 "logical_binary_operator"
2801 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2802 (match_operand:SI 6 "const_int_operand" ""))
2803 (match_operand:SI 7 "s_register_operand" "")])
2804 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2805 (match_operand:SI 3 "const_int_operand" "")
2806 (match_operand:SI 4 "const_int_operand" ""))]))
2807 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2808 "TARGET_32BIT
2809 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2810 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2811 [(set (match_dup 8)
2812 (match_op_dup 1
2813 [(ashift:SI (match_dup 2) (match_dup 4))
2814 (match_dup 5)]))
2815 (set (match_dup 0)
2816 (match_op_dup 1
2817 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2818 (match_dup 7)]))]
2819 "
2820 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2821 ")
2822
2823 (define_split
2824 [(set (match_operand:SI 0 "s_register_operand" "")
2825 (match_operator:SI 1 "logical_binary_operator"
2826 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2827 (match_operand:SI 3 "const_int_operand" "")
2828 (match_operand:SI 4 "const_int_operand" ""))
2829 (match_operator:SI 9 "logical_binary_operator"
2830 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2831 (match_operand:SI 6 "const_int_operand" ""))
2832 (match_operand:SI 7 "s_register_operand" "")])]))
2833 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2834 "TARGET_32BIT
2835 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2836 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2837 [(set (match_dup 8)
2838 (match_op_dup 1
2839 [(ashift:SI (match_dup 2) (match_dup 4))
2840 (match_dup 5)]))
2841 (set (match_dup 0)
2842 (match_op_dup 1
2843 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2844 (match_dup 7)]))]
2845 "
2846 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2847 ")
2848
2849 (define_split
2850 [(set (match_operand:SI 0 "s_register_operand" "")
2851 (match_operator:SI 1 "logical_binary_operator"
2852 [(match_operator:SI 9 "logical_binary_operator"
2853 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2854 (match_operand:SI 6 "const_int_operand" ""))
2855 (match_operand:SI 7 "s_register_operand" "")])
2856 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2857 (match_operand:SI 3 "const_int_operand" "")
2858 (match_operand:SI 4 "const_int_operand" ""))]))
2859 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2860 "TARGET_32BIT
2861 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2862 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2863 [(set (match_dup 8)
2864 (match_op_dup 1
2865 [(ashift:SI (match_dup 2) (match_dup 4))
2866 (match_dup 5)]))
2867 (set (match_dup 0)
2868 (match_op_dup 1
2869 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2870 (match_dup 7)]))]
2871 "
2872 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2873 ")
2874 \f
2875
2876 ;; Minimum and maximum insns
2877
2878 (define_expand "smaxsi3"
2879 [(parallel [
2880 (set (match_operand:SI 0 "s_register_operand")
2881 (smax:SI (match_operand:SI 1 "s_register_operand")
2882 (match_operand:SI 2 "arm_rhs_operand")))
2883 (clobber (reg:CC CC_REGNUM))])]
2884 "TARGET_32BIT"
2885 "
2886 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2887 {
2888 /* No need for a clobber of the condition code register here. */
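    /* Illustrative detail: smax (x, 0) matches *smax_0 below and emits
       "bic Rd, Rx, Rx, asr #31", while smax (x, -1) matches *smax_m1 and
       emits "orr Rd, Rx, Rx, asr #31"; neither instruction reads or writes
       the condition codes.  */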
2889 emit_insn (gen_rtx_SET (operands[0],
2890 gen_rtx_SMAX (SImode, operands[1],
2891 operands[2])));
2892 DONE;
2893 }
2894 ")
2895
2896 (define_insn "*smax_0"
2897 [(set (match_operand:SI 0 "s_register_operand" "=r")
2898 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2899 (const_int 0)))]
2900 "TARGET_32BIT"
2901 "bic%?\\t%0, %1, %1, asr #31"
2902 [(set_attr "predicable" "yes")
2903 (set_attr "type" "logic_shift_reg")]
2904 )
2905
2906 (define_insn "*smax_m1"
2907 [(set (match_operand:SI 0 "s_register_operand" "=r")
2908 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2909 (const_int -1)))]
2910 "TARGET_32BIT"
2911 "orr%?\\t%0, %1, %1, asr #31"
2912 [(set_attr "predicable" "yes")
2913 (set_attr "type" "logic_shift_reg")]
2914 )
2915
2916 (define_insn_and_split "*arm_smax_insn"
2917 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2918 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2919 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2920 (clobber (reg:CC CC_REGNUM))]
2921 "TARGET_ARM"
2922 "#"
2923 ; cmp\\t%1, %2\;movlt\\t%0, %2
2924 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
2925 "TARGET_ARM"
2926 [(set (reg:CC CC_REGNUM)
2927 (compare:CC (match_dup 1) (match_dup 2)))
2928 (set (match_dup 0)
2929 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
2930 (match_dup 1)
2931 (match_dup 2)))]
2932 ""
2933 [(set_attr "conds" "clob")
2934 (set_attr "length" "8,12")
2935 (set_attr "type" "multiple")]
2936 )
2937
2938 (define_expand "sminsi3"
2939 [(parallel [
2940 (set (match_operand:SI 0 "s_register_operand")
2941 (smin:SI (match_operand:SI 1 "s_register_operand")
2942 (match_operand:SI 2 "arm_rhs_operand")))
2943 (clobber (reg:CC CC_REGNUM))])]
2944 "TARGET_32BIT"
2945 "
2946 if (operands[2] == const0_rtx)
2947 {
2948 /* No need for a clobber of the condition code register here. */
2949 emit_insn (gen_rtx_SET (operands[0],
2950 gen_rtx_SMIN (SImode, operands[1],
2951 operands[2])));
2952 DONE;
2953 }
2954 ")
2955
2956 (define_insn "*smin_0"
2957 [(set (match_operand:SI 0 "s_register_operand" "=r")
2958 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
2959 (const_int 0)))]
2960 "TARGET_32BIT"
2961 "and%?\\t%0, %1, %1, asr #31"
2962 [(set_attr "predicable" "yes")
2963 (set_attr "type" "logic_shift_reg")]
2964 )
2965
2966 (define_insn_and_split "*arm_smin_insn"
2967 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2968 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2969 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2970 (clobber (reg:CC CC_REGNUM))]
2971 "TARGET_ARM"
2972 "#"
2973 ; cmp\\t%1, %2\;movge\\t%0, %2
2974 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
2975 "TARGET_ARM"
2976 [(set (reg:CC CC_REGNUM)
2977 (compare:CC (match_dup 1) (match_dup 2)))
2978 (set (match_dup 0)
2979 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
2980 (match_dup 1)
2981 (match_dup 2)))]
2982 ""
2983 [(set_attr "conds" "clob")
2984 (set_attr "length" "8,12")
2985 (set_attr "type" "multiple,multiple")]
2986 )
2987
2988 (define_expand "umaxsi3"
2989 [(parallel [
2990 (set (match_operand:SI 0 "s_register_operand")
2991 (umax:SI (match_operand:SI 1 "s_register_operand")
2992 (match_operand:SI 2 "arm_rhs_operand")))
2993 (clobber (reg:CC CC_REGNUM))])]
2994 "TARGET_32BIT"
2995 ""
2996 )
2997
2998 (define_insn_and_split "*arm_umaxsi3"
2999 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3000 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3001 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3002 (clobber (reg:CC CC_REGNUM))]
3003 "TARGET_ARM"
3004 "#"
3005 ; cmp\\t%1, %2\;movcc\\t%0, %2
3006 ; cmp\\t%1, %2\;movcs\\t%0, %1
3007 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3008 "TARGET_ARM"
3009 [(set (reg:CC CC_REGNUM)
3010 (compare:CC (match_dup 1) (match_dup 2)))
3011 (set (match_dup 0)
3012 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
3013 (match_dup 1)
3014 (match_dup 2)))]
3015 ""
3016 [(set_attr "conds" "clob")
3017 (set_attr "length" "8,8,12")
3018 (set_attr "type" "store_4")]
3019 )
3020
3021 (define_expand "uminsi3"
3022 [(parallel [
3023 (set (match_operand:SI 0 "s_register_operand")
3024 (umin:SI (match_operand:SI 1 "s_register_operand")
3025 (match_operand:SI 2 "arm_rhs_operand")))
3026 (clobber (reg:CC CC_REGNUM))])]
3027 "TARGET_32BIT"
3028 ""
3029 )
3030
3031 (define_insn_and_split "*arm_uminsi3"
3032 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3033 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3034 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3035 (clobber (reg:CC CC_REGNUM))]
3036 "TARGET_ARM"
3037 "#"
3038 ; cmp\\t%1, %2\;movcs\\t%0, %2
3039 ; cmp\\t%1, %2\;movcc\\t%0, %1
3040 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3041 "TARGET_ARM"
3042 [(set (reg:CC CC_REGNUM)
3043 (compare:CC (match_dup 1) (match_dup 2)))
3044 (set (match_dup 0)
3045 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
3046 (match_dup 1)
3047 (match_dup 2)))]
3048 ""
3049 [(set_attr "conds" "clob")
3050 (set_attr "length" "8,8,12")
3051 (set_attr "type" "store_4")]
3052 )
3053
3054 (define_insn "*store_minmaxsi"
3055 [(set (match_operand:SI 0 "memory_operand" "=m")
3056 (match_operator:SI 3 "minmax_operator"
3057 [(match_operand:SI 1 "s_register_operand" "r")
3058 (match_operand:SI 2 "s_register_operand" "r")]))
3059 (clobber (reg:CC CC_REGNUM))]
3060 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
3061 "*
3062 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3063 operands[1], operands[2]);
3064 output_asm_insn (\"cmp\\t%1, %2\", operands);
3065 if (TARGET_THUMB2)
3066 output_asm_insn (\"ite\t%d3\", operands);
3067 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3068 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3069 return \"\";
3070 "
3071 [(set_attr "conds" "clob")
3072 (set (attr "length")
3073 (if_then_else (eq_attr "is_thumb" "yes")
3074 (const_int 14)
3075 (const_int 12)))
3076 (set_attr "type" "store_4")]
3077 )
3078
3079 ; Reject the frame pointer in operand[1], since reloading this after
3080 ; it has been eliminated can cause carnage.
3081 (define_insn "*minmax_arithsi"
3082 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3083 (match_operator:SI 4 "shiftable_operator"
3084 [(match_operator:SI 5 "minmax_operator"
3085 [(match_operand:SI 2 "s_register_operand" "r,r")
3086 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3087 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3088 (clobber (reg:CC CC_REGNUM))]
3089 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
3090 "*
3091 {
3092 enum rtx_code code = GET_CODE (operands[4]);
3093 bool need_else;
3094
3095 if (which_alternative != 0 || operands[3] != const0_rtx
3096 || (code != PLUS && code != IOR && code != XOR))
3097 need_else = true;
3098 else
3099 need_else = false;
3100
3101 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3102 operands[2], operands[3]);
3103 output_asm_insn (\"cmp\\t%2, %3\", operands);
3104 if (TARGET_THUMB2)
3105 {
3106 if (need_else)
3107 output_asm_insn (\"ite\\t%d5\", operands);
3108 else
3109 output_asm_insn (\"it\\t%d5\", operands);
3110 }
3111 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3112 if (need_else)
3113 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3114 return \"\";
3115 }"
3116 [(set_attr "conds" "clob")
3117 (set (attr "length")
3118 (if_then_else (eq_attr "is_thumb" "yes")
3119 (const_int 14)
3120 (const_int 12)))
3121 (set_attr "type" "multiple")]
3122 )
3123
3124 ; Reject the frame pointer in operand[1], since reloading this after
3125 ; it has been eliminated can cause carnage.
3126 (define_insn_and_split "*minmax_arithsi_non_canon"
3127 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
3128 (minus:SI
3129 (match_operand:SI 1 "s_register_operand" "0,?Ts")
3130 (match_operator:SI 4 "minmax_operator"
3131 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
3132 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
3133 (clobber (reg:CC CC_REGNUM))]
3134 "TARGET_32BIT && !arm_eliminable_register (operands[1])
3135 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
3136 "#"
3137 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
3138 [(set (reg:CC CC_REGNUM)
3139 (compare:CC (match_dup 2) (match_dup 3)))
3140
3141 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
3142 (set (match_dup 0)
3143 (minus:SI (match_dup 1)
3144 (match_dup 2))))
3145 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
3146 (set (match_dup 0)
3147 (match_dup 6)))]
3148 {
3149 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
3150 operands[2], operands[3]);
3151 enum rtx_code rc = minmax_code (operands[4]);
3152 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
3153 operands[2], operands[3]);
3154
3155 if (mode == CCFPmode || mode == CCFPEmode)
3156 rc = reverse_condition_maybe_unordered (rc);
3157 else
3158 rc = reverse_condition (rc);
3159 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
3160 if (CONST_INT_P (operands[3]))
3161 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
3162 else
3163 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
3164 }
3165 [(set_attr "conds" "clob")
3166 (set (attr "length")
3167 (if_then_else (eq_attr "is_thumb" "yes")
3168 (const_int 14)
3169 (const_int 12)))
3170 (set_attr "type" "multiple")]
3171 )
3172
3173 (define_code_iterator SAT [smin smax])
3174 (define_code_attr SATrev [(smin "smax") (smax "smin")])
3175 (define_code_attr SATlo [(smin "1") (smax "2")])
3176 (define_code_attr SAThi [(smin "2") (smax "1")])
3177
3178 (define_insn "*satsi_<SAT:code>"
3179 [(set (match_operand:SI 0 "s_register_operand" "=r")
3180 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
3181 (match_operand:SI 1 "const_int_operand" "i"))
3182 (match_operand:SI 2 "const_int_operand" "i")))]
3183 "TARGET_32BIT && arm_arch6
3184 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3185 {
3186 int mask;
3187 bool signed_sat;
3188 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3189 &mask, &signed_sat))
3190 gcc_unreachable ();
3191
3192 operands[1] = GEN_INT (mask);
3193 if (signed_sat)
3194 return "ssat%?\t%0, %1, %3";
3195 else
3196 return "usat%?\t%0, %1, %3";
3197 }
3198 [(set_attr "predicable" "yes")
3199 (set_attr "type" "alus_imm")]
3200 )
3201
3202 (define_insn "*satsi_<SAT:code>_shift"
3203 [(set (match_operand:SI 0 "s_register_operand" "=r")
3204 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
3205 [(match_operand:SI 4 "s_register_operand" "r")
3206 (match_operand:SI 5 "const_int_operand" "i")])
3207 (match_operand:SI 1 "const_int_operand" "i"))
3208 (match_operand:SI 2 "const_int_operand" "i")))]
3209 "TARGET_32BIT && arm_arch6
3210 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3211 {
3212 int mask;
3213 bool signed_sat;
3214 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3215 &mask, &signed_sat))
3216 gcc_unreachable ();
3217
3218 operands[1] = GEN_INT (mask);
3219 if (signed_sat)
3220 return "ssat%?\t%0, %1, %4%S3";
3221 else
3222 return "usat%?\t%0, %1, %4%S3";
3223 }
3224 [(set_attr "predicable" "yes")
3225 (set_attr "shift" "3")
3226 (set_attr "type" "logic_shift_reg")])
3227 \f
3228 ;; Shift and rotation insns
3229
3230 (define_expand "ashldi3"
3231 [(set (match_operand:DI 0 "s_register_operand")
3232 (ashift:DI (match_operand:DI 1 "s_register_operand")
3233 (match_operand:SI 2 "reg_or_int_operand")))]
3234 "TARGET_32BIT"
3235 "
3236 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
3237 operands[2], gen_reg_rtx (SImode),
3238 gen_reg_rtx (SImode));
3239 DONE;
3240 ")
3241
3242 (define_expand "ashlsi3"
3243 [(set (match_operand:SI 0 "s_register_operand")
3244 (ashift:SI (match_operand:SI 1 "s_register_operand")
3245 (match_operand:SI 2 "arm_rhs_operand")))]
3246 "TARGET_EITHER"
3247 "
3248 if (CONST_INT_P (operands[2])
3249 && (UINTVAL (operands[2])) > 31)
3250 {
3251 emit_insn (gen_movsi (operands[0], const0_rtx));
3252 DONE;
3253 }
3254 "
3255 )
3256
3257 (define_expand "ashrdi3"
3258 [(set (match_operand:DI 0 "s_register_operand")
3259 (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
3260 (match_operand:SI 2 "reg_or_int_operand")))]
3261 "TARGET_32BIT"
3262 "
3263 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
3264 operands[2], gen_reg_rtx (SImode),
3265 gen_reg_rtx (SImode));
3266 DONE;
3267 ")
3268
3269 (define_expand "ashrsi3"
3270 [(set (match_operand:SI 0 "s_register_operand")
3271 (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
3272 (match_operand:SI 2 "arm_rhs_operand")))]
3273 "TARGET_EITHER"
3274 "
3275 if (CONST_INT_P (operands[2])
3276 && UINTVAL (operands[2]) > 31)
3277 operands[2] = GEN_INT (31);
3278 "
3279 )
3280
3281 (define_expand "lshrdi3"
3282 [(set (match_operand:DI 0 "s_register_operand")
3283 (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
3284 (match_operand:SI 2 "reg_or_int_operand")))]
3285 "TARGET_32BIT"
3286 "
3287 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
3288 operands[2], gen_reg_rtx (SImode),
3289 gen_reg_rtx (SImode));
3290 DONE;
3291 ")
3292
3293 (define_expand "lshrsi3"
3294 [(set (match_operand:SI 0 "s_register_operand")
3295 (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
3296 (match_operand:SI 2 "arm_rhs_operand")))]
3297 "TARGET_EITHER"
3298 "
3299 if (CONST_INT_P (operands[2])
3300 && (UINTVAL (operands[2])) > 31)
3301 {
3302 emit_insn (gen_movsi (operands[0], const0_rtx));
3303 DONE;
3304 }
3305 "
3306 )
3307
3308 (define_expand "rotlsi3"
3309 [(set (match_operand:SI 0 "s_register_operand")
3310 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3311 (match_operand:SI 2 "reg_or_int_operand")))]
3312 "TARGET_32BIT"
3313 "
3314 if (CONST_INT_P (operands[2]))
3315 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3316 else
3317 {
3318 rtx reg = gen_reg_rtx (SImode);
3319 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3320 operands[2] = reg;
3321 }
3322 "
3323 )
3324
3325 (define_expand "rotrsi3"
3326 [(set (match_operand:SI 0 "s_register_operand")
3327 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3328 (match_operand:SI 2 "arm_rhs_operand")))]
3329 "TARGET_EITHER"
3330 "
3331 if (TARGET_32BIT)
3332 {
3333 if (CONST_INT_P (operands[2])
3334 && UINTVAL (operands[2]) > 31)
3335 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3336 }
3337 else /* TARGET_THUMB1 */
3338 {
3339 if (CONST_INT_P (operands [2]))
3340 operands [2] = force_reg (SImode, operands[2]);
3341 }
3342 "
3343 )
3344
3345 (define_insn "*arm_shiftsi3"
3346 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
3347 (match_operator:SI 3 "shift_operator"
3348 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
3349 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
3350 "TARGET_32BIT"
3351 "* return arm_output_shift(operands, 0);"
3352 [(set_attr "predicable" "yes")
3353 (set_attr "arch" "t2,t2,*,*")
3354 (set_attr "predicable_short_it" "yes,yes,no,no")
3355 (set_attr "length" "4")
3356 (set_attr "shift" "1")
3357 (set_attr "type" "alu_shift_reg,alu_shift_imm,alu_shift_imm,alu_shift_reg")]
3358 )
3359
3360 (define_insn "*shiftsi3_compare0"
3361 [(set (reg:CC_NOOV CC_REGNUM)
3362 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3363 [(match_operand:SI 1 "s_register_operand" "r,r")
3364 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3365 (const_int 0)))
3366 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3367 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3368 "TARGET_32BIT"
3369 "* return arm_output_shift(operands, 1);"
3370 [(set_attr "conds" "set")
3371 (set_attr "shift" "1")
3372 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
3373 )
3374
3375 (define_insn "*shiftsi3_compare0_scratch"
3376 [(set (reg:CC_NOOV CC_REGNUM)
3377 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3378 [(match_operand:SI 1 "s_register_operand" "r,r")
3379 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3380 (const_int 0)))
3381 (clobber (match_scratch:SI 0 "=r,r"))]
3382 "TARGET_32BIT"
3383 "* return arm_output_shift(operands, 1);"
3384 [(set_attr "conds" "set")
3385 (set_attr "shift" "1")
3386 (set_attr "type" "shift_imm,shift_reg")]
3387 )
3388
3389 (define_insn "*not_shiftsi"
3390 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3391 (not:SI (match_operator:SI 3 "shift_operator"
3392 [(match_operand:SI 1 "s_register_operand" "r,r")
3393 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3394 "TARGET_32BIT"
3395 "mvn%?\\t%0, %1%S3"
3396 [(set_attr "predicable" "yes")
3397 (set_attr "shift" "1")
3398 (set_attr "arch" "32,a")
3399 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3400
3401 (define_insn "*not_shiftsi_compare0"
3402 [(set (reg:CC_NOOV CC_REGNUM)
3403 (compare:CC_NOOV
3404 (not:SI (match_operator:SI 3 "shift_operator"
3405 [(match_operand:SI 1 "s_register_operand" "r,r")
3406 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3407 (const_int 0)))
3408 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3409 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3410 "TARGET_32BIT"
3411 "mvns%?\\t%0, %1%S3"
3412 [(set_attr "conds" "set")
3413 (set_attr "shift" "1")
3414 (set_attr "arch" "32,a")
3415 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3416
3417 (define_insn "*not_shiftsi_compare0_scratch"
3418 [(set (reg:CC_NOOV CC_REGNUM)
3419 (compare:CC_NOOV
3420 (not:SI (match_operator:SI 3 "shift_operator"
3421 [(match_operand:SI 1 "s_register_operand" "r,r")
3422 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3423 (const_int 0)))
3424 (clobber (match_scratch:SI 0 "=r,r"))]
3425 "TARGET_32BIT"
3426 "mvns%?\\t%0, %1%S3"
3427 [(set_attr "conds" "set")
3428 (set_attr "shift" "1")
3429 (set_attr "arch" "32,a")
3430 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3431
3432 ;; We don't really have extzv, but defining this using shifts helps
3433 ;; to reduce register pressure later on.
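;; As a sketch of the shift form used below, extracting a WIDTH-bit field
;; starting at bit POS of x is
;;	(x << (32 - WIDTH - POS)) >> (32 - WIDTH)
;; e.g. an 8-bit field at bit 4 becomes lsl #20 followed by lsr #24.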
3434
3435 (define_expand "extzv"
3436 [(set (match_operand 0 "s_register_operand")
3437 (zero_extract (match_operand 1 "nonimmediate_operand")
3438 (match_operand 2 "const_int_operand")
3439 (match_operand 3 "const_int_operand")))]
3440 "TARGET_THUMB1 || arm_arch_thumb2"
3441 "
3442 {
3443 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3444 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3445
3446 if (arm_arch_thumb2)
3447 {
3448 HOST_WIDE_INT width = INTVAL (operands[2]);
3449 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3450
3451 if (unaligned_access && MEM_P (operands[1])
3452 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
3453 {
3454 rtx base_addr;
3455
3456 if (BYTES_BIG_ENDIAN)
3457 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
3458 - bitpos;
3459
3460 if (width == 32)
3461 {
3462 base_addr = adjust_address (operands[1], SImode,
3463 bitpos / BITS_PER_UNIT);
3464 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3465 }
3466 else
3467 {
3468 rtx dest = operands[0];
3469 rtx tmp = gen_reg_rtx (SImode);
3470
3471 /* We may get a paradoxical subreg here. Strip it off. */
3472 if (GET_CODE (dest) == SUBREG
3473 && GET_MODE (dest) == SImode
3474 && GET_MODE (SUBREG_REG (dest)) == HImode)
3475 dest = SUBREG_REG (dest);
3476
3477 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3478 FAIL;
3479
3480 base_addr = adjust_address (operands[1], HImode,
3481 bitpos / BITS_PER_UNIT);
3482 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
3483 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3484 }
3485 DONE;
3486 }
3487 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
3488 {
3489 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3490 operands[3]));
3491 DONE;
3492 }
3493 else
3494 FAIL;
3495 }
3496
3497 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3498 FAIL;
3499
3500 operands[3] = GEN_INT (rshift);
3501
3502 if (lshift == 0)
3503 {
3504 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3505 DONE;
3506 }
3507
3508 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
3509 operands[3], gen_reg_rtx (SImode)));
3510 DONE;
3511 }"
3512 )
3513
3514 ;; Helper for extzv, for the Thumb-1 register-shifts case.
3515
3516 (define_expand "extzv_t1"
3517 [(set (match_operand:SI 4 "s_register_operand")
3518 (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
3519 (match_operand:SI 2 "const_int_operand")))
3520 (set (match_operand:SI 0 "s_register_operand")
3521 (lshiftrt:SI (match_dup 4)
3522 (match_operand:SI 3 "const_int_operand")))]
3523 "TARGET_THUMB1"
3524 "")
3525
3526 (define_expand "extv"
3527 [(set (match_operand 0 "s_register_operand")
3528 (sign_extract (match_operand 1 "nonimmediate_operand")
3529 (match_operand 2 "const_int_operand")
3530 (match_operand 3 "const_int_operand")))]
3531 "arm_arch_thumb2"
3532 {
3533 HOST_WIDE_INT width = INTVAL (operands[2]);
3534 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3535
3536 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
3537 && (bitpos % BITS_PER_UNIT) == 0)
3538 {
3539 rtx base_addr;
3540
3541 if (BYTES_BIG_ENDIAN)
3542 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
3543
3544 if (width == 32)
3545 {
3546 base_addr = adjust_address (operands[1], SImode,
3547 bitpos / BITS_PER_UNIT);
3548 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3549 }
3550 else
3551 {
3552 rtx dest = operands[0];
3553 rtx tmp = gen_reg_rtx (SImode);
3554
3555 /* We may get a paradoxical subreg here. Strip it off. */
3556 if (GET_CODE (dest) == SUBREG
3557 && GET_MODE (dest) == SImode
3558 && GET_MODE (SUBREG_REG (dest)) == HImode)
3559 dest = SUBREG_REG (dest);
3560
3561 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3562 FAIL;
3563
3564 base_addr = adjust_address (operands[1], HImode,
3565 bitpos / BITS_PER_UNIT);
3566 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
3567 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3568 }
3569
3570 DONE;
3571 }
3572 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3573 FAIL;
3574 else if (GET_MODE (operands[0]) == SImode
3575 && GET_MODE (operands[1]) == SImode)
3576 {
3577 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
3578 operands[3]));
3579 DONE;
3580 }
3581
3582 FAIL;
3583 })
3584
3585 ; Helper to expand register forms of extv with the proper modes.
3586
3587 (define_expand "extv_regsi"
3588 [(set (match_operand:SI 0 "s_register_operand")
3589 (sign_extract:SI (match_operand:SI 1 "s_register_operand")
3590 (match_operand 2 "const_int_operand")
3591 (match_operand 3 "const_int_operand")))]
3592 ""
3593 {
3594 })
3595
3596 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
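; As a purely illustrative example, a 32-bit access to a packed structure
; field such as
;	struct __attribute__((packed)) s { char c; int i; };
;	int get (struct s *p) { return p->i; }
; can be implemented with a single unaligned_loadsi (an ldr at an unaligned
; address) when unaligned_access is enabled, instead of assembling the value
; byte by byte.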
3597
3598 (define_insn "unaligned_loaddi"
3599 [(set (match_operand:DI 0 "s_register_operand" "=r")
3600 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
3601 UNSPEC_UNALIGNED_LOAD))]
3602 "TARGET_32BIT && TARGET_LDRD"
3603 "*
3604 return output_move_double (operands, true, NULL);
3605 "
3606 [(set_attr "length" "8")
3607 (set_attr "type" "load_8")])
3608
3609 (define_insn "unaligned_loadsi"
3610 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
3611 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
3612 UNSPEC_UNALIGNED_LOAD))]
3613 "unaligned_access"
3614 "@
3615 ldr\t%0, %1\t@ unaligned
3616 ldr%?\t%0, %1\t@ unaligned
3617 ldr%?\t%0, %1\t@ unaligned"
3618 [(set_attr "arch" "t1,t2,32")
3619 (set_attr "length" "2,2,4")
3620 (set_attr "predicable" "no,yes,yes")
3621 (set_attr "predicable_short_it" "no,yes,no")
3622 (set_attr "type" "load_4")])
3623
3624 ;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
3625 ;; address (there's no immediate format). That's tricky to support
3626 ;; here and we don't really need this pattern for that case, so only
3627 ;; enable for 32-bit ISAs.
3628 (define_insn "unaligned_loadhis"
3629 [(set (match_operand:SI 0 "s_register_operand" "=r")
3630 (sign_extend:SI
3631 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
3632 UNSPEC_UNALIGNED_LOAD)))]
3633 "unaligned_access && TARGET_32BIT"
3634 "ldrsh%?\t%0, %1\t@ unaligned"
3635 [(set_attr "predicable" "yes")
3636 (set_attr "type" "load_byte")])
3637
3638 (define_insn "unaligned_loadhiu"
3639 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
3640 (zero_extend:SI
3641 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
3642 UNSPEC_UNALIGNED_LOAD)))]
3643 "unaligned_access"
3644 "@
3645 ldrh\t%0, %1\t@ unaligned
3646 ldrh%?\t%0, %1\t@ unaligned
3647 ldrh%?\t%0, %1\t@ unaligned"
3648 [(set_attr "arch" "t1,t2,32")
3649 (set_attr "length" "2,2,4")
3650 (set_attr "predicable" "no,yes,yes")
3651 (set_attr "predicable_short_it" "no,yes,no")
3652 (set_attr "type" "load_byte")])
3653
3654 (define_insn "unaligned_storedi"
3655 [(set (match_operand:DI 0 "memory_operand" "=m")
3656 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
3657 UNSPEC_UNALIGNED_STORE))]
3658 "TARGET_32BIT && TARGET_LDRD"
3659 "*
3660 return output_move_double (operands, true, NULL);
3661 "
3662 [(set_attr "length" "8")
3663 (set_attr "type" "store_8")])
3664
3665 (define_insn "unaligned_storesi"
3666 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
3667 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
3668 UNSPEC_UNALIGNED_STORE))]
3669 "unaligned_access"
3670 "@
3671 str\t%1, %0\t@ unaligned
3672 str%?\t%1, %0\t@ unaligned
3673 str%?\t%1, %0\t@ unaligned"
3674 [(set_attr "arch" "t1,t2,32")
3675 (set_attr "length" "2,2,4")
3676 (set_attr "predicable" "no,yes,yes")
3677 (set_attr "predicable_short_it" "no,yes,no")
3678 (set_attr "type" "store_4")])
3679
3680 (define_insn "unaligned_storehi"
3681 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
3682 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
3683 UNSPEC_UNALIGNED_STORE))]
3684 "unaligned_access"
3685 "@
3686 strh\t%1, %0\t@ unaligned
3687 strh%?\t%1, %0\t@ unaligned
3688 strh%?\t%1, %0\t@ unaligned"
3689 [(set_attr "arch" "t1,t2,32")
3690 (set_attr "length" "2,2,4")
3691 (set_attr "predicable" "no,yes,yes")
3692 (set_attr "predicable_short_it" "no,yes,no")
3693 (set_attr "type" "store_4")])
3694
3695
3696 (define_insn "*extv_reg"
3697 [(set (match_operand:SI 0 "s_register_operand" "=r")
3698 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3699 (match_operand:SI 2 "const_int_operand" "n")
3700 (match_operand:SI 3 "const_int_operand" "n")))]
3701 "arm_arch_thumb2
3702 && IN_RANGE (INTVAL (operands[3]), 0, 31)
3703 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
3704 "sbfx%?\t%0, %1, %3, %2"
3705 [(set_attr "length" "4")
3706 (set_attr "predicable" "yes")
3707 (set_attr "type" "bfm")]
3708 )
3709
3710 (define_insn "extzv_t2"
3711 [(set (match_operand:SI 0 "s_register_operand" "=r")
3712 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3713 (match_operand:SI 2 "const_int_operand" "n")
3714 (match_operand:SI 3 "const_int_operand" "n")))]
3715 "arm_arch_thumb2
3716 && IN_RANGE (INTVAL (operands[3]), 0, 31)
3717 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
3718 "ubfx%?\t%0, %1, %3, %2"
3719 [(set_attr "length" "4")
3720 (set_attr "predicable" "yes")
3721 (set_attr "type" "bfm")]
3722 )
3723
3724
3725 ;; Division instructions
3726 (define_insn "divsi3"
3727 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3728 (div:SI (match_operand:SI 1 "s_register_operand" "r,r")
3729 (match_operand:SI 2 "s_register_operand" "r,r")))]
3730 "TARGET_IDIV"
3731 "@
3732 sdiv%?\t%0, %1, %2
3733 sdiv\t%0, %1, %2"
3734 [(set_attr "arch" "32,v8mb")
3735 (set_attr "predicable" "yes")
3736 (set_attr "type" "sdiv")]
3737 )
3738
3739 (define_insn "udivsi3"
3740 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3741 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
3742 (match_operand:SI 2 "s_register_operand" "r,r")))]
3743 "TARGET_IDIV"
3744 "@
3745 udiv%?\t%0, %1, %2
3746 udiv\t%0, %1, %2"
3747 [(set_attr "arch" "32,v8mb")
3748 (set_attr "predicable" "yes")
3749 (set_attr "type" "udiv")]
3750 )
3751
3752 \f
3753 ;; Unary arithmetic insns
3754
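;; Signed negation overflows only for the most negative value (INT_MIN for
;; SImode), since its negation is not representable. The negv expanders
;; below therefore compute 0 - x with the flags set and emit an unlikely
;; branch on the V flag to the label in operands[2]; roughly (label name is
;; hypothetical):
;;	rsbs r0, r1, #0
;;	bvs  .Loverflow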
3755 (define_expand "negvsi3"
3756 [(match_operand:SI 0 "register_operand")
3757 (match_operand:SI 1 "register_operand")
3758 (match_operand 2 "")]
3759 "TARGET_32BIT"
3760 {
3761 emit_insn (gen_subsi3_compare (operands[0], const0_rtx, operands[1]));
3762 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
3763
3764 DONE;
3765 })
3766
3767 (define_expand "negvdi3"
3768 [(match_operand:DI 0 "s_register_operand")
3769 (match_operand:DI 1 "s_register_operand")
3770 (match_operand 2 "")]
3771 "TARGET_ARM"
3772 {
3773 emit_insn (gen_negdi2_compare (operands[0], operands[1]));
3774 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
3775
3776 DONE;
3777 })
3778
3779
3780 (define_insn "negdi2_compare"
3781 [(set (reg:CC CC_REGNUM)
3782 (compare:CC
3783 (const_int 0)
3784 (match_operand:DI 1 "register_operand" "r,r")))
3785 (set (match_operand:DI 0 "register_operand" "=&r,&r")
3786 (minus:DI (const_int 0) (match_dup 1)))]
3787 "TARGET_ARM"
3788 "@
3789 rsbs\\t%Q0, %Q1, #0;rscs\\t%R0, %R1, #0
3790 rsbs\\t%Q0, %Q1, #0;sbcs\\t%R0, %R1, %R1, lsl #1"
3791 [(set_attr "conds" "set")
3792 (set_attr "arch" "a,t2")
3793 (set_attr "length" "8")
3794 (set_attr "type" "multiple")]
3795 )
3796
3797 (define_expand "negsi2"
3798 [(set (match_operand:SI 0 "s_register_operand")
3799 (neg:SI (match_operand:SI 1 "s_register_operand")))]
3800 "TARGET_EITHER"
3801 ""
3802 )
3803
3804 (define_insn "*arm_negsi2"
3805 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
3806 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
3807 "TARGET_32BIT"
3808 "rsb%?\\t%0, %1, #0"
3809 [(set_attr "predicable" "yes")
3810 (set_attr "predicable_short_it" "yes,no")
3811 (set_attr "arch" "t2,*")
3812 (set_attr "length" "4")
3813 (set_attr "type" "alu_imm")]
3814 )
3815
3816 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
3817 ;; rather than (0 cmp reg). This gives the same results for unsigned
3818 ;; and equality comparisons, which is what we mostly need here.
3819 (define_insn "negsi2_0compare"
3820 [(set (reg:CC_RSB CC_REGNUM)
3821 (compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r"))
3822 (const_int -1)))
3823 (set (match_operand:SI 0 "s_register_operand" "=l,r")
3824 (neg:SI (match_dup 1)))]
3825 "TARGET_32BIT"
3826 "@
3827 negs\\t%0, %1
3828 rsbs\\t%0, %1, #0"
3829 [(set_attr "conds" "set")
3830 (set_attr "arch" "t2,*")
3831 (set_attr "length" "2,*")
3832 (set_attr "type" "alus_imm")]
3833 )
3834
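;; Negation with a borrow input: ARM state can use RSC, but Thumb-2 has no
;; RSC, so the second alternative below (and the Thumb-2 alternative of
;; negdi2_compare above) relies on the identity
;;	-x - borrow = x - (x << 1) - borrow
;; which fits a single SBC with a shifted operand:
;;	sbc r0, r1, r1, lsl #1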
3835 (define_insn "negsi2_carryin"
3836 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3837 (minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r"))
3838 (match_operand:SI 2 "arm_borrow_operation" "")))]
3839 "TARGET_32BIT"
3840 "@
3841 rsc\\t%0, %1, #0
3842 sbc\\t%0, %1, %1, lsl #1"
3843 [(set_attr "conds" "use")
3844 (set_attr "arch" "a,t2")
3845 (set_attr "type" "adc_imm,adc_reg")]
3846 )
3847
3848 (define_expand "negsf2"
3849 [(set (match_operand:SF 0 "s_register_operand")
3850 (neg:SF (match_operand:SF 1 "s_register_operand")))]
3851 "TARGET_32BIT && TARGET_HARD_FLOAT"
3852 ""
3853 )
3854
3855 (define_expand "negdf2"
3856 [(set (match_operand:DF 0 "s_register_operand")
3857 (neg:DF (match_operand:DF 1 "s_register_operand")))]
3858 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
3859 "")
3860
3861 ;; abssi2 doesn't really clobber the condition codes if a different register
3862 ;; is being set. To keep things simple, assume during rtl manipulations that
3863 ;; it does, but tell the final scan operator the truth. Similarly for
3864 ;; (neg (abs...))
3865
3866 (define_expand "abssi2"
3867 [(parallel
3868 [(set (match_operand:SI 0 "s_register_operand")
3869 (abs:SI (match_operand:SI 1 "s_register_operand")))
3870 (clobber (match_dup 2))])]
3871 "TARGET_EITHER"
3872 "
3873 if (TARGET_THUMB1)
3874 operands[2] = gen_rtx_SCRATCH (SImode);
3875 else
3876 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3877 ")
3878
3879 (define_insn_and_split "*arm_abssi2"
3880 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3881 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3882 (clobber (reg:CC CC_REGNUM))]
3883 "TARGET_ARM"
3884 "#"
3885 "&& reload_completed"
3886 [(const_int 0)]
3887 {
3888 /* if (which_alternative == 0) */
3889 if (REGNO(operands[0]) == REGNO(operands[1]))
3890 {
3891 /* Emit the pattern:
3892 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3893 [(set (reg:CC CC_REGNUM)
3894 (compare:CC (match_dup 0) (const_int 0)))
3895 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
3896 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
3897 */
3898 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
3899 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
3900 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
3901 (gen_rtx_LT (SImode,
3902 gen_rtx_REG (CCmode, CC_REGNUM),
3903 const0_rtx)),
3904 (gen_rtx_SET (operands[0],
3905 (gen_rtx_MINUS (SImode,
3906 const0_rtx,
3907 operands[1]))))));
3908 DONE;
3909 }
3910 else
3911 {
3912 /* Emit the pattern:
3913 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
3914 [(set (match_dup 0)
3915 (xor:SI (match_dup 1)
3916 (ashiftrt:SI (match_dup 1) (const_int 31))))
3917 (set (match_dup 0)
3918 (minus:SI (match_dup 0)
3919 (ashiftrt:SI (match_dup 1) (const_int 31))))]
3920 */
3921 emit_insn (gen_rtx_SET (operands[0],
3922 gen_rtx_XOR (SImode,
3923 gen_rtx_ASHIFTRT (SImode,
3924 operands[1],
3925 GEN_INT (31)),
3926 operands[1])));
3927 emit_insn (gen_rtx_SET (operands[0],
3928 gen_rtx_MINUS (SImode,
3929 operands[0],
3930 gen_rtx_ASHIFTRT (SImode,
3931 operands[1],
3932 GEN_INT (31)))));
3933 DONE;
3934 }
3935 }
3936 [(set_attr "conds" "clob,*")
3937 (set_attr "shift" "1")
3938 (set_attr "predicable" "no, yes")
3939 (set_attr "length" "8")
3940 (set_attr "type" "multiple")]
3941 )
3942
3943 (define_insn_and_split "*arm_neg_abssi2"
3944 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3945 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3946 (clobber (reg:CC CC_REGNUM))]
3947 "TARGET_ARM"
3948 "#"
3949 "&& reload_completed"
3950 [(const_int 0)]
3951 {
3952 /* if (which_alternative == 0) */
3953 if (REGNO (operands[0]) == REGNO (operands[1]))
3954 {
3955 /* Emit the pattern:
3956 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3957 */
3958 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
3959 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
3960 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
3961 gen_rtx_GT (SImode,
3962 gen_rtx_REG (CCmode, CC_REGNUM),
3963 const0_rtx),
3964 gen_rtx_SET (operands[0],
3965 (gen_rtx_MINUS (SImode,
3966 const0_rtx,
3967 operands[1])))));
3968 }
3969 else
3970 {
3971 /* Emit the pattern:
3972 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
3973 */
3974 emit_insn (gen_rtx_SET (operands[0],
3975 gen_rtx_XOR (SImode,
3976 gen_rtx_ASHIFTRT (SImode,
3977 operands[1],
3978 GEN_INT (31)),
3979 operands[1])));
3980 emit_insn (gen_rtx_SET (operands[0],
3981 gen_rtx_MINUS (SImode,
3982 gen_rtx_ASHIFTRT (SImode,
3983 operands[1],
3984 GEN_INT (31)),
3985 operands[0])));
3986 }
3987 DONE;
3988 }
3989 [(set_attr "conds" "clob,*")
3990 (set_attr "shift" "1")
3991 (set_attr "predicable" "no, yes")
3992 (set_attr "length" "8")
3993 (set_attr "type" "multiple")]
3994 )
3995
3996 (define_expand "abssf2"
3997 [(set (match_operand:SF 0 "s_register_operand")
3998 (abs:SF (match_operand:SF 1 "s_register_operand")))]
3999 "TARGET_32BIT && TARGET_HARD_FLOAT"
4000 "")
4001
4002 (define_expand "absdf2"
4003 [(set (match_operand:DF 0 "s_register_operand")
4004 (abs:DF (match_operand:DF 1 "s_register_operand")))]
4005 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4006 "")
4007
4008 (define_expand "sqrtsf2"
4009 [(set (match_operand:SF 0 "s_register_operand")
4010 (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
4011 "TARGET_32BIT && TARGET_HARD_FLOAT"
4012 "")
4013
4014 (define_expand "sqrtdf2"
4015 [(set (match_operand:DF 0 "s_register_operand")
4016 (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
4017 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4018 "")
4019
4020 (define_expand "one_cmplsi2"
4021 [(set (match_operand:SI 0 "s_register_operand")
4022 (not:SI (match_operand:SI 1 "s_register_operand")))]
4023 "TARGET_EITHER"
4024 ""
4025 )
4026
4027 (define_insn "*arm_one_cmplsi2"
4028 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4029 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4030 "TARGET_32BIT"
4031 "mvn%?\\t%0, %1"
4032 [(set_attr "predicable" "yes")
4033 (set_attr "predicable_short_it" "yes,no")
4034 (set_attr "arch" "t2,*")
4035 (set_attr "length" "4")
4036 (set_attr "type" "mvn_reg")]
4037 )
4038
4039 (define_insn "*notsi_compare0"
4040 [(set (reg:CC_NOOV CC_REGNUM)
4041 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4042 (const_int 0)))
4043 (set (match_operand:SI 0 "s_register_operand" "=r")
4044 (not:SI (match_dup 1)))]
4045 "TARGET_32BIT"
4046 "mvns%?\\t%0, %1"
4047 [(set_attr "conds" "set")
4048 (set_attr "type" "mvn_reg")]
4049 )
4050
4051 (define_insn "*notsi_compare0_scratch"
4052 [(set (reg:CC_NOOV CC_REGNUM)
4053 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4054 (const_int 0)))
4055 (clobber (match_scratch:SI 0 "=r"))]
4056 "TARGET_32BIT"
4057 "mvns%?\\t%0, %1"
4058 [(set_attr "conds" "set")
4059 (set_attr "type" "mvn_reg")]
4060 )
4061 \f
4062 ;; Fixed <--> Floating conversion insns
4063
4064 (define_expand "floatsihf2"
4065 [(set (match_operand:HF 0 "general_operand")
4066 (float:HF (match_operand:SI 1 "general_operand")))]
4067 "TARGET_EITHER"
4068 "
4069 {
4070 rtx op1 = gen_reg_rtx (SFmode);
4071 expand_float (op1, operands[1], 0);
4072 op1 = convert_to_mode (HFmode, op1, 0);
4073 emit_move_insn (operands[0], op1);
4074 DONE;
4075 }"
4076 )
4077
4078 (define_expand "floatdihf2"
4079 [(set (match_operand:HF 0 "general_operand")
4080 (float:HF (match_operand:DI 1 "general_operand")))]
4081 "TARGET_EITHER"
4082 "
4083 {
4084 rtx op1 = gen_reg_rtx (SFmode);
4085 expand_float (op1, operands[1], 0);
4086 op1 = convert_to_mode (HFmode, op1, 0);
4087 emit_move_insn (operands[0], op1);
4088 DONE;
4089 }"
4090 )
4091
4092 (define_expand "floatsisf2"
4093 [(set (match_operand:SF 0 "s_register_operand")
4094 (float:SF (match_operand:SI 1 "s_register_operand")))]
4095 "TARGET_32BIT && TARGET_HARD_FLOAT"
4096 "
4097 ")
4098
4099 (define_expand "floatsidf2"
4100 [(set (match_operand:DF 0 "s_register_operand")
4101 (float:DF (match_operand:SI 1 "s_register_operand")))]
4102 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4103 "
4104 ")
4105
4106 (define_expand "fix_trunchfsi2"
4107 [(set (match_operand:SI 0 "general_operand")
4108 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
4109 "TARGET_EITHER"
4110 "
4111 {
4112 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4113 expand_fix (operands[0], op1, 0);
4114 DONE;
4115 }"
4116 )
4117
4118 (define_expand "fix_trunchfdi2"
4119 [(set (match_operand:DI 0 "general_operand")
4120 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
4121 "TARGET_EITHER"
4122 "
4123 {
4124 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4125 expand_fix (operands[0], op1, 0);
4126 DONE;
4127 }"
4128 )
4129
4130 (define_expand "fix_truncsfsi2"
4131 [(set (match_operand:SI 0 "s_register_operand")
4132 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
4133 "TARGET_32BIT && TARGET_HARD_FLOAT"
4134 "
4135 ")
4136
4137 (define_expand "fix_truncdfsi2"
4138 [(set (match_operand:SI 0 "s_register_operand")
4139 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
4140 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4141 "
4142 ")
4143
4144 ;; Truncation insns
4145
4146 (define_expand "truncdfsf2"
4147 [(set (match_operand:SF 0 "s_register_operand")
4148 (float_truncate:SF
4149 (match_operand:DF 1 "s_register_operand")))]
4150 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4151 ""
4152 )
4153
4154 ;; DFmode to HFmode conversions on targets without a single-step hardware
4155 ;; instruction for it would have to go through SFmode. This is dangerous
4156 ;; as it introduces double rounding.
4157 ;;
4158 ;; Disable this pattern unless we are in an unsafe math mode, or we have
4159 ;; a single-step instruction.
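;; A sketch of the double-rounding hazard (the value is chosen purely for
;; illustration): the DFmode value 1 + 2^-11 + 2^-40 rounds directly to
;; HFmode as 1 + 2^-10, but rounding it to SFmode first gives exactly
;; 1 + 2^-11 (the tail is below half an SFmode ulp), which then rounds to
;; even -- that is, to 1.0 -- when converted to HFmode.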
4160
4161 (define_expand "truncdfhf2"
4162 [(set (match_operand:HF 0 "s_register_operand")
4163 (float_truncate:HF
4164 (match_operand:DF 1 "s_register_operand")))]
4165 "(TARGET_EITHER && flag_unsafe_math_optimizations)
4166 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
4167 {
4168 /* We don't have a direct instruction for this, so we must be in
4169 an unsafe math mode and go via SFmode. */
4170
4171 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4172 {
4173 rtx op1;
4174 op1 = convert_to_mode (SFmode, operands[1], 0);
4175 op1 = convert_to_mode (HFmode, op1, 0);
4176 emit_move_insn (operands[0], op1);
4177 DONE;
4178 }
4179 /* Otherwise, we will pick this up as a single instruction with
4180 no intermediate rounding. */
4181 }
4182 )
4183 \f
4184 ;; Zero and sign extension instructions.
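;; For the extensions to DImode below, the high word is synthesized from the
;; low word rather than by any 64-bit instruction: zero extension moves #0
;; into the upper half, while sign extension replicates the sign bit with an
;; arithmetic shift. A sketch for HImode, with illustrative registers
;; (r0 = low word, r1 = high word):
;;	sxth r0, r2
;;	asr  r1, r0, #31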
4185
4186 (define_expand "zero_extend<mode>di2"
4187 [(set (match_operand:DI 0 "s_register_operand" "")
4188 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
4189 "TARGET_32BIT <qhs_zextenddi_cond>"
4190 {
4191 rtx res_lo, res_hi, op0_lo, op0_hi;
4192 res_lo = gen_lowpart (SImode, operands[0]);
4193 res_hi = gen_highpart (SImode, operands[0]);
4194 if (can_create_pseudo_p ())
4195 {
4196 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4197 op0_hi = gen_reg_rtx (SImode);
4198 }
4199 else
4200 {
4201 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4202 op0_hi = res_hi;
4203 }
4204 if (<MODE>mode != SImode)
4205 emit_insn (gen_rtx_SET (op0_lo,
4206 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4207 emit_insn (gen_movsi (op0_hi, const0_rtx));
4208 if (res_lo != op0_lo)
4209 emit_move_insn (res_lo, op0_lo);
4210 if (res_hi != op0_hi)
4211 emit_move_insn (res_hi, op0_hi);
4212 DONE;
4213 }
4214 )
4215
4216 (define_expand "extend<mode>di2"
4217 [(set (match_operand:DI 0 "s_register_operand" "")
4218 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
4219 "TARGET_32BIT <qhs_sextenddi_cond>"
4220 {
4221 rtx res_lo, res_hi, op0_lo, op0_hi;
4222 res_lo = gen_lowpart (SImode, operands[0]);
4223 res_hi = gen_highpart (SImode, operands[0]);
4224 if (can_create_pseudo_p ())
4225 {
4226 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4227 op0_hi = gen_reg_rtx (SImode);
4228 }
4229 else
4230 {
4231 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4232 op0_hi = res_hi;
4233 }
4234 if (<MODE>mode != SImode)
4235 emit_insn (gen_rtx_SET (op0_lo,
4236 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4237 emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
4238 if (res_lo != op0_lo)
4239 emit_move_insn (res_lo, op0_lo);
4240 if (res_hi != op0_hi)
4241 emit_move_insn (res_hi, op0_hi);
4242 DONE;
4243 }
4244 )
4245
4246 ;; Splits for all extensions to DImode
4247 (define_split
4248 [(set (match_operand:DI 0 "s_register_operand" "")
4249 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4250 "TARGET_32BIT"
4251 [(set (match_dup 0) (match_dup 1))]
4252 {
4253 rtx lo_part = gen_lowpart (SImode, operands[0]);
4254 machine_mode src_mode = GET_MODE (operands[1]);
4255
4256 if (src_mode == SImode)
4257 emit_move_insn (lo_part, operands[1]);
4258 else
4259 emit_insn (gen_rtx_SET (lo_part,
4260 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4261 operands[0] = gen_highpart (SImode, operands[0]);
4262 operands[1] = const0_rtx;
4263 })
4264
4265 (define_split
4266 [(set (match_operand:DI 0 "s_register_operand" "")
4267 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4268 "TARGET_32BIT"
4269 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4270 {
4271 rtx lo_part = gen_lowpart (SImode, operands[0]);
4272 machine_mode src_mode = GET_MODE (operands[1]);
4273
4274 if (src_mode == SImode)
4275 emit_move_insn (lo_part, operands[1]);
4276 else
4277 emit_insn (gen_rtx_SET (lo_part,
4278 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4279 operands[1] = lo_part;
4280 operands[0] = gen_highpart (SImode, operands[0]);
4281 })
4282
4283 (define_expand "zero_extendhisi2"
4284 [(set (match_operand:SI 0 "s_register_operand")
4285 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4286 "TARGET_EITHER"
4287 {
4288 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4289 {
4290 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4291 DONE;
4292 }
4293 if (!arm_arch6 && !MEM_P (operands[1]))
4294 {
4295 rtx t = gen_lowpart (SImode, operands[1]);
4296 rtx tmp = gen_reg_rtx (SImode);
4297 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4298 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
4299 DONE;
4300 }
4301 })
4302
4303 (define_split
4304 [(set (match_operand:SI 0 "s_register_operand" "")
4305 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
4306 "!TARGET_THUMB2 && !arm_arch6"
4307 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4308 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4309 {
4310 operands[2] = gen_lowpart (SImode, operands[1]);
4311 })
4312
4313 (define_insn "*arm_zero_extendhisi2"
4314 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4315 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4316 "TARGET_ARM && arm_arch4 && !arm_arch6"
4317 "@
4318 #
4319 ldrh%?\\t%0, %1"
4320 [(set_attr "type" "alu_shift_reg,load_byte")
4321 (set_attr "predicable" "yes")]
4322 )
4323
4324 (define_insn "*arm_zero_extendhisi2_v6"
4325 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4326 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4327 "TARGET_ARM && arm_arch6"
4328 "@
4329 uxth%?\\t%0, %1
4330 ldrh%?\\t%0, %1"
4331 [(set_attr "predicable" "yes")
4332 (set_attr "type" "extend,load_byte")]
4333 )
4334
4335 (define_insn "*arm_zero_extendhisi2addsi"
4336 [(set (match_operand:SI 0 "s_register_operand" "=r")
4337 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4338 (match_operand:SI 2 "s_register_operand" "r")))]
4339 "TARGET_INT_SIMD"
4340 "uxtah%?\\t%0, %2, %1"
4341 [(set_attr "type" "alu_shift_reg")
4342 (set_attr "predicable" "yes")]
4343 )
4344
4345 (define_expand "zero_extendqisi2"
4346 [(set (match_operand:SI 0 "s_register_operand")
4347 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
4348 "TARGET_EITHER"
4349 {
4350 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
4351 {
4352 emit_insn (gen_andsi3 (operands[0],
4353 gen_lowpart (SImode, operands[1]),
4354 GEN_INT (255)));
4355 DONE;
4356 }
4357 if (!arm_arch6 && !MEM_P (operands[1]))
4358 {
4359 rtx t = gen_lowpart (SImode, operands[1]);
4360 rtx tmp = gen_reg_rtx (SImode);
4361 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4362 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
4363 DONE;
4364 }
4365 })
4366
4367 (define_split
4368 [(set (match_operand:SI 0 "s_register_operand" "")
4369 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
4370 "!arm_arch6"
4371 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4372 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4373 {
4374 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4375 if (TARGET_ARM)
4376 {
4377 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
4378 DONE;
4379 }
4380 })
4381
4382 (define_insn "*arm_zero_extendqisi2"
4383 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4384 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4385 "TARGET_ARM && !arm_arch6"
4386 "@
4387 #
4388 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4389 [(set_attr "length" "8,4")
4390 (set_attr "type" "alu_shift_reg,load_byte")
4391 (set_attr "predicable" "yes")]
4392 )
4393
4394 (define_insn "*arm_zero_extendqisi2_v6"
4395 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4396 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
4397 "TARGET_ARM && arm_arch6"
4398 "@
4399 uxtb%?\\t%0, %1
4400 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4401 [(set_attr "type" "extend,load_byte")
4402 (set_attr "predicable" "yes")]
4403 )
4404
4405 (define_insn "*arm_zero_extendqisi2addsi"
4406 [(set (match_operand:SI 0 "s_register_operand" "=r")
4407 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4408 (match_operand:SI 2 "s_register_operand" "r")))]
4409 "TARGET_INT_SIMD"
4410 "uxtab%?\\t%0, %2, %1"
4411 [(set_attr "predicable" "yes")
4412 (set_attr "type" "alu_shift_reg")]
4413 )
4414
4415 (define_split
4416 [(set (match_operand:SI 0 "s_register_operand" "")
4417 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4418 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4419 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
4420 [(set (match_dup 2) (match_dup 1))
4421 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4422 ""
4423 )
4424
4425 (define_split
4426 [(set (match_operand:SI 0 "s_register_operand" "")
4427 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4428 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4429 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
4430 [(set (match_dup 2) (match_dup 1))
4431 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4432 ""
4433 )
4434
4435
4436 (define_split
4437 [(set (match_operand:SI 0 "s_register_operand" "")
4438 (IOR_XOR:SI (and:SI (ashift:SI
4439 (match_operand:SI 1 "s_register_operand" "")
4440 (match_operand:SI 2 "const_int_operand" ""))
4441 (match_operand:SI 3 "const_int_operand" ""))
4442 (zero_extend:SI
4443 (match_operator 5 "subreg_lowpart_operator"
4444 [(match_operand:SI 4 "s_register_operand" "")]))))]
4445 "TARGET_32BIT
4446 && (UINTVAL (operands[3])
4447 == (GET_MODE_MASK (GET_MODE (operands[5]))
4448 & (GET_MODE_MASK (GET_MODE (operands[5]))
4449 << (INTVAL (operands[2])))))"
4450 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
4451 (match_dup 4)))
4452 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4453 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4454 )
4455
4456 (define_insn "*compareqi_eq0"
4457 [(set (reg:CC_Z CC_REGNUM)
4458 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4459 (const_int 0)))]
4460 "TARGET_32BIT"
4461 "tst%?\\t%0, #255"
4462 [(set_attr "conds" "set")
4463 (set_attr "predicable" "yes")
4464 (set_attr "type" "logic_imm")]
4465 )
4466
4467 (define_expand "extendhisi2"
4468 [(set (match_operand:SI 0 "s_register_operand")
4469 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4470 "TARGET_EITHER"
4471 {
4472 if (TARGET_THUMB1)
4473 {
4474 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4475 DONE;
4476 }
4477 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4478 {
4479 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4480 DONE;
4481 }
4482
4483 if (!arm_arch6 && !MEM_P (operands[1]))
4484 {
4485 rtx t = gen_lowpart (SImode, operands[1]);
4486 rtx tmp = gen_reg_rtx (SImode);
4487 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4488 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
4489 DONE;
4490 }
4491 })
4492
4493 (define_split
4494 [(parallel
4495 [(set (match_operand:SI 0 "register_operand" "")
4496 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4497 (clobber (match_scratch:SI 2 ""))])]
4498 "!arm_arch6"
4499 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4500 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4501 {
4502 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4503 })
4504
4505 ;; This pattern will only be used when ldrsh is not available.
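;; In that case the bytes are loaded separately and recombined; for a
;; little-endian target the generated code is roughly (register numbers are
;; illustrative):
;;	ldrb r2, [addr]		@ low byte
;;	ldrb r3, [addr, #1]	@ high byte
;;	mov  r6, r3, lsl #24
;;	orr  r0, r2, r6, asr #16	@ sign-extended halfword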
4506 (define_expand "extendhisi2_mem"
4507 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4508 (set (match_dup 3)
4509 (zero_extend:SI (match_dup 7)))
4510 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4511 (set (match_operand:SI 0 "" "")
4512 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4513 "TARGET_ARM"
4514 "
4515 {
4516 rtx mem1, mem2;
4517 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4518
4519 mem1 = change_address (operands[1], QImode, addr);
4520 mem2 = change_address (operands[1], QImode,
4521 plus_constant (Pmode, addr, 1));
4522 operands[0] = gen_lowpart (SImode, operands[0]);
4523 operands[1] = mem1;
4524 operands[2] = gen_reg_rtx (SImode);
4525 operands[3] = gen_reg_rtx (SImode);
4526 operands[6] = gen_reg_rtx (SImode);
4527 operands[7] = mem2;
4528
4529 if (BYTES_BIG_ENDIAN)
4530 {
4531 operands[4] = operands[2];
4532 operands[5] = operands[3];
4533 }
4534 else
4535 {
4536 operands[4] = operands[3];
4537 operands[5] = operands[2];
4538 }
4539 }"
4540 )
4541
4542 (define_split
4543 [(set (match_operand:SI 0 "register_operand" "")
4544 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4545 "!arm_arch6"
4546 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4547 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4548 {
4549 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4550 })
4551
4552 (define_insn "*arm_extendhisi2"
4553 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4554 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4555 "TARGET_ARM && arm_arch4 && !arm_arch6"
4556 "@
4557 #
4558 ldrsh%?\\t%0, %1"
4559 [(set_attr "length" "8,4")
4560 (set_attr "type" "alu_shift_reg,load_byte")
4561 (set_attr "predicable" "yes")]
4562 )
4563
4564 ;; ??? Check Thumb-2 pool range
4565 (define_insn "*arm_extendhisi2_v6"
4566 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4567 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4568 "TARGET_32BIT && arm_arch6"
4569 "@
4570 sxth%?\\t%0, %1
4571 ldrsh%?\\t%0, %1"
4572 [(set_attr "type" "extend,load_byte")
4573 (set_attr "predicable" "yes")]
4574 )
4575
4576 (define_insn "*arm_extendhisi2addsi"
4577 [(set (match_operand:SI 0 "s_register_operand" "=r")
4578 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4579 (match_operand:SI 2 "s_register_operand" "r")))]
4580 "TARGET_INT_SIMD"
4581 "sxtah%?\\t%0, %2, %1"
4582 [(set_attr "type" "alu_shift_reg")]
4583 )
4584
4585 (define_expand "extendqihi2"
4586 [(set (match_dup 2)
4587 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
4588 (const_int 24)))
4589 (set (match_operand:HI 0 "s_register_operand")
4590 (ashiftrt:SI (match_dup 2)
4591 (const_int 24)))]
4592 "TARGET_ARM"
4593 "
4594 {
4595 if (arm_arch4 && MEM_P (operands[1]))
4596 {
4597 emit_insn (gen_rtx_SET (operands[0],
4598 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4599 DONE;
4600 }
4601 if (!s_register_operand (operands[1], QImode))
4602 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4603 operands[0] = gen_lowpart (SImode, operands[0]);
4604 operands[1] = gen_lowpart (SImode, operands[1]);
4605 operands[2] = gen_reg_rtx (SImode);
4606 }"
4607 )
4608
4609 (define_insn "*arm_extendqihi_insn"
4610 [(set (match_operand:HI 0 "s_register_operand" "=r")
4611 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4612 "TARGET_ARM && arm_arch4"
4613 "ldrsb%?\\t%0, %1"
4614 [(set_attr "type" "load_byte")
4615 (set_attr "predicable" "yes")]
4616 )
4617
4618 (define_expand "extendqisi2"
4619 [(set (match_operand:SI 0 "s_register_operand")
4620 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
4621 "TARGET_EITHER"
4622 {
4623 if (!arm_arch4 && MEM_P (operands[1]))
4624 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4625
4626 if (!arm_arch6 && !MEM_P (operands[1]))
4627 {
4628 rtx t = gen_lowpart (SImode, operands[1]);
4629 rtx tmp = gen_reg_rtx (SImode);
4630 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4631 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
4632 DONE;
4633 }
4634 })
4635
4636 (define_split
4637 [(set (match_operand:SI 0 "register_operand" "")
4638 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
4639 "!arm_arch6"
4640 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4641 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
4642 {
4643 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4644 })
4645
4646 (define_insn "*arm_extendqisi"
4647 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4648 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4649 "TARGET_ARM && arm_arch4 && !arm_arch6"
4650 "@
4651 #
4652 ldrsb%?\\t%0, %1"
4653 [(set_attr "length" "8,4")
4654 (set_attr "type" "alu_shift_reg,load_byte")
4655 (set_attr "predicable" "yes")]
4656 )
4657
4658 (define_insn "*arm_extendqisi_v6"
4659 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4660 (sign_extend:SI
4661 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4662 "TARGET_ARM && arm_arch6"
4663 "@
4664 sxtb%?\\t%0, %1
4665 ldrsb%?\\t%0, %1"
4666 [(set_attr "type" "extend,load_byte")
4667 (set_attr "predicable" "yes")]
4668 )
4669
4670 (define_insn "*arm_extendqisi2addsi"
4671 [(set (match_operand:SI 0 "s_register_operand" "=r")
4672 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4673 (match_operand:SI 2 "s_register_operand" "r")))]
4674 "TARGET_INT_SIMD"
4675 "sxtab%?\\t%0, %2, %1"
4676 [(set_attr "type" "alu_shift_reg")
4677 (set_attr "predicable" "yes")]
4678 )
4679
4680 (define_insn "arm_<sup>xtb16"
4681 [(set (match_operand:SI 0 "s_register_operand" "=r")
4682 (unspec:SI
4683 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
4684 "TARGET_INT_SIMD"
4685 "<sup>xtb16%?\\t%0, %1"
4686 [(set_attr "predicable" "yes")
4687 (set_attr "type" "alu_dsp_reg")])
4688
4689 (define_insn "arm_<simd32_op>"
4690 [(set (match_operand:SI 0 "s_register_operand" "=r")
4691 (unspec:SI
4692 [(match_operand:SI 1 "s_register_operand" "r")
4693 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
4694 "TARGET_INT_SIMD"
4695 "<simd32_op>%?\\t%0, %1, %2"
4696 [(set_attr "predicable" "yes")
4697 (set_attr "type" "alu_dsp_reg")])
4698
4699 (define_insn "arm_usada8"
4700 [(set (match_operand:SI 0 "s_register_operand" "=r")
4701 (unspec:SI
4702 [(match_operand:SI 1 "s_register_operand" "r")
4703 (match_operand:SI 2 "s_register_operand" "r")
4704 (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
4705 "TARGET_INT_SIMD"
4706 "usada8%?\\t%0, %1, %2, %3"
4707 [(set_attr "predicable" "yes")
4708 (set_attr "type" "alu_dsp_reg")])
4709
4710 (define_insn "arm_<simd32_op>"
4711 [(set (match_operand:DI 0 "s_register_operand" "=r")
4712 (unspec:DI
4713 [(match_operand:SI 1 "s_register_operand" "r")
4714 (match_operand:SI 2 "s_register_operand" "r")
4715 (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
4716 "TARGET_INT_SIMD"
4717 "<simd32_op>%?\\t%Q0, %R0, %1, %2"
4718 [(set_attr "predicable" "yes")
4719 (set_attr "type" "smlald")])
4720
4721 (define_expand "extendsfdf2"
4722 [(set (match_operand:DF 0 "s_register_operand")
4723 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
4724 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4725 ""
4726 )
4727
4728 ;; HFmode -> DFmode conversions where we don't have an instruction for it
4729 ;; must go through SFmode.
4730 ;;
4731 ;; This is always safe for an extend.
4732
4733 (define_expand "extendhfdf2"
4734 [(set (match_operand:DF 0 "s_register_operand")
4735 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
4736 "TARGET_EITHER"
4737 {
4738 /* We don't have a direct instruction for this, so go via SFmode. */
4739 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4740 {
4741 rtx op1;
4742 op1 = convert_to_mode (SFmode, operands[1], 0);
4743 op1 = convert_to_mode (DFmode, op1, 0);
4744 emit_insn (gen_movdf (operands[0], op1));
4745 DONE;
4746 }
4747 /* Otherwise, we're done producing RTL and will pick up the correct
4748 pattern to do this with one rounding-step in a single instruction. */
4749 }
4750 )
4751 \f
4752 ;; Move insns (including loads and stores)
4753
4754 ;; XXX Just some ideas about movti.
4755 ;; I don't think these are a good idea on the ARM; there just aren't enough
4756 ;; registers.
4757 ;;(define_expand "loadti"
4758 ;; [(set (match_operand:TI 0 "s_register_operand")
4759 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
4760 ;; "" "")
4761
4762 ;;(define_expand "storeti"
4763 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
4764 ;; (match_operand:TI 1 "s_register_operand"))]
4765 ;; "" "")
4766
4767 ;;(define_expand "movti"
4768 ;; [(set (match_operand:TI 0 "general_operand")
4769 ;; (match_operand:TI 1 "general_operand"))]
4770 ;; ""
4771 ;; "
4772 ;;{
4773 ;; rtx insn;
4774 ;;
4775 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
4776 ;; operands[1] = copy_to_reg (operands[1]);
4777 ;; if (MEM_P (operands[0]))
4778 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4779 ;; else if (MEM_P (operands[1]))
4780 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4781 ;; else
4782 ;; FAIL;
4783 ;;
4784 ;; emit_insn (insn);
4785 ;; DONE;
4786 ;;}")
4787
4788 ;; Recognize garbage generated above.
4789
4790 ;;(define_insn ""
4791 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4792 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4793 ;; ""
4794 ;; "*
4795 ;; {
4796 ;; register mem = (which_alternative < 3);
4797 ;; register const char *template;
4798 ;;
4799 ;; operands[mem] = XEXP (operands[mem], 0);
4800 ;; switch (which_alternative)
4801 ;; {
4802 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4803 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4804 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4805 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4806 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4807 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4808 ;; }
4809 ;; output_asm_insn (template, operands);
4810 ;; return \"\";
4811 ;; }")
4812
4813 (define_expand "movdi"
4814 [(set (match_operand:DI 0 "general_operand")
4815 (match_operand:DI 1 "general_operand"))]
4816 "TARGET_EITHER"
4817 "
4818 gcc_checking_assert (aligned_operand (operands[0], DImode));
4819 gcc_checking_assert (aligned_operand (operands[1], DImode));
4820 if (can_create_pseudo_p ())
4821 {
4822 if (!REG_P (operands[0]))
4823 operands[1] = force_reg (DImode, operands[1]);
4824 }
4825 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
4826 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
4827 {
4828 /* Avoid LDRD's into an odd-numbered register pair in ARM state
4829 when expanding function calls. */
4830 gcc_assert (can_create_pseudo_p ());
4831 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
4832 {
4833 /* Perform load into legal reg pair first, then move. */
4834 rtx reg = gen_reg_rtx (DImode);
4835 emit_insn (gen_movdi (reg, operands[1]));
4836 operands[1] = reg;
4837 }
4838 emit_move_insn (gen_lowpart (SImode, operands[0]),
4839 gen_lowpart (SImode, operands[1]));
4840 emit_move_insn (gen_highpart (SImode, operands[0]),
4841 gen_highpart (SImode, operands[1]));
4842 DONE;
4843 }
4844 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
4845 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
4846 {
4847 /* Avoid STRD's from an odd-numbered register pair in ARM state
4848 when expanding function prologue. */
4849 gcc_assert (can_create_pseudo_p ());
4850 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
4851 ? gen_reg_rtx (DImode)
4852 : operands[0];
4853 emit_move_insn (gen_lowpart (SImode, split_dest),
4854 gen_lowpart (SImode, operands[1]));
4855 emit_move_insn (gen_highpart (SImode, split_dest),
4856 gen_highpart (SImode, operands[1]));
4857 if (split_dest != operands[0])
4858 emit_insn (gen_movdi (operands[0], split_dest));
4859 DONE;
4860 }
4861 "
4862 )
4863
4864 (define_insn "*arm_movdi"
4865 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4866 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4867 "TARGET_32BIT
4868 && !(TARGET_HARD_FLOAT)
4869 && !TARGET_IWMMXT
4870 && ( register_operand (operands[0], DImode)
4871 || register_operand (operands[1], DImode))"
4872 "*
4873 switch (which_alternative)
4874 {
4875 case 0:
4876 case 1:
4877 case 2:
4878 return \"#\";
4879 case 3:
4880 /* Cannot load it directly, split to load it via MOV / MOVT. */
4881 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
4882 return \"#\";
4883 /* Fall through. */
4884 default:
4885 return output_move_double (operands, true, NULL);
4886 }
4887 "
4888 [(set_attr "length" "8,12,16,8,8")
4889 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
4890 (set_attr "arm_pool_range" "*,*,*,1020,*")
4891 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
4892 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
4893 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
4894 )
4895
4896 (define_split
4897 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4898 (match_operand:ANY64 1 "immediate_operand" ""))]
4899 "TARGET_32BIT
4900 && reload_completed
4901 && (arm_disable_literal_pool
4902 || (arm_const_double_inline_cost (operands[1])
4903 <= arm_max_const_double_inline_cost ()))"
4904 [(const_int 0)]
4905 "
4906 arm_split_constant (SET, SImode, curr_insn,
4907 INTVAL (gen_lowpart (SImode, operands[1])),
4908 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4909 arm_split_constant (SET, SImode, curr_insn,
4910 INTVAL (gen_highpart_mode (SImode,
4911 GET_MODE (operands[0]),
4912 operands[1])),
4913 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4914 DONE;
4915 "
4916 )
4917
4918 ; If optimizing for size, or if we have load delay slots, then
4919 ; we want to split the constant into two separate operations.
4920 ; In both cases this may reduce a trivial part to a single data op,
4921 ; leaving a single complex constant to load. We can also get longer
4922 ; offsets in an LDR, which means a better chance of sharing pool
4923 ; entries. Finally, we can normally do a better job of scheduling
4924 ; LDR instructions than we can with LDM.
4925 ; This pattern will only match if the one above did not.
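; As a rough illustration, for a DImode constant such as 0x1deadbeef the high
; word (1) becomes a single mov, and only the low word (0xdeadbeef) still
; needs a constant-pool load or a movw/movt pair.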
4926 (define_split
4927 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4928 (match_operand:ANY64 1 "const_double_operand" ""))]
4929 "TARGET_ARM && reload_completed
4930 && arm_const_double_by_parts (operands[1])"
4931 [(set (match_dup 0) (match_dup 1))
4932 (set (match_dup 2) (match_dup 3))]
4933 "
4934 operands[2] = gen_highpart (SImode, operands[0]);
4935 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4936 operands[1]);
4937 operands[0] = gen_lowpart (SImode, operands[0]);
4938 operands[1] = gen_lowpart (SImode, operands[1]);
4939 "
4940 )
4941
4942 (define_split
4943 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4944 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4945 "TARGET_EITHER && reload_completed"
4946 [(set (match_dup 0) (match_dup 1))
4947 (set (match_dup 2) (match_dup 3))]
4948 "
4949 operands[2] = gen_highpart (SImode, operands[0]);
4950 operands[3] = gen_highpart (SImode, operands[1]);
4951 operands[0] = gen_lowpart (SImode, operands[0]);
4952 operands[1] = gen_lowpart (SImode, operands[1]);
4953
4954 /* Handle a partial overlap. */
4955 if (rtx_equal_p (operands[0], operands[3]))
4956 {
4957 rtx tmp0 = operands[0];
4958 rtx tmp1 = operands[1];
4959
4960 operands[0] = operands[2];
4961 operands[1] = operands[3];
4962 operands[2] = tmp0;
4963 operands[3] = tmp1;
4964 }
4965 "
4966 )
4967
4968 ;; We can't actually do base+index doubleword loads if the index and
4969 ;; destination overlap. Split here so that we at least have a chance to
4970 ;; schedule.
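;; For example (a sketch with illustrative registers), rather than trying to
;; emit
;;	ldrd r0, r1, [r0, r1]
;; the split computes the address first:
;;	add  r0, r0, r1
;;	ldrd r0, r1, [r0]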
4971 (define_split
4972 [(set (match_operand:DI 0 "s_register_operand" "")
4973 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4974 (match_operand:SI 2 "s_register_operand" ""))))]
4975 "TARGET_LDRD
4976 && reg_overlap_mentioned_p (operands[0], operands[1])
4977 && reg_overlap_mentioned_p (operands[0], operands[2])"
4978 [(set (match_dup 4)
4979 (plus:SI (match_dup 1)
4980 (match_dup 2)))
4981 (set (match_dup 0)
4982 (mem:DI (match_dup 4)))]
4983 "
4984 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
4985 "
4986 )
4987
4988 (define_expand "movsi"
4989 [(set (match_operand:SI 0 "general_operand")
4990 (match_operand:SI 1 "general_operand"))]
4991 "TARGET_EITHER"
4992 "
4993 {
4994 rtx base, offset, tmp;
4995
4996 gcc_checking_assert (aligned_operand (operands[0], SImode));
4997 gcc_checking_assert (aligned_operand (operands[1], SImode));
4998 if (TARGET_32BIT || TARGET_HAVE_MOVT)
4999 {
5000 /* Everything except mem = const or mem = mem can be done easily. */
5001 if (MEM_P (operands[0]))
5002 operands[1] = force_reg (SImode, operands[1]);
5003 if (arm_general_register_operand (operands[0], SImode)
5004 && CONST_INT_P (operands[1])
5005 && !(const_ok_for_arm (INTVAL (operands[1]))
5006 || const_ok_for_arm (~INTVAL (operands[1]))))
5007 {
5008 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
5009 {
5010 emit_insn (gen_rtx_SET (operands[0], operands[1]));
5011 DONE;
5012 }
5013 else
5014 {
5015 arm_split_constant (SET, SImode, NULL_RTX,
5016 INTVAL (operands[1]), operands[0], NULL_RTX,
5017 optimize && can_create_pseudo_p ());
5018 DONE;
5019 }
5020 }
5021 }
5022 else /* Target doesn't have MOVT... */
5023 {
5024 if (can_create_pseudo_p ())
5025 {
5026 if (!REG_P (operands[0]))
5027 operands[1] = force_reg (SImode, operands[1]);
5028 }
5029 }
5030
5031 split_const (operands[1], &base, &offset);
5032 if (INTVAL (offset) != 0
5033 && targetm.cannot_force_const_mem (SImode, operands[1]))
5034 {
5035 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5036 emit_move_insn (tmp, base);
5037 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5038 DONE;
5039 }
5040
5041 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
5042
5043 /* Recognize the case where operands[1] is a reference to thread-local
5044 data and load its address into a register. Offsets have been split off
5045 already. */
5046 if (arm_tls_referenced_p (operands[1]))
5047 operands[1] = legitimize_tls_address (operands[1], tmp);
5048 else if (flag_pic
5049 && (CONSTANT_P (operands[1])
5050 || symbol_mentioned_p (operands[1])
5051 || label_mentioned_p (operands[1])))
5052 operands[1] =
5053 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
5054 }
5055 "
5056 )
5057
5058 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5059 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5060 ;; so this does not matter.
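;; Concretely (a sketch of the pair emitted by arm_emit_movpair, used further
;; below): a symbolic move becomes
;;	(set r (high sym))	-> movw r, #:lower16:sym
;;	(set r (lo_sum r sym))	-> movt r, #:upper16:sym
;; the second of which is matched by the pattern that follows; the first is
;; handled by a separate movw pattern elsewhere in the port.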
5061 (define_insn "*arm_movt"
5062 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
5063 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
5064 (match_operand:SI 2 "general_operand" "i,i")))]
5065 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
5066 "@
5067 movt%?\t%0, #:upper16:%c2
5068 movt\t%0, #:upper16:%c2"
5069 [(set_attr "arch" "32,v8mb")
5070 (set_attr "predicable" "yes")
5071 (set_attr "length" "4")
5072 (set_attr "type" "alu_sreg")]
5073 )
5074
5075 (define_insn "*arm_movsi_insn"
5076 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5077 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5078 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
5079 && ( register_operand (operands[0], SImode)
5080 || register_operand (operands[1], SImode))"
5081 "@
5082 mov%?\\t%0, %1
5083 mov%?\\t%0, %1
5084 mvn%?\\t%0, #%B1
5085 movw%?\\t%0, %1
5086 ldr%?\\t%0, %1
5087 str%?\\t%1, %0"
5088 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
5089 (set_attr "predicable" "yes")
5090 (set_attr "arch" "*,*,*,v6t2,*,*")
5091 (set_attr "pool_range" "*,*,*,*,4096,*")
5092 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5093 )
5094
5095 (define_split
5096 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5097 (match_operand:SI 1 "const_int_operand" ""))]
5098 "(TARGET_32BIT || TARGET_HAVE_MOVT)
5099 && (!(const_ok_for_arm (INTVAL (operands[1]))
5100 || const_ok_for_arm (~INTVAL (operands[1]))))"
5101 [(clobber (const_int 0))]
5102 "
5103 arm_split_constant (SET, SImode, NULL_RTX,
5104 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5105 DONE;
5106 "
5107 )
5108
5109 ;; The usual way to materialize (symbol + offset) requires at least three
5110 ;; instructions (depending on how big the offset is), for example:
5111 ;; movw r0, #:lower16:g
5112 ;; movt r0, #:upper16:g
5113 ;; adds r0, #4
5114 ;;
5115 ;; A better way is to fold the offset into the relocations:
5116 ;; movw r0, #:lower16:g+4
5117 ;; movt r0, #:upper16:g+4
5118 ;;
5119 ;; The limitation of this approach is that the offset must fit in a 16-bit
5120 ;; signed value, because the current assembler only supports REL-type
5121 ;; relocations for this case. If the more powerful RELA type is supported in
5122 ;; the future, this pattern should be updated to use the better sequence.
5123 (define_split
5124 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5125 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
5126 (match_operand:SI 2 "const_int_operand" ""))))]
5127 "TARGET_THUMB
5128 && TARGET_HAVE_MOVT
5129 && arm_disable_literal_pool
5130 && reload_completed
5131 && GET_CODE (operands[1]) == SYMBOL_REF"
5132 [(clobber (const_int 0))]
5133 "
5134 int offset = INTVAL (operands[2]);
5135
5136 if (offset < -0x8000 || offset > 0x7fff)
5137 {
5138 arm_emit_movpair (operands[0], operands[1]);
5139 emit_insn (gen_rtx_SET (operands[0],
5140 gen_rtx_PLUS (SImode, operands[0], operands[2])));
5141 }
5142 else
5143 {
5144 rtx op = gen_rtx_CONST (SImode,
5145 gen_rtx_PLUS (SImode, operands[1], operands[2]));
5146 arm_emit_movpair (operands[0], op);
5147 }
5148 "
5149 )
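;; A minimal sketch (not part of the machine description) of the decision the
;; split above makes; the function name is an assumption.  Offsets that fit in
;; a signed 16-bit value are folded into the movw/movt relocations; anything
;; larger is added in a separate instruction after the bare symbol is loaded.
;;
;;   static int
;;   offset_fits_rel16 (long offset)
;;   {
;;     return offset >= -0x8000 && offset <= 0x7fff;
;;   }
;;
;; For instance, offset_fits_rel16 (4) is 1, so the offset is folded into the
;; relocations; offset_fits_rel16 (0x12345) is 0, so a separate add is emitted.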
5150
5151 ;; Split symbol_refs at a later stage (after cprop), instead of generating
5152 ;; the movt/movw pair directly at expand time; otherwise the corresponding
5153 ;; high and lo_sum would be merged back into a memory load by cprop. When
5154 ;; the default is to prefer movt/movw over a load from the constant pool,
5155 ;; this gives better performance.
5156 (define_split
5157 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5158 (match_operand:SI 1 "general_operand" ""))]
5159 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
5160 && !target_word_relocations
5161 && !arm_tls_referenced_p (operands[1])"
5162 [(clobber (const_int 0))]
5163 {
5164 arm_emit_movpair (operands[0], operands[1]);
5165 DONE;
5166 })
5167
5168 ;; When generating pic, we need to load the symbol offset into a register.
5169 ;; So that the optimizer does not confuse this with a normal symbol load
5170 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5171 ;; since that is the only type of relocation we can use.
5172
5173 ;; Wrap calculation of the whole PIC address in a single pattern for the
5174 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5175 ;; a PIC address involves two loads from memory, so we want to CSE it
5176 ;; as often as possible.
5177 ;; This pattern will be split into one of the pic_load_addr_* patterns
5178 ;; and a move after GCSE optimizations.
5179 ;;
5180 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
5181 (define_expand "calculate_pic_address"
5182 [(set (match_operand:SI 0 "register_operand")
5183 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
5184 (unspec:SI [(match_operand:SI 2 "" "")]
5185 UNSPEC_PIC_SYM))))]
5186 "flag_pic"
5187 )
5188
5189 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5190 (define_split
5191 [(set (match_operand:SI 0 "register_operand" "")
5192 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5193 (unspec:SI [(match_operand:SI 2 "" "")]
5194 UNSPEC_PIC_SYM))))]
5195 "flag_pic"
5196 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5197 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5198 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5199 )
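;; Rough, illustrative C picture of the point made above (names are assumptions,
;; not GCC API): computing a PIC address really is two dependent loads, one to
;; fetch the symbol's GOT offset from the literal pool and one to read the GOT
;; slot itself, which is why the combined pattern is kept as a single unit that
;; GCSE/PRE can CSE or hoist.
;;
;;   #include <string.h>
;;
;;   static void *
;;   load_pic_address (const char *pic_base, const long *pool_entry)
;;   {
;;     long got_offset = *pool_entry;          /* load 1: literal pool entry */
;;     void *symbol_address;
;;     memcpy (&symbol_address, pic_base + got_offset,
;;             sizeof symbol_address);         /* load 2: GOT slot           */
;;     return symbol_address;
;;   }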
5200
5201 ;; operand1 is the memory address to go into
5202 ;; pic_load_addr_32bit.
5203 ;; operand2 is the PIC label to be emitted
5204 ;; from pic_add_dot_plus_eight.
5205 ;; We do this to allow hoisting of the entire insn.
5206 (define_insn_and_split "pic_load_addr_unified"
5207 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
5208 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
5209 (match_operand:SI 2 "" "")]
5210 UNSPEC_PIC_UNIFIED))]
5211 "flag_pic"
5212 "#"
5213 "&& reload_completed"
5214 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
5215 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
5216 (match_dup 2)] UNSPEC_PIC_BASE))]
5217 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
5218 [(set_attr "type" "load_4,load_4,load_4")
5219 (set_attr "pool_range" "4096,4094,1022")
5220 (set_attr "neg_pool_range" "4084,0,0")
5221 (set_attr "arch" "a,t2,t1")
5222 (set_attr "length" "8,6,4")]
5223 )
5224
5225 ;; The rather odd constraints on the following are to force reload to leave
5226 ;; the insn alone, and to force the minipool generation pass to then move
5227 ;; the GOT symbol to memory.
5228
5229 (define_insn "pic_load_addr_32bit"
5230 [(set (match_operand:SI 0 "s_register_operand" "=r")
5231 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5232 "TARGET_32BIT && flag_pic"
5233 "ldr%?\\t%0, %1"
5234 [(set_attr "type" "load_4")
5235 (set (attr "pool_range")
5236 (if_then_else (eq_attr "is_thumb" "no")
5237 (const_int 4096)
5238 (const_int 4094)))
5239 (set (attr "neg_pool_range")
5240 (if_then_else (eq_attr "is_thumb" "no")
5241 (const_int 4084)
5242 (const_int 0)))]
5243 )
5244
5245 (define_insn "pic_load_addr_thumb1"
5246 [(set (match_operand:SI 0 "s_register_operand" "=l")
5247 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5248 "TARGET_THUMB1 && flag_pic"
5249 "ldr\\t%0, %1"
5250 [(set_attr "type" "load_4")
5251 (set (attr "pool_range") (const_int 1018))]
5252 )
5253
5254 (define_insn "pic_add_dot_plus_four"
5255 [(set (match_operand:SI 0 "register_operand" "=r")
5256 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5257 (const_int 4)
5258 (match_operand 2 "" "")]
5259 UNSPEC_PIC_BASE))]
5260 "TARGET_THUMB"
5261 "*
5262 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5263 INTVAL (operands[2]));
5264 return \"add\\t%0, %|pc\";
5265 "
5266 [(set_attr "length" "2")
5267 (set_attr "type" "alu_sreg")]
5268 )
5269
5270 (define_insn "pic_add_dot_plus_eight"
5271 [(set (match_operand:SI 0 "register_operand" "=r")
5272 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5273 (const_int 8)
5274 (match_operand 2 "" "")]
5275 UNSPEC_PIC_BASE))]
5276 "TARGET_ARM"
5277 "*
5278 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5279 INTVAL (operands[2]));
5280 return \"add%?\\t%0, %|pc, %1\";
5281 "
5282 [(set_attr "predicable" "yes")
5283 (set_attr "type" "alu_sreg")]
5284 )
5285
5286 (define_insn "tls_load_dot_plus_eight"
5287 [(set (match_operand:SI 0 "register_operand" "=r")
5288 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5289 (const_int 8)
5290 (match_operand 2 "" "")]
5291 UNSPEC_PIC_BASE)))]
5292 "TARGET_ARM"
5293 "*
5294 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5295 INTVAL (operands[2]));
5296 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5297 "
5298 [(set_attr "predicable" "yes")
5299 (set_attr "type" "load_4")]
5300 )
5301
5302 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5303 ;; followed by a load. These sequences can be crunched down to
5304 ;; tls_load_dot_plus_eight by a peephole.
5305
5306 (define_peephole2
5307 [(set (match_operand:SI 0 "register_operand" "")
5308 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5309 (const_int 8)
5310 (match_operand 1 "" "")]
5311 UNSPEC_PIC_BASE))
5312 (set (match_operand:SI 2 "arm_general_register_operand" "")
5313 (mem:SI (match_dup 0)))]
5314 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5315 [(set (match_dup 2)
5316 (mem:SI (unspec:SI [(match_dup 3)
5317 (const_int 8)
5318 (match_dup 1)]
5319 UNSPEC_PIC_BASE)))]
5320 ""
5321 )
5322
5323 (define_insn "pic_offset_arm"
5324 [(set (match_operand:SI 0 "register_operand" "=r")
5325 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5326 (unspec:SI [(match_operand:SI 2 "" "X")]
5327 UNSPEC_PIC_OFFSET))))]
5328 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5329 "ldr%?\\t%0, [%1,%2]"
5330 [(set_attr "type" "load_4")]
5331 )
5332
5333 (define_expand "builtin_setjmp_receiver"
5334 [(label_ref (match_operand 0 "" ""))]
5335 "flag_pic"
5336 "
5337 {
5338 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5339 register. */
5340 if (arm_pic_register != INVALID_REGNUM)
5341 arm_load_pic_register (1UL << 3, NULL_RTX);
5342 DONE;
5343 }")
5344
5345 ;; If copying one reg to another we can set the condition codes according to
5346 ;; its value. Such a move is common after a return from a subroutine when the
5347 ;; result is being tested against zero.
5348
5349 (define_insn "*movsi_compare0"
5350 [(set (reg:CC CC_REGNUM)
5351 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5352 (const_int 0)))
5353 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5354 (match_dup 1))]
5355 "TARGET_32BIT"
5356 "@
5357 cmp%?\\t%0, #0
5358 subs%?\\t%0, %1, #0"
5359 [(set_attr "conds" "set")
5360 (set_attr "type" "alus_imm,alus_imm")]
5361 )
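;; Illustrative sketch (not part of the machine description) of the situation
;; the pattern above exploits: a copy whose result is immediately compared with
;; zero.  SUBS Rd, Rm, #0 performs the copy and sets the flags in a single
;; instruction, so no separate CMP is needed.  The C names are assumptions.
;;
;;   static int
;;   copy_and_test_zero (int src, int *dst)
;;   {
;;     *dst = src;        /* subs %0, %1, #0 : copy the value ...      */
;;     return src == 0;   /* ... and the Z flag is already set by it.  */
;;   }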
5362
5363 ;; Subroutine to store a half word from a register into memory.
5364 ;; Operand 0 is the source register (HImode)
5365 ;; Operand 1 is the destination address in a register (SImode)
5366
5367 ;; In both this routine and the next, we must be careful not to spill
5368 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5369 ;; can generate unrecognizable rtl.
5370
5371 (define_expand "storehi"
5372 [;; store the low byte
5373 (set (match_operand 1 "" "") (match_dup 3))
5374 ;; extract the high byte
5375 (set (match_dup 2)
5376 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5377 ;; store the high byte
5378 (set (match_dup 4) (match_dup 5))]
5379 "TARGET_ARM"
5380 "
5381 {
5382 rtx op1 = operands[1];
5383 rtx addr = XEXP (op1, 0);
5384 enum rtx_code code = GET_CODE (addr);
5385
5386 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5387 || code == MINUS)
5388 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5389
5390 operands[4] = adjust_address (op1, QImode, 1);
5391 operands[1] = adjust_address (operands[1], QImode, 0);
5392 operands[3] = gen_lowpart (QImode, operands[0]);
5393 operands[0] = gen_lowpart (SImode, operands[0]);
5394 operands[2] = gen_reg_rtx (SImode);
5395 operands[5] = gen_lowpart (QImode, operands[2]);
5396 }"
5397 )
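;; Illustrative C equivalent (not part of the machine description) of the
;; storehi expansion above for the little-endian case; storehi_bigend below
;; swaps which offset receives the shifted byte.  The helper name is an
;; assumption.
;;
;;   #include <stdint.h>
;;
;;   static void
;;   store_halfword_by_bytes (uint8_t *addr, uint32_t value)
;;   {
;;     addr[0] = (uint8_t) value;          /* store the low byte             */
;;     addr[1] = (uint8_t) (value >> 8);   /* store the high byte (ashiftrt) */
;;   }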
5398
5399 (define_expand "storehi_bigend"
5400 [(set (match_dup 4) (match_dup 3))
5401 (set (match_dup 2)
5402 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5403 (set (match_operand 1 "" "") (match_dup 5))]
5404 "TARGET_ARM"
5405 "
5406 {
5407 rtx op1 = operands[1];
5408 rtx addr = XEXP (op1, 0);
5409 enum rtx_code code = GET_CODE (addr);
5410
5411 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5412 || code == MINUS)
5413 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5414
5415 operands[4] = adjust_address (op1, QImode, 1);
5416 operands[1] = adjust_address (operands[1], QImode, 0);
5417 operands[3] = gen_lowpart (QImode, operands[0]);
5418 operands[0] = gen_lowpart (SImode, operands[0]);
5419 operands[2] = gen_reg_rtx (SImode);
5420 operands[5] = gen_lowpart (QImode, operands[2]);
5421 }"
5422 )
5423
5424 ;; Subroutine to store a half word integer constant into memory.
5425 (define_expand "storeinthi"
5426 [(set (match_operand 0 "" "")
5427 (match_operand 1 "" ""))
5428 (set (match_dup 3) (match_dup 2))]
5429 "TARGET_ARM"
5430 "
5431 {
5432 HOST_WIDE_INT value = INTVAL (operands[1]);
5433 rtx addr = XEXP (operands[0], 0);
5434 rtx op0 = operands[0];
5435 enum rtx_code code = GET_CODE (addr);
5436
5437 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5438 || code == MINUS)
5439 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5440
5441 operands[1] = gen_reg_rtx (SImode);
5442 if (BYTES_BIG_ENDIAN)
5443 {
5444 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5445 if ((value & 255) == ((value >> 8) & 255))
5446 operands[2] = operands[1];
5447 else
5448 {
5449 operands[2] = gen_reg_rtx (SImode);
5450 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5451 }
5452 }
5453 else
5454 {
5455 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5456 if ((value & 255) == ((value >> 8) & 255))
5457 operands[2] = operands[1];
5458 else
5459 {
5460 operands[2] = gen_reg_rtx (SImode);
5461 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5462 }
5463 }
5464
5465 operands[3] = adjust_address (op0, QImode, 1);
5466 operands[0] = adjust_address (operands[0], QImode, 0);
5467 operands[2] = gen_lowpart (QImode, operands[2]);
5468 operands[1] = gen_lowpart (QImode, operands[1]);
5469 }"
5470 )
5471
5472 (define_expand "storehi_single_op"
5473 [(set (match_operand:HI 0 "memory_operand")
5474 (match_operand:HI 1 "general_operand"))]
5475 "TARGET_32BIT && arm_arch4"
5476 "
5477 if (!s_register_operand (operands[1], HImode))
5478 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5479 "
5480 )
5481
5482 (define_expand "movhi"
5483 [(set (match_operand:HI 0 "general_operand")
5484 (match_operand:HI 1 "general_operand"))]
5485 "TARGET_EITHER"
5486 "
5487 gcc_checking_assert (aligned_operand (operands[0], HImode));
5488 gcc_checking_assert (aligned_operand (operands[1], HImode));
5489 if (TARGET_ARM)
5490 {
5491 if (can_create_pseudo_p ())
5492 {
5493 if (MEM_P (operands[0]))
5494 {
5495 if (arm_arch4)
5496 {
5497 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5498 DONE;
5499 }
5500 if (CONST_INT_P (operands[1]))
5501 emit_insn (gen_storeinthi (operands[0], operands[1]));
5502 else
5503 {
5504 if (MEM_P (operands[1]))
5505 operands[1] = force_reg (HImode, operands[1]);
5506 if (BYTES_BIG_ENDIAN)
5507 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5508 else
5509 emit_insn (gen_storehi (operands[1], operands[0]));
5510 }
5511 DONE;
5512 }
5513 /* Sign extend a constant, and keep it in an SImode reg. */
5514 else if (CONST_INT_P (operands[1]))
5515 {
5516 rtx reg = gen_reg_rtx (SImode);
5517 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5518
5519 /* If the constant is already valid, leave it alone. */
5520 if (!const_ok_for_arm (val))
5521 {
5522 /* If setting all the top bits will make the constant
5523 loadable in a single instruction, then set them.
5524 Otherwise, sign extend the number. */
5525
5526 if (const_ok_for_arm (~(val | ~0xffff)))
5527 val |= ~0xffff;
5528 else if (val & 0x8000)
5529 val |= ~0xffff;
5530 }
5531
5532 emit_insn (gen_movsi (reg, GEN_INT (val)));
5533 operands[1] = gen_lowpart (HImode, reg);
5534 }
5535 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5536 && MEM_P (operands[1]))
5537 {
5538 rtx reg = gen_reg_rtx (SImode);
5539
5540 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5541 operands[1] = gen_lowpart (HImode, reg);
5542 }
5543 else if (!arm_arch4)
5544 {
5545 if (MEM_P (operands[1]))
5546 {
5547 rtx base;
5548 rtx offset = const0_rtx;
5549 rtx reg = gen_reg_rtx (SImode);
5550
5551 if ((REG_P (base = XEXP (operands[1], 0))
5552 || (GET_CODE (base) == PLUS
5553 && (CONST_INT_P (offset = XEXP (base, 1)))
5554 && ((INTVAL(offset) & 1) != 1)
5555 && REG_P (base = XEXP (base, 0))))
5556 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5557 {
5558 rtx new_rtx;
5559
5560 new_rtx = widen_memory_access (operands[1], SImode,
5561 ((INTVAL (offset) & ~3)
5562 - INTVAL (offset)));
5563 emit_insn (gen_movsi (reg, new_rtx));
5564 if (((INTVAL (offset) & 2) != 0)
5565 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5566 {
5567 rtx reg2 = gen_reg_rtx (SImode);
5568
5569 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5570 reg = reg2;
5571 }
5572 }
5573 else
5574 emit_insn (gen_movhi_bytes (reg, operands[1]));
5575
5576 operands[1] = gen_lowpart (HImode, reg);
5577 }
5578 }
5579 }
5580 /* Handle loading a large integer during reload. */
5581 else if (CONST_INT_P (operands[1])
5582 && !const_ok_for_arm (INTVAL (operands[1]))
5583 && !const_ok_for_arm (~INTVAL (operands[1])))
5584 {
5585 /* Writing a constant to memory needs a scratch, which should
5586 be handled with SECONDARY_RELOADs. */
5587 gcc_assert (REG_P (operands[0]));
5588
5589 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5590 emit_insn (gen_movsi (operands[0], operands[1]));
5591 DONE;
5592 }
5593 }
5594 else if (TARGET_THUMB2)
5595 {
5596 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5597 if (can_create_pseudo_p ())
5598 {
5599 if (!REG_P (operands[0]))
5600 operands[1] = force_reg (HImode, operands[1]);
5601 /* Zero extend a constant, and keep it in an SImode reg. */
5602 else if (CONST_INT_P (operands[1]))
5603 {
5604 rtx reg = gen_reg_rtx (SImode);
5605 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5606
5607 emit_insn (gen_movsi (reg, GEN_INT (val)));
5608 operands[1] = gen_lowpart (HImode, reg);
5609 }
5610 }
5611 }
5612 else /* TARGET_THUMB1 */
5613 {
5614 if (can_create_pseudo_p ())
5615 {
5616 if (CONST_INT_P (operands[1]))
5617 {
5618 rtx reg = gen_reg_rtx (SImode);
5619
5620 emit_insn (gen_movsi (reg, operands[1]));
5621 operands[1] = gen_lowpart (HImode, reg);
5622 }
5623
5624 /* ??? We shouldn't really get invalid addresses here, but this can
5625 happen if we are passed a SP (never OK for HImode/QImode) or
5626 virtual register (also rejected as illegitimate for HImode/QImode)
5627 relative address. */
5628 /* ??? This should perhaps be fixed elsewhere, for instance, in
5629 fixup_stack_1, by checking for other kinds of invalid addresses,
5630 e.g. a bare reference to a virtual register. This may confuse the
5631 alpha though, which must handle this case differently. */
5632 if (MEM_P (operands[0])
5633 && !memory_address_p (GET_MODE (operands[0]),
5634 XEXP (operands[0], 0)))
5635 operands[0]
5636 = replace_equiv_address (operands[0],
5637 copy_to_reg (XEXP (operands[0], 0)));
5638
5639 if (MEM_P (operands[1])
5640 && !memory_address_p (GET_MODE (operands[1]),
5641 XEXP (operands[1], 0)))
5642 operands[1]
5643 = replace_equiv_address (operands[1],
5644 copy_to_reg (XEXP (operands[1], 0)));
5645
5646 if (MEM_P (operands[1]) && optimize > 0)
5647 {
5648 rtx reg = gen_reg_rtx (SImode);
5649
5650 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5651 operands[1] = gen_lowpart (HImode, reg);
5652 }
5653
5654 if (MEM_P (operands[0]))
5655 operands[1] = force_reg (HImode, operands[1]);
5656 }
5657 else if (CONST_INT_P (operands[1])
5658 && !satisfies_constraint_I (operands[1]))
5659 {
5660 /* Handle loading a large integer during reload. */
5661
5662 /* Writing a constant to memory needs a scratch, which should
5663 be handled with SECONDARY_RELOADs. */
5664 gcc_assert (REG_P (operands[0]));
5665
5666 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5667 emit_insn (gen_movsi (operands[0], operands[1]));
5668 DONE;
5669 }
5670 }
5671 "
5672 )
5673
5674 (define_expand "movhi_bytes"
5675 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5676 (set (match_dup 3)
5677 (zero_extend:SI (match_dup 6)))
5678 (set (match_operand:SI 0 "" "")
5679 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5680 "TARGET_ARM"
5681 "
5682 {
5683 rtx mem1, mem2;
5684 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5685
5686 mem1 = change_address (operands[1], QImode, addr);
5687 mem2 = change_address (operands[1], QImode,
5688 plus_constant (Pmode, addr, 1));
5689 operands[0] = gen_lowpart (SImode, operands[0]);
5690 operands[1] = mem1;
5691 operands[2] = gen_reg_rtx (SImode);
5692 operands[3] = gen_reg_rtx (SImode);
5693 operands[6] = mem2;
5694
5695 if (BYTES_BIG_ENDIAN)
5696 {
5697 operands[4] = operands[2];
5698 operands[5] = operands[3];
5699 }
5700 else
5701 {
5702 operands[4] = operands[3];
5703 operands[5] = operands[2];
5704 }
5705 }"
5706 )
5707
5708 (define_expand "movhi_bigend"
5709 [(set (match_dup 2)
5710 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
5711 (const_int 16)))
5712 (set (match_dup 3)
5713 (ashiftrt:SI (match_dup 2) (const_int 16)))
5714 (set (match_operand:HI 0 "s_register_operand")
5715 (match_dup 4))]
5716 "TARGET_ARM"
5717 "
5718 operands[2] = gen_reg_rtx (SImode);
5719 operands[3] = gen_reg_rtx (SImode);
5720 operands[4] = gen_lowpart (HImode, operands[3]);
5721 "
5722 )
5723
5724 ;; Pattern to recognize the insn generated by the default case above
5725 (define_insn "*movhi_insn_arch4"
5726 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
5727 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))]
5728 "TARGET_ARM
5729 && arm_arch4 && !TARGET_HARD_FLOAT
5730 && (register_operand (operands[0], HImode)
5731 || register_operand (operands[1], HImode))"
5732 "@
5733 mov%?\\t%0, %1\\t%@ movhi
5734 mvn%?\\t%0, #%B1\\t%@ movhi
5735 movw%?\\t%0, %L1\\t%@ movhi
5736 strh%?\\t%1, %0\\t%@ movhi
5737 ldrh%?\\t%0, %1\\t%@ movhi"
5738 [(set_attr "predicable" "yes")
5739 (set_attr "pool_range" "*,*,*,*,256")
5740 (set_attr "neg_pool_range" "*,*,*,*,244")
5741 (set_attr "arch" "*,*,v6t2,*,*")
5742 (set_attr_alternative "type"
5743 [(if_then_else (match_operand 1 "const_int_operand" "")
5744 (const_string "mov_imm" )
5745 (const_string "mov_reg"))
5746 (const_string "mvn_imm")
5747 (const_string "mov_imm")
5748 (const_string "store_4")
5749 (const_string "load_4")])]
5750 )
5751
5752 (define_insn "*movhi_bytes"
5753 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
5754 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))]
5755 "TARGET_ARM && !TARGET_HARD_FLOAT"
5756 "@
5757 mov%?\\t%0, %1\\t%@ movhi
5758 mov%?\\t%0, %1\\t%@ movhi
5759 mvn%?\\t%0, #%B1\\t%@ movhi"
5760 [(set_attr "predicable" "yes")
5761 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
5762 )
5763
5764 ;; We use a DImode scratch because we may occasionally need an additional
5765 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5766 ;; to take any notice of the "o" constraints on the reload_memory_operand.
5767 ;; The reload_in<m> and reload_out<m> patterns require special constraints
5768 ;; to be handled correctly by the default_secondary_reload function.
5769 (define_expand "reload_outhi"
5770 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5771 (match_operand:HI 1 "s_register_operand" "r")
5772 (match_operand:DI 2 "s_register_operand" "=&l")])]
5773 "TARGET_EITHER"
5774 "if (TARGET_ARM)
5775 arm_reload_out_hi (operands);
5776 else
5777 thumb_reload_out_hi (operands);
5778 DONE;
5779 "
5780 )
5781
5782 (define_expand "reload_inhi"
5783 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5784 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5785 (match_operand:DI 2 "s_register_operand" "=&r")])]
5786 "TARGET_EITHER"
5787 "
5788 if (TARGET_ARM)
5789 arm_reload_in_hi (operands);
5790 else
5791 thumb_reload_out_hi (operands);
5792 DONE;
5793 ")
5794
5795 (define_expand "movqi"
5796 [(set (match_operand:QI 0 "general_operand")
5797 (match_operand:QI 1 "general_operand"))]
5798 "TARGET_EITHER"
5799 "
5800 /* Everything except mem = const or mem = mem can be done easily. */
5801
5802 if (can_create_pseudo_p ())
5803 {
5804 if (CONST_INT_P (operands[1]))
5805 {
5806 rtx reg = gen_reg_rtx (SImode);
5807
5808 /* For thumb we want an unsigned immediate, then we are more likely
5809 to be able to use a movs insn. */
5810 if (TARGET_THUMB)
5811 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
5812
5813 emit_insn (gen_movsi (reg, operands[1]));
5814 operands[1] = gen_lowpart (QImode, reg);
5815 }
5816
5817 if (TARGET_THUMB)
5818 {
5819 /* ??? We shouldn't really get invalid addresses here, but this can
5820 happen if we are passed a SP (never OK for HImode/QImode) or
5821 virtual register (also rejected as illegitimate for HImode/QImode)
5822 relative address. */
5823 /* ??? This should perhaps be fixed elsewhere, for instance, in
5824 fixup_stack_1, by checking for other kinds of invalid addresses,
5825 e.g. a bare reference to a virtual register. This may confuse the
5826 alpha though, which must handle this case differently. */
5827 if (MEM_P (operands[0])
5828 && !memory_address_p (GET_MODE (operands[0]),
5829 XEXP (operands[0], 0)))
5830 operands[0]
5831 = replace_equiv_address (operands[0],
5832 copy_to_reg (XEXP (operands[0], 0)));
5833 if (MEM_P (operands[1])
5834 && !memory_address_p (GET_MODE (operands[1]),
5835 XEXP (operands[1], 0)))
5836 operands[1]
5837 = replace_equiv_address (operands[1],
5838 copy_to_reg (XEXP (operands[1], 0)));
5839 }
5840
5841 if (MEM_P (operands[1]) && optimize > 0)
5842 {
5843 rtx reg = gen_reg_rtx (SImode);
5844
5845 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5846 operands[1] = gen_lowpart (QImode, reg);
5847 }
5848
5849 if (MEM_P (operands[0]))
5850 operands[1] = force_reg (QImode, operands[1]);
5851 }
5852 else if (TARGET_THUMB
5853 && CONST_INT_P (operands[1])
5854 && !satisfies_constraint_I (operands[1]))
5855 {
5856 /* Handle loading a large integer during reload. */
5857
5858 /* Writing a constant to memory needs a scratch, which should
5859 be handled with SECONDARY_RELOADs. */
5860 gcc_assert (REG_P (operands[0]));
5861
5862 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5863 emit_insn (gen_movsi (operands[0], operands[1]));
5864 DONE;
5865 }
5866 "
5867 )
5868
5869 (define_insn "*arm_movqi_insn"
5870 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
5871 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
5872 "TARGET_32BIT
5873 && ( register_operand (operands[0], QImode)
5874 || register_operand (operands[1], QImode))"
5875 "@
5876 mov%?\\t%0, %1
5877 mov%?\\t%0, %1
5878 mov%?\\t%0, %1
5879 mov%?\\t%0, %1
5880 mvn%?\\t%0, #%B1
5881 ldrb%?\\t%0, %1
5882 strb%?\\t%1, %0
5883 ldrb%?\\t%0, %1
5884 strb%?\\t%1, %0"
5885 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
5886 (set_attr "predicable" "yes")
5887 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
5888 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
5889 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
5890 )
5891
5892 ;; HFmode moves
5893 (define_expand "movhf"
5894 [(set (match_operand:HF 0 "general_operand")
5895 (match_operand:HF 1 "general_operand"))]
5896 "TARGET_EITHER"
5897 "
5898 gcc_checking_assert (aligned_operand (operands[0], HFmode));
5899 gcc_checking_assert (aligned_operand (operands[1], HFmode));
5900 if (TARGET_32BIT)
5901 {
5902 if (MEM_P (operands[0]))
5903 operands[1] = force_reg (HFmode, operands[1]);
5904 }
5905 else /* TARGET_THUMB1 */
5906 {
5907 if (can_create_pseudo_p ())
5908 {
5909 if (!REG_P (operands[0]))
5910 operands[1] = force_reg (HFmode, operands[1]);
5911 }
5912 }
5913 "
5914 )
5915
5916 (define_insn "*arm32_movhf"
5917 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
5918 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
5919 "TARGET_32BIT && !TARGET_HARD_FLOAT
5920 && ( s_register_operand (operands[0], HFmode)
5921 || s_register_operand (operands[1], HFmode))"
5922 "*
5923 switch (which_alternative)
5924 {
5925 case 0: /* ARM register from memory */
5926 return \"ldrh%?\\t%0, %1\\t%@ __fp16\";
5927 case 1: /* memory from ARM register */
5928 return \"strh%?\\t%1, %0\\t%@ __fp16\";
5929 case 2: /* ARM register from ARM register */
5930 return \"mov%?\\t%0, %1\\t%@ __fp16\";
5931 case 3: /* ARM register from constant */
5932 {
5933 long bits;
5934 rtx ops[4];
5935
5936 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
5937 HFmode);
5938 ops[0] = operands[0];
5939 ops[1] = GEN_INT (bits);
5940 ops[2] = GEN_INT (bits & 0xff00);
5941 ops[3] = GEN_INT (bits & 0x00ff);
5942
5943 if (arm_arch_thumb2)
5944 output_asm_insn (\"movw%?\\t%0, %1\", ops);
5945 else
5946 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
5947 return \"\";
5948 }
5949 default:
5950 gcc_unreachable ();
5951 }
5952 "
5953 [(set_attr "conds" "unconditional")
5954 (set_attr "type" "load_4,store_4,mov_reg,multiple")
5955 (set_attr "length" "4,4,4,8")
5956 (set_attr "predicable" "yes")]
5957 )
5958
5959 (define_expand "movsf"
5960 [(set (match_operand:SF 0 "general_operand")
5961 (match_operand:SF 1 "general_operand"))]
5962 "TARGET_EITHER"
5963 "
5964 gcc_checking_assert (aligned_operand (operands[0], SFmode));
5965 gcc_checking_assert (aligned_operand (operands[1], SFmode));
5966 if (TARGET_32BIT)
5967 {
5968 if (MEM_P (operands[0]))
5969 operands[1] = force_reg (SFmode, operands[1]);
5970 }
5971 else /* TARGET_THUMB1 */
5972 {
5973 if (can_create_pseudo_p ())
5974 {
5975 if (!REG_P (operands[0]))
5976 operands[1] = force_reg (SFmode, operands[1]);
5977 }
5978 }
5979
5980 /* Cannot load it directly; generate a load with a clobber so that it can be
5981 loaded via a GPR with MOV / MOVT. */
5982 if (arm_disable_literal_pool
5983 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
5984 && CONST_DOUBLE_P (operands[1])
5985 && TARGET_HARD_FLOAT
5986 && !vfp3_const_double_rtx (operands[1]))
5987 {
5988 rtx clobreg = gen_reg_rtx (SFmode);
5989 emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
5990 clobreg));
5991 DONE;
5992 }
5993 "
5994 )
5995
5996 ;; Transform a floating-point move of a constant into a core register into
5997 ;; an SImode operation.
5998 (define_split
5999 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6000 (match_operand:SF 1 "immediate_operand" ""))]
6001 "TARGET_EITHER
6002 && reload_completed
6003 && CONST_DOUBLE_P (operands[1])"
6004 [(set (match_dup 2) (match_dup 3))]
6005 "
6006 operands[2] = gen_lowpart (SImode, operands[0]);
6007 operands[3] = gen_lowpart (SImode, operands[1]);
6008 if (operands[2] == 0 || operands[3] == 0)
6009 FAIL;
6010 "
6011 )
6012
6013 (define_insn "*arm_movsf_soft_insn"
6014 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6015 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6016 "TARGET_32BIT
6017 && TARGET_SOFT_FLOAT
6018 && (!MEM_P (operands[0])
6019 || register_operand (operands[1], SFmode))"
6020 {
6021 switch (which_alternative)
6022 {
6023 case 0: return \"mov%?\\t%0, %1\";
6024 case 1:
6025 /* Cannot load it directly, split to load it via MOV / MOVT. */
6026 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6027 return \"#\";
6028 return \"ldr%?\\t%0, %1\\t%@ float\";
6029 case 2: return \"str%?\\t%1, %0\\t%@ float\";
6030 default: gcc_unreachable ();
6031 }
6032 }
6033 [(set_attr "predicable" "yes")
6034 (set_attr "type" "mov_reg,load_4,store_4")
6035 (set_attr "arm_pool_range" "*,4096,*")
6036 (set_attr "thumb2_pool_range" "*,4094,*")
6037 (set_attr "arm_neg_pool_range" "*,4084,*")
6038 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6039 )
6040
6041 ;; Splitter for the above.
6042 (define_split
6043 [(set (match_operand:SF 0 "s_register_operand")
6044 (match_operand:SF 1 "const_double_operand"))]
6045 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6046 [(const_int 0)]
6047 {
6048 long buf;
6049 real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
6050 rtx cst = gen_int_mode (buf, SImode);
6051 emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
6052 DONE;
6053 }
6054 )
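;; A minimal sketch (not part of the machine description) of what the splitter
;; above achieves: the SFmode constant is reinterpreted as its 32-bit image so
;; it can be moved with core-register instructions when the literal pool is
;; disabled.  memcpy stands in for real_to_target here, and the function name
;; is an assumption.
;;
;;   #include <stdint.h>
;;   #include <string.h>
;;
;;   static uint32_t
;;   float_image (float x)
;;   {
;;     uint32_t bits;
;;     memcpy (&bits, &x, sizeof bits);   /* bit copy, no conversion */
;;     return bits;
;;   }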
6055
6056 (define_expand "movdf"
6057 [(set (match_operand:DF 0 "general_operand")
6058 (match_operand:DF 1 "general_operand"))]
6059 "TARGET_EITHER"
6060 "
6061 gcc_checking_assert (aligned_operand (operands[0], DFmode));
6062 gcc_checking_assert (aligned_operand (operands[1], DFmode));
6063 if (TARGET_32BIT)
6064 {
6065 if (MEM_P (operands[0]))
6066 operands[1] = force_reg (DFmode, operands[1]);
6067 }
6068 else /* TARGET_THUMB */
6069 {
6070 if (can_create_pseudo_p ())
6071 {
6072 if (!REG_P (operands[0]))
6073 operands[1] = force_reg (DFmode, operands[1]);
6074 }
6075 }
6076
6077 /* Cannot load it directly; generate a load with a clobber so that it can be
6078 loaded via a GPR with MOV / MOVT. */
6079 if (arm_disable_literal_pool
6080 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6081 && CONSTANT_P (operands[1])
6082 && TARGET_HARD_FLOAT
6083 && !arm_const_double_rtx (operands[1])
6084 && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
6085 {
6086 rtx clobreg = gen_reg_rtx (DFmode);
6087 emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
6088 clobreg));
6089 DONE;
6090 }
6091 "
6092 )
6093
6094 ;; Reloading a df mode value stored in integer regs to memory can require a
6095 ;; scratch reg.
6096 ;; Another reload_out<m> pattern that requires special constraints.
6097 (define_expand "reload_outdf"
6098 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6099 (match_operand:DF 1 "s_register_operand" "r")
6100 (match_operand:SI 2 "s_register_operand" "=&r")]
6101 "TARGET_THUMB2"
6102 "
6103 {
6104 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6105
6106 if (code == REG)
6107 operands[2] = XEXP (operands[0], 0);
6108 else if (code == POST_INC || code == PRE_DEC)
6109 {
6110 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6111 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6112 emit_insn (gen_movdi (operands[0], operands[1]));
6113 DONE;
6114 }
6115 else if (code == PRE_INC)
6116 {
6117 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6118
6119 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6120 operands[2] = reg;
6121 }
6122 else if (code == POST_DEC)
6123 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6124 else
6125 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6126 XEXP (XEXP (operands[0], 0), 1)));
6127
6128 emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
6129 operands[1]));
6130
6131 if (code == POST_DEC)
6132 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6133
6134 DONE;
6135 }"
6136 )
6137
6138 (define_insn "*movdf_soft_insn"
6139 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6140 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6141 "TARGET_32BIT && TARGET_SOFT_FLOAT
6142 && ( register_operand (operands[0], DFmode)
6143 || register_operand (operands[1], DFmode))"
6144 "*
6145 switch (which_alternative)
6146 {
6147 case 0:
6148 case 1:
6149 case 2:
6150 return \"#\";
6151 case 3:
6152 /* Cannot load it directly, split to load it via MOV / MOVT. */
6153 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6154 return \"#\";
6155 /* Fall through. */
6156 default:
6157 return output_move_double (operands, true, NULL);
6158 }
6159 "
6160 [(set_attr "length" "8,12,16,8,8")
6161 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6162 (set_attr "arm_pool_range" "*,*,*,1020,*")
6163 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
6164 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6165 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6166 )
6167
6168 ;; Splitter for the above.
6169 (define_split
6170 [(set (match_operand:DF 0 "s_register_operand")
6171 (match_operand:DF 1 "const_double_operand"))]
6172 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6173 [(const_int 0)]
6174 {
6175 long buf[2];
6176 int order = BYTES_BIG_ENDIAN ? 1 : 0;
6177 real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
6178 unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
6179 ival |= (zext_hwi (buf[1 - order], 32) << 32);
6180 rtx cst = gen_int_mode (ival, DImode);
6181 emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
6182 DONE;
6183 }
6184 )
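;; Sketch (illustrative only) of the word-combining step in the DF splitter
;; above: the two 32-bit halves produced by real_to_target are assembled into
;; one 64-bit constant, with BYTES_BIG_ENDIAN selecting which half becomes the
;; low word, mirroring the buf[order] / buf[1 - order] logic.  The helper name
;; is an assumption.
;;
;;   #include <stdint.h>
;;
;;   static uint64_t
;;   combine_words (const uint32_t buf[2], int bytes_big_endian)
;;   {
;;     int order = bytes_big_endian ? 1 : 0;   /* index of the low word */
;;     return (uint64_t) buf[order] | ((uint64_t) buf[1 - order] << 32);
;;   }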
6185 \f
6186
6187 ;; load- and store-multiple insns
6188 ;; The ARM can load/store any set of registers, provided that they are in
6189 ;; ascending order, but these expanders assume a contiguous set.
6190
6191 (define_expand "load_multiple"
6192 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6193 (match_operand:SI 1 "" ""))
6194 (use (match_operand:SI 2 "" ""))])]
6195 "TARGET_32BIT"
6196 {
6197 HOST_WIDE_INT offset = 0;
6198
6199 /* Support only fixed point registers. */
6200 if (!CONST_INT_P (operands[2])
6201 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6202 || INTVAL (operands[2]) < 2
6203 || !MEM_P (operands[1])
6204 || !REG_P (operands[0])
6205 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6206 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6207 FAIL;
6208
6209 operands[3]
6210 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6211 INTVAL (operands[2]),
6212 force_reg (SImode, XEXP (operands[1], 0)),
6213 FALSE, operands[1], &offset);
6214 })
6215
6216 (define_expand "store_multiple"
6217 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6218 (match_operand:SI 1 "" ""))
6219 (use (match_operand:SI 2 "" ""))])]
6220 "TARGET_32BIT"
6221 {
6222 HOST_WIDE_INT offset = 0;
6223
6224 /* Support only fixed point registers. */
6225 if (!CONST_INT_P (operands[2])
6226 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6227 || INTVAL (operands[2]) < 2
6228 || !REG_P (operands[1])
6229 || !MEM_P (operands[0])
6230 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6231 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6232 FAIL;
6233
6234 operands[3]
6235 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6236 INTVAL (operands[2]),
6237 force_reg (SImode, XEXP (operands[0], 0)),
6238 FALSE, operands[0], &offset);
6239 })
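;; Illustrative summary (not part of the machine description) of the checks the
;; two expanders above apply before accepting a block transfer: at least two
;; and at most MAX_LDM_STM_OPS registers, starting no higher than r14, with the
;; whole contiguous block staying within the core registers.  The helper below
;; is an assumption written against those conditions.
;;
;;   static int
;;   ldm_stm_block_ok (unsigned first_regno, long nregs,
;;                     long max_ops, unsigned last_arm_regnum)
;;   {
;;     return nregs >= 2
;;            && nregs <= max_ops
;;            && first_regno <= last_arm_regnum - 1
;;            && first_regno + nregs <= last_arm_regnum;
;;   }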
6240
6241
6242 (define_expand "setmemsi"
6243 [(match_operand:BLK 0 "general_operand")
6244 (match_operand:SI 1 "const_int_operand")
6245 (match_operand:SI 2 "const_int_operand")
6246 (match_operand:SI 3 "const_int_operand")]
6247 "TARGET_32BIT"
6248 {
6249 if (arm_gen_setmem (operands))
6250 DONE;
6251
6252 FAIL;
6253 })
6254
6255
6256 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6257 ;; We could let this apply to smaller blocks as well, but it clobbers so
6258 ;; many registers that there is then probably a better way.
6259
6260 (define_expand "cpymemqi"
6261 [(match_operand:BLK 0 "general_operand")
6262 (match_operand:BLK 1 "general_operand")
6263 (match_operand:SI 2 "const_int_operand")
6264 (match_operand:SI 3 "const_int_operand")]
6265 ""
6266 "
6267 if (TARGET_32BIT)
6268 {
6269 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
6270 && !optimize_function_for_size_p (cfun))
6271 {
6272 if (gen_cpymem_ldrd_strd (operands))
6273 DONE;
6274 FAIL;
6275 }
6276
6277 if (arm_gen_cpymemqi (operands))
6278 DONE;
6279 FAIL;
6280 }
6281 else /* TARGET_THUMB1 */
6282 {
6283 if ( INTVAL (operands[3]) != 4
6284 || INTVAL (operands[2]) > 48)
6285 FAIL;
6286
6287 thumb_expand_cpymemqi (operands);
6288 DONE;
6289 }
6290 "
6291 )
6292 \f
6293
6294 ;; Compare & branch insns
6295 ;; The range calculations are derived as follows:
6296 ;; For forward branches, the address calculation returns the address of
6297 ;; the next instruction. This is 2 beyond the branch instruction.
6298 ;; For backward branches, the address calculation returns the address of
6299 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6300 ;; instruction for the shortest sequence, and 4 before the branch instruction
6301 ;; if we have to jump around an unconditional branch.
6302 ;; To the basic branch range the PC offset must be added (this is +4).
6303 ;; So for forward branches we have
6304 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6305 ;; And for backward branches we have
6306 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6307 ;;
6308 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6309 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
6310
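;; Worked example (illustrative only) of the arithmetic above, plugging in the
;; figures quoted for 'b' and 'b<cond>'.  The quoted -2040 and -250 backward
;; limits correspond to the -4 and -2 base offsets respectively.
;;
;;   /* effective range = architectural range - base offset + PC offset.  */
;;   static void
;;   thumb_branch_ranges (void)
;;   {
;;     int b_fwd  =  2046 - 2    + 4;   /* ==  2048 for 'b'       */
;;     int b_bwd  = -2048 - (-4) + 4;   /* == -2040 for 'b'       */
;;     int bc_fwd =   254 - 2    + 4;   /* ==   256 for 'b<cond>' */
;;     int bc_bwd =  -256 - (-2) + 4;   /* ==  -250 for 'b<cond>' */
;;     (void) b_fwd; (void) b_bwd; (void) bc_fwd; (void) bc_bwd;
;;   }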
6311 (define_expand "cbranchsi4"
6312 [(set (pc) (if_then_else
6313 (match_operator 0 "expandable_comparison_operator"
6314 [(match_operand:SI 1 "s_register_operand")
6315 (match_operand:SI 2 "nonmemory_operand")])
6316 (label_ref (match_operand 3 "" ""))
6317 (pc)))]
6318 "TARGET_EITHER"
6319 "
6320 if (!TARGET_THUMB1)
6321 {
6322 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6323 FAIL;
6324 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6325 operands[3]));
6326 DONE;
6327 }
6328 if (thumb1_cmpneg_operand (operands[2], SImode))
6329 {
6330 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6331 operands[3], operands[0]));
6332 DONE;
6333 }
6334 if (!thumb1_cmp_operand (operands[2], SImode))
6335 operands[2] = force_reg (SImode, operands[2]);
6336 ")
6337
6338 (define_expand "cbranchsf4"
6339 [(set (pc) (if_then_else
6340 (match_operator 0 "expandable_comparison_operator"
6341 [(match_operand:SF 1 "s_register_operand")
6342 (match_operand:SF 2 "vfp_compare_operand")])
6343 (label_ref (match_operand 3 "" ""))
6344 (pc)))]
6345 "TARGET_32BIT && TARGET_HARD_FLOAT"
6346 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6347 operands[3])); DONE;"
6348 )
6349
6350 (define_expand "cbranchdf4"
6351 [(set (pc) (if_then_else
6352 (match_operator 0 "expandable_comparison_operator"
6353 [(match_operand:DF 1 "s_register_operand")
6354 (match_operand:DF 2 "vfp_compare_operand")])
6355 (label_ref (match_operand 3 "" ""))
6356 (pc)))]
6357 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6358 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6359 operands[3])); DONE;"
6360 )
6361
6362 (define_expand "cbranchdi4"
6363 [(set (pc) (if_then_else
6364 (match_operator 0 "expandable_comparison_operator"
6365 [(match_operand:DI 1 "s_register_operand")
6366 (match_operand:DI 2 "cmpdi_operand")])
6367 (label_ref (match_operand 3 "" ""))
6368 (pc)))]
6369 "TARGET_32BIT"
6370 "{
6371 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6372 FAIL;
6373 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6374 operands[3]));
6375 DONE;
6376 }"
6377 )
6378
6379 ;; Comparison and test insns
6380
6381 (define_insn "*arm_cmpsi_insn"
6382 [(set (reg:CC CC_REGNUM)
6383 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
6384 (match_operand:SI 1 "arm_add_operand" "Py,r,r,I,L")))]
6385 "TARGET_32BIT"
6386 "@
6387 cmp%?\\t%0, %1
6388 cmp%?\\t%0, %1
6389 cmp%?\\t%0, %1
6390 cmp%?\\t%0, %1
6391 cmn%?\\t%0, #%n1"
6392 [(set_attr "conds" "set")
6393 (set_attr "arch" "t2,t2,any,any,any")
6394 (set_attr "length" "2,2,4,4,4")
6395 (set_attr "predicable" "yes")
6396 (set_attr "predicable_short_it" "yes,yes,yes,no,no")
6397 (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
6398 )
6399
6400 (define_insn "*cmpsi_shiftsi"
6401 [(set (reg:CC CC_REGNUM)
6402 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r,r")
6403 (match_operator:SI 3 "shift_operator"
6404 [(match_operand:SI 1 "s_register_operand" "r,r,r")
6405 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])))]
6406 "TARGET_32BIT"
6407 "cmp\\t%0, %1%S3"
6408 [(set_attr "conds" "set")
6409 (set_attr "shift" "1")
6410 (set_attr "arch" "32,a,a")
6411 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
6412
6413 (define_insn "*cmpsi_shiftsi_swp"
6414 [(set (reg:CC_SWP CC_REGNUM)
6415 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
6416 [(match_operand:SI 1 "s_register_operand" "r,r,r")
6417 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])
6418 (match_operand:SI 0 "s_register_operand" "r,r,r")))]
6419 "TARGET_32BIT"
6420 "cmp%?\\t%0, %1%S3"
6421 [(set_attr "conds" "set")
6422 (set_attr "shift" "1")
6423 (set_attr "arch" "32,a,a")
6424 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
6425
6426 (define_insn "*arm_cmpsi_negshiftsi_si"
6427 [(set (reg:CC_Z CC_REGNUM)
6428 (compare:CC_Z
6429 (neg:SI (match_operator:SI 1 "shift_operator"
6430 [(match_operand:SI 2 "s_register_operand" "r")
6431 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
6432 (match_operand:SI 0 "s_register_operand" "r")))]
6433 "TARGET_ARM"
6434 "cmn%?\\t%0, %2%S1"
6435 [(set_attr "conds" "set")
6436 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
6437 (const_string "alus_shift_imm")
6438 (const_string "alus_shift_reg")))
6439 (set_attr "predicable" "yes")]
6440 )
6441
6442 ;; DImode comparisons. The generic code generates branches that
6443 ;; if-conversion cannot reduce to a conditional compare, so we do
6444 ;; that directly.
6445
6446 (define_insn "*arm_cmpdi_insn"
6447 [(set (reg:CC_NCV CC_REGNUM)
6448 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
6449 (match_operand:DI 1 "arm_di_operand" "rDi")))
6450 (clobber (match_scratch:SI 2 "=r"))]
6451 "TARGET_32BIT"
6452 "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
6453 [(set_attr "conds" "set")
6454 (set_attr "length" "8")
6455 (set_attr "type" "multiple")]
6456 )
6457
6458 (define_insn_and_split "*arm_cmpdi_unsigned"
6459 [(set (reg:CC_CZ CC_REGNUM)
6460 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "l,r,r,r")
6461 (match_operand:DI 1 "arm_di_operand" "Py,r,Di,rDi")))]
6462
6463 "TARGET_32BIT"
6464 "#" ; "cmp\\t%R0, %R1\;it eq\;cmpeq\\t%Q0, %Q1"
6465 "&& reload_completed"
6466 [(set (reg:CC CC_REGNUM)
6467 (compare:CC (match_dup 2) (match_dup 3)))
6468 (cond_exec (eq:SI (reg:CC CC_REGNUM) (const_int 0))
6469 (set (reg:CC CC_REGNUM)
6470 (compare:CC (match_dup 0) (match_dup 1))))]
6471 {
6472 operands[2] = gen_highpart (SImode, operands[0]);
6473 operands[0] = gen_lowpart (SImode, operands[0]);
6474 if (CONST_INT_P (operands[1]))
6475 operands[3] = gen_highpart_mode (SImode, DImode, operands[1]);
6476 else
6477 operands[3] = gen_highpart (SImode, operands[1]);
6478 operands[1] = gen_lowpart (SImode, operands[1]);
6479 }
6480 [(set_attr "conds" "set")
6481 (set_attr "enabled_for_short_it" "yes,yes,no,*")
6482 (set_attr "arch" "t2,t2,t2,a")
6483 (set_attr "length" "6,6,10,8")
6484 (set_attr "type" "multiple")]
6485 )
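;; Illustrative sketch (not part of the machine description) of the comparison
;; strategy in *arm_cmpdi_unsigned above: compare the high words first, and
;; only when they are equal let the low-word comparison decide the result
;; (cmp ; it eq ; cmpeq).  The function name is an assumption.
;;
;;   #include <stdint.h>
;;
;;   static int
;;   compare_di_unsigned (uint64_t a, uint64_t b)
;;   {
;;     uint32_t ah = (uint32_t) (a >> 32), bh = (uint32_t) (b >> 32);
;;     if (ah != bh)                        /* cmp on the high words   */
;;       return ah < bh ? -1 : 1;
;;     uint32_t al = (uint32_t) a, bl = (uint32_t) b;
;;     if (al != bl)                        /* cmpeq on the low words  */
;;       return al < bl ? -1 : 1;
;;     return 0;
;;   }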
6486
6487 (define_insn "*arm_cmpdi_zero"
6488 [(set (reg:CC_Z CC_REGNUM)
6489 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
6490 (const_int 0)))
6491 (clobber (match_scratch:SI 1 "=r"))]
6492 "TARGET_32BIT"
6493 "orrs%?\\t%1, %Q0, %R0"
6494 [(set_attr "conds" "set")
6495 (set_attr "type" "logics_reg")]
6496 )
6497
6498 ; This insn allows redundant compares to be removed by cse; nothing should
6499 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
6500 ; is deleted later on. The match_dup will match the mode here, so that
6501 ; mode changes of the condition codes aren't lost by this even though we don't
6502 ; specify what they are.
6503
6504 (define_insn "*deleted_compare"
6505 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
6506 "TARGET_32BIT"
6507 "\\t%@ deleted compare"
6508 [(set_attr "conds" "set")
6509 (set_attr "length" "0")
6510 (set_attr "type" "no_insn")]
6511 )
6512
6513 \f
6514 ;; Conditional branch insns
6515
6516 (define_expand "cbranch_cc"
6517 [(set (pc)
6518 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
6519 (match_operand 2 "" "")])
6520 (label_ref (match_operand 3 "" ""))
6521 (pc)))]
6522 "TARGET_32BIT"
6523 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
6524 operands[1], operands[2], NULL_RTX);
6525 operands[2] = const0_rtx;"
6526 )
6527
6528 ;;
6529 ;; Patterns to match conditional branch insns.
6530 ;;
6531
6532 (define_insn "arm_cond_branch"
6533 [(set (pc)
6534 (if_then_else (match_operator 1 "arm_comparison_operator"
6535 [(match_operand 2 "cc_register" "") (const_int 0)])
6536 (label_ref (match_operand 0 "" ""))
6537 (pc)))]
6538 "TARGET_32BIT"
6539 "*
6540 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
6541 {
6542 arm_ccfsm_state += 2;
6543 return \"\";
6544 }
6545 return \"b%d1\\t%l0\";
6546 "
6547 [(set_attr "conds" "use")
6548 (set_attr "type" "branch")
6549 (set (attr "length")
6550 (if_then_else
6551 (and (match_test "TARGET_THUMB2")
6552 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
6553 (le (minus (match_dup 0) (pc)) (const_int 256))))
6554 (const_int 2)
6555 (const_int 4)))]
6556 )
6557
6558 (define_insn "*arm_cond_branch_reversed"
6559 [(set (pc)
6560 (if_then_else (match_operator 1 "arm_comparison_operator"
6561 [(match_operand 2 "cc_register" "") (const_int 0)])
6562 (pc)
6563 (label_ref (match_operand 0 "" ""))))]
6564 "TARGET_32BIT"
6565 "*
6566 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
6567 {
6568 arm_ccfsm_state += 2;
6569 return \"\";
6570 }
6571 return \"b%D1\\t%l0\";
6572 "
6573 [(set_attr "conds" "use")
6574 (set_attr "type" "branch")
6575 (set (attr "length")
6576 (if_then_else
6577 (and (match_test "TARGET_THUMB2")
6578 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
6579 (le (minus (match_dup 0) (pc)) (const_int 256))))
6580 (const_int 2)
6581 (const_int 4)))]
6582 )
6583
6584 \f
6585
6586 ; scc insns
6587
6588 (define_expand "cstore_cc"
6589 [(set (match_operand:SI 0 "s_register_operand")
6590 (match_operator:SI 1 "" [(match_operand 2 "" "")
6591 (match_operand 3 "" "")]))]
6592 "TARGET_32BIT"
6593 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
6594 operands[2], operands[3], NULL_RTX);
6595 operands[3] = const0_rtx;"
6596 )
6597
6598 (define_insn_and_split "*mov_scc"
6599 [(set (match_operand:SI 0 "s_register_operand" "=r")
6600 (match_operator:SI 1 "arm_comparison_operator_mode"
6601 [(match_operand 2 "cc_register" "") (const_int 0)]))]
6602 "TARGET_ARM"
6603 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
6604 "TARGET_ARM"
6605 [(set (match_dup 0)
6606 (if_then_else:SI (match_dup 1)
6607 (const_int 1)
6608 (const_int 0)))]
6609 ""
6610 [(set_attr "conds" "use")
6611 (set_attr "length" "8")
6612 (set_attr "type" "multiple")]
6613 )
6614
6615 (define_insn "*negscc_borrow"
6616 [(set (match_operand:SI 0 "s_register_operand" "=r")
6617 (neg:SI (match_operand:SI 1 "arm_borrow_operation" "")))]
6618 "TARGET_32BIT"
6619 "sbc\\t%0, %0, %0"
6620 [(set_attr "conds" "use")
6621 (set_attr "length" "4")
6622 (set_attr "type" "adc_reg")]
6623 )
6624
6625 (define_insn_and_split "*mov_negscc"
6626 [(set (match_operand:SI 0 "s_register_operand" "=r")
6627 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
6628 [(match_operand 2 "cc_register" "") (const_int 0)])))]
6629 "TARGET_ARM && !arm_borrow_operation (operands[1], SImode)"
6630 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
6631 "&& true"
6632 [(set (match_dup 0)
6633 (if_then_else:SI (match_dup 1)
6634 (match_dup 3)
6635 (const_int 0)))]
6636 {
6637 operands[3] = GEN_INT (~0);
6638 }
6639 [(set_attr "conds" "use")
6640 (set_attr "length" "8")
6641 (set_attr "type" "multiple")]
6642 )
6643
6644 (define_insn_and_split "*mov_notscc"
6645 [(set (match_operand:SI 0 "s_register_operand" "=r")
6646 (not:SI (match_operator:SI 1 "arm_comparison_operator"
6647 [(match_operand 2 "cc_register" "") (const_int 0)])))]
6648 "TARGET_ARM"
6649 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
6650 "TARGET_ARM"
6651 [(set (match_dup 0)
6652 (if_then_else:SI (match_dup 1)
6653 (match_dup 3)
6654 (match_dup 4)))]
6655 {
6656 operands[3] = GEN_INT (~1);
6657 operands[4] = GEN_INT (~0);
6658 }
6659 [(set_attr "conds" "use")
6660 (set_attr "length" "8")
6661 (set_attr "type" "multiple")]
6662 )
6663
6664 (define_expand "cstoresi4"
6665 [(set (match_operand:SI 0 "s_register_operand")
6666 (match_operator:SI 1 "expandable_comparison_operator"
6667 [(match_operand:SI 2 "s_register_operand")
6668 (match_operand:SI 3 "reg_or_int_operand")]))]
6669 "TARGET_32BIT || TARGET_THUMB1"
6670 "{
6671 rtx op3, scratch, scratch2;
6672
6673 if (!TARGET_THUMB1)
6674 {
6675 if (!arm_add_operand (operands[3], SImode))
6676 operands[3] = force_reg (SImode, operands[3]);
6677 emit_insn (gen_cstore_cc (operands[0], operands[1],
6678 operands[2], operands[3]));
6679 DONE;
6680 }
6681
6682 if (operands[3] == const0_rtx)
6683 {
6684 switch (GET_CODE (operands[1]))
6685 {
6686 case EQ:
6687 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
6688 break;
6689
6690 case NE:
6691 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
6692 break;
6693
6694 case LE:
6695 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
6696 NULL_RTX, 0, OPTAB_WIDEN);
6697 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
6698 NULL_RTX, 0, OPTAB_WIDEN);
6699 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
6700 operands[0], 1, OPTAB_WIDEN);
6701 break;
6702
6703 case GE:
6704 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
6705 NULL_RTX, 1);
6706 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
6707 NULL_RTX, 1, OPTAB_WIDEN);
6708 break;
6709
6710 case GT:
6711 scratch = expand_binop (SImode, ashr_optab, operands[2],
6712 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
6713 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
6714 NULL_RTX, 0, OPTAB_WIDEN);
6715 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
6716 0, OPTAB_WIDEN);
6717 break;
6718
6719 /* LT is handled by generic code. No need for unsigned with 0. */
6720 default:
6721 FAIL;
6722 }
6723 DONE;
6724 }
6725
6726 switch (GET_CODE (operands[1]))
6727 {
6728 case EQ:
6729 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
6730 NULL_RTX, 0, OPTAB_WIDEN);
6731 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
6732 break;
6733
6734 case NE:
6735 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
6736 NULL_RTX, 0, OPTAB_WIDEN);
6737 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
6738 break;
6739
6740 case LE:
6741 op3 = force_reg (SImode, operands[3]);
6742
6743 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
6744 NULL_RTX, 1, OPTAB_WIDEN);
6745 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
6746 NULL_RTX, 0, OPTAB_WIDEN);
6747 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
6748 op3, operands[2]));
6749 break;
6750
6751 case GE:
6752 op3 = operands[3];
6753 if (!thumb1_cmp_operand (op3, SImode))
6754 op3 = force_reg (SImode, op3);
6755 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
6756 NULL_RTX, 0, OPTAB_WIDEN);
6757 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
6758 NULL_RTX, 1, OPTAB_WIDEN);
6759 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
6760 operands[2], op3));
6761 break;
6762
6763 case LEU:
6764 op3 = force_reg (SImode, operands[3]);
6765 scratch = force_reg (SImode, const0_rtx);
6766 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
6767 op3, operands[2]));
6768 break;
6769
6770 case GEU:
6771 op3 = operands[3];
6772 if (!thumb1_cmp_operand (op3, SImode))
6773 op3 = force_reg (SImode, op3);
6774 scratch = force_reg (SImode, const0_rtx);
6775 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
6776 operands[2], op3));
6777 break;
6778
6779 case LTU:
6780 op3 = operands[3];
6781 if (!thumb1_cmp_operand (op3, SImode))
6782 op3 = force_reg (SImode, op3);
6783 scratch = gen_reg_rtx (SImode);
6784 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
6785 break;
6786
6787 case GTU:
6788 op3 = force_reg (SImode, operands[3]);
6789 scratch = gen_reg_rtx (SImode);
6790 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
6791 break;
6792
6793 /* No good sequences for GT, LT. */
6794 default:
6795 FAIL;
6796 }
6797 DONE;
6798 }")
6799
6800 (define_expand "cstorehf4"
6801 [(set (match_operand:SI 0 "s_register_operand")
6802 (match_operator:SI 1 "expandable_comparison_operator"
6803 [(match_operand:HF 2 "s_register_operand")
6804 (match_operand:HF 3 "vfp_compare_operand")]))]
6805 "TARGET_VFP_FP16INST"
6806 {
6807 if (!arm_validize_comparison (&operands[1],
6808 &operands[2],
6809 &operands[3]))
6810 FAIL;
6811
6812 emit_insn (gen_cstore_cc (operands[0], operands[1],
6813 operands[2], operands[3]));
6814 DONE;
6815 }
6816 )
6817
6818 (define_expand "cstoresf4"
6819 [(set (match_operand:SI 0 "s_register_operand")
6820 (match_operator:SI 1 "expandable_comparison_operator"
6821 [(match_operand:SF 2 "s_register_operand")
6822 (match_operand:SF 3 "vfp_compare_operand")]))]
6823 "TARGET_32BIT && TARGET_HARD_FLOAT"
6824 "emit_insn (gen_cstore_cc (operands[0], operands[1],
6825 operands[2], operands[3])); DONE;"
6826 )
6827
6828 (define_expand "cstoredf4"
6829 [(set (match_operand:SI 0 "s_register_operand")
6830 (match_operator:SI 1 "expandable_comparison_operator"
6831 [(match_operand:DF 2 "s_register_operand")
6832 (match_operand:DF 3 "vfp_compare_operand")]))]
6833 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6834 "emit_insn (gen_cstore_cc (operands[0], operands[1],
6835 operands[2], operands[3])); DONE;"
6836 )
6837
6838 (define_expand "cstoredi4"
6839 [(set (match_operand:SI 0 "s_register_operand")
6840 (match_operator:SI 1 "expandable_comparison_operator"
6841 [(match_operand:DI 2 "s_register_operand")
6842 (match_operand:DI 3 "cmpdi_operand")]))]
6843 "TARGET_32BIT"
6844 "{
6845 if (!arm_validize_comparison (&operands[1],
6846 &operands[2],
6847 &operands[3]))
6848 FAIL;
6849 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
6850 operands[3]));
6851 DONE;
6852 }"
6853 )
6854
6855 \f
6856 ;; Conditional move insns
6857
6858 (define_expand "movsicc"
6859 [(set (match_operand:SI 0 "s_register_operand")
6860 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
6861 (match_operand:SI 2 "arm_not_operand")
6862 (match_operand:SI 3 "arm_not_operand")))]
6863 "TARGET_32BIT"
6864 "
6865 {
6866 enum rtx_code code;
6867 rtx ccreg;
6868
6869 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6870 &XEXP (operands[1], 1)))
6871 FAIL;
6872
6873 code = GET_CODE (operands[1]);
6874 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6875 XEXP (operands[1], 1), NULL_RTX);
6876 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6877 }"
6878 )
6879
6880 (define_expand "movhfcc"
6881 [(set (match_operand:HF 0 "s_register_operand")
6882 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
6883 (match_operand:HF 2 "s_register_operand")
6884 (match_operand:HF 3 "s_register_operand")))]
6885 "TARGET_VFP_FP16INST"
6886 "
6887 {
6888 enum rtx_code code = GET_CODE (operands[1]);
6889 rtx ccreg;
6890
6891 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6892 &XEXP (operands[1], 1)))
6893 FAIL;
6894
6895 code = GET_CODE (operands[1]);
6896 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6897 XEXP (operands[1], 1), NULL_RTX);
6898 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6899 }"
6900 )
6901
6902 (define_expand "movsfcc"
6903 [(set (match_operand:SF 0 "s_register_operand")
6904 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
6905 (match_operand:SF 2 "s_register_operand")
6906 (match_operand:SF 3 "s_register_operand")))]
6907 "TARGET_32BIT && TARGET_HARD_FLOAT"
6908 "
6909 {
6910 enum rtx_code code = GET_CODE (operands[1]);
6911 rtx ccreg;
6912
6913 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6914 &XEXP (operands[1], 1)))
6915 FAIL;
6916
6917 code = GET_CODE (operands[1]);
6918 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6919 XEXP (operands[1], 1), NULL_RTX);
6920 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6921 }"
6922 )
6923
6924 (define_expand "movdfcc"
6925 [(set (match_operand:DF 0 "s_register_operand")
6926 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
6927 (match_operand:DF 2 "s_register_operand")
6928 (match_operand:DF 3 "s_register_operand")))]
6929 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
6930 "
6931 {
6932 enum rtx_code code = GET_CODE (operands[1]);
6933 rtx ccreg;
6934
6935 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6936 &XEXP (operands[1], 1)))
6937 FAIL;
6938 code = GET_CODE (operands[1]);
6939 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6940 XEXP (operands[1], 1), NULL_RTX);
6941 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6942 }"
6943 )
6944
6945 (define_insn "*cmov<mode>"
6946 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
6947 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
6948 [(match_operand 2 "cc_register" "") (const_int 0)])
6949 (match_operand:SDF 3 "s_register_operand"
6950 "<F_constraint>")
6951 (match_operand:SDF 4 "s_register_operand"
6952 "<F_constraint>")))]
6953 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
6954 "*
6955 {
6956 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
6957 switch (code)
6958 {
6959 case ARM_GE:
6960 case ARM_GT:
6961 case ARM_EQ:
6962 case ARM_VS:
6963 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
6964 case ARM_LT:
6965 case ARM_LE:
6966 case ARM_NE:
6967 case ARM_VC:
6968 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
6969 default:
6970 gcc_unreachable ();
6971 }
6972 return \"\";
6973 }"
6974 [(set_attr "conds" "use")
6975 (set_attr "type" "fcsel")]
6976 )
6977
6978 (define_insn "*cmovhf"
6979 [(set (match_operand:HF 0 "s_register_operand" "=t")
6980 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
6981 [(match_operand 2 "cc_register" "") (const_int 0)])
6982 (match_operand:HF 3 "s_register_operand" "t")
6983 (match_operand:HF 4 "s_register_operand" "t")))]
6984 "TARGET_VFP_FP16INST"
6985 "*
6986 {
6987 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
6988 switch (code)
6989 {
6990 case ARM_GE:
6991 case ARM_GT:
6992 case ARM_EQ:
6993 case ARM_VS:
6994 return \"vsel%d1.f16\\t%0, %3, %4\";
6995 case ARM_LT:
6996 case ARM_LE:
6997 case ARM_NE:
6998 case ARM_VC:
6999 return \"vsel%D1.f16\\t%0, %4, %3\";
7000 default:
7001 gcc_unreachable ();
7002 }
7003 return \"\";
7004 }"
7005 [(set_attr "conds" "use")
7006 (set_attr "type" "fcsel")]
7007 )
7008
7009 (define_insn_and_split "*movsicc_insn"
7010 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7011 (if_then_else:SI
7012 (match_operator 3 "arm_comparison_operator"
7013 [(match_operand 4 "cc_register" "") (const_int 0)])
7014 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7015 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7016 "TARGET_ARM"
7017 "@
7018 mov%D3\\t%0, %2
7019 mvn%D3\\t%0, #%B2
7020 mov%d3\\t%0, %1
7021 mvn%d3\\t%0, #%B1
7022 #
7023 #
7024 #
7025 #"
7026 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7027 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7028 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7029 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7030 "&& reload_completed"
7031 [(const_int 0)]
7032 {
7033 enum rtx_code rev_code;
7034 machine_mode mode;
7035 rtx rev_cond;
7036
7037 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7038 operands[3],
7039 gen_rtx_SET (operands[0], operands[1])));
7040
7041 rev_code = GET_CODE (operands[3]);
7042 mode = GET_MODE (operands[4]);
7043 if (mode == CCFPmode || mode == CCFPEmode)
7044 rev_code = reverse_condition_maybe_unordered (rev_code);
7045 else
7046 rev_code = reverse_condition (rev_code);
7047
7048 rev_cond = gen_rtx_fmt_ee (rev_code,
7049 VOIDmode,
7050 operands[4],
7051 const0_rtx);
7052 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7053 rev_cond,
7054 gen_rtx_SET (operands[0], operands[2])));
7055 DONE;
7056 }
7057 [(set_attr "length" "4,4,4,4,8,8,8,8")
7058 (set_attr "conds" "use")
7059 (set_attr_alternative "type"
7060 [(if_then_else (match_operand 2 "const_int_operand" "")
7061 (const_string "mov_imm")
7062 (const_string "mov_reg"))
7063 (const_string "mvn_imm")
7064 (if_then_else (match_operand 1 "const_int_operand" "")
7065 (const_string "mov_imm")
7066 (const_string "mov_reg"))
7067 (const_string "mvn_imm")
7068 (const_string "multiple")
7069 (const_string "multiple")
7070 (const_string "multiple")
7071 (const_string "multiple")])]
7072 )
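
;; Illustrative only: a conditional expression such as
;;   int sel (int a, int b, int c) { return a < b ? b : c; }
;; can be expanded through "movsicc" and matched by *movsicc_insn,
;; giving something like
;;   cmp   r0, r1
;;   movlt r0, r1
;;   movge r0, r2
;; (the two conditional moves correspond to the cond_exec pair the
;; split above emits).  The register allocation shown is an assumption.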
7073
7074 (define_insn "*movsfcc_soft_insn"
7075 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7076 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7077 [(match_operand 4 "cc_register" "") (const_int 0)])
7078 (match_operand:SF 1 "s_register_operand" "0,r")
7079 (match_operand:SF 2 "s_register_operand" "r,0")))]
7080 "TARGET_ARM && TARGET_SOFT_FLOAT"
7081 "@
7082 mov%D3\\t%0, %2
7083 mov%d3\\t%0, %1"
7084 [(set_attr "conds" "use")
7085 (set_attr "type" "mov_reg")]
7086 )
7087
7088 \f
7089 ;; Jump and linkage insns
7090
7091 (define_expand "jump"
7092 [(set (pc)
7093 (label_ref (match_operand 0 "" "")))]
7094 "TARGET_EITHER"
7095 ""
7096 )
7097
7098 (define_insn "*arm_jump"
7099 [(set (pc)
7100 (label_ref (match_operand 0 "" "")))]
7101 "TARGET_32BIT"
7102 "*
7103 {
7104 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7105 {
7106 arm_ccfsm_state += 2;
7107 return \"\";
7108 }
7109 return \"b%?\\t%l0\";
7110 }
7111 "
7112 [(set_attr "predicable" "yes")
7113 (set (attr "length")
7114 (if_then_else
7115 (and (match_test "TARGET_THUMB2")
7116 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7117 (le (minus (match_dup 0) (pc)) (const_int 2048))))
7118 (const_int 2)
7119 (const_int 4)))
7120 (set_attr "type" "branch")]
7121 )
7122
7123 (define_expand "call"
7124 [(parallel [(call (match_operand 0 "memory_operand")
7125 (match_operand 1 "general_operand"))
7126 (use (match_operand 2 "" ""))
7127 (clobber (reg:SI LR_REGNUM))])]
7128 "TARGET_EITHER"
7129 "
7130 {
7131 rtx callee, pat;
7132 tree addr = MEM_EXPR (operands[0]);
7133
7134 /* In an untyped call, we can get NULL for operand 2. */
7135 if (operands[2] == NULL_RTX)
7136 operands[2] = const0_rtx;
7137
7138 /* Decide if we should generate indirect calls by loading the
7139 32-bit address of the callee into a register before performing the
7140 branch and link. */
7141 callee = XEXP (operands[0], 0);
7142 if (GET_CODE (callee) == SYMBOL_REF
7143 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7144 : !REG_P (callee))
7145 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7146
7147 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
7148 /* Indirect call: set r9 with FDPIC value of callee. */
7149 XEXP (operands[0], 0)
7150 = arm_load_function_descriptor (XEXP (operands[0], 0));
7151
7152 if (detect_cmse_nonsecure_call (addr))
7153 {
7154 pat = gen_nonsecure_call_internal (operands[0], operands[1],
7155 operands[2]);
7156 emit_call_insn (pat);
7157 }
7158 else
7159 {
7160 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7161 arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
7162 }
7163
7164 /* Restore FDPIC register (r9) after call. */
7165 if (TARGET_FDPIC)
7166 {
7167 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7168 rtx initial_fdpic_reg
7169 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7170
7171 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7172 initial_fdpic_reg));
7173 }
7174
7175 DONE;
7176 }"
7177 )
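
;; Illustrative only: when arm_is_long_call_p is true for the callee
;; (e.g. via -mlong-calls or the "long_call" attribute), the expander
;; above forces the address into a register, so
;;   void far_func (void) __attribute__ ((long_call));
;;   ... far_func ();
;; is emitted roughly as
;;   ldr r3, =far_func
;;   blx r3                 @ *call_reg_armv5; older cores use the
;;                          @ "mov lr, pc" sequence from output_call
;; rather than a direct "bl far_func".  The register used is an
;; assumption.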
7178
7179 (define_insn "restore_pic_register_after_call"
7180 [(set (match_operand:SI 0 "s_register_operand" "+r,r")
7181 (unspec:SI [(match_dup 0)
7182 (match_operand:SI 1 "nonimmediate_operand" "r,m")]
7183 UNSPEC_PIC_RESTORE))]
7184 ""
7185 "@
7186 mov\t%0, %1
7187 ldr\t%0, %1"
7188 )
7189
7190 (define_expand "call_internal"
7191 [(parallel [(call (match_operand 0 "memory_operand")
7192 (match_operand 1 "general_operand"))
7193 (use (match_operand 2 "" ""))
7194 (clobber (reg:SI LR_REGNUM))])])
7195
7196 (define_expand "nonsecure_call_internal"
7197 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
7198 UNSPEC_NONSECURE_MEM)
7199 (match_operand 1 "general_operand"))
7200 (use (match_operand 2 "" ""))
7201 (clobber (reg:SI LR_REGNUM))])]
7202 "use_cmse"
7203 "
7204 {
7205 rtx tmp;
7206 tmp = copy_to_suggested_reg (XEXP (operands[0], 0),
7207 gen_rtx_REG (SImode, R4_REGNUM),
7208 SImode);
7209
7210 operands[0] = replace_equiv_address (operands[0], tmp);
7211 }")
7212
7213 (define_insn "*call_reg_armv5"
7214 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7215 (match_operand 1 "" ""))
7216 (use (match_operand 2 "" ""))
7217 (clobber (reg:SI LR_REGNUM))]
7218 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7219 "blx%?\\t%0"
7220 [(set_attr "type" "call")]
7221 )
7222
7223 (define_insn "*call_reg_arm"
7224 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7225 (match_operand 1 "" ""))
7226 (use (match_operand 2 "" ""))
7227 (clobber (reg:SI LR_REGNUM))]
7228 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7229 "*
7230 return output_call (operands);
7231 "
7232 ;; The length is the worst case; normally it is only two instructions.
7233 [(set_attr "length" "12")
7234 (set_attr "type" "call")]
7235 )
7236
7237
7238 (define_expand "call_value"
7239 [(parallel [(set (match_operand 0 "" "")
7240 (call (match_operand 1 "memory_operand")
7241 (match_operand 2 "general_operand")))
7242 (use (match_operand 3 "" ""))
7243 (clobber (reg:SI LR_REGNUM))])]
7244 "TARGET_EITHER"
7245 "
7246 {
7247 rtx pat, callee;
7248 tree addr = MEM_EXPR (operands[1]);
7249
7250 /* In an untyped call, we can get NULL for operand 3. */
7251 if (operands[3] == NULL_RTX)
7252 operands[3] = const0_rtx;
7253
7254 /* Decide if we should generate indirect calls by loading the
7255 32-bit address of the callee into a register before performing the
7256 branch and link. */
7257 callee = XEXP (operands[1], 0);
7258 if (GET_CODE (callee) == SYMBOL_REF
7259 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7260 : !REG_P (callee))
7261 XEXP (operands[1], 0) = force_reg (Pmode, callee);
7262
7263 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
7264 /* Indirect call: set r9 with FDPIC value of callee. */
7265 XEXP (operands[1], 0)
7266 = arm_load_function_descriptor (XEXP (operands[1], 0));
7267
7268 if (detect_cmse_nonsecure_call (addr))
7269 {
7270 pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
7271 operands[2], operands[3]);
7272 emit_call_insn (pat);
7273 }
7274 else
7275 {
7276 pat = gen_call_value_internal (operands[0], operands[1],
7277 operands[2], operands[3]);
7278 arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
7279 }
7280
7281 /* Restore FDPIC register (r9) after call. */
7282 if (TARGET_FDPIC)
7283 {
7284 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7285 rtx initial_fdpic_reg
7286 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7287
7288 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7289 initial_fdpic_reg));
7290 }
7291
7292 DONE;
7293 }"
7294 )
7295
7296 (define_expand "call_value_internal"
7297 [(parallel [(set (match_operand 0 "" "")
7298 (call (match_operand 1 "memory_operand")
7299 (match_operand 2 "general_operand")))
7300 (use (match_operand 3 "" ""))
7301 (clobber (reg:SI LR_REGNUM))])])
7302
7303 (define_expand "nonsecure_call_value_internal"
7304 [(parallel [(set (match_operand 0 "" "")
7305 (call (unspec:SI [(match_operand 1 "memory_operand")]
7306 UNSPEC_NONSECURE_MEM)
7307 (match_operand 2 "general_operand")))
7308 (use (match_operand 3 "" ""))
7309 (clobber (reg:SI LR_REGNUM))])]
7310 "use_cmse"
7311 "
7312 {
7313 rtx tmp;
7314 tmp = copy_to_suggested_reg (XEXP (operands[1], 0),
7315 gen_rtx_REG (SImode, R4_REGNUM),
7316 SImode);
7317
7318 operands[1] = replace_equiv_address (operands[1], tmp);
7319 }")
7320
7321 (define_insn "*call_value_reg_armv5"
7322 [(set (match_operand 0 "" "")
7323 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7324 (match_operand 2 "" "")))
7325 (use (match_operand 3 "" ""))
7326 (clobber (reg:SI LR_REGNUM))]
7327 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7328 "blx%?\\t%1"
7329 [(set_attr "type" "call")]
7330 )
7331
7332 (define_insn "*call_value_reg_arm"
7333 [(set (match_operand 0 "" "")
7334 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7335 (match_operand 2 "" "")))
7336 (use (match_operand 3 "" ""))
7337 (clobber (reg:SI LR_REGNUM))]
7338 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7339 "*
7340 return output_call (&operands[1]);
7341 "
7342 [(set_attr "length" "12")
7343 (set_attr "type" "call")]
7344 )
7345
7346 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
7347 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
7348
7349 (define_insn "*call_symbol"
7350 [(call (mem:SI (match_operand:SI 0 "" ""))
7351 (match_operand 1 "" ""))
7352 (use (match_operand 2 "" ""))
7353 (clobber (reg:SI LR_REGNUM))]
7354 "TARGET_32BIT
7355 && !SIBLING_CALL_P (insn)
7356 && (GET_CODE (operands[0]) == SYMBOL_REF)
7357 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
7358 "*
7359 {
7360 rtx op = operands[0];
7361
7362 /* Switch mode now when possible. */
7363 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7364 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7365 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
7366
7367 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
7368 }"
7369 [(set_attr "type" "call")]
7370 )
7371
7372 (define_insn "*call_value_symbol"
7373 [(set (match_operand 0 "" "")
7374 (call (mem:SI (match_operand:SI 1 "" ""))
7375 (match_operand:SI 2 "" "")))
7376 (use (match_operand 3 "" ""))
7377 (clobber (reg:SI LR_REGNUM))]
7378 "TARGET_32BIT
7379 && !SIBLING_CALL_P (insn)
7380 && (GET_CODE (operands[1]) == SYMBOL_REF)
7381 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
7382 "*
7383 {
7384 rtx op = operands[1];
7385
7386 /* Switch mode now when possible. */
7387 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7388 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7389 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
7390
7391 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
7392 }"
7393 [(set_attr "type" "call")]
7394 )
7395
7396 (define_expand "sibcall_internal"
7397 [(parallel [(call (match_operand 0 "memory_operand")
7398 (match_operand 1 "general_operand"))
7399 (return)
7400 (use (match_operand 2 "" ""))])])
7401
7402 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
7403 (define_expand "sibcall"
7404 [(parallel [(call (match_operand 0 "memory_operand")
7405 (match_operand 1 "general_operand"))
7406 (return)
7407 (use (match_operand 2 "" ""))])]
7408 "TARGET_32BIT"
7409 "
7410 {
7411 rtx pat;
7412
7413 if ((!REG_P (XEXP (operands[0], 0))
7414 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
7415 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
7416 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
7417 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
7418
7419 if (operands[2] == NULL_RTX)
7420 operands[2] = const0_rtx;
7421
7422 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
7423 arm_emit_call_insn (pat, operands[0], true);
7424 DONE;
7425 }"
7426 )
7427
7428 (define_expand "sibcall_value_internal"
7429 [(parallel [(set (match_operand 0 "" "")
7430 (call (match_operand 1 "memory_operand")
7431 (match_operand 2 "general_operand")))
7432 (return)
7433 (use (match_operand 3 "" ""))])])
7434
7435 (define_expand "sibcall_value"
7436 [(parallel [(set (match_operand 0 "" "")
7437 (call (match_operand 1 "memory_operand")
7438 (match_operand 2 "general_operand")))
7439 (return)
7440 (use (match_operand 3 "" ""))])]
7441 "TARGET_32BIT"
7442 "
7443 {
7444 rtx pat;
7445
7446 if ((!REG_P (XEXP (operands[1], 0))
7447 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
7448 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
7449 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
7450 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
7451
7452 if (operands[3] == NULL_RTX)
7453 operands[3] = const0_rtx;
7454
7455 pat = gen_sibcall_value_internal (operands[0], operands[1],
7456 operands[2], operands[3]);
7457 arm_emit_call_insn (pat, operands[1], true);
7458 DONE;
7459 }"
7460 )
7461
7462 (define_insn "*sibcall_insn"
7463 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
7464 (match_operand 1 "" ""))
7465 (return)
7466 (use (match_operand 2 "" ""))]
7467 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7468 "*
7469 if (which_alternative == 1)
7470 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
7471 else
7472 {
7473 if (arm_arch5t || arm_arch4t)
7474 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
7475 else
7476 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
7477 }
7478 "
7479 [(set_attr "type" "call")]
7480 )
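
;; Illustrative only: a tail call such as
;;   int g (int);
;;   int f (int x) { return g (x + 1); }
;; can go through the "sibcall" expander and match *sibcall_insn,
;; giving
;;   add r0, r0, #1
;;   b   g
;; instead of "bl g" followed by a separate return (assuming the call
;; is not a long call and sibling calls are otherwise permitted).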
7481
7482 (define_insn "*sibcall_value_insn"
7483 [(set (match_operand 0 "" "")
7484 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
7485 (match_operand 2 "" "")))
7486 (return)
7487 (use (match_operand 3 "" ""))]
7488 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7489 "*
7490 if (which_alternative == 1)
7491 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
7492 else
7493 {
7494 if (arm_arch5t || arm_arch4t)
7495 return \"bx%?\\t%1\";
7496 else
7497 return \"mov%?\\t%|pc, %1\\t%@ indirect sibling call\";
7498 }
7499 "
7500 [(set_attr "type" "call")]
7501 )
7502
7503 (define_expand "<return_str>return"
7504 [(RETURNS)]
7505 "(TARGET_ARM || (TARGET_THUMB2
7506 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
7507 && !IS_STACKALIGN (arm_current_func_type ())))
7508 <return_cond_false>"
7509 "
7510 {
7511 if (TARGET_THUMB2)
7512 {
7513 thumb2_expand_return (<return_simple_p>);
7514 DONE;
7515 }
7516 }
7517 "
7518 )
7519
7520 ;; Often the return insn will be the same as loading from memory, so set the type attribute accordingly.
7521 (define_insn "*arm_return"
7522 [(return)]
7523 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
7524 "*
7525 {
7526 if (arm_ccfsm_state == 2)
7527 {
7528 arm_ccfsm_state += 2;
7529 return \"\";
7530 }
7531 return output_return_instruction (const_true_rtx, true, false, false);
7532 }"
7533 [(set_attr "type" "load_4")
7534 (set_attr "length" "12")
7535 (set_attr "predicable" "yes")]
7536 )
7537
7538 (define_insn "*cond_<return_str>return"
7539 [(set (pc)
7540 (if_then_else (match_operator 0 "arm_comparison_operator"
7541 [(match_operand 1 "cc_register" "") (const_int 0)])
7542 (RETURNS)
7543 (pc)))]
7544 "TARGET_ARM <return_cond_true>"
7545 "*
7546 {
7547 if (arm_ccfsm_state == 2)
7548 {
7549 arm_ccfsm_state += 2;
7550 return \"\";
7551 }
7552 return output_return_instruction (operands[0], true, false,
7553 <return_simple_p>);
7554 }"
7555 [(set_attr "conds" "use")
7556 (set_attr "length" "12")
7557 (set_attr "type" "load_4")]
7558 )
7559
7560 (define_insn "*cond_<return_str>return_inverted"
7561 [(set (pc)
7562 (if_then_else (match_operator 0 "arm_comparison_operator"
7563 [(match_operand 1 "cc_register" "") (const_int 0)])
7564 (pc)
7565 (RETURNS)))]
7566 "TARGET_ARM <return_cond_true>"
7567 "*
7568 {
7569 if (arm_ccfsm_state == 2)
7570 {
7571 arm_ccfsm_state += 2;
7572 return \"\";
7573 }
7574 return output_return_instruction (operands[0], true, true,
7575 <return_simple_p>);
7576 }"
7577 [(set_attr "conds" "use")
7578 (set_attr "length" "12")
7579 (set_attr "type" "load_4")]
7580 )
7581
7582 (define_insn "*arm_simple_return"
7583 [(simple_return)]
7584 "TARGET_ARM"
7585 "*
7586 {
7587 if (arm_ccfsm_state == 2)
7588 {
7589 arm_ccfsm_state += 2;
7590 return \"\";
7591 }
7592 return output_return_instruction (const_true_rtx, true, false, true);
7593 }"
7594 [(set_attr "type" "branch")
7595 (set_attr "length" "4")
7596 (set_attr "predicable" "yes")]
7597 )
7598
7599 ;; Generate a sequence of instructions to determine if the processor is
7600 ;; in 26-bit or 32-bit mode, and return the appropriate return address
7601 ;; mask.
7602
7603 (define_expand "return_addr_mask"
7604 [(set (match_dup 1)
7605 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7606 (const_int 0)))
7607 (set (match_operand:SI 0 "s_register_operand")
7608 (if_then_else:SI (eq (match_dup 1) (const_int 0))
7609 (const_int -1)
7610 (const_int 67108860)))] ; 0x03fffffc
7611 "TARGET_ARM"
7612 "
7613 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
7614 ")
7615
7616 (define_insn "*check_arch2"
7617 [(set (match_operand:CC_NOOV 0 "cc_register" "")
7618 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7619 (const_int 0)))]
7620 "TARGET_ARM"
7621 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
7622 [(set_attr "length" "8")
7623 (set_attr "conds" "set")
7624 (set_attr "type" "multiple")]
7625 )
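
;; Informally, the check above works as follows: "teq r0, r0" forces
;; Z to 1 so that the PSR flag bits are non-zero, and "teq pc, pc"
;; then compares two reads of the PC that are equal in 32-bit mode but
;; differ in 26-bit mode, where one operand position includes the PSR
;; bits.  The eq/ne result selects between the -1 and 0x03fffffc
;; return-address masks in the expander above.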
7626
7627 ;; Call subroutine returning any type.
7628
7629 (define_expand "untyped_call"
7630 [(parallel [(call (match_operand 0 "" "")
7631 (const_int 0))
7632 (match_operand 1 "" "")
7633 (match_operand 2 "" "")])]
7634 "TARGET_EITHER && !TARGET_FDPIC"
7635 "
7636 {
7637 int i;
7638 rtx par = gen_rtx_PARALLEL (VOIDmode,
7639 rtvec_alloc (XVECLEN (operands[2], 0)));
7640 rtx addr = gen_reg_rtx (Pmode);
7641 rtx mem;
7642 int size = 0;
7643
7644 emit_move_insn (addr, XEXP (operands[1], 0));
7645 mem = change_address (operands[1], BLKmode, addr);
7646
7647 for (i = 0; i < XVECLEN (operands[2], 0); i++)
7648 {
7649 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
7650
7651 /* Default code only uses r0 as a return value, but we could
7652 be using anything up to 4 registers. */
7653 if (REGNO (src) == R0_REGNUM)
7654 src = gen_rtx_REG (TImode, R0_REGNUM);
7655
7656 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
7657 GEN_INT (size));
7658 size += GET_MODE_SIZE (GET_MODE (src));
7659 }
7660
7661 emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
7662
7663 size = 0;
7664
7665 for (i = 0; i < XVECLEN (par, 0); i++)
7666 {
7667 HOST_WIDE_INT offset = 0;
7668 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
7669
7670 if (size != 0)
7671 emit_move_insn (addr, plus_constant (Pmode, addr, size));
7672
7673 mem = change_address (mem, GET_MODE (reg), NULL);
7674 if (REGNO (reg) == R0_REGNUM)
7675 {
7676 /* On Thumb we have to use a write-back instruction. */
7677 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
7678 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7679 size = TARGET_ARM ? 16 : 0;
7680 }
7681 else
7682 {
7683 emit_move_insn (mem, reg);
7684 size = GET_MODE_SIZE (GET_MODE (reg));
7685 }
7686 }
7687
7688 /* The optimizer does not know that the call sets the function value
7689 registers we stored in the result block. We avoid problems by
7690 claiming that all hard registers are used and clobbered at this
7691 point. */
7692 emit_insn (gen_blockage ());
7693
7694 DONE;
7695 }"
7696 )
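
;; Illustrative only: "untyped_call" is used to implement
;; __builtin_apply, e.g.
;;   void forward (void (*fn) ())
;;   {
;;     __builtin_return (__builtin_apply (fn, __builtin_apply_args (), 64));
;;   }
;; (the argument-block size 64 is just an assumed value).  The expander
;; above performs the call and then stores r0-r3, plus any other result
;; registers listed in the result block, back to memory so that
;; __builtin_return can reload them later.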
7697
7698 (define_expand "untyped_return"
7699 [(match_operand:BLK 0 "memory_operand")
7700 (match_operand 1 "" "")]
7701 "TARGET_EITHER && !TARGET_FDPIC"
7702 "
7703 {
7704 int i;
7705 rtx addr = gen_reg_rtx (Pmode);
7706 rtx mem;
7707 int size = 0;
7708
7709 emit_move_insn (addr, XEXP (operands[0], 0));
7710 mem = change_address (operands[0], BLKmode, addr);
7711
7712 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7713 {
7714 HOST_WIDE_INT offset = 0;
7715 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
7716
7717 if (size != 0)
7718 emit_move_insn (addr, plus_constant (Pmode, addr, size));
7719
7720 mem = change_address (mem, GET_MODE (reg), NULL);
7721 if (REGNO (reg) == R0_REGNUM)
7722 {
7723 /* On Thumb we have to use a write-back instruction. */
7724 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
7725 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7726 size = TARGET_ARM ? 16 : 0;
7727 }
7728 else
7729 {
7730 emit_move_insn (reg, mem);
7731 size = GET_MODE_SIZE (GET_MODE (reg));
7732 }
7733 }
7734
7735 /* Emit USE insns before the return. */
7736 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7737 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
7738
7739 /* Construct the return. */
7740 expand_naked_return ();
7741
7742 DONE;
7743 }"
7744 )
7745
7746 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
7747 ;; all of memory. This blocks insns from being moved across this point.
7748
7749 (define_insn "blockage"
7750 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
7751 "TARGET_EITHER"
7752 ""
7753 [(set_attr "length" "0")
7754 (set_attr "type" "block")]
7755 )
7756
7757 ;; Since we hard-code r0 here, use the 'o' constraint to prevent
7758 ;; provoking undefined behaviour in the hardware by emitting
7759 ;; auto-increment operations with r0 potentially as the base register.
7760 (define_insn "probe_stack"
7761 [(set (match_operand:SI 0 "memory_operand" "=o")
7762 (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
7763 "TARGET_32BIT"
7764 "str%?\\tr0, %0"
7765 [(set_attr "type" "store_4")
7766 (set_attr "predicable" "yes")]
7767 )
7768
7769 (define_insn "probe_stack_range"
7770 [(set (match_operand:SI 0 "register_operand" "=r")
7771 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
7772 (match_operand:SI 2 "register_operand" "r")]
7773 VUNSPEC_PROBE_STACK_RANGE))]
7774 "TARGET_32BIT"
7775 {
7776 return output_probe_stack_range (operands[0], operands[2]);
7777 }
7778 [(set_attr "type" "multiple")
7779 (set_attr "conds" "clob")]
7780 )
7781
7782 ;; Named patterns for stack smashing protection.
7783 (define_expand "stack_protect_combined_set"
7784 [(parallel
7785 [(set (match_operand:SI 0 "memory_operand")
7786 (unspec:SI [(match_operand:SI 1 "guard_operand")]
7787 UNSPEC_SP_SET))
7788 (clobber (match_scratch:SI 2 ""))
7789 (clobber (match_scratch:SI 3 ""))])]
7790 ""
7791 ""
7792 )
7793
7794 ;; Use a separate insn from the above expand so that the mem can be outside
7795 ;; operand #1 when register allocation happens.  This is needed to stop LRA
7796 ;; from trying to reload the guard, since we need to control how PIC access is
7797 ;; done in the -fpic/-fPIC case (see the COMPUTE_NOW parameter when calling
7798 ;; legitimize_pic_address ()).
7799 (define_insn_and_split "*stack_protect_combined_set_insn"
7800 [(set (match_operand:SI 0 "memory_operand" "=m,m")
7801 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
7802 UNSPEC_SP_SET))
7803 (clobber (match_scratch:SI 2 "=&l,&r"))
7804 (clobber (match_scratch:SI 3 "=&l,&r"))]
7805 ""
7806 "#"
7807 "reload_completed"
7808 [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
7809 UNSPEC_SP_SET))
7810 (clobber (match_dup 2))])]
7811 "
7812 {
7813 if (flag_pic)
7814 {
7815 rtx pic_reg;
7816
7817 if (TARGET_FDPIC)
7818 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7819 else
7820 pic_reg = operands[3];
7821
7822 /* Forces recomputing of GOT base now. */
7823 legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
7824 true /*compute_now*/);
7825 }
7826 else
7827 {
7828 if (address_operand (operands[1], SImode))
7829 operands[2] = operands[1];
7830 else
7831 {
7832 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
7833 emit_move_insn (operands[2], mem);
7834 }
7835 }
7836 }"
7837 [(set_attr "arch" "t1,32")]
7838 )
7839
7840 ;; DO NOT SPLIT THIS INSN. It's important for security reasons that the
7841 ;; canary value does not live beyond the life of this sequence.
7842 (define_insn "*stack_protect_set_insn"
7843 [(set (match_operand:SI 0 "memory_operand" "=m,m")
7844 (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
7845 UNSPEC_SP_SET))
7846 (clobber (match_dup 1))]
7847 ""
7848 "@
7849 ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
7850 ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
7851 [(set_attr "length" "8,12")
7852 (set_attr "conds" "clob,nocond")
7853 (set_attr "type" "multiple")
7854 (set_attr "arch" "t1,32")]
7855 )
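
;; Illustrative only: with -fstack-protector-strong, a function with a
;; local array gets its canary written by the pattern above, roughly
;;   ldr r3, =__stack_chk_guard   @ guard address (GNU/Linux-style targets)
;;   ldr r3, [r3]
;;   str r3, [sp, #<canary slot>]
;;   mov r3, #0                   @ kill the canary value immediately
;; and checked again before returning by the test patterns below.  The
;; register and stack slot shown are assumptions.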
7856
7857 (define_expand "stack_protect_combined_test"
7858 [(parallel
7859 [(set (pc)
7860 (if_then_else
7861 (eq (match_operand:SI 0 "memory_operand")
7862 (unspec:SI [(match_operand:SI 1 "guard_operand")]
7863 UNSPEC_SP_TEST))
7864 (label_ref (match_operand 2))
7865 (pc)))
7866 (clobber (match_scratch:SI 3 ""))
7867 (clobber (match_scratch:SI 4 ""))
7868 (clobber (reg:CC CC_REGNUM))])]
7869 ""
7870 ""
7871 )
7872
7873 ;; Use a separate insn from the above expand so that the mem can be outside
7874 ;; operand #1 when register allocation happens.  This is needed to stop LRA
7875 ;; from trying to reload the guard, since we need to control how PIC access is
7876 ;; done in the -fpic/-fPIC case (see the COMPUTE_NOW parameter when calling
7877 ;; legitimize_pic_address ()).
7878 (define_insn_and_split "*stack_protect_combined_test_insn"
7879 [(set (pc)
7880 (if_then_else
7881 (eq (match_operand:SI 0 "memory_operand" "m,m")
7882 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
7883 UNSPEC_SP_TEST))
7884 (label_ref (match_operand 2))
7885 (pc)))
7886 (clobber (match_scratch:SI 3 "=&l,&r"))
7887 (clobber (match_scratch:SI 4 "=&l,&r"))
7888 (clobber (reg:CC CC_REGNUM))]
7889 ""
7890 "#"
7891 "reload_completed"
7892 [(const_int 0)]
7893 {
7894 rtx eq;
7895
7896 if (flag_pic)
7897 {
7898 rtx pic_reg;
7899
7900 if (TARGET_FDPIC)
7901 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7902 else
7903 pic_reg = operands[4];
7904
7905 /* Forces recomputing of GOT base now. */
7906 legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
7907 true /*compute_now*/);
7908 }
7909 else
7910 {
7911 if (address_operand (operands[1], SImode))
7912 operands[3] = operands[1];
7913 else
7914 {
7915 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
7916 emit_move_insn (operands[3], mem);
7917 }
7918 }
7919 if (TARGET_32BIT)
7920 {
7921 emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
7922 operands[3]));
7923 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
7924 eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
7925 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
7926 }
7927 else
7928 {
7929 emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
7930 operands[3]));
7931 eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
7932 emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
7933 operands[2]));
7934 }
7935 DONE;
7936 }
7937 [(set_attr "arch" "t1,32")]
7938 )
7939
7940 (define_insn "arm_stack_protect_test_insn"
7941 [(set (reg:CC_Z CC_REGNUM)
7942 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
7943 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
7944 UNSPEC_SP_TEST)
7945 (const_int 0)))
7946 (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
7947 (clobber (match_dup 2))]
7948 "TARGET_32BIT"
7949 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0"
7950 [(set_attr "length" "8,12")
7951 (set_attr "conds" "set")
7952 (set_attr "type" "multiple")
7953 (set_attr "arch" "t,32")]
7954 )
7955
7956 (define_expand "casesi"
7957 [(match_operand:SI 0 "s_register_operand") ; index to jump on
7958 (match_operand:SI 1 "const_int_operand") ; lower bound
7959 (match_operand:SI 2 "const_int_operand") ; total range
7960 (match_operand:SI 3 "" "") ; table label
7961 (match_operand:SI 4 "" "")] ; Out of range label
7962 "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
7963 "
7964 {
7965 enum insn_code code;
7966 if (operands[1] != const0_rtx)
7967 {
7968 rtx reg = gen_reg_rtx (SImode);
7969
7970 emit_insn (gen_addsi3 (reg, operands[0],
7971 gen_int_mode (-INTVAL (operands[1]),
7972 SImode)));
7973 operands[0] = reg;
7974 }
7975
7976 if (TARGET_ARM)
7977 code = CODE_FOR_arm_casesi_internal;
7978 else if (TARGET_THUMB1)
7979 code = CODE_FOR_thumb1_casesi_internal_pic;
7980 else if (flag_pic)
7981 code = CODE_FOR_thumb2_casesi_internal_pic;
7982 else
7983 code = CODE_FOR_thumb2_casesi_internal;
7984
7985 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
7986 operands[2] = force_reg (SImode, operands[2]);
7987
7988 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
7989 operands[3], operands[4]));
7990 DONE;
7991 }"
7992 )
7993
7994 ;; The USE in this pattern is needed to tell flow analysis that this is
7995 ;; a CASESI insn. It has no other purpose.
7996 (define_expand "arm_casesi_internal"
7997 [(parallel [(set (pc)
7998 (if_then_else
7999 (leu (match_operand:SI 0 "s_register_operand")
8000 (match_operand:SI 1 "arm_rhs_operand"))
8001 (match_dup 4)
8002 (label_ref:SI (match_operand 3 ""))))
8003 (clobber (reg:CC CC_REGNUM))
8004 (use (label_ref:SI (match_operand 2 "")))])]
8005 "TARGET_ARM"
8006 {
8007 operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
8008 operands[4] = gen_rtx_PLUS (SImode, operands[4],
8009 gen_rtx_LABEL_REF (SImode, operands[2]));
8010 operands[4] = gen_rtx_MEM (SImode, operands[4]);
8011 MEM_READONLY_P (operands[4]) = 1;
8012 MEM_NOTRAP_P (operands[4]) = 1;
8013 })
8014
8015 (define_insn "*arm_casesi_internal"
8016 [(parallel [(set (pc)
8017 (if_then_else
8018 (leu (match_operand:SI 0 "s_register_operand" "r")
8019 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8020 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8021 (label_ref:SI (match_operand 2 "" ""))))
8022 (label_ref:SI (match_operand 3 "" ""))))
8023 (clobber (reg:CC CC_REGNUM))
8024 (use (label_ref:SI (match_dup 2)))])]
8025 "TARGET_ARM"
8026 "*
8027 if (flag_pic)
8028 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8029 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8030 "
8031 [(set_attr "conds" "clob")
8032 (set_attr "length" "12")
8033 (set_attr "type" "multiple")]
8034 )
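
;; Illustrative only: a dense switch such as
;;   int f (int i)
;;   {
;;     switch (i)
;;       {
;;       case 0: return 10; case 1: return 11;
;;       case 2: return 12; case 3: return 13;
;;       default: return -1;
;;       }
;;   }
;; can dispatch through the three-instruction sequence shown above,
;;   cmp    r0, #3
;;   ldrls  pc, [pc, r0, asl #2]
;;   b      .Ldefault
;; followed by the inline table of case-label addresses.  The register
;; and label names are assumptions.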
8035
8036 (define_expand "indirect_jump"
8037 [(set (pc)
8038 (match_operand:SI 0 "s_register_operand"))]
8039 "TARGET_EITHER"
8040 "
8041 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8042 address and use bx. */
8043 if (TARGET_THUMB2)
8044 {
8045 rtx tmp;
8046 tmp = gen_reg_rtx (SImode);
8047 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8048 operands[0] = tmp;
8049 }
8050 "
8051 )
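
;; Illustrative only: an indirect jump such as the GNU C computed goto
;;   void f (void *p) { goto *p; }
;; matches *arm_indirect_jump below in ARM state ("mov pc, r0"), while
;; for Thumb-2 the expander above first emits "orr r0, r0, #1" so that
;; the subsequent BX stays in Thumb state.  The register shown is an
;; assumption.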
8052
8053 ;; NB Never uses BX.
8054 (define_insn "*arm_indirect_jump"
8055 [(set (pc)
8056 (match_operand:SI 0 "s_register_operand" "r"))]
8057 "TARGET_ARM"
8058 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8059 [(set_attr "predicable" "yes")
8060 (set_attr "type" "branch")]
8061 )
8062
8063 (define_insn "*load_indirect_jump"
8064 [(set (pc)
8065 (match_operand:SI 0 "memory_operand" "m"))]
8066 "TARGET_ARM"
8067 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8068 [(set_attr "type" "load_4")
8069 (set_attr "pool_range" "4096")
8070 (set_attr "neg_pool_range" "4084")
8071 (set_attr "predicable" "yes")]
8072 )
8073
8074 \f
8075 ;; Misc insns
8076
8077 (define_insn "nop"
8078 [(const_int 0)]
8079 "TARGET_EITHER"
8080 "nop"
8081 [(set (attr "length")
8082 (if_then_else (eq_attr "is_thumb" "yes")
8083 (const_int 2)
8084 (const_int 4)))
8085 (set_attr "type" "mov_reg")]
8086 )
8087
8088 (define_insn "trap"
8089 [(trap_if (const_int 1) (const_int 0))]
8090 ""
8091 "*
8092 if (TARGET_ARM)
8093 return \".inst\\t0xe7f000f0\";
8094 else
8095 return \".inst\\t0xdeff\";
8096 "
8097 [(set (attr "length")
8098 (if_then_else (eq_attr "is_thumb" "yes")
8099 (const_int 2)
8100 (const_int 4)))
8101 (set_attr "type" "trap")
8102 (set_attr "conds" "unconditional")]
8103 )
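
;; Illustrative only: __builtin_trap () expands to the "trap" pattern,
;; so
;;   void die (void) { __builtin_trap (); }
;; assembles to the permanently-undefined encoding ".inst 0xe7f000f0"
;; in ARM state or ".inst 0xdeff" in Thumb state, as selected above.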
8104
8105 \f
8106 ;; Patterns to allow combination of arithmetic, cond code and shifts
8107
8108 (define_insn "*<arith_shift_insn>_multsi"
8109 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8110 (SHIFTABLE_OPS:SI
8111 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
8112 (match_operand:SI 3 "power_of_two_operand" ""))
8113 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
8114 "TARGET_32BIT"
8115 "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
8116 [(set_attr "predicable" "yes")
8117 (set_attr "shift" "2")
8118 (set_attr "arch" "a,t2")
8119 (set_attr "type" "alu_shift_imm")])
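
;; Illustrative only: these patterns let combine fold a multiplication
;; by a power of two into the shifter operand, so
;;   int f (int a, int b) { return a + b * 8; }
;; can become the single instruction
;;   add r0, r0, r1, lsl #3
;; instead of a separate shift (or multiply) followed by the add.  The
;; register allocation shown is an assumption.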
8120
8121 (define_insn "*<arith_shift_insn>_shiftsi"
8122 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8123 (SHIFTABLE_OPS:SI
8124 (match_operator:SI 2 "shift_nomul_operator"
8125 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8126 (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
8127 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
8128 "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
8129 "<arith_shift_insn>%?\\t%0, %1, %3%S2"
8130 [(set_attr "predicable" "yes")
8131 (set_attr "shift" "3")
8132 (set_attr "arch" "a,t2,a")
8133 (set_attr "type" "alu_shift_imm,alu_shift_imm,alu_shift_reg")])
8134
8135 (define_split
8136 [(set (match_operand:SI 0 "s_register_operand" "")
8137 (match_operator:SI 1 "shiftable_operator"
8138 [(match_operator:SI 2 "shiftable_operator"
8139 [(match_operator:SI 3 "shift_operator"
8140 [(match_operand:SI 4 "s_register_operand" "")
8141 (match_operand:SI 5 "reg_or_int_operand" "")])
8142 (match_operand:SI 6 "s_register_operand" "")])
8143 (match_operand:SI 7 "arm_rhs_operand" "")]))
8144 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8145 "TARGET_32BIT"
8146 [(set (match_dup 8)
8147 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8148 (match_dup 6)]))
8149 (set (match_dup 0)
8150 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
8151 "")
8152
8153 (define_insn "*arith_shiftsi_compare0"
8154 [(set (reg:CC_NOOV CC_REGNUM)
8155 (compare:CC_NOOV
8156 (match_operator:SI 1 "shiftable_operator"
8157 [(match_operator:SI 3 "shift_operator"
8158 [(match_operand:SI 4 "s_register_operand" "r,r")
8159 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8160 (match_operand:SI 2 "s_register_operand" "r,r")])
8161 (const_int 0)))
8162 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8163 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8164 (match_dup 2)]))]
8165 "TARGET_32BIT"
8166 "%i1s%?\\t%0, %2, %4%S3"
8167 [(set_attr "conds" "set")
8168 (set_attr "shift" "4")
8169 (set_attr "arch" "32,a")
8170 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8171
8172 (define_insn "*arith_shiftsi_compare0_scratch"
8173 [(set (reg:CC_NOOV CC_REGNUM)
8174 (compare:CC_NOOV
8175 (match_operator:SI 1 "shiftable_operator"
8176 [(match_operator:SI 3 "shift_operator"
8177 [(match_operand:SI 4 "s_register_operand" "r,r")
8178 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8179 (match_operand:SI 2 "s_register_operand" "r,r")])
8180 (const_int 0)))
8181 (clobber (match_scratch:SI 0 "=r,r"))]
8182 "TARGET_32BIT"
8183 "%i1s%?\\t%0, %2, %4%S3"
8184 [(set_attr "conds" "set")
8185 (set_attr "shift" "4")
8186 (set_attr "arch" "32,a")
8187 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8188
8189 (define_insn "*sub_shiftsi"
8190 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8191 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8192 (match_operator:SI 2 "shift_operator"
8193 [(match_operand:SI 3 "s_register_operand" "r,r")
8194 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8195 "TARGET_32BIT"
8196 "sub%?\\t%0, %1, %3%S2"
8197 [(set_attr "predicable" "yes")
8198 (set_attr "predicable_short_it" "no")
8199 (set_attr "shift" "3")
8200 (set_attr "arch" "32,a")
8201 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8202
8203 (define_insn "*sub_shiftsi_compare0"
8204 [(set (reg:CC_NOOV CC_REGNUM)
8205 (compare:CC_NOOV
8206 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8207 (match_operator:SI 2 "shift_operator"
8208 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8209 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8210 (const_int 0)))
8211 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8212 (minus:SI (match_dup 1)
8213 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8214 "TARGET_32BIT"
8215 "subs%?\\t%0, %1, %3%S2"
8216 [(set_attr "conds" "set")
8217 (set_attr "shift" "3")
8218 (set_attr "arch" "32,a,a")
8219 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
8220
8221 (define_insn "*sub_shiftsi_compare0_scratch"
8222 [(set (reg:CC_NOOV CC_REGNUM)
8223 (compare:CC_NOOV
8224 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8225 (match_operator:SI 2 "shift_operator"
8226 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8227 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8228 (const_int 0)))
8229 (clobber (match_scratch:SI 0 "=r,r,r"))]
8230 "TARGET_32BIT"
8231 "subs%?\\t%0, %1, %3%S2"
8232 [(set_attr "conds" "set")
8233 (set_attr "shift" "3")
8234 (set_attr "arch" "32,a,a")
8235 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
8236 \f
8237
8238 (define_insn_and_split "*and_scc"
8239 [(set (match_operand:SI 0 "s_register_operand" "=r")
8240 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8241 [(match_operand 2 "cc_register" "") (const_int 0)])
8242 (match_operand:SI 3 "s_register_operand" "r")))]
8243 "TARGET_ARM"
8244 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
8245 "&& reload_completed"
8246 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
8247 (cond_exec (match_dup 4) (set (match_dup 0)
8248 (and:SI (match_dup 3) (const_int 1))))]
8249 {
8250 machine_mode mode = GET_MODE (operands[2]);
8251 enum rtx_code rc = GET_CODE (operands[1]);
8252
8253 /* Note that operands[4] is the same as operands[1],
8254 but with VOIDmode as the result. */
8255 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8256 if (mode == CCFPmode || mode == CCFPEmode)
8257 rc = reverse_condition_maybe_unordered (rc);
8258 else
8259 rc = reverse_condition (rc);
8260 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8261 }
8262 [(set_attr "conds" "use")
8263 (set_attr "type" "multiple")
8264 (set_attr "length" "8")]
8265 )
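
;; Illustrative only: an expression such as
;;   int f (int a, int b, int c) { return (a == b) & c; }
;; can be combined into *and_scc; after the split it becomes the
;; conditional pair shown in the commented template, roughly
;;   cmp   r0, r1
;;   movne r0, #0
;;   andeq r0, r2, #1
;; with the compare itself emitted by an earlier insn.  The register
;; allocation shown is an assumption.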
8266
8267 (define_insn_and_split "*ior_scc"
8268 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8269 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
8270 [(match_operand 2 "cc_register" "") (const_int 0)])
8271 (match_operand:SI 3 "s_register_operand" "0,?r")))]
8272 "TARGET_ARM"
8273 "@
8274 orr%d1\\t%0, %3, #1
8275 #"
8276 "&& reload_completed
8277 && REGNO (operands [0]) != REGNO (operands[3])"
8278 ;; && which_alternative == 1
8279 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
8280 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
8281 (cond_exec (match_dup 4) (set (match_dup 0)
8282 (ior:SI (match_dup 3) (const_int 1))))]
8283 {
8284 machine_mode mode = GET_MODE (operands[2]);
8285 enum rtx_code rc = GET_CODE (operands[1]);
8286
8287 /* Note that operands[4] is the same as operands[1],
8288 but with VOIDmode as the result. */
8289 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8290 if (mode == CCFPmode || mode == CCFPEmode)
8291 rc = reverse_condition_maybe_unordered (rc);
8292 else
8293 rc = reverse_condition (rc);
8294 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8295 }
8296 [(set_attr "conds" "use")
8297 (set_attr "length" "4,8")
8298 (set_attr "type" "logic_imm,multiple")]
8299 )
8300
8301 ; A series of splitters for the compare_scc pattern below. Note that
8302 ; order is important.
8303 (define_split
8304 [(set (match_operand:SI 0 "s_register_operand" "")
8305 (lt:SI (match_operand:SI 1 "s_register_operand" "")
8306 (const_int 0)))
8307 (clobber (reg:CC CC_REGNUM))]
8308 "TARGET_32BIT && reload_completed"
8309 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
8310
8311 (define_split
8312 [(set (match_operand:SI 0 "s_register_operand" "")
8313 (ge:SI (match_operand:SI 1 "s_register_operand" "")
8314 (const_int 0)))
8315 (clobber (reg:CC CC_REGNUM))]
8316 "TARGET_32BIT && reload_completed"
8317 [(set (match_dup 0) (not:SI (match_dup 1)))
8318 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
8319
8320 (define_split
8321 [(set (match_operand:SI 0 "s_register_operand" "")
8322 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8323 (const_int 0)))
8324 (clobber (reg:CC CC_REGNUM))]
8325 "arm_arch5t && TARGET_32BIT"
8326 [(set (match_dup 0) (clz:SI (match_dup 1)))
8327 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8328 )
8329
8330 (define_split
8331 [(set (match_operand:SI 0 "s_register_operand" "")
8332 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8333 (const_int 0)))
8334 (clobber (reg:CC CC_REGNUM))]
8335 "TARGET_32BIT && reload_completed"
8336 [(parallel
8337 [(set (reg:CC CC_REGNUM)
8338 (compare:CC (const_int 1) (match_dup 1)))
8339 (set (match_dup 0)
8340 (minus:SI (const_int 1) (match_dup 1)))])
8341 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
8342 (set (match_dup 0) (const_int 0)))])
8343
8344 (define_split
8345 [(set (match_operand:SI 0 "s_register_operand" "")
8346 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8347 (match_operand:SI 2 "const_int_operand" "")))
8348 (clobber (reg:CC CC_REGNUM))]
8349 "TARGET_32BIT && reload_completed"
8350 [(parallel
8351 [(set (reg:CC CC_REGNUM)
8352 (compare:CC (match_dup 1) (match_dup 2)))
8353 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
8354 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
8355 (set (match_dup 0) (const_int 1)))]
8356 {
8357 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
8358 })
8359
8360 (define_split
8361 [(set (match_operand:SI 0 "s_register_operand" "")
8362 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8363 (match_operand:SI 2 "arm_add_operand" "")))
8364 (clobber (reg:CC CC_REGNUM))]
8365 "TARGET_32BIT && reload_completed"
8366 [(parallel
8367 [(set (reg:CC_NOOV CC_REGNUM)
8368 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
8369 (const_int 0)))
8370 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
8371 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
8372 (set (match_dup 0) (const_int 1)))])
8373
8374 (define_insn_and_split "*compare_scc"
8375 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8376 (match_operator:SI 1 "arm_comparison_operator"
8377 [(match_operand:SI 2 "s_register_operand" "r,r")
8378 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8379 (clobber (reg:CC CC_REGNUM))]
8380 "TARGET_32BIT"
8381 "#"
8382 "&& reload_completed"
8383 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
8384 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
8385 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
8386 {
8387 rtx tmp1;
8388 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
8389 operands[2], operands[3]);
8390 enum rtx_code rc = GET_CODE (operands[1]);
8391
8392 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
8393
8394 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8395 if (mode == CCFPmode || mode == CCFPEmode)
8396 rc = reverse_condition_maybe_unordered (rc);
8397 else
8398 rc = reverse_condition (rc);
8399 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8400 }
8401 [(set_attr "type" "multiple")]
8402 )
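
;; Illustrative only: a plain comparison result such as
;;   int f (int a, int b) { return a > b; }
;; matches *compare_scc and, after reload, splits into
;;   cmp   r0, r1
;;   movle r0, #0
;;   movgt r0, #1
;; unless one of the peephole2 patterns below can rewrite it into a
;; branch-free sequence.  The register allocation shown is an
;; assumption.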
8403
8404 ;; Attempt to improve the sequence generated by the compare_scc splitters
8405 ;; so that it does not use conditional execution.
8406
8407 ;; Rd = (eq (reg1) (const_int 0)) // ARMv5
8408 ;; clz Rd, reg1
8409 ;; lsr Rd, Rd, #5
8410 (define_peephole2
8411 [(set (reg:CC CC_REGNUM)
8412 (compare:CC (match_operand:SI 1 "register_operand" "")
8413 (const_int 0)))
8414 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8415 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8416 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8417 (set (match_dup 0) (const_int 1)))]
8418 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8419 [(set (match_dup 0) (clz:SI (match_dup 1)))
8420 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8421 )
8422
8423 ;; Rd = (eq (reg1) (const_int 0)) // !ARMv5
8424 ;; negs Rd, reg1
8425 ;; adc Rd, Rd, reg1
8426 (define_peephole2
8427 [(set (reg:CC CC_REGNUM)
8428 (compare:CC (match_operand:SI 1 "register_operand" "")
8429 (const_int 0)))
8430 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8431 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8432 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8433 (set (match_dup 0) (const_int 1)))
8434 (match_scratch:SI 2 "r")]
8435 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8436 [(parallel
8437 [(set (reg:CC CC_REGNUM)
8438 (compare:CC (const_int 0) (match_dup 1)))
8439 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
8440 (set (match_dup 0)
8441 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
8442 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
8443 )
8444
8445 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
8446 ;; sub Rd, Reg1, reg2
8447 ;; clz Rd, Rd
8448 ;; lsr Rd, Rd, #5
8449 (define_peephole2
8450 [(set (reg:CC CC_REGNUM)
8451 (compare:CC (match_operand:SI 1 "register_operand" "")
8452 (match_operand:SI 2 "arm_rhs_operand" "")))
8453 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8454 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8455 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8456 (set (match_dup 0) (const_int 1)))]
8457 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
8458 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
8459 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
8460 (set (match_dup 0) (clz:SI (match_dup 0)))
8461 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8462 )
8463
8464
8465 ;; Rd = (eq (reg1) (reg2)) // ! ARMv5 or optimising for size.
8466 ;; sub T1, Reg1, reg2
8467 ;; negs Rd, T1
8468 ;; adc Rd, Rd, T1
8469 (define_peephole2
8470 [(set (reg:CC CC_REGNUM)
8471 (compare:CC (match_operand:SI 1 "register_operand" "")
8472 (match_operand:SI 2 "arm_rhs_operand" "")))
8473 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8474 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8475 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8476 (set (match_dup 0) (const_int 1)))
8477 (match_scratch:SI 3 "r")]
8478 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8479 [(set (match_dup 3) (match_dup 4))
8480 (parallel
8481 [(set (reg:CC CC_REGNUM)
8482 (compare:CC (const_int 0) (match_dup 3)))
8483 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
8484 (set (match_dup 0)
8485 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
8486 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
8487 "
8488 if (CONST_INT_P (operands[2]))
8489 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
8490 else
8491 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
8492 ")
8493
8494 (define_insn "*cond_move"
8495 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8496 (if_then_else:SI (match_operator 3 "equality_operator"
8497 [(match_operator 4 "arm_comparison_operator"
8498 [(match_operand 5 "cc_register" "") (const_int 0)])
8499 (const_int 0)])
8500 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
8501 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
8502 "TARGET_ARM"
8503 "*
8504 if (GET_CODE (operands[3]) == NE)
8505 {
8506 if (which_alternative != 1)
8507 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
8508 if (which_alternative != 0)
8509 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
8510 return \"\";
8511 }
8512 if (which_alternative != 0)
8513 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8514 if (which_alternative != 1)
8515 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
8516 return \"\";
8517 "
8518 [(set_attr "conds" "use")
8519 (set_attr_alternative "type"
8520 [(if_then_else (match_operand 2 "const_int_operand" "")
8521 (const_string "mov_imm")
8522 (const_string "mov_reg"))
8523 (if_then_else (match_operand 1 "const_int_operand" "")
8524 (const_string "mov_imm")
8525 (const_string "mov_reg"))
8526 (const_string "multiple")])
8527 (set_attr "length" "4,4,8")]
8528 )
8529
8530 (define_insn "*cond_arith"
8531 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8532 (match_operator:SI 5 "shiftable_operator"
8533 [(match_operator:SI 4 "arm_comparison_operator"
8534 [(match_operand:SI 2 "s_register_operand" "r,r")
8535 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
8536 (match_operand:SI 1 "s_register_operand" "0,?r")]))
8537 (clobber (reg:CC CC_REGNUM))]
8538 "TARGET_ARM"
8539 "*
8540 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
8541 return \"%i5\\t%0, %1, %2, lsr #31\";
8542
8543 output_asm_insn (\"cmp\\t%2, %3\", operands);
8544 if (GET_CODE (operands[5]) == AND)
8545 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
8546 else if (GET_CODE (operands[5]) == MINUS)
8547 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
8548 else if (which_alternative != 0)
8549 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8550 return \"%i5%d4\\t%0, %1, #1\";
8551 "
8552 [(set_attr "conds" "clob")
8553 (set_attr "length" "12")
8554 (set_attr "type" "multiple")]
8555 )
8556
8557 (define_insn "*cond_sub"
8558 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8559 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
8560 (match_operator:SI 4 "arm_comparison_operator"
8561 [(match_operand:SI 2 "s_register_operand" "r,r")
8562 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
8563 (clobber (reg:CC CC_REGNUM))]
8564 "TARGET_ARM"
8565 "*
8566 output_asm_insn (\"cmp\\t%2, %3\", operands);
8567 if (which_alternative != 0)
8568 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8569 return \"sub%d4\\t%0, %1, #1\";
8570 "
8571 [(set_attr "conds" "clob")
8572 (set_attr "length" "8,12")
8573 (set_attr "type" "multiple")]
8574 )
8575
8576 (define_insn "*cmp_ite0"
8577 [(set (match_operand 6 "dominant_cc_register" "")
8578 (compare
8579 (if_then_else:SI
8580 (match_operator 4 "arm_comparison_operator"
8581 [(match_operand:SI 0 "s_register_operand"
8582 "l,l,l,r,r,r,r,r,r")
8583 (match_operand:SI 1 "arm_add_operand"
8584 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
8585 (match_operator:SI 5 "arm_comparison_operator"
8586 [(match_operand:SI 2 "s_register_operand"
8587 "l,r,r,l,l,r,r,r,r")
8588 (match_operand:SI 3 "arm_add_operand"
8589 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
8590 (const_int 0))
8591 (const_int 0)))]
8592 "TARGET_32BIT"
8593 "*
8594 {
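  /* A brief sketch of this output function: cmp1[] holds the second,
     conditional, comparison and cmp2[] the first, unconditional, one;
     cmp_idx[] maps which_alternative onto the CMP/CMN combination implied
     by the operand constraints (an L constant is handled by CMN of its
     negation).  The variable swap picks which comparison is emitted
     unconditionally and which under the other's condition, based on
     comparison dominance; the IT instruction is only needed for Thumb-2.  */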
8595 static const char * const cmp1[NUM_OF_COND_CMP][2] =
8596 {
8597 {\"cmp%d5\\t%0, %1\",
8598 \"cmp%d4\\t%2, %3\"},
8599 {\"cmn%d5\\t%0, #%n1\",
8600 \"cmp%d4\\t%2, %3\"},
8601 {\"cmp%d5\\t%0, %1\",
8602 \"cmn%d4\\t%2, #%n3\"},
8603 {\"cmn%d5\\t%0, #%n1\",
8604 \"cmn%d4\\t%2, #%n3\"}
8605 };
8606 static const char * const cmp2[NUM_OF_COND_CMP][2] =
8607 {
8608 {\"cmp\\t%2, %3\",
8609 \"cmp\\t%0, %1\"},
8610 {\"cmp\\t%2, %3\",
8611 \"cmn\\t%0, #%n1\"},
8612 {\"cmn\\t%2, #%n3\",
8613 \"cmp\\t%0, %1\"},
8614 {\"cmn\\t%2, #%n3\",
8615 \"cmn\\t%0, #%n1\"}
8616 };
8617 static const char * const ite[2] =
8618 {
8619 \"it\\t%d5\",
8620 \"it\\t%d4\"
8621 };
8622 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
8623 CMP_CMP, CMN_CMP, CMP_CMP,
8624 CMN_CMP, CMP_CMN, CMN_CMN};
8625 int swap =
8626 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8627
8628 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8629 if (TARGET_THUMB2) {
8630 output_asm_insn (ite[swap], operands);
8631 }
8632 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8633 return \"\";
8634 }"
8635 [(set_attr "conds" "set")
8636 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
8637 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
8638 (set_attr "type" "multiple")
8639 (set_attr_alternative "length"
8640 [(const_int 6)
8641 (const_int 8)
8642 (const_int 8)
8643 (const_int 8)
8644 (const_int 8)
8645 (if_then_else (eq_attr "is_thumb" "no")
8646 (const_int 8)
8647 (const_int 10))
8648 (if_then_else (eq_attr "is_thumb" "no")
8649 (const_int 8)
8650 (const_int 10))
8651 (if_then_else (eq_attr "is_thumb" "no")
8652 (const_int 8)
8653 (const_int 10))
8654 (if_then_else (eq_attr "is_thumb" "no")
8655 (const_int 8)
8656 (const_int 10))])]
8657 )
8658
8659 (define_insn "*cmp_ite1"
8660 [(set (match_operand 6 "dominant_cc_register" "")
8661 (compare
8662 (if_then_else:SI
8663 (match_operator 4 "arm_comparison_operator"
8664 [(match_operand:SI 0 "s_register_operand"
8665 "l,l,l,r,r,r,r,r,r")
8666 (match_operand:SI 1 "arm_add_operand"
8667 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
8668 (match_operator:SI 5 "arm_comparison_operator"
8669 [(match_operand:SI 2 "s_register_operand"
8670 "l,r,r,l,l,r,r,r,r")
8671 (match_operand:SI 3 "arm_add_operand"
8672 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
8673 (const_int 1))
8674 (const_int 0)))]
8675 "TARGET_32BIT"
8676 "*
8677 {
8678 static const char * const cmp1[NUM_OF_COND_CMP][2] =
8679 {
8680 {\"cmp\\t%0, %1\",
8681 \"cmp\\t%2, %3\"},
8682 {\"cmn\\t%0, #%n1\",
8683 \"cmp\\t%2, %3\"},
8684 {\"cmp\\t%0, %1\",
8685 \"cmn\\t%2, #%n3\"},
8686 {\"cmn\\t%0, #%n1\",
8687 \"cmn\\t%2, #%n3\"}
8688 };
8689 static const char * const cmp2[NUM_OF_COND_CMP][2] =
8690 {
8691 {\"cmp%d4\\t%2, %3\",
8692 \"cmp%D5\\t%0, %1\"},
8693 {\"cmp%d4\\t%2, %3\",
8694 \"cmn%D5\\t%0, #%n1\"},
8695 {\"cmn%d4\\t%2, #%n3\",
8696 \"cmp%D5\\t%0, %1\"},
8697 {\"cmn%d4\\t%2, #%n3\",
8698 \"cmn%D5\\t%0, #%n1\"}
8699 };
8700 static const char * const ite[2] =
8701 {
8702 \"it\\t%d4\",
8703 \"it\\t%D5\"
8704 };
8705 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
8706 CMP_CMP, CMN_CMP, CMP_CMP,
8707 CMN_CMP, CMP_CMN, CMN_CMN};
8708 int swap =
8709 comparison_dominates_p (GET_CODE (operands[5]),
8710 reverse_condition (GET_CODE (operands[4])));
8711
8712 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8713 if (TARGET_THUMB2) {
8714 output_asm_insn (ite[swap], operands);
8715 }
8716 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8717 return \"\";
8718 }"
8719 [(set_attr "conds" "set")
8720 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
8721 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
8722 (set_attr_alternative "length"
8723 [(const_int 6)
8724 (const_int 8)
8725 (const_int 8)
8726 (const_int 8)
8727 (const_int 8)
8728 (if_then_else (eq_attr "is_thumb" "no")
8729 (const_int 8)
8730 (const_int 10))
8731 (if_then_else (eq_attr "is_thumb" "no")
8732 (const_int 8)
8733 (const_int 10))
8734 (if_then_else (eq_attr "is_thumb" "no")
8735 (const_int 8)
8736 (const_int 10))
8737 (if_then_else (eq_attr "is_thumb" "no")
8738 (const_int 8)
8739 (const_int 10))])
8740 (set_attr "type" "multiple")]
8741 )
8742
8743 (define_insn "*cmp_and"
8744 [(set (match_operand 6 "dominant_cc_register" "")
8745 (compare
8746 (and:SI
8747 (match_operator 4 "arm_comparison_operator"
8748 [(match_operand:SI 0 "s_register_operand"
8749 "l,l,l,r,r,r,r,r,r,r")
8750 (match_operand:SI 1 "arm_add_operand"
8751 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
8752 (match_operator:SI 5 "arm_comparison_operator"
8753 [(match_operand:SI 2 "s_register_operand"
8754 "l,r,r,l,l,r,r,r,r,r")
8755 (match_operand:SI 3 "arm_add_operand"
8756 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
8757 (const_int 0)))]
8758 "TARGET_32BIT"
8759 "*
8760 {
8761 static const char *const cmp1[NUM_OF_COND_CMP][2] =
8762 {
8763 {\"cmp%d5\\t%0, %1\",
8764 \"cmp%d4\\t%2, %3\"},
8765 {\"cmn%d5\\t%0, #%n1\",
8766 \"cmp%d4\\t%2, %3\"},
8767 {\"cmp%d5\\t%0, %1\",
8768 \"cmn%d4\\t%2, #%n3\"},
8769 {\"cmn%d5\\t%0, #%n1\",
8770 \"cmn%d4\\t%2, #%n3\"}
8771 };
8772 static const char *const cmp2[NUM_OF_COND_CMP][2] =
8773 {
8774 {\"cmp\\t%2, %3\",
8775 \"cmp\\t%0, %1\"},
8776 {\"cmp\\t%2, %3\",
8777 \"cmn\\t%0, #%n1\"},
8778 {\"cmn\\t%2, #%n3\",
8779 \"cmp\\t%0, %1\"},
8780 {\"cmn\\t%2, #%n3\",
8781 \"cmn\\t%0, #%n1\"}
8782 };
8783 static const char *const ite[2] =
8784 {
8785 \"it\\t%d5\",
8786 \"it\\t%d4\"
8787 };
8788 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
8789 CMP_CMP, CMN_CMP, CMP_CMP,
8790 CMP_CMP, CMN_CMP, CMP_CMN,
8791 CMN_CMN};
8792 int swap =
8793 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8794
8795 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8796 if (TARGET_THUMB2) {
8797 output_asm_insn (ite[swap], operands);
8798 }
8799 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8800 return \"\";
8801 }"
8802 [(set_attr "conds" "set")
8803 (set_attr "predicable" "no")
8804 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
8805 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
8806 (set_attr_alternative "length"
8807 [(const_int 6)
8808 (const_int 8)
8809 (const_int 8)
8810 (const_int 8)
8811 (const_int 8)
8812 (const_int 6)
8813 (if_then_else (eq_attr "is_thumb" "no")
8814 (const_int 8)
8815 (const_int 10))
8816 (if_then_else (eq_attr "is_thumb" "no")
8817 (const_int 8)
8818 (const_int 10))
8819 (if_then_else (eq_attr "is_thumb" "no")
8820 (const_int 8)
8821 (const_int 10))
8822 (if_then_else (eq_attr "is_thumb" "no")
8823 (const_int 8)
8824 (const_int 10))])
8825 (set_attr "type" "multiple")]
8826 )
8827
8828 (define_insn "*cmp_ior"
8829 [(set (match_operand 6 "dominant_cc_register" "")
8830 (compare
8831 (ior:SI
8832 (match_operator 4 "arm_comparison_operator"
8833 [(match_operand:SI 0 "s_register_operand"
8834 "l,l,l,r,r,r,r,r,r,r")
8835 (match_operand:SI 1 "arm_add_operand"
8836 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
8837 (match_operator:SI 5 "arm_comparison_operator"
8838 [(match_operand:SI 2 "s_register_operand"
8839 "l,r,r,l,l,r,r,r,r,r")
8840 (match_operand:SI 3 "arm_add_operand"
8841 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
8842 (const_int 0)))]
8843 "TARGET_32BIT"
8844 "*
8845 {
8846 static const char *const cmp1[NUM_OF_COND_CMP][2] =
8847 {
8848 {\"cmp\\t%0, %1\",
8849 \"cmp\\t%2, %3\"},
8850 {\"cmn\\t%0, #%n1\",
8851 \"cmp\\t%2, %3\"},
8852 {\"cmp\\t%0, %1\",
8853 \"cmn\\t%2, #%n3\"},
8854 {\"cmn\\t%0, #%n1\",
8855 \"cmn\\t%2, #%n3\"}
8856 };
8857 static const char *const cmp2[NUM_OF_COND_CMP][2] =
8858 {
8859 {\"cmp%D4\\t%2, %3\",
8860 \"cmp%D5\\t%0, %1\"},
8861 {\"cmp%D4\\t%2, %3\",
8862 \"cmn%D5\\t%0, #%n1\"},
8863 {\"cmn%D4\\t%2, #%n3\",
8864 \"cmp%D5\\t%0, %1\"},
8865 {\"cmn%D4\\t%2, #%n3\",
8866 \"cmn%D5\\t%0, #%n1\"}
8867 };
8868 static const char *const ite[2] =
8869 {
8870 \"it\\t%D4\",
8871 \"it\\t%D5\"
8872 };
8873 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
8874 CMP_CMP, CMN_CMP, CMP_CMP,
8875 CMP_CMP, CMN_CMP, CMP_CMN,
8876 CMN_CMN};
8877 int swap =
8878 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8879
8880 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8881 if (TARGET_THUMB2) {
8882 output_asm_insn (ite[swap], operands);
8883 }
8884 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8885 return \"\";
8886 }
8887 "
8888 [(set_attr "conds" "set")
8889 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
8890 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
8891 (set_attr_alternative "length"
8892 [(const_int 6)
8893 (const_int 8)
8894 (const_int 8)
8895 (const_int 8)
8896 (const_int 8)
8897 (const_int 6)
8898 (if_then_else (eq_attr "is_thumb" "no")
8899 (const_int 8)
8900 (const_int 10))
8901 (if_then_else (eq_attr "is_thumb" "no")
8902 (const_int 8)
8903 (const_int 10))
8904 (if_then_else (eq_attr "is_thumb" "no")
8905 (const_int 8)
8906 (const_int 10))
8907 (if_then_else (eq_attr "is_thumb" "no")
8908 (const_int 8)
8909 (const_int 10))])
8910 (set_attr "type" "multiple")]
8911 )
8912
8913 (define_insn_and_split "*ior_scc_scc"
8914 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8915 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
8916 [(match_operand:SI 1 "s_register_operand" "l,r")
8917 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
8918 (match_operator:SI 6 "arm_comparison_operator"
8919 [(match_operand:SI 4 "s_register_operand" "l,r")
8920 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
8921 (clobber (reg:CC CC_REGNUM))]
8922 "TARGET_32BIT
8923 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
8924 != CCmode)"
8925 "#"
8926 "TARGET_32BIT && reload_completed"
8927 [(set (match_dup 7)
8928 (compare
8929 (ior:SI
8930 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8931 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8932 (const_int 0)))
8933 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
8934 "operands[7]
8935 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
8936 DOM_CC_X_OR_Y),
8937 CC_REGNUM);"
8938 [(set_attr "conds" "clob")
8939 (set_attr "enabled_for_short_it" "yes,no")
8940 (set_attr "length" "16")
8941 (set_attr "type" "multiple")]
8942 )
8943
8944 ; If the above pattern is followed by a CMP insn, then the compare is
8945 ; redundant, since we can rework the conditional instruction that follows.
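; As a hypothetical illustration: for t = (a == 0) | (b == 0); if (t) ...
; the flags left by the dominance-compare sequence already encode whether
; t is non-zero, so the later compare of t against zero is absorbed by the
; pattern below and the conditional branch is reworked to use the combined
; compare directly.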
8946 (define_insn_and_split "*ior_scc_scc_cmp"
8947 [(set (match_operand 0 "dominant_cc_register" "")
8948 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
8949 [(match_operand:SI 1 "s_register_operand" "l,r")
8950 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
8951 (match_operator:SI 6 "arm_comparison_operator"
8952 [(match_operand:SI 4 "s_register_operand" "l,r")
8953 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
8954 (const_int 0)))
8955 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
8956 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8957 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
8958 "TARGET_32BIT"
8959 "#"
8960 "TARGET_32BIT && reload_completed"
8961 [(set (match_dup 0)
8962 (compare
8963 (ior:SI
8964 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8965 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8966 (const_int 0)))
8967 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
8968 ""
8969 [(set_attr "conds" "set")
8970 (set_attr "enabled_for_short_it" "yes,no")
8971 (set_attr "length" "16")
8972 (set_attr "type" "multiple")]
8973 )
8974
8975 (define_insn_and_split "*and_scc_scc"
8976 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8977 (and:SI (match_operator:SI 3 "arm_comparison_operator"
8978 [(match_operand:SI 1 "s_register_operand" "l,r")
8979 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
8980 (match_operator:SI 6 "arm_comparison_operator"
8981 [(match_operand:SI 4 "s_register_operand" "l,r")
8982 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
8983 (clobber (reg:CC CC_REGNUM))]
8984 "TARGET_32BIT
8985 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
8986 != CCmode)"
8987 "#"
8988 "TARGET_32BIT && reload_completed
8989 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
8990 != CCmode)"
8991 [(set (match_dup 7)
8992 (compare
8993 (and:SI
8994 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8995 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8996 (const_int 0)))
8997 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
8998 "operands[7]
8999 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9000 DOM_CC_X_AND_Y),
9001 CC_REGNUM);"
9002 [(set_attr "conds" "clob")
9003 (set_attr "enabled_for_short_it" "yes,no")
9004 (set_attr "length" "16")
9005 (set_attr "type" "multiple")]
9006 )
9007
9008 ; If the above pattern is followed by a CMP insn, then the compare is
9009 ; redundant, since we can rework the conditional instruction that follows.
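; (Hypothetical example of the AND form: t = (a == 0) & (b == 0); if (t) ...
; where the compare of t against zero is likewise absorbed into the
; dominance compare below.)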
9010 (define_insn_and_split "*and_scc_scc_cmp"
9011 [(set (match_operand 0 "dominant_cc_register" "")
9012 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9013 [(match_operand:SI 1 "s_register_operand" "l,r")
9014 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9015 (match_operator:SI 6 "arm_comparison_operator"
9016 [(match_operand:SI 4 "s_register_operand" "l,r")
9017 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9018 (const_int 0)))
9019 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9020 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9021 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9022 "TARGET_32BIT"
9023 "#"
9024 "TARGET_32BIT && reload_completed"
9025 [(set (match_dup 0)
9026 (compare
9027 (and:SI
9028 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9029 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9030 (const_int 0)))
9031 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9032 ""
9033 [(set_attr "conds" "set")
9034 (set_attr "enabled_for_short_it" "yes,no")
9035 (set_attr "length" "16")
9036 (set_attr "type" "multiple")]
9037 )
9038
9039 ;; If there is no dominance in the comparison, then we can still save an
9040 ;; instruction in the AND case, since we know that the second compare
9041 ;; need only zero the value if false (if true, then the value is already
9042 ;; correct).
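;; As an illustrative sketch only: for t = (a > b) & (c < d), where the two
;; condition codes share no dominance CC mode, the split below computes the
;; scc value of the first comparison into the destination, performs the
;; second comparison for real, and conditionally clears the destination when
;; that second condition is false, rather than materialising both scc values
;; and ANDing them.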
9043 (define_insn_and_split "*and_scc_scc_nodom"
9044 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
9045 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9046 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9047 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9048 (match_operator:SI 6 "arm_comparison_operator"
9049 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9050 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9051 (clobber (reg:CC CC_REGNUM))]
9052 "TARGET_32BIT
9053 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9054 == CCmode)"
9055 "#"
9056 "TARGET_32BIT && reload_completed"
9057 [(parallel [(set (match_dup 0)
9058 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9059 (clobber (reg:CC CC_REGNUM))])
9060 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9061 (set (match_dup 0)
9062 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9063 (match_dup 0)
9064 (const_int 0)))]
9065 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9066 operands[4], operands[5]),
9067 CC_REGNUM);
9068 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9069 operands[5]);"
9070 [(set_attr "conds" "clob")
9071 (set_attr "length" "20")
9072 (set_attr "type" "multiple")]
9073 )
9074
9075 (define_split
9076 [(set (reg:CC_NOOV CC_REGNUM)
9077 (compare:CC_NOOV (ior:SI
9078 (and:SI (match_operand:SI 0 "s_register_operand" "")
9079 (const_int 1))
9080 (match_operator:SI 1 "arm_comparison_operator"
9081 [(match_operand:SI 2 "s_register_operand" "")
9082 (match_operand:SI 3 "arm_add_operand" "")]))
9083 (const_int 0)))
9084 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9085 "TARGET_ARM"
9086 [(set (match_dup 4)
9087 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9088 (match_dup 0)))
9089 (set (reg:CC_NOOV CC_REGNUM)
9090 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9091 (const_int 0)))]
9092 "")
9093
9094 (define_split
9095 [(set (reg:CC_NOOV CC_REGNUM)
9096 (compare:CC_NOOV (ior:SI
9097 (match_operator:SI 1 "arm_comparison_operator"
9098 [(match_operand:SI 2 "s_register_operand" "")
9099 (match_operand:SI 3 "arm_add_operand" "")])
9100 (and:SI (match_operand:SI 0 "s_register_operand" "")
9101 (const_int 1)))
9102 (const_int 0)))
9103 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9104 "TARGET_ARM"
9105 [(set (match_dup 4)
9106 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9107 (match_dup 0)))
9108 (set (reg:CC_NOOV CC_REGNUM)
9109 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9110 (const_int 0)))]
9111 "")
9112 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9113
9114 (define_insn_and_split "*negscc"
9115 [(set (match_operand:SI 0 "s_register_operand" "=r")
9116 (neg:SI (match_operator 3 "arm_comparison_operator"
9117 [(match_operand:SI 1 "s_register_operand" "r")
9118 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9119 (clobber (reg:CC CC_REGNUM))]
9120 "TARGET_ARM"
9121 "#"
9122 "&& reload_completed"
9123 [(const_int 0)]
9124 {
9125 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
9126
9127 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9128 {
9129 /* Emit mov\\t%0, %1, asr #31 */
9130 emit_insn (gen_rtx_SET (operands[0],
9131 gen_rtx_ASHIFTRT (SImode,
9132 operands[1],
9133 GEN_INT (31))));
9134 DONE;
9135 }
9136 else if (GET_CODE (operands[3]) == NE)
9137 {
9138 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
9139 if (CONST_INT_P (operands[2]))
9140 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
9141 gen_int_mode (-INTVAL (operands[2]),
9142 SImode)));
9143 else
9144 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
9145
9146 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9147 gen_rtx_NE (SImode,
9148 cc_reg,
9149 const0_rtx),
9150 gen_rtx_SET (operands[0],
9151 GEN_INT (~0))));
9152 DONE;
9153 }
9154 else
9155 {
9156 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
9157 emit_insn (gen_rtx_SET (cc_reg,
9158 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
9159 enum rtx_code rc = GET_CODE (operands[3]);
9160
9161 rc = reverse_condition (rc);
9162 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9163 gen_rtx_fmt_ee (rc,
9164 VOIDmode,
9165 cc_reg,
9166 const0_rtx),
9167 gen_rtx_SET (operands[0], const0_rtx)));
9168 rc = GET_CODE (operands[3]);
9169 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9170 gen_rtx_fmt_ee (rc,
9171 VOIDmode,
9172 cc_reg,
9173 const0_rtx),
9174 gen_rtx_SET (operands[0],
9175 GEN_INT (~0))));
9176 DONE;
9177 }
9178 FAIL;
9179 }
9180 [(set_attr "conds" "clob")
9181 (set_attr "length" "12")
9182 (set_attr "type" "multiple")]
9183 )
9184
9185 (define_insn_and_split "movcond_addsi"
9186 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
9187 (if_then_else:SI
9188 (match_operator 5 "comparison_operator"
9189 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
9190 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
9191 (const_int 0)])
9192 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
9193 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
9194 (clobber (reg:CC CC_REGNUM))]
9195 "TARGET_32BIT"
9196 "#"
9197 "&& reload_completed"
9198 [(set (reg:CC_NOOV CC_REGNUM)
9199 (compare:CC_NOOV
9200 (plus:SI (match_dup 3)
9201 (match_dup 4))
9202 (const_int 0)))
9203 (set (match_dup 0) (match_dup 1))
9204 (cond_exec (match_dup 6)
9205 (set (match_dup 0) (match_dup 2)))]
9206 "
9207 {
9208 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
9209 operands[3], operands[4]);
9210 enum rtx_code rc = GET_CODE (operands[5]);
9211 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
9212 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
9213 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
9214 rc = reverse_condition (rc);
9215 else
9216 std::swap (operands[1], operands[2]);
9217
9218 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
9219 }
9220 "
9221 [(set_attr "conds" "clob")
9222 (set_attr "enabled_for_short_it" "no,yes,yes")
9223 (set_attr "type" "multiple")]
9224 )
9225
9226 (define_insn "movcond"
9227 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9228 (if_then_else:SI
9229 (match_operator 5 "arm_comparison_operator"
9230 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9231 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9232 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9233 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9234 (clobber (reg:CC CC_REGNUM))]
9235 "TARGET_ARM"
9236 "*
9237 if (GET_CODE (operands[5]) == LT
9238 && (operands[4] == const0_rtx))
9239 {
9240 if (which_alternative != 1 && REG_P (operands[1]))
9241 {
9242 if (operands[2] == const0_rtx)
9243 return \"and\\t%0, %1, %3, asr #31\";
9244 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9245 }
9246 else if (which_alternative != 0 && REG_P (operands[2]))
9247 {
9248 if (operands[1] == const0_rtx)
9249 return \"bic\\t%0, %2, %3, asr #31\";
9250 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9251 }
9252 /* The only case that falls through to here is when both ops 1 & 2
9253 are constants. */
9254 }
9255
9256 if (GET_CODE (operands[5]) == GE
9257 && (operands[4] == const0_rtx))
9258 {
9259 if (which_alternative != 1 && REG_P (operands[1]))
9260 {
9261 if (operands[2] == const0_rtx)
9262 return \"bic\\t%0, %1, %3, asr #31\";
9263 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9264 }
9265 else if (which_alternative != 0 && REG_P (operands[2]))
9266 {
9267 if (operands[1] == const0_rtx)
9268 return \"and\\t%0, %2, %3, asr #31\";
9269 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9270 }
9271 /* The only case that falls through to here is when both ops 1 & 2
9272 are constants. */
9273 }
9274 if (CONST_INT_P (operands[4])
9275 && !const_ok_for_arm (INTVAL (operands[4])))
9276 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9277 else
9278 output_asm_insn (\"cmp\\t%3, %4\", operands);
9279 if (which_alternative != 0)
9280 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9281 if (which_alternative != 1)
9282 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9283 return \"\";
9284 "
9285 [(set_attr "conds" "clob")
9286 (set_attr "length" "8,8,12")
9287 (set_attr "type" "multiple")]
9288 )
9289
9290 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9291
9292 (define_insn "*ifcompare_plus_move"
9293 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9294 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9295 [(match_operand:SI 4 "s_register_operand" "r,r")
9296 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9297 (plus:SI
9298 (match_operand:SI 2 "s_register_operand" "r,r")
9299 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9300 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9301 (clobber (reg:CC CC_REGNUM))]
9302 "TARGET_ARM"
9303 "#"
9304 [(set_attr "conds" "clob")
9305 (set_attr "length" "8,12")
9306 (set_attr "type" "multiple")]
9307 )
9308
9309 (define_insn "*if_plus_move"
9310 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9311 (if_then_else:SI
9312 (match_operator 4 "arm_comparison_operator"
9313 [(match_operand 5 "cc_register" "") (const_int 0)])
9314 (plus:SI
9315 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9316 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9317 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9318 "TARGET_ARM"
9319 "@
9320 add%d4\\t%0, %2, %3
9321 sub%d4\\t%0, %2, #%n3
9322 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9323 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9324 [(set_attr "conds" "use")
9325 (set_attr "length" "4,4,8,8")
9326 (set_attr_alternative "type"
9327 [(if_then_else (match_operand 3 "const_int_operand" "")
9328 (const_string "alu_imm" )
9329 (const_string "alu_sreg"))
9330 (const_string "alu_imm")
9331 (const_string "multiple")
9332 (const_string "multiple")])]
9333 )
9334
9335 (define_insn "*ifcompare_move_plus"
9336 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9337 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9338 [(match_operand:SI 4 "s_register_operand" "r,r")
9339 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9340 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9341 (plus:SI
9342 (match_operand:SI 2 "s_register_operand" "r,r")
9343 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9344 (clobber (reg:CC CC_REGNUM))]
9345 "TARGET_ARM"
9346 "#"
9347 [(set_attr "conds" "clob")
9348 (set_attr "length" "8,12")
9349 (set_attr "type" "multiple")]
9350 )
9351
9352 (define_insn "*if_move_plus"
9353 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9354 (if_then_else:SI
9355 (match_operator 4 "arm_comparison_operator"
9356 [(match_operand 5 "cc_register" "") (const_int 0)])
9357 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9358 (plus:SI
9359 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9360 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9361 "TARGET_ARM"
9362 "@
9363 add%D4\\t%0, %2, %3
9364 sub%D4\\t%0, %2, #%n3
9365 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9366 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9367 [(set_attr "conds" "use")
9368 (set_attr "length" "4,4,8,8")
9369 (set_attr_alternative "type"
9370 [(if_then_else (match_operand 3 "const_int_operand" "")
9371 (const_string "alu_imm" )
9372 (const_string "alu_sreg"))
9373 (const_string "alu_imm")
9374 (const_string "multiple")
9375 (const_string "multiple")])]
9376 )
9377
9378 (define_insn "*ifcompare_arith_arith"
9379 [(set (match_operand:SI 0 "s_register_operand" "=r")
9380 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9381 [(match_operand:SI 5 "s_register_operand" "r")
9382 (match_operand:SI 6 "arm_add_operand" "rIL")])
9383 (match_operator:SI 8 "shiftable_operator"
9384 [(match_operand:SI 1 "s_register_operand" "r")
9385 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9386 (match_operator:SI 7 "shiftable_operator"
9387 [(match_operand:SI 3 "s_register_operand" "r")
9388 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9389 (clobber (reg:CC CC_REGNUM))]
9390 "TARGET_ARM"
9391 "#"
9392 [(set_attr "conds" "clob")
9393 (set_attr "length" "12")
9394 (set_attr "type" "multiple")]
9395 )
9396
9397 (define_insn "*if_arith_arith"
9398 [(set (match_operand:SI 0 "s_register_operand" "=r")
9399 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9400 [(match_operand 8 "cc_register" "") (const_int 0)])
9401 (match_operator:SI 6 "shiftable_operator"
9402 [(match_operand:SI 1 "s_register_operand" "r")
9403 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9404 (match_operator:SI 7 "shiftable_operator"
9405 [(match_operand:SI 3 "s_register_operand" "r")
9406 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9407 "TARGET_ARM"
9408 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9409 [(set_attr "conds" "use")
9410 (set_attr "length" "8")
9411 (set_attr "type" "multiple")]
9412 )
9413
9414 (define_insn "*ifcompare_arith_move"
9415 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9416 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9417 [(match_operand:SI 2 "s_register_operand" "r,r")
9418 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9419 (match_operator:SI 7 "shiftable_operator"
9420 [(match_operand:SI 4 "s_register_operand" "r,r")
9421 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9422 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9423 (clobber (reg:CC CC_REGNUM))]
9424 "TARGET_ARM"
9425 "*
9426 /* If we have an operation where (op x 0) is the identity operation and
9427 the conditional operator is LT or GE and we are comparing against zero and
9428 everything is in registers then we can do this in two instructions. */
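   /* For instance (register names purely illustrative):
          r0 = (r2 < 0) ? (r4 + r5) : r4
      can be emitted as
          and  r0, r5, r2, asr #31
          add  r0, r4, r0
      since the masked operand becomes zero exactly when the identity
      (else-arm) result is wanted.  */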
9429 if (operands[3] == const0_rtx
9430 && GET_CODE (operands[7]) != AND
9431 && REG_P (operands[5])
9432 && REG_P (operands[1])
9433 && REGNO (operands[1]) == REGNO (operands[4])
9434 && REGNO (operands[4]) != REGNO (operands[0]))
9435 {
9436 if (GET_CODE (operands[6]) == LT)
9437 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9438 else if (GET_CODE (operands[6]) == GE)
9439 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9440 }
9441 if (CONST_INT_P (operands[3])
9442 && !const_ok_for_arm (INTVAL (operands[3])))
9443 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9444 else
9445 output_asm_insn (\"cmp\\t%2, %3\", operands);
9446 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9447 if (which_alternative != 0)
9448 return \"mov%D6\\t%0, %1\";
9449 return \"\";
9450 "
9451 [(set_attr "conds" "clob")
9452 (set_attr "length" "8,12")
9453 (set_attr "type" "multiple")]
9454 )
9455
9456 (define_insn "*if_arith_move"
9457 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9458 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9459 [(match_operand 6 "cc_register" "") (const_int 0)])
9460 (match_operator:SI 5 "shiftable_operator"
9461 [(match_operand:SI 2 "s_register_operand" "r,r")
9462 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9463 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9464 "TARGET_ARM"
9465 "@
9466 %I5%d4\\t%0, %2, %3
9467 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9468 [(set_attr "conds" "use")
9469 (set_attr "length" "4,8")
9470 (set_attr_alternative "type"
9471 [(if_then_else (match_operand 3 "const_int_operand" "")
9472 (const_string "alu_shift_imm" )
9473 (const_string "alu_shift_reg"))
9474 (const_string "multiple")])]
9475 )
9476
9477 (define_insn "*ifcompare_move_arith"
9478 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9479 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9480 [(match_operand:SI 4 "s_register_operand" "r,r")
9481 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9482 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9483 (match_operator:SI 7 "shiftable_operator"
9484 [(match_operand:SI 2 "s_register_operand" "r,r")
9485 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9486 (clobber (reg:CC CC_REGNUM))]
9487 "TARGET_ARM"
9488 "*
9489 /* If we have an operation where (op x 0) is the identity operation and
9490 the conditional operator is LT or GE and we are comparing against zero and
9491 everything is in registers then we can do this in two instructions. */
9492 if (operands[5] == const0_rtx
9493 && GET_CODE (operands[7]) != AND
9494 && REG_P (operands[3])
9495 && REG_P (operands[1])
9496 && REGNO (operands[1]) == REGNO (operands[2])
9497 && REGNO (operands[2]) != REGNO (operands[0]))
9498 {
9499 if (GET_CODE (operands[6]) == GE)
9500 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9501 else if (GET_CODE (operands[6]) == LT)
9502 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9503 }
9504
9505 if (CONST_INT_P (operands[5])
9506 && !const_ok_for_arm (INTVAL (operands[5])))
9507 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9508 else
9509 output_asm_insn (\"cmp\\t%4, %5\", operands);
9510
9511 if (which_alternative != 0)
9512 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9513 return \"%I7%D6\\t%0, %2, %3\";
9514 "
9515 [(set_attr "conds" "clob")
9516 (set_attr "length" "8,12")
9517 (set_attr "type" "multiple")]
9518 )
9519
9520 (define_insn "*if_move_arith"
9521 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9522 (if_then_else:SI
9523 (match_operator 4 "arm_comparison_operator"
9524 [(match_operand 6 "cc_register" "") (const_int 0)])
9525 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9526 (match_operator:SI 5 "shiftable_operator"
9527 [(match_operand:SI 2 "s_register_operand" "r,r")
9528 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9529 "TARGET_ARM"
9530 "@
9531 %I5%D4\\t%0, %2, %3
9532 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9533 [(set_attr "conds" "use")
9534 (set_attr "length" "4,8")
9535 (set_attr_alternative "type"
9536 [(if_then_else (match_operand 3 "const_int_operand" "")
9537 (const_string "alu_shift_imm" )
9538 (const_string "alu_shift_reg"))
9539 (const_string "multiple")])]
9540 )
9541
9542 (define_insn "*ifcompare_move_not"
9543 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9544 (if_then_else:SI
9545 (match_operator 5 "arm_comparison_operator"
9546 [(match_operand:SI 3 "s_register_operand" "r,r")
9547 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9548 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9549 (not:SI
9550 (match_operand:SI 2 "s_register_operand" "r,r"))))
9551 (clobber (reg:CC CC_REGNUM))]
9552 "TARGET_ARM"
9553 "#"
9554 [(set_attr "conds" "clob")
9555 (set_attr "length" "8,12")
9556 (set_attr "type" "multiple")]
9557 )
9558
9559 (define_insn "*if_move_not"
9560 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9561 (if_then_else:SI
9562 (match_operator 4 "arm_comparison_operator"
9563 [(match_operand 3 "cc_register" "") (const_int 0)])
9564 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9565 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9566 "TARGET_ARM"
9567 "@
9568 mvn%D4\\t%0, %2
9569 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9570 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9571 [(set_attr "conds" "use")
9573 (set_attr "length" "4,8,8")
9574 (set_attr "type" "mvn_reg,multiple,multiple")]
9575 )
9576
9577 (define_insn "*ifcompare_not_move"
9578 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9579 (if_then_else:SI
9580 (match_operator 5 "arm_comparison_operator"
9581 [(match_operand:SI 3 "s_register_operand" "r,r")
9582 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9583 (not:SI
9584 (match_operand:SI 2 "s_register_operand" "r,r"))
9585 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9586 (clobber (reg:CC CC_REGNUM))]
9587 "TARGET_ARM"
9588 "#"
9589 [(set_attr "conds" "clob")
9590 (set_attr "length" "8,12")
9591 (set_attr "type" "multiple")]
9592 )
9593
9594 (define_insn "*if_not_move"
9595 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9596 (if_then_else:SI
9597 (match_operator 4 "arm_comparison_operator"
9598 [(match_operand 3 "cc_register" "") (const_int 0)])
9599 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9600 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9601 "TARGET_ARM"
9602 "@
9603 mvn%d4\\t%0, %2
9604 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9605 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9606 [(set_attr "conds" "use")
9607 (set_attr "type" "mvn_reg,multiple,multiple")
9608 (set_attr "length" "4,8,8")]
9609 )
9610
9611 (define_insn "*ifcompare_shift_move"
9612 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9613 (if_then_else:SI
9614 (match_operator 6 "arm_comparison_operator"
9615 [(match_operand:SI 4 "s_register_operand" "r,r")
9616 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9617 (match_operator:SI 7 "shift_operator"
9618 [(match_operand:SI 2 "s_register_operand" "r,r")
9619 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9620 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9621 (clobber (reg:CC CC_REGNUM))]
9622 "TARGET_ARM"
9623 "#"
9624 [(set_attr "conds" "clob")
9625 (set_attr "length" "8,12")
9626 (set_attr "type" "multiple")]
9627 )
9628
9629 (define_insn "*if_shift_move"
9630 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9631 (if_then_else:SI
9632 (match_operator 5 "arm_comparison_operator"
9633 [(match_operand 6 "cc_register" "") (const_int 0)])
9634 (match_operator:SI 4 "shift_operator"
9635 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9636 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9637 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9638 "TARGET_ARM"
9639 "@
9640 mov%d5\\t%0, %2%S4
9641 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9642 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9643 [(set_attr "conds" "use")
9644 (set_attr "shift" "2")
9645 (set_attr "length" "4,8,8")
9646 (set_attr_alternative "type"
9647 [(if_then_else (match_operand 3 "const_int_operand" "")
9648 (const_string "mov_shift" )
9649 (const_string "mov_shift_reg"))
9650 (const_string "multiple")
9651 (const_string "multiple")])]
9652 )
9653
9654 (define_insn "*ifcompare_move_shift"
9655 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9656 (if_then_else:SI
9657 (match_operator 6 "arm_comparison_operator"
9658 [(match_operand:SI 4 "s_register_operand" "r,r")
9659 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9660 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9661 (match_operator:SI 7 "shift_operator"
9662 [(match_operand:SI 2 "s_register_operand" "r,r")
9663 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9664 (clobber (reg:CC CC_REGNUM))]
9665 "TARGET_ARM"
9666 "#"
9667 [(set_attr "conds" "clob")
9668 (set_attr "length" "8,12")
9669 (set_attr "type" "multiple")]
9670 )
9671
9672 (define_insn "*if_move_shift"
9673 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9674 (if_then_else:SI
9675 (match_operator 5 "arm_comparison_operator"
9676 [(match_operand 6 "cc_register" "") (const_int 0)])
9677 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9678 (match_operator:SI 4 "shift_operator"
9679 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9680 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9681 "TARGET_ARM"
9682 "@
9683 mov%D5\\t%0, %2%S4
9684 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9685 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9686 [(set_attr "conds" "use")
9687 (set_attr "shift" "2")
9688 (set_attr "length" "4,8,8")
9689 (set_attr_alternative "type"
9690 [(if_then_else (match_operand 3 "const_int_operand" "")
9691 (const_string "mov_shift" )
9692 (const_string "mov_shift_reg"))
9693 (const_string "multiple")
9694 (const_string "multiple")])]
9695 )
9696
9697 (define_insn "*ifcompare_shift_shift"
9698 [(set (match_operand:SI 0 "s_register_operand" "=r")
9699 (if_then_else:SI
9700 (match_operator 7 "arm_comparison_operator"
9701 [(match_operand:SI 5 "s_register_operand" "r")
9702 (match_operand:SI 6 "arm_add_operand" "rIL")])
9703 (match_operator:SI 8 "shift_operator"
9704 [(match_operand:SI 1 "s_register_operand" "r")
9705 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9706 (match_operator:SI 9 "shift_operator"
9707 [(match_operand:SI 3 "s_register_operand" "r")
9708 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9709 (clobber (reg:CC CC_REGNUM))]
9710 "TARGET_ARM"
9711 "#"
9712 [(set_attr "conds" "clob")
9713 (set_attr "length" "12")
9714 (set_attr "type" "multiple")]
9715 )
9716
9717 (define_insn "*if_shift_shift"
9718 [(set (match_operand:SI 0 "s_register_operand" "=r")
9719 (if_then_else:SI
9720 (match_operator 5 "arm_comparison_operator"
9721 [(match_operand 8 "cc_register" "") (const_int 0)])
9722 (match_operator:SI 6 "shift_operator"
9723 [(match_operand:SI 1 "s_register_operand" "r")
9724 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9725 (match_operator:SI 7 "shift_operator"
9726 [(match_operand:SI 3 "s_register_operand" "r")
9727 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
9728 "TARGET_ARM"
9729 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
9730 [(set_attr "conds" "use")
9731 (set_attr "shift" "1")
9732 (set_attr "length" "8")
9733 (set (attr "type") (if_then_else
9734 (and (match_operand 2 "const_int_operand" "")
9735 (match_operand 4 "const_int_operand" ""))
9736 (const_string "mov_shift")
9737 (const_string "mov_shift_reg")))]
9738 )
9739
9740 (define_insn "*ifcompare_not_arith"
9741 [(set (match_operand:SI 0 "s_register_operand" "=r")
9742 (if_then_else:SI
9743 (match_operator 6 "arm_comparison_operator"
9744 [(match_operand:SI 4 "s_register_operand" "r")
9745 (match_operand:SI 5 "arm_add_operand" "rIL")])
9746 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9747 (match_operator:SI 7 "shiftable_operator"
9748 [(match_operand:SI 2 "s_register_operand" "r")
9749 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
9750 (clobber (reg:CC CC_REGNUM))]
9751 "TARGET_ARM"
9752 "#"
9753 [(set_attr "conds" "clob")
9754 (set_attr "length" "12")
9755 (set_attr "type" "multiple")]
9756 )
9757
9758 (define_insn "*if_not_arith"
9759 [(set (match_operand:SI 0 "s_register_operand" "=r")
9760 (if_then_else:SI
9761 (match_operator 5 "arm_comparison_operator"
9762 [(match_operand 4 "cc_register" "") (const_int 0)])
9763 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9764 (match_operator:SI 6 "shiftable_operator"
9765 [(match_operand:SI 2 "s_register_operand" "r")
9766 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
9767 "TARGET_ARM"
9768 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
9769 [(set_attr "conds" "use")
9770 (set_attr "type" "mvn_reg")
9771 (set_attr "length" "8")]
9772 )
9773
9774 (define_insn "*ifcompare_arith_not"
9775 [(set (match_operand:SI 0 "s_register_operand" "=r")
9776 (if_then_else:SI
9777 (match_operator 6 "arm_comparison_operator"
9778 [(match_operand:SI 4 "s_register_operand" "r")
9779 (match_operand:SI 5 "arm_add_operand" "rIL")])
9780 (match_operator:SI 7 "shiftable_operator"
9781 [(match_operand:SI 2 "s_register_operand" "r")
9782 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9783 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
9784 (clobber (reg:CC CC_REGNUM))]
9785 "TARGET_ARM"
9786 "#"
9787 [(set_attr "conds" "clob")
9788 (set_attr "length" "12")
9789 (set_attr "type" "multiple")]
9790 )
9791
9792 (define_insn "*if_arith_not"
9793 [(set (match_operand:SI 0 "s_register_operand" "=r")
9794 (if_then_else:SI
9795 (match_operator 5 "arm_comparison_operator"
9796 [(match_operand 4 "cc_register" "") (const_int 0)])
9797 (match_operator:SI 6 "shiftable_operator"
9798 [(match_operand:SI 2 "s_register_operand" "r")
9799 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9800 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
9801 "TARGET_ARM"
9802 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
9803 [(set_attr "conds" "use")
9804 (set_attr "type" "multiple")
9805 (set_attr "length" "8")]
9806 )
9807
9808 (define_insn "*ifcompare_neg_move"
9809 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9810 (if_then_else:SI
9811 (match_operator 5 "arm_comparison_operator"
9812 [(match_operand:SI 3 "s_register_operand" "r,r")
9813 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9814 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
9815 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9816 (clobber (reg:CC CC_REGNUM))]
9817 "TARGET_ARM"
9818 "#"
9819 [(set_attr "conds" "clob")
9820 (set_attr "length" "8,12")
9821 (set_attr "type" "multiple")]
9822 )
9823
9824 (define_insn_and_split "*if_neg_move"
9825 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
9826 (if_then_else:SI
9827 (match_operator 4 "arm_comparison_operator"
9828 [(match_operand 3 "cc_register" "") (const_int 0)])
9829 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
9830 (match_operand:SI 1 "s_register_operand" "0,0")))]
9831 "TARGET_32BIT"
9832 "#"
9833 "&& reload_completed"
9834 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
9835 (set (match_dup 0) (neg:SI (match_dup 2))))]
9836 ""
9837 [(set_attr "conds" "use")
9838 (set_attr "length" "4")
9839 (set_attr "arch" "t2,32")
9840 (set_attr "enabled_for_short_it" "yes,no")
9841 (set_attr "type" "logic_shift_imm")]
9842 )
9843
9844 (define_insn "*ifcompare_move_neg"
9845 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9846 (if_then_else:SI
9847 (match_operator 5 "arm_comparison_operator"
9848 [(match_operand:SI 3 "s_register_operand" "r,r")
9849 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9850 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9851 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
9852 (clobber (reg:CC CC_REGNUM))]
9853 "TARGET_ARM"
9854 "#"
9855 [(set_attr "conds" "clob")
9856 (set_attr "length" "8,12")
9857 (set_attr "type" "multiple")]
9858 )
9859
9860 (define_insn_and_split "*if_move_neg"
9861 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
9862 (if_then_else:SI
9863 (match_operator 4 "arm_comparison_operator"
9864 [(match_operand 3 "cc_register" "") (const_int 0)])
9865 (match_operand:SI 1 "s_register_operand" "0,0")
9866 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
9867 "TARGET_32BIT"
9868 "#"
9869 "&& reload_completed"
9870 [(cond_exec (match_dup 5)
9871 (set (match_dup 0) (neg:SI (match_dup 2))))]
9872 {
9873 machine_mode mode = GET_MODE (operands[3]);
9874 rtx_code rc = GET_CODE (operands[4]);
9875
9876 if (mode == CCFPmode || mode == CCFPEmode)
9877 rc = reverse_condition_maybe_unordered (rc);
9878 else
9879 rc = reverse_condition (rc);
9880
9881 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
9882 }
9883 [(set_attr "conds" "use")
9884 (set_attr "length" "4")
9885 (set_attr "arch" "t2,32")
9886 (set_attr "enabled_for_short_it" "yes,no")
9887 (set_attr "type" "logic_shift_imm")]
9888 )
9889
9890 (define_insn "*arith_adjacentmem"
9891 [(set (match_operand:SI 0 "s_register_operand" "=r")
9892 (match_operator:SI 1 "shiftable_operator"
9893 [(match_operand:SI 2 "memory_operand" "m")
9894 (match_operand:SI 3 "memory_operand" "m")]))
9895 (clobber (match_scratch:SI 4 "=r"))]
9896 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
9897 "*
9898 {
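  /* operands[2] and operands[3] are adjacent memory words (guaranteed by
     the insn condition), so where possible both are loaded with a single
     ldm and the arithmetic operator is then applied.  ldm[] collects the
     ldm operands with the register list in ascending register-number
     order, while arith[] pairs the loaded values back up with operands[2]
     and operands[3] for the final ALU instruction.  */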
9899 rtx ldm[3];
9900 rtx arith[4];
9901 rtx base_reg;
9902 HOST_WIDE_INT val1 = 0, val2 = 0;
9903
9904 if (REGNO (operands[0]) > REGNO (operands[4]))
9905 {
9906 ldm[1] = operands[4];
9907 ldm[2] = operands[0];
9908 }
9909 else
9910 {
9911 ldm[1] = operands[0];
9912 ldm[2] = operands[4];
9913 }
9914
9915 base_reg = XEXP (operands[2], 0);
9916
9917 if (!REG_P (base_reg))
9918 {
9919 val1 = INTVAL (XEXP (base_reg, 1));
9920 base_reg = XEXP (base_reg, 0);
9921 }
9922
9923 if (!REG_P (XEXP (operands[3], 0)))
9924 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
9925
9926 arith[0] = operands[0];
9927 arith[3] = operands[1];
9928
9929 if (val1 < val2)
9930 {
9931 arith[1] = ldm[1];
9932 arith[2] = ldm[2];
9933 }
9934 else
9935 {
9936 arith[1] = ldm[2];
9937 arith[2] = ldm[1];
9938 }
9939
9940 ldm[0] = base_reg;
9941 if (val1 != 0 && val2 != 0)
9942 {
9943 rtx ops[3];
9944
9945 if (val1 == 4 || val2 == 4)
9946 /* Other val must be 8, since we know they are adjacent and neither
9947 is zero. */
9948 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
9949 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
9950 {
9951 ldm[0] = ops[0] = operands[4];
9952 ops[1] = base_reg;
9953 ops[2] = GEN_INT (val1);
9954 output_add_immediate (ops);
9955 if (val1 < val2)
9956 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
9957 else
9958 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
9959 }
9960 else
9961 {
9962 /* Offset is out of range for a single add, so use two ldr. */
9963 ops[0] = ldm[1];
9964 ops[1] = base_reg;
9965 ops[2] = GEN_INT (val1);
9966 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9967 ops[0] = ldm[2];
9968 ops[2] = GEN_INT (val2);
9969 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9970 }
9971 }
9972 else if (val1 != 0)
9973 {
9974 if (val1 < val2)
9975 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
9976 else
9977 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
9978 }
9979 else
9980 {
9981 if (val1 < val2)
9982 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
9983 else
9984 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
9985 }
9986 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
9987 return \"\";
9988 }"
9989 [(set_attr "length" "12")
9990 (set_attr "predicable" "yes")
9991 (set_attr "type" "load_4")]
9992 )
9993
9994 ; This pattern is never tried by combine, so do it as a peephole
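; As a hypothetical illustration (register names arbitrary), a move whose
; source is immediately compared with zero, e.g.
;     mov r0, r1
;     cmp r1, #0
; is merged into a single parallel here so that it can be matched by a
; combined move-and-compare pattern and emitted as one flag-setting move.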
9995
9996 (define_peephole2
9997 [(set (match_operand:SI 0 "arm_general_register_operand" "")
9998 (match_operand:SI 1 "arm_general_register_operand" ""))
9999 (set (reg:CC CC_REGNUM)
10000 (compare:CC (match_dup 1) (const_int 0)))]
10001 "TARGET_ARM"
10002 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10003 (set (match_dup 0) (match_dup 1))])]
10004 ""
10005 )
10006
10007 (define_split
10008 [(set (match_operand:SI 0 "s_register_operand" "")
10009 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10010 (const_int 0))
10011 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10012 [(match_operand:SI 3 "s_register_operand" "")
10013 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10014 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10015 "TARGET_ARM"
10016 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10017 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10018 (match_dup 5)))]
10019 ""
10020 )
10021
10022 ;; This split can be used because CC_Z mode implies that the following
10023 ;; branch will be an equality, or an unsigned inequality, so the sign
10024 ;; extension is not needed.
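;; Illustrative example (constant chosen for exposition): comparing
;; (x << 24) with 0x2a000000, whose low 24 bits are zero, is, for equality
;; and unsigned tests, equivalent to comparing the zero-extended byte x
;; with 0x2a; the split below rewrites the comparison that way after
;; shifting the constant right by 24.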
10025
10026 (define_split
10027 [(set (reg:CC_Z CC_REGNUM)
10028 (compare:CC_Z
10029 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10030 (const_int 24))
10031 (match_operand 1 "const_int_operand" "")))
10032 (clobber (match_scratch:SI 2 ""))]
10033 "TARGET_ARM
10034 && ((UINTVAL (operands[1]))
10035 == ((UINTVAL (operands[1])) >> 24) << 24)"
10036 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10037 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10038 "
10039 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10040 "
10041 )
10042 ;; ??? Check the patterns above for Thumb-2 usefulness
10043
10044 (define_expand "prologue"
10045 [(clobber (const_int 0))]
10046 "TARGET_EITHER"
10047 "if (TARGET_32BIT)
10048 arm_expand_prologue ();
10049 else
10050 thumb1_expand_prologue ();
10051 DONE;
10052 "
10053 )
10054
10055 (define_expand "epilogue"
10056 [(clobber (const_int 0))]
10057 "TARGET_EITHER"
10058 "
10059 if (crtl->calls_eh_return)
10060 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
10061 if (TARGET_THUMB1)
10062 {
10063 thumb1_expand_epilogue ();
10064 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10065 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10066 }
10067 else if (HAVE_return)
10068 {
10069 /* HAVE_return is testing for USE_RETURN_INSN (FALSE). Hence,
10070 no need for explicit testing again. */
10071 emit_jump_insn (gen_return ());
10072 }
10073 else if (TARGET_32BIT)
10074 {
10075 arm_expand_epilogue (true);
10076 }
10077 DONE;
10078 "
10079 )
10080
10081 ;; Note - although unspec_volatiles USE all hard registers,
10082 ;; USEs are ignored after reload has completed. Thus we need
10083 ;; to add an unspec of the link register to ensure that flow
10084 ;; does not think that it is unused by the sibcall branch that
10085 ;; will replace the standard function epilogue.
10086 (define_expand "sibcall_epilogue"
10087 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
10088 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10089 "TARGET_32BIT"
10090 "
10091 arm_expand_epilogue (false);
10092 DONE;
10093 "
10094 )
10095
10096 (define_expand "eh_epilogue"
10097 [(use (match_operand:SI 0 "register_operand"))
10098 (use (match_operand:SI 1 "register_operand"))
10099 (use (match_operand:SI 2 "register_operand"))]
10100 "TARGET_EITHER"
10101 "
10102 {
10103 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10104 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
10105 {
10106 rtx ra = gen_rtx_REG (Pmode, 2);
10107
10108 emit_move_insn (ra, operands[2]);
10109 operands[2] = ra;
10110 }
10111 /* This is a hack -- we may have crystallized the function type too
10112 early. */
10113 cfun->machine->func_type = 0;
10114 }"
10115 )
10116
10117 ;; This split is only used during output to reduce the number of patterns
10118 ;; that need assembler instructions adding to them. We allowed the setting
10119 ;; of the conditions to be implicit during rtl generation so that
10120 ;; the conditional compare patterns would work. However, this conflicts to
10121 ;; some extent with the conditional data operations, so we have to split them
10122 ;; up again here.
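;; A rough sketch of what these splits produce (register names
;; illustrative): a conditional overwrite such as
;;     r0 = (r1 < r2) ? r0 : r3
;; becomes an explicit comparison that sets the condition codes, followed
;; by a cond_exec of the move under the reversed condition:
;;     cmp   r1, r2
;;     movge r0, r3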
10123
10124 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10125 ;; conditional execution sufficient?
10126
10127 (define_split
10128 [(set (match_operand:SI 0 "s_register_operand" "")
10129 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10130 [(match_operand 2 "" "") (match_operand 3 "" "")])
10131 (match_dup 0)
10132 (match_operand 4 "" "")))
10133 (clobber (reg:CC CC_REGNUM))]
10134 "TARGET_ARM && reload_completed"
10135 [(set (match_dup 5) (match_dup 6))
10136 (cond_exec (match_dup 7)
10137 (set (match_dup 0) (match_dup 4)))]
10138 "
10139 {
10140 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10141 operands[2], operands[3]);
10142 enum rtx_code rc = GET_CODE (operands[1]);
10143
10144 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10145 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10146 if (mode == CCFPmode || mode == CCFPEmode)
10147 rc = reverse_condition_maybe_unordered (rc);
10148 else
10149 rc = reverse_condition (rc);
10150
10151 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10152 }"
10153 )
10154
10155 (define_split
10156 [(set (match_operand:SI 0 "s_register_operand" "")
10157 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10158 [(match_operand 2 "" "") (match_operand 3 "" "")])
10159 (match_operand 4 "" "")
10160 (match_dup 0)))
10161 (clobber (reg:CC CC_REGNUM))]
10162 "TARGET_ARM && reload_completed"
10163 [(set (match_dup 5) (match_dup 6))
10164 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10165 (set (match_dup 0) (match_dup 4)))]
10166 "
10167 {
10168 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10169 operands[2], operands[3]);
10170
10171 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10172 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10173 }"
10174 )
10175
10176 (define_split
10177 [(set (match_operand:SI 0 "s_register_operand" "")
10178 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10179 [(match_operand 2 "" "") (match_operand 3 "" "")])
10180 (match_operand 4 "" "")
10181 (match_operand 5 "" "")))
10182 (clobber (reg:CC CC_REGNUM))]
10183 "TARGET_ARM && reload_completed"
10184 [(set (match_dup 6) (match_dup 7))
10185 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10186 (set (match_dup 0) (match_dup 4)))
10187 (cond_exec (match_dup 8)
10188 (set (match_dup 0) (match_dup 5)))]
10189 "
10190 {
10191 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10192 operands[2], operands[3]);
10193 enum rtx_code rc = GET_CODE (operands[1]);
10194
10195 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10196 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10197 if (mode == CCFPmode || mode == CCFPEmode)
10198 rc = reverse_condition_maybe_unordered (rc);
10199 else
10200 rc = reverse_condition (rc);
10201
10202 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10203 }"
10204 )
10205
10206 (define_split
10207 [(set (match_operand:SI 0 "s_register_operand" "")
10208 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10209 [(match_operand:SI 2 "s_register_operand" "")
10210 (match_operand:SI 3 "arm_add_operand" "")])
10211 (match_operand:SI 4 "arm_rhs_operand" "")
10212 (not:SI
10213 (match_operand:SI 5 "s_register_operand" ""))))
10214 (clobber (reg:CC CC_REGNUM))]
10215 "TARGET_ARM && reload_completed"
10216 [(set (match_dup 6) (match_dup 7))
10217 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10218 (set (match_dup 0) (match_dup 4)))
10219 (cond_exec (match_dup 8)
10220 (set (match_dup 0) (not:SI (match_dup 5))))]
10221 "
10222 {
10223 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10224 operands[2], operands[3]);
10225 enum rtx_code rc = GET_CODE (operands[1]);
10226
10227 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10228 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10229 if (mode == CCFPmode || mode == CCFPEmode)
10230 rc = reverse_condition_maybe_unordered (rc);
10231 else
10232 rc = reverse_condition (rc);
10233
10234 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10235 }"
10236 )
10237
10238 (define_insn "*cond_move_not"
10239 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10240 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10241 [(match_operand 3 "cc_register" "") (const_int 0)])
10242 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10243 (not:SI
10244 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10245 "TARGET_ARM"
10246 "@
10247 mvn%D4\\t%0, %2
10248 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10249 [(set_attr "conds" "use")
10250 (set_attr "type" "mvn_reg,multiple")
10251 (set_attr "length" "4,8")]
10252 )
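;; As an illustrative sketch (not taken from the patterns above), *cond_move_not
;; handles a source form such as
;;   x = cond ? a : ~b;
;; once the comparison result already lives in the condition register, emitting
;; either a single conditional MVN (when x and a share a register) or a MOV/MVN
;; pair, matching the two alternatives above.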
10253
10254 ;; The next two patterns occur when an AND operation is followed by a
10255 ;; scc insn sequence.
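;; For example (a sketch only), an expression that tests a single bit and
;; sign-extends it to 0 or -1, such as -((x >> 5) & 1), may be combined into
;; the first pattern and emit something along the lines of:
;;	ands	r0, r1, #32
;;	mvnne	r0, #0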
10256
10257 (define_insn "*sign_extract_onebit"
10258 [(set (match_operand:SI 0 "s_register_operand" "=r")
10259 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10260 (const_int 1)
10261 (match_operand:SI 2 "const_int_operand" "n")))
10262 (clobber (reg:CC CC_REGNUM))]
10263 "TARGET_ARM"
10264 "*
10265 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10266 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10267 return \"mvnne\\t%0, #0\";
10268 "
10269 [(set_attr "conds" "clob")
10270 (set_attr "length" "8")
10271 (set_attr "type" "multiple")]
10272 )
10273
10274 (define_insn "*not_signextract_onebit"
10275 [(set (match_operand:SI 0 "s_register_operand" "=r")
10276 (not:SI
10277 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10278 (const_int 1)
10279 (match_operand:SI 2 "const_int_operand" "n"))))
10280 (clobber (reg:CC CC_REGNUM))]
10281 "TARGET_ARM"
10282 "*
10283 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10284 output_asm_insn (\"tst\\t%1, %2\", operands);
10285 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10286 return \"movne\\t%0, #0\";
10287 "
10288 [(set_attr "conds" "clob")
10289 (set_attr "length" "12")
10290 (set_attr "type" "multiple")]
10291 )
10292 ;; ??? The above patterns need auditing for Thumb-2
10293
10294 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10295 ;; expressions. For simplicity, the first register is also in the unspec
10296 ;; part.
10297 ;; To avoid the use of a GNU extension, the length attribute is computed
10298 ;; by the C function arm_attr_length_push_multi.
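;; As an illustrative sketch, saving r4, r5 and lr would come out as
;; "push {r4, r5, lr}", whereas a single-register save in ARM state uses
;; "str rN, [sp, #-4]!" as described in the output code below.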
10299 (define_insn "*push_multi"
10300 [(match_parallel 2 "multi_register_push"
10301 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
10302 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10303 UNSPEC_PUSH_MULT))])]
10304 ""
10305 "*
10306 {
10307 int num_saves = XVECLEN (operands[2], 0);
10308
10309 /* For the StrongARM at least it is faster to
10310 use STR to store only a single register.
10311 In Thumb mode always use push, and the assembler will pick
10312 something appropriate. */
10313 if (num_saves == 1 && TARGET_ARM)
10314 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
10315 else
10316 {
10317 int i;
10318 char pattern[100];
10319
10320 if (TARGET_32BIT)
10321 strcpy (pattern, \"push%?\\t{%1\");
10322 else
10323 strcpy (pattern, \"push\\t{%1\");
10324
10325 for (i = 1; i < num_saves; i++)
10326 {
10327 strcat (pattern, \", %|\");
10328 strcat (pattern,
10329 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10330 }
10331
10332 strcat (pattern, \"}\");
10333 output_asm_insn (pattern, operands);
10334 }
10335
10336 return \"\";
10337 }"
10338 [(set_attr "type" "store_16")
10339 (set (attr "length")
10340 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
10341 )
10342
10343 (define_insn "stack_tie"
10344 [(set (mem:BLK (scratch))
10345 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10346 (match_operand:SI 1 "s_register_operand" "rk")]
10347 UNSPEC_PRLG_STK))]
10348 ""
10349 ""
10350 [(set_attr "length" "0")
10351 (set_attr "type" "block")]
10352 )
10353
10354 ;; Pop (as used in epilogue RTL)
10355 ;;
10356 (define_insn "*load_multiple_with_writeback"
10357 [(match_parallel 0 "load_multiple_operation"
10358 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10359 (plus:SI (match_dup 1)
10360 (match_operand:SI 2 "const_int_I_operand" "I")))
10361 (set (match_operand:SI 3 "s_register_operand" "=rk")
10362 (mem:SI (match_dup 1)))
10363 ])]
10364 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10365 "*
10366 {
10367 arm_output_multireg_pop (operands, /*return_pc=*/false,
10368 /*cond=*/const_true_rtx,
10369 /*reverse=*/false,
10370 /*update=*/true);
10371 return \"\";
10372 }
10373 "
10374 [(set_attr "type" "load_16")
10375 (set_attr "predicable" "yes")
10376 (set (attr "length")
10377 (symbol_ref "arm_attr_length_pop_multi (operands,
10378 /*return_pc=*/false,
10379 /*write_back_p=*/true)"))]
10380 )
10381
10382 ;; Pop with return (as used in epilogue RTL)
10383 ;;
10384 ;; This instruction is generated when registers are popped at the end of the
10385 ;; epilogue.  Instead of popping the value into LR and then generating a jump
10386 ;; to LR, the value is popped directly into PC.  Hence, the pattern is combined
10387 ;; with (return).
10388 (define_insn "*pop_multiple_with_writeback_and_return"
10389 [(match_parallel 0 "pop_multiple_return"
10390 [(return)
10391 (set (match_operand:SI 1 "s_register_operand" "+rk")
10392 (plus:SI (match_dup 1)
10393 (match_operand:SI 2 "const_int_I_operand" "I")))
10394 (set (match_operand:SI 3 "s_register_operand" "=rk")
10395 (mem:SI (match_dup 1)))
10396 ])]
10397 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10398 "*
10399 {
10400 arm_output_multireg_pop (operands, /*return_pc=*/true,
10401 /*cond=*/const_true_rtx,
10402 /*reverse=*/false,
10403 /*update=*/true);
10404 return \"\";
10405 }
10406 "
10407 [(set_attr "type" "load_16")
10408 (set_attr "predicable" "yes")
10409 (set (attr "length")
10410 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10411 /*write_back_p=*/true)"))]
10412 )
10413
10414 (define_insn "*pop_multiple_with_return"
10415 [(match_parallel 0 "pop_multiple_return"
10416 [(return)
10417 (set (match_operand:SI 2 "s_register_operand" "=rk")
10418 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
10419 ])]
10420 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10421 "*
10422 {
10423 arm_output_multireg_pop (operands, /*return_pc=*/true,
10424 /*cond=*/const_true_rtx,
10425 /*reverse=*/false,
10426 /*update=*/false);
10427 return \"\";
10428 }
10429 "
10430 [(set_attr "type" "load_16")
10431 (set_attr "predicable" "yes")
10432 (set (attr "length")
10433 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10434 /*write_back_p=*/false)"))]
10435 )
10436
10437 ;; Load into PC and return
10438 (define_insn "*ldr_with_return"
10439 [(return)
10440 (set (reg:SI PC_REGNUM)
10441 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
10442 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10443 "ldr%?\t%|pc, [%0], #4"
10444 [(set_attr "type" "load_4")
10445 (set_attr "predicable" "yes")]
10446 )
10447 ;; Pop for floating point registers (as used in epilogue RTL)
10448 (define_insn "*vfp_pop_multiple_with_writeback"
10449 [(match_parallel 0 "pop_multiple_fp"
10450 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10451 (plus:SI (match_dup 1)
10452 (match_operand:SI 2 "const_int_I_operand" "I")))
10453 (set (match_operand:DF 3 "vfp_hard_register_operand" "")
10454 (mem:DF (match_dup 1)))])]
10455 "TARGET_32BIT && TARGET_HARD_FLOAT"
10456 "*
10457 {
10458 int num_regs = XVECLEN (operands[0], 0);
10459 char pattern[100];
10460 rtx op_list[2];
10461 strcpy (pattern, \"vldm\\t\");
10462 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
10463 strcat (pattern, \"!, {\");
10464 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
10465 strcat (pattern, \"%P0\");
10466 if ((num_regs - 1) > 1)
10467 {
10468 strcat (pattern, \"-%P1\");
10469 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
10470 }
10471
10472 strcat (pattern, \"}\");
10473 output_asm_insn (pattern, op_list);
10474 return \"\";
10475 }
10476 "
10477 [(set_attr "type" "load_16")
10478 (set_attr "conds" "unconditional")
10479 (set_attr "predicable" "no")]
10480 )
10481
10482 ;; Special patterns for dealing with the constant pool
10483
10484 (define_insn "align_4"
10485 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10486 "TARGET_EITHER"
10487 "*
10488 assemble_align (32);
10489 return \"\";
10490 "
10491 [(set_attr "type" "no_insn")]
10492 )
10493
10494 (define_insn "align_8"
10495 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10496 "TARGET_EITHER"
10497 "*
10498 assemble_align (64);
10499 return \"\";
10500 "
10501 [(set_attr "type" "no_insn")]
10502 )
10503
10504 (define_insn "consttable_end"
10505 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10506 "TARGET_EITHER"
10507 "*
10508 making_const_table = FALSE;
10509 return \"\";
10510 "
10511 [(set_attr "type" "no_insn")]
10512 )
10513
10514 (define_insn "consttable_1"
10515 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10516 "TARGET_EITHER"
10517 "*
10518 making_const_table = TRUE;
10519 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10520 assemble_zeros (3);
10521 return \"\";
10522 "
10523 [(set_attr "length" "4")
10524 (set_attr "type" "no_insn")]
10525 )
10526
10527 (define_insn "consttable_2"
10528 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10529 "TARGET_EITHER"
10530 "*
10531 {
10532 rtx x = operands[0];
10533 making_const_table = TRUE;
10534 switch (GET_MODE_CLASS (GET_MODE (x)))
10535 {
10536 case MODE_FLOAT:
10537 arm_emit_fp16_const (x);
10538 break;
10539 default:
10540 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10541 assemble_zeros (2);
10542 break;
10543 }
10544 return \"\";
10545 }"
10546 [(set_attr "length" "4")
10547 (set_attr "type" "no_insn")]
10548 )
10549
10550 (define_insn "consttable_4"
10551 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10552 "TARGET_EITHER"
10553 "*
10554 {
10555 rtx x = operands[0];
10556 making_const_table = TRUE;
10557 scalar_float_mode float_mode;
10558 if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
10559 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
10560 else
10561 {
10562 /* XXX: Sometimes gcc does something really dumb and ends up with
10563 a HIGH in a constant pool entry, usually because it's trying to
10564 load into a VFP register. We know this will always be used in
10565 combination with a LO_SUM which ignores the high bits, so just
10566 strip off the HIGH. */
10567 if (GET_CODE (x) == HIGH)
10568 x = XEXP (x, 0);
10569 assemble_integer (x, 4, BITS_PER_WORD, 1);
10570 mark_symbol_refs_as_used (x);
10571 }
10572 return \"\";
10573 }"
10574 [(set_attr "length" "4")
10575 (set_attr "type" "no_insn")]
10576 )
10577
10578 (define_insn "consttable_8"
10579 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10580 "TARGET_EITHER"
10581 "*
10582 {
10583 making_const_table = TRUE;
10584 scalar_float_mode float_mode;
10585 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
10586 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
10587 float_mode, BITS_PER_WORD);
10588 else
10589 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10590 return \"\";
10591 }"
10592 [(set_attr "length" "8")
10593 (set_attr "type" "no_insn")]
10594 )
10595
10596 (define_insn "consttable_16"
10597 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10598 "TARGET_EITHER"
10599 "*
10600 {
10601 making_const_table = TRUE;
10602 scalar_float_mode float_mode;
10603 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
10604 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
10605 float_mode, BITS_PER_WORD);
10606 else
10607 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10608 return \"\";
10609 }"
10610 [(set_attr "length" "16")
10611 (set_attr "type" "no_insn")]
10612 )
10613
10614 ;; V5 instructions.
10615
10616 (define_insn "clzsi2"
10617 [(set (match_operand:SI 0 "s_register_operand" "=r")
10618 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10619 "TARGET_32BIT && arm_arch5t"
10620 "clz%?\\t%0, %1"
10621 [(set_attr "predicable" "yes")
10622 (set_attr "type" "clz")])
10623
10624 (define_insn "rbitsi2"
10625 [(set (match_operand:SI 0 "s_register_operand" "=r")
10626 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
10627 "TARGET_32BIT && arm_arch_thumb2"
10628 "rbit%?\\t%0, %1"
10629 [(set_attr "predicable" "yes")
10630 (set_attr "type" "clz")])
10631
10632 ;; Keep this as a CTZ expression until after reload and then split
10633 ;; into RBIT + CLZ. Since RBIT is represented as an UNSPEC it is unlikely
10634 ;; to fold with any other expression.
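;; As a sketch, __builtin_ctz (x) is therefore expected to end up as the
;; two-instruction sequence
;;	rbit	rd, rn
;;	clz	rd, rd
;; once the split below has run.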
10635
10636 (define_insn_and_split "ctzsi2"
10637 [(set (match_operand:SI 0 "s_register_operand" "=r")
10638 (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10639 "TARGET_32BIT && arm_arch_thumb2"
10640 "#"
10641 "&& reload_completed"
10642 [(const_int 0)]
10643 "
10644 emit_insn (gen_rbitsi2 (operands[0], operands[1]));
10645 emit_insn (gen_clzsi2 (operands[0], operands[0]));
10646 DONE;
10647 ")
10648
10649 ;; V5E instructions.
10650
10651 (define_insn "prefetch"
10652 [(prefetch (match_operand:SI 0 "address_operand" "p")
10653 (match_operand:SI 1 "" "")
10654 (match_operand:SI 2 "" ""))]
10655 "TARGET_32BIT && arm_arch5te"
10656 "pld\\t%a0"
10657 [(set_attr "type" "load_4")]
10658 )
10659
10660 ;; General predication pattern
10661
10662 (define_cond_exec
10663 [(match_operator 0 "arm_comparison_operator"
10664 [(match_operand 1 "cc_register" "")
10665 (const_int 0)])]
10666 "TARGET_32BIT
10667 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
10668 ""
10669 [(set_attr "predicated" "yes")]
10670 )
10671
10672 (define_insn "force_register_use"
10673 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
10674 ""
10675 "%@ %0 needed"
10676 [(set_attr "length" "0")
10677 (set_attr "type" "no_insn")]
10678 )
10679
10680
10681 ;; Patterns for exception handling
10682
10683 (define_expand "eh_return"
10684 [(use (match_operand 0 "general_operand"))]
10685 "TARGET_EITHER"
10686 "
10687 {
10688 if (TARGET_32BIT)
10689 emit_insn (gen_arm_eh_return (operands[0]));
10690 else
10691 emit_insn (gen_thumb_eh_return (operands[0]));
10692 DONE;
10693 }"
10694 )
10695
10696 ;; We can't expand this before we know where the link register is stored.
10697 (define_insn_and_split "arm_eh_return"
10698 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10699 VUNSPEC_EH_RETURN)
10700 (clobber (match_scratch:SI 1 "=&r"))]
10701 "TARGET_ARM"
10702 "#"
10703 "&& reload_completed"
10704 [(const_int 0)]
10705 "
10706 {
10707 arm_set_return_address (operands[0], operands[1]);
10708 DONE;
10709 }"
10710 )
10711
10712 \f
10713 ;; TLS support
10714
10715 (define_insn "load_tp_hard"
10716 [(set (match_operand:SI 0 "register_operand" "=r")
10717 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10718 "TARGET_HARD_TP"
10719 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10720 [(set_attr "predicable" "yes")
10721 (set_attr "type" "mrs")]
10722 )
10723
10724 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10725 (define_insn "load_tp_soft_fdpic"
10726 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10727 (clobber (reg:SI FDPIC_REGNUM))
10728 (clobber (reg:SI LR_REGNUM))
10729 (clobber (reg:SI IP_REGNUM))
10730 (clobber (reg:CC CC_REGNUM))]
10731 "TARGET_SOFT_TP && TARGET_FDPIC"
10732 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10733 [(set_attr "conds" "clob")
10734 (set_attr "type" "branch")]
10735 )
10736
10737 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10738 (define_insn "load_tp_soft"
10739 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10740 (clobber (reg:SI LR_REGNUM))
10741 (clobber (reg:SI IP_REGNUM))
10742 (clobber (reg:CC CC_REGNUM))]
10743 "TARGET_SOFT_TP && !TARGET_FDPIC"
10744 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10745 [(set_attr "conds" "clob")
10746 (set_attr "type" "branch")]
10747 )
10748
10749 ;; tls descriptor call
10750 (define_insn "tlscall"
10751 [(set (reg:SI R0_REGNUM)
10752 (unspec:SI [(reg:SI R0_REGNUM)
10753 (match_operand:SI 0 "" "X")
10754 (match_operand 1 "" "")] UNSPEC_TLS))
10755 (clobber (reg:SI R1_REGNUM))
10756 (clobber (reg:SI LR_REGNUM))
10757 (clobber (reg:SI CC_REGNUM))]
10758 "TARGET_GNU2_TLS"
10759 {
10760 targetm.asm_out.internal_label (asm_out_file, "LPIC",
10761 INTVAL (operands[1]));
10762 return "bl\\t%c0(tlscall)";
10763 }
10764 [(set_attr "conds" "clob")
10765 (set_attr "length" "4")
10766 (set_attr "type" "branch")]
10767 )
10768
10769 ;; For thread pointer builtin
10770 (define_expand "get_thread_pointersi"
10771 [(match_operand:SI 0 "s_register_operand")]
10772 ""
10773 "
10774 {
10775 arm_load_tp (operands[0]);
10776 DONE;
10777 }")
10778
10779 ;;
10780
10781 ;; We only care about the lower 16 bits of the constant
10782 ;; being inserted into the upper 16 bits of the register.
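;; For example (a sketch only), a statement such as
;;   x = (x & 0xffff) | (0x1234 << 16);
;; can be matched here and become a single MOVT writing 0x1234 into the top
;; halfword while leaving the low halfword untouched.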
10783 (define_insn "*arm_movtas_ze"
10784 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
10785 (const_int 16)
10786 (const_int 16))
10787 (match_operand:SI 1 "const_int_operand" ""))]
10788 "TARGET_HAVE_MOVT"
10789 "@
10790 movt%?\t%0, %L1
10791 movt\t%0, %L1"
10792 [(set_attr "arch" "32,v8mb")
10793 (set_attr "predicable" "yes")
10794 (set_attr "length" "4")
10795 (set_attr "type" "alu_sreg")]
10796 )
10797
10798 (define_insn "*arm_rev"
10799 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
10800 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
10801 "arm_arch6"
10802 "@
10803 rev\t%0, %1
10804 rev%?\t%0, %1
10805 rev%?\t%0, %1"
10806 [(set_attr "arch" "t1,t2,32")
10807 (set_attr "length" "2,2,4")
10808 (set_attr "predicable" "no,yes,yes")
10809 (set_attr "type" "rev")]
10810 )
10811
10812 (define_expand "arm_legacy_rev"
10813 [(set (match_operand:SI 2 "s_register_operand")
10814 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
10815 (const_int 16))
10816 (match_dup 1)))
10817 (set (match_dup 2)
10818 (lshiftrt:SI (match_dup 2)
10819 (const_int 8)))
10820 (set (match_operand:SI 3 "s_register_operand")
10821 (rotatert:SI (match_dup 1)
10822 (const_int 8)))
10823 (set (match_dup 2)
10824 (and:SI (match_dup 2)
10825 (const_int -65281)))
10826 (set (match_operand:SI 0 "s_register_operand")
10827 (xor:SI (match_dup 3)
10828 (match_dup 2)))]
10829 "TARGET_32BIT"
10830 ""
10831 )
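;; As a sketch, the expansion above is expected to combine into the classic
;; four-instruction byte reverse, roughly:
;;	eor	r2, r1, r1, ror #16
;;	bic	r2, r2, #0x00ff0000
;;	mov	r3, r1, ror #8
;;	eor	r0, r3, r2, lsr #8
;; which reverses the four bytes of r1 into r0 using only pre-ARMv6
;; instructions.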
10832
10833 ;; Reuse temporaries to keep register pressure down.
10834 (define_expand "thumb_legacy_rev"
10835 [(set (match_operand:SI 2 "s_register_operand")
10836 (ashift:SI (match_operand:SI 1 "s_register_operand")
10837 (const_int 24)))
10838 (set (match_operand:SI 3 "s_register_operand")
10839 (lshiftrt:SI (match_dup 1)
10840 (const_int 24)))
10841 (set (match_dup 3)
10842 (ior:SI (match_dup 3)
10843 (match_dup 2)))
10844 (set (match_operand:SI 4 "s_register_operand")
10845 (const_int 16))
10846 (set (match_operand:SI 5 "s_register_operand")
10847 (rotatert:SI (match_dup 1)
10848 (match_dup 4)))
10849 (set (match_dup 2)
10850 (ashift:SI (match_dup 5)
10851 (const_int 24)))
10852 (set (match_dup 5)
10853 (lshiftrt:SI (match_dup 5)
10854 (const_int 24)))
10855 (set (match_dup 5)
10856 (ior:SI (match_dup 5)
10857 (match_dup 2)))
10858 (set (match_dup 5)
10859 (rotatert:SI (match_dup 5)
10860 (match_dup 4)))
10861 (set (match_operand:SI 0 "s_register_operand")
10862 (ior:SI (match_dup 5)
10863 (match_dup 3)))]
10864 "TARGET_THUMB"
10865 ""
10866 )
10867
10868 ;; ARM-specific expansion of signed mod by power of 2
10869 ;; using conditional negate.
10870 ;; For r0 % n where n is a power of 2 produce:
10871 ;; rsbs r1, r0, #0
10872 ;; and r0, r0, #(n - 1)
10873 ;; and r1, r1, #(n - 1)
10874 ;; rsbpl r0, r1, #0
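;; As a worked example (assuming n == 4): for r0 == -7 the rsbs sets r1 to 7
;; and leaves N clear, the two ands give r0 == 1 and r1 == 3, and the rsbpl
;; executes, producing r0 == -3, matching C truncated division (-7 % 4 == -3).
;; For r0 == 7 the rsbs result is negative, the rsbpl is skipped, and the
;; result is simply 7 & 3 == 3.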
10875
10876 (define_expand "modsi3"
10877 [(match_operand:SI 0 "register_operand")
10878 (match_operand:SI 1 "register_operand")
10879 (match_operand:SI 2 "const_int_operand")]
10880 "TARGET_32BIT"
10881 {
10882 HOST_WIDE_INT val = INTVAL (operands[2]);
10883
10884 if (val <= 0
10885 || exact_log2 (val) <= 0)
10886 FAIL;
10887
10888 rtx mask = GEN_INT (val - 1);
10889
10890 /* In the special case of r0 % 2 we can do the even shorter:
10891 cmp r0, #0
10892 and r0, r0, #1
10893 rsblt r0, r0, #0. */
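  /* As a sketch of the val == 2 case: for operands[1] == -5 the compare sets
     N, the AND leaves 1 in the masked result, and the conditional negate
     yields -1, matching -5 % 2 == -1 in C; for non-negative inputs the
     negate is skipped.  */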
10894
10895 if (val == 2)
10896 {
10897 rtx cc_reg = arm_gen_compare_reg (LT,
10898 operands[1], const0_rtx, NULL_RTX);
10899 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
10900 rtx masked = gen_reg_rtx (SImode);
10901
10902 emit_insn (gen_andsi3 (masked, operands[1], mask));
10903 emit_move_insn (operands[0],
10904 gen_rtx_IF_THEN_ELSE (SImode, cond,
10905 gen_rtx_NEG (SImode,
10906 masked),
10907 masked));
10908 DONE;
10909 }
10910
10911 rtx neg_op = gen_reg_rtx (SImode);
10912 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
10913 operands[1]));
10914
10915 /* Extract the condition register and mode. */
10916 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
10917 rtx cc_reg = SET_DEST (cmp);
10918 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
10919
10920 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
10921
10922 rtx masked_neg = gen_reg_rtx (SImode);
10923 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
10924
10925 /* We want a conditional negate here, but emitting COND_EXEC rtxes
10926 during expand does not always work. Do an IF_THEN_ELSE instead. */
10927 emit_move_insn (operands[0],
10928 gen_rtx_IF_THEN_ELSE (SImode, cond,
10929 gen_rtx_NEG (SImode, masked_neg),
10930 operands[0]));
10931
10932
10933 DONE;
10934 }
10935 )
10936
10937 (define_expand "bswapsi2"
10938 [(set (match_operand:SI 0 "s_register_operand")
10939 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
10940 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
10941 "
10942 if (!arm_arch6)
10943 {
10944 rtx op2 = gen_reg_rtx (SImode);
10945 rtx op3 = gen_reg_rtx (SImode);
10946
10947 if (TARGET_THUMB)
10948 {
10949 rtx op4 = gen_reg_rtx (SImode);
10950 rtx op5 = gen_reg_rtx (SImode);
10951
10952 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
10953 op2, op3, op4, op5));
10954 }
10955 else
10956 {
10957 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
10958 op2, op3));
10959 }
10960
10961 DONE;
10962 }
10963 "
10964 )
10965
10966 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
10967 ;; and unsigned variants, respectively. For rev16, expose
10968 ;; byte-swapping in the lower 16 bits only.
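;; As a sketch, (short) __builtin_bswap16 (x) in a context that sign-extends
;; the result is expected to match *arm_revsh below, while the plain HImode
;; swap matches *arm_rev16.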
10969 (define_insn "*arm_revsh"
10970 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
10971 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
10972 "arm_arch6"
10973 "@
10974 revsh\t%0, %1
10975 revsh%?\t%0, %1
10976 revsh%?\t%0, %1"
10977 [(set_attr "arch" "t1,t2,32")
10978 (set_attr "length" "2,2,4")
10979 (set_attr "type" "rev")]
10980 )
10981
10982 (define_insn "*arm_rev16"
10983 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
10984 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
10985 "arm_arch6"
10986 "@
10987 rev16\t%0, %1
10988 rev16%?\t%0, %1
10989 rev16%?\t%0, %1"
10990 [(set_attr "arch" "t1,t2,32")
10991 (set_attr "length" "2,2,4")
10992 (set_attr "type" "rev")]
10993 )
10994
10995 ;; There are no canonicalisation rules for the position of the lshiftrt, ashift
10996 ;; operations within an IOR/AND RTX; therefore we have two patterns matching
10997 ;; each valid permutation.
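;; As a sketch, the SImode form corresponds to a source idiom such as
;;   ((x << 8) & 0xff00ff00) | ((x >> 8) & 0x00ff00ff)
;; which swaps the bytes within each halfword; the two mask operands are
;; validated by the aarch_rev16_*_mask_imm_p checks in the conditions below.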
10998
10999 (define_insn "arm_rev16si2"
11000 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11001 (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
11002 (const_int 8))
11003 (match_operand:SI 3 "const_int_operand" "n,n,n"))
11004 (and:SI (lshiftrt:SI (match_dup 1)
11005 (const_int 8))
11006 (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
11007 "arm_arch6
11008 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11009 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11010 "rev16\\t%0, %1"
11011 [(set_attr "arch" "t1,t2,32")
11012 (set_attr "length" "2,2,4")
11013 (set_attr "type" "rev")]
11014 )
11015
11016 (define_insn "arm_rev16si2_alt"
11017 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11018 (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
11019 (const_int 8))
11020 (match_operand:SI 2 "const_int_operand" "n,n,n"))
11021 (and:SI (ashift:SI (match_dup 1)
11022 (const_int 8))
11023 (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
11024 "arm_arch6
11025 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11026 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11027 "rev16\\t%0, %1"
11028 [(set_attr "arch" "t1,t2,32")
11029 (set_attr "length" "2,2,4")
11030 (set_attr "type" "rev")]
11031 )
11032
11033 (define_expand "bswaphi2"
11034 [(set (match_operand:HI 0 "s_register_operand")
11035 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
11036 "arm_arch6"
11037 ""
11038 )
11039
11040 ;; Patterns for LDRD/STRD in Thumb2 mode
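;; As a sketch, two adjacent word loads such as "a = p[0]; b = p[1];" can be
;; merged after reload by the peepholes in ldrdstrd.md into a single
;; "ldrd ra, rb, [rp]" matching the patterns below, provided the destination
;; pair satisfies operands_ok_ldrd_strd.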
11041
11042 (define_insn "*thumb2_ldrd"
11043 [(set (match_operand:SI 0 "s_register_operand" "=r")
11044 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11045 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
11046 (set (match_operand:SI 3 "s_register_operand" "=r")
11047 (mem:SI (plus:SI (match_dup 1)
11048 (match_operand:SI 4 "const_int_operand" ""))))]
11049 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11050 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
11051 && (operands_ok_ldrd_strd (operands[0], operands[3],
11052 operands[1], INTVAL (operands[2]),
11053 false, true))"
11054 "ldrd%?\t%0, %3, [%1, %2]"
11055 [(set_attr "type" "load_8")
11056 (set_attr "predicable" "yes")])
11057
11058 (define_insn "*thumb2_ldrd_base"
11059 [(set (match_operand:SI 0 "s_register_operand" "=r")
11060 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11061 (set (match_operand:SI 2 "s_register_operand" "=r")
11062 (mem:SI (plus:SI (match_dup 1)
11063 (const_int 4))))]
11064 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11065 && (operands_ok_ldrd_strd (operands[0], operands[2],
11066 operands[1], 0, false, true))"
11067 "ldrd%?\t%0, %2, [%1]"
11068 [(set_attr "type" "load_8")
11069 (set_attr "predicable" "yes")])
11070
11071 (define_insn "*thumb2_ldrd_base_neg"
11072 [(set (match_operand:SI 0 "s_register_operand" "=r")
11073 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11074 (const_int -4))))
11075 (set (match_operand:SI 2 "s_register_operand" "=r")
11076 (mem:SI (match_dup 1)))]
11077 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11078 && (operands_ok_ldrd_strd (operands[0], operands[2],
11079 operands[1], -4, false, true))"
11080 "ldrd%?\t%0, %2, [%1, #-4]"
11081 [(set_attr "type" "load_8")
11082 (set_attr "predicable" "yes")])
11083
11084 (define_insn "*thumb2_strd"
11085 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11086 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
11087 (match_operand:SI 2 "s_register_operand" "r"))
11088 (set (mem:SI (plus:SI (match_dup 0)
11089 (match_operand:SI 3 "const_int_operand" "")))
11090 (match_operand:SI 4 "s_register_operand" "r"))]
11091 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11092 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
11093 && (operands_ok_ldrd_strd (operands[2], operands[4],
11094 operands[0], INTVAL (operands[1]),
11095 false, false))"
11096 "strd%?\t%2, %4, [%0, %1]"
11097 [(set_attr "type" "store_8")
11098 (set_attr "predicable" "yes")])
11099
11100 (define_insn "*thumb2_strd_base"
11101 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
11102 (match_operand:SI 1 "s_register_operand" "r"))
11103 (set (mem:SI (plus:SI (match_dup 0)
11104 (const_int 4)))
11105 (match_operand:SI 2 "s_register_operand" "r"))]
11106 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11107 && (operands_ok_ldrd_strd (operands[1], operands[2],
11108 operands[0], 0, false, false))"
11109 "strd%?\t%1, %2, [%0]"
11110 [(set_attr "type" "store_8")
11111 (set_attr "predicable" "yes")])
11112
11113 (define_insn "*thumb2_strd_base_neg"
11114 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11115 (const_int -4)))
11116 (match_operand:SI 1 "s_register_operand" "r"))
11117 (set (mem:SI (match_dup 0))
11118 (match_operand:SI 2 "s_register_operand" "r"))]
11119 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11120 && (operands_ok_ldrd_strd (operands[1], operands[2],
11121 operands[0], -4, false, false))"
11122 "strd%?\t%1, %2, [%0, #-4]"
11123 [(set_attr "type" "store_8")
11124 (set_attr "predicable" "yes")])
11125
11126 ;; ARMv8 CRC32 instructions.
11127 (define_insn "arm_<crc_variant>"
11128 [(set (match_operand:SI 0 "s_register_operand" "=r")
11129 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
11130 (match_operand:<crc_mode> 2 "s_register_operand" "r")]
11131 CRC))]
11132 "TARGET_CRC32"
11133 "<crc_variant>\\t%0, %1, %2"
11134 [(set_attr "type" "crc")
11135 (set_attr "conds" "unconditional")]
11136 )
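;; As a usage sketch, the ACLE intrinsics in arm_acle.h (__crc32b, __crc32h,
;; __crc32w and their __crc32c* counterparts) are expected to map onto this
;; pattern when TARGET_CRC32 is enabled, each producing a single instruction.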
11137
11138 ;; Load the load/store double peephole optimizations.
11139 (include "ldrdstrd.md")
11140
11141 ;; Load the load/store multiple patterns
11142 (include "ldmstm.md")
11143
11144 ;; Patterns in ldmstm.md don't cover more than 4 registers.  This pattern covers
11145 ;; the large lists without explicit writeback that are generated for the
11146 ;; APCS_FRAME epilogue.  The operands are validated through the
11147 ;; load_multiple_operation match_parallel predicate rather than through
11148 ;; constraints, so the pattern is only enabled after reload.
11149 (define_insn "*load_multiple"
11150 [(match_parallel 0 "load_multiple_operation"
11151 [(set (match_operand:SI 2 "s_register_operand" "=rk")
11152 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11153 ])]
11154 "TARGET_32BIT && reload_completed"
11155 "*
11156 {
11157 arm_output_multireg_pop (operands, /*return_pc=*/false,
11158 /*cond=*/const_true_rtx,
11159 /*reverse=*/false,
11160 /*update=*/false);
11161 return \"\";
11162 }
11163 "
11164 [(set_attr "predicable" "yes")]
11165 )
11166
11167 (define_expand "copysignsf3"
11168 [(match_operand:SF 0 "register_operand")
11169 (match_operand:SF 1 "register_operand")
11170 (match_operand:SF 2 "register_operand")]
11171 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11172 "{
11173 emit_move_insn (operands[0], operands[2]);
11174 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
11175 GEN_INT (31), GEN_INT (0),
11176 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
11177 DONE;
11178 }"
11179 )
11180
11181 (define_expand "copysigndf3"
11182 [(match_operand:DF 0 "register_operand")
11183 (match_operand:DF 1 "register_operand")
11184 (match_operand:DF 2 "register_operand")]
11185 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11186 "{
11187 rtx op0_low = gen_lowpart (SImode, operands[0]);
11188 rtx op0_high = gen_highpart (SImode, operands[0]);
11189 rtx op1_low = gen_lowpart (SImode, operands[1]);
11190 rtx op1_high = gen_highpart (SImode, operands[1]);
11191 rtx op2_high = gen_highpart (SImode, operands[2]);
11192
11193 rtx scratch1 = gen_reg_rtx (SImode);
11194 rtx scratch2 = gen_reg_rtx (SImode);
11195 emit_move_insn (scratch1, op2_high);
11196 emit_move_insn (scratch2, op1_high);
11197
11198 emit_insn (gen_rtx_SET (scratch1,
11199 gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT (31))));
11200 emit_insn (gen_insv_t2 (scratch2, GEN_INT (1), GEN_INT (31), scratch1));
11201 emit_move_insn (op0_low, op1_low);
11202 emit_move_insn (op0_high, scratch2);
11203
11204 DONE;
11205 }"
11206 )
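;; As a sketch of the soft-float expansion above: for copysign (-3.0, 2.0)
;; the low word of the result is copied from operand 1 unchanged, while the
;; high word takes operand 1's exponent and mantissa with bit 31 (the sign)
;; replaced by operand 2's sign via the lshiftrt/insv pair, giving 3.0.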
11207
11208 ;; movmisalign patterns for HImode and SImode.
11209 (define_expand "movmisalign<mode>"
11210 [(match_operand:HSI 0 "general_operand")
11211 (match_operand:HSI 1 "general_operand")]
11212 "unaligned_access"
11213 {
11214 /* This pattern is not permitted to fail during expansion: if both arguments
11215 are non-registers (e.g. memory := constant), force operand 1 into a
11216 register. */
11217 rtx (* gen_unaligned_load)(rtx, rtx);
11218 rtx tmp_dest = operands[0];
11219 if (!s_register_operand (operands[0], <MODE>mode)
11220 && !s_register_operand (operands[1], <MODE>mode))
11221 operands[1] = force_reg (<MODE>mode, operands[1]);
11222
11223 if (<MODE>mode == HImode)
11224 {
11225 gen_unaligned_load = gen_unaligned_loadhiu;
11226 tmp_dest = gen_reg_rtx (SImode);
11227 }
11228 else
11229 gen_unaligned_load = gen_unaligned_loadsi;
11230
11231 if (MEM_P (operands[1]))
11232 {
11233 emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
11234 if (<MODE>mode == HImode)
11235 emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
11236 }
11237 else
11238 emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
11239
11240 DONE;
11241 })
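;; As a sketch, an access to an under-aligned field, e.g. a member of a
;; struct declared with __attribute__ ((packed)), may be expanded through
;; movmisalignhi/movmisalignsi when unaligned_access is enabled, using the
;; unaligned load/store patterns referenced above.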
11242
11243 (define_insn "arm_<cdp>"
11244 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11245 (match_operand:SI 1 "immediate_operand" "n")
11246 (match_operand:SI 2 "immediate_operand" "n")
11247 (match_operand:SI 3 "immediate_operand" "n")
11248 (match_operand:SI 4 "immediate_operand" "n")
11249 (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
11250 "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
11251 {
11252 arm_const_bounds (operands[0], 0, 16);
11253 arm_const_bounds (operands[1], 0, 16);
11254 arm_const_bounds (operands[2], 0, (1 << 5));
11255 arm_const_bounds (operands[3], 0, (1 << 5));
11256 arm_const_bounds (operands[4], 0, (1 << 5));
11257 arm_const_bounds (operands[5], 0, 8);
11258 return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
11259 }
11260 [(set_attr "length" "4")
11261 (set_attr "type" "coproc")])
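;; As a usage sketch, the ACLE coprocessor intrinsic
;;   __arm_cdp (coproc, opc1, CRd, CRn, CRm, opc2)
;; from arm_acle.h is expected to map onto this pattern, with arm_const_bounds
;; rejecting fields that do not fit their encodings.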
11262
11263 (define_insn "*ldc"
11264 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11265 (match_operand:SI 1 "immediate_operand" "n")
11266 (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
11267 "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
11268 {
11269 arm_const_bounds (operands[0], 0, 16);
11270 arm_const_bounds (operands[1], 0, (1 << 5));
11271 return "<ldc>\\tp%c0, CR%c1, %2";
11272 }
11273 [(set_attr "length" "4")
11274 (set_attr "type" "coproc")])
11275
11276 (define_insn "*stc"
11277 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11278 (match_operand:SI 1 "immediate_operand" "n")
11279 (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
11280 "arm_coproc_builtin_available (VUNSPEC_<STC>)"
11281 {
11282 arm_const_bounds (operands[0], 0, 16);
11283 arm_const_bounds (operands[1], 0, (1 << 5));
11284 return "<stc>\\tp%c0, CR%c1, %2";
11285 }
11286 [(set_attr "length" "4")
11287 (set_attr "type" "coproc")])
11288
11289 (define_expand "arm_<ldc>"
11290 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11291 (match_operand:SI 1 "immediate_operand")
11292 (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
11293 "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
11294
11295 (define_expand "arm_<stc>"
11296 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11297 (match_operand:SI 1 "immediate_operand")
11298 (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
11299 "arm_coproc_builtin_available (VUNSPEC_<STC>)")
11300
11301 (define_insn "arm_<mcr>"
11302 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11303 (match_operand:SI 1 "immediate_operand" "n")
11304 (match_operand:SI 2 "s_register_operand" "r")
11305 (match_operand:SI 3 "immediate_operand" "n")
11306 (match_operand:SI 4 "immediate_operand" "n")
11307 (match_operand:SI 5 "immediate_operand" "n")] MCRI)
11308 (use (match_dup 2))]
11309 "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
11310 {
11311 arm_const_bounds (operands[0], 0, 16);
11312 arm_const_bounds (operands[1], 0, 8);
11313 arm_const_bounds (operands[3], 0, (1 << 5));
11314 arm_const_bounds (operands[4], 0, (1 << 5));
11315 arm_const_bounds (operands[5], 0, 8);
11316 return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
11317 }
11318 [(set_attr "length" "4")
11319 (set_attr "type" "coproc")])
11320
11321 (define_insn "arm_<mrc>"
11322 [(set (match_operand:SI 0 "s_register_operand" "=r")
11323 (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
11324 (match_operand:SI 2 "immediate_operand" "n")
11325 (match_operand:SI 3 "immediate_operand" "n")
11326 (match_operand:SI 4 "immediate_operand" "n")
11327 (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
11328 "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
11329 {
11330 arm_const_bounds (operands[1], 0, 16);
11331 arm_const_bounds (operands[2], 0, 8);
11332 arm_const_bounds (operands[3], 0, (1 << 5));
11333 arm_const_bounds (operands[4], 0, (1 << 5));
11334 arm_const_bounds (operands[5], 0, 8);
11335 return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
11336 }
11337 [(set_attr "length" "4")
11338 (set_attr "type" "coproc")])
11339
11340 (define_insn "arm_<mcrr>"
11341 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11342 (match_operand:SI 1 "immediate_operand" "n")
11343 (match_operand:DI 2 "s_register_operand" "r")
11344 (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
11345 (use (match_dup 2))]
11346 "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
11347 {
11348 arm_const_bounds (operands[0], 0, 16);
11349 arm_const_bounds (operands[1], 0, 8);
11350 arm_const_bounds (operands[3], 0, (1 << 5));
11351 return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
11352 }
11353 [(set_attr "length" "4")
11354 (set_attr "type" "coproc")])
11355
11356 (define_insn "arm_<mrrc>"
11357 [(set (match_operand:DI 0 "s_register_operand" "=r")
11358 (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
11359 (match_operand:SI 2 "immediate_operand" "n")
11360 (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
11361 "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
11362 {
11363 arm_const_bounds (operands[1], 0, 16);
11364 arm_const_bounds (operands[2], 0, 8);
11365 arm_const_bounds (operands[3], 0, (1 << 5));
11366 return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
11367 }
11368 [(set_attr "length" "4")
11369 (set_attr "type" "coproc")])
11370
11371 (define_expand "speculation_barrier"
11372 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11373 "TARGET_EITHER"
11374 "
11375 /* For thumb1 (except Armv8 derivatives), and for pre-Armv7 we don't
11376 have a usable barrier (and probably don't need one in practice).
11377 But to be safe if such code is run on later architectures, call a
11378 helper function in libgcc that will do the right thing for the active
11379 system. */
11380 if (!(arm_arch7 || arm_arch8))
11381 {
11382 arm_emit_speculation_barrier_function ();
11383 DONE;
11384 }
11385 "
11386 )
11387
11388 ;; Generate a hard speculation barrier when we have not enabled speculation
11389 ;; tracking.
11390 (define_insn "*speculation_barrier_insn"
11391 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11392 "arm_arch7 || arm_arch8"
11393 "isb\;dsb\\tsy"
11394 [(set_attr "type" "block")
11395 (set_attr "length" "8")]
11396 )
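;; As a usage note, these speculation_barrier patterns back the generic
;; __builtin_speculation_safe_value support when no other speculation
;; tracking is enabled.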
11397
11398 ;; Vector bits common to IWMMXT and Neon
11399 (include "vec-common.md")
11400 ;; Load the Intel Wireless Multimedia Extension patterns
11401 (include "iwmmxt.md")
11402 ;; Load the VFP co-processor patterns
11403 (include "vfp.md")
11404 ;; Thumb-1 patterns
11405 (include "thumb1.md")
11406 ;; Thumb-2 patterns
11407 (include "thumb2.md")
11408 ;; Neon patterns
11409 (include "neon.md")
11410 ;; Crypto patterns
11411 (include "crypto.md")
11412 ;; Synchronization Primitives
11413 (include "sync.md")
11414 ;; Fixed-point patterns
11415 (include "arm-fixed.md")