1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2021 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
6
7 ;; This file is part of GCC.
8
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
13
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
18
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
22
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
24
25 \f
26 ;;---------------------------------------------------------------------------
27 ;; Constants
28
29 ;; Register numbers -- All machine registers should be defined here
30 (define_constants
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (R4_REGNUM 4) ; Fifth CORE register
34 (FDPIC_REGNUM 9) ; FDPIC register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (LAST_ARM_REGNUM 15) ;
40 (CC_REGNUM 100) ; Condition code pseudo register
41 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
42 (APSRQ_REGNUM 104) ; Q bit pseudo register
43 (APSRGE_REGNUM 105) ; GE bits pseudo register
44 (VPR_REGNUM 106) ; Vector Predication Register - MVE register.
45 ]
46 )
47 ;; 3rd operand to select_dominance_cc_mode
48 (define_constants
49 [(DOM_CC_X_AND_Y 0)
50 (DOM_CC_NX_OR_Y 1)
51 (DOM_CC_X_OR_Y 2)
52 ]
53 )
54 ;; conditional compare combination
55 (define_constants
56 [(CMP_CMP 0)
57 (CMN_CMP 1)
58 (CMP_CMN 2)
59 (CMN_CMN 3)
60 (NUM_OF_COND_CMP 4)
61 ]
62 )
63
64 \f
65 ;;---------------------------------------------------------------------------
66 ;; Attributes
67
68 ;; Processor type. This is created automatically from arm-cores.def.
69 (include "arm-tune.md")
70
71 ;; Instruction classification types
72 (include "types.md")
73
74 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
75 ; generating ARM code. This is used to control the length of some insn
76 ; patterns that share the same RTL in both ARM and Thumb code.
77 (define_attr "is_thumb" "yes,no"
78 (const (if_then_else (symbol_ref "TARGET_THUMB")
79 (const_string "yes") (const_string "no"))))
80
81 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
82 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
83
84 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
85 (define_attr "is_thumb1" "yes,no"
86 (const (if_then_else (symbol_ref "TARGET_THUMB1")
87 (const_string "yes") (const_string "no"))))
88
89 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
90 ; The arm_restrict_it flag enables the "short IT" feature which
91 ; restricts IT blocks to a single 16-bit instruction.
92 ; This attribute should only be used on 16-bit Thumb-2 instructions
93 ; which may be predicated (the "predicable" attribute must be set).
94 (define_attr "predicable_short_it" "no,yes" (const_string "no"))
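; For instance (an illustrative sketch), a pattern whose first alternative has
; a 16-bit encoding and whose second is 32-bit only would pair
;   (set_attr "predicable" "yes")
;   (set_attr "predicable_short_it" "yes,no")
; so that when arm_restrict_it is in effect only the 16-bit alternative
; remains usable inside an IT block.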
95
96 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
97 ; This attribute should only be used on instructions which may emit
98 ; an IT block in their expansion which is not a short IT.
99 (define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
100
101 ; Mark an instruction sequence as the required way of loading a
102 ; constant when -mpure-code is enabled (which implies
103 ; arm_disable_literal_pool)
104 (define_attr "required_for_purecode" "no,yes" (const_string "no"))
105
106 ;; Operand number of an input operand that is shifted. Zero if the
107 ;; given instruction does not shift one of its input operands.
108 (define_attr "shift" "" (const_int 0))
109
110 ;; [For compatibility with AArch64 in pipeline models]
111 ;; Attribute that specifies whether or not the instruction touches fp
112 ;; registers.
113 (define_attr "fp" "no,yes" (const_string "no"))
114
115 ; Floating Point Unit. If we only have floating point emulation, then there
116 ; is no point in scheduling the floating point insns. (Well, for best
117 ; performance we should try and group them together).
118 (define_attr "fpu" "none,vfp"
119 (const (symbol_ref "arm_fpu_attr")))
120
121 ; Predicated means that the insn form is conditionally executed based on a
122 ; predicate. We default to 'no' because no Thumb patterns match this rule
123 ; and not all ARM insns do.
124 (define_attr "predicated" "yes,no" (const_string "no"))
125
126 ; LENGTH of an instruction (in bytes)
127 (define_attr "length" ""
128 (const_int 4))
129
130 ; The architecture which supports the instruction (or alternative).
131 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
132 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
133 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
134 ; arm_arch6. "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
135 ; Baseline. This attribute is used to compute attribute "enabled",
136 ; use type "any" to enable an alternative in all cases.
137 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,v8mb,iwmmxt,iwmmxt2,armv6_or_vfpv3,neon,mve"
138 (const_string "any"))
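; For example (illustrative), an alternative that is only valid in Thumb-2
; state can be paired with an unrestricted one using
;   (set_attr "arch" "t2,*")
; and "arch_enabled" below then decides, per alternative, whether the current
; target meets the requirement.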
139
140 (define_attr "arch_enabled" "no,yes"
141 (cond [(eq_attr "arch" "any")
142 (const_string "yes")
143
144 (and (eq_attr "arch" "a")
145 (match_test "TARGET_ARM"))
146 (const_string "yes")
147
148 (and (eq_attr "arch" "t")
149 (match_test "TARGET_THUMB"))
150 (const_string "yes")
151
152 (and (eq_attr "arch" "t1")
153 (match_test "TARGET_THUMB1"))
154 (const_string "yes")
155
156 (and (eq_attr "arch" "t2")
157 (match_test "TARGET_THUMB2"))
158 (const_string "yes")
159
160 (and (eq_attr "arch" "32")
161 (match_test "TARGET_32BIT"))
162 (const_string "yes")
163
164 (and (eq_attr "arch" "v6")
165 (match_test "TARGET_32BIT && arm_arch6"))
166 (const_string "yes")
167
168 (and (eq_attr "arch" "nov6")
169 (match_test "TARGET_32BIT && !arm_arch6"))
170 (const_string "yes")
171
172 (and (eq_attr "arch" "v6t2")
173 (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
174 (const_string "yes")
175
176 (and (eq_attr "arch" "v8mb")
177 (match_test "TARGET_THUMB1 && arm_arch8"))
178 (const_string "yes")
179
180 (and (eq_attr "arch" "iwmmxt2")
181 (match_test "TARGET_REALLY_IWMMXT2"))
182 (const_string "yes")
183
184 (and (eq_attr "arch" "armv6_or_vfpv3")
185 (match_test "arm_arch6 || TARGET_VFP3"))
186 (const_string "yes")
187
188 (and (eq_attr "arch" "neon")
189 (match_test "TARGET_NEON"))
190 (const_string "yes")
191
192 (and (eq_attr "arch" "mve")
193 (match_test "TARGET_HAVE_MVE"))
194 (const_string "yes")
195 ]
196
197 (const_string "no")))
198
199 (define_attr "opt" "any,speed,size"
200 (const_string "any"))
201
202 (define_attr "opt_enabled" "no,yes"
203 (cond [(eq_attr "opt" "any")
204 (const_string "yes")
205
206 (and (eq_attr "opt" "speed")
207 (match_test "optimize_function_for_speed_p (cfun)"))
208 (const_string "yes")
209
210 (and (eq_attr "opt" "size")
211 (match_test "optimize_function_for_size_p (cfun)"))
212 (const_string "yes")]
213 (const_string "no")))
214
215 (define_attr "use_literal_pool" "no,yes"
216 (cond [(and (eq_attr "type" "f_loads,f_loadd")
217 (match_test "CONSTANT_P (operands[1])"))
218 (const_string "yes")]
219 (const_string "no")))
220
221 ; Enable all alternatives that are both arch_enabled and insn_enabled.
222 ; FIXME:: opt_enabled has been temporarily removed till the time we have
223 ; an attribute that allows the use of such alternatives.
224 ; This depends on caching of speed_p, size_p on a per
225 ; alternative basis. The problem is that the enabled attribute
226 ; cannot depend on any state that is not cached or is not constant
227 ; for a compilation unit. We probably need a generic "hot/cold"
228 ; alternative which if implemented can help with this. We disable this
229 ; until such a time as this is implemented and / or the improvements or
230 ; regressions with removing this attribute are double checked.
231 ; See ashldi3_neon and <shift>di3_neon in neon.md.
232
233 (define_attr "enabled" "no,yes"
234 (cond [(and (eq_attr "predicable_short_it" "no")
235 (and (eq_attr "predicated" "yes")
236 (match_test "arm_restrict_it")))
237 (const_string "no")
238
239 (and (eq_attr "enabled_for_short_it" "no")
240 (match_test "arm_restrict_it"))
241 (const_string "no")
242
243 (and (eq_attr "required_for_purecode" "yes")
244 (not (match_test "arm_disable_literal_pool")))
245 (const_string "no")
246
247 (eq_attr "arch_enabled" "no")
248 (const_string "no")]
249 (const_string "yes")))
250
251 ; POOL_RANGE is how far away from a constant pool entry that this insn
252 ; can be placed. If the distance is zero, then this insn will never
253 ; reference the pool.
254 ; Note that for Thumb constant pools the PC value is rounded down to the
255 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
256 ; Thumb insns) should be set to <max_range> - 2.
257 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
258 ; before its address. It is set to <max_range> - (8 + <data_size>).
259 (define_attr "arm_pool_range" "" (const_int 0))
260 (define_attr "thumb2_pool_range" "" (const_int 0))
261 (define_attr "arm_neg_pool_range" "" (const_int 0))
262 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
263
264 (define_attr "pool_range" ""
265 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
266 (attr "arm_pool_range")))
267 (define_attr "neg_pool_range" ""
268 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
269 (attr "arm_neg_pool_range")))
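; As an illustrative sketch derived from the rules above, a load of a 4-byte
; datum with a +/-4K literal-pool reach might use:
;   (set_attr "arm_pool_range" "4096")
;   (set_attr "arm_neg_pool_range" "4084")  ; 4096 - (8 + 4)
;   (set_attr "thumb2_pool_range" "4094")   ; 4096 - 2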
270
271 ; An assembler sequence may clobber the condition codes without us knowing.
272 ; If such an insn references the pool, then we have no way of knowing how,
273 ; so use the most conservative value for pool_range.
274 (define_asm_attributes
275 [(set_attr "conds" "clob")
276 (set_attr "length" "4")
277 (set_attr "pool_range" "250")])
278
279 ; Load scheduling, set from the arm_ld_sched variable
280 ; initialized by arm_option_override()
281 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
282
283 ; condition codes: this one is used by final_prescan_insn to speed up
284 ; conditionalizing instructions. It saves having to scan the rtl to see if
285 ; it uses or alters the condition codes.
286 ;
287 ; USE means that the condition codes are used by the insn in the process of
288 ; outputting code; this means (at present) that we can't use the insn in
289 ; inlined branches
290 ;
291 ; SET means that the purpose of the insn is to set the condition codes in a
292 ; well defined manner.
293 ;
294 ; CLOB means that the condition codes are altered in an undefined manner, if
295 ; they are altered at all
296 ;
297 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
298 ; that the instruction does not use or alter the condition codes.
299 ;
300 ; NOCOND means that the instruction does not use or alter the condition
301 ; codes but can be converted into a conditionally executed instruction.
302
303 (define_attr "conds" "use,set,clob,unconditional,nocond"
304 (if_then_else
305 (ior (eq_attr "is_thumb1" "yes")
306 (eq_attr "type" "call"))
307 (const_string "clob")
308 (if_then_else
309 (ior (eq_attr "is_neon_type" "yes")
310 (eq_attr "is_mve_type" "yes"))
311 (const_string "unconditional")
312 (const_string "nocond"))))
313
314 ; Predicable means that the insn can be conditionally executed based on
315 ; an automatically added predicate (additional patterns are generated by
316 ; gen...). We default to 'no' because no Thumb patterns match this rule
317 ; and not all ARM patterns do.
318 (define_attr "predicable" "no,yes" (const_string "no"))
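; When a predicable pattern is conditionalized, the %? escape in its output
; template expands to the condition suffix; e.g. (illustrative)
; "add%?\t%0, %1, %2" is emitted as "addeq r0, r1, r2" when executed under EQ.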
319
320 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
321 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
322 ; suffer blockages enough to warrant modelling this (and it can adversely
323 ; affect the schedule).
324 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
325
326 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
327 ; to stall the processor. Used with model_wbuf above.
328 (define_attr "write_conflict" "no,yes"
329 (if_then_else (eq_attr "type"
330 "block,call,load_4")
331 (const_string "yes")
332 (const_string "no")))
333
334 ; Classify the insns into those that take one cycle and those that take more
335 ; than one on the main cpu execution unit.
336 (define_attr "core_cycles" "single,multi"
337 (if_then_else (eq_attr "type"
338 "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
339 alu_shift_imm_lsl_1to4, alu_shift_imm_other, alu_shift_reg, alu_dsp_reg,\
340 alus_ext, alus_imm, alus_sreg,\
341 alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
342 logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
343 logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
344 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
345 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
346 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
347 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
348 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
349 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
350 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
351 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
352 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
353 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
354 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
355 (const_string "single")
356 (const_string "multi")))
357
358 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
359 ;; distant label. Only applicable to Thumb code.
360 (define_attr "far_jump" "yes,no" (const_string "no"))
361
362
363 ;; The number of machine instructions this pattern expands to.
364 ;; Used for Thumb-2 conditional execution.
365 (define_attr "ce_count" "" (const_int 1))
366
367 ;;---------------------------------------------------------------------------
368 ;; Unspecs
369
370 (include "unspecs.md")
371
372 ;;---------------------------------------------------------------------------
373 ;; Mode iterators
374
375 (include "iterators.md")
376
377 ;;---------------------------------------------------------------------------
378 ;; Predicates
379
380 (include "predicates.md")
381 (include "constraints.md")
382
383 ;;---------------------------------------------------------------------------
384 ;; Pipeline descriptions
385
386 (define_attr "tune_cortexr4" "yes,no"
387 (const (if_then_else
388 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
389 (const_string "yes")
390 (const_string "no"))))
391
392 ;; True if the generic scheduling description should be used.
393
394 (define_attr "generic_sched" "yes,no"
395 (const (if_then_else
396 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
397 arm926ejs,arm10e,arm1026ejs,arm1136js,\
398 arm1136jfs,cortexa5,cortexa7,cortexa8,\
399 cortexa9,cortexa12,cortexa15,cortexa17,\
400 cortexa53,cortexa57,cortexm4,cortexm7,\
401 exynosm1,marvell_pj4,xgene1")
402 (eq_attr "tune_cortexr4" "yes"))
403 (const_string "no")
404 (const_string "yes"))))
405
406 (define_attr "generic_vfp" "yes,no"
407 (const (if_then_else
408 (and (eq_attr "fpu" "vfp")
409 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
410 cortexa8,cortexa9,cortexa53,cortexm4,\
411 cortexm7,marvell_pj4,xgene1")
412 (eq_attr "tune_cortexr4" "no"))
413 (const_string "yes")
414 (const_string "no"))))
415
416 (include "marvell-f-iwmmxt.md")
417 (include "arm-generic.md")
418 (include "arm926ejs.md")
419 (include "arm1020e.md")
420 (include "arm1026ejs.md")
421 (include "arm1136jfs.md")
422 (include "fa526.md")
423 (include "fa606te.md")
424 (include "fa626te.md")
425 (include "fmp626.md")
426 (include "fa726te.md")
427 (include "cortex-a5.md")
428 (include "cortex-a7.md")
429 (include "cortex-a8.md")
430 (include "cortex-a9.md")
431 (include "cortex-a15.md")
432 (include "cortex-a17.md")
433 (include "cortex-a53.md")
434 (include "cortex-a57.md")
435 (include "cortex-r4.md")
436 (include "cortex-r4f.md")
437 (include "cortex-m7.md")
438 (include "cortex-m4.md")
439 (include "cortex-m4-fpu.md")
440 (include "exynos-m1.md")
441 (include "vfp11.md")
442 (include "marvell-pj4.md")
443 (include "xgene1.md")
444
445 ;; define_subst and associated attributes
446
447 (define_subst "add_setq"
448 [(set (match_operand:SI 0 "" "")
449 (match_operand:SI 1 "" ""))]
450 ""
451 [(set (match_dup 0)
452 (match_dup 1))
453 (set (reg:CC APSRQ_REGNUM)
454 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))])
455
456 (define_subst_attr "add_clobber_q_name" "add_setq" "" "_setq")
457 (define_subst_attr "add_clobber_q_pred" "add_setq" "!ARM_Q_BIT_READ"
458 "ARM_Q_BIT_READ")
459 \f
460 ;;---------------------------------------------------------------------------
461 ;; Insn patterns
462 ;;
463 ;; Addition insns.
464
465 ;; Note: For DImode insns, there is normally no reason why operands should
466 ;; not be in the same register, what we don't want is for something being
467 ;; written to partially overlap something that is an input.
468
469 (define_expand "adddi3"
470 [(parallel
471 [(set (match_operand:DI 0 "s_register_operand")
472 (plus:DI (match_operand:DI 1 "s_register_operand")
473 (match_operand:DI 2 "reg_or_int_operand")))
474 (clobber (reg:CC CC_REGNUM))])]
475 "TARGET_EITHER"
476 "
477 if (TARGET_THUMB1)
478 {
479 if (!REG_P (operands[2]))
480 operands[2] = force_reg (DImode, operands[2]);
481 }
482 else
483 {
484 rtx lo_result, hi_result, lo_dest, hi_dest;
485 rtx lo_op1, hi_op1, lo_op2, hi_op2;
486 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
487 &lo_op2, &hi_op2);
488 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
489 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
490
491 if (lo_op2 == const0_rtx)
492 {
493 lo_dest = lo_op1;
494 if (!arm_add_operand (hi_op2, SImode))
495 hi_op2 = force_reg (SImode, hi_op2);
496 /* Assume hi_op2 won't also be zero. */
497 emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
498 }
499 else
500 {
501 if (!arm_add_operand (lo_op2, SImode))
502 lo_op2 = force_reg (SImode, lo_op2);
503 if (!arm_not_operand (hi_op2, SImode))
504 hi_op2 = force_reg (SImode, hi_op2);
505
506 emit_insn (gen_addsi3_compare_op1 (lo_dest, lo_op1, lo_op2));
507 rtx carry = gen_rtx_LTU (SImode, gen_rtx_REG (CC_Cmode, CC_REGNUM),
508 const0_rtx);
509 if (hi_op2 == const0_rtx)
510 emit_insn (gen_add0si3_carryin (hi_dest, hi_op1, carry));
511 else
512 emit_insn (gen_addsi3_carryin (hi_dest, hi_op1, hi_op2, carry));
513 }
514
515 if (lo_result != lo_dest)
516 emit_move_insn (lo_result, lo_dest);
517 if (hi_result != hi_dest)
518 emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
519 DONE;
520 }
521 "
522 )
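;; On TARGET_32BIT the expander above lowers a 64-bit addition to an ADDS of
;; the low words followed by an ADC of the high words, e.g. (illustrative
;; register choices):
;;   adds    r0, r2, r4
;;   adc     r1, r3, r5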
523
524 (define_expand "addvsi4"
525 [(match_operand:SI 0 "s_register_operand")
526 (match_operand:SI 1 "s_register_operand")
527 (match_operand:SI 2 "arm_add_operand")
528 (match_operand 3 "")]
529 "TARGET_32BIT"
530 {
531 if (CONST_INT_P (operands[2]))
532 emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1], operands[2]));
533 else
534 emit_insn (gen_addsi3_compareV_reg (operands[0], operands[1], operands[2]));
535 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
536
537 DONE;
538 })
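;; Signed overflow is then detected from the V flag set by ADDS, so the
;; expansion above amounts to (illustrative):
;;   adds    r0, r1, r2
;;   bvs     .Loverflow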
539
540 (define_expand "addvdi4"
541 [(match_operand:DI 0 "s_register_operand")
542 (match_operand:DI 1 "s_register_operand")
543 (match_operand:DI 2 "reg_or_int_operand")
544 (match_operand 3 "")]
545 "TARGET_32BIT"
546 {
547 rtx lo_result, hi_result;
548 rtx lo_op1, hi_op1, lo_op2, hi_op2;
549 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
550 &lo_op2, &hi_op2);
551 lo_result = gen_lowpart (SImode, operands[0]);
552 hi_result = gen_highpart (SImode, operands[0]);
553
554 if (lo_op2 == const0_rtx)
555 {
556 emit_move_insn (lo_result, lo_op1);
557 if (!arm_add_operand (hi_op2, SImode))
558 hi_op2 = force_reg (SImode, hi_op2);
559
560 emit_insn (gen_addvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
561 }
562 else
563 {
564 if (!arm_add_operand (lo_op2, SImode))
565 lo_op2 = force_reg (SImode, lo_op2);
566 if (!arm_not_operand (hi_op2, SImode))
567 hi_op2 = force_reg (SImode, hi_op2);
568
569 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
570
571 if (hi_op2 == const0_rtx)
572 emit_insn (gen_addsi3_cin_vout_0 (hi_result, hi_op1));
573 else if (CONST_INT_P (hi_op2))
574 emit_insn (gen_addsi3_cin_vout_imm (hi_result, hi_op1, hi_op2));
575 else
576 emit_insn (gen_addsi3_cin_vout_reg (hi_result, hi_op1, hi_op2));
577
578 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
579 }
580
581 DONE;
582 })
583
584 (define_expand "addsi3_cin_vout_reg"
585 [(parallel
586 [(set (match_dup 3)
587 (compare:CC_V
588 (plus:DI
589 (plus:DI (match_dup 4)
590 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
591 (sign_extend:DI (match_operand:SI 2 "s_register_operand")))
592 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
593 (match_dup 2)))))
594 (set (match_operand:SI 0 "s_register_operand")
595 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
596 (match_dup 2)))])]
597 "TARGET_32BIT"
598 {
599 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
600 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
601 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
602 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
603 }
604 )
605
606 (define_insn "*addsi3_cin_vout_reg_insn"
607 [(set (reg:CC_V CC_REGNUM)
608 (compare:CC_V
609 (plus:DI
610 (plus:DI
611 (match_operand:DI 3 "arm_carry_operation" "")
612 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
613 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
614 (sign_extend:DI
615 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
616 (match_dup 1))
617 (match_dup 2)))))
618 (set (match_operand:SI 0 "s_register_operand" "=l,r")
619 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
620 (match_dup 2)))]
621 "TARGET_32BIT"
622 "@
623 adcs%?\\t%0, %0, %2
624 adcs%?\\t%0, %1, %2"
625 [(set_attr "type" "alus_sreg")
626 (set_attr "arch" "t2,*")
627 (set_attr "length" "2,4")]
628 )
629
630 (define_expand "addsi3_cin_vout_imm"
631 [(parallel
632 [(set (match_dup 3)
633 (compare:CC_V
634 (plus:DI
635 (plus:DI (match_dup 4)
636 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
637 (match_dup 2))
638 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
639 (match_dup 2)))))
640 (set (match_operand:SI 0 "s_register_operand")
641 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
642 (match_operand 2 "arm_adcimm_operand")))])]
643 "TARGET_32BIT"
644 {
645 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
646 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
647 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
648 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
649 }
650 )
651
652 (define_insn "*addsi3_cin_vout_imm_insn"
653 [(set (reg:CC_V CC_REGNUM)
654 (compare:CC_V
655 (plus:DI
656 (plus:DI
657 (match_operand:DI 3 "arm_carry_operation" "")
658 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
659 (match_operand 2 "arm_adcimm_operand" "I,K"))
660 (sign_extend:DI
661 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
662 (match_dup 1))
663 (match_dup 2)))))
664 (set (match_operand:SI 0 "s_register_operand" "=r,r")
665 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
666 (match_dup 2)))]
667 "TARGET_32BIT"
668 "@
669 adcs%?\\t%0, %1, %2
670 sbcs%?\\t%0, %1, #%B2"
671 [(set_attr "type" "alus_imm")]
672 )
673
674 (define_expand "addsi3_cin_vout_0"
675 [(parallel
676 [(set (match_dup 2)
677 (compare:CC_V
678 (plus:DI (match_dup 3)
679 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
680 (sign_extend:DI (plus:SI (match_dup 4) (match_dup 1)))))
681 (set (match_operand:SI 0 "s_register_operand")
682 (plus:SI (match_dup 4) (match_dup 1)))])]
683 "TARGET_32BIT"
684 {
685 operands[2] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
686 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
687 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
688 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
689 }
690 )
691
692 (define_insn "*addsi3_cin_vout_0_insn"
693 [(set (reg:CC_V CC_REGNUM)
694 (compare:CC_V
695 (plus:DI
696 (match_operand:DI 2 "arm_carry_operation" "")
697 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
698 (sign_extend:DI (plus:SI
699 (match_operand:SI 3 "arm_carry_operation" "")
700 (match_dup 1)))))
701 (set (match_operand:SI 0 "s_register_operand" "=r")
702 (plus:SI (match_dup 3) (match_dup 1)))]
703 "TARGET_32BIT"
704 "adcs%?\\t%0, %1, #0"
705 [(set_attr "type" "alus_imm")]
706 )
707
708 (define_expand "uaddvsi4"
709 [(match_operand:SI 0 "s_register_operand")
710 (match_operand:SI 1 "s_register_operand")
711 (match_operand:SI 2 "arm_add_operand")
712 (match_operand 3 "")]
713 "TARGET_32BIT"
714 {
715 emit_insn (gen_addsi3_compare_op1 (operands[0], operands[1], operands[2]));
716 arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
717
718 DONE;
719 })
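;; The unsigned check uses the carry flag rather than V; illustratively:
;;   adds    r0, r1, r2
;;   bcs     .Loverflow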
720
721 (define_expand "uaddvdi4"
722 [(match_operand:DI 0 "s_register_operand")
723 (match_operand:DI 1 "s_register_operand")
724 (match_operand:DI 2 "reg_or_int_operand")
725 (match_operand 3 "")]
726 "TARGET_32BIT"
727 {
728 rtx lo_result, hi_result;
729 rtx lo_op1, hi_op1, lo_op2, hi_op2;
730 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
731 &lo_op2, &hi_op2);
732 lo_result = gen_lowpart (SImode, operands[0]);
733 hi_result = gen_highpart (SImode, operands[0]);
734
735 if (lo_op2 == const0_rtx)
736 {
737 emit_move_insn (lo_result, lo_op1);
738 if (!arm_add_operand (hi_op2, SImode))
739 hi_op2 = force_reg (SImode, hi_op2);
740
741 emit_insn (gen_uaddvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
742 }
743 else
744 {
745 if (!arm_add_operand (lo_op2, SImode))
746 lo_op2 = force_reg (SImode, lo_op2);
747 if (!arm_not_operand (hi_op2, SImode))
748 hi_op2 = force_reg (SImode, hi_op2);
749
750 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
751
752 if (hi_op2 == const0_rtx)
753 emit_insn (gen_addsi3_cin_cout_0 (hi_result, hi_op1));
754 else if (CONST_INT_P (hi_op2))
755 emit_insn (gen_addsi3_cin_cout_imm (hi_result, hi_op1, hi_op2));
756 else
757 emit_insn (gen_addsi3_cin_cout_reg (hi_result, hi_op1, hi_op2));
758
759 arm_gen_unlikely_cbranch (GEU, CC_ADCmode, operands[3]);
760 }
761
762 DONE;
763 })
764
765 (define_expand "addsi3_cin_cout_reg"
766 [(parallel
767 [(set (match_dup 3)
768 (compare:CC_ADC
769 (plus:DI
770 (plus:DI (match_dup 4)
771 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
772 (zero_extend:DI (match_operand:SI 2 "s_register_operand")))
773 (const_int 4294967296)))
774 (set (match_operand:SI 0 "s_register_operand")
775 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
776 (match_dup 2)))])]
777 "TARGET_32BIT"
778 {
779 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
780 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
781 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
782 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
783 }
784 )
785
786 (define_insn "*addsi3_cin_cout_reg_insn"
787 [(set (reg:CC_ADC CC_REGNUM)
788 (compare:CC_ADC
789 (plus:DI
790 (plus:DI
791 (match_operand:DI 3 "arm_carry_operation" "")
792 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
793 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
794 (const_int 4294967296)))
795 (set (match_operand:SI 0 "s_register_operand" "=l,r")
796 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
797 (match_dup 1))
798 (match_dup 2)))]
799 "TARGET_32BIT"
800 "@
801 adcs%?\\t%0, %0, %2
802 adcs%?\\t%0, %1, %2"
803 [(set_attr "type" "alus_sreg")
804 (set_attr "arch" "t2,*")
805 (set_attr "length" "2,4")]
806 )
807
808 (define_expand "addsi3_cin_cout_imm"
809 [(parallel
810 [(set (match_dup 3)
811 (compare:CC_ADC
812 (plus:DI
813 (plus:DI (match_dup 4)
814 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
815 (match_dup 6))
816 (const_int 4294967296)))
817 (set (match_operand:SI 0 "s_register_operand")
818 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
819 (match_operand:SI 2 "arm_adcimm_operand")))])]
820 "TARGET_32BIT"
821 {
822 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
823 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
824 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
825 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
826 operands[6] = GEN_INT (UINTVAL (operands[2]) & 0xffffffff);
827 }
828 )
829
830 (define_insn "*addsi3_cin_cout_imm_insn"
831 [(set (reg:CC_ADC CC_REGNUM)
832 (compare:CC_ADC
833 (plus:DI
834 (plus:DI
835 (match_operand:DI 3 "arm_carry_operation" "")
836 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
837 (match_operand:DI 5 "const_int_operand" "n,n"))
838 (const_int 4294967296)))
839 (set (match_operand:SI 0 "s_register_operand" "=r,r")
840 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
841 (match_dup 1))
842 (match_operand:SI 2 "arm_adcimm_operand" "I,K")))]
843 "TARGET_32BIT
844 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[5])"
845 "@
846 adcs%?\\t%0, %1, %2
847 sbcs%?\\t%0, %1, #%B2"
848 [(set_attr "type" "alus_imm")]
849 )
850
851 (define_expand "addsi3_cin_cout_0"
852 [(parallel
853 [(set (match_dup 2)
854 (compare:CC_ADC
855 (plus:DI (match_dup 3)
856 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
857 (const_int 4294967296)))
858 (set (match_operand:SI 0 "s_register_operand")
859 (plus:SI (match_dup 4) (match_dup 1)))])]
860 "TARGET_32BIT"
861 {
862 operands[2] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
863 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
864 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
865 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
866 }
867 )
868
869 (define_insn "*addsi3_cin_cout_0_insn"
870 [(set (reg:CC_ADC CC_REGNUM)
871 (compare:CC_ADC
872 (plus:DI
873 (match_operand:DI 2 "arm_carry_operation" "")
874 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
875 (const_int 4294967296)))
876 (set (match_operand:SI 0 "s_register_operand" "=r")
877 (plus:SI (match_operand:SI 3 "arm_carry_operation" "") (match_dup 1)))]
878 "TARGET_32BIT"
879 "adcs%?\\t%0, %1, #0"
880 [(set_attr "type" "alus_imm")]
881 )
882
883 (define_expand "addsi3"
884 [(set (match_operand:SI 0 "s_register_operand")
885 (plus:SI (match_operand:SI 1 "s_register_operand")
886 (match_operand:SI 2 "reg_or_int_operand")))]
887 "TARGET_EITHER"
888 "
889 if (TARGET_32BIT && CONST_INT_P (operands[2]))
890 {
891 arm_split_constant (PLUS, SImode, NULL_RTX,
892 INTVAL (operands[2]), operands[0], operands[1],
893 optimize && can_create_pseudo_p ());
894 DONE;
895 }
896 "
897 )
898
899 ; If there is a scratch available, this will be faster than synthesizing the
900 ; addition.
901 (define_peephole2
902 [(match_scratch:SI 3 "r")
903 (set (match_operand:SI 0 "arm_general_register_operand" "")
904 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
905 (match_operand:SI 2 "const_int_operand" "")))]
906 "TARGET_32BIT &&
907 !(const_ok_for_arm (INTVAL (operands[2]))
908 || const_ok_for_arm (-INTVAL (operands[2])))
909 && const_ok_for_arm (~INTVAL (operands[2]))"
910 [(set (match_dup 3) (match_dup 2))
911 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
912 ""
913 )
914
915 ;; The r/r/k alternative is required when reloading the address
916 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
917 ;; put the duplicated register first, and not try the commutative version.
918 (define_insn_and_split "*arm_addsi3"
919 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
920 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
921 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
922 "TARGET_32BIT"
923 "@
924 add%?\\t%0, %0, %2
925 add%?\\t%0, %1, %2
926 add%?\\t%0, %1, %2
927 add%?\\t%0, %1, %2
928 add%?\\t%0, %1, %2
929 add%?\\t%0, %1, %2
930 add%?\\t%0, %2, %1
931 add%?\\t%0, %1, %2
932 addw%?\\t%0, %1, %2
933 addw%?\\t%0, %1, %2
934 sub%?\\t%0, %1, #%n2
935 sub%?\\t%0, %1, #%n2
936 sub%?\\t%0, %1, #%n2
937 subw%?\\t%0, %1, #%n2
938 subw%?\\t%0, %1, #%n2
939 #"
940 "TARGET_32BIT
941 && CONST_INT_P (operands[2])
942 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
943 && (reload_completed || !arm_eliminable_register (operands[1]))"
944 [(clobber (const_int 0))]
945 "
946 arm_split_constant (PLUS, SImode, curr_insn,
947 INTVAL (operands[2]), operands[0],
948 operands[1], 0);
949 DONE;
950 "
951 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
952 (set_attr "predicable" "yes")
953 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
954 (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
955 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
956 (const_string "alu_imm")
957 (const_string "alu_sreg")))
958 ]
959 )
960
961 (define_insn "addsi3_compareV_reg"
962 [(set (reg:CC_V CC_REGNUM)
963 (compare:CC_V
964 (plus:DI
965 (sign_extend:DI (match_operand:SI 1 "register_operand" "%l,0,r"))
966 (sign_extend:DI (match_operand:SI 2 "register_operand" "l,r,r")))
967 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
968 (set (match_operand:SI 0 "register_operand" "=l,r,r")
969 (plus:SI (match_dup 1) (match_dup 2)))]
970 "TARGET_32BIT"
971 "adds%?\\t%0, %1, %2"
972 [(set_attr "conds" "set")
973 (set_attr "arch" "t2,t2,*")
974 (set_attr "length" "2,2,4")
975 (set_attr "type" "alus_sreg")]
976 )
977
978 (define_insn "*addsi3_compareV_reg_nosum"
979 [(set (reg:CC_V CC_REGNUM)
980 (compare:CC_V
981 (plus:DI
982 (sign_extend:DI (match_operand:SI 0 "register_operand" "%l,r"))
983 (sign_extend:DI (match_operand:SI 1 "register_operand" "l,r")))
984 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
985 "TARGET_32BIT"
986 "cmn%?\\t%0, %1"
987 [(set_attr "conds" "set")
988 (set_attr "arch" "t2,*")
989 (set_attr "length" "2,4")
990 (set_attr "type" "alus_sreg")]
991 )
992
993 (define_insn "subvsi3_intmin"
994 [(set (reg:CC_V CC_REGNUM)
995 (compare:CC_V
996 (plus:DI
997 (sign_extend:DI
998 (match_operand:SI 1 "register_operand" "r"))
999 (const_int 2147483648))
1000 (sign_extend:DI (plus:SI (match_dup 1) (const_int -2147483648)))))
1001 (set (match_operand:SI 0 "register_operand" "=r")
1002 (plus:SI (match_dup 1) (const_int -2147483648)))]
1003 "TARGET_32BIT"
1004 "subs%?\\t%0, %1, #-2147483648"
1005 [(set_attr "conds" "set")
1006 (set_attr "type" "alus_imm")]
1007 )
1008
1009 (define_insn "addsi3_compareV_imm"
1010 [(set (reg:CC_V CC_REGNUM)
1011 (compare:CC_V
1012 (plus:DI
1013 (sign_extend:DI
1014 (match_operand:SI 1 "register_operand" "l,0,l,0,r,r"))
1015 (match_operand 2 "arm_addimm_operand" "Pd,Py,Px,Pw,I,L"))
1016 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
1017 (set (match_operand:SI 0 "register_operand" "=l,l,l,l,r,r")
1018 (plus:SI (match_dup 1) (match_dup 2)))]
1019 "TARGET_32BIT
1020 && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
1021 "@
1022 adds%?\\t%0, %1, %2
1023 adds%?\\t%0, %0, %2
1024 subs%?\\t%0, %1, #%n2
1025 subs%?\\t%0, %0, #%n2
1026 adds%?\\t%0, %1, %2
1027 subs%?\\t%0, %1, #%n2"
1028 [(set_attr "conds" "set")
1029 (set_attr "arch" "t2,t2,t2,t2,*,*")
1030 (set_attr "length" "2,2,2,2,4,4")
1031 (set_attr "type" "alus_imm")]
1032 )
1033
1034 (define_insn "addsi3_compareV_imm_nosum"
1035 [(set (reg:CC_V CC_REGNUM)
1036 (compare:CC_V
1037 (plus:DI
1038 (sign_extend:DI
1039 (match_operand:SI 0 "register_operand" "l,r,r"))
1040 (match_operand 1 "arm_addimm_operand" "Pw,I,L"))
1041 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1042 "TARGET_32BIT
1043 && INTVAL (operands[1]) == ARM_SIGN_EXTEND (INTVAL (operands[1]))"
1044 "@
1045 cmp%?\\t%0, #%n1
1046 cmn%?\\t%0, %1
1047 cmp%?\\t%0, #%n1"
1048 [(set_attr "conds" "set")
1049 (set_attr "arch" "t2,*,*")
1050 (set_attr "length" "2,4,4")
1051 (set_attr "type" "alus_imm")]
1052 )
1053
1054 ;; We can handle more constants efficiently if we can clobber either a scratch
1055 ;; or the other source operand. We deliberately leave this late as in
1056 ;; high register pressure situations it's not worth forcing any reloads.
1057 (define_peephole2
1058 [(match_scratch:SI 2 "l")
1059 (set (reg:CC_V CC_REGNUM)
1060 (compare:CC_V
1061 (plus:DI
1062 (sign_extend:DI
1063 (match_operand:SI 0 "low_register_operand"))
1064 (match_operand 1 "const_int_operand"))
1065 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1066 "TARGET_THUMB2
1067 && satisfies_constraint_Pd (operands[1])"
1068 [(parallel[
1069 (set (reg:CC_V CC_REGNUM)
1070 (compare:CC_V
1071 (plus:DI (sign_extend:DI (match_dup 0))
1072 (sign_extend:DI (match_dup 1)))
1073 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1074 (set (match_dup 2) (plus:SI (match_dup 0) (match_dup 1)))])]
1075 )
1076
1077 (define_peephole2
1078 [(set (reg:CC_V CC_REGNUM)
1079 (compare:CC_V
1080 (plus:DI
1081 (sign_extend:DI
1082 (match_operand:SI 0 "low_register_operand"))
1083 (match_operand 1 "const_int_operand"))
1084 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1085 "TARGET_THUMB2
1086 && dead_or_set_p (peep2_next_insn (0), operands[0])
1087 && satisfies_constraint_Py (operands[1])"
1088 [(parallel[
1089 (set (reg:CC_V CC_REGNUM)
1090 (compare:CC_V
1091 (plus:DI (sign_extend:DI (match_dup 0))
1092 (sign_extend:DI (match_dup 1)))
1093 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1094 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 1)))])]
1095 )
1096
1097 (define_insn "addsi3_compare0"
1098 [(set (reg:CC_NZ CC_REGNUM)
1099 (compare:CC_NZ
1100 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
1101 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
1102 (const_int 0)))
1103 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1104 (plus:SI (match_dup 1) (match_dup 2)))]
1105 "TARGET_ARM"
1106 "@
1107 adds%?\\t%0, %1, %2
1108 subs%?\\t%0, %1, #%n2
1109 adds%?\\t%0, %1, %2"
1110 [(set_attr "conds" "set")
1111 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
1112 )
1113
1114 (define_insn "*addsi3_compare0_scratch"
1115 [(set (reg:CC_NZ CC_REGNUM)
1116 (compare:CC_NZ
1117 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
1118 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
1119 (const_int 0)))]
1120 "TARGET_ARM"
1121 "@
1122 cmn%?\\t%0, %1
1123 cmp%?\\t%0, #%n1
1124 cmn%?\\t%0, %1"
1125 [(set_attr "conds" "set")
1126 (set_attr "predicable" "yes")
1127 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
1128 )
1129
1130 (define_insn "*compare_negsi_si"
1131 [(set (reg:CC_Z CC_REGNUM)
1132 (compare:CC_Z
1133 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
1134 (match_operand:SI 1 "s_register_operand" "l,r")))]
1135 "TARGET_32BIT"
1136 "cmn%?\\t%1, %0"
1137 [(set_attr "conds" "set")
1138 (set_attr "predicable" "yes")
1139 (set_attr "arch" "t2,*")
1140 (set_attr "length" "2,4")
1141 (set_attr "predicable_short_it" "yes,no")
1142 (set_attr "type" "alus_sreg")]
1143 )
1144
1145 ;; This is the canonicalization of subsi3_compare when the
1146 ;; addend is a constant.
1147 (define_insn "cmpsi2_addneg"
1148 [(set (reg:CC CC_REGNUM)
1149 (compare:CC
1150 (match_operand:SI 1 "s_register_operand" "r,r")
1151 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
1152 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1153 (plus:SI (match_dup 1)
1154 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
1155 "TARGET_32BIT
1156 && (INTVAL (operands[2])
1157 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
1158 {
1159 /* For 0 and INT_MIN it is essential that we use subs, as adds will result
1160 in different condition codes (like cmn rather than like cmp), so that
1161 alternative comes first. Both alternatives can match for any 0x??000000,
1162 where (except for 0 and INT_MIN) it doesn't matter what we choose, and also
1163 for -1 and 1 with TARGET_THUMB2; in that case prefer the instruction with #1
1164 as it is shorter. */
1165 if (which_alternative == 0 && operands[3] != const1_rtx)
1166 return "subs%?\\t%0, %1, #%n3";
1167 else
1168 return "adds%?\\t%0, %1, %3";
1169 }
1170 [(set_attr "conds" "set")
1171 (set_attr "type" "alus_sreg")]
1172 )
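;; For example (illustrative), comparing r1 with #5 while also forming r1 - 5
;; matches this pattern with operands[2] == 5 and operands[3] == -5 and is
;; emitted as the single instruction "subs r0, r1, #5".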
1173
1174 ;; Convert the sequence
1175 ;; sub rd, rn, #1
1176 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
1177 ;; bne dest
1178 ;; into
1179 ;; subs rd, rn, #1
1180 ;; bcs dest ((unsigned)rn >= 1)
1181 ;; similarly for the beq variant using bcc.
1182 ;; This is a common looping idiom (while (n--))
1183 (define_peephole2
1184 [(set (match_operand:SI 0 "arm_general_register_operand" "")
1185 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
1186 (const_int -1)))
1187 (set (match_operand 2 "cc_register" "")
1188 (compare (match_dup 0) (const_int -1)))
1189 (set (pc)
1190 (if_then_else (match_operator 3 "equality_operator"
1191 [(match_dup 2) (const_int 0)])
1192 (match_operand 4 "" "")
1193 (match_operand 5 "" "")))]
1194 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
1195 [(parallel[
1196 (set (match_dup 2)
1197 (compare:CC
1198 (match_dup 1) (const_int 1)))
1199 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
1200 (set (pc)
1201 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
1202 (match_dup 4)
1203 (match_dup 5)))]
1204 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
1205 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
1206 ? GEU : LTU),
1207 VOIDmode,
1208 operands[2], const0_rtx);"
1209 )
1210
1211 ;; The next four insns work because they compare the result with one of
1212 ;; the operands, and we know that the use of the condition code is
1213 ;; either GEU or LTU, so we can use the carry flag from the addition
1214 ;; instead of doing the compare a second time.
1215 (define_insn "addsi3_compare_op1"
1216 [(set (reg:CC_C CC_REGNUM)
1217 (compare:CC_C
1218 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,rk,rk")
1219 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rkI,L"))
1220 (match_dup 1)))
1221 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,rk,rk")
1222 (plus:SI (match_dup 1) (match_dup 2)))]
1223 "TARGET_32BIT"
1224 "@
1225 adds%?\\t%0, %1, %2
1226 adds%?\\t%0, %0, %2
1227 subs%?\\t%0, %1, #%n2
1228 subs%?\\t%0, %0, #%n2
1229 adds%?\\t%0, %1, %2
1230 subs%?\\t%0, %1, #%n2"
1231 [(set_attr "conds" "set")
1232 (set_attr "arch" "t2,t2,t2,t2,*,*")
1233 (set_attr "length" "2,2,2,2,4,4")
1234 (set (attr "type")
1235 (if_then_else (match_operand 2 "const_int_operand")
1236 (const_string "alu_imm")
1237 (const_string "alu_sreg")))]
1238 )
1239
1240 (define_insn "*addsi3_compare_op2"
1241 [(set (reg:CC_C CC_REGNUM)
1242 (compare:CC_C
1243 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r")
1244 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rI,L"))
1245 (match_dup 2)))
1246 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r")
1247 (plus:SI (match_dup 1) (match_dup 2)))]
1248 "TARGET_32BIT"
1249 "@
1250 adds%?\\t%0, %1, %2
1251 adds%?\\t%0, %0, %2
1252 subs%?\\t%0, %1, #%n2
1253 subs%?\\t%0, %0, #%n2
1254 adds%?\\t%0, %1, %2
1255 subs%?\\t%0, %1, #%n2"
1256 [(set_attr "conds" "set")
1257 (set_attr "arch" "t2,t2,t2,t2,*,*")
1258 (set_attr "length" "2,2,2,2,4,4")
1259 (set (attr "type")
1260 (if_then_else (match_operand 2 "const_int_operand")
1261 (const_string "alu_imm")
1262 (const_string "alu_sreg")))]
1263 )
1264
1265 (define_insn "*compare_addsi2_op0"
1266 [(set (reg:CC_C CC_REGNUM)
1267 (compare:CC_C
1268 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1269 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1270 (match_dup 0)))]
1271 "TARGET_32BIT"
1272 "@
1273 cmn%?\\t%0, %1
1274 cmp%?\\t%0, #%n1
1275 cmn%?\\t%0, %1
1276 cmp%?\\t%0, #%n1"
1277 [(set_attr "conds" "set")
1278 (set_attr "predicable" "yes")
1279 (set_attr "arch" "t2,t2,*,*")
1280 (set_attr "predicable_short_it" "yes,yes,no,no")
1281 (set_attr "length" "2,2,4,4")
1282 (set (attr "type")
1283 (if_then_else (match_operand 1 "const_int_operand")
1284 (const_string "alu_imm")
1285 (const_string "alu_sreg")))]
1286 )
1287
1288 (define_insn "*compare_addsi2_op1"
1289 [(set (reg:CC_C CC_REGNUM)
1290 (compare:CC_C
1291 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1292 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1293 (match_dup 1)))]
1294 "TARGET_32BIT"
1295 "@
1296 cmn%?\\t%0, %1
1297 cmp%?\\t%0, #%n1
1298 cmn%?\\t%0, %1
1299 cmp%?\\t%0, #%n1"
1300 [(set_attr "conds" "set")
1301 (set_attr "predicable" "yes")
1302 (set_attr "arch" "t2,t2,*,*")
1303 (set_attr "predicable_short_it" "yes,yes,no,no")
1304 (set_attr "length" "2,2,4,4")
1305 (set (attr "type")
1306 (if_then_else (match_operand 1 "const_int_operand")
1307 (const_string "alu_imm")
1308 (const_string "alu_sreg")))]
1309 )
1310
1311 (define_insn "addsi3_carryin"
1312 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1313 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
1314 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
1315 (match_operand:SI 3 "arm_carry_operation" "")))]
1316 "TARGET_32BIT"
1317 "@
1318 adc%?\\t%0, %1, %2
1319 adc%?\\t%0, %1, %2
1320 sbc%?\\t%0, %1, #%B2"
1321 [(set_attr "conds" "use")
1322 (set_attr "predicable" "yes")
1323 (set_attr "arch" "t2,*,*")
1324 (set_attr "length" "4")
1325 (set_attr "predicable_short_it" "yes,no,no")
1326 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1327 )
1328
1329 ;; Canonicalization of the above when the immediate is zero.
1330 (define_insn "add0si3_carryin"
1331 [(set (match_operand:SI 0 "s_register_operand" "=r")
1332 (plus:SI (match_operand:SI 2 "arm_carry_operation" "")
1333 (match_operand:SI 1 "arm_not_operand" "r")))]
1334 "TARGET_32BIT"
1335 "adc%?\\t%0, %1, #0"
1336 [(set_attr "conds" "use")
1337 (set_attr "predicable" "yes")
1338 (set_attr "length" "4")
1339 (set_attr "type" "adc_imm")]
1340 )
1341
1342 (define_insn "*addsi3_carryin_alt2"
1343 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1344 (plus:SI (plus:SI (match_operand:SI 3 "arm_carry_operation" "")
1345 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
1346 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
1347 "TARGET_32BIT"
1348 "@
1349 adc%?\\t%0, %1, %2
1350 adc%?\\t%0, %1, %2
1351 sbc%?\\t%0, %1, #%B2"
1352 [(set_attr "conds" "use")
1353 (set_attr "predicable" "yes")
1354 (set_attr "arch" "t2,*,*")
1355 (set_attr "length" "4")
1356 (set_attr "predicable_short_it" "yes,no,no")
1357 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1358 )
1359
1360 (define_insn "*addsi3_carryin_shift"
1361 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1362 (plus:SI (plus:SI
1363 (match_operator:SI 2 "shift_operator"
1364 [(match_operand:SI 3 "s_register_operand" "r,r")
1365 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1366 (match_operand:SI 5 "arm_carry_operation" ""))
1367 (match_operand:SI 1 "s_register_operand" "r,r")))]
1368 "TARGET_32BIT"
1369 "adc%?\\t%0, %1, %3%S2"
1370 [(set_attr "conds" "use")
1371 (set_attr "arch" "32,a")
1372 (set_attr "shift" "3")
1373 (set_attr "predicable" "yes")
1374 (set_attr "autodetect_type" "alu_shift_operator2")]
1375 )
1376
1377 (define_insn "*addsi3_carryin_clobercc"
1378 [(set (match_operand:SI 0 "s_register_operand" "=r")
1379 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1380 (match_operand:SI 2 "arm_rhs_operand" "rI"))
1381 (match_operand:SI 3 "arm_carry_operation" "")))
1382 (clobber (reg:CC CC_REGNUM))]
1383 "TARGET_32BIT"
1384 "adcs%?\\t%0, %1, %2"
1385 [(set_attr "conds" "set")
1386 (set_attr "type" "adcs_reg")]
1387 )
1388
1389 (define_expand "subvsi4"
1390 [(match_operand:SI 0 "s_register_operand")
1391 (match_operand:SI 1 "arm_rhs_operand")
1392 (match_operand:SI 2 "arm_add_operand")
1393 (match_operand 3 "")]
1394 "TARGET_32BIT"
1395 {
1396 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1397 {
1398 /* If both operands are constants we can decide the result statically. */
1399 wi::overflow_type overflow;
1400 wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1401 rtx_mode_t (operands[2], SImode),
1402 SIGNED, &overflow);
1403 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1404 if (overflow != wi::OVF_NONE)
1405 emit_jump_insn (gen_jump (operands[3]));
1406 DONE;
1407 }
1408 else if (CONST_INT_P (operands[2]))
1409 {
1410 operands[2] = GEN_INT (-INTVAL (operands[2]));
1411 /* Special case for INT_MIN. */
1412 if (INTVAL (operands[2]) == 0x80000000)
1413 emit_insn (gen_subvsi3_intmin (operands[0], operands[1]));
1414 else
1415 emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1],
1416 operands[2]));
1417 }
1418 else if (CONST_INT_P (operands[1]))
1419 emit_insn (gen_subvsi3_imm1 (operands[0], operands[1], operands[2]));
1420 else
1421 emit_insn (gen_subvsi3 (operands[0], operands[1], operands[2]));
1422
1423 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
1424 DONE;
1425 })
1426
1427 (define_expand "subvdi4"
1428 [(match_operand:DI 0 "s_register_operand")
1429 (match_operand:DI 1 "reg_or_int_operand")
1430 (match_operand:DI 2 "reg_or_int_operand")
1431 (match_operand 3 "")]
1432 "TARGET_32BIT"
1433 {
1434 rtx lo_result, hi_result;
1435 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1436 lo_result = gen_lowpart (SImode, operands[0]);
1437 hi_result = gen_highpart (SImode, operands[0]);
1438 machine_mode mode = CCmode;
1439
1440 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1441 {
1442 /* If both operands are constants we can decide the result statically. */
1443 wi::overflow_type overflow;
1444 wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1445 rtx_mode_t (operands[2], DImode),
1446 SIGNED, &overflow);
1447 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1448 if (overflow != wi::OVF_NONE)
1449 emit_jump_insn (gen_jump (operands[3]));
1450 DONE;
1451 }
1452 else if (CONST_INT_P (operands[1]))
1453 {
1454 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1455 &lo_op1, &hi_op1);
1456 if (const_ok_for_arm (INTVAL (lo_op1)))
1457 {
1458 emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1459 GEN_INT (~UINTVAL (lo_op1))));
1460 /* We could potentially use RSC here in Arm state, but not
1461 in Thumb, so it's probably not worth the effort of handling
1462 this. */
1463 hi_op1 = force_reg (SImode, hi_op1);
1464 mode = CC_RSBmode;
1465 goto highpart;
1466 }
1467 operands[1] = force_reg (DImode, operands[1]);
1468 }
1469
1470 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1471 &lo_op2, &hi_op2);
1472 if (lo_op2 == const0_rtx)
1473 {
1474 emit_move_insn (lo_result, lo_op1);
1475 if (!arm_add_operand (hi_op2, SImode))
1476 hi_op2 = force_reg (SImode, hi_op2);
1477 emit_insn (gen_subvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1478 DONE;
1479 }
1480
1481 if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1482 lo_op2 = force_reg (SImode, lo_op2);
1483 if (CONST_INT_P (lo_op2))
1484 emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1485 gen_int_mode (-INTVAL (lo_op2), SImode)));
1486 else
1487 emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1488
1489 highpart:
1490 if (!arm_not_operand (hi_op2, SImode))
1491 hi_op2 = force_reg (SImode, hi_op2);
1492 rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1493 if (CONST_INT_P (hi_op2))
1494 emit_insn (gen_subvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1495 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1496 gen_rtx_LTU (DImode, ccreg,
1497 const0_rtx)));
1498 else
1499 emit_insn (gen_subvsi3_borrow (hi_result, hi_op1, hi_op2,
1500 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1501 gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1502 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
1503
1504 DONE;
1505 })
1506
1507 (define_expand "usubvsi4"
1508 [(match_operand:SI 0 "s_register_operand")
1509 (match_operand:SI 1 "arm_rhs_operand")
1510 (match_operand:SI 2 "arm_add_operand")
1511 (match_operand 3 "")]
1512 "TARGET_32BIT"
1513 {
1514 machine_mode mode = CCmode;
1515 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1516 {
1517 /* If both operands are constants we can decide the result statically. */
1518 wi::overflow_type overflow;
1519 wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1520 rtx_mode_t (operands[2], SImode),
1521 UNSIGNED, &overflow);
1522 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1523 if (overflow != wi::OVF_NONE)
1524 emit_jump_insn (gen_jump (operands[3]));
1525 DONE;
1526 }
1527 else if (CONST_INT_P (operands[2]))
1528 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
1529 gen_int_mode (-INTVAL (operands[2]),
1530 SImode)));
1531 else if (CONST_INT_P (operands[1]))
1532 {
1533 mode = CC_RSBmode;
1534 emit_insn (gen_rsb_imm_compare (operands[0], operands[1], operands[2],
1535 GEN_INT (~UINTVAL (operands[1]))));
1536 }
1537 else
1538 emit_insn (gen_subsi3_compare1 (operands[0], operands[1], operands[2]));
1539 arm_gen_unlikely_cbranch (LTU, mode, operands[3]);
1540
1541 DONE;
1542 })
1543
1544 (define_expand "usubvdi4"
1545 [(match_operand:DI 0 "s_register_operand")
1546 (match_operand:DI 1 "reg_or_int_operand")
1547 (match_operand:DI 2 "reg_or_int_operand")
1548 (match_operand 3 "")]
1549 "TARGET_32BIT"
1550 {
1551 rtx lo_result, hi_result;
1552 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1553 lo_result = gen_lowpart (SImode, operands[0]);
1554 hi_result = gen_highpart (SImode, operands[0]);
1555 machine_mode mode = CCmode;
1556
1557 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1558 {
1559 /* If both operands are constants we can decide the result statically. */
1560 wi::overflow_type overflow;
1561 wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1562 rtx_mode_t (operands[2], DImode),
1563 UNSIGNED, &overflow);
1564 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1565 if (overflow != wi::OVF_NONE)
1566 emit_jump_insn (gen_jump (operands[3]));
1567 DONE;
1568 }
1569 else if (CONST_INT_P (operands[1]))
1570 {
1571 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1572 &lo_op1, &hi_op1);
1573 if (const_ok_for_arm (INTVAL (lo_op1)))
1574 {
1575 emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1576 GEN_INT (~UINTVAL (lo_op1))));
1577 /* We could potentially use RSC here in Arm state, but not
1578 in Thumb, so it's probably not worth the effort of handling
1579 this. */
1580 hi_op1 = force_reg (SImode, hi_op1);
1581 mode = CC_RSBmode;
1582 goto highpart;
1583 }
1584 operands[1] = force_reg (DImode, operands[1]);
1585 }
1586
1587 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1588 &lo_op2, &hi_op2);
1589 if (lo_op2 == const0_rtx)
1590 {
1591 emit_move_insn (lo_result, lo_op1);
1592 if (!arm_add_operand (hi_op2, SImode))
1593 hi_op2 = force_reg (SImode, hi_op2);
1594 emit_insn (gen_usubvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1595 DONE;
1596 }
1597
1598 if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1599 lo_op2 = force_reg (SImode, lo_op2);
1600 if (CONST_INT_P (lo_op2))
1601 emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1602 gen_int_mode (-INTVAL (lo_op2), SImode)));
1603 else
1604 emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1605
1606 highpart:
1607 if (!arm_not_operand (hi_op2, SImode))
1608 hi_op2 = force_reg (SImode, hi_op2);
1609 rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1610 if (CONST_INT_P (hi_op2))
1611 emit_insn (gen_usubvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1612 GEN_INT (UINTVAL (hi_op2) & 0xffffffff),
1613 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1614 gen_rtx_LTU (DImode, ccreg,
1615 const0_rtx)));
1616 else
1617 emit_insn (gen_usubvsi3_borrow (hi_result, hi_op1, hi_op2,
1618 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1619 gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1620 arm_gen_unlikely_cbranch (LTU, CC_Bmode, operands[3]);
1621
1622 DONE;
1623 })
1624
1625 (define_insn "subsi3_compare1"
1626 [(set (reg:CC CC_REGNUM)
1627 (compare:CC
1628 (match_operand:SI 1 "register_operand" "r")
1629 (match_operand:SI 2 "register_operand" "r")))
1630 (set (match_operand:SI 0 "register_operand" "=r")
1631 (minus:SI (match_dup 1) (match_dup 2)))]
1632 "TARGET_32BIT"
1633 "subs%?\\t%0, %1, %2"
1634 [(set_attr "conds" "set")
1635 (set_attr "type" "alus_sreg")]
1636 )
1637
1638 (define_insn "subvsi3"
1639 [(set (reg:CC_V CC_REGNUM)
1640 (compare:CC_V
1641 (minus:DI
1642 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "l,r"))
1643 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
1644 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1645 (set (match_operand:SI 0 "s_register_operand" "=l,r")
1646 (minus:SI (match_dup 1) (match_dup 2)))]
1647 "TARGET_32BIT"
1648 "subs%?\\t%0, %1, %2"
1649 [(set_attr "conds" "set")
1650 (set_attr "arch" "t2,*")
1651 (set_attr "length" "2,4")
1652 (set_attr "type" "alus_sreg")]
1653 )
1654
1655 (define_insn "subvsi3_imm1"
1656 [(set (reg:CC_V CC_REGNUM)
1657 (compare:CC_V
1658 (minus:DI
1659 (match_operand 1 "arm_immediate_operand" "I")
1660 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1661 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1662 (set (match_operand:SI 0 "s_register_operand" "=r")
1663 (minus:SI (match_dup 1) (match_dup 2)))]
1664 "TARGET_32BIT"
1665 "rsbs%?\\t%0, %2, %1"
1666 [(set_attr "conds" "set")
1667 (set_attr "type" "alus_imm")]
1668 )
1669
1670 (define_insn "subsi3_carryin"
1671 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1672 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
1673 (match_operand:SI 2 "s_register_operand" "r,r,r"))
1674 (match_operand:SI 3 "arm_borrow_operation" "")))]
1675 "TARGET_32BIT"
1676 "@
1677 sbc%?\\t%0, %1, %2
1678 rsc%?\\t%0, %2, %1
1679 sbc%?\\t%0, %2, %2, lsl #1"
1680 [(set_attr "conds" "use")
1681 (set_attr "arch" "*,a,t2")
1682 (set_attr "predicable" "yes")
1683 (set_attr "type" "adc_reg,adc_imm,alu_shift_imm_lsl_1to4")]
1684 )
1685
1686 ;; Special canonicalization of the above when operand1 == (const_int 1):
1687 ;; in this case the 'borrow' needs to be treated as subtracting from the carry.
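;; As an illustrative identity (standard Arm flag semantics, not part of the
;; pattern): with C the carry flag and borrow = 1 - C,
;;   1 - Rm - borrow == (1 - borrow) - Rm == C - Rm
;; so RSC Rd, Rm, #1, which computes 1 - Rm - ~C, yields exactly carry - Rm.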
1688 (define_insn "rsbsi_carryin_reg"
1689 [(set (match_operand:SI 0 "s_register_operand" "=r")
1690 (minus:SI (match_operand:SI 1 "arm_carry_operation" "")
1691 (match_operand:SI 2 "s_register_operand" "r")))]
1692 "TARGET_ARM"
1693 "rsc%?\\t%0, %2, #1"
1694 [(set_attr "conds" "use")
1695 (set_attr "predicable" "yes")
1696 (set_attr "type" "adc_imm")]
1697 )
1698
1699 ;; SBC performs Rn - Rm - ~C, but -Rm = ~Rm + 1 => Rn + ~Rm + 1 - ~C
1700 ;; => Rn + ~Rm + C, which is essentially ADC Rd, Rn, ~Rm
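;; A minimal C check of this identity (illustrative only; assumes 32-bit
;; unsigned wrap-around arithmetic, with 'c' the carry flag, 0 or 1):
;;   #include <stdint.h>
;;   uint32_t via_sbc (uint32_t rn, uint32_t rm, uint32_t c)
;;   { return rn - rm - (1 - c); }            /* SBC: Rn - Rm - ~C */
;;   uint32_t via_adc_not (uint32_t rn, uint32_t rm, uint32_t c)
;;   { return rn + ~rm + c; }                 /* ADC Rd, Rn, ~Rm */
;; Both agree for all inputs modulo 2^32, which is why the pattern below can
;; emit SBC (or RSC) for the (plus (plus (not ...) carry) ...) form.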
1701 (define_insn "*add_not_cin"
1702 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1703 (plus:SI
1704 (plus:SI (not:SI (match_operand:SI 1 "s_register_operand" "r,r"))
1705 (match_operand:SI 3 "arm_carry_operation" ""))
1706 (match_operand:SI 2 "arm_rhs_operand" "r,I")))]
1707 "TARGET_ARM || (TARGET_THUMB2 && !CONST_INT_P (operands[2]))"
1708 "@
1709 sbc%?\\t%0, %2, %1
1710 rsc%?\\t%0, %1, %2"
1711 [(set_attr "conds" "use")
1712 (set_attr "predicable" "yes")
1713 (set_attr "arch" "*,a")
1714 (set_attr "type" "adc_reg,adc_imm")]
1715 )
1716
1717 ;; On Arm we can also use the same trick when the non-inverted operand is
1718 ;; shifted, using RSC.
1719 (define_insn "add_not_shift_cin"
1720 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1721 (plus:SI
1722 (plus:SI (match_operator:SI 3 "shift_operator"
1723 [(match_operand:SI 1 "s_register_operand" "r,r")
1724 (match_operand:SI 2 "shift_amount_operand" "M,r")])
1725 (not:SI (match_operand:SI 4 "s_register_operand" "r,r")))
1726 (match_operand:SI 5 "arm_carry_operation" "")))]
1727 "TARGET_ARM"
1728 "rsc%?\\t%0, %4, %1%S3"
1729 [(set_attr "conds" "use")
1730 (set_attr "predicable" "yes")
1731 (set_attr "autodetect_type" "alu_shift_operator3")]
1732 )
1733
1734 (define_insn "cmpsi3_carryin_<CC_EXTEND>out"
1735 [(set (reg:<CC_EXTEND> CC_REGNUM)
1736 (compare:<CC_EXTEND>
1737 (SE:DI (match_operand:SI 1 "s_register_operand" "0,r"))
1738 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1739 (SE:DI (match_operand:SI 2 "s_register_operand" "l,r")))))
1740 (clobber (match_scratch:SI 0 "=l,r"))]
1741 "TARGET_32BIT"
1742 "sbcs\\t%0, %1, %2"
1743 [(set_attr "conds" "set")
1744 (set_attr "arch" "t2,*")
1745 (set_attr "length" "2,4")
1746 (set_attr "type" "adc_reg")]
1747 )
1748
1749 ;; Similar to the above, but handling a constant which has a different
1750 ;; canonicalization.
1751 (define_insn "cmpsi3_imm_carryin_<CC_EXTEND>out"
1752 [(set (reg:<CC_EXTEND> CC_REGNUM)
1753 (compare:<CC_EXTEND>
1754 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1755 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1756 (match_operand:DI 2 "arm_adcimm_operand" "I,K"))))
1757 (clobber (match_scratch:SI 0 "=l,r"))]
1758 "TARGET_32BIT"
1759 "@
1760 sbcs\\t%0, %1, %2
1761 adcs\\t%0, %1, #%B2"
1762 [(set_attr "conds" "set")
1763 (set_attr "type" "adc_imm")]
1764 )
1765
1766 ;; Further canonicalization when the constant is zero.
1767 (define_insn "cmpsi3_0_carryin_<CC_EXTEND>out"
1768 [(set (reg:<CC_EXTEND> CC_REGNUM)
1769 (compare:<CC_EXTEND>
1770 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1771 (match_operand:DI 2 "arm_borrow_operation" "")))
1772 (clobber (match_scratch:SI 0 "=l,r"))]
1773 "TARGET_32BIT"
1774 "sbcs\\t%0, %1, #0"
1775 [(set_attr "conds" "set")
1776 (set_attr "type" "adc_imm")]
1777 )
1778
1779 (define_insn "*subsi3_carryin_const"
1780 [(set (match_operand:SI 0 "s_register_operand" "=r")
1781 (minus:SI (plus:SI
1782 (match_operand:SI 1 "s_register_operand" "r")
1783 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1784 (match_operand:SI 3 "arm_borrow_operation" "")))]
1785 "TARGET_32BIT"
1786 "sbc\\t%0, %1, #%n2"
1787 [(set_attr "conds" "use")
1788 (set_attr "type" "adc_imm")]
1789 )
1790
1791 (define_insn "*subsi3_carryin_const0"
1792 [(set (match_operand:SI 0 "s_register_operand" "=r")
1793 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1794 (match_operand:SI 2 "arm_borrow_operation" "")))]
1795 "TARGET_32BIT"
1796 "sbc\\t%0, %1, #0"
1797 [(set_attr "conds" "use")
1798 (set_attr "type" "adc_imm")]
1799 )
1800
1801 (define_insn "*subsi3_carryin_shift"
1802 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1803 (minus:SI (minus:SI
1804 (match_operand:SI 1 "s_register_operand" "r,r")
1805 (match_operator:SI 2 "shift_operator"
1806 [(match_operand:SI 3 "s_register_operand" "r,r")
1807 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
1808 (match_operand:SI 5 "arm_borrow_operation" "")))]
1809 "TARGET_32BIT"
1810 "sbc%?\\t%0, %1, %3%S2"
1811 [(set_attr "conds" "use")
1812 (set_attr "arch" "32,a")
1813 (set_attr "shift" "3")
1814 (set_attr "predicable" "yes")
1815 (set_attr "autodetect_type" "alu_shift_operator2")]
1816 )
1817
1818 (define_insn "*subsi3_carryin_shift_alt"
1819 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1820 (minus:SI (minus:SI
1821 (match_operand:SI 1 "s_register_operand" "r,r")
1822 (match_operand:SI 5 "arm_borrow_operation" ""))
1823 (match_operator:SI 2 "shift_operator"
1824 [(match_operand:SI 3 "s_register_operand" "r,r")
1825 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
1826 "TARGET_32BIT"
1827 "sbc%?\\t%0, %1, %3%S2"
1828 [(set_attr "conds" "use")
1829 (set_attr "arch" "32,a")
1830 (set_attr "shift" "3")
1831 (set_attr "predicable" "yes")
1832 (set_attr "autodetect_type" "alu_shift_operator2")]
1833 )
1834
1835 ;; No RSC in Thumb-2.
1836 (define_insn "*rsbsi3_carryin_shift"
1837 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1838 (minus:SI (minus:SI
1839 (match_operator:SI 2 "shift_operator"
1840 [(match_operand:SI 3 "s_register_operand" "r,r")
1841 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1842 (match_operand:SI 1 "s_register_operand" "r,r"))
1843 (match_operand:SI 5 "arm_borrow_operation" "")))]
1844 "TARGET_ARM"
1845 "rsc%?\\t%0, %1, %3%S2"
1846 [(set_attr "conds" "use")
1847 (set_attr "predicable" "yes")
1848 (set_attr "autodetect_type" "alu_shift_operator2")]
1849 )
1850
1851 (define_insn "*rsbsi3_carryin_shift_alt"
1852 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1853 (minus:SI (minus:SI
1854 (match_operator:SI 2 "shift_operator"
1855 [(match_operand:SI 3 "s_register_operand" "r,r")
1856 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1857 (match_operand:SI 5 "arm_borrow_operation" ""))
1858 (match_operand:SI 1 "s_register_operand" "r,r")))]
1859 "TARGET_ARM"
1860 "rsc%?\\t%0, %1, %3%S2"
1861 [(set_attr "conds" "use")
1862 (set_attr "predicable" "yes")
1863 (set_attr "autodetect_type" "alu_shift_operator2")]
1864 )
1865
1866 ; Transform ((x << y) - 1) into ~(~(x - 1) << y), where x is a constant.
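; A minimal C sketch of the identity behind this split (illustrative only;
; assumes 32-bit two's-complement wrap-around):
;   #include <stdint.h>
;   uint32_t lhs (uint32_t x, unsigned y) { return (x << y) - 1; }
;   uint32_t rhs (uint32_t x, unsigned y) { return ~(~(x - 1) << y); }
; For any x and 0 <= y < 32, lhs (x, y) == rhs (x, y), so the split below
; loads ~(x - 1) into the scratch register and finishes with one MVN of the
; shifted scratch.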
1867 (define_split
1868 [(set (match_operand:SI 0 "s_register_operand" "")
1869 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1870 (match_operand:SI 2 "s_register_operand" ""))
1871 (const_int -1)))
1872 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1873 "TARGET_32BIT"
1874 [(set (match_dup 3) (match_dup 1))
1875 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1876 "
1877 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1878 ")
1879
1880 (define_expand "addsf3"
1881 [(set (match_operand:SF 0 "s_register_operand")
1882 (plus:SF (match_operand:SF 1 "s_register_operand")
1883 (match_operand:SF 2 "s_register_operand")))]
1884 "TARGET_32BIT && TARGET_HARD_FLOAT"
1885 "
1886 ")
1887
1888 (define_expand "adddf3"
1889 [(set (match_operand:DF 0 "s_register_operand")
1890 (plus:DF (match_operand:DF 1 "s_register_operand")
1891 (match_operand:DF 2 "s_register_operand")))]
1892 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1893 "
1894 ")
1895
1896 (define_expand "subdi3"
1897 [(parallel
1898 [(set (match_operand:DI 0 "s_register_operand")
1899 (minus:DI (match_operand:DI 1 "reg_or_int_operand")
1900 (match_operand:DI 2 "s_register_operand")))
1901 (clobber (reg:CC CC_REGNUM))])]
1902 "TARGET_EITHER"
1903 "
1904 if (TARGET_THUMB1)
1905 {
1906 if (!REG_P (operands[1]))
1907 operands[1] = force_reg (DImode, operands[1]);
1908 }
1909 else
1910 {
1911 rtx lo_result, hi_result, lo_dest, hi_dest;
1912 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1913 rtx condition;
1914
1915 /* Since operands[1] may be an integer, pass it second, so that
1916 any necessary simplifications will be done on the decomposed
1917 constant. */
1918 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1919 &lo_op1, &hi_op1);
1920 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
1921 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
1922
1923 if (!arm_rhs_operand (lo_op1, SImode))
1924 lo_op1 = force_reg (SImode, lo_op1);
1925
1926 if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
1927 || !arm_rhs_operand (hi_op1, SImode))
1928 hi_op1 = force_reg (SImode, hi_op1);
1929
1930 rtx cc_reg;
1931 if (lo_op1 == const0_rtx)
1932 {
1933 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1934 emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
1935 }
1936 else if (CONST_INT_P (lo_op1))
1937 {
1938 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1939 emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
1940 GEN_INT (~UINTVAL (lo_op1))));
1941 }
1942 else
1943 {
1944 cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
1945 emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
1946 }
1947
1948 condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);
1949
1950 if (hi_op1 == const0_rtx)
1951 emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
1952 else
1953 emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));
1954
1955 if (lo_result != lo_dest)
1956 emit_move_insn (lo_result, lo_dest);
1957
1958 if (hi_result != hi_dest)
1959 emit_move_insn (hi_result, hi_dest);
1960
1961 DONE;
1962 }
1963 "
1964 )
1965
1966 (define_expand "subsi3"
1967 [(set (match_operand:SI 0 "s_register_operand")
1968 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1969 (match_operand:SI 2 "s_register_operand")))]
1970 "TARGET_EITHER"
1971 "
1972 if (CONST_INT_P (operands[1]))
1973 {
1974 if (TARGET_32BIT)
1975 {
1976 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1977 operands[1] = force_reg (SImode, operands[1]);
1978 else
1979 {
1980 arm_split_constant (MINUS, SImode, NULL_RTX,
1981 INTVAL (operands[1]), operands[0],
1982 operands[2],
1983 optimize && can_create_pseudo_p ());
1984 DONE;
1985 }
1986 }
1987 else /* TARGET_THUMB1 */
1988 operands[1] = force_reg (SImode, operands[1]);
1989 }
1990 "
1991 )
1992
1993 ; ??? Check Thumb-2 split length
1994 (define_insn_and_split "*arm_subsi3_insn"
1995 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
1996 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
1997 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
1998 "TARGET_32BIT"
1999 "@
2000 sub%?\\t%0, %1, %2
2001 sub%?\\t%0, %2
2002 sub%?\\t%0, %1, %2
2003 rsb%?\\t%0, %2, %1
2004 rsb%?\\t%0, %2, %1
2005 sub%?\\t%0, %1, %2
2006 sub%?\\t%0, %1, %2
2007 sub%?\\t%0, %1, %2
2008 #"
2009 "&& (CONST_INT_P (operands[1])
2010 && !const_ok_for_arm (INTVAL (operands[1])))"
2011 [(clobber (const_int 0))]
2012 "
2013 arm_split_constant (MINUS, SImode, curr_insn,
2014 INTVAL (operands[1]), operands[0], operands[2], 0);
2015 DONE;
2016 "
2017 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
2018 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
2019 (set_attr "predicable" "yes")
2020 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
2021 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
2022 )
2023
2024 (define_peephole2
2025 [(match_scratch:SI 3 "r")
2026 (set (match_operand:SI 0 "arm_general_register_operand" "")
2027 (minus:SI (match_operand:SI 1 "const_int_operand" "")
2028 (match_operand:SI 2 "arm_general_register_operand" "")))]
2029 "TARGET_32BIT
2030 && !const_ok_for_arm (INTVAL (operands[1]))
2031 && const_ok_for_arm (~INTVAL (operands[1]))"
2032 [(set (match_dup 3) (match_dup 1))
2033 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
2034 ""
2035 )
2036
2037 (define_insn "subsi3_compare0"
2038 [(set (reg:CC_NZ CC_REGNUM)
2039 (compare:CC_NZ
2040 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
2041 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
2042 (const_int 0)))
2043 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2044 (minus:SI (match_dup 1) (match_dup 2)))]
2045 "TARGET_32BIT"
2046 "@
2047 subs%?\\t%0, %1, %2
2048 subs%?\\t%0, %1, %2
2049 rsbs%?\\t%0, %2, %1"
2050 [(set_attr "conds" "set")
2051 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
2052 )
2053
2054 (define_insn "subsi3_compare"
2055 [(set (reg:CC CC_REGNUM)
2056 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
2057 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
2058 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2059 (minus:SI (match_dup 1) (match_dup 2)))]
2060 "TARGET_32BIT"
2061 "@
2062 subs%?\\t%0, %1, %2
2063 subs%?\\t%0, %1, %2
2064 rsbs%?\\t%0, %2, %1"
2065 [(set_attr "conds" "set")
2066 (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
2067 )
2068
2069 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
2070 ;; rather than (0 cmp reg). This gives the same results for unsigned
2071 ;; and equality compares, which is what we mostly need here.
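;; A minimal C illustration of the equivalence relied on here (illustrative
;; only; 32-bit unsigned arithmetic): ~a == ~b iff a == b, and ~a < ~b iff
;; b < a, so comparing ~reg with the complemented immediate answers the same
;; unsigned-order and equality questions as comparing the immediate with reg:
;;   #include <stdint.h>
;;   int ltu_via_not (uint32_t imm, uint32_t reg)
;;   { return (uint32_t) ~reg < (uint32_t) ~imm; }
;;   int ltu_direct (uint32_t imm, uint32_t reg) { return imm < reg; }
;; The two functions agree for all inputs.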
2072 (define_insn "rsb_imm_compare"
2073 [(set (reg:CC_RSB CC_REGNUM)
2074 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2075 (match_operand 3 "const_int_operand" "")))
2076 (set (match_operand:SI 0 "s_register_operand" "=r")
2077 (minus:SI (match_operand 1 "arm_immediate_operand" "I")
2078 (match_dup 2)))]
2079 "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
2080 "rsbs\\t%0, %2, %1"
2081 [(set_attr "conds" "set")
2082 (set_attr "type" "alus_imm")]
2083 )
2084
2085 ;; Similarly, but the result is unused.
2086 (define_insn "rsb_imm_compare_scratch"
2087 [(set (reg:CC_RSB CC_REGNUM)
2088 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2089 (match_operand 1 "arm_not_immediate_operand" "K")))
2090 (clobber (match_scratch:SI 0 "=r"))]
2091 "TARGET_32BIT"
2092 "rsbs\\t%0, %2, #%B1"
2093 [(set_attr "conds" "set")
2094 (set_attr "type" "alus_imm")]
2095 )
2096
2097 ;; Compare the sum of a value plus a carry against a constant. Uses
2098 ;; RSC, so the result is swapped. Only available in Arm state.
2099 (define_insn "rscsi3_<CC_EXTEND>out_scratch"
2100 [(set (reg:CC_SWP CC_REGNUM)
2101 (compare:CC_SWP
2102 (plus:DI (SE:DI (match_operand:SI 2 "s_register_operand" "r"))
2103 (match_operand:DI 3 "arm_borrow_operation" ""))
2104 (match_operand 1 "arm_immediate_operand" "I")))
2105 (clobber (match_scratch:SI 0 "=r"))]
2106 "TARGET_ARM"
2107 "rscs\\t%0, %2, %1"
2108 [(set_attr "conds" "set")
2109 (set_attr "type" "alus_imm")]
2110 )
2111
2112 (define_insn "usubvsi3_borrow"
2113 [(set (reg:CC_B CC_REGNUM)
2114 (compare:CC_B
2115 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
2116 (plus:DI (match_operand:DI 4 "arm_borrow_operation" "")
2117 (zero_extend:DI
2118 (match_operand:SI 2 "s_register_operand" "l,r")))))
2119 (set (match_operand:SI 0 "s_register_operand" "=l,r")
2120 (minus:SI (match_dup 1)
2121 (plus:SI (match_operand:SI 3 "arm_borrow_operation" "")
2122 (match_dup 2))))]
2123 "TARGET_32BIT"
2124 "sbcs%?\\t%0, %1, %2"
2125 [(set_attr "conds" "set")
2126 (set_attr "arch" "t2,*")
2127 (set_attr "length" "2,4")]
2128 )
2129
2130 (define_insn "usubvsi3_borrow_imm"
2131 [(set (reg:CC_B CC_REGNUM)
2132 (compare:CC_B
2133 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
2134 (plus:DI (match_operand:DI 5 "arm_borrow_operation" "")
2135 (match_operand:DI 3 "const_int_operand" "n,n"))))
2136 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2137 (minus:SI (match_dup 1)
2138 (plus:SI (match_operand:SI 4 "arm_borrow_operation" "")
2139 (match_operand:SI 2 "arm_adcimm_operand" "I,K"))))]
2140 "TARGET_32BIT
2141 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[3])"
2142 "@
2143 sbcs%?\\t%0, %1, %2
2144 adcs%?\\t%0, %1, #%B2"
2145 [(set_attr "conds" "set")
2146 (set_attr "type" "alus_imm")]
2147 )
2148
2149 (define_insn "subvsi3_borrow"
2150 [(set (reg:CC_V CC_REGNUM)
2151 (compare:CC_V
2152 (minus:DI
2153 (minus:DI
2154 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
2155 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
2156 (match_operand:DI 4 "arm_borrow_operation" ""))
2157 (sign_extend:DI
2158 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2159 (match_operand:SI 3 "arm_borrow_operation" "")))))
2160 (set (match_operand:SI 0 "s_register_operand" "=l,r")
2161 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2162 (match_dup 3)))]
2163 "TARGET_32BIT"
2164 "sbcs%?\\t%0, %1, %2"
2165 [(set_attr "conds" "set")
2166 (set_attr "arch" "t2,*")
2167 (set_attr "length" "2,4")]
2168 )
2169
2170 (define_insn "subvsi3_borrow_imm"
2171 [(set (reg:CC_V CC_REGNUM)
2172 (compare:CC_V
2173 (minus:DI
2174 (minus:DI
2175 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
2176 (match_operand 2 "arm_adcimm_operand" "I,K"))
2177 (match_operand:DI 4 "arm_borrow_operation" ""))
2178 (sign_extend:DI
2179 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2180 (match_operand:SI 3 "arm_borrow_operation" "")))))
2181 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2182 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2183 (match_dup 3)))]
2184 "TARGET_32BIT
2185 && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
2186 "@
2187 sbcs%?\\t%0, %1, %2
2188 adcs%?\\t%0, %1, #%B2"
2189 [(set_attr "conds" "set")
2190 (set_attr "type" "alus_imm")]
2191 )
2192
2193 (define_expand "subsf3"
2194 [(set (match_operand:SF 0 "s_register_operand")
2195 (minus:SF (match_operand:SF 1 "s_register_operand")
2196 (match_operand:SF 2 "s_register_operand")))]
2197 "TARGET_32BIT && TARGET_HARD_FLOAT"
2198 "
2199 ")
2200
2201 (define_expand "subdf3"
2202 [(set (match_operand:DF 0 "s_register_operand")
2203 (minus:DF (match_operand:DF 1 "s_register_operand")
2204 (match_operand:DF 2 "s_register_operand")))]
2205 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2206 "
2207 ")
2208
2209 \f
2210 ;; Multiplication insns
2211
2212 (define_expand "mulhi3"
2213 [(set (match_operand:HI 0 "s_register_operand")
2214 (mult:HI (match_operand:HI 1 "s_register_operand")
2215 (match_operand:HI 2 "s_register_operand")))]
2216 "TARGET_DSP_MULTIPLY"
2217 "
2218 {
2219 rtx result = gen_reg_rtx (SImode);
2220 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
2221 emit_move_insn (operands[0], gen_lowpart (HImode, result));
2222 DONE;
2223 }"
2224 )
2225
2226 (define_expand "mulsi3"
2227 [(set (match_operand:SI 0 "s_register_operand")
2228 (mult:SI (match_operand:SI 2 "s_register_operand")
2229 (match_operand:SI 1 "s_register_operand")))]
2230 "TARGET_EITHER"
2231 ""
2232 )
2233
2234 ;; Use `&' and then `0' to prevent operands 0 and 2 from being the same.
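;; (MUL on cores prior to ARMv6 requires the destination register to differ
;; from the first source register Rm, hence the early-clobber and tied-operand
;; alternatives for the "nov6" case; ARMv6 lifted that restriction.)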
2235 (define_insn "*mul"
2236 [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
2237 (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
2238 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
2239 "TARGET_32BIT"
2240 "mul%?\\t%0, %2, %1"
2241 [(set_attr "type" "mul")
2242 (set_attr "predicable" "yes")
2243 (set_attr "arch" "t2,v6,nov6,nov6")
2244 (set_attr "length" "4")
2245 (set_attr "predicable_short_it" "yes,no,*,*")]
2246 )
2247
2248 ;; MLA and MLS instructions. Use operand 1 for the accumulator to prefer
2249 ;; reusing the same register.
2250
2251 (define_insn "*mla"
2252 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
2253 (plus:SI
2254 (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
2255 (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
2256 (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
2257 "TARGET_32BIT"
2258 "mla%?\\t%0, %3, %2, %1"
2259 [(set_attr "type" "mla")
2260 (set_attr "predicable" "yes")
2261 (set_attr "arch" "v6,nov6,nov6,nov6")]
2262 )
2263
2264 (define_insn "*mls"
2265 [(set (match_operand:SI 0 "s_register_operand" "=r")
2266 (minus:SI
2267 (match_operand:SI 1 "s_register_operand" "r")
2268 (mult:SI (match_operand:SI 3 "s_register_operand" "r")
2269 (match_operand:SI 2 "s_register_operand" "r"))))]
2270 "TARGET_32BIT && arm_arch_thumb2"
2271 "mls%?\\t%0, %3, %2, %1"
2272 [(set_attr "type" "mla")
2273 (set_attr "predicable" "yes")]
2274 )
2275
2276 (define_insn "*mulsi3_compare0"
2277 [(set (reg:CC_NZ CC_REGNUM)
2278 (compare:CC_NZ (mult:SI
2279 (match_operand:SI 2 "s_register_operand" "r,r")
2280 (match_operand:SI 1 "s_register_operand" "%0,r"))
2281 (const_int 0)))
2282 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
2283 (mult:SI (match_dup 2) (match_dup 1)))]
2284 "TARGET_ARM && !arm_arch6"
2285 "muls%?\\t%0, %2, %1"
2286 [(set_attr "conds" "set")
2287 (set_attr "type" "muls")]
2288 )
2289
2290 (define_insn "*mulsi3_compare0_v6"
2291 [(set (reg:CC_NZ CC_REGNUM)
2292 (compare:CC_NZ (mult:SI
2293 (match_operand:SI 2 "s_register_operand" "r")
2294 (match_operand:SI 1 "s_register_operand" "r"))
2295 (const_int 0)))
2296 (set (match_operand:SI 0 "s_register_operand" "=r")
2297 (mult:SI (match_dup 2) (match_dup 1)))]
2298 "TARGET_ARM && arm_arch6 && optimize_size"
2299 "muls%?\\t%0, %2, %1"
2300 [(set_attr "conds" "set")
2301 (set_attr "type" "muls")]
2302 )
2303
2304 (define_insn "*mulsi_compare0_scratch"
2305 [(set (reg:CC_NZ CC_REGNUM)
2306 (compare:CC_NZ (mult:SI
2307 (match_operand:SI 2 "s_register_operand" "r,r")
2308 (match_operand:SI 1 "s_register_operand" "%0,r"))
2309 (const_int 0)))
2310 (clobber (match_scratch:SI 0 "=&r,&r"))]
2311 "TARGET_ARM && !arm_arch6"
2312 "muls%?\\t%0, %2, %1"
2313 [(set_attr "conds" "set")
2314 (set_attr "type" "muls")]
2315 )
2316
2317 (define_insn "*mulsi_compare0_scratch_v6"
2318 [(set (reg:CC_NZ CC_REGNUM)
2319 (compare:CC_NZ (mult:SI
2320 (match_operand:SI 2 "s_register_operand" "r")
2321 (match_operand:SI 1 "s_register_operand" "r"))
2322 (const_int 0)))
2323 (clobber (match_scratch:SI 0 "=r"))]
2324 "TARGET_ARM && arm_arch6 && optimize_size"
2325 "muls%?\\t%0, %2, %1"
2326 [(set_attr "conds" "set")
2327 (set_attr "type" "muls")]
2328 )
2329
2330 (define_insn "*mulsi3addsi_compare0"
2331 [(set (reg:CC_NZ CC_REGNUM)
2332 (compare:CC_NZ
2333 (plus:SI (mult:SI
2334 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2335 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2336 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
2337 (const_int 0)))
2338 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
2339 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2340 (match_dup 3)))]
2341 "TARGET_ARM && arm_arch6"
2342 "mlas%?\\t%0, %2, %1, %3"
2343 [(set_attr "conds" "set")
2344 (set_attr "type" "mlas")]
2345 )
2346
2347 (define_insn "*mulsi3addsi_compare0_v6"
2348 [(set (reg:CC_NZ CC_REGNUM)
2349 (compare:CC_NZ
2350 (plus:SI (mult:SI
2351 (match_operand:SI 2 "s_register_operand" "r")
2352 (match_operand:SI 1 "s_register_operand" "r"))
2353 (match_operand:SI 3 "s_register_operand" "r"))
2354 (const_int 0)))
2355 (set (match_operand:SI 0 "s_register_operand" "=r")
2356 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2357 (match_dup 3)))]
2358 "TARGET_ARM && arm_arch6 && optimize_size"
2359 "mlas%?\\t%0, %2, %1, %3"
2360 [(set_attr "conds" "set")
2361 (set_attr "type" "mlas")]
2362 )
2363
2364 (define_insn "*mulsi3addsi_compare0_scratch"
2365 [(set (reg:CC_NZ CC_REGNUM)
2366 (compare:CC_NZ
2367 (plus:SI (mult:SI
2368 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2369 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2370 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
2371 (const_int 0)))
2372 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
2373 "TARGET_ARM && !arm_arch6"
2374 "mlas%?\\t%0, %2, %1, %3"
2375 [(set_attr "conds" "set")
2376 (set_attr "type" "mlas")]
2377 )
2378
2379 (define_insn "*mulsi3addsi_compare0_scratch_v6"
2380 [(set (reg:CC_NZ CC_REGNUM)
2381 (compare:CC_NZ
2382 (plus:SI (mult:SI
2383 (match_operand:SI 2 "s_register_operand" "r")
2384 (match_operand:SI 1 "s_register_operand" "r"))
2385 (match_operand:SI 3 "s_register_operand" "r"))
2386 (const_int 0)))
2387 (clobber (match_scratch:SI 0 "=r"))]
2388 "TARGET_ARM && arm_arch6 && optimize_size"
2389 "mlas%?\\t%0, %2, %1, %3"
2390 [(set_attr "conds" "set")
2391 (set_attr "type" "mlas")]
2392 )
2393
2394 ;; 32x32->64 widening multiply.
2395 ;; The only difference between the v3-5 and v6+ versions is the requirement
2396 ;; that the output does not overlap with either input.
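;; A minimal C sketch of what these expanders compute (illustrative only):
;;   #include <stdint.h>
;;   uint64_t umulsidi3 (uint32_t a, uint32_t b) { return (uint64_t) a * b; }
;;   int64_t smulsidi3 (int32_t a, int32_t b) { return (int64_t) a * b; }
;; The expander below simply passes the low and high SImode halves of the
;; DImode destination to the <US>mull pattern (UMULL/SMULL).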
2397
2398 (define_expand "<Us>mulsidi3"
2399 [(set (match_operand:DI 0 "s_register_operand")
2400 (mult:DI
2401 (SE:DI (match_operand:SI 1 "s_register_operand"))
2402 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
2403 "TARGET_32BIT"
2404 {
2405 emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
2406 gen_highpart (SImode, operands[0]),
2407 operands[1], operands[2]));
2408 DONE;
2409 }
2410 )
2411
2412 (define_insn "<US>mull"
2413 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2414 (mult:SI
2415 (match_operand:SI 2 "s_register_operand" "%r,r")
2416 (match_operand:SI 3 "s_register_operand" "r,r")))
2417 (set (match_operand:SI 1 "s_register_operand" "=r,&r")
2418 (truncate:SI
2419 (lshiftrt:DI
2420 (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
2421 (const_int 32))))]
2422 "TARGET_32BIT"
2423 "<US>mull%?\\t%0, %1, %2, %3"
2424 [(set_attr "type" "umull")
2425 (set_attr "predicable" "yes")
2426 (set_attr "arch" "v6,nov6")]
2427 )
2428
2429 (define_expand "<Us>maddsidi4"
2430 [(set (match_operand:DI 0 "s_register_operand")
2431 (plus:DI
2432 (mult:DI
2433 (SE:DI (match_operand:SI 1 "s_register_operand"))
2434 (SE:DI (match_operand:SI 2 "s_register_operand")))
2435 (match_operand:DI 3 "s_register_operand")))]
2436 "TARGET_32BIT"
2437 {
2438 emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
2439 gen_lowpart (SImode, operands[3]),
2440 gen_highpart (SImode, operands[0]),
2441 gen_highpart (SImode, operands[3]),
2442 operands[1], operands[2]));
2443 DONE;
2444 }
2445 )
2446
2447 (define_insn "<US>mlal"
2448 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2449 (plus:SI
2450 (mult:SI
2451 (match_operand:SI 4 "s_register_operand" "%r,r")
2452 (match_operand:SI 5 "s_register_operand" "r,r"))
2453 (match_operand:SI 1 "s_register_operand" "0,0")))
2454 (set (match_operand:SI 2 "s_register_operand" "=r,&r")
2455 (plus:SI
2456 (truncate:SI
2457 (lshiftrt:DI
2458 (plus:DI
2459 (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
2460 (zero_extend:DI (match_dup 1)))
2461 (const_int 32)))
2462 (match_operand:SI 3 "s_register_operand" "2,2")))]
2463 "TARGET_32BIT"
2464 "<US>mlal%?\\t%0, %2, %4, %5"
2465 [(set_attr "type" "umlal")
2466 (set_attr "predicable" "yes")
2467 (set_attr "arch" "v6,nov6")]
2468 )
2469
2470 (define_expand "<US>mulsi3_highpart"
2471 [(parallel
2472 [(set (match_operand:SI 0 "s_register_operand")
2473 (truncate:SI
2474 (lshiftrt:DI
2475 (mult:DI
2476 (SE:DI (match_operand:SI 1 "s_register_operand"))
2477 (SE:DI (match_operand:SI 2 "s_register_operand")))
2478 (const_int 32))))
2479 (clobber (match_scratch:SI 3 ""))])]
2480 "TARGET_32BIT"
2481 ""
2482 )
2483
2484 (define_insn "*<US>mull_high"
2485 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
2486 (truncate:SI
2487 (lshiftrt:DI
2488 (mult:DI
2489 (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
2490 (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
2491 (const_int 32))))
2492 (clobber (match_scratch:SI 3 "=r,&r,&r"))]
2493 "TARGET_32BIT"
2494 "<US>mull%?\\t%3, %0, %2, %1"
2495 [(set_attr "type" "umull")
2496 (set_attr "predicable" "yes")
2497 (set_attr "arch" "v6,nov6,nov6")]
2498 )
2499
2500 (define_insn "mulhisi3"
2501 [(set (match_operand:SI 0 "s_register_operand" "=r")
2502 (mult:SI (sign_extend:SI
2503 (match_operand:HI 1 "s_register_operand" "%r"))
2504 (sign_extend:SI
2505 (match_operand:HI 2 "s_register_operand" "r"))))]
2506 "TARGET_DSP_MULTIPLY"
2507 "smulbb%?\\t%0, %1, %2"
2508 [(set_attr "type" "smulxy")
2509 (set_attr "predicable" "yes")]
2510 )
2511
2512 (define_insn "*mulhisi3tb"
2513 [(set (match_operand:SI 0 "s_register_operand" "=r")
2514 (mult:SI (ashiftrt:SI
2515 (match_operand:SI 1 "s_register_operand" "r")
2516 (const_int 16))
2517 (sign_extend:SI
2518 (match_operand:HI 2 "s_register_operand" "r"))))]
2519 "TARGET_DSP_MULTIPLY"
2520 "smultb%?\\t%0, %1, %2"
2521 [(set_attr "type" "smulxy")
2522 (set_attr "predicable" "yes")]
2523 )
2524
2525 (define_insn "*mulhisi3bt"
2526 [(set (match_operand:SI 0 "s_register_operand" "=r")
2527 (mult:SI (sign_extend:SI
2528 (match_operand:HI 1 "s_register_operand" "r"))
2529 (ashiftrt:SI
2530 (match_operand:SI 2 "s_register_operand" "r")
2531 (const_int 16))))]
2532 "TARGET_DSP_MULTIPLY"
2533 "smulbt%?\\t%0, %1, %2"
2534 [(set_attr "type" "smulxy")
2535 (set_attr "predicable" "yes")]
2536 )
2537
2538 (define_insn "*mulhisi3tt"
2539 [(set (match_operand:SI 0 "s_register_operand" "=r")
2540 (mult:SI (ashiftrt:SI
2541 (match_operand:SI 1 "s_register_operand" "r")
2542 (const_int 16))
2543 (ashiftrt:SI
2544 (match_operand:SI 2 "s_register_operand" "r")
2545 (const_int 16))))]
2546 "TARGET_DSP_MULTIPLY"
2547 "smultt%?\\t%0, %1, %2"
2548 [(set_attr "type" "smulxy")
2549 (set_attr "predicable" "yes")]
2550 )
2551
2552 (define_expand "maddhisi4"
2553 [(set (match_operand:SI 0 "s_register_operand")
2554 (plus:SI (mult:SI (sign_extend:SI
2555 (match_operand:HI 1 "s_register_operand"))
2556 (sign_extend:SI
2557 (match_operand:HI 2 "s_register_operand")))
2558 (match_operand:SI 3 "s_register_operand")))]
2559 "TARGET_DSP_MULTIPLY"
2560 {
2561 /* If this function reads the Q bit from ACLE intrinsics, break up the
2562 multiplication and accumulation as an overflow during accumulation will
2563 clobber the Q flag. */
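/* Illustrative sketch (not the emitted RTL) of the split sequence:
     tmp = (int32_t) (int16_t) a * (int16_t) b;  SMULBB: the 16x16 product
                                                 cannot overflow, Q untouched
     dst = tmp + acc;                            plain ADD, Q untouched
   A single SMLABB, by contrast, sets Q if the accumulation overflows.  */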
2564 if (ARM_Q_BIT_READ)
2565 {
2566 rtx tmp = gen_reg_rtx (SImode);
2567 emit_insn (gen_mulhisi3 (tmp, operands[1], operands[2]));
2568 emit_insn (gen_addsi3 (operands[0], tmp, operands[3]));
2569 DONE;
2570 }
2571 }
2572 )
2573
2574 (define_insn "*arm_maddhisi4"
2575 [(set (match_operand:SI 0 "s_register_operand" "=r")
2576 (plus:SI (mult:SI (sign_extend:SI
2577 (match_operand:HI 1 "s_register_operand" "r"))
2578 (sign_extend:SI
2579 (match_operand:HI 2 "s_register_operand" "r")))
2580 (match_operand:SI 3 "s_register_operand" "r")))]
2581 "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2582 "smlabb%?\\t%0, %1, %2, %3"
2583 [(set_attr "type" "smlaxy")
2584 (set_attr "predicable" "yes")]
2585 )
2586
2587 (define_insn "arm_smlabb_setq"
2588 [(set (match_operand:SI 0 "s_register_operand" "=r")
2589 (plus:SI (mult:SI (sign_extend:SI
2590 (match_operand:HI 1 "s_register_operand" "r"))
2591 (sign_extend:SI
2592 (match_operand:HI 2 "s_register_operand" "r")))
2593 (match_operand:SI 3 "s_register_operand" "r")))
2594 (set (reg:CC APSRQ_REGNUM)
2595 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2596 "TARGET_DSP_MULTIPLY"
2597 "smlabb%?\\t%0, %1, %2, %3"
2598 [(set_attr "type" "smlaxy")
2599 (set_attr "predicable" "yes")]
2600 )
2601
2602 (define_expand "arm_smlabb"
2603 [(match_operand:SI 0 "s_register_operand")
2604 (match_operand:SI 1 "s_register_operand")
2605 (match_operand:SI 2 "s_register_operand")
2606 (match_operand:SI 3 "s_register_operand")]
2607 "TARGET_DSP_MULTIPLY"
2608 {
2609 rtx mult1 = gen_lowpart (HImode, operands[1]);
2610 rtx mult2 = gen_lowpart (HImode, operands[2]);
2611 if (ARM_Q_BIT_READ)
2612 emit_insn (gen_arm_smlabb_setq (operands[0], mult1, mult2, operands[3]));
2613 else
2614 emit_insn (gen_maddhisi4 (operands[0], mult1, mult2, operands[3]));
2615 DONE;
2616 }
2617 )
2618
2619 ;; Note: there is no maddhisi4ibt because this one is the canonical form.
2620 (define_insn "maddhisi4tb"
2621 [(set (match_operand:SI 0 "s_register_operand" "=r")
2622 (plus:SI (mult:SI (ashiftrt:SI
2623 (match_operand:SI 1 "s_register_operand" "r")
2624 (const_int 16))
2625 (sign_extend:SI
2626 (match_operand:HI 2 "s_register_operand" "r")))
2627 (match_operand:SI 3 "s_register_operand" "r")))]
2628 "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2629 "smlatb%?\\t%0, %1, %2, %3"
2630 [(set_attr "type" "smlaxy")
2631 (set_attr "predicable" "yes")]
2632 )
2633
2634 (define_insn "arm_smlatb_setq"
2635 [(set (match_operand:SI 0 "s_register_operand" "=r")
2636 (plus:SI (mult:SI (ashiftrt:SI
2637 (match_operand:SI 1 "s_register_operand" "r")
2638 (const_int 16))
2639 (sign_extend:SI
2640 (match_operand:HI 2 "s_register_operand" "r")))
2641 (match_operand:SI 3 "s_register_operand" "r")))
2642 (set (reg:CC APSRQ_REGNUM)
2643 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2644 "TARGET_DSP_MULTIPLY"
2645 "smlatb%?\\t%0, %1, %2, %3"
2646 [(set_attr "type" "smlaxy")
2647 (set_attr "predicable" "yes")]
2648 )
2649
2650 (define_expand "arm_smlatb"
2651 [(match_operand:SI 0 "s_register_operand")
2652 (match_operand:SI 1 "s_register_operand")
2653 (match_operand:SI 2 "s_register_operand")
2654 (match_operand:SI 3 "s_register_operand")]
2655 "TARGET_DSP_MULTIPLY"
2656 {
2657 rtx mult2 = gen_lowpart (HImode, operands[2]);
2658 if (ARM_Q_BIT_READ)
2659 emit_insn (gen_arm_smlatb_setq (operands[0], operands[1],
2660 mult2, operands[3]));
2661 else
2662 emit_insn (gen_maddhisi4tb (operands[0], operands[1],
2663 mult2, operands[3]));
2664 DONE;
2665 }
2666 )
2667
2668 (define_insn "maddhisi4tt"
2669 [(set (match_operand:SI 0 "s_register_operand" "=r")
2670 (plus:SI (mult:SI (ashiftrt:SI
2671 (match_operand:SI 1 "s_register_operand" "r")
2672 (const_int 16))
2673 (ashiftrt:SI
2674 (match_operand:SI 2 "s_register_operand" "r")
2675 (const_int 16)))
2676 (match_operand:SI 3 "s_register_operand" "r")))]
2677 "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2678 "smlatt%?\\t%0, %1, %2, %3"
2679 [(set_attr "type" "smlaxy")
2680 (set_attr "predicable" "yes")]
2681 )
2682
2683 (define_insn "arm_smlatt_setq"
2684 [(set (match_operand:SI 0 "s_register_operand" "=r")
2685 (plus:SI (mult:SI (ashiftrt:SI
2686 (match_operand:SI 1 "s_register_operand" "r")
2687 (const_int 16))
2688 (ashiftrt:SI
2689 (match_operand:SI 2 "s_register_operand" "r")
2690 (const_int 16)))
2691 (match_operand:SI 3 "s_register_operand" "r")))
2692 (set (reg:CC APSRQ_REGNUM)
2693 (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2694 "TARGET_DSP_MULTIPLY"
2695 "smlatt%?\\t%0, %1, %2, %3"
2696 [(set_attr "type" "smlaxy")
2697 (set_attr "predicable" "yes")]
2698 )
2699
2700 (define_expand "arm_smlatt"
2701 [(match_operand:SI 0 "s_register_operand")
2702 (match_operand:SI 1 "s_register_operand")
2703 (match_operand:SI 2 "s_register_operand")
2704 (match_operand:SI 3 "s_register_operand")]
2705 "TARGET_DSP_MULTIPLY"
2706 {
2707 if (ARM_Q_BIT_READ)
2708 emit_insn (gen_arm_smlatt_setq (operands[0], operands[1],
2709 operands[2], operands[3]));
2710 else
2711 emit_insn (gen_maddhisi4tt (operands[0], operands[1],
2712 operands[2], operands[3]));
2713 DONE;
2714 }
2715 )
2716
2717 (define_insn "maddhidi4"
2718 [(set (match_operand:DI 0 "s_register_operand" "=r")
2719 (plus:DI
2720 (mult:DI (sign_extend:DI
2721 (match_operand:HI 1 "s_register_operand" "r"))
2722 (sign_extend:DI
2723 (match_operand:HI 2 "s_register_operand" "r")))
2724 (match_operand:DI 3 "s_register_operand" "0")))]
2725 "TARGET_DSP_MULTIPLY"
2726 "smlalbb%?\\t%Q0, %R0, %1, %2"
2727 [(set_attr "type" "smlalxy")
2728 (set_attr "predicable" "yes")])
2729
2730 ;; Note: there is no maddhidi4ibt because this one is the canonical form.
2731 (define_insn "*maddhidi4tb"
2732 [(set (match_operand:DI 0 "s_register_operand" "=r")
2733 (plus:DI
2734 (mult:DI (sign_extend:DI
2735 (ashiftrt:SI
2736 (match_operand:SI 1 "s_register_operand" "r")
2737 (const_int 16)))
2738 (sign_extend:DI
2739 (match_operand:HI 2 "s_register_operand" "r")))
2740 (match_operand:DI 3 "s_register_operand" "0")))]
2741 "TARGET_DSP_MULTIPLY"
2742 "smlaltb%?\\t%Q0, %R0, %1, %2"
2743 [(set_attr "type" "smlalxy")
2744 (set_attr "predicable" "yes")])
2745
2746 (define_insn "*maddhidi4tt"
2747 [(set (match_operand:DI 0 "s_register_operand" "=r")
2748 (plus:DI
2749 (mult:DI (sign_extend:DI
2750 (ashiftrt:SI
2751 (match_operand:SI 1 "s_register_operand" "r")
2752 (const_int 16)))
2753 (sign_extend:DI
2754 (ashiftrt:SI
2755 (match_operand:SI 2 "s_register_operand" "r")
2756 (const_int 16))))
2757 (match_operand:DI 3 "s_register_operand" "0")))]
2758 "TARGET_DSP_MULTIPLY"
2759 "smlaltt%?\\t%Q0, %R0, %1, %2"
2760 [(set_attr "type" "smlalxy")
2761 (set_attr "predicable" "yes")])
2762
2763 (define_insn "arm_<smlaw_op><add_clobber_q_name>_insn"
2764 [(set (match_operand:SI 0 "s_register_operand" "=r")
2765 (unspec:SI
2766 [(match_operand:SI 1 "s_register_operand" "r")
2767 (match_operand:SI 2 "s_register_operand" "r")
2768 (match_operand:SI 3 "s_register_operand" "r")]
2769 SMLAWBT))]
2770 "TARGET_DSP_MULTIPLY && <add_clobber_q_pred>"
2771 "<smlaw_op>%?\\t%0, %1, %2, %3"
2772 [(set_attr "type" "smlaxy")
2773 (set_attr "predicable" "yes")]
2774 )
2775
2776 (define_expand "arm_<smlaw_op>"
2777 [(set (match_operand:SI 0 "s_register_operand")
2778 (unspec:SI
2779 [(match_operand:SI 1 "s_register_operand")
2780 (match_operand:SI 2 "s_register_operand")
2781 (match_operand:SI 3 "s_register_operand")]
2782 SMLAWBT))]
2783 "TARGET_DSP_MULTIPLY"
2784 {
2785 if (ARM_Q_BIT_READ)
2786 emit_insn (gen_arm_<smlaw_op>_setq_insn (operands[0], operands[1],
2787 operands[2], operands[3]));
2788 else
2789 emit_insn (gen_arm_<smlaw_op>_insn (operands[0], operands[1],
2790 operands[2], operands[3]));
2791 DONE;
2792 }
2793 )
2794
2795 (define_expand "mulsf3"
2796 [(set (match_operand:SF 0 "s_register_operand")
2797 (mult:SF (match_operand:SF 1 "s_register_operand")
2798 (match_operand:SF 2 "s_register_operand")))]
2799 "TARGET_32BIT && TARGET_HARD_FLOAT"
2800 "
2801 ")
2802
2803 (define_expand "muldf3"
2804 [(set (match_operand:DF 0 "s_register_operand")
2805 (mult:DF (match_operand:DF 1 "s_register_operand")
2806 (match_operand:DF 2 "s_register_operand")))]
2807 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2808 "
2809 ")
2810 \f
2811 ;; Division insns
2812
2813 (define_expand "divsf3"
2814 [(set (match_operand:SF 0 "s_register_operand")
2815 (div:SF (match_operand:SF 1 "s_register_operand")
2816 (match_operand:SF 2 "s_register_operand")))]
2817 "TARGET_32BIT && TARGET_HARD_FLOAT"
2818 "")
2819
2820 (define_expand "divdf3"
2821 [(set (match_operand:DF 0 "s_register_operand")
2822 (div:DF (match_operand:DF 1 "s_register_operand")
2823 (match_operand:DF 2 "s_register_operand")))]
2824 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
2825 "")
2826 \f
2827
2828 ; Expand logical operations. The mid-end expander does not split off memory
2829 ; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
2830 ; So an explicit expander is needed to generate better code.
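; A minimal C sketch of the expansion strategy (illustrative only; shown for
; AND, with IOR and XOR handled identically):
;   #include <stdint.h>
;   uint64_t anddi3 (uint64_t a, uint64_t b)
;   {
;     uint32_t lo = (uint32_t) a & (uint32_t) b;
;     uint32_t hi = (uint32_t) (a >> 32) & (uint32_t) (b >> 32);
;     return ((uint64_t) hi << 32) | lo;
;   }
; Each half is built with simplify_gen_binary, so constant halves of
; operand 2 are simplified where possible.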
2831
2832 (define_expand "<LOGICAL:optab>di3"
2833 [(set (match_operand:DI 0 "s_register_operand")
2834 (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
2835 (match_operand:DI 2 "arm_<optab>di_operand")))]
2836 "TARGET_32BIT"
2837 {
2838 rtx low = simplify_gen_binary (<CODE>, SImode,
2839 gen_lowpart (SImode, operands[1]),
2840 gen_lowpart (SImode, operands[2]));
2841 rtx high = simplify_gen_binary (<CODE>, SImode,
2842 gen_highpart (SImode, operands[1]),
2843 gen_highpart_mode (SImode, DImode,
2844 operands[2]));
2845
2846 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2847 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2848 DONE;
2849 }
2850 )
2851
2852 (define_expand "one_cmpldi2"
2853 [(set (match_operand:DI 0 "s_register_operand")
2854 (not:DI (match_operand:DI 1 "s_register_operand")))]
2855 "TARGET_32BIT"
2856 {
2857 rtx low = simplify_gen_unary (NOT, SImode,
2858 gen_lowpart (SImode, operands[1]),
2859 SImode);
2860 rtx high = simplify_gen_unary (NOT, SImode,
2861 gen_highpart_mode (SImode, DImode,
2862 operands[1]),
2863 SImode);
2864
2865 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2866 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2867 DONE;
2868 }
2869 )
2870
2871 ;; Split DImode and, ior, xor operations. Simply perform the logical
2872 ;; operation on the upper and lower halves of the registers.
2873 ;; This is needed for atomic operations in arm_split_atomic_op.
2874 ;; Avoid splitting IWMMXT instructions.
2875 (define_split
2876 [(set (match_operand:DI 0 "s_register_operand" "")
2877 (match_operator:DI 6 "logical_binary_operator"
2878 [(match_operand:DI 1 "s_register_operand" "")
2879 (match_operand:DI 2 "s_register_operand" "")]))]
2880 "TARGET_32BIT && reload_completed
2881 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2882 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2883 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
2884 "
2885 {
2886 operands[3] = gen_highpart (SImode, operands[0]);
2887 operands[0] = gen_lowpart (SImode, operands[0]);
2888 operands[4] = gen_highpart (SImode, operands[1]);
2889 operands[1] = gen_lowpart (SImode, operands[1]);
2890 operands[5] = gen_highpart (SImode, operands[2]);
2891 operands[2] = gen_lowpart (SImode, operands[2]);
2892 }"
2893 )
2894
2895 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
2896 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
2897 (define_split
2898 [(set (match_operand:DI 0 "s_register_operand")
2899 (not:DI (match_operand:DI 1 "s_register_operand")))]
2900 "TARGET_32BIT"
2901 [(set (match_dup 0) (not:SI (match_dup 1)))
2902 (set (match_dup 2) (not:SI (match_dup 3)))]
2903 "
2904 {
2905 operands[2] = gen_highpart (SImode, operands[0]);
2906 operands[0] = gen_lowpart (SImode, operands[0]);
2907 operands[3] = gen_highpart (SImode, operands[1]);
2908 operands[1] = gen_lowpart (SImode, operands[1]);
2909 }"
2910 )
2911
2912 (define_expand "andsi3"
2913 [(set (match_operand:SI 0 "s_register_operand")
2914 (and:SI (match_operand:SI 1 "s_register_operand")
2915 (match_operand:SI 2 "reg_or_int_operand")))]
2916 "TARGET_EITHER"
2917 "
2918 if (TARGET_32BIT)
2919 {
2920 if (CONST_INT_P (operands[2]))
2921 {
2922 if (INTVAL (operands[2]) == 255 && arm_arch6)
2923 {
2924 operands[1] = convert_to_mode (QImode, operands[1], 1);
2925 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2926 operands[1]));
2927 DONE;
2928 }
2929 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
2930 operands[2] = force_reg (SImode, operands[2]);
2931 else
2932 {
2933 arm_split_constant (AND, SImode, NULL_RTX,
2934 INTVAL (operands[2]), operands[0],
2935 operands[1],
2936 optimize && can_create_pseudo_p ());
2937
2938 DONE;
2939 }
2940 }
2941 }
2942 else /* TARGET_THUMB1 */
2943 {
2944 if (!CONST_INT_P (operands[2]))
2945 {
2946 rtx tmp = force_reg (SImode, operands[2]);
2947 if (rtx_equal_p (operands[0], operands[1]))
2948 operands[2] = tmp;
2949 else
2950 {
2951 operands[2] = operands[1];
2952 operands[1] = tmp;
2953 }
2954 }
2955 else
2956 {
2957 int i;
2958
2959 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2960 {
2961 operands[2] = force_reg (SImode,
2962 GEN_INT (~INTVAL (operands[2])));
2963
2964 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2965
2966 DONE;
2967 }
2968
2969 for (i = 9; i <= 31; i++)
2970 {
2971 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
2972 {
2973 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2974 const0_rtx));
2975 DONE;
2976 }
2977 else if ((HOST_WIDE_INT_1 << i) - 1
2978 == ~INTVAL (operands[2]))
2979 {
2980 rtx shift = GEN_INT (i);
2981 rtx reg = gen_reg_rtx (SImode);
2982
2983 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2984 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2985
2986 DONE;
2987 }
2988 }
2989
2990 operands[2] = force_reg (SImode, operands[2]);
2991 }
2992 }
2993 "
2994 )
2995
2996 ; ??? Check split length for Thumb-2
2997 (define_insn_and_split "*arm_andsi3_insn"
2998 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2999 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
3000 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
3001 "TARGET_32BIT"
3002 "@
3003 and%?\\t%0, %1, %2
3004 and%?\\t%0, %1, %2
3005 bic%?\\t%0, %1, #%B2
3006 and%?\\t%0, %1, %2
3007 #"
3008 "TARGET_32BIT
3009 && CONST_INT_P (operands[2])
3010 && !(const_ok_for_arm (INTVAL (operands[2]))
3011 || const_ok_for_arm (~INTVAL (operands[2])))"
3012 [(clobber (const_int 0))]
3013 "
3014 arm_split_constant (AND, SImode, curr_insn,
3015 INTVAL (operands[2]), operands[0], operands[1], 0);
3016 DONE;
3017 "
3018 [(set_attr "length" "4,4,4,4,16")
3019 (set_attr "predicable" "yes")
3020 (set_attr "predicable_short_it" "no,yes,no,no,no")
3021 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
3022 )
3023
3024 (define_insn "*andsi3_compare0"
3025 [(set (reg:CC_NZ CC_REGNUM)
3026 (compare:CC_NZ
3027 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
3028 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
3029 (const_int 0)))
3030 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3031 (and:SI (match_dup 1) (match_dup 2)))]
3032 "TARGET_32BIT"
3033 "@
3034 ands%?\\t%0, %1, %2
3035 bics%?\\t%0, %1, #%B2
3036 ands%?\\t%0, %1, %2"
3037 [(set_attr "conds" "set")
3038 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
3039 )
3040
3041 (define_insn "*andsi3_compare0_scratch"
3042 [(set (reg:CC_NZ CC_REGNUM)
3043 (compare:CC_NZ
3044 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
3045 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
3046 (const_int 0)))
3047 (clobber (match_scratch:SI 2 "=X,r,X"))]
3048 "TARGET_32BIT"
3049 "@
3050 tst%?\\t%0, %1
3051 bics%?\\t%2, %0, #%B1
3052 tst%?\\t%0, %1"
3053 [(set_attr "conds" "set")
3054 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
3055 )
3056
3057 (define_insn "*zeroextractsi_compare0_scratch"
3058 [(set (reg:CC_NZ CC_REGNUM)
3059 (compare:CC_NZ (zero_extract:SI
3060 (match_operand:SI 0 "s_register_operand" "r")
3061 (match_operand 1 "const_int_operand" "n")
3062 (match_operand 2 "const_int_operand" "n"))
3063 (const_int 0)))]
3064 "TARGET_32BIT
3065 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
3066 && INTVAL (operands[1]) > 0
3067 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
3068 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
3069 "*
3070 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
3071 << INTVAL (operands[2]));
3072 output_asm_insn (\"tst%?\\t%0, %1\", operands);
3073 return \"\";
3074 "
3075 [(set_attr "conds" "set")
3076 (set_attr "predicable" "yes")
3077 (set_attr "type" "logics_imm")]
3078 )
3079
3080 (define_insn_and_split "*ne_zeroextractsi"
3081 [(set (match_operand:SI 0 "s_register_operand" "=r")
3082 (ne:SI (zero_extract:SI
3083 (match_operand:SI 1 "s_register_operand" "r")
3084 (match_operand:SI 2 "const_int_operand" "n")
3085 (match_operand:SI 3 "const_int_operand" "n"))
3086 (const_int 0)))
3087 (clobber (reg:CC CC_REGNUM))]
3088 "TARGET_32BIT
3089 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3090 && INTVAL (operands[2]) > 0
3091 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3092 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
3093 "#"
3094 "TARGET_32BIT
3095 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3096 && INTVAL (operands[2]) > 0
3097 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3098 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
3099 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3100 (compare:CC_NZ (and:SI (match_dup 1) (match_dup 2))
3101 (const_int 0)))
3102 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
3103 (set (match_dup 0)
3104 (if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3105 (match_dup 0) (const_int 1)))]
3106 "
3107 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
3108 << INTVAL (operands[3]));
3109 "
3110 [(set_attr "conds" "clob")
3111 (set (attr "length")
3112 (if_then_else (eq_attr "is_thumb" "yes")
3113 (const_int 12)
3114 (const_int 8)))
3115 (set_attr "type" "multiple")]
3116 )
3117
3118 (define_insn_and_split "*ne_zeroextractsi_shifted"
3119 [(set (match_operand:SI 0 "s_register_operand" "=r")
3120 (ne:SI (zero_extract:SI
3121 (match_operand:SI 1 "s_register_operand" "r")
3122 (match_operand:SI 2 "const_int_operand" "n")
3123 (const_int 0))
3124 (const_int 0)))
3125 (clobber (reg:CC CC_REGNUM))]
3126 "TARGET_ARM"
3127 "#"
3128 "TARGET_ARM"
3129 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3130 (compare:CC_NZ (ashift:SI (match_dup 1) (match_dup 2))
3131 (const_int 0)))
3132 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
3133 (set (match_dup 0)
3134 (if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3135 (match_dup 0) (const_int 1)))]
3136 "
3137 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
3138 "
3139 [(set_attr "conds" "clob")
3140 (set_attr "length" "8")
3141 (set_attr "type" "multiple")]
3142 )
3143
3144 (define_insn_and_split "*ite_ne_zeroextractsi"
3145 [(set (match_operand:SI 0 "s_register_operand" "=r")
3146 (if_then_else:SI (ne (zero_extract:SI
3147 (match_operand:SI 1 "s_register_operand" "r")
3148 (match_operand:SI 2 "const_int_operand" "n")
3149 (match_operand:SI 3 "const_int_operand" "n"))
3150 (const_int 0))
3151 (match_operand:SI 4 "arm_not_operand" "rIK")
3152 (const_int 0)))
3153 (clobber (reg:CC CC_REGNUM))]
3154 "TARGET_ARM
3155 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3156 && INTVAL (operands[2]) > 0
3157 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3158 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
3159 && !reg_overlap_mentioned_p (operands[0], operands[4])"
3160 "#"
3161 "TARGET_ARM
3162 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3163 && INTVAL (operands[2]) > 0
3164 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3165 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
3166 && !reg_overlap_mentioned_p (operands[0], operands[4])"
3167 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3168 (compare:CC_NZ (and:SI (match_dup 1) (match_dup 2))
3169 (const_int 0)))
3170 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
3171 (set (match_dup 0)
3172 (if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3173 (match_dup 0) (match_dup 4)))]
3174 "
3175 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
3176 << INTVAL (operands[3]));
3177 "
3178 [(set_attr "conds" "clob")
3179 (set_attr "length" "8")
3180 (set_attr "type" "multiple")]
3181 )
3182
3183 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
3184 [(set (match_operand:SI 0 "s_register_operand" "=r")
3185 (if_then_else:SI (ne (zero_extract:SI
3186 (match_operand:SI 1 "s_register_operand" "r")
3187 (match_operand:SI 2 "const_int_operand" "n")
3188 (const_int 0))
3189 (const_int 0))
3190 (match_operand:SI 3 "arm_not_operand" "rIK")
3191 (const_int 0)))
3192 (clobber (reg:CC CC_REGNUM))]
3193 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
3194 "#"
3195 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
3196 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3197 (compare:CC_NZ (ashift:SI (match_dup 1) (match_dup 2))
3198 (const_int 0)))
3199 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
3200 (set (match_dup 0)
3201 (if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3202 (match_dup 0) (match_dup 3)))]
3203 "
3204 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
3205 "
3206 [(set_attr "conds" "clob")
3207 (set_attr "length" "8")
3208 (set_attr "type" "multiple")]
3209 )
3210
3211 ;; ??? Use the Thumb-2 bitfield insert/extract instructions here.
3212 (define_split
3213 [(set (match_operand:SI 0 "s_register_operand" "")
3214 (match_operator:SI 1 "shiftable_operator"
3215 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3216 (match_operand:SI 3 "const_int_operand" "")
3217 (match_operand:SI 4 "const_int_operand" ""))
3218 (match_operand:SI 5 "s_register_operand" "")]))
3219 (clobber (match_operand:SI 6 "s_register_operand" ""))]
3220 "TARGET_ARM"
3221 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
3222 (set (match_dup 0)
3223 (match_op_dup 1
3224 [(lshiftrt:SI (match_dup 6) (match_dup 4))
3225 (match_dup 5)]))]
3226 "{
3227 HOST_WIDE_INT temp = INTVAL (operands[3]);
3228
3229 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
3230 operands[4] = GEN_INT (32 - temp);
3231 }"
3232 )
3233
3234 (define_split
3235 [(set (match_operand:SI 0 "s_register_operand" "")
3236 (match_operator:SI 1 "shiftable_operator"
3237 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3238 (match_operand:SI 3 "const_int_operand" "")
3239 (match_operand:SI 4 "const_int_operand" ""))
3240 (match_operand:SI 5 "s_register_operand" "")]))
3241 (clobber (match_operand:SI 6 "s_register_operand" ""))]
3242 "TARGET_ARM"
3243 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
3244 (set (match_dup 0)
3245 (match_op_dup 1
3246 [(ashiftrt:SI (match_dup 6) (match_dup 4))
3247 (match_dup 5)]))]
3248 "{
3249 HOST_WIDE_INT temp = INTVAL (operands[3]);
3250
3251 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
3252 operands[4] = GEN_INT (32 - temp);
3253 }"
3254 )
3255
3256 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
3257 ;;; represented by the bitfield, then this will produce incorrect results.
3258 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
3259 ;;; which have a real bit-field insert instruction, the truncation happens
3260 ;;; in the bit-field insert instruction itself. Since arm does not have a
3261 ;;; bit-field insert instruction, we would have to emit code here to truncate
3262 ;;; the value before we insert. This loses some of the advantage of having
3263 ;;; this insv pattern, so this pattern needs to be reevaluated.
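;;; As a minimal C illustration of the missing step (illustrative only, not
;;; what the expander emits): a correct insert of VAL into a WIDTH-bit field
;;; at bit START must first truncate VAL to the field:
;;;   #include <stdint.h>
;;;   uint32_t insert_field (uint32_t word, uint32_t val, int start, int width)
;;;   {
;;;     uint32_t mask = width == 32 ? ~0u : (1u << width) - 1;
;;;     return (word & ~(mask << start)) | ((val & mask) << start);
;;;   }
;;; The '(val & mask)' truncation is the step that, as noted above, is not
;;; always performed here for register operands.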
3264
3265 (define_expand "insv"
3266 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
3267 (match_operand 1 "general_operand")
3268 (match_operand 2 "general_operand"))
3269 (match_operand 3 "reg_or_int_operand"))]
3270 "TARGET_ARM || arm_arch_thumb2"
3271 "
3272 {
3273 int start_bit = INTVAL (operands[2]);
3274 int width = INTVAL (operands[1]);
3275 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
3276 rtx target, subtarget;
3277
3278 if (arm_arch_thumb2)
3279 {
3280 if (unaligned_access && MEM_P (operands[0])
3281 && s_register_operand (operands[3], GET_MODE (operands[3]))
3282 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
3283 {
3284 rtx base_addr;
3285
3286 if (BYTES_BIG_ENDIAN)
3287 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
3288 - start_bit;
3289
3290 if (width == 32)
3291 {
3292 base_addr = adjust_address (operands[0], SImode,
3293 start_bit / BITS_PER_UNIT);
3294 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
3295 }
3296 else
3297 {
3298 rtx tmp = gen_reg_rtx (HImode);
3299
3300 base_addr = adjust_address (operands[0], HImode,
3301 start_bit / BITS_PER_UNIT);
3302 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
3303 emit_insn (gen_unaligned_storehi (base_addr, tmp));
3304 }
3305 DONE;
3306 }
3307 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
3308 {
3309 bool use_bfi = TRUE;
3310
3311 if (CONST_INT_P (operands[3]))
3312 {
3313 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
3314
3315 if (val == 0)
3316 {
3317 emit_insn (gen_insv_zero (operands[0], operands[1],
3318 operands[2]));
3319 DONE;
3320 }
3321
3322 /* See if the set can be done with a single orr instruction. */
3323 if (val == mask && const_ok_for_arm (val << start_bit))
3324 use_bfi = FALSE;
3325 }
3326
3327 if (use_bfi)
3328 {
3329 if (!REG_P (operands[3]))
3330 operands[3] = force_reg (SImode, operands[3]);
3331
3332 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
3333 operands[3]));
3334 DONE;
3335 }
3336 }
3337 else
3338 FAIL;
3339 }
3340
3341 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
3342 FAIL;
3343
3344 target = copy_rtx (operands[0]);
3345 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
3346 subreg as the final target. */
3347 if (GET_CODE (target) == SUBREG)
3348 {
3349 subtarget = gen_reg_rtx (SImode);
3350 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
3351 < GET_MODE_SIZE (SImode))
3352 target = SUBREG_REG (target);
3353 }
3354 else
3355 subtarget = target;
3356
3357 if (CONST_INT_P (operands[3]))
3358 {
3359 /* Since we are inserting a known constant, we may be able to
3360 reduce the number of bits that we have to clear so that
3361 the mask becomes simple. */
3362 /* ??? This code does not check to see if the new mask is actually
3363 simpler. It may not be. */
3364 rtx op1 = gen_reg_rtx (SImode);
3365 /* ??? Truncate operand3 to fit in the bitfield. See comment before
3366 start of this pattern. */
3367 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
3368 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
3369
3370 emit_insn (gen_andsi3 (op1, operands[0],
3371 gen_int_mode (~mask2, SImode)));
3372 emit_insn (gen_iorsi3 (subtarget, op1,
3373 gen_int_mode (op3_value << start_bit, SImode)));
3374 }
3375 else if (start_bit == 0
3376 && !(const_ok_for_arm (mask)
3377 || const_ok_for_arm (~mask)))
3378 {
3379 /* A trick: since we are setting the bottom bits of the word,
3380 we can shift operand[3] up, shift operand[0] down, OR them together
3381 and rotate the result back again. This takes 3 insns, and
3382 the third might be mergeable into another op. */
3383 /* The shift up copes with the possibility that operand[3] is
3384 wider than the bitfield. */
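/* Worked example (illustrative): for an 8-bit field at bit 0,
op0 = operands[3] << 24 places the new value in bits 24..31,
op1 = operands[0] >> 8 parks the old upper 24 bits in bits 0..23,
and rotating the OR of the two left by 8 puts the new value in
bits 0..7 with the old upper bits restored. */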
3385 rtx op0 = gen_reg_rtx (SImode);
3386 rtx op1 = gen_reg_rtx (SImode);
3387
3388 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3389 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
3390 emit_insn (gen_iorsi3 (op1, op1, op0));
3391 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
3392 }
3393 else if ((width + start_bit == 32)
3394 && !(const_ok_for_arm (mask)
3395 || const_ok_for_arm (~mask)))
3396 {
3397 /* Similar trick, but slightly less efficient. */
3398
3399 rtx op0 = gen_reg_rtx (SImode);
3400 rtx op1 = gen_reg_rtx (SImode);
3401
3402 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3403 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
3404 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
3405 emit_insn (gen_iorsi3 (subtarget, op1, op0));
3406 }
3407 else
3408 {
3409 rtx op0 = gen_int_mode (mask, SImode);
3410 rtx op1 = gen_reg_rtx (SImode);
3411 rtx op2 = gen_reg_rtx (SImode);
3412
3413 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
3414 {
3415 rtx tmp = gen_reg_rtx (SImode);
3416
3417 emit_insn (gen_movsi (tmp, op0));
3418 op0 = tmp;
3419 }
3420
3421 /* Mask out any bits in operand[3] that are not needed. */
3422 emit_insn (gen_andsi3 (op1, operands[3], op0));
3423
3424 if (CONST_INT_P (op0)
3425 && (const_ok_for_arm (mask << start_bit)
3426 || const_ok_for_arm (~(mask << start_bit))))
3427 {
3428 op0 = gen_int_mode (~(mask << start_bit), SImode);
3429 emit_insn (gen_andsi3 (op2, operands[0], op0));
3430 }
3431 else
3432 {
3433 if (CONST_INT_P (op0))
3434 {
3435 rtx tmp = gen_reg_rtx (SImode);
3436
3437 emit_insn (gen_movsi (tmp, op0));
3438 op0 = tmp;
3439 }
3440
3441 if (start_bit != 0)
3442 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
3443
3444 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
3445 }
3446
3447 if (start_bit != 0)
3448 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
3449
3450 emit_insn (gen_iorsi3 (subtarget, op1, op2));
3451 }
3452
3453 if (subtarget != target)
3454 {
3455 /* If TARGET is still a SUBREG, then it must be wider than a word,
3456 so we must be careful only to set the subword we were asked to. */
3457 if (GET_CODE (target) == SUBREG)
3458 emit_move_insn (target, subtarget);
3459 else
3460 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
3461 }
3462
3463 DONE;
3464 }"
3465 )
3466
3467 (define_insn "insv_zero"
3468 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3469 (match_operand:SI 1 "const_int_M_operand" "M")
3470 (match_operand:SI 2 "const_int_M_operand" "M"))
3471 (const_int 0))]
3472 "arm_arch_thumb2"
3473 "bfc%?\t%0, %2, %1"
3474 [(set_attr "length" "4")
3475 (set_attr "predicable" "yes")
3476 (set_attr "type" "bfm")]
3477 )
3478
3479 (define_insn "insv_t2"
3480 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3481 (match_operand:SI 1 "const_int_M_operand" "M")
3482 (match_operand:SI 2 "const_int_M_operand" "M"))
3483 (match_operand:SI 3 "s_register_operand" "r"))]
3484 "arm_arch_thumb2"
3485 "bfi%?\t%0, %3, %2, %1"
3486 [(set_attr "length" "4")
3487 (set_attr "predicable" "yes")
3488 (set_attr "type" "bfm")]
3489 )
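;; Example (illustrative, not from the original sources): with the usual
;; little-endian bitfield layout, a store such as
;;   struct s { unsigned a : 5, b : 7; };  x.b = v;
;; can be implemented by a single "bfi rX, rV, #5, #7", matching the
;; operand order width = %1, lsb = %2, source = %3 above.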
3490
3491 (define_insn "andsi_notsi_si"
3492 [(set (match_operand:SI 0 "s_register_operand" "=r")
3493 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3494 (match_operand:SI 1 "s_register_operand" "r")))]
3495 "TARGET_32BIT"
3496 "bic%?\\t%0, %1, %2"
3497 [(set_attr "predicable" "yes")
3498 (set_attr "type" "logic_reg")]
3499 )
3500
3501 (define_insn "andsi_not_shiftsi_si"
3502 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3503 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
3504 [(match_operand:SI 2 "s_register_operand" "r,r")
3505 (match_operand:SI 3 "shift_amount_operand" "M,r")]))
3506 (match_operand:SI 1 "s_register_operand" "r,r")))]
3507 "TARGET_32BIT"
3508 "bic%?\\t%0, %1, %2%S4"
3509 [(set_attr "predicable" "yes")
3510 (set_attr "shift" "2")
3511 (set_attr "arch" "32,a")
3512 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3513 )
3514
3515 ;; Shifted bics pattern used to set up the CC status register without reusing
3516 ;; the bics output. The pattern restricts the Thumb-2 shift operand, since
3517 ;; bics in Thumb-2 does not support a shift by register.
3518 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
3519 [(set (reg:CC_NZ CC_REGNUM)
3520 (compare:CC_NZ
3521 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3522 [(match_operand:SI 1 "s_register_operand" "r,r")
3523 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
3524 (match_operand:SI 3 "s_register_operand" "r,r"))
3525 (const_int 0)))
3526 (clobber (match_scratch:SI 4 "=r,r"))]
3527 "TARGET_32BIT"
3528 "bics%?\\t%4, %3, %1%S0"
3529 [(set_attr "predicable" "yes")
3530 (set_attr "arch" "32,a")
3531 (set_attr "conds" "set")
3532 (set_attr "shift" "1")
3533 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3534 )
3535
3536 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the bics result is also
3537 ;; reused later.
3538 (define_insn "andsi_not_shiftsi_si_scc"
3539 [(parallel [(set (reg:CC_NZ CC_REGNUM)
3540 (compare:CC_NZ
3541 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3542 [(match_operand:SI 1 "s_register_operand" "r,r")
3543 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
3544 (match_operand:SI 3 "s_register_operand" "r,r"))
3545 (const_int 0)))
3546 (set (match_operand:SI 4 "s_register_operand" "=r,r")
3547 (and:SI (not:SI (match_op_dup 0
3548 [(match_dup 1)
3549 (match_dup 2)]))
3550 (match_dup 3)))])]
3551 "TARGET_32BIT"
3552 "bics%?\\t%4, %3, %1%S0"
3553 [(set_attr "predicable" "yes")
3554 (set_attr "arch" "32,a")
3555 (set_attr "conds" "set")
3556 (set_attr "shift" "1")
3557 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3558 )
3559
3560 (define_insn "*andsi_notsi_si_compare0"
3561 [(set (reg:CC_NZ CC_REGNUM)
3562 (compare:CC_NZ
3563 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3564 (match_operand:SI 1 "s_register_operand" "r"))
3565 (const_int 0)))
3566 (set (match_operand:SI 0 "s_register_operand" "=r")
3567 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
3568 "TARGET_32BIT"
3569 "bics\\t%0, %1, %2"
3570 [(set_attr "conds" "set")
3571 (set_attr "type" "logics_shift_reg")]
3572 )
3573
3574 (define_insn "*andsi_notsi_si_compare0_scratch"
3575 [(set (reg:CC_NZ CC_REGNUM)
3576 (compare:CC_NZ
3577 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3578 (match_operand:SI 1 "s_register_operand" "r"))
3579 (const_int 0)))
3580 (clobber (match_scratch:SI 0 "=r"))]
3581 "TARGET_32BIT"
3582 "bics\\t%0, %1, %2"
3583 [(set_attr "conds" "set")
3584 (set_attr "type" "logics_shift_reg")]
3585 )
3586
3587 (define_expand "iorsi3"
3588 [(set (match_operand:SI 0 "s_register_operand")
3589 (ior:SI (match_operand:SI 1 "s_register_operand")
3590 (match_operand:SI 2 "reg_or_int_operand")))]
3591 "TARGET_EITHER"
3592 "
3593 if (CONST_INT_P (operands[2]))
3594 {
3595 if (TARGET_32BIT)
3596 {
3597 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
3598 operands[2] = force_reg (SImode, operands[2]);
3599 else
3600 {
3601 arm_split_constant (IOR, SImode, NULL_RTX,
3602 INTVAL (operands[2]), operands[0],
3603 operands[1],
3604 optimize && can_create_pseudo_p ());
3605 DONE;
3606 }
3607 }
3608 else /* TARGET_THUMB1 */
3609 {
3610 rtx tmp = force_reg (SImode, operands[2]);
3611 if (rtx_equal_p (operands[0], operands[1]))
3612 operands[2] = tmp;
3613 else
3614 {
3615 operands[2] = operands[1];
3616 operands[1] = tmp;
3617 }
3618 }
3619 }
3620 "
3621 )
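;; For example (illustrative): x | 0x00ff00ff cannot be encoded as a single
;; ARM modified immediate, so arm_split_constant may emit two ORRs instead,
;; e.g. "orr r0, r1, #255" followed by "orr r0, r0, #16711680" (0x00ff0000).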
3622
3623 (define_insn_and_split "*iorsi3_insn"
3624 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
3625 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
3626 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
3627 "TARGET_32BIT"
3628 "@
3629 orr%?\\t%0, %1, %2
3630 orr%?\\t%0, %1, %2
3631 orn%?\\t%0, %1, #%B2
3632 orr%?\\t%0, %1, %2
3633 #"
3634 "TARGET_32BIT
3635 && CONST_INT_P (operands[2])
3636 && !(const_ok_for_arm (INTVAL (operands[2]))
3637 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
3638 [(clobber (const_int 0))]
3639 {
3640 arm_split_constant (IOR, SImode, curr_insn,
3641 INTVAL (operands[2]), operands[0], operands[1], 0);
3642 DONE;
3643 }
3644 [(set_attr "length" "4,4,4,4,16")
3645 (set_attr "arch" "32,t2,t2,32,32")
3646 (set_attr "predicable" "yes")
3647 (set_attr "predicable_short_it" "no,yes,no,no,no")
3648 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
3649 )
3650
3651 (define_peephole2
3652 [(match_scratch:SI 3 "r")
3653 (set (match_operand:SI 0 "arm_general_register_operand" "")
3654 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
3655 (match_operand:SI 2 "const_int_operand" "")))]
3656 "TARGET_ARM
3657 && !const_ok_for_arm (INTVAL (operands[2]))
3658 && const_ok_for_arm (~INTVAL (operands[2]))"
3659 [(set (match_dup 3) (match_dup 2))
3660 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
3661 ""
3662 )
3663
3664 (define_insn "*iorsi3_compare0"
3665 [(set (reg:CC_NZ CC_REGNUM)
3666 (compare:CC_NZ
3667 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3668 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3669 (const_int 0)))
3670 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
3671 (ior:SI (match_dup 1) (match_dup 2)))]
3672 "TARGET_32BIT"
3673 "orrs%?\\t%0, %1, %2"
3674 [(set_attr "conds" "set")
3675 (set_attr "arch" "*,t2,*")
3676 (set_attr "length" "4,2,4")
3677 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
3678 )
3679
3680 (define_insn "*iorsi3_compare0_scratch"
3681 [(set (reg:CC_NZ CC_REGNUM)
3682 (compare:CC_NZ
3683 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3684 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3685 (const_int 0)))
3686 (clobber (match_scratch:SI 0 "=r,l,r"))]
3687 "TARGET_32BIT"
3688 "orrs%?\\t%0, %1, %2"
3689 [(set_attr "conds" "set")
3690 (set_attr "arch" "*,t2,*")
3691 (set_attr "length" "4,2,4")
3692 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
3693 )
3694
3695 (define_expand "xorsi3"
3696 [(set (match_operand:SI 0 "s_register_operand")
3697 (xor:SI (match_operand:SI 1 "s_register_operand")
3698 (match_operand:SI 2 "reg_or_int_operand")))]
3699 "TARGET_EITHER"
3700 "if (CONST_INT_P (operands[2]))
3701 {
3702 if (TARGET_32BIT)
3703 {
3704 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
3705 operands[2] = force_reg (SImode, operands[2]);
3706 else
3707 {
3708 arm_split_constant (XOR, SImode, NULL_RTX,
3709 INTVAL (operands[2]), operands[0],
3710 operands[1],
3711 optimize && can_create_pseudo_p ());
3712 DONE;
3713 }
3714 }
3715 else /* TARGET_THUMB1 */
3716 {
3717 rtx tmp = force_reg (SImode, operands[2]);
3718 if (rtx_equal_p (operands[0], operands[1]))
3719 operands[2] = tmp;
3720 else
3721 {
3722 operands[2] = operands[1];
3723 operands[1] = tmp;
3724 }
3725 }
3726 }"
3727 )
3728
3729 (define_insn_and_split "*arm_xorsi3"
3730 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
3731 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
3732 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
3733 "TARGET_32BIT"
3734 "@
3735 eor%?\\t%0, %1, %2
3736 eor%?\\t%0, %1, %2
3737 eor%?\\t%0, %1, %2
3738 #"
3739 "TARGET_32BIT
3740 && CONST_INT_P (operands[2])
3741 && !const_ok_for_arm (INTVAL (operands[2]))"
3742 [(clobber (const_int 0))]
3743 {
3744 arm_split_constant (XOR, SImode, curr_insn,
3745 INTVAL (operands[2]), operands[0], operands[1], 0);
3746 DONE;
3747 }
3748 [(set_attr "length" "4,4,4,16")
3749 (set_attr "predicable" "yes")
3750 (set_attr "predicable_short_it" "no,yes,no,no")
3751 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
3752 )
3753
3754 (define_insn "*xorsi3_compare0"
3755 [(set (reg:CC_NZ CC_REGNUM)
3756 (compare:CC_NZ (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
3757 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3758 (const_int 0)))
3759 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3760 (xor:SI (match_dup 1) (match_dup 2)))]
3761 "TARGET_32BIT"
3762 "eors%?\\t%0, %1, %2"
3763 [(set_attr "conds" "set")
3764 (set_attr "type" "logics_imm,logics_reg")]
3765 )
3766
3767 (define_insn "*xorsi3_compare0_scratch"
3768 [(set (reg:CC_NZ CC_REGNUM)
3769 (compare:CC_NZ (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
3770 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
3771 (const_int 0)))]
3772 "TARGET_32BIT"
3773 "teq%?\\t%0, %1"
3774 [(set_attr "conds" "set")
3775 (set_attr "type" "logics_imm,logics_reg")]
3776 )
3777
3778 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C)
3779 ; followed by (NOT D), we can sometimes merge the final NOT into one of the
3780 ; following insns.
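; (The identity used is De Morgan's: (NOT A AND NOT B) OR C
;  == NOT (A OR B) OR C == NOT ((A OR B) AND NOT C) == NOT D.)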
3781
3782 (define_split
3783 [(set (match_operand:SI 0 "s_register_operand" "")
3784 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3785 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3786 (match_operand:SI 3 "arm_rhs_operand" "")))
3787 (clobber (match_operand:SI 4 "s_register_operand" ""))]
3788 "TARGET_32BIT"
3789 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3790 (not:SI (match_dup 3))))
3791 (set (match_dup 0) (not:SI (match_dup 4)))]
3792 ""
3793 )
3794
3795 (define_insn_and_split "*andsi_iorsi3_notsi"
3796 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3797 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3798 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3799 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3800 "TARGET_32BIT"
3801 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3802 "&& reload_completed"
3803 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
3804 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
3805 {
3806 /* If operands[3] is a constant make sure to fold the NOT into it
3807 to avoid creating a NOT of a CONST_INT. */
3808 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
3809 if (CONST_INT_P (not_rtx))
3810 {
3811 operands[4] = operands[0];
3812 operands[5] = not_rtx;
3813 }
3814 else
3815 {
3816 operands[5] = operands[0];
3817 operands[4] = not_rtx;
3818 }
3819 }
3820 [(set_attr "length" "8")
3821 (set_attr "ce_count" "2")
3822 (set_attr "predicable" "yes")
3823 (set_attr "type" "multiple")]
3824 )
3825
3826 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3827 ; insns are available?
3828 (define_split
3829 [(set (match_operand:SI 0 "s_register_operand" "")
3830 (match_operator:SI 1 "logical_binary_operator"
3831 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3832 (match_operand:SI 3 "const_int_operand" "")
3833 (match_operand:SI 4 "const_int_operand" ""))
3834 (match_operator:SI 9 "logical_binary_operator"
3835 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3836 (match_operand:SI 6 "const_int_operand" ""))
3837 (match_operand:SI 7 "s_register_operand" "")])]))
3838 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3839 "TARGET_32BIT
3840 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3841 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3842 [(set (match_dup 8)
3843 (match_op_dup 1
3844 [(ashift:SI (match_dup 2) (match_dup 4))
3845 (match_dup 5)]))
3846 (set (match_dup 0)
3847 (match_op_dup 1
3848 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3849 (match_dup 7)]))]
3850 "
3851 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3852 ")
3853
3854 (define_split
3855 [(set (match_operand:SI 0 "s_register_operand" "")
3856 (match_operator:SI 1 "logical_binary_operator"
3857 [(match_operator:SI 9 "logical_binary_operator"
3858 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3859 (match_operand:SI 6 "const_int_operand" ""))
3860 (match_operand:SI 7 "s_register_operand" "")])
3861 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3862 (match_operand:SI 3 "const_int_operand" "")
3863 (match_operand:SI 4 "const_int_operand" ""))]))
3864 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3865 "TARGET_32BIT
3866 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3867 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3868 [(set (match_dup 8)
3869 (match_op_dup 1
3870 [(ashift:SI (match_dup 2) (match_dup 4))
3871 (match_dup 5)]))
3872 (set (match_dup 0)
3873 (match_op_dup 1
3874 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3875 (match_dup 7)]))]
3876 "
3877 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3878 ")
3879
3880 (define_split
3881 [(set (match_operand:SI 0 "s_register_operand" "")
3882 (match_operator:SI 1 "logical_binary_operator"
3883 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3884 (match_operand:SI 3 "const_int_operand" "")
3885 (match_operand:SI 4 "const_int_operand" ""))
3886 (match_operator:SI 9 "logical_binary_operator"
3887 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3888 (match_operand:SI 6 "const_int_operand" ""))
3889 (match_operand:SI 7 "s_register_operand" "")])]))
3890 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3891 "TARGET_32BIT
3892 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3893 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3894 [(set (match_dup 8)
3895 (match_op_dup 1
3896 [(ashift:SI (match_dup 2) (match_dup 4))
3897 (match_dup 5)]))
3898 (set (match_dup 0)
3899 (match_op_dup 1
3900 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3901 (match_dup 7)]))]
3902 "
3903 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3904 ")
3905
3906 (define_split
3907 [(set (match_operand:SI 0 "s_register_operand" "")
3908 (match_operator:SI 1 "logical_binary_operator"
3909 [(match_operator:SI 9 "logical_binary_operator"
3910 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3911 (match_operand:SI 6 "const_int_operand" ""))
3912 (match_operand:SI 7 "s_register_operand" "")])
3913 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3914 (match_operand:SI 3 "const_int_operand" "")
3915 (match_operand:SI 4 "const_int_operand" ""))]))
3916 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3917 "TARGET_32BIT
3918 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3919 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3920 [(set (match_dup 8)
3921 (match_op_dup 1
3922 [(ashift:SI (match_dup 2) (match_dup 4))
3923 (match_dup 5)]))
3924 (set (match_dup 0)
3925 (match_op_dup 1
3926 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3927 (match_dup 7)]))]
3928 "
3929 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3930 ")
3931 \f
3932
3933 ;; Minimum and maximum insns
3934
3935 (define_expand "smaxsi3"
3936 [(parallel [
3937 (set (match_operand:SI 0 "s_register_operand")
3938 (smax:SI (match_operand:SI 1 "s_register_operand")
3939 (match_operand:SI 2 "arm_rhs_operand")))
3940 (clobber (reg:CC CC_REGNUM))])]
3941 "TARGET_32BIT"
3942 "
3943 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3944 {
3945 /* No need for a clobber of the condition code register here. */
3946 emit_insn (gen_rtx_SET (operands[0],
3947 gen_rtx_SMAX (SImode, operands[1],
3948 operands[2])));
3949 DONE;
3950 }
3951 ")
3952
3953 (define_insn "*smax_0"
3954 [(set (match_operand:SI 0 "s_register_operand" "=r")
3955 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3956 (const_int 0)))]
3957 "TARGET_32BIT"
3958 "bic%?\\t%0, %1, %1, asr #31"
3959 [(set_attr "predicable" "yes")
3960 (set_attr "type" "logic_shift_reg")]
3961 )
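;; The asr #31 trick: %1 asr #31 is 0 when %1 is non-negative and all ones
;; when it is negative, so the BIC above yields max (%1, 0); the ORR in the
;; next pattern similarly yields max (%1, -1).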
3962
3963 (define_insn "*smax_m1"
3964 [(set (match_operand:SI 0 "s_register_operand" "=r")
3965 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3966 (const_int -1)))]
3967 "TARGET_32BIT"
3968 "orr%?\\t%0, %1, %1, asr #31"
3969 [(set_attr "predicable" "yes")
3970 (set_attr "type" "logic_shift_reg")]
3971 )
3972
3973 (define_insn_and_split "*arm_smax_insn"
3974 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3975 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3976 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3977 (clobber (reg:CC CC_REGNUM))]
3978 "TARGET_ARM"
3979 "#"
3980 ; cmp\\t%1, %2\;movlt\\t%0, %2
3981 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3982 "TARGET_ARM"
3983 [(set (reg:CC CC_REGNUM)
3984 (compare:CC (match_dup 1) (match_dup 2)))
3985 (set (match_dup 0)
3986 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
3987 (match_dup 1)
3988 (match_dup 2)))]
3989 ""
3990 [(set_attr "conds" "clob")
3991 (set_attr "length" "8,12")
3992 (set_attr "type" "multiple")]
3993 )
3994
3995 (define_expand "sminsi3"
3996 [(parallel [
3997 (set (match_operand:SI 0 "s_register_operand")
3998 (smin:SI (match_operand:SI 1 "s_register_operand")
3999 (match_operand:SI 2 "arm_rhs_operand")))
4000 (clobber (reg:CC CC_REGNUM))])]
4001 "TARGET_32BIT"
4002 "
4003 if (operands[2] == const0_rtx)
4004 {
4005 /* No need for a clobber of the condition code register here. */
4006 emit_insn (gen_rtx_SET (operands[0],
4007 gen_rtx_SMIN (SImode, operands[1],
4008 operands[2])));
4009 DONE;
4010 }
4011 ")
4012
4013 (define_insn "*smin_0"
4014 [(set (match_operand:SI 0 "s_register_operand" "=r")
4015 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
4016 (const_int 0)))]
4017 "TARGET_32BIT"
4018 "and%?\\t%0, %1, %1, asr #31"
4019 [(set_attr "predicable" "yes")
4020 (set_attr "type" "logic_shift_reg")]
4021 )
4022
4023 (define_insn_and_split "*arm_smin_insn"
4024 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4025 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
4026 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
4027 (clobber (reg:CC CC_REGNUM))]
4028 "TARGET_ARM"
4029 "#"
4030 ; cmp\\t%1, %2\;movge\\t%0, %2
4031 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
4032 "TARGET_ARM"
4033 [(set (reg:CC CC_REGNUM)
4034 (compare:CC (match_dup 1) (match_dup 2)))
4035 (set (match_dup 0)
4036 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
4037 (match_dup 1)
4038 (match_dup 2)))]
4039 ""
4040 [(set_attr "conds" "clob")
4041 (set_attr "length" "8,12")
4042 (set_attr "type" "multiple,multiple")]
4043 )
4044
4045 (define_expand "umaxsi3"
4046 [(parallel [
4047 (set (match_operand:SI 0 "s_register_operand")
4048 (umax:SI (match_operand:SI 1 "s_register_operand")
4049 (match_operand:SI 2 "arm_rhs_operand")))
4050 (clobber (reg:CC CC_REGNUM))])]
4051 "TARGET_32BIT"
4052 ""
4053 )
4054
4055 (define_insn_and_split "*arm_umaxsi3"
4056 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
4057 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
4058 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
4059 (clobber (reg:CC CC_REGNUM))]
4060 "TARGET_ARM"
4061 "#"
4062 ; cmp\\t%1, %2\;movcc\\t%0, %2
4063 ; cmp\\t%1, %2\;movcs\\t%0, %1
4064 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
4065 "TARGET_ARM"
4066 [(set (reg:CC CC_REGNUM)
4067 (compare:CC (match_dup 1) (match_dup 2)))
4068 (set (match_dup 0)
4069 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
4070 (match_dup 1)
4071 (match_dup 2)))]
4072 ""
4073 [(set_attr "conds" "clob")
4074 (set_attr "length" "8,8,12")
4075 (set_attr "type" "multiple")]
4076 )
4077
4078 (define_expand "uminsi3"
4079 [(parallel [
4080 (set (match_operand:SI 0 "s_register_operand")
4081 (umin:SI (match_operand:SI 1 "s_register_operand")
4082 (match_operand:SI 2 "arm_rhs_operand")))
4083 (clobber (reg:CC CC_REGNUM))])]
4084 "TARGET_32BIT"
4085 ""
4086 )
4087
4088 (define_insn_and_split "*arm_uminsi3"
4089 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
4090 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
4091 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
4092 (clobber (reg:CC CC_REGNUM))]
4093 "TARGET_ARM"
4094 "#"
4095 ; cmp\\t%1, %2\;movcs\\t%0, %2
4096 ; cmp\\t%1, %2\;movcc\\t%0, %1
4097 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
4098 "TARGET_ARM"
4099 [(set (reg:CC CC_REGNUM)
4100 (compare:CC (match_dup 1) (match_dup 2)))
4101 (set (match_dup 0)
4102 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
4103 (match_dup 1)
4104 (match_dup 2)))]
4105 ""
4106 [(set_attr "conds" "clob")
4107 (set_attr "length" "8,8,12")
4108 (set_attr "type" "multiple")]
4109 )
4110
4111 (define_insn "*store_minmaxsi"
4112 [(set (match_operand:SI 0 "memory_operand" "=m")
4113 (match_operator:SI 3 "minmax_operator"
4114 [(match_operand:SI 1 "s_register_operand" "r")
4115 (match_operand:SI 2 "s_register_operand" "r")]))
4116 (clobber (reg:CC CC_REGNUM))]
4117 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
4118 "*
4119 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
4120 operands[1], operands[2]);
4121 output_asm_insn (\"cmp\\t%1, %2\", operands);
4122 if (TARGET_THUMB2)
4123 output_asm_insn (\"ite\t%d3\", operands);
4124 output_asm_insn (\"str%d3\\t%1, %0\", operands);
4125 output_asm_insn (\"str%D3\\t%2, %0\", operands);
4126 return \"\";
4127 "
4128 [(set_attr "conds" "clob")
4129 (set (attr "length")
4130 (if_then_else (eq_attr "is_thumb" "yes")
4131 (const_int 14)
4132 (const_int 12)))
4133 (set_attr "type" "store_4")]
4134 )
4135
4136 ; Reject the frame pointer in operand[1], since reloading this after
4137 ; it has been eliminated can cause carnage.
4138 (define_insn "*minmax_arithsi"
4139 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4140 (match_operator:SI 4 "shiftable_operator"
4141 [(match_operator:SI 5 "minmax_operator"
4142 [(match_operand:SI 2 "s_register_operand" "r,r")
4143 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
4144 (match_operand:SI 1 "s_register_operand" "0,?r")]))
4145 (clobber (reg:CC CC_REGNUM))]
4146 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
4147 "*
4148 {
4149 enum rtx_code code = GET_CODE (operands[4]);
4150 bool need_else;
4151
4152 if (which_alternative != 0 || operands[3] != const0_rtx
4153 || (code != PLUS && code != IOR && code != XOR))
4154 need_else = true;
4155 else
4156 need_else = false;
4157
4158 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
4159 operands[2], operands[3]);
4160 output_asm_insn (\"cmp\\t%2, %3\", operands);
4161 if (TARGET_THUMB2)
4162 {
4163 if (need_else)
4164 output_asm_insn (\"ite\\t%d5\", operands);
4165 else
4166 output_asm_insn (\"it\\t%d5\", operands);
4167 }
4168 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
4169 if (need_else)
4170 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
4171 return \"\";
4172 }"
4173 [(set_attr "conds" "clob")
4174 (set (attr "length")
4175 (if_then_else (eq_attr "is_thumb" "yes")
4176 (const_int 14)
4177 (const_int 12)))
4178 (set_attr "type" "multiple")]
4179 )
4180
4181 ; Reject the frame pointer in operand[1], since reloading this after
4182 ; it has been eliminated can cause carnage.
4183 (define_insn_and_split "*minmax_arithsi_non_canon"
4184 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
4185 (minus:SI
4186 (match_operand:SI 1 "s_register_operand" "0,?Ts")
4187 (match_operator:SI 4 "minmax_operator"
4188 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
4189 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
4190 (clobber (reg:CC CC_REGNUM))]
4191 "TARGET_32BIT && !arm_eliminable_register (operands[1])
4192 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
4193 "#"
4194 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
4195 [(set (reg:CC CC_REGNUM)
4196 (compare:CC (match_dup 2) (match_dup 3)))
4197
4198 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
4199 (set (match_dup 0)
4200 (minus:SI (match_dup 1)
4201 (match_dup 2))))
4202 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
4203 (set (match_dup 0)
4204 (match_dup 6)))]
4205 {
4206 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
4207 operands[2], operands[3]);
4208 enum rtx_code rc = minmax_code (operands[4]);
4209 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
4210 operands[2], operands[3]);
4211
4212 if (mode == CCFPmode || mode == CCFPEmode)
4213 rc = reverse_condition_maybe_unordered (rc);
4214 else
4215 rc = reverse_condition (rc);
4216 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
4217 if (CONST_INT_P (operands[3]))
4218 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
4219 else
4220 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
4221 }
4222 [(set_attr "conds" "clob")
4223 (set (attr "length")
4224 (if_then_else (eq_attr "is_thumb" "yes")
4225 (const_int 14)
4226 (const_int 12)))
4227 (set_attr "type" "multiple")]
4228 )
4229
4230
4231 (define_expand "arm_<ss_op>"
4232 [(set (match_operand:SI 0 "s_register_operand")
4233 (SSPLUSMINUS:SI (match_operand:SI 1 "s_register_operand")
4234 (match_operand:SI 2 "s_register_operand")))]
4235 "TARGET_DSP_MULTIPLY"
4236 {
4237 if (ARM_Q_BIT_READ)
4238 emit_insn (gen_arm_<ss_op>_setq_insn (operands[0],
4239 operands[1], operands[2]));
4240 else
4241 emit_insn (gen_arm_<ss_op>_insn (operands[0], operands[1], operands[2]));
4242 DONE;
4243 }
4244 )
4245
4246 (define_insn "arm_<ss_op><add_clobber_q_name>_insn"
4247 [(set (match_operand:SI 0 "s_register_operand" "=r")
4248 (SSPLUSMINUS:SI (match_operand:SI 1 "s_register_operand" "r")
4249 (match_operand:SI 2 "s_register_operand" "r")))]
4250 "TARGET_DSP_MULTIPLY && <add_clobber_q_pred>"
4251 "<ss_op>%?\t%0, %1, %2"
4252 [(set_attr "predicable" "yes")
4253 (set_attr "type" "alu_dsp_reg")]
4254 )
4255
4256 (define_code_iterator SAT [smin smax])
4257 (define_code_attr SATrev [(smin "smax") (smax "smin")])
4258 (define_code_attr SATlo [(smin "1") (smax "2")])
4259 (define_code_attr SAThi [(smin "2") (smax "1")])
4260
4261 (define_expand "arm_ssat"
4262 [(match_operand:SI 0 "s_register_operand")
4263 (match_operand:SI 1 "s_register_operand")
4264 (match_operand:SI 2 "const_int_operand")]
4265 "TARGET_32BIT && arm_arch6"
4266 {
4267 HOST_WIDE_INT val = INTVAL (operands[2]);
4268 /* The builtin checking code should have ensured the right
4269 range for the immediate. */
4270 gcc_assert (IN_RANGE (val, 1, 32));
4271 HOST_WIDE_INT upper_bound = (HOST_WIDE_INT_1 << (val - 1)) - 1;
4272 HOST_WIDE_INT lower_bound = -upper_bound - 1;
4273 rtx up_rtx = gen_int_mode (upper_bound, SImode);
4274 rtx lo_rtx = gen_int_mode (lower_bound, SImode);
4275 if (ARM_Q_BIT_READ)
4276 emit_insn (gen_satsi_smin_setq (operands[0], lo_rtx,
4277 up_rtx, operands[1]));
4278 else
4279 emit_insn (gen_satsi_smin (operands[0], lo_rtx, up_rtx, operands[1]));
4280 DONE;
4281 }
4282 )
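;; For instance (illustrative), arm_ssat with operands[2] == 8 computes the
;; bounds 127 and -128 and emits the smin/smax clamp that the ssat patterns
;; below recognise, i.e. a saturation to the signed 8-bit range.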
4283
4284 (define_expand "arm_usat"
4285 [(match_operand:SI 0 "s_register_operand")
4286 (match_operand:SI 1 "s_register_operand")
4287 (match_operand:SI 2 "const_int_operand")]
4288 "TARGET_32BIT && arm_arch6"
4289 {
4290 HOST_WIDE_INT val = INTVAL (operands[2]);
4291 /* The builtin checking code should have ensured the right
4292 range for the immediate. */
4293 gcc_assert (IN_RANGE (val, 0, 31));
4294 HOST_WIDE_INT upper_bound = (HOST_WIDE_INT_1 << val) - 1;
4295 rtx up_rtx = gen_int_mode (upper_bound, SImode);
4296 rtx lo_rtx = CONST0_RTX (SImode);
4297 if (ARM_Q_BIT_READ)
4298 emit_insn (gen_satsi_smin_setq (operands[0], lo_rtx, up_rtx,
4299 operands[1]));
4300 else
4301 emit_insn (gen_satsi_smin (operands[0], lo_rtx, up_rtx, operands[1]));
4302 DONE;
4303 }
4304 )
4305
4306 (define_insn "arm_get_apsr"
4307 [(set (match_operand:SI 0 "s_register_operand" "=r")
4308 (unspec:SI [(reg:CC APSRQ_REGNUM)] UNSPEC_APSR_READ))]
4309 "TARGET_ARM_QBIT"
4310 "mrs%?\t%0, APSR"
4311 [(set_attr "predicable" "yes")
4312 (set_attr "conds" "use")]
4313 )
4314
4315 (define_insn "arm_set_apsr"
4316 [(set (reg:CC APSRQ_REGNUM)
4317 (unspec_volatile:CC
4318 [(match_operand:SI 0 "s_register_operand" "r")] VUNSPEC_APSR_WRITE))]
4319 "TARGET_ARM_QBIT"
4320 "msr%?\tAPSR_nzcvq, %0"
4321 [(set_attr "predicable" "yes")
4322 (set_attr "conds" "set")]
4323 )
4324
4325 ;; Read the APSR and extract the Q bit (bit 27)
4326 (define_expand "arm_saturation_occurred"
4327 [(match_operand:SI 0 "s_register_operand")]
4328 "TARGET_ARM_QBIT"
4329 {
4330 rtx apsr = gen_reg_rtx (SImode);
4331 emit_insn (gen_arm_get_apsr (apsr));
4332 emit_insn (gen_extzv (operands[0], apsr, CONST1_RTX (SImode),
4333 gen_int_mode (27, SImode)));
4334 DONE;
4335 }
4336 )
4337
4338 ;; Read the APSR and set the Q bit (bit position 27) according to operand 0
4339 (define_expand "arm_set_saturation"
4340 [(match_operand:SI 0 "reg_or_int_operand")]
4341 "TARGET_ARM_QBIT"
4342 {
4343 rtx apsr = gen_reg_rtx (SImode);
4344 emit_insn (gen_arm_get_apsr (apsr));
4345 rtx to_insert = gen_reg_rtx (SImode);
4346 if (CONST_INT_P (operands[0]))
4347 emit_move_insn (to_insert, operands[0] == CONST0_RTX (SImode)
4348 ? CONST0_RTX (SImode) : CONST1_RTX (SImode));
4349 else
4350 {
4351 rtx cmp = gen_rtx_NE (SImode, operands[0], CONST0_RTX (SImode));
4352 emit_insn (gen_cstoresi4 (to_insert, cmp, operands[0],
4353 CONST0_RTX (SImode)));
4354 }
4355 emit_insn (gen_insv (apsr, CONST1_RTX (SImode),
4356 gen_int_mode (27, SImode), to_insert));
4357 emit_insn (gen_arm_set_apsr (apsr));
4358 DONE;
4359 }
4360 )
4361
4362 (define_insn "satsi_<SAT:code><add_clobber_q_name>"
4363 [(set (match_operand:SI 0 "s_register_operand" "=r")
4364 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
4365 (match_operand:SI 1 "const_int_operand" "i"))
4366 (match_operand:SI 2 "const_int_operand" "i")))]
4367 "TARGET_32BIT && arm_arch6 && <add_clobber_q_pred>
4368 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4369 {
4370 int mask;
4371 bool signed_sat;
4372 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4373 &mask, &signed_sat))
4374 gcc_unreachable ();
4375
4376 operands[1] = GEN_INT (mask);
4377 if (signed_sat)
4378 return "ssat%?\t%0, %1, %3";
4379 else
4380 return "usat%?\t%0, %1, %3";
4381 }
4382 [(set_attr "predicable" "yes")
4383 (set_attr "type" "alus_imm")]
4384 )
4385
4386 (define_insn "*satsi_<SAT:code>_shift"
4387 [(set (match_operand:SI 0 "s_register_operand" "=r")
4388 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
4389 [(match_operand:SI 4 "s_register_operand" "r")
4390 (match_operand:SI 5 "const_int_operand" "i")])
4391 (match_operand:SI 1 "const_int_operand" "i"))
4392 (match_operand:SI 2 "const_int_operand" "i")))]
4393 "TARGET_32BIT && arm_arch6 && !ARM_Q_BIT_READ
4394 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4395 {
4396 int mask;
4397 bool signed_sat;
4398 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4399 &mask, &signed_sat))
4400 gcc_unreachable ();
4401
4402 operands[1] = GEN_INT (mask);
4403 if (signed_sat)
4404 return "ssat%?\t%0, %1, %4%S3";
4405 else
4406 return "usat%?\t%0, %1, %4%S3";
4407 }
4408 [(set_attr "predicable" "yes")
4409 (set_attr "shift" "3")
4410 (set_attr "type" "logic_shift_reg")])
4411 \f
4412 ;; Custom Datapath Extension insns.
4413 (define_insn "arm_cx1<mode>"
4414 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4415 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4416 (match_operand:SI 2 "const_int_ccde1_operand" "i")]
4417 UNSPEC_CDE))]
4418 "TARGET_CDE"
4419 "cx1<cde_suffix>\\tp%c1, <cde_dest>, %2"
4420 [(set_attr "type" "coproc")]
4421 )
4422
4423 (define_insn "arm_cx1a<mode>"
4424 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4425 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4426 (match_operand:SIDI 2 "s_register_operand" "0")
4427 (match_operand:SI 3 "const_int_ccde1_operand" "i")]
4428 UNSPEC_CDEA))]
4429 "TARGET_CDE"
4430 "cx1<cde_suffix>a\\tp%c1, <cde_dest>, %3"
4431 [(set_attr "type" "coproc")]
4432 )
4433
4434 (define_insn "arm_cx2<mode>"
4435 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4436 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4437 (match_operand:SI 2 "s_register_operand" "r")
4438 (match_operand:SI 3 "const_int_ccde2_operand" "i")]
4439 UNSPEC_CDE))]
4440 "TARGET_CDE"
4441 "cx2<cde_suffix>\\tp%c1, <cde_dest>, %2, %3"
4442 [(set_attr "type" "coproc")]
4443 )
4444
4445 (define_insn "arm_cx2a<mode>"
4446 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4447 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4448 (match_operand:SIDI 2 "s_register_operand" "0")
4449 (match_operand:SI 3 "s_register_operand" "r")
4450 (match_operand:SI 4 "const_int_ccde2_operand" "i")]
4451 UNSPEC_CDEA))]
4452 "TARGET_CDE"
4453 "cx2<cde_suffix>a\\tp%c1, <cde_dest>, %3, %4"
4454 [(set_attr "type" "coproc")]
4455 )
4456
4457 (define_insn "arm_cx3<mode>"
4458 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4459 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4460 (match_operand:SI 2 "s_register_operand" "r")
4461 (match_operand:SI 3 "s_register_operand" "r")
4462 (match_operand:SI 4 "const_int_ccde3_operand" "i")]
4463 UNSPEC_CDE))]
4464 "TARGET_CDE"
4465 "cx3<cde_suffix>\\tp%c1, <cde_dest>, %2, %3, %4"
4466 [(set_attr "type" "coproc")]
4467 )
4468
4469 (define_insn "arm_cx3a<mode>"
4470 [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4471 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4472 (match_operand:SIDI 2 "s_register_operand" "0")
4473 (match_operand:SI 3 "s_register_operand" "r")
4474 (match_operand:SI 4 "s_register_operand" "r")
4475 (match_operand:SI 5 "const_int_ccde3_operand" "i")]
4476 UNSPEC_CDEA))]
4477 "TARGET_CDE"
4478 "cx3<cde_suffix>a\\tp%c1, <cde_dest>, %3, %4, %5"
4479 [(set_attr "type" "coproc")]
4480 )
4481 \f
4482 ;; Shift and rotation insns
4483
4484 (define_expand "ashldi3"
4485 [(set (match_operand:DI 0 "s_register_operand")
4486 (ashift:DI (match_operand:DI 1 "s_register_operand")
4487 (match_operand:SI 2 "reg_or_int_operand")))]
4488 "TARGET_32BIT"
4489 "
4490 if (TARGET_HAVE_MVE && !BYTES_BIG_ENDIAN)
4491 {
4492 if (!reg_or_int_operand (operands[2], SImode))
4493 operands[2] = force_reg (SImode, operands[2]);
4494
4495 /* Armv8.1-M Mainline double shifts are not expanded. */
4496 if (arm_reg_or_long_shift_imm (operands[2], GET_MODE (operands[2]))
4497 && (REG_P (operands[2]) || INTVAL(operands[2]) != 32))
4498 {
4499 if (!reg_overlap_mentioned_p(operands[0], operands[1]))
4500 emit_insn (gen_movdi (operands[0], operands[1]));
4501
4502 emit_insn (gen_thumb2_lsll (operands[0], operands[2]));
4503 DONE;
4504 }
4505 }
4506
4507 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
4508 operands[2], gen_reg_rtx (SImode),
4509 gen_reg_rtx (SImode));
4510 DONE;
4511 ")
4512
4513 (define_expand "ashlsi3"
4514 [(set (match_operand:SI 0 "s_register_operand")
4515 (ashift:SI (match_operand:SI 1 "s_register_operand")
4516 (match_operand:SI 2 "arm_rhs_operand")))]
4517 "TARGET_EITHER"
4518 "
4519 if (CONST_INT_P (operands[2])
4520 && (UINTVAL (operands[2])) > 31)
4521 {
4522 emit_insn (gen_movsi (operands[0], const0_rtx));
4523 DONE;
4524 }
4525 "
4526 )
4527
4528 (define_expand "ashrdi3"
4529 [(set (match_operand:DI 0 "s_register_operand")
4530 (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
4531 (match_operand:SI 2 "reg_or_int_operand")))]
4532 "TARGET_32BIT"
4533 "
4534 /* Armv8.1-M Mainline double shifts are not expanded. */
4535 if (TARGET_HAVE_MVE && !BYTES_BIG_ENDIAN
4536 && arm_reg_or_long_shift_imm (operands[2], GET_MODE (operands[2])))
4537 {
4538 if (!reg_overlap_mentioned_p(operands[0], operands[1]))
4539 emit_insn (gen_movdi (operands[0], operands[1]));
4540
4541 emit_insn (gen_thumb2_asrl (operands[0], operands[2]));
4542 DONE;
4543 }
4544
4545 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
4546 operands[2], gen_reg_rtx (SImode),
4547 gen_reg_rtx (SImode));
4548 DONE;
4549 ")
4550
4551 (define_expand "ashrsi3"
4552 [(set (match_operand:SI 0 "s_register_operand")
4553 (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
4554 (match_operand:SI 2 "arm_rhs_operand")))]
4555 "TARGET_EITHER"
4556 "
4557 if (CONST_INT_P (operands[2])
4558 && UINTVAL (operands[2]) > 31)
4559 operands[2] = GEN_INT (31);
4560 "
4561 )
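;; Clamping to 31 is safe because an arithmetic right shift by 31 already
;; yields 0 or -1 depending on the sign, the same result as any larger
;; shift amount.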
4562
4563 (define_expand "lshrdi3"
4564 [(set (match_operand:DI 0 "s_register_operand")
4565 (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
4566 (match_operand:SI 2 "reg_or_int_operand")))]
4567 "TARGET_32BIT"
4568 "
4569 /* Armv8.1-M Mainline double shifts are not expanded. */
4570 if (TARGET_HAVE_MVE && !BYTES_BIG_ENDIAN
4571 && long_shift_imm (operands[2], GET_MODE (operands[2])))
4572 {
4573 if (!reg_overlap_mentioned_p(operands[0], operands[1]))
4574 emit_insn (gen_movdi (operands[0], operands[1]));
4575
4576 emit_insn (gen_thumb2_lsrl (operands[0], operands[2]));
4577 DONE;
4578 }
4579
4580 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
4581 operands[2], gen_reg_rtx (SImode),
4582 gen_reg_rtx (SImode));
4583 DONE;
4584 ")
4585
4586 (define_expand "lshrsi3"
4587 [(set (match_operand:SI 0 "s_register_operand")
4588 (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
4589 (match_operand:SI 2 "arm_rhs_operand")))]
4590 "TARGET_EITHER"
4591 "
4592 if (CONST_INT_P (operands[2])
4593 && (UINTVAL (operands[2])) > 31)
4594 {
4595 emit_insn (gen_movsi (operands[0], const0_rtx));
4596 DONE;
4597 }
4598 "
4599 )
4600
4601 (define_expand "rotlsi3"
4602 [(set (match_operand:SI 0 "s_register_operand")
4603 (rotatert:SI (match_operand:SI 1 "s_register_operand")
4604 (match_operand:SI 2 "reg_or_int_operand")))]
4605 "TARGET_32BIT"
4606 "
4607 if (CONST_INT_P (operands[2]))
4608 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
4609 else
4610 {
4611 rtx reg = gen_reg_rtx (SImode);
4612 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
4613 operands[2] = reg;
4614 }
4615 "
4616 )
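;; Example (illustrative): a rotate left by 8 is rewritten above as a rotate
;; right by (32 - 8) % 32 == 24, since the core only provides ROR.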
4617
4618 (define_expand "rotrsi3"
4619 [(set (match_operand:SI 0 "s_register_operand")
4620 (rotatert:SI (match_operand:SI 1 "s_register_operand")
4621 (match_operand:SI 2 "arm_rhs_operand")))]
4622 "TARGET_EITHER"
4623 "
4624 if (TARGET_32BIT)
4625 {
4626 if (CONST_INT_P (operands[2])
4627 && UINTVAL (operands[2]) > 31)
4628 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
4629 }
4630 else /* TARGET_THUMB1 */
4631 {
4632 if (CONST_INT_P (operands [2]))
4633 operands [2] = force_reg (SImode, operands[2]);
4634 }
4635 "
4636 )
4637
4638 (define_insn "*arm_shiftsi3"
4639 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
4640 (match_operator:SI 3 "shift_operator"
4641 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
4642 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
4643 "TARGET_32BIT"
4644 "* return arm_output_shift(operands, 0);"
4645 [(set_attr "predicable" "yes")
4646 (set_attr "arch" "t2,t2,*,*")
4647 (set_attr "predicable_short_it" "yes,yes,no,no")
4648 (set_attr "length" "4")
4649 (set_attr "shift" "1")
4650 (set_attr "autodetect_type" "alu_shift_operator3")]
4651 )
4652
4653 (define_insn "*shiftsi3_compare0"
4654 [(set (reg:CC_NZ CC_REGNUM)
4655 (compare:CC_NZ (match_operator:SI 3 "shift_operator"
4656 [(match_operand:SI 1 "s_register_operand" "r,r")
4657 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4658 (const_int 0)))
4659 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4660 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
4661 "TARGET_32BIT"
4662 "* return arm_output_shift(operands, 1);"
4663 [(set_attr "conds" "set")
4664 (set_attr "shift" "1")
4665 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
4666 )
4667
4668 (define_insn "*shiftsi3_compare0_scratch"
4669 [(set (reg:CC_NZ CC_REGNUM)
4670 (compare:CC_NZ (match_operator:SI 3 "shift_operator"
4671 [(match_operand:SI 1 "s_register_operand" "r,r")
4672 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4673 (const_int 0)))
4674 (clobber (match_scratch:SI 0 "=r,r"))]
4675 "TARGET_32BIT"
4676 "* return arm_output_shift(operands, 1);"
4677 [(set_attr "conds" "set")
4678 (set_attr "shift" "1")
4679 (set_attr "type" "shift_imm,shift_reg")]
4680 )
4681
4682 (define_insn "*not_shiftsi"
4683 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4684 (not:SI (match_operator:SI 3 "shift_operator"
4685 [(match_operand:SI 1 "s_register_operand" "r,r")
4686 (match_operand:SI 2 "shift_amount_operand" "M,r")])))]
4687 "TARGET_32BIT"
4688 "mvn%?\\t%0, %1%S3"
4689 [(set_attr "predicable" "yes")
4690 (set_attr "shift" "1")
4691 (set_attr "arch" "32,a")
4692 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4693
4694 (define_insn "*not_shiftsi_compare0"
4695 [(set (reg:CC_NZ CC_REGNUM)
4696 (compare:CC_NZ
4697 (not:SI (match_operator:SI 3 "shift_operator"
4698 [(match_operand:SI 1 "s_register_operand" "r,r")
4699 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
4700 (const_int 0)))
4701 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4702 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
4703 "TARGET_32BIT"
4704 "mvns%?\\t%0, %1%S3"
4705 [(set_attr "conds" "set")
4706 (set_attr "shift" "1")
4707 (set_attr "arch" "32,a")
4708 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4709
4710 (define_insn "*not_shiftsi_compare0_scratch"
4711 [(set (reg:CC_NZ CC_REGNUM)
4712 (compare:CC_NZ
4713 (not:SI (match_operator:SI 3 "shift_operator"
4714 [(match_operand:SI 1 "s_register_operand" "r,r")
4715 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
4716 (const_int 0)))
4717 (clobber (match_scratch:SI 0 "=r,r"))]
4718 "TARGET_32BIT"
4719 "mvns%?\\t%0, %1%S3"
4720 [(set_attr "conds" "set")
4721 (set_attr "shift" "1")
4722 (set_attr "arch" "32,a")
4723 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4724
4725 ;; We don't really have extzv, but defining this using shifts helps
4726 ;; to reduce register pressure later on.
4727
4728 (define_expand "extzv"
4729 [(set (match_operand 0 "s_register_operand")
4730 (zero_extract (match_operand 1 "nonimmediate_operand")
4731 (match_operand 2 "const_int_operand")
4732 (match_operand 3 "const_int_operand")))]
4733 "TARGET_THUMB1 || arm_arch_thumb2"
4734 "
4735 {
4736 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
4737 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
4738
4739 if (arm_arch_thumb2)
4740 {
4741 HOST_WIDE_INT width = INTVAL (operands[2]);
4742 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4743
4744 if (unaligned_access && MEM_P (operands[1])
4745 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
4746 {
4747 rtx base_addr;
4748
4749 if (BYTES_BIG_ENDIAN)
4750 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
4751 - bitpos;
4752
4753 if (width == 32)
4754 {
4755 base_addr = adjust_address (operands[1], SImode,
4756 bitpos / BITS_PER_UNIT);
4757 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4758 }
4759 else
4760 {
4761 rtx dest = operands[0];
4762 rtx tmp = gen_reg_rtx (SImode);
4763
4764 /* We may get a paradoxical subreg here. Strip it off. */
4765 if (GET_CODE (dest) == SUBREG
4766 && GET_MODE (dest) == SImode
4767 && GET_MODE (SUBREG_REG (dest)) == HImode)
4768 dest = SUBREG_REG (dest);
4769
4770 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4771 FAIL;
4772
4773 base_addr = adjust_address (operands[1], HImode,
4774 bitpos / BITS_PER_UNIT);
4775 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
4776 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4777 }
4778 DONE;
4779 }
4780 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
4781 {
4782 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
4783 operands[3]));
4784 DONE;
4785 }
4786 else
4787 FAIL;
4788 }
4789
4790 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4791 FAIL;
4792
4793 operands[3] = GEN_INT (rshift);
4794
4795 if (lshift == 0)
4796 {
4797 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
4798 DONE;
4799 }
4800
4801 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
4802 operands[3], gen_reg_rtx (SImode)));
4803 DONE;
4804 }"
4805 )
4806
4807 ;; Helper for extzv, for the Thumb-1 register-shifts case.
4808
4809 (define_expand "extzv_t1"
4810 [(set (match_operand:SI 4 "s_register_operand")
4811 (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
4812 (match_operand:SI 2 "const_int_operand")))
4813 (set (match_operand:SI 0 "s_register_operand")
4814 (lshiftrt:SI (match_dup 4)
4815 (match_operand:SI 3 "const_int_operand")))]
4816 "TARGET_THUMB1"
4817 "")
4818
4819 (define_expand "extv"
4820 [(set (match_operand 0 "s_register_operand")
4821 (sign_extract (match_operand 1 "nonimmediate_operand")
4822 (match_operand 2 "const_int_operand")
4823 (match_operand 3 "const_int_operand")))]
4824 "arm_arch_thumb2"
4825 {
4826 HOST_WIDE_INT width = INTVAL (operands[2]);
4827 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4828
4829 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
4830 && (bitpos % BITS_PER_UNIT) == 0)
4831 {
4832 rtx base_addr;
4833
4834 if (BYTES_BIG_ENDIAN)
4835 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
4836
4837 if (width == 32)
4838 {
4839 base_addr = adjust_address (operands[1], SImode,
4840 bitpos / BITS_PER_UNIT);
4841 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4842 }
4843 else
4844 {
4845 rtx dest = operands[0];
4846 rtx tmp = gen_reg_rtx (SImode);
4847
4848 /* We may get a paradoxical subreg here. Strip it off. */
4849 if (GET_CODE (dest) == SUBREG
4850 && GET_MODE (dest) == SImode
4851 && GET_MODE (SUBREG_REG (dest)) == HImode)
4852 dest = SUBREG_REG (dest);
4853
4854 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4855 FAIL;
4856
4857 base_addr = adjust_address (operands[1], HImode,
4858 bitpos / BITS_PER_UNIT);
4859 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
4860 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4861 }
4862
4863 DONE;
4864 }
4865 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4866 FAIL;
4867 else if (GET_MODE (operands[0]) == SImode
4868 && GET_MODE (operands[1]) == SImode)
4869 {
4870 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
4871 operands[3]));
4872 DONE;
4873 }
4874
4875 FAIL;
4876 })
4877
4878 ; Helper to expand register forms of extv with the proper modes.
4879
4880 (define_expand "extv_regsi"
4881 [(set (match_operand:SI 0 "s_register_operand")
4882 (sign_extract:SI (match_operand:SI 1 "s_register_operand")
4883 (match_operand 2 "const_int_operand")
4884 (match_operand 3 "const_int_operand")))]
4885 ""
4886 {
4887 })
4888
4889 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
4890
4891 (define_insn "unaligned_loaddi"
4892 [(set (match_operand:DI 0 "s_register_operand" "=r")
4893 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
4894 UNSPEC_UNALIGNED_LOAD))]
4895 "TARGET_32BIT && TARGET_LDRD"
4896 "*
4897 return output_move_double (operands, true, NULL);
4898 "
4899 [(set_attr "length" "8")
4900 (set_attr "type" "load_8")])
4901
4902 (define_insn "unaligned_loadsi"
4903 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4904 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
4905 UNSPEC_UNALIGNED_LOAD))]
4906 "unaligned_access"
4907 "@
4908 ldr\t%0, %1\t@ unaligned
4909 ldr%?\t%0, %1\t@ unaligned
4910 ldr%?\t%0, %1\t@ unaligned"
4911 [(set_attr "arch" "t1,t2,32")
4912 (set_attr "length" "2,2,4")
4913 (set_attr "predicable" "no,yes,yes")
4914 (set_attr "predicable_short_it" "no,yes,no")
4915 (set_attr "type" "load_4")])
4916
4917 ;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
4918 ;; address (there's no immediate format). That's tricky to support
4919 ;; here and we don't really need this pattern for that case, so only
4920 ;; enable for 32-bit ISAs.
4921 (define_insn "unaligned_loadhis"
4922 [(set (match_operand:SI 0 "s_register_operand" "=r")
4923 (sign_extend:SI
4924 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
4925 UNSPEC_UNALIGNED_LOAD)))]
4926 "unaligned_access && TARGET_32BIT"
4927 "ldrsh%?\t%0, %1\t@ unaligned"
4928 [(set_attr "predicable" "yes")
4929 (set_attr "type" "load_byte")])
4930
4931 (define_insn "unaligned_loadhiu"
4932 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4933 (zero_extend:SI
4934 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
4935 UNSPEC_UNALIGNED_LOAD)))]
4936 "unaligned_access"
4937 "@
4938 ldrh\t%0, %1\t@ unaligned
4939 ldrh%?\t%0, %1\t@ unaligned
4940 ldrh%?\t%0, %1\t@ unaligned"
4941 [(set_attr "arch" "t1,t2,32")
4942 (set_attr "length" "2,2,4")
4943 (set_attr "predicable" "no,yes,yes")
4944 (set_attr "predicable_short_it" "no,yes,no")
4945 (set_attr "type" "load_byte")])
4946
4947 (define_insn "unaligned_storedi"
4948 [(set (match_operand:DI 0 "memory_operand" "=m")
4949 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
4950 UNSPEC_UNALIGNED_STORE))]
4951 "TARGET_32BIT && TARGET_LDRD"
4952 "*
4953 return output_move_double (operands, true, NULL);
4954 "
4955 [(set_attr "length" "8")
4956 (set_attr "type" "store_8")])
4957
4958 (define_insn "unaligned_storesi"
4959 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
4960 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
4961 UNSPEC_UNALIGNED_STORE))]
4962 "unaligned_access"
4963 "@
4964 str\t%1, %0\t@ unaligned
4965 str%?\t%1, %0\t@ unaligned
4966 str%?\t%1, %0\t@ unaligned"
4967 [(set_attr "arch" "t1,t2,32")
4968 (set_attr "length" "2,2,4")
4969 (set_attr "predicable" "no,yes,yes")
4970 (set_attr "predicable_short_it" "no,yes,no")
4971 (set_attr "type" "store_4")])
4972
4973 (define_insn "unaligned_storehi"
4974 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
4975 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
4976 UNSPEC_UNALIGNED_STORE))]
4977 "unaligned_access"
4978 "@
4979 strh\t%1, %0\t@ unaligned
4980 strh%?\t%1, %0\t@ unaligned
4981 strh%?\t%1, %0\t@ unaligned"
4982 [(set_attr "arch" "t1,t2,32")
4983 (set_attr "length" "2,2,4")
4984 (set_attr "predicable" "no,yes,yes")
4985 (set_attr "predicable_short_it" "no,yes,no")
4986 (set_attr "type" "store_4")])
4987
4988
4989 (define_insn "*extv_reg"
4990 [(set (match_operand:SI 0 "s_register_operand" "=r")
4991 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4992 (match_operand:SI 2 "const_int_operand" "n")
4993 (match_operand:SI 3 "const_int_operand" "n")))]
4994 "arm_arch_thumb2
4995 && IN_RANGE (INTVAL (operands[3]), 0, 31)
4996 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
4997 "sbfx%?\t%0, %1, %3, %2"
4998 [(set_attr "length" "4")
4999 (set_attr "predicable" "yes")
5000 (set_attr "type" "bfm")]
5001 )
5002
5003 (define_insn "extzv_t2"
5004 [(set (match_operand:SI 0 "s_register_operand" "=r")
5005 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
5006 (match_operand:SI 2 "const_int_operand" "n")
5007 (match_operand:SI 3 "const_int_operand" "n")))]
5008 "arm_arch_thumb2
5009 && IN_RANGE (INTVAL (operands[3]), 0, 31)
5010 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
5011 "ubfx%?\t%0, %1, %3, %2"
5012 [(set_attr "length" "4")
5013 (set_attr "predicable" "yes")
5014 (set_attr "type" "bfm")]
5015 )
5016
5017
5018 ;; Division instructions
5019 (define_insn "divsi3"
5020 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5021 (div:SI (match_operand:SI 1 "s_register_operand" "r,r")
5022 (match_operand:SI 2 "s_register_operand" "r,r")))]
5023 "TARGET_IDIV"
5024 "@
5025 sdiv%?\t%0, %1, %2
5026 sdiv\t%0, %1, %2"
5027 [(set_attr "arch" "32,v8mb")
5028 (set_attr "predicable" "yes")
5029 (set_attr "type" "sdiv")]
5030 )
5031
5032 (define_insn "udivsi3"
5033 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5034 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
5035 (match_operand:SI 2 "s_register_operand" "r,r")))]
5036 "TARGET_IDIV"
5037 "@
5038 udiv%?\t%0, %1, %2
5039 udiv\t%0, %1, %2"
5040 [(set_attr "arch" "32,v8mb")
5041 (set_attr "predicable" "yes")
5042 (set_attr "type" "udiv")]
5043 )
5044
5045 \f
5046 ;; Unary arithmetic insns
5047
5048 (define_expand "negv<SIDI:mode>3"
5049 [(match_operand:SIDI 0 "s_register_operand")
5050 (match_operand:SIDI 1 "s_register_operand")
5051 (match_operand 2 "")]
5052 "TARGET_32BIT"
5053 {
5054 emit_insn (gen_subv<mode>4 (operands[0], const0_rtx, operands[1],
5055 operands[2]));
5056 DONE;
5057 })
5058
5059 (define_expand "negsi2"
5060 [(set (match_operand:SI 0 "s_register_operand")
5061 (neg:SI (match_operand:SI 1 "s_register_operand")))]
5062 "TARGET_EITHER"
5063 ""
5064 )
5065
5066 (define_insn "*arm_negsi2"
5067 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
5068 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
5069 "TARGET_32BIT"
5070 "rsb%?\\t%0, %1, #0"
5071 [(set_attr "predicable" "yes")
5072 (set_attr "predicable_short_it" "yes,no")
5073 (set_attr "arch" "t2,*")
5074 (set_attr "length" "4")
5075 (set_attr "type" "alu_imm")]
5076 )
5077
5078 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
5079 ;; rather than (0 cmp reg). This gives the same results for unsigned
5080 ;; and equality compares, which is what we mostly need here.
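;; (For example, ~reg - ~0 is the same value as 0 - reg, reg == 0 exactly when
;; ~reg == ~0, and, because ~0 is the largest unsigned value, 0 <u reg exactly
;; when ~reg <u ~0.  Signed orderings are not preserved, which is why only
;; unsigned and equality tests can rely on this form.)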
5081 (define_insn "negsi2_0compare"
5082 [(set (reg:CC_RSB CC_REGNUM)
5083 (compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r"))
5084 (const_int -1)))
5085 (set (match_operand:SI 0 "s_register_operand" "=l,r")
5086 (neg:SI (match_dup 1)))]
5087 "TARGET_32BIT"
5088 "@
5089 negs\\t%0, %1
5090 rsbs\\t%0, %1, #0"
5091 [(set_attr "conds" "set")
5092 (set_attr "arch" "t2,*")
5093 (set_attr "length" "2,*")
5094 (set_attr "type" "alus_imm")]
5095 )
5096
5097 (define_insn "negsi2_carryin"
5098 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5099 (minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r"))
5100 (match_operand:SI 2 "arm_borrow_operation" "")))]
5101 "TARGET_32BIT"
5102 "@
5103 rsc\\t%0, %1, #0
5104 sbc\\t%0, %1, %1, lsl #1"
5105 [(set_attr "conds" "use")
5106 (set_attr "arch" "a,t2")
5107 (set_attr "type" "adc_imm,adc_reg")]
5108 )
5109
5110 (define_expand "negsf2"
5111 [(set (match_operand:SF 0 "s_register_operand")
5112 (neg:SF (match_operand:SF 1 "s_register_operand")))]
5113 "TARGET_32BIT && TARGET_HARD_FLOAT"
5114 ""
5115 )
5116
5117 (define_expand "negdf2"
5118 [(set (match_operand:DF 0 "s_register_operand")
5119 (neg:DF (match_operand:DF 1 "s_register_operand")))]
5120 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
5121 "")
5122
5123 ;; abssi2 doesn't really clobber the condition codes if a different register
5124 ;; is being set. To keep things simple, assume during rtl manipulations that
5125 ;; it does, but tell the final scan operator the truth. Similarly for
5126 ;; (neg (abs...))
5127
5128 (define_expand "abssi2"
5129 [(parallel
5130 [(set (match_operand:SI 0 "s_register_operand")
5131 (abs:SI (match_operand:SI 1 "s_register_operand")))
5132 (clobber (match_dup 2))])]
5133 "TARGET_EITHER"
5134 "
5135 if (TARGET_THUMB1)
5136 operands[2] = gen_rtx_SCRATCH (SImode);
5137 else
5138 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
5139 ")
5140
5141 (define_insn_and_split "*arm_abssi2"
5142 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
5143 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
5144 (clobber (reg:CC CC_REGNUM))]
5145 "TARGET_ARM"
5146 "#"
5147 "&& reload_completed"
5148 [(const_int 0)]
5149 {
5150 /* if (which_alternative == 0) */
5151 if (REGNO(operands[0]) == REGNO(operands[1]))
5152 {
5153 /* Emit the pattern:
5154 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
5155 [(set (reg:CC CC_REGNUM)
5156 (compare:CC (match_dup 0) (const_int 0)))
5157 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
5158 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
5159 */
5160 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
5161 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
5162 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5163 (gen_rtx_LT (SImode,
5164 gen_rtx_REG (CCmode, CC_REGNUM),
5165 const0_rtx)),
5166 (gen_rtx_SET (operands[0],
5167 (gen_rtx_MINUS (SImode,
5168 const0_rtx,
5169 operands[1]))))));
5170 DONE;
5171 }
5172 else
5173 {
5174 /* Emit the pattern:
5175 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
5176 [(set (match_dup 0)
5177 (xor:SI (match_dup 1)
5178 (ashiftrt:SI (match_dup 1) (const_int 31))))
5179 (set (match_dup 0)
5180 (minus:SI (match_dup 0)
5181 (ashiftrt:SI (match_dup 1) (const_int 31))))]
5182 */
5183 emit_insn (gen_rtx_SET (operands[0],
5184 gen_rtx_XOR (SImode,
5185 gen_rtx_ASHIFTRT (SImode,
5186 operands[1],
5187 GEN_INT (31)),
5188 operands[1])));
5189 emit_insn (gen_rtx_SET (operands[0],
5190 gen_rtx_MINUS (SImode,
5191 operands[0],
5192 gen_rtx_ASHIFTRT (SImode,
5193 operands[1],
5194 GEN_INT (31)))));
5195 DONE;
5196 }
5197 }
5198 [(set_attr "conds" "clob,*")
5199 (set_attr "shift" "1")
5200 (set_attr "predicable" "no, yes")
5201 (set_attr "length" "8")
5202 (set_attr "type" "multiple")]
5203 )
5204
5205 (define_insn_and_split "*arm_neg_abssi2"
5206 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
5207 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
5208 (clobber (reg:CC CC_REGNUM))]
5209 "TARGET_ARM"
5210 "#"
5211 "&& reload_completed"
5212 [(const_int 0)]
5213 {
5214 /* if (which_alternative == 0) */
5215 if (REGNO (operands[0]) == REGNO (operands[1]))
5216 {
5217 /* Emit the pattern:
5218 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
5219 */
5220 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
5221 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
5222 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5223 gen_rtx_GT (SImode,
5224 gen_rtx_REG (CCmode, CC_REGNUM),
5225 const0_rtx),
5226 gen_rtx_SET (operands[0],
5227 (gen_rtx_MINUS (SImode,
5228 const0_rtx,
5229 operands[1])))));
5230 }
5231 else
5232 {
5233 /* Emit the pattern:
5234 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
5235 */
5236 emit_insn (gen_rtx_SET (operands[0],
5237 gen_rtx_XOR (SImode,
5238 gen_rtx_ASHIFTRT (SImode,
5239 operands[1],
5240 GEN_INT (31)),
5241 operands[1])));
5242 emit_insn (gen_rtx_SET (operands[0],
5243 gen_rtx_MINUS (SImode,
5244 gen_rtx_ASHIFTRT (SImode,
5245 operands[1],
5246 GEN_INT (31)),
5247 operands[0])));
5248 }
5249 DONE;
5250 }
5251 [(set_attr "conds" "clob,*")
5252 (set_attr "shift" "1")
5253 (set_attr "predicable" "no, yes")
5254 (set_attr "length" "8")
5255 (set_attr "type" "multiple")]
5256 )
5257
5258 (define_expand "abssf2"
5259 [(set (match_operand:SF 0 "s_register_operand")
5260 (abs:SF (match_operand:SF 1 "s_register_operand")))]
5261 "TARGET_32BIT && TARGET_HARD_FLOAT"
5262 "")
5263
5264 (define_expand "absdf2"
5265 [(set (match_operand:DF 0 "s_register_operand")
5266 (abs:DF (match_operand:DF 1 "s_register_operand")))]
5267 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5268 "")
5269
5270 (define_expand "sqrtsf2"
5271 [(set (match_operand:SF 0 "s_register_operand")
5272 (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
5273 "TARGET_32BIT && TARGET_HARD_FLOAT"
5274 "")
5275
5276 (define_expand "sqrtdf2"
5277 [(set (match_operand:DF 0 "s_register_operand")
5278 (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
5279 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
5280 "")
5281
5282 (define_expand "one_cmplsi2"
5283 [(set (match_operand:SI 0 "s_register_operand")
5284 (not:SI (match_operand:SI 1 "s_register_operand")))]
5285 "TARGET_EITHER"
5286 ""
5287 )
5288
5289 (define_insn "*arm_one_cmplsi2"
5290 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
5291 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
5292 "TARGET_32BIT"
5293 "mvn%?\\t%0, %1"
5294 [(set_attr "predicable" "yes")
5295 (set_attr "predicable_short_it" "yes,no")
5296 (set_attr "arch" "t2,*")
5297 (set_attr "length" "4")
5298 (set_attr "type" "mvn_reg")]
5299 )
5300
5301 (define_insn "*notsi_compare0"
5302 [(set (reg:CC_NZ CC_REGNUM)
5303 (compare:CC_NZ (not:SI (match_operand:SI 1 "s_register_operand" "r"))
5304 (const_int 0)))
5305 (set (match_operand:SI 0 "s_register_operand" "=r")
5306 (not:SI (match_dup 1)))]
5307 "TARGET_32BIT"
5308 "mvns%?\\t%0, %1"
5309 [(set_attr "conds" "set")
5310 (set_attr "type" "mvn_reg")]
5311 )
5312
5313 (define_insn "*notsi_compare0_scratch"
5314 [(set (reg:CC_NZ CC_REGNUM)
5315 (compare:CC_NZ (not:SI (match_operand:SI 1 "s_register_operand" "r"))
5316 (const_int 0)))
5317 (clobber (match_scratch:SI 0 "=r"))]
5318 "TARGET_32BIT"
5319 "mvns%?\\t%0, %1"
5320 [(set_attr "conds" "set")
5321 (set_attr "type" "mvn_reg")]
5322 )
5323 \f
5324 ;; Fixed <--> Floating conversion insns
5325
5326 (define_expand "floatsihf2"
5327 [(set (match_operand:HF 0 "general_operand")
5328 (float:HF (match_operand:SI 1 "general_operand")))]
5329 "TARGET_EITHER"
5330 "
5331 {
5332 rtx op1 = gen_reg_rtx (SFmode);
5333 expand_float (op1, operands[1], 0);
5334 op1 = convert_to_mode (HFmode, op1, 0);
5335 emit_move_insn (operands[0], op1);
5336 DONE;
5337 }"
5338 )
5339
5340 (define_expand "floatdihf2"
5341 [(set (match_operand:HF 0 "general_operand")
5342 (float:HF (match_operand:DI 1 "general_operand")))]
5343 "TARGET_EITHER"
5344 "
5345 {
5346 rtx op1 = gen_reg_rtx (SFmode);
5347 expand_float (op1, operands[1], 0);
5348 op1 = convert_to_mode (HFmode, op1, 0);
5349 emit_move_insn (operands[0], op1);
5350 DONE;
5351 }"
5352 )
5353
5354 (define_expand "floatsisf2"
5355 [(set (match_operand:SF 0 "s_register_operand")
5356 (float:SF (match_operand:SI 1 "s_register_operand")))]
5357 "TARGET_32BIT && TARGET_HARD_FLOAT"
5358 "
5359 ")
5360
5361 (define_expand "floatsidf2"
5362 [(set (match_operand:DF 0 "s_register_operand")
5363 (float:DF (match_operand:SI 1 "s_register_operand")))]
5364 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5365 "
5366 ")
5367
5368 (define_expand "fix_trunchfsi2"
5369 [(set (match_operand:SI 0 "general_operand")
5370 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
5371 "TARGET_EITHER"
5372 "
5373 {
5374 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5375 expand_fix (operands[0], op1, 0);
5376 DONE;
5377 }"
5378 )
5379
5380 (define_expand "fix_trunchfdi2"
5381 [(set (match_operand:DI 0 "general_operand")
5382 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
5383 "TARGET_EITHER"
5384 "
5385 {
5386 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5387 expand_fix (operands[0], op1, 0);
5388 DONE;
5389 }"
5390 )
5391
5392 (define_expand "fix_truncsfsi2"
5393 [(set (match_operand:SI 0 "s_register_operand")
5394 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
5395 "TARGET_32BIT && TARGET_HARD_FLOAT"
5396 "
5397 ")
5398
5399 (define_expand "fix_truncdfsi2"
5400 [(set (match_operand:SI 0 "s_register_operand")
5401 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
5402 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5403 "
5404 ")
5405
5406 ;; Truncation insns
5407
5408 (define_expand "truncdfsf2"
5409 [(set (match_operand:SF 0 "s_register_operand")
5410 (float_truncate:SF
5411 (match_operand:DF 1 "s_register_operand")))]
5412 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5413 ""
5414 )
5415
5416 ;; DFmode to HFmode conversions on targets without a single-step hardware
5417 ;; instruction have to go through SFmode.  This is dangerous because
5418 ;; it introduces double rounding.
5419 ;;
5420 ;; Disable this pattern unless we are in an unsafe math mode, or we have
5421 ;; a single-step instruction.
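;;
;; As an illustration of the double rounding (assuming round-to-nearest-even):
;; the DFmode value 1 + 2^-11 + 2^-25 converts directly to HFmode as 1 + 2^-10,
;; but rounding it to SFmode first gives exactly 1 + 2^-11, which then ties to
;; 1.0 in HFmode, so the two-step conversion yields a different result.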
5422
5423 (define_expand "truncdfhf2"
5424 [(set (match_operand:HF 0 "s_register_operand")
5425 (float_truncate:HF
5426 (match_operand:DF 1 "s_register_operand")))]
5427 "(TARGET_EITHER && flag_unsafe_math_optimizations)
5428 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
5429 {
5430 /* We don't have a direct instruction for this, so we must be in
5431 an unsafe math mode, and going via SFmode. */
5432
5433 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
5434 {
5435 rtx op1;
5436 op1 = convert_to_mode (SFmode, operands[1], 0);
5437 op1 = convert_to_mode (HFmode, op1, 0);
5438 emit_move_insn (operands[0], op1);
5439 DONE;
5440 }
5441 /* Otherwise, we will pick this up as a single instruction with
5442 no intermediary rounding. */
5443 }
5444 )
5445 \f
5446 ;; Zero and sign extension instructions.
5447
5448 (define_expand "zero_extend<mode>di2"
5449 [(set (match_operand:DI 0 "s_register_operand" "")
5450 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
5451 "TARGET_32BIT <qhs_zextenddi_cond>"
5452 {
5453 rtx res_lo, res_hi, op0_lo, op0_hi;
5454 res_lo = gen_lowpart (SImode, operands[0]);
5455 res_hi = gen_highpart (SImode, operands[0]);
5456 if (can_create_pseudo_p ())
5457 {
5458 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
5459 op0_hi = gen_reg_rtx (SImode);
5460 }
5461 else
5462 {
5463 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
5464 op0_hi = res_hi;
5465 }
5466 if (<MODE>mode != SImode)
5467 emit_insn (gen_rtx_SET (op0_lo,
5468 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5469 emit_insn (gen_movsi (op0_hi, const0_rtx));
5470 if (res_lo != op0_lo)
5471 emit_move_insn (res_lo, op0_lo);
5472 if (res_hi != op0_hi)
5473 emit_move_insn (res_hi, op0_hi);
5474 DONE;
5475 }
5476 )
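;; For instance, zero-extending an SImode value to DImode comes out of this
;; expander as a copy of the low word plus a clearing of the high word
;; (typically "mov rhi, #0"); QImode and HImode sources first get an SImode
;; zero-extension of the low word (typically uxtb/uxth or ldrb/ldrh).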
5477
5478 (define_expand "extend<mode>di2"
5479 [(set (match_operand:DI 0 "s_register_operand" "")
5480 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
5481 "TARGET_32BIT <qhs_sextenddi_cond>"
5482 {
5483 rtx res_lo, res_hi, op0_lo, op0_hi;
5484 res_lo = gen_lowpart (SImode, operands[0]);
5485 res_hi = gen_highpart (SImode, operands[0]);
5486 if (can_create_pseudo_p ())
5487 {
5488 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
5489 op0_hi = gen_reg_rtx (SImode);
5490 }
5491 else
5492 {
5493 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
5494 op0_hi = res_hi;
5495 }
5496 if (<MODE>mode != SImode)
5497 emit_insn (gen_rtx_SET (op0_lo,
5498 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5499 emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
5500 if (res_lo != op0_lo)
5501 emit_move_insn (res_lo, op0_lo);
5502 if (res_hi != op0_hi)
5503 emit_move_insn (res_hi, op0_hi);
5504 DONE;
5505 }
5506 )
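;; Sign extension to DImode is handled similarly, except that the high word is
;; filled with copies of the sign bit, roughly "asr rhi, rlo, #31", after the
;; low word has been sign-extended to SImode.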
5507
5508 ;; Splits for all extensions to DImode
5509 (define_split
5510 [(set (match_operand:DI 0 "s_register_operand" "")
5511 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5512 "TARGET_32BIT"
5513 [(set (match_dup 0) (match_dup 1))]
5514 {
5515 rtx lo_part = gen_lowpart (SImode, operands[0]);
5516 machine_mode src_mode = GET_MODE (operands[1]);
5517
5518 if (src_mode == SImode)
5519 emit_move_insn (lo_part, operands[1]);
5520 else
5521 emit_insn (gen_rtx_SET (lo_part,
5522 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5523 operands[0] = gen_highpart (SImode, operands[0]);
5524 operands[1] = const0_rtx;
5525 })
5526
5527 (define_split
5528 [(set (match_operand:DI 0 "s_register_operand" "")
5529 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5530 "TARGET_32BIT"
5531 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
5532 {
5533 rtx lo_part = gen_lowpart (SImode, operands[0]);
5534 machine_mode src_mode = GET_MODE (operands[1]);
5535
5536 if (src_mode == SImode)
5537 emit_move_insn (lo_part, operands[1]);
5538 else
5539 emit_insn (gen_rtx_SET (lo_part,
5540 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5541 operands[1] = lo_part;
5542 operands[0] = gen_highpart (SImode, operands[0]);
5543 })
5544
5545 (define_expand "zero_extendhisi2"
5546 [(set (match_operand:SI 0 "s_register_operand")
5547 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5548 "TARGET_EITHER"
5549 {
5550 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
5551 {
5552 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
5553 DONE;
5554 }
5555 if (!arm_arch6 && !MEM_P (operands[1]))
5556 {
5557 rtx t = gen_lowpart (SImode, operands[1]);
5558 rtx tmp = gen_reg_rtx (SImode);
5559 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5560 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
5561 DONE;
5562 }
5563 })
5564
5565 (define_split
5566 [(set (match_operand:SI 0 "s_register_operand" "")
5567 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
5568 "!TARGET_THUMB2 && !arm_arch6"
5569 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5570 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
5571 {
5572 operands[2] = gen_lowpart (SImode, operands[1]);
5573 })
5574
5575 (define_insn "*arm_zero_extendhisi2"
5576 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5577 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5578 "TARGET_ARM && arm_arch4 && !arm_arch6"
5579 "@
5580 #
5581 ldrh%?\\t%0, %1"
5582 [(set_attr "type" "alu_shift_reg,load_byte")
5583 (set_attr "predicable" "yes")]
5584 )
5585
5586 (define_insn "*arm_zero_extendhisi2_v6"
5587 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5588 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5589 "TARGET_ARM && arm_arch6"
5590 "@
5591 uxth%?\\t%0, %1
5592 ldrh%?\\t%0, %1"
5593 [(set_attr "predicable" "yes")
5594 (set_attr "type" "extend,load_byte")]
5595 )
5596
5597 (define_insn "*arm_zero_extendhisi2addsi"
5598 [(set (match_operand:SI 0 "s_register_operand" "=r")
5599 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5600 (match_operand:SI 2 "s_register_operand" "r")))]
5601 "TARGET_INT_SIMD"
5602 "uxtah%?\\t%0, %2, %1"
5603 [(set_attr "type" "alu_shift_reg")
5604 (set_attr "predicable" "yes")]
5605 )
5606
5607 (define_expand "zero_extendqisi2"
5608 [(set (match_operand:SI 0 "s_register_operand")
5609 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
5610 "TARGET_EITHER"
5611 {
5612 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
5613 {
5614 emit_insn (gen_andsi3 (operands[0],
5615 gen_lowpart (SImode, operands[1]),
5616 GEN_INT (255)));
5617 DONE;
5618 }
5619 if (!arm_arch6 && !MEM_P (operands[1]))
5620 {
5621 rtx t = gen_lowpart (SImode, operands[1]);
5622 rtx tmp = gen_reg_rtx (SImode);
5623 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5624 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
5625 DONE;
5626 }
5627 })
5628
5629 (define_split
5630 [(set (match_operand:SI 0 "s_register_operand" "")
5631 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
5632 "!arm_arch6"
5633 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5634 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
5635 {
5636 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5637 if (TARGET_ARM)
5638 {
5639 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
5640 DONE;
5641 }
5642 })
5643
5644 (define_insn "*arm_zero_extendqisi2"
5645 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5646 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
5647 "TARGET_ARM && !arm_arch6"
5648 "@
5649 #
5650 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5651 [(set_attr "length" "8,4")
5652 (set_attr "type" "alu_shift_reg,load_byte")
5653 (set_attr "predicable" "yes")]
5654 )
5655
5656 (define_insn "*arm_zero_extendqisi2_v6"
5657 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5658 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
5659 "TARGET_ARM && arm_arch6"
5660 "@
5661 uxtb%?\\t%0, %1
5662 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5663 [(set_attr "type" "extend,load_byte")
5664 (set_attr "predicable" "yes")]
5665 )
5666
5667 (define_insn "*arm_zero_extendqisi2addsi"
5668 [(set (match_operand:SI 0 "s_register_operand" "=r")
5669 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5670 (match_operand:SI 2 "s_register_operand" "r")))]
5671 "TARGET_INT_SIMD"
5672 "uxtab%?\\t%0, %2, %1"
5673 [(set_attr "predicable" "yes")
5674 (set_attr "type" "alu_shift_reg")]
5675 )
5676
5677 (define_split
5678 [(set (match_operand:SI 0 "s_register_operand" "")
5679 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
5680 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5681 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
5682 [(set (match_dup 2) (match_dup 1))
5683 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5684 ""
5685 )
5686
5687 (define_split
5688 [(set (match_operand:SI 0 "s_register_operand" "")
5689 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
5690 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5691 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
5692 [(set (match_dup 2) (match_dup 1))
5693 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5694 ""
5695 )
5696
5697
5698 (define_split
5699 [(set (match_operand:SI 0 "s_register_operand" "")
5700 (IOR_XOR:SI (and:SI (ashift:SI
5701 (match_operand:SI 1 "s_register_operand" "")
5702 (match_operand:SI 2 "const_int_operand" ""))
5703 (match_operand:SI 3 "const_int_operand" ""))
5704 (zero_extend:SI
5705 (match_operator 5 "subreg_lowpart_operator"
5706 [(match_operand:SI 4 "s_register_operand" "")]))))]
5707 "TARGET_32BIT
5708 && (UINTVAL (operands[3])
5709 == (GET_MODE_MASK (GET_MODE (operands[5]))
5710 & (GET_MODE_MASK (GET_MODE (operands[5]))
5711 << (INTVAL (operands[2])))))"
5712 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
5713 (match_dup 4)))
5714 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
5715 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
5716 )
5717
5718 (define_insn "*compareqi_eq0"
5719 [(set (reg:CC_Z CC_REGNUM)
5720 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
5721 (const_int 0)))]
5722 "TARGET_32BIT"
5723 "tst%?\\t%0, #255"
5724 [(set_attr "conds" "set")
5725 (set_attr "predicable" "yes")
5726 (set_attr "type" "logic_imm")]
5727 )
5728
5729 (define_expand "extendhisi2"
5730 [(set (match_operand:SI 0 "s_register_operand")
5731 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5732 "TARGET_EITHER"
5733 {
5734 if (TARGET_THUMB1)
5735 {
5736 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
5737 DONE;
5738 }
5739 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
5740 {
5741 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
5742 DONE;
5743 }
5744
5745 if (!arm_arch6 && !MEM_P (operands[1]))
5746 {
5747 rtx t = gen_lowpart (SImode, operands[1]);
5748 rtx tmp = gen_reg_rtx (SImode);
5749 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5750 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
5751 DONE;
5752 }
5753 })
5754
5755 (define_split
5756 [(parallel
5757 [(set (match_operand:SI 0 "register_operand" "")
5758 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
5759 (clobber (match_scratch:SI 2 ""))])]
5760 "!arm_arch6"
5761 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5762 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5763 {
5764 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5765 })
5766
5767 ;; This pattern will only be used when ldrsh is not available
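;; It synthesises the signed halfword load from two byte loads: both bytes are
;; loaded zero-extended, the more significant byte is shifted left by 24, and
;; the pieces are combined with roughly "orr rd, rlow, rhigh, asr #16" so that
;; the upper bits of rd hold the sign extension.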
5768 (define_expand "extendhisi2_mem"
5769 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5770 (set (match_dup 3)
5771 (zero_extend:SI (match_dup 7)))
5772 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
5773 (set (match_operand:SI 0 "" "")
5774 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
5775 "TARGET_ARM"
5776 "
5777 {
5778 rtx mem1, mem2;
5779 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5780
5781 mem1 = change_address (operands[1], QImode, addr);
5782 mem2 = change_address (operands[1], QImode,
5783 plus_constant (Pmode, addr, 1));
5784 operands[0] = gen_lowpart (SImode, operands[0]);
5785 operands[1] = mem1;
5786 operands[2] = gen_reg_rtx (SImode);
5787 operands[3] = gen_reg_rtx (SImode);
5788 operands[6] = gen_reg_rtx (SImode);
5789 operands[7] = mem2;
5790
5791 if (BYTES_BIG_ENDIAN)
5792 {
5793 operands[4] = operands[2];
5794 operands[5] = operands[3];
5795 }
5796 else
5797 {
5798 operands[4] = operands[3];
5799 operands[5] = operands[2];
5800 }
5801 }"
5802 )
5803
5804 (define_split
5805 [(set (match_operand:SI 0 "register_operand" "")
5806 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
5807 "!arm_arch6"
5808 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5809 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5810 {
5811 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5812 })
5813
5814 (define_insn "*arm_extendhisi2"
5815 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5816 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5817 "TARGET_ARM && arm_arch4 && !arm_arch6"
5818 "@
5819 #
5820 ldrsh%?\\t%0, %1"
5821 [(set_attr "length" "8,4")
5822 (set_attr "type" "alu_shift_reg,load_byte")
5823 (set_attr "predicable" "yes")]
5824 )
5825
5826 ;; ??? Check Thumb-2 pool range
5827 (define_insn "*arm_extendhisi2_v6"
5828 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5829 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5830 "TARGET_32BIT && arm_arch6"
5831 "@
5832 sxth%?\\t%0, %1
5833 ldrsh%?\\t%0, %1"
5834 [(set_attr "type" "extend,load_byte")
5835 (set_attr "predicable" "yes")]
5836 )
5837
5838 (define_insn "*arm_extendhisi2addsi"
5839 [(set (match_operand:SI 0 "s_register_operand" "=r")
5840 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5841 (match_operand:SI 2 "s_register_operand" "r")))]
5842 "TARGET_INT_SIMD"
5843 "sxtah%?\\t%0, %2, %1"
5844 [(set_attr "type" "alu_shift_reg")]
5845 )
5846
5847 (define_expand "extendqihi2"
5848 [(set (match_dup 2)
5849 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
5850 (const_int 24)))
5851 (set (match_operand:HI 0 "s_register_operand")
5852 (ashiftrt:SI (match_dup 2)
5853 (const_int 24)))]
5854 "TARGET_ARM"
5855 "
5856 {
5857 if (arm_arch4 && MEM_P (operands[1]))
5858 {
5859 emit_insn (gen_rtx_SET (operands[0],
5860 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
5861 DONE;
5862 }
5863 if (!s_register_operand (operands[1], QImode))
5864 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5865 operands[0] = gen_lowpart (SImode, operands[0]);
5866 operands[1] = gen_lowpart (SImode, operands[1]);
5867 operands[2] = gen_reg_rtx (SImode);
5868 }"
5869 )
5870
5871 (define_insn "*arm_extendqihi_insn"
5872 [(set (match_operand:HI 0 "s_register_operand" "=r")
5873 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
5874 "TARGET_ARM && arm_arch4"
5875 "ldrsb%?\\t%0, %1"
5876 [(set_attr "type" "load_byte")
5877 (set_attr "predicable" "yes")]
5878 )
5879
5880 (define_expand "extendqisi2"
5881 [(set (match_operand:SI 0 "s_register_operand")
5882 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
5883 "TARGET_EITHER"
5884 {
5885 if (!arm_arch4 && MEM_P (operands[1]))
5886 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5887
5888 if (!arm_arch6 && !MEM_P (operands[1]))
5889 {
5890 rtx t = gen_lowpart (SImode, operands[1]);
5891 rtx tmp = gen_reg_rtx (SImode);
5892 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5893 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
5894 DONE;
5895 }
5896 })
5897
5898 (define_split
5899 [(set (match_operand:SI 0 "register_operand" "")
5900 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
5901 "!arm_arch6"
5902 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5903 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
5904 {
5905 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5906 })
5907
5908 (define_insn "*arm_extendqisi"
5909 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5910 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5911 "TARGET_ARM && arm_arch4 && !arm_arch6"
5912 "@
5913 #
5914 ldrsb%?\\t%0, %1"
5915 [(set_attr "length" "8,4")
5916 (set_attr "type" "alu_shift_reg,load_byte")
5917 (set_attr "predicable" "yes")]
5918 )
5919
5920 (define_insn "*arm_extendqisi_v6"
5921 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5922 (sign_extend:SI
5923 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5924 "TARGET_ARM && arm_arch6"
5925 "@
5926 sxtb%?\\t%0, %1
5927 ldrsb%?\\t%0, %1"
5928 [(set_attr "type" "extend,load_byte")
5929 (set_attr "predicable" "yes")]
5930 )
5931
5932 (define_insn "*arm_extendqisi2addsi"
5933 [(set (match_operand:SI 0 "s_register_operand" "=r")
5934 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5935 (match_operand:SI 2 "s_register_operand" "r")))]
5936 "TARGET_INT_SIMD"
5937 "sxtab%?\\t%0, %2, %1"
5938 [(set_attr "type" "alu_shift_reg")
5939 (set_attr "predicable" "yes")]
5940 )
5941
5942 (define_insn "arm_<sup>xtb16"
5943 [(set (match_operand:SI 0 "s_register_operand" "=r")
5944 (unspec:SI
5945 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
5946 "TARGET_INT_SIMD"
5947 "<sup>xtb16%?\\t%0, %1"
5948 [(set_attr "predicable" "yes")
5949 (set_attr "type" "alu_dsp_reg")])
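;; uxtb16/sxtb16 extract the bytes in bits [7:0] and [23:16] of the source and
;; zero- or sign-extend each into the corresponding halfword of the result.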
5950
5951 (define_insn "arm_<simd32_op>"
5952 [(set (match_operand:SI 0 "s_register_operand" "=r")
5953 (unspec:SI
5954 [(match_operand:SI 1 "s_register_operand" "r")
5955 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
5956 "TARGET_INT_SIMD"
5957 "<simd32_op>%?\\t%0, %1, %2"
5958 [(set_attr "predicable" "yes")
5959 (set_attr "type" "alu_dsp_reg")])
5960
5961 (define_insn "arm_usada8"
5962 [(set (match_operand:SI 0 "s_register_operand" "=r")
5963 (unspec:SI
5964 [(match_operand:SI 1 "s_register_operand" "r")
5965 (match_operand:SI 2 "s_register_operand" "r")
5966 (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
5967 "TARGET_INT_SIMD"
5968 "usada8%?\\t%0, %1, %2, %3"
5969 [(set_attr "predicable" "yes")
5970 (set_attr "type" "alu_dsp_reg")])
5971
5972 (define_insn "arm_<simd32_op>"
5973 [(set (match_operand:DI 0 "s_register_operand" "=r")
5974 (unspec:DI
5975 [(match_operand:SI 1 "s_register_operand" "r")
5976 (match_operand:SI 2 "s_register_operand" "r")
5977 (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
5978 "TARGET_INT_SIMD"
5979 "<simd32_op>%?\\t%Q0, %R0, %1, %2"
5980 [(set_attr "predicable" "yes")
5981 (set_attr "type" "smlald")])
5982
5983 (define_insn "arm_<simd32_op>"
5984 [(set (match_operand:SI 0 "s_register_operand" "=r")
5985 (unspec:SI
5986 [(match_operand:SI 1 "s_register_operand" "r")
5987 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_GE))
5988 (set (reg:CC APSRGE_REGNUM)
5989 (unspec:CC [(reg:CC APSRGE_REGNUM)] UNSPEC_GE_SET))]
5990 "TARGET_INT_SIMD"
5991 "<simd32_op>%?\\t%0, %1, %2"
5992 [(set_attr "predicable" "yes")
5993 (set_attr "type" "alu_sreg")])
5994
5995 (define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
5996 [(set (match_operand:SI 0 "s_register_operand" "=r")
5997 (unspec:SI
5998 [(match_operand:SI 1 "s_register_operand" "r")
5999 (match_operand:SI 2 "s_register_operand" "r")
6000 (match_operand:SI 3 "s_register_operand" "r")] SIMD32_TERNOP_Q))]
6001 "TARGET_INT_SIMD && <add_clobber_q_pred>"
6002 "<simd32_op>%?\\t%0, %1, %2, %3"
6003 [(set_attr "predicable" "yes")
6004 (set_attr "type" "alu_sreg")])
6005
6006 (define_expand "arm_<simd32_op>"
6007 [(set (match_operand:SI 0 "s_register_operand")
6008 (unspec:SI
6009 [(match_operand:SI 1 "s_register_operand")
6010 (match_operand:SI 2 "s_register_operand")
6011 (match_operand:SI 3 "s_register_operand")] SIMD32_TERNOP_Q))]
6012 "TARGET_INT_SIMD"
6013 {
6014 if (ARM_Q_BIT_READ)
6015 emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
6016 operands[2], operands[3]));
6017 else
6018 emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
6019 operands[2], operands[3]));
6020 DONE;
6021 }
6022 )
6023
6024 (define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
6025 [(set (match_operand:SI 0 "s_register_operand" "=r")
6026 (unspec:SI
6027 [(match_operand:SI 1 "s_register_operand" "r")
6028 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_BINOP_Q))]
6029 "TARGET_INT_SIMD && <add_clobber_q_pred>"
6030 "<simd32_op>%?\\t%0, %1, %2"
6031 [(set_attr "predicable" "yes")
6032 (set_attr "type" "alu_sreg")])
6033
6034 (define_expand "arm_<simd32_op>"
6035 [(set (match_operand:SI 0 "s_register_operand")
6036 (unspec:SI
6037 [(match_operand:SI 1 "s_register_operand")
6038 (match_operand:SI 2 "s_register_operand")] SIMD32_BINOP_Q))]
6039 "TARGET_INT_SIMD"
6040 {
6041 if (ARM_Q_BIT_READ)
6042 emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
6043 operands[2]));
6044 else
6045 emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
6046 operands[2]));
6047 DONE;
6048 }
6049 )
6050
6051 (define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
6052 [(set (match_operand:SI 0 "s_register_operand" "=r")
6053 (unspec:SI
6054 [(match_operand:SI 1 "s_register_operand" "r")
6055 (match_operand:SI 2 "<sup>sat16_imm" "i")] USSAT16))]
6056 "TARGET_INT_SIMD && <add_clobber_q_pred>"
6057 "<simd32_op>%?\\t%0, %2, %1"
6058 [(set_attr "predicable" "yes")
6059 (set_attr "type" "alu_sreg")])
6060
6061 (define_expand "arm_<simd32_op>"
6062 [(set (match_operand:SI 0 "s_register_operand")
6063 (unspec:SI
6064 [(match_operand:SI 1 "s_register_operand")
6065 (match_operand:SI 2 "<sup>sat16_imm")] USSAT16))]
6066 "TARGET_INT_SIMD"
6067 {
6068 if (ARM_Q_BIT_READ)
6069 emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
6070 operands[2]));
6071 else
6072 emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
6073 operands[2]));
6074 DONE;
6075 }
6076 )
6077
6078 (define_insn "arm_sel"
6079 [(set (match_operand:SI 0 "s_register_operand" "=r")
6080 (unspec:SI
6081 [(match_operand:SI 1 "s_register_operand" "r")
6082 (match_operand:SI 2 "s_register_operand" "r")
6083 (reg:CC APSRGE_REGNUM)] UNSPEC_SEL))]
6084 "TARGET_INT_SIMD"
6085 "sel%?\\t%0, %1, %2"
6086 [(set_attr "predicable" "yes")
6087 (set_attr "type" "alu_sreg")])
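;; sel picks each byte of the result from the first or the second source
;; operand according to the corresponding APSR.GE flag, as set by the
;; GE-setting SIMD32 operations above (e.g. the 8-bit and 16-bit parallel
;; add/subtract instructions).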
6088
6089 (define_expand "extendsfdf2"
6090 [(set (match_operand:DF 0 "s_register_operand")
6091 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
6092 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6093 ""
6094 )
6095
6096 ;; HFmode -> DFmode conversions for which we don't have a single instruction
6097 ;; must go through SFmode.
6098 ;;
6099 ;; This is always safe for an extend.
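;; (Every HFmode value is exactly representable in SFmode, so the intermediate
;; SFmode step introduces no extra rounding.)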
6100
6101 (define_expand "extendhfdf2"
6102 [(set (match_operand:DF 0 "s_register_operand")
6103 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
6104 "TARGET_EITHER"
6105 {
6106 /* We don't have a direct instruction for this, so go via SFmode. */
6107 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
6108 {
6109 rtx op1;
6110 op1 = convert_to_mode (SFmode, operands[1], 0);
6111 op1 = convert_to_mode (DFmode, op1, 0);
6112 emit_insn (gen_movdf (operands[0], op1));
6113 DONE;
6114 }
6115 /* Otherwise, we're done producing RTL and will pick up the correct
6116 pattern to do this with one rounding-step in a single instruction. */
6117 }
6118 )
6119 \f
6120 ;; Move insns (including loads and stores)
6121
6122 ;; XXX Just some ideas about movti.
6123 ;; I don't think these are a good idea on the ARM; there just aren't enough
6124 ;; registers
6125 ;;(define_expand "loadti"
6126 ;; [(set (match_operand:TI 0 "s_register_operand")
6127 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
6128 ;; "" "")
6129
6130 ;;(define_expand "storeti"
6131 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
6132 ;; (match_operand:TI 1 "s_register_operand"))]
6133 ;; "" "")
6134
6135 ;;(define_expand "movti"
6136 ;; [(set (match_operand:TI 0 "general_operand")
6137 ;; (match_operand:TI 1 "general_operand"))]
6138 ;; ""
6139 ;; "
6140 ;;{
6141 ;; rtx insn;
6142 ;;
6143 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
6144 ;; operands[1] = copy_to_reg (operands[1]);
6145 ;; if (MEM_P (operands[0]))
6146 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
6147 ;; else if (MEM_P (operands[1]))
6148 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
6149 ;; else
6150 ;; FAIL;
6151 ;;
6152 ;; emit_insn (insn);
6153 ;; DONE;
6154 ;;}")
6155
6156 ;; Recognize garbage generated above.
6157
6158 ;;(define_insn ""
6159 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
6160 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
6161 ;; ""
6162 ;; "*
6163 ;; {
6164 ;; register mem = (which_alternative < 3);
6165 ;; register const char *template;
6166 ;;
6167 ;; operands[mem] = XEXP (operands[mem], 0);
6168 ;; switch (which_alternative)
6169 ;; {
6170 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
6171 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
6172 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
6173 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
6174 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
6175 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
6176 ;; }
6177 ;; output_asm_insn (template, operands);
6178 ;; return \"\";
6179 ;; }")
6180
6181 (define_expand "movdi"
6182 [(set (match_operand:DI 0 "general_operand")
6183 (match_operand:DI 1 "general_operand"))]
6184 "TARGET_EITHER"
6185 "
6186 gcc_checking_assert (aligned_operand (operands[0], DImode));
6187 gcc_checking_assert (aligned_operand (operands[1], DImode));
6188 if (can_create_pseudo_p ())
6189 {
6190 if (!REG_P (operands[0]))
6191 operands[1] = force_reg (DImode, operands[1]);
6192 }
6193 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
6194 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
6195 {
6196 /* Avoid LDRD's into an odd-numbered register pair in ARM state
6197 when expanding function calls. */
6198 gcc_assert (can_create_pseudo_p ());
6199 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
6200 {
6201 /* Perform load into legal reg pair first, then move. */
6202 rtx reg = gen_reg_rtx (DImode);
6203 emit_insn (gen_movdi (reg, operands[1]));
6204 operands[1] = reg;
6205 }
6206 emit_move_insn (gen_lowpart (SImode, operands[0]),
6207 gen_lowpart (SImode, operands[1]));
6208 emit_move_insn (gen_highpart (SImode, operands[0]),
6209 gen_highpart (SImode, operands[1]));
6210 DONE;
6211 }
6212 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
6213 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
6214 {
6215 /* Avoid STRD's from an odd-numbered register pair in ARM state
6216 when expanding function prologue. */
6217 gcc_assert (can_create_pseudo_p ());
6218 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
6219 ? gen_reg_rtx (DImode)
6220 : operands[0];
6221 emit_move_insn (gen_lowpart (SImode, split_dest),
6222 gen_lowpart (SImode, operands[1]));
6223 emit_move_insn (gen_highpart (SImode, split_dest),
6224 gen_highpart (SImode, operands[1]));
6225 if (split_dest != operands[0])
6226 emit_insn (gen_movdi (operands[0], split_dest));
6227 DONE;
6228 }
6229 "
6230 )
6231
6232 (define_insn "*arm_movdi"
6233 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
6234 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
6235 "TARGET_32BIT
6236 && !(TARGET_HARD_FLOAT)
6237 && !(TARGET_HAVE_MVE || TARGET_HAVE_MVE_FLOAT)
6238 && !TARGET_IWMMXT
6239 && ( register_operand (operands[0], DImode)
6240 || register_operand (operands[1], DImode))"
6241 "*
6242 switch (which_alternative)
6243 {
6244 case 0:
6245 case 1:
6246 case 2:
6247 return \"#\";
6248 case 3:
6249 /* Cannot load it directly, split to load it via MOV / MOVT. */
6250 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6251 return \"#\";
6252 /* Fall through. */
6253 default:
6254 return output_move_double (operands, true, NULL);
6255 }
6256 "
6257 [(set_attr "length" "8,12,16,8,8")
6258 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6259 (set_attr "arm_pool_range" "*,*,*,1020,*")
6260 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6261 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
6262 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6263 )
6264
6265 (define_split
6266 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6267 (match_operand:ANY64 1 "immediate_operand" ""))]
6268 "TARGET_32BIT
6269 && reload_completed
6270 && (arm_disable_literal_pool
6271 || (arm_const_double_inline_cost (operands[1])
6272 <= arm_max_const_double_inline_cost ()))"
6273 [(const_int 0)]
6274 "
6275 arm_split_constant (SET, SImode, curr_insn,
6276 INTVAL (gen_lowpart (SImode, operands[1])),
6277 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
6278 arm_split_constant (SET, SImode, curr_insn,
6279 INTVAL (gen_highpart_mode (SImode,
6280 GET_MODE (operands[0]),
6281 operands[1])),
6282 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
6283 DONE;
6284 "
6285 )
6286
6287 ; If optimizing for size, or if we have load delay slots, then
6288 ; we want to split the constant into two separate operations.
6289 ; In both cases this may split a trivial part into a single data op
6290 ; leaving a single complex constant to load. We can also get longer
6291 ; offsets in a LDR which means we get better chances of sharing the pool
6292 ; entries. Finally, we can normally do a better job of scheduling
6293 ; LDR instructions than we can with LDM.
6294 ; This pattern will only match if the one above did not.
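; For example, a DImode constant such as 0x100000001, both of whose halves are
; simple immediates, can be built with two "mov" instructions rather than being
; loaded from the literal pool.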
6295 (define_split
6296 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6297 (match_operand:ANY64 1 "const_double_operand" ""))]
6298 "TARGET_ARM && reload_completed
6299 && arm_const_double_by_parts (operands[1])"
6300 [(set (match_dup 0) (match_dup 1))
6301 (set (match_dup 2) (match_dup 3))]
6302 "
6303 operands[2] = gen_highpart (SImode, operands[0]);
6304 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
6305 operands[1]);
6306 operands[0] = gen_lowpart (SImode, operands[0]);
6307 operands[1] = gen_lowpart (SImode, operands[1]);
6308 "
6309 )
6310
6311 (define_split
6312 [(set (match_operand:ANY64_BF 0 "arm_general_register_operand" "")
6313 (match_operand:ANY64_BF 1 "arm_general_register_operand" ""))]
6314 "TARGET_EITHER && reload_completed"
6315 [(set (match_dup 0) (match_dup 1))
6316 (set (match_dup 2) (match_dup 3))]
6317 "
6318 operands[2] = gen_highpart (SImode, operands[0]);
6319 operands[3] = gen_highpart (SImode, operands[1]);
6320 operands[0] = gen_lowpart (SImode, operands[0]);
6321 operands[1] = gen_lowpart (SImode, operands[1]);
6322
6323 /* Handle a partial overlap. */
6324 if (rtx_equal_p (operands[0], operands[3]))
6325 {
6326 rtx tmp0 = operands[0];
6327 rtx tmp1 = operands[1];
6328
6329 operands[0] = operands[2];
6330 operands[1] = operands[3];
6331 operands[2] = tmp0;
6332 operands[3] = tmp1;
6333 }
6334 "
6335 )
6336
6337 ;; We can't actually do base+index doubleword loads if the index and
6338 ;; destination overlap.  Split here so that we at least have a chance to
6339 ;; schedule.
6340 (define_split
6341 [(set (match_operand:DI 0 "s_register_operand" "")
6342 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
6343 (match_operand:SI 2 "s_register_operand" ""))))]
6344 "TARGET_LDRD
6345 && reg_overlap_mentioned_p (operands[0], operands[1])
6346 && reg_overlap_mentioned_p (operands[0], operands[2])"
6347 [(set (match_dup 4)
6348 (plus:SI (match_dup 1)
6349 (match_dup 2)))
6350 (set (match_dup 0)
6351 (mem:DI (match_dup 4)))]
6352 "
6353 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
6354 "
6355 )
6356
6357 (define_expand "movsi"
6358 [(set (match_operand:SI 0 "general_operand")
6359 (match_operand:SI 1 "general_operand"))]
6360 "TARGET_EITHER"
6361 "
6362 {
6363 rtx base, offset, tmp;
6364
6365 gcc_checking_assert (aligned_operand (operands[0], SImode));
6366 gcc_checking_assert (aligned_operand (operands[1], SImode));
6367 if (TARGET_32BIT || TARGET_HAVE_MOVT)
6368 {
6369 /* Everything except mem = const or mem = mem can be done easily. */
6370 if (MEM_P (operands[0]))
6371 operands[1] = force_reg (SImode, operands[1]);
6372 if (arm_general_register_operand (operands[0], SImode)
6373 && CONST_INT_P (operands[1])
6374 && !(const_ok_for_arm (INTVAL (operands[1]))
6375 || const_ok_for_arm (~INTVAL (operands[1]))))
6376 {
6377 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
6378 {
6379 emit_insn (gen_rtx_SET (operands[0], operands[1]));
6380 DONE;
6381 }
6382 else
6383 {
6384 arm_split_constant (SET, SImode, NULL_RTX,
6385 INTVAL (operands[1]), operands[0], NULL_RTX,
6386 optimize && can_create_pseudo_p ());
6387 DONE;
6388 }
6389 }
6390 }
6391 else /* Target doesn't have MOVT... */
6392 {
6393 if (can_create_pseudo_p ())
6394 {
6395 if (!REG_P (operands[0]))
6396 operands[1] = force_reg (SImode, operands[1]);
6397 }
6398 }
6399
6400 split_const (operands[1], &base, &offset);
6401 if (INTVAL (offset) != 0
6402 && targetm.cannot_force_const_mem (SImode, operands[1]))
6403 {
6404 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6405 emit_move_insn (tmp, base);
6406 emit_insn (gen_addsi3 (operands[0], tmp, offset));
6407 DONE;
6408 }
6409
6410 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
6411
6412 /* Recognize the case where operand[1] is a reference to thread-local
6413 data and load its address to a register. Offsets have been split off
6414 already. */
6415 if (arm_tls_referenced_p (operands[1]))
6416 operands[1] = legitimize_tls_address (operands[1], tmp);
6417 else if (flag_pic
6418 && (CONSTANT_P (operands[1])
6419 || symbol_mentioned_p (operands[1])
6420 || label_mentioned_p (operands[1])))
6421 operands[1] =
6422 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
6423 }
6424 "
6425 )
6426
6427 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
6428 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
6429 ;; so this does not matter.
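;; In practice the HIGH part of a movw/movt pair is output as
;; "movw rd, #:lower16:sym" (which also clears the upper half) and the LO_SUM
;; below as "movt rd, #:upper16:sym", giving the usual two-instruction sequence
;; for a symbol address.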
6430 (define_insn "*arm_movt"
6431 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
6432 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
6433 (match_operand:SI 2 "general_operand" "i,i")))]
6434 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
6435 "@
6436 movt%?\t%0, #:upper16:%c2
6437 movt\t%0, #:upper16:%c2"
6438 [(set_attr "arch" "32,v8mb")
6439 (set_attr "predicable" "yes")
6440 (set_attr "length" "4")
6441 (set_attr "type" "alu_sreg")]
6442 )
6443
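;; In the pattern below, the "K" alternative covers constants whose bitwise
;; complement is a valid immediate: for example 0xfffffffe cannot be encoded
;; directly but is emitted as "mvn rd, #1".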
6444 (define_insn "*arm_movsi_insn"
6445 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
6446 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
6447 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
6448 && ( register_operand (operands[0], SImode)
6449 || register_operand (operands[1], SImode))"
6450 "@
6451 mov%?\\t%0, %1
6452 mov%?\\t%0, %1
6453 mvn%?\\t%0, #%B1
6454 movw%?\\t%0, %1
6455 ldr%?\\t%0, %1
6456 str%?\\t%1, %0"
6457 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
6458 (set_attr "predicable" "yes")
6459 (set_attr "arch" "*,*,*,v6t2,*,*")
6460 (set_attr "pool_range" "*,*,*,*,4096,*")
6461 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
6462 )
6463
6464 (define_split
6465 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6466 (match_operand:SI 1 "const_int_operand" ""))]
6467 "(TARGET_32BIT || TARGET_HAVE_MOVT)
6468 && (!(const_ok_for_arm (INTVAL (operands[1]))
6469 || const_ok_for_arm (~INTVAL (operands[1]))))"
6470 [(clobber (const_int 0))]
6471 "
6472 arm_split_constant (SET, SImode, NULL_RTX,
6473 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
6474 DONE;
6475 "
6476 )
6477
6478 ;; A normal way to do (symbol + offset) requires at least three instructions
6479 ;; (depending on how big the offset is), as below:
6480 ;; movw r0, #:lower16:g
6481 ;; movt r0, #:upper16:g
6482 ;; adds r0, #4
6483 ;;
6484 ;; A better way would be:
6485 ;; movw r0, #:lower16:g+4
6486 ;; movt r0, #:upper16:g+4
6487 ;;
6488 ;; The limitation of this approach is that the offset must fit in a 16-bit
6489 ;; signed value, because the current assembler only supports REL-type
6490 ;; relocations for such a case.  If the more powerful RELA type is supported
6491 ;; in the future, we should update this pattern to use the better sequence.
6492 (define_split
6493 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6494 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
6495 (match_operand:SI 2 "const_int_operand" ""))))]
6496 "TARGET_THUMB
6497 && TARGET_HAVE_MOVT
6498 && arm_disable_literal_pool
6499 && reload_completed
6500 && GET_CODE (operands[1]) == SYMBOL_REF"
6501 [(clobber (const_int 0))]
6502 "
6503 int offset = INTVAL (operands[2]);
6504
6505 if (offset < -0x8000 || offset > 0x7fff)
6506 {
6507 arm_emit_movpair (operands[0], operands[1]);
6508 emit_insn (gen_rtx_SET (operands[0],
6509 gen_rtx_PLUS (SImode, operands[0], operands[2])));
6510 }
6511 else
6512 {
6513 rtx op = gen_rtx_CONST (SImode,
6514 gen_rtx_PLUS (SImode, operands[1], operands[2]));
6515 arm_emit_movpair (operands[0], op);
6516 }
6517 "
6518 )
6519
6520 ;; Split symbol_refs at a later stage (after cprop), instead of generating
6521 ;; the movt/movw pair directly at expand time.  Otherwise the corresponding
6522 ;; high and lo_sum would be merged back into a memory load by cprop.  However,
6523 ;; if the default is to prefer movt/movw rather than a load from the constant
6524 ;; pool, performance is better.
6525 (define_split
6526 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6527 (match_operand:SI 1 "general_operand" ""))]
6528 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
6529 && !target_word_relocations
6530 && !arm_tls_referenced_p (operands[1])"
6531 [(clobber (const_int 0))]
6532 {
6533 arm_emit_movpair (operands[0], operands[1]);
6534 DONE;
6535 })
6536
6537 ;; When generating pic, we need to load the symbol offset into a register.
6538 ;; So that the optimizer does not confuse this with a normal symbol load
6539 ;; we use an unspec. The offset will be loaded from a constant pool entry,
6540 ;; since that is the only type of relocation we can use.
6541
6542 ;; Wrap calculation of the whole PIC address in a single pattern for the
6543 ;; benefit of optimizers, particularly PRE and HOIST.  Calculation of
6544 ;; a PIC address involves two loads from memory, so we want to CSE it
6545 ;; as often as possible.
6546 ;; This pattern will be split into one of the pic_load_addr_* patterns
6547 ;; and a move after GCSE optimizations.
6548 ;;
6549 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
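;; For example, taking the address of a global "g" under -fPIC becomes roughly
;; a load of g's GOT offset from the literal pool (one of the pic_load_addr_*
;; patterns) followed by "ldr rd, [PIC_register, roffset]" to fetch the address
;; from the GOT.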
6550 (define_expand "calculate_pic_address"
6551 [(set (match_operand:SI 0 "register_operand")
6552 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
6553 (unspec:SI [(match_operand:SI 2 "" "")]
6554 UNSPEC_PIC_SYM))))]
6555 "flag_pic"
6556 )
6557
6558 ;; Split calculate_pic_address into pic_load_addr_* and a move.
6559 (define_split
6560 [(set (match_operand:SI 0 "register_operand" "")
6561 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
6562 (unspec:SI [(match_operand:SI 2 "" "")]
6563 UNSPEC_PIC_SYM))))]
6564 "flag_pic"
6565 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
6566 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
6567 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
6568 )
6569
6570 ;; operand1 is the memory address to go into
6571 ;; pic_load_addr_32bit.
6572 ;; operand2 is the PIC label to be emitted
6573 ;; from pic_add_dot_plus_eight.
6574 ;; We do this to allow hoisting of the entire insn.
6575 (define_insn_and_split "pic_load_addr_unified"
6576 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
6577 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
6578 (match_operand:SI 2 "" "")]
6579 UNSPEC_PIC_UNIFIED))]
6580 "flag_pic"
6581 "#"
6582 "&& reload_completed"
6583 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
6584 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
6585 (match_dup 2)] UNSPEC_PIC_BASE))]
6586 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
6587 [(set_attr "type" "load_4,load_4,load_4")
6588 (set_attr "pool_range" "4096,4094,1022")
6589 (set_attr "neg_pool_range" "4084,0,0")
6590 (set_attr "arch" "a,t2,t1")
6591 (set_attr "length" "8,6,4")]
6592 )
6593
6594 ;; The rather odd constraints on the following are to force reload to leave
6595 ;; the insn alone, and to force the minipool generation pass to then move
6596 ;; the GOT symbol to memory.
6597
6598 (define_insn "pic_load_addr_32bit"
6599 [(set (match_operand:SI 0 "s_register_operand" "=r")
6600 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6601 "TARGET_32BIT && flag_pic"
6602 "ldr%?\\t%0, %1"
6603 [(set_attr "type" "load_4")
6604 (set (attr "pool_range")
6605 (if_then_else (eq_attr "is_thumb" "no")
6606 (const_int 4096)
6607 (const_int 4094)))
6608 (set (attr "neg_pool_range")
6609 (if_then_else (eq_attr "is_thumb" "no")
6610 (const_int 4084)
6611 (const_int 0)))]
6612 )
6613
6614 (define_insn "pic_load_addr_thumb1"
6615 [(set (match_operand:SI 0 "s_register_operand" "=l")
6616 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6617 "TARGET_THUMB1 && flag_pic"
6618 "ldr\\t%0, %1"
6619 [(set_attr "type" "load_4")
6620 (set (attr "pool_range") (const_int 1018))]
6621 )
6622
6623 (define_insn "pic_add_dot_plus_four"
6624 [(set (match_operand:SI 0 "register_operand" "=r")
6625 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
6626 (const_int 4)
6627 (match_operand 2 "" "")]
6628 UNSPEC_PIC_BASE))]
6629 "TARGET_THUMB"
6630 "*
6631 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6632 INTVAL (operands[2]));
6633 return \"add\\t%0, %|pc\";
6634 "
6635 [(set_attr "length" "2")
6636 (set_attr "type" "alu_sreg")]
6637 )
6638
6639 (define_insn "pic_add_dot_plus_eight"
6640 [(set (match_operand:SI 0 "register_operand" "=r")
6641 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6642 (const_int 8)
6643 (match_operand 2 "" "")]
6644 UNSPEC_PIC_BASE))]
6645 "TARGET_ARM"
6646 "*
6647 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6648 INTVAL (operands[2]));
6649 return \"add%?\\t%0, %|pc, %1\";
6650 "
6651 [(set_attr "predicable" "yes")
6652 (set_attr "type" "alu_sreg")]
6653 )
6654
6655 (define_insn "tls_load_dot_plus_eight"
6656 [(set (match_operand:SI 0 "register_operand" "=r")
6657 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6658 (const_int 8)
6659 (match_operand 2 "" "")]
6660 UNSPEC_PIC_BASE)))]
6661 "TARGET_ARM"
6662 "*
6663 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6664 INTVAL (operands[2]));
6665 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
6666 "
6667 [(set_attr "predicable" "yes")
6668 (set_attr "type" "load_4")]
6669 )
6670
6671 ;; PIC references to local variables can generate pic_add_dot_plus_eight
6672 ;; followed by a load. These sequences can be crunched down to
6673 ;; tls_load_dot_plus_eight by a peephole.
6674
6675 (define_peephole2
6676 [(set (match_operand:SI 0 "register_operand" "")
6677 (unspec:SI [(match_operand:SI 3 "register_operand" "")
6678 (const_int 8)
6679 (match_operand 1 "" "")]
6680 UNSPEC_PIC_BASE))
6681 (set (match_operand:SI 2 "arm_general_register_operand" "")
6682 (mem:SI (match_dup 0)))]
6683 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
6684 [(set (match_dup 2)
6685 (mem:SI (unspec:SI [(match_dup 3)
6686 (const_int 8)
6687 (match_dup 1)]
6688 UNSPEC_PIC_BASE)))]
6689 ""
6690 )
6691
6692 (define_insn "pic_offset_arm"
6693 [(set (match_operand:SI 0 "register_operand" "=r")
6694 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
6695 (unspec:SI [(match_operand:SI 2 "" "X")]
6696 UNSPEC_PIC_OFFSET))))]
6697 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
6698 "ldr%?\\t%0, [%1,%2]"
6699 [(set_attr "type" "load_4")]
6700 )
6701
6702 (define_expand "builtin_setjmp_receiver"
6703 [(label_ref (match_operand 0 "" ""))]
6704 "flag_pic"
6705 "
6706 {
6707 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
6708 register. */
6709 if (arm_pic_register != INVALID_REGNUM)
6710 arm_load_pic_register (1UL << 3, NULL_RTX);
6711 DONE;
6712 }")
6713
6714 ;; If copying one reg to another we can set the condition codes according to
6715 ;; its value. Such a move is common after a return from a subroutine when
6716 ;; the result is being tested against zero.
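;; As a sketch (the register choice is arbitrary), rather than emitting
;;     mov     r4, r0
;;     cmp     r4, #0
;; after a call, the pattern below lets the copy itself set the flags:
;;     subs    r4, r0, #0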
6717
6718 (define_insn "*movsi_compare0"
6719 [(set (reg:CC CC_REGNUM)
6720 (compare:CC (match_operand:SI 1 "s_register_operand" "0,0,l,rk,rk")
6721 (const_int 0)))
6722 (set (match_operand:SI 0 "s_register_operand" "=l,rk,l,r,rk")
6723 (match_dup 1))]
6724 "TARGET_32BIT"
6725 "@
6726 cmp%?\\t%0, #0
6727 cmp%?\\t%0, #0
6728 subs%?\\t%0, %1, #0
6729 subs%?\\t%0, %1, #0
6730 subs%?\\t%0, %1, #0"
6731 [(set_attr "conds" "set")
6732 (set_attr "arch" "t2,*,t2,t2,a")
6733 (set_attr "type" "alus_imm")
6734 (set_attr "length" "2,4,2,4,4")]
6735 )
6736
6737 ;; Subroutine to store a half word from a register into memory.
6738 ;; Operand 0 is the source register (HImode)
6739 ;; Operand 1 is the destination address in a register (SImode)
6740
6741 ;; In both this routine and the next, we must be careful not to spill
6742 ;; a memory address of reg+large_const into a separate PLUS insn, since this
6743 ;; can generate unrecognizable rtl.
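;; A rough sketch of what the little-endian expansion below amounts to
;; (register names are illustrative): with the halfword value in r0 and the
;; address in r1,
;;     strb    r0, [r1]                @ store the low byte
;;     mov     r2, r0, asr #8          @ extract the high byte
;;     strb    r2, [r1, #1]            @ store the high byte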
6744
6745 (define_expand "storehi"
6746 [;; store the low byte
6747 (set (match_operand 1 "" "") (match_dup 3))
6748 ;; extract the high byte
6749 (set (match_dup 2)
6750 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6751 ;; store the high byte
6752 (set (match_dup 4) (match_dup 5))]
6753 "TARGET_ARM"
6754 "
6755 {
6756 rtx op1 = operands[1];
6757 rtx addr = XEXP (op1, 0);
6758 enum rtx_code code = GET_CODE (addr);
6759
6760 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6761 || code == MINUS)
6762 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
6763
6764 operands[4] = adjust_address (op1, QImode, 1);
6765 operands[1] = adjust_address (operands[1], QImode, 0);
6766 operands[3] = gen_lowpart (QImode, operands[0]);
6767 operands[0] = gen_lowpart (SImode, operands[0]);
6768 operands[2] = gen_reg_rtx (SImode);
6769 operands[5] = gen_lowpart (QImode, operands[2]);
6770 }"
6771 )
6772
6773 (define_expand "storehi_bigend"
6774 [(set (match_dup 4) (match_dup 3))
6775 (set (match_dup 2)
6776 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6777 (set (match_operand 1 "" "") (match_dup 5))]
6778 "TARGET_ARM"
6779 "
6780 {
6781 rtx op1 = operands[1];
6782 rtx addr = XEXP (op1, 0);
6783 enum rtx_code code = GET_CODE (addr);
6784
6785 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6786 || code == MINUS)
6787 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
6788
6789 operands[4] = adjust_address (op1, QImode, 1);
6790 operands[1] = adjust_address (operands[1], QImode, 0);
6791 operands[3] = gen_lowpart (QImode, operands[0]);
6792 operands[0] = gen_lowpart (SImode, operands[0]);
6793 operands[2] = gen_reg_rtx (SImode);
6794 operands[5] = gen_lowpart (QImode, operands[2]);
6795 }"
6796 )
6797
6798 ;; Subroutine to store a half word integer constant into memory.
6799 (define_expand "storeinthi"
6800 [(set (match_operand 0 "" "")
6801 (match_operand 1 "" ""))
6802 (set (match_dup 3) (match_dup 2))]
6803 "TARGET_ARM"
6804 "
6805 {
6806 HOST_WIDE_INT value = INTVAL (operands[1]);
6807 rtx addr = XEXP (operands[0], 0);
6808 rtx op0 = operands[0];
6809 enum rtx_code code = GET_CODE (addr);
6810
6811 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6812 || code == MINUS)
6813 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
6814
6815 operands[1] = gen_reg_rtx (SImode);
6816 if (BYTES_BIG_ENDIAN)
6817 {
6818 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
6819 if ((value & 255) == ((value >> 8) & 255))
6820 operands[2] = operands[1];
6821 else
6822 {
6823 operands[2] = gen_reg_rtx (SImode);
6824 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
6825 }
6826 }
6827 else
6828 {
6829 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
6830 if ((value & 255) == ((value >> 8) & 255))
6831 operands[2] = operands[1];
6832 else
6833 {
6834 operands[2] = gen_reg_rtx (SImode);
6835 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
6836 }
6837 }
6838
6839 operands[3] = adjust_address (op0, QImode, 1);
6840 operands[0] = adjust_address (operands[0], QImode, 0);
6841 operands[2] = gen_lowpart (QImode, operands[2]);
6842 operands[1] = gen_lowpart (QImode, operands[1]);
6843 }"
6844 )
6845
6846 (define_expand "storehi_single_op"
6847 [(set (match_operand:HI 0 "memory_operand")
6848 (match_operand:HI 1 "general_operand"))]
6849 "TARGET_32BIT && arm_arch4"
6850 "
6851 if (!s_register_operand (operands[1], HImode))
6852 operands[1] = copy_to_mode_reg (HImode, operands[1]);
6853 "
6854 )
6855
6856 (define_expand "movhi"
6857 [(set (match_operand:HI 0 "general_operand")
6858 (match_operand:HI 1 "general_operand"))]
6859 "TARGET_EITHER"
6860 "
6861 gcc_checking_assert (aligned_operand (operands[0], HImode));
6862 gcc_checking_assert (aligned_operand (operands[1], HImode));
6863 if (TARGET_ARM)
6864 {
6865 if (can_create_pseudo_p ())
6866 {
6867 if (MEM_P (operands[0]))
6868 {
6869 if (arm_arch4)
6870 {
6871 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
6872 DONE;
6873 }
6874 if (CONST_INT_P (operands[1]))
6875 emit_insn (gen_storeinthi (operands[0], operands[1]));
6876 else
6877 {
6878 if (MEM_P (operands[1]))
6879 operands[1] = force_reg (HImode, operands[1]);
6880 if (BYTES_BIG_ENDIAN)
6881 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
6882 else
6883 emit_insn (gen_storehi (operands[1], operands[0]));
6884 }
6885 DONE;
6886 }
6887 /* Sign extend a constant, and keep it in an SImode reg. */
6888 else if (CONST_INT_P (operands[1]))
6889 {
6890 rtx reg = gen_reg_rtx (SImode);
6891 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6892
6893 /* If the constant is already valid, leave it alone. */
6894 if (!const_ok_for_arm (val))
6895 {
6896 /* If setting all the top bits will make the constant
6897 loadable in a single instruction, then set them.
6898 Otherwise, sign extend the number. */
6899
6900 if (const_ok_for_arm (~(val | ~0xffff)))
6901 val |= ~0xffff;
6902 else if (val & 0x8000)
6903 val |= ~0xffff;
6904 }
6905
6906 emit_insn (gen_movsi (reg, GEN_INT (val)));
6907 operands[1] = gen_lowpart (HImode, reg);
6908 }
6909 else if (arm_arch4 && optimize && can_create_pseudo_p ()
6910 && MEM_P (operands[1]))
6911 {
6912 rtx reg = gen_reg_rtx (SImode);
6913
6914 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6915 operands[1] = gen_lowpart (HImode, reg);
6916 }
6917 else if (!arm_arch4)
6918 {
6919 if (MEM_P (operands[1]))
6920 {
6921 rtx base;
6922 rtx offset = const0_rtx;
6923 rtx reg = gen_reg_rtx (SImode);
6924
6925 if ((REG_P (base = XEXP (operands[1], 0))
6926 || (GET_CODE (base) == PLUS
6927 && (CONST_INT_P (offset = XEXP (base, 1)))
6928 && ((INTVAL(offset) & 1) != 1)
6929 && REG_P (base = XEXP (base, 0))))
6930 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
6931 {
6932 rtx new_rtx;
6933
6934 new_rtx = widen_memory_access (operands[1], SImode,
6935 ((INTVAL (offset) & ~3)
6936 - INTVAL (offset)));
6937 emit_insn (gen_movsi (reg, new_rtx));
6938 if (((INTVAL (offset) & 2) != 0)
6939 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
6940 {
6941 rtx reg2 = gen_reg_rtx (SImode);
6942
6943 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
6944 reg = reg2;
6945 }
6946 }
6947 else
6948 emit_insn (gen_movhi_bytes (reg, operands[1]));
6949
6950 operands[1] = gen_lowpart (HImode, reg);
6951 }
6952 }
6953 }
6954 /* Handle loading a large integer during reload. */
6955 else if (CONST_INT_P (operands[1])
6956 && !const_ok_for_arm (INTVAL (operands[1]))
6957 && !const_ok_for_arm (~INTVAL (operands[1])))
6958 {
6959 /* Writing a constant to memory needs a scratch, which should
6960 be handled with SECONDARY_RELOADs. */
6961 gcc_assert (REG_P (operands[0]));
6962
6963 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6964 emit_insn (gen_movsi (operands[0], operands[1]));
6965 DONE;
6966 }
6967 }
6968 else if (TARGET_THUMB2)
6969 {
6970 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
6971 if (can_create_pseudo_p ())
6972 {
6973 if (!REG_P (operands[0]))
6974 operands[1] = force_reg (HImode, operands[1]);
6975 /* Zero extend a constant, and keep it in an SImode reg. */
6976 else if (CONST_INT_P (operands[1]))
6977 {
6978 rtx reg = gen_reg_rtx (SImode);
6979 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6980
6981 emit_insn (gen_movsi (reg, GEN_INT (val)));
6982 operands[1] = gen_lowpart (HImode, reg);
6983 }
6984 }
6985 }
6986 else /* TARGET_THUMB1 */
6987 {
6988 if (can_create_pseudo_p ())
6989 {
6990 if (CONST_INT_P (operands[1]))
6991 {
6992 rtx reg = gen_reg_rtx (SImode);
6993
6994 emit_insn (gen_movsi (reg, operands[1]));
6995 operands[1] = gen_lowpart (HImode, reg);
6996 }
6997
6998 /* ??? We shouldn't really get invalid addresses here, but this can
6999 happen if we are passed a SP (never OK for HImode/QImode) or
7000 virtual register (also rejected as illegitimate for HImode/QImode)
7001 relative address. */
7002 /* ??? This should perhaps be fixed elsewhere, for instance, in
7003 fixup_stack_1, by checking for other kinds of invalid addresses,
7004 e.g. a bare reference to a virtual register. This may confuse the
7005 alpha though, which must handle this case differently. */
7006 if (MEM_P (operands[0])
7007 && !memory_address_p (GET_MODE (operands[0]),
7008 XEXP (operands[0], 0)))
7009 operands[0]
7010 = replace_equiv_address (operands[0],
7011 copy_to_reg (XEXP (operands[0], 0)));
7012
7013 if (MEM_P (operands[1])
7014 && !memory_address_p (GET_MODE (operands[1]),
7015 XEXP (operands[1], 0)))
7016 operands[1]
7017 = replace_equiv_address (operands[1],
7018 copy_to_reg (XEXP (operands[1], 0)));
7019
7020 if (MEM_P (operands[1]) && optimize > 0)
7021 {
7022 rtx reg = gen_reg_rtx (SImode);
7023
7024 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
7025 operands[1] = gen_lowpart (HImode, reg);
7026 }
7027
7028 if (MEM_P (operands[0]))
7029 operands[1] = force_reg (HImode, operands[1]);
7030 }
7031 else if (CONST_INT_P (operands[1])
7032 && !satisfies_constraint_I (operands[1]))
7033 {
7034 /* Handle loading a large integer during reload. */
7035
7036 /* Writing a constant to memory needs a scratch, which should
7037 be handled with SECONDARY_RELOADs. */
7038 gcc_assert (REG_P (operands[0]));
7039
7040 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7041 emit_insn (gen_movsi (operands[0], operands[1]));
7042 DONE;
7043 }
7044 }
7045 "
7046 )
7047
7048 (define_expand "movhi_bytes"
7049 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
7050 (set (match_dup 3)
7051 (zero_extend:SI (match_dup 6)))
7052 (set (match_operand:SI 0 "" "")
7053 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
7054 "TARGET_ARM"
7055 "
7056 {
7057 rtx mem1, mem2;
7058 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
7059
7060 mem1 = change_address (operands[1], QImode, addr);
7061 mem2 = change_address (operands[1], QImode,
7062 plus_constant (Pmode, addr, 1));
7063 operands[0] = gen_lowpart (SImode, operands[0]);
7064 operands[1] = mem1;
7065 operands[2] = gen_reg_rtx (SImode);
7066 operands[3] = gen_reg_rtx (SImode);
7067 operands[6] = mem2;
7068
7069 if (BYTES_BIG_ENDIAN)
7070 {
7071 operands[4] = operands[2];
7072 operands[5] = operands[3];
7073 }
7074 else
7075 {
7076 operands[4] = operands[3];
7077 operands[5] = operands[2];
7078 }
7079 }"
7080 )
7081
7082 (define_expand "movhi_bigend"
7083 [(set (match_dup 2)
7084 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
7085 (const_int 16)))
7086 (set (match_dup 3)
7087 (ashiftrt:SI (match_dup 2) (const_int 16)))
7088 (set (match_operand:HI 0 "s_register_operand")
7089 (match_dup 4))]
7090 "TARGET_ARM"
7091 "
7092 operands[2] = gen_reg_rtx (SImode);
7093 operands[3] = gen_reg_rtx (SImode);
7094 operands[4] = gen_lowpart (HImode, operands[3]);
7095 "
7096 )
7097
7098 ;; Pattern to recognize the insn generated by the default case above
7099 (define_insn "*movhi_insn_arch4"
7100 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
7101 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))]
7102 "TARGET_ARM
7103 && arm_arch4 && !TARGET_HARD_FLOAT
7104 && (register_operand (operands[0], HImode)
7105 || register_operand (operands[1], HImode))"
7106 "@
7107 mov%?\\t%0, %1\\t%@ movhi
7108 mvn%?\\t%0, #%B1\\t%@ movhi
7109 movw%?\\t%0, %L1\\t%@ movhi
7110 strh%?\\t%1, %0\\t%@ movhi
7111 ldrh%?\\t%0, %1\\t%@ movhi"
7112 [(set_attr "predicable" "yes")
7113 (set_attr "pool_range" "*,*,*,*,256")
7114 (set_attr "neg_pool_range" "*,*,*,*,244")
7115 (set_attr "arch" "*,*,v6t2,*,*")
7116 (set_attr_alternative "type"
7117 [(if_then_else (match_operand 1 "const_int_operand" "")
7118 (const_string "mov_imm" )
7119 (const_string "mov_reg"))
7120 (const_string "mvn_imm")
7121 (const_string "mov_imm")
7122 (const_string "store_4")
7123 (const_string "load_4")])]
7124 )
7125
7126 (define_insn "*movhi_bytes"
7127 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
7128 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))]
7129 "TARGET_ARM && !TARGET_HARD_FLOAT"
7130 "@
7131 mov%?\\t%0, %1\\t%@ movhi
7132 mov%?\\t%0, %1\\t%@ movhi
7133 mvn%?\\t%0, #%B1\\t%@ movhi"
7134 [(set_attr "predicable" "yes")
7135 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
7136 )
7137
7138 ;; We use a DImode scratch because we may occasionally need an additional
7139 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
7140 ;; to take any notice of the "o" constraints on the reload_memory_operand operand.
7141 ;; The reload_in<m> and reload_out<m> patterns require special constraints
7142 ;; to be handled correctly by the default_secondary_reload function.
7143 (define_expand "reload_outhi"
7144 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
7145 (match_operand:HI 1 "s_register_operand" "r")
7146 (match_operand:DI 2 "s_register_operand" "=&l")])]
7147 "TARGET_EITHER"
7148 "if (TARGET_ARM)
7149 arm_reload_out_hi (operands);
7150 else
7151 thumb_reload_out_hi (operands);
7152 DONE;
7153 "
7154 )
7155
7156 (define_expand "reload_inhi"
7157 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
7158 (match_operand:HI 1 "arm_reload_memory_operand" "o")
7159 (match_operand:DI 2 "s_register_operand" "=&r")])]
7160 "TARGET_EITHER"
7161 "
7162 if (TARGET_ARM)
7163 arm_reload_in_hi (operands);
7164 else
7165 thumb_reload_out_hi (operands);
7166 DONE;
7167 ")
7168
7169 (define_expand "movqi"
7170 [(set (match_operand:QI 0 "general_operand")
7171 (match_operand:QI 1 "general_operand"))]
7172 "TARGET_EITHER"
7173 "
7174 /* Everything except mem = const or mem = mem can be done easily. */
7175
7176 if (can_create_pseudo_p ())
7177 {
7178 if (CONST_INT_P (operands[1]))
7179 {
7180 rtx reg = gen_reg_rtx (SImode);
7181
7182 /* For Thumb we want an unsigned immediate, as then we are more likely
7183 to be able to use a movs insn. */
7184 if (TARGET_THUMB)
7185 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
7186
7187 emit_insn (gen_movsi (reg, operands[1]));
7188 operands[1] = gen_lowpart (QImode, reg);
7189 }
7190
7191 if (TARGET_THUMB)
7192 {
7193 /* ??? We shouldn't really get invalid addresses here, but this can
7194 happen if we are passed a SP (never OK for HImode/QImode) or
7195 virtual register (also rejected as illegitimate for HImode/QImode)
7196 relative address. */
7197 /* ??? This should perhaps be fixed elsewhere, for instance, in
7198 fixup_stack_1, by checking for other kinds of invalid addresses,
7199 e.g. a bare reference to a virtual register. This may confuse the
7200 alpha though, which must handle this case differently. */
7201 if (MEM_P (operands[0])
7202 && !memory_address_p (GET_MODE (operands[0]),
7203 XEXP (operands[0], 0)))
7204 operands[0]
7205 = replace_equiv_address (operands[0],
7206 copy_to_reg (XEXP (operands[0], 0)));
7207 if (MEM_P (operands[1])
7208 && !memory_address_p (GET_MODE (operands[1]),
7209 XEXP (operands[1], 0)))
7210 operands[1]
7211 = replace_equiv_address (operands[1],
7212 copy_to_reg (XEXP (operands[1], 0)));
7213 }
7214
7215 if (MEM_P (operands[1]) && optimize > 0)
7216 {
7217 rtx reg = gen_reg_rtx (SImode);
7218
7219 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
7220 operands[1] = gen_lowpart (QImode, reg);
7221 }
7222
7223 if (MEM_P (operands[0]))
7224 operands[1] = force_reg (QImode, operands[1]);
7225 }
7226 else if (TARGET_THUMB
7227 && CONST_INT_P (operands[1])
7228 && !satisfies_constraint_I (operands[1]))
7229 {
7230 /* Handle loading a large integer during reload. */
7231
7232 /* Writing a constant to memory needs a scratch, which should
7233 be handled with SECONDARY_RELOADs. */
7234 gcc_assert (REG_P (operands[0]));
7235
7236 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7237 emit_insn (gen_movsi (operands[0], operands[1]));
7238 DONE;
7239 }
7240 "
7241 )
7242
7243 (define_insn "*arm_movqi_insn"
7244 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
7245 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
7246 "TARGET_32BIT
7247 && ( register_operand (operands[0], QImode)
7248 || register_operand (operands[1], QImode))"
7249 "@
7250 mov%?\\t%0, %1
7251 mov%?\\t%0, %1
7252 mov%?\\t%0, %1
7253 mov%?\\t%0, %1
7254 mvn%?\\t%0, #%B1
7255 ldrb%?\\t%0, %1
7256 strb%?\\t%1, %0
7257 ldrb%?\\t%0, %1
7258 strb%?\\t%1, %0"
7259 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
7260 (set_attr "predicable" "yes")
7261 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
7262 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
7263 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
7264 )
7265
7266 ;; HFmode and BFmode moves.
7267 (define_expand "mov<mode>"
7268 [(set (match_operand:HFBF 0 "general_operand")
7269 (match_operand:HFBF 1 "general_operand"))]
7270 "TARGET_EITHER"
7271 "
7272 gcc_checking_assert (aligned_operand (operands[0], <MODE>mode));
7273 gcc_checking_assert (aligned_operand (operands[1], <MODE>mode));
7274 if (TARGET_32BIT)
7275 {
7276 if (MEM_P (operands[0]))
7277 operands[1] = force_reg (<MODE>mode, operands[1]);
7278 }
7279 else /* TARGET_THUMB1 */
7280 {
7281 if (can_create_pseudo_p ())
7282 {
7283 if (!REG_P (operands[0]))
7284 operands[1] = force_reg (<MODE>mode, operands[1]);
7285 }
7286 }
7287 "
7288 )
7289
7290 (define_insn "*arm32_mov<mode>"
7291 [(set (match_operand:HFBF 0 "nonimmediate_operand" "=r,m,r,r")
7292 (match_operand:HFBF 1 "general_operand" " m,r,r,F"))]
7293 "TARGET_32BIT
7294 && !TARGET_HARD_FLOAT
7295 && !TARGET_HAVE_MVE
7296 && ( s_register_operand (operands[0], <MODE>mode)
7297 || s_register_operand (operands[1], <MODE>mode))"
7298 "*
7299 switch (which_alternative)
7300 {
7301 case 0: /* ARM register from memory */
7302 return \"ldrh%?\\t%0, %1\\t%@ __<fporbf>\";
7303 case 1: /* memory from ARM register */
7304 return \"strh%?\\t%1, %0\\t%@ __<fporbf>\";
7305 case 2: /* ARM register from ARM register */
7306 return \"mov%?\\t%0, %1\\t%@ __<fporbf>\";
7307 case 3: /* ARM register from constant */
7308 {
7309 long bits;
7310 rtx ops[4];
7311
7312 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
7313 <MODE>mode);
7314 ops[0] = operands[0];
7315 ops[1] = GEN_INT (bits);
7316 ops[2] = GEN_INT (bits & 0xff00);
7317 ops[3] = GEN_INT (bits & 0x00ff);
7318
7319 if (arm_arch_thumb2)
7320 output_asm_insn (\"movw%?\\t%0, %1\", ops);
7321 else
7322 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
7323 return \"\";
7324 }
7325 default:
7326 gcc_unreachable ();
7327 }
7328 "
7329 [(set_attr "conds" "unconditional")
7330 (set_attr "type" "load_4,store_4,mov_reg,multiple")
7331 (set_attr "length" "4,4,4,8")
7332 (set_attr "predicable" "yes")]
7333 )
7334
7335 (define_expand "movsf"
7336 [(set (match_operand:SF 0 "general_operand")
7337 (match_operand:SF 1 "general_operand"))]
7338 "TARGET_EITHER"
7339 "
7340 gcc_checking_assert (aligned_operand (operands[0], SFmode));
7341 gcc_checking_assert (aligned_operand (operands[1], SFmode));
7342 if (TARGET_32BIT)
7343 {
7344 if (MEM_P (operands[0]))
7345 operands[1] = force_reg (SFmode, operands[1]);
7346 }
7347 else /* TARGET_THUMB1 */
7348 {
7349 if (can_create_pseudo_p ())
7350 {
7351 if (!REG_P (operands[0]))
7352 operands[1] = force_reg (SFmode, operands[1]);
7353 }
7354 }
7355
7356 /* Cannot load it directly, generate a load with clobber so that it can be
7357 loaded via GPR with MOV / MOVT. */
7358 if (arm_disable_literal_pool
7359 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
7360 && CONST_DOUBLE_P (operands[1])
7361 && TARGET_VFP_BASE
7362 && !vfp3_const_double_rtx (operands[1]))
7363 {
7364 rtx clobreg = gen_reg_rtx (SFmode);
7365 emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
7366 clobreg));
7367 DONE;
7368 }
7369 "
7370 )
7371
7372 ;; Transform a floating-point move of a constant into a core register into
7373 ;; an SImode operation.
7374 (define_split
7375 [(set (match_operand:SF 0 "arm_general_register_operand" "")
7376 (match_operand:SF 1 "immediate_operand" ""))]
7377 "TARGET_EITHER
7378 && reload_completed
7379 && CONST_DOUBLE_P (operands[1])"
7380 [(set (match_dup 2) (match_dup 3))]
7381 "
7382 operands[2] = gen_lowpart (SImode, operands[0]);
7383 operands[3] = gen_lowpart (SImode, operands[1]);
7384 if (operands[2] == 0 || operands[3] == 0)
7385 FAIL;
7386 "
7387 )
7388
7389 (define_insn "*arm_movsf_soft_insn"
7390 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
7391 (match_operand:SF 1 "general_operand" "r,mE,r"))]
7392 "TARGET_32BIT
7393 && TARGET_SOFT_FLOAT && !TARGET_HAVE_MVE
7394 && (!MEM_P (operands[0])
7395 || register_operand (operands[1], SFmode))"
7396 {
7397 switch (which_alternative)
7398 {
7399 case 0: return \"mov%?\\t%0, %1\";
7400 case 1:
7401 /* Cannot load it directly, split to load it via MOV / MOVT. */
7402 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
7403 return \"#\";
7404 return \"ldr%?\\t%0, %1\\t%@ float\";
7405 case 2: return \"str%?\\t%1, %0\\t%@ float\";
7406 default: gcc_unreachable ();
7407 }
7408 }
7409 [(set_attr "predicable" "yes")
7410 (set_attr "type" "mov_reg,load_4,store_4")
7411 (set_attr "arm_pool_range" "*,4096,*")
7412 (set_attr "thumb2_pool_range" "*,4094,*")
7413 (set_attr "arm_neg_pool_range" "*,4084,*")
7414 (set_attr "thumb2_neg_pool_range" "*,0,*")]
7415 )
7416
7417 ;; Splitter for the above.
7418 (define_split
7419 [(set (match_operand:SF 0 "s_register_operand")
7420 (match_operand:SF 1 "const_double_operand"))]
7421 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
7422 [(const_int 0)]
7423 {
7424 long buf;
7425 real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
7426 rtx cst = gen_int_mode (buf, SImode);
7427 emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
7428 DONE;
7429 }
7430 )
7431
7432 (define_expand "movdf"
7433 [(set (match_operand:DF 0 "general_operand")
7434 (match_operand:DF 1 "general_operand"))]
7435 "TARGET_EITHER"
7436 "
7437 gcc_checking_assert (aligned_operand (operands[0], DFmode));
7438 gcc_checking_assert (aligned_operand (operands[1], DFmode));
7439 if (TARGET_32BIT)
7440 {
7441 if (MEM_P (operands[0]))
7442 operands[1] = force_reg (DFmode, operands[1]);
7443 }
7444 else /* TARGET_THUMB */
7445 {
7446 if (can_create_pseudo_p ())
7447 {
7448 if (!REG_P (operands[0]))
7449 operands[1] = force_reg (DFmode, operands[1]);
7450 }
7451 }
7452
7453 /* Cannot load it directly, generate a load with clobber so that it can be
7454 loaded via GPR with MOV / MOVT. */
7455 if (arm_disable_literal_pool
7456 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
7457 && CONSTANT_P (operands[1])
7458 && TARGET_VFP_BASE
7459 && !arm_const_double_rtx (operands[1])
7460 && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
7461 {
7462 rtx clobreg = gen_reg_rtx (DFmode);
7463 emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
7464 clobreg));
7465 DONE;
7466 }
7467 "
7468 )
7469
7470 ;; Reloading a df mode value stored in integer regs to memory can require a
7471 ;; scratch reg.
7472 ;; Another reload_out<m> pattern that requires special constraints.
7473 (define_expand "reload_outdf"
7474 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
7475 (match_operand:DF 1 "s_register_operand" "r")
7476 (match_operand:SI 2 "s_register_operand" "=&r")]
7477 "TARGET_THUMB2"
7478 "
7479 {
7480 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
7481
7482 if (code == REG)
7483 operands[2] = XEXP (operands[0], 0);
7484 else if (code == POST_INC || code == PRE_DEC)
7485 {
7486 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
7487 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
7488 emit_insn (gen_movdi (operands[0], operands[1]));
7489 DONE;
7490 }
7491 else if (code == PRE_INC)
7492 {
7493 rtx reg = XEXP (XEXP (operands[0], 0), 0);
7494
7495 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
7496 operands[2] = reg;
7497 }
7498 else if (code == POST_DEC)
7499 operands[2] = XEXP (XEXP (operands[0], 0), 0);
7500 else
7501 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
7502 XEXP (XEXP (operands[0], 0), 1)));
7503
7504 emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
7505 operands[1]));
7506
7507 if (code == POST_DEC)
7508 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
7509
7510 DONE;
7511 }"
7512 )
7513
7514 (define_insn "*movdf_soft_insn"
7515 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
7516 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
7517 "TARGET_32BIT && TARGET_SOFT_FLOAT && !TARGET_HAVE_MVE
7518 && ( register_operand (operands[0], DFmode)
7519 || register_operand (operands[1], DFmode))"
7520 "*
7521 switch (which_alternative)
7522 {
7523 case 0:
7524 case 1:
7525 case 2:
7526 return \"#\";
7527 case 3:
7528 /* Cannot load it directly, split to load it via MOV / MOVT. */
7529 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
7530 return \"#\";
7531 /* Fall through. */
7532 default:
7533 return output_move_double (operands, true, NULL);
7534 }
7535 "
7536 [(set_attr "length" "8,12,16,8,8")
7537 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
7538 (set_attr "arm_pool_range" "*,*,*,1020,*")
7539 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
7540 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
7541 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
7542 )
7543
7544 ;; Splitter for the above.
7545 (define_split
7546 [(set (match_operand:DF 0 "s_register_operand")
7547 (match_operand:DF 1 "const_double_operand"))]
7548 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
7549 [(const_int 0)]
7550 {
7551 long buf[2];
7552 int order = BYTES_BIG_ENDIAN ? 1 : 0;
7553 real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
7554 unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
7555 ival |= (zext_hwi (buf[1 - order], 32) << 32);
7556 rtx cst = gen_int_mode (ival, DImode);
7557 emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
7558 DONE;
7559 }
7560 )
7561 \f
7562
7563 ;; load- and store-multiple insns
7564 ;; The ARM can load/store any set of registers, provided that they are in
7565 ;; ascending order, but these expanders assume a contiguous set.
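;; As an example (a sketch only; the exact parallel is built by
;; arm_gen_load_multiple), loading four consecutive words starting at r4
;; from the address in r0 would normally become a single
;;     ldmia   r0, {r4-r7}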
7566
7567 (define_expand "load_multiple"
7568 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7569 (match_operand:SI 1 "" ""))
7570 (use (match_operand:SI 2 "" ""))])]
7571 "TARGET_32BIT"
7572 {
7573 HOST_WIDE_INT offset = 0;
7574
7575 /* Support only fixed point registers. */
7576 if (!CONST_INT_P (operands[2])
7577 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
7578 || INTVAL (operands[2]) < 2
7579 || !MEM_P (operands[1])
7580 || !REG_P (operands[0])
7581 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
7582 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7583 FAIL;
7584
7585 operands[3]
7586 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
7587 INTVAL (operands[2]),
7588 force_reg (SImode, XEXP (operands[1], 0)),
7589 FALSE, operands[1], &offset);
7590 })
7591
7592 (define_expand "store_multiple"
7593 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7594 (match_operand:SI 1 "" ""))
7595 (use (match_operand:SI 2 "" ""))])]
7596 "TARGET_32BIT"
7597 {
7598 HOST_WIDE_INT offset = 0;
7599
7600 /* Support only fixed point registers. */
7601 if (!CONST_INT_P (operands[2])
7602 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
7603 || INTVAL (operands[2]) < 2
7604 || !REG_P (operands[1])
7605 || !MEM_P (operands[0])
7606 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
7607 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7608 FAIL;
7609
7610 operands[3]
7611 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
7612 INTVAL (operands[2]),
7613 force_reg (SImode, XEXP (operands[0], 0)),
7614 FALSE, operands[0], &offset);
7615 })
7616
7617
7618 (define_expand "setmemsi"
7619 [(match_operand:BLK 0 "general_operand")
7620 (match_operand:SI 1 "const_int_operand")
7621 (match_operand:SI 2 "const_int_operand")
7622 (match_operand:SI 3 "const_int_operand")]
7623 "TARGET_32BIT"
7624 {
7625 if (arm_gen_setmem (operands))
7626 DONE;
7627
7628 FAIL;
7629 })
7630
7631
7632 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
7633 ;; We could let this apply to shorter blocks as well, but it clobbers so
7634 ;; many registers that there is then probably a better way.
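;; For instance, a word-aligned 24-byte copy might be expanded along the
;; lines of (registers illustrative; the real choice is made by
;; arm_gen_cpymemqi or gen_cpymem_ldrd_strd):
;;     ldmia   r1!, {r3, r4, r5}
;;     stmia   r0!, {r3, r4, r5}
;;     ldmia   r1!, {r3, r4, r5}
;;     stmia   r0!, {r3, r4, r5}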
7635
7636 (define_expand "cpymemqi"
7637 [(match_operand:BLK 0 "general_operand")
7638 (match_operand:BLK 1 "general_operand")
7639 (match_operand:SI 2 "const_int_operand")
7640 (match_operand:SI 3 "const_int_operand")]
7641 ""
7642 "
7643 if (TARGET_32BIT)
7644 {
7645 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
7646 && !optimize_function_for_size_p (cfun))
7647 {
7648 if (gen_cpymem_ldrd_strd (operands))
7649 DONE;
7650 FAIL;
7651 }
7652
7653 if (arm_gen_cpymemqi (operands))
7654 DONE;
7655 FAIL;
7656 }
7657 else /* TARGET_THUMB1 */
7658 {
7659 if ( INTVAL (operands[3]) != 4
7660 || INTVAL (operands[2]) > 48)
7661 FAIL;
7662
7663 thumb_expand_cpymemqi (operands);
7664 DONE;
7665 }
7666 "
7667 )
7668 \f
7669
7670 ;; Compare & branch insns
7671 ;; The range calculations are as follows:
7672 ;; For forward branches, the address calculation returns the address of
7673 ;; the next instruction. This is 2 beyond the branch instruction.
7674 ;; For backward branches, the address calculation returns the address of
7675 ;; the first instruction in this pattern (cmp). This is 2 before the branch
7676 ;; instruction for the shortest sequence, and 4 before the branch instruction
7677 ;; if we have to jump around an unconditional branch.
7678 ;; To the basic branch range the PC offset must be added (this is +4).
7679 ;; So for forward branches we have
7680 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
7681 ;; And for backward branches we have
7682 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
7683 ;;
7684 ;; In 16-bit Thumb these ranges are:
7685 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040 -> 2048).
7686 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 -> 256).
7687
7688 ;; In 32-bit Thumb these ranges are:
7689 ;; For a 'b' +/- 16MB is not checked for.
7690 ;; For a 'b<cond>' pos_range = 1048574, neg_range = -1048576 giving
7691 ;; (-1048568 -> 1048576).
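;; Worked example for the 16-bit Thumb 'b<cond>' case:
;;   forward:  pos_range - pos_base_offs + pc_offs = 254 - 2 + 4 = 256
;;   backward: neg_range - neg_base_offs + pc_offs = -256 - (-2) + 4 = -250
;; which is where the (-250 -> 256) window above comes from.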
7692
7693 (define_expand "cbranchsi4"
7694 [(set (pc) (if_then_else
7695 (match_operator 0 "expandable_comparison_operator"
7696 [(match_operand:SI 1 "s_register_operand")
7697 (match_operand:SI 2 "nonmemory_operand")])
7698 (label_ref (match_operand 3 "" ""))
7699 (pc)))]
7700 "TARGET_EITHER"
7701 "
7702 if (!TARGET_THUMB1)
7703 {
7704 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7705 FAIL;
7706 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7707 operands[3]));
7708 DONE;
7709 }
7710 if (thumb1_cmpneg_operand (operands[2], SImode))
7711 {
7712 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
7713 operands[3], operands[0]));
7714 DONE;
7715 }
7716 if (!thumb1_cmp_operand (operands[2], SImode))
7717 operands[2] = force_reg (SImode, operands[2]);
7718 ")
7719
7720 (define_expand "cbranchsf4"
7721 [(set (pc) (if_then_else
7722 (match_operator 0 "expandable_comparison_operator"
7723 [(match_operand:SF 1 "s_register_operand")
7724 (match_operand:SF 2 "vfp_compare_operand")])
7725 (label_ref (match_operand 3 "" ""))
7726 (pc)))]
7727 "TARGET_32BIT && TARGET_HARD_FLOAT"
7728 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7729 operands[3])); DONE;"
7730 )
7731
7732 (define_expand "cbranchdf4"
7733 [(set (pc) (if_then_else
7734 (match_operator 0 "expandable_comparison_operator"
7735 [(match_operand:DF 1 "s_register_operand")
7736 (match_operand:DF 2 "vfp_compare_operand")])
7737 (label_ref (match_operand 3 "" ""))
7738 (pc)))]
7739 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7740 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7741 operands[3])); DONE;"
7742 )
7743
7744 (define_expand "cbranchdi4"
7745 [(set (pc) (if_then_else
7746 (match_operator 0 "expandable_comparison_operator"
7747 [(match_operand:DI 1 "s_register_operand")
7748 (match_operand:DI 2 "reg_or_int_operand")])
7749 (label_ref (match_operand 3 "" ""))
7750 (pc)))]
7751 "TARGET_32BIT"
7752 "{
7753 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7754 FAIL;
7755 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7756 operands[3]));
7757 DONE;
7758 }"
7759 )
7760
7761 ;; Comparison and test insns
7762
7763 (define_insn "*arm_cmpsi_insn"
7764 [(set (reg:CC CC_REGNUM)
7765 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
7766 (match_operand:SI 1 "arm_add_operand" "Py,r,r,I,L")))]
7767 "TARGET_32BIT"
7768 "@
7769 cmp%?\\t%0, %1
7770 cmp%?\\t%0, %1
7771 cmp%?\\t%0, %1
7772 cmp%?\\t%0, %1
7773 cmn%?\\t%0, #%n1"
7774 [(set_attr "conds" "set")
7775 (set_attr "arch" "t2,t2,any,any,any")
7776 (set_attr "length" "2,2,4,4,4")
7777 (set_attr "predicable" "yes")
7778 (set_attr "predicable_short_it" "yes,yes,yes,no,no")
7779 (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
7780 )
7781
7782 (define_insn "*cmpsi_shiftsi"
7783 [(set (reg:CC CC_REGNUM)
7784 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7785 (match_operator:SI 3 "shift_operator"
7786 [(match_operand:SI 1 "s_register_operand" "r,r")
7787 (match_operand:SI 2 "shift_amount_operand" "M,r")])))]
7788 "TARGET_32BIT"
7789 "cmp\\t%0, %1%S3"
7790 [(set_attr "conds" "set")
7791 (set_attr "shift" "1")
7792 (set_attr "arch" "32,a")
7793 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
7794
7795 (define_insn "*cmpsi_shiftsi_swp"
7796 [(set (reg:CC_SWP CC_REGNUM)
7797 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7798 [(match_operand:SI 1 "s_register_operand" "r,r")
7799 (match_operand:SI 2 "shift_amount_operand" "M,r")])
7800 (match_operand:SI 0 "s_register_operand" "r,r")))]
7801 "TARGET_32BIT"
7802 "cmp%?\\t%0, %1%S3"
7803 [(set_attr "conds" "set")
7804 (set_attr "shift" "1")
7805 (set_attr "arch" "32,a")
7806 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
7807
7808 (define_insn "*arm_cmpsi_negshiftsi_si"
7809 [(set (reg:CC_Z CC_REGNUM)
7810 (compare:CC_Z
7811 (neg:SI (match_operator:SI 1 "shift_operator"
7812 [(match_operand:SI 2 "s_register_operand" "r,r")
7813 (match_operand:SI 3 "shift_amount_operand" "M,r")]))
7814 (match_operand:SI 0 "s_register_operand" "r,r")))]
7815 "TARGET_32BIT"
7816 "cmn%?\\t%0, %2%S1"
7817 [(set_attr "conds" "set")
7818 (set_attr "arch" "32,a")
7819 (set_attr "shift" "2")
7820 (set_attr "type" "alus_shift_imm,alus_shift_reg")
7821 (set_attr "predicable" "yes")]
7822 )
7823
7824 ; This insn allows redundant compares to be removed by cse; nothing should
7825 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7826 ; is deleted later on. The match_dup will match the mode here, so that
7827 ; mode changes of the condition codes aren't lost by this even though we don't
7828 ; specify what they are.
7829
7830 (define_insn "*deleted_compare"
7831 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7832 "TARGET_32BIT"
7833 "\\t%@ deleted compare"
7834 [(set_attr "conds" "set")
7835 (set_attr "length" "0")
7836 (set_attr "type" "no_insn")]
7837 )
7838
7839 \f
7840 ;; Conditional branch insns
7841
7842 (define_expand "cbranch_cc"
7843 [(set (pc)
7844 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7845 (match_operand 2 "" "")])
7846 (label_ref (match_operand 3 "" ""))
7847 (pc)))]
7848 "TARGET_32BIT"
7849 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7850 operands[1], operands[2], NULL_RTX);
7851 operands[2] = const0_rtx;"
7852 )
7853
7854 ;;
7855 ;; Patterns to match conditional branch insns.
7856 ;;
7857
7858 (define_insn "arm_cond_branch"
7859 [(set (pc)
7860 (if_then_else (match_operator 1 "arm_comparison_operator"
7861 [(match_operand 2 "cc_register" "") (const_int 0)])
7862 (label_ref (match_operand 0 "" ""))
7863 (pc)))]
7864 "TARGET_32BIT"
7865 {
7866 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7867 {
7868 arm_ccfsm_state += 2;
7869 return "";
7870 }
7871 switch (get_attr_length (insn))
7872 {
7873 case 2: /* Thumb2 16-bit b{cond}. */
7874 case 4: /* Thumb2 32-bit b{cond} or A32 b{cond}. */
7875 return "b%d1\t%l0";
7876 break;
7877
7878 /* Thumb2 b{cond} out of range. Use 16-bit b{cond} and
7879 unconditional branch b. */
7880 default: return arm_gen_far_branch (operands, 0, "Lbcond", "b%D1\t");
7881 }
7882 }
7883 [(set_attr "conds" "use")
7884 (set_attr "type" "branch")
7885 (set (attr "length")
7886 (if_then_else (match_test "!TARGET_THUMB2")
7887
7888 ;; Target is not Thumb2, therefore it is A32. Generate b{cond}.
7889 (const_int 4)
7890
7891 ;; Check if target is within 16-bit Thumb2 b{cond} range.
7892 (if_then_else (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7893 (le (minus (match_dup 0) (pc)) (const_int 256)))
7894
7895 ;; Target is Thumb2, within narrow range.
7896 ;; Generate b{cond}.
7897 (const_int 2)
7898
7899 ;; Check if target is within 32-bit Thumb2 b{cond} range.
7900 (if_then_else (and (ge (minus (match_dup 0) (pc))(const_int -1048568))
7901 (le (minus (match_dup 0) (pc)) (const_int 1048576)))
7902
7903 ;; Target is Thumb2, within wide range.
7904 ;; Generate b{cond}
7905 (const_int 4)
7906 ;; Target is Thumb2, out of range.
7907 ;; Generate narrow b{cond} and unconditional branch b.
7908 (const_int 6)))))]
7909 )
7910
7911 (define_insn "*arm_cond_branch_reversed"
7912 [(set (pc)
7913 (if_then_else (match_operator 1 "arm_comparison_operator"
7914 [(match_operand 2 "cc_register" "") (const_int 0)])
7915 (pc)
7916 (label_ref (match_operand 0 "" ""))))]
7917 "TARGET_32BIT"
7918 {
7919 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7920 {
7921 arm_ccfsm_state += 2;
7922 return "";
7923 }
7924 switch (get_attr_length (insn))
7925 {
7926 case 2: /* Thumb2 16-bit b{cond}. */
7927 case 4: /* Thumb2 32-bit b{cond} or A32 b{cond}. */
7928 return "b%D1\t%l0";
7929 break;
7930
7931 /* Thumb2 b{cond} out of range. Use 16-bit b{cond} and
7932 unconditional branch b. */
7933 default: return arm_gen_far_branch (operands, 0, "Lbcond", "b%d1\t");
7934 }
7935 }
7936 [(set_attr "conds" "use")
7937 (set_attr "type" "branch")
7938 (set (attr "length")
7939 (if_then_else (match_test "!TARGET_THUMB2")
7940
7941 ;; Target is not Thumb2, therefore it is A32. Generate b{cond}.
7942 (const_int 4)
7943
7944 ;; Check if target is within 16-bit Thumb2 b{cond} range.
7945 (if_then_else (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7946 (le (minus (match_dup 0) (pc)) (const_int 256)))
7947
7948 ;; Target is Thumb2, within narrow range.
7949 ;; Generate b{cond}.
7950 (const_int 2)
7951
7952 ;; Check if target is within 32-bit Thumb2 b{cond} range.
7953 (if_then_else (and (ge (minus (match_dup 0) (pc))(const_int -1048568))
7954 (le (minus (match_dup 0) (pc)) (const_int 1048576)))
7955
7956 ;; Target is Thumb2, within wide range.
7957 ;; Generate b{cond}.
7958 (const_int 4)
7959 ;; Target is Thumb2, out of range.
7960 ;; Generate narrow b{cond} and unconditional branch b.
7961 (const_int 6)))))]
7962 )
7963
7964 \f
7965
7966 ; scc insns
7967
7968 (define_expand "cstore_cc"
7969 [(set (match_operand:SI 0 "s_register_operand")
7970 (match_operator:SI 1 "" [(match_operand 2 "" "")
7971 (match_operand 3 "" "")]))]
7972 "TARGET_32BIT"
7973 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7974 operands[2], operands[3], NULL_RTX);
7975 operands[3] = const0_rtx;"
7976 )
7977
7978 (define_insn_and_split "*mov_scc"
7979 [(set (match_operand:SI 0 "s_register_operand" "=r")
7980 (match_operator:SI 1 "arm_comparison_operator_mode"
7981 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7982 "TARGET_ARM"
7983 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7984 "TARGET_ARM"
7985 [(set (match_dup 0)
7986 (if_then_else:SI (match_dup 1)
7987 (const_int 1)
7988 (const_int 0)))]
7989 ""
7990 [(set_attr "conds" "use")
7991 (set_attr "length" "8")
7992 (set_attr "type" "multiple")]
7993 )
7994
7995 (define_insn "*negscc_borrow"
7996 [(set (match_operand:SI 0 "s_register_operand" "=r")
7997 (neg:SI (match_operand:SI 1 "arm_borrow_operation" "")))]
7998 "TARGET_32BIT"
7999 "sbc\\t%0, %0, %0"
8000 [(set_attr "conds" "use")
8001 (set_attr "length" "4")
8002 (set_attr "type" "adc_reg")]
8003 )
8004
8005 (define_insn_and_split "*mov_negscc"
8006 [(set (match_operand:SI 0 "s_register_operand" "=r")
8007 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
8008 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8009 "TARGET_ARM && !arm_borrow_operation (operands[1], SImode)"
8010 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
8011 "&& true"
8012 [(set (match_dup 0)
8013 (if_then_else:SI (match_dup 1)
8014 (match_dup 3)
8015 (const_int 0)))]
8016 {
8017 operands[3] = GEN_INT (~0);
8018 }
8019 [(set_attr "conds" "use")
8020 (set_attr "length" "8")
8021 (set_attr "type" "multiple")]
8022 )
8023
8024 (define_insn_and_split "*mov_notscc"
8025 [(set (match_operand:SI 0 "s_register_operand" "=r")
8026 (not:SI (match_operator:SI 1 "arm_comparison_operator"
8027 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8028 "TARGET_ARM"
8029 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
8030 "TARGET_ARM"
8031 [(set (match_dup 0)
8032 (if_then_else:SI (match_dup 1)
8033 (match_dup 3)
8034 (match_dup 4)))]
8035 {
8036 operands[3] = GEN_INT (~1);
8037 operands[4] = GEN_INT (~0);
8038 }
8039 [(set_attr "conds" "use")
8040 (set_attr "length" "8")
8041 (set_attr "type" "multiple")]
8042 )
8043
8044 (define_expand "cstoresi4"
8045 [(set (match_operand:SI 0 "s_register_operand")
8046 (match_operator:SI 1 "expandable_comparison_operator"
8047 [(match_operand:SI 2 "s_register_operand")
8048 (match_operand:SI 3 "reg_or_int_operand")]))]
8049 "TARGET_32BIT || TARGET_THUMB1"
8050 "{
8051 rtx op3, scratch, scratch2;
8052
8053 if (!TARGET_THUMB1)
8054 {
8055 if (!arm_add_operand (operands[3], SImode))
8056 operands[3] = force_reg (SImode, operands[3]);
8057 emit_insn (gen_cstore_cc (operands[0], operands[1],
8058 operands[2], operands[3]));
8059 DONE;
8060 }
8061
8062 if (operands[3] == const0_rtx)
8063 {
8064 switch (GET_CODE (operands[1]))
8065 {
8066 case EQ:
8067 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
8068 break;
8069
8070 case NE:
8071 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
8072 break;
8073
8074 case LE:
8075 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
8076 NULL_RTX, 0, OPTAB_WIDEN);
8077 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
8078 NULL_RTX, 0, OPTAB_WIDEN);
8079 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8080 operands[0], 1, OPTAB_WIDEN);
8081 break;
8082
8083 case GE:
8084 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
8085 NULL_RTX, 1);
8086 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8087 NULL_RTX, 1, OPTAB_WIDEN);
8088 break;
8089
8090 case GT:
8091 scratch = expand_binop (SImode, ashr_optab, operands[2],
8092 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
8093 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
8094 NULL_RTX, 0, OPTAB_WIDEN);
8095 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
8096 0, OPTAB_WIDEN);
8097 break;
8098
8099 /* LT is handled by generic code. No need for unsigned with 0. */
8100 default:
8101 FAIL;
8102 }
8103 DONE;
8104 }
8105
8106 switch (GET_CODE (operands[1]))
8107 {
8108 case EQ:
8109 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8110 NULL_RTX, 0, OPTAB_WIDEN);
8111 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
8112 break;
8113
8114 case NE:
8115 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8116 NULL_RTX, 0, OPTAB_WIDEN);
8117 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
8118 break;
8119
8120 case LE:
8121 op3 = force_reg (SImode, operands[3]);
8122
8123 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
8124 NULL_RTX, 1, OPTAB_WIDEN);
8125 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
8126 NULL_RTX, 0, OPTAB_WIDEN);
8127 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8128 op3, operands[2]));
8129 break;
8130
8131 case GE:
8132 op3 = operands[3];
8133 if (!thumb1_cmp_operand (op3, SImode))
8134 op3 = force_reg (SImode, op3);
8135 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8136 NULL_RTX, 0, OPTAB_WIDEN);
8137 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8138 NULL_RTX, 1, OPTAB_WIDEN);
8139 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8140 operands[2], op3));
8141 break;
8142
8143 case LEU:
8144 op3 = force_reg (SImode, operands[3]);
8145 scratch = force_reg (SImode, const0_rtx);
8146 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8147 op3, operands[2]));
8148 break;
8149
8150 case GEU:
8151 op3 = operands[3];
8152 if (!thumb1_cmp_operand (op3, SImode))
8153 op3 = force_reg (SImode, op3);
8154 scratch = force_reg (SImode, const0_rtx);
8155 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8156 operands[2], op3));
8157 break;
8158
8159 case LTU:
8160 op3 = operands[3];
8161 if (!thumb1_cmp_operand (op3, SImode))
8162 op3 = force_reg (SImode, op3);
8163 scratch = gen_reg_rtx (SImode);
8164 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
8165 break;
8166
8167 case GTU:
8168 op3 = force_reg (SImode, operands[3]);
8169 scratch = gen_reg_rtx (SImode);
8170 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
8171 break;
8172
8173 /* No good sequences for GT, LT. */
8174 default:
8175 FAIL;
8176 }
8177 DONE;
8178 }")
8179
8180 (define_expand "cstorehf4"
8181 [(set (match_operand:SI 0 "s_register_operand")
8182 (match_operator:SI 1 "expandable_comparison_operator"
8183 [(match_operand:HF 2 "s_register_operand")
8184 (match_operand:HF 3 "vfp_compare_operand")]))]
8185 "TARGET_VFP_FP16INST"
8186 {
8187 if (!arm_validize_comparison (&operands[1],
8188 &operands[2],
8189 &operands[3]))
8190 FAIL;
8191
8192 emit_insn (gen_cstore_cc (operands[0], operands[1],
8193 operands[2], operands[3]));
8194 DONE;
8195 }
8196 )
8197
8198 (define_expand "cstoresf4"
8199 [(set (match_operand:SI 0 "s_register_operand")
8200 (match_operator:SI 1 "expandable_comparison_operator"
8201 [(match_operand:SF 2 "s_register_operand")
8202 (match_operand:SF 3 "vfp_compare_operand")]))]
8203 "TARGET_32BIT && TARGET_HARD_FLOAT"
8204 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8205 operands[2], operands[3])); DONE;"
8206 )
8207
8208 (define_expand "cstoredf4"
8209 [(set (match_operand:SI 0 "s_register_operand")
8210 (match_operator:SI 1 "expandable_comparison_operator"
8211 [(match_operand:DF 2 "s_register_operand")
8212 (match_operand:DF 3 "vfp_compare_operand")]))]
8213 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
8214 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8215 operands[2], operands[3])); DONE;"
8216 )
8217
8218 (define_expand "cstoredi4"
8219 [(set (match_operand:SI 0 "s_register_operand")
8220 (match_operator:SI 1 "expandable_comparison_operator"
8221 [(match_operand:DI 2 "s_register_operand")
8222 (match_operand:DI 3 "reg_or_int_operand")]))]
8223 "TARGET_32BIT"
8224 "{
8225 if (!arm_validize_comparison (&operands[1],
8226 &operands[2],
8227 &operands[3]))
8228 FAIL;
8229 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
8230 operands[3]));
8231 DONE;
8232 }"
8233 )
8234
8235 \f
8236 ;; Conditional move insns
8237
8238 (define_expand "movsicc"
8239 [(set (match_operand:SI 0 "s_register_operand")
8240 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
8241 (match_operand:SI 2 "arm_not_operand")
8242 (match_operand:SI 3 "arm_not_operand")))]
8243 "TARGET_32BIT"
8244 "
8245 {
8246 enum rtx_code code;
8247 rtx ccreg;
8248
8249 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8250 &XEXP (operands[1], 1)))
8251 FAIL;
8252
8253 code = GET_CODE (operands[1]);
8254 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8255 XEXP (operands[1], 1), NULL_RTX);
8256 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8257 }"
8258 )
8259
8260 (define_expand "movhfcc"
8261 [(set (match_operand:HF 0 "s_register_operand")
8262 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
8263 (match_operand:HF 2 "s_register_operand")
8264 (match_operand:HF 3 "s_register_operand")))]
8265 "TARGET_VFP_FP16INST"
8266 "
8267 {
8268 enum rtx_code code = GET_CODE (operands[1]);
8269 rtx ccreg;
8270
8271 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8272 &XEXP (operands[1], 1)))
8273 FAIL;
8274
8275 code = GET_CODE (operands[1]);
8276 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8277 XEXP (operands[1], 1), NULL_RTX);
8278 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8279 }"
8280 )
8281
8282 (define_expand "movsfcc"
8283 [(set (match_operand:SF 0 "s_register_operand")
8284 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
8285 (match_operand:SF 2 "s_register_operand")
8286 (match_operand:SF 3 "s_register_operand")))]
8287 "TARGET_32BIT && TARGET_HARD_FLOAT"
8288 "
8289 {
8290 enum rtx_code code = GET_CODE (operands[1]);
8291 rtx ccreg;
8292
8293 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8294 &XEXP (operands[1], 1)))
8295 FAIL;
8296
8297 code = GET_CODE (operands[1]);
8298 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8299 XEXP (operands[1], 1), NULL_RTX);
8300 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8301 }"
8302 )
8303
8304 (define_expand "movdfcc"
8305 [(set (match_operand:DF 0 "s_register_operand")
8306 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
8307 (match_operand:DF 2 "s_register_operand")
8308 (match_operand:DF 3 "s_register_operand")))]
8309 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
8310 "
8311 {
8312 enum rtx_code code = GET_CODE (operands[1]);
8313 rtx ccreg;
8314
8315 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8316 &XEXP (operands[1], 1)))
8317 FAIL;
8318 code = GET_CODE (operands[1]);
8319 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8320 XEXP (operands[1], 1), NULL_RTX);
8321 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8322 }"
8323 )
8324
8325 (define_insn "*cmov<mode>"
8326 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
8327 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
8328 [(match_operand 2 "cc_register" "") (const_int 0)])
8329 (match_operand:SDF 3 "s_register_operand"
8330 "<F_constraint>")
8331 (match_operand:SDF 4 "s_register_operand"
8332 "<F_constraint>")))]
8333 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
8334 "*
8335 {
8336 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
8337 switch (code)
8338 {
8339 case ARM_GE:
8340 case ARM_GT:
8341 case ARM_EQ:
8342 case ARM_VS:
8343 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
8344 case ARM_LT:
8345 case ARM_LE:
8346 case ARM_NE:
8347 case ARM_VC:
8348 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
8349 default:
8350 gcc_unreachable ();
8351 }
8352 return \"\";
8353 }"
8354 [(set_attr "conds" "use")
8355 (set_attr "type" "fcsel")]
8356 )
8357
8358 (define_insn "*cmovhf"
8359 [(set (match_operand:HF 0 "s_register_operand" "=t")
8360 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
8361 [(match_operand 2 "cc_register" "") (const_int 0)])
8362 (match_operand:HF 3 "s_register_operand" "t")
8363 (match_operand:HF 4 "s_register_operand" "t")))]
8364 "TARGET_VFP_FP16INST"
8365 "*
8366 {
8367 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
8368 switch (code)
8369 {
8370 case ARM_GE:
8371 case ARM_GT:
8372 case ARM_EQ:
8373 case ARM_VS:
8374 return \"vsel%d1.f16\\t%0, %3, %4\";
8375 case ARM_LT:
8376 case ARM_LE:
8377 case ARM_NE:
8378 case ARM_VC:
8379 return \"vsel%D1.f16\\t%0, %4, %3\";
8380 default:
8381 gcc_unreachable ();
8382 }
8383 return \"\";
8384 }"
8385 [(set_attr "conds" "use")
8386 (set_attr "type" "fcsel")]
8387 )
8388
8389 (define_insn_and_split "*movsicc_insn"
8390 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8391 (if_then_else:SI
8392 (match_operator 3 "arm_comparison_operator"
8393 [(match_operand 4 "cc_register" "") (const_int 0)])
8394 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8395 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8396 "TARGET_ARM"
8397 "@
8398 mov%D3\\t%0, %2
8399 mvn%D3\\t%0, #%B2
8400 mov%d3\\t%0, %1
8401 mvn%d3\\t%0, #%B1
8402 #
8403 #
8404 #
8405 #"
8406 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8407 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8408 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8409 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8410 "&& reload_completed"
8411 [(const_int 0)]
8412 {
8413 enum rtx_code rev_code;
8414 machine_mode mode;
8415 rtx rev_cond;
8416
8417 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
8418 operands[3],
8419 gen_rtx_SET (operands[0], operands[1])));
8420
8421 rev_code = GET_CODE (operands[3]);
8422 mode = GET_MODE (operands[4]);
8423 if (mode == CCFPmode || mode == CCFPEmode)
8424 rev_code = reverse_condition_maybe_unordered (rev_code);
8425 else
8426 rev_code = reverse_condition (rev_code);
8427
8428 rev_cond = gen_rtx_fmt_ee (rev_code,
8429 VOIDmode,
8430 operands[4],
8431 const0_rtx);
8432 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
8433 rev_cond,
8434 gen_rtx_SET (operands[0], operands[2])));
8435 DONE;
8436 }
8437 [(set_attr "length" "4,4,4,4,8,8,8,8")
8438 (set_attr "conds" "use")
8439 (set_attr_alternative "type"
8440 [(if_then_else (match_operand 2 "const_int_operand" "")
8441 (const_string "mov_imm")
8442 (const_string "mov_reg"))
8443 (const_string "mvn_imm")
8444 (if_then_else (match_operand 1 "const_int_operand" "")
8445 (const_string "mov_imm")
8446 (const_string "mov_reg"))
8447 (const_string "mvn_imm")
8448 (const_string "multiple")
8449 (const_string "multiple")
8450 (const_string "multiple")
8451 (const_string "multiple")])]
8452 )
8453
8454 (define_insn "*movsfcc_soft_insn"
8455 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8456 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8457 [(match_operand 4 "cc_register" "") (const_int 0)])
8458 (match_operand:SF 1 "s_register_operand" "0,r")
8459 (match_operand:SF 2 "s_register_operand" "r,0")))]
8460 "TARGET_ARM && TARGET_SOFT_FLOAT"
8461 "@
8462 mov%D3\\t%0, %2
8463 mov%d3\\t%0, %1"
8464 [(set_attr "conds" "use")
8465 (set_attr "type" "mov_reg")]
8466 )
8467
8468 \f
8469 ;; Jump and linkage insns
8470
8471 (define_expand "jump"
8472 [(set (pc)
8473 (label_ref (match_operand 0 "" "")))]
8474 "TARGET_EITHER"
8475 ""
8476 )
8477
8478 (define_insn "*arm_jump"
8479 [(set (pc)
8480 (label_ref (match_operand 0 "" "")))]
8481 "TARGET_32BIT"
8482 "*
8483 {
8484 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8485 {
8486 arm_ccfsm_state += 2;
8487 return \"\";
8488 }
8489 return \"b%?\\t%l0\";
8490 }
8491 "
8492 [(set_attr "predicable" "yes")
8493 (set (attr "length")
8494 (if_then_else
8495 (and (match_test "TARGET_THUMB2")
8496 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8497 (le (minus (match_dup 0) (pc)) (const_int 2048))))
8498 (const_int 2)
8499 (const_int 4)))
8500 (set_attr "type" "branch")]
8501 )
8502
8503 (define_expand "call"
8504 [(parallel [(call (match_operand 0 "memory_operand")
8505 (match_operand 1 "general_operand"))
8506 (use (match_operand 2 "" ""))
8507 (clobber (reg:SI LR_REGNUM))])]
8508 "TARGET_EITHER"
8509 "
8510 {
8511 rtx callee, pat;
8512 tree addr = MEM_EXPR (operands[0]);
8513
8514 /* In an untyped call, we can get NULL for operand 2. */
8515 if (operands[2] == NULL_RTX)
8516 operands[2] = const0_rtx;
8517
8518 /* Decide if we should generate indirect calls by loading the
8519 32-bit address of the callee into a register before performing the
8520 branch and link. */
8521 callee = XEXP (operands[0], 0);
8522 if (GET_CODE (callee) == SYMBOL_REF
8523 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8524 : !REG_P (callee))
8525 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8526
8527 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
8528 /* Indirect call: set r9 to the FDPIC value of the callee. */
8529 XEXP (operands[0], 0)
8530 = arm_load_function_descriptor (XEXP (operands[0], 0));
8531
8532 if (detect_cmse_nonsecure_call (addr))
8533 {
8534 pat = gen_nonsecure_call_internal (operands[0], operands[1],
8535 operands[2]);
8536 emit_call_insn (pat);
8537 }
8538 else
8539 {
8540 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8541 arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
8542 }
8543
8544 /* Restore FDPIC register (r9) after call. */
8545 if (TARGET_FDPIC)
8546 {
8547 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8548 rtx initial_fdpic_reg
8549 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
8550
8551 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
8552 initial_fdpic_reg));
8553 }
8554
8555 DONE;
8556 }"
8557 )
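;; Illustrative sketch only of what the expander above produces: a direct call
;; stays a plain "bl foo", while a long call or an indirect call first forces
;; the callee address into a register, e.g. (assuming an ARMv5T core, register
;; choice arbitrary):
;;         ldr     r3, =foo        @ literal-pool load (or movw/movt on newer cores)
;;         blx     r3
;; On FDPIC targets the expander also reloads r9 from the saved FDPIC value
;; once the call returns.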
8558
8559 (define_insn "restore_pic_register_after_call"
8560 [(set (match_operand:SI 0 "s_register_operand" "+r,r")
8561 (unspec:SI [(match_dup 0)
8562 (match_operand:SI 1 "nonimmediate_operand" "r,m")]
8563 UNSPEC_PIC_RESTORE))]
8564 ""
8565 "@
8566 mov\t%0, %1
8567 ldr\t%0, %1"
8568 )
8569
8570 (define_expand "call_internal"
8571 [(parallel [(call (match_operand 0 "memory_operand")
8572 (match_operand 1 "general_operand"))
8573 (use (match_operand 2 "" ""))
8574 (clobber (reg:SI LR_REGNUM))])])
8575
8576 (define_expand "nonsecure_call_internal"
8577 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
8578 UNSPEC_NONSECURE_MEM)
8579 (match_operand 1 "general_operand"))
8580 (use (match_operand 2 "" ""))
8581 (clobber (reg:SI LR_REGNUM))])]
8582 "use_cmse"
8583 "
8584 {
8585 if (!TARGET_HAVE_FPCXT_CMSE)
8586 {
8587 rtx tmp =
8588 copy_to_suggested_reg (XEXP (operands[0], 0),
8589 gen_rtx_REG (SImode, R4_REGNUM),
8590 SImode);
8591
8592 operands[0] = replace_equiv_address (operands[0], tmp);
8593 }
8594 }")
8595
8596 (define_insn "*call_reg_armv5"
8597 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8598 (match_operand 1 "" ""))
8599 (use (match_operand 2 "" ""))
8600 (clobber (reg:SI LR_REGNUM))]
8601 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
8602 "blx%?\\t%0"
8603 [(set_attr "type" "call")]
8604 )
8605
8606 (define_insn "*call_reg_arm"
8607 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8608 (match_operand 1 "" ""))
8609 (use (match_operand 2 "" ""))
8610 (clobber (reg:SI LR_REGNUM))]
8611 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
8612 "*
8613 return output_call (operands);
8614 "
8615 ;; The length is the worst case; normally only two instructions are needed.
8616 [(set_attr "length" "12")
8617 (set_attr "type" "call")]
8618 )
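;; Rough sketch of the usual pre-ARMv5T sequence (the real logic lives in
;; output_call):
;;         mov     lr, pc
;;         mov     pc, r0          @ or "bx r0" when interworking is available
;; The 12-byte worst case only happens when the callee address is in lr and
;; must first be copied into ip.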
8619
8620
8621 (define_expand "call_value"
8622 [(parallel [(set (match_operand 0 "" "")
8623 (call (match_operand 1 "memory_operand")
8624 (match_operand 2 "general_operand")))
8625 (use (match_operand 3 "" ""))
8626 (clobber (reg:SI LR_REGNUM))])]
8627 "TARGET_EITHER"
8628 "
8629 {
8630 rtx pat, callee;
8631 tree addr = MEM_EXPR (operands[1]);
8632
8633 /* In an untyped call, we can get NULL for operand 3. */
8634 if (operands[3] == 0)
8635 operands[3] = const0_rtx;
8636
8637 /* Decide if we should generate indirect calls by loading the
8638 32-bit address of the callee into a register before performing the
8639 branch and link. */
8640 callee = XEXP (operands[1], 0);
8641 if (GET_CODE (callee) == SYMBOL_REF
8642 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8643 : !REG_P (callee))
8644 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8645
8646 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
8647 /* Indirect call: set r9 to the FDPIC value of the callee. */
8648 XEXP (operands[1], 0)
8649 = arm_load_function_descriptor (XEXP (operands[1], 0));
8650
8651 if (detect_cmse_nonsecure_call (addr))
8652 {
8653 pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
8654 operands[2], operands[3]);
8655 emit_call_insn (pat);
8656 }
8657 else
8658 {
8659 pat = gen_call_value_internal (operands[0], operands[1],
8660 operands[2], operands[3]);
8661 arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
8662 }
8663
8664 /* Restore FDPIC register (r9) after call. */
8665 if (TARGET_FDPIC)
8666 {
8667 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8668 rtx initial_fdpic_reg
8669 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
8670
8671 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
8672 initial_fdpic_reg));
8673 }
8674
8675 DONE;
8676 }"
8677 )
8678
8679 (define_expand "call_value_internal"
8680 [(parallel [(set (match_operand 0 "" "")
8681 (call (match_operand 1 "memory_operand")
8682 (match_operand 2 "general_operand")))
8683 (use (match_operand 3 "" ""))
8684 (clobber (reg:SI LR_REGNUM))])])
8685
8686 (define_expand "nonsecure_call_value_internal"
8687 [(parallel [(set (match_operand 0 "" "")
8688 (call (unspec:SI [(match_operand 1 "memory_operand")]
8689 UNSPEC_NONSECURE_MEM)
8690 (match_operand 2 "general_operand")))
8691 (use (match_operand 3 "" ""))
8692 (clobber (reg:SI LR_REGNUM))])]
8693 "use_cmse"
8694 "
8695 {
8696 if (!TARGET_HAVE_FPCXT_CMSE)
8697 {
8698 rtx tmp =
8699 copy_to_suggested_reg (XEXP (operands[1], 0),
8700 gen_rtx_REG (SImode, R4_REGNUM),
8701 SImode);
8702
8703 operands[1] = replace_equiv_address (operands[1], tmp);
8704 }
8705 }")
8706
8707 (define_insn "*call_value_reg_armv5"
8708 [(set (match_operand 0 "" "")
8709 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8710 (match_operand 2 "" "")))
8711 (use (match_operand 3 "" ""))
8712 (clobber (reg:SI LR_REGNUM))]
8713 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
8714 "blx%?\\t%1"
8715 [(set_attr "type" "call")]
8716 )
8717
8718 (define_insn "*call_value_reg_arm"
8719 [(set (match_operand 0 "" "")
8720 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8721 (match_operand 2 "" "")))
8722 (use (match_operand 3 "" ""))
8723 (clobber (reg:SI LR_REGNUM))]
8724 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
8725 "*
8726 return output_call (&operands[1]);
8727 "
8728 [(set_attr "length" "12")
8729 (set_attr "type" "call")]
8730 )
8731
8732 ;; Allow calls to SYMBOL_REFs specially, as they are not valid general addresses.
8733 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
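;; For example, a call to foo is printed as "bl foo" (or "bl foo(PLT)" when a
;; PLT relocation is needed) rather than "bl #foo" (illustrative).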
8734
8735 (define_insn "*call_symbol"
8736 [(call (mem:SI (match_operand:SI 0 "" ""))
8737 (match_operand 1 "" ""))
8738 (use (match_operand 2 "" ""))
8739 (clobber (reg:SI LR_REGNUM))]
8740 "TARGET_32BIT
8741 && !SIBLING_CALL_P (insn)
8742 && (GET_CODE (operands[0]) == SYMBOL_REF)
8743 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8744 "*
8745 {
8746 rtx op = operands[0];
8747
8748 /* Switch mode now when possible. */
8749 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8750 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8751 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
8752
8753 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8754 }"
8755 [(set_attr "type" "call")]
8756 )
8757
8758 (define_insn "*call_value_symbol"
8759 [(set (match_operand 0 "" "")
8760 (call (mem:SI (match_operand:SI 1 "" ""))
8761 (match_operand:SI 2 "" "")))
8762 (use (match_operand 3 "" ""))
8763 (clobber (reg:SI LR_REGNUM))]
8764 "TARGET_32BIT
8765 && !SIBLING_CALL_P (insn)
8766 && (GET_CODE (operands[1]) == SYMBOL_REF)
8767 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8768 "*
8769 {
8770 rtx op = operands[1];
8771
8772 /* Switch mode now when possible. */
8773 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8774 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8775 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
8776
8777 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8778 }"
8779 [(set_attr "type" "call")]
8780 )
8781
8782 (define_expand "sibcall_internal"
8783 [(parallel [(call (match_operand 0 "memory_operand")
8784 (match_operand 1 "general_operand"))
8785 (return)
8786 (use (match_operand 2 "" ""))])])
8787
8788 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8789 (define_expand "sibcall"
8790 [(parallel [(call (match_operand 0 "memory_operand")
8791 (match_operand 1 "general_operand"))
8792 (return)
8793 (use (match_operand 2 "" ""))])]
8794 "TARGET_32BIT"
8795 "
8796 {
8797 rtx pat;
8798
8799 if ((!REG_P (XEXP (operands[0], 0))
8800 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
8801 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
8802 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
8803 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
8804
8805 if (operands[2] == NULL_RTX)
8806 operands[2] = const0_rtx;
8807
8808 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
8809 arm_emit_call_insn (pat, operands[0], true);
8810 DONE;
8811 }"
8812 )
8813
8814 (define_expand "sibcall_value_internal"
8815 [(parallel [(set (match_operand 0 "" "")
8816 (call (match_operand 1 "memory_operand")
8817 (match_operand 2 "general_operand")))
8818 (return)
8819 (use (match_operand 3 "" ""))])])
8820
8821 (define_expand "sibcall_value"
8822 [(parallel [(set (match_operand 0 "" "")
8823 (call (match_operand 1 "memory_operand")
8824 (match_operand 2 "general_operand")))
8825 (return)
8826 (use (match_operand 3 "" ""))])]
8827 "TARGET_32BIT"
8828 "
8829 {
8830 rtx pat;
8831
8832 if ((!REG_P (XEXP (operands[1], 0))
8833 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
8834 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
8835 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
8836 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
8837
8838 if (operands[3] == NULL_RTX)
8839 operands[3] = const0_rtx;
8840
8841 pat = gen_sibcall_value_internal (operands[0], operands[1],
8842 operands[2], operands[3]);
8843 arm_emit_call_insn (pat, operands[1], true);
8844 DONE;
8845 }"
8846 )
8847
8848 (define_insn "*sibcall_insn"
8849 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
8850 (match_operand 1 "" ""))
8851 (return)
8852 (use (match_operand 2 "" ""))]
8853 "TARGET_32BIT && SIBLING_CALL_P (insn)"
8854 "*
8855 if (which_alternative == 1)
8856 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8857 else
8858 {
8859 if (arm_arch5t || arm_arch4t)
8860 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
8861 else
8862 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
8863 }
8864 "
8865 [(set_attr "type" "call")]
8866 )
8867
8868 (define_insn "*sibcall_value_insn"
8869 [(set (match_operand 0 "" "")
8870 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
8871 (match_operand 2 "" "")))
8872 (return)
8873 (use (match_operand 3 "" ""))]
8874 "TARGET_32BIT && SIBLING_CALL_P (insn)"
8875 "*
8876 if (which_alternative == 1)
8877 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8878 else
8879 {
8880 if (arm_arch5t || arm_arch4t)
8881 return \"bx%?\\t%1\";
8882 else
8883 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
8884 }
8885 "
8886 [(set_attr "type" "call")]
8887 )
8888
8889 (define_expand "<return_str>return"
8890 [(RETURNS)]
8891 "(TARGET_ARM || (TARGET_THUMB2
8892 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
8893 && !IS_STACKALIGN (arm_current_func_type ())))
8894 <return_cond_false>"
8895 "
8896 {
8897 if (TARGET_THUMB2)
8898 {
8899 thumb2_expand_return (<return_simple_p>);
8900 DONE;
8901 }
8902 }
8903 "
8904 )
8905
8906 ;; Often the return insn will be the same as loading from memory, so set the
;; type attribute to load_4.
8907 (define_insn "*arm_return"
8908 [(return)]
8909 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8910 "*
8911 {
8912 if (arm_ccfsm_state == 2)
8913 {
8914 arm_ccfsm_state += 2;
8915 return \"\";
8916 }
8917 return output_return_instruction (const_true_rtx, true, false, false);
8918 }"
8919 [(set_attr "type" "load_4")
8920 (set_attr "length" "12")
8921 (set_attr "predicable" "yes")]
8922 )
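;; Purely as an illustration, output_return_instruction usually emits a single
;; load-multiple restoring the saved registers and the return address, e.g.
;;         ldmfd   sp!, {r4, r5, pc}
;; or just "bx lr" when nothing was saved; the 12-byte length is a worst case.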
8923
8924 (define_insn "*cond_<return_str>return"
8925 [(set (pc)
8926 (if_then_else (match_operator 0 "arm_comparison_operator"
8927 [(match_operand 1 "cc_register" "") (const_int 0)])
8928 (RETURNS)
8929 (pc)))]
8930 "TARGET_ARM <return_cond_true>"
8931 "*
8932 {
8933 if (arm_ccfsm_state == 2)
8934 {
8935 arm_ccfsm_state += 2;
8936 return \"\";
8937 }
8938 return output_return_instruction (operands[0], true, false,
8939 <return_simple_p>);
8940 }"
8941 [(set_attr "conds" "use")
8942 (set_attr "length" "12")
8943 (set_attr "type" "load_4")]
8944 )
8945
8946 (define_insn "*cond_<return_str>return_inverted"
8947 [(set (pc)
8948 (if_then_else (match_operator 0 "arm_comparison_operator"
8949 [(match_operand 1 "cc_register" "") (const_int 0)])
8950 (pc)
8951 (RETURNS)))]
8952 "TARGET_ARM <return_cond_true>"
8953 "*
8954 {
8955 if (arm_ccfsm_state == 2)
8956 {
8957 arm_ccfsm_state += 2;
8958 return \"\";
8959 }
8960 return output_return_instruction (operands[0], true, true,
8961 <return_simple_p>);
8962 }"
8963 [(set_attr "conds" "use")
8964 (set_attr "length" "12")
8965 (set_attr "type" "load_4")]
8966 )
8967
8968 (define_insn "*arm_simple_return"
8969 [(simple_return)]
8970 "TARGET_ARM"
8971 "*
8972 {
8973 if (arm_ccfsm_state == 2)
8974 {
8975 arm_ccfsm_state += 2;
8976 return \"\";
8977 }
8978 return output_return_instruction (const_true_rtx, true, false, true);
8979 }"
8980 [(set_attr "type" "branch")
8981 (set_attr "length" "4")
8982 (set_attr "predicable" "yes")]
8983 )
8984
8985 ;; Generate a sequence of instructions to determine if the processor is
8986 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8987 ;; mask.
8988
8989 (define_expand "return_addr_mask"
8990 [(set (match_dup 1)
8991 (compare:CC_NZ (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8992 (const_int 0)))
8993 (set (match_operand:SI 0 "s_register_operand")
8994 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8995 (const_int -1)
8996 (const_int 67108860)))] ; 0x03fffffc
8997 "TARGET_ARM"
8998 "
8999 operands[1] = gen_rtx_REG (CC_NZmode, CC_REGNUM);
9000 ")
9001
9002 (define_insn "*check_arch2"
9003 [(set (match_operand:CC_NZ 0 "cc_register" "")
9004 (compare:CC_NZ (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
9005 (const_int 0)))]
9006 "TARGET_ARM"
9007 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
9008 [(set_attr "length" "8")
9009 (set_attr "conds" "set")
9010 (set_attr "type" "multiple")]
9011 )
9012
9013 ;; Call subroutine returning any type.
9014
9015 (define_expand "untyped_call"
9016 [(parallel [(call (match_operand 0 "" "")
9017 (const_int 0))
9018 (match_operand 1 "" "")
9019 (match_operand 2 "" "")])]
9020 "TARGET_EITHER && !TARGET_FDPIC"
9021 "
9022 {
9023 int i;
9024 rtx par = gen_rtx_PARALLEL (VOIDmode,
9025 rtvec_alloc (XVECLEN (operands[2], 0)));
9026 rtx addr = gen_reg_rtx (Pmode);
9027 rtx mem;
9028 int size = 0;
9029
9030 emit_move_insn (addr, XEXP (operands[1], 0));
9031 mem = change_address (operands[1], BLKmode, addr);
9032
9033 for (i = 0; i < XVECLEN (operands[2], 0); i++)
9034 {
9035 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
9036
9037 /* Default code only uses r0 as a return value, but we could
9038 be using anything up to 4 registers. */
9039 if (REGNO (src) == R0_REGNUM)
9040 src = gen_rtx_REG (TImode, R0_REGNUM);
9041
9042 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
9043 GEN_INT (size));
9044 size += GET_MODE_SIZE (GET_MODE (src));
9045 }
9046
9047 emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
9048
9049 size = 0;
9050
9051 for (i = 0; i < XVECLEN (par, 0); i++)
9052 {
9053 HOST_WIDE_INT offset = 0;
9054 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
9055
9056 if (size != 0)
9057 emit_move_insn (addr, plus_constant (Pmode, addr, size));
9058
9059 mem = change_address (mem, GET_MODE (reg), NULL);
9060 if (REGNO (reg) == R0_REGNUM)
9061 {
9062 /* On Thumb we have to use a write-back instruction. */
9063 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
9064 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
9065 size = TARGET_ARM ? 16 : 0;
9066 }
9067 else
9068 {
9069 emit_move_insn (mem, reg);
9070 size = GET_MODE_SIZE (GET_MODE (reg));
9071 }
9072 }
9073
9074 /* The optimizer does not know that the call sets the function value
9075 registers we stored in the result block. We avoid problems by
9076 claiming that all hard registers are used and clobbered at this
9077 point. */
9078 emit_insn (gen_blockage ());
9079
9080 DONE;
9081 }"
9082 )
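;; untyped_call (and untyped_return below) back __builtin_apply and
;; __builtin_return, e.g. __builtin_apply ((void (*) ()) f, args, 16)
;; is routed through this expander (illustrative use only).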
9083
9084 (define_expand "untyped_return"
9085 [(match_operand:BLK 0 "memory_operand")
9086 (match_operand 1 "" "")]
9087 "TARGET_EITHER && !TARGET_FDPIC"
9088 "
9089 {
9090 int i;
9091 rtx addr = gen_reg_rtx (Pmode);
9092 rtx mem;
9093 int size = 0;
9094
9095 emit_move_insn (addr, XEXP (operands[0], 0));
9096 mem = change_address (operands[0], BLKmode, addr);
9097
9098 for (i = 0; i < XVECLEN (operands[1], 0); i++)
9099 {
9100 HOST_WIDE_INT offset = 0;
9101 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
9102
9103 if (size != 0)
9104 emit_move_insn (addr, plus_constant (Pmode, addr, size));
9105
9106 mem = change_address (mem, GET_MODE (reg), NULL);
9107 if (REGNO (reg) == R0_REGNUM)
9108 {
9109 /* On Thumb we have to use a write-back instruction. */
9110 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
9111 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
9112 size = TARGET_ARM ? 16 : 0;
9113 }
9114 else
9115 {
9116 emit_move_insn (reg, mem);
9117 size = GET_MODE_SIZE (GET_MODE (reg));
9118 }
9119 }
9120
9121 /* Emit USE insns before the return. */
9122 for (i = 0; i < XVECLEN (operands[1], 0); i++)
9123 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
9124
9125 /* Construct the return. */
9126 expand_naked_return ();
9127
9128 DONE;
9129 }"
9130 )
9131
9132 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
9133 ;; all of memory. This blocks insns from being moved across this point.
9134
9135 (define_insn "blockage"
9136 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
9137 "TARGET_EITHER"
9138 ""
9139 [(set_attr "length" "0")
9140 (set_attr "type" "block")]
9141 )
9142
9143 ;; Since we hard-code r0 here, use the 'o' constraint to prevent
9144 ;; provoking undefined behaviour in the hardware by emitting auto-increment
9145 ;; operations that could have r0 as the base register.
9146 (define_insn "probe_stack"
9147 [(set (match_operand:SI 0 "memory_operand" "=o")
9148 (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
9149 "TARGET_32BIT"
9150 "str%?\\tr0, %0"
9151 [(set_attr "type" "store_4")
9152 (set_attr "predicable" "yes")]
9153 )
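;; For instance, a probe comes out as something like
;;         str     r0, [sp, #-4080]
;; (offset illustrative).  A writeback form such as "str r0, [r0], #-4080"
;; would be UNPREDICTABLE because the base register equals the stored
;; register, which is what the offsettable 'o' constraint rules out.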
9154
9155 (define_insn "probe_stack_range"
9156 [(set (match_operand:SI 0 "register_operand" "=r")
9157 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
9158 (match_operand:SI 2 "register_operand" "r")]
9159 VUNSPEC_PROBE_STACK_RANGE))]
9160 "TARGET_32BIT"
9161 {
9162 return output_probe_stack_range (operands[0], operands[2]);
9163 }
9164 [(set_attr "type" "multiple")
9165 (set_attr "conds" "clob")]
9166 )
9167
9168 ;; Named patterns for stack smashing protection.
9169 (define_expand "stack_protect_combined_set"
9170 [(parallel
9171 [(set (match_operand:SI 0 "memory_operand")
9172 (unspec:SI [(match_operand:SI 1 "guard_operand")]
9173 UNSPEC_SP_SET))
9174 (clobber (match_scratch:SI 2 ""))
9175 (clobber (match_scratch:SI 3 ""))])]
9176 ""
9177 ""
9178 )
9179
9180 ;; Use a separate insn from the above expand so that the mem stays outside
9181 ;; operand #1 when register allocation happens.  This is needed to stop LRA
9182 ;; from trying to reload the guard, since we need to control how PIC access
9183 ;; is done in the -fpic/-fPIC case (see the COMPUTE_NOW parameter of
9184 ;; legitimize_pic_address ()).
9185 (define_insn_and_split "*stack_protect_combined_set_insn"
9186 [(set (match_operand:SI 0 "memory_operand" "=m,m")
9187 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
9188 UNSPEC_SP_SET))
9189 (clobber (match_scratch:SI 2 "=&l,&r"))
9190 (clobber (match_scratch:SI 3 "=&l,&r"))]
9191 ""
9192 "#"
9193 "reload_completed"
9194 [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
9195 UNSPEC_SP_SET))
9196 (clobber (match_dup 2))])]
9197 "
9198 {
9199 if (flag_pic)
9200 {
9201 rtx pic_reg;
9202
9203 if (TARGET_FDPIC)
9204 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
9205 else
9206 pic_reg = operands[3];
9207
9208 /* Forces recomputing of GOT base now. */
9209 legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
9210 true /*compute_now*/);
9211 }
9212 else
9213 {
9214 if (address_operand (operands[1], SImode))
9215 operands[2] = operands[1];
9216 else
9217 {
9218 rtx mem = force_const_mem (SImode, operands[1]);
9219 emit_move_insn (operands[2], mem);
9220 }
9221 }
9222 }"
9223 [(set_attr "arch" "t1,32")]
9224 )
9225
9226 ;; DO NOT SPLIT THIS INSN. It's important for security reasons that the
9227 ;; canary value does not live beyond the life of this sequence.
9228 (define_insn "*stack_protect_set_insn"
9229 [(set (match_operand:SI 0 "memory_operand" "=m,m")
9230 (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
9231 UNSPEC_SP_SET))
9232 (clobber (match_dup 1))]
9233 ""
9234 "@
9235 ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
9236 ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
9237 [(set_attr "length" "8,12")
9238 (set_attr "conds" "clob,nocond")
9239 (set_attr "type" "multiple")
9240 (set_attr "arch" "t1,32")]
9241 )
9242
9243 (define_expand "stack_protect_combined_test"
9244 [(parallel
9245 [(set (pc)
9246 (if_then_else
9247 (eq (match_operand:SI 0 "memory_operand")
9248 (unspec:SI [(match_operand:SI 1 "guard_operand")]
9249 UNSPEC_SP_TEST))
9250 (label_ref (match_operand 2))
9251 (pc)))
9252 (clobber (match_scratch:SI 3 ""))
9253 (clobber (match_scratch:SI 4 ""))
9254 (clobber (reg:CC CC_REGNUM))])]
9255 ""
9256 ""
9257 )
9258
9259 ;; Use a separate insn from the above expand so that the mem stays outside
9260 ;; operand #1 when register allocation happens.  This is needed to stop LRA
9261 ;; from trying to reload the guard, since we need to control how PIC access
9262 ;; is done in the -fpic/-fPIC case (see the COMPUTE_NOW parameter of
9263 ;; legitimize_pic_address ()).
9264 (define_insn_and_split "*stack_protect_combined_test_insn"
9265 [(set (pc)
9266 (if_then_else
9267 (eq (match_operand:SI 0 "memory_operand" "m,m")
9268 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
9269 UNSPEC_SP_TEST))
9270 (label_ref (match_operand 2))
9271 (pc)))
9272 (clobber (match_scratch:SI 3 "=&l,&r"))
9273 (clobber (match_scratch:SI 4 "=&l,&r"))
9274 (clobber (reg:CC CC_REGNUM))]
9275 ""
9276 "#"
9277 "reload_completed"
9278 [(const_int 0)]
9279 {
9280 rtx eq;
9281
9282 if (flag_pic)
9283 {
9284 rtx pic_reg;
9285
9286 if (TARGET_FDPIC)
9287 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
9288 else
9289 pic_reg = operands[4];
9290
9291 /* Forces recomputing of GOT base now. */
9292 legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
9293 true /*compute_now*/);
9294 }
9295 else
9296 {
9297 if (address_operand (operands[1], SImode))
9298 operands[3] = operands[1];
9299 else
9300 {
9301 rtx mem = force_const_mem (SImode, operands[1]);
9302 emit_move_insn (operands[3], mem);
9303 }
9304 }
9305 if (TARGET_32BIT)
9306 {
9307 emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
9308 operands[3]));
9309 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
9310 eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
9311 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
9312 }
9313 else
9314 {
9315 emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
9316 operands[3]));
9317 eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
9318 emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
9319 operands[2]));
9320 }
9321 DONE;
9322 }
9323 [(set_attr "arch" "t1,32")]
9324 )
9325
9326 ;; DO NOT SPLIT THIS PATTERN. It is important for security reasons that the
9327 ;; canary value does not live beyond the end of this sequence.
9328 (define_insn "arm_stack_protect_test_insn"
9329 [(set (reg:CC_Z CC_REGNUM)
9330 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
9331 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
9332 UNSPEC_SP_TEST)
9333 (const_int 0)))
9334 (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
9335 (clobber (match_dup 2))]
9336 "TARGET_32BIT"
9337 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0\;mov\t%2, #0"
9338 [(set_attr "length" "12,16")
9339 (set_attr "conds" "set")
9340 (set_attr "type" "multiple")
9341 (set_attr "arch" "t,32")]
9342 )
9343
9344 (define_expand "casesi"
9345 [(match_operand:SI 0 "s_register_operand") ; index to jump on
9346 (match_operand:SI 1 "const_int_operand") ; lower bound
9347 (match_operand:SI 2 "const_int_operand") ; total range
9348 (match_operand:SI 3 "" "") ; table label
9349 (match_operand:SI 4 "" "")] ; Out of range label
9350 "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
9351 "
9352 {
9353 enum insn_code code;
9354 if (operands[1] != const0_rtx)
9355 {
9356 rtx reg = gen_reg_rtx (SImode);
9357
9358 emit_insn (gen_addsi3 (reg, operands[0],
9359 gen_int_mode (-INTVAL (operands[1]),
9360 SImode)));
9361 operands[0] = reg;
9362 }
9363
9364 if (TARGET_ARM)
9365 code = CODE_FOR_arm_casesi_internal;
9366 else if (TARGET_THUMB1)
9367 code = CODE_FOR_thumb1_casesi_internal_pic;
9368 else if (flag_pic)
9369 code = CODE_FOR_thumb2_casesi_internal_pic;
9370 else
9371 code = CODE_FOR_thumb2_casesi_internal;
9372
9373 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
9374 operands[2] = force_reg (SImode, operands[2]);
9375
9376 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
9377 operands[3], operands[4]));
9378 DONE;
9379 }"
9380 )
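;; As a rough example, a dense C switch such as
;;   switch (i) { case 0: ...; case 1: ...; /* ... */ case 9: ...; default: ...; }
;; is lowered through this expander (assuming a jump table is worthwhile):
;; the index is biased by the lower bound and then dispatched by one of the
;; *casesi_internal patterns below.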
9381
9382 ;; The USE in this pattern is needed to tell flow analysis that this is
9383 ;; a CASESI insn. It has no other purpose.
9384 (define_expand "arm_casesi_internal"
9385 [(parallel [(set (pc)
9386 (if_then_else
9387 (leu (match_operand:SI 0 "s_register_operand")
9388 (match_operand:SI 1 "arm_rhs_operand"))
9389 (match_dup 4)
9390 (label_ref:SI (match_operand 3 ""))))
9391 (clobber (reg:CC CC_REGNUM))
9392 (use (label_ref:SI (match_operand 2 "")))])]
9393 "TARGET_ARM"
9394 {
9395 operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
9396 operands[4] = gen_rtx_PLUS (SImode, operands[4],
9397 gen_rtx_LABEL_REF (SImode, operands[2]));
9398 operands[4] = gen_rtx_MEM (SImode, operands[4]);
9399 MEM_READONLY_P (operands[4]) = 1;
9400 MEM_NOTRAP_P (operands[4]) = 1;
9401 })
9402
9403 (define_insn "*arm_casesi_internal"
9404 [(parallel [(set (pc)
9405 (if_then_else
9406 (leu (match_operand:SI 0 "s_register_operand" "r")
9407 (match_operand:SI 1 "arm_rhs_operand" "rI"))
9408 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
9409 (label_ref:SI (match_operand 2 "" ""))))
9410 (label_ref:SI (match_operand 3 "" ""))))
9411 (clobber (reg:CC CC_REGNUM))
9412 (use (label_ref:SI (match_dup 2)))])]
9413 "TARGET_ARM"
9414 "*
9415 if (flag_pic)
9416 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
9417 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
9418 "
9419 [(set_attr "conds" "clob")
9420 (set_attr "length" "12")
9421 (set_attr "type" "multiple")]
9422 )
9423
9424 (define_expand "indirect_jump"
9425 [(set (pc)
9426 (match_operand:SI 0 "s_register_operand"))]
9427 "TARGET_EITHER"
9428 "
9429 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
9430 address and use bx. */
9431 if (TARGET_THUMB2)
9432 {
9433 rtx tmp;
9434 tmp = gen_reg_rtx (SImode);
9435 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
9436 operands[0] = tmp;
9437 }
9438 "
9439 )
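;; For example, an indirect "goto *p" with the target in r0 might become, on
;; Thumb-2 (illustrative register choice):
;;         orr     r3, r0, #1      @ set the Thumb bit
;;         bx      r3
;; while ARM state uses the *arm_indirect_jump pattern below.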
9440
9441 ;; NB Never uses BX.
9442 (define_insn "*arm_indirect_jump"
9443 [(set (pc)
9444 (match_operand:SI 0 "s_register_operand" "r"))]
9445 "TARGET_ARM"
9446 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
9447 [(set_attr "predicable" "yes")
9448 (set_attr "type" "branch")]
9449 )
9450
9451 (define_insn "*load_indirect_jump"
9452 [(set (pc)
9453 (match_operand:SI 0 "memory_operand" "m"))]
9454 "TARGET_ARM"
9455 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
9456 [(set_attr "type" "load_4")
9457 (set_attr "pool_range" "4096")
9458 (set_attr "neg_pool_range" "4084")
9459 (set_attr "predicable" "yes")]
9460 )
9461
9462 \f
9463 ;; Misc insns
9464
9465 (define_insn "nop"
9466 [(const_int 0)]
9467 "TARGET_EITHER"
9468 "nop"
9469 [(set (attr "length")
9470 (if_then_else (eq_attr "is_thumb" "yes")
9471 (const_int 2)
9472 (const_int 4)))
9473 (set_attr "type" "mov_reg")]
9474 )
9475
9476 (define_insn "trap"
9477 [(trap_if (const_int 1) (const_int 0))]
9478 ""
9479 "*
9480 if (TARGET_ARM)
9481 return \".inst\\t0xe7f000f0\";
9482 else
9483 return \".inst\\t0xdeff\";
9484 "
9485 [(set (attr "length")
9486 (if_then_else (eq_attr "is_thumb" "yes")
9487 (const_int 2)
9488 (const_int 4)))
9489 (set_attr "type" "trap")
9490 (set_attr "conds" "unconditional")]
9491 )
9492
9493 \f
9494 ;; Patterns to allow combination of arithmetic, cond code and shifts
9495
9496 (define_insn "*<arith_shift_insn>_multsi"
9497 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9498 (SHIFTABLE_OPS:SI
9499 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
9500 (match_operand:SI 3 "power_of_two_operand" ""))
9501 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
9502 "TARGET_32BIT"
9503 "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
9504 [(set_attr "predicable" "yes")
9505 (set_attr "shift" "2")
9506 (set_attr "arch" "a,t2")
9507 (set_attr "autodetect_type" "alu_shift_mul_op3")])
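;; E.g. an expression like "a + b * 8" matches this pattern and is emitted as
;; a single "add r0, r1, r2, lsl #3" (illustrative registers), the
;; power-of-two multiply being folded into the shifter operand.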
9508
9509 (define_insn "*<arith_shift_insn>_shiftsi"
9510 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9511 (SHIFTABLE_OPS:SI
9512 (match_operator:SI 2 "shift_nomul_operator"
9513 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9514 (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
9515 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
9516 "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
9517 "<arith_shift_insn>%?\\t%0, %1, %3%S2"
9518 [(set_attr "predicable" "yes")
9519 (set_attr "shift" "3")
9520 (set_attr "arch" "a,t2,a")
9521 (set_attr "autodetect_type" "alu_shift_operator2")])
9522
9523 (define_split
9524 [(set (match_operand:SI 0 "s_register_operand" "")
9525 (match_operator:SI 1 "shiftable_operator"
9526 [(match_operator:SI 2 "shiftable_operator"
9527 [(match_operator:SI 3 "shift_operator"
9528 [(match_operand:SI 4 "s_register_operand" "")
9529 (match_operand:SI 5 "reg_or_int_operand" "")])
9530 (match_operand:SI 6 "s_register_operand" "")])
9531 (match_operand:SI 7 "arm_rhs_operand" "")]))
9532 (clobber (match_operand:SI 8 "s_register_operand" ""))]
9533 "TARGET_32BIT"
9534 [(set (match_dup 8)
9535 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9536 (match_dup 6)]))
9537 (set (match_dup 0)
9538 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
9539 "")
9540
9541 (define_insn "*arith_shiftsi_compare0"
9542 [(set (reg:CC_NZ CC_REGNUM)
9543 (compare:CC_NZ
9544 (match_operator:SI 1 "shiftable_operator"
9545 [(match_operator:SI 3 "shift_operator"
9546 [(match_operand:SI 4 "s_register_operand" "r,r")
9547 (match_operand:SI 5 "shift_amount_operand" "M,r")])
9548 (match_operand:SI 2 "s_register_operand" "r,r")])
9549 (const_int 0)))
9550 (set (match_operand:SI 0 "s_register_operand" "=r,r")
9551 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9552 (match_dup 2)]))]
9553 "TARGET_32BIT"
9554 "%i1s%?\\t%0, %2, %4%S3"
9555 [(set_attr "conds" "set")
9556 (set_attr "shift" "4")
9557 (set_attr "arch" "32,a")
9558 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9559
9560 (define_insn "*arith_shiftsi_compare0_scratch"
9561 [(set (reg:CC_NZ CC_REGNUM)
9562 (compare:CC_NZ
9563 (match_operator:SI 1 "shiftable_operator"
9564 [(match_operator:SI 3 "shift_operator"
9565 [(match_operand:SI 4 "s_register_operand" "r,r")
9566 (match_operand:SI 5 "shift_amount_operand" "M,r")])
9567 (match_operand:SI 2 "s_register_operand" "r,r")])
9568 (const_int 0)))
9569 (clobber (match_scratch:SI 0 "=r,r"))]
9570 "TARGET_32BIT"
9571 "%i1s%?\\t%0, %2, %4%S3"
9572 [(set_attr "conds" "set")
9573 (set_attr "shift" "4")
9574 (set_attr "arch" "32,a")
9575 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9576
9577 (define_insn "*sub_shiftsi"
9578 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9579 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9580 (match_operator:SI 2 "shift_operator"
9581 [(match_operand:SI 3 "s_register_operand" "r,r")
9582 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
9583 "TARGET_32BIT"
9584 "sub%?\\t%0, %1, %3%S2"
9585 [(set_attr "predicable" "yes")
9586 (set_attr "predicable_short_it" "no")
9587 (set_attr "shift" "3")
9588 (set_attr "arch" "32,a")
9589 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9590
9591 (define_insn "*sub_shiftsi_compare0"
9592 [(set (reg:CC_NZ CC_REGNUM)
9593 (compare:CC_NZ
9594 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9595 (match_operator:SI 2 "shift_operator"
9596 [(match_operand:SI 3 "s_register_operand" "r,r")
9597 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
9598 (const_int 0)))
9599 (set (match_operand:SI 0 "s_register_operand" "=r,r")
9600 (minus:SI (match_dup 1)
9601 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
9602 "TARGET_32BIT"
9603 "subs%?\\t%0, %1, %3%S2"
9604 [(set_attr "conds" "set")
9605 (set_attr "shift" "3")
9606 (set_attr "arch" "32,a")
9607 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9608
9609 (define_insn "*sub_shiftsi_compare0_scratch"
9610 [(set (reg:CC_NZ CC_REGNUM)
9611 (compare:CC_NZ
9612 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9613 (match_operator:SI 2 "shift_operator"
9614 [(match_operand:SI 3 "s_register_operand" "r,r")
9615 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
9616 (const_int 0)))
9617 (clobber (match_scratch:SI 0 "=r,r"))]
9618 "TARGET_32BIT"
9619 "subs%?\\t%0, %1, %3%S2"
9620 [(set_attr "conds" "set")
9621 (set_attr "shift" "3")
9622 (set_attr "arch" "32,a")
9623 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9624 \f
9625
9626 (define_insn_and_split "*and_scc"
9627 [(set (match_operand:SI 0 "s_register_operand" "=r")
9628 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9629 [(match_operand 2 "cc_register" "") (const_int 0)])
9630 (match_operand:SI 3 "s_register_operand" "r")))]
9631 "TARGET_ARM"
9632 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
9633 "&& reload_completed"
9634 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
9635 (cond_exec (match_dup 4) (set (match_dup 0)
9636 (and:SI (match_dup 3) (const_int 1))))]
9637 {
9638 machine_mode mode = GET_MODE (operands[2]);
9639 enum rtx_code rc = GET_CODE (operands[1]);
9640
9641 /* Note that operands[4] is the same as operands[1],
9642 but with VOIDmode as the result. */
9643 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9644 if (mode == CCFPmode || mode == CCFPEmode)
9645 rc = reverse_condition_maybe_unordered (rc);
9646 else
9647 rc = reverse_condition (rc);
9648 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9649 }
9650 [(set_attr "conds" "use")
9651 (set_attr "type" "multiple")
9652 (set_attr "length" "8")]
9653 )
9654
9655 (define_insn_and_split "*ior_scc"
9656 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9657 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
9658 [(match_operand 2 "cc_register" "") (const_int 0)])
9659 (match_operand:SI 3 "s_register_operand" "0,?r")))]
9660 "TARGET_ARM"
9661 "@
9662 orr%d1\\t%0, %3, #1
9663 #"
9664 "&& reload_completed
9665 && REGNO (operands [0]) != REGNO (operands[3])"
9666 ;; && which_alternative == 1
9667 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
9668 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
9669 (cond_exec (match_dup 4) (set (match_dup 0)
9670 (ior:SI (match_dup 3) (const_int 1))))]
9671 {
9672 machine_mode mode = GET_MODE (operands[2]);
9673 enum rtx_code rc = GET_CODE (operands[1]);
9674
9675 /* Note that operands[4] is the same as operands[1],
9676 but with VOIDmode as the result. */
9677 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9678 if (mode == CCFPmode || mode == CCFPEmode)
9679 rc = reverse_condition_maybe_unordered (rc);
9680 else
9681 rc = reverse_condition (rc);
9682 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9683 }
9684 [(set_attr "conds" "use")
9685 (set_attr "length" "4,8")
9686 (set_attr "type" "logic_imm,multiple")]
9687 )
9688
9689 ; A series of splitters for the compare_scc pattern below. Note that
9690 ; order is important.
9691 (define_split
9692 [(set (match_operand:SI 0 "s_register_operand" "")
9693 (lt:SI (match_operand:SI 1 "s_register_operand" "")
9694 (const_int 0)))
9695 (clobber (reg:CC CC_REGNUM))]
9696 "TARGET_32BIT && reload_completed"
9697 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
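;; Reasoning sketch: for signed x, (x < 0) is exactly the sign bit, so the
;; split above reduces it to a single "lsr Rd, Rm, #31".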
9698
9699 (define_split
9700 [(set (match_operand:SI 0 "s_register_operand" "")
9701 (ge:SI (match_operand:SI 1 "s_register_operand" "")
9702 (const_int 0)))
9703 (clobber (reg:CC CC_REGNUM))]
9704 "TARGET_32BIT && reload_completed"
9705 [(set (match_dup 0) (not:SI (match_dup 1)))
9706 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
9707
9708 (define_split
9709 [(set (match_operand:SI 0 "s_register_operand" "")
9710 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9711 (const_int 0)))
9712 (clobber (reg:CC CC_REGNUM))]
9713 "arm_arch5t && TARGET_32BIT"
9714 [(set (match_dup 0) (clz:SI (match_dup 1)))
9715 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9716 )
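;; Reasoning sketch: clz returns 32 only when its operand is zero; 32 >> 5 is
;; 1 and every smaller clz result shifts down to 0, so the two instructions
;; compute (x == 0) without needing the condition flags.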
9717
9718 (define_split
9719 [(set (match_operand:SI 0 "s_register_operand" "")
9720 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9721 (const_int 0)))
9722 (clobber (reg:CC CC_REGNUM))]
9723 "TARGET_32BIT && reload_completed"
9724 [(parallel
9725 [(set (reg:CC CC_REGNUM)
9726 (compare:CC (const_int 1) (match_dup 1)))
9727 (set (match_dup 0)
9728 (minus:SI (const_int 1) (match_dup 1)))])
9729 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
9730 (set (match_dup 0) (const_int 0)))])
9731
9732 (define_split
9733 [(set (match_operand:SI 0 "s_register_operand" "")
9734 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9735 (match_operand:SI 2 "const_int_operand" "")))
9736 (clobber (reg:CC CC_REGNUM))]
9737 "TARGET_32BIT && reload_completed"
9738 [(parallel
9739 [(set (reg:CC CC_REGNUM)
9740 (compare:CC (match_dup 1) (match_dup 2)))
9741 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
9742 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
9743 (set (match_dup 0) (const_int 1)))]
9744 {
9745 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
9746 })
9747
9748 (define_split
9749 [(set (match_operand:SI 0 "s_register_operand" "")
9750 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9751 (match_operand:SI 2 "arm_add_operand" "")))
9752 (clobber (reg:CC CC_REGNUM))]
9753 "TARGET_32BIT && reload_completed"
9754 [(parallel
9755 [(set (reg:CC_NZ CC_REGNUM)
9756 (compare:CC_NZ (minus:SI (match_dup 1) (match_dup 2))
9757 (const_int 0)))
9758 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
9759 (cond_exec (ne:CC_NZ (reg:CC_NZ CC_REGNUM) (const_int 0))
9760 (set (match_dup 0) (const_int 1)))])
9761
9762 (define_insn_and_split "*compare_scc"
9763 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9764 (match_operator:SI 1 "arm_comparison_operator"
9765 [(match_operand:SI 2 "s_register_operand" "r,r")
9766 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9767 (clobber (reg:CC CC_REGNUM))]
9768 "TARGET_32BIT"
9769 "#"
9770 "&& reload_completed"
9771 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
9772 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
9773 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
9774 {
9775 rtx tmp1;
9776 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
9777 operands[2], operands[3]);
9778 enum rtx_code rc = GET_CODE (operands[1]);
9779
9780 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
9781
9782 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9783 if (mode == CCFPmode || mode == CCFPEmode)
9784 rc = reverse_condition_maybe_unordered (rc);
9785 else
9786 rc = reverse_condition (rc);
9787 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9788 }
9789 [(set_attr "type" "multiple")]
9790 )
9791
9792 ;; Attempt to improve the sequence generated by the compare_scc splitters
9793 ;; so that it does not need conditional execution.
9794
9795 ;; Rd = (eq (reg1) (const_int 0)) // ARMv5
9796 ;; clz Rd, reg1
9797 ;; lsr Rd, Rd, #5
9798 (define_peephole2
9799 [(set (reg:CC CC_REGNUM)
9800 (compare:CC (match_operand:SI 1 "register_operand" "")
9801 (const_int 0)))
9802 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9803 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9804 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9805 (set (match_dup 0) (const_int 1)))]
9806 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9807 [(set (match_dup 0) (clz:SI (match_dup 1)))
9808 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9809 )
9810
9811 ;; Rd = (eq (reg1) (const_int 0)) // !ARMv5
9812 ;; negs Rd, reg1
9813 ;; adc Rd, Rd, reg1
9814 (define_peephole2
9815 [(set (reg:CC CC_REGNUM)
9816 (compare:CC (match_operand:SI 1 "register_operand" "")
9817 (const_int 0)))
9818 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9819 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9820 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9821 (set (match_dup 0) (const_int 1)))
9822 (match_scratch:SI 2 "r")]
9823 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9824 [(parallel
9825 [(set (reg:CC CC_REGNUM)
9826 (compare:CC (const_int 0) (match_dup 1)))
9827 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
9828 (set (match_dup 0)
9829 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
9830 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
9831 )
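;; Why this works (informal): "negs Rd, reg1" is "rsbs Rd, reg1, #0", which
;; sets the carry flag only when reg1 is zero (no borrow from 0 - reg1);
;; "adc Rd, Rd, reg1" then yields (-reg1) + reg1 + C = C, i.e. 1 exactly when
;; reg1 was zero.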
9832
9833 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
9834 ;; sub Rd, Reg1, reg2
9835 ;; clz Rd, Rd
9836 ;; lsr Rd, Rd, #5
9837 (define_peephole2
9838 [(set (reg:CC CC_REGNUM)
9839 (compare:CC (match_operand:SI 1 "register_operand" "")
9840 (match_operand:SI 2 "arm_rhs_operand" "")))
9841 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9842 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9843 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9844 (set (match_dup 0) (const_int 1)))]
9845 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
9846 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
9847 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
9848 (set (match_dup 0) (clz:SI (match_dup 0)))
9849 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9850 )
9851
9852
9853 ;; Rd = (eq (reg1) (reg2)) // !ARMv5 or optimising for size.
9854 ;; sub T1, Reg1, reg2
9855 ;; negs Rd, T1
9856 ;; adc Rd, Rd, T1
9857 (define_peephole2
9858 [(set (reg:CC CC_REGNUM)
9859 (compare:CC (match_operand:SI 1 "register_operand" "")
9860 (match_operand:SI 2 "arm_rhs_operand" "")))
9861 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9862 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9863 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9864 (set (match_dup 0) (const_int 1)))
9865 (match_scratch:SI 3 "r")]
9866 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9867 [(set (match_dup 3) (match_dup 4))
9868 (parallel
9869 [(set (reg:CC CC_REGNUM)
9870 (compare:CC (const_int 0) (match_dup 3)))
9871 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
9872 (set (match_dup 0)
9873 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
9874 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
9875 "
9876 if (CONST_INT_P (operands[2]))
9877 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
9878 else
9879 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
9880 ")
9881
9882 (define_insn "*cond_move"
9883 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9884 (if_then_else:SI (match_operator 3 "equality_operator"
9885 [(match_operator 4 "arm_comparison_operator"
9886 [(match_operand 5 "cc_register" "") (const_int 0)])
9887 (const_int 0)])
9888 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9889 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9890 "TARGET_ARM"
9891 "*
9892 if (GET_CODE (operands[3]) == NE)
9893 {
9894 if (which_alternative != 1)
9895 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9896 if (which_alternative != 0)
9897 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9898 return \"\";
9899 }
9900 if (which_alternative != 0)
9901 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9902 if (which_alternative != 1)
9903 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9904 return \"\";
9905 "
9906 [(set_attr "conds" "use")
9907 (set_attr_alternative "type"
9908 [(if_then_else (match_operand 2 "const_int_operand" "")
9909 (const_string "mov_imm")
9910 (const_string "mov_reg"))
9911 (if_then_else (match_operand 1 "const_int_operand" "")
9912 (const_string "mov_imm")
9913 (const_string "mov_reg"))
9914 (const_string "multiple")])
9915 (set_attr "length" "4,4,8")]
9916 )
9917
9918 (define_insn "*cond_arith"
9919 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9920 (match_operator:SI 5 "shiftable_operator"
9921 [(match_operator:SI 4 "arm_comparison_operator"
9922 [(match_operand:SI 2 "s_register_operand" "r,r")
9923 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9924 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9925 (clobber (reg:CC CC_REGNUM))]
9926 "TARGET_ARM"
9927 "*
9928 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9929 return \"%i5\\t%0, %1, %2, lsr #31\";
9930
9931 output_asm_insn (\"cmp\\t%2, %3\", operands);
9932 if (GET_CODE (operands[5]) == AND)
9933 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9934 else if (GET_CODE (operands[5]) == MINUS)
9935 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9936 else if (which_alternative != 0)
9937 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9938 return \"%i5%d4\\t%0, %1, #1\";
9939 "
9940 [(set_attr "conds" "clob")
9941 (set_attr "length" "12")
9942 (set_attr "type" "multiple")]
9943 )
9944
9945 (define_insn "*cond_sub"
9946 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9947 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9948 (match_operator:SI 4 "arm_comparison_operator"
9949 [(match_operand:SI 2 "s_register_operand" "r,r")
9950 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9951 (clobber (reg:CC CC_REGNUM))]
9952 "TARGET_ARM"
9953 "*
9954 output_asm_insn (\"cmp\\t%2, %3\", operands);
9955 if (which_alternative != 0)
9956 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9957 return \"sub%d4\\t%0, %1, #1\";
9958 "
9959 [(set_attr "conds" "clob")
9960 (set_attr "length" "8,12")
9961 (set_attr "type" "multiple")]
9962 )
9963
9964 (define_insn "*cmp_ite0"
9965 [(set (match_operand 6 "dominant_cc_register" "")
9966 (compare
9967 (if_then_else:SI
9968 (match_operator 4 "arm_comparison_operator"
9969 [(match_operand:SI 0 "s_register_operand"
9970 "l,l,l,r,r,r,r,r,r")
9971 (match_operand:SI 1 "arm_add_operand"
9972 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9973 (match_operator:SI 5 "arm_comparison_operator"
9974 [(match_operand:SI 2 "s_register_operand"
9975 "l,r,r,l,l,r,r,r,r")
9976 (match_operand:SI 3 "arm_add_operand"
9977 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9978 (const_int 0))
9979 (const_int 0)))]
9980 "TARGET_32BIT"
9981 "*
9982 {
9983 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9984 {
9985 {\"cmp%d5\\t%0, %1\",
9986 \"cmp%d4\\t%2, %3\"},
9987 {\"cmn%d5\\t%0, #%n1\",
9988 \"cmp%d4\\t%2, %3\"},
9989 {\"cmp%d5\\t%0, %1\",
9990 \"cmn%d4\\t%2, #%n3\"},
9991 {\"cmn%d5\\t%0, #%n1\",
9992 \"cmn%d4\\t%2, #%n3\"}
9993 };
9994 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9995 {
9996 {\"cmp\\t%2, %3\",
9997 \"cmp\\t%0, %1\"},
9998 {\"cmp\\t%2, %3\",
9999 \"cmn\\t%0, #%n1\"},
10000 {\"cmn\\t%2, #%n3\",
10001 \"cmp\\t%0, %1\"},
10002 {\"cmn\\t%2, #%n3\",
10003 \"cmn\\t%0, #%n1\"}
10004 };
10005 static const char * const ite[2] =
10006 {
10007 \"it\\t%d5\",
10008 \"it\\t%d4\"
10009 };
10010 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10011 CMP_CMP, CMN_CMP, CMP_CMP,
10012 CMN_CMP, CMP_CMN, CMN_CMN};
10013 int swap =
10014 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10015
10016 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10017 if (TARGET_THUMB2) {
10018 output_asm_insn (ite[swap], operands);
10019 }
10020 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10021 return \"\";
10022 }"
10023 [(set_attr "conds" "set")
10024 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10025 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
10026 (set_attr "type" "multiple")
10027 (set_attr_alternative "length"
10028 [(const_int 6)
10029 (const_int 8)
10030 (const_int 8)
10031 (const_int 8)
10032 (const_int 8)
10033 (if_then_else (eq_attr "is_thumb" "no")
10034 (const_int 8)
10035 (const_int 10))
10036 (if_then_else (eq_attr "is_thumb" "no")
10037 (const_int 8)
10038 (const_int 10))
10039 (if_then_else (eq_attr "is_thumb" "no")
10040 (const_int 8)
10041 (const_int 10))
10042 (if_then_else (eq_attr "is_thumb" "no")
10043 (const_int 8)
10044 (const_int 10))])]
10045 )
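;; Illustrative expansion: with a in r0 and b in r1, a condition such as
;; (a == 1 && b == 2) can come out as
;;         cmp     r1, #2
;;         cmpeq   r0, #1
;; If the first compare fails, the conditional compare is skipped and the
;; flags already encode the failure; which test goes first depends on which
;; condition dominates the other.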
10046
10047 (define_insn "*cmp_ite1"
10048 [(set (match_operand 6 "dominant_cc_register" "")
10049 (compare
10050 (if_then_else:SI
10051 (match_operator 4 "arm_comparison_operator"
10052 [(match_operand:SI 0 "s_register_operand"
10053 "l,l,l,r,r,r,r,r,r")
10054 (match_operand:SI 1 "arm_add_operand"
10055 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10056 (match_operator:SI 5 "arm_comparison_operator"
10057 [(match_operand:SI 2 "s_register_operand"
10058 "l,r,r,l,l,r,r,r,r")
10059 (match_operand:SI 3 "arm_add_operand"
10060 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
10061 (const_int 1))
10062 (const_int 0)))]
10063 "TARGET_32BIT"
10064 "*
10065 {
10066 static const char * const cmp1[NUM_OF_COND_CMP][2] =
10067 {
10068 {\"cmp\\t%0, %1\",
10069 \"cmp\\t%2, %3\"},
10070 {\"cmn\\t%0, #%n1\",
10071 \"cmp\\t%2, %3\"},
10072 {\"cmp\\t%0, %1\",
10073 \"cmn\\t%2, #%n3\"},
10074 {\"cmn\\t%0, #%n1\",
10075 \"cmn\\t%2, #%n3\"}
10076 };
10077 static const char * const cmp2[NUM_OF_COND_CMP][2] =
10078 {
10079 {\"cmp%d4\\t%2, %3\",
10080 \"cmp%D5\\t%0, %1\"},
10081 {\"cmp%d4\\t%2, %3\",
10082 \"cmn%D5\\t%0, #%n1\"},
10083 {\"cmn%d4\\t%2, #%n3\",
10084 \"cmp%D5\\t%0, %1\"},
10085 {\"cmn%d4\\t%2, #%n3\",
10086 \"cmn%D5\\t%0, #%n1\"}
10087 };
10088 static const char * const ite[2] =
10089 {
10090 \"it\\t%d4\",
10091 \"it\\t%D5\"
10092 };
10093 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10094 CMP_CMP, CMN_CMP, CMP_CMP,
10095 CMN_CMP, CMP_CMN, CMN_CMN};
10096 int swap =
10097 comparison_dominates_p (GET_CODE (operands[5]),
10098 reverse_condition (GET_CODE (operands[4])));
10099
10100 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10101 if (TARGET_THUMB2) {
10102 output_asm_insn (ite[swap], operands);
10103 }
10104 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10105 return \"\";
10106 }"
10107 [(set_attr "conds" "set")
10108 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10109 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
10110 (set_attr_alternative "length"
10111 [(const_int 6)
10112 (const_int 8)
10113 (const_int 8)
10114 (const_int 8)
10115 (const_int 8)
10116 (if_then_else (eq_attr "is_thumb" "no")
10117 (const_int 8)
10118 (const_int 10))
10119 (if_then_else (eq_attr "is_thumb" "no")
10120 (const_int 8)
10121 (const_int 10))
10122 (if_then_else (eq_attr "is_thumb" "no")
10123 (const_int 8)
10124 (const_int 10))
10125 (if_then_else (eq_attr "is_thumb" "no")
10126 (const_int 8)
10127 (const_int 10))])
10128 (set_attr "type" "multiple")]
10129 )
10130
10131 (define_insn "*cmp_and"
10132 [(set (match_operand 6 "dominant_cc_register" "")
10133 (compare
10134 (and:SI
10135 (match_operator 4 "arm_comparison_operator"
10136 [(match_operand:SI 0 "s_register_operand"
10137 "l,l,l,r,r,r,r,r,r,r")
10138 (match_operand:SI 1 "arm_add_operand"
10139 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
10140 (match_operator:SI 5 "arm_comparison_operator"
10141 [(match_operand:SI 2 "s_register_operand"
10142 "l,r,r,l,l,r,r,r,r,r")
10143 (match_operand:SI 3 "arm_add_operand"
10144 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
10145 (const_int 0)))]
10146 "TARGET_32BIT"
10147 "*
10148 {
10149 static const char *const cmp1[NUM_OF_COND_CMP][2] =
10150 {
10151 {\"cmp%d5\\t%0, %1\",
10152 \"cmp%d4\\t%2, %3\"},
10153 {\"cmn%d5\\t%0, #%n1\",
10154 \"cmp%d4\\t%2, %3\"},
10155 {\"cmp%d5\\t%0, %1\",
10156 \"cmn%d4\\t%2, #%n3\"},
10157 {\"cmn%d5\\t%0, #%n1\",
10158 \"cmn%d4\\t%2, #%n3\"}
10159 };
10160 static const char *const cmp2[NUM_OF_COND_CMP][2] =
10161 {
10162 {\"cmp\\t%2, %3\",
10163 \"cmp\\t%0, %1\"},
10164 {\"cmp\\t%2, %3\",
10165 \"cmn\\t%0, #%n1\"},
10166 {\"cmn\\t%2, #%n3\",
10167 \"cmp\\t%0, %1\"},
10168 {\"cmn\\t%2, #%n3\",
10169 \"cmn\\t%0, #%n1\"}
10170 };
10171 static const char *const ite[2] =
10172 {
10173 \"it\\t%d5\",
10174 \"it\\t%d4\"
10175 };
10176 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
10177 CMP_CMP, CMN_CMP, CMP_CMP,
10178 CMP_CMP, CMN_CMP, CMP_CMN,
10179 CMN_CMN};
10180 int swap =
10181 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10182
10183 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10184 if (TARGET_THUMB2) {
10185 output_asm_insn (ite[swap], operands);
10186 }
10187 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10188 return \"\";
10189 }"
10190 [(set_attr "conds" "set")
10191 (set_attr "predicable" "no")
10192 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
10193 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
10194 (set_attr_alternative "length"
10195 [(const_int 6)
10196 (const_int 8)
10197 (const_int 8)
10198 (const_int 8)
10199 (const_int 8)
10200 (const_int 6)
10201 (if_then_else (eq_attr "is_thumb" "no")
10202 (const_int 8)
10203 (const_int 10))
10204 (if_then_else (eq_attr "is_thumb" "no")
10205 (const_int 8)
10206 (const_int 10))
10207 (if_then_else (eq_attr "is_thumb" "no")
10208 (const_int 8)
10209 (const_int 10))
10210 (if_then_else (eq_attr "is_thumb" "no")
10211 (const_int 8)
10212 (const_int 10))])
10213 (set_attr "type" "multiple")]
10214 )
10215
10216 (define_insn "*cmp_ior"
10217 [(set (match_operand 6 "dominant_cc_register" "")
10218 (compare
10219 (ior:SI
10220 (match_operator 4 "arm_comparison_operator"
10221 [(match_operand:SI 0 "s_register_operand"
10222 "l,l,l,r,r,r,r,r,r,r")
10223 (match_operand:SI 1 "arm_add_operand"
10224 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
10225 (match_operator:SI 5 "arm_comparison_operator"
10226 [(match_operand:SI 2 "s_register_operand"
10227 "l,r,r,l,l,r,r,r,r,r")
10228 (match_operand:SI 3 "arm_add_operand"
10229 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
10230 (const_int 0)))]
10231 "TARGET_32BIT"
10232 "*
10233 {
10234 static const char *const cmp1[NUM_OF_COND_CMP][2] =
10235 {
10236 {\"cmp\\t%0, %1\",
10237 \"cmp\\t%2, %3\"},
10238 {\"cmn\\t%0, #%n1\",
10239 \"cmp\\t%2, %3\"},
10240 {\"cmp\\t%0, %1\",
10241 \"cmn\\t%2, #%n3\"},
10242 {\"cmn\\t%0, #%n1\",
10243 \"cmn\\t%2, #%n3\"}
10244 };
10245 static const char *const cmp2[NUM_OF_COND_CMP][2] =
10246 {
10247 {\"cmp%D4\\t%2, %3\",
10248 \"cmp%D5\\t%0, %1\"},
10249 {\"cmp%D4\\t%2, %3\",
10250 \"cmn%D5\\t%0, #%n1\"},
10251 {\"cmn%D4\\t%2, #%n3\",
10252 \"cmp%D5\\t%0, %1\"},
10253 {\"cmn%D4\\t%2, #%n3\",
10254 \"cmn%D5\\t%0, #%n1\"}
10255 };
10256 static const char *const ite[2] =
10257 {
10258 \"it\\t%D4\",
10259 \"it\\t%D5\"
10260 };
10261 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
10262 CMP_CMP, CMN_CMP, CMP_CMP,
10263 CMP_CMP, CMN_CMP, CMP_CMN,
10264 CMN_CMN};
10265 int swap =
10266 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10267
10268 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10269 if (TARGET_THUMB2) {
10270 output_asm_insn (ite[swap], operands);
10271 }
10272 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10273 return \"\";
10274 }
10275 "
10276 [(set_attr "conds" "set")
10277 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
10278 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
10279 (set_attr_alternative "length"
10280 [(const_int 6)
10281 (const_int 8)
10282 (const_int 8)
10283 (const_int 8)
10284 (const_int 8)
10285 (const_int 6)
10286 (if_then_else (eq_attr "is_thumb" "no")
10287 (const_int 8)
10288 (const_int 10))
10289 (if_then_else (eq_attr "is_thumb" "no")
10290 (const_int 8)
10291 (const_int 10))
10292 (if_then_else (eq_attr "is_thumb" "no")
10293 (const_int 8)
10294 (const_int 10))
10295 (if_then_else (eq_attr "is_thumb" "no")
10296 (const_int 8)
10297 (const_int 10))])
10298 (set_attr "type" "multiple")]
10299 )
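
;; As an illustrative sketch (register names chosen for the example only),
;; combining the tests of "a == 0 || b == 4" through the pattern above
;; typically gives
;;      cmp     r0, #0
;;      cmpne   r1, #4
;; so a single conditional instruction or branch on EQ can then consume the
;; merged result.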
10300
10301 (define_insn_and_split "*ior_scc_scc"
10302 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
10303 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
10304 [(match_operand:SI 1 "s_register_operand" "l,r")
10305 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10306 (match_operator:SI 6 "arm_comparison_operator"
10307 [(match_operand:SI 4 "s_register_operand" "l,r")
10308 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
10309 (clobber (reg:CC CC_REGNUM))]
10310 "TARGET_32BIT
10311 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
10312 != CCmode)"
10313 "#"
10314 "TARGET_32BIT && reload_completed"
10315 [(set (match_dup 7)
10316 (compare
10317 (ior:SI
10318 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10319 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10320 (const_int 0)))
10321 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10322 "operands[7]
10323 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10324 DOM_CC_X_OR_Y),
10325 CC_REGNUM);"
10326 [(set_attr "conds" "clob")
10327 (set_attr "enabled_for_short_it" "yes,no")
10328 (set_attr "length" "16")
10329 (set_attr "type" "multiple")]
10330 )
10331
10332 ; If the above pattern is followed by a CMP insn, then the compare is
10333 ; redundant, since we can rework the conditional instruction that follows.
10334 (define_insn_and_split "*ior_scc_scc_cmp"
10335 [(set (match_operand 0 "dominant_cc_register" "")
10336 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
10337 [(match_operand:SI 1 "s_register_operand" "l,r")
10338 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10339 (match_operator:SI 6 "arm_comparison_operator"
10340 [(match_operand:SI 4 "s_register_operand" "l,r")
10341 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
10342 (const_int 0)))
10343 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
10344 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10345 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10346 "TARGET_32BIT"
10347 "#"
10348 "TARGET_32BIT && reload_completed"
10349 [(set (match_dup 0)
10350 (compare
10351 (ior:SI
10352 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10353 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10354 (const_int 0)))
10355 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10356 ""
10357 [(set_attr "conds" "set")
10358 (set_attr "enabled_for_short_it" "yes,no")
10359 (set_attr "length" "16")
10360 (set_attr "type" "multiple")]
10361 )
10362
10363 (define_insn_and_split "*and_scc_scc"
10364 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
10365 (and:SI (match_operator:SI 3 "arm_comparison_operator"
10366 [(match_operand:SI 1 "s_register_operand" "l,r")
10367 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10368 (match_operator:SI 6 "arm_comparison_operator"
10369 [(match_operand:SI 4 "s_register_operand" "l,r")
10370 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
10371 (clobber (reg:CC CC_REGNUM))]
10372 "TARGET_32BIT
10373 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10374 != CCmode)"
10375 "#"
10376 "TARGET_32BIT && reload_completed
10377 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10378 != CCmode)"
10379 [(set (match_dup 7)
10380 (compare
10381 (and:SI
10382 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10383 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10384 (const_int 0)))
10385 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10386 "operands[7]
10387 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10388 DOM_CC_X_AND_Y),
10389 CC_REGNUM);"
10390 [(set_attr "conds" "clob")
10391 (set_attr "enabled_for_short_it" "yes,no")
10392 (set_attr "length" "16")
10393 (set_attr "type" "multiple")]
10394 )
10395
10396 ; If the above pattern is followed by a CMP insn, then the compare is
10397 ; redundant, since we can rework the conditional instruction that follows.
10398 (define_insn_and_split "*and_scc_scc_cmp"
10399 [(set (match_operand 0 "dominant_cc_register" "")
10400 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
10401 [(match_operand:SI 1 "s_register_operand" "l,r")
10402 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10403 (match_operator:SI 6 "arm_comparison_operator"
10404 [(match_operand:SI 4 "s_register_operand" "l,r")
10405 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
10406 (const_int 0)))
10407 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
10408 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10409 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10410 "TARGET_32BIT"
10411 "#"
10412 "TARGET_32BIT && reload_completed"
10413 [(set (match_dup 0)
10414 (compare
10415 (and:SI
10416 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10417 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10418 (const_int 0)))
10419 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10420 ""
10421 [(set_attr "conds" "set")
10422 (set_attr "enabled_for_short_it" "yes,no")
10423 (set_attr "length" "16")
10424 (set_attr "type" "multiple")]
10425 )
10426
10427 ;; If there is no dominance in the comparison, then we can still save an
10428 ;; instruction in the AND case, since we can know that the second compare
10429 ;; need only zero the value if false (if true, then the value is already
10430 ;; correct).
10431 (define_insn_and_split "*and_scc_scc_nodom"
10432 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
10433 (and:SI (match_operator:SI 3 "arm_comparison_operator"
10434 [(match_operand:SI 1 "s_register_operand" "r,r,0")
10435 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
10436 (match_operator:SI 6 "arm_comparison_operator"
10437 [(match_operand:SI 4 "s_register_operand" "r,r,r")
10438 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
10439 (clobber (reg:CC CC_REGNUM))]
10440 "TARGET_32BIT
10441 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10442 == CCmode)"
10443 "#"
10444 "TARGET_32BIT && reload_completed"
10445 [(parallel [(set (match_dup 0)
10446 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
10447 (clobber (reg:CC CC_REGNUM))])
10448 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
10449 (set (match_dup 0)
10450 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
10451 (match_dup 0)
10452 (const_int 0)))]
10453 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
10454 operands[4], operands[5]),
10455 CC_REGNUM);
10456 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
10457 operands[5]);"
10458 [(set_attr "conds" "clob")
10459 (set_attr "length" "20")
10460 (set_attr "type" "multiple")]
10461 )
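
;; A rough sketch of the code this split leads to for "(a < b) && (c >= d)"
;; (illustrative register choices):
;;      cmp     rA, rB          @ first comparison, materialise 0/1 in r0
;;      movge   r0, #0
;;      movlt   r0, #1
;;      cmp     rC, rD          @ second comparison
;;      movlt   r0, #0          @ zero the result when the second test fails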
10462
10463 (define_split
10464 [(set (reg:CC_NZ CC_REGNUM)
10465 (compare:CC_NZ (ior:SI
10466 (and:SI (match_operand:SI 0 "s_register_operand" "")
10467 (const_int 1))
10468 (match_operator:SI 1 "arm_comparison_operator"
10469 [(match_operand:SI 2 "s_register_operand" "")
10470 (match_operand:SI 3 "arm_add_operand" "")]))
10471 (const_int 0)))
10472 (clobber (match_operand:SI 4 "s_register_operand" ""))]
10473 "TARGET_ARM"
10474 [(set (match_dup 4)
10475 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10476 (match_dup 0)))
10477 (set (reg:CC_NZ CC_REGNUM)
10478 (compare:CC_NZ (and:SI (match_dup 4) (const_int 1))
10479 (const_int 0)))]
10480 "")
10481
10482 (define_split
10483 [(set (reg:CC_NZ CC_REGNUM)
10484 (compare:CC_NZ (ior:SI
10485 (match_operator:SI 1 "arm_comparison_operator"
10486 [(match_operand:SI 2 "s_register_operand" "")
10487 (match_operand:SI 3 "arm_add_operand" "")])
10488 (and:SI (match_operand:SI 0 "s_register_operand" "")
10489 (const_int 1)))
10490 (const_int 0)))
10491 (clobber (match_operand:SI 4 "s_register_operand" ""))]
10492 "TARGET_ARM"
10493 [(set (match_dup 4)
10494 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10495 (match_dup 0)))
10496 (set (reg:CC_NZ CC_REGNUM)
10497 (compare:CC_NZ (and:SI (match_dup 4) (const_int 1))
10498 (const_int 0)))]
10499 "")
10500 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
10501
10502 (define_insn_and_split "*negscc"
10503 [(set (match_operand:SI 0 "s_register_operand" "=r")
10504 (neg:SI (match_operator 3 "arm_comparison_operator"
10505 [(match_operand:SI 1 "s_register_operand" "r")
10506 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
10507 (clobber (reg:CC CC_REGNUM))]
10508 "TARGET_ARM"
10509 "#"
10510 "&& reload_completed"
10511 [(const_int 0)]
10512 {
10513 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
10514
10515 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
10516 {
10517 /* Emit mov\\t%0, %1, asr #31 */
10518 emit_insn (gen_rtx_SET (operands[0],
10519 gen_rtx_ASHIFTRT (SImode,
10520 operands[1],
10521 GEN_INT (31))));
10522 DONE;
10523 }
10524 else if (GET_CODE (operands[3]) == NE)
10525 {
10526 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
10527 if (CONST_INT_P (operands[2]))
10528 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
10529 gen_int_mode (-INTVAL (operands[2]),
10530 SImode)));
10531 else
10532 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
10533
10534 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10535 gen_rtx_NE (SImode,
10536 cc_reg,
10537 const0_rtx),
10538 gen_rtx_SET (operands[0],
10539 GEN_INT (~0))));
10540 DONE;
10541 }
10542 else
10543 {
10544 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
10545 emit_insn (gen_rtx_SET (cc_reg,
10546 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
10547 enum rtx_code rc = GET_CODE (operands[3]);
10548
10549 rc = reverse_condition (rc);
10550 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10551 gen_rtx_fmt_ee (rc,
10552 VOIDmode,
10553 cc_reg,
10554 const0_rtx),
10555 gen_rtx_SET (operands[0], const0_rtx)));
10556 rc = GET_CODE (operands[3]);
10557 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10558 gen_rtx_fmt_ee (rc,
10559 VOIDmode,
10560 cc_reg,
10561 const0_rtx),
10562 gen_rtx_SET (operands[0],
10563 GEN_INT (~0))));
10564 DONE;
10565 }
10566 FAIL;
10567 }
10568 [(set_attr "conds" "clob")
10569 (set_attr "length" "12")
10570 (set_attr "type" "multiple")]
10571 )
10572
10573 (define_insn_and_split "movcond_addsi"
10574 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
10575 (if_then_else:SI
10576 (match_operator 5 "comparison_operator"
10577 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
10578 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
10579 (const_int 0)])
10580 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
10581 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
10582 (clobber (reg:CC CC_REGNUM))]
10583 "TARGET_32BIT"
10584 "#"
10585 "&& reload_completed"
10586 [(set (reg:CC_NZ CC_REGNUM)
10587 (compare:CC_NZ
10588 (plus:SI (match_dup 3)
10589 (match_dup 4))
10590 (const_int 0)))
10591 (set (match_dup 0) (match_dup 1))
10592 (cond_exec (match_dup 6)
10593 (set (match_dup 0) (match_dup 2)))]
10594 "
10595 {
10596 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
10597 operands[3], operands[4]);
10598 enum rtx_code rc = GET_CODE (operands[5]);
10599 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10600 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
10601 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
10602 rc = reverse_condition (rc);
10603 else
10604 std::swap (operands[1], operands[2]);
10605
10606 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10607 }
10608 "
10609 [(set_attr "conds" "clob")
10610 (set_attr "enabled_for_short_it" "no,yes,yes")
10611 (set_attr "type" "multiple")]
10612 )
10613
10614 (define_insn "movcond"
10615 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10616 (if_then_else:SI
10617 (match_operator 5 "arm_comparison_operator"
10618 [(match_operand:SI 3 "s_register_operand" "r,r,r")
10619 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
10620 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
10621 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
10622 (clobber (reg:CC CC_REGNUM))]
10623 "TARGET_ARM"
10624 "*
10625 if (GET_CODE (operands[5]) == LT
10626 && (operands[4] == const0_rtx))
10627 {
10628 if (which_alternative != 1 && REG_P (operands[1]))
10629 {
10630 if (operands[2] == const0_rtx)
10631 return \"and\\t%0, %1, %3, asr #31\";
10632 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
10633 }
10634 else if (which_alternative != 0 && REG_P (operands[2]))
10635 {
10636 if (operands[1] == const0_rtx)
10637 return \"bic\\t%0, %2, %3, asr #31\";
10638 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
10639 }
10640 /* The only case that falls through to here is when both ops 1 & 2
10641 are constants. */
10642 }
10643
10644 if (GET_CODE (operands[5]) == GE
10645 && (operands[4] == const0_rtx))
10646 {
10647 if (which_alternative != 1 && REG_P (operands[1]))
10648 {
10649 if (operands[2] == const0_rtx)
10650 return \"bic\\t%0, %1, %3, asr #31\";
10651 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
10652 }
10653 else if (which_alternative != 0 && REG_P (operands[2]))
10654 {
10655 if (operands[1] == const0_rtx)
10656 return \"and\\t%0, %2, %3, asr #31\";
10657 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
10658 }
10659 /* The only case that falls through to here is when both ops 1 & 2
10660 are constants. */
10661 }
10662 if (CONST_INT_P (operands[4])
10663 && !const_ok_for_arm (INTVAL (operands[4])))
10664 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
10665 else
10666 output_asm_insn (\"cmp\\t%3, %4\", operands);
10667 if (which_alternative != 0)
10668 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
10669 if (which_alternative != 1)
10670 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
10671 return \"\";
10672 "
10673 [(set_attr "conds" "clob")
10674 (set_attr "length" "8,8,12")
10675 (set_attr "type" "multiple")]
10676 )
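
;; For instance, "x < 0 ? a : 0" hits the first special case above and can be
;; emitted as a single instruction (illustrative registers):
;;      and     r0, rA, rX, asr #31
;; because the arithmetic shift produces an all-ones mask when x is negative
;; and zero otherwise.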
10677
10678 ;; ??? The patterns below need checking for Thumb-2 usefulness.
10679
10680 (define_insn "*ifcompare_plus_move"
10681 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10682 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10683 [(match_operand:SI 4 "s_register_operand" "r,r")
10684 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10685 (plus:SI
10686 (match_operand:SI 2 "s_register_operand" "r,r")
10687 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
10688 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10689 (clobber (reg:CC CC_REGNUM))]
10690 "TARGET_ARM"
10691 "#"
10692 [(set_attr "conds" "clob")
10693 (set_attr "length" "8,12")
10694 (set_attr "type" "multiple")]
10695 )
10696
10697 (define_insn "*if_plus_move"
10698 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10699 (if_then_else:SI
10700 (match_operator 4 "arm_comparison_operator"
10701 [(match_operand 5 "cc_register" "") (const_int 0)])
10702 (plus:SI
10703 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10704 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
10705 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
10706 "TARGET_ARM"
10707 "@
10708 add%d4\\t%0, %2, %3
10709 sub%d4\\t%0, %2, #%n3
10710 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
10711 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
10712 [(set_attr "conds" "use")
10713 (set_attr "length" "4,4,8,8")
10714 (set_attr_alternative "type"
10715 [(if_then_else (match_operand 3 "const_int_operand" "")
10716 (const_string "alu_imm" )
10717 (const_string "alu_sreg"))
10718 (const_string "alu_imm")
10719 (const_string "multiple")
10720 (const_string "multiple")])]
10721 )
10722
10723 (define_insn "*ifcompare_move_plus"
10724 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10725 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10726 [(match_operand:SI 4 "s_register_operand" "r,r")
10727 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10728 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10729 (plus:SI
10730 (match_operand:SI 2 "s_register_operand" "r,r")
10731 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
10732 (clobber (reg:CC CC_REGNUM))]
10733 "TARGET_ARM"
10734 "#"
10735 [(set_attr "conds" "clob")
10736 (set_attr "length" "8,12")
10737 (set_attr "type" "multiple")]
10738 )
10739
10740 (define_insn "*if_move_plus"
10741 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10742 (if_then_else:SI
10743 (match_operator 4 "arm_comparison_operator"
10744 [(match_operand 5 "cc_register" "") (const_int 0)])
10745 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
10746 (plus:SI
10747 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10748 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
10749 "TARGET_ARM"
10750 "@
10751 add%D4\\t%0, %2, %3
10752 sub%D4\\t%0, %2, #%n3
10753 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
10754 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
10755 [(set_attr "conds" "use")
10756 (set_attr "length" "4,4,8,8")
10757 (set_attr_alternative "type"
10758 [(if_then_else (match_operand 3 "const_int_operand" "")
10759 (const_string "alu_imm" )
10760 (const_string "alu_sreg"))
10761 (const_string "alu_imm")
10762 (const_string "multiple")
10763 (const_string "multiple")])]
10764 )
10765
10766 (define_insn "*ifcompare_arith_arith"
10767 [(set (match_operand:SI 0 "s_register_operand" "=r")
10768 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
10769 [(match_operand:SI 5 "s_register_operand" "r")
10770 (match_operand:SI 6 "arm_add_operand" "rIL")])
10771 (match_operator:SI 8 "shiftable_operator"
10772 [(match_operand:SI 1 "s_register_operand" "r")
10773 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10774 (match_operator:SI 7 "shiftable_operator"
10775 [(match_operand:SI 3 "s_register_operand" "r")
10776 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
10777 (clobber (reg:CC CC_REGNUM))]
10778 "TARGET_ARM"
10779 "#"
10780 [(set_attr "conds" "clob")
10781 (set_attr "length" "12")
10782 (set_attr "type" "multiple")]
10783 )
10784
10785 (define_insn "*if_arith_arith"
10786 [(set (match_operand:SI 0 "s_register_operand" "=r")
10787 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
10788 [(match_operand 8 "cc_register" "") (const_int 0)])
10789 (match_operator:SI 6 "shiftable_operator"
10790 [(match_operand:SI 1 "s_register_operand" "r")
10791 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10792 (match_operator:SI 7 "shiftable_operator"
10793 [(match_operand:SI 3 "s_register_operand" "r")
10794 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
10795 "TARGET_ARM"
10796 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
10797 [(set_attr "conds" "use")
10798 (set_attr "length" "8")
10799 (set_attr "type" "multiple")]
10800 )
10801
10802 (define_insn "*ifcompare_arith_move"
10803 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10804 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10805 [(match_operand:SI 2 "s_register_operand" "r,r")
10806 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
10807 (match_operator:SI 7 "shiftable_operator"
10808 [(match_operand:SI 4 "s_register_operand" "r,r")
10809 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
10810 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10811 (clobber (reg:CC CC_REGNUM))]
10812 "TARGET_ARM"
10813 "*
10814 /* If we have an operation where (op x 0) is the identity operation, the
10815 conditional operator is LT or GE, we are comparing against zero, and
10816 everything is in registers, then we can do this in two instructions. */
10817 if (operands[3] == const0_rtx
10818 && GET_CODE (operands[7]) != AND
10819 && REG_P (operands[5])
10820 && REG_P (operands[1])
10821 && REGNO (operands[1]) == REGNO (operands[4])
10822 && REGNO (operands[4]) != REGNO (operands[0]))
10823 {
10824 if (GET_CODE (operands[6]) == LT)
10825 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10826 else if (GET_CODE (operands[6]) == GE)
10827 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10828 }
10829 if (CONST_INT_P (operands[3])
10830 && !const_ok_for_arm (INTVAL (operands[3])))
10831 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
10832 else
10833 output_asm_insn (\"cmp\\t%2, %3\", operands);
10834 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
10835 if (which_alternative != 0)
10836 return \"mov%D6\\t%0, %1\";
10837 return \"\";
10838 "
10839 [(set_attr "conds" "clob")
10840 (set_attr "length" "8,12")
10841 (set_attr "type" "multiple")]
10842 )
10843
10844 (define_insn "*if_arith_move"
10845 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10846 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10847 [(match_operand 6 "cc_register" "") (const_int 0)])
10848 (match_operator:SI 5 "shiftable_operator"
10849 [(match_operand:SI 2 "s_register_operand" "r,r")
10850 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10851 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
10852 "TARGET_ARM"
10853 "@
10854 %I5%d4\\t%0, %2, %3
10855 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
10856 [(set_attr "conds" "use")
10857 (set_attr "length" "4,8")
10858 (set_attr_alternative "type"
10859 [(if_then_else (match_operand 3 "const_int_operand" "")
10860 (if_then_else (match_operand 5 "alu_shift_operator_lsl_1_to_4")
10861 (const_string "alu_shift_imm_lsl_1to4")
10862 (const_string "alu_shift_imm_other"))
10863 (const_string "alu_shift_reg"))
10864 (const_string "multiple")])]
10865 )
10866
10867 (define_insn "*ifcompare_move_arith"
10868 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10869 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10870 [(match_operand:SI 4 "s_register_operand" "r,r")
10871 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10872 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10873 (match_operator:SI 7 "shiftable_operator"
10874 [(match_operand:SI 2 "s_register_operand" "r,r")
10875 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10876 (clobber (reg:CC CC_REGNUM))]
10877 "TARGET_ARM"
10878 "*
10879 /* If we have an operation where (op x 0) is the identity operation, the
10880 conditional operator is LT or GE, we are comparing against zero, and
10881 everything is in registers, then we can do this in two instructions. */
10882 if (operands[5] == const0_rtx
10883 && GET_CODE (operands[7]) != AND
10884 && REG_P (operands[3])
10885 && REG_P (operands[1])
10886 && REGNO (operands[1]) == REGNO (operands[2])
10887 && REGNO (operands[2]) != REGNO (operands[0]))
10888 {
10889 if (GET_CODE (operands[6]) == GE)
10890 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10891 else if (GET_CODE (operands[6]) == LT)
10892 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10893 }
10894
10895 if (CONST_INT_P (operands[5])
10896 && !const_ok_for_arm (INTVAL (operands[5])))
10897 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
10898 else
10899 output_asm_insn (\"cmp\\t%4, %5\", operands);
10900
10901 if (which_alternative != 0)
10902 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
10903 return \"%I7%D6\\t%0, %2, %3\";
10904 "
10905 [(set_attr "conds" "clob")
10906 (set_attr "length" "8,12")
10907 (set_attr "type" "multiple")]
10908 )
10909
10910 (define_insn "*if_move_arith"
10911 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10912 (if_then_else:SI
10913 (match_operator 4 "arm_comparison_operator"
10914 [(match_operand 6 "cc_register" "") (const_int 0)])
10915 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10916 (match_operator:SI 5 "shiftable_operator"
10917 [(match_operand:SI 2 "s_register_operand" "r,r")
10918 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
10919 "TARGET_ARM"
10920 "@
10921 %I5%D4\\t%0, %2, %3
10922 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
10923 [(set_attr "conds" "use")
10924 (set_attr "length" "4,8")
10925 (set_attr_alternative "type"
10926 [(if_then_else (match_operand 3 "const_int_operand" "")
10927 (if_then_else (match_operand 5 "alu_shift_operator_lsl_1_to_4")
10928 (const_string "alu_shift_imm_lsl_1to4")
10929 (const_string "alu_shift_imm_other"))
10930 (const_string "alu_shift_reg"))
10931 (const_string "multiple")])]
10932 )
10933
10934 (define_insn "*ifcompare_move_not"
10935 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10936 (if_then_else:SI
10937 (match_operator 5 "arm_comparison_operator"
10938 [(match_operand:SI 3 "s_register_operand" "r,r")
10939 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10940 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10941 (not:SI
10942 (match_operand:SI 2 "s_register_operand" "r,r"))))
10943 (clobber (reg:CC CC_REGNUM))]
10944 "TARGET_ARM"
10945 "#"
10946 [(set_attr "conds" "clob")
10947 (set_attr "length" "8,12")
10948 (set_attr "type" "multiple")]
10949 )
10950
10951 (define_insn "*if_move_not"
10952 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10953 (if_then_else:SI
10954 (match_operator 4 "arm_comparison_operator"
10955 [(match_operand 3 "cc_register" "") (const_int 0)])
10956 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10957 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10958 "TARGET_ARM"
10959 "@
10960 mvn%D4\\t%0, %2
10961 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
10962 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
10963 [(set_attr "conds" "use")
10965 (set_attr "length" "4,8,8")
10966 (set_attr "type" "mvn_reg,multiple,multiple")]
10967 )
10968
10969 (define_insn "*ifcompare_not_move"
10970 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10971 (if_then_else:SI
10972 (match_operator 5 "arm_comparison_operator"
10973 [(match_operand:SI 3 "s_register_operand" "r,r")
10974 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10975 (not:SI
10976 (match_operand:SI 2 "s_register_operand" "r,r"))
10977 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10978 (clobber (reg:CC CC_REGNUM))]
10979 "TARGET_ARM"
10980 "#"
10981 [(set_attr "conds" "clob")
10982 (set_attr "length" "8,12")
10983 (set_attr "type" "multiple")]
10984 )
10985
10986 (define_insn "*if_not_move"
10987 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10988 (if_then_else:SI
10989 (match_operator 4 "arm_comparison_operator"
10990 [(match_operand 3 "cc_register" "") (const_int 0)])
10991 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10992 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10993 "TARGET_ARM"
10994 "@
10995 mvn%d4\\t%0, %2
10996 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
10997 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
10998 [(set_attr "conds" "use")
10999 (set_attr "type" "mvn_reg,multiple,multiple")
11000 (set_attr "length" "4,8,8")]
11001 )
11002
11003 (define_insn "*ifcompare_shift_move"
11004 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11005 (if_then_else:SI
11006 (match_operator 6 "arm_comparison_operator"
11007 [(match_operand:SI 4 "s_register_operand" "r,r")
11008 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11009 (match_operator:SI 7 "shift_operator"
11010 [(match_operand:SI 2 "s_register_operand" "r,r")
11011 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
11012 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11013 (clobber (reg:CC CC_REGNUM))]
11014 "TARGET_ARM"
11015 "#"
11016 [(set_attr "conds" "clob")
11017 (set_attr "length" "8,12")
11018 (set_attr "type" "multiple")]
11019 )
11020
11021 (define_insn "*if_shift_move"
11022 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11023 (if_then_else:SI
11024 (match_operator 5 "arm_comparison_operator"
11025 [(match_operand 6 "cc_register" "") (const_int 0)])
11026 (match_operator:SI 4 "shift_operator"
11027 [(match_operand:SI 2 "s_register_operand" "r,r,r")
11028 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
11029 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11030 "TARGET_ARM"
11031 "@
11032 mov%d5\\t%0, %2%S4
11033 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
11034 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
11035 [(set_attr "conds" "use")
11036 (set_attr "shift" "2")
11037 (set_attr "length" "4,8,8")
11038 (set_attr_alternative "type"
11039 [(if_then_else (match_operand 3 "const_int_operand" "")
11040 (const_string "mov_shift" )
11041 (const_string "mov_shift_reg"))
11042 (const_string "multiple")
11043 (const_string "multiple")])]
11044 )
11045
11046 (define_insn "*ifcompare_move_shift"
11047 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11048 (if_then_else:SI
11049 (match_operator 6 "arm_comparison_operator"
11050 [(match_operand:SI 4 "s_register_operand" "r,r")
11051 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11052 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11053 (match_operator:SI 7 "shift_operator"
11054 [(match_operand:SI 2 "s_register_operand" "r,r")
11055 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
11056 (clobber (reg:CC CC_REGNUM))]
11057 "TARGET_ARM"
11058 "#"
11059 [(set_attr "conds" "clob")
11060 (set_attr "length" "8,12")
11061 (set_attr "type" "multiple")]
11062 )
11063
11064 (define_insn "*if_move_shift"
11065 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11066 (if_then_else:SI
11067 (match_operator 5 "arm_comparison_operator"
11068 [(match_operand 6 "cc_register" "") (const_int 0)])
11069 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
11070 (match_operator:SI 4 "shift_operator"
11071 [(match_operand:SI 2 "s_register_operand" "r,r,r")
11072 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
11073 "TARGET_ARM"
11074 "@
11075 mov%D5\\t%0, %2%S4
11076 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
11077 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
11078 [(set_attr "conds" "use")
11079 (set_attr "shift" "2")
11080 (set_attr "length" "4,8,8")
11081 (set_attr_alternative "type"
11082 [(if_then_else (match_operand 3 "const_int_operand" "")
11083 (const_string "mov_shift" )
11084 (const_string "mov_shift_reg"))
11085 (const_string "multiple")
11086 (const_string "multiple")])]
11087 )
11088
11089 (define_insn "*ifcompare_shift_shift"
11090 [(set (match_operand:SI 0 "s_register_operand" "=r")
11091 (if_then_else:SI
11092 (match_operator 7 "arm_comparison_operator"
11093 [(match_operand:SI 5 "s_register_operand" "r")
11094 (match_operand:SI 6 "arm_add_operand" "rIL")])
11095 (match_operator:SI 8 "shift_operator"
11096 [(match_operand:SI 1 "s_register_operand" "r")
11097 (match_operand:SI 2 "arm_rhs_operand" "rM")])
11098 (match_operator:SI 9 "shift_operator"
11099 [(match_operand:SI 3 "s_register_operand" "r")
11100 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
11101 (clobber (reg:CC CC_REGNUM))]
11102 "TARGET_ARM"
11103 "#"
11104 [(set_attr "conds" "clob")
11105 (set_attr "length" "12")
11106 (set_attr "type" "multiple")]
11107 )
11108
11109 (define_insn "*if_shift_shift"
11110 [(set (match_operand:SI 0 "s_register_operand" "=r")
11111 (if_then_else:SI
11112 (match_operator 5 "arm_comparison_operator"
11113 [(match_operand 8 "cc_register" "") (const_int 0)])
11114 (match_operator:SI 6 "shift_operator"
11115 [(match_operand:SI 1 "s_register_operand" "r")
11116 (match_operand:SI 2 "arm_rhs_operand" "rM")])
11117 (match_operator:SI 7 "shift_operator"
11118 [(match_operand:SI 3 "s_register_operand" "r")
11119 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
11120 "TARGET_ARM"
11121 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
11122 [(set_attr "conds" "use")
11123 (set_attr "shift" "1")
11124 (set_attr "length" "8")
11125 (set (attr "type") (if_then_else
11126 (and (match_operand 2 "const_int_operand" "")
11127 (match_operand 4 "const_int_operand" ""))
11128 (const_string "mov_shift")
11129 (const_string "mov_shift_reg")))]
11130 )
11131
11132 (define_insn "*ifcompare_not_arith"
11133 [(set (match_operand:SI 0 "s_register_operand" "=r")
11134 (if_then_else:SI
11135 (match_operator 6 "arm_comparison_operator"
11136 [(match_operand:SI 4 "s_register_operand" "r")
11137 (match_operand:SI 5 "arm_add_operand" "rIL")])
11138 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
11139 (match_operator:SI 7 "shiftable_operator"
11140 [(match_operand:SI 2 "s_register_operand" "r")
11141 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
11142 (clobber (reg:CC CC_REGNUM))]
11143 "TARGET_ARM"
11144 "#"
11145 [(set_attr "conds" "clob")
11146 (set_attr "length" "12")
11147 (set_attr "type" "multiple")]
11148 )
11149
11150 (define_insn "*if_not_arith"
11151 [(set (match_operand:SI 0 "s_register_operand" "=r")
11152 (if_then_else:SI
11153 (match_operator 5 "arm_comparison_operator"
11154 [(match_operand 4 "cc_register" "") (const_int 0)])
11155 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
11156 (match_operator:SI 6 "shiftable_operator"
11157 [(match_operand:SI 2 "s_register_operand" "r")
11158 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
11159 "TARGET_ARM"
11160 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
11161 [(set_attr "conds" "use")
11162 (set_attr "type" "mvn_reg")
11163 (set_attr "length" "8")]
11164 )
11165
11166 (define_insn "*ifcompare_arith_not"
11167 [(set (match_operand:SI 0 "s_register_operand" "=r")
11168 (if_then_else:SI
11169 (match_operator 6 "arm_comparison_operator"
11170 [(match_operand:SI 4 "s_register_operand" "r")
11171 (match_operand:SI 5 "arm_add_operand" "rIL")])
11172 (match_operator:SI 7 "shiftable_operator"
11173 [(match_operand:SI 2 "s_register_operand" "r")
11174 (match_operand:SI 3 "arm_rhs_operand" "rI")])
11175 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
11176 (clobber (reg:CC CC_REGNUM))]
11177 "TARGET_ARM"
11178 "#"
11179 [(set_attr "conds" "clob")
11180 (set_attr "length" "12")
11181 (set_attr "type" "multiple")]
11182 )
11183
11184 (define_insn "*if_arith_not"
11185 [(set (match_operand:SI 0 "s_register_operand" "=r")
11186 (if_then_else:SI
11187 (match_operator 5 "arm_comparison_operator"
11188 [(match_operand 4 "cc_register" "") (const_int 0)])
11189 (match_operator:SI 6 "shiftable_operator"
11190 [(match_operand:SI 2 "s_register_operand" "r")
11191 (match_operand:SI 3 "arm_rhs_operand" "rI")])
11192 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
11193 "TARGET_ARM"
11194 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
11195 [(set_attr "conds" "use")
11196 (set_attr "type" "multiple")
11197 (set_attr "length" "8")]
11198 )
11199
11200 (define_insn "*ifcompare_neg_move"
11201 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11202 (if_then_else:SI
11203 (match_operator 5 "arm_comparison_operator"
11204 [(match_operand:SI 3 "s_register_operand" "r,r")
11205 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11206 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
11207 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11208 (clobber (reg:CC CC_REGNUM))]
11209 "TARGET_ARM"
11210 "#"
11211 [(set_attr "conds" "clob")
11212 (set_attr "length" "8,12")
11213 (set_attr "type" "multiple")]
11214 )
11215
11216 (define_insn_and_split "*if_neg_move"
11217 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
11218 (if_then_else:SI
11219 (match_operator 4 "arm_comparison_operator"
11220 [(match_operand 3 "cc_register" "") (const_int 0)])
11221 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
11222 (match_operand:SI 1 "s_register_operand" "0,0")))]
11223 "TARGET_32BIT && !TARGET_COND_ARITH"
11224 "#"
11225 "&& reload_completed"
11226 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
11227 (set (match_dup 0) (neg:SI (match_dup 2))))]
11228 ""
11229 [(set_attr "conds" "use")
11230 (set_attr "length" "4")
11231 (set_attr "arch" "t2,32")
11232 (set_attr "enabled_for_short_it" "yes,no")
11233 (set_attr "type" "logic_shift_imm")]
11234 )
11235
11236 (define_insn "*ifcompare_move_neg"
11237 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11238 (if_then_else:SI
11239 (match_operator 5 "arm_comparison_operator"
11240 [(match_operand:SI 3 "s_register_operand" "r,r")
11241 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11242 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11243 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
11244 (clobber (reg:CC CC_REGNUM))]
11245 "TARGET_ARM"
11246 "#"
11247 [(set_attr "conds" "clob")
11248 (set_attr "length" "8,12")
11249 (set_attr "type" "multiple")]
11250 )
11251
11252 (define_insn_and_split "*if_move_neg"
11253 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
11254 (if_then_else:SI
11255 (match_operator 4 "arm_comparison_operator"
11256 [(match_operand 3 "cc_register" "") (const_int 0)])
11257 (match_operand:SI 1 "s_register_operand" "0,0")
11258 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
11259 "TARGET_32BIT"
11260 "#"
11261 "&& reload_completed"
11262 [(cond_exec (match_dup 5)
11263 (set (match_dup 0) (neg:SI (match_dup 2))))]
11264 {
11265 machine_mode mode = GET_MODE (operands[3]);
11266 rtx_code rc = GET_CODE (operands[4]);
11267
11268 if (mode == CCFPmode || mode == CCFPEmode)
11269 rc = reverse_condition_maybe_unordered (rc);
11270 else
11271 rc = reverse_condition (rc);
11272
11273 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
11274 }
11275 [(set_attr "conds" "use")
11276 (set_attr "length" "4")
11277 (set_attr "arch" "t2,32")
11278 (set_attr "enabled_for_short_it" "yes,no")
11279 (set_attr "type" "logic_shift_imm")]
11280 )
11281
11282 (define_insn "*arith_adjacentmem"
11283 [(set (match_operand:SI 0 "s_register_operand" "=r")
11284 (match_operator:SI 1 "shiftable_operator"
11285 [(match_operand:SI 2 "memory_operand" "m")
11286 (match_operand:SI 3 "memory_operand" "m")]))
11287 (clobber (match_scratch:SI 4 "=r"))]
11288 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
11289 "*
11290 {
11291 rtx ldm[3];
11292 rtx arith[4];
11293 rtx base_reg;
11294 HOST_WIDE_INT val1 = 0, val2 = 0;
11295
11296 if (REGNO (operands[0]) > REGNO (operands[4]))
11297 {
11298 ldm[1] = operands[4];
11299 ldm[2] = operands[0];
11300 }
11301 else
11302 {
11303 ldm[1] = operands[0];
11304 ldm[2] = operands[4];
11305 }
11306
11307 base_reg = XEXP (operands[2], 0);
11308
11309 if (!REG_P (base_reg))
11310 {
11311 val1 = INTVAL (XEXP (base_reg, 1));
11312 base_reg = XEXP (base_reg, 0);
11313 }
11314
11315 if (!REG_P (XEXP (operands[3], 0)))
11316 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
11317
11318 arith[0] = operands[0];
11319 arith[3] = operands[1];
11320
11321 if (val1 < val2)
11322 {
11323 arith[1] = ldm[1];
11324 arith[2] = ldm[2];
11325 }
11326 else
11327 {
11328 arith[1] = ldm[2];
11329 arith[2] = ldm[1];
11330 }
11331
11332 ldm[0] = base_reg;
11333 if (val1 != 0 && val2 != 0)
11334 {
11335 rtx ops[3];
11336
11337 if (val1 == 4 || val2 == 4)
11338 /* Other val must be 8, since we know they are adjacent and neither
11339 is zero. */
11340 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
11341 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
11342 {
11343 ldm[0] = ops[0] = operands[4];
11344 ops[1] = base_reg;
11345 ops[2] = GEN_INT (val1);
11346 output_add_immediate (ops);
11347 if (val1 < val2)
11348 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11349 else
11350 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11351 }
11352 else
11353 {
11354 /* Offset is out of range for a single add, so use two ldr. */
11355 ops[0] = ldm[1];
11356 ops[1] = base_reg;
11357 ops[2] = GEN_INT (val1);
11358 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11359 ops[0] = ldm[2];
11360 ops[2] = GEN_INT (val2);
11361 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11362 }
11363 }
11364 else if (val1 != 0)
11365 {
11366 if (val1 < val2)
11367 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11368 else
11369 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11370 }
11371 else
11372 {
11373 if (val1 < val2)
11374 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11375 else
11376 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11377 }
11378 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
11379 return \"\";
11380 }"
11381 [(set_attr "length" "12")
11382 (set_attr "predicable" "yes")
11383 (set_attr "type" "load_4")]
11384 )
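
;; A sketch of the expected output for adding two adjacent words at [rB] and
;; [rB, #4], with r0 as the destination and r4 as the scratch register
;; (illustrative choices):
;;      ldmia   rB, {r0, r4}
;;      add     r0, r0, r4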
11385
11386 ; This pattern is never tried by combine, so do it as a peephole
11387
11388 (define_peephole2
11389 [(set (match_operand:SI 0 "arm_general_register_operand" "")
11390 (match_operand:SI 1 "arm_general_register_operand" ""))
11391 (set (reg:CC CC_REGNUM)
11392 (compare:CC (match_dup 1) (const_int 0)))]
11393 "TARGET_ARM"
11394 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
11395 (set (match_dup 0) (match_dup 1))])]
11396 ""
11397 )
11398
11399 (define_split
11400 [(set (match_operand:SI 0 "s_register_operand" "")
11401 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
11402 (const_int 0))
11403 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
11404 [(match_operand:SI 3 "s_register_operand" "")
11405 (match_operand:SI 4 "arm_rhs_operand" "")]))))
11406 (clobber (match_operand:SI 5 "s_register_operand" ""))]
11407 "TARGET_ARM"
11408 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
11409 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
11410 (match_dup 5)))]
11411 ""
11412 )
11413
11414 ;; This split can be used because CC_Z mode implies that the following
11415 ;; branch will be an equality, or an unsigned inequality, so the sign
11416 ;; extension is not needed.
11417
11418 (define_split
11419 [(set (reg:CC_Z CC_REGNUM)
11420 (compare:CC_Z
11421 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
11422 (const_int 24))
11423 (match_operand 1 "const_int_operand" "")))
11424 (clobber (match_scratch:SI 2 ""))]
11425 "TARGET_ARM
11426 && ((UINTVAL (operands[1]))
11427 == ((UINTVAL (operands[1])) >> 24) << 24)"
11428 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
11429 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
11430 "
11431 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
11432 "
11433 )
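
;; As a rough example, comparing "(int) byte_val << 24" against 0x2a000000
;; can be rewritten by the split above as a byte load followed by a
;; small-immediate compare (illustrative registers):
;;      ldrb    r2, [r1]
;;      cmp     r2, #42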
11434 ;; ??? Check the patterns above for Thumb-2 usefulness
11435
11436 (define_expand "prologue"
11437 [(clobber (const_int 0))]
11438 "TARGET_EITHER"
11439 "if (TARGET_32BIT)
11440 arm_expand_prologue ();
11441 else
11442 thumb1_expand_prologue ();
11443 DONE;
11444 "
11445 )
11446
11447 (define_expand "epilogue"
11448 [(clobber (const_int 0))]
11449 "TARGET_EITHER"
11450 "
11451 if (crtl->calls_eh_return)
11452 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
11453 if (TARGET_THUMB1)
11454 {
11455 thumb1_expand_epilogue ();
11456 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
11457 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
11458 }
11459 else if (HAVE_return)
11460 {
11461 /* HAVE_return is testing for USE_RETURN_INSN (FALSE). Hence,
11462 no need for explicit testing again. */
11463 emit_jump_insn (gen_return ());
11464 }
11465 else if (TARGET_32BIT)
11466 {
11467 arm_expand_epilogue (true);
11468 }
11469 DONE;
11470 "
11471 )
11472
11473 ;; Note - although unspec_volatiles USE all hard registers,
11474 ;; USEs are ignored after reload has completed.  Thus we need
11475 ;; to add an unspec of the link register to ensure that flow
11476 ;; does not think that it is unused by the sibcall branch that
11477 ;; will replace the standard function epilogue.
11478 (define_expand "sibcall_epilogue"
11479 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
11480 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
11481 "TARGET_32BIT"
11482 "
11483 arm_expand_epilogue (false);
11484 DONE;
11485 "
11486 )
11487
11488 (define_expand "eh_epilogue"
11489 [(use (match_operand:SI 0 "register_operand"))
11490 (use (match_operand:SI 1 "register_operand"))
11491 (use (match_operand:SI 2 "register_operand"))]
11492 "TARGET_EITHER"
11493 "
11494 {
11495 cfun->machine->eh_epilogue_sp_ofs = operands[1];
11496 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
11497 {
11498 rtx ra = gen_rtx_REG (Pmode, 2);
11499
11500 emit_move_insn (ra, operands[2]);
11501 operands[2] = ra;
11502 }
11503 /* This is a hack -- we may have crystallized the function type too
11504 early. */
11505 cfun->machine->func_type = 0;
11506 }"
11507 )
11508
11509 ;; This split is only used during output to reduce the number of patterns
11510 ;; that need assembler instructions adding to them. We allowed the setting
11511 ;; of the conditions to be implicit during rtl generation so that
11512 ;; the conditional compare patterns would work.  However, this conflicts to
11513 ;; some extent with the conditional data operations, so we have to split them
11514 ;; up again here.
11515
11516 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
11517 ;; conditional execution sufficient?
11518
11519 (define_split
11520 [(set (match_operand:SI 0 "s_register_operand" "")
11521 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11522 [(match_operand 2 "" "") (match_operand 3 "" "")])
11523 (match_dup 0)
11524 (match_operand 4 "" "")))
11525 (clobber (reg:CC CC_REGNUM))]
11526 "TARGET_ARM && reload_completed"
11527 [(set (match_dup 5) (match_dup 6))
11528 (cond_exec (match_dup 7)
11529 (set (match_dup 0) (match_dup 4)))]
11530 "
11531 {
11532 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11533 operands[2], operands[3]);
11534 enum rtx_code rc = GET_CODE (operands[1]);
11535
11536 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11537 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11538 if (mode == CCFPmode || mode == CCFPEmode)
11539 rc = reverse_condition_maybe_unordered (rc);
11540 else
11541 rc = reverse_condition (rc);
11542
11543 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
11544 }"
11545 )
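
;; For example, "x = a < b ? x : c" is split along these lines into
;; (illustrative registers):
;;      cmp     rA, rB
;;      movge   rX, rC          @ reversed condition guards the else-value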
11546
11547 (define_split
11548 [(set (match_operand:SI 0 "s_register_operand" "")
11549 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11550 [(match_operand 2 "" "") (match_operand 3 "" "")])
11551 (match_operand 4 "" "")
11552 (match_dup 0)))
11553 (clobber (reg:CC CC_REGNUM))]
11554 "TARGET_ARM && reload_completed"
11555 [(set (match_dup 5) (match_dup 6))
11556 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
11557 (set (match_dup 0) (match_dup 4)))]
11558 "
11559 {
11560 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11561 operands[2], operands[3]);
11562
11563 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11564 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11565 }"
11566 )
11567
11568 (define_split
11569 [(set (match_operand:SI 0 "s_register_operand" "")
11570 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11571 [(match_operand 2 "" "") (match_operand 3 "" "")])
11572 (match_operand 4 "" "")
11573 (match_operand 5 "" "")))
11574 (clobber (reg:CC CC_REGNUM))]
11575 "TARGET_ARM && reload_completed"
11576 [(set (match_dup 6) (match_dup 7))
11577 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
11578 (set (match_dup 0) (match_dup 4)))
11579 (cond_exec (match_dup 8)
11580 (set (match_dup 0) (match_dup 5)))]
11581 "
11582 {
11583 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11584 operands[2], operands[3]);
11585 enum rtx_code rc = GET_CODE (operands[1]);
11586
11587 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11588 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11589 if (mode == CCFPmode || mode == CCFPEmode)
11590 rc = reverse_condition_maybe_unordered (rc);
11591 else
11592 rc = reverse_condition (rc);
11593
11594 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
11595 }"
11596 )
11597
11598 (define_split
11599 [(set (match_operand:SI 0 "s_register_operand" "")
11600 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11601 [(match_operand:SI 2 "s_register_operand" "")
11602 (match_operand:SI 3 "arm_add_operand" "")])
11603 (match_operand:SI 4 "arm_rhs_operand" "")
11604 (not:SI
11605 (match_operand:SI 5 "s_register_operand" ""))))
11606 (clobber (reg:CC CC_REGNUM))]
11607 "TARGET_ARM && reload_completed"
11608 [(set (match_dup 6) (match_dup 7))
11609 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
11610 (set (match_dup 0) (match_dup 4)))
11611 (cond_exec (match_dup 8)
11612 (set (match_dup 0) (not:SI (match_dup 5))))]
11613 "
11614 {
11615 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11616 operands[2], operands[3]);
11617 enum rtx_code rc = GET_CODE (operands[1]);
11618
11619 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11620 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11621 if (mode == CCFPmode || mode == CCFPEmode)
11622 rc = reverse_condition_maybe_unordered (rc);
11623 else
11624 rc = reverse_condition (rc);
11625
11626 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
11627 }"
11628 )
11629
11630 (define_insn "*cond_move_not"
11631 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11632 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
11633 [(match_operand 3 "cc_register" "") (const_int 0)])
11634 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11635 (not:SI
11636 (match_operand:SI 2 "s_register_operand" "r,r"))))]
11637 "TARGET_ARM"
11638 "@
11639 mvn%D4\\t%0, %2
11640 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
11641 [(set_attr "conds" "use")
11642 (set_attr "type" "mvn_reg,multiple")
11643 (set_attr "length" "4,8")]
11644 )
11645
11646 ;; The next two patterns occur when an AND operation is followed by a
11647 ;; scc insn sequence
11648
11649 (define_insn "*sign_extract_onebit"
11650 [(set (match_operand:SI 0 "s_register_operand" "=r")
11651 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11652 (const_int 1)
11653 (match_operand:SI 2 "const_int_operand" "n")))
11654 (clobber (reg:CC CC_REGNUM))]
11655 "TARGET_ARM"
11656 "*
11657 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11658 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
11659 return \"mvnne\\t%0, #0\";
11660 "
11661 [(set_attr "conds" "clob")
11662 (set_attr "length" "8")
11663 (set_attr "type" "multiple")]
11664 )
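
;; For example, extracting bit 3 of r1 as a one-bit signed field gives
;; (illustrative registers):
;;      ands    r0, r1, #8
;;      mvnne   r0, #0          @ result is -1 if the bit was set, else 0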
11665
11666 (define_insn "*not_signextract_onebit"
11667 [(set (match_operand:SI 0 "s_register_operand" "=r")
11668 (not:SI
11669 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11670 (const_int 1)
11671 (match_operand:SI 2 "const_int_operand" "n"))))
11672 (clobber (reg:CC CC_REGNUM))]
11673 "TARGET_ARM"
11674 "*
11675 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11676 output_asm_insn (\"tst\\t%1, %2\", operands);
11677 output_asm_insn (\"mvneq\\t%0, #0\", operands);
11678 return \"movne\\t%0, #0\";
11679 "
11680 [(set_attr "conds" "clob")
11681 (set_attr "length" "12")
11682 (set_attr "type" "multiple")]
11683 )
11684 ;; ??? The above patterns need auditing for Thumb-2
11685
11686 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
11687 ;; expressions. For simplicity, the first register is also in the unspec
11688 ;; part.
11689 ;; To avoid the use of a GNU extension, the length attribute is computed
11690 ;; by the C function arm_attr_length_push_multi.
11691 (define_insn "*push_multi"
11692 [(match_parallel 2 "multi_register_push"
11693 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
11694 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
11695 UNSPEC_PUSH_MULT))])]
11696 ""
11697 "*
11698 {
11699 int num_saves = XVECLEN (operands[2], 0);
11700
11701 /* For the StrongARM at least it is faster to
11702 use STR to store only a single register.
11703 In Thumb mode always use push, and the assembler will pick
11704 something appropriate. */
11705 if (num_saves == 1 && TARGET_ARM)
11706 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
11707 else
11708 {
11709 int i;
11710 char pattern[100];
11711
11712 if (TARGET_32BIT)
11713 strcpy (pattern, \"push%?\\t{%1\");
11714 else
11715 strcpy (pattern, \"push\\t{%1\");
11716
11717 for (i = 1; i < num_saves; i++)
11718 {
11719 strcat (pattern, \", %|\");
11720 strcat (pattern,
11721 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
11722 }
11723
11724 strcat (pattern, \"}\");
11725 output_asm_insn (pattern, operands);
11726 }
11727
11728 return \"\";
11729 }"
11730 [(set_attr "type" "store_16")
11731 (set (attr "length")
11732 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
11733 )
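
;; For instance, an ARM-state prologue saving r4, r5 and lr would typically be
;; emitted as
;;      push    {r4, r5, lr}
;; while a single-register save uses "str rN, [sp, #-4]!" instead, as noted
;; above for StrongARM-class cores.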
11734
11735 (define_insn "stack_tie"
11736 [(set (mem:BLK (scratch))
11737 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
11738 (match_operand:SI 1 "s_register_operand" "rk")]
11739 UNSPEC_PRLG_STK))]
11740 ""
11741 ""
11742 [(set_attr "length" "0")
11743 (set_attr "type" "block")]
11744 )
11745
11746 ;; Pop (as used in epilogue RTL)
11747 ;;
11748 (define_insn "*load_multiple_with_writeback"
11749 [(match_parallel 0 "load_multiple_operation"
11750 [(set (match_operand:SI 1 "s_register_operand" "+rk")
11751 (plus:SI (match_dup 1)
11752 (match_operand:SI 2 "const_int_I_operand" "I")))
11753 (set (match_operand:SI 3 "s_register_operand" "=rk")
11754 (mem:SI (match_dup 1)))
11755 ])]
11756 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11757 "*
11758 {
11759 arm_output_multireg_pop (operands, /*return_pc=*/false,
11760 /*cond=*/const_true_rtx,
11761 /*reverse=*/false,
11762 /*update=*/true);
11763 return \"\";
11764 }
11765 "
11766 [(set_attr "type" "load_16")
11767 (set_attr "predicable" "yes")
11768 (set (attr "length")
11769 (symbol_ref "arm_attr_length_pop_multi (operands,
11770 /*return_pc=*/false,
11771 /*write_back_p=*/true)"))]
11772 )
11773
11774 ;; Pop with return (as used in epilogue RTL)
11775 ;;
11776 ;; This instruction is generated when the registers are popped at the end
11777 ;; of the epilogue.  Here, instead of popping the value into LR and then
11778 ;; emitting a jump to LR, the value is popped directly into PC.  Hence, the
11779 ;; pattern is combined with (return).
11780 (define_insn "*pop_multiple_with_writeback_and_return"
11781 [(match_parallel 0 "pop_multiple_return"
11782 [(return)
11783 (set (match_operand:SI 1 "s_register_operand" "+rk")
11784 (plus:SI (match_dup 1)
11785 (match_operand:SI 2 "const_int_I_operand" "I")))
11786 (set (match_operand:SI 3 "s_register_operand" "=rk")
11787 (mem:SI (match_dup 1)))
11788 ])]
11789 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11790 "*
11791 {
11792 arm_output_multireg_pop (operands, /*return_pc=*/true,
11793 /*cond=*/const_true_rtx,
11794 /*reverse=*/false,
11795 /*update=*/true);
11796 return \"\";
11797 }
11798 "
11799 [(set_attr "type" "load_16")
11800 (set_attr "predicable" "yes")
11801 (set (attr "length")
11802 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11803 /*write_back_p=*/true)"))]
11804 )
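
;; A typical instance restores the callee-saved registers and returns in a
;; single instruction, e.g. (illustrative register set):
;;      pop     {r4, r5, pc}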
11805
11806 (define_insn "*pop_multiple_with_return"
11807 [(match_parallel 0 "pop_multiple_return"
11808 [(return)
11809 (set (match_operand:SI 2 "s_register_operand" "=rk")
11810 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11811 ])]
11812 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11813 "*
11814 {
11815 arm_output_multireg_pop (operands, /*return_pc=*/true,
11816 /*cond=*/const_true_rtx,
11817 /*reverse=*/false,
11818 /*update=*/false);
11819 return \"\";
11820 }
11821 "
11822 [(set_attr "type" "load_16")
11823 (set_attr "predicable" "yes")
11824 (set (attr "length")
11825 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11826 /*write_back_p=*/false)"))]
11827 )
11828
11829 ;; Load into PC and return
11830 (define_insn "*ldr_with_return"
11831 [(return)
11832 (set (reg:SI PC_REGNUM)
11833 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
11834 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11835 "ldr%?\t%|pc, [%0], #4"
11836 [(set_attr "type" "load_4")
11837 (set_attr "predicable" "yes")]
11838 )
11839 ;; Pop for floating point registers (as used in epilogue RTL)
11840 (define_insn "*vfp_pop_multiple_with_writeback"
11841 [(match_parallel 0 "pop_multiple_fp"
11842 [(set (match_operand:SI 1 "s_register_operand" "+rk")
11843 (plus:SI (match_dup 1)
11844 (match_operand:SI 2 "const_int_I_operand" "I")))
11845 (set (match_operand:DF 3 "vfp_hard_register_operand" "")
11846 (mem:DF (match_dup 1)))])]
11847 "TARGET_32BIT && TARGET_VFP_BASE"
11848 "*
11849 {
11850 int num_regs = XVECLEN (operands[0], 0);
11851 char pattern[100];
11852 rtx op_list[2];
11853 strcpy (pattern, \"vldm\\t\");
11854 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
11855 strcat (pattern, \"!, {\");
11856 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
11857 strcat (pattern, \"%P0\");
11858 if ((num_regs - 1) > 1)
11859 {
11860 strcat (pattern, \"-%P1\");
11861 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
11862 }
11863
11864 strcat (pattern, \"}\");
11865 output_asm_insn (pattern, op_list);
11866 return \"\";
11867 }
11868 "
11869 [(set_attr "type" "load_16")
11870 (set_attr "conds" "unconditional")
11871 (set_attr "predicable" "no")]
11872 )
11873
11874 ;; Special patterns for dealing with the constant pool
11875
11876 (define_insn "align_4"
11877 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
11878 "TARGET_EITHER"
11879 "*
11880 assemble_align (32);
11881 return \"\";
11882 "
11883 [(set_attr "type" "no_insn")]
11884 )
11885
11886 (define_insn "align_8"
11887 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
11888 "TARGET_EITHER"
11889 "*
11890 assemble_align (64);
11891 return \"\";
11892 "
11893 [(set_attr "type" "no_insn")]
11894 )
11895
11896 (define_insn "consttable_end"
11897 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
11898 "TARGET_EITHER"
11899 "*
11900 making_const_table = FALSE;
11901 return \"\";
11902 "
11903 [(set_attr "type" "no_insn")]
11904 )
11905
11906 (define_insn "consttable_1"
11907 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
11908 "TARGET_EITHER"
11909 "*
11910 making_const_table = TRUE;
11911 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
11912 assemble_zeros (3);
11913 return \"\";
11914 "
11915 [(set_attr "length" "4")
11916 (set_attr "type" "no_insn")]
11917 )
11918
11919 (define_insn "consttable_2"
11920 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
11921 "TARGET_EITHER"
11922 "*
11923 {
11924 rtx x = operands[0];
11925 making_const_table = TRUE;
11926 switch (GET_MODE_CLASS (GET_MODE (x)))
11927 {
11928 case MODE_FLOAT:
11929 arm_emit_fp16_const (x);
11930 break;
11931 default:
11932 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
11933 assemble_zeros (2);
11934 break;
11935 }
11936 return \"\";
11937 }"
11938 [(set_attr "length" "4")
11939 (set_attr "type" "no_insn")]
11940 )
11941
11942 (define_insn "consttable_4"
11943 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
11944 "TARGET_EITHER"
11945 "*
11946 {
11947 rtx x = operands[0];
11948 making_const_table = TRUE;
11949 scalar_float_mode float_mode;
11950 if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
11951 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
11952 else
11953 {
11954 /* XXX: Sometimes gcc does something really dumb and ends up with
11955 a HIGH in a constant pool entry, usually because it's trying to
11956 load into a VFP register. We know this will always be used in
11957 combination with a LO_SUM which ignores the high bits, so just
11958 strip off the HIGH. */
11959 if (GET_CODE (x) == HIGH)
11960 x = XEXP (x, 0);
11961 assemble_integer (x, 4, BITS_PER_WORD, 1);
11962 mark_symbol_refs_as_used (x);
11963 }
11964 return \"\";
11965 }"
11966 [(set_attr "length" "4")
11967 (set_attr "type" "no_insn")]
11968 )
11969
11970 (define_insn "consttable_8"
11971 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
11972 "TARGET_EITHER"
11973 "*
11974 {
11975 making_const_table = TRUE;
11976 scalar_float_mode float_mode;
11977 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
11978 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
11979 float_mode, BITS_PER_WORD);
11980 else
11981 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
11982 return \"\";
11983 }"
11984 [(set_attr "length" "8")
11985 (set_attr "type" "no_insn")]
11986 )
11987
11988 (define_insn "consttable_16"
11989 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
11990 "TARGET_EITHER"
11991 "*
11992 {
11993 making_const_table = TRUE;
11994 scalar_float_mode float_mode;
11995 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
11996 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
11997 float_mode, BITS_PER_WORD);
11998 else
11999 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
12000 return \"\";
12001 }"
12002 [(set_attr "length" "16")
12003 (set_attr "type" "no_insn")]
12004 )
12005
12006 ;; V5 instructions.
12007
12008 (define_insn "clzsi2"
12009 [(set (match_operand:SI 0 "s_register_operand" "=r")
12010 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
12011 "TARGET_32BIT && arm_arch5t"
12012 "clz%?\\t%0, %1"
12013 [(set_attr "predicable" "yes")
12014 (set_attr "type" "clz")])
12015
12016 (define_insn "rbitsi2"
12017 [(set (match_operand:SI 0 "s_register_operand" "=r")
12018 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
12019 "TARGET_32BIT && arm_arch_thumb2"
12020 "rbit%?\\t%0, %1"
12021 [(set_attr "predicable" "yes")
12022 (set_attr "type" "clz")])
12023
12024 ;; Keep this as a CTZ expression until after reload and then split
12025 ;; into RBIT + CLZ. Since RBIT is represented as an UNSPEC it is unlikely
12026 ;; to fold with any other expression.
12027
12028 (define_insn_and_split "ctzsi2"
12029 [(set (match_operand:SI 0 "s_register_operand" "=r")
12030 (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
12031 "TARGET_32BIT && arm_arch_thumb2"
12032 "#"
12033 "&& reload_completed"
12034 [(const_int 0)]
12035 "
12036 emit_insn (gen_rbitsi2 (operands[0], operands[1]));
12037 emit_insn (gen_clzsi2 (operands[0], operands[0]));
12038 DONE;
12039 ")
12040
12041 ;; V5E instructions.
12042
12043 (define_insn "prefetch"
12044 [(prefetch (match_operand:SI 0 "address_operand" "p")
12045 (match_operand:SI 1 "" "")
12046 (match_operand:SI 2 "" ""))]
12047 "TARGET_32BIT && arm_arch5te"
12048 "pld\\t%a0"
12049 [(set_attr "type" "load_4")]
12050 )
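
;; The standard named pattern above is reached from C through GCC's
;; __builtin_prefetch; a minimal use, which may emit PLD on ARMv5TE and later:
;;
;;   void
;;   prefetch_row (const int *p)
;;   {
;;     __builtin_prefetch (p, 0, 3);  /* read prefetch, high temporal locality */
;;   }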
12051
12052 ;; General predication pattern
12053
12054 (define_cond_exec
12055 [(match_operator 0 "arm_comparison_operator"
12056 [(match_operand 1 "cc_register" "")
12057 (const_int 0)])]
12058 "TARGET_32BIT
12059 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
12060 ""
12061 [(set_attr "predicated" "yes")]
12062 )
12063
12064 (define_insn "force_register_use"
12065 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
12066 ""
12067 "%@ %0 needed"
12068 [(set_attr "length" "0")
12069 (set_attr "type" "no_insn")]
12070 )
12071
12072
12073 ;; Patterns for exception handling
12074
12075 (define_expand "eh_return"
12076 [(use (match_operand 0 "general_operand"))]
12077 "TARGET_EITHER"
12078 "
12079 {
12080 if (TARGET_32BIT)
12081 emit_insn (gen_arm_eh_return (operands[0]));
12082 else
12083 emit_insn (gen_thumb_eh_return (operands[0]));
12084 DONE;
12085 }"
12086 )
12087
12088 ;; We can't expand this before we know where the link register is stored.
12089 (define_insn_and_split "arm_eh_return"
12090 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
12091 VUNSPEC_EH_RETURN)
12092 (clobber (match_scratch:SI 1 "=&r"))]
12093 "TARGET_ARM"
12094 "#"
12095 "&& reload_completed"
12096 [(const_int 0)]
12097 "
12098 {
12099 arm_set_return_address (operands[0], operands[1]);
12100 DONE;
12101 }"
12102 )
12103
12104 \f
12105 ;; TLS support
12106
12107 (define_insn "load_tp_hard"
12108 [(set (match_operand:SI 0 "register_operand" "=r")
12109 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
12110 "TARGET_HARD_TP"
12111 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
12112 [(set_attr "predicable" "yes")
12113 (set_attr "type" "mrs")]
12114 )
12115
12116 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
12117 (define_insn "load_tp_soft_fdpic"
12118 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
12119 (clobber (reg:SI FDPIC_REGNUM))
12120 (clobber (reg:SI LR_REGNUM))
12121 (clobber (reg:SI IP_REGNUM))
12122 (clobber (reg:CC CC_REGNUM))]
12123 "TARGET_SOFT_TP && TARGET_FDPIC"
12124 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
12125 [(set_attr "conds" "clob")
12126 (set_attr "type" "branch")]
12127 )
12128
12129 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
12130 (define_insn "load_tp_soft"
12131 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
12132 (clobber (reg:SI LR_REGNUM))
12133 (clobber (reg:SI IP_REGNUM))
12134 (clobber (reg:CC CC_REGNUM))]
12135 "TARGET_SOFT_TP && !TARGET_FDPIC"
12136 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
12137 [(set_attr "conds" "clob")
12138 (set_attr "type" "branch")]
12139 )
12140
12141 ;; TLS descriptor call.
12142 (define_insn "tlscall"
12143 [(set (reg:SI R0_REGNUM)
12144 (unspec:SI [(reg:SI R0_REGNUM)
12145 (match_operand:SI 0 "" "X")
12146 (match_operand 1 "" "")] UNSPEC_TLS))
12147 (clobber (reg:SI R1_REGNUM))
12148 (clobber (reg:SI LR_REGNUM))
12149 (clobber (reg:SI CC_REGNUM))]
12150 "TARGET_GNU2_TLS"
12151 {
12152 targetm.asm_out.internal_label (asm_out_file, "LPIC",
12153 INTVAL (operands[1]));
12154 return "bl\\t%c0(tlscall)";
12155 }
12156 [(set_attr "conds" "clob")
12157 (set_attr "length" "4")
12158 (set_attr "type" "branch")]
12159 )
12160
12161 ;; For thread pointer builtin
12162 (define_expand "get_thread_pointersi"
12163 [(match_operand:SI 0 "s_register_operand")]
12164 ""
12165 "
12166 {
12167 arm_load_tp (operands[0]);
12168 DONE;
12169 }")
12170
12171 ;;
12172
12173 ;; We only care about the lower 16 bits of the constant
12174 ;; being inserted into the upper 16 bits of the register.
12175 (define_insn "*arm_movtas_ze"
12176 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
12177 (const_int 16)
12178 (const_int 16))
12179 (match_operand:SI 1 "const_int_operand" ""))]
12180 "TARGET_HAVE_MOVT"
12181 "@
12182 movt%?\t%0, %L1
12183 movt\t%0, %L1"
12184 [(set_attr "arch" "32,v8mb")
12185 (set_attr "predicable" "yes")
12186 (set_attr "length" "4")
12187 (set_attr "type" "alu_sreg")]
12188 )
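
;; A C-level shape that can be matched by the zero_extract pattern above on
;; targets with MOVT (an assumption about how combine forms the insn, not a
;; guarantee):
;;
;;   unsigned int
;;   set_high_half (unsigned int x)
;;   {
;;     /* Keep the low halfword, replace the upper 16 bits with 0xABCD.  */
;;     return (x & 0x0000FFFFu) | 0xABCD0000u;
;;   }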
12189
12190 (define_insn "*arm_rev"
12191 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12192 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
12193 "arm_arch6"
12194 "@
12195 rev\t%0, %1
12196 rev%?\t%0, %1
12197 rev%?\t%0, %1"
12198 [(set_attr "arch" "t1,t2,32")
12199 (set_attr "length" "2,2,4")
12200 (set_attr "predicable" "no,yes,yes")
12201 (set_attr "type" "rev")]
12202 )
12203
12204 (define_expand "arm_legacy_rev"
12205 [(set (match_operand:SI 2 "s_register_operand")
12206 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
12207 (const_int 16))
12208 (match_dup 1)))
12209 (set (match_dup 2)
12210 (lshiftrt:SI (match_dup 2)
12211 (const_int 8)))
12212 (set (match_operand:SI 3 "s_register_operand")
12213 (rotatert:SI (match_dup 1)
12214 (const_int 8)))
12215 (set (match_dup 2)
12216 (and:SI (match_dup 2)
12217 (const_int -65281)))
12218 (set (match_operand:SI 0 "s_register_operand")
12219 (xor:SI (match_dup 3)
12220 (match_dup 2)))]
12221 "TARGET_32BIT"
12222 ""
12223 )
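
;; The same dataflow as arm_legacy_rev, written as plain C for reference
;; (0xFFFF00FF is the const_int -65281 used above):
;;
;;   #include <assert.h>
;;   #include <stdint.h>
;;
;;   static uint32_t
;;   ror32 (uint32_t x, unsigned n)
;;   {
;;     return (x >> n) | (x << (32 - n));
;;   }
;;
;;   static uint32_t
;;   legacy_rev (uint32_t x)
;;   {
;;     uint32_t t = x ^ ror32 (x, 16);  /* pair each byte with the one two
;;                                         positions away */
;;     t >>= 8;
;;     uint32_t r = ror32 (x, 8);
;;     t &= 0xFFFF00FFu;
;;     return r ^ t;
;;   }
;;
;;   int
;;   main (void)
;;   {
;;     assert (legacy_rev (0x12345678u) == __builtin_bswap32 (0x12345678u));
;;     return 0;
;;   }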
12224
12225 ;; Reuse temporaries to keep register pressure down.
12226 (define_expand "thumb_legacy_rev"
12227 [(set (match_operand:SI 2 "s_register_operand")
12228 (ashift:SI (match_operand:SI 1 "s_register_operand")
12229 (const_int 24)))
12230 (set (match_operand:SI 3 "s_register_operand")
12231 (lshiftrt:SI (match_dup 1)
12232 (const_int 24)))
12233 (set (match_dup 3)
12234 (ior:SI (match_dup 3)
12235 (match_dup 2)))
12236 (set (match_operand:SI 4 "s_register_operand")
12237 (const_int 16))
12238 (set (match_operand:SI 5 "s_register_operand")
12239 (rotatert:SI (match_dup 1)
12240 (match_dup 4)))
12241 (set (match_dup 2)
12242 (ashift:SI (match_dup 5)
12243 (const_int 24)))
12244 (set (match_dup 5)
12245 (lshiftrt:SI (match_dup 5)
12246 (const_int 24)))
12247 (set (match_dup 5)
12248 (ior:SI (match_dup 5)
12249 (match_dup 2)))
12250 (set (match_dup 5)
12251 (rotatert:SI (match_dup 5)
12252 (match_dup 4)))
12253 (set (match_operand:SI 0 "s_register_operand")
12254 (ior:SI (match_dup 5)
12255 (match_dup 3)))]
12256 "TARGET_THUMB"
12257 ""
12258 )
12259
12260 ;; ARM-specific expansion of signed mod by power of 2
12261 ;; using conditional negate.
12262 ;; For r0 % n where n is a power of 2 produce:
12263 ;; rsbs r1, r0, #0
12264 ;; and r0, r0, #(n - 1)
12265 ;; and r1, r1, #(n - 1)
12266 ;; rsbpl r0, r1, #0
12267
12268 (define_expand "modsi3"
12269 [(match_operand:SI 0 "register_operand")
12270 (match_operand:SI 1 "register_operand")
12271 (match_operand:SI 2 "const_int_operand")]
12272 "TARGET_32BIT"
12273 {
12274 HOST_WIDE_INT val = INTVAL (operands[2]);
12275
12276 if (val <= 0
12277 || exact_log2 (val) <= 0)
12278 FAIL;
12279
12280 rtx mask = GEN_INT (val - 1);
12281
12282 /* In the special case of r0 % 2 we can do the even shorter:
12283 cmp r0, #0
12284 and r0, r0, #1
12285 rsblt r0, r0, #0. */
12286
12287 if (val == 2)
12288 {
12289 rtx cc_reg = arm_gen_compare_reg (LT,
12290 operands[1], const0_rtx, NULL_RTX);
12291 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
12292 rtx masked = gen_reg_rtx (SImode);
12293
12294 emit_insn (gen_andsi3 (masked, operands[1], mask));
12295 emit_move_insn (operands[0],
12296 gen_rtx_IF_THEN_ELSE (SImode, cond,
12297 gen_rtx_NEG (SImode,
12298 masked),
12299 masked));
12300 DONE;
12301 }
12302
12303 rtx neg_op = gen_reg_rtx (SImode);
12304 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
12305 operands[1]));
12306
12307 /* Extract the condition register and mode. */
12308 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
12309 rtx cc_reg = SET_DEST (cmp);
12310 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
12311
12312 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
12313
12314 rtx masked_neg = gen_reg_rtx (SImode);
12315 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
12316
12317 /* We want a conditional negate here, but emitting COND_EXEC rtxes
12318 during expand does not always work. Do an IF_THEN_ELSE instead. */
12319 emit_move_insn (operands[0],
12320 gen_rtx_IF_THEN_ELSE (SImode, cond,
12321 gen_rtx_NEG (SImode, masked_neg),
12322 operands[0]));
12323
12324
12325 DONE;
12326 }
12327 )
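
;; A C model of the conditional-negate sequence in the comment above, useful
;; for checking that it matches C's truncated-division semantics for '%':
;;
;;   #include <assert.h>
;;
;;   static int
;;   mod_pow2 (int x, int n)          /* n must be a positive power of 2 */
;;   {
;;     int neg = -x;                  /* rsbs  r1, r0, #0 */
;;     int lo = x & (n - 1);          /* and   r0, r0, #(n - 1) */
;;     int nlo = neg & (n - 1);       /* and   r1, r1, #(n - 1) */
;;     return neg >= 0 ? -nlo : lo;   /* rsbpl r0, r1, #0 */
;;   }
;;
;;   int
;;   main (void)
;;   {
;;     assert (mod_pow2 (7, 4) == 7 % 4);
;;     assert (mod_pow2 (-7, 4) == -7 % 4);
;;     assert (mod_pow2 (-8, 4) == -8 % 4);
;;     return 0;
;;   }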
12328
12329 (define_expand "bswapsi2"
12330 [(set (match_operand:SI 0 "s_register_operand")
12331 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
12332 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
12333 "
12334 if (!arm_arch6)
12335 {
12336 rtx op2 = gen_reg_rtx (SImode);
12337 rtx op3 = gen_reg_rtx (SImode);
12338
12339 if (TARGET_THUMB)
12340 {
12341 rtx op4 = gen_reg_rtx (SImode);
12342 rtx op5 = gen_reg_rtx (SImode);
12343
12344 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
12345 op2, op3, op4, op5));
12346 }
12347 else
12348 {
12349 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
12350 op2, op3));
12351 }
12352
12353 DONE;
12354 }
12355 "
12356 )
12357
12358 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
12359 ;; and unsigned variants, respectively. For rev16, expose
12360 ;; byte-swapping in the lower 16 bits only.
12361 (define_insn "*arm_revsh"
12362 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12363 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
12364 "arm_arch6"
12365 "@
12366 revsh\t%0, %1
12367 revsh%?\t%0, %1
12368 revsh%?\t%0, %1"
12369 [(set_attr "arch" "t1,t2,32")
12370 (set_attr "length" "2,2,4")
12371 (set_attr "type" "rev")]
12372 )
12373
12374 (define_insn "*arm_rev16"
12375 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
12376 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
12377 "arm_arch6"
12378 "@
12379 rev16\t%0, %1
12380 rev16%?\t%0, %1
12381 rev16%?\t%0, %1"
12382 [(set_attr "arch" "t1,t2,32")
12383 (set_attr "length" "2,2,4")
12384 (set_attr "type" "rev")]
12385 )
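
;; From C, GCC's __builtin_bswap16 is the usual way to reach these patterns;
;; whether REVSH or REV16 is chosen depends on how the 16-bit result is
;; extended afterwards (an observation, not a guarantee):
;;
;;   #include <stdint.h>
;;
;;   int32_t
;;   swap_then_sign_extend (int16_t x)        /* REVSH-shaped */
;;   {
;;     return (int16_t) __builtin_bswap16 ((uint16_t) x);
;;   }
;;
;;   uint16_t
;;   swap_halfword (uint16_t x)               /* REV16-shaped */
;;   {
;;     return __builtin_bswap16 (x);
;;   }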
12386
12387 ;; There are no canonicalisation rules for the position of the lshiftrt and
12388 ;; ashift operations within an IOR/AND RTX, so we provide one pattern for
12389 ;; each valid permutation.
12390
12391 (define_insn "arm_rev16si2"
12392 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
12393 (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
12394 (const_int 8))
12395 (match_operand:SI 3 "const_int_operand" "n,n,n"))
12396 (and:SI (lshiftrt:SI (match_dup 1)
12397 (const_int 8))
12398 (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
12399 "arm_arch6
12400 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
12401 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
12402 "rev16\\t%0, %1"
12403 [(set_attr "arch" "t1,t2,32")
12404 (set_attr "length" "2,2,4")
12405 (set_attr "type" "rev")]
12406 )
12407
12408 (define_insn "arm_rev16si2_alt"
12409 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
12410 (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
12411 (const_int 8))
12412 (match_operand:SI 2 "const_int_operand" "n,n,n"))
12413 (and:SI (ashift:SI (match_dup 1)
12414 (const_int 8))
12415 (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
12416 "arm_arch6
12417 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
12418 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
12419 "rev16\\t%0, %1"
12420 [(set_attr "arch" "t1,t2,32")
12421 (set_attr "length" "2,2,4")
12422 (set_attr "type" "rev")]
12423 )
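
;; The open-coded form these two patterns recognise, in C; the masks are the
;; ones the aarch_rev16_*_mask_imm_p predicates accept for SImode:
;;
;;   #include <stdint.h>
;;
;;   uint32_t
;;   rev16_word (uint32_t x)
;;   {
;;     /* Swap the bytes within each halfword.  */
;;     return ((x << 8) & 0xFF00FF00u) | ((x >> 8) & 0x00FF00FFu);
;;   }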
12424
12425 (define_expand "bswaphi2"
12426 [(set (match_operand:HI 0 "s_register_operand")
12427 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
12428 "arm_arch6"
12429 ""
12430 )
12431
12432 ;; Patterns for LDRD/STRD in Thumb2 mode
12433
12434 (define_insn "*thumb2_ldrd"
12435 [(set (match_operand:SI 0 "s_register_operand" "=r")
12436 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12437 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
12438 (set (match_operand:SI 3 "s_register_operand" "=r")
12439 (mem:SI (plus:SI (match_dup 1)
12440 (match_operand:SI 4 "const_int_operand" ""))))]
12441 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12442 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
12443 && (operands_ok_ldrd_strd (operands[0], operands[3],
12444 operands[1], INTVAL (operands[2]),
12445 false, true))"
12446 "ldrd%?\t%0, %3, [%1, %2]"
12447 [(set_attr "type" "load_8")
12448 (set_attr "predicable" "yes")])
12449
12450 (define_insn "*thumb2_ldrd_base"
12451 [(set (match_operand:SI 0 "s_register_operand" "=r")
12452 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12453 (set (match_operand:SI 2 "s_register_operand" "=r")
12454 (mem:SI (plus:SI (match_dup 1)
12455 (const_int 4))))]
12456 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12457 && (operands_ok_ldrd_strd (operands[0], operands[2],
12458 operands[1], 0, false, true))"
12459 "ldrd%?\t%0, %2, [%1]"
12460 [(set_attr "type" "load_8")
12461 (set_attr "predicable" "yes")])
12462
12463 (define_insn "*thumb2_ldrd_base_neg"
12464 [(set (match_operand:SI 0 "s_register_operand" "=r")
12465 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12466 (const_int -4))))
12467 (set (match_operand:SI 2 "s_register_operand" "=r")
12468 (mem:SI (match_dup 1)))]
12469 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12470 && (operands_ok_ldrd_strd (operands[0], operands[2],
12471 operands[1], -4, false, true))"
12472 "ldrd%?\t%0, %2, [%1, #-4]"
12473 [(set_attr "type" "load_8")
12474 (set_attr "predicable" "yes")])
12475
12476 (define_insn "*thumb2_strd"
12477 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12478 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
12479 (match_operand:SI 2 "s_register_operand" "r"))
12480 (set (mem:SI (plus:SI (match_dup 0)
12481 (match_operand:SI 3 "const_int_operand" "")))
12482 (match_operand:SI 4 "s_register_operand" "r"))]
12483 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12484 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
12485 && (operands_ok_ldrd_strd (operands[2], operands[4],
12486 operands[0], INTVAL (operands[1]),
12487 false, false))"
12488 "strd%?\t%2, %4, [%0, %1]"
12489 [(set_attr "type" "store_8")
12490 (set_attr "predicable" "yes")])
12491
12492 (define_insn "*thumb2_strd_base"
12493 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
12494 (match_operand:SI 1 "s_register_operand" "r"))
12495 (set (mem:SI (plus:SI (match_dup 0)
12496 (const_int 4)))
12497 (match_operand:SI 2 "s_register_operand" "r"))]
12498 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12499 && (operands_ok_ldrd_strd (operands[1], operands[2],
12500 operands[0], 0, false, false))"
12501 "strd%?\t%1, %2, [%0]"
12502 [(set_attr "type" "store_8")
12503 (set_attr "predicable" "yes")])
12504
12505 (define_insn "*thumb2_strd_base_neg"
12506 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12507 (const_int -4)))
12508 (match_operand:SI 1 "s_register_operand" "r"))
12509 (set (mem:SI (match_dup 0))
12510 (match_operand:SI 2 "s_register_operand" "r"))]
12511 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12512 && (operands_ok_ldrd_strd (operands[1], operands[2],
12513 operands[0], -4, false, false))"
12514 "strd%?\t%1, %2, [%0, #-4]"
12515 [(set_attr "type" "store_8")
12516 (set_attr "predicable" "yes")])
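
;; A C fragment that the LDRD patterns (together with the peepholes in
;; ldrdstrd.md) can turn into a single doubleword load at -O2 in Thumb-2,
;; register allocation permitting:
;;
;;   int
;;   sum_adjacent (const int *p)
;;   {
;;     return p[0] + p[1];    /* two consecutive word loads -> ldrd */
;;   }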
12517
12518 ;; ARMv8 CRC32 instructions.
12519 (define_insn "arm_<crc_variant>"
12520 [(set (match_operand:SI 0 "s_register_operand" "=r")
12521 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
12522 (match_operand:<crc_mode> 2 "s_register_operand" "r")]
12523 CRC))]
12524 "TARGET_CRC32"
12525 "<crc_variant>\\t%0, %1, %2"
12526 [(set_attr "type" "crc")
12527 (set_attr "conds" "unconditional")]
12528 )
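
;; From C these are reached through the ACLE intrinsics in arm_acle.h,
;; assuming a target with the CRC32 extension (e.g. -march=armv8-a+crc):
;;
;;   #include <arm_acle.h>
;;   #include <stdint.h>
;;
;;   uint32_t
;;   crc_step (uint32_t crc, uint32_t data)
;;   {
;;     return __crc32w (crc, data);
;;   }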
12529
12530 ;; Load the load/store double peephole optimizations.
12531 (include "ldrdstrd.md")
12532
12533 ;; Load the load/store multiple patterns
12534 (include "ldmstm.md")
12535
12536 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
12537 ;; large lists without explicit writeback generated for the APCS_FRAME epilogue.
12538 ;; The operands are validated through the load_multiple_operation
12539 ;; match_parallel predicate rather than through constraints, so enable it only
12540 ;; after reload.
12541 (define_insn "*load_multiple"
12542 [(match_parallel 0 "load_multiple_operation"
12543 [(set (match_operand:SI 2 "s_register_operand" "=rk")
12544 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12545 ])]
12546 "TARGET_32BIT && reload_completed"
12547 "*
12548 {
12549 arm_output_multireg_pop (operands, /*return_pc=*/false,
12550 /*cond=*/const_true_rtx,
12551 /*reverse=*/false,
12552 /*update=*/false);
12553 return \"\";
12554 }
12555 "
12556 [(set_attr "predicable" "yes")]
12557 )
12558
12559 (define_expand "copysignsf3"
12560 [(match_operand:SF 0 "register_operand")
12561 (match_operand:SF 1 "register_operand")
12562 (match_operand:SF 2 "register_operand")]
12563 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
12564 "{
12565 emit_move_insn (operands[0], operands[2]);
12566 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
12567 GEN_INT (31), GEN_INT (0),
12568 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
12569 DONE;
12570 }"
12571 )
12572
12573 (define_expand "copysigndf3"
12574 [(match_operand:DF 0 "register_operand")
12575 (match_operand:DF 1 "register_operand")
12576 (match_operand:DF 2 "register_operand")]
12577 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
12578 "{
12579 rtx op0_low = gen_lowpart (SImode, operands[0]);
12580 rtx op0_high = gen_highpart (SImode, operands[0]);
12581 rtx op1_low = gen_lowpart (SImode, operands[1]);
12582 rtx op1_high = gen_highpart (SImode, operands[1]);
12583 rtx op2_high = gen_highpart (SImode, operands[2]);
12584
12585 rtx scratch1 = gen_reg_rtx (SImode);
12586 rtx scratch2 = gen_reg_rtx (SImode);
12587 emit_move_insn (scratch1, op2_high);
12588 emit_move_insn (scratch2, op1_high);
12589
12590 emit_insn(gen_rtx_SET(scratch1,
12591 gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT(31))));
12592 emit_insn(gen_insv_t2(scratch2, GEN_INT(1), GEN_INT(31), scratch1));
12593 emit_move_insn (op0_low, op1_low);
12594 emit_move_insn (op0_high, scratch2);
12595
12596 DONE;
12597 }"
12598 )
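
;; What the two soft-float expanders above compute, as portable C: copy the
;; sign bit of the second argument into the first (the insv_t2 insn is the
;; bitfield insert that moves the bit):
;;
;;   #include <stdint.h>
;;   #include <string.h>
;;
;;   float
;;   soft_copysignf (float a, float b)
;;   {
;;     uint32_t ua, ub;
;;     memcpy (&ua, &a, sizeof ua);
;;     memcpy (&ub, &b, sizeof ub);
;;     ua = (ua & 0x7FFFFFFFu) | (ub & 0x80000000u);
;;     memcpy (&a, &ua, sizeof a);
;;     return a;
;;   }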
12599
12600 ;; movmisalign patterns for HImode and SImode.
12601 (define_expand "movmisalign<mode>"
12602 [(match_operand:HSI 0 "general_operand")
12603 (match_operand:HSI 1 "general_operand")]
12604 "unaligned_access"
12605 {
12606 /* This pattern is not permitted to fail during expansion: if both arguments
12607 are non-registers (e.g. memory := constant), force operand 1 into a
12608 register. */
12609 rtx (* gen_unaligned_load)(rtx, rtx);
12610 rtx tmp_dest = operands[0];
12611 if (!s_register_operand (operands[0], <MODE>mode)
12612 && !s_register_operand (operands[1], <MODE>mode))
12613 operands[1] = force_reg (<MODE>mode, operands[1]);
12614
12615 if (<MODE>mode == HImode)
12616 {
12617 gen_unaligned_load = gen_unaligned_loadhiu;
12618 tmp_dest = gen_reg_rtx (SImode);
12619 }
12620 else
12621 gen_unaligned_load = gen_unaligned_loadsi;
12622
12623 if (MEM_P (operands[1]))
12624 {
12625 emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
12626 if (<MODE>mode == HImode)
12627 emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
12628 }
12629 else
12630 emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
12631
12632 DONE;
12633 })
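
;; One C idiom that can reach movmisalign<mode> when unaligned accesses are
;; enabled is a word-sized memcpy through a byte pointer (a sketch, not the
;; only route to this expander):
;;
;;   #include <stdint.h>
;;   #include <string.h>
;;
;;   uint32_t
;;   load_word_unaligned (const unsigned char *p)
;;   {
;;     uint32_t v;
;;     memcpy (&v, p, sizeof v);   /* may become a single unaligned LDR */
;;     return v;
;;   }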
12634
12635 (define_insn "arm_<cdp>"
12636 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12637 (match_operand:SI 1 "immediate_operand" "n")
12638 (match_operand:SI 2 "immediate_operand" "n")
12639 (match_operand:SI 3 "immediate_operand" "n")
12640 (match_operand:SI 4 "immediate_operand" "n")
12641 (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
12642 "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
12643 {
12644 arm_const_bounds (operands[0], 0, 16);
12645 arm_const_bounds (operands[1], 0, 16);
12646 arm_const_bounds (operands[2], 0, (1 << 5));
12647 arm_const_bounds (operands[3], 0, (1 << 5));
12648 arm_const_bounds (operands[4], 0, (1 << 5));
12649 arm_const_bounds (operands[5], 0, 8);
12650 return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
12651 }
12652 [(set_attr "length" "4")
12653 (set_attr "type" "coproc")])
12654
12655 (define_insn "*ldc"
12656 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12657 (match_operand:SI 1 "immediate_operand" "n")
12658 (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
12659 "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
12660 {
12661 arm_const_bounds (operands[0], 0, 16);
12662 arm_const_bounds (operands[1], 0, (1 << 5));
12663 return "<ldc>\\tp%c0, CR%c1, %2";
12664 }
12665 [(set_attr "length" "4")
12666 (set_attr "type" "coproc")])
12667
12668 (define_insn "*stc"
12669 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12670 (match_operand:SI 1 "immediate_operand" "n")
12671 (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
12672 "arm_coproc_builtin_available (VUNSPEC_<STC>)"
12673 {
12674 arm_const_bounds (operands[0], 0, 16);
12675 arm_const_bounds (operands[1], 0, (1 << 5));
12676 return "<stc>\\tp%c0, CR%c1, %2";
12677 }
12678 [(set_attr "length" "4")
12679 (set_attr "type" "coproc")])
12680
12681 (define_expand "arm_<ldc>"
12682 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
12683 (match_operand:SI 1 "immediate_operand")
12684 (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
12685 "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
12686
12687 (define_expand "arm_<stc>"
12688 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
12689 (match_operand:SI 1 "immediate_operand")
12690 (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
12691 "arm_coproc_builtin_available (VUNSPEC_<STC>)")
12692
12693 (define_insn "arm_<mcr>"
12694 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12695 (match_operand:SI 1 "immediate_operand" "n")
12696 (match_operand:SI 2 "s_register_operand" "r")
12697 (match_operand:SI 3 "immediate_operand" "n")
12698 (match_operand:SI 4 "immediate_operand" "n")
12699 (match_operand:SI 5 "immediate_operand" "n")] MCRI)
12700 (use (match_dup 2))]
12701 "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
12702 {
12703 arm_const_bounds (operands[0], 0, 16);
12704 arm_const_bounds (operands[1], 0, 8);
12705 arm_const_bounds (operands[3], 0, (1 << 5));
12706 arm_const_bounds (operands[4], 0, (1 << 5));
12707 arm_const_bounds (operands[5], 0, 8);
12708 return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
12709 }
12710 [(set_attr "length" "4")
12711 (set_attr "type" "coproc")])
12712
12713 (define_insn "arm_<mrc>"
12714 [(set (match_operand:SI 0 "s_register_operand" "=r")
12715 (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
12716 (match_operand:SI 2 "immediate_operand" "n")
12717 (match_operand:SI 3 "immediate_operand" "n")
12718 (match_operand:SI 4 "immediate_operand" "n")
12719 (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
12720 "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
12721 {
12722 arm_const_bounds (operands[1], 0, 16);
12723 arm_const_bounds (operands[2], 0, 8);
12724 arm_const_bounds (operands[3], 0, (1 << 5));
12725 arm_const_bounds (operands[4], 0, (1 << 5));
12726 arm_const_bounds (operands[5], 0, 8);
12727 return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
12728 }
12729 [(set_attr "length" "4")
12730 (set_attr "type" "coproc")])
12731
12732 (define_insn "arm_<mcrr>"
12733 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12734 (match_operand:SI 1 "immediate_operand" "n")
12735 (match_operand:DI 2 "s_register_operand" "r")
12736 (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
12737 (use (match_dup 2))]
12738 "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
12739 {
12740 arm_const_bounds (operands[0], 0, 16);
12741 arm_const_bounds (operands[1], 0, 8);
12742 arm_const_bounds (operands[3], 0, (1 << 5));
12743 return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
12744 }
12745 [(set_attr "length" "4")
12746 (set_attr "type" "coproc")])
12747
12748 (define_insn "arm_<mrrc>"
12749 [(set (match_operand:DI 0 "s_register_operand" "=r")
12750 (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
12751 (match_operand:SI 2 "immediate_operand" "n")
12752 (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
12753 "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
12754 {
12755 arm_const_bounds (operands[1], 0, 16);
12756 arm_const_bounds (operands[2], 0, 8);
12757 arm_const_bounds (operands[3], 0, (1 << 5));
12758 return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
12759 }
12760 [(set_attr "length" "4")
12761 (set_attr "type" "coproc")])
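
;; From C these coprocessor patterns are reached through the ACLE intrinsics
;; in arm_acle.h (available when the target advertises __ARM_FEATURE_COPROC);
;; for example, the same CP15 register that load_tp_hard reads:
;;
;;   #include <arm_acle.h>
;;   #include <stdint.h>
;;
;;   uint32_t
;;   read_tpidruro (void)
;;   {
;;     /* mrc p15, 0, rX, c13, c0, 3 */
;;     return __arm_mrc (15, 0, 13, 0, 3);
;;   }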
12762
12763 (define_expand "speculation_barrier"
12764 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
12765 "TARGET_EITHER"
12766 "
12767 /* For Thumb-1 (except Armv8 derivatives) and for pre-Armv7 we don't
12768 have a usable barrier (and probably don't need one in practice).
12769 But to be safe if such code is run on later architectures, call a
12770 helper function in libgcc that will do the right thing for the
12771 running system. */
12772 if (!(arm_arch7 || arm_arch8))
12773 {
12774 arm_emit_speculation_barrier_function ();
12775 DONE;
12776 }
12777 "
12778 )
12779
12780 ;; Generate a hard speculation barrier when we have not enabled speculation
12781 ;; tracking.
12782 (define_insn "*speculation_barrier_insn"
12783 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
12784 "arm_arch7 || arm_arch8"
12785 "isb\;dsb\\tsy"
12786 [(set_attr "type" "block")
12787 (set_attr "length" "8")]
12788 )
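
;; The named speculation_barrier pattern is what GCC's
;; __builtin_speculation_safe_value falls back on when it needs a hard
;; barrier; a minimal use (the bounds check is the part being protected):
;;
;;   int
;;   load_in_bounds (const int *array, unsigned int i, unsigned int bound)
;;   {
;;     if (i < bound)
;;       return array[__builtin_speculation_safe_value (i, 0)];
;;     return 0;
;;   }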
12789
12790 ;; Vector bits common to IWMMXT, Neon and MVE
12791 (include "vec-common.md")
12792 ;; Load the Intel Wireless Multimedia Extension patterns
12793 (include "iwmmxt.md")
12794 ;; Load the VFP co-processor patterns
12795 (include "vfp.md")
12796 ;; Thumb-1 patterns
12797 (include "thumb1.md")
12798 ;; Thumb-2 patterns
12799 (include "thumb2.md")
12800 ;; Neon patterns
12801 (include "neon.md")
12802 ;; Crypto patterns
12803 (include "crypto.md")
12804 ;; Synchronization Primitives
12805 (include "sync.md")
12806 ;; Fixed-point patterns
12807 (include "arm-fixed.md")
12808 ;; M-profile Vector Extension
12809 (include "mve.md")