;; Source: gcc/config/arm/thumb1.md (thirdparty/gcc.git,
;; commit 613cf9ccc30832474a5edf25ceff7247ad2f39f8).
;; ARM Thumb-1 Machine Description
;; Copyright (C) 2007-2020 Free Software Foundation, Inc.
;;
;; This file is part of GCC.
;;
;; GCC is free software; you can redistribute it and/or modify it
;; under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 3, or (at your option)
;; any later version.
;;
;; GCC is distributed in the hope that it will be useful, but
;; WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
;; General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.  */

;;---------------------------------------------------------------------------
;; Insn patterns
;;
;; Beware of splitting Thumb1 patterns that output multiple
;; assembly instructions, in particular instructions such as SBC and
;; ADC which consume flags.  For example, in the pattern thumb_subdi3
;; below, the output SUB implicitly sets the flags (assembled to SUBS)
;; and then the Carry flag is used by SBC to compute the correct
;; result.  If we split thumb_subdi3 pattern into two separate RTL
;; insns (using define_insn_and_split), the scheduler might place
;; other RTL insns between SUB and SBC, possibly modifying the Carry
;; flag used by SBC.  This might happen because most Thumb1 patterns
;; for flag-setting instructions do not have explicit RTL for setting
;; or clobbering the flags.  Instead, they have the attribute "conds"
;; with value "set" or "clob".  However, this attribute is not used to
;; identify dependencies and therefore the scheduler might reorder
;; these instructions.  Currently, this problem cannot happen because
;; there are no separate Thumb1 patterns for individual instructions
;; that consume flags (except conditional execution, which is treated
;; differently).  In particular there is no Thumb1 armv6-m pattern for
;; sbc or adc.


;; Load a SYMBOL_REF into a lo register without a literal pool:
;; build the 32-bit address a byte at a time, most-significant byte
;; first (movs/lsls/adds x3), using the upper/lower relocation
;; operators.  Four data-processing insns + three shifts = 14 bytes.
(define_insn "thumb1_movsi_symbol_ref"
  [(set (match_operand:SI 0 "register_operand" "=l")
	(match_operand:SI 1 "general_operand" ""))
  ]
  "TARGET_THUMB1
   && arm_disable_literal_pool
   && GET_CODE (operands[1]) == SYMBOL_REF"
  "*
  output_asm_insn (\"movs\\t%0, #:upper8_15:%1\", operands);
  output_asm_insn (\"lsls\\t%0, #8\", operands);
  output_asm_insn (\"adds\\t%0, #:upper0_7:%1\", operands);
  output_asm_insn (\"lsls\\t%0, #8\", operands);
  output_asm_insn (\"adds\\t%0, #:lower8_15:%1\", operands);
  output_asm_insn (\"lsls\\t%0, #8\", operands);
  output_asm_insn (\"adds\\t%0, #:lower0_7:%1\", operands);
  return \"\";
  "
  [(set_attr "length" "14")
   (set_attr "conds" "clob")]
)

;; With the literal pool disabled, synthesize any CONST_INT that does
;; not fit an 8-bit immediate ('I') via thumb1_gen_const_int.
(define_split
  [(set (match_operand:SI 0 "register_operand" "")
	(match_operand:SI 1 "immediate_operand" ""))]
  "TARGET_THUMB1
   && arm_disable_literal_pool
   && GET_CODE (operands[1]) == CONST_INT
   && !satisfies_constraint_I (operands[1])"
  [(clobber (const_int 0))]
  "
    thumb1_gen_const_int (operands[0], INTVAL (operands[1]));
    DONE;
  "
)
80
;; 64-bit add: ADDS sets the carry consumed by ADCS, so this stays a
;; single insn (see the note at the top of this file).
(define_insn "*thumb1_adddi3"
  [(set (match_operand:DI 0 "register_operand" "=l")
	(plus:DI (match_operand:DI 1 "register_operand" "%0")
		 (match_operand:DI 2 "register_operand" "l")))
   (clobber (reg:CC CC_REGNUM))
  ]
  "TARGET_THUMB1"
  "adds\\t%Q0, %Q0, %Q2\;adcs\\t%R0, %R0, %R2"
  [(set_attr "length" "4")
   (set_attr "type" "multiple")]
)

;; Changes to the constraints of this pattern must be propagated to those of
;; atomic additions in sync.md and to the logic for bind_old_new in
;; arm_split_atomic_op in arm.c.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
(define_insn_and_split "*thumb1_addsi3"
  [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
	(plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
		 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
  "TARGET_THUMB1"
  "*
   static const char * const asms[] =
   {
     \"adds\\t%0, %0, %2\",
     \"subs\\t%0, %0, #%n2\",
     \"adds\\t%0, %1, %2\",
     \"add\\t%0, %0, %2\",
     \"add\\t%0, %0, %2\",
     \"add\\t%0, %1, %2\",
     \"add\\t%0, %1, %2\",
     \"#\",
     \"#\",
     \"#\"
   };
   /* Negative immediates in the 3-operand alternatives are emitted as
      subtractions of the negated value.  */
   if ((which_alternative == 2 || which_alternative == 6)
       && CONST_INT_P (operands[2])
       && INTVAL (operands[2]) < 0)
     return (which_alternative == 2) ? \"subs\\t%0, %1, #%n2\" : \"sub\\t%0, %1, #%n2\";
   return asms[which_alternative];
  "
  "&& reload_completed && CONST_INT_P (operands[2])
   && ((operands[1] != stack_pointer_rtx
	&& (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
       || (operands[1] == stack_pointer_rtx
	   && INTVAL (operands[2]) > 1020))"
  [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
  {
    /* Split an out-of-range immediate add into two adds: the first
       takes as much of the offset as a single insn allows (1020 for
       SP-relative, otherwise +/-255), the second adds the rest.  */
    HOST_WIDE_INT offset = INTVAL (operands[2]);
    if (operands[1] == stack_pointer_rtx)
      offset -= 1020;
    else
      {
	if (offset > 255)
	  offset = 255;
	else if (offset < -255)
	  offset = -255;
      }
    operands[3] = GEN_INT (offset);
    operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
  }
  [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")
   (set_attr "type" "alus_imm,alus_imm,alus_sreg,alus_sreg,alus_sreg,
		     alus_sreg,alus_sreg,multiple,multiple,multiple")]
)

;; Reloading and elimination of the frame pointer can
;; sometimes cause this optimization to be missed.
(define_peephole2
  [(set (match_operand:SI 0 "arm_general_register_operand" "")
	(match_operand:SI 1 "const_int_operand" ""))
   (set (match_dup 0)
	(plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
  "TARGET_THUMB1
   && UINTVAL (operands[1]) < 1024
   && (UINTVAL (operands[1]) & 3) == 0"
  [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
  ""
)
161
;; 64-bit subtract: SUBS sets the carry consumed by SBCS; kept as one
;; insn so nothing can clobber the flags in between (see file header).
(define_insn "*thumb_subdi3"
  [(set (match_operand:DI 0 "register_operand" "=l")
	(minus:DI (match_operand:DI 1 "register_operand" "0")
		  (match_operand:DI 2 "register_operand" "l")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_THUMB1"
  "subs\\t%Q0, %Q0, %Q2\;sbcs\\t%R0, %R0, %R2"
  [(set_attr "length" "4")
   (set_attr "type" "multiple")]
)

;; Changes to the constraints of this pattern must be propagated to those of
;; atomic subtractions in sync.md and to the logic for bind_old_new in
;; arm_split_atomic_op in arm.c.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
(define_insn "thumb1_subsi3_insn"
  [(set (match_operand:SI 0 "register_operand" "=l")
	(minus:SI (match_operand:SI 1 "register_operand" "l")
		  (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
  "TARGET_THUMB1"
  "subs\\t%0, %1, %2"
  [(set_attr "length" "2")
   (set_attr "conds" "set")
   (set_attr "type" "alus_sreg")]
)
187
;; Unfortunately on Thumb the '&'/'0' trick can fail when operands
;; 1 and 2 are the same, because reload will make operand 0 match
;; operand 1 without realizing that this conflicts with operand 2.  We fix
;; this by adding another alternative to match this case, and then `reload'
;; it ourselves.  This alternative must come first.
(define_insn "*thumb_mulsi3"
  [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
	(mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
		 (match_operand:SI 2 "register_operand" "l,l,l")))]
  "TARGET_THUMB1 && !arm_arch6"
  "@
   movs\\t%0, %1\;muls\\t%0, %2
   mov\\t%0, %1\;muls\\t%0, %2
   muls\\t%0, %2"
  [(set_attr "length" "4,4,2")
   (set_attr "type" "muls")]
)

;; v6 variant: MULS destination can tie to either source, so no copy
;; and no earlyclobber are needed; always a single 2-byte insn.
(define_insn "*thumb_mulsi3_v6"
  [(set (match_operand:SI 0 "register_operand" "=l,l,l")
	(mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
		 (match_operand:SI 2 "register_operand" "l,0,0")))]
  "TARGET_THUMB1 && arm_arch6"
  "@
   muls\\t%0, %2
   muls\\t%0, %1
   muls\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "type" "muls")]
)
218
;; Changes to the constraints of this pattern must be propagated to those of
;; atomic bitwise ANDs and NANDs in sync.md and to the logic for bind_old_new
;; in arm_split_atomic_op in arm.c.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
(define_insn "*thumb1_andsi3_insn"
  [(set (match_operand:SI 0 "register_operand" "=l")
	(and:SI (match_operand:SI 1 "register_operand" "%0")
		(match_operand:SI 2 "register_operand" "l")))]
  "TARGET_THUMB1"
  "ands\\t%0, %2"
  [(set_attr "length" "2")
   (set_attr "type" "logic_imm")
   (set_attr "conds" "set")])

;; Implement zero_extract as a left shift into the scratch followed by
;; a logical right shift; operands 2/3 are rewritten from
;; width/position into the two shift counts.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
	(zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
			 (match_operand:SI 2 "const_int_operand" "")
			 (match_operand:SI 3 "const_int_operand" "")))
   (clobber (match_operand:SI 4 "s_register_operand" ""))]
  "TARGET_THUMB1"
  [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
  "{
     HOST_WIDE_INT temp = INTVAL (operands[2]);

     operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
     operands[3] = GEN_INT (32 - temp);
   }"
)

;; As above for sign_extract, but the second shift is arithmetic and
;; the intermediate can live in operand 0, so no scratch is needed.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
	(sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
			 (match_operand:SI 2 "const_int_operand" "")
			 (match_operand:SI 3 "const_int_operand" "")))]
  "TARGET_THUMB1"
  [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
  "{
     HOST_WIDE_INT temp = INTVAL (operands[2]);

     operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
     operands[3] = GEN_INT (32 - temp);
   }"
)

;; Bit clear: op0 = op2 & ~op1 (note BICS takes the complemented
;; operand second, hence the "0" tie on operand 2).
(define_insn "thumb1_bicsi3"
  [(set (match_operand:SI 0 "register_operand" "=l")
	(and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
		(match_operand:SI 2 "register_operand" "0")))]
  "TARGET_THUMB1"
  "bics\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "conds" "set")
   (set_attr "type" "logics_reg")]
)

;; Changes to the constraints of this pattern must be propagated to those of
;; atomic inclusive ORs in sync.md and to the logic for bind_old_new in
;; arm_split_atomic_op in arm.c.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
(define_insn "*thumb1_iorsi3_insn"
  [(set (match_operand:SI 0 "register_operand" "=l")
	(ior:SI (match_operand:SI 1 "register_operand" "%0")
		(match_operand:SI 2 "register_operand" "l")))]
  "TARGET_THUMB1"
  "orrs\\t%0, %2"
  [(set_attr "length" "2")
   (set_attr "conds" "set")
   (set_attr "type" "logics_reg")])

;; Changes to the constraints of this pattern must be propagated to those of
;; atomic exclusive ORs in sync.md and to the logic for bind_old_new in
;; arm_split_atomic_op in arm.c.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
(define_insn "*thumb1_xorsi3_insn"
  [(set (match_operand:SI 0 "register_operand" "=l")
	(xor:SI (match_operand:SI 1 "register_operand" "%0")
		(match_operand:SI 2 "register_operand" "l")))]
  "TARGET_THUMB1"
  "eors\\t%0, %2"
  [(set_attr "length" "2")
   (set_attr "conds" "set")
   (set_attr "type" "logics_reg")]
)
305
;; Left shift; immediate ('N') or register count, flags set.
(define_insn "*thumb1_ashlsi3"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(ashift:SI (match_operand:SI 1 "register_operand" "l,0")
		   (match_operand:SI 2 "nonmemory_operand" "N,l")))]
  "TARGET_THUMB1"
  "lsls\\t%0, %1, %2"
  [(set_attr "length" "2")
   (set_attr "type" "shift_imm,shift_reg")
   (set_attr "conds" "set")])

;; Arithmetic right shift; immediate or register count.
(define_insn "*thumb1_ashrsi3"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
		     (match_operand:SI 2 "nonmemory_operand" "N,l")))]
  "TARGET_THUMB1"
  "asrs\\t%0, %1, %2"
  [(set_attr "length" "2")
   (set_attr "type" "shift_imm,shift_reg")
   (set_attr "conds" "set")])

;; Logical right shift; immediate or register count.
(define_insn "*thumb1_lshrsi3"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
		     (match_operand:SI 2 "nonmemory_operand" "N,l")))]
  "TARGET_THUMB1"
  "lsrs\\t%0, %1, %2"
  [(set_attr "length" "2")
   (set_attr "type" "shift_imm,shift_reg")
   (set_attr "conds" "set")])

;; Rotate right; Thumb-1 only has the register-count form.
(define_insn "*thumb1_rotrsi3"
  [(set (match_operand:SI 0 "register_operand" "=l")
	(rotatert:SI (match_operand:SI 1 "register_operand" "0")
		     (match_operand:SI 2 "register_operand" "l")))]
  "TARGET_THUMB1"
  "rors\\t%0, %0, %2"
  [(set_attr "type" "shift_reg")
   (set_attr "length" "2")]
)

;; 64-bit negate; the RSBS/SBCS pair depends on the carry, so this
;; stays one insn; earlyclobber because %R0 is written first.
(define_insn "*thumb1_negdi2"
  [(set (match_operand:DI 0 "register_operand" "=&l")
	(neg:DI (match_operand:DI 1 "register_operand" "l")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_THUMB1"
  "movs\\t%R0, #0\;rsbs\\t%Q0, %Q1, #0\;sbcs\\t%R0, %R1"
  [(set_attr "length" "6")
   (set_attr "type" "multiple")]
)

;; 32-bit negate as reverse-subtract from zero.
(define_insn "*thumb1_negsi2"
  [(set (match_operand:SI 0 "register_operand" "=l")
	(neg:SI (match_operand:SI 1 "register_operand" "l")))]
  "TARGET_THUMB1"
  "rsbs\\t%0, %1, #0"
  [(set_attr "length" "2")
   (set_attr "type" "alu_imm")]
)
364
;; abs(x): split after reload into t = x >> 31 (sign mask);
;; d = x + t; d ^= t — three 2-byte insns.
(define_insn_and_split "*thumb1_abssi2"
  [(set (match_operand:SI 0 "s_register_operand" "=l")
	(abs:SI (match_operand:SI 1 "s_register_operand" "l")))
   (clobber (match_scratch:SI 2 "=&l"))]
  "TARGET_THUMB1"
  "#"
  "TARGET_THUMB1 && reload_completed"
  [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
   (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
  ""
  [(set_attr "length" "6")
   (set_attr "type" "multiple")]
)

;; -abs(x): as above with the addition replaced by t - x.
(define_insn_and_split "*thumb1_neg_abssi2"
  [(set (match_operand:SI 0 "s_register_operand" "=l")
	(neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
   (clobber (match_scratch:SI 2 "=&l"))]
  "TARGET_THUMB1"
  "#"
  "TARGET_THUMB1 && reload_completed"
  [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
   (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
   (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
  ""
  [(set_attr "length" "6")
   (set_attr "type" "multiple")]
)

;; Bitwise NOT.
(define_insn "*thumb1_one_cmplsi2"
  [(set (match_operand:SI 0 "register_operand" "=l")
	(not:SI (match_operand:SI 1 "register_operand" "l")))]
  "TARGET_THUMB1"
  "mvns\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "type" "mvn_reg")]
)
403
;; HImode zero extend: UXTH on v6, "#" (split elsewhere) pre-v6 for the
;; register form, LDRH for memory — with a fixup when reload has left
;; SP as the index register of the address.
(define_insn "*thumb1_zero_extendhisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
  "TARGET_THUMB1"
{
  rtx mem;

  if (which_alternative == 0 && arm_arch6)
    return "uxth\t%0, %1";
  if (which_alternative == 0)
    return "#";

  mem = XEXP (operands[1], 0);

  if (GET_CODE (mem) == CONST)
    mem = XEXP (mem, 0);

  if (GET_CODE (mem) == PLUS)
    {
      rtx a = XEXP (mem, 0);

      /* This can happen due to bugs in reload.  Copy SP into the
	 destination register first so LDRH gets a usable base.  */
      if (REG_P (a) && REGNO (a) == SP_REGNUM)
	{
	  rtx ops[2];
	  ops[0] = operands[0];
	  ops[1] = a;

	  output_asm_insn ("mov\t%0, %1", ops);

	  XEXP (mem, 0) = operands[0];
	}
    }

  return "ldrh\t%0, %1";
}
  [(set_attr_alternative "length"
			 [(if_then_else (eq_attr "is_arch6" "yes")
					(const_int 2) (const_int 4))
			  (const_int 4)])
   (set_attr "type" "extend,load_byte")]
)

;; QImode zero extend, pre-v6: the register form is split (two shifts),
;; memory uses LDRB.
(define_insn "*thumb1_zero_extendqisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
  "TARGET_THUMB1 && !arm_arch6"
  "@
   #
   ldrb\\t%0, %1"
  [(set_attr "length" "4,2")
   (set_attr "type" "alu_shift_reg,load_byte")
   (set_attr "pool_range" "*,32")]
)

;; QImode zero extend on v6: a single UXTB or LDRB.
(define_insn "*thumb1_zero_extendqisi2_v6"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
  "TARGET_THUMB1 && arm_arch6"
  "@
   uxtb\\t%0, %1
   ldrb\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "type" "extend,load_byte")]
)
469
;; We used to have an early-clobber on the scratch register here.
;; However, there's a bug somewhere in reload which means that this
;; can be partially ignored during spill allocation if the memory
;; address also needs reloading; this causes us to die later on when
;; we try to verify the operands.  Fortunately, we don't really need
;; the early-clobber: we can always use operand 0 if operand 2
;; overlaps the address.
(define_insn "thumb1_extendhisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
   (clobber (match_scratch:SI 2 "=X,l"))]
  "TARGET_THUMB1"
  "*
  {
    rtx ops[4];
    rtx mem;

    if (which_alternative == 0 && !arm_arch6)
      return \"#\";
    if (which_alternative == 0)
      return \"sxth\\t%0, %1\";

    mem = XEXP (operands[1], 0);

    /* This code used to try to use 'V', and fix the address only if it was
       offsettable, but this fails for e.g. REG+48 because 48 is outside the
       range of QImode offsets, and offsettable_address_p does a QImode
       address check.  */

    if (GET_CODE (mem) == CONST)
      mem = XEXP (mem, 0);

    if (GET_CODE (mem) == LABEL_REF)
      return \"ldr\\t%0, %1\";

    if (GET_CODE (mem) == PLUS)
      {
	rtx a = XEXP (mem, 0);
	rtx b = XEXP (mem, 1);

	if (GET_CODE (a) == LABEL_REF
	    && CONST_INT_P (b))
	  return \"ldr\\t%0, %1\";

	if (REG_P (b))
	  return \"ldrsh\\t%0, %1\";

	ops[1] = a;
	ops[2] = b;
      }
    else
      {
	ops[1] = mem;
	ops[2] = const0_rtx;
      }

    gcc_assert (REG_P (ops[1]));

    ops[0] = operands[0];
    /* LDRSH only takes [reg, reg]; move the offset into the scratch,
       or into operand 0 when the scratch overlaps the address.  */
    if (reg_mentioned_p (operands[2], ops[1]))
      ops[3] = ops[0];
    else
      ops[3] = operands[2];
    output_asm_insn (\"movs\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
    return \"\";
  }"
  [(set_attr_alternative "length"
			 [(if_then_else (eq_attr "is_arch6" "yes")
					(const_int 2) (const_int 4))
			  (const_int 4)])
   (set_attr "type" "extend,load_byte")
   (set_attr "pool_range" "*,1018")]
)
543
;; Split a QImode sign-extending load whose address LDRSB cannot
;; encode (it only takes [reg, reg]): move the offset part into a
;; register, then load via thumb1_extendqisi2.
(define_split
  [(set (match_operand:SI 0 "register_operand" "")
	(sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
  "TARGET_THUMB1 && reload_completed"
  [(set (match_dup 0) (match_dup 2))
   (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
{
  rtx addr = XEXP (operands[1], 0);

  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  if (GET_CODE (addr) == PLUS
      && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
    /* No split necessary.  */
    FAIL;

  if (GET_CODE (addr) == PLUS
      && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
    FAIL;

  if (reg_overlap_mentioned_p (operands[0], addr))
    {
      rtx t = gen_lowpart (QImode, operands[0]);
      emit_move_insn (t, operands[1]);
      emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
      DONE;
    }

  if (REG_P (addr))
    {
      addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
      operands[2] = const0_rtx;
    }
  else if (GET_CODE (addr) != PLUS)
    FAIL;
  else if (REG_P (XEXP (addr, 0)))
    {
      operands[2] = XEXP (addr, 1);
      addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
    }
  else
    {
      operands[2] = XEXP (addr, 0);
      addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
    }

  operands[3] = change_address (operands[1], QImode, addr);
})

;; Recombine a base-register add, a zeroed index register and a
;; sign-extending byte load into the two-insn form the split above
;; produces, when both helper registers die (or are the destination).
(define_peephole2
  [(set (match_operand:SI 0 "register_operand" "")
	(plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
   (set (match_operand:SI 2 "register_operand" "") (const_int 0))
   (set (match_operand:SI 3 "register_operand" "")
	(sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
  "TARGET_THUMB1
   && GET_CODE (XEXP (operands[4], 0)) == PLUS
   && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
   && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
   && (peep2_reg_dead_p (3, operands[0])
       || rtx_equal_p (operands[0], operands[3]))
   && (peep2_reg_dead_p (3, operands[2])
       || rtx_equal_p (operands[2], operands[3]))"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
{
  rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
  operands[4] = change_address (operands[4], QImode, addr);
})

;; QImode sign extend: SXTB on v6; LDRSB only for [reg, reg]
;; addresses; everything else returns "#" and is split later.
(define_insn "thumb1_extendqisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l,l")
	(sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
  "TARGET_THUMB1"
{
  rtx addr;

  if (which_alternative == 0 && arm_arch6)
    return "sxtb\\t%0, %1";
  if (which_alternative == 0)
    return "#";

  addr = XEXP (operands[1], 0);
  if (GET_CODE (addr) == PLUS
      && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
    return "ldrsb\\t%0, %1";

  return "#";
}
  [(set_attr_alternative "length"
			 [(if_then_else (eq_attr "is_arch6" "yes")
					(const_int 2) (const_int 4))
			  (const_int 2)
			  (if_then_else (eq_attr "is_arch6" "yes")
					(const_int 4) (const_int 6))])
   (set_attr "type" "extend,load_byte,load_byte")]
)
642
;;; ??? This should have alternatives for constants.
;;; ??? This was originally identical to the movdf_insn pattern.
;;; ??? The 'i' constraint looks funny, but it should always be replaced by
;;; thumb_reorg with a memory reference.
(define_insn "*thumb1_movdi_insn"
  [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,r,l,>,l, m,*r")
	(match_operand:DI 1 "general_operand"      "l, I,J,j,>,l,mi,l,*r"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], DImode)
       || register_operand (operands[1], DImode))"
  "*
  {
  switch (which_alternative)
    {
    default:
    case 0:
      /* Order the two word moves so the source pair is not
	 overwritten before it is read.  */
      if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
	return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
      return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
    case 1:
      return \"movs\\t%Q0, %1\;movs\\t%R0, #0\";
    case 2:
      operands[1] = GEN_INT (- INTVAL (operands[1]));
      return \"movs\\t%Q0, %1\;rsbs\\t%Q0, %Q0, #0\;asrs\\t%R0, %Q0, #31\";
    case 3:
      gcc_assert (TARGET_HAVE_MOVT);
      /* NOTE(review): was \"movs\\tR0\" (missing '%'), which would
	 always clear the literal register r0 instead of the high word
	 of operand 0.  */
      return \"movw\\t%Q0, %L1\;movs\\t%R0, #0\";
    case 4:
      return \"ldmia\\t%1, {%0, %H0}\";
    case 5:
      return \"stmia\\t%0, {%1, %H1}\";
    case 6:
      return thumb_load_double_from_address (operands);
    case 7:
      operands[2] = gen_rtx_MEM (SImode,
			     plus_constant (Pmode, XEXP (operands[0], 0), 4));
      output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
      return \"\";
    case 8:
      if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
	return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
      return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
    }
  }"
  [(set_attr "length" "4,4,6,6,2,2,6,4,4")
   (set_attr "type" "multiple,multiple,multiple,multiple,load_8,store_8,load_8,store_8,multiple")
   (set_attr "arch" "t1,t1,t1,v8mb,t1,t1,t1,t1,t1")
   (set_attr "pool_range" "*,*,*,*,*,*,1018,*,*")]
)
692
;; SImode moves; the '#' alternatives (J and K constants) are expanded
;; by the splits below.
(define_insn "*thumb1_movsi_insn"
  [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,r,l,l,l,>,l, m,*l*h*k")
	(match_operand:SI 1 "general_operand"      "l, I,j,J,K,>,l,mi,l,*l*h*k"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], SImode)
       || register_operand (operands[1], SImode))"
  "@
   movs %0, %1
   movs %0, %1
   movw %0, %1
   #
   #
   ldmia\\t%1, {%0}
   stmia\\t%0, {%1}
   ldr\\t%0, %1
   str\\t%1, %0
   mov\\t%0, %1"
  [(set_attr "length" "2,2,4,4,4,2,2,2,2,2")
   (set_attr "type" "mov_reg,mov_imm,mov_imm,multiple,multiple,load_4,store_4,load_4,store_4,mov_reg")
   (set_attr "pool_range" "*,*,*,*,*,*,*,1018,*,*")
   (set_attr "arch" "t1,t1,v8mb,t1,t1,t1,t1,t1,t1,t1")
   (set_attr "conds" "set,clob,nocond,*,*,nocond,nocond,nocond,nocond,nocond")])

; Split the load of 64-bit constant into two loads for high and low 32-bit parts respectively
; to see if we can load them in fewer instructions or fewer cycles.
; For the small 64-bit integer constants that satisfy constraint J, the instruction pattern
; thumb1_movdi_insn has a better way to handle them.
(define_split
  [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
	(match_operand:ANY64 1 "immediate_operand" ""))]
  "TARGET_THUMB1 && reload_completed && !satisfies_constraint_J (operands[1])"
  [(set (match_dup 0) (match_dup 1))
   (set (match_dup 2) (match_dup 3))]
  "
  operands[2] = gen_highpart (SImode, operands[0]);
  operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
				   operands[1]);
  operands[0] = gen_lowpart (SImode, operands[0]);
  operands[1] = gen_lowpart (SImode, operands[1]);
  "
)

;; A small negative constant ('J', -255..-1): load the negated value,
;; then negate it back.
(define_split
  [(set (match_operand:SI 0 "register_operand" "")
	(match_operand:SI 1 "const_int_operand" ""))]
  "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 0) (neg:SI (match_dup 2)))]
  "
  {
    operands[1] = GEN_INT (- INTVAL (operands[1]));
    operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
  }"
)

;; A shifted-byte constant ('K'): load the byte, then shift it into
;; place — unless MOVW can do it in one insn.
(define_split
  [(set (match_operand:SI 0 "register_operand" "")
	(match_operand:SI 1 "const_int_operand" ""))]
  "TARGET_THUMB1 && satisfies_constraint_K (operands[1])
   && !(TARGET_HAVE_MOVT && satisfies_constraint_j (operands[1]))"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
  "
  {
    unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
    unsigned HOST_WIDE_INT mask = 0xff;
    int i;

    for (i = 0; i < 25; i++)
      if ((val & (mask << i)) == val)
	break;

    /* Don't split if the shift is zero.  */
    if (i == 0)
      FAIL;

    operands[1] = GEN_INT (val >> i);
    operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
    operands[3] = GEN_INT (i);
  }"
)

;; For thumb1 split imm move [256-510] into mov [1-255] and add #255
(define_split
  [(set (match_operand:SI 0 "register_operand" "")
	(match_operand:SI 1 "const_int_operand" ""))]
  "TARGET_THUMB1 && satisfies_constraint_Pe (operands[1])
   && !(TARGET_HAVE_MOVT && satisfies_constraint_j (operands[1]))"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 0) (plus:SI (match_dup 2) (match_dup 3)))]
  "
  {
    operands[1] = GEN_INT (INTVAL (operands[1]) - 255);
    operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
    operands[3] = GEN_INT (255);
  }"
)
790
;; HImode moves.
(define_insn "*thumb1_movhi_insn"
  [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,l*r,*h,l,r")
	(match_operand:HI 1 "general_operand"       "l,m,l,k*h,*r,I,n"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], HImode)
       || register_operand (operands[1], HImode))"
  "*
  switch (which_alternative)
    {
    case 0: return \"adds	%0, %1, #0\";
    case 2: return \"strh	%1, %0\";
    case 3: return \"mov	%0, %1\";
    case 4: return \"mov	%0, %1\";
    case 5: return \"movs	%0, %1\";
    case 6: gcc_assert (TARGET_HAVE_MOVT);
	    return \"movw	%0, %L1\";
    default: gcc_unreachable ();
    case 1:
      /* The stack pointer can end up being taken as an index register.
	 Catch this case here and deal with it.  */
      if (GET_CODE (XEXP (operands[1], 0)) == PLUS
	  && REG_P (XEXP (XEXP (operands[1], 0), 0))
	  && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
	{
	  rtx ops[2];
	  ops[0] = operands[0];
	  ops[1] = XEXP (XEXP (operands[1], 0), 0);

	  output_asm_insn (\"mov	%0, %1\", ops);

	  XEXP (XEXP (operands[1], 0), 0) = operands[0];

	}
      return \"ldrh	%0, %1\";
    }"
  [(set_attr "length" "2,4,2,2,2,2,4")
   (set_attr "type" "alus_imm,load_4,store_4,mov_reg,mov_reg,mov_imm,mov_imm")
   (set_attr "arch" "t1,t1,t1,t1,t1,t1,v8mb")
   (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob,nocond")])

;; Store a lo register to a strict HImode address; other cases are
;; deliberately unsupported.
(define_expand "thumb_movhi_clobber"
  [(set (match_operand:HI 0 "memory_operand")
	(match_operand:HI 1 "register_operand"))
   (clobber (match_operand:DI 2 "register_operand"))]
  "TARGET_THUMB1"
  "
  if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
      && REGNO (operands[1]) <= LAST_LO_REGNUM)
    {
      emit_insn (gen_movhi (operands[0], operands[1]));
      DONE;
    }
  /* XXX Fixme, need to handle other cases here as well.  */
  gcc_unreachable ();
  "
)

;; QImode moves.
(define_insn "*thumb1_movqi_insn"
  [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,l*r,*h,l")
	(match_operand:QI 1 "general_operand"       "l,m,l,k*h,*r,I"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], QImode)
       || register_operand (operands[1], QImode))"
  "@
   adds\\t%0, %1, #0
   ldrb\\t%0, %1
   strb\\t%1, %0
   mov\\t%0, %1
   mov\\t%0, %1
   movs\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "type" "alu_imm,load_4,store_4,mov_reg,mov_imm,mov_imm")
   (set_attr "pool_range" "*,32,*,*,*,*")
   (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
865
;; HFmode moves; constants (case 2) are materialized with up to three
;; insns building the 16-bit bit pattern.
(define_insn "*thumb1_movhf"
  [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,l,m,*r,*h")
	(match_operand:HF 1 "general_operand"	    "l, m,F,l,*h,*r"))]
  "TARGET_THUMB1
   && (	  s_register_operand (operands[0], HFmode)
       || s_register_operand (operands[1], HFmode))"
  "*
  switch (which_alternative)
    {
    case 0:
      return \"movs\\t%0, %1\";
    case 1:
      {
	rtx addr;
	gcc_assert (MEM_P (operands[1]));
	addr = XEXP (operands[1], 0);
	if (GET_CODE (addr) == LABEL_REF
	    || (GET_CODE (addr) == CONST
		&& GET_CODE (XEXP (addr, 0)) == PLUS
		&& GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
		&& CONST_INT_P (XEXP (XEXP (addr, 0), 1))))
	  {
	    /* Constant pool entry.  */
	    return \"ldr\\t%0, %1\";
	  }
	return \"ldrh\\t%0, %1\";
      }
    case 2:
      {
	int bits;
	int high;
	rtx ops[3];

	bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
			       HFmode);
	ops[0] = operands[0];
	high = (bits >> 8) & 0xff;
	ops[1] = GEN_INT (high);
	ops[2] = GEN_INT (bits & 0xff);
	if (high != 0)
	  output_asm_insn (\"movs\\t%0, %1\;lsls\\t%0, #8\;adds\\t%0, %2\", ops);
	else
	  output_asm_insn (\"movs\\t%0, %2\", ops);

	return \"\";
      }
    case 3: return \"strh\\t%1, %0\";
    default: return \"mov\\t%0, %1\";
    }
  "
  [(set_attr "length" "2,2,6,2,2,2")
   (set_attr "type" "mov_reg,load_4,mov_reg,store_4,mov_reg,mov_reg")
   (set_attr "pool_range" "*,1018,*,*,*,*")
   (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond")])

;;; ??? This should have alternatives for constants.
(define_insn "*thumb1_movsf_insn"
  [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
	(match_operand:SF 1 "general_operand"	    "l, >,l,mF,l,*h,*r"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], SFmode)
       || register_operand (operands[1], SFmode))"
  "@
   adds\\t%0, %1, #0
   ldmia\\t%1, {%0}
   stmia\\t%0, {%1}
   ldr\\t%0, %1
   str\\t%1, %0
   mov\\t%0, %1
   mov\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "type" "alus_imm,load_4,store_4,load_4,store_4,mov_reg,mov_reg")
   (set_attr "pool_range" "*,*,*,1018,*,*,*")
   (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
)
941
942 ;;; ??? This should have alternatives for constants.
943 ;;; ??? This was originally identical to the movdi_insn pattern.
944 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
945 ;;; thumb_reorg with a memory reference.
;; Move a double-precision float (DFmode) value as a register pair.
;; Register-to-register alternatives (0 and 5) order the two word
;; moves so that the first move never clobbers a source word that is
;; still needed for the second.
946 (define_insn "*thumb_movdf_insn"
947 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
948 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
949 "TARGET_THUMB1
950 && ( register_operand (operands[0], DFmode)
951 || register_operand (operands[1], DFmode))"
952 "*
953 switch (which_alternative)
954 {
955 default:
956 case 0:
957 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
958 return \"adds\\t%0, %1, #0\;adds\\t%H0, %H1, #0\";
959 return \"adds\\t%H0, %H1, #0\;adds\\t%0, %1, #0\";
960 case 1:
961 return \"ldmia\\t%1, {%0, %H0}\";
962 case 2:
963 return \"stmia\\t%0, {%1, %H1}\";
964 case 3:
965 return thumb_load_double_from_address (operands);
966 case 4:
;; Store the two words with two STRs; operands[2] addresses the
;; high word at offset 4 from the destination address.
967 operands[2] = gen_rtx_MEM (SImode,
968 plus_constant (Pmode,
969 XEXP (operands[0], 0), 4));
970 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
971 return \"\";
972 case 5:
973 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
974 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
975 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
976 }
977 "
978 [(set_attr "length" "4,2,2,6,4,4")
979 (set_attr "type" "multiple,load_8,store_8,load_8,store_8,multiple")
980 (set_attr "pool_range" "*,*,*,1018,*,*")]
981 )
982 \f
983
984 ;; Thumb block-move insns
985
;; Copy 12 bytes (three SImode words) from *operands[3] to *operands[2],
;; post-incrementing both pointers by 12.  Three lo-reg scratches hold
;; the data in flight; the actual LDMIA/STMIA sequence is emitted by
;; thumb_output_move_mem_multiple.
986 (define_insn "cpymem12b"
987 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
988 (mem:SI (match_operand:SI 3 "register_operand" "1")))
989 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
990 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
991 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
992 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
993 (set (match_operand:SI 0 "register_operand" "=l")
994 (plus:SI (match_dup 2) (const_int 12)))
995 (set (match_operand:SI 1 "register_operand" "=l")
996 (plus:SI (match_dup 3) (const_int 12)))
997 (clobber (match_scratch:SI 4 "=&l"))
998 (clobber (match_scratch:SI 5 "=&l"))
999 (clobber (match_scratch:SI 6 "=&l"))]
1000 "TARGET_THUMB1"
1001 "* return thumb_output_move_mem_multiple (3, operands);"
1002 [(set_attr "length" "4")
1003 ; This isn't entirely accurate...  It loads as well, but in terms of
1004 ; scheduling the following insn it is better to consider it as a store
1005 (set_attr "type" "store_12")]
1006 )
1007
;; Copy 8 bytes (two SImode words) from *operands[3] to *operands[2],
;; post-incrementing both pointers by 8.  Two lo-reg scratches hold the
;; data; the sequence is emitted by thumb_output_move_mem_multiple.
1008 (define_insn "cpymem8b"
1009 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
1010 (mem:SI (match_operand:SI 3 "register_operand" "1")))
1011 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
1012 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
1013 (set (match_operand:SI 0 "register_operand" "=l")
1014 (plus:SI (match_dup 2) (const_int 8)))
1015 (set (match_operand:SI 1 "register_operand" "=l")
1016 (plus:SI (match_dup 3) (const_int 8)))
1017 (clobber (match_scratch:SI 4 "=&l"))
1018 (clobber (match_scratch:SI 5 "=&l"))]
1019 "TARGET_THUMB1"
1020 "* return thumb_output_move_mem_multiple (2, operands);"
1021 [(set_attr "length" "4")
1022 ; This isn't entirely accurate...  It loads as well, but in terms of
1023 ; scheduling the following insn it is better to consider it as a store
1024 (set_attr "type" "store_8")]
1025 )
1026
1027 \f
1028 ;; A pattern to recognize a special situation and optimize for it.
1029 ;; On the thumb, zero-extension from memory is preferrable to sign-extension
1030 ;; due to the available addressing modes. Hence, convert a signed comparison
1031 ;; with zero into an unsigned comparison with 127 if possible.
;; Expand a QImode LT/GE-with-zero branch on a memory operand into a
;; zero-extending load followed by an unsigned SImode compare against
;; 127: GE 0 becomes LEU 127, LT 0 becomes GTU 127 (see the comment
;; above: zero-extension from memory is preferable on Thumb).
1032 (define_expand "cbranchqi4"
1033 [(set (pc) (if_then_else
1034 (match_operator 0 "lt_ge_comparison_operator"
1035 [(match_operand:QI 1 "memory_operand")
1036 (match_operand:QI 2 "const0_operand")])
1037 (label_ref (match_operand 3 "" ""))
1038 (pc)))]
1039 "TARGET_THUMB1"
1040 {
1041 rtx xops[4];
1042 xops[1] = gen_reg_rtx (SImode);
1043 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
1044 xops[2] = GEN_INT (127);
1045 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
1046 VOIDmode, xops[1], xops[2]);
1047 xops[3] = operands[3];
1048 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
1049 DONE;
1050 })
1051
1052 ;; A pattern for the CB(N)Z instruction added in ARMv8-M Baseline profile,
1053 ;; adapted from cbranchsi4_insn. Modifying cbranchsi4_insn instead leads to
1054 ;; code generation difference for ARMv6-M because the minimum length of the
1055 ;; instruction becomes 2 even for ARMv6-M due to a limitation in genattrtab's
1056 ;; handling of PC in the length condition.
;; Compare-with-zero-and-branch.  When the branch target is in CB(N)Z
;; range (length 2) a single CBZ/CBNZ is emitted; otherwise the insn
;; falls back to the cbranchsi4_insn strategy: CMP #0 (reusing a still
;; valid earlier comparison when the cached thumb1_cc_* state matches)
;; followed by a conditional branch, a long jump, or a far jump.
1057 (define_insn "thumb1_cbz"
1058 [(set (pc) (if_then_else
1059 (match_operator 0 "equality_operator"
1060 [(match_operand:SI 1 "s_register_operand" "l")
1061 (const_int 0)])
1062 (label_ref (match_operand 2 "" ""))
1063 (pc)))]
1064 "TARGET_THUMB1 && TARGET_HAVE_CBZ"
1065 {
1066 if (get_attr_length (insn) == 2)
1067 {
1068 if (GET_CODE (operands[0]) == EQ)
1069 return "cbz\t%1, %l2";
1070 else
1071 return "cbnz\t%1, %l2";
1072 }
1073 else
1074 {
;; Check whether the flags set by a previous compare (tracked in
;; cfun->machine->thumb1_cc_*) are still valid for this operand pair
;; and comparison kind; if not, t is reset to NULL_RTX.
1075 rtx t = cfun->machine->thumb1_cc_insn;
1076 if (t != NULL_RTX)
1077 {
1078 if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
1079 || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
1080 t = NULL_RTX;
1081 if (cfun->machine->thumb1_cc_mode == CC_NZmode)
1082 {
1083 if (!nz_comparison_operator (operands[0], VOIDmode))
1084 t = NULL_RTX;
1085 }
1086 else if (cfun->machine->thumb1_cc_mode != CCmode)
1087 t = NULL_RTX;
1088 }
1089 if (t == NULL_RTX)
1090 {
;; No reusable flags: emit the compare and record it for later reuse.
1091 output_asm_insn ("cmp\t%1, #0", operands);
1092 cfun->machine->thumb1_cc_insn = insn;
1093 cfun->machine->thumb1_cc_op0 = operands[1];
1094 cfun->machine->thumb1_cc_op1 = operands[2];
1095 cfun->machine->thumb1_cc_mode = CCmode;
1096 }
1097 else
1098 /* Ensure we emit the right type of condition code on the jump.  */
1099 XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
1100 CC_REGNUM);
1101
1102 switch (get_attr_length (insn))
1103 {
1104 case 4: return "b%d0\t%l2";
1105 case 6: return "b%D0\t.LCB%=;b\t%l2\t%@long jump\n.LCB%=:";
1106 case 8: return "b%D0\t.LCB%=;bl\t%l2\t%@far jump\n.LCB%=:";
1107 default: gcc_unreachable ();
1108 }
1109 }
1110 }
;; Length selection: 2 for CB(N)Z range (forward 2..128 bytes only),
;; 4 for a short conditional branch, 6 for an inverted branch around
;; an unconditional B, 8 for the BL-based far jump.
1111 [(set (attr "far_jump")
1112 (if_then_else
1113 (eq_attr "length" "8")
1114 (const_string "yes")
1115 (const_string "no")))
1116 (set (attr "length")
1117 (if_then_else
1118 (and (ge (minus (match_dup 2) (pc)) (const_int 2))
1119 (le (minus (match_dup 2) (pc)) (const_int 128)))
1120 (const_int 2)
1121 (if_then_else
1122 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
1123 (le (minus (match_dup 2) (pc)) (const_int 256)))
1124 (const_int 4)
1125 (if_then_else
1126 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
1127 (le (minus (match_dup 2) (pc)) (const_int 2048)))
1128 (const_int 6)
1129 (const_int 8)))))
1130 (set (attr "type")
1131 (if_then_else
1132 (eq_attr "length" "2")
1133 (const_string "branch")
1134 (const_string "multiple")))]
1135 )
1136
1137 ;; Changes to the constraints of this pattern must be propagated to those of
1138 ;; atomic compare_and_swap splitters in sync.md. These must be at least as
1139 ;; strict as the constraints here and aim to be as permissive.
;; SImode compare-and-branch.  Emits CMP (unless the flags from a
;; previous compare of the same operands are still valid, per the
;; cached cfun->machine->thumb1_cc_* state) followed by a conditional
;; branch sized by target distance: short branch, inverted branch
;; around an unconditional B, or a BL far jump.
1140 (define_insn "cbranchsi4_insn"
1141 [(set (pc) (if_then_else
1142 (match_operator 0 "arm_comparison_operator"
1143 [(match_operand:SI 1 "s_register_operand" "l,l*h")
1144 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
1145 (label_ref (match_operand 3 "" ""))
1146 (pc)))]
1147 "TARGET_THUMB1"
1148 {
;; Flag-reuse check: invalidate the cached compare if the operands
;; differ, or if the cached CC mode cannot represent this comparison.
1149 rtx t = cfun->machine->thumb1_cc_insn;
1150 if (t != NULL_RTX)
1151 {
1152 if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
1153 || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
1154 t = NULL_RTX;
1155 if (cfun->machine->thumb1_cc_mode == CC_NZmode)
1156 {
1157 if (!nz_comparison_operator (operands[0], VOIDmode))
1158 t = NULL_RTX;
1159 }
1160 else if (cfun->machine->thumb1_cc_mode != CCmode)
1161 t = NULL_RTX;
1162 }
1163 if (t == NULL_RTX)
1164 {
1165 output_asm_insn ("cmp\t%1, %2", operands);
1166 cfun->machine->thumb1_cc_insn = insn;
1167 cfun->machine->thumb1_cc_op0 = operands[1];
1168 cfun->machine->thumb1_cc_op1 = operands[2];
1169 cfun->machine->thumb1_cc_mode = CCmode;
1170 }
1171 else
1172 /* Ensure we emit the right type of condition code on the jump.  */
1173 XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
1174 CC_REGNUM);
1175
1176 switch (get_attr_length (insn))
1177 {
1178 case 4: return \"b%d0\\t%l3\";
1179 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
1180 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
1181 }
1182 }
1183 [(set (attr "far_jump")
1184 (if_then_else
1185 (eq_attr "length" "8")
1186 (const_string "yes")
1187 (const_string "no")))
1188 (set (attr "length")
1189 (if_then_else
1190 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
1191 (le (minus (match_dup 3) (pc)) (const_int 256)))
1192 (const_int 4)
1193 (if_then_else
1194 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
1195 (le (minus (match_dup 3) (pc)) (const_int 2048)))
1196 (const_int 6)
1197 (const_int 8))))
1198 (set_attr "type" "multiple")]
1199 )
1200
1201 ;; Changes to the constraints of this pattern must be propagated to those of
1202 ;; atomic compare_and_swap splitters in sync.md. These must be at least as
1203 ;; strict as the constraints here and aim to be as permissive.
;; Compare-and-branch where operand 2 is a negatable constant
;; (thumb1_cmpneg_operand): the compare is performed as
;; ADDS scratch, %1, #-%2, whose flags feed the conditional branch.
1204 (define_insn "cbranchsi4_scratch"
1205 [(set (pc) (if_then_else
1206 (match_operator 4 "arm_comparison_operator"
1207 [(match_operand:SI 1 "s_register_operand" "l,0")
1208 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
1209 (label_ref (match_operand 3 "" ""))
1210 (pc)))
1211 (clobber (match_scratch:SI 0 "=l,l"))]
1212 "TARGET_THUMB1"
1213 "*
1214 output_asm_insn (\"adds\\t%0, %1, #%n2\", operands);
1215
1216 switch (get_attr_length (insn))
1217 {
1218 case 4: return \"b%d4\\t%l3\";
1219 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
1220 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
1221 }
1222 "
1223 [(set (attr "far_jump")
1224 (if_then_else
1225 (eq_attr "length" "8")
1226 (const_string "yes")
1227 (const_string "no")))
1228 (set (attr "length")
1229 (if_then_else
1230 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
1231 (le (minus (match_dup 3) (pc)) (const_int 256)))
1232 (const_int 4)
1233 (if_then_else
1234 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
1235 (le (minus (match_dup 3) (pc)) (const_int 2048)))
1236 (const_int 6)
1237 (const_int 8))))
1238 (set_attr "type" "multiple")]
1239 )
1240
;; Equality branch comparing a register against the negation of
;; another register: implemented with CMN (compare negative), which
;; computes the flags of %1 + %2, then the sized conditional branch.
1241 (define_insn "*negated_cbranchsi4"
1242 [(set (pc)
1243 (if_then_else
1244 (match_operator 0 "equality_operator"
1245 [(match_operand:SI 1 "s_register_operand" "l")
1246 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
1247 (label_ref (match_operand 3 "" ""))
1248 (pc)))]
1249 "TARGET_THUMB1"
1250 "*
1251 output_asm_insn (\"cmn\\t%1, %2\", operands);
1252 switch (get_attr_length (insn))
1253 {
1254 case 4: return \"b%d0\\t%l3\";
1255 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
1256 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
1257 }
1258 "
1259 [(set (attr "far_jump")
1260 (if_then_else
1261 (eq_attr "length" "8")
1262 (const_string "yes")
1263 (const_string "no")))
1264 (set (attr "length")
1265 (if_then_else
1266 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
1267 (le (minus (match_dup 3) (pc)) (const_int 256)))
1268 (const_int 4)
1269 (if_then_else
1270 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
1271 (le (minus (match_dup 3) (pc)) (const_int 2048)))
1272 (const_int 6)
1273 (const_int 8))))
1274 (set_attr "type" "multiple")]
1275 )
1276
;; Branch on a single bit of a register: shift the tested bit into the
;; sign position with LSLS (shift count 31 - bit position) so the N
;; flag reflects the bit, then branch on the equality condition.
1277 (define_insn "*tbit_cbranch"
1278 [(set (pc)
1279 (if_then_else
1280 (match_operator 0 "equality_operator"
1281 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
1282 (const_int 1)
1283 (match_operand:SI 2 "const_int_operand" "i"))
1284 (const_int 0)])
1285 (label_ref (match_operand 3 "" ""))
1286 (pc)))
1287 (clobber (match_scratch:SI 4 "=l"))]
1288 "TARGET_THUMB1"
1289 "*
1290 {
1291 rtx op[3];
1292 op[0] = operands[4];
1293 op[1] = operands[1];
1294 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
1295
1296 output_asm_insn (\"lsls\\t%0, %1, %2\", op);
1297 switch (get_attr_length (insn))
1298 {
1299 case 4: return \"b%d0\\t%l3\";
1300 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
1301 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
1302 }
1303 }"
1304 [(set (attr "far_jump")
1305 (if_then_else
1306 (eq_attr "length" "8")
1307 (const_string "yes")
1308 (const_string "no")))
1309 (set (attr "length")
1310 (if_then_else
1311 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
1312 (le (minus (match_dup 3) (pc)) (const_int 256)))
1313 (const_int 4)
1314 (if_then_else
1315 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
1316 (le (minus (match_dup 3) (pc)) (const_int 2048)))
1317 (const_int 6)
1318 (const_int 8))))
1319 (set_attr "type" "multiple")]
1320 )
1321
;; Branch on the low N bits of a register (zero_extract at position 0):
;; LSLS by 32 - N discards the upper bits and sets the flags from the
;; remaining low bits, then branch on the equality condition.
1322 (define_insn "*tlobits_cbranch"
1323 [(set (pc)
1324 (if_then_else
1325 (match_operator 0 "equality_operator"
1326 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
1327 (match_operand:SI 2 "const_int_operand" "i")
1328 (const_int 0))
1329 (const_int 0)])
1330 (label_ref (match_operand 3 "" ""))
1331 (pc)))
1332 (clobber (match_scratch:SI 4 "=l"))]
1333 "TARGET_THUMB1"
1334 "*
1335 {
1336 rtx op[3];
1337 op[0] = operands[4];
1338 op[1] = operands[1];
1339 op[2] = GEN_INT (32 - INTVAL (operands[2]));
1340
1341 output_asm_insn (\"lsls\\t%0, %1, %2\", op);
1342 switch (get_attr_length (insn))
1343 {
1344 case 4: return \"b%d0\\t%l3\";
1345 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
1346 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
1347 }
1348 }"
1349 [(set (attr "far_jump")
1350 (if_then_else
1351 (eq_attr "length" "8")
1352 (const_string "yes")
1353 (const_string "no")))
1354 (set (attr "length")
1355 (if_then_else
1356 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
1357 (le (minus (match_dup 3) (pc)) (const_int 256)))
1358 (const_int 4)
1359 (if_then_else
1360 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
1361 (le (minus (match_dup 3) (pc)) (const_int 2048)))
1362 (const_int 6)
1363 (const_int 8))))
1364 (set_attr "type" "multiple")]
1365 )
1366
;; Branch on the AND of two registers compared with zero: TST sets the
;; flags without writing a result, then the sized conditional branch.
1367 (define_insn "*tstsi3_cbranch"
1368 [(set (pc)
1369 (if_then_else
1370 (match_operator 3 "equality_operator"
1371 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
1372 (match_operand:SI 1 "s_register_operand" "l"))
1373 (const_int 0)])
1374 (label_ref (match_operand 2 "" ""))
1375 (pc)))]
1376 "TARGET_THUMB1"
1377 "*
1378 {
1379 output_asm_insn (\"tst\\t%0, %1\", operands);
1380 switch (get_attr_length (insn))
1381 {
1382 case 4: return \"b%d3\\t%l2\";
1383 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
1384 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
1385 }
1386 }"
1387 [(set (attr "far_jump")
1388 (if_then_else
1389 (eq_attr "length" "8")
1390 (const_string "yes")
1391 (const_string "no")))
1392 (set (attr "length")
1393 (if_then_else
1394 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
1395 (le (minus (match_dup 2) (pc)) (const_int 256)))
1396 (const_int 4)
1397 (if_then_else
1398 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
1399 (le (minus (match_dup 2) (pc)) (const_int 2048)))
1400 (const_int 6)
1401 (const_int 8))))
1402 (set_attr "type" "multiple")]
1403 )
1404
;; Combined decrement-and-branch: subtract 1 from operand 2, store the
;; result (to a lo reg, hi reg, or memory depending on the alternative),
;; and branch on the original equality test against zero.  The EQ/NE
;; test on the pre-decrement value is re-expressed as an unsigned
;; GEU/LTU-against-1 test on the flags produced by the SUBS itself.
1405 (define_insn "*cbranchne_decr1"
1406 [(set (pc)
1407 (if_then_else (match_operator 3 "equality_operator"
1408 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
1409 (const_int 0)])
1410 (label_ref (match_operand 4 "" ""))
1411 (pc)))
1412 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
1413 (plus:SI (match_dup 2) (const_int -1)))
1414 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
1415 "TARGET_THUMB1"
1416 "*
1417 {
;; NE on the old value == "old value >= 1" unsigned; EQ == "< 1".
;; These conditions read the C flag set by the SUBS below.
1418 rtx cond[2];
1419 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
1420 ? GEU : LTU),
1421 VOIDmode, operands[2], const1_rtx);
1422 cond[1] = operands[4];
1423
1424 if (which_alternative == 0)
1425 output_asm_insn (\"subs\\t%0, %2, #1\", operands);
1426 else if (which_alternative == 1)
1427 {
1428 /* We must provide an alternative for a hi reg because reload
1429 cannot handle output reloads on a jump instruction, but we
1430 can't subtract into that.  Fortunately a mov from lo to hi
1431 does not clobber the condition codes.  */
1432 output_asm_insn (\"subs\\t%1, %2, #1\", operands);
1433 output_asm_insn (\"mov\\t%0, %1\", operands);
1434 }
1435 else
1436 {
1437 /* Similarly, but the target is memory.  */
1438 output_asm_insn (\"subs\\t%1, %2, #1\", operands);
1439 output_asm_insn (\"str\\t%1, %0\", operands);
1440 }
1441
;; Alternatives 1-3 emit an extra 2-byte insn before the branch, so
;; normalize the length before choosing the branch form.
1442 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
1443 {
1444 case 4:
1445 output_asm_insn (\"b%d0\\t%l1\", cond);
1446 return \"\";
1447 case 6:
1448 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
1449 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
1450 default:
1451 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
1452 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
1453 }
1454 }
1455 "
1456 [(set (attr "far_jump")
1457 (if_then_else
1458 (ior (and (eq (symbol_ref ("which_alternative"))
1459 (const_int 0))
1460 (eq_attr "length" "8"))
1461 (eq_attr "length" "10"))
1462 (const_string "yes")
1463 (const_string "no")))
;; Per-alternative lengths: alternatives 1-3 are 2 bytes longer than
;; alternative 0 at each distance tier because of the extra MOV/STR.
1464 (set_attr_alternative "length"
1465 [
1466 ;; Alternative 0
1467 (if_then_else
1468 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
1469 (le (minus (match_dup 4) (pc)) (const_int 256)))
1470 (const_int 4)
1471 (if_then_else
1472 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
1473 (le (minus (match_dup 4) (pc)) (const_int 2048)))
1474 (const_int 6)
1475 (const_int 8)))
1476 ;; Alternative 1
1477 (if_then_else
1478 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
1479 (le (minus (match_dup 4) (pc)) (const_int 256)))
1480 (const_int 6)
1481 (if_then_else
1482 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
1483 (le (minus (match_dup 4) (pc)) (const_int 2048)))
1484 (const_int 8)
1485 (const_int 10)))
1486 ;; Alternative 2
1487 (if_then_else
1488 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
1489 (le (minus (match_dup 4) (pc)) (const_int 256)))
1490 (const_int 6)
1491 (if_then_else
1492 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
1493 (le (minus (match_dup 4) (pc)) (const_int 2048)))
1494 (const_int 8)
1495 (const_int 10)))
1496 ;; Alternative 3
1497 (if_then_else
1498 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
1499 (le (minus (match_dup 4) (pc)) (const_int 256)))
1500 (const_int 6)
1501 (if_then_else
1502 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
1503 (le (minus (match_dup 4) (pc)) (const_int 2048)))
1504 (const_int 8)
1505 (const_int 10)))])
1506 (set_attr "type" "multiple")]
1507 )
1508
;; Combined add-and-branch: compute operand 2 + operand 3 (ADDS, or
;; SUBS of the negated constant), store the sum to a lo reg, hi reg,
;; or memory depending on the alternative, and branch on the EQ/NE/
;; GE/LT comparison of the sum with zero using the flags from the
;; ADDS/SUBS.
1509 (define_insn "*addsi3_cbranch"
1510 [(set (pc)
1511 (if_then_else
1512 (match_operator 4 "arm_comparison_operator"
1513 [(plus:SI
1514 (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
1515 (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
1516 (const_int 0)])
1517 (label_ref (match_operand 5 "" ""))
1518 (pc)))
1519 (set
1520 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
1521 (plus:SI (match_dup 2) (match_dup 3)))
1522 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
1523 "TARGET_THUMB1
1524 && (GET_CODE (operands[4]) == EQ
1525 || GET_CODE (operands[4]) == NE
1526 || GET_CODE (operands[4]) == GE
1527 || GET_CODE (operands[4]) == LT)"
1528 "*
1529 {
;; Alternatives 0-1 add directly into the destination; alternatives
;; 2-5 add into the scratch and then MOV/STR it to the destination.
1530 rtx cond[3];
1531
1532 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
1533 cond[1] = operands[2];
1534 cond[2] = operands[3];
1535
1536 if (CONST_INT_P (cond[2]) && INTVAL (cond[2]) < 0)
1537 output_asm_insn (\"subs\\t%0, %1, #%n2\", cond);
1538 else
1539 output_asm_insn (\"adds\\t%0, %1, %2\", cond);
1540
1541 if (which_alternative >= 2
1542 && which_alternative < 4)
1543 output_asm_insn (\"mov\\t%0, %1\", operands);
1544 else if (which_alternative >= 4)
1545 output_asm_insn (\"str\\t%1, %0\", operands);
1546
;; Alternatives >= 2 carry a 2-byte extra insn; normalize the length
;; before selecting the branch form.
1547 switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
1548 {
1549 case 4:
1550 return \"b%d4\\t%l5\";
1551 case 6:
1552 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
1553 default:
1554 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
1555 }
1556 }
1557 "
1558 [(set (attr "far_jump")
1559 (if_then_else
1560 (ior (and (lt (symbol_ref ("which_alternative"))
1561 (const_int 2))
1562 (eq_attr "length" "8"))
1563 (eq_attr "length" "10"))
1564 (const_string "yes")
1565 (const_string "no")))
1566 (set (attr "length")
1567 (if_then_else
1568 (lt (symbol_ref ("which_alternative"))
1569 (const_int 2))
1570 (if_then_else
1571 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
1572 (le (minus (match_dup 5) (pc)) (const_int 256)))
1573 (const_int 4)
1574 (if_then_else
1575 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
1576 (le (minus (match_dup 5) (pc)) (const_int 2048)))
1577 (const_int 6)
1578 (const_int 8)))
1579 (if_then_else
1580 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
1581 (le (minus (match_dup 5) (pc)) (const_int 256)))
1582 (const_int 6)
1583 (if_then_else
1584 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
1585 (le (minus (match_dup 5) (pc)) (const_int 2048)))
1586 (const_int 8)
1587 (const_int 10)))))
1588 (set_attr "type" "multiple")]
1589 )
1590
;; Branch on the sum of a register and a register-or-constant compared
;; with zero, discarding the sum.  Per alternative: CMP against the
;; negated constant, CMN against a register, ADDS/SUBS into a scratch,
;; or ADDS/SUBS in place (operand tied to the scratch).
1591 (define_insn "*addsi3_cbranch_scratch"
1592 [(set (pc)
1593 (if_then_else
1594 (match_operator 3 "arm_comparison_operator"
1595 [(plus:SI
1596 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
1597 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
1598 (const_int 0)])
1599 (label_ref (match_operand 4 "" ""))
1600 (pc)))
1601 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
1602 "TARGET_THUMB1
1603 && (GET_CODE (operands[3]) == EQ
1604 || GET_CODE (operands[3]) == NE
1605 || GET_CODE (operands[3]) == GE
1606 || GET_CODE (operands[3]) == LT)"
1607 "*
1608 {
1609 switch (which_alternative)
1610 {
1611 case 0:
1612 output_asm_insn (\"cmp\t%1, #%n2\", operands);
1613 break;
1614 case 1:
1615 output_asm_insn (\"cmn\t%1, %2\", operands);
1616 break;
1617 case 2:
1618 if (INTVAL (operands[2]) < 0)
1619 output_asm_insn (\"subs\t%0, %1, %2\", operands);
1620 else
1621 output_asm_insn (\"adds\t%0, %1, %2\", operands);
1622 break;
1623 case 3:
1624 if (INTVAL (operands[2]) < 0)
1625 output_asm_insn (\"subs\t%0, %0, %2\", operands);
1626 else
1627 output_asm_insn (\"adds\t%0, %0, %2\", operands);
1628 break;
1629 }
1630
1631 switch (get_attr_length (insn))
1632 {
1633 case 4:
1634 return \"b%d3\\t%l4\";
1635 case 6:
1636 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
1637 default:
1638 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
1639 }
1640 }
1641 "
1642 [(set (attr "far_jump")
1643 (if_then_else
1644 (eq_attr "length" "8")
1645 (const_string "yes")
1646 (const_string "no")))
1647 (set (attr "length")
1648 (if_then_else
1649 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
1650 (le (minus (match_dup 4) (pc)) (const_int 256)))
1651 (const_int 4)
1652 (if_then_else
1653 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
1654 (le (minus (match_dup 4) (pc)) (const_int 2048)))
1655 (const_int 6)
1656 (const_int 8))))
1657 (set_attr "type" "multiple")]
1658 )
1659
;; Compare a DImode register pair with zero: OR the two words into a
;; scratch so the Z flag is set iff the whole 64-bit value is zero.
1660 (define_insn "*thumb_cmpdi_zero"
1661 [(set (reg:CC_Z CC_REGNUM)
1662 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
1663 (const_int 0)))
1664 (clobber (match_scratch:SI 1 "=l"))]
1665 "TARGET_THUMB1"
1666 "orrs\\t%1, %Q0, %R0"
1667 [(set_attr "conds" "set")
1668 (set_attr "length" "2")
1669 (set_attr "type" "logics_reg")]
1670 )
1671
;; Expand "store 1 if operand 1 == 0, else 0" by allocating the SImode
;; scratch that *cstoresi_eq0_thumb1_insn may clobber.
1672 (define_expand "cstoresi_eq0_thumb1"
1673 [(parallel
1674 [(set (match_operand:SI 0 "s_register_operand")
1675 (eq:SI (match_operand:SI 1 "s_register_operand")
1676 (const_int 0)))
1677 (clobber (match_dup:SI 2))])]
1678 "TARGET_THUMB1"
1679 "operands[2] = gen_reg_rtx (SImode);"
1680 )
1681
;; Expand "store 1 if operand 1 != 0, else 0" by allocating the SImode
;; scratch that *cstoresi_ne0_thumb1_insn clobbers.
1682 (define_expand "cstoresi_ne0_thumb1"
1683 [(parallel
1684 [(set (match_operand:SI 0 "s_register_operand")
1685 (ne:SI (match_operand:SI 1 "s_register_operand")
1686 (const_int 0)))
1687 (clobber (match_dup:SI 2))])]
1688 "TARGET_THUMB1"
1689 "operands[2] = gen_reg_rtx (SImode);"
1690 )
1691
;; Branch-free (x == 0) as RSBS + ADCS: the carry produced by negating
;; x is 1 exactly when x is zero.  The second alternative works when
;; the destination is tied to the source, using the scratch instead.
1692 (define_insn "*cstoresi_eq0_thumb1_insn"
1693 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
1694 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
1695 (const_int 0)))
1696 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
1697 "TARGET_THUMB1"
1698 "@
1699 rsbs\\t%0, %1, #0\;adcs\\t%0, %0, %1
1700 rsbs\\t%2, %1, #0\;adcs\\t%0, %1, %2"
1701 [(set_attr "length" "4")
1702 (set_attr "type" "multiple")]
1703 )
1704
;; Branch-free (x != 0) as SUBS x-1 + SBCS: the borrow from x - 1 is
;; taken exactly when x is zero, leaving 1 in %0 for nonzero x.
1705 (define_insn "*cstoresi_ne0_thumb1_insn"
1706 [(set (match_operand:SI 0 "s_register_operand" "=l")
1707 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
1708 (const_int 0)))
1709 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
1710 "TARGET_THUMB1"
1711 "subs\\t%2, %1, #1\;sbcs\\t%0, %1, %2"
1712 [(set_attr "length" "4")]
1713 )
1714
1715 ;; Used as part of the expansion of thumb ltu and gtu sequences
;; Used as part of the expansion of thumb ltu and gtu sequences:
;; -(op1 <u op2) via CMP + SBCS of a register with itself, which
;; yields 0 or all-ones from the borrow.
1716 (define_insn "cstoresi_nltu_thumb1"
1717 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
1718 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
1719 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
1720 "TARGET_THUMB1"
1721 "cmp\\t%1, %2\;sbcs\\t%0, %0, %0"
1722 [(set_attr "length" "4")
1723 (set_attr "type" "multiple")]
1724 )
1725
;; (op1 <u op2) as 0/1: split into the negated form above (0 or -1 in
;; a fresh pseudo) followed by a negation back to 0/1.
1726 (define_insn_and_split "cstoresi_ltu_thumb1"
1727 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
1728 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
1729 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
1730 "TARGET_THUMB1"
1731 "#"
1732 "TARGET_THUMB1"
1733 [(set (match_dup 3)
1734 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
1735 (set (match_dup 0) (neg:SI (match_dup 3)))]
1736 "operands[3] = gen_reg_rtx (SImode);"
1737 [(set_attr "length" "4")
1738 (set_attr "type" "multiple")]
1739 )
1740
1741 ;; Used as part of the expansion of thumb les sequence.
;; Used as part of the expansion of thumb les sequence:
;; %0 = %1 + %2 + (%3 >=u %4), implemented as CMP (which leaves the
;; carry set exactly when %3 >= %4 unsigned) followed by ADCS.
1742 (define_insn "thumb1_addsi3_addgeu"
1743 [(set (match_operand:SI 0 "s_register_operand" "=l")
1744 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
1745 (match_operand:SI 2 "s_register_operand" "l"))
1746 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
1747 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
1748 "TARGET_THUMB1"
1749 "cmp\\t%3, %4\;adcs\\t%0, %1, %2"
1750 [(set_attr "length" "4")
1751 (set_attr "type" "multiple")]
1752 )
1753
1754 \f
;; Unconditional jump: a 2-byte B when the target is within the
;; -2044..2048 branch range, otherwise a 4-byte BL far jump.
1755 (define_insn "*thumb_jump"
1756 [(set (pc)
1757 (label_ref (match_operand 0 "" "")))]
1758 "TARGET_THUMB1"
1759 "*
1760 if (get_attr_length (insn) == 2)
1761 return \"b\\t%l0\";
1762 return \"bl\\t%l0\\t%@ far jump\";
1763 "
1764 [(set (attr "far_jump")
1765 (if_then_else
1766 (eq_attr "length" "4")
1767 (const_string "yes")
1768 (const_string "no")))
1769 (set (attr "length")
1770 (if_then_else
1771 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
1772 (le (minus (match_dup 0) (pc)) (const_int 2048)))
1773 (const_int 2)
1774 (const_int 4)))
1775 (set_attr "type" "branch")]
1776 )
1777
;; Indirect call through a register on ARMv5T+ (arm_arch5t): BLX is
;; available, so a single 2-byte instruction suffices.
1778 (define_insn "*call_reg_thumb1_v5"
1779 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
1780 (match_operand 1 "" ""))
1781 (use (match_operand 2 "" ""))
1782 (clobber (reg:SI LR_REGNUM))]
1783 "TARGET_THUMB1 && arm_arch5t && !SIBLING_CALL_P (insn)"
1784 "blx\\t%0"
1785 [(set_attr "length" "2")
1786 (set_attr "type" "call")]
1787 )
1788
;; CMSE non-secure indirect call: the target address is pinned in r4
;; and the call goes through the __gnu_cmse_nonsecure_call library
;; veneer, which performs the secure-state transition.
1789 (define_insn "*nonsecure_call_reg_thumb1_v5"
1790 [(call (unspec:SI [(mem:SI (reg:SI R4_REGNUM))]
1791 UNSPEC_NONSECURE_MEM)
1792 (match_operand 0 "" ""))
1793 (use (match_operand 1 "" ""))
1794 (clobber (reg:SI LR_REGNUM))]
1795 "TARGET_THUMB1 && use_cmse && !SIBLING_CALL_P (insn)"
1796 "bl\\t__gnu_cmse_nonsecure_call"
1797 [(set_attr "length" "4")
1798 (set_attr "type" "call")]
1799 )
1800
;; Indirect call through a register on pre-ARMv5T targets (no BLX):
;; goes through a call-via-reg helper, or one of the interworking
;; veneers when caller interworking is enabled (the veneer choice
;; depends on which register holds the frame pointer).
1801 (define_insn "*call_reg_thumb1"
1802 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
1803 (match_operand 1 "" ""))
1804 (use (match_operand 2 "" ""))
1805 (clobber (reg:SI LR_REGNUM))]
1806 "TARGET_THUMB1 && !arm_arch5t && !SIBLING_CALL_P (insn)"
1807 "*
1808 {
1809 if (!TARGET_CALLER_INTERWORKING)
1810 return thumb_call_via_reg (operands[0]);
1811 else if (operands[1] == const0_rtx)
1812 return \"bl\\t%__interwork_call_via_%0\";
1813 else if (frame_pointer_needed)
1814 return \"bl\\t%__interwork_r7_call_via_%0\";
1815 else
1816 return \"bl\\t%__interwork_r11_call_via_%0\";
1817 }"
1818 [(set_attr "type" "call")]
1819 )
1820
;; Value-returning indirect call through a register on ARMv5T+: BLX.
1821 (define_insn "*call_value_reg_thumb1_v5"
1822 [(set (match_operand 0 "" "")
1823 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
1824 (match_operand 2 "" "")))
1825 (use (match_operand 3 "" ""))
1826 (clobber (reg:SI LR_REGNUM))]
1827 "TARGET_THUMB1 && arm_arch5t"
1828 "blx\\t%1"
1829 [(set_attr "length" "2")
1830 (set_attr "type" "call")]
1831 )
1832
;; Value-returning CMSE non-secure indirect call: target address in r4,
;; routed through the __gnu_cmse_nonsecure_call veneer.
1833 (define_insn "*nonsecure_call_value_reg_thumb1_v5"
1834 [(set (match_operand 0 "" "")
1835 (call (unspec:SI
1836 [(mem:SI (reg:SI R4_REGNUM))]
1837 UNSPEC_NONSECURE_MEM)
1838 (match_operand 1 "" "")))
1839 (use (match_operand 2 "" ""))
1840 (clobber (reg:SI LR_REGNUM))]
1841 "TARGET_THUMB1 && use_cmse"
1842 "bl\\t__gnu_cmse_nonsecure_call"
1843 [(set_attr "length" "4")
1844 (set_attr "type" "call")]
1845 )
1846
;; Value-returning indirect call through a register on pre-ARMv5T
;; targets: call-via-reg helper, or the interworking veneer selected
;; by the frame-pointer register, mirroring *call_reg_thumb1.
1847 (define_insn "*call_value_reg_thumb1"
1848 [(set (match_operand 0 "" "")
1849 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
1850 (match_operand 2 "" "")))
1851 (use (match_operand 3 "" ""))
1852 (clobber (reg:SI LR_REGNUM))]
1853 "TARGET_THUMB1 && !arm_arch5t"
1854 "*
1855 {
1856 if (!TARGET_CALLER_INTERWORKING)
1857 return thumb_call_via_reg (operands[1]);
1858 else if (operands[2] == const0_rtx)
1859 return \"bl\\t%__interwork_call_via_%1\";
1860 else if (frame_pointer_needed)
1861 return \"bl\\t%__interwork_r7_call_via_%1\";
1862 else
1863 return \"bl\\t%__interwork_r11_call_via_%1\";
1864 }"
1865 [(set_attr "type" "call")]
1866 )
1867
;; Direct call to a symbol that is not subject to long-call handling:
;; a plain BL.
1868 (define_insn "*call_insn"
1869 [(call (mem:SI (match_operand:SI 0 "" ""))
1870 (match_operand:SI 1 "" ""))
1871 (use (match_operand 2 "" ""))
1872 (clobber (reg:SI LR_REGNUM))]
1873 "TARGET_THUMB1
1874 && GET_CODE (operands[0]) == SYMBOL_REF
1875 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
1876 "bl\\t%a0"
1877 [(set_attr "length" "4")
1878 (set_attr "type" "call")]
1879 )
1880
;; Value-returning direct call to a symbol that is not subject to
;; long-call handling: a plain BL.
1881 (define_insn "*call_value_insn"
1882 [(set (match_operand 0 "" "")
1883 (call (mem:SI (match_operand 1 "" ""))
1884 (match_operand 2 "" "")))
1885 (use (match_operand 3 "" ""))
1886 (clobber (reg:SI LR_REGNUM))]
1887 "TARGET_THUMB1
1888 && GET_CODE (operands[1]) == SYMBOL_REF
1889 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
1890 "bl\\t%a1"
1891 [(set_attr "length" "4")
1892 (set_attr "type" "call")]
1893 )
1894
;; PIC casesi expansion: branch to the default label (operand 3) when
;; the index exceeds the bound (unsigned GTU compare), then copy the
;; index into r0 and emit the dispatch insn below.
1895 (define_expand "thumb1_casesi_internal_pic"
1896 [(match_operand:SI 0 "s_register_operand")
1897 (match_operand:SI 1 "thumb1_cmp_operand")
1898 (match_operand 2 "" "")
1899 (match_operand 3 "" "")]
1900 "TARGET_THUMB1"
1901 {
1902 rtx reg0;
1903 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
1904 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
1905 operands[3]));
1906 reg0 = gen_rtx_REG (SImode, 0);
1907 emit_move_insn (reg0, operands[0]);
1908 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
1909 DONE;
1910 }
1911 )
1912
1913 (define_insn "thumb1_casesi_dispatch"
1914 [(parallel [(set (pc) (unspec [(reg:SI 0)
1915 (label_ref (match_operand 0 "" ""))
1916 ;; (label_ref (match_operand 1 "" ""))
1917 ]
1918 UNSPEC_THUMB1_CASESI))
1919 (clobber (reg:SI IP_REGNUM))
1920 (clobber (reg:SI LR_REGNUM))])]
1921 "TARGET_THUMB1"
1922 "* return thumb1_output_casesi(operands);"
1923 [(set_attr "length" "4")
1924 (set_attr "type" "multiple")]
1925 )
1926
1927 ;; NB Never uses BX.
1928 (define_insn "*thumb1_indirect_jump"
1929 [(set (pc)
1930 (match_operand:SI 0 "register_operand" "l*r"))]
1931 "TARGET_THUMB1"
1932 "mov\\tpc, %0"
1933 [(set_attr "conds" "clob")
1934 (set_attr "length" "2")
1935 (set_attr "type" "branch")]
1936 )
1937
1938 \f
1939 (define_insn "prologue_thumb1_interwork"
1940 [(unspec_volatile [(const_int 0)] VUNSPEC_THUMB1_INTERWORK)]
1941 "TARGET_THUMB1"
1942 "* return thumb1_output_interwork ();"
1943 [(set_attr "length" "8")
1944 (set_attr "type" "multiple")]
1945 )
1946
1947 (define_insn "*epilogue_insns"
1948 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
1949 "TARGET_THUMB1"
1950 "*
1951 return thumb1_unexpanded_epilogue ();
1952 "
1953 ; Length is absolute worst case, when using CMSE and if this is an entry
1954 ; function an extra 4 (MSR) bytes will be added.
1955 [(set (attr "length")
1956 (if_then_else
1957 (match_test "IS_CMSE_ENTRY (arm_current_func_type ())")
1958 (const_int 48)
1959 (const_int 44)))
1960 (set_attr "type" "block")
1961 ;; We don't clobber the conditions, but the potential length of this
1962 ;; operation is sufficient to make conditionalizing the sequence
1963 ;; unlikely to be profitable.
1964 (set_attr "conds" "clob")]
1965 )
1966
1967 ;; Miscellaneous Thumb patterns
1968 (define_expand "tablejump"
1969 [(parallel [(set (pc) (match_operand:SI 0 "register_operand"))
1970 (use (label_ref (match_operand 1 "" "")))])]
1971 "TARGET_THUMB1"
1972 "
1973 if (flag_pic)
1974 {
1975 /* Hopefully, CSE will eliminate this copy. */
1976 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
1977 rtx reg2 = gen_reg_rtx (SImode);
1978
1979 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
1980 operands[0] = reg2;
1981 }
1982 "
1983 )
1984
1985 (define_insn "*thumb1_movpc_insn"
1986 [(set (match_operand:SI 0 "s_register_operand" "=l")
1987 (reg:SI PC_REGNUM))]
1988 "TARGET_THUMB1"
1989 "mov\\t%0, pc"
1990 [(set_attr "length" "2")
1991 (set_attr "conds" "nocond")
1992 (set_attr "type" "mov_reg")]
1993 )
1994
1995 ;; NB never uses BX.
1996 (define_insn "*thumb1_tablejump"
1997 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
1998 (use (label_ref (match_operand 1 "" "")))]
1999 "TARGET_THUMB1"
2000 "mov\\t%|pc, %0"
2001 [(set_attr "length" "2")
2002 (set_attr "type" "branch")]
2003 )
2004
2005 (define_insn_and_split "thumb_eh_return"
2006 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
2007 VUNSPEC_EH_RETURN)
2008 (clobber (match_scratch:SI 1 "=&l"))]
2009 "TARGET_THUMB1"
2010 "#"
2011 "&& reload_completed"
2012 [(const_int 0)]
2013 "
2014 {
2015 thumb_set_return_address (operands[0], operands[1]);
2016 DONE;
2017 }"
2018 [(set_attr "type" "mov_reg")]
2019 )
2020
2021 (define_insn "thumb1_stack_protect_test_insn"
2022 [(set (match_operand:SI 0 "register_operand" "=&l")
2023 (unspec:SI [(match_operand:SI 1 "memory_operand" "m")
2024 (mem:SI (match_operand:SI 2 "register_operand" "+l"))]
2025 UNSPEC_SP_TEST))
2026 (clobber (match_dup 2))]
2027 "TARGET_THUMB1"
2028 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0"
2029 [(set_attr "length" "8")
2030 (set_attr "conds" "set")
2031 (set_attr "type" "multiple")]
2032 )
2033 \f