1 ;; ARM Thumb-1 Machine Description
2 ;; Copyright (C) 2007-2020 Free Software Foundation, Inc.
4 ;; This file is part of GCC.
6 ;; GCC is free software; you can redistribute it and/or modify it
7 ;; under the terms of the GNU General Public License as published by
8 ;; the Free Software Foundation; either version 3, or (at your option)
11 ;; GCC is distributed in the hope that it will be useful, but
12 ;; WITHOUT ANY WARRANTY; without even the implied warranty of
13 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 ;; General Public License for more details.
16 ;; You should have received a copy of the GNU General Public License
17 ;; along with GCC; see the file COPYING3. If not see
18 ;; <http://www.gnu.org/licenses/>. */
21 ;;---------------------------------------------------------------------------
25 ;; Beware of splitting Thumb1 patterns that output multiple
26 ;; assembly instructions, in particular instruction such as SBC and
27 ;; ADC which consume flags. For example, in the pattern thumb_subdi3
28 ;; below, the output SUB implicitly sets the flags (assembled to SUBS)
29 ;; and then the Carry flag is used by SBC to compute the correct
30 ;; result. If we split thumb_subdi3 pattern into two separate RTL
31 ;; insns (using define_insn_and_split), the scheduler might place
32 ;; other RTL insns between SUB and SBC, possibly modifying the Carry
33 ;; flag used by SBC. This might happen because most Thumb1 patterns
34 ;; for flag-setting instructions do not have explicit RTL for setting
35 ;; or clobbering the flags. Instead, they have the attribute "conds"
36 ;; with value "set" or "clob". However, this attribute is not used to
37 ;; identify dependencies and therefore the scheduler might reorder
38 ;; these instructions. Currently, this problem cannot happen because
39 ;; there are no separate Thumb1 patterns for individual instructions
40 ;; that consume flags (except conditional execution, which is treated
41 ;; differently). In particular there is no Thumb1 armv6-m pattern for
46 (define_insn "thumb1_movsi_symbol_ref"
47 [(set (match_operand:SI 0 "register_operand" "=l")
48 (match_operand:SI 1 "general_operand" ""))
51 && arm_disable_literal_pool
52 && GET_CODE (operands[1]) == SYMBOL_REF"
54 output_asm_insn (\"movs\\t%0, #:upper8_15:%1\", operands);
55 output_asm_insn (\"lsls\\t%0, #8\", operands);
56 output_asm_insn (\"adds\\t%0, #:upper0_7:%1\", operands);
57 output_asm_insn (\"lsls\\t%0, #8\", operands);
58 output_asm_insn (\"adds\\t%0, #:lower8_15:%1\", operands);
59 output_asm_insn (\"lsls\\t%0, #8\", operands);
60 output_asm_insn (\"adds\\t%0, #:lower0_7:%1\", operands);
63 [(set_attr "length" "14")
64 (set_attr "conds" "clob")]
68 [(set (match_operand:SI 0 "register_operand" "")
69 (match_operand:SI 1 "immediate_operand" ""))]
71 && arm_disable_literal_pool
72 && GET_CODE (operands[1]) == CONST_INT
73 && !satisfies_constraint_I (operands[1])"
74 [(clobber (const_int 0))]
76 thumb1_gen_const_int (operands[0], INTVAL (operands[1]));
81 (define_insn "*thumb1_adddi3"
82 [(set (match_operand:DI 0 "register_operand" "=l")
83 (plus:DI (match_operand:DI 1 "register_operand" "%0")
84 (match_operand:DI 2 "register_operand" "l")))
85 (clobber (reg:CC CC_REGNUM))
88 "adds\\t%Q0, %Q0, %Q2\;adcs\\t%R0, %R0, %R2"
89 [(set_attr "length" "4")
90 (set_attr "type" "multiple")]
93 ;; Changes to the constraints of this pattern must be propagated to those of
94 ;; atomic additions in sync.md and to the logic for bind_old_new in
95 ;; arm_split_atomic_op in arm.c. These must be at least as strict as the
96 ;; constraints here and aim to be as permissive.
97 (define_insn_and_split "*thumb1_addsi3"
98 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
99 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
100 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
103 static const char * const asms[] =
105 \"adds\\t%0, %0, %2\",
106 \"subs\\t%0, %0, #%n2\",
107 \"adds\\t%0, %1, %2\",
108 \"add\\t%0, %0, %2\",
109 \"add\\t%0, %0, %2\",
110 \"add\\t%0, %1, %2\",
111 \"add\\t%0, %1, %2\",
116 if ((which_alternative == 2 || which_alternative == 6)
117 && CONST_INT_P (operands[2])
118 && INTVAL (operands[2]) < 0)
119 return (which_alternative == 2) ? \"subs\\t%0, %1, #%n2\" : \"sub\\t%0, %1, #%n2\";
120 return asms[which_alternative];
122 "&& reload_completed && CONST_INT_P (operands[2])
123 && ((operands[1] != stack_pointer_rtx
124 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
125 || (operands[1] == stack_pointer_rtx
126 && INTVAL (operands[2]) > 1020))"
127 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
128 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
130 HOST_WIDE_INT offset = INTVAL (operands[2]);
131 if (operands[1] == stack_pointer_rtx)
137 else if (offset < -255)
140 operands[3] = GEN_INT (offset);
141 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
143 [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")
144 (set_attr "type" "alus_imm,alus_imm,alus_sreg,alus_sreg,alus_sreg,
145 alus_sreg,alus_sreg,multiple,multiple,multiple")]
148 ;; Reloading and elimination of the frame pointer can
149 ;; sometimes cause this optimization to be missed.
151 [(set (match_operand:SI 0 "arm_general_register_operand" "")
152 (match_operand:SI 1 "const_int_operand" ""))
154 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
156 && UINTVAL (operands[1]) < 1024
157 && (UINTVAL (operands[1]) & 3) == 0"
158 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
162 (define_insn "*thumb_subdi3"
163 [(set (match_operand:DI 0 "register_operand" "=l")
164 (minus:DI (match_operand:DI 1 "register_operand" "0")
165 (match_operand:DI 2 "register_operand" "l")))
166 (clobber (reg:CC CC_REGNUM))]
168 "subs\\t%Q0, %Q0, %Q2\;sbcs\\t%R0, %R0, %R2"
169 [(set_attr "length" "4")
170 (set_attr "type" "multiple")]
173 ;; Changes to the constraints of this pattern must be propagated to those of
174 ;; atomic subtractions in sync.md and to the logic for bind_old_new in
175 ;; arm_split_atomic_op in arm.c. These must be at least as strict as the
176 ;; constraints here and aim to be as permissive.
177 (define_insn "thumb1_subsi3_insn"
178 [(set (match_operand:SI 0 "register_operand" "=l")
179 (minus:SI (match_operand:SI 1 "register_operand" "l")
180 (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
183 [(set_attr "length" "2")
184 (set_attr "conds" "set")
185 (set_attr "type" "alus_sreg")]
188 ;; Unfortunately on Thumb the '&'/'0' trick can fail when operands
189 ;; 1 and 2 are the same, because reload will make operand 0 match
190 ;; operand 1 without realizing that this conflicts with operand 2. We fix
191 ;; this by adding another alternative to match this case, and then `reload'
192 ;; it ourselves. This alternative must come first.
193 (define_insn "*thumb_mulsi3"
194 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
195 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
196 (match_operand:SI 2 "register_operand" "l,l,l")))]
197 "TARGET_THUMB1 && !arm_arch6"
199 movs\\t%0, %1\;muls\\t%0, %2
200 mov\\t%0, %1\;muls\\t%0, %2
202 [(set_attr "length" "4,4,2")
203 (set_attr "type" "muls")]
206 (define_insn "*thumb_mulsi3_v6"
207 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
208 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
209 (match_operand:SI 2 "register_operand" "l,0,0")))]
210 "TARGET_THUMB1 && arm_arch6"
215 [(set_attr "length" "2")
216 (set_attr "type" "muls")]
219 ;; Changes to the constraints of this pattern must be propagated to those of
220 ;; atomic bitwise ANDs and NANDs in sync.md and to the logic for bind_old_new
221 ;; in arm_split_atomic_op in arm.c. These must be at least as strict as the
222 ;; constraints here and aim to be as permissive.
223 (define_insn "*thumb1_andsi3_insn"
224 [(set (match_operand:SI 0 "register_operand" "=l")
225 (and:SI (match_operand:SI 1 "register_operand" "%0")
226 (match_operand:SI 2 "register_operand" "l")))]
229 [(set_attr "length" "2")
230 (set_attr "type" "logic_imm")
231 (set_attr "conds" "set")])
234 [(set (match_operand:SI 0 "s_register_operand" "")
235 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
236 (match_operand:SI 2 "const_int_operand" "")
237 (match_operand:SI 3 "const_int_operand" "")))
238 (clobber (match_operand:SI 4 "s_register_operand" ""))]
240 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
241 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
243 HOST_WIDE_INT temp = INTVAL (operands[2]);
245 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
246 operands[3] = GEN_INT (32 - temp);
251 [(set (match_operand:SI 0 "s_register_operand" "")
252 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
253 (match_operand:SI 2 "const_int_operand" "")
254 (match_operand:SI 3 "const_int_operand" "")))]
256 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
257 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
259 HOST_WIDE_INT temp = INTVAL (operands[2]);
261 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
262 operands[3] = GEN_INT (32 - temp);
266 (define_insn "thumb1_bicsi3"
267 [(set (match_operand:SI 0 "register_operand" "=l")
268 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
269 (match_operand:SI 2 "register_operand" "0")))]
272 [(set_attr "length" "2")
273 (set_attr "conds" "set")
274 (set_attr "type" "logics_reg")]
277 ;; Changes to the constraints of this pattern must be propagated to those of
278 ;; atomic inclusive ORs in sync.md and to the logic for bind_old_new in
279 ;; arm_split_atomic_op in arm.c. These must be at least as strict as the
280 ;; constraints here and aim to be as permissive.
281 (define_insn "*thumb1_iorsi3_insn"
282 [(set (match_operand:SI 0 "register_operand" "=l")
283 (ior:SI (match_operand:SI 1 "register_operand" "%0")
284 (match_operand:SI 2 "register_operand" "l")))]
287 [(set_attr "length" "2")
288 (set_attr "conds" "set")
289 (set_attr "type" "logics_reg")])
291 ;; Changes to the constraints of this pattern must be propagated to those of
292 ;; atomic exclusive ORs in sync.md and to the logic for bind_old_new in
293 ;; arm_split_atomic_op in arm.c. These must be at least as strict as the
294 ;; constraints here and aim to be as permissive.
295 (define_insn "*thumb1_xorsi3_insn"
296 [(set (match_operand:SI 0 "register_operand" "=l")
297 (xor:SI (match_operand:SI 1 "register_operand" "%0")
298 (match_operand:SI 2 "register_operand" "l")))]
301 [(set_attr "length" "2")
302 (set_attr "conds" "set")
303 (set_attr "type" "logics_reg")]
306 (define_insn "*thumb1_ashlsi3"
307 [(set (match_operand:SI 0 "register_operand" "=l,l")
308 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
309 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
312 [(set_attr "length" "2")
313 (set_attr "type" "shift_imm,shift_reg")
314 (set_attr "conds" "set")])
316 (define_insn "*thumb1_ashrsi3"
317 [(set (match_operand:SI 0 "register_operand" "=l,l")
318 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
319 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
322 [(set_attr "length" "2")
323 (set_attr "type" "shift_imm,shift_reg")
324 (set_attr "conds" "set")])
326 (define_insn "*thumb1_lshrsi3"
327 [(set (match_operand:SI 0 "register_operand" "=l,l")
328 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
329 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
332 [(set_attr "length" "2")
333 (set_attr "type" "shift_imm,shift_reg")
334 (set_attr "conds" "set")])
336 (define_insn "*thumb1_rotrsi3"
337 [(set (match_operand:SI 0 "register_operand" "=l")
338 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
339 (match_operand:SI 2 "register_operand" "l")))]
342 [(set_attr "type" "shift_reg")
343 (set_attr "length" "2")]
346 (define_insn "*thumb1_negdi2"
347 [(set (match_operand:DI 0 "register_operand" "=&l")
348 (neg:DI (match_operand:DI 1 "register_operand" "l")))
349 (clobber (reg:CC CC_REGNUM))]
351 "movs\\t%R0, #0\;rsbs\\t%Q0, %Q1, #0\;sbcs\\t%R0, %R1"
352 [(set_attr "length" "6")
353 (set_attr "type" "multiple")]
356 (define_insn "*thumb1_negsi2"
357 [(set (match_operand:SI 0 "register_operand" "=l")
358 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
361 [(set_attr "length" "2")
362 (set_attr "type" "alu_imm")]
365 (define_insn_and_split "*thumb1_abssi2"
366 [(set (match_operand:SI 0 "s_register_operand" "=l")
367 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
368 (clobber (match_scratch:SI 2 "=&l"))]
371 "TARGET_THUMB1 && reload_completed"
372 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
373 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
374 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
376 [(set_attr "length" "6")
377 (set_attr "type" "multiple")]
380 (define_insn_and_split "*thumb1_neg_abssi2"
381 [(set (match_operand:SI 0 "s_register_operand" "=l")
382 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
383 (clobber (match_scratch:SI 2 "=&l"))]
386 "TARGET_THUMB1 && reload_completed"
387 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
388 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
389 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
391 [(set_attr "length" "6")
392 (set_attr "type" "multiple")]
395 (define_insn "*thumb1_one_cmplsi2"
396 [(set (match_operand:SI 0 "register_operand" "=l")
397 (not:SI (match_operand:SI 1 "register_operand" "l")))]
400 [(set_attr "length" "2")
401 (set_attr "type" "mvn_reg")]
404 (define_insn "*thumb1_zero_extendhisi2"
405 [(set (match_operand:SI 0 "register_operand" "=l,l")
406 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
411 if (which_alternative == 0 && arm_arch6)
412 return "uxth\t%0, %1";
413 if (which_alternative == 0)
416 mem = XEXP (operands[1], 0);
418 if (GET_CODE (mem) == CONST)
421 if (GET_CODE (mem) == PLUS)
423 rtx a = XEXP (mem, 0);
425 /* This can happen due to bugs in reload. */
426 if (REG_P (a) && REGNO (a) == SP_REGNUM)
429 ops[0] = operands[0];
432 output_asm_insn ("mov\t%0, %1", ops);
434 XEXP (mem, 0) = operands[0];
438 return "ldrh\t%0, %1";
440 [(set_attr_alternative "length"
441 [(if_then_else (eq_attr "is_arch6" "yes")
442 (const_int 2) (const_int 4))
444 (set_attr "type" "extend,load_byte")]
447 (define_insn "*thumb1_zero_extendqisi2"
448 [(set (match_operand:SI 0 "register_operand" "=l,l")
449 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
450 "TARGET_THUMB1 && !arm_arch6"
454 [(set_attr "length" "4,2")
455 (set_attr "type" "alu_shift_reg,load_byte")
456 (set_attr "pool_range" "*,32")]
459 (define_insn "*thumb1_zero_extendqisi2_v6"
460 [(set (match_operand:SI 0 "register_operand" "=l,l")
461 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
462 "TARGET_THUMB1 && arm_arch6"
466 [(set_attr "length" "2")
467 (set_attr "type" "extend,load_byte")]
470 ;; We used to have an early-clobber on the scratch register here.
471 ;; However, there's a bug somewhere in reload which means that this
472 ;; can be partially ignored during spill allocation if the memory
473 ;; address also needs reloading; this causes us to die later on when
474 ;; we try to verify the operands. Fortunately, we don't really need
475 ;; the early-clobber: we can always use operand 0 if operand 2
476 ;; overlaps the address.
477 (define_insn "thumb1_extendhisi2"
478 [(set (match_operand:SI 0 "register_operand" "=l,l")
479 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
480 (clobber (match_scratch:SI 2 "=X,l"))]
487 if (which_alternative == 0 && !arm_arch6)
489 if (which_alternative == 0)
490 return \"sxth\\t%0, %1\";
492 mem = XEXP (operands[1], 0);
494 /* This code used to try to use 'V', and fix the address only if it was
495 offsettable, but this fails for e.g. REG+48 because 48 is outside the
496 range of QImode offsets, and offsettable_address_p does a QImode
499 if (GET_CODE (mem) == CONST)
502 if (GET_CODE (mem) == LABEL_REF)
503 return \"ldr\\t%0, %1\";
505 if (GET_CODE (mem) == PLUS)
507 rtx a = XEXP (mem, 0);
508 rtx b = XEXP (mem, 1);
510 if (GET_CODE (a) == LABEL_REF
512 return \"ldr\\t%0, %1\";
515 return \"ldrsh\\t%0, %1\";
526 gcc_assert (REG_P (ops[1]));
528 ops[0] = operands[0];
529 if (reg_mentioned_p (operands[2], ops[1]))
532 ops[3] = operands[2];
533 output_asm_insn (\"movs\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
536 [(set_attr_alternative "length"
537 [(if_then_else (eq_attr "is_arch6" "yes")
538 (const_int 2) (const_int 4))
540 (set_attr "type" "extend,load_byte")
541 (set_attr "pool_range" "*,1018")]
545 [(set (match_operand:SI 0 "register_operand" "")
546 (sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
547 "TARGET_THUMB1 && reload_completed"
548 [(set (match_dup 0) (match_dup 2))
549 (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
551 rtx addr = XEXP (operands[1], 0);
553 if (GET_CODE (addr) == CONST)
554 addr = XEXP (addr, 0);
556 if (GET_CODE (addr) == PLUS
557 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
558 /* No split necessary. */
561 if (GET_CODE (addr) == PLUS
562 && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
565 if (reg_overlap_mentioned_p (operands[0], addr))
567 rtx t = gen_lowpart (QImode, operands[0]);
568 emit_move_insn (t, operands[1]);
569 emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
575 addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
576 operands[2] = const0_rtx;
578 else if (GET_CODE (addr) != PLUS)
580 else if (REG_P (XEXP (addr, 0)))
582 operands[2] = XEXP (addr, 1);
583 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
587 operands[2] = XEXP (addr, 0);
588 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
591 operands[3] = change_address (operands[1], QImode, addr);
595 [(set (match_operand:SI 0 "register_operand" "")
596 (plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
597 (set (match_operand:SI 2 "register_operand" "") (const_int 0))
598 (set (match_operand:SI 3 "register_operand" "")
599 (sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
601 && GET_CODE (XEXP (operands[4], 0)) == PLUS
602 && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
603 && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
604 && (peep2_reg_dead_p (3, operands[0])
605 || rtx_equal_p (operands[0], operands[3]))
606 && (peep2_reg_dead_p (3, operands[2])
607 || rtx_equal_p (operands[2], operands[3]))"
608 [(set (match_dup 2) (match_dup 1))
609 (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
611 rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
612 operands[4] = change_address (operands[4], QImode, addr);
615 (define_insn "thumb1_extendqisi2"
616 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
617 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
622 if (which_alternative == 0 && arm_arch6)
623 return "sxtb\\t%0, %1";
624 if (which_alternative == 0)
627 addr = XEXP (operands[1], 0);
628 if (GET_CODE (addr) == PLUS
629 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
630 return "ldrsb\\t%0, %1";
634 [(set_attr_alternative "length"
635 [(if_then_else (eq_attr "is_arch6" "yes")
636 (const_int 2) (const_int 4))
638 (if_then_else (eq_attr "is_arch6" "yes")
639 (const_int 4) (const_int 6))])
640 (set_attr "type" "extend,load_byte,load_byte")]
643 ;;; ??? This should have alternatives for constants.
644 ;;; ??? This was originally identical to the movdf_insn pattern.
645 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
646 ;;; thumb_reorg with a memory reference.
647 (define_insn "*thumb1_movdi_insn"
648 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,r,l,>,l, m,*r")
649 (match_operand:DI 1 "general_operand" "l, I,J,j,>,l,mi,l,*r"))]
651 && ( register_operand (operands[0], DImode)
652 || register_operand (operands[1], DImode))"
655 switch (which_alternative)
659 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
660 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
661 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
663 return \"movs\\t%Q0, %1\;movs\\t%R0, #0\";
665 operands[1] = GEN_INT (- INTVAL (operands[1]));
666 return \"movs\\t%Q0, %1\;rsbs\\t%Q0, %Q0, #0\;asrs\\t%R0, %Q0, #31\";
668 gcc_assert (TARGET_HAVE_MOVT);
669 return \"movw\\t%Q0, %L1\;movs\\tR0, #0\";
671 return \"ldmia\\t%1, {%0, %H0}\";
673 return \"stmia\\t%0, {%1, %H1}\";
675 return thumb_load_double_from_address (operands);
677 operands[2] = gen_rtx_MEM (SImode,
678 plus_constant (Pmode, XEXP (operands[0], 0), 4));
679 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
682 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
683 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
684 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
687 [(set_attr "length" "4,4,6,6,2,2,6,4,4")
688 (set_attr "type" "multiple,multiple,multiple,multiple,load_8,store_8,load_8,store_8,multiple")
689 (set_attr "arch" "t1,t1,t1,v8mb,t1,t1,t1,t1,t1")
690 (set_attr "pool_range" "*,*,*,*,*,*,1018,*,*")]
693 (define_insn "*thumb1_movsi_insn"
694 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,r,l,l,l,>,l, m,*l*h*k")
695 (match_operand:SI 1 "general_operand" "l, I,j,J,K,>,l,mi,l,*l*h*k"))]
697 && ( register_operand (operands[0], SImode)
698 || register_operand (operands[1], SImode))"
710 [(set_attr "length" "2,2,4,4,4,2,2,2,2,2")
711 (set_attr "type" "mov_reg,mov_imm,mov_imm,multiple,multiple,load_4,store_4,load_4,store_4,mov_reg")
712 (set_attr "pool_range" "*,*,*,*,*,*,*,1018,*,*")
713 (set_attr "arch" "t1,t1,v8mb,t1,t1,t1,t1,t1,t1,t1")
714 (set_attr "conds" "set,clob,nocond,*,*,nocond,nocond,nocond,nocond,nocond")])
716 ; Split the load of 64-bit constant into two loads for high and low 32-bit parts respectively
717 ; to see if we can load them in fewer instructions or fewer cycles.
718 ; For the small 64-bit integer constants that satisfy constraint J, the instruction pattern
719 ; thumb1_movdi_insn has a better way to handle them.
721 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
722 (match_operand:ANY64 1 "immediate_operand" ""))]
723 "TARGET_THUMB1 && reload_completed && !satisfies_constraint_J (operands[1])"
724 [(set (match_dup 0) (match_dup 1))
725 (set (match_dup 2) (match_dup 3))]
727 operands[2] = gen_highpart (SImode, operands[0]);
728 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
730 operands[0] = gen_lowpart (SImode, operands[0]);
731 operands[1] = gen_lowpart (SImode, operands[1]);
736 [(set (match_operand:SI 0 "register_operand" "")
737 (match_operand:SI 1 "const_int_operand" ""))]
738 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
739 [(set (match_dup 2) (match_dup 1))
740 (set (match_dup 0) (neg:SI (match_dup 2)))]
743 operands[1] = GEN_INT (- INTVAL (operands[1]));
744 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
749 [(set (match_operand:SI 0 "register_operand" "")
750 (match_operand:SI 1 "const_int_operand" ""))]
751 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])
752 && !(TARGET_HAVE_MOVT && satisfies_constraint_j (operands[1]))"
753 [(set (match_dup 2) (match_dup 1))
754 (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
757 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
758 unsigned HOST_WIDE_INT mask = 0xff;
761 for (i = 0; i < 25; i++)
762 if ((val & (mask << i)) == val)
765 /* Don't split if the shift is zero. */
769 operands[1] = GEN_INT (val >> i);
770 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
771 operands[3] = GEN_INT (i);
775 ;; For thumb1 split imm move [256-510] into mov [1-255] and add #255
777 [(set (match_operand:SI 0 "register_operand" "")
778 (match_operand:SI 1 "const_int_operand" ""))]
779 "TARGET_THUMB1 && satisfies_constraint_Pe (operands[1])
780 && !(TARGET_HAVE_MOVT && satisfies_constraint_j (operands[1]))"
781 [(set (match_dup 2) (match_dup 1))
782 (set (match_dup 0) (plus:SI (match_dup 2) (match_dup 3)))]
785 operands[1] = GEN_INT (INTVAL (operands[1]) - 255);
786 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
787 operands[3] = GEN_INT (255);
791 (define_insn "*thumb1_movhi_insn"
792 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,l*r,*h,l,r")
793 (match_operand:HI 1 "general_operand" "l,m,l,k*h,*r,I,n"))]
795 && ( register_operand (operands[0], HImode)
796 || register_operand (operands[1], HImode))"
798 switch (which_alternative)
800 case 0: return \"adds %0, %1, #0\";
801 case 2: return \"strh %1, %0\";
802 case 3: return \"mov %0, %1\";
803 case 4: return \"mov %0, %1\";
804 case 5: return \"movs %0, %1\";
805 case 6: gcc_assert (TARGET_HAVE_MOVT);
806 return \"movw %0, %L1\";
807 default: gcc_unreachable ();
809 /* The stack pointer can end up being taken as an index register.
810 Catch this case here and deal with it. */
811 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
812 && REG_P (XEXP (XEXP (operands[1], 0), 0))
813 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
816 ops[0] = operands[0];
817 ops[1] = XEXP (XEXP (operands[1], 0), 0);
819 output_asm_insn (\"mov %0, %1\", ops);
821 XEXP (XEXP (operands[1], 0), 0) = operands[0];
824 return \"ldrh %0, %1\";
826 [(set_attr "length" "2,4,2,2,2,2,4")
827 (set_attr "type" "alus_imm,load_4,store_4,mov_reg,mov_reg,mov_imm,mov_imm")
828 (set_attr "arch" "t1,t1,t1,t1,t1,t1,v8mb")
829 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob,nocond")])
831 (define_expand "thumb_movhi_clobber"
832 [(set (match_operand:HI 0 "memory_operand")
833 (match_operand:HI 1 "register_operand"))
834 (clobber (match_operand:DI 2 "register_operand"))]
837 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
838 && REGNO (operands[1]) <= LAST_LO_REGNUM)
840 emit_insn (gen_movhi (operands[0], operands[1]));
843 /* XXX Fixme, need to handle other cases here as well. */
848 (define_insn "*thumb1_movqi_insn"
849 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,l*r,*h,l")
850 (match_operand:QI 1 "general_operand" "l,m,l,k*h,*r,I"))]
852 && ( register_operand (operands[0], QImode)
853 || register_operand (operands[1], QImode))"
861 [(set_attr "length" "2")
862 (set_attr "type" "alu_imm,load_4,store_4,mov_reg,mov_imm,mov_imm")
863 (set_attr "pool_range" "*,32,*,*,*,*")
864 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
866 (define_insn "*thumb1_movhf"
867 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,l,m,*r,*h")
868 (match_operand:HF 1 "general_operand" "l, m,F,l,*h,*r"))]
870 && ( s_register_operand (operands[0], HFmode)
871 || s_register_operand (operands[1], HFmode))"
873 switch (which_alternative)
876 return \"movs\\t%0, %1\";
880 gcc_assert (MEM_P (operands[1]));
881 addr = XEXP (operands[1], 0);
882 if (GET_CODE (addr) == LABEL_REF
883 || (GET_CODE (addr) == CONST
884 && GET_CODE (XEXP (addr, 0)) == PLUS
885 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
886 && CONST_INT_P (XEXP (XEXP (addr, 0), 1))))
888 /* Constant pool entry. */
889 return \"ldr\\t%0, %1\";
891 return \"ldrh\\t%0, %1\";
899 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
901 ops[0] = operands[0];
902 high = (bits >> 8) & 0xff;
903 ops[1] = GEN_INT (high);
904 ops[2] = GEN_INT (bits & 0xff);
906 output_asm_insn (\"movs\\t%0, %1\;lsls\\t%0, #8\;adds\\t%0, %2\", ops);
908 output_asm_insn (\"movs\\t%0, %2\", ops);
912 case 3: return \"strh\\t%1, %0\";
913 default: return \"mov\\t%0, %1\";
916 [(set_attr "length" "2,2,6,2,2,2")
917 (set_attr "type" "mov_reg,load_4,mov_reg,store_4,mov_reg,mov_reg")
918 (set_attr "pool_range" "*,1018,*,*,*,*")
919 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond")])
921 ;;; ??? This should have alternatives for constants.
922 (define_insn "*thumb1_movsf_insn"
923 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
924 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
926 && ( register_operand (operands[0], SFmode)
927 || register_operand (operands[1], SFmode))"
936 [(set_attr "length" "2")
937 (set_attr "type" "alus_imm,load_4,store_4,load_4,store_4,mov_reg,mov_reg")
938 (set_attr "pool_range" "*,*,*,1018,*,*,*")
939 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
942 ;;; ??? This should have alternatives for constants.
943 ;;; ??? This was originally identical to the movdi_insn pattern.
944 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
945 ;;; thumb_reorg with a memory reference.
946 (define_insn "*thumb_movdf_insn"
947 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
948 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
950 && ( register_operand (operands[0], DFmode)
951 || register_operand (operands[1], DFmode))"
953 switch (which_alternative)
957 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
958 return \"adds\\t%0, %1, #0\;adds\\t%H0, %H1, #0\";
959 return \"adds\\t%H0, %H1, #0\;adds\\t%0, %1, #0\";
961 return \"ldmia\\t%1, {%0, %H0}\";
963 return \"stmia\\t%0, {%1, %H1}\";
965 return thumb_load_double_from_address (operands);
967 operands[2] = gen_rtx_MEM (SImode,
968 plus_constant (Pmode,
969 XEXP (operands[0], 0), 4));
970 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
973 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
974 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
975 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
978 [(set_attr "length" "4,2,2,6,4,4")
979 (set_attr "type" "multiple,load_8,store_8,load_8,store_8,multiple")
980 (set_attr "pool_range" "*,*,*,1018,*,*")]
984 ;; Thumb block-move insns
986 (define_insn "cpymem12b"
987 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
988 (mem:SI (match_operand:SI 3 "register_operand" "1")))
989 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
990 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
991 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
992 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
993 (set (match_operand:SI 0 "register_operand" "=l")
994 (plus:SI (match_dup 2) (const_int 12)))
995 (set (match_operand:SI 1 "register_operand" "=l")
996 (plus:SI (match_dup 3) (const_int 12)))
997 (clobber (match_scratch:SI 4 "=&l"))
998 (clobber (match_scratch:SI 5 "=&l"))
999 (clobber (match_scratch:SI 6 "=&l"))]
1001 "* return thumb_output_move_mem_multiple (3, operands);"
1002 [(set_attr "length" "4")
1003 ; This isn't entirely accurate... It loads as well, but in terms of
1004 ; scheduling the following insn it is better to consider it as a store
1005 (set_attr "type" "store_12")]
1008 (define_insn "cpymem8b"
1009 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
1010 (mem:SI (match_operand:SI 3 "register_operand" "1")))
1011 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
1012 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
1013 (set (match_operand:SI 0 "register_operand" "=l")
1014 (plus:SI (match_dup 2) (const_int 8)))
1015 (set (match_operand:SI 1 "register_operand" "=l")
1016 (plus:SI (match_dup 3) (const_int 8)))
1017 (clobber (match_scratch:SI 4 "=&l"))
1018 (clobber (match_scratch:SI 5 "=&l"))]
1020 "* return thumb_output_move_mem_multiple (2, operands);"
1021 [(set_attr "length" "4")
1022 ; This isn't entirely accurate... It loads as well, but in terms of
1023 ; scheduling the following insn it is better to consider it as a store
1024 (set_attr "type" "store_8")]
1028 ;; A pattern to recognize a special situation and optimize for it.
1029 ;; On the thumb, zero-extension from memory is preferable to sign-extension
1030 ;; due to the available addressing modes. Hence, convert a signed comparison
1031 ;; with zero into an unsigned comparison with 127 if possible.
;; Expand a signed LT/GE comparison of a QImode memory operand against
;; zero into an unsigned comparison of the zero-extended value against
;; 127: (x >= 0) becomes (zext(x) <= 127u), (x < 0) becomes
;; (zext(x) > 127u).  The rewritten comparison is then handed to the
;; generic cbranchsi4 expander.
1032 (define_expand "cbranchqi4"
1033 [(set (pc) (if_then_else
1034 (match_operator 0 "lt_ge_comparison_operator"
1035 [(match_operand:QI 1 "memory_operand")
1036 (match_operand:QI 2 "const0_operand")])
1037 (label_ref (match_operand 3 "" ""))
1042 xops[1] = gen_reg_rtx (SImode);
1043 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
1044 xops[2] = GEN_INT (127);
1045 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
1046 VOIDmode, xops[1], xops[2]);
1047 xops[3] = operands[3];
1048 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
1052 ;; A pattern for the CB(N)Z instruction added in ARMv8-M Baseline profile,
1053 ;; adapted from cbranchsi4_insn. Modifying cbranchsi4_insn instead leads to
1054 ;; code generation difference for ARMv6-M because the minimum length of the
1055 ;; instruction becomes 2 even for ARMv6-M due to a limitation in genattrtab's
1056 ;; handling of PC in the length condition.
;; Compare-with-zero-and-branch.  When the computed length is 2, emit a
;; single CBZ/CBNZ (ARMv8-M Baseline).  Otherwise fall back to an
;; explicit "cmp %1, #0" plus a conditional branch, reusing a previous
;; flag-setting insn when cfun->machine's thumb1_cc_* tracking shows the
;; flags already hold the result of comparing operand 1 with operand 2.
1057 (define_insn "thumb1_cbz"
1058 [(set (pc) (if_then_else
1059 (match_operator 0 "equality_operator"
1060 [(match_operand:SI 1 "s_register_operand" "l")
1062 (label_ref (match_operand 2 "" ""))
1064 "TARGET_THUMB1 && TARGET_HAVE_CBZ"
1066 if (get_attr_length (insn) == 2)
1068 if (GET_CODE (operands[0]) == EQ)
1069 return "cbz\t%1, %l2";
1071 return "cbnz\t%1, %l2";
1075 rtx t = cfun->machine->thumb1_cc_insn;
1078 if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
1079 || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
1081 if (cfun->machine->thumb1_cc_mode == CC_NZmode)
1083 if (!nz_comparison_operator (operands[0], VOIDmode))
1086 else if (cfun->machine->thumb1_cc_mode != CCmode)
1091 output_asm_insn ("cmp\t%1, #0", operands);
1092 cfun->machine->thumb1_cc_insn = insn;
1093 cfun->machine->thumb1_cc_op0 = operands[1];
1094 cfun->machine->thumb1_cc_op1 = operands[2];
1095 cfun->machine->thumb1_cc_mode = CCmode;
1098 /* Ensure we emit the right type of condition code on the jump. */
1099 XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
1102 switch (get_attr_length (insn))
1104 case 4: return "b%d0\t%l2";
1105 case 6: return "b%D0\t.LCB%=;b\t%l2\t%@long jump\n.LCB%=:";
1106 case 8: return "b%D0\t.LCB%=;bl\t%l2\t%@far jump\n.LCB%=:";
1107 default: gcc_unreachable ();
; Attribute section: "far_jump" is yes only for the 8-byte bl form;
; "length" is selected from the branch displacement ranges below
; (2 for a forward CBZ-range target, then 4/6/8 for the cmp+branch forms).
1111 [(set (attr "far_jump")
1113 (eq_attr "length" "8")
1114 (const_string "yes")
1115 (const_string "no")))
1116 (set (attr "length")
1118 (and (ge (minus (match_dup 2) (pc)) (const_int 2))
1119 (le (minus (match_dup 2) (pc)) (const_int 128)))
1122 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
1123 (le (minus (match_dup 2) (pc)) (const_int 256)))
1126 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
1127 (le (minus (match_dup 2) (pc)) (const_int 2048)))
1132 (eq_attr "length" "2")
1133 (const_string "branch")
1134 (const_string "multiple")))]
1137 ;; Changes to the constraints of this pattern must be propagated to those of
1138 ;; atomic compare_and_swap splitters in sync.md. These must be at least as
1139 ;; strict as the constraints here and aim to be as permissive.
;; Compare-and-branch on two SImode values.  Emits "cmp %1, %2" followed
;; by a conditional branch; the cmp is elided when the thumb1_cc_*
;; tracking in cfun->machine shows the flags already reflect this
;; comparison.  Longer branch displacements use an inverted short branch
;; around an unconditional b (length 6) or bl (length 8, "far jump").
1140 (define_insn "cbranchsi4_insn"
1141 [(set (pc) (if_then_else
1142 (match_operator 0 "arm_comparison_operator"
1143 [(match_operand:SI 1 "s_register_operand" "l,l*h")
1144 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
1145 (label_ref (match_operand 3 "" ""))
1149 rtx t = cfun->machine->thumb1_cc_insn;
1152 if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
1153 || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
1155 if (cfun->machine->thumb1_cc_mode == CC_NZmode)
1157 if (!nz_comparison_operator (operands[0], VOIDmode))
1160 else if (cfun->machine->thumb1_cc_mode != CCmode)
1165 output_asm_insn ("cmp\t%1, %2", operands);
1166 cfun->machine->thumb1_cc_insn = insn;
1167 cfun->machine->thumb1_cc_op0 = operands[1];
1168 cfun->machine->thumb1_cc_op1 = operands[2];
1169 cfun->machine->thumb1_cc_mode = CCmode;
1172 /* Ensure we emit the right type of condition code on the jump. */
1173 XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
1176 switch (get_attr_length (insn))
1178 case 4: return \"b%d0\\t%l3\";
1179 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
1180 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
1183 [(set (attr "far_jump")
1185 (eq_attr "length" "8")
1186 (const_string "yes")
1187 (const_string "no")))
1188 (set (attr "length")
1190 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
1191 (le (minus (match_dup 3) (pc)) (const_int 256)))
1194 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
1195 (le (minus (match_dup 3) (pc)) (const_int 2048)))
1198 (set_attr "type" "multiple")]
1201 ;; Changes to the constraints of this pattern must be propagated to those of
1202 ;; atomic compare_and_swap splitters in sync.md. These must be at least as
1203 ;; strict as the constraints here and aim to be as permissive.
;; Compare-and-branch against a negated immediate: instead of comparing
;; with %2 directly, compute "adds %0, %1, #-%2" into the scratch
;; register, which sets the flags for the following conditional branch.
;; Branch-length handling mirrors cbranchsi4_insn (4/6/8 bytes).
1204 (define_insn "cbranchsi4_scratch"
1205 [(set (pc) (if_then_else
1206 (match_operator 4 "arm_comparison_operator"
1207 [(match_operand:SI 1 "s_register_operand" "l,0")
1208 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
1209 (label_ref (match_operand 3 "" ""))
1211 (clobber (match_scratch:SI 0 "=l,l"))]
1214 output_asm_insn (\"adds\\t%0, %1, #%n2\", operands);
1216 switch (get_attr_length (insn))
1218 case 4: return \"b%d4\\t%l3\";
1219 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
1220 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
1223 [(set (attr "far_jump")
1225 (eq_attr "length" "8")
1226 (const_string "yes")
1227 (const_string "no")))
1228 (set (attr "length")
1230 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
1231 (le (minus (match_dup 3) (pc)) (const_int 256)))
1234 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
1235 (le (minus (match_dup 3) (pc)) (const_int 2048)))
1238 (set_attr "type" "multiple")]
;; Branch on (reg1 ==/!= -reg2), implemented as "cmn %1, %2" (compare
;; negative adds the operands and sets flags) plus a conditional branch
;; with the usual 4/6/8-byte length selection.
1241 (define_insn "*negated_cbranchsi4"
1244 (match_operator 0 "equality_operator"
1245 [(match_operand:SI 1 "s_register_operand" "l")
1246 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
1247 (label_ref (match_operand 3 "" ""))
1251 output_asm_insn (\"cmn\\t%1, %2\", operands);
1252 switch (get_attr_length (insn))
1254 case 4: return \"b%d0\\t%l3\";
1255 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
1256 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
1259 [(set (attr "far_jump")
1261 (eq_attr "length" "8")
1262 (const_string "yes")
1263 (const_string "no")))
1264 (set (attr "length")
1266 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
1267 (le (minus (match_dup 3) (pc)) (const_int 256)))
1270 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
1271 (le (minus (match_dup 3) (pc)) (const_int 2048)))
1274 (set_attr "type" "multiple")]
;; Branch on a single bit of a register (zero_extract of width 1 at
;; position %2).  The bit is moved into the N flag by shifting it to
;; bit 31 with "lsls scratch, %1, #(31 - %2)"; the conditional branch
;; then tests the result.
1277 (define_insn "*tbit_cbranch"
1280 (match_operator 0 "equality_operator"
1281 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
1283 (match_operand:SI 2 "const_int_operand" "i"))
1285 (label_ref (match_operand 3 "" ""))
1287 (clobber (match_scratch:SI 4 "=l"))]
1292 op[0] = operands[4];
1293 op[1] = operands[1];
1294 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
1296 output_asm_insn (\"lsls\\t%0, %1, %2\", op);
1297 switch (get_attr_length (insn))
1299 case 4: return \"b%d0\\t%l3\";
1300 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
1301 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
1304 [(set (attr "far_jump")
1306 (eq_attr "length" "8")
1307 (const_string "yes")
1308 (const_string "no")))
1309 (set (attr "length")
1311 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
1312 (le (minus (match_dup 3) (pc)) (const_int 256)))
1315 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
1316 (le (minus (match_dup 3) (pc)) (const_int 2048)))
1319 (set_attr "type" "multiple")]
;; Branch on the low %2 bits of a register (zero_extract of width %2 at
;; bit 0).  "lsls scratch, %1, #(32 - %2)" shifts those bits to the top
;; of the word, setting the flags for the conditional branch.
1322 (define_insn "*tlobits_cbranch"
1325 (match_operator 0 "equality_operator"
1326 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
1327 (match_operand:SI 2 "const_int_operand" "i")
1330 (label_ref (match_operand 3 "" ""))
1332 (clobber (match_scratch:SI 4 "=l"))]
1337 op[0] = operands[4];
1338 op[1] = operands[1];
1339 op[2] = GEN_INT (32 - INTVAL (operands[2]));
1341 output_asm_insn (\"lsls\\t%0, %1, %2\", op);
1342 switch (get_attr_length (insn))
1344 case 4: return \"b%d0\\t%l3\";
1345 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
1346 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
1349 [(set (attr "far_jump")
1351 (eq_attr "length" "8")
1352 (const_string "yes")
1353 (const_string "no")))
1354 (set (attr "length")
1356 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
1357 (le (minus (match_dup 3) (pc)) (const_int 256)))
1360 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
1361 (le (minus (match_dup 3) (pc)) (const_int 2048)))
1364 (set_attr "type" "multiple")]
;; Branch on (reg0 AND reg1) ==/!= 0 via "tst %0, %1", which sets the
;; flags without writing a result register; followed by the usual
;; length-selected conditional branch.
1367 (define_insn "*tstsi3_cbranch"
1370 (match_operator 3 "equality_operator"
1371 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
1372 (match_operand:SI 1 "s_register_operand" "l"))
1374 (label_ref (match_operand 2 "" ""))
1379 output_asm_insn (\"tst\\t%0, %1\", operands);
1380 switch (get_attr_length (insn))
1382 case 4: return \"b%d3\\t%l2\";
1383 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
1384 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
1387 [(set (attr "far_jump")
1389 (eq_attr "length" "8")
1390 (const_string "yes")
1391 (const_string "no")))
1392 (set (attr "length")
1394 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
1395 (le (minus (match_dup 2) (pc)) (const_int 256)))
1398 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
1399 (le (minus (match_dup 2) (pc)) (const_int 2048)))
1402 (set_attr "type" "multiple")]
;; Decrement-and-branch: operand 0 receives operand 2 minus one, and the
;; branch tests the (pre-decrement) value via the flags set by SUBS.
;; Four alternatives for the decrement target: a lo reg (subs directly),
;; a hi reg (subs into scratch, then mov — mov lo->hi does not clobber
;; the condition codes), and two memory forms (subs into scratch, then
;; str).  Branch lengths per alternative are 2 bytes longer when the
;; extra mov/str is needed; see set_attr_alternative below.
1405 (define_insn "*cbranchne_decr1"
1407 (if_then_else (match_operator 3 "equality_operator"
1408 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
1410 (label_ref (match_operand 4 "" ""))
1412 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
1413 (plus:SI (match_dup 2) (const_int -1)))
1414 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
1419 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
1421 VOIDmode, operands[2], const1_rtx);
1422 cond[1] = operands[4];
1424 if (which_alternative == 0)
1425 output_asm_insn (\"subs\\t%0, %2, #1\", operands);
1426 else if (which_alternative == 1)
1428 /* We must provide an alternative for a hi reg because reload
1429 cannot handle output reloads on a jump instruction, but we
1430 can't subtract into that. Fortunately a mov from lo to hi
1431 does not clobber the condition codes. */
1432 output_asm_insn (\"subs\\t%1, %2, #1\", operands);
1433 output_asm_insn (\"mov\\t%0, %1\", operands);
1437 /* Similarly, but the target is memory. */
1438 output_asm_insn (\"subs\\t%1, %2, #1\", operands);
1439 output_asm_insn (\"str\\t%1, %0\", operands);
1442 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
1445 output_asm_insn (\"b%d0\\t%l1\", cond);
1448 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
1449 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
1451 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
1452 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
1456 [(set (attr "far_jump")
1458 (ior (and (eq (symbol_ref ("which_alternative"))
1460 (eq_attr "length" "8"))
1461 (eq_attr "length" "10"))
1462 (const_string "yes")
1463 (const_string "no")))
1464 (set_attr_alternative "length"
1468 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
1469 (le (minus (match_dup 4) (pc)) (const_int 256)))
1472 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
1473 (le (minus (match_dup 4) (pc)) (const_int 2048)))
1478 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
1479 (le (minus (match_dup 4) (pc)) (const_int 256)))
1482 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
1483 (le (minus (match_dup 4) (pc)) (const_int 2048)))
1488 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
1489 (le (minus (match_dup 4) (pc)) (const_int 256)))
1492 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
1493 (le (minus (match_dup 4) (pc)) (const_int 2048)))
1498 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
1499 (le (minus (match_dup 4) (pc)) (const_int 256)))
1502 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
1503 (le (minus (match_dup 4) (pc)) (const_int 2048)))
1506 (set_attr "type" "multiple")]
;; Add-and-branch: operand 0 receives operand 2 + operand 3, and the
;; branch tests the sum via the flags set by ADDS/SUBS (SUBS of the
;; negated constant when operand 3 is a negative immediate).  Restricted
;; to EQ/NE/GE/LT, the codes that are valid off an add's flag results.
;; Six alternatives: direct lo-reg targets, hi-reg targets needing an
;; extra mov, and memory targets needing an extra str — the latter four
;; add 2 bytes to the branch-length calculation.
1509 (define_insn "*addsi3_cbranch"
1512 (match_operator 4 "arm_comparison_operator"
1514 (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
1515 (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
1517 (label_ref (match_operand 5 "" ""))
1520 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
1521 (plus:SI (match_dup 2) (match_dup 3)))
1522 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
1524 && (GET_CODE (operands[4]) == EQ
1525 || GET_CODE (operands[4]) == NE
1526 || GET_CODE (operands[4]) == GE
1527 || GET_CODE (operands[4]) == LT)"
1532 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
1533 cond[1] = operands[2];
1534 cond[2] = operands[3];
1536 if (CONST_INT_P (cond[2]) && INTVAL (cond[2]) < 0)
1537 output_asm_insn (\"subs\\t%0, %1, #%n2\", cond);
1539 output_asm_insn (\"adds\\t%0, %1, %2\", cond);
1541 if (which_alternative >= 2
1542 && which_alternative < 4)
1543 output_asm_insn (\"mov\\t%0, %1\", operands);
1544 else if (which_alternative >= 4)
1545 output_asm_insn (\"str\\t%1, %0\", operands);
1547 switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
1550 return \"b%d4\\t%l5\";
1552 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
1554 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
1558 [(set (attr "far_jump")
1560 (ior (and (lt (symbol_ref ("which_alternative"))
1562 (eq_attr "length" "8"))
1563 (eq_attr "length" "10"))
1564 (const_string "yes")
1565 (const_string "no")))
1566 (set (attr "length")
1568 (lt (symbol_ref ("which_alternative"))
1571 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
1572 (le (minus (match_dup 5) (pc)) (const_int 256)))
1575 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
1576 (le (minus (match_dup 5) (pc)) (const_int 2048)))
1580 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
1581 (le (minus (match_dup 5) (pc)) (const_int 256)))
1584 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
1585 (le (minus (match_dup 5) (pc)) (const_int 2048)))
1588 (set_attr "type" "multiple")]
;; Like *addsi3_cbranch but the sum itself is not needed: only the flags
;; matter.  Per alternative: (0) "cmp %1, #-%2" for a suitable negative
;; immediate, (1) "cmn %1, %2" for a register, (2)/(3) adds/subs into a
;; scratch register for immediates that need materializing.  Restricted
;; to EQ/NE/GE/LT as in *addsi3_cbranch.
1591 (define_insn "*addsi3_cbranch_scratch"
1594 (match_operator 3 "arm_comparison_operator"
1596 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
1597 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
1599 (label_ref (match_operand 4 "" ""))
1601 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
1603 && (GET_CODE (operands[3]) == EQ
1604 || GET_CODE (operands[3]) == NE
1605 || GET_CODE (operands[3]) == GE
1606 || GET_CODE (operands[3]) == LT)"
1609 switch (which_alternative)
1612 output_asm_insn (\"cmp\t%1, #%n2\", operands);
1615 output_asm_insn (\"cmn\t%1, %2\", operands);
1618 if (INTVAL (operands[2]) < 0)
1619 output_asm_insn (\"subs\t%0, %1, %2\", operands);
1621 output_asm_insn (\"adds\t%0, %1, %2\", operands);
1624 if (INTVAL (operands[2]) < 0)
1625 output_asm_insn (\"subs\t%0, %0, %2\", operands);
1627 output_asm_insn (\"adds\t%0, %0, %2\", operands);
1631 switch (get_attr_length (insn))
1634 return \"b%d3\\t%l4\";
1636 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
1638 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
1642 [(set (attr "far_jump")
1644 (eq_attr "length" "8")
1645 (const_string "yes")
1646 (const_string "no")))
1647 (set (attr "length")
1649 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
1650 (le (minus (match_dup 4) (pc)) (const_int 256)))
1653 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
1654 (le (minus (match_dup 4) (pc)) (const_int 2048)))
1657 (set_attr "type" "multiple")]
;; Compare a DImode value against zero for equality by ORing its two
;; halves into a scratch register: "orrs %1, %Q0, %R0" sets the Z flag
;; iff both words are zero.
1660 (define_insn "*thumb_cmpdi_zero"
1661 [(set (reg:CC_Z CC_REGNUM)
1662 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
1664 (clobber (match_scratch:SI 1 "=l"))]
1666 "orrs\\t%1, %Q0, %R0"
1667 [(set_attr "conds" "set")
1668 (set_attr "length" "2")
1669 (set_attr "type" "logics_reg")]
;; Store (x == 0) / (x != 0) as an SImode 0/1 value without a branch.
;; The expanders allocate the scratch register; the insns use carry-flag
;; arithmetic: RSBS/ADCS for the == 0 case and SUBS/SBCS for != 0.
1672 (define_expand "cstoresi_eq0_thumb1"
1674 [(set (match_operand:SI 0 "s_register_operand")
1675 (eq:SI (match_operand:SI 1 "s_register_operand")
1677 (clobber (match_dup:SI 2))])]
1679 "operands[2] = gen_reg_rtx (SImode);"
1682 (define_expand "cstoresi_ne0_thumb1"
1684 [(set (match_operand:SI 0 "s_register_operand")
1685 (ne:SI (match_operand:SI 1 "s_register_operand")
1687 (clobber (match_dup:SI 2))])]
1689 "operands[2] = gen_reg_rtx (SImode);"
;; (x == 0): two alternatives depending on whether the destination may
;; overlap the source; both are an rsbs followed by adcs (4 bytes).
1692 (define_insn "*cstoresi_eq0_thumb1_insn"
1693 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
1694 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
1696 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
1699 rsbs\\t%0, %1, #0\;adcs\\t%0, %0, %1
1700 rsbs\\t%2, %1, #0\;adcs\\t%0, %1, %2"
1701 [(set_attr "length" "4")
1702 (set_attr "type" "multiple")]
;; (x != 0): subs borrows iff x == 0, so sbcs produces 0/1.
1705 (define_insn "*cstoresi_ne0_thumb1_insn"
1706 [(set (match_operand:SI 0 "s_register_operand" "=l")
1707 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
1709 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
1711 "subs\\t%2, %1, #1\;sbcs\\t%0, %1, %2"
1712 [(set_attr "length" "4")]
1715 ;; Used as part of the expansion of thumb ltu and gtu sequences
;; -(a <u b): cmp clears carry iff a < b (unsigned), so "sbcs %0,%0,%0"
;; leaves all-ones (-1) when a < b and 0 otherwise.
1716 (define_insn "cstoresi_nltu_thumb1"
1717 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
1718 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
1719 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
1721 "cmp\\t%1, %2\;sbcs\\t%0, %0, %0"
1722 [(set_attr "length" "4")
1723 (set_attr "type" "multiple")]
;; (a <u b) as 0/1: split into the -(a <u b) pattern above followed by
;; a negation of the temporary.  Splitting here is safe because both
;; resulting insns are themselves self-contained (see the warning at the
;; top of this file about separating flag producers from consumers).
1726 (define_insn_and_split "cstoresi_ltu_thumb1"
1727 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
1728 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
1729 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
1734 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
1735 (set (match_dup 0) (neg:SI (match_dup 3)))]
1736 "operands[3] = gen_reg_rtx (SImode);"
1737 [(set_attr "length" "4")
1738 (set_attr "type" "multiple")]
1741 ;; Used as part of the expansion of thumb les sequence.
;; %0 = %1 + %2 + (%3 >=u %4): the cmp sets carry iff %3 >= %4
;; (unsigned), and adcs folds that carry into the sum.
1742 (define_insn "thumb1_addsi3_addgeu"
1743 [(set (match_operand:SI 0 "s_register_operand" "=l")
1744 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
1745 (match_operand:SI 2 "s_register_operand" "l"))
1746 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
1747 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
1749 "cmp\\t%3, %4\;adcs\\t%0, %1, %2"
1750 [(set_attr "length" "4")
1751 (set_attr "type" "multiple")]
;; Unconditional jump.  A 2-byte "b" when the target is within the
;; short-branch displacement range; otherwise a "bl" far jump (length 4,
;; far_jump = yes so prologue code knows LR is clobbered).
1755 (define_insn "*thumb_jump"
1757 (label_ref (match_operand 0 "" "")))]
1760 if (get_attr_length (insn) == 2)
1762 return \"bl\\t%l0\\t%@ far jump\";
1764 [(set (attr "far_jump")
1766 (eq_attr "length" "4")
1767 (const_string "yes")
1768 (const_string "no")))
1769 (set (attr "length")
1771 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
1772 (le (minus (match_dup 0) (pc)) (const_int 2048)))
1775 (set_attr "type" "branch")]
;; Register-indirect call, armv5t+ (BLX available); not for sibcalls.
1778 (define_insn "*call_reg_thumb1_v5"
1779 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
1780 (match_operand 1 "" ""))
1781 (use (match_operand 2 "" ""))
1782 (clobber (reg:SI LR_REGNUM))]
1783 "TARGET_THUMB1 && arm_arch5t && !SIBLING_CALL_P (insn)"
1785 [(set_attr "length" "2")
1786 (set_attr "type" "call")]
;; CMSE non-secure call: the target address is pinned in r4 and the call
;; goes through the __gnu_cmse_nonsecure_call library veneer.
1789 (define_insn "*nonsecure_call_reg_thumb1_v5"
1790 [(call (unspec:SI [(mem:SI (reg:SI R4_REGNUM))]
1791 UNSPEC_NONSECURE_MEM)
1792 (match_operand 0 "" ""))
1793 (use (match_operand 1 "" ""))
1794 (clobber (reg:SI LR_REGNUM))]
1795 "TARGET_THUMB1 && use_cmse && !SIBLING_CALL_P (insn)"
1796 "bl\\t__gnu_cmse_nonsecure_call"
1797 [(set_attr "length" "4")
1798 (set_attr "type" "call")]
;; Register-indirect call without BLX (pre-armv5t): go through a
;; call-via-reg helper; with caller interworking, pick the veneer that
;; matches the frame-pointer register in use.
1801 (define_insn "*call_reg_thumb1"
1802 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
1803 (match_operand 1 "" ""))
1804 (use (match_operand 2 "" ""))
1805 (clobber (reg:SI LR_REGNUM))]
1806 "TARGET_THUMB1 && !arm_arch5t && !SIBLING_CALL_P (insn)"
1809 if (!TARGET_CALLER_INTERWORKING)
1810 return thumb_call_via_reg (operands[0]);
1811 else if (operands[1] == const0_rtx)
1812 return \"bl\\t%__interwork_call_via_%0\";
1813 else if (frame_pointer_needed)
1814 return \"bl\\t%__interwork_r7_call_via_%0\";
1816 return \"bl\\t%__interwork_r11_call_via_%0\";
1818 [(set_attr "type" "call")]
;; Value-returning variants of the three call patterns above.
1821 (define_insn "*call_value_reg_thumb1_v5"
1822 [(set (match_operand 0 "" "")
1823 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
1824 (match_operand 2 "" "")))
1825 (use (match_operand 3 "" ""))
1826 (clobber (reg:SI LR_REGNUM))]
1827 "TARGET_THUMB1 && arm_arch5t"
1829 [(set_attr "length" "2")
1830 (set_attr "type" "call")]
1833 (define_insn "*nonsecure_call_value_reg_thumb1_v5"
1834 [(set (match_operand 0 "" "")
1836 [(mem:SI (reg:SI R4_REGNUM))]
1837 UNSPEC_NONSECURE_MEM)
1838 (match_operand 1 "" "")))
1839 (use (match_operand 2 "" ""))
1840 (clobber (reg:SI LR_REGNUM))]
1841 "TARGET_THUMB1 && use_cmse"
1842 "bl\\t__gnu_cmse_nonsecure_call"
1843 [(set_attr "length" "4")
1844 (set_attr "type" "call")]
1847 (define_insn "*call_value_reg_thumb1"
1848 [(set (match_operand 0 "" "")
1849 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
1850 (match_operand 2 "" "")))
1851 (use (match_operand 3 "" ""))
1852 (clobber (reg:SI LR_REGNUM))]
1853 "TARGET_THUMB1 && !arm_arch5t"
1856 if (!TARGET_CALLER_INTERWORKING)
1857 return thumb_call_via_reg (operands[1]);
1858 else if (operands[2] == const0_rtx)
1859 return \"bl\\t%__interwork_call_via_%1\";
1860 else if (frame_pointer_needed)
1861 return \"bl\\t%__interwork_r7_call_via_%1\";
1863 return \"bl\\t%__interwork_r11_call_via_%1\";
1865 [(set_attr "type" "call")]
;; Direct (symbol) calls; excluded when the callee requires a long call.
1868 (define_insn "*call_insn"
1869 [(call (mem:SI (match_operand:SI 0 "" ""))
1870 (match_operand:SI 1 "" ""))
1871 (use (match_operand 2 "" ""))
1872 (clobber (reg:SI LR_REGNUM))]
1874 && GET_CODE (operands[0]) == SYMBOL_REF
1875 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
1877 [(set_attr "length" "4")
1878 (set_attr "type" "call")]
1881 (define_insn "*call_value_insn"
1882 [(set (match_operand 0 "" "")
1883 (call (mem:SI (match_operand 1 "" ""))
1884 (match_operand 2 "" "")))
1885 (use (match_operand 3 "" ""))
1886 (clobber (reg:SI LR_REGNUM))]
1888 && GET_CODE (operands[1]) == SYMBOL_REF
1889 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
1891 [(set_attr "length" "4")
1892 (set_attr "type" "call")]
;; PIC switch-statement dispatch.  First emit a bounds check: branch to
;; the default label when the index (operand 0) is unsigned-greater than
;; the range (operand 1).  Then move the index into r0 and emit the
;; dispatch insn, which indexes the jump table at operand 2.
1895 (define_expand "thumb1_casesi_internal_pic"
1896 [(match_operand:SI 0 "s_register_operand")
1897 (match_operand:SI 1 "thumb1_cmp_operand")
1898 (match_operand 2 "" "")
1899 (match_operand 3 "" "")]
1903 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
1904 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
1906 reg0 = gen_rtx_REG (SImode, 0);
1907 emit_move_insn (reg0, operands[0]);
1908 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
;; Table dispatch proper: jumps via the table whose label is operand 0,
;; with the index in r0; ip and lr are clobbered by the emitted
;; sequence (see thumb1_output_casesi).
1913 (define_insn "thumb1_casesi_dispatch"
1914 [(parallel [(set (pc) (unspec [(reg:SI 0)
1915 (label_ref (match_operand 0 "" ""))
1916 ;; (label_ref (match_operand 1 "" ""))
1918 UNSPEC_THUMB1_CASESI))
1919 (clobber (reg:SI IP_REGNUM))
1920 (clobber (reg:SI LR_REGNUM))])]
1922 "* return thumb1_output_casesi(operands);"
1923 [(set_attr "length" "4")
1924 (set_attr "type" "multiple")]
1927 ;; NB Never uses BX.
;; Indirect jump through a register (2 bytes).
1928 (define_insn "*thumb1_indirect_jump"
1930 (match_operand:SI 0 "register_operand" "l*r"))]
1933 [(set_attr "conds" "clob")
1934 (set_attr "length" "2")
1935 (set_attr "type" "branch")]
;; Prologue marker for ARM/Thumb interworking entry; the assembly is
;; produced by thumb1_output_interwork.
1939 (define_insn "prologue_thumb1_interwork"
1940 [(unspec_volatile [(const_int 0)] VUNSPEC_THUMB1_INTERWORK)]
1942 "* return thumb1_output_interwork ();"
1943 [(set_attr "length" "8")
1944 (set_attr "type" "multiple")]
;; Function epilogue placeholder; the real instruction sequence is
;; produced late by thumb1_unexpanded_epilogue.  The length attribute is
;; a worst-case bound, larger for CMSE entry functions.
1947 (define_insn "*epilogue_insns"
1948 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
1951 return thumb1_unexpanded_epilogue ();
1953 ; Length is absolute worst case, when using CMSE and if this is an entry
1954 ; function an extra 4 (MSR) bytes will be added.
1955 [(set (attr "length")
1957 (match_test "IS_CMSE_ENTRY (arm_current_func_type ())")
1960 (set_attr "type" "block")
1961 ;; We don't clobber the conditions, but the potential length of this
1962 ;; operation is sufficient to make conditionalizing the sequence
1963 ;; unlikely to be profitable.
1964 (set_attr "conds" "clob")]
1967 ;; Miscellaneous Thumb patterns
;; Jump-table dispatch: since Thumb table entries are offsets relative
;; to the table label, add the label address (operand 1) to the fetched
;; offset (operand 0) before jumping.
1968 (define_expand "tablejump"
1969 [(parallel [(set (pc) (match_operand:SI 0 "register_operand"))
1970 (use (label_ref (match_operand 1 "" "")))])]
1975 /* Hopefully, CSE will eliminate this copy. */
1976 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
1977 rtx reg2 = gen_reg_rtx (SImode);
1979 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
;; Copy the program counter into a lo register.
1985 (define_insn "*thumb1_movpc_insn"
1986 [(set (match_operand:SI 0 "s_register_operand" "=l")
1987 (reg:SI PC_REGNUM))]
1990 [(set_attr "length" "2")
1991 (set_attr "conds" "nocond")
1992 (set_attr "type" "mov_reg")]
1995 ;; NB never uses BX.
;; The jump insn generated by the tablejump expander above.
1996 (define_insn "*thumb1_tablejump"
1997 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
1998 (use (label_ref (match_operand 1 "" "")))]
2001 [(set_attr "length" "2")
2002 (set_attr "type" "branch")]
;; Exception-handling return: after reload, split into a call to
;; thumb_set_return_address, which stores the new return address
;; (operand 0) using the scratch register (operand 1).
2005 (define_insn_and_split "thumb_eh_return"
2006 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
2008 (clobber (match_scratch:SI 1 "=&l"))]
2011 "&& reload_completed"
2015 thumb_set_return_address (operands[0], operands[1]);
2018 [(set_attr "type" "mov_reg")]
2021 (define_insn "thumb1_stack_protect_test_insn"
2022 [(set (match_operand:SI 0 "register_operand" "=&l")
2023 (unspec:SI [(match_operand:SI 1 "memory_operand" "m")
2024 (mem:SI (match_operand:SI 2 "register_operand" "+l"))]
2026 (clobber (match_dup 2))]
2028 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0"
2029 [(set_attr "length" "8")
2030 (set_attr "conds" "set")
2031 (set_attr "type" "multiple")]