;; ARM Thumb-1 Machine Description
;; Copyright (C) 2007-2021 Free Software Foundation, Inc.
;;
;; This file is part of GCC.
;;
;; GCC is free software; you can redistribute it and/or modify it
;; under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 3, or (at your option)
;; any later version.
;;
;; GCC is distributed in the hope that it will be useful, but
;; WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
;; General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.  */

;;---------------------------------------------------------------------------
;; Insn patterns
;;
;; Beware of splitting Thumb1 patterns that output multiple
;; assembly instructions, in particular instructions such as SBC and
;; ADC which consume flags.  For example, in the pattern thumb_subdi3
;; below, the output SUB implicitly sets the flags (assembled to SUBS)
;; and then the Carry flag is used by SBC to compute the correct
;; result.  If we split the thumb_subdi3 pattern into two separate RTL
;; insns (using define_insn_and_split), the scheduler might place
;; other RTL insns between SUB and SBC, possibly modifying the Carry
;; flag used by SBC.  This might happen because most Thumb1 patterns
;; for flag-setting instructions do not have explicit RTL for setting
;; or clobbering the flags.  Instead, they have the attribute "conds"
;; with value "set" or "clob".  However, this attribute is not used to
;; identify dependencies and therefore the scheduler might reorder
;; these instructions.  Currently, this problem cannot happen because
;; there are no separate Thumb1 patterns for individual instructions
;; that consume flags (except conditional execution, which is treated
;; differently).  In particular there is no Thumb1 armv6-m pattern for
;; sbc or adc.
43 | ||
44 | ||
45 | ||
;; 64-bit add in low registers: ADDS on the low word sets the carry
;; consumed by ADCS, so both instructions stay in a single pattern.
(define_insn "*thumb1_adddi3"
  [(set (match_operand:DI 0 "register_operand" "=l")
	(plus:DI (match_operand:DI 1 "register_operand" "%0")
		 (match_operand:DI 2 "register_operand" "l")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_THUMB1"
  "adds\\t%Q0, %Q0, %Q2\;adcs\\t%R0, %R0, %R2"
  [(set_attr "length" "4")
   (set_attr "type" "multiple")]
)
57 | ||
;; Changes to the constraints of this pattern must be propagated to those of
;; atomic additions in sync.md and to the logic for bind_old_new in
;; arm_split_atomic_op in arm.c.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
(define_insn_and_split "*thumb1_addsi3"
  [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
	(plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
		 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
  "TARGET_THUMB1"
  "*
   static const char * const asms[] =
   {
     \"adds\\t%0, %0, %2\",
     \"subs\\t%0, %0, #%n2\",
     \"adds\\t%0, %1, %2\",
     \"add\\t%0, %0, %2\",
     \"add\\t%0, %0, %2\",
     \"add\\t%0, %1, %2\",
     \"add\\t%0, %1, %2\",
     \"#\",
     \"#\",
     \"#\"
   };
   if ((which_alternative == 2 || which_alternative == 6)
       && CONST_INT_P (operands[2])
       && INTVAL (operands[2]) < 0)
     return (which_alternative == 2) ? \"subs\\t%0, %1, #%n2\" : \"sub\\t%0, %1, #%n2\";
   return asms[which_alternative];
  "
  "&& reload_completed && CONST_INT_P (operands[2])
   && ((operands[1] != stack_pointer_rtx
	&& (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
       || (operands[1] == stack_pointer_rtx
	   && INTVAL (operands[2]) > 1020))"
  [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
  {
    ;; Out-of-range immediates are split into two adds; SP-based adds
    ;; peel off the maximum 1020 first, others clamp to +/-255.
    HOST_WIDE_INT offset = INTVAL (operands[2]);
    if (operands[1] == stack_pointer_rtx)
      offset -= 1020;
    else
      {
	if (offset > 255)
	  offset = 255;
	else if (offset < -255)
	  offset = -255;
      }
    operands[3] = GEN_INT (offset);
    operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
  }
  [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")
   (set_attr "type" "alus_imm,alus_imm,alus_sreg,alus_sreg,alus_sreg,
		     alus_sreg,alus_sreg,multiple,multiple,multiple")]
)
112 | ||
;; Reloading and elimination of the frame pointer can
;; sometimes cause this optimization to be missed.
(define_peephole2
  [(set (match_operand:SI 0 "arm_general_register_operand" "")
	(match_operand:SI 1 "const_int_operand" ""))
   (set (match_dup 0)
	(plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
  "TARGET_THUMB1
   && UINTVAL (operands[1]) < 1024
   && (UINTVAL (operands[1]) & 3) == 0"
  [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
  ""
)
126 | ||
;; 64-bit subtract: SUBS sets the borrow consumed by SBCS, so the two
;; instructions must not be separated (see the note at the top of file).
(define_insn "*thumb_subdi3"
  [(set (match_operand:DI 0 "register_operand" "=l")
	(minus:DI (match_operand:DI 1 "register_operand" "0")
		  (match_operand:DI 2 "register_operand" "l")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_THUMB1"
  "subs\\t%Q0, %Q0, %Q2\;sbcs\\t%R0, %R0, %R2"
  [(set_attr "length" "4")
   (set_attr "type" "multiple")]
)
137 | ||
;; Changes to the constraints of this pattern must be propagated to those of
;; atomic subtractions in sync.md and to the logic for bind_old_new in
;; arm_split_atomic_op in arm.c.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
(define_insn "thumb1_subsi3_insn"
  [(set (match_operand:SI 0 "register_operand" "=l")
	(minus:SI (match_operand:SI 1 "register_operand" "l")
		  (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
  "TARGET_THUMB1"
  "subs\\t%0, %1, %2"
  [(set_attr "length" "2")
   (set_attr "conds" "set")
   (set_attr "type" "alus_sreg")]
)
152 | ||
;; Unfortunately on Thumb the '&'/'0' trick can fail when operands
;; 1 and 2 are the same, because reload will make operand 0 match
;; operand 1 without realizing that this conflicts with operand 2.  We fix
;; this by adding another alternative to match this case, and then `reload'
;; it ourselves.  This alternative must come first.
(define_insn "*thumb_mulsi3"
  [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
	(mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
		 (match_operand:SI 2 "register_operand" "l,l,l")))]
  "TARGET_THUMB1 && !arm_arch6"
  "@
   movs\\t%0, %1\;muls\\t%0, %2
   mov\\t%0, %1\;muls\\t%0, %2
   muls\\t%0, %2"
  [(set_attr "length" "4,4,2")
   (set_attr "type" "muls")]
)
170 | ||
;; armv6 multiply: one of the sources must be tied to the destination,
;; so all three alternatives emit a single MULS.
(define_insn "*thumb_mulsi3_v6"
  [(set (match_operand:SI 0 "register_operand" "=l,l,l")
	(mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
		 (match_operand:SI 2 "register_operand" "l,0,0")))]
  "TARGET_THUMB1 && arm_arch6"
  "@
   muls\\t%0, %2
   muls\\t%0, %1
   muls\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "type" "muls")]
)
183 | ||
;; Changes to the constraints of this pattern must be propagated to those of
;; atomic bitwise ANDs and NANDs in sync.md and to the logic for bind_old_new
;; in arm_split_atomic_op in arm.c.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
;; NOTE(review): the "type" attribute says logic_imm although both source
;; operands are registers (the sibling ior/xor patterns use logics_reg) —
;; confirm against the scheduler descriptions before changing.
(define_insn "*thumb1_andsi3_insn"
  [(set (match_operand:SI 0 "register_operand" "=l")
	(and:SI (match_operand:SI 1 "register_operand" "%0")
		(match_operand:SI 2 "register_operand" "l")))]
  "TARGET_THUMB1"
  "ands\\t%0, %2"
  [(set_attr "length" "2")
   (set_attr "type" "logic_imm")
   (set_attr "conds" "set")])
197 | ||
;; Implement zero_extract as a left shift that discards the bits above
;; the field followed by a logical right shift that aligns it to bit 0.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
	(zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
			 (match_operand:SI 2 "const_int_operand" "")
			 (match_operand:SI 3 "const_int_operand" "")))
   (clobber (match_operand:SI 4 "s_register_operand" ""))]
  "TARGET_THUMB1"
  [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
  "{
     HOST_WIDE_INT temp = INTVAL (operands[2]);

     operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
     operands[3] = GEN_INT (32 - temp);
   }"
)
214 | ||
;; Implement sign_extract as a left shift followed by an arithmetic
;; right shift, so the field's top bit is replicated into the result.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
	(sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
			 (match_operand:SI 2 "const_int_operand" "")
			 (match_operand:SI 3 "const_int_operand" "")))]
  "TARGET_THUMB1"
  [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
  "{
     HOST_WIDE_INT temp = INTVAL (operands[2]);

     operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
     operands[3] = GEN_INT (32 - temp);
   }"
)
230 | ||
;; Bit clear: BICS requires the AND-NOT source tied to the destination.
(define_insn "thumb1_bicsi3"
  [(set (match_operand:SI 0 "register_operand" "=l")
	(and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
		(match_operand:SI 2 "register_operand" "0")))]
  "TARGET_THUMB1"
  "bics\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "conds" "set")
   (set_attr "type" "logics_reg")]
)
241 | ||
;; Changes to the constraints of this pattern must be propagated to those of
;; atomic inclusive ORs in sync.md and to the logic for bind_old_new in
;; arm_split_atomic_op in arm.c.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
(define_insn "*thumb1_iorsi3_insn"
  [(set (match_operand:SI 0 "register_operand" "=l")
	(ior:SI (match_operand:SI 1 "register_operand" "%0")
		(match_operand:SI 2 "register_operand" "l")))]
  "TARGET_THUMB1"
  "orrs\\t%0, %2"
  [(set_attr "length" "2")
   (set_attr "conds" "set")
   (set_attr "type" "logics_reg")])
255 | ||
;; Changes to the constraints of this pattern must be propagated to those of
;; atomic exclusive ORs in sync.md and to the logic for bind_old_new in
;; arm_split_atomic_op in arm.c.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
(define_insn "*thumb1_xorsi3_insn"
  [(set (match_operand:SI 0 "register_operand" "=l")
	(xor:SI (match_operand:SI 1 "register_operand" "%0")
		(match_operand:SI 2 "register_operand" "l")))]
  "TARGET_THUMB1"
  "eors\\t%0, %2"
  [(set_attr "length" "2")
   (set_attr "conds" "set")
   (set_attr "type" "logics_reg")]
)
270 | ||
;; Left shift by immediate (constraint N) or by register.
(define_insn "*thumb1_ashlsi3"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(ashift:SI (match_operand:SI 1 "register_operand" "l,0")
		   (match_operand:SI 2 "nonmemory_operand" "N,l")))]
  "TARGET_THUMB1"
  "lsls\\t%0, %1, %2"
  [(set_attr "length" "2")
   (set_attr "type" "shift_imm,shift_reg")
   (set_attr "conds" "set")])
280 | ||
;; Arithmetic right shift by immediate or by register.
(define_insn "*thumb1_ashrsi3"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
		     (match_operand:SI 2 "nonmemory_operand" "N,l")))]
  "TARGET_THUMB1"
  "asrs\\t%0, %1, %2"
  [(set_attr "length" "2")
   (set_attr "type" "shift_imm,shift_reg")
   (set_attr "conds" "set")])
290 | ||
;; Logical right shift by immediate or by register.
(define_insn "*thumb1_lshrsi3"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
		     (match_operand:SI 2 "nonmemory_operand" "N,l")))]
  "TARGET_THUMB1"
  "lsrs\\t%0, %1, %2"
  [(set_attr "length" "2")
   (set_attr "type" "shift_imm,shift_reg")
   (set_attr "conds" "set")])
300 | ||
;; Rotate right: register amount only, destination tied to source.
(define_insn "*thumb1_rotrsi3"
  [(set (match_operand:SI 0 "register_operand" "=l")
	(rotatert:SI (match_operand:SI 1 "register_operand" "0")
		     (match_operand:SI 2 "register_operand" "l")))]
  "TARGET_THUMB1"
  "rors\\t%0, %0, %2"
  [(set_attr "type" "shift_reg")
   (set_attr "length" "2")]
)
310 | ||
;; 64-bit negate: RSBS sets the borrow consumed by SBCS, so the
;; three-instruction sequence stays in one pattern.
(define_insn "*thumb1_negdi2"
  [(set (match_operand:DI 0 "register_operand" "=&l")
	(neg:DI (match_operand:DI 1 "register_operand" "l")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_THUMB1"
  "movs\\t%R0, #0\;rsbs\\t%Q0, %Q1, #0\;sbcs\\t%R0, %R1"
  [(set_attr "length" "6")
   (set_attr "type" "multiple")]
)
320 | ||
;; 32-bit negate via reverse-subtract from zero.
(define_insn "*thumb1_negsi2"
  [(set (match_operand:SI 0 "register_operand" "=l")
	(neg:SI (match_operand:SI 1 "register_operand" "l")))]
  "TARGET_THUMB1"
  "rsbs\\t%0, %1, #0"
  [(set_attr "length" "2")
   (set_attr "type" "alu_imm")]
)
329 | ||
;; abs(x) without branches: t = x >> 31 (all ones if negative),
;; then (x + t) ^ t.
(define_insn_and_split "*thumb1_abssi2"
  [(set (match_operand:SI 0 "s_register_operand" "=l")
	(abs:SI (match_operand:SI 1 "s_register_operand" "l")))
   (clobber (match_scratch:SI 2 "=&l"))]
  "TARGET_THUMB1"
  "#"
  "TARGET_THUMB1 && reload_completed"
  [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
   (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
  ""
  [(set_attr "length" "6")
   (set_attr "type" "multiple")]
)
344 | ||
;; -abs(x) without branches: t = x >> 31, then (t - x) ^ t.
(define_insn_and_split "*thumb1_neg_abssi2"
  [(set (match_operand:SI 0 "s_register_operand" "=l")
	(neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
   (clobber (match_scratch:SI 2 "=&l"))]
  "TARGET_THUMB1"
  "#"
  "TARGET_THUMB1 && reload_completed"
  [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
   (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
   (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
  ""
  [(set_attr "length" "6")
   (set_attr "type" "multiple")]
)
359 | ||
;; Bitwise NOT.
(define_insn "*thumb1_one_cmplsi2"
  [(set (match_operand:SI 0 "register_operand" "=l")
	(not:SI (match_operand:SI 1 "register_operand" "l")))]
  "TARGET_THUMB1"
  "mvns\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "type" "mvn_reg")]
)
368 | ||
;; Zero-extend HI->SI: UXTH on armv6, otherwise split (alternative 0);
;; the memory alternative uses LDRH, rewriting SP-based addresses first.
(define_insn "*thumb1_zero_extendhisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
  "TARGET_THUMB1"
{
  rtx mem;

  if (which_alternative == 0 && arm_arch6)
    return "uxth\t%0, %1";
  if (which_alternative == 0)
    return "#";

  mem = XEXP (operands[1], 0);

  if (GET_CODE (mem) == CONST)
    mem = XEXP (mem, 0);

  if (GET_CODE (mem) == PLUS)
    {
      rtx a = XEXP (mem, 0);

      /* This can happen due to bugs in reload.  */
      if (REG_P (a) && REGNO (a) == SP_REGNUM)
	{
	  rtx ops[2];
	  ops[0] = operands[0];
	  ops[1] = a;

	  output_asm_insn ("mov\t%0, %1", ops);

	  XEXP (mem, 0) = operands[0];
	}
    }

  return "ldrh\t%0, %1";
}
  [(set_attr_alternative "length"
			 [(if_then_else (eq_attr "is_arch6" "yes")
					(const_int 2) (const_int 4))
			  (const_int 4)])
   (set_attr "type" "extend,load_byte")]
)
411 | ||
;; Zero-extend QI->SI without armv6: register source is split ("#"),
;; memory source uses LDRB.
(define_insn "*thumb1_zero_extendqisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
  "TARGET_THUMB1 && !arm_arch6"
  "@
   #
   ldrb\\t%0, %1"
  [(set_attr "length" "4,2")
   (set_attr "type" "alu_shift_reg,load_byte")
   (set_attr "pool_range" "*,32")]
)
423 | ||
;; Zero-extend QI->SI with armv6: single UXTB or LDRB.
(define_insn "*thumb1_zero_extendqisi2_v6"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
  "TARGET_THUMB1 && arm_arch6"
  "@
   uxtb\\t%0, %1
   ldrb\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "type" "extend,load_byte")]
)
434 | ||
;; We used to have an early-clobber on the scratch register here.
;; However, there's a bug somewhere in reload which means that this
;; can be partially ignored during spill allocation if the memory
;; address also needs reloading; this causes us to die later on when
;; we try to verify the operands.  Fortunately, we don't really need
;; the early-clobber: we can always use operand 0 if operand 2
;; overlaps the address.
(define_insn "thumb1_extendhisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
   (clobber (match_scratch:SI 2 "=X,l"))]
  "TARGET_THUMB1"
  "*
  {
    rtx ops[4];
    rtx mem;

    if (which_alternative == 0 && !arm_arch6)
      return \"#\";
    if (which_alternative == 0)
      return \"sxth\\t%0, %1\";

    mem = XEXP (operands[1], 0);

    /* This code used to try to use 'V', and fix the address only if it was
       offsettable, but this fails for e.g. REG+48 because 48 is outside the
       range of QImode offsets, and offsettable_address_p does a QImode
       address check.  */

    if (GET_CODE (mem) == CONST)
      mem = XEXP (mem, 0);

    if (GET_CODE (mem) == LABEL_REF)
      return \"ldr\\t%0, %1\";

    if (GET_CODE (mem) == PLUS)
      {
	rtx a = XEXP (mem, 0);
	rtx b = XEXP (mem, 1);

	if (GET_CODE (a) == LABEL_REF
	    && CONST_INT_P (b))
	  return \"ldr\\t%0, %1\";

	if (REG_P (b))
	  return \"ldrsh\\t%0, %1\";

	ops[1] = a;
	ops[2] = b;
      }
    else
      {
	ops[1] = mem;
	ops[2] = const0_rtx;
      }

    gcc_assert (REG_P (ops[1]));

    ops[0] = operands[0];
    if (reg_mentioned_p (operands[2], ops[1]))
      ops[3] = ops[0];
    else
      ops[3] = operands[2];
    output_asm_insn (\"movs\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
    return \"\";
  }"
  [(set_attr_alternative "length"
			 [(if_then_else (eq_attr "is_arch6" "yes")
					(const_int 2) (const_int 4))
			  (const_int 4)])
   (set_attr "type" "extend,load_byte")
   (set_attr "pool_range" "*,1018")]
)
508 | ||
;; Rewrite a QImode sign-extending load into reg+reg addressing
;; (the only LDRSB form on Thumb-1), using operand 0 as the index.
(define_split
  [(set (match_operand:SI 0 "register_operand" "")
	(sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
  "TARGET_THUMB1 && reload_completed"
  [(set (match_dup 0) (match_dup 2))
   (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
{
  rtx addr = XEXP (operands[1], 0);

  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  if (GET_CODE (addr) == PLUS
      && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
    /* No split necessary.  */
    FAIL;

  if (GET_CODE (addr) == PLUS
      && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
    FAIL;

  if (reg_overlap_mentioned_p (operands[0], addr))
    {
      rtx t = gen_lowpart (QImode, operands[0]);
      emit_move_insn (t, operands[1]);
      emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
      DONE;
    }

  if (REG_P (addr))
    {
      addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
      operands[2] = const0_rtx;
    }
  else if (GET_CODE (addr) != PLUS)
    FAIL;
  else if (REG_P (XEXP (addr, 0)))
    {
      operands[2] = XEXP (addr, 1);
      addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
    }
  else
    {
      operands[2] = XEXP (addr, 0);
      addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
    }

  operands[3] = change_address (operands[1], QImode, addr);
})
558 | ||
;; Fold a base-register increment into the offset register of a
;; following reg+reg LDRSB, when both registers die or equal the dest.
(define_peephole2
  [(set (match_operand:SI 0 "register_operand" "")
	(plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
   (set (match_operand:SI 2 "register_operand" "") (const_int 0))
   (set (match_operand:SI 3 "register_operand" "")
	(sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
  "TARGET_THUMB1
   && GET_CODE (XEXP (operands[4], 0)) == PLUS
   && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
   && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
   && (peep2_reg_dead_p (3, operands[0])
       || rtx_equal_p (operands[0], operands[3]))
   && (peep2_reg_dead_p (3, operands[2])
       || rtx_equal_p (operands[2], operands[3]))"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
{
  rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
  operands[4] = change_address (operands[4], QImode, addr);
})
579 | ||
;; Sign-extend QI->SI: SXTB on armv6; LDRSB only supports reg+reg
;; addressing, anything else is split ("#").
(define_insn "thumb1_extendqisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l,l")
	(sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
  "TARGET_THUMB1"
{
  rtx addr;

  if (which_alternative == 0 && arm_arch6)
    return "sxtb\\t%0, %1";
  if (which_alternative == 0)
    return "#";

  addr = XEXP (operands[1], 0);
  if (GET_CODE (addr) == PLUS
      && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
    return "ldrsb\\t%0, %1";

  return "#";
}
  [(set_attr_alternative "length"
			 [(if_then_else (eq_attr "is_arch6" "yes")
					(const_int 2) (const_int 4))
			  (const_int 2)
			  (if_then_else (eq_attr "is_arch6" "yes")
					(const_int 4) (const_int 6))])
   (set_attr "type" "extend,load_byte,load_byte")]
)
607 | ||
;;; ??? This should have alternatives for constants.
;;; ??? This was originally identical to the movdf_insn pattern.
;;; ??? The 'i' constraint looks funny, but it should always be replaced by
;;; thumb_reorg with a memory reference.
(define_insn "*thumb1_movdi_insn"
  [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,r,l,>,l, m,*r")
	(match_operand:DI 1 "general_operand"       "l, I,J,j,>,l,mi,l,*r"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], DImode)
       || register_operand (operands[1], DImode))"
  "*
  {
  switch (which_alternative)
    {
    default:
    case 0:
      if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
	return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
      return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
    case 1:
      return \"movs\\t%Q0, %1\;movs\\t%R0, #0\";
    case 2:
      operands[1] = GEN_INT (- INTVAL (operands[1]));
      return \"movs\\t%Q0, %1\;rsbs\\t%Q0, %Q0, #0\;asrs\\t%R0, %Q0, #31\";
    case 3:
      gcc_assert (TARGET_HAVE_MOVT);
      /* Fixed: the high word must be addressed via the %R operand
	 modifier; a bare \"R0\" emitted the literal register r0
	 regardless of which register holds the high word.  */
      return \"movw\\t%Q0, %L1\;movs\\t%R0, #0\";
    case 4:
      return \"ldmia\\t%1, {%0, %H0}\";
    case 5:
      return \"stmia\\t%0, {%1, %H1}\";
    case 6:
      return thumb_load_double_from_address (operands);
    case 7:
      operands[2] = gen_rtx_MEM (SImode,
			     plus_constant (Pmode, XEXP (operands[0], 0), 4));
      output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
      return \"\";
    case 8:
      if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
	return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
      return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
    }
  }"
  [(set_attr "length" "4,4,6,6,2,2,6,4,4")
   (set_attr "type" "multiple,multiple,multiple,multiple,load_8,store_8,load_8,store_8,multiple")
   (set_attr "arch" "t1,t1,t1,v8mb,t1,t1,t1,t1,t1")
   (set_attr "pool_range" "*,*,*,*,*,*,1018,*,*")]
)
657 | ||
;; SImode move.  Alternative 7 (required_for_purecode) synthesizes the
;; value inline instead of using a literal pool.
;; Fixed: the escaped \" string syntax (valid only inside a "* ..."
;; template) was used inside the brace-delimited C block; normalized to
;; plain C string literals so the block compiles.
(define_insn "*thumb1_movsi_insn"
  [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,r,l,l,l,>,l, l, m,*l*h*k")
	(match_operand:SI 1 "general_operand"       "l, I,j,J,K,>,l,i, mi,l,*l*h*k"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], SImode)
       || register_operand (operands[1], SImode))"
{
  switch (which_alternative)
    {
    default:
    case 0: return "movs\t%0, %1";
    case 1: return "movs\t%0, %1";
    case 2: return "movw\t%0, %1";
    case 3: return "#";
    case 4: return "#";
    case 5: return "ldmia\t%1, {%0}";
    case 6: return "stmia\t%0, {%1}";
    case 7:
      /* pure-code alternative: build the constant byte by byte,
	 instead of loading it from a constant pool.  */
      if (arm_valid_symbolic_address_p (operands[1]))
	{
	  output_asm_insn ("movs\t%0, #:upper8_15:%1", operands);
	  output_asm_insn ("lsls\t%0, #8", operands);
	  output_asm_insn ("adds\t%0, #:upper0_7:%1", operands);
	  output_asm_insn ("lsls\t%0, #8", operands);
	  output_asm_insn ("adds\t%0, #:lower8_15:%1", operands);
	  output_asm_insn ("lsls\t%0, #8", operands);
	  output_asm_insn ("adds\t%0, #:lower0_7:%1", operands);
	  return "";
	}
      else if (GET_CODE (operands[1]) == CONST_INT)
	{
	  thumb1_gen_const_int_print (operands[0], INTVAL (operands[1]));
	  return "";
	}

      gcc_unreachable ();

    case 8: return "ldr\t%0, %1";
    case 9: return "str\t%1, %0";
    case 10: return "mov\t%0, %1";
    }
}
  [(set_attr "length" "2,2,4,4,4,2,2,14,2,2,2")
   (set_attr "type" "mov_reg,mov_imm,mov_imm,multiple,multiple,load_4,store_4,alu_sreg,load_4,store_4,mov_reg")
   (set_attr "pool_range" "*,*,*,*,*,*,*, *,1018,*,*")
   (set_attr "arch" "t1,t1,v8mb,t1,t1,t1,t1,t1,t1,t1,t1")
   (set_attr "required_for_purecode" "no,no,no,no,no,no,no,yes,no,no,no")
   (set_attr "conds" "set,clob,nocond,*,*,nocond,nocond,clob,nocond,nocond,nocond")])
dbb73879 TG |
;; Split the load of a 64-bit constant into two loads of the high and low
;; 32-bit parts, to see if we can load them in fewer instructions or
;; fewer cycles.  Small 64-bit constants satisfying constraint J are
;; better handled by the thumb1_movdi_insn pattern itself.
(define_split
  [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
	(match_operand:ANY64 1 "immediate_operand" ""))]
  "TARGET_THUMB1 && reload_completed && !satisfies_constraint_J (operands[1])"
  [(set (match_dup 0) (match_dup 1))
   (set (match_dup 2) (match_dup 3))]
  "
  operands[2] = gen_highpart (SImode, operands[0]);
  operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
				   operands[1]);
  operands[0] = gen_lowpart (SImode, operands[0]);
  operands[1] = gen_lowpart (SImode, operands[1]);
  "
)
727 | ||
1d02d8b8 TG |
;; Constraint-J constants (small negatives): load the positive value
;; and negate it.
(define_split
  [(set (match_operand:SI 0 "register_operand" "")
	(match_operand:SI 1 "const_int_operand" ""))]
  "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 0) (neg:SI (match_dup 2)))]
  "
  {
    operands[1] = GEN_INT (- INTVAL (operands[1]));
    operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
  }"
)
740 | ||
;; Constraint-K constants (an 8-bit value shifted left): load the 8-bit
;; value and shift it into place.
(define_split
  [(set (match_operand:SI 0 "register_operand" "")
	(match_operand:SI 1 "const_int_operand" ""))]
  "TARGET_THUMB1 && satisfies_constraint_K (operands[1])
   && !(TARGET_HAVE_MOVT && satisfies_constraint_j (operands[1]))"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
  "
  {
    unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
    unsigned HOST_WIDE_INT mask = 0xff;
    int i;

    /* Find the smallest shift that places the value inside one byte.  */
    for (i = 0; i < 25; i++)
      if ((val & (mask << i)) == val)
	break;

    /* Don't split if the shift is zero.  */
    if (i == 0)
      FAIL;

    operands[1] = GEN_INT (val >> i);
    operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
    operands[3] = GEN_INT (i);
  }"
)
767 | ||
768 | ;; For thumb1 split imm move [256-510] into mov [1-255] and add #255 | |
769 | (define_split | |
770 | [(set (match_operand:SI 0 "register_operand" "") | |
771 | (match_operand:SI 1 "const_int_operand" ""))] | |
2b9509a3 TP |
772 | "TARGET_THUMB1 && satisfies_constraint_Pe (operands[1]) |
773 | && !(TARGET_HAVE_MOVT && satisfies_constraint_j (operands[1]))" | |
1d02d8b8 TG |
774 | [(set (match_dup 2) (match_dup 1)) |
775 | (set (match_dup 0) (plus:SI (match_dup 2) (match_dup 3)))] | |
776 | " | |
777 | { | |
778 | operands[1] = GEN_INT (INTVAL (operands[1]) - 255); | |
779 | operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0]; | |
780 | operands[3] = GEN_INT (255); | |
781 | }" | |
782 | ) | |
783 | ||
2033a63c CL |
784 | (define_split |
785 | [(set (match_operand:SI 0 "register_operand" "") | |
786 | (match_operand:SI 1 "const_int_operand" ""))] | |
787 | "TARGET_THUMB1 | |
788 | && arm_disable_literal_pool | |
789 | && GET_CODE (operands[1]) == CONST_INT | |
790 | && !TARGET_HAVE_MOVT | |
791 | && !satisfies_constraint_K (operands[1])" | |
792 | [(clobber (const_int 0))] | |
793 | " | |
011f5e92 | 794 | thumb1_gen_const_int_rtl (operands[0], INTVAL (operands[1])); |
2033a63c CL |
795 | DONE; |
796 | " | |
797 | ) | |
798 | ||
1d02d8b8 | 799 | (define_insn "*thumb1_movhi_insn" |
2b9509a3 TP |
800 | [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,l*r,*h,l,r") |
801 | (match_operand:HI 1 "general_operand" "l,m,l,k*h,*r,I,n"))] | |
1d02d8b8 TG |
802 | "TARGET_THUMB1 |
803 | && ( register_operand (operands[0], HImode) | |
804 | || register_operand (operands[1], HImode))" | |
805 | "* | |
806 | switch (which_alternative) | |
807 | { | |
decfc6e1 | 808 | case 0: return \"adds %0, %1, #0\"; |
1d02d8b8 TG |
809 | case 2: return \"strh %1, %0\"; |
810 | case 3: return \"mov %0, %1\"; | |
811 | case 4: return \"mov %0, %1\"; | |
decfc6e1 | 812 | case 5: return \"movs %0, %1\"; |
2b9509a3 TP |
813 | case 6: gcc_assert (TARGET_HAVE_MOVT); |
814 | return \"movw %0, %L1\"; | |
1d02d8b8 TG |
815 | default: gcc_unreachable (); |
816 | case 1: | |
817 | /* The stack pointer can end up being taken as an index register. | |
818 | Catch this case here and deal with it. */ | |
819 | if (GET_CODE (XEXP (operands[1], 0)) == PLUS | |
820 | && REG_P (XEXP (XEXP (operands[1], 0), 0)) | |
821 | && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM) | |
822 | { | |
823 | rtx ops[2]; | |
824 | ops[0] = operands[0]; | |
825 | ops[1] = XEXP (XEXP (operands[1], 0), 0); | |
826 | ||
827 | output_asm_insn (\"mov %0, %1\", ops); | |
828 | ||
829 | XEXP (XEXP (operands[1], 0), 0) = operands[0]; | |
830 | ||
831 | } | |
832 | return \"ldrh %0, %1\"; | |
833 | }" | |
2b9509a3 | 834 | [(set_attr "length" "2,4,2,2,2,2,4") |
89b2133e | 835 | (set_attr "type" "alus_imm,load_4,store_4,mov_reg,mov_reg,mov_imm,mov_imm") |
2b9509a3 TP |
836 | (set_attr "arch" "t1,t1,t1,t1,t1,t1,v8mb") |
837 | (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob,nocond")]) | |
1d02d8b8 TG |
838 | |
839 | (define_expand "thumb_movhi_clobber" | |
cd65e265 DZ |
840 | [(set (match_operand:HI 0 "memory_operand") |
841 | (match_operand:HI 1 "register_operand")) | |
842 | (clobber (match_operand:DI 2 "register_operand"))] | |
1d02d8b8 TG |
843 | "TARGET_THUMB1" |
844 | " | |
845 | if (strict_memory_address_p (HImode, XEXP (operands[0], 0)) | |
846 | && REGNO (operands[1]) <= LAST_LO_REGNUM) | |
847 | { | |
848 | emit_insn (gen_movhi (operands[0], operands[1])); | |
849 | DONE; | |
850 | } | |
851 | /* XXX Fixme, need to handle other cases here as well. */ | |
852 | gcc_unreachable (); | |
853 | " | |
854 | ) | |
855 | ||
856 | (define_insn "*thumb1_movqi_insn" | |
4da2eb98 MZ |
857 | [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,l*r,*h,l") |
858 | (match_operand:QI 1 "general_operand" "l,m,l,k*h,*r,I"))] | |
1d02d8b8 TG |
859 | "TARGET_THUMB1 |
860 | && ( register_operand (operands[0], QImode) | |
861 | || register_operand (operands[1], QImode))" | |
862 | "@ | |
decfc6e1 | 863 | adds\\t%0, %1, #0 |
1d02d8b8 TG |
864 | ldrb\\t%0, %1 |
865 | strb\\t%1, %0 | |
866 | mov\\t%0, %1 | |
867 | mov\\t%0, %1 | |
decfc6e1 | 868 | movs\\t%0, %1" |
1d02d8b8 | 869 | [(set_attr "length" "2") |
89b2133e | 870 | (set_attr "type" "alu_imm,load_4,store_4,mov_reg,mov_imm,mov_imm") |
1d02d8b8 TG |
871 | (set_attr "pool_range" "*,32,*,*,*,*") |
872 | (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")]) | |
873 | ||
874 | (define_insn "*thumb1_movhf" | |
e24f6408 CL |
875 | [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,l,m,*r,*h") |
876 | (match_operand:HF 1 "general_operand" "l, m,F,l,*h,*r"))] | |
1d02d8b8 TG |
877 | "TARGET_THUMB1 |
878 | && ( s_register_operand (operands[0], HFmode) | |
879 | || s_register_operand (operands[1], HFmode))" | |
880 | "* | |
881 | switch (which_alternative) | |
882 | { | |
04dc44ac TG |
883 | case 0: |
884 | return \"movs\\t%0, %1\"; | |
1d02d8b8 TG |
885 | case 1: |
886 | { | |
887 | rtx addr; | |
888 | gcc_assert (MEM_P (operands[1])); | |
889 | addr = XEXP (operands[1], 0); | |
890 | if (GET_CODE (addr) == LABEL_REF | |
891 | || (GET_CODE (addr) == CONST | |
892 | && GET_CODE (XEXP (addr, 0)) == PLUS | |
893 | && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF | |
894 | && CONST_INT_P (XEXP (XEXP (addr, 0), 1)))) | |
895 | { | |
896 | /* Constant pool entry. */ | |
897 | return \"ldr\\t%0, %1\"; | |
898 | } | |
899 | return \"ldrh\\t%0, %1\"; | |
900 | } | |
e24f6408 CL |
901 | case 2: |
902 | { | |
903 | int bits; | |
904 | int high; | |
905 | rtx ops[3]; | |
906 | ||
907 | bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]), | |
908 | HFmode); | |
909 | ops[0] = operands[0]; | |
910 | high = (bits >> 8) & 0xff; | |
911 | ops[1] = GEN_INT (high); | |
912 | ops[2] = GEN_INT (bits & 0xff); | |
913 | if (high != 0) | |
914 | output_asm_insn (\"movs\\t%0, %1\;lsls\\t%0, #8\;adds\\t%0, %2\", ops); | |
915 | else | |
916 | output_asm_insn (\"movs\\t%0, %2\", ops); | |
917 | ||
918 | return \"\"; | |
919 | } | |
920 | case 3: return \"strh\\t%1, %0\"; | |
1d02d8b8 TG |
921 | default: return \"mov\\t%0, %1\"; |
922 | } | |
923 | " | |
e24f6408 CL |
924 | [(set_attr "length" "2,2,6,2,2,2") |
925 | (set_attr "type" "mov_reg,load_4,mov_reg,store_4,mov_reg,mov_reg") | |
926 | (set_attr "pool_range" "*,1018,*,*,*,*") | |
927 | (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond")]) | |
928 | ||
1d02d8b8 TG |
929 | ;;; ??? This should have alternatives for constants. |
930 | (define_insn "*thumb1_movsf_insn" | |
931 | [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h") | |
932 | (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))] | |
933 | "TARGET_THUMB1 | |
934 | && ( register_operand (operands[0], SFmode) | |
935 | || register_operand (operands[1], SFmode))" | |
936 | "@ | |
decfc6e1 | 937 | adds\\t%0, %1, #0 |
1d02d8b8 TG |
938 | ldmia\\t%1, {%0} |
939 | stmia\\t%0, {%1} | |
940 | ldr\\t%0, %1 | |
941 | str\\t%1, %0 | |
942 | mov\\t%0, %1 | |
943 | mov\\t%0, %1" | |
944 | [(set_attr "length" "2") | |
89b2133e | 945 | (set_attr "type" "alus_imm,load_4,store_4,load_4,store_4,mov_reg,mov_reg") |
1d02d8b8 TG |
946 | (set_attr "pool_range" "*,*,*,1018,*,*,*") |
947 | (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")] | |
948 | ) | |
949 | ||
950 | ;;; ??? This should have alternatives for constants. | |
951 | ;;; ??? This was originally identical to the movdi_insn pattern. | |
952 | ;;; ??? The 'F' constraint looks funny, but it should always be replaced by | |
953 | ;;; thumb_reorg with a memory reference. | |
954 | (define_insn "*thumb_movdf_insn" | |
955 | [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r") | |
956 | (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))] | |
957 | "TARGET_THUMB1 | |
958 | && ( register_operand (operands[0], DFmode) | |
959 | || register_operand (operands[1], DFmode))" | |
960 | "* | |
961 | switch (which_alternative) | |
962 | { | |
963 | default: | |
964 | case 0: | |
965 | if (REGNO (operands[1]) == REGNO (operands[0]) + 1) | |
decfc6e1 TG |
966 | return \"adds\\t%0, %1, #0\;adds\\t%H0, %H1, #0\"; |
967 | return \"adds\\t%H0, %H1, #0\;adds\\t%0, %1, #0\"; | |
1d02d8b8 TG |
968 | case 1: |
969 | return \"ldmia\\t%1, {%0, %H0}\"; | |
970 | case 2: | |
971 | return \"stmia\\t%0, {%1, %H1}\"; | |
972 | case 3: | |
973 | return thumb_load_double_from_address (operands); | |
974 | case 4: | |
975 | operands[2] = gen_rtx_MEM (SImode, | |
976 | plus_constant (Pmode, | |
977 | XEXP (operands[0], 0), 4)); | |
978 | output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands); | |
979 | return \"\"; | |
980 | case 5: | |
981 | if (REGNO (operands[1]) == REGNO (operands[0]) + 1) | |
982 | return \"mov\\t%0, %1\;mov\\t%H0, %H1\"; | |
983 | return \"mov\\t%H0, %H1\;mov\\t%0, %1\"; | |
984 | } | |
985 | " | |
986 | [(set_attr "length" "4,2,2,6,4,4") | |
89b2133e | 987 | (set_attr "type" "multiple,load_8,store_8,load_8,store_8,multiple") |
1d02d8b8 TG |
988 | (set_attr "pool_range" "*,*,*,1018,*,*")] |
989 | ) | |
990 | \f | |
991 | ||
992 | ;; Thumb block-move insns | |
993 | ||
76715c32 | 994 | (define_insn "cpymem12b" |
1d02d8b8 TG |
995 | [(set (mem:SI (match_operand:SI 2 "register_operand" "0")) |
996 | (mem:SI (match_operand:SI 3 "register_operand" "1"))) | |
997 | (set (mem:SI (plus:SI (match_dup 2) (const_int 4))) | |
998 | (mem:SI (plus:SI (match_dup 3) (const_int 4)))) | |
999 | (set (mem:SI (plus:SI (match_dup 2) (const_int 8))) | |
1000 | (mem:SI (plus:SI (match_dup 3) (const_int 8)))) | |
1001 | (set (match_operand:SI 0 "register_operand" "=l") | |
1002 | (plus:SI (match_dup 2) (const_int 12))) | |
1003 | (set (match_operand:SI 1 "register_operand" "=l") | |
1004 | (plus:SI (match_dup 3) (const_int 12))) | |
1005 | (clobber (match_scratch:SI 4 "=&l")) | |
1006 | (clobber (match_scratch:SI 5 "=&l")) | |
1007 | (clobber (match_scratch:SI 6 "=&l"))] | |
1008 | "TARGET_THUMB1" | |
1009 | "* return thumb_output_move_mem_multiple (3, operands);" | |
1010 | [(set_attr "length" "4") | |
1011 | ; This isn't entirely accurate... It loads as well, but in terms of | |
1012 | ; scheduling the following insn it is better to consider it as a store | |
89b2133e | 1013 | (set_attr "type" "store_12")] |
1d02d8b8 TG |
1014 | ) |
1015 | ||
76715c32 | 1016 | (define_insn "cpymem8b" |
1d02d8b8 TG |
1017 | [(set (mem:SI (match_operand:SI 2 "register_operand" "0")) |
1018 | (mem:SI (match_operand:SI 3 "register_operand" "1"))) | |
1019 | (set (mem:SI (plus:SI (match_dup 2) (const_int 4))) | |
1020 | (mem:SI (plus:SI (match_dup 3) (const_int 4)))) | |
1021 | (set (match_operand:SI 0 "register_operand" "=l") | |
1022 | (plus:SI (match_dup 2) (const_int 8))) | |
1023 | (set (match_operand:SI 1 "register_operand" "=l") | |
1024 | (plus:SI (match_dup 3) (const_int 8))) | |
1025 | (clobber (match_scratch:SI 4 "=&l")) | |
1026 | (clobber (match_scratch:SI 5 "=&l"))] | |
1027 | "TARGET_THUMB1" | |
1028 | "* return thumb_output_move_mem_multiple (2, operands);" | |
1029 | [(set_attr "length" "4") | |
1030 | ; This isn't entirely accurate... It loads as well, but in terms of | |
1031 | ; scheduling the following insn it is better to consider it as a store | |
89b2133e | 1032 | (set_attr "type" "store_8")] |
1d02d8b8 TG |
1033 | ) |
1034 | ||
1035 | \f | |
1036 | ;; A pattern to recognize a special situation and optimize for it. | |
1037 | ;; On the thumb, zero-extension from memory is preferrable to sign-extension | |
1038 | ;; due to the available addressing modes. Hence, convert a signed comparison | |
1039 | ;; with zero into an unsigned comparison with 127 if possible. | |
1040 | (define_expand "cbranchqi4" | |
1041 | [(set (pc) (if_then_else | |
1042 | (match_operator 0 "lt_ge_comparison_operator" | |
cd65e265 DZ |
1043 | [(match_operand:QI 1 "memory_operand") |
1044 | (match_operand:QI 2 "const0_operand")]) | |
1d02d8b8 TG |
1045 | (label_ref (match_operand 3 "" "")) |
1046 | (pc)))] | |
1047 | "TARGET_THUMB1" | |
1048 | { | |
1049 | rtx xops[4]; | |
1050 | xops[1] = gen_reg_rtx (SImode); | |
1051 | emit_insn (gen_zero_extendqisi2 (xops[1], operands[1])); | |
1052 | xops[2] = GEN_INT (127); | |
1053 | xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU, | |
1054 | VOIDmode, xops[1], xops[2]); | |
1055 | xops[3] = operands[3]; | |
1056 | emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3])); | |
1057 | DONE; | |
1058 | }) | |
1059 | ||
5ce15300 TP |
1060 | ;; A pattern for the CB(N)Z instruction added in ARMv8-M Baseline profile, |
1061 | ;; adapted from cbranchsi4_insn. Modifying cbranchsi4_insn instead leads to | |
1062 | ;; code generation difference for ARMv6-M because the minimum length of the | |
1063 | ;; instruction becomes 2 even for ARMv6-M due to a limitation in genattrtab's | |
1064 | ;; handling of PC in the length condition. | |
1065 | (define_insn "thumb1_cbz" | |
1066 | [(set (pc) (if_then_else | |
1067 | (match_operator 0 "equality_operator" | |
1068 | [(match_operand:SI 1 "s_register_operand" "l") | |
1069 | (const_int 0)]) | |
1070 | (label_ref (match_operand 2 "" "")) | |
1071 | (pc)))] | |
1072 | "TARGET_THUMB1 && TARGET_HAVE_CBZ" | |
1073 | { | |
1074 | if (get_attr_length (insn) == 2) | |
1075 | { | |
1076 | if (GET_CODE (operands[0]) == EQ) | |
1077 | return "cbz\t%1, %l2"; | |
1078 | else | |
1079 | return "cbnz\t%1, %l2"; | |
1080 | } | |
1081 | else | |
1082 | { | |
1083 | rtx t = cfun->machine->thumb1_cc_insn; | |
1084 | if (t != NULL_RTX) | |
1085 | { | |
1086 | if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1]) | |
1087 | || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2])) | |
1088 | t = NULL_RTX; | |
54138d95 | 1089 | if (cfun->machine->thumb1_cc_mode == CC_NZmode) |
5ce15300 | 1090 | { |
54138d95 | 1091 | if (!nz_comparison_operator (operands[0], VOIDmode)) |
5ce15300 TP |
1092 | t = NULL_RTX; |
1093 | } | |
1094 | else if (cfun->machine->thumb1_cc_mode != CCmode) | |
1095 | t = NULL_RTX; | |
1096 | } | |
1097 | if (t == NULL_RTX) | |
1098 | { | |
1099 | output_asm_insn ("cmp\t%1, #0", operands); | |
1100 | cfun->machine->thumb1_cc_insn = insn; | |
1101 | cfun->machine->thumb1_cc_op0 = operands[1]; | |
1102 | cfun->machine->thumb1_cc_op1 = operands[2]; | |
1103 | cfun->machine->thumb1_cc_mode = CCmode; | |
1104 | } | |
1105 | else | |
1106 | /* Ensure we emit the right type of condition code on the jump. */ | |
1107 | XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode, | |
1108 | CC_REGNUM); | |
1109 | ||
1110 | switch (get_attr_length (insn)) | |
1111 | { | |
1112 | case 4: return "b%d0\t%l2"; | |
1113 | case 6: return "b%D0\t.LCB%=;b\t%l2\t%@long jump\n.LCB%=:"; | |
1114 | case 8: return "b%D0\t.LCB%=;bl\t%l2\t%@far jump\n.LCB%=:"; | |
1115 | default: gcc_unreachable (); | |
1116 | } | |
1117 | } | |
1118 | } | |
1119 | [(set (attr "far_jump") | |
1120 | (if_then_else | |
1121 | (eq_attr "length" "8") | |
1122 | (const_string "yes") | |
1123 | (const_string "no"))) | |
1124 | (set (attr "length") | |
1125 | (if_then_else | |
1126 | (and (ge (minus (match_dup 2) (pc)) (const_int 2)) | |
1127 | (le (minus (match_dup 2) (pc)) (const_int 128))) | |
1128 | (const_int 2) | |
1129 | (if_then_else | |
1130 | (and (ge (minus (match_dup 2) (pc)) (const_int -250)) | |
1131 | (le (minus (match_dup 2) (pc)) (const_int 256))) | |
1132 | (const_int 4) | |
1133 | (if_then_else | |
1134 | (and (ge (minus (match_dup 2) (pc)) (const_int -2040)) | |
1135 | (le (minus (match_dup 2) (pc)) (const_int 2048))) | |
1136 | (const_int 6) | |
1137 | (const_int 8))))) | |
1138 | (set (attr "type") | |
1139 | (if_then_else | |
1140 | (eq_attr "length" "2") | |
1141 | (const_string "branch") | |
1142 | (const_string "multiple")))] | |
1143 | ) | |
1144 | ||
3a5a75b5 TP |
1145 | ;; Changes to the constraints of this pattern must be propagated to those of |
1146 | ;; atomic compare_and_swap splitters in sync.md. These must be at least as | |
1147 | ;; strict as the constraints here and aim to be as permissive. | |
1d02d8b8 TG |
1148 | (define_insn "cbranchsi4_insn" |
1149 | [(set (pc) (if_then_else | |
1150 | (match_operator 0 "arm_comparison_operator" | |
1151 | [(match_operand:SI 1 "s_register_operand" "l,l*h") | |
1152 | (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")]) | |
1153 | (label_ref (match_operand 3 "" "")) | |
1154 | (pc)))] | |
1155 | "TARGET_THUMB1" | |
1156 | { | |
1157 | rtx t = cfun->machine->thumb1_cc_insn; | |
1158 | if (t != NULL_RTX) | |
1159 | { | |
1160 | if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1]) | |
1161 | || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2])) | |
1162 | t = NULL_RTX; | |
54138d95 | 1163 | if (cfun->machine->thumb1_cc_mode == CC_NZmode) |
1d02d8b8 | 1164 | { |
54138d95 | 1165 | if (!nz_comparison_operator (operands[0], VOIDmode)) |
1d02d8b8 TG |
1166 | t = NULL_RTX; |
1167 | } | |
1168 | else if (cfun->machine->thumb1_cc_mode != CCmode) | |
1169 | t = NULL_RTX; | |
1170 | } | |
1171 | if (t == NULL_RTX) | |
1172 | { | |
1173 | output_asm_insn ("cmp\t%1, %2", operands); | |
1174 | cfun->machine->thumb1_cc_insn = insn; | |
1175 | cfun->machine->thumb1_cc_op0 = operands[1]; | |
1176 | cfun->machine->thumb1_cc_op1 = operands[2]; | |
1177 | cfun->machine->thumb1_cc_mode = CCmode; | |
1178 | } | |
1179 | else | |
1180 | /* Ensure we emit the right type of condition code on the jump. */ | |
1181 | XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode, | |
1182 | CC_REGNUM); | |
1183 | ||
1184 | switch (get_attr_length (insn)) | |
1185 | { | |
1186 | case 4: return \"b%d0\\t%l3\"; | |
1187 | case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\"; | |
1188 | default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\"; | |
1189 | } | |
1190 | } | |
1191 | [(set (attr "far_jump") | |
1192 | (if_then_else | |
1193 | (eq_attr "length" "8") | |
1194 | (const_string "yes") | |
1195 | (const_string "no"))) | |
1196 | (set (attr "length") | |
1197 | (if_then_else | |
1198 | (and (ge (minus (match_dup 3) (pc)) (const_int -250)) | |
1199 | (le (minus (match_dup 3) (pc)) (const_int 256))) | |
1200 | (const_int 4) | |
1201 | (if_then_else | |
1202 | (and (ge (minus (match_dup 3) (pc)) (const_int -2040)) | |
1203 | (le (minus (match_dup 3) (pc)) (const_int 2048))) | |
1204 | (const_int 6) | |
1205 | (const_int 8)))) | |
1206 | (set_attr "type" "multiple")] | |
1207 | ) | |
1208 | ||
3a5a75b5 TP |
1209 | ;; Changes to the constraints of this pattern must be propagated to those of |
1210 | ;; atomic compare_and_swap splitters in sync.md. These must be at least as | |
1211 | ;; strict as the constraints here and aim to be as permissive. | |
1d02d8b8 TG |
1212 | (define_insn "cbranchsi4_scratch" |
1213 | [(set (pc) (if_then_else | |
1214 | (match_operator 4 "arm_comparison_operator" | |
1215 | [(match_operand:SI 1 "s_register_operand" "l,0") | |
1216 | (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")]) | |
1217 | (label_ref (match_operand 3 "" "")) | |
1218 | (pc))) | |
1219 | (clobber (match_scratch:SI 0 "=l,l"))] | |
1220 | "TARGET_THUMB1" | |
1221 | "* | |
decfc6e1 | 1222 | output_asm_insn (\"adds\\t%0, %1, #%n2\", operands); |
1d02d8b8 TG |
1223 | |
1224 | switch (get_attr_length (insn)) | |
1225 | { | |
1226 | case 4: return \"b%d4\\t%l3\"; | |
1227 | case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\"; | |
1228 | default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\"; | |
1229 | } | |
1230 | " | |
1231 | [(set (attr "far_jump") | |
1232 | (if_then_else | |
1233 | (eq_attr "length" "8") | |
1234 | (const_string "yes") | |
1235 | (const_string "no"))) | |
1236 | (set (attr "length") | |
1237 | (if_then_else | |
1238 | (and (ge (minus (match_dup 3) (pc)) (const_int -250)) | |
1239 | (le (minus (match_dup 3) (pc)) (const_int 256))) | |
1240 | (const_int 4) | |
1241 | (if_then_else | |
1242 | (and (ge (minus (match_dup 3) (pc)) (const_int -2040)) | |
1243 | (le (minus (match_dup 3) (pc)) (const_int 2048))) | |
1244 | (const_int 6) | |
1245 | (const_int 8)))) | |
1246 | (set_attr "type" "multiple")] | |
1247 | ) | |
1248 | ||
1249 | (define_insn "*negated_cbranchsi4" | |
1250 | [(set (pc) | |
1251 | (if_then_else | |
1252 | (match_operator 0 "equality_operator" | |
1253 | [(match_operand:SI 1 "s_register_operand" "l") | |
1254 | (neg:SI (match_operand:SI 2 "s_register_operand" "l"))]) | |
1255 | (label_ref (match_operand 3 "" "")) | |
1256 | (pc)))] | |
1257 | "TARGET_THUMB1" | |
1258 | "* | |
1259 | output_asm_insn (\"cmn\\t%1, %2\", operands); | |
1260 | switch (get_attr_length (insn)) | |
1261 | { | |
1262 | case 4: return \"b%d0\\t%l3\"; | |
1263 | case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\"; | |
1264 | default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\"; | |
1265 | } | |
1266 | " | |
1267 | [(set (attr "far_jump") | |
1268 | (if_then_else | |
1269 | (eq_attr "length" "8") | |
1270 | (const_string "yes") | |
1271 | (const_string "no"))) | |
1272 | (set (attr "length") | |
1273 | (if_then_else | |
1274 | (and (ge (minus (match_dup 3) (pc)) (const_int -250)) | |
1275 | (le (minus (match_dup 3) (pc)) (const_int 256))) | |
1276 | (const_int 4) | |
1277 | (if_then_else | |
1278 | (and (ge (minus (match_dup 3) (pc)) (const_int -2040)) | |
1279 | (le (minus (match_dup 3) (pc)) (const_int 2048))) | |
1280 | (const_int 6) | |
1281 | (const_int 8)))) | |
1282 | (set_attr "type" "multiple")] | |
1283 | ) | |
1284 | ||
1285 | (define_insn "*tbit_cbranch" | |
1286 | [(set (pc) | |
1287 | (if_then_else | |
1288 | (match_operator 0 "equality_operator" | |
1289 | [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l") | |
1290 | (const_int 1) | |
1291 | (match_operand:SI 2 "const_int_operand" "i")) | |
1292 | (const_int 0)]) | |
1293 | (label_ref (match_operand 3 "" "")) | |
1294 | (pc))) | |
1295 | (clobber (match_scratch:SI 4 "=l"))] | |
1296 | "TARGET_THUMB1" | |
1297 | "* | |
1298 | { | |
1299 | rtx op[3]; | |
1300 | op[0] = operands[4]; | |
1301 | op[1] = operands[1]; | |
1302 | op[2] = GEN_INT (32 - 1 - INTVAL (operands[2])); | |
1303 | ||
decfc6e1 | 1304 | output_asm_insn (\"lsls\\t%0, %1, %2\", op); |
1d02d8b8 TG |
1305 | switch (get_attr_length (insn)) |
1306 | { | |
1307 | case 4: return \"b%d0\\t%l3\"; | |
1308 | case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\"; | |
1309 | default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\"; | |
1310 | } | |
1311 | }" | |
1312 | [(set (attr "far_jump") | |
1313 | (if_then_else | |
1314 | (eq_attr "length" "8") | |
1315 | (const_string "yes") | |
1316 | (const_string "no"))) | |
1317 | (set (attr "length") | |
1318 | (if_then_else | |
1319 | (and (ge (minus (match_dup 3) (pc)) (const_int -250)) | |
1320 | (le (minus (match_dup 3) (pc)) (const_int 256))) | |
1321 | (const_int 4) | |
1322 | (if_then_else | |
1323 | (and (ge (minus (match_dup 3) (pc)) (const_int -2040)) | |
1324 | (le (minus (match_dup 3) (pc)) (const_int 2048))) | |
1325 | (const_int 6) | |
1326 | (const_int 8)))) | |
1327 | (set_attr "type" "multiple")] | |
1328 | ) | |
1329 | ||
1330 | (define_insn "*tlobits_cbranch" | |
1331 | [(set (pc) | |
1332 | (if_then_else | |
1333 | (match_operator 0 "equality_operator" | |
1334 | [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l") | |
1335 | (match_operand:SI 2 "const_int_operand" "i") | |
1336 | (const_int 0)) | |
1337 | (const_int 0)]) | |
1338 | (label_ref (match_operand 3 "" "")) | |
1339 | (pc))) | |
1340 | (clobber (match_scratch:SI 4 "=l"))] | |
1341 | "TARGET_THUMB1" | |
1342 | "* | |
1343 | { | |
1344 | rtx op[3]; | |
1345 | op[0] = operands[4]; | |
1346 | op[1] = operands[1]; | |
1347 | op[2] = GEN_INT (32 - INTVAL (operands[2])); | |
1348 | ||
decfc6e1 | 1349 | output_asm_insn (\"lsls\\t%0, %1, %2\", op); |
1d02d8b8 TG |
1350 | switch (get_attr_length (insn)) |
1351 | { | |
1352 | case 4: return \"b%d0\\t%l3\"; | |
1353 | case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\"; | |
1354 | default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\"; | |
1355 | } | |
1356 | }" | |
1357 | [(set (attr "far_jump") | |
1358 | (if_then_else | |
1359 | (eq_attr "length" "8") | |
1360 | (const_string "yes") | |
1361 | (const_string "no"))) | |
1362 | (set (attr "length") | |
1363 | (if_then_else | |
1364 | (and (ge (minus (match_dup 3) (pc)) (const_int -250)) | |
1365 | (le (minus (match_dup 3) (pc)) (const_int 256))) | |
1366 | (const_int 4) | |
1367 | (if_then_else | |
1368 | (and (ge (minus (match_dup 3) (pc)) (const_int -2040)) | |
1369 | (le (minus (match_dup 3) (pc)) (const_int 2048))) | |
1370 | (const_int 6) | |
1371 | (const_int 8)))) | |
1372 | (set_attr "type" "multiple")] | |
1373 | ) | |
1374 | ||
1375 | (define_insn "*tstsi3_cbranch" | |
1376 | [(set (pc) | |
1377 | (if_then_else | |
1378 | (match_operator 3 "equality_operator" | |
1379 | [(and:SI (match_operand:SI 0 "s_register_operand" "%l") | |
1380 | (match_operand:SI 1 "s_register_operand" "l")) | |
1381 | (const_int 0)]) | |
1382 | (label_ref (match_operand 2 "" "")) | |
1383 | (pc)))] | |
1384 | "TARGET_THUMB1" | |
1385 | "* | |
1386 | { | |
1387 | output_asm_insn (\"tst\\t%0, %1\", operands); | |
1388 | switch (get_attr_length (insn)) | |
1389 | { | |
1390 | case 4: return \"b%d3\\t%l2\"; | |
1391 | case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\"; | |
1392 | default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\"; | |
1393 | } | |
1394 | }" | |
1395 | [(set (attr "far_jump") | |
1396 | (if_then_else | |
1397 | (eq_attr "length" "8") | |
1398 | (const_string "yes") | |
1399 | (const_string "no"))) | |
1400 | (set (attr "length") | |
1401 | (if_then_else | |
1402 | (and (ge (minus (match_dup 2) (pc)) (const_int -250)) | |
1403 | (le (minus (match_dup 2) (pc)) (const_int 256))) | |
1404 | (const_int 4) | |
1405 | (if_then_else | |
1406 | (and (ge (minus (match_dup 2) (pc)) (const_int -2040)) | |
1407 | (le (minus (match_dup 2) (pc)) (const_int 2048))) | |
1408 | (const_int 6) | |
1409 | (const_int 8)))) | |
1410 | (set_attr "type" "multiple")] | |
1411 | ) | |
1412 | ||
1413 | (define_insn "*cbranchne_decr1" | |
1414 | [(set (pc) | |
1415 | (if_then_else (match_operator 3 "equality_operator" | |
1416 | [(match_operand:SI 2 "s_register_operand" "l,l,1,l") | |
1417 | (const_int 0)]) | |
1418 | (label_ref (match_operand 4 "" "")) | |
1419 | (pc))) | |
1420 | (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m") | |
1421 | (plus:SI (match_dup 2) (const_int -1))) | |
1422 | (clobber (match_scratch:SI 1 "=X,l,&l,&l"))] | |
1423 | "TARGET_THUMB1" | |
1424 | "* | |
1425 | { | |
1426 | rtx cond[2]; | |
1427 | cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE | |
1428 | ? GEU : LTU), | |
1429 | VOIDmode, operands[2], const1_rtx); | |
1430 | cond[1] = operands[4]; | |
1431 | ||
1432 | if (which_alternative == 0) | |
decfc6e1 | 1433 | output_asm_insn (\"subs\\t%0, %2, #1\", operands); |
1d02d8b8 TG |
1434 | else if (which_alternative == 1) |
1435 | { | |
1436 | /* We must provide an alternative for a hi reg because reload | |
1437 | cannot handle output reloads on a jump instruction, but we | |
1438 | can't subtract into that. Fortunately a mov from lo to hi | |
1439 | does not clobber the condition codes. */ | |
decfc6e1 | 1440 | output_asm_insn (\"subs\\t%1, %2, #1\", operands); |
1d02d8b8 TG |
1441 | output_asm_insn (\"mov\\t%0, %1\", operands); |
1442 | } | |
1443 | else | |
1444 | { | |
1445 | /* Similarly, but the target is memory. */ | |
decfc6e1 | 1446 | output_asm_insn (\"subs\\t%1, %2, #1\", operands); |
1d02d8b8 TG |
1447 | output_asm_insn (\"str\\t%1, %0\", operands); |
1448 | } | |
1449 | ||
1450 | switch (get_attr_length (insn) - (which_alternative ? 2 : 0)) | |
1451 | { | |
1452 | case 4: | |
1453 | output_asm_insn (\"b%d0\\t%l1\", cond); | |
1454 | return \"\"; | |
1455 | case 6: | |
1456 | output_asm_insn (\"b%D0\\t.LCB%=\", cond); | |
1457 | return \"b\\t%l4\\t%@long jump\\n.LCB%=:\"; | |
1458 | default: | |
1459 | output_asm_insn (\"b%D0\\t.LCB%=\", cond); | |
1460 | return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\"; | |
1461 | } | |
1462 | } | |
1463 | " | |
1464 | [(set (attr "far_jump") | |
1465 | (if_then_else | |
1466 | (ior (and (eq (symbol_ref ("which_alternative")) | |
1467 | (const_int 0)) | |
1468 | (eq_attr "length" "8")) | |
1469 | (eq_attr "length" "10")) | |
1470 | (const_string "yes") | |
1471 | (const_string "no"))) | |
1472 | (set_attr_alternative "length" | |
1473 | [ | |
1474 | ;; Alternative 0 | |
1475 | (if_then_else | |
1476 | (and (ge (minus (match_dup 4) (pc)) (const_int -250)) | |
1477 | (le (minus (match_dup 4) (pc)) (const_int 256))) | |
1478 | (const_int 4) | |
1479 | (if_then_else | |
1480 | (and (ge (minus (match_dup 4) (pc)) (const_int -2040)) | |
1481 | (le (minus (match_dup 4) (pc)) (const_int 2048))) | |
1482 | (const_int 6) | |
1483 | (const_int 8))) | |
1484 | ;; Alternative 1 | |
1485 | (if_then_else | |
1486 | (and (ge (minus (match_dup 4) (pc)) (const_int -248)) | |
1487 | (le (minus (match_dup 4) (pc)) (const_int 256))) | |
1488 | (const_int 6) | |
1489 | (if_then_else | |
1490 | (and (ge (minus (match_dup 4) (pc)) (const_int -2038)) | |
1491 | (le (minus (match_dup 4) (pc)) (const_int 2048))) | |
1492 | (const_int 8) | |
1493 | (const_int 10))) | |
1494 | ;; Alternative 2 | |
1495 | (if_then_else | |
1496 | (and (ge (minus (match_dup 4) (pc)) (const_int -248)) | |
1497 | (le (minus (match_dup 4) (pc)) (const_int 256))) | |
1498 | (const_int 6) | |
1499 | (if_then_else | |
1500 | (and (ge (minus (match_dup 4) (pc)) (const_int -2038)) | |
1501 | (le (minus (match_dup 4) (pc)) (const_int 2048))) | |
1502 | (const_int 8) | |
1503 | (const_int 10))) | |
1504 | ;; Alternative 3 | |
1505 | (if_then_else | |
1506 | (and (ge (minus (match_dup 4) (pc)) (const_int -248)) | |
1507 | (le (minus (match_dup 4) (pc)) (const_int 256))) | |
1508 | (const_int 6) | |
1509 | (if_then_else | |
1510 | (and (ge (minus (match_dup 4) (pc)) (const_int -2038)) | |
1511 | (le (minus (match_dup 4) (pc)) (const_int 2048))) | |
1512 | (const_int 8) | |
1513 | (const_int 10)))]) | |
1514 | (set_attr "type" "multiple")] | |
1515 | ) | |
1516 | ||
1517 | (define_insn "*addsi3_cbranch" | |
1518 | [(set (pc) | |
1519 | (if_then_else | |
1520 | (match_operator 4 "arm_comparison_operator" | |
1521 | [(plus:SI | |
1522 | (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1") | |
1523 | (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ")) | |
1524 | (const_int 0)]) | |
1525 | (label_ref (match_operand 5 "" "")) | |
1526 | (pc))) | |
1527 | (set | |
1528 | (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m") | |
1529 | (plus:SI (match_dup 2) (match_dup 3))) | |
1530 | (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))] | |
1531 | "TARGET_THUMB1 | |
1532 | && (GET_CODE (operands[4]) == EQ | |
1533 | || GET_CODE (operands[4]) == NE | |
1534 | || GET_CODE (operands[4]) == GE | |
1535 | || GET_CODE (operands[4]) == LT)" | |
1536 | "* | |
1537 | { | |
1538 | rtx cond[3]; | |
1539 | ||
1540 | cond[0] = (which_alternative < 2) ? operands[0] : operands[1]; | |
1541 | cond[1] = operands[2]; | |
1542 | cond[2] = operands[3]; | |
1543 | ||
1544 | if (CONST_INT_P (cond[2]) && INTVAL (cond[2]) < 0) | |
decfc6e1 | 1545 | output_asm_insn (\"subs\\t%0, %1, #%n2\", cond); |
1d02d8b8 | 1546 | else |
decfc6e1 | 1547 | output_asm_insn (\"adds\\t%0, %1, %2\", cond); |
1d02d8b8 TG |
1548 | |
1549 | if (which_alternative >= 2 | |
1550 | && which_alternative < 4) | |
1551 | output_asm_insn (\"mov\\t%0, %1\", operands); | |
1552 | else if (which_alternative >= 4) | |
1553 | output_asm_insn (\"str\\t%1, %0\", operands); | |
1554 | ||
1555 | switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0)) | |
1556 | { | |
1557 | case 4: | |
1558 | return \"b%d4\\t%l5\"; | |
1559 | case 6: | |
1560 | return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\"; | |
1561 | default: | |
1562 | return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\"; | |
1563 | } | |
1564 | } | |
1565 | " | |
1566 | [(set (attr "far_jump") | |
1567 | (if_then_else | |
1568 | (ior (and (lt (symbol_ref ("which_alternative")) | |
1569 | (const_int 2)) | |
1570 | (eq_attr "length" "8")) | |
1571 | (eq_attr "length" "10")) | |
1572 | (const_string "yes") | |
1573 | (const_string "no"))) | |
1574 | (set (attr "length") | |
1575 | (if_then_else | |
1576 | (lt (symbol_ref ("which_alternative")) | |
1577 | (const_int 2)) | |
1578 | (if_then_else | |
1579 | (and (ge (minus (match_dup 5) (pc)) (const_int -250)) | |
1580 | (le (minus (match_dup 5) (pc)) (const_int 256))) | |
1581 | (const_int 4) | |
1582 | (if_then_else | |
1583 | (and (ge (minus (match_dup 5) (pc)) (const_int -2040)) | |
1584 | (le (minus (match_dup 5) (pc)) (const_int 2048))) | |
1585 | (const_int 6) | |
1586 | (const_int 8))) | |
1587 | (if_then_else | |
1588 | (and (ge (minus (match_dup 5) (pc)) (const_int -248)) | |
1589 | (le (minus (match_dup 5) (pc)) (const_int 256))) | |
1590 | (const_int 6) | |
1591 | (if_then_else | |
1592 | (and (ge (minus (match_dup 5) (pc)) (const_int -2038)) | |
1593 | (le (minus (match_dup 5) (pc)) (const_int 2048))) | |
1594 | (const_int 8) | |
1595 | (const_int 10))))) | |
1596 | (set_attr "type" "multiple")] | |
1597 | ) | |
1598 | ||
1599 | (define_insn "*addsi3_cbranch_scratch" | |
1600 | [(set (pc) | |
1601 | (if_then_else | |
1602 | (match_operator 3 "arm_comparison_operator" | |
1603 | [(plus:SI | |
1604 | (match_operand:SI 1 "s_register_operand" "%l,l,l,0") | |
1605 | (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ")) | |
1606 | (const_int 0)]) | |
1607 | (label_ref (match_operand 4 "" "")) | |
1608 | (pc))) | |
1609 | (clobber (match_scratch:SI 0 "=X,X,l,l"))] | |
1610 | "TARGET_THUMB1 | |
1611 | && (GET_CODE (operands[3]) == EQ | |
1612 | || GET_CODE (operands[3]) == NE | |
1613 | || GET_CODE (operands[3]) == GE | |
1614 | || GET_CODE (operands[3]) == LT)" | |
1615 | "* | |
1616 | { | |
1617 | switch (which_alternative) | |
1618 | { | |
1619 | case 0: | |
1620 | output_asm_insn (\"cmp\t%1, #%n2\", operands); | |
1621 | break; | |
1622 | case 1: | |
1623 | output_asm_insn (\"cmn\t%1, %2\", operands); | |
1624 | break; | |
1625 | case 2: | |
1626 | if (INTVAL (operands[2]) < 0) | |
decfc6e1 | 1627 | output_asm_insn (\"subs\t%0, %1, %2\", operands); |
1d02d8b8 | 1628 | else |
c121b4b7 | 1629 | output_asm_insn (\"adds\t%0, %1, %2\", operands); |
1d02d8b8 TG |
1630 | break; |
1631 | case 3: | |
1632 | if (INTVAL (operands[2]) < 0) | |
decfc6e1 | 1633 | output_asm_insn (\"subs\t%0, %0, %2\", operands); |
1d02d8b8 | 1634 | else |
c121b4b7 | 1635 | output_asm_insn (\"adds\t%0, %0, %2\", operands); |
1d02d8b8 TG |
1636 | break; |
1637 | } | |
1638 | ||
1639 | switch (get_attr_length (insn)) | |
1640 | { | |
1641 | case 4: | |
1642 | return \"b%d3\\t%l4\"; | |
1643 | case 6: | |
1644 | return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\"; | |
1645 | default: | |
1646 | return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\"; | |
1647 | } | |
1648 | } | |
1649 | " | |
1650 | [(set (attr "far_jump") | |
1651 | (if_then_else | |
1652 | (eq_attr "length" "8") | |
1653 | (const_string "yes") | |
1654 | (const_string "no"))) | |
1655 | (set (attr "length") | |
1656 | (if_then_else | |
1657 | (and (ge (minus (match_dup 4) (pc)) (const_int -250)) | |
1658 | (le (minus (match_dup 4) (pc)) (const_int 256))) | |
1659 | (const_int 4) | |
1660 | (if_then_else | |
1661 | (and (ge (minus (match_dup 4) (pc)) (const_int -2040)) | |
1662 | (le (minus (match_dup 4) (pc)) (const_int 2048))) | |
1663 | (const_int 6) | |
1664 | (const_int 8)))) | |
1665 | (set_attr "type" "multiple")] | |
1666 | ) | |
1667 | ||
1668 | (define_insn "*thumb_cmpdi_zero" | |
1669 | [(set (reg:CC_Z CC_REGNUM) | |
1670 | (compare:CC_Z (match_operand:DI 0 "s_register_operand" "l") | |
1671 | (const_int 0))) | |
1672 | (clobber (match_scratch:SI 1 "=l"))] | |
1673 | "TARGET_THUMB1" | |
decfc6e1 | 1674 | "orrs\\t%1, %Q0, %R0" |
1d02d8b8 TG |
1675 | [(set_attr "conds" "set") |
1676 | (set_attr "length" "2") | |
1677 | (set_attr "type" "logics_reg")] | |
1678 | ) | |
1679 | ||
1680 | (define_expand "cstoresi_eq0_thumb1" | |
1681 | [(parallel | |
cd65e265 DZ |
1682 | [(set (match_operand:SI 0 "s_register_operand") |
1683 | (eq:SI (match_operand:SI 1 "s_register_operand") | |
1d02d8b8 TG |
1684 | (const_int 0))) |
1685 | (clobber (match_dup:SI 2))])] | |
1686 | "TARGET_THUMB1" | |
1687 | "operands[2] = gen_reg_rtx (SImode);" | |
1688 | ) | |
1689 | ||
1690 | (define_expand "cstoresi_ne0_thumb1" | |
1691 | [(parallel | |
cd65e265 DZ |
1692 | [(set (match_operand:SI 0 "s_register_operand") |
1693 | (ne:SI (match_operand:SI 1 "s_register_operand") | |
1d02d8b8 TG |
1694 | (const_int 0))) |
1695 | (clobber (match_dup:SI 2))])] | |
1696 | "TARGET_THUMB1" | |
1697 | "operands[2] = gen_reg_rtx (SImode);" | |
1698 | ) | |
1699 | ||
1700 | (define_insn "*cstoresi_eq0_thumb1_insn" | |
1701 | [(set (match_operand:SI 0 "s_register_operand" "=&l,l") | |
1702 | (eq:SI (match_operand:SI 1 "s_register_operand" "l,0") | |
1703 | (const_int 0))) | |
1704 | (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))] | |
1705 | "TARGET_THUMB1" | |
1706 | "@ | |
decfc6e1 TG |
1707 | rsbs\\t%0, %1, #0\;adcs\\t%0, %0, %1 |
1708 | rsbs\\t%2, %1, #0\;adcs\\t%0, %1, %2" | |
1d02d8b8 TG |
1709 | [(set_attr "length" "4") |
1710 | (set_attr "type" "multiple")] | |
1711 | ) | |
1712 | ||
1713 | (define_insn "*cstoresi_ne0_thumb1_insn" | |
1714 | [(set (match_operand:SI 0 "s_register_operand" "=l") | |
1715 | (ne:SI (match_operand:SI 1 "s_register_operand" "0") | |
1716 | (const_int 0))) | |
1717 | (clobber (match_operand:SI 2 "s_register_operand" "=l"))] | |
1718 | "TARGET_THUMB1" | |
decfc6e1 | 1719 | "subs\\t%2, %1, #1\;sbcs\\t%0, %1, %2" |
1d02d8b8 TG |
1720 | [(set_attr "length" "4")] |
1721 | ) | |
1722 | ||
1723 | ;; Used as part of the expansion of thumb ltu and gtu sequences | |
1724 | (define_insn "cstoresi_nltu_thumb1" | |
1725 | [(set (match_operand:SI 0 "s_register_operand" "=l,l") | |
1726 | (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h") | |
1727 | (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))] | |
1728 | "TARGET_THUMB1" | |
decfc6e1 | 1729 | "cmp\\t%1, %2\;sbcs\\t%0, %0, %0" |
1d02d8b8 TG |
1730 | [(set_attr "length" "4") |
1731 | (set_attr "type" "multiple")] | |
1732 | ) | |
1733 | ||
1734 | (define_insn_and_split "cstoresi_ltu_thumb1" | |
1735 | [(set (match_operand:SI 0 "s_register_operand" "=l,l") | |
1736 | (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h") | |
1737 | (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))] | |
1738 | "TARGET_THUMB1" | |
1739 | "#" | |
1740 | "TARGET_THUMB1" | |
1741 | [(set (match_dup 3) | |
1742 | (neg:SI (ltu:SI (match_dup 1) (match_dup 2)))) | |
1743 | (set (match_dup 0) (neg:SI (match_dup 3)))] | |
1744 | "operands[3] = gen_reg_rtx (SImode);" | |
1745 | [(set_attr "length" "4") | |
1746 | (set_attr "type" "multiple")] | |
1747 | ) | |
1748 | ||
1749 | ;; Used as part of the expansion of thumb les sequence. | |
1750 | (define_insn "thumb1_addsi3_addgeu" | |
1751 | [(set (match_operand:SI 0 "s_register_operand" "=l") | |
1752 | (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0") | |
1753 | (match_operand:SI 2 "s_register_operand" "l")) | |
1754 | (geu:SI (match_operand:SI 3 "s_register_operand" "l") | |
1755 | (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))] | |
1756 | "TARGET_THUMB1" | |
decfc6e1 | 1757 | "cmp\\t%3, %4\;adcs\\t%0, %1, %2" |
1d02d8b8 TG |
1758 | [(set_attr "length" "4") |
1759 | (set_attr "type" "multiple")] | |
1760 | ) | |
1761 | ||
1762 | \f | |
1763 | (define_insn "*thumb_jump" | |
1764 | [(set (pc) | |
1765 | (label_ref (match_operand 0 "" "")))] | |
1766 | "TARGET_THUMB1" | |
1767 | "* | |
1768 | if (get_attr_length (insn) == 2) | |
1769 | return \"b\\t%l0\"; | |
1770 | return \"bl\\t%l0\\t%@ far jump\"; | |
1771 | " | |
1772 | [(set (attr "far_jump") | |
1773 | (if_then_else | |
1774 | (eq_attr "length" "4") | |
1775 | (const_string "yes") | |
1776 | (const_string "no"))) | |
1777 | (set (attr "length") | |
1778 | (if_then_else | |
1779 | (and (ge (minus (match_dup 0) (pc)) (const_int -2044)) | |
1780 | (le (minus (match_dup 0) (pc)) (const_int 2048))) | |
1781 | (const_int 2) | |
1782 | (const_int 4))) | |
1783 | (set_attr "type" "branch")] | |
1784 | ) | |
1785 | ||
1786 | (define_insn "*call_reg_thumb1_v5" | |
1787 | [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r")) | |
1788 | (match_operand 1 "" "")) | |
1789 | (use (match_operand 2 "" "")) | |
1790 | (clobber (reg:SI LR_REGNUM))] | |
c3f808d3 | 1791 | "TARGET_THUMB1 && arm_arch5t && !SIBLING_CALL_P (insn)" |
1d02d8b8 TG |
1792 | "blx\\t%0" |
1793 | [(set_attr "length" "2") | |
1794 | (set_attr "type" "call")] | |
1795 | ) | |
1796 | ||
c92e08e3 | 1797 | (define_insn "*nonsecure_call_reg_thumb1_v5" |
0a413fbc | 1798 | [(call (unspec:SI [(mem:SI (reg:SI R4_REGNUM))] |
c92e08e3 | 1799 | UNSPEC_NONSECURE_MEM) |
0a413fbc TP |
1800 | (match_operand 0 "" "")) |
1801 | (use (match_operand 1 "" "")) | |
1802 | (clobber (reg:SI LR_REGNUM))] | |
c92e08e3 AV |
1803 | "TARGET_THUMB1 && use_cmse && !SIBLING_CALL_P (insn)" |
1804 | "bl\\t__gnu_cmse_nonsecure_call" | |
1805 | [(set_attr "length" "4") | |
1806 | (set_attr "type" "call")] | |
1807 | ) | |
1808 | ||
1d02d8b8 TG |
1809 | (define_insn "*call_reg_thumb1" |
1810 | [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r")) | |
1811 | (match_operand 1 "" "")) | |
1812 | (use (match_operand 2 "" "")) | |
1813 | (clobber (reg:SI LR_REGNUM))] | |
c3f808d3 | 1814 | "TARGET_THUMB1 && !arm_arch5t && !SIBLING_CALL_P (insn)" |
1d02d8b8 TG |
1815 | "* |
1816 | { | |
1817 | if (!TARGET_CALLER_INTERWORKING) | |
1818 | return thumb_call_via_reg (operands[0]); | |
1819 | else if (operands[1] == const0_rtx) | |
1820 | return \"bl\\t%__interwork_call_via_%0\"; | |
1821 | else if (frame_pointer_needed) | |
1822 | return \"bl\\t%__interwork_r7_call_via_%0\"; | |
1823 | else | |
1824 | return \"bl\\t%__interwork_r11_call_via_%0\"; | |
1825 | }" | |
1826 | [(set_attr "type" "call")] | |
1827 | ) | |
1828 | ||
1829 | (define_insn "*call_value_reg_thumb1_v5" | |
1830 | [(set (match_operand 0 "" "") | |
1831 | (call (mem:SI (match_operand:SI 1 "register_operand" "l*r")) | |
1832 | (match_operand 2 "" ""))) | |
1833 | (use (match_operand 3 "" "")) | |
1834 | (clobber (reg:SI LR_REGNUM))] | |
c3f808d3 | 1835 | "TARGET_THUMB1 && arm_arch5t" |
1d02d8b8 TG |
1836 | "blx\\t%1" |
1837 | [(set_attr "length" "2") | |
1838 | (set_attr "type" "call")] | |
1839 | ) | |
1840 | ||
c92e08e3 AV |
1841 | (define_insn "*nonsecure_call_value_reg_thumb1_v5" |
1842 | [(set (match_operand 0 "" "") | |
1843 | (call (unspec:SI | |
0a413fbc | 1844 | [(mem:SI (reg:SI R4_REGNUM))] |
c92e08e3 | 1845 | UNSPEC_NONSECURE_MEM) |
0a413fbc TP |
1846 | (match_operand 1 "" ""))) |
1847 | (use (match_operand 2 "" "")) | |
1848 | (clobber (reg:SI LR_REGNUM))] | |
c92e08e3 AV |
1849 | "TARGET_THUMB1 && use_cmse" |
1850 | "bl\\t__gnu_cmse_nonsecure_call" | |
1851 | [(set_attr "length" "4") | |
1852 | (set_attr "type" "call")] | |
1853 | ) | |
1854 | ||
1d02d8b8 TG |
1855 | (define_insn "*call_value_reg_thumb1" |
1856 | [(set (match_operand 0 "" "") | |
1857 | (call (mem:SI (match_operand:SI 1 "register_operand" "l*r")) | |
1858 | (match_operand 2 "" ""))) | |
1859 | (use (match_operand 3 "" "")) | |
1860 | (clobber (reg:SI LR_REGNUM))] | |
c3f808d3 | 1861 | "TARGET_THUMB1 && !arm_arch5t" |
1d02d8b8 TG |
1862 | "* |
1863 | { | |
1864 | if (!TARGET_CALLER_INTERWORKING) | |
1865 | return thumb_call_via_reg (operands[1]); | |
1866 | else if (operands[2] == const0_rtx) | |
1867 | return \"bl\\t%__interwork_call_via_%1\"; | |
1868 | else if (frame_pointer_needed) | |
1869 | return \"bl\\t%__interwork_r7_call_via_%1\"; | |
1870 | else | |
1871 | return \"bl\\t%__interwork_r11_call_via_%1\"; | |
1872 | }" | |
1873 | [(set_attr "type" "call")] | |
1874 | ) | |
1875 | ||
1876 | (define_insn "*call_insn" | |
1877 | [(call (mem:SI (match_operand:SI 0 "" "")) | |
1878 | (match_operand:SI 1 "" "")) | |
1879 | (use (match_operand 2 "" "")) | |
1880 | (clobber (reg:SI LR_REGNUM))] | |
1881 | "TARGET_THUMB1 | |
1882 | && GET_CODE (operands[0]) == SYMBOL_REF | |
1883 | && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))" | |
1884 | "bl\\t%a0" | |
1885 | [(set_attr "length" "4") | |
1886 | (set_attr "type" "call")] | |
1887 | ) | |
1888 | ||
1889 | (define_insn "*call_value_insn" | |
1890 | [(set (match_operand 0 "" "") | |
1891 | (call (mem:SI (match_operand 1 "" "")) | |
1892 | (match_operand 2 "" ""))) | |
1893 | (use (match_operand 3 "" "")) | |
1894 | (clobber (reg:SI LR_REGNUM))] | |
1895 | "TARGET_THUMB1 | |
1896 | && GET_CODE (operands[1]) == SYMBOL_REF | |
1897 | && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))" | |
1898 | "bl\\t%a1" | |
1899 | [(set_attr "length" "4") | |
1900 | (set_attr "type" "call")] | |
1901 | ) | |
1902 | ||
1903 | (define_expand "thumb1_casesi_internal_pic" | |
cd65e265 DZ |
1904 | [(match_operand:SI 0 "s_register_operand") |
1905 | (match_operand:SI 1 "thumb1_cmp_operand") | |
1d02d8b8 TG |
1906 | (match_operand 2 "" "") |
1907 | (match_operand 3 "" "")] | |
1908 | "TARGET_THUMB1" | |
1909 | { | |
1910 | rtx reg0; | |
1911 | rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]); | |
1912 | emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1], | |
1913 | operands[3])); | |
1914 | reg0 = gen_rtx_REG (SImode, 0); | |
1915 | emit_move_insn (reg0, operands[0]); | |
1916 | emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/)); | |
1917 | DONE; | |
1918 | } | |
1919 | ) | |
1920 | ||
1921 | (define_insn "thumb1_casesi_dispatch" | |
1922 | [(parallel [(set (pc) (unspec [(reg:SI 0) | |
1923 | (label_ref (match_operand 0 "" "")) | |
1924 | ;; (label_ref (match_operand 1 "" "")) | |
1925 | ] | |
1926 | UNSPEC_THUMB1_CASESI)) | |
1927 | (clobber (reg:SI IP_REGNUM)) | |
1928 | (clobber (reg:SI LR_REGNUM))])] | |
1929 | "TARGET_THUMB1" | |
1930 | "* return thumb1_output_casesi(operands);" | |
1931 | [(set_attr "length" "4") | |
1932 | (set_attr "type" "multiple")] | |
1933 | ) | |
1934 | ||
1935 | ;; NB Never uses BX. | |
1936 | (define_insn "*thumb1_indirect_jump" | |
1937 | [(set (pc) | |
1938 | (match_operand:SI 0 "register_operand" "l*r"))] | |
1939 | "TARGET_THUMB1" | |
1940 | "mov\\tpc, %0" | |
1941 | [(set_attr "conds" "clob") | |
1942 | (set_attr "length" "2") | |
1943 | (set_attr "type" "branch")] | |
1944 | ) | |
1945 | ||
1946 | \f | |
1947 | (define_insn "prologue_thumb1_interwork" | |
1948 | [(unspec_volatile [(const_int 0)] VUNSPEC_THUMB1_INTERWORK)] | |
1949 | "TARGET_THUMB1" | |
1950 | "* return thumb1_output_interwork ();" | |
1951 | [(set_attr "length" "8") | |
1952 | (set_attr "type" "multiple")] | |
1953 | ) | |
1954 | ||
1955 | (define_insn "*epilogue_insns" | |
1956 | [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)] | |
1957 | "TARGET_THUMB1" | |
1958 | "* | |
1959 | return thumb1_unexpanded_epilogue (); | |
1960 | " | |
de954d6a AV |
1961 | ; Length is absolute worst case, when using CMSE and if this is an entry |
1962 | ; function an extra 4 (MSR) bytes will be added. | |
1963 | [(set (attr "length") | |
1964 | (if_then_else | |
1965 | (match_test "IS_CMSE_ENTRY (arm_current_func_type ())") | |
1966 | (const_int 48) | |
1967 | (const_int 44))) | |
1d02d8b8 TG |
1968 | (set_attr "type" "block") |
1969 | ;; We don't clobber the conditions, but the potential length of this | |
1970 | ;; operation is sufficient to make conditionalizing the sequence | |
1971 | ;; unlikely to be profitable. | |
1972 | (set_attr "conds" "clob")] | |
1973 | ) | |
1974 | ||
1d02d8b8 TG |
1975 | ;; Miscellaneous Thumb patterns |
1976 | (define_expand "tablejump" | |
cd65e265 | 1977 | [(parallel [(set (pc) (match_operand:SI 0 "register_operand")) |
1d02d8b8 TG |
1978 | (use (label_ref (match_operand 1 "" "")))])] |
1979 | "TARGET_THUMB1" | |
1980 | " | |
1981 | if (flag_pic) | |
1982 | { | |
1983 | /* Hopefully, CSE will eliminate this copy. */ | |
1984 | rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1])); | |
1985 | rtx reg2 = gen_reg_rtx (SImode); | |
1986 | ||
1987 | emit_insn (gen_addsi3 (reg2, operands[0], reg1)); | |
1988 | operands[0] = reg2; | |
1989 | } | |
1990 | " | |
1991 | ) | |
1992 | ||
5ea22cfc TG |
1993 | (define_insn "*thumb1_movpc_insn" |
1994 | [(set (match_operand:SI 0 "s_register_operand" "=l") | |
1995 | (reg:SI PC_REGNUM))] | |
1996 | "TARGET_THUMB1" | |
1997 | "mov\\t%0, pc" | |
1998 | [(set_attr "length" "2") | |
1999 | (set_attr "conds" "nocond") | |
2000 | (set_attr "type" "mov_reg")] | |
2001 | ) | |
2002 | ||
1d02d8b8 TG |
2003 | ;; NB never uses BX. |
2004 | (define_insn "*thumb1_tablejump" | |
2005 | [(set (pc) (match_operand:SI 0 "register_operand" "l*r")) | |
2006 | (use (label_ref (match_operand 1 "" "")))] | |
2007 | "TARGET_THUMB1" | |
2008 | "mov\\t%|pc, %0" | |
2009 | [(set_attr "length" "2") | |
5d4efa79 | 2010 | (set_attr "type" "branch")] |
1d02d8b8 TG |
2011 | ) |
2012 | ||
2013 | (define_insn_and_split "thumb_eh_return" | |
2014 | [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")] | |
2015 | VUNSPEC_EH_RETURN) | |
2016 | (clobber (match_scratch:SI 1 "=&l"))] | |
2017 | "TARGET_THUMB1" | |
2018 | "#" | |
2019 | "&& reload_completed" | |
2020 | [(const_int 0)] | |
2021 | " | |
2022 | { | |
2023 | thumb_set_return_address (operands[0], operands[1]); | |
2024 | DONE; | |
2025 | }" | |
2026 | [(set_attr "type" "mov_reg")] | |
2027 | ) | |
89d75572 | 2028 | |
6a3f3e08 RS |
2029 | ;; DO NOT SPLIT THIS PATTERN. It is important for security reasons that the |
2030 | ;; canary value does not live beyond the end of this sequence. | |
89d75572 TP |
2031 | (define_insn "thumb1_stack_protect_test_insn" |
2032 | [(set (match_operand:SI 0 "register_operand" "=&l") | |
2033 | (unspec:SI [(match_operand:SI 1 "memory_operand" "m") | |
2034 | (mem:SI (match_operand:SI 2 "register_operand" "+l"))] | |
2035 | UNSPEC_SP_TEST)) | |
2036 | (clobber (match_dup 2))] | |
2037 | "TARGET_THUMB1" | |
6a3f3e08 RS |
2038 | "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0\;movs\t%2, #0" |
2039 | [(set_attr "length" "10") | |
2040 | (set_attr "conds" "clob") | |
89d75572 TP |
2041 | (set_attr "type" "multiple")] |
2042 | ) | |
1d02d8b8 | 2043 | \f |