]>
Commit | Line | Data |
---|---|---|
2055cea7 | 1 | /* Subroutines used by or related to instruction recognition. |
af841dbd | 2 | Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998 |
eca72963 | 3 | 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 |
fb0a2460 | 4 | Free Software Foundation, Inc. |
2055cea7 | 5 | |
1322177d | 6 | This file is part of GCC. |
2055cea7 | 7 | |
1322177d LB |
8 | GCC is free software; you can redistribute it and/or modify it under |
9 | the terms of the GNU General Public License as published by the Free | |
9dcd6f09 | 10 | Software Foundation; either version 3, or (at your option) any later |
1322177d | 11 | version. |
2055cea7 | 12 | |
1322177d LB |
13 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
14 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
15 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
16 | for more details. | |
2055cea7 RK |
17 | |
18 | You should have received a copy of the GNU General Public License | |
9dcd6f09 NC |
19 | along with GCC; see the file COPYING3. If not see |
20 | <http://www.gnu.org/licenses/>. */ | |
2055cea7 RK |
21 | |
22 | ||
23 | #include "config.h" | |
670ee920 | 24 | #include "system.h" |
4977bab6 ZW |
25 | #include "coretypes.h" |
26 | #include "tm.h" | |
38a448ca | 27 | #include "rtl.h" |
6baf1cc8 | 28 | #include "tm_p.h" |
2055cea7 RK |
29 | #include "insn-config.h" |
30 | #include "insn-attr.h" | |
d80eb1e1 | 31 | #include "hard-reg-set.h" |
2055cea7 RK |
32 | #include "recog.h" |
33 | #include "regs.h" | |
c4963a0a | 34 | #include "addresses.h" |
f1ec5147 | 35 | #include "expr.h" |
49ad7cfa | 36 | #include "function.h" |
2055cea7 RK |
37 | #include "flags.h" |
38 | #include "real.h" | |
7f7f8214 | 39 | #include "toplev.h" |
ca545bb5 | 40 | #include "basic-block.h" |
ede7cd44 | 41 | #include "output.h" |
0e9295cf | 42 | #include "reload.h" |
dbc42c44 | 43 | #include "target.h" |
ef330312 PB |
44 | #include "timevar.h" |
45 | #include "tree-pass.h" | |
6fb5fa3c | 46 | #include "df.h" |
2055cea7 RK |
47 | |
48 | #ifndef STACK_PUSH_CODE | |
49 | #ifdef STACK_GROWS_DOWNWARD | |
50 | #define STACK_PUSH_CODE PRE_DEC | |
51 | #else | |
52 | #define STACK_PUSH_CODE PRE_INC | |
53 | #endif | |
54 | #endif | |
55 | ||
6fbe9bd8 RH |
56 | #ifndef STACK_POP_CODE |
57 | #ifdef STACK_GROWS_DOWNWARD | |
58 | #define STACK_POP_CODE POST_INC | |
59 | #else | |
60 | #define STACK_POP_CODE POST_DEC | |
61 | #endif | |
62 | #endif | |
63 | ||
7ac28727 AK |
64 | #ifndef HAVE_ATTR_enabled |
65 | static inline bool | |
66 | get_attr_enabled (rtx insn ATTRIBUTE_UNUSED) | |
67 | { | |
68 | return true; | |
69 | } | |
70 | #endif | |
71 | ||
e855c69d | 72 | static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool); |
0c20a65f AJ |
73 | static void validate_replace_src_1 (rtx *, void *); |
74 | static rtx split_insn (rtx); | |
2055cea7 RK |
75 | |
76 | /* Nonzero means allow operands to be volatile. | |
77 | This should be 0 if you are generating rtl, such as if you are calling | |
78 | the functions in optabs.c and expmed.c (most of the time). | |
79 | This should be 1 if all valid insns need to be recognized, | |
328e13b7 | 80 | such as in reginfo.c and final.c and reload.c. |
2055cea7 RK |
81 | |
82 | init_recog and init_recog_no_volatile are responsible for setting this. */ | |
83 | ||
84 | int volatile_ok; | |
85 | ||
1ccbefce | 86 | struct recog_data recog_data; |
0a578fee | 87 | |
f62a15e3 BS |
88 | /* Contains a vector of operand_alternative structures for every operand. |
89 | Set up by preprocess_constraints. */ | |
90 | struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES]; | |
91 | ||
2055cea7 RK |
92 | /* On return from `constrain_operands', indicate which alternative |
93 | was satisfied. */ | |
94 | ||
95 | int which_alternative; | |
96 | ||
97 | /* Nonzero after end of reload pass. | |
98 | Set to 1 or 0 by toplev.c. | |
99 | Controls the significance of (SUBREG (MEM)). */ | |
100 | ||
101 | int reload_completed; | |
102 | ||
fe3ad572 SC |
103 | /* Nonzero after thread_prologue_and_epilogue_insns has run. */ |
104 | int epilogue_completed; | |
105 | ||
2055cea7 RK |
106 | /* Initialize data used by the function `recog'. |
107 | This must be called once in the compilation of a function | |
108 | before any insn recognition may be done in the function. */ | |
109 | ||
110 | void | |
0c20a65f | 111 | init_recog_no_volatile (void) |
2055cea7 RK |
112 | { |
113 | volatile_ok = 0; | |
114 | } | |
115 | ||
e0069e43 | 116 | void |
0c20a65f | 117 | init_recog (void) |
2055cea7 RK |
118 | { |
119 | volatile_ok = 1; | |
120 | } | |
121 | ||
2055cea7 RK |
122 | \f |
123 | /* Check that X is an insn-body for an `asm' with operands | |
124 | and that the operands mentioned in it are legitimate. */ | |
125 | ||
126 | int | |
0c20a65f | 127 | check_asm_operands (rtx x) |
2055cea7 | 128 | { |
1f06ee8d | 129 | int noperands; |
2055cea7 | 130 | rtx *operands; |
9b3142b3 | 131 | const char **constraints; |
2055cea7 RK |
132 | int i; |
133 | ||
1f06ee8d RH |
134 | /* Post-reload, be more strict with things. */ |
135 | if (reload_completed) | |
136 | { | |
137 | /* ??? Doh! We've not got the wrapping insn. Cook one up. */ | |
138 | extract_insn (make_insn_raw (x)); | |
139 | constrain_operands (1); | |
140 | return which_alternative >= 0; | |
141 | } | |
142 | ||
143 | noperands = asm_noperands (x); | |
2055cea7 RK |
144 | if (noperands < 0) |
145 | return 0; | |
146 | if (noperands == 0) | |
147 | return 1; | |
148 | ||
1634b18f KG |
149 | operands = XALLOCAVEC (rtx, noperands); |
150 | constraints = XALLOCAVEC (const char *, noperands); | |
1f06ee8d | 151 | |
bff4b63d | 152 | decode_asm_operands (x, operands, NULL, constraints, NULL, NULL); |
2055cea7 RK |
153 | |
154 | for (i = 0; i < noperands; i++) | |
1f06ee8d | 155 | { |
9b3142b3 | 156 | const char *c = constraints[i]; |
1afbe1c4 RH |
157 | if (c[0] == '%') |
158 | c++; | |
eca72963 | 159 | if (! asm_operand_ok (operands[i], c, constraints)) |
a6a2274a | 160 | return 0; |
1f06ee8d | 161 | } |
2055cea7 RK |
162 | |
163 | return 1; | |
164 | } | |
165 | \f | |
41a972a9 | 166 | /* Static data for the next two routines. */ |
2055cea7 | 167 | |
41a972a9 MM |
168 | typedef struct change_t |
169 | { | |
170 | rtx object; | |
171 | int old_code; | |
172 | rtx *loc; | |
173 | rtx old; | |
95e88efd | 174 | bool unshare; |
41a972a9 | 175 | } change_t; |
2055cea7 | 176 | |
41a972a9 MM |
177 | static change_t *changes; |
178 | static int changes_allocated; | |
2055cea7 RK |
179 | |
180 | static int num_changes = 0; | |
181 | ||
4d893612 | 182 | /* Validate a proposed change to OBJECT. LOC is the location in the rtl |
55d796da | 183 | at which NEW_RTX will be placed. If OBJECT is zero, no validation is done, |
2055cea7 RK |
184 | the change is simply made. |
185 | ||
186 | Two types of objects are supported: If OBJECT is a MEM, memory_address_p | |
187 | will be called with the address and mode as parameters. If OBJECT is | |
188 | an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with | |
189 | the change in place. | |
190 | ||
40f03658 | 191 | IN_GROUP is nonzero if this is part of a group of changes that must be |
2055cea7 RK |
192 | performed as a group. In that case, the changes will be stored. The |
193 | function `apply_change_group' will validate and apply the changes. | |
194 | ||
195 | If IN_GROUP is zero, this is a single change. Try to recognize the insn | |
196 | or validate the memory reference with the change applied. If the result | |
197 | is not valid for the machine, suppress the change and return zero. | |
198 | Otherwise, perform the change and return 1. */ | |
199 | ||
95e88efd | 200 | static bool |
55d796da | 201 | validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare) |
2055cea7 RK |
202 | { |
203 | rtx old = *loc; | |
204 | ||
55d796da | 205 | if (old == new_rtx || rtx_equal_p (old, new_rtx)) |
2055cea7 RK |
206 | return 1; |
207 | ||
41374e13 | 208 | gcc_assert (in_group != 0 || num_changes == 0); |
2055cea7 | 209 | |
55d796da | 210 | *loc = new_rtx; |
2055cea7 RK |
211 | |
212 | /* Save the information describing this change. */ | |
41a972a9 MM |
213 | if (num_changes >= changes_allocated) |
214 | { | |
215 | if (changes_allocated == 0) | |
216 | /* This value allows for repeated substitutions inside complex | |
217 | indexed addresses, or changes in up to 5 insns. */ | |
218 | changes_allocated = MAX_RECOG_OPERANDS * 5; | |
219 | else | |
220 | changes_allocated *= 2; | |
221 | ||
1634b18f | 222 | changes = XRESIZEVEC (change_t, changes, changes_allocated); |
41a972a9 | 223 | } |
a6a2274a | 224 | |
41a972a9 MM |
225 | changes[num_changes].object = object; |
226 | changes[num_changes].loc = loc; | |
227 | changes[num_changes].old = old; | |
95e88efd | 228 | changes[num_changes].unshare = unshare; |
2055cea7 | 229 | |
3c0cb5de | 230 | if (object && !MEM_P (object)) |
2055cea7 RK |
231 | { |
232 | /* Set INSN_CODE to force rerecognition of insn. Save old code in | |
233 | case invalid. */ | |
41a972a9 | 234 | changes[num_changes].old_code = INSN_CODE (object); |
2055cea7 RK |
235 | INSN_CODE (object) = -1; |
236 | } | |
237 | ||
238 | num_changes++; | |
239 | ||
240 | /* If we are making a group of changes, return 1. Otherwise, validate the | |
241 | change group we made. */ | |
242 | ||
243 | if (in_group) | |
244 | return 1; | |
245 | else | |
246 | return apply_change_group (); | |
247 | } | |
248 | ||
95e88efd JH |
249 | /* Wrapper for validate_change_1 without the UNSHARE argument defaulting |
250 | UNSHARE to false. */ | |
251 | ||
252 | bool | |
55d796da | 253 | validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group) |
95e88efd | 254 | { |
55d796da | 255 | return validate_change_1 (object, loc, new_rtx, in_group, false); |
95e88efd JH |
256 | } |
257 | ||
258 | /* Wrapper for validate_change_1 without the UNSHARE argument defaulting | |
259 | UNSHARE to true. */ | |
260 | ||
261 | bool | |
55d796da | 262 | validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group) |
95e88efd | 263 | { |
55d796da | 264 | return validate_change_1 (object, loc, new_rtx, in_group, true); |
95e88efd JH |
265 | } |
266 | ||
267 | ||
a52b023a PB |
268 | /* Keep X canonicalized if some changes have made it non-canonical; only |
269 | modifies the operands of X, not (for example) its code. Simplifications | |
270 | are not the job of this routine. | |
271 | ||
272 | Return true if anything was changed. */ | |
273 | bool | |
274 | canonicalize_change_group (rtx insn, rtx x) | |
275 | { | |
276 | if (COMMUTATIVE_P (x) | |
277 | && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1))) | |
278 | { | |
279 | /* Oops, the caller has made X no longer canonical. | |
280 | Let's redo the changes in the correct order. */ | |
281 | rtx tem = XEXP (x, 0); | |
282 | validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1); | |
283 | validate_change (insn, &XEXP (x, 1), tem, 1); | |
284 | return true; | |
285 | } | |
286 | else | |
287 | return false; | |
288 | } | |
289 | ||
46382283 | 290 | |
61719ba7 BS |
291 | /* This subroutine of apply_change_group verifies whether the changes to INSN |
292 | were valid; i.e. whether INSN can still be recognized. */ | |
293 | ||
fb0c0a12 | 294 | int |
0c20a65f | 295 | insn_invalid_p (rtx insn) |
61719ba7 | 296 | { |
fb0c0a12 RK |
297 | rtx pat = PATTERN (insn); |
298 | int num_clobbers = 0; | |
299 | /* If we are before reload and the pattern is a SET, see if we can add | |
300 | clobbers. */ | |
301 | int icode = recog (pat, insn, | |
302 | (GET_CODE (pat) == SET | |
303 | && ! reload_completed && ! reload_in_progress) | |
6496a589 | 304 | ? &num_clobbers : 0); |
61719ba7 BS |
305 | int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0; |
306 | ||
a6a2274a | 307 | |
fb0c0a12 RK |
308 | /* If this is an asm and the operand aren't legal, then fail. Likewise if |
309 | this is not an asm and the insn wasn't recognized. */ | |
310 | if ((is_asm && ! check_asm_operands (PATTERN (insn))) | |
311 | || (!is_asm && icode < 0)) | |
61719ba7 BS |
312 | return 1; |
313 | ||
fb0c0a12 RK |
314 | /* If we have to add CLOBBERs, fail if we have to add ones that reference |
315 | hard registers since our callers can't know if they are live or not. | |
316 | Otherwise, add them. */ | |
317 | if (num_clobbers > 0) | |
318 | { | |
319 | rtx newpat; | |
320 | ||
321 | if (added_clobbers_hard_reg_p (icode)) | |
322 | return 1; | |
323 | ||
324 | newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1)); | |
325 | XVECEXP (newpat, 0, 0) = pat; | |
326 | add_clobbers (newpat, icode); | |
327 | PATTERN (insn) = pat = newpat; | |
328 | } | |
329 | ||
61719ba7 BS |
330 | /* After reload, verify that all constraints are satisfied. */ |
331 | if (reload_completed) | |
332 | { | |
0eadeb15 | 333 | extract_insn (insn); |
61719ba7 | 334 | |
0eadeb15 | 335 | if (! constrain_operands (1)) |
61719ba7 BS |
336 | return 1; |
337 | } | |
338 | ||
fb0c0a12 | 339 | INSN_CODE (insn) = icode; |
61719ba7 BS |
340 | return 0; |
341 | } | |
342 | ||
2b773ee2 JH |
343 | /* Return number of changes made and not validated yet. */ |
344 | int | |
0c20a65f | 345 | num_changes_pending (void) |
2b773ee2 JH |
346 | { |
347 | return num_changes; | |
348 | } | |
349 | ||
0a634832 | 350 | /* Tentatively apply the changes numbered NUM and up. |
2055cea7 RK |
351 | Return 1 if all changes are valid, zero otherwise. */ |
352 | ||
7d22e898 | 353 | int |
0a634832 | 354 | verify_changes (int num) |
2055cea7 RK |
355 | { |
356 | int i; | |
66aa2d30 | 357 | rtx last_validated = NULL_RTX; |
2055cea7 RK |
358 | |
359 | /* The changes have been applied and all INSN_CODEs have been reset to force | |
360 | rerecognition. | |
361 | ||
362 | The changes are valid if we aren't given an object, or if we are | |
363 | given a MEM and it still is a valid address, or if this is in insn | |
364 | and it is recognized. In the latter case, if reload has completed, | |
365 | we also require that the operands meet the constraints for | |
0eadeb15 | 366 | the insn. */ |
2055cea7 | 367 | |
0a634832 | 368 | for (i = num; i < num_changes; i++) |
2055cea7 | 369 | { |
41a972a9 | 370 | rtx object = changes[i].object; |
2055cea7 | 371 | |
938d968e | 372 | /* If there is no object to test or if it is the same as the one we |
66aa2d30 JH |
373 | already tested, ignore it. */ |
374 | if (object == 0 || object == last_validated) | |
2055cea7 RK |
375 | continue; |
376 | ||
3c0cb5de | 377 | if (MEM_P (object)) |
2055cea7 RK |
378 | { |
379 | if (! memory_address_p (GET_MODE (object), XEXP (object, 0))) | |
380 | break; | |
381 | } | |
929b7fc3 AK |
382 | else if (REG_P (changes[i].old) |
383 | && asm_noperands (PATTERN (object)) > 0 | |
384 | && REG_EXPR (changes[i].old) != NULL_TREE | |
385 | && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old)) | |
386 | && DECL_REGISTER (REG_EXPR (changes[i].old))) | |
387 | { | |
388 | /* Don't allow changes of hard register operands to inline | |
389 | assemblies if they have been defined as register asm ("x"). */ | |
390 | break; | |
391 | } | |
b5b8b0ac AO |
392 | else if (DEBUG_INSN_P (object)) |
393 | continue; | |
61719ba7 | 394 | else if (insn_invalid_p (object)) |
2055cea7 RK |
395 | { |
396 | rtx pat = PATTERN (object); | |
397 | ||
398 | /* Perhaps we couldn't recognize the insn because there were | |
399 | extra CLOBBERs at the end. If so, try to re-recognize | |
400 | without the last CLOBBER (later iterations will cause each of | |
401 | them to be eliminated, in turn). But don't do this if we | |
402 | have an ASM_OPERAND. */ | |
403 | if (GET_CODE (pat) == PARALLEL | |
404 | && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER | |
405 | && asm_noperands (PATTERN (object)) < 0) | |
406 | { | |
ffb5e2e2 AM |
407 | rtx newpat; |
408 | ||
409 | if (XVECLEN (pat, 0) == 2) | |
410 | newpat = XVECEXP (pat, 0, 0); | |
411 | else | |
412 | { | |
413 | int j; | |
414 | ||
415 | newpat | |
a6a2274a | 416 | = gen_rtx_PARALLEL (VOIDmode, |
ffb5e2e2 AM |
417 | rtvec_alloc (XVECLEN (pat, 0) - 1)); |
418 | for (j = 0; j < XVECLEN (newpat, 0); j++) | |
419 | XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j); | |
420 | } | |
421 | ||
422 | /* Add a new change to this group to replace the pattern | |
423 | with this new pattern. Then consider this change | |
424 | as having succeeded. The change we added will | |
425 | cause the entire call to fail if things remain invalid. | |
426 | ||
427 | Note that this can lose if a later change than the one | |
428 | we are processing specified &XVECEXP (PATTERN (object), 0, X) | |
429 | but this shouldn't occur. */ | |
430 | ||
431 | validate_change (object, &PATTERN (object), newpat, 1); | |
432 | continue; | |
433 | } | |
b5b8b0ac AO |
434 | else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER |
435 | || GET_CODE (pat) == VAR_LOCATION) | |
2055cea7 RK |
436 | /* If this insn is a CLOBBER or USE, it is always valid, but is |
437 | never recognized. */ | |
438 | continue; | |
439 | else | |
440 | break; | |
441 | } | |
66aa2d30 | 442 | last_validated = object; |
2055cea7 RK |
443 | } |
444 | ||
0a634832 R |
445 | return (i == num_changes); |
446 | } | |
447 | ||
6fb5fa3c DB |
448 | /* A group of changes has previously been issued with validate_change |
449 | and verified with verify_changes. Call df_insn_rescan for each of | |
450 | the insn changed and clear num_changes. */ | |
0a634832 R |
451 | |
452 | void | |
453 | confirm_change_group (void) | |
454 | { | |
455 | int i; | |
95e88efd | 456 | rtx last_object = NULL; |
38c1593d | 457 | |
0a634832 | 458 | for (i = 0; i < num_changes; i++) |
6fb5fa3c DB |
459 | { |
460 | rtx object = changes[i].object; | |
95e88efd JH |
461 | |
462 | if (changes[i].unshare) | |
463 | *changes[i].loc = copy_rtx (*changes[i].loc); | |
464 | ||
fa10beec | 465 | /* Avoid unnecessary rescanning when multiple changes to same instruction |
95e88efd JH |
466 | are made. */ |
467 | if (object) | |
468 | { | |
469 | if (object != last_object && last_object && INSN_P (last_object)) | |
470 | df_insn_rescan (last_object); | |
471 | last_object = object; | |
472 | } | |
6fb5fa3c | 473 | } |
0a634832 | 474 | |
95e88efd JH |
475 | if (last_object && INSN_P (last_object)) |
476 | df_insn_rescan (last_object); | |
0a634832 R |
477 | num_changes = 0; |
478 | } | |
479 | ||
480 | /* Apply a group of changes previously issued with `validate_change'. | |
481 | If all changes are valid, call confirm_change_group and return 1, | |
482 | otherwise, call cancel_changes and return 0. */ | |
38c1593d | 483 | |
0a634832 R |
484 | int |
485 | apply_change_group (void) | |
486 | { | |
487 | if (verify_changes (0)) | |
488 | { | |
489 | confirm_change_group (); | |
2055cea7 RK |
490 | return 1; |
491 | } | |
492 | else | |
493 | { | |
494 | cancel_changes (0); | |
495 | return 0; | |
496 | } | |
497 | } | |
498 | ||
0a634832 | 499 | |
6d2f8887 | 500 | /* Return the number of changes so far in the current group. */ |
2055cea7 RK |
501 | |
502 | int | |
0c20a65f | 503 | num_validated_changes (void) |
2055cea7 RK |
504 | { |
505 | return num_changes; | |
506 | } | |
507 | ||
508 | /* Retract the changes numbered NUM and up. */ | |
509 | ||
510 | void | |
0c20a65f | 511 | cancel_changes (int num) |
2055cea7 RK |
512 | { |
513 | int i; | |
514 | ||
515 | /* Back out all the changes. Do this in the opposite order in which | |
516 | they were made. */ | |
517 | for (i = num_changes - 1; i >= num; i--) | |
518 | { | |
41a972a9 | 519 | *changes[i].loc = changes[i].old; |
3c0cb5de | 520 | if (changes[i].object && !MEM_P (changes[i].object)) |
41a972a9 | 521 | INSN_CODE (changes[i].object) = changes[i].old_code; |
2055cea7 RK |
522 | } |
523 | num_changes = num; | |
524 | } | |
525 | ||
e855c69d AB |
526 | /* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting |
527 | rtx. */ | |
2055cea7 RK |
528 | |
529 | static void | |
e855c69d AB |
530 | simplify_while_replacing (rtx *loc, rtx to, rtx object, |
531 | enum machine_mode op0_mode) | |
2055cea7 | 532 | { |
b3694847 | 533 | rtx x = *loc; |
e855c69d | 534 | enum rtx_code code = GET_CODE (x); |
55d796da | 535 | rtx new_rtx; |
2055cea7 | 536 | |
ec8e098d | 537 | if (SWAPPABLE_OPERANDS_P (x) |
30cf266f JH |
538 | && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1))) |
539 | { | |
162bfc7e UB |
540 | validate_unshare_change (object, loc, |
541 | gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code | |
542 | : swap_condition (code), | |
543 | GET_MODE (x), XEXP (x, 1), | |
544 | XEXP (x, 0)), 1); | |
30cf266f JH |
545 | x = *loc; |
546 | code = GET_CODE (x); | |
547 | } | |
06140bdf | 548 | |
2055cea7 RK |
549 | switch (code) |
550 | { | |
551 | case PLUS: | |
38e01259 | 552 | /* If we have a PLUS whose second operand is now a CONST_INT, use |
45ed7228 | 553 | simplify_gen_binary to try to simplify it. |
30cf266f JH |
554 | ??? We may want later to remove this, once simplification is |
555 | separated from this function. */ | |
481683e1 | 556 | if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to) |
30cf266f | 557 | validate_change (object, loc, |
aff8a8d5 CM |
558 | simplify_gen_binary |
559 | (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1); | |
30cf266f | 560 | break; |
06140bdf | 561 | case MINUS: |
481683e1 | 562 | if (CONST_INT_P (XEXP (x, 1)) |
30cf266f JH |
563 | || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE) |
564 | validate_change (object, loc, | |
565 | simplify_gen_binary | |
566 | (PLUS, GET_MODE (x), XEXP (x, 0), | |
567 | simplify_gen_unary (NEG, | |
0068fd96 JH |
568 | GET_MODE (x), XEXP (x, 1), |
569 | GET_MODE (x))), 1); | |
06140bdf | 570 | break; |
2055cea7 RK |
571 | case ZERO_EXTEND: |
572 | case SIGN_EXTEND: | |
30cf266f | 573 | if (GET_MODE (XEXP (x, 0)) == VOIDmode) |
2055cea7 | 574 | { |
55d796da | 575 | new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0), |
30cf266f | 576 | op0_mode); |
c0e3f87d RH |
577 | /* If any of the above failed, substitute in something that |
578 | we know won't be recognized. */ | |
55d796da KG |
579 | if (!new_rtx) |
580 | new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx); | |
581 | validate_change (object, loc, new_rtx, 1); | |
2055cea7 RK |
582 | } |
583 | break; | |
2055cea7 | 584 | case SUBREG: |
30cf266f | 585 | /* All subregs possible to simplify should be simplified. */ |
55d796da | 586 | new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode, |
30cf266f JH |
587 | SUBREG_BYTE (x)); |
588 | ||
ffb5e2e2 | 589 | /* Subregs of VOIDmode operands are incorrect. */ |
55d796da KG |
590 | if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode) |
591 | new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx); | |
592 | if (new_rtx) | |
593 | validate_change (object, loc, new_rtx, 1); | |
2055cea7 | 594 | break; |
2055cea7 RK |
595 | case ZERO_EXTRACT: |
596 | case SIGN_EXTRACT: | |
597 | /* If we are replacing a register with memory, try to change the memory | |
30cf266f JH |
598 | to be the mode required for memory in extract operations (this isn't |
599 | likely to be an insertion operation; if it was, nothing bad will | |
600 | happen, we might just fail in some cases). */ | |
2055cea7 | 601 | |
3c0cb5de | 602 | if (MEM_P (XEXP (x, 0)) |
481683e1 SZ |
603 | && CONST_INT_P (XEXP (x, 1)) |
604 | && CONST_INT_P (XEXP (x, 2)) | |
30cf266f JH |
605 | && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0)) |
606 | && !MEM_VOLATILE_P (XEXP (x, 0))) | |
2055cea7 RK |
607 | { |
608 | enum machine_mode wanted_mode = VOIDmode; | |
30cf266f | 609 | enum machine_mode is_mode = GET_MODE (XEXP (x, 0)); |
2055cea7 RK |
610 | int pos = INTVAL (XEXP (x, 2)); |
611 | ||
da920570 | 612 | if (GET_CODE (x) == ZERO_EXTRACT) |
0d8e55d8 | 613 | { |
da920570 ZW |
614 | enum machine_mode new_mode |
615 | = mode_for_extraction (EP_extzv, 1); | |
616 | if (new_mode != MAX_MACHINE_MODE) | |
617 | wanted_mode = new_mode; | |
0d8e55d8 | 618 | } |
da920570 | 619 | else if (GET_CODE (x) == SIGN_EXTRACT) |
0d8e55d8 | 620 | { |
da920570 ZW |
621 | enum machine_mode new_mode |
622 | = mode_for_extraction (EP_extv, 1); | |
623 | if (new_mode != MAX_MACHINE_MODE) | |
624 | wanted_mode = new_mode; | |
0d8e55d8 | 625 | } |
2055cea7 | 626 | |
6dc42e49 | 627 | /* If we have a narrower mode, we can do something. */ |
2055cea7 RK |
628 | if (wanted_mode != VOIDmode |
629 | && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode)) | |
630 | { | |
631 | int offset = pos / BITS_PER_UNIT; | |
632 | rtx newmem; | |
633 | ||
ddef6bc7 | 634 | /* If the bytes and bits are counted differently, we |
30cf266f | 635 | must adjust the offset. */ |
f76b9db2 | 636 | if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN) |
30cf266f JH |
637 | offset = |
638 | (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) - | |
639 | offset); | |
2055cea7 RK |
640 | |
641 | pos %= GET_MODE_BITSIZE (wanted_mode); | |
642 | ||
f1ec5147 | 643 | newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset); |
2055cea7 | 644 | |
9e4223f2 | 645 | validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1); |
2055cea7 RK |
646 | validate_change (object, &XEXP (x, 0), newmem, 1); |
647 | } | |
648 | } | |
649 | ||
650 | break; | |
30cf266f | 651 | |
38a448ca RH |
652 | default: |
653 | break; | |
2055cea7 | 654 | } |
2055cea7 RK |
655 | } |
656 | ||
e855c69d AB |
657 | /* Replace every occurrence of FROM in X with TO. Mark each change with |
658 | validate_change passing OBJECT. */ | |
659 | ||
660 | static void | |
661 | validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object, | |
662 | bool simplify) | |
663 | { | |
664 | int i, j; | |
665 | const char *fmt; | |
666 | rtx x = *loc; | |
667 | enum rtx_code code; | |
668 | enum machine_mode op0_mode = VOIDmode; | |
669 | int prev_changes = num_changes; | |
670 | ||
671 | if (!x) | |
672 | return; | |
673 | ||
674 | code = GET_CODE (x); | |
675 | fmt = GET_RTX_FORMAT (code); | |
676 | if (fmt[0] == 'e') | |
677 | op0_mode = GET_MODE (XEXP (x, 0)); | |
678 | ||
679 | /* X matches FROM if it is the same rtx or they are both referring to the | |
680 | same register in the same mode. Avoid calling rtx_equal_p unless the | |
681 | operands look similar. */ | |
682 | ||
683 | if (x == from | |
684 | || (REG_P (x) && REG_P (from) | |
685 | && GET_MODE (x) == GET_MODE (from) | |
686 | && REGNO (x) == REGNO (from)) | |
687 | || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from) | |
688 | && rtx_equal_p (x, from))) | |
689 | { | |
690 | validate_unshare_change (object, loc, to, 1); | |
691 | return; | |
692 | } | |
693 | ||
694 | /* Call ourself recursively to perform the replacements. | |
695 | We must not replace inside already replaced expression, otherwise we | |
696 | get infinite recursion for replacements like (reg X)->(subreg (reg X)) | |
697 | done by regmove, so we must special case shared ASM_OPERANDS. */ | |
698 | ||
699 | if (GET_CODE (x) == PARALLEL) | |
700 | { | |
701 | for (j = XVECLEN (x, 0) - 1; j >= 0; j--) | |
702 | { | |
703 | if (j && GET_CODE (XVECEXP (x, 0, j)) == SET | |
704 | && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS) | |
705 | { | |
706 | /* Verify that operands are really shared. */ | |
707 | gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0))) | |
708 | == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP | |
709 | (x, 0, j)))); | |
710 | validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)), | |
711 | from, to, object, simplify); | |
712 | } | |
713 | else | |
714 | validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object, | |
715 | simplify); | |
716 | } | |
717 | } | |
718 | else | |
719 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
720 | { | |
721 | if (fmt[i] == 'e') | |
722 | validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify); | |
723 | else if (fmt[i] == 'E') | |
724 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
725 | validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object, | |
726 | simplify); | |
727 | } | |
728 | ||
729 | /* If we didn't substitute, there is nothing more to do. */ | |
730 | if (num_changes == prev_changes) | |
731 | return; | |
732 | ||
733 | /* Allow substituted expression to have different mode. This is used by | |
734 | regmove to change mode of pseudo register. */ | |
735 | if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode) | |
736 | op0_mode = GET_MODE (XEXP (x, 0)); | |
737 | ||
738 | /* Do changes needed to keep rtx consistent. Don't do any other | |
739 | simplifications, as it is not our job. */ | |
740 | if (simplify) | |
741 | simplify_while_replacing (loc, to, object, op0_mode); | |
742 | } | |
743 | ||
7acf4da6 DD |
744 | /* Try replacing every occurrence of FROM in subexpression LOC of INSN |
745 | with TO. After all changes have been made, validate by seeing | |
746 | if INSN is still valid. */ | |
747 | ||
748 | int | |
749 | validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc) | |
750 | { | |
751 | validate_replace_rtx_1 (loc, from, to, insn, true); | |
752 | return apply_change_group (); | |
753 | } | |
754 | ||
2055cea7 RK |
755 | /* Try replacing every occurrence of FROM in INSN with TO. After all |
756 | changes have been made, validate by seeing if INSN is still valid. */ | |
757 | ||
758 | int | |
0c20a65f | 759 | validate_replace_rtx (rtx from, rtx to, rtx insn) |
2055cea7 | 760 | { |
e855c69d | 761 | validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true); |
2055cea7 RK |
762 | return apply_change_group (); |
763 | } | |
7506f491 | 764 | |
e855c69d AB |
765 | /* Try replacing every occurrence of FROM in WHERE with TO. Assume that WHERE |
766 | is a part of INSN. After all changes have been made, validate by seeing if | |
767 | INSN is still valid. | |
768 | validate_replace_rtx (from, to, insn) is equivalent to | |
769 | validate_replace_rtx_part (from, to, &PATTERN (insn), insn). */ | |
770 | ||
771 | int | |
772 | validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn) | |
773 | { | |
774 | validate_replace_rtx_1 (where, from, to, insn, true); | |
775 | return apply_change_group (); | |
776 | } | |
777 | ||
778 | /* Same as above, but do not simplify rtx afterwards. */ | |
779 | int | |
780 | validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where, | |
781 | rtx insn) | |
782 | { | |
783 | validate_replace_rtx_1 (where, from, to, insn, false); | |
784 | return apply_change_group (); | |
785 | ||
786 | } | |
787 | ||
b71e8e84 | 788 | /* Try replacing every occurrence of FROM in INSN with TO. */ |
edfac33e JL |
789 | |
790 | void | |
0c20a65f | 791 | validate_replace_rtx_group (rtx from, rtx to, rtx insn) |
edfac33e | 792 | { |
e855c69d | 793 | validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true); |
edfac33e JL |
794 | } |
795 | ||
e2373f95 RK |
796 | /* Function called by note_uses to replace used subexpressions. */ |
797 | struct validate_replace_src_data | |
fb0c0a12 RK |
798 | { |
799 | rtx from; /* Old RTX */ | |
800 | rtx to; /* New RTX */ | |
801 | rtx insn; /* Insn in which substitution is occurring. */ | |
802 | }; | |
e2373f95 RK |
803 | |
804 | static void | |
0c20a65f | 805 | validate_replace_src_1 (rtx *x, void *data) |
e2373f95 RK |
806 | { |
807 | struct validate_replace_src_data *d | |
808 | = (struct validate_replace_src_data *) data; | |
809 | ||
e855c69d | 810 | validate_replace_rtx_1 (x, d->from, d->to, d->insn, true); |
e2373f95 RK |
811 | } |
812 | ||
7506f491 | 813 | /* Try replacing every occurrence of FROM in INSN with TO, avoiding |
2b773ee2 | 814 | SET_DESTs. */ |
7506f491 | 815 | |
2b773ee2 | 816 | void |
0c20a65f | 817 | validate_replace_src_group (rtx from, rtx to, rtx insn) |
7506f491 | 818 | { |
e2373f95 | 819 | struct validate_replace_src_data d; |
b71e8e84 | 820 | |
e2373f95 RK |
821 | d.from = from; |
822 | d.to = to; | |
823 | d.insn = insn; | |
824 | note_uses (&PATTERN (insn), validate_replace_src_1, &d); | |
2b773ee2 | 825 | } |
8cd37d0b RL |
826 | |
827 | /* Try simplify INSN. | |
828 | Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's | |
829 | pattern and return true if something was simplified. */ | |
830 | ||
831 | bool | |
832 | validate_simplify_insn (rtx insn) | |
833 | { | |
834 | int i; | |
835 | rtx pat = NULL; | |
836 | rtx newpat = NULL; | |
837 | ||
838 | pat = PATTERN (insn); | |
839 | ||
840 | if (GET_CODE (pat) == SET) | |
841 | { | |
842 | newpat = simplify_rtx (SET_SRC (pat)); | |
843 | if (newpat && !rtx_equal_p (SET_SRC (pat), newpat)) | |
844 | validate_change (insn, &SET_SRC (pat), newpat, 1); | |
845 | newpat = simplify_rtx (SET_DEST (pat)); | |
846 | if (newpat && !rtx_equal_p (SET_DEST (pat), newpat)) | |
847 | validate_change (insn, &SET_DEST (pat), newpat, 1); | |
848 | } | |
849 | else if (GET_CODE (pat) == PARALLEL) | |
850 | for (i = 0; i < XVECLEN (pat, 0); i++) | |
851 | { | |
852 | rtx s = XVECEXP (pat, 0, i); | |
853 | ||
854 | if (GET_CODE (XVECEXP (pat, 0, i)) == SET) | |
855 | { | |
856 | newpat = simplify_rtx (SET_SRC (s)); | |
857 | if (newpat && !rtx_equal_p (SET_SRC (s), newpat)) | |
858 | validate_change (insn, &SET_SRC (s), newpat, 1); | |
859 | newpat = simplify_rtx (SET_DEST (s)); | |
860 | if (newpat && !rtx_equal_p (SET_DEST (s), newpat)) | |
861 | validate_change (insn, &SET_DEST (s), newpat, 1); | |
862 | } | |
863 | } | |
864 | return ((num_changes_pending () > 0) && (apply_change_group () > 0)); | |
865 | } | |
2055cea7 RK |
866 | \f |
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  /* Only a real insn can be inspected; anything else (e.g. a jump table)
     gets the conservative answer as well.  */
  return (INSN_P (next)
	  && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
885 | \f | |
2055cea7 RK |
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  /* A CONST_INT must already be sign-extended for MODE.  */
  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	     || mode == VOIDmode)
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	    && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 reference to be explicit, so outlaw paradoxical SUBREGs.
	 However, we must allow them after reload so that they can
	 get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
	 may result in incorrect reference.  We should simplify all valid
	 subregs of MEM anyway.  But allow this after reload because we
	 might be called from cleanup_subreg_operands.

	 ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
	  && MEM_P (sub))
	return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      /* Look through the SUBREG and validate what it wraps.  */
      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_p (GET_MODE (op), y))
	return 1;
    }

  return 0;
}
987 | \f | |
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}
999 | ||
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
	return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      /* Reject hard-register SUBREGs whose mode change the target forbids,
	 except for complex modes (which are decomposed elsewhere).  */
      if (REG_P (sub)
	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
	  && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
	return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
1057 | ||
556ffcc5 RH |
/* Return 1 for a register in Pmode; ignore the tested mode.
   Useful for operands that are always addresses regardless of the
   mode the pattern was matched with.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}
1065 | ||
2055cea7 RK |
1066 | /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH |
1067 | or a hard register. */ | |
1068 | ||
1069 | int | |
0c20a65f | 1070 | scratch_operand (rtx op, enum machine_mode mode) |
2055cea7 | 1071 | { |
a05924f9 JH |
1072 | if (GET_MODE (op) != mode && mode != VOIDmode) |
1073 | return 0; | |
1074 | ||
1075 | return (GET_CODE (op) == SCRATCH | |
f8cfc6aa | 1076 | || (REG_P (op) |
a05924f9 | 1077 | && REGNO (op) < FIRST_PSEUDO_REGISTER)); |
2055cea7 RK |
1078 | } |
1079 | ||
1080 | /* Return 1 if OP is a valid immediate operand for mode MODE. | |
1081 | ||
1082 | The main use of this function is as a predicate in match_operand | |
1083 | expressions in the machine description. */ | |
1084 | ||
1085 | int | |
0c20a65f | 1086 | immediate_operand (rtx op, enum machine_mode mode) |
2055cea7 RK |
1087 | { |
1088 | /* Don't accept CONST_INT or anything similar | |
1089 | if the caller wants something floating. */ | |
1090 | if (GET_MODE (op) == VOIDmode && mode != VOIDmode | |
4bb4c82e RK |
1091 | && GET_MODE_CLASS (mode) != MODE_INT |
1092 | && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT) | |
2055cea7 RK |
1093 | return 0; |
1094 | ||
481683e1 | 1095 | if (CONST_INT_P (op) |
71012d97 | 1096 | && mode != VOIDmode |
c033e268 AO |
1097 | && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op)) |
1098 | return 0; | |
1099 | ||
2055cea7 RK |
1100 | return (CONSTANT_P (op) |
1101 | && (GET_MODE (op) == mode || mode == VOIDmode | |
1102 | || GET_MODE (op) == VOIDmode) | |
2055cea7 | 1103 | && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)) |
2055cea7 RK |
1104 | && LEGITIMATE_CONSTANT_P (op)); |
1105 | } | |
1106 | ||
1107 | /* Returns 1 if OP is an operand that is a CONST_INT. */ | |
1108 | ||
1109 | int | |
0c20a65f | 1110 | const_int_operand (rtx op, enum machine_mode mode) |
2055cea7 | 1111 | { |
481683e1 | 1112 | if (!CONST_INT_P (op)) |
b4fbaca7 RH |
1113 | return 0; |
1114 | ||
1115 | if (mode != VOIDmode | |
1116 | && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op)) | |
1117 | return 0; | |
1118 | ||
1119 | return 1; | |
2055cea7 RK |
1120 | } |
1121 | ||
1122 | /* Returns 1 if OP is an operand that is a constant integer or constant | |
1123 | floating-point number. */ | |
1124 | ||
1125 | int | |
0c20a65f | 1126 | const_double_operand (rtx op, enum machine_mode mode) |
2055cea7 RK |
1127 | { |
1128 | /* Don't accept CONST_INT or anything similar | |
1129 | if the caller wants something floating. */ | |
1130 | if (GET_MODE (op) == VOIDmode && mode != VOIDmode | |
4bb4c82e RK |
1131 | && GET_MODE_CLASS (mode) != MODE_INT |
1132 | && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT) | |
2055cea7 RK |
1133 | return 0; |
1134 | ||
481683e1 | 1135 | return ((GET_CODE (op) == CONST_DOUBLE || CONST_INT_P (op)) |
2055cea7 RK |
1136 | && (mode == VOIDmode || GET_MODE (op) == mode |
1137 | || GET_MODE (op) == VOIDmode)); | |
1138 | } | |
1139 | ||
/* Return 1 if OP is a general operand that is not an immediate operand,
   i.e. a register or memory reference valid for MODE.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}
1147 | ||
/* Return 1 if OP is a register reference or immediate value of mode MODE.
   This mirrors the constant handling of `general_operand' and the
   register/SUBREG handling of `register_operand'.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
	 if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
	return 0;

      /* A CONST_INT must already be sign-extended for MODE.  */
      if (CONST_INT_P (op)
	  && mode != VOIDmode
	  && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
	return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	       || mode == VOIDmode)
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	      && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
	return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
1195 | ||
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      /* No padding needed: the target's canonical push address form
	 (pre-dec or post-inc of the stack pointer) must appear.  */
      if (GET_CODE (op) != STACK_PUSH_CODE)
	return 0;
    }
  else
    {
      /* The push is padded to ROUNDED_SIZE, so only an explicit
	 (pre_modify sp (plus sp (const_int +/-rounded_size))) matches;
	 the sign of the adjustment follows the stack direction.  */
      if (GET_CODE (op) != PRE_MODIFY
	  || GET_CODE (XEXP (op, 1)) != PLUS
	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
	  || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
#ifdef STACK_GROWS_DOWNWARD
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
	  )
	return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
1241 | ||
6fbe9bd8 RH |
1242 | /* Return 1 if OP is a valid operand that stands for popping a |
1243 | value of mode MODE off the stack. | |
1244 | ||
1245 | The main use of this function is as a predicate in match_operand | |
1246 | expressions in the machine description. */ | |
1247 | ||
1248 | int | |
0c20a65f | 1249 | pop_operand (rtx op, enum machine_mode mode) |
6fbe9bd8 | 1250 | { |
3c0cb5de | 1251 | if (!MEM_P (op)) |
6fbe9bd8 RH |
1252 | return 0; |
1253 | ||
aeb7ff68 | 1254 | if (mode != VOIDmode && GET_MODE (op) != mode) |
6fbe9bd8 RH |
1255 | return 0; |
1256 | ||
1257 | op = XEXP (op, 0); | |
1258 | ||
1259 | if (GET_CODE (op) != STACK_POP_CODE) | |
1260 | return 0; | |
1261 | ||
1262 | return XEXP (op, 0) == stack_pointer_rtx; | |
1263 | } | |
1264 | ||
2055cea7 RK |
/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  /* Old-style target macro: it jumps to the given label on success.  */
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  /* New-style target hook; third argument 0 means non-strict checking.  */
  return targetm.legitimate_address_p (mode, addr, 0);
#endif
}
1280 | ||
1281 | /* Return 1 if OP is a valid memory reference with mode MODE, | |
1282 | including a valid address. | |
1283 | ||
1284 | The main use of this function is as a predicate in match_operand | |
1285 | expressions in the machine description. */ | |
1286 | ||
1287 | int | |
0c20a65f | 1288 | memory_operand (rtx op, enum machine_mode mode) |
2055cea7 RK |
1289 | { |
1290 | rtx inner; | |
1291 | ||
1292 | if (! reload_completed) | |
1293 | /* Note that no SUBREG is a memory operand before end of reload pass, | |
1294 | because (SUBREG (MEM...)) forces reloading into a register. */ | |
3c0cb5de | 1295 | return MEM_P (op) && general_operand (op, mode); |
2055cea7 RK |
1296 | |
1297 | if (mode != VOIDmode && GET_MODE (op) != mode) | |
1298 | return 0; | |
1299 | ||
1300 | inner = op; | |
1301 | if (GET_CODE (inner) == SUBREG) | |
1302 | inner = SUBREG_REG (inner); | |
1303 | ||
3c0cb5de | 1304 | return (MEM_P (inner) && general_operand (op, mode)); |
2055cea7 RK |
1305 | } |
1306 | ||
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
	      || (GET_CODE (XEXP (inner, 0)) == PLUS
		  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}
1339 | ||
c6963675 PB |
1340 | /* Return 1 if this is an ordered comparison operator (not including |
1341 | ORDERED and UNORDERED). */ | |
1342 | ||
1343 | int | |
1344 | ordered_comparison_operator (rtx op, enum machine_mode mode) | |
1345 | { | |
1346 | if (mode != VOIDmode && GET_MODE (op) != mode) | |
1347 | return false; | |
1348 | switch (GET_CODE (op)) | |
1349 | { | |
1350 | case EQ: | |
1351 | case NE: | |
1352 | case LT: | |
1353 | case LTU: | |
1354 | case LE: | |
1355 | case LEU: | |
1356 | case GT: | |
1357 | case GTU: | |
1358 | case GE: | |
1359 | case GEU: | |
1360 | return true; | |
1361 | default: | |
1362 | return false; | |
1363 | } | |
1364 | } | |
1365 | ||
2055cea7 RK |
/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
	  && COMPARISON_P (op));
}
1375 | \f | |
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	/* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
	return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
	return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
	  && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is
	     [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
	  int i;
	  int n_sets;

	  /* Count backwards through CLOBBERs to determine number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
		 then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
		return -1;
	    }
	  return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
		  + n_sets);
	}
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...) (clobber (reg ...))...].  */
	  int i;

	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;

	  return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
	}
      else
	return -1;
    default:
      return -1;
    }
}
1453 | ||
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.

   NOTE(review): if BODY matches none of the recognized forms, ASMOP
   stays null and the final accesses dereference it -- callers are
   presumably required to pass a body for which asm_noperands >= 0;
   confirm against callers.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
		     const char **constraints, enum machine_mode *modes,
		     location_t *loc)
{
  int i;
  int noperands;
  rtx asmop = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      /* Inputs occupy slots 1..noperands-1; slot 0 is the output.  */
      for (i = 1; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
	}

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin;
      int nout = 0;		/* Does not include CLOBBERs.  */

      asmop = SET_SRC (XVECEXP (body, 0, 0));
      nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
	 Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
	{
	  if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
	    break;		/* Past last SET */

	  if (operands)
	    operands[i] = SET_DEST (XVECEXP (body, 0, i));
	  if (operand_locs)
	    operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
	  if (constraints)
	    constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
	  if (modes)
	    modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	  nout++;
	}

      /* Inputs follow the outputs in the caller's vectors.  */
      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      int nin;

      asmop = XVECEXP (body, 0, 0);
      nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
1f06ee8d | 1596 | |
d1a6adeb | 1597 | /* Check if an asm_operand matches its constraints. |
1afbe1c4 | 1598 | Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */ |
1f06ee8d RH |
1599 | |
1600 | int | |
eca72963 | 1601 | asm_operand_ok (rtx op, const char *constraint, const char **constraints) |
1f06ee8d | 1602 | { |
1afbe1c4 RH |
1603 | int result = 0; |
1604 | ||
1f06ee8d | 1605 | /* Use constrain_operands after reload. */ |
41374e13 | 1606 | gcc_assert (!reload_completed); |
1f06ee8d RH |
1607 | |
1608 | while (*constraint) | |
1609 | { | |
97488870 R |
1610 | char c = *constraint; |
1611 | int len; | |
c2cba7a9 | 1612 | switch (c) |
1f06ee8d | 1613 | { |
97488870 R |
1614 | case ',': |
1615 | constraint++; | |
1616 | continue; | |
1f06ee8d RH |
1617 | case '=': |
1618 | case '+': | |
1619 | case '*': | |
1620 | case '%': | |
1f06ee8d RH |
1621 | case '!': |
1622 | case '#': | |
1623 | case '&': | |
97488870 | 1624 | case '?': |
1f06ee8d RH |
1625 | break; |
1626 | ||
1627 | case '0': case '1': case '2': case '3': case '4': | |
1628 | case '5': case '6': case '7': case '8': case '9': | |
eca72963 JJ |
1629 | /* If caller provided constraints pointer, look up |
1630 | the maching constraint. Otherwise, our caller should have | |
1631 | given us the proper matching constraint, but we can't | |
1632 | actually fail the check if they didn't. Indicate that | |
1633 | results are inconclusive. */ | |
1634 | if (constraints) | |
1635 | { | |
1636 | char *end; | |
1637 | unsigned long match; | |
1638 | ||
1639 | match = strtoul (constraint, &end, 10); | |
1640 | if (!result) | |
1641 | result = asm_operand_ok (op, constraints[match], NULL); | |
1642 | constraint = (const char *) end; | |
1643 | } | |
1644 | else | |
1645 | { | |
1646 | do | |
1647 | constraint++; | |
1648 | while (ISDIGIT (*constraint)); | |
1649 | if (! result) | |
1650 | result = -1; | |
1651 | } | |
97488870 | 1652 | continue; |
1f06ee8d RH |
1653 | |
1654 | case 'p': | |
1655 | if (address_operand (op, VOIDmode)) | |
97488870 | 1656 | result = 1; |
1f06ee8d RH |
1657 | break; |
1658 | ||
a4edaf83 | 1659 | case TARGET_MEM_CONSTRAINT: |
1f06ee8d RH |
1660 | case 'V': /* non-offsettable */ |
1661 | if (memory_operand (op, VOIDmode)) | |
97488870 | 1662 | result = 1; |
1f06ee8d RH |
1663 | break; |
1664 | ||
1665 | case 'o': /* offsettable */ | |
1666 | if (offsettable_nonstrict_memref_p (op)) | |
97488870 | 1667 | result = 1; |
1f06ee8d RH |
1668 | break; |
1669 | ||
1670 | case '<': | |
6fb5fa3c | 1671 | /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed to exist, |
1afbe1c4 RH |
1672 | excepting those that expand_call created. Further, on some |
1673 | machines which do not have generalized auto inc/dec, an inc/dec | |
1674 | is not a memory_operand. | |
1675 | ||
1676 | Match any memory and hope things are resolved after reload. */ | |
1677 | ||
3c0cb5de | 1678 | if (MEM_P (op) |
1afbe1c4 RH |
1679 | && (1 |
1680 | || GET_CODE (XEXP (op, 0)) == PRE_DEC | |
a6a2274a | 1681 | || GET_CODE (XEXP (op, 0)) == POST_DEC)) |
97488870 | 1682 | result = 1; |
1f06ee8d RH |
1683 | break; |
1684 | ||
1685 | case '>': | |
3c0cb5de | 1686 | if (MEM_P (op) |
1afbe1c4 RH |
1687 | && (1 |
1688 | || GET_CODE (XEXP (op, 0)) == PRE_INC | |
a6a2274a | 1689 | || GET_CODE (XEXP (op, 0)) == POST_INC)) |
97488870 | 1690 | result = 1; |
1f06ee8d RH |
1691 | break; |
1692 | ||
1693 | case 'E': | |
1f06ee8d | 1694 | case 'F': |
bf7cd754 R |
1695 | if (GET_CODE (op) == CONST_DOUBLE |
1696 | || (GET_CODE (op) == CONST_VECTOR | |
1697 | && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT)) | |
97488870 | 1698 | result = 1; |
1f06ee8d RH |
1699 | break; |
1700 | ||
1701 | case 'G': | |
1702 | if (GET_CODE (op) == CONST_DOUBLE | |
97488870 R |
1703 | && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint)) |
1704 | result = 1; | |
1f06ee8d RH |
1705 | break; |
1706 | case 'H': | |
1707 | if (GET_CODE (op) == CONST_DOUBLE | |
97488870 R |
1708 | && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint)) |
1709 | result = 1; | |
1f06ee8d RH |
1710 | break; |
1711 | ||
1712 | case 's': | |
481683e1 | 1713 | if (CONST_INT_P (op) |
1f06ee8d RH |
1714 | || (GET_CODE (op) == CONST_DOUBLE |
1715 | && GET_MODE (op) == VOIDmode)) | |
1716 | break; | |
5d3cc252 | 1717 | /* Fall through. */ |
1f06ee8d RH |
1718 | |
1719 | case 'i': | |
2e4e72b1 | 1720 | if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))) |
97488870 | 1721 | result = 1; |
1f06ee8d RH |
1722 | break; |
1723 | ||
1724 | case 'n': | |
481683e1 | 1725 | if (CONST_INT_P (op) |
1f06ee8d RH |
1726 | || (GET_CODE (op) == CONST_DOUBLE |
1727 | && GET_MODE (op) == VOIDmode)) | |
97488870 | 1728 | result = 1; |
1f06ee8d RH |
1729 | break; |
1730 | ||
1731 | case 'I': | |
481683e1 | 1732 | if (CONST_INT_P (op) |
97488870 R |
1733 | && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint)) |
1734 | result = 1; | |
1f06ee8d RH |
1735 | break; |
1736 | case 'J': | |
481683e1 | 1737 | if (CONST_INT_P (op) |
97488870 R |
1738 | && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint)) |
1739 | result = 1; | |
1f06ee8d RH |
1740 | break; |
1741 | case 'K': | |
481683e1 | 1742 | if (CONST_INT_P (op) |
97488870 R |
1743 | && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint)) |
1744 | result = 1; | |
1f06ee8d RH |
1745 | break; |
1746 | case 'L': | |
481683e1 | 1747 | if (CONST_INT_P (op) |
97488870 R |
1748 | && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint)) |
1749 | result = 1; | |
1f06ee8d RH |
1750 | break; |
1751 | case 'M': | |
481683e1 | 1752 | if (CONST_INT_P (op) |
97488870 R |
1753 | && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint)) |
1754 | result = 1; | |
1f06ee8d RH |
1755 | break; |
1756 | case 'N': | |
481683e1 | 1757 | if (CONST_INT_P (op) |
97488870 R |
1758 | && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint)) |
1759 | result = 1; | |
1f06ee8d RH |
1760 | break; |
1761 | case 'O': | |
481683e1 | 1762 | if (CONST_INT_P (op) |
97488870 R |
1763 | && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint)) |
1764 | result = 1; | |
1f06ee8d RH |
1765 | break; |
1766 | case 'P': | |
481683e1 | 1767 | if (CONST_INT_P (op) |
97488870 R |
1768 | && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint)) |
1769 | result = 1; | |
1f06ee8d RH |
1770 | break; |
1771 | ||
1772 | case 'X': | |
97488870 | 1773 | result = 1; |
3b6c3bb0 | 1774 | break; |
1f06ee8d RH |
1775 | |
1776 | case 'g': | |
1777 | if (general_operand (op, VOIDmode)) | |
97488870 | 1778 | result = 1; |
1f06ee8d RH |
1779 | break; |
1780 | ||
c2cba7a9 RH |
1781 | default: |
1782 | /* For all other letters, we first check for a register class, | |
1783 | otherwise it is an EXTRA_CONSTRAINT. */ | |
97488870 | 1784 | if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS) |
c2cba7a9 RH |
1785 | { |
1786 | case 'r': | |
1787 | if (GET_MODE (op) == BLKmode) | |
1788 | break; | |
1789 | if (register_operand (op, VOIDmode)) | |
97488870 | 1790 | result = 1; |
c2cba7a9 | 1791 | } |
97488870 | 1792 | #ifdef EXTRA_CONSTRAINT_STR |
c499b300 AK |
1793 | else if (EXTRA_MEMORY_CONSTRAINT (c, constraint)) |
1794 | /* Every memory operand can be reloaded to fit. */ | |
1795 | result = result || memory_operand (op, VOIDmode); | |
1796 | else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint)) | |
1797 | /* Every address operand can be reloaded to fit. */ | |
1798 | result = result || address_operand (op, VOIDmode); | |
3b6c3bb0 JW |
1799 | else if (EXTRA_CONSTRAINT_STR (op, c, constraint)) |
1800 | result = 1; | |
1f06ee8d | 1801 | #endif |
1f06ee8d RH |
1802 | break; |
1803 | } | |
97488870 R |
1804 | len = CONSTRAINT_LEN (c, constraint); |
1805 | do | |
1806 | constraint++; | |
1807 | while (--len && *constraint); | |
1808 | if (len) | |
1809 | return 0; | |
1f06ee8d RH |
1810 | } |
1811 | ||
1afbe1c4 | 1812 | return result; |
1f06ee8d | 1813 | } |
2055cea7 | 1814 | \f |
2055cea7 RK |
1815 | /* Given an rtx *P, if it is a sum containing an integer constant term, |
1816 | return the location (type rtx *) of the pointer to that constant term. | |
1817 | Otherwise, return a null pointer. */ | |
1818 | ||
b72f00af | 1819 | rtx * |
0c20a65f | 1820 | find_constant_term_loc (rtx *p) |
2055cea7 | 1821 | { |
b3694847 SS |
1822 | rtx *tem; |
1823 | enum rtx_code code = GET_CODE (*p); | |
2055cea7 RK |
1824 | |
1825 | /* If *P IS such a constant term, P is its location. */ | |
1826 | ||
1827 | if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF | |
1828 | || code == CONST) | |
1829 | return p; | |
1830 | ||
1831 | /* Otherwise, if not a sum, it has no constant term. */ | |
1832 | ||
1833 | if (GET_CODE (*p) != PLUS) | |
1834 | return 0; | |
1835 | ||
1836 | /* If one of the summands is constant, return its location. */ | |
1837 | ||
1838 | if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0)) | |
1839 | && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1))) | |
1840 | return p; | |
1841 | ||
1842 | /* Otherwise, check each summand for containing a constant term. */ | |
1843 | ||
1844 | if (XEXP (*p, 0) != 0) | |
1845 | { | |
1846 | tem = find_constant_term_loc (&XEXP (*p, 0)); | |
1847 | if (tem != 0) | |
1848 | return tem; | |
1849 | } | |
1850 | ||
1851 | if (XEXP (*p, 1) != 0) | |
1852 | { | |
1853 | tem = find_constant_term_loc (&XEXP (*p, 1)); | |
1854 | if (tem != 0) | |
1855 | return tem; | |
1856 | } | |
1857 | ||
1858 | return 0; | |
1859 | } | |
1860 | \f | |
1861 | /* Return 1 if OP is a memory reference | |
1862 | whose address contains no side effects | |
1863 | and remains valid after the addition | |
1864 | of a positive integer less than the | |
1865 | size of the object being referenced. | |
1866 | ||
1867 | We assume that the original address is valid and do not check it. | |
1868 | ||
1869 | This uses strict_memory_address_p as a subroutine, so | |
1870 | don't use it before reload. */ | |
1871 | ||
1872 | int | |
0c20a65f | 1873 | offsettable_memref_p (rtx op) |
2055cea7 | 1874 | { |
3c0cb5de | 1875 | return ((MEM_P (op)) |
2055cea7 RK |
1876 | && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0))); |
1877 | } | |
1878 | ||
1879 | /* Similar, but don't require a strictly valid mem ref: | |
1880 | consider pseudo-regs valid as index or base regs. */ | |
1881 | ||
1882 | int | |
0c20a65f | 1883 | offsettable_nonstrict_memref_p (rtx op) |
2055cea7 | 1884 | { |
3c0cb5de | 1885 | return ((MEM_P (op)) |
2055cea7 RK |
1886 | && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0))); |
1887 | } | |
1888 | ||
1889 | /* Return 1 if Y is a memory address which contains no side effects | |
1890 | and would remain valid after the addition of a positive integer | |
1891 | less than the size of that mode. | |
1892 | ||
1893 | We assume that the original address is valid and do not check it. | |
1894 | We do check that it is valid for narrower modes. | |
1895 | ||
1896 | If STRICTP is nonzero, we require a strictly valid address, | |
1897 | for the sake of use in reload.c. */ | |
1898 | ||
1899 | int | |
0c20a65f | 1900 | offsettable_address_p (int strictp, enum machine_mode mode, rtx y) |
2055cea7 | 1901 | { |
b3694847 SS |
1902 | enum rtx_code ycode = GET_CODE (y); |
1903 | rtx z; | |
2055cea7 RK |
1904 | rtx y1 = y; |
1905 | rtx *y2; | |
0c20a65f | 1906 | int (*addressp) (enum machine_mode, rtx) = |
341a243e | 1907 | (strictp ? strict_memory_address_p : memory_address_p); |
7bdebc3a | 1908 | unsigned int mode_sz = GET_MODE_SIZE (mode); |
2055cea7 RK |
1909 | |
1910 | if (CONSTANT_ADDRESS_P (y)) | |
1911 | return 1; | |
1912 | ||
1913 | /* Adjusting an offsettable address involves changing to a narrower mode. | |
1914 | Make sure that's OK. */ | |
1915 | ||
1916 | if (mode_dependent_address_p (y)) | |
1917 | return 0; | |
1918 | ||
7bdebc3a RH |
1919 | /* ??? How much offset does an offsettable BLKmode reference need? |
1920 | Clearly that depends on the situation in which it's being used. | |
1921 | However, the current situation in which we test 0xffffffff is | |
1922 | less than ideal. Caveat user. */ | |
1923 | if (mode_sz == 0) | |
1924 | mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT; | |
1925 | ||
2055cea7 RK |
1926 | /* If the expression contains a constant term, |
1927 | see if it remains valid when max possible offset is added. */ | |
1928 | ||
1929 | if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1))) | |
1930 | { | |
1931 | int good; | |
1932 | ||
1933 | y1 = *y2; | |
7bdebc3a | 1934 | *y2 = plus_constant (*y2, mode_sz - 1); |
2055cea7 RK |
1935 | /* Use QImode because an odd displacement may be automatically invalid |
1936 | for any wider mode. But it should be valid for a single byte. */ | |
1937 | good = (*addressp) (QImode, y); | |
1938 | ||
1939 | /* In any case, restore old contents of memory. */ | |
1940 | *y2 = y1; | |
1941 | return good; | |
1942 | } | |
1943 | ||
ec8e098d | 1944 | if (GET_RTX_CLASS (ycode) == RTX_AUTOINC) |
2055cea7 RK |
1945 | return 0; |
1946 | ||
1947 | /* The offset added here is chosen as the maximum offset that | |
1948 | any instruction could need to add when operating on something | |
1949 | of the specified mode. We assume that if Y and Y+c are | |
07217645 RK |
1950 | valid addresses then so is Y+d for all 0<d<c. adjust_address will |
1951 | go inside a LO_SUM here, so we do so as well. */ | |
2f15e255 RH |
1952 | if (GET_CODE (y) == LO_SUM |
1953 | && mode != BLKmode | |
1954 | && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT) | |
07217645 RK |
1955 | z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0), |
1956 | plus_constant (XEXP (y, 1), mode_sz - 1)); | |
1957 | else | |
1958 | z = plus_constant (y, mode_sz - 1); | |
2055cea7 RK |
1959 | |
1960 | /* Use QImode because an odd displacement may be automatically invalid | |
1961 | for any wider mode. But it should be valid for a single byte. */ | |
1962 | return (*addressp) (QImode, z); | |
1963 | } | |
1964 | ||
1965 | /* Return 1 if ADDR is an address-expression whose effect depends | |
1966 | on the mode of the memory reference it is used in. | |
1967 | ||
1968 | Autoincrement addressing is a typical example of mode-dependence | |
1969 | because the amount of the increment depends on the mode. */ | |
1970 | ||
1971 | int | |
b9a76028 | 1972 | mode_dependent_address_p (rtx addr) |
2055cea7 | 1973 | { |
b9a76028 MS |
1974 | /* Auto-increment addressing with anything other than post_modify |
1975 | or pre_modify always introduces a mode dependency. Catch such | |
1976 | cases now instead of deferring to the target. */ | |
1977 | if (GET_CODE (addr) == PRE_INC | |
1978 | || GET_CODE (addr) == POST_INC | |
1979 | || GET_CODE (addr) == PRE_DEC | |
1980 | || GET_CODE (addr) == POST_DEC) | |
1981 | return 1; | |
1982 | ||
2055cea7 RK |
1983 | GO_IF_MODE_DEPENDENT_ADDRESS (addr, win); |
1984 | return 0; | |
dc297297 | 1985 | /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */ |
47c3ed98 | 1986 | win: ATTRIBUTE_UNUSED_LABEL |
2055cea7 RK |
1987 | return 1; |
1988 | } | |
2055cea7 | 1989 | \f |
d90ffc8d JH |
1990 | /* Like extract_insn, but save insn extracted and don't extract again, when |
1991 | called again for the same insn expecting that recog_data still contain the | |
1992 | valid information. This is used primary by gen_attr infrastructure that | |
1993 | often does extract insn again and again. */ | |
1994 | void | |
0c20a65f | 1995 | extract_insn_cached (rtx insn) |
d90ffc8d JH |
1996 | { |
1997 | if (recog_data.insn == insn && INSN_CODE (insn) >= 0) | |
1998 | return; | |
1999 | extract_insn (insn); | |
2000 | recog_data.insn = insn; | |
2001 | } | |
9099f8e1 | 2002 | |
3b6c3bb0 | 2003 | /* Do cached extract_insn, constrain_operands and complain about failures. |
d90ffc8d JH |
2004 | Used by insn_attrtab. */ |
2005 | void | |
0c20a65f | 2006 | extract_constrain_insn_cached (rtx insn) |
d90ffc8d JH |
2007 | { |
2008 | extract_insn_cached (insn); | |
2009 | if (which_alternative == -1 | |
2010 | && !constrain_operands (reload_completed)) | |
2011 | fatal_insn_not_found (insn); | |
2012 | } | |
9099f8e1 | 2013 | |
3b6c3bb0 | 2014 | /* Do cached constrain_operands and complain about failures. */ |
6c698a6d | 2015 | int |
0c20a65f | 2016 | constrain_operands_cached (int strict) |
6c698a6d JH |
2017 | { |
2018 | if (which_alternative == -1) | |
2019 | return constrain_operands (strict); | |
2020 | else | |
2021 | return 1; | |
2022 | } | |
d90ffc8d | 2023 | \f |
1ccbefce RH |
2024 | /* Analyze INSN and fill in recog_data. */ |
2025 | ||
0a578fee | 2026 | void |
0c20a65f | 2027 | extract_insn (rtx insn) |
0a578fee BS |
2028 | { |
2029 | int i; | |
2030 | int icode; | |
2031 | int noperands; | |
2032 | rtx body = PATTERN (insn); | |
2033 | ||
1ccbefce RH |
2034 | recog_data.n_operands = 0; |
2035 | recog_data.n_alternatives = 0; | |
2036 | recog_data.n_dups = 0; | |
0a578fee BS |
2037 | |
2038 | switch (GET_CODE (body)) | |
2039 | { | |
2040 | case USE: | |
2041 | case CLOBBER: | |
2042 | case ASM_INPUT: | |
2043 | case ADDR_VEC: | |
2044 | case ADDR_DIFF_VEC: | |
b5b8b0ac | 2045 | case VAR_LOCATION: |
0a578fee BS |
2046 | return; |
2047 | ||
2048 | case SET: | |
6c698a6d JH |
2049 | if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS) |
2050 | goto asm_insn; | |
2051 | else | |
2052 | goto normal_insn; | |
0a578fee | 2053 | case PARALLEL: |
6c698a6d JH |
2054 | if ((GET_CODE (XVECEXP (body, 0, 0)) == SET |
2055 | && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS) | |
2056 | || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS) | |
2057 | goto asm_insn; | |
2058 | else | |
2059 | goto normal_insn; | |
0a578fee | 2060 | case ASM_OPERANDS: |
6c698a6d | 2061 | asm_insn: |
1ccbefce | 2062 | recog_data.n_operands = noperands = asm_noperands (body); |
0a578fee BS |
2063 | if (noperands >= 0) |
2064 | { | |
0a578fee BS |
2065 | /* This insn is an `asm' with operands. */ |
2066 | ||
2067 | /* expand_asm_operands makes sure there aren't too many operands. */ | |
41374e13 | 2068 | gcc_assert (noperands <= MAX_RECOG_OPERANDS); |
0a578fee BS |
2069 | |
2070 | /* Now get the operand values and constraints out of the insn. */ | |
1ccbefce RH |
2071 | decode_asm_operands (body, recog_data.operand, |
2072 | recog_data.operand_loc, | |
2073 | recog_data.constraints, | |
bff4b63d | 2074 | recog_data.operand_mode, NULL); |
0a578fee BS |
2075 | if (noperands > 0) |
2076 | { | |
1ccbefce RH |
2077 | const char *p = recog_data.constraints[0]; |
2078 | recog_data.n_alternatives = 1; | |
0a578fee | 2079 | while (*p) |
1ccbefce | 2080 | recog_data.n_alternatives += (*p++ == ','); |
0a578fee | 2081 | } |
0a578fee BS |
2082 | break; |
2083 | } | |
6c698a6d | 2084 | fatal_insn_not_found (insn); |
0a578fee BS |
2085 | |
2086 | default: | |
6c698a6d | 2087 | normal_insn: |
0a578fee BS |
2088 | /* Ordinary insn: recognize it, get the operands via insn_extract |
2089 | and get the constraints. */ | |
2090 | ||
2091 | icode = recog_memoized (insn); | |
2092 | if (icode < 0) | |
2093 | fatal_insn_not_found (insn); | |
2094 | ||
a995e389 RH |
2095 | recog_data.n_operands = noperands = insn_data[icode].n_operands; |
2096 | recog_data.n_alternatives = insn_data[icode].n_alternatives; | |
2097 | recog_data.n_dups = insn_data[icode].n_dups; | |
0a578fee BS |
2098 | |
2099 | insn_extract (insn); | |
2100 | ||
2101 | for (i = 0; i < noperands; i++) | |
2102 | { | |
a995e389 | 2103 | recog_data.constraints[i] = insn_data[icode].operand[i].constraint; |
a995e389 | 2104 | recog_data.operand_mode[i] = insn_data[icode].operand[i].mode; |
e7adb6fb JH |
2105 | /* VOIDmode match_operands gets mode from their real operand. */ |
2106 | if (recog_data.operand_mode[i] == VOIDmode) | |
2107 | recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]); | |
0a578fee BS |
2108 | } |
2109 | } | |
0eadeb15 | 2110 | for (i = 0; i < noperands; i++) |
1ccbefce RH |
2111 | recog_data.operand_type[i] |
2112 | = (recog_data.constraints[i][0] == '=' ? OP_OUT | |
2113 | : recog_data.constraints[i][0] == '+' ? OP_INOUT | |
2114 | : OP_IN); | |
f62a15e3 | 2115 | |
41374e13 | 2116 | gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES); |
7ac28727 AK |
2117 | |
2118 | if (INSN_CODE (insn) < 0) | |
2119 | for (i = 0; i < recog_data.n_alternatives; i++) | |
2120 | recog_data.alternative_enabled_p[i] = true; | |
2121 | else | |
2122 | { | |
2123 | recog_data.insn = insn; | |
2124 | for (i = 0; i < recog_data.n_alternatives; i++) | |
2125 | { | |
2126 | which_alternative = i; | |
2127 | recog_data.alternative_enabled_p[i] = get_attr_enabled (insn); | |
2128 | } | |
2129 | } | |
2130 | ||
2131 | recog_data.insn = NULL; | |
2132 | which_alternative = -1; | |
0a578fee BS |
2133 | } |
2134 | ||
f62a15e3 BS |
2135 | /* After calling extract_insn, you can use this function to extract some |
2136 | information from the constraint strings into a more usable form. | |
2137 | The collected data is stored in recog_op_alt. */ | |
2138 | void | |
0c20a65f | 2139 | preprocess_constraints (void) |
f62a15e3 BS |
2140 | { |
2141 | int i; | |
2142 | ||
8c2a5582 RE |
2143 | for (i = 0; i < recog_data.n_operands; i++) |
2144 | memset (recog_op_alt[i], 0, (recog_data.n_alternatives | |
2145 | * sizeof (struct operand_alternative))); | |
2146 | ||
1ccbefce | 2147 | for (i = 0; i < recog_data.n_operands; i++) |
f62a15e3 BS |
2148 | { |
2149 | int j; | |
2150 | struct operand_alternative *op_alt; | |
1ccbefce | 2151 | const char *p = recog_data.constraints[i]; |
f62a15e3 BS |
2152 | |
2153 | op_alt = recog_op_alt[i]; | |
2154 | ||
1ccbefce | 2155 | for (j = 0; j < recog_data.n_alternatives; j++) |
f62a15e3 | 2156 | { |
e3a64162 | 2157 | op_alt[j].cl = NO_REGS; |
f62a15e3 BS |
2158 | op_alt[j].constraint = p; |
2159 | op_alt[j].matches = -1; | |
2160 | op_alt[j].matched = -1; | |
2161 | ||
7ac28727 AK |
2162 | if (!recog_data.alternative_enabled_p[j]) |
2163 | { | |
2164 | p = skip_alternative (p); | |
2165 | continue; | |
2166 | } | |
2167 | ||
f62a15e3 BS |
2168 | if (*p == '\0' || *p == ',') |
2169 | { | |
2170 | op_alt[j].anything_ok = 1; | |
2171 | continue; | |
2172 | } | |
2173 | ||
2174 | for (;;) | |
2175 | { | |
97488870 | 2176 | char c = *p; |
f62a15e3 BS |
2177 | if (c == '#') |
2178 | do | |
97488870 | 2179 | c = *++p; |
f62a15e3 BS |
2180 | while (c != ',' && c != '\0'); |
2181 | if (c == ',' || c == '\0') | |
97488870 R |
2182 | { |
2183 | p++; | |
2184 | break; | |
2185 | } | |
f62a15e3 BS |
2186 | |
2187 | switch (c) | |
2188 | { | |
2189 | case '=': case '+': case '*': case '%': | |
2190 | case 'E': case 'F': case 'G': case 'H': | |
2191 | case 's': case 'i': case 'n': | |
2192 | case 'I': case 'J': case 'K': case 'L': | |
2193 | case 'M': case 'N': case 'O': case 'P': | |
f62a15e3 BS |
2194 | /* These don't say anything we care about. */ |
2195 | break; | |
2196 | ||
2197 | case '?': | |
2198 | op_alt[j].reject += 6; | |
2199 | break; | |
2200 | case '!': | |
2201 | op_alt[j].reject += 600; | |
2202 | break; | |
2203 | case '&': | |
2204 | op_alt[j].earlyclobber = 1; | |
a6a2274a | 2205 | break; |
f62a15e3 BS |
2206 | |
2207 | case '0': case '1': case '2': case '3': case '4': | |
2208 | case '5': case '6': case '7': case '8': case '9': | |
84b72302 RH |
2209 | { |
2210 | char *end; | |
97488870 | 2211 | op_alt[j].matches = strtoul (p, &end, 10); |
84b72302 RH |
2212 | recog_op_alt[op_alt[j].matches][j].matched = i; |
2213 | p = end; | |
2214 | } | |
97488870 | 2215 | continue; |
f62a15e3 | 2216 | |
a4edaf83 | 2217 | case TARGET_MEM_CONSTRAINT: |
f62a15e3 BS |
2218 | op_alt[j].memory_ok = 1; |
2219 | break; | |
2220 | case '<': | |
2221 | op_alt[j].decmem_ok = 1; | |
2222 | break; | |
2223 | case '>': | |
2224 | op_alt[j].incmem_ok = 1; | |
2225 | break; | |
2226 | case 'V': | |
2227 | op_alt[j].nonoffmem_ok = 1; | |
2228 | break; | |
2229 | case 'o': | |
2230 | op_alt[j].offmem_ok = 1; | |
2231 | break; | |
2232 | case 'X': | |
2233 | op_alt[j].anything_ok = 1; | |
2234 | break; | |
2235 | ||
2236 | case 'p': | |
541f7d56 | 2237 | op_alt[j].is_address = 1; |
e3a64162 | 2238 | op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl] |
c4963a0a | 2239 | [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)]; |
f62a15e3 BS |
2240 | break; |
2241 | ||
e3a64162 BI |
2242 | case 'g': |
2243 | case 'r': | |
2244 | op_alt[j].cl = | |
2245 | reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS]; | |
f62a15e3 BS |
2246 | break; |
2247 | ||
2248 | default: | |
97488870 | 2249 | if (EXTRA_MEMORY_CONSTRAINT (c, p)) |
ccfc6cc8 UW |
2250 | { |
2251 | op_alt[j].memory_ok = 1; | |
2252 | break; | |
2253 | } | |
97488870 | 2254 | if (EXTRA_ADDRESS_CONSTRAINT (c, p)) |
ccfc6cc8 UW |
2255 | { |
2256 | op_alt[j].is_address = 1; | |
e3a64162 | 2257 | op_alt[j].cl |
97488870 | 2258 | = (reg_class_subunion |
e3a64162 | 2259 | [(int) op_alt[j].cl] |
c4963a0a BS |
2260 | [(int) base_reg_class (VOIDmode, ADDRESS, |
2261 | SCRATCH)]); | |
ccfc6cc8 UW |
2262 | break; |
2263 | } | |
2264 | ||
e3a64162 | 2265 | op_alt[j].cl |
97488870 | 2266 | = (reg_class_subunion |
e3a64162 | 2267 | [(int) op_alt[j].cl] |
97488870 | 2268 | [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]); |
f62a15e3 BS |
2269 | break; |
2270 | } | |
97488870 | 2271 | p += CONSTRAINT_LEN (c, p); |
f62a15e3 BS |
2272 | } |
2273 | } | |
2274 | } | |
2275 | } | |
a6a2274a | 2276 | |
0eadeb15 | 2277 | /* Check the operands of an insn against the insn's operand constraints |
2055cea7 | 2278 | and return 1 if they are valid. |
0eadeb15 BS |
2279 | The information about the insn's operands, constraints, operand modes |
2280 | etc. is obtained from the global variables set up by extract_insn. | |
2055cea7 RK |
2281 | |
2282 | WHICH_ALTERNATIVE is set to a number which indicates which | |
2283 | alternative of constraints was matched: 0 for the first alternative, | |
2284 | 1 for the next, etc. | |
2285 | ||
97488870 | 2286 | In addition, when two operands are required to match |
2055cea7 RK |
2287 | and it happens that the output operand is (reg) while the |
2288 | input operand is --(reg) or ++(reg) (a pre-inc or pre-dec), | |
2289 | make the output operand look like the input. | |
2290 | This is because the output operand is the one the template will print. | |
2291 | ||
2292 | This is used in final, just before printing the assembler code and by | |
2293 | the routines that determine an insn's attribute. | |
2294 | ||
40f03658 | 2295 | If STRICT is a positive nonzero value, it means that we have been |
2055cea7 RK |
2296 | called after reload has been completed. In that case, we must |
2297 | do all checks strictly. If it is zero, it means that we have been called | |
2298 | before reload has completed. In that case, we first try to see if we can | |
2299 | find an alternative that matches strictly. If not, we try again, this | |
2300 | time assuming that reload will fix up the insn. This provides a "best | |
2301 | guess" for the alternative and is used to compute attributes of insns prior | |
2302 | to reload. A negative value of STRICT is used for this internal call. */ | |
2303 | ||
2304 | struct funny_match | |
2305 | { | |
55d796da | 2306 | int this_op, other; |
2055cea7 RK |
2307 | }; |
2308 | ||
2309 | int | |
0c20a65f | 2310 | constrain_operands (int strict) |
2055cea7 | 2311 | { |
9b3142b3 | 2312 | const char *constraints[MAX_RECOG_OPERANDS]; |
9e21be9d | 2313 | int matching_operands[MAX_RECOG_OPERANDS]; |
9e21be9d | 2314 | int earlyclobber[MAX_RECOG_OPERANDS]; |
b3694847 | 2315 | int c; |
2055cea7 RK |
2316 | |
2317 | struct funny_match funny_match[MAX_RECOG_OPERANDS]; | |
2318 | int funny_match_index; | |
2055cea7 | 2319 | |
4667f705 | 2320 | which_alternative = 0; |
1ccbefce | 2321 | if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0) |
2055cea7 RK |
2322 | return 1; |
2323 | ||
1ccbefce | 2324 | for (c = 0; c < recog_data.n_operands; c++) |
9e21be9d | 2325 | { |
1ccbefce | 2326 | constraints[c] = recog_data.constraints[c]; |
9e21be9d | 2327 | matching_operands[c] = -1; |
9e21be9d | 2328 | } |
2055cea7 | 2329 | |
4667f705 | 2330 | do |
2055cea7 | 2331 | { |
9ea88834 | 2332 | int seen_earlyclobber_at = -1; |
b3694847 | 2333 | int opno; |
2055cea7 RK |
2334 | int lose = 0; |
2335 | funny_match_index = 0; | |
2336 | ||
7ac28727 AK |
2337 | if (!recog_data.alternative_enabled_p[which_alternative]) |
2338 | { | |
2339 | int i; | |
2340 | ||
2341 | for (i = 0; i < recog_data.n_operands; i++) | |
2342 | constraints[i] = skip_alternative (constraints[i]); | |
2343 | ||
2344 | which_alternative++; | |
2345 | continue; | |
2346 | } | |
2347 | ||
1ccbefce | 2348 | for (opno = 0; opno < recog_data.n_operands; opno++) |
2055cea7 | 2349 | { |
b3694847 | 2350 | rtx op = recog_data.operand[opno]; |
2055cea7 | 2351 | enum machine_mode mode = GET_MODE (op); |
b3694847 | 2352 | const char *p = constraints[opno]; |
2055cea7 RK |
2353 | int offset = 0; |
2354 | int win = 0; | |
2355 | int val; | |
97488870 | 2356 | int len; |
2055cea7 | 2357 | |
9e21be9d RK |
2358 | earlyclobber[opno] = 0; |
2359 | ||
b85f21c0 | 2360 | /* A unary operator may be accepted by the predicate, but it |
38a448ca | 2361 | is irrelevant for matching constraints. */ |
ec8e098d | 2362 | if (UNARY_P (op)) |
b85f21c0 ILT |
2363 | op = XEXP (op, 0); |
2364 | ||
2055cea7 RK |
2365 | if (GET_CODE (op) == SUBREG) |
2366 | { | |
f8cfc6aa | 2367 | if (REG_P (SUBREG_REG (op)) |
2055cea7 | 2368 | && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER) |
ddef6bc7 JJ |
2369 | offset = subreg_regno_offset (REGNO (SUBREG_REG (op)), |
2370 | GET_MODE (SUBREG_REG (op)), | |
2371 | SUBREG_BYTE (op), | |
2372 | GET_MODE (op)); | |
2055cea7 RK |
2373 | op = SUBREG_REG (op); |
2374 | } | |
2375 | ||
2376 | /* An empty constraint or empty alternative | |
2377 | allows anything which matched the pattern. */ | |
2378 | if (*p == 0 || *p == ',') | |
2379 | win = 1; | |
2380 | ||
97488870 R |
2381 | do |
2382 | switch (c = *p, len = CONSTRAINT_LEN (c, p), c) | |
2055cea7 | 2383 | { |
97488870 R |
2384 | case '\0': |
2385 | len = 0; | |
2386 | break; | |
2387 | case ',': | |
2388 | c = '\0'; | |
2389 | break; | |
2390 | ||
c5c76735 JL |
2391 | case '?': case '!': case '*': case '%': |
2392 | case '=': case '+': | |
2055cea7 RK |
2393 | break; |
2394 | ||
4d3067db RK |
2395 | case '#': |
2396 | /* Ignore rest of this alternative as far as | |
2397 | constraint checking is concerned. */ | |
97488870 | 2398 | do |
4d3067db | 2399 | p++; |
97488870 R |
2400 | while (*p && *p != ','); |
2401 | len = 0; | |
4d3067db RK |
2402 | break; |
2403 | ||
9e21be9d RK |
2404 | case '&': |
2405 | earlyclobber[opno] = 1; | |
9ea88834 SB |
2406 | if (seen_earlyclobber_at < 0) |
2407 | seen_earlyclobber_at = opno; | |
9e21be9d RK |
2408 | break; |
2409 | ||
c5c76735 JL |
2410 | case '0': case '1': case '2': case '3': case '4': |
2411 | case '5': case '6': case '7': case '8': case '9': | |
84b72302 RH |
2412 | { |
2413 | /* This operand must be the same as a previous one. | |
2414 | This kind of constraint is used for instructions such | |
2415 | as add when they take only two operands. | |
2416 | ||
2417 | Note that the lower-numbered operand is passed first. | |
2418 | ||
2419 | If we are not testing strictly, assume that this | |
2420 | constraint will be satisfied. */ | |
2421 | ||
2422 | char *end; | |
2423 | int match; | |
2424 | ||
97488870 | 2425 | match = strtoul (p, &end, 10); |
84b72302 RH |
2426 | p = end; |
2427 | ||
2428 | if (strict < 0) | |
2429 | val = 1; | |
2430 | else | |
2431 | { | |
2432 | rtx op1 = recog_data.operand[match]; | |
2433 | rtx op2 = recog_data.operand[opno]; | |
2434 | ||
2435 | /* A unary operator may be accepted by the predicate, | |
2436 | but it is irrelevant for matching constraints. */ | |
ec8e098d | 2437 | if (UNARY_P (op1)) |
84b72302 | 2438 | op1 = XEXP (op1, 0); |
ec8e098d | 2439 | if (UNARY_P (op2)) |
84b72302 RH |
2440 | op2 = XEXP (op2, 0); |
2441 | ||
2442 | val = operands_match_p (op1, op2); | |
2443 | } | |
2444 | ||
2445 | matching_operands[opno] = match; | |
2446 | matching_operands[match] = opno; | |
2447 | ||
2448 | if (val != 0) | |
2449 | win = 1; | |
2450 | ||
2451 | /* If output is *x and input is *--x, arrange later | |
2452 | to change the output to *--x as well, since the | |
2453 | output op is the one that will be printed. */ | |
2454 | if (val == 2 && strict > 0) | |
2455 | { | |
55d796da | 2456 | funny_match[funny_match_index].this_op = opno; |
84b72302 RH |
2457 | funny_match[funny_match_index++].other = match; |
2458 | } | |
2459 | } | |
97488870 | 2460 | len = 0; |
2055cea7 RK |
2461 | break; |
2462 | ||
2463 | case 'p': | |
2464 | /* p is used for address_operands. When we are called by | |
a8647766 RK |
2465 | gen_reload, no one will have checked that the address is |
2466 | strictly valid, i.e., that all pseudos requiring hard regs | |
2467 | have gotten them. */ | |
2055cea7 | 2468 | if (strict <= 0 |
1ccbefce | 2469 | || (strict_memory_address_p (recog_data.operand_mode[opno], |
0eadeb15 | 2470 | op))) |
2055cea7 RK |
2471 | win = 1; |
2472 | break; | |
2473 | ||
2474 | /* No need to check general_operand again; | |
74e3e54a HPN |
2475 | it was done in insn-recog.c. Well, except that reload |
2476 | doesn't check the validity of its replacements, but | |
2477 | that should only matter when there's a bug. */ | |
2055cea7 RK |
2478 | case 'g': |
2479 | /* Anything goes unless it is a REG and really has a hard reg | |
2480 | but the hard reg is not in the class GENERAL_REGS. */ | |
74e3e54a HPN |
2481 | if (REG_P (op)) |
2482 | { | |
2483 | if (strict < 0 | |
2484 | || GENERAL_REGS == ALL_REGS | |
2485 | || (reload_in_progress | |
2486 | && REGNO (op) >= FIRST_PSEUDO_REGISTER) | |
2487 | || reg_fits_class_p (op, GENERAL_REGS, offset, mode)) | |
2488 | win = 1; | |
2489 | } | |
2490 | else if (strict < 0 || general_operand (op, mode)) | |
2055cea7 RK |
2491 | win = 1; |
2492 | break; | |
2493 | ||
2055cea7 | 2494 | case 'X': |
0f41302f MS |
2495 | /* This is used for a MATCH_SCRATCH in the cases when |
2496 | we don't actually need anything. So anything goes | |
2497 | any time. */ | |
2055cea7 RK |
2498 | win = 1; |
2499 | break; | |
2500 | ||
a4edaf83 | 2501 | case TARGET_MEM_CONSTRAINT: |
47069ecb RH |
2502 | /* Memory operands must be valid, to the extent |
2503 | required by STRICT. */ | |
3c0cb5de | 2504 | if (MEM_P (op)) |
47069ecb RH |
2505 | { |
2506 | if (strict > 0 | |
2507 | && !strict_memory_address_p (GET_MODE (op), | |
2508 | XEXP (op, 0))) | |
2509 | break; | |
2510 | if (strict == 0 | |
2511 | && !memory_address_p (GET_MODE (op), XEXP (op, 0))) | |
2512 | break; | |
2513 | win = 1; | |
2514 | } | |
2515 | /* Before reload, accept what reload can turn into mem. */ | |
2516 | else if (strict < 0 && CONSTANT_P (op)) | |
2517 | win = 1; | |
2518 | /* During reload, accept a pseudo */ | |
f8cfc6aa | 2519 | else if (reload_in_progress && REG_P (op) |
47069ecb | 2520 | && REGNO (op) >= FIRST_PSEUDO_REGISTER) |
2055cea7 RK |
2521 | win = 1; |
2522 | break; | |
2523 | ||
2524 | case '<': | |
3c0cb5de | 2525 | if (MEM_P (op) |
2055cea7 RK |
2526 | && (GET_CODE (XEXP (op, 0)) == PRE_DEC |
2527 | || GET_CODE (XEXP (op, 0)) == POST_DEC)) | |
2528 | win = 1; | |
2529 | break; | |
2530 | ||
2531 | case '>': | |
3c0cb5de | 2532 | if (MEM_P (op) |
2055cea7 RK |
2533 | && (GET_CODE (XEXP (op, 0)) == PRE_INC |
2534 | || GET_CODE (XEXP (op, 0)) == POST_INC)) | |
2535 | win = 1; | |
2536 | break; | |
2537 | ||
2538 | case 'E': | |
2055cea7 | 2539 | case 'F': |
bf7cd754 R |
2540 | if (GET_CODE (op) == CONST_DOUBLE |
2541 | || (GET_CODE (op) == CONST_VECTOR | |
2542 | && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT)) | |
2055cea7 RK |
2543 | win = 1; |
2544 | break; | |
2545 | ||
2546 | case 'G': | |
2547 | case 'H': | |
2548 | if (GET_CODE (op) == CONST_DOUBLE | |
97488870 | 2549 | && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p)) |
2055cea7 RK |
2550 | win = 1; |
2551 | break; | |
2552 | ||
2553 | case 's': | |
481683e1 | 2554 | if (CONST_INT_P (op) |
2055cea7 RK |
2555 | || (GET_CODE (op) == CONST_DOUBLE |
2556 | && GET_MODE (op) == VOIDmode)) | |
2557 | break; | |
2558 | case 'i': | |
2559 | if (CONSTANT_P (op)) | |
2560 | win = 1; | |
2561 | break; | |
2562 | ||
2563 | case 'n': | |
481683e1 | 2564 | if (CONST_INT_P (op) |
2055cea7 RK |
2565 | || (GET_CODE (op) == CONST_DOUBLE |
2566 | && GET_MODE (op) == VOIDmode)) | |
2567 | win = 1; | |
2568 | break; | |
2569 | ||
2570 | case 'I': | |
2571 | case 'J': | |
2572 | case 'K': | |
2573 | case 'L': | |
2574 | case 'M': | |
2575 | case 'N': | |
2576 | case 'O': | |
2577 | case 'P': | |
481683e1 | 2578 | if (CONST_INT_P (op) |
97488870 | 2579 | && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p)) |
2055cea7 RK |
2580 | win = 1; |
2581 | break; | |
2582 | ||
2055cea7 | 2583 | case 'V': |
3c0cb5de | 2584 | if (MEM_P (op) |
69f724c0 JL |
2585 | && ((strict > 0 && ! offsettable_memref_p (op)) |
2586 | || (strict < 0 | |
3c0cb5de | 2587 | && !(CONSTANT_P (op) || MEM_P (op))) |
69f724c0 | 2588 | || (reload_in_progress |
f8cfc6aa | 2589 | && !(REG_P (op) |
69f724c0 | 2590 | && REGNO (op) >= FIRST_PSEUDO_REGISTER)))) |
2055cea7 RK |
2591 | win = 1; |
2592 | break; | |
2593 | ||
2594 | case 'o': | |
2595 | if ((strict > 0 && offsettable_memref_p (op)) | |
2596 | || (strict == 0 && offsettable_nonstrict_memref_p (op)) | |
2597 | /* Before reload, accept what reload can handle. */ | |
2598 | || (strict < 0 | |
3c0cb5de | 2599 | && (CONSTANT_P (op) || MEM_P (op))) |
3c3eeea6 | 2600 | /* During reload, accept a pseudo */ |
f8cfc6aa | 2601 | || (reload_in_progress && REG_P (op) |
3c3eeea6 | 2602 | && REGNO (op) >= FIRST_PSEUDO_REGISTER)) |
2055cea7 RK |
2603 | win = 1; |
2604 | break; | |
2605 | ||
2606 | default: | |
c2cba7a9 | 2607 | { |
e3a64162 | 2608 | enum reg_class cl; |
c2cba7a9 | 2609 | |
e3a64162 | 2610 | cl = (c == 'r' |
97488870 | 2611 | ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p)); |
e3a64162 | 2612 | if (cl != NO_REGS) |
c2cba7a9 RH |
2613 | { |
2614 | if (strict < 0 | |
2615 | || (strict == 0 | |
f8cfc6aa | 2616 | && REG_P (op) |
c2cba7a9 RH |
2617 | && REGNO (op) >= FIRST_PSEUDO_REGISTER) |
2618 | || (strict == 0 && GET_CODE (op) == SCRATCH) | |
f8cfc6aa | 2619 | || (REG_P (op) |
e3a64162 | 2620 | && reg_fits_class_p (op, cl, offset, mode))) |
c2cba7a9 RH |
2621 | win = 1; |
2622 | } | |
97488870 R |
2623 | #ifdef EXTRA_CONSTRAINT_STR |
2624 | else if (EXTRA_CONSTRAINT_STR (op, c, p)) | |
c2cba7a9 | 2625 | win = 1; |
ccfc6cc8 | 2626 | |
3b6c3bb0 JW |
2627 | else if (EXTRA_MEMORY_CONSTRAINT (c, p) |
2628 | /* Every memory operand can be reloaded to fit. */ | |
3c0cb5de | 2629 | && ((strict < 0 && MEM_P (op)) |
3b6c3bb0 JW |
2630 | /* Before reload, accept what reload can turn |
2631 | into mem. */ | |
2632 | || (strict < 0 && CONSTANT_P (op)) | |
2633 | /* During reload, accept a pseudo */ | |
f8cfc6aa | 2634 | || (reload_in_progress && REG_P (op) |
3b6c3bb0 JW |
2635 | && REGNO (op) >= FIRST_PSEUDO_REGISTER))) |
2636 | win = 1; | |
2637 | else if (EXTRA_ADDRESS_CONSTRAINT (c, p) | |
2638 | /* Every address operand can be reloaded to fit. */ | |
2639 | && strict < 0) | |
2640 | win = 1; | |
c2cba7a9 RH |
2641 | #endif |
2642 | break; | |
2643 | } | |
2055cea7 | 2644 | } |
97488870 | 2645 | while (p += len, c); |
2055cea7 RK |
2646 | |
2647 | constraints[opno] = p; | |
2648 | /* If this operand did not win somehow, | |
2649 | this alternative loses. */ | |
2650 | if (! win) | |
2651 | lose = 1; | |
2652 | } | |
2653 | /* This alternative won; the operands are ok. | |
2654 | Change whichever operands this alternative says to change. */ | |
2655 | if (! lose) | |
2656 | { | |
9e21be9d RK |
2657 | int opno, eopno; |
2658 | ||
2659 | /* See if any earlyclobber operand conflicts with some other | |
2660 | operand. */ | |
2661 | ||
9ea88834 SB |
2662 | if (strict > 0 && seen_earlyclobber_at >= 0) |
2663 | for (eopno = seen_earlyclobber_at; | |
2664 | eopno < recog_data.n_operands; | |
2665 | eopno++) | |
62946075 RS |
2666 | /* Ignore earlyclobber operands now in memory, |
2667 | because we would often report failure when we have | |
2668 | two memory operands, one of which was formerly a REG. */ | |
2669 | if (earlyclobber[eopno] | |
f8cfc6aa | 2670 | && REG_P (recog_data.operand[eopno])) |
1ccbefce | 2671 | for (opno = 0; opno < recog_data.n_operands; opno++) |
3c0cb5de | 2672 | if ((MEM_P (recog_data.operand[opno]) |
1ccbefce | 2673 | || recog_data.operand_type[opno] != OP_OUT) |
9e21be9d | 2674 | && opno != eopno |
0f41302f | 2675 | /* Ignore things like match_operator operands. */ |
1ccbefce | 2676 | && *recog_data.constraints[opno] != 0 |
9e21be9d | 2677 | && ! (matching_operands[opno] == eopno |
1ccbefce RH |
2678 | && operands_match_p (recog_data.operand[opno], |
2679 | recog_data.operand[eopno])) | |
2680 | && ! safe_from_earlyclobber (recog_data.operand[opno], | |
2681 | recog_data.operand[eopno])) | |
9e21be9d RK |
2682 | lose = 1; |
2683 | ||
2684 | if (! lose) | |
2055cea7 | 2685 | { |
9e21be9d RK |
2686 | while (--funny_match_index >= 0) |
2687 | { | |
1ccbefce | 2688 | recog_data.operand[funny_match[funny_match_index].other] |
55d796da | 2689 | = recog_data.operand[funny_match[funny_match_index].this_op]; |
9e21be9d RK |
2690 | } |
2691 | ||
2692 | return 1; | |
2055cea7 | 2693 | } |
2055cea7 RK |
2694 | } |
2695 | ||
2696 | which_alternative++; | |
2697 | } | |
4667f705 | 2698 | while (which_alternative < recog_data.n_alternatives); |
2055cea7 | 2699 | |
d90ffc8d | 2700 | which_alternative = -1; |
2055cea7 RK |
2701 | /* If we are about to reject this, but we are not to test strictly, |
2702 | try a very loose test. Only return failure if it fails also. */ | |
2703 | if (strict == 0) | |
0eadeb15 | 2704 | return constrain_operands (-1); |
2055cea7 RK |
2705 | else |
2706 | return 0; | |
2707 | } | |
2708 | ||
2709 | /* Return 1 iff OPERAND (assumed to be a REG rtx) | |
38a448ca | 2710 | is a hard reg in class CLASS when its regno is offset by OFFSET |
2055cea7 RK |
2711 | and changed to mode MODE. |
2712 | If REG occupies multiple hard regs, all of them must be in CLASS. */ | |
2713 | ||
2714 | int | |
e3a64162 | 2715 | reg_fits_class_p (rtx operand, enum reg_class cl, int offset, |
0c20a65f | 2716 | enum machine_mode mode) |
2055cea7 | 2717 | { |
b3694847 | 2718 | int regno = REGNO (operand); |
f38840db ZW |
2719 | |
2720 | if (cl == NO_REGS) | |
2721 | return 0; | |
2722 | ||
09e18274 RS |
2723 | return (regno < FIRST_PSEUDO_REGISTER |
2724 | && in_hard_reg_set_p (reg_class_contents[(int) cl], | |
2725 | mode, regno + offset)); | |
2055cea7 | 2726 | } |
ca545bb5 | 2727 | \f |
d9e7c8e3 RS |
2728 | /* Split single instruction. Helper function for split_all_insns and |
2729 | split_all_insns_noflow. Return last insn in the sequence if successful, | |
2730 | or NULL if unsuccessful. */ | |
2731 | ||
d58d4c12 | 2732 | static rtx |
0c20a65f | 2733 | split_insn (rtx insn) |
d58d4c12 | 2734 | { |
d9e7c8e3 RS |
2735 | /* Split insns here to get max fine-grain parallelism. */ |
2736 | rtx first = PREV_INSN (insn); | |
2737 | rtx last = try_split (PATTERN (insn), insn, 1); | |
a2a92083 | 2738 | rtx insn_set, last_set, note; |
d9e7c8e3 RS |
2739 | |
2740 | if (last == insn) | |
2741 | return NULL_RTX; | |
2742 | ||
a2a92083 RS |
2743 | /* If the original instruction was a single set that was known to be |
2744 | equivalent to a constant, see if we can say the same about the last | |
2745 | instruction in the split sequence. The two instructions must set | |
2746 | the same destination. */ | |
2747 | insn_set = single_set (insn); | |
2748 | if (insn_set) | |
2749 | { | |
2750 | last_set = single_set (last); | |
2751 | if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set))) | |
2752 | { | |
2753 | note = find_reg_equal_equiv_note (insn); | |
2754 | if (note && CONSTANT_P (XEXP (note, 0))) | |
2755 | set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0)); | |
2756 | else if (CONSTANT_P (SET_SRC (insn_set))) | |
2757 | set_unique_reg_note (last, REG_EQUAL, SET_SRC (insn_set)); | |
2758 | } | |
2759 | } | |
2760 | ||
d9e7c8e3 | 2761 | /* try_split returns the NOTE that INSN became. */ |
6773e15f | 2762 | SET_INSN_DELETED (insn); |
d58d4c12 | 2763 | |
d9e7c8e3 RS |
2764 | /* ??? Coddle to md files that generate subregs in post-reload |
2765 | splitters instead of computing the proper hard register. */ | |
2766 | if (reload_completed && first != last) | |
2767 | { | |
2768 | first = NEXT_INSN (first); | |
2769 | for (;;) | |
d58d4c12 | 2770 | { |
d9e7c8e3 RS |
2771 | if (INSN_P (first)) |
2772 | cleanup_subreg_operands (first); | |
2773 | if (first == last) | |
2774 | break; | |
2775 | first = NEXT_INSN (first); | |
d58d4c12 JH |
2776 | } |
2777 | } | |
a2a92083 | 2778 | |
d9e7c8e3 | 2779 | return last; |
d58d4c12 | 2780 | } |
d9e7c8e3 | 2781 | |
d3a923ee | 2782 | /* Split all insns in the function. If UPD_LIFE, update life info after. */ |
ca545bb5 BM |
2783 | |
2784 | void | |
6fb5fa3c | 2785 | split_all_insns (void) |
ca545bb5 | 2786 | { |
d3a923ee | 2787 | sbitmap blocks; |
9381bbc9 | 2788 | bool changed; |
e0082a72 | 2789 | basic_block bb; |
d3a923ee | 2790 | |
d55bc081 | 2791 | blocks = sbitmap_alloc (last_basic_block); |
d3a923ee | 2792 | sbitmap_zero (blocks); |
9381bbc9 | 2793 | changed = false; |
ca545bb5 | 2794 | |
e0082a72 | 2795 | FOR_EACH_BB_REVERSE (bb) |
ca545bb5 | 2796 | { |
d3a923ee | 2797 | rtx insn, next; |
83a49407 | 2798 | bool finish = false; |
ca545bb5 | 2799 | |
a8ba47cb | 2800 | rtl_profile_for_bb (bb); |
a813c111 | 2801 | for (insn = BB_HEAD (bb); !finish ; insn = next) |
ca545bb5 | 2802 | { |
d3a923ee RH |
2803 | /* Can't use `next_real_insn' because that might go across |
2804 | CODE_LABELS and short-out basic blocks. */ | |
2805 | next = NEXT_INSN (insn); | |
a813c111 | 2806 | finish = (insn == BB_END (bb)); |
d9e7c8e3 | 2807 | if (INSN_P (insn)) |
ca545bb5 | 2808 | { |
d9e7c8e3 RS |
2809 | rtx set = single_set (insn); |
2810 | ||
2811 | /* Don't split no-op move insns. These should silently | |
2812 | disappear later in final. Splitting such insns would | |
d70dcf29 | 2813 | break the code that handles LIBCALL blocks. */ |
d9e7c8e3 RS |
2814 | if (set && set_noop_p (set)) |
2815 | { | |
2816 | /* Nops get in the way while scheduling, so delete them | |
2817 | now if register allocation has already been done. It | |
2818 | is too risky to try to do this before register | |
2819 | allocation, and there are unlikely to be very many | |
2820 | nops then anyways. */ | |
2821 | if (reload_completed) | |
d9e7c8e3 | 2822 | delete_insn_and_edges (insn); |
d9e7c8e3 RS |
2823 | } |
2824 | else | |
2825 | { | |
2826 | rtx last = split_insn (insn); | |
2827 | if (last) | |
2828 | { | |
2829 | /* The split sequence may include barrier, but the | |
2830 | BB boundary we are interested in will be set to | |
2831 | previous one. */ | |
2832 | ||
4b4bf941 | 2833 | while (BARRIER_P (last)) |
d9e7c8e3 RS |
2834 | last = PREV_INSN (last); |
2835 | SET_BIT (blocks, bb->index); | |
2836 | changed = true; | |
2837 | } | |
2838 | } | |
ca545bb5 BM |
2839 | } |
2840 | } | |
ca545bb5 | 2841 | } |
d3a923ee | 2842 | |
a8ba47cb | 2843 | default_rtl_profile (); |
0005550b | 2844 | if (changed) |
6fb5fa3c | 2845 | find_many_sub_basic_blocks (blocks); |
9381bbc9 | 2846 | |
0005550b JH |
2847 | #ifdef ENABLE_CHECKING |
2848 | verify_flow_info (); | |
2849 | #endif | |
d3a923ee RH |
2850 | |
2851 | sbitmap_free (blocks); | |
ca545bb5 | 2852 | } |
6f862f2f | 2853 | |
a6a2274a | 2854 | /* Same as split_all_insns, but do not expect CFG to be available. |
d55d8fc7 | 2855 | Used by machine dependent reorg passes. */ |
6f862f2f | 2856 | |
c2924966 | 2857 | unsigned int |
0c20a65f | 2858 | split_all_insns_noflow (void) |
6f862f2f JH |
2859 | { |
2860 | rtx next, insn; | |
2861 | ||
2862 | for (insn = get_insns (); insn; insn = next) | |
2863 | { | |
2864 | next = NEXT_INSN (insn); | |
d9e7c8e3 RS |
2865 | if (INSN_P (insn)) |
2866 | { | |
2867 | /* Don't split no-op move insns. These should silently | |
2868 | disappear later in final. Splitting such insns would | |
d70dcf29 | 2869 | break the code that handles LIBCALL blocks. */ |
d9e7c8e3 RS |
2870 | rtx set = single_set (insn); |
2871 | if (set && set_noop_p (set)) | |
2872 | { | |
2873 | /* Nops get in the way while scheduling, so delete them | |
2874 | now if register allocation has already been done. It | |
2875 | is too risky to try to do this before register | |
2876 | allocation, and there are unlikely to be very many | |
2877 | nops then anyways. | |
2878 | ||
2879 | ??? Should we use delete_insn when the CFG isn't valid? */ | |
2880 | if (reload_completed) | |
2881 | delete_insn_and_edges (insn); | |
2882 | } | |
2883 | else | |
2884 | split_insn (insn); | |
2885 | } | |
6f862f2f | 2886 | } |
c2924966 | 2887 | return 0; |
6f862f2f | 2888 | } |
ede7cd44 RH |
2889 | \f |
2890 | #ifdef HAVE_peephole2 | |
23280139 RH |
/* One slot of the peephole2 circular window: an insn together with the
   set of registers live immediately before it.  */
struct peep2_insn_data
{
  rtx insn;             /* The recorded insn, NULL_RTX if the slot is empty.  */
  regset live_before;   /* Registers live just before INSN.  */
};

/* Circular buffer of the most recently scanned insns (newest at
   PEEP2_CURRENT); one extra slot holds the block-end marker.  */
static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
static int peep2_current;
/* The number of instructions available to match a peep2.  */
int peep2_current_count;

/* A non-insn marker indicating the last insn of the block.
   The live_before regset for this element is correct, indicating
   DF_LIVE_OUT for the block.  */
#define PEEP2_EOB	pc_rtx
2906 | ||
2907 | /* Return the Nth non-note insn after `current', or return NULL_RTX if it | |
2908 | does not exist. Used by the recognizer to find the next insn to match | |
2909 | in a multi-insn pattern. */ | |
d3a923ee | 2910 | |
ede7cd44 | 2911 | rtx |
0c20a65f | 2912 | peep2_next_insn (int n) |
ede7cd44 | 2913 | { |
0cd6c85a | 2914 | gcc_assert (n <= peep2_current_count); |
23280139 RH |
2915 | |
2916 | n += peep2_current; | |
2917 | if (n >= MAX_INSNS_PER_PEEP2 + 1) | |
2918 | n -= MAX_INSNS_PER_PEEP2 + 1; | |
2919 | ||
23280139 RH |
2920 | return peep2_insn_data[n].insn; |
2921 | } | |
2922 | ||
2923 | /* Return true if REGNO is dead before the Nth non-note insn | |
2924 | after `current'. */ | |
2925 | ||
2926 | int | |
0c20a65f | 2927 | peep2_regno_dead_p (int ofs, int regno) |
23280139 | 2928 | { |
41374e13 | 2929 | gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1); |
23280139 RH |
2930 | |
2931 | ofs += peep2_current; | |
2932 | if (ofs >= MAX_INSNS_PER_PEEP2 + 1) | |
2933 | ofs -= MAX_INSNS_PER_PEEP2 + 1; | |
2934 | ||
41374e13 | 2935 | gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX); |
23280139 RH |
2936 | |
2937 | return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno); | |
2938 | } | |
2939 | ||
2940 | /* Similarly for a REG. */ | |
2941 | ||
2942 | int | |
0c20a65f | 2943 | peep2_reg_dead_p (int ofs, rtx reg) |
23280139 RH |
2944 | { |
2945 | int regno, n; | |
2946 | ||
41374e13 | 2947 | gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1); |
23280139 RH |
2948 | |
2949 | ofs += peep2_current; | |
2950 | if (ofs >= MAX_INSNS_PER_PEEP2 + 1) | |
2951 | ofs -= MAX_INSNS_PER_PEEP2 + 1; | |
2952 | ||
41374e13 | 2953 | gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX); |
23280139 RH |
2954 | |
2955 | regno = REGNO (reg); | |
66fd46b6 | 2956 | n = hard_regno_nregs[regno][GET_MODE (reg)]; |
23280139 RH |
2957 | while (--n >= 0) |
2958 | if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n)) | |
2959 | return 0; | |
2960 | return 1; | |
2961 | } | |
2962 | ||
2963 | /* Try to find a hard register of mode MODE, matching the register class in | |
2964 | CLASS_STR, which is available at the beginning of insn CURRENT_INSN and | |
2965 | remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX, | |
2966 | in which case the only condition is that the register must be available | |
2967 | before CURRENT_INSN. | |
2968 | Registers that already have bits set in REG_SET will not be considered. | |
2969 | ||
2970 | If an appropriate register is available, it will be returned and the | |
2971 | corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is | |
2972 | returned. */ | |
2973 | ||
2974 | rtx | |
0c20a65f AJ |
2975 | peep2_find_free_register (int from, int to, const char *class_str, |
2976 | enum machine_mode mode, HARD_REG_SET *reg_set) | |
23280139 RH |
2977 | { |
2978 | static int search_ofs; | |
e3a64162 | 2979 | enum reg_class cl; |
23280139 RH |
2980 | HARD_REG_SET live; |
2981 | int i; | |
2982 | ||
41374e13 NS |
2983 | gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1); |
2984 | gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1); | |
23280139 RH |
2985 | |
2986 | from += peep2_current; | |
2987 | if (from >= MAX_INSNS_PER_PEEP2 + 1) | |
2988 | from -= MAX_INSNS_PER_PEEP2 + 1; | |
2989 | to += peep2_current; | |
2990 | if (to >= MAX_INSNS_PER_PEEP2 + 1) | |
2991 | to -= MAX_INSNS_PER_PEEP2 + 1; | |
2992 | ||
41374e13 | 2993 | gcc_assert (peep2_insn_data[from].insn != NULL_RTX); |
23280139 RH |
2994 | REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before); |
2995 | ||
2996 | while (from != to) | |
ede7cd44 | 2997 | { |
23280139 RH |
2998 | HARD_REG_SET this_live; |
2999 | ||
3000 | if (++from >= MAX_INSNS_PER_PEEP2 + 1) | |
3001 | from = 0; | |
41374e13 | 3002 | gcc_assert (peep2_insn_data[from].insn != NULL_RTX); |
23280139 RH |
3003 | REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before); |
3004 | IOR_HARD_REG_SET (live, this_live); | |
3005 | } | |
3006 | ||
e3a64162 | 3007 | cl = (class_str[0] == 'r' ? GENERAL_REGS |
97488870 | 3008 | : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str)); |
23280139 RH |
3009 | |
3010 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) | |
3011 | { | |
3012 | int raw_regno, regno, success, j; | |
3013 | ||
3014 | /* Distribute the free registers as much as possible. */ | |
3015 | raw_regno = search_ofs + i; | |
3016 | if (raw_regno >= FIRST_PSEUDO_REGISTER) | |
3017 | raw_regno -= FIRST_PSEUDO_REGISTER; | |
3018 | #ifdef REG_ALLOC_ORDER | |
3019 | regno = reg_alloc_order[raw_regno]; | |
3020 | #else | |
3021 | regno = raw_regno; | |
3022 | #endif | |
3023 | ||
3024 | /* Don't allocate fixed registers. */ | |
3025 | if (fixed_regs[regno]) | |
3026 | continue; | |
dbc42c44 AS |
3027 | /* Don't allocate global registers. */ |
3028 | if (global_regs[regno]) | |
3029 | continue; | |
23280139 | 3030 | /* Make sure the register is of the right class. */ |
e3a64162 | 3031 | if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno)) |
23280139 RH |
3032 | continue; |
3033 | /* And can support the mode we need. */ | |
3034 | if (! HARD_REGNO_MODE_OK (regno, mode)) | |
3035 | continue; | |
3036 | /* And that we don't create an extra save/restore. */ | |
6fb5fa3c | 3037 | if (! call_used_regs[regno] && ! df_regs_ever_live_p (regno)) |
23280139 | 3038 | continue; |
dbc42c44 AS |
3039 | if (! targetm.hard_regno_scratch_ok (regno)) |
3040 | continue; | |
3041 | ||
23280139 RH |
3042 | /* And we don't clobber traceback for noreturn functions. */ |
3043 | if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM) | |
3044 | && (! reload_completed || frame_pointer_needed)) | |
3045 | continue; | |
3046 | ||
3047 | success = 1; | |
66fd46b6 | 3048 | for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--) |
23280139 RH |
3049 | { |
3050 | if (TEST_HARD_REG_BIT (*reg_set, regno + j) | |
3051 | || TEST_HARD_REG_BIT (live, regno + j)) | |
3052 | { | |
3053 | success = 0; | |
3054 | break; | |
3055 | } | |
3056 | } | |
3057 | if (success) | |
d3a923ee | 3058 | { |
09e18274 | 3059 | add_to_hard_reg_set (reg_set, mode, regno); |
ede7cd44 | 3060 | |
23280139 RH |
3061 | /* Start the next search with the next register. */ |
3062 | if (++raw_regno >= FIRST_PSEUDO_REGISTER) | |
3063 | raw_regno = 0; | |
3064 | search_ofs = raw_regno; | |
ede7cd44 | 3065 | |
23280139 | 3066 | return gen_rtx_REG (mode, regno); |
d3a923ee | 3067 | } |
ede7cd44 RH |
3068 | } |
3069 | ||
23280139 RH |
3070 | search_ofs = 0; |
3071 | return NULL_RTX; | |
ede7cd44 RH |
3072 | } |
3073 | ||
6c0d7021 JJ |
3074 | /* Forget all currently tracked instructions, only remember current |
3075 | LIVE regset. */ | |
3076 | ||
3077 | static void | |
3078 | peep2_reinit_state (regset live) | |
3079 | { | |
3080 | int i; | |
3081 | ||
3082 | /* Indicate that all slots except the last holds invalid data. */ | |
3083 | for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i) | |
3084 | peep2_insn_data[i].insn = NULL_RTX; | |
3085 | peep2_current_count = 0; | |
3086 | ||
3087 | /* Indicate that the last slot contains live_after data. */ | |
3088 | peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB; | |
3089 | peep2_current = MAX_INSNS_PER_PEEP2; | |
3090 | ||
3091 | COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live); | |
3092 | } | |
3093 | ||
dc297297 | 3094 | /* Perform the peephole2 optimization pass. */ |
23280139 | 3095 | |
e94aab95 | 3096 | static void |
10d22567 | 3097 | peephole2_optimize (void) |
ede7cd44 | 3098 | { |
d3a923ee | 3099 | rtx insn, prev; |
6fb5fa3c | 3100 | bitmap live; |
e0082a72 ZD |
3101 | int i; |
3102 | basic_block bb; | |
b29afcf8 | 3103 | bool do_cleanup_cfg = false; |
9be40833 | 3104 | bool do_rebuild_jump_labels = false; |
ede7cd44 | 3105 | |
6fb5fa3c DB |
3106 | df_set_flags (DF_LR_RUN_DCE); |
3107 | df_analyze (); | |
3108 | ||
23280139 RH |
3109 | /* Initialize the regsets we're going to use. */ |
3110 | for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i) | |
6fb5fa3c DB |
3111 | peep2_insn_data[i].live_before = BITMAP_ALLOC (®_obstack); |
3112 | live = BITMAP_ALLOC (®_obstack); | |
d3a923ee | 3113 | |
e0082a72 | 3114 | FOR_EACH_BB_REVERSE (bb) |
ede7cd44 | 3115 | { |
a8ba47cb | 3116 | rtl_profile_for_bb (bb); |
d3a923ee | 3117 | |
23280139 | 3118 | /* Start up propagation. */ |
6fb5fa3c | 3119 | bitmap_copy (live, DF_LR_OUT (bb)); |
02b47899 | 3120 | df_simulate_initialize_backwards (bb, live); |
6c0d7021 | 3121 | peep2_reinit_state (live); |
ede7cd44 | 3122 | |
a813c111 | 3123 | for (insn = BB_END (bb); ; insn = prev) |
d3a923ee RH |
3124 | { |
3125 | prev = PREV_INSN (insn); | |
b5b8b0ac | 3126 | if (NONDEBUG_INSN_P (insn)) |
ede7cd44 | 3127 | { |
55d796da | 3128 | rtx attempt, before_try, x; |
23280139 | 3129 | int match_len; |
189ae0f4 | 3130 | rtx note; |
546c093e | 3131 | bool was_call = false; |
23280139 RH |
3132 | |
3133 | /* Record this insn. */ | |
3134 | if (--peep2_current < 0) | |
3135 | peep2_current = MAX_INSNS_PER_PEEP2; | |
03414545 KK |
3136 | if (peep2_current_count < MAX_INSNS_PER_PEEP2 |
3137 | && peep2_insn_data[peep2_current].insn == NULL_RTX) | |
0cd6c85a | 3138 | peep2_current_count++; |
23280139 | 3139 | peep2_insn_data[peep2_current].insn = insn; |
02b47899 | 3140 | df_simulate_one_insn_backwards (bb, insn, live); |
23280139 RH |
3141 | COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live); |
3142 | ||
eead060a AH |
3143 | if (RTX_FRAME_RELATED_P (insn)) |
3144 | { | |
3145 | /* If an insn has RTX_FRAME_RELATED_P set, peephole | |
3146 | substitution would lose the | |
3147 | REG_FRAME_RELATED_EXPR that is attached. */ | |
6c0d7021 | 3148 | peep2_reinit_state (live); |
55d796da | 3149 | attempt = NULL; |
eead060a AH |
3150 | } |
3151 | else | |
3152 | /* Match the peephole. */ | |
55d796da | 3153 | attempt = peephole2_insns (PATTERN (insn), insn, &match_len); |
eead060a | 3154 | |
55d796da | 3155 | if (attempt != NULL) |
d3a923ee | 3156 | { |
33593de7 RH |
3157 | /* If we are splitting a CALL_INSN, look for the CALL_INSN |
3158 | in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other | |
3159 | cfg-related call notes. */ | |
3160 | for (i = 0; i <= match_len; ++i) | |
3161 | { | |
2f937369 | 3162 | int j; |
33593de7 RH |
3163 | rtx old_insn, new_insn, note; |
3164 | ||
3165 | j = i + peep2_current; | |
3166 | if (j >= MAX_INSNS_PER_PEEP2 + 1) | |
3167 | j -= MAX_INSNS_PER_PEEP2 + 1; | |
3168 | old_insn = peep2_insn_data[j].insn; | |
4b4bf941 | 3169 | if (!CALL_P (old_insn)) |
33593de7 | 3170 | continue; |
546c093e | 3171 | was_call = true; |
33593de7 | 3172 | |
55d796da | 3173 | new_insn = attempt; |
2f937369 DM |
3174 | while (new_insn != NULL_RTX) |
3175 | { | |
4b4bf941 | 3176 | if (CALL_P (new_insn)) |
2f937369 DM |
3177 | break; |
3178 | new_insn = NEXT_INSN (new_insn); | |
3179 | } | |
3180 | ||
41374e13 | 3181 | gcc_assert (new_insn != NULL_RTX); |
33593de7 RH |
3182 | |
3183 | CALL_INSN_FUNCTION_USAGE (new_insn) | |
3184 | = CALL_INSN_FUNCTION_USAGE (old_insn); | |
3185 | ||
3186 | for (note = REG_NOTES (old_insn); | |
3187 | note; | |
3188 | note = XEXP (note, 1)) | |
3189 | switch (REG_NOTE_KIND (note)) | |
3190 | { | |
33593de7 RH |
3191 | case REG_NORETURN: |
3192 | case REG_SETJMP: | |
65c5f2a6 ILT |
3193 | add_reg_note (new_insn, REG_NOTE_KIND (note), |
3194 | XEXP (note, 0)); | |
3195 | break; | |
001d2740 | 3196 | default: |
e5837c07 | 3197 | /* Discard all other reg notes. */ |
33593de7 RH |
3198 | break; |
3199 | } | |
3200 | ||
3201 | /* Croak if there is another call in the sequence. */ | |
3202 | while (++i <= match_len) | |
3203 | { | |
3204 | j = i + peep2_current; | |
3205 | if (j >= MAX_INSNS_PER_PEEP2 + 1) | |
3206 | j -= MAX_INSNS_PER_PEEP2 + 1; | |
3207 | old_insn = peep2_insn_data[j].insn; | |
41374e13 | 3208 | gcc_assert (!CALL_P (old_insn)); |
33593de7 RH |
3209 | } |
3210 | break; | |
3211 | } | |
3212 | ||
23280139 RH |
3213 | i = match_len + peep2_current; |
3214 | if (i >= MAX_INSNS_PER_PEEP2 + 1) | |
3215 | i -= MAX_INSNS_PER_PEEP2 + 1; | |
3216 | ||
a6a2274a | 3217 | note = find_reg_note (peep2_insn_data[i].insn, |
6b2e80b7 RH |
3218 | REG_EH_REGION, NULL_RTX); |
3219 | ||
23280139 | 3220 | /* Replace the old sequence with the new. */ |
de498dd4 KG |
3221 | attempt = emit_insn_after_setloc (attempt, |
3222 | peep2_insn_data[i].insn, | |
3223 | INSN_LOCATOR (peep2_insn_data[i].insn)); | |
6b2e80b7 | 3224 | before_try = PREV_INSN (insn); |
a7b87f73 | 3225 | delete_insn_chain (insn, peep2_insn_data[i].insn, false); |
d3a923ee | 3226 | |
189ae0f4 | 3227 | /* Re-insert the EH_REGION notes. */ |
546c093e | 3228 | if (note || (was_call && nonlocal_goto_handler_labels)) |
189ae0f4 | 3229 | { |
b29afcf8 | 3230 | edge eh_edge; |
628f6a4e | 3231 | edge_iterator ei; |
b29afcf8 | 3232 | |
628f6a4e | 3233 | FOR_EACH_EDGE (eh_edge, ei, bb->succs) |
546c093e | 3234 | if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL)) |
b29afcf8 RH |
3235 | break; |
3236 | ||
55d796da | 3237 | for (x = attempt ; x != before_try ; x = PREV_INSN (x)) |
4b4bf941 | 3238 | if (CALL_P (x) |
189ae0f4 | 3239 | || (flag_non_call_exceptions |
6b2e80b7 RH |
3240 | && may_trap_p (PATTERN (x)) |
3241 | && !find_reg_note (x, REG_EH_REGION, NULL))) | |
b29afcf8 | 3242 | { |
546c093e | 3243 | if (note) |
65c5f2a6 | 3244 | add_reg_note (x, REG_EH_REGION, XEXP (note, 0)); |
b29afcf8 | 3245 | |
a813c111 | 3246 | if (x != BB_END (bb) && eh_edge) |
b29afcf8 | 3247 | { |
6b2e80b7 RH |
3248 | edge nfte, nehe; |
3249 | int flags; | |
3250 | ||
3251 | nfte = split_block (bb, x); | |
546c093e RH |
3252 | flags = (eh_edge->flags |
3253 | & (EDGE_EH | EDGE_ABNORMAL)); | |
4b4bf941 | 3254 | if (CALL_P (x)) |
6b2e80b7 RH |
3255 | flags |= EDGE_ABNORMAL_CALL; |
3256 | nehe = make_edge (nfte->src, eh_edge->dest, | |
3257 | flags); | |
3258 | ||
b29afcf8 RH |
3259 | nehe->probability = eh_edge->probability; |
3260 | nfte->probability | |
3261 | = REG_BR_PROB_BASE - nehe->probability; | |
3262 | ||
3263 | do_cleanup_cfg |= purge_dead_edges (nfte->dest); | |
b29afcf8 | 3264 | bb = nfte->src; |
6b2e80b7 | 3265 | eh_edge = nehe; |
b29afcf8 RH |
3266 | } |
3267 | } | |
3268 | ||
3269 | /* Converting possibly trapping insn to non-trapping is | |
3270 | possible. Zap dummy outgoing edges. */ | |
3271 | do_cleanup_cfg |= purge_dead_edges (bb); | |
189ae0f4 | 3272 | } |
189ae0f4 | 3273 | |
23280139 | 3274 | #ifdef HAVE_conditional_execution |
23280139 RH |
3275 | for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i) |
3276 | peep2_insn_data[i].insn = NULL_RTX; | |
3277 | peep2_insn_data[peep2_current].insn = PEEP2_EOB; | |
0cd6c85a | 3278 | peep2_current_count = 0; |
23280139 RH |
3279 | #else |
3280 | /* Back up lifetime information past the end of the | |
3281 | newly created sequence. */ | |
3282 | if (++i >= MAX_INSNS_PER_PEEP2 + 1) | |
3283 | i = 0; | |
6fb5fa3c | 3284 | bitmap_copy (live, peep2_insn_data[i].live_before); |
23280139 RH |
3285 | |
3286 | /* Update life information for the new sequence. */ | |
55d796da | 3287 | x = attempt; |
23280139 RH |
3288 | do |
3289 | { | |
9be40833 | 3290 | if (INSN_P (x)) |
23280139 RH |
3291 | { |
3292 | if (--i < 0) | |
3293 | i = MAX_INSNS_PER_PEEP2; | |
03414545 KK |
3294 | if (peep2_current_count < MAX_INSNS_PER_PEEP2 |
3295 | && peep2_insn_data[i].insn == NULL_RTX) | |
0cd6c85a | 3296 | peep2_current_count++; |
9be40833 | 3297 | peep2_insn_data[i].insn = x; |
6fb5fa3c | 3298 | df_insn_rescan (x); |
02b47899 | 3299 | df_simulate_one_insn_backwards (bb, x, live); |
6fb5fa3c | 3300 | bitmap_copy (peep2_insn_data[i].live_before, live); |
23280139 | 3301 | } |
9be40833 | 3302 | x = PREV_INSN (x); |
23280139 | 3303 | } |
9be40833 | 3304 | while (x != prev); |
23280139 | 3305 | |
23280139 RH |
3306 | peep2_current = i; |
3307 | #endif | |
9be40833 RH |
3308 | |
3309 | /* If we generated a jump instruction, it won't have | |
3310 | JUMP_LABEL set. Recompute after we're done. */ | |
55d796da | 3311 | for (x = attempt; x != before_try; x = PREV_INSN (x)) |
4b4bf941 | 3312 | if (JUMP_P (x)) |
9be40833 RH |
3313 | { |
3314 | do_rebuild_jump_labels = true; | |
3315 | break; | |
3316 | } | |
d3a923ee | 3317 | } |
ede7cd44 | 3318 | } |
d3a923ee | 3319 | |
a813c111 | 3320 | if (insn == BB_HEAD (bb)) |
d3a923ee | 3321 | break; |
ede7cd44 RH |
3322 | } |
3323 | } | |
3324 | ||
a8ba47cb | 3325 | default_rtl_profile (); |
23280139 | 3326 | for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i) |
6fb5fa3c DB |
3327 | BITMAP_FREE (peep2_insn_data[i].live_before); |
3328 | BITMAP_FREE (live); | |
9be40833 RH |
3329 | if (do_rebuild_jump_labels) |
3330 | rebuild_jump_labels (get_insns ()); | |
23280139 RH |
3331 | } |
3332 | #endif /* HAVE_peephole2 */ | |
b37c2614 RH |
3333 | |
3334 | /* Common predicates for use with define_bypass. */ | |
3335 | ||
3336 | /* True if the dependency between OUT_INSN and IN_INSN is on the store | |
ff81aa23 PS |
3337 | data not the address operand(s) of the store. IN_INSN and OUT_INSN |
3338 | must be either a single_set or a PARALLEL with SETs inside. */ | |
b37c2614 RH |
3339 | |
3340 | int | |
0c20a65f | 3341 | store_data_bypass_p (rtx out_insn, rtx in_insn) |
b37c2614 RH |
3342 | { |
3343 | rtx out_set, in_set; | |
ff81aa23 PS |
3344 | rtx out_pat, in_pat; |
3345 | rtx out_exp, in_exp; | |
3346 | int i, j; | |
b37c2614 | 3347 | |
b37c2614 | 3348 | in_set = single_set (in_insn); |
ff81aa23 | 3349 | if (in_set) |
091fb516 | 3350 | { |
ff81aa23 | 3351 | if (!MEM_P (SET_DEST (in_set))) |
091fb516 | 3352 | return false; |
ff81aa23 PS |
3353 | |
3354 | out_set = single_set (out_insn); | |
3355 | if (out_set) | |
3356 | { | |
3357 | if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set))) | |
3358 | return false; | |
3359 | } | |
3360 | else | |
3361 | { | |
3362 | out_pat = PATTERN (out_insn); | |
3363 | ||
3364 | if (GET_CODE (out_pat) != PARALLEL) | |
3365 | return false; | |
3366 | ||
3367 | for (i = 0; i < XVECLEN (out_pat, 0); i++) | |
3368 | { | |
3369 | out_exp = XVECEXP (out_pat, 0, i); | |
3370 | ||
3371 | if (GET_CODE (out_exp) == CLOBBER) | |
3372 | continue; | |
3373 | ||
3374 | gcc_assert (GET_CODE (out_exp) == SET); | |
3375 | ||
3376 | if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set))) | |
3377 | return false; | |
3378 | } | |
3379 | } | |
091fb516 DM |
3380 | } |
3381 | else | |
3382 | { | |
ff81aa23 PS |
3383 | in_pat = PATTERN (in_insn); |
3384 | gcc_assert (GET_CODE (in_pat) == PARALLEL); | |
091fb516 | 3385 | |
ff81aa23 | 3386 | for (i = 0; i < XVECLEN (in_pat, 0); i++) |
091fb516 | 3387 | { |
ff81aa23 | 3388 | in_exp = XVECEXP (in_pat, 0, i); |
091fb516 | 3389 | |
ff81aa23 | 3390 | if (GET_CODE (in_exp) == CLOBBER) |
309527ce DM |
3391 | continue; |
3392 | ||
ff81aa23 | 3393 | gcc_assert (GET_CODE (in_exp) == SET); |
091fb516 | 3394 | |
ff81aa23 | 3395 | if (!MEM_P (SET_DEST (in_exp))) |
091fb516 | 3396 | return false; |
ff81aa23 PS |
3397 | |
3398 | out_set = single_set (out_insn); | |
3399 | if (out_set) | |
3400 | { | |
3401 | if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp))) | |
3402 | return false; | |
3403 | } | |
3404 | else | |
3405 | { | |
3406 | out_pat = PATTERN (out_insn); | |
3407 | gcc_assert (GET_CODE (out_pat) == PARALLEL); | |
3408 | ||
3409 | for (j = 0; j < XVECLEN (out_pat, 0); j++) | |
3410 | { | |
3411 | out_exp = XVECEXP (out_pat, 0, j); | |
3412 | ||
3413 | if (GET_CODE (out_exp) == CLOBBER) | |
3414 | continue; | |
3415 | ||
3416 | gcc_assert (GET_CODE (out_exp) == SET); | |
3417 | ||
3418 | if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp))) | |
3419 | return false; | |
3420 | } | |
3421 | } | |
3422 | } | |
091fb516 | 3423 | } |
b37c2614 RH |
3424 | |
3425 | return true; | |
3426 | } | |
3427 | ||
688ec718 RH |
3428 | /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE |
3429 | condition, and not the THEN or ELSE branch. OUT_INSN may be either a single | |
3430 | or multiple set; IN_INSN should be single_set for truth, but for convenience | |
3431 | of insn categorization may be any JUMP or CALL insn. */ | |
b37c2614 RH |
3432 | |
3433 | int | |
0c20a65f | 3434 | if_test_bypass_p (rtx out_insn, rtx in_insn) |
b37c2614 RH |
3435 | { |
3436 | rtx out_set, in_set; | |
3437 | ||
b37c2614 RH |
3438 | in_set = single_set (in_insn); |
3439 | if (! in_set) | |
688ec718 | 3440 | { |
41374e13 NS |
3441 | gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn)); |
3442 | return false; | |
688ec718 | 3443 | } |
b37c2614 RH |
3444 | |
3445 | if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE) | |
3446 | return false; | |
688ec718 | 3447 | in_set = SET_SRC (in_set); |
b37c2614 | 3448 | |
688ec718 RH |
3449 | out_set = single_set (out_insn); |
3450 | if (out_set) | |
3451 | { | |
3452 | if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1)) | |
3453 | || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2))) | |
a6a2274a | 3454 | return false; |
688ec718 RH |
3455 | } |
3456 | else | |
3457 | { | |
3458 | rtx out_pat; | |
3459 | int i; | |
3460 | ||
3461 | out_pat = PATTERN (out_insn); | |
41374e13 | 3462 | gcc_assert (GET_CODE (out_pat) == PARALLEL); |
688ec718 RH |
3463 | |
3464 | for (i = 0; i < XVECLEN (out_pat, 0); i++) | |
3465 | { | |
3466 | rtx exp = XVECEXP (out_pat, 0, i); | |
3467 | ||
3468 | if (GET_CODE (exp) == CLOBBER) | |
3469 | continue; | |
3470 | ||
41374e13 | 3471 | gcc_assert (GET_CODE (exp) == SET); |
688ec718 RH |
3472 | |
3473 | if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1)) | |
3474 | || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2))) | |
3475 | return false; | |
3476 | } | |
3477 | } | |
b37c2614 RH |
3478 | |
3479 | return true; | |
3480 | } | |
ef330312 PB |
3481 | \f |
3482 | static bool | |
3483 | gate_handle_peephole2 (void) | |
3484 | { | |
3485 | return (optimize > 0 && flag_peephole2); | |
3486 | } | |
3487 | ||
/* Execute function for the peephole2 pass.  Runs the optimizer only
   when the target actually provides peephole2 patterns; otherwise it
   is a no-op.  Always returns 0 (no extra TODO flags).  */
static unsigned int
rest_of_handle_peephole2 (void)
{
#ifdef HAVE_peephole2
  peephole2_optimize ();
#endif
  return 0;
}
3496 | ||
/* The peephole2 pass descriptor.  Gated by gate_handle_peephole2
   (optimize > 0 && flag_peephole2); body is rest_of_handle_peephole2.  */
struct rtl_opt_pass pass_peephole2 =
{
 {
  RTL_PASS,
  "peephole2",                          /* name */
  gate_handle_peephole2,                /* gate */
  rest_of_handle_peephole2,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_PEEPHOLE2,                         /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_dump_func                        /* todo_flags_finish */
 }
};
3516 | ||
/* Execute function for the "split1" pass: unconditionally split all
   insns in the current function.  Always returns 0.  */
static unsigned int
rest_of_handle_split_all_insns (void)
{
  split_all_insns ();
  return 0;
}
3523 | ||
/* The "split1" pass descriptor.  NULL gate, so it always runs;
   body is rest_of_handle_split_all_insns.  */
struct rtl_opt_pass pass_split_all_insns =
{
 {
  RTL_PASS,
  "split1",                             /* name */
  NULL,                                 /* gate */
  rest_of_handle_split_all_insns,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};
3542 | ||
6fb5fa3c DB |
3543 | static unsigned int |
3544 | rest_of_handle_split_after_reload (void) | |
ef330312 | 3545 | { |
6fb5fa3c DB |
3546 | /* If optimizing, then go ahead and split insns now. */ |
3547 | #ifndef STACK_REGS | |
3548 | if (optimize > 0) | |
3549 | #endif | |
3550 | split_all_insns (); | |
ef330312 | 3551 | return 0; |
ef330312 PB |
3552 | } |
3553 | ||
/* The "split2" pass descriptor.  NULL gate, so it always runs;
   body is rest_of_handle_split_after_reload.  */
struct rtl_opt_pass pass_split_after_reload =
{
 {
  RTL_PASS,
  "split2",                             /* name */
  NULL,                                 /* gate */
  rest_of_handle_split_after_reload,    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};
3572 | ||
ef330312 PB |
3573 | static bool |
3574 | gate_handle_split_before_regstack (void) | |
3575 | { | |
3576 | #if defined (HAVE_ATTR_length) && defined (STACK_REGS) | |
3577 | /* If flow2 creates new instructions which need splitting | |
3578 | and scheduling after reload is not done, they might not be | |
3579 | split until final which doesn't allow splitting | |
3580 | if HAVE_ATTR_length. */ | |
3581 | # ifdef INSN_SCHEDULING | |
3582 | return (optimize && !flag_schedule_insns_after_reload); | |
3583 | # else | |
3584 | return (optimize); | |
3585 | # endif | |
3586 | #else | |
3587 | return 0; | |
3588 | #endif | |
3589 | } | |
3590 | ||
6fb5fa3c DB |
/* Execute function for the "split3" pass: split all insns before
   reg-stack conversion.  Always returns 0.  */
static unsigned int
rest_of_handle_split_before_regstack (void)
{
  split_all_insns ();
  return 0;
}
3597 | ||
/* The "split3" pass descriptor.  Gated by
   gate_handle_split_before_regstack; body is
   rest_of_handle_split_before_regstack.  */
struct rtl_opt_pass pass_split_before_regstack =
{
 {
  RTL_PASS,
  "split3",                             /* name */
  gate_handle_split_before_regstack,    /* gate */
  rest_of_handle_split_before_regstack, /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};
3616 | ||
3617 | static bool | |
3618 | gate_handle_split_before_sched2 (void) | |
3619 | { | |
3620 | #ifdef INSN_SCHEDULING | |
3621 | return optimize > 0 && flag_schedule_insns_after_reload; | |
3622 | #else | |
3623 | return 0; | |
3624 | #endif | |
3625 | } | |
3626 | ||
/* Execute function for the "split4" pass: split all insns, but only
   when the target supports insn scheduling at all.  Always returns 0.  */
static unsigned int
rest_of_handle_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  split_all_insns ();
#endif
  return 0;
}
3635 | ||
/* The "split4" pass descriptor.  Gated by
   gate_handle_split_before_sched2; body is
   rest_of_handle_split_before_sched2.  */
struct rtl_opt_pass pass_split_before_sched2 =
{
 {
  RTL_PASS,
  "split4",                             /* name */
  gate_handle_split_before_sched2,      /* gate */
  rest_of_handle_split_before_sched2,   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_flow |
  TODO_dump_func                        /* todo_flags_finish */
 }
};
3655 | ||
3656 | /* The placement of the splitting that we do for shorten_branches | |
3657 | depends on whether regstack is used by the target or not. */ | |
3658 | static bool | |
3659 | gate_do_final_split (void) | |
3660 | { | |
3661 | #if defined (HAVE_ATTR_length) && !defined (STACK_REGS) | |
3662 | return 1; | |
3663 | #else | |
3664 | return 0; | |
3665 | #endif | |
3666 | } | |
3667 | ||
/* The "split5" pass descriptor.  Gated by gate_do_final_split; the
   execute function is split_all_insns_noflow (defined elsewhere).  */
struct rtl_opt_pass pass_split_for_shorten_branches =
{
 {
  RTL_PASS,
  "split5",                             /* name */
  gate_do_final_split,                  /* gate */
  split_all_insns_noflow,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
 }
};