/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

21 | #include "config.h" | |
405711de | 22 | #include "system.h" |
805e22b2 | 23 | #include "coretypes.h" |
24 | #include "tm.h" | |
41a8aa41 | 25 | #include "tree.h" |
d7091a76 | 26 | #include "rtl-error.h" |
7953c610 | 27 | #include "tm_p.h" |
431ad2a9 | 28 | #include "insn-config.h" |
29 | #include "insn-attr.h" | |
05806416 | 30 | #include "hard-reg-set.h" |
431ad2a9 | 31 | #include "recog.h" |
32 | #include "regs.h" | |
00cb30dc | 33 | #include "addresses.h" |
e4e86ec5 | 34 | #include "expr.h" |
0a893c29 | 35 | #include "function.h" |
431ad2a9 | 36 | #include "flags.h" |
29bd1808 | 37 | #include "basic-block.h" |
aed0bd19 | 38 | #include "reload.h" |
747bf50d | 39 | #include "target.h" |
77fce4cd | 40 | #include "tree-pass.h" |
3072d30e | 41 | #include "df.h" |
26427966 | 42 | #include "insn-codes.h" |
431ad2a9 | 43 | |
44 | #ifndef STACK_PUSH_CODE | |
45 | #ifdef STACK_GROWS_DOWNWARD | |
46 | #define STACK_PUSH_CODE PRE_DEC | |
47 | #else | |
48 | #define STACK_PUSH_CODE PRE_INC | |
49 | #endif | |
50 | #endif | |
51 | ||
dcf593ec | 52 | #ifndef STACK_POP_CODE |
53 | #ifdef STACK_GROWS_DOWNWARD | |
54 | #define STACK_POP_CODE POST_INC | |
55 | #else | |
56 | #define STACK_POP_CODE POST_DEC | |
57 | #endif | |
58 | #endif | |
59 | ||
e1ab7874 | 60 | static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool); |
3ad4992f | 61 | static void validate_replace_src_1 (rtx *, void *); |
62 | static rtx split_insn (rtx); | |
431ad2a9 | 63 | |
64 | /* Nonzero means allow operands to be volatile. | |
65 | This should be 0 if you are generating rtl, such as if you are calling | |
66 | the functions in optabs.c and expmed.c (most of the time). | |
67 | This should be 1 if all valid insns need to be recognized, | |
ac502adf | 68 | such as in reginfo.c and final.c and reload.c. |
431ad2a9 | 69 | |
70 | init_recog and init_recog_no_volatile are responsible for setting this. */ | |
71 | ||
72 | int volatile_ok; | |
73 | ||
a31fe3d0 | 74 | struct recog_data_d recog_data; |
5d07813e | 75 | |
78e49515 | 76 | /* Contains a vector of operand_alternative structures for every operand. |
77 | Set up by preprocess_constraints. */ | |
78 | struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES]; | |
79 | ||
431ad2a9 | 80 | /* On return from `constrain_operands', indicate which alternative |
81 | was satisfied. */ | |
82 | ||
83 | int which_alternative; | |
84 | ||
85 | /* Nonzero after end of reload pass. | |
86 | Set to 1 or 0 by toplev.c. | |
87 | Controls the significance of (SUBREG (MEM)). */ | |
88 | ||
89 | int reload_completed; | |
90 | ||
8af3db02 | 91 | /* Nonzero after thread_prologue_and_epilogue_insns has run. */ |
92 | int epilogue_completed; | |
93 | ||
431ad2a9 | 94 | /* Initialize data used by the function `recog'. |
95 | This must be called once in the compilation of a function | |
96 | before any insn recognition may be done in the function. */ | |
97 | ||
98 | void | |
3ad4992f | 99 | init_recog_no_volatile (void) |
431ad2a9 | 100 | { |
101 | volatile_ok = 0; | |
102 | } | |
103 | ||
47dda538 | 104 | void |
3ad4992f | 105 | init_recog (void) |
431ad2a9 | 106 | { |
107 | volatile_ok = 1; | |
108 | } | |
109 | ||
431ad2a9 | 110 | \f |
1c7a1755 | 111 | /* Return true if labels in asm operands BODY are LABEL_REFs. */ |
112 | ||
113 | static bool | |
114 | asm_labels_ok (rtx body) | |
115 | { | |
116 | rtx asmop; | |
117 | int i; | |
118 | ||
119 | asmop = extract_asm_operands (body); | |
120 | if (asmop == NULL_RTX) | |
121 | return true; | |
122 | ||
123 | for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++) | |
124 | if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF) | |
125 | return false; | |
126 | ||
127 | return true; | |
128 | } | |
129 | ||
431ad2a9 | 130 | /* Check that X is an insn-body for an `asm' with operands |
131 | and that the operands mentioned in it are legitimate. */ | |
132 | ||
133 | int | |
3ad4992f | 134 | check_asm_operands (rtx x) |
431ad2a9 | 135 | { |
6d2e66f1 | 136 | int noperands; |
431ad2a9 | 137 | rtx *operands; |
a8482e91 | 138 | const char **constraints; |
431ad2a9 | 139 | int i; |
140 | ||
1c7a1755 | 141 | if (!asm_labels_ok (x)) |
142 | return 0; | |
143 | ||
6d2e66f1 | 144 | /* Post-reload, be more strict with things. */ |
145 | if (reload_completed) | |
146 | { | |
147 | /* ??? Doh! We've not got the wrapping insn. Cook one up. */ | |
148 | extract_insn (make_insn_raw (x)); | |
149 | constrain_operands (1); | |
150 | return which_alternative >= 0; | |
151 | } | |
152 | ||
153 | noperands = asm_noperands (x); | |
431ad2a9 | 154 | if (noperands < 0) |
155 | return 0; | |
156 | if (noperands == 0) | |
157 | return 1; | |
158 | ||
f7f3687c | 159 | operands = XALLOCAVEC (rtx, noperands); |
160 | constraints = XALLOCAVEC (const char *, noperands); | |
6d2e66f1 | 161 | |
09fb10e8 | 162 | decode_asm_operands (x, operands, NULL, constraints, NULL, NULL); |
431ad2a9 | 163 | |
164 | for (i = 0; i < noperands; i++) | |
6d2e66f1 | 165 | { |
a8482e91 | 166 | const char *c = constraints[i]; |
24871a8e | 167 | if (c[0] == '%') |
168 | c++; | |
cde8f1c8 | 169 | if (! asm_operand_ok (operands[i], c, constraints)) |
2617fe26 | 170 | return 0; |
6d2e66f1 | 171 | } |
431ad2a9 | 172 | |
173 | return 1; | |
174 | } | |
175 | \f | |
ce326ac0 | 176 | /* Static data for the next two routines. */ |
431ad2a9 | 177 | |
ce326ac0 | 178 | typedef struct change_t |
179 | { | |
180 | rtx object; | |
181 | int old_code; | |
182 | rtx *loc; | |
183 | rtx old; | |
11d686e2 | 184 | bool unshare; |
ce326ac0 | 185 | } change_t; |
431ad2a9 | 186 | |
ce326ac0 | 187 | static change_t *changes; |
188 | static int changes_allocated; | |
431ad2a9 | 189 | |
190 | static int num_changes = 0; | |
191 | ||
60c7c333 | 192 | /* Validate a proposed change to OBJECT. LOC is the location in the rtl |
47cfb7f4 | 193 | at which NEW_RTX will be placed. If OBJECT is zero, no validation is done, |
431ad2a9 | 194 | the change is simply made. |
195 | ||
196 | Two types of objects are supported: If OBJECT is a MEM, memory_address_p | |
197 | will be called with the address and mode as parameters. If OBJECT is | |
198 | an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with | |
199 | the change in place. | |
200 | ||
7fd957fe | 201 | IN_GROUP is nonzero if this is part of a group of changes that must be |
431ad2a9 | 202 | performed as a group. In that case, the changes will be stored. The |
203 | function `apply_change_group' will validate and apply the changes. | |
204 | ||
205 | If IN_GROUP is zero, this is a single change. Try to recognize the insn | |
206 | or validate the memory reference with the change applied. If the result | |
207 | is not valid for the machine, suppress the change and return zero. | |
208 | Otherwise, perform the change and return 1. */ | |
209 | ||
11d686e2 | 210 | static bool |
47cfb7f4 | 211 | validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare) |
431ad2a9 | 212 | { |
213 | rtx old = *loc; | |
214 | ||
47cfb7f4 | 215 | if (old == new_rtx || rtx_equal_p (old, new_rtx)) |
431ad2a9 | 216 | return 1; |
217 | ||
04e579b6 | 218 | gcc_assert (in_group != 0 || num_changes == 0); |
431ad2a9 | 219 | |
47cfb7f4 | 220 | *loc = new_rtx; |
431ad2a9 | 221 | |
222 | /* Save the information describing this change. */ | |
ce326ac0 | 223 | if (num_changes >= changes_allocated) |
224 | { | |
225 | if (changes_allocated == 0) | |
226 | /* This value allows for repeated substitutions inside complex | |
227 | indexed addresses, or changes in up to 5 insns. */ | |
228 | changes_allocated = MAX_RECOG_OPERANDS * 5; | |
229 | else | |
230 | changes_allocated *= 2; | |
231 | ||
f7f3687c | 232 | changes = XRESIZEVEC (change_t, changes, changes_allocated); |
ce326ac0 | 233 | } |
2617fe26 | 234 | |
ce326ac0 | 235 | changes[num_changes].object = object; |
236 | changes[num_changes].loc = loc; | |
237 | changes[num_changes].old = old; | |
11d686e2 | 238 | changes[num_changes].unshare = unshare; |
431ad2a9 | 239 | |
e16ceb8e | 240 | if (object && !MEM_P (object)) |
431ad2a9 | 241 | { |
242 | /* Set INSN_CODE to force rerecognition of insn. Save old code in | |
243 | case invalid. */ | |
ce326ac0 | 244 | changes[num_changes].old_code = INSN_CODE (object); |
431ad2a9 | 245 | INSN_CODE (object) = -1; |
246 | } | |
247 | ||
248 | num_changes++; | |
249 | ||
250 | /* If we are making a group of changes, return 1. Otherwise, validate the | |
251 | change group we made. */ | |
252 | ||
253 | if (in_group) | |
254 | return 1; | |
255 | else | |
256 | return apply_change_group (); | |
257 | } | |
258 | ||
11d686e2 | 259 | /* Wrapper for validate_change_1 without the UNSHARE argument defaulting |
260 | UNSHARE to false. */ | |
261 | ||
262 | bool | |
47cfb7f4 | 263 | validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group) |
11d686e2 | 264 | { |
47cfb7f4 | 265 | return validate_change_1 (object, loc, new_rtx, in_group, false); |
11d686e2 | 266 | } |
267 | ||
268 | /* Wrapper for validate_change_1 without the UNSHARE argument defaulting | |
269 | UNSHARE to true. */ | |
270 | ||
271 | bool | |
47cfb7f4 | 272 | validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group) |
11d686e2 | 273 | { |
47cfb7f4 | 274 | return validate_change_1 (object, loc, new_rtx, in_group, true); |
11d686e2 | 275 | } |
276 | ||
277 | ||
42a3a38b | 278 | /* Keep X canonicalized if some changes have made it non-canonical; only |
279 | modifies the operands of X, not (for example) its code. Simplifications | |
280 | are not the job of this routine. | |
281 | ||
282 | Return true if anything was changed. */ | |
283 | bool | |
284 | canonicalize_change_group (rtx insn, rtx x) | |
285 | { | |
286 | if (COMMUTATIVE_P (x) | |
287 | && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1))) | |
288 | { | |
289 | /* Oops, the caller has made X no longer canonical. | |
290 | Let's redo the changes in the correct order. */ | |
291 | rtx tem = XEXP (x, 0); | |
0a01a6f5 | 292 | validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1); |
293 | validate_unshare_change (insn, &XEXP (x, 1), tem, 1); | |
42a3a38b | 294 | return true; |
295 | } | |
296 | else | |
297 | return false; | |
298 | } | |
48e1416a | 299 | |
f5ab9387 | 300 | |
c426f6be | 301 | /* This subroutine of apply_change_group verifies whether the changes to INSN |
dae9d0e7 | 302 | were valid; i.e. whether INSN can still be recognized. |
303 | ||
304 | If IN_GROUP is true clobbers which have to be added in order to | |
305 | match the instructions will be added to the current change group. | |
306 | Otherwise the changes will take effect immediately. */ | |
c426f6be | 307 | |
06e2144a | 308 | int |
dae9d0e7 | 309 | insn_invalid_p (rtx insn, bool in_group) |
c426f6be | 310 | { |
06e2144a | 311 | rtx pat = PATTERN (insn); |
312 | int num_clobbers = 0; | |
313 | /* If we are before reload and the pattern is a SET, see if we can add | |
314 | clobbers. */ | |
315 | int icode = recog (pat, insn, | |
316 | (GET_CODE (pat) == SET | |
317 | && ! reload_completed && ! reload_in_progress) | |
d946ea19 | 318 | ? &num_clobbers : 0); |
c426f6be | 319 | int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0; |
320 | ||
2617fe26 | 321 | |
06e2144a | 322 | /* If this is an asm and the operand aren't legal, then fail. Likewise if |
323 | this is not an asm and the insn wasn't recognized. */ | |
324 | if ((is_asm && ! check_asm_operands (PATTERN (insn))) | |
325 | || (!is_asm && icode < 0)) | |
c426f6be | 326 | return 1; |
327 | ||
06e2144a | 328 | /* If we have to add CLOBBERs, fail if we have to add ones that reference |
329 | hard registers since our callers can't know if they are live or not. | |
330 | Otherwise, add them. */ | |
331 | if (num_clobbers > 0) | |
332 | { | |
333 | rtx newpat; | |
334 | ||
335 | if (added_clobbers_hard_reg_p (icode)) | |
336 | return 1; | |
337 | ||
338 | newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1)); | |
339 | XVECEXP (newpat, 0, 0) = pat; | |
340 | add_clobbers (newpat, icode); | |
dae9d0e7 | 341 | if (in_group) |
342 | validate_change (insn, &PATTERN (insn), newpat, 1); | |
343 | else | |
344 | PATTERN (insn) = pat = newpat; | |
06e2144a | 345 | } |
346 | ||
c426f6be | 347 | /* After reload, verify that all constraints are satisfied. */ |
348 | if (reload_completed) | |
349 | { | |
7f82be90 | 350 | extract_insn (insn); |
c426f6be | 351 | |
7f82be90 | 352 | if (! constrain_operands (1)) |
c426f6be | 353 | return 1; |
354 | } | |
355 | ||
06e2144a | 356 | INSN_CODE (insn) = icode; |
c426f6be | 357 | return 0; |
358 | } | |
359 | ||
34073249 | 360 | /* Return number of changes made and not validated yet. */ |
361 | int | |
3ad4992f | 362 | num_changes_pending (void) |
34073249 | 363 | { |
364 | return num_changes; | |
365 | } | |
366 | ||
82880dfd | 367 | /* Tentatively apply the changes numbered NUM and up. |
431ad2a9 | 368 | Return 1 if all changes are valid, zero otherwise. */ |
369 | ||
cd7f40a2 | 370 | int |
82880dfd | 371 | verify_changes (int num) |
431ad2a9 | 372 | { |
373 | int i; | |
3ea97a32 | 374 | rtx last_validated = NULL_RTX; |
431ad2a9 | 375 | |
376 | /* The changes have been applied and all INSN_CODEs have been reset to force | |
377 | rerecognition. | |
378 | ||
379 | The changes are valid if we aren't given an object, or if we are | |
380 | given a MEM and it still is a valid address, or if this is in insn | |
381 | and it is recognized. In the latter case, if reload has completed, | |
382 | we also require that the operands meet the constraints for | |
7f82be90 | 383 | the insn. */ |
431ad2a9 | 384 | |
82880dfd | 385 | for (i = num; i < num_changes; i++) |
431ad2a9 | 386 | { |
ce326ac0 | 387 | rtx object = changes[i].object; |
431ad2a9 | 388 | |
b4b174c3 | 389 | /* If there is no object to test or if it is the same as the one we |
3ea97a32 | 390 | already tested, ignore it. */ |
391 | if (object == 0 || object == last_validated) | |
431ad2a9 | 392 | continue; |
393 | ||
e16ceb8e | 394 | if (MEM_P (object)) |
431ad2a9 | 395 | { |
bd1a81f7 | 396 | if (! memory_address_addr_space_p (GET_MODE (object), |
397 | XEXP (object, 0), | |
398 | MEM_ADDR_SPACE (object))) | |
431ad2a9 | 399 | break; |
400 | } | |
d25c779e | 401 | else if (/* changes[i].old might be zero, e.g. when putting a |
402 | REG_FRAME_RELATED_EXPR into a previously empty list. */ | |
403 | changes[i].old | |
404 | && REG_P (changes[i].old) | |
af7eac22 | 405 | && asm_noperands (PATTERN (object)) > 0 |
406 | && REG_EXPR (changes[i].old) != NULL_TREE | |
407 | && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old)) | |
408 | && DECL_REGISTER (REG_EXPR (changes[i].old))) | |
409 | { | |
410 | /* Don't allow changes of hard register operands to inline | |
411 | assemblies if they have been defined as register asm ("x"). */ | |
412 | break; | |
413 | } | |
9845d120 | 414 | else if (DEBUG_INSN_P (object)) |
415 | continue; | |
dae9d0e7 | 416 | else if (insn_invalid_p (object, true)) |
431ad2a9 | 417 | { |
418 | rtx pat = PATTERN (object); | |
419 | ||
420 | /* Perhaps we couldn't recognize the insn because there were | |
421 | extra CLOBBERs at the end. If so, try to re-recognize | |
422 | without the last CLOBBER (later iterations will cause each of | |
423 | them to be eliminated, in turn). But don't do this if we | |
424 | have an ASM_OPERAND. */ | |
425 | if (GET_CODE (pat) == PARALLEL | |
426 | && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER | |
427 | && asm_noperands (PATTERN (object)) < 0) | |
428 | { | |
c4cf0b6b | 429 | rtx newpat; |
430 | ||
431 | if (XVECLEN (pat, 0) == 2) | |
432 | newpat = XVECEXP (pat, 0, 0); | |
433 | else | |
434 | { | |
435 | int j; | |
436 | ||
437 | newpat | |
2617fe26 | 438 | = gen_rtx_PARALLEL (VOIDmode, |
c4cf0b6b | 439 | rtvec_alloc (XVECLEN (pat, 0) - 1)); |
440 | for (j = 0; j < XVECLEN (newpat, 0); j++) | |
441 | XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j); | |
442 | } | |
443 | ||
444 | /* Add a new change to this group to replace the pattern | |
445 | with this new pattern. Then consider this change | |
446 | as having succeeded. The change we added will | |
447 | cause the entire call to fail if things remain invalid. | |
448 | ||
449 | Note that this can lose if a later change than the one | |
450 | we are processing specified &XVECEXP (PATTERN (object), 0, X) | |
451 | but this shouldn't occur. */ | |
452 | ||
453 | validate_change (object, &PATTERN (object), newpat, 1); | |
454 | continue; | |
455 | } | |
9845d120 | 456 | else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER |
457 | || GET_CODE (pat) == VAR_LOCATION) | |
431ad2a9 | 458 | /* If this insn is a CLOBBER or USE, it is always valid, but is |
459 | never recognized. */ | |
460 | continue; | |
461 | else | |
462 | break; | |
463 | } | |
3ea97a32 | 464 | last_validated = object; |
431ad2a9 | 465 | } |
466 | ||
82880dfd | 467 | return (i == num_changes); |
468 | } | |
469 | ||
3072d30e | 470 | /* A group of changes has previously been issued with validate_change |
471 | and verified with verify_changes. Call df_insn_rescan for each of | |
472 | the insn changed and clear num_changes. */ | |
82880dfd | 473 | |
474 | void | |
475 | confirm_change_group (void) | |
476 | { | |
477 | int i; | |
11d686e2 | 478 | rtx last_object = NULL; |
308f9b79 | 479 | |
82880dfd | 480 | for (i = 0; i < num_changes; i++) |
3072d30e | 481 | { |
482 | rtx object = changes[i].object; | |
11d686e2 | 483 | |
484 | if (changes[i].unshare) | |
485 | *changes[i].loc = copy_rtx (*changes[i].loc); | |
486 | ||
f0b5f617 | 487 | /* Avoid unnecessary rescanning when multiple changes to same instruction |
11d686e2 | 488 | are made. */ |
489 | if (object) | |
490 | { | |
491 | if (object != last_object && last_object && INSN_P (last_object)) | |
492 | df_insn_rescan (last_object); | |
493 | last_object = object; | |
494 | } | |
3072d30e | 495 | } |
82880dfd | 496 | |
11d686e2 | 497 | if (last_object && INSN_P (last_object)) |
498 | df_insn_rescan (last_object); | |
82880dfd | 499 | num_changes = 0; |
500 | } | |
501 | ||
/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
520 | ||
82880dfd | 521 | |
1e625a2e | 522 | /* Return the number of changes so far in the current group. */ |
431ad2a9 | 523 | |
524 | int | |
3ad4992f | 525 | num_validated_changes (void) |
431ad2a9 | 526 | { |
527 | return num_changes; | |
528 | } | |
529 | ||
530 | /* Retract the changes numbered NUM and up. */ | |
531 | ||
532 | void | |
3ad4992f | 533 | cancel_changes (int num) |
431ad2a9 | 534 | { |
535 | int i; | |
536 | ||
537 | /* Back out all the changes. Do this in the opposite order in which | |
538 | they were made. */ | |
539 | for (i = num_changes - 1; i >= num; i--) | |
540 | { | |
ce326ac0 | 541 | *changes[i].loc = changes[i].old; |
e16ceb8e | 542 | if (changes[i].object && !MEM_P (changes[i].object)) |
ce326ac0 | 543 | INSN_CODE (changes[i].object) = changes[i].old_code; |
431ad2a9 | 544 | } |
545 | num_changes = num; | |
546 | } | |
547 | ||
/* Reduce conditional compilation elsewhere.  */
#ifndef HAVE_extv
#define HAVE_extv	0
#define CODE_FOR_extv	CODE_FOR_nothing
#endif
#ifndef HAVE_extzv
#define HAVE_extzv	0
#define CODE_FOR_extzv	CODE_FOR_nothing
#endif
557 | ||
e1ab7874 | 558 | /* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting |
559 | rtx. */ | |
431ad2a9 | 560 | |
561 | static void | |
48e1416a | 562 | simplify_while_replacing (rtx *loc, rtx to, rtx object, |
e1ab7874 | 563 | enum machine_mode op0_mode) |
431ad2a9 | 564 | { |
19cb6b50 | 565 | rtx x = *loc; |
e1ab7874 | 566 | enum rtx_code code = GET_CODE (x); |
47cfb7f4 | 567 | rtx new_rtx; |
431ad2a9 | 568 | |
6720e96c | 569 | if (SWAPPABLE_OPERANDS_P (x) |
9a19db4c | 570 | && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1))) |
571 | { | |
5612e2f1 | 572 | validate_unshare_change (object, loc, |
573 | gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code | |
574 | : swap_condition (code), | |
575 | GET_MODE (x), XEXP (x, 1), | |
576 | XEXP (x, 0)), 1); | |
9a19db4c | 577 | x = *loc; |
578 | code = GET_CODE (x); | |
579 | } | |
67edcce9 | 580 | |
431ad2a9 | 581 | switch (code) |
582 | { | |
583 | case PLUS: | |
3398e91d | 584 | /* If we have a PLUS whose second operand is now a CONST_INT, use |
458bb871 | 585 | simplify_gen_binary to try to simplify it. |
9a19db4c | 586 | ??? We may want later to remove this, once simplification is |
587 | separated from this function. */ | |
971ba038 | 588 | if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to) |
9a19db4c | 589 | validate_change (object, loc, |
7d6080d6 | 590 | simplify_gen_binary |
591 | (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1); | |
9a19db4c | 592 | break; |
67edcce9 | 593 | case MINUS: |
efa08fc2 | 594 | if (CONST_SCALAR_INT_P (XEXP (x, 1))) |
9a19db4c | 595 | validate_change (object, loc, |
596 | simplify_gen_binary | |
597 | (PLUS, GET_MODE (x), XEXP (x, 0), | |
598 | simplify_gen_unary (NEG, | |
4d1f4307 | 599 | GET_MODE (x), XEXP (x, 1), |
600 | GET_MODE (x))), 1); | |
67edcce9 | 601 | break; |
431ad2a9 | 602 | case ZERO_EXTEND: |
603 | case SIGN_EXTEND: | |
9a19db4c | 604 | if (GET_MODE (XEXP (x, 0)) == VOIDmode) |
431ad2a9 | 605 | { |
47cfb7f4 | 606 | new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0), |
9a19db4c | 607 | op0_mode); |
332e6527 | 608 | /* If any of the above failed, substitute in something that |
609 | we know won't be recognized. */ | |
47cfb7f4 | 610 | if (!new_rtx) |
611 | new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx); | |
612 | validate_change (object, loc, new_rtx, 1); | |
431ad2a9 | 613 | } |
614 | break; | |
431ad2a9 | 615 | case SUBREG: |
9a19db4c | 616 | /* All subregs possible to simplify should be simplified. */ |
47cfb7f4 | 617 | new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode, |
9a19db4c | 618 | SUBREG_BYTE (x)); |
619 | ||
c4cf0b6b | 620 | /* Subregs of VOIDmode operands are incorrect. */ |
47cfb7f4 | 621 | if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode) |
622 | new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx); | |
623 | if (new_rtx) | |
624 | validate_change (object, loc, new_rtx, 1); | |
431ad2a9 | 625 | break; |
431ad2a9 | 626 | case ZERO_EXTRACT: |
627 | case SIGN_EXTRACT: | |
628 | /* If we are replacing a register with memory, try to change the memory | |
9a19db4c | 629 | to be the mode required for memory in extract operations (this isn't |
630 | likely to be an insertion operation; if it was, nothing bad will | |
631 | happen, we might just fail in some cases). */ | |
431ad2a9 | 632 | |
e16ceb8e | 633 | if (MEM_P (XEXP (x, 0)) |
971ba038 | 634 | && CONST_INT_P (XEXP (x, 1)) |
635 | && CONST_INT_P (XEXP (x, 2)) | |
4e27ffd0 | 636 | && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0), |
637 | MEM_ADDR_SPACE (XEXP (x, 0))) | |
9a19db4c | 638 | && !MEM_VOLATILE_P (XEXP (x, 0))) |
431ad2a9 | 639 | { |
640 | enum machine_mode wanted_mode = VOIDmode; | |
9a19db4c | 641 | enum machine_mode is_mode = GET_MODE (XEXP (x, 0)); |
431ad2a9 | 642 | int pos = INTVAL (XEXP (x, 2)); |
643 | ||
26427966 | 644 | if (GET_CODE (x) == ZERO_EXTRACT && HAVE_extzv) |
519e5777 | 645 | { |
26427966 | 646 | wanted_mode = insn_data[CODE_FOR_extzv].operand[1].mode; |
647 | if (wanted_mode == VOIDmode) | |
648 | wanted_mode = word_mode; | |
519e5777 | 649 | } |
26427966 | 650 | else if (GET_CODE (x) == SIGN_EXTRACT && HAVE_extv) |
519e5777 | 651 | { |
26427966 | 652 | wanted_mode = insn_data[CODE_FOR_extv].operand[1].mode; |
653 | if (wanted_mode == VOIDmode) | |
654 | wanted_mode = word_mode; | |
519e5777 | 655 | } |
431ad2a9 | 656 | |
4bbea254 | 657 | /* If we have a narrower mode, we can do something. */ |
431ad2a9 | 658 | if (wanted_mode != VOIDmode |
659 | && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode)) | |
660 | { | |
661 | int offset = pos / BITS_PER_UNIT; | |
662 | rtx newmem; | |
663 | ||
701e46d0 | 664 | /* If the bytes and bits are counted differently, we |
9a19db4c | 665 | must adjust the offset. */ |
51356f86 | 666 | if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN) |
9a19db4c | 667 | offset = |
668 | (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) - | |
669 | offset); | |
431ad2a9 | 670 | |
eb2457b0 | 671 | gcc_assert (GET_MODE_PRECISION (wanted_mode) |
672 | == GET_MODE_BITSIZE (wanted_mode)); | |
431ad2a9 | 673 | pos %= GET_MODE_BITSIZE (wanted_mode); |
674 | ||
e4e86ec5 | 675 | newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset); |
431ad2a9 | 676 | |
8afcf593 | 677 | validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1); |
431ad2a9 | 678 | validate_change (object, &XEXP (x, 0), newmem, 1); |
679 | } | |
680 | } | |
681 | ||
682 | break; | |
9a19db4c | 683 | |
941522d6 | 684 | default: |
685 | break; | |
431ad2a9 | 686 | } |
431ad2a9 | 687 | } |
688 | ||
e1ab7874 | 689 | /* Replace every occurrence of FROM in X with TO. Mark each change with |
690 | validate_change passing OBJECT. */ | |
691 | ||
692 | static void | |
48e1416a | 693 | validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object, |
e1ab7874 | 694 | bool simplify) |
695 | { | |
696 | int i, j; | |
697 | const char *fmt; | |
698 | rtx x = *loc; | |
699 | enum rtx_code code; | |
700 | enum machine_mode op0_mode = VOIDmode; | |
701 | int prev_changes = num_changes; | |
702 | ||
703 | if (!x) | |
704 | return; | |
705 | ||
706 | code = GET_CODE (x); | |
707 | fmt = GET_RTX_FORMAT (code); | |
708 | if (fmt[0] == 'e') | |
709 | op0_mode = GET_MODE (XEXP (x, 0)); | |
710 | ||
711 | /* X matches FROM if it is the same rtx or they are both referring to the | |
712 | same register in the same mode. Avoid calling rtx_equal_p unless the | |
713 | operands look similar. */ | |
714 | ||
715 | if (x == from | |
716 | || (REG_P (x) && REG_P (from) | |
717 | && GET_MODE (x) == GET_MODE (from) | |
718 | && REGNO (x) == REGNO (from)) | |
719 | || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from) | |
720 | && rtx_equal_p (x, from))) | |
721 | { | |
722 | validate_unshare_change (object, loc, to, 1); | |
723 | return; | |
724 | } | |
725 | ||
726 | /* Call ourself recursively to perform the replacements. | |
727 | We must not replace inside already replaced expression, otherwise we | |
728 | get infinite recursion for replacements like (reg X)->(subreg (reg X)) | |
67d57e27 | 729 | so we must special case shared ASM_OPERANDS. */ |
e1ab7874 | 730 | |
731 | if (GET_CODE (x) == PARALLEL) | |
732 | { | |
733 | for (j = XVECLEN (x, 0) - 1; j >= 0; j--) | |
734 | { | |
735 | if (j && GET_CODE (XVECEXP (x, 0, j)) == SET | |
736 | && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS) | |
737 | { | |
738 | /* Verify that operands are really shared. */ | |
739 | gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0))) | |
740 | == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP | |
741 | (x, 0, j)))); | |
742 | validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)), | |
743 | from, to, object, simplify); | |
744 | } | |
745 | else | |
48e1416a | 746 | validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object, |
e1ab7874 | 747 | simplify); |
748 | } | |
749 | } | |
750 | else | |
751 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
752 | { | |
753 | if (fmt[i] == 'e') | |
754 | validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify); | |
755 | else if (fmt[i] == 'E') | |
756 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
48e1416a | 757 | validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object, |
e1ab7874 | 758 | simplify); |
759 | } | |
760 | ||
761 | /* If we didn't substitute, there is nothing more to do. */ | |
762 | if (num_changes == prev_changes) | |
763 | return; | |
764 | ||
67d57e27 | 765 | /* ??? The regmove is no more, so is this aberration still necessary? */ |
e1ab7874 | 766 | /* Allow substituted expression to have different mode. This is used by |
767 | regmove to change mode of pseudo register. */ | |
768 | if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode) | |
769 | op0_mode = GET_MODE (XEXP (x, 0)); | |
770 | ||
771 | /* Do changes needed to keep rtx consistent. Don't do any other | |
772 | simplifications, as it is not our job. */ | |
773 | if (simplify) | |
774 | simplify_while_replacing (loc, to, object, op0_mode); | |
775 | } | |
776 | ||
46222c18 | 777 | /* Try replacing every occurrence of FROM in subexpression LOC of INSN |
778 | with TO. After all changes have been made, validate by seeing | |
779 | if INSN is still valid. */ | |
780 | ||
781 | int | |
782 | validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc) | |
783 | { | |
784 | validate_replace_rtx_1 (loc, from, to, insn, true); | |
785 | return apply_change_group (); | |
786 | } | |
787 | ||
431ad2a9 | 788 | /* Try replacing every occurrence of FROM in INSN with TO. After all |
789 | changes have been made, validate by seeing if INSN is still valid. */ | |
790 | ||
791 | int | |
3ad4992f | 792 | validate_replace_rtx (rtx from, rtx to, rtx insn) |
431ad2a9 | 793 | { |
e1ab7874 | 794 | validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true); |
431ad2a9 | 795 | return apply_change_group (); |
796 | } | |
18aa2adf | 797 | |
e1ab7874 | 798 | /* Try replacing every occurrence of FROM in WHERE with TO. Assume that WHERE |
48e1416a | 799 | is a part of INSN. After all changes have been made, validate by seeing if |
800 | INSN is still valid. | |
801 | validate_replace_rtx (from, to, insn) is equivalent to | |
e1ab7874 | 802 | validate_replace_rtx_part (from, to, &PATTERN (insn), insn). */ |
803 | ||
804 | int | |
805 | validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn) | |
806 | { | |
807 | validate_replace_rtx_1 (where, from, to, insn, true); | |
808 | return apply_change_group (); | |
809 | } | |
810 | ||
811 | /* Same as above, but do not simplify rtx afterwards. */ | |
48e1416a | 812 | int |
813 | validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where, | |
e1ab7874 | 814 | rtx insn) |
815 | { | |
816 | validate_replace_rtx_1 (where, from, to, insn, false); | |
817 | return apply_change_group (); | |
818 | ||
819 | } | |
820 | ||
70bb3c91 | 821 | /* Try replacing every occurrence of FROM in INSN with TO. This also |
822 | will replace in REG_EQUAL and REG_EQUIV notes. */ | |
0a7a3b35 | 823 | |
824 | void | |
3ad4992f | 825 | validate_replace_rtx_group (rtx from, rtx to, rtx insn) |
0a7a3b35 | 826 | { |
70bb3c91 | 827 | rtx note; |
e1ab7874 | 828 | validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true); |
70bb3c91 | 829 | for (note = REG_NOTES (insn); note; note = XEXP (note, 1)) |
830 | if (REG_NOTE_KIND (note) == REG_EQUAL | |
831 | || REG_NOTE_KIND (note) == REG_EQUIV) | |
832 | validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true); | |
0a7a3b35 | 833 | } |
834 | ||
/* Function called by note_uses to replace used subexpressions.  */
/* Bundle of arguments threaded through note_uses's void* cookie.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};
99b86c05 | 842 | |
843 | static void | |
3ad4992f | 844 | validate_replace_src_1 (rtx *x, void *data) |
99b86c05 | 845 | { |
846 | struct validate_replace_src_data *d | |
847 | = (struct validate_replace_src_data *) data; | |
848 | ||
e1ab7874 | 849 | validate_replace_rtx_1 (x, d->from, d->to, d->insn, true); |
99b86c05 | 850 | } |
851 | ||
18aa2adf | 852 | /* Try replacing every occurrence of FROM in INSN with TO, avoiding |
34073249 | 853 | SET_DESTs. */ |
18aa2adf | 854 | |
34073249 | 855 | void |
3ad4992f | 856 | validate_replace_src_group (rtx from, rtx to, rtx insn) |
18aa2adf | 857 | { |
99b86c05 | 858 | struct validate_replace_src_data d; |
38417d3e | 859 | |
99b86c05 | 860 | d.from = from; |
861 | d.to = to; | |
862 | d.insn = insn; | |
863 | note_uses (&PATTERN (insn), validate_replace_src_1, &d); | |
34073249 | 864 | } |
2b74c150 | 865 | |
866 | /* Try simplify INSN. | |
867 | Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's | |
868 | pattern and return true if something was simplified. */ | |
869 | ||
870 | bool | |
871 | validate_simplify_insn (rtx insn) | |
872 | { | |
873 | int i; | |
874 | rtx pat = NULL; | |
875 | rtx newpat = NULL; | |
876 | ||
877 | pat = PATTERN (insn); | |
878 | ||
879 | if (GET_CODE (pat) == SET) | |
880 | { | |
881 | newpat = simplify_rtx (SET_SRC (pat)); | |
882 | if (newpat && !rtx_equal_p (SET_SRC (pat), newpat)) | |
883 | validate_change (insn, &SET_SRC (pat), newpat, 1); | |
884 | newpat = simplify_rtx (SET_DEST (pat)); | |
885 | if (newpat && !rtx_equal_p (SET_DEST (pat), newpat)) | |
886 | validate_change (insn, &SET_DEST (pat), newpat, 1); | |
887 | } | |
888 | else if (GET_CODE (pat) == PARALLEL) | |
889 | for (i = 0; i < XVECLEN (pat, 0); i++) | |
890 | { | |
891 | rtx s = XVECEXP (pat, 0, i); | |
892 | ||
893 | if (GET_CODE (XVECEXP (pat, 0, i)) == SET) | |
894 | { | |
895 | newpat = simplify_rtx (SET_SRC (s)); | |
896 | if (newpat && !rtx_equal_p (SET_SRC (s), newpat)) | |
897 | validate_change (insn, &SET_SRC (s), newpat, 1); | |
898 | newpat = simplify_rtx (SET_DEST (s)); | |
899 | if (newpat && !rtx_equal_p (SET_DEST (s), newpat)) | |
900 | validate_change (insn, &SET_DEST (s), newpat, 1); | |
901 | } | |
902 | } | |
903 | return ((num_changes_pending () > 0) && (apply_change_group () > 0)); | |
904 | } | |
431ad2a9 | 905 | \f |
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  /* A non-insn user also forces the conservative answer.  */
  return (INSN_P (next)
	  && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
924 | \f | |
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  /* A CONST_INT must already be in canonical (sign-extended) form
     for MODE.  */
  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  /* Constants are checked against PIC legitimacy and the target's
     legitimate_constant_p hook rather than against MODE directly.  */
  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	     || mode == VOIDmode)
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	    && targetm.legitimate_constant_p (mode == VOIDmode
					      ? GET_MODE (op)
					      : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 reference to be explicit, so outlaw paradoxical SUBREGs.
	 However, we must allow them after reload so that they can
	 get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
	 may result in incorrect reference.  We should simplify all valid
	 subregs of MEM anyway.  But allow this after reload because we
	 might be called from cleanup_subreg_operands.

	 ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
	  && MEM_P (sub))
	return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
	  /* LRA can use subreg to store a floating point value in an
	     integer mode.  Although the floating point and the
	     integer modes need the same number of hard registers, the
	     size of floating point mode can be less than the integer
	     mode.  */
	  && ! lra_in_progress
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      /* Look through the SUBREG and validate what it wraps.  */
      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
	return 1;
    }

  return 0;
}
1030 | \f | |
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  /* MODE is the mode of the memory being addressed, not the mode of OP.  */
  return memory_address_p (mode, op);
}
1042 | ||
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
	return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      /* Reject hard-register subregs whose mode change is invalid on
	 this target, except for complex modes and LRA's synthetic
	 subregs.  */
      if (REG_P (sub)
	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
	  && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
	  /* LRA can generate some invalid SUBREGS just for matched
	     operand reload presentation.  LRA needs to treat them as
	     valid.  */
	  && ! LRA_SUBREG_P (op))
	return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
	  /* LRA can use subreg to store a floating point value in an
	     integer mode.  Although the floating point and the
	     integer modes need the same number of hard registers, the
	     size of floating point mode can be less than the integer
	     mode.  */
	  && ! lra_in_progress
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      op = sub;
    }

  /* Accept any pseudo, or any hard register valid as an operand in
     this mode.  */
  return (REG_P (op)
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || in_hard_reg_set_p (operand_reg_set,
				    GET_MODE (op), REGNO (op))));
}
1101 | ||
/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}
1109 | ||
431ad2a9 | 1110 | /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH |
1111 | or a hard register. */ | |
1112 | ||
1113 | int | |
3ad4992f | 1114 | scratch_operand (rtx op, enum machine_mode mode) |
431ad2a9 | 1115 | { |
f3d96a58 | 1116 | if (GET_MODE (op) != mode && mode != VOIDmode) |
1117 | return 0; | |
1118 | ||
1119 | return (GET_CODE (op) == SCRATCH | |
8ad4c111 | 1120 | || (REG_P (op) |
c6a6cdaa | 1121 | && (lra_in_progress || REGNO (op) < FIRST_PSEUDO_REGISTER))); |
431ad2a9 | 1122 | } |
1123 | ||
1124 | /* Return 1 if OP is a valid immediate operand for mode MODE. | |
1125 | ||
1126 | The main use of this function is as a predicate in match_operand | |
1127 | expressions in the machine description. */ | |
1128 | ||
1129 | int | |
3ad4992f | 1130 | immediate_operand (rtx op, enum machine_mode mode) |
431ad2a9 | 1131 | { |
1132 | /* Don't accept CONST_INT or anything similar | |
1133 | if the caller wants something floating. */ | |
1134 | if (GET_MODE (op) == VOIDmode && mode != VOIDmode | |
ccf721a9 | 1135 | && GET_MODE_CLASS (mode) != MODE_INT |
1136 | && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT) | |
431ad2a9 | 1137 | return 0; |
1138 | ||
971ba038 | 1139 | if (CONST_INT_P (op) |
dd067362 | 1140 | && mode != VOIDmode |
37b36c90 | 1141 | && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op)) |
1142 | return 0; | |
1143 | ||
431ad2a9 | 1144 | return (CONSTANT_P (op) |
1145 | && (GET_MODE (op) == mode || mode == VOIDmode | |
1146 | || GET_MODE (op) == VOIDmode) | |
431ad2a9 | 1147 | && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)) |
ca316360 | 1148 | && targetm.legitimate_constant_p (mode == VOIDmode |
1149 | ? GET_MODE (op) | |
1150 | : mode, op)); | |
431ad2a9 | 1151 | } |
1152 | ||
e913b5cd | 1153 | /* Returns 1 if OP is an operand that is a CONST_INT of mode MODE. */ |
431ad2a9 | 1154 | |
1155 | int | |
3ad4992f | 1156 | const_int_operand (rtx op, enum machine_mode mode) |
431ad2a9 | 1157 | { |
971ba038 | 1158 | if (!CONST_INT_P (op)) |
4737d8ef | 1159 | return 0; |
1160 | ||
1161 | if (mode != VOIDmode | |
1162 | && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op)) | |
1163 | return 0; | |
1164 | ||
1165 | return 1; | |
431ad2a9 | 1166 | } |
1167 | ||
#if TARGET_SUPPORTS_WIDE_INT
/* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
   of mode MODE.  */
int
const_scalar_int_operand (rtx op, enum machine_mode mode)
{
  if (!CONST_SCALAR_INT_P (op))
    return 0;

  if (CONST_INT_P (op))
    return const_int_operand (op, mode);

  if (mode != VOIDmode)
    {
      int prec = GET_MODE_PRECISION (mode);
      int bitsize = GET_MODE_BITSIZE (mode);

      /* The constant must not need more words than MODE provides.  */
      if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
	return 0;

      if (prec == bitsize)
	return 1;
      else
	{
	  /* Multiword partial int.  */
	  /* The top word must be properly sign-extended to the partial
	     precision.  */
	  HOST_WIDE_INT x
	    = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
	  return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
	}
    }
  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* With wide-int support, CONST_DOUBLE only carries floating-point
     values; integers are CONST_INT/CONST_WIDE_INT.  */
  return (GET_CODE (op) == CONST_DOUBLE)
	  && (GET_MODE (op) == mode || mode == VOIDmode);
}
#else
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
	  && (mode == VOIDmode || GET_MODE (op) == mode
	      || GET_MODE (op) == VOIDmode));
}
#endif
/* Return 1 if OP is a general operand that is not an immediate
   operand of mode MODE.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}
1237 | ||
/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
	return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* Accept any pseudo, or any hard register valid as an operand in
     this mode.  */
  return (REG_P (op)
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || in_hard_reg_set_p (operand_reg_set,
				    GET_MODE (op), REGNO (op))));
}
1267 | ||
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      /* No padding: a plain pre/post inc/dec push is expected.  */
      if (GET_CODE (op) != STACK_PUSH_CODE)
	return 0;
    }
  else
    {
      /* Padded push: the address must be a PRE_MODIFY that adjusts the
	 stack pointer by exactly the rounded size, in the direction the
	 stack grows.  */
      if (GET_CODE (op) != PRE_MODIFY
	  || GET_CODE (XEXP (op, 1)) != PLUS
	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
	  || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
#ifdef STACK_GROWS_DOWNWARD
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
	  )
	return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
1313 | ||
dcf593ec | 1314 | /* Return 1 if OP is a valid operand that stands for popping a |
1315 | value of mode MODE off the stack. | |
1316 | ||
1317 | The main use of this function is as a predicate in match_operand | |
1318 | expressions in the machine description. */ | |
1319 | ||
1320 | int | |
3ad4992f | 1321 | pop_operand (rtx op, enum machine_mode mode) |
dcf593ec | 1322 | { |
e16ceb8e | 1323 | if (!MEM_P (op)) |
dcf593ec | 1324 | return 0; |
1325 | ||
582258fb | 1326 | if (mode != VOIDmode && GET_MODE (op) != mode) |
dcf593ec | 1327 | return 0; |
1328 | ||
1329 | op = XEXP (op, 0); | |
1330 | ||
1331 | if (GET_CODE (op) != STACK_POP_CODE) | |
1332 | return 0; | |
1333 | ||
1334 | return XEXP (op, 0) == stack_pointer_rtx; | |
1335 | } | |
1336 | ||
/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
			     rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  /* The legacy target macro knows nothing of address spaces, so it is
     only usable for the generic one.  */
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  /* The macro jumps to WIN when the address is legitimate.  */
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}
1355 | ||
1356 | /* Return 1 if OP is a valid memory reference with mode MODE, | |
1357 | including a valid address. | |
1358 | ||
1359 | The main use of this function is as a predicate in match_operand | |
1360 | expressions in the machine description. */ | |
1361 | ||
1362 | int | |
3ad4992f | 1363 | memory_operand (rtx op, enum machine_mode mode) |
431ad2a9 | 1364 | { |
1365 | rtx inner; | |
1366 | ||
1367 | if (! reload_completed) | |
1368 | /* Note that no SUBREG is a memory operand before end of reload pass, | |
1369 | because (SUBREG (MEM...)) forces reloading into a register. */ | |
e16ceb8e | 1370 | return MEM_P (op) && general_operand (op, mode); |
431ad2a9 | 1371 | |
1372 | if (mode != VOIDmode && GET_MODE (op) != mode) | |
1373 | return 0; | |
1374 | ||
1375 | inner = op; | |
1376 | if (GET_CODE (inner) == SUBREG) | |
1377 | inner = SUBREG_REG (inner); | |
1378 | ||
e16ceb8e | 1379 | return (MEM_P (inner) && general_operand (op, mode)); |
431ad2a9 | 1380 | } |
1381 | ||
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
	      || (GET_CODE (XEXP (inner, 0)) == PLUS
		  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  /* Ordinary case: a MEM whose address is itself a general operand.  */
  return (MEM_P (op)
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}
1414 | ||
c429965c | 1415 | /* Return 1 if this is an ordered comparison operator (not including |
1416 | ORDERED and UNORDERED). */ | |
1417 | ||
1418 | int | |
1419 | ordered_comparison_operator (rtx op, enum machine_mode mode) | |
1420 | { | |
1421 | if (mode != VOIDmode && GET_MODE (op) != mode) | |
1422 | return false; | |
1423 | switch (GET_CODE (op)) | |
1424 | { | |
1425 | case EQ: | |
1426 | case NE: | |
1427 | case LT: | |
1428 | case LTU: | |
1429 | case LE: | |
1430 | case LEU: | |
1431 | case GT: | |
1432 | case GTU: | |
1433 | case GE: | |
1434 | case GEU: | |
1435 | return true; | |
1436 | default: | |
1437 | return false; | |
1438 | } | |
1439 | } | |
1440 | ||
/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
	  && COMPARISON_P (op));
}
1450 | \f | |
78f55ca8 | 1451 | /* If BODY is an insn body that uses ASM_OPERANDS, return it. */ |
1452 | ||
1453 | rtx | |
1454 | extract_asm_operands (rtx body) | |
1455 | { | |
1456 | rtx tmp; | |
1457 | switch (GET_CODE (body)) | |
1458 | { | |
1459 | case ASM_OPERANDS: | |
1460 | return body; | |
1461 | ||
1462 | case SET: | |
1463 | /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */ | |
1464 | tmp = SET_SRC (body); | |
1465 | if (GET_CODE (tmp) == ASM_OPERANDS) | |
1466 | return tmp; | |
1467 | break; | |
1468 | ||
1469 | case PARALLEL: | |
1470 | tmp = XVECEXP (body, 0, 0); | |
1471 | if (GET_CODE (tmp) == ASM_OPERANDS) | |
1472 | return tmp; | |
1473 | if (GET_CODE (tmp) == SET) | |
1474 | { | |
1475 | tmp = SET_SRC (tmp); | |
1476 | if (GET_CODE (tmp) == ASM_OPERANDS) | |
1477 | return tmp; | |
1478 | } | |
1479 | break; | |
1480 | ||
1481 | default: | |
1482 | break; | |
1483 | } | |
1484 | return NULL; | |
1485 | } | |
1486 | ||
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int n_sets = 0;

  if (asm_op == NULL)
    return -1;

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is
	     [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
	  /* Count backwards through CLOBBERs to determine number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
		 then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (asm_op))
		return -1;
	    }
	}
      else
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...) (clobber (reg ...))...].  */
	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;
	}
    }

  /* Total operand count = inputs + labels + outputs.  */
  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
	  + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}
1553 | ||
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
		     const char **constraints, enum machine_mode *modes,
		     location_t *loc)
{
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
	int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

	asmop = XVECEXP (body, 0, 0);
	if (GET_CODE (asmop) == SET)
	  {
	    asmop = SET_SRC (asmop);

	    /* At least one output, plus some CLOBBERs.  The outputs are in
	       the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
	    for (i = 0; i < nparallel; i++)
	      {
		if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
		  break;		/* Past last SET */
		if (operands)
		  operands[i] = SET_DEST (XVECEXP (body, 0, i));
		if (operand_locs)
		  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
		if (constraints)
		  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
		if (modes)
		  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	      }
	    nbase = i;
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Outputs (if any) are recorded above; inputs follow at NBASE.  */
  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
	operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
	operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
	constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
	modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  /* Finally the "asm goto" label operands: empty constraint, Pmode.  */
  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
	operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
	operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
	constraints[nbase + i] = "";
      if (modes)
	modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
6d2e66f1 | 1661 | |
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.

   OP is the operand rtx; CONSTRAINT is its (possibly multi-alternative)
   constraint string.  CONSTRAINTS, if non-null, is the full array of
   constraint strings for the asm, used to resolve matching-digit
   constraints by recursing on the referenced operand's constraint.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;
#ifdef AUTO_INC_DEC
  /* Set when some constraint letter ('<' or '>') explicitly allows an
     auto-inc/dec address; otherwise such addresses are rejected at the
     end of this function.  */
  bool incdec_ok = false;
#endif

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')
    result = 1;

  /* Scan the constraint letter by letter; any alternative that accepts
     OP makes the whole constraint succeed.  */
  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
	{
	case ',':
	  constraint++;
	  continue;
	case '=':
	case '+':
	case '*':
	case '%':
	case '!':
	case '#':
	case '&':
	case '?':
	  /* Modifiers and reject hints don't constrain the operand.  */
	  break;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* If caller provided constraints pointer, look up
	     the matching constraint.  Otherwise, our caller should have
	     given us the proper matching constraint, but we can't
	     actually fail the check if they didn't.  Indicate that
	     results are inconclusive.  */
	  if (constraints)
	    {
	      char *end;
	      unsigned long match;

	      match = strtoul (constraint, &end, 10);
	      if (!result)
		result = asm_operand_ok (op, constraints[match], NULL);
	      constraint = (const char *) end;
	    }
	  else
	    {
	      do
		constraint++;
	      while (ISDIGIT (*constraint));
	      if (! result)
		result = -1;
	    }
	  continue;

	case 'p':
	  if (address_operand (op, VOIDmode))
	    result = 1;
	  break;

	case TARGET_MEM_CONSTRAINT:
	case 'V': /* non-offsettable */
	  if (memory_operand (op, VOIDmode))
	    result = 1;
	  break;

	case 'o': /* offsettable */
	  if (offsettable_nonstrict_memref_p (op))
	    result = 1;
	  break;

	case '<':
	  /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed to exist,
	     excepting those that expand_call created.  Further, on some
	     machines which do not have generalized auto inc/dec, an inc/dec
	     is not a memory_operand.

	     Match any memory and hope things are resolved after reload.  */

	  /* Note the deliberate "(1 || ...)": the code-specific checks are
	     disabled and any MEM is accepted here.  */
	  if (MEM_P (op)
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_DEC
		  || GET_CODE (XEXP (op, 0)) == POST_DEC))
	    result = 1;
#ifdef AUTO_INC_DEC
	  incdec_ok = true;
#endif
	  break;

	case '>':
	  if (MEM_P (op)
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_INC
		  || GET_CODE (XEXP (op, 0)) == POST_INC))
	    result = 1;
#ifdef AUTO_INC_DEC
	  incdec_ok = true;
#endif
	  break;

	case 'E':
	case 'F':
	  if (CONST_DOUBLE_AS_FLOAT_P (op)
	      || (GET_CODE (op) == CONST_VECTOR
		  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
	    result = 1;
	  break;

	case 'G':
	  if (CONST_DOUBLE_AS_FLOAT_P (op)
	      && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
	    result = 1;
	  break;
	case 'H':
	  if (CONST_DOUBLE_AS_FLOAT_P (op)
	      && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
	    result = 1;
	  break;

	case 's':
	  /* 's' excludes numeric constants; a scalar integer fails this
	     letter, everything else falls through to the 'i' test.  */
	  if (CONST_SCALAR_INT_P (op))
	    break;
	  /* Fall through.  */

	case 'i':
	  if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
	    result = 1;
	  break;

	case 'n':
	  if (CONST_SCALAR_INT_P (op))
	    result = 1;
	  break;

	case 'I':
	  if (CONST_INT_P (op)
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
	    result = 1;
	  break;
	case 'J':
	  if (CONST_INT_P (op)
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
	    result = 1;
	  break;
	case 'K':
	  if (CONST_INT_P (op)
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
	    result = 1;
	  break;
	case 'L':
	  if (CONST_INT_P (op)
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
	    result = 1;
	  break;
	case 'M':
	  if (CONST_INT_P (op)
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
	    result = 1;
	  break;
	case 'N':
	  if (CONST_INT_P (op)
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
	    result = 1;
	  break;
	case 'O':
	  if (CONST_INT_P (op)
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
	    result = 1;
	  break;
	case 'P':
	  if (CONST_INT_P (op)
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
	    result = 1;
	  break;

	case 'X':
	  result = 1;
	  break;

	case 'g':
	  if (general_operand (op, VOIDmode))
	    result = 1;
	  break;

	default:
	  /* For all other letters, we first check for a register class,
	     otherwise it is an EXTRA_CONSTRAINT.  */
	  /* NOTE: the "case 'r':" below deliberately jumps into the body
	     of this if: plain 'r' shares the register check without
	     consulting REG_CLASS_FROM_CONSTRAINT.  */
	  if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
	    {
	    case 'r':
	      if (GET_MODE (op) == BLKmode)
		break;
	      if (register_operand (op, VOIDmode))
		result = 1;
	    }
#ifdef EXTRA_CONSTRAINT_STR
	  else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
	    /* Every memory operand can be reloaded to fit.  */
	    result = result || memory_operand (op, VOIDmode);
	  else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
	    /* Every address operand can be reloaded to fit.  */
	    result = result || address_operand (op, VOIDmode);
	  else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
	    result = 1;
#endif
	  break;
	}
      /* Advance past the whole (possibly multi-character) constraint;
         running off the end of the string mid-constraint is a failure.  */
      len = CONSTRAINT_LEN (c, constraint);
      do
	constraint++;
      while (--len && *constraint);
      if (len)
	return 0;
    }

#ifdef AUTO_INC_DEC
  /* For operands without < or > constraints reject side-effects.  */
  if (!incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
      {
      case PRE_INC:
      case POST_INC:
      case PRE_DEC:
      case POST_DEC:
      case PRE_MODIFY:
      case POST_MODIFY:
	return 0;
      default:
	break;
      }
#endif

  return result;
}
431ad2a9 | 1906 | \f |
431ad2a9 | 1907 | /* Given an rtx *P, if it is a sum containing an integer constant term, |
1908 | return the location (type rtx *) of the pointer to that constant term. | |
1909 | Otherwise, return a null pointer. */ | |
1910 | ||
eafc6604 | 1911 | rtx * |
3ad4992f | 1912 | find_constant_term_loc (rtx *p) |
431ad2a9 | 1913 | { |
19cb6b50 | 1914 | rtx *tem; |
1915 | enum rtx_code code = GET_CODE (*p); | |
431ad2a9 | 1916 | |
1917 | /* If *P IS such a constant term, P is its location. */ | |
1918 | ||
1919 | if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF | |
1920 | || code == CONST) | |
1921 | return p; | |
1922 | ||
1923 | /* Otherwise, if not a sum, it has no constant term. */ | |
1924 | ||
1925 | if (GET_CODE (*p) != PLUS) | |
1926 | return 0; | |
1927 | ||
1928 | /* If one of the summands is constant, return its location. */ | |
1929 | ||
1930 | if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0)) | |
1931 | && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1))) | |
1932 | return p; | |
1933 | ||
1934 | /* Otherwise, check each summand for containing a constant term. */ | |
1935 | ||
1936 | if (XEXP (*p, 0) != 0) | |
1937 | { | |
1938 | tem = find_constant_term_loc (&XEXP (*p, 0)); | |
1939 | if (tem != 0) | |
1940 | return tem; | |
1941 | } | |
1942 | ||
1943 | if (XEXP (*p, 1) != 0) | |
1944 | { | |
1945 | tem = find_constant_term_loc (&XEXP (*p, 1)); | |
1946 | if (tem != 0) | |
1947 | return tem; | |
1948 | } | |
1949 | ||
1950 | return 0; | |
1951 | } | |
1952 | \f | |
1953 | /* Return 1 if OP is a memory reference | |
1954 | whose address contains no side effects | |
1955 | and remains valid after the addition | |
1956 | of a positive integer less than the | |
1957 | size of the object being referenced. | |
1958 | ||
1959 | We assume that the original address is valid and do not check it. | |
1960 | ||
1961 | This uses strict_memory_address_p as a subroutine, so | |
1962 | don't use it before reload. */ | |
1963 | ||
1964 | int | |
3ad4992f | 1965 | offsettable_memref_p (rtx op) |
431ad2a9 | 1966 | { |
e16ceb8e | 1967 | return ((MEM_P (op)) |
bd1a81f7 | 1968 | && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0), |
1969 | MEM_ADDR_SPACE (op))); | |
431ad2a9 | 1970 | } |
1971 | ||
1972 | /* Similar, but don't require a strictly valid mem ref: | |
1973 | consider pseudo-regs valid as index or base regs. */ | |
1974 | ||
1975 | int | |
3ad4992f | 1976 | offsettable_nonstrict_memref_p (rtx op) |
431ad2a9 | 1977 | { |
e16ceb8e | 1978 | return ((MEM_P (op)) |
bd1a81f7 | 1979 | && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0), |
1980 | MEM_ADDR_SPACE (op))); | |
431ad2a9 | 1981 | } |
1982 | ||
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid for address space AS after the addition of
   a positive integer less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_addr_space_p (int strictp, enum machine_mode mode, rtx y,
				  addr_space_t as)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  /* Pick the strict or non-strict address validator up front so the
     two code paths below can share one call site.  */
  int (*addressp) (enum machine_mode, rtx, addr_space_t) =
    (strictp ? strict_memory_address_addr_space_p
	     : memory_address_addr_space_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y, as))
    return 0;

  enum machine_mode address_mode = GET_MODE (y);
  if (address_mode == VOIDmode)
    address_mode = targetm.addr_space.address_mode (as);
#ifdef POINTERS_EXTEND_UNSIGNED
  enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
#endif

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      /* Temporarily bump the constant term in place, test the address,
	 then restore the original rtx contents before returning.  */
      y1 = *y2;
      *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y, as);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
			plus_constant (address_mode, XEXP (y, 1),
				       mode_sz - 1));
#ifdef POINTERS_EXTEND_UNSIGNED
  /* Likewise for a ZERO_EXTEND from pointer_mode.  */
  else if (POINTERS_EXTEND_UNSIGNED > 0
	   && GET_CODE (y) == ZERO_EXTEND
	   && GET_MODE (XEXP (y, 0)) == pointer_mode)
    z = gen_rtx_ZERO_EXTEND (address_mode,
			     plus_constant (pointer_mode, XEXP (y, 0),
					    mode_sz - 1));
#endif
  else
    z = plus_constant (address_mode, y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z, as);
}
2077 | ||
2078 | /* Return 1 if ADDR is an address-expression whose effect depends | |
2079 | on the mode of the memory reference it is used in. | |
2080 | ||
4e27ffd0 | 2081 | ADDRSPACE is the address space associated with the address. |
2082 | ||
431ad2a9 | 2083 | Autoincrement addressing is a typical example of mode-dependence |
2084 | because the amount of the increment depends on the mode. */ | |
2085 | ||
98e22cb6 | 2086 | bool |
4e27ffd0 | 2087 | mode_dependent_address_p (rtx addr, addr_space_t addrspace) |
431ad2a9 | 2088 | { |
cad2a6b5 | 2089 | /* Auto-increment addressing with anything other than post_modify |
2090 | or pre_modify always introduces a mode dependency. Catch such | |
2091 | cases now instead of deferring to the target. */ | |
2092 | if (GET_CODE (addr) == PRE_INC | |
2093 | || GET_CODE (addr) == POST_INC | |
2094 | || GET_CODE (addr) == PRE_DEC | |
2095 | || GET_CODE (addr) == POST_DEC) | |
98e22cb6 | 2096 | return true; |
cad2a6b5 | 2097 | |
4e27ffd0 | 2098 | return targetm.mode_dependent_address_p (addr, addrspace); |
431ad2a9 | 2099 | } |
431ad2a9 | 2100 | \f |
0edc2b1c | 2101 | /* Like extract_insn, but save insn extracted and don't extract again, when |
2102 | called again for the same insn expecting that recog_data still contain the | |
2103 | valid information. This is used primary by gen_attr infrastructure that | |
2104 | often does extract insn again and again. */ | |
2105 | void | |
3ad4992f | 2106 | extract_insn_cached (rtx insn) |
0edc2b1c | 2107 | { |
2108 | if (recog_data.insn == insn && INSN_CODE (insn) >= 0) | |
2109 | return; | |
2110 | extract_insn (insn); | |
2111 | recog_data.insn = insn; | |
2112 | } | |
374e3eb3 | 2113 | |
76cfd4f9 | 2114 | /* Do cached extract_insn, constrain_operands and complain about failures. |
0edc2b1c | 2115 | Used by insn_attrtab. */ |
2116 | void | |
3ad4992f | 2117 | extract_constrain_insn_cached (rtx insn) |
0edc2b1c | 2118 | { |
2119 | extract_insn_cached (insn); | |
2120 | if (which_alternative == -1 | |
2121 | && !constrain_operands (reload_completed)) | |
2122 | fatal_insn_not_found (insn); | |
2123 | } | |
374e3eb3 | 2124 | |
76cfd4f9 | 2125 | /* Do cached constrain_operands and complain about failures. */ |
60f1f5cc | 2126 | int |
3ad4992f | 2127 | constrain_operands_cached (int strict) |
60f1f5cc | 2128 | { |
2129 | if (which_alternative == -1) | |
2130 | return constrain_operands (strict); | |
2131 | else | |
2132 | return 1; | |
2133 | } | |
0edc2b1c | 2134 | \f |
/* Analyze INSN and fill in recog_data.  */

void
extract_insn (rtx insn)
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;
  recog_data.is_asm = false;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case VAR_LOCATION:
      /* These patterns have no operands to extract.  */
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
    case PARALLEL:
      /* A PARALLEL is an asm if its first element is an ASM_OPERANDS,
	 either directly or as the source of a SET.  */
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
	{
	  /* This insn is an `asm' with operands.  */

	  /* expand_asm_operands makes sure there aren't too many operands.  */
	  gcc_assert (noperands <= MAX_RECOG_OPERANDS);

	  /* Now get the operand values and constraints out of the insn.  */
	  decode_asm_operands (body, recog_data.operand,
			       recog_data.operand_loc,
			       recog_data.constraints,
			       recog_data.operand_mode, NULL);
	  memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
	  if (noperands > 0)
	    {
	      /* The number of alternatives is one more than the number of
		 commas in the first operand's constraint string.  */
	      const char *p =  recog_data.constraints[0];
	      recog_data.n_alternatives = 1;
	      while (*p)
		recog_data.n_alternatives += (*p++ == ',');
	    }
	  recog_data.is_asm = true;
	  break;
	}
      /* asm_noperands rejected the pattern.  */
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
	 and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
	fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
	{
	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
	  recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
	  /* VOIDmode match_operands gets mode from their real operand.  */
	  if (recog_data.operand_mode[i] == VOIDmode)
	    recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
	}
    }
  /* Classify each operand as input, output, or in-out from the leading
     '=' or '+' of its constraint.  */
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
	 : recog_data.constraints[i][0] == '+' ? OP_INOUT
	 : OP_IN);

  gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);

  /* Compute per-alternative enabled flags; for unrecognized insns (asm)
     every alternative is enabled.  */
  if (INSN_CODE (insn) < 0)
    for (i = 0; i < recog_data.n_alternatives; i++)
      recog_data.alternative_enabled_p[i] = true;
  else
    {
      recog_data.insn = insn;
      for (i = 0; i < recog_data.n_alternatives; i++)
	{
	  /* get_attr_enabled reads which_alternative, so set it first.  */
	  which_alternative = i;
	  recog_data.alternative_enabled_p[i]
	    = HAVE_ATTR_enabled ? get_attr_enabled (insn) : 1;
	}
    }

  recog_data.insn = NULL;
  which_alternative = -1;
}
2250 | ||
/* After calling extract_insn, you can use this function to extract some
   information from the constraint strings into a more usable form.
   The collected data is stored in recog_op_alt.  */
void
preprocess_constraints (void)
{
  int i;

  /* Zero the whole table first; individual fields are OR-ed in below.  */
  for (i = 0; i < recog_data.n_operands; i++)
    memset (recog_op_alt[i], 0, (recog_data.n_alternatives
				 * sizeof (struct operand_alternative)));

  for (i = 0; i < recog_data.n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      const char *p = recog_data.constraints[i];

      op_alt = recog_op_alt[i];

      /* Walk the comma-separated alternatives of operand I's constraint;
	 P advances through the string as each alternative is consumed.  */
      for (j = 0; j < recog_data.n_alternatives; j++)
	{
	  op_alt[j].cl = NO_REGS;
	  op_alt[j].constraint = p;
	  op_alt[j].matches = -1;
	  op_alt[j].matched = -1;

	  if (!recog_data.alternative_enabled_p[j])
	    {
	      p = skip_alternative (p);
	      continue;
	    }

	  /* An empty alternative accepts anything.  */
	  if (*p == '\0' || *p == ',')
	    {
	      op_alt[j].anything_ok = 1;
	      continue;
	    }

	  for (;;)
	    {
	      char c = *p;
	      /* '#' comments out the rest of this alternative.  */
	      if (c == '#')
		do
		  c = *++p;
		while (c != ',' && c != '\0');
	      if (c == ',' || c == '\0')
		{
		  p++;
		  break;
		}

	      switch (c)
		{
		case '=': case '+': case '*': case '%':
		case 'E': case 'F': case 'G': case 'H':
		case 's': case 'i': case 'n':
		case 'I': case 'J': case 'K': case 'L':
		case 'M': case 'N': case 'O': case 'P':
		  /* These don't say anything we care about.  */
		  break;

		case '?':
		  op_alt[j].reject += 6;
		  break;
		case '!':
		  op_alt[j].reject += 600;
		  break;
		case '&':
		  op_alt[j].earlyclobber = 1;
		  break;

		case '0': case '1': case '2': case '3': case '4':
		case '5': case '6': case '7': case '8': case '9':
		  {
		    /* Matching constraint: record the link both ways,
		       from this operand and on the matched operand.  */
		    char *end;
		    op_alt[j].matches = strtoul (p, &end, 10);
		    recog_op_alt[op_alt[j].matches][j].matched = i;
		    p = end;
		  }
		  continue;

		case TARGET_MEM_CONSTRAINT:
		  op_alt[j].memory_ok = 1;
		  break;
		case '<':
		  op_alt[j].decmem_ok = 1;
		  break;
		case '>':
		  op_alt[j].incmem_ok = 1;
		  break;
		case 'V':
		  op_alt[j].nonoffmem_ok = 1;
		  break;
		case 'o':
		  op_alt[j].offmem_ok = 1;
		  break;
		case 'X':
		  op_alt[j].anything_ok = 1;
		  break;

		case 'p':
		  op_alt[j].is_address = 1;
		  op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
		      [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
					     ADDRESS, SCRATCH)];
		  break;

		case 'g':
		case 'r':
		  op_alt[j].cl =
		   reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
		  break;

		default:
		  /* Target-specific constraint letters: memory, address,
		     or a register class, in that order of testing.  */
		  if (EXTRA_MEMORY_CONSTRAINT (c, p))
		    {
		      op_alt[j].memory_ok = 1;
		      break;
		    }
		  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
		    {
		      op_alt[j].is_address = 1;
		      op_alt[j].cl
			= (reg_class_subunion
			   [(int) op_alt[j].cl]
			   [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
						  ADDRESS, SCRATCH)]);
		      break;
		    }

		  op_alt[j].cl
		    = (reg_class_subunion
		       [(int) op_alt[j].cl]
		       [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
		  break;
		}
	      p += CONSTRAINT_LEN (c, p);
	    }
	}
    }
}
2617fe26 | 2393 | |
7f82be90 | 2394 | /* Check the operands of an insn against the insn's operand constraints |
431ad2a9 | 2395 | and return 1 if they are valid. |
7f82be90 | 2396 | The information about the insn's operands, constraints, operand modes |
2397 | etc. is obtained from the global variables set up by extract_insn. | |
431ad2a9 | 2398 | |
2399 | WHICH_ALTERNATIVE is set to a number which indicates which | |
2400 | alternative of constraints was matched: 0 for the first alternative, | |
2401 | 1 for the next, etc. | |
2402 | ||
48ea5577 | 2403 | In addition, when two operands are required to match |
431ad2a9 | 2404 | and it happens that the output operand is (reg) while the |
2405 | input operand is --(reg) or ++(reg) (a pre-inc or pre-dec), | |
2406 | make the output operand look like the input. | |
2407 | This is because the output operand is the one the template will print. | |
2408 | ||
2409 | This is used in final, just before printing the assembler code and by | |
2410 | the routines that determine an insn's attribute. | |
2411 | ||
7fd957fe | 2412 | If STRICT is a positive nonzero value, it means that we have been |
431ad2a9 | 2413 | called after reload has been completed. In that case, we must |
2414 | do all checks strictly. If it is zero, it means that we have been called | |
2415 | before reload has completed. In that case, we first try to see if we can | |
2416 | find an alternative that matches strictly. If not, we try again, this | |
2417 | time assuming that reload will fix up the insn. This provides a "best | |
2418 | guess" for the alternative and is used to compute attributes of insns prior | |
2419 | to reload. A negative value of STRICT is used for this internal call. */ | |
2420 | ||
/* Record of a pair of operands linked by a matching constraint, noted
   while constrain_operands scans an alternative.  NOTE(review): based on
   the comment above constrain_operands, THIS_OP appears to be the operand
   being scanned and OTHER the operand it must match, used later to make an
   output operand mirror an auto-inc/dec input -- confirm against the uses
   of funny_match[] further down in constrain_operands (not fully visible
   in this chunk).  */
struct funny_match
{
  int this_op, other;
};
2425 | ||
2426 | int | |
3ad4992f | 2427 | constrain_operands (int strict) |
431ad2a9 | 2428 | { |
a8482e91 | 2429 | const char *constraints[MAX_RECOG_OPERANDS]; |
3652aed1 | 2430 | int matching_operands[MAX_RECOG_OPERANDS]; |
3652aed1 | 2431 | int earlyclobber[MAX_RECOG_OPERANDS]; |
19cb6b50 | 2432 | int c; |
431ad2a9 | 2433 | |
2434 | struct funny_match funny_match[MAX_RECOG_OPERANDS]; | |
2435 | int funny_match_index; | |
431ad2a9 | 2436 | |
d05ae338 | 2437 | which_alternative = 0; |
ed420a25 | 2438 | if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0) |
431ad2a9 | 2439 | return 1; |
2440 | ||
ed420a25 | 2441 | for (c = 0; c < recog_data.n_operands; c++) |
3652aed1 | 2442 | { |
ed420a25 | 2443 | constraints[c] = recog_data.constraints[c]; |
3652aed1 | 2444 | matching_operands[c] = -1; |
3652aed1 | 2445 | } |
431ad2a9 | 2446 | |
d05ae338 | 2447 | do |
431ad2a9 | 2448 | { |
c937de78 | 2449 | int seen_earlyclobber_at = -1; |
19cb6b50 | 2450 | int opno; |
431ad2a9 | 2451 | int lose = 0; |
2452 | funny_match_index = 0; | |
2453 | ||
39a1a66f | 2454 | if (!recog_data.alternative_enabled_p[which_alternative]) |
2455 | { | |
2456 | int i; | |
2457 | ||
2458 | for (i = 0; i < recog_data.n_operands; i++) | |
2459 | constraints[i] = skip_alternative (constraints[i]); | |
2460 | ||
2461 | which_alternative++; | |
2462 | continue; | |
2463 | } | |
2464 | ||
ed420a25 | 2465 | for (opno = 0; opno < recog_data.n_operands; opno++) |
431ad2a9 | 2466 | { |
19cb6b50 | 2467 | rtx op = recog_data.operand[opno]; |
431ad2a9 | 2468 | enum machine_mode mode = GET_MODE (op); |
19cb6b50 | 2469 | const char *p = constraints[opno]; |
431ad2a9 | 2470 | int offset = 0; |
2471 | int win = 0; | |
2472 | int val; | |
48ea5577 | 2473 | int len; |
431ad2a9 | 2474 | |
3652aed1 | 2475 | earlyclobber[opno] = 0; |
2476 | ||
7d00647e | 2477 | /* A unary operator may be accepted by the predicate, but it |
941522d6 | 2478 | is irrelevant for matching constraints. */ |
6720e96c | 2479 | if (UNARY_P (op)) |
7d00647e | 2480 | op = XEXP (op, 0); |
2481 | ||
431ad2a9 | 2482 | if (GET_CODE (op) == SUBREG) |
2483 | { | |
8ad4c111 | 2484 | if (REG_P (SUBREG_REG (op)) |
431ad2a9 | 2485 | && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER) |
701e46d0 | 2486 | offset = subreg_regno_offset (REGNO (SUBREG_REG (op)), |
2487 | GET_MODE (SUBREG_REG (op)), | |
2488 | SUBREG_BYTE (op), | |
2489 | GET_MODE (op)); | |
431ad2a9 | 2490 | op = SUBREG_REG (op); |
2491 | } | |
2492 | ||
2493 | /* An empty constraint or empty alternative | |
2494 | allows anything which matched the pattern. */ | |
2495 | if (*p == 0 || *p == ',') | |
2496 | win = 1; | |
2497 | ||
48ea5577 | 2498 | do |
2499 | switch (c = *p, len = CONSTRAINT_LEN (c, p), c) | |
431ad2a9 | 2500 | { |
48ea5577 | 2501 | case '\0': |
2502 | len = 0; | |
2503 | break; | |
2504 | case ',': | |
2505 | c = '\0'; | |
2506 | break; | |
2507 | ||
7014838c | 2508 | case '?': case '!': case '*': case '%': |
2509 | case '=': case '+': | |
431ad2a9 | 2510 | break; |
2511 | ||
ae9c5250 | 2512 | case '#': |
2513 | /* Ignore rest of this alternative as far as | |
2514 | constraint checking is concerned. */ | |
48ea5577 | 2515 | do |
ae9c5250 | 2516 | p++; |
48ea5577 | 2517 | while (*p && *p != ','); |
2518 | len = 0; | |
ae9c5250 | 2519 | break; |
2520 | ||
3652aed1 | 2521 | case '&': |
2522 | earlyclobber[opno] = 1; | |
c937de78 | 2523 | if (seen_earlyclobber_at < 0) |
2524 | seen_earlyclobber_at = opno; | |
3652aed1 | 2525 | break; |
2526 | ||
7014838c | 2527 | case '0': case '1': case '2': case '3': case '4': |
2528 | case '5': case '6': case '7': case '8': case '9': | |
2c7f203c | 2529 | { |
2530 | /* This operand must be the same as a previous one. | |
2531 | This kind of constraint is used for instructions such | |
2532 | as add when they take only two operands. | |
2533 | ||
2534 | Note that the lower-numbered operand is passed first. | |
2535 | ||
2536 | If we are not testing strictly, assume that this | |
2537 | constraint will be satisfied. */ | |
2538 | ||
2539 | char *end; | |
2540 | int match; | |
2541 | ||
48ea5577 | 2542 | match = strtoul (p, &end, 10); |
2c7f203c | 2543 | p = end; |
2544 | ||
2545 | if (strict < 0) | |
2546 | val = 1; | |
2547 | else | |
2548 | { | |
2549 | rtx op1 = recog_data.operand[match]; | |
2550 | rtx op2 = recog_data.operand[opno]; | |
2551 | ||
2552 | /* A unary operator may be accepted by the predicate, | |
2553 | but it is irrelevant for matching constraints. */ | |
6720e96c | 2554 | if (UNARY_P (op1)) |
2c7f203c | 2555 | op1 = XEXP (op1, 0); |
6720e96c | 2556 | if (UNARY_P (op2)) |
2c7f203c | 2557 | op2 = XEXP (op2, 0); |
2558 | ||
2559 | val = operands_match_p (op1, op2); | |
2560 | } | |
2561 | ||
2562 | matching_operands[opno] = match; | |
2563 | matching_operands[match] = opno; | |
2564 | ||
2565 | if (val != 0) | |
2566 | win = 1; | |
2567 | ||
2568 | /* If output is *x and input is *--x, arrange later | |
2569 | to change the output to *--x as well, since the | |
2570 | output op is the one that will be printed. */ | |
2571 | if (val == 2 && strict > 0) | |
2572 | { | |
47cfb7f4 | 2573 | funny_match[funny_match_index].this_op = opno; |
2c7f203c | 2574 | funny_match[funny_match_index++].other = match; |
2575 | } | |
2576 | } | |
48ea5577 | 2577 | len = 0; |
431ad2a9 | 2578 | break; |
2579 | ||
2580 | case 'p': | |
2581 | /* p is used for address_operands. When we are called by | |
593ffa6c | 2582 | gen_reload, no one will have checked that the address is |
2583 | strictly valid, i.e., that all pseudos requiring hard regs | |
2584 | have gotten them. */ | |
431ad2a9 | 2585 | if (strict <= 0 |
ed420a25 | 2586 | || (strict_memory_address_p (recog_data.operand_mode[opno], |
7f82be90 | 2587 | op))) |
431ad2a9 | 2588 | win = 1; |
2589 | break; | |
2590 | ||
2591 | /* No need to check general_operand again; | |
f6205c6f | 2592 | it was done in insn-recog.c. Well, except that reload |
2593 | doesn't check the validity of its replacements, but | |
2594 | that should only matter when there's a bug. */ | |
431ad2a9 | 2595 | case 'g': |
2596 | /* Anything goes unless it is a REG and really has a hard reg | |
2597 | but the hard reg is not in the class GENERAL_REGS. */ | |
f6205c6f | 2598 | if (REG_P (op)) |
2599 | { | |
2600 | if (strict < 0 | |
2601 | || GENERAL_REGS == ALL_REGS | |
2602 | || (reload_in_progress | |
2603 | && REGNO (op) >= FIRST_PSEUDO_REGISTER) | |
2604 | || reg_fits_class_p (op, GENERAL_REGS, offset, mode)) | |
2605 | win = 1; | |
2606 | } | |
2607 | else if (strict < 0 || general_operand (op, mode)) | |
431ad2a9 | 2608 | win = 1; |
2609 | break; | |
2610 | ||
431ad2a9 | 2611 | case 'X': |
a92771b8 | 2612 | /* This is used for a MATCH_SCRATCH in the cases when |
2613 | we don't actually need anything. So anything goes | |
2614 | any time. */ | |
431ad2a9 | 2615 | win = 1; |
2616 | break; | |
2617 | ||
e9ff93b1 | 2618 | case TARGET_MEM_CONSTRAINT: |
39dc22d5 | 2619 | /* Memory operands must be valid, to the extent |
2620 | required by STRICT. */ | |
e16ceb8e | 2621 | if (MEM_P (op)) |
39dc22d5 | 2622 | { |
2623 | if (strict > 0 | |
bd1a81f7 | 2624 | && !strict_memory_address_addr_space_p |
2625 | (GET_MODE (op), XEXP (op, 0), | |
2626 | MEM_ADDR_SPACE (op))) | |
39dc22d5 | 2627 | break; |
2628 | if (strict == 0 | |
bd1a81f7 | 2629 | && !memory_address_addr_space_p |
2630 | (GET_MODE (op), XEXP (op, 0), | |
2631 | MEM_ADDR_SPACE (op))) | |
39dc22d5 | 2632 | break; |
2633 | win = 1; | |
2634 | } | |
2635 | /* Before reload, accept what reload can turn into mem. */ | |
2636 | else if (strict < 0 && CONSTANT_P (op)) | |
2637 | win = 1; | |
2638 | /* During reload, accept a pseudo */ | |
8ad4c111 | 2639 | else if (reload_in_progress && REG_P (op) |
39dc22d5 | 2640 | && REGNO (op) >= FIRST_PSEUDO_REGISTER) |
431ad2a9 | 2641 | win = 1; |
2642 | break; | |
2643 | ||
2644 | case '<': | |
e16ceb8e | 2645 | if (MEM_P (op) |
431ad2a9 | 2646 | && (GET_CODE (XEXP (op, 0)) == PRE_DEC |
2647 | || GET_CODE (XEXP (op, 0)) == POST_DEC)) | |
2648 | win = 1; | |
2649 | break; | |
2650 | ||
2651 | case '>': | |
e16ceb8e | 2652 | if (MEM_P (op) |
431ad2a9 | 2653 | && (GET_CODE (XEXP (op, 0)) == PRE_INC |
2654 | || GET_CODE (XEXP (op, 0)) == POST_INC)) | |
2655 | win = 1; | |
2656 | break; | |
2657 | ||
2658 | case 'E': | |
431ad2a9 | 2659 | case 'F': |
78f1962f | 2660 | if (CONST_DOUBLE_AS_FLOAT_P (op) |
22dd8d0e | 2661 | || (GET_CODE (op) == CONST_VECTOR |
2662 | && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT)) | |
431ad2a9 | 2663 | win = 1; |
2664 | break; | |
2665 | ||
2666 | case 'G': | |
2667 | case 'H': | |
78f1962f | 2668 | if (CONST_DOUBLE_AS_FLOAT_P (op) |
48ea5577 | 2669 | && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p)) |
431ad2a9 | 2670 | win = 1; |
2671 | break; | |
2672 | ||
2673 | case 's': | |
efa08fc2 | 2674 | if (CONST_SCALAR_INT_P (op)) |
431ad2a9 | 2675 | break; |
2676 | case 'i': | |
2677 | if (CONSTANT_P (op)) | |
2678 | win = 1; | |
2679 | break; | |
2680 | ||
2681 | case 'n': | |
efa08fc2 | 2682 | if (CONST_SCALAR_INT_P (op)) |
431ad2a9 | 2683 | win = 1; |
2684 | break; | |
2685 | ||
2686 | case 'I': | |
2687 | case 'J': | |
2688 | case 'K': | |
2689 | case 'L': | |
2690 | case 'M': | |
2691 | case 'N': | |
2692 | case 'O': | |
2693 | case 'P': | |
971ba038 | 2694 | if (CONST_INT_P (op) |
48ea5577 | 2695 | && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p)) |
431ad2a9 | 2696 | win = 1; |
2697 | break; | |
2698 | ||
431ad2a9 | 2699 | case 'V': |
e16ceb8e | 2700 | if (MEM_P (op) |
501a4cbf | 2701 | && ((strict > 0 && ! offsettable_memref_p (op)) |
2702 | || (strict < 0 | |
e16ceb8e | 2703 | && !(CONSTANT_P (op) || MEM_P (op))) |
501a4cbf | 2704 | || (reload_in_progress |
8ad4c111 | 2705 | && !(REG_P (op) |
501a4cbf | 2706 | && REGNO (op) >= FIRST_PSEUDO_REGISTER)))) |
431ad2a9 | 2707 | win = 1; |
2708 | break; | |
2709 | ||
2710 | case 'o': | |
2711 | if ((strict > 0 && offsettable_memref_p (op)) | |
2712 | || (strict == 0 && offsettable_nonstrict_memref_p (op)) | |
2713 | /* Before reload, accept what reload can handle. */ | |
2714 | || (strict < 0 | |
e16ceb8e | 2715 | && (CONSTANT_P (op) || MEM_P (op))) |
bb552490 | 2716 | /* During reload, accept a pseudo */ |
8ad4c111 | 2717 | || (reload_in_progress && REG_P (op) |
bb552490 | 2718 | && REGNO (op) >= FIRST_PSEUDO_REGISTER)) |
431ad2a9 | 2719 | win = 1; |
2720 | break; | |
2721 | ||
2722 | default: | |
f3653a64 | 2723 | { |
e916c70c | 2724 | enum reg_class cl; |
f3653a64 | 2725 | |
e916c70c | 2726 | cl = (c == 'r' |
48ea5577 | 2727 | ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p)); |
e916c70c | 2728 | if (cl != NO_REGS) |
f3653a64 | 2729 | { |
2730 | if (strict < 0 | |
2731 | || (strict == 0 | |
8ad4c111 | 2732 | && REG_P (op) |
f3653a64 | 2733 | && REGNO (op) >= FIRST_PSEUDO_REGISTER) |
2734 | || (strict == 0 && GET_CODE (op) == SCRATCH) | |
8ad4c111 | 2735 | || (REG_P (op) |
e916c70c | 2736 | && reg_fits_class_p (op, cl, offset, mode))) |
f3653a64 | 2737 | win = 1; |
2738 | } | |
48ea5577 | 2739 | #ifdef EXTRA_CONSTRAINT_STR |
2740 | else if (EXTRA_CONSTRAINT_STR (op, c, p)) | |
f3653a64 | 2741 | win = 1; |
a5004c3d | 2742 | |
76cfd4f9 | 2743 | else if (EXTRA_MEMORY_CONSTRAINT (c, p) |
2744 | /* Every memory operand can be reloaded to fit. */ | |
e16ceb8e | 2745 | && ((strict < 0 && MEM_P (op)) |
76cfd4f9 | 2746 | /* Before reload, accept what reload can turn |
2747 | into mem. */ | |
2748 | || (strict < 0 && CONSTANT_P (op)) | |
2749 | /* During reload, accept a pseudo */ | |
8ad4c111 | 2750 | || (reload_in_progress && REG_P (op) |
76cfd4f9 | 2751 | && REGNO (op) >= FIRST_PSEUDO_REGISTER))) |
2752 | win = 1; | |
2753 | else if (EXTRA_ADDRESS_CONSTRAINT (c, p) | |
2754 | /* Every address operand can be reloaded to fit. */ | |
2755 | && strict < 0) | |
2756 | win = 1; | |
50b58916 | 2757 | /* Cater to architectures like IA-64 that define extra memory |
2758 | constraints without using define_memory_constraint. */ | |
2759 | else if (reload_in_progress | |
2760 | && REG_P (op) | |
2761 | && REGNO (op) >= FIRST_PSEUDO_REGISTER | |
2762 | && reg_renumber[REGNO (op)] < 0 | |
2763 | && reg_equiv_mem (REGNO (op)) != 0 | |
2764 | && EXTRA_CONSTRAINT_STR | |
2765 | (reg_equiv_mem (REGNO (op)), c, p)) | |
2766 | win = 1; | |
f3653a64 | 2767 | #endif |
2768 | break; | |
2769 | } | |
431ad2a9 | 2770 | } |
48ea5577 | 2771 | while (p += len, c); |
431ad2a9 | 2772 | |
2773 | constraints[opno] = p; | |
2774 | /* If this operand did not win somehow, | |
2775 | this alternative loses. */ | |
2776 | if (! win) | |
2777 | lose = 1; | |
2778 | } | |
2779 | /* This alternative won; the operands are ok. | |
2780 | Change whichever operands this alternative says to change. */ | |
2781 | if (! lose) | |
2782 | { | |
3652aed1 | 2783 | int opno, eopno; |
2784 | ||
2785 | /* See if any earlyclobber operand conflicts with some other | |
2786 | operand. */ | |
2787 | ||
c937de78 | 2788 | if (strict > 0 && seen_earlyclobber_at >= 0) |
2789 | for (eopno = seen_earlyclobber_at; | |
2790 | eopno < recog_data.n_operands; | |
2791 | eopno++) | |
fc67ee74 | 2792 | /* Ignore earlyclobber operands now in memory, |
2793 | because we would often report failure when we have | |
2794 | two memory operands, one of which was formerly a REG. */ | |
2795 | if (earlyclobber[eopno] | |
8ad4c111 | 2796 | && REG_P (recog_data.operand[eopno])) |
ed420a25 | 2797 | for (opno = 0; opno < recog_data.n_operands; opno++) |
e16ceb8e | 2798 | if ((MEM_P (recog_data.operand[opno]) |
ed420a25 | 2799 | || recog_data.operand_type[opno] != OP_OUT) |
3652aed1 | 2800 | && opno != eopno |
a92771b8 | 2801 | /* Ignore things like match_operator operands. */ |
ed420a25 | 2802 | && *recog_data.constraints[opno] != 0 |
3652aed1 | 2803 | && ! (matching_operands[opno] == eopno |
ed420a25 | 2804 | && operands_match_p (recog_data.operand[opno], |
2805 | recog_data.operand[eopno])) | |
2806 | && ! safe_from_earlyclobber (recog_data.operand[opno], | |
2807 | recog_data.operand[eopno])) | |
3652aed1 | 2808 | lose = 1; |
2809 | ||
2810 | if (! lose) | |
431ad2a9 | 2811 | { |
3652aed1 | 2812 | while (--funny_match_index >= 0) |
2813 | { | |
ed420a25 | 2814 | recog_data.operand[funny_match[funny_match_index].other] |
47cfb7f4 | 2815 | = recog_data.operand[funny_match[funny_match_index].this_op]; |
3652aed1 | 2816 | } |
2817 | ||
4a45595d | 2818 | #ifdef AUTO_INC_DEC |
2819 | /* For operands without < or > constraints reject side-effects. */ | |
2820 | if (recog_data.is_asm) | |
2821 | { | |
2822 | for (opno = 0; opno < recog_data.n_operands; opno++) | |
2823 | if (MEM_P (recog_data.operand[opno])) | |
2824 | switch (GET_CODE (XEXP (recog_data.operand[opno], 0))) | |
2825 | { | |
2826 | case PRE_INC: | |
2827 | case POST_INC: | |
2828 | case PRE_DEC: | |
2829 | case POST_DEC: | |
2830 | case PRE_MODIFY: | |
2831 | case POST_MODIFY: | |
2832 | if (strchr (recog_data.constraints[opno], '<') == NULL | |
6ada2360 | 2833 | && strchr (recog_data.constraints[opno], '>') |
4a45595d | 2834 | == NULL) |
2835 | return 0; | |
2836 | break; | |
2837 | default: | |
2838 | break; | |
2839 | } | |
2840 | } | |
2841 | #endif | |
3652aed1 | 2842 | return 1; |
431ad2a9 | 2843 | } |
431ad2a9 | 2844 | } |
2845 | ||
2846 | which_alternative++; | |
2847 | } | |
d05ae338 | 2848 | while (which_alternative < recog_data.n_alternatives); |
431ad2a9 | 2849 | |
0edc2b1c | 2850 | which_alternative = -1; |
431ad2a9 | 2851 | /* If we are about to reject this, but we are not to test strictly, |
2852 | try a very loose test. Only return failure if it fails also. */ | |
2853 | if (strict == 0) | |
7f82be90 | 2854 | return constrain_operands (-1); |
431ad2a9 | 2855 | else |
2856 | return 0; | |
2857 | } | |
2858 | ||
71db0d8b | 2859 | /* Return true iff OPERAND (assumed to be a REG rtx) |
941522d6 | 2860 | is a hard reg in class CLASS when its regno is offset by OFFSET |
431ad2a9 | 2861 | and changed to mode MODE. |
2862 | If REG occupies multiple hard regs, all of them must be in CLASS. */ | |
2863 | ||
71db0d8b | 2864 | bool |
2865 | reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset, | |
3ad4992f | 2866 | enum machine_mode mode) |
431ad2a9 | 2867 | { |
71e16a8f | 2868 | unsigned int regno = REGNO (operand); |
026d3868 | 2869 | |
2870 | if (cl == NO_REGS) | |
71db0d8b | 2871 | return false; |
026d3868 | 2872 | |
71e16a8f | 2873 | /* Regno must not be a pseudo register. Offset may be negative. */ |
71db0d8b | 2874 | return (HARD_REGISTER_NUM_P (regno) |
71e16a8f | 2875 | && HARD_REGISTER_NUM_P (regno + offset) |
2876 | && in_hard_reg_set_p (reg_class_contents[(int) cl], mode, | |
2877 | regno + offset)); | |
431ad2a9 | 2878 | } |
29bd1808 | 2879 | \f |
8c92d36e | 2880 | /* Split single instruction. Helper function for split_all_insns and |
2881 | split_all_insns_noflow. Return last insn in the sequence if successful, | |
2882 | or NULL if unsuccessful. */ | |
2883 | ||
3a2624cf | 2884 | static rtx |
3ad4992f | 2885 | split_insn (rtx insn) |
3a2624cf | 2886 | { |
8c92d36e | 2887 | /* Split insns here to get max fine-grain parallelism. */ |
2888 | rtx first = PREV_INSN (insn); | |
2889 | rtx last = try_split (PATTERN (insn), insn, 1); | |
2ec3e1bb | 2890 | rtx insn_set, last_set, note; |
8c92d36e | 2891 | |
2892 | if (last == insn) | |
2893 | return NULL_RTX; | |
2894 | ||
2ec3e1bb | 2895 | /* If the original instruction was a single set that was known to be |
2896 | equivalent to a constant, see if we can say the same about the last | |
2897 | instruction in the split sequence. The two instructions must set | |
2898 | the same destination. */ | |
2899 | insn_set = single_set (insn); | |
2900 | if (insn_set) | |
2901 | { | |
2902 | last_set = single_set (last); | |
2903 | if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set))) | |
2904 | { | |
2905 | note = find_reg_equal_equiv_note (insn); | |
2906 | if (note && CONSTANT_P (XEXP (note, 0))) | |
2907 | set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0)); | |
2908 | else if (CONSTANT_P (SET_SRC (insn_set))) | |
9adc3d57 | 2909 | set_unique_reg_note (last, REG_EQUAL, |
2910 | copy_rtx (SET_SRC (insn_set))); | |
2ec3e1bb | 2911 | } |
2912 | } | |
2913 | ||
8c92d36e | 2914 | /* try_split returns the NOTE that INSN became. */ |
7bd3dcc4 | 2915 | SET_INSN_DELETED (insn); |
3a2624cf | 2916 | |
8c92d36e | 2917 | /* ??? Coddle to md files that generate subregs in post-reload |
2918 | splitters instead of computing the proper hard register. */ | |
2919 | if (reload_completed && first != last) | |
2920 | { | |
2921 | first = NEXT_INSN (first); | |
2922 | for (;;) | |
3a2624cf | 2923 | { |
8c92d36e | 2924 | if (INSN_P (first)) |
2925 | cleanup_subreg_operands (first); | |
2926 | if (first == last) | |
2927 | break; | |
2928 | first = NEXT_INSN (first); | |
3a2624cf | 2929 | } |
2930 | } | |
2ec3e1bb | 2931 | |
8c92d36e | 2932 | return last; |
3a2624cf | 2933 | } |
8c92d36e | 2934 | |
/* Split all splittable insns in the current function.  Blocks whose
   contents changed are collected in a bitmap and re-partitioned with
   find_many_sub_basic_blocks afterwards, since splitting can introduce
   new control flow.  */

void
split_all_insns (void)
{
  sbitmap blocks;
  bool changed;
  basic_block bb;

  /* One bit per basic block; set when a split occurred in that block.  */
  blocks = sbitmap_alloc (last_basic_block);
  bitmap_clear (blocks);
  changed = false;

  FOR_EACH_BB_REVERSE (bb)
    {
      rtx insn, next;
      bool finish = false;

      rtl_profile_for_bb (bb);
      for (insn = BB_HEAD (bb); !finish ; insn = next)
	{
	  /* Can't use `next_real_insn' because that might go across
	     CODE_LABELS and short-out basic blocks.  NEXT_INSN is
	     fetched before any deletion so the walk stays valid.  */
	  next = NEXT_INSN (insn);
	  /* Test for the block end before INSN can be replaced by a
	     sequence or deleted.  */
	  finish = (insn == BB_END (bb));
	  if (INSN_P (insn))
	    {
	      rtx set = single_set (insn);

	      /* Don't split no-op move insns.  These should silently
		 disappear later in final.  Splitting such insns would
		 break the code that handles LIBCALL blocks.  */
	      if (set && set_noop_p (set))
		{
		  /* Nops get in the way while scheduling, so delete them
		     now if register allocation has already been done.  It
		     is too risky to try to do this before register
		     allocation, and there are unlikely to be very many
		     nops then anyways.  */
		  if (reload_completed)
		    delete_insn_and_edges (insn);
		}
	      else
		{
		  if (split_insn (insn))
		    {
		      /* Remember this block for CFG fix-up below.  */
		      bitmap_set_bit (blocks, bb->index);
		      changed = true;
		    }
		}
	    }
	}
    }

  default_rtl_profile ();
  if (changed)
    find_many_sub_basic_blocks (blocks);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  sbitmap_free (blocks);
}
161b1647 | 2999 | |
2617fe26 | 3000 | /* Same as split_all_insns, but do not expect CFG to be available. |
41a6f238 | 3001 | Used by machine dependent reorg passes. */ |
161b1647 | 3002 | |
2a1990e9 | 3003 | unsigned int |
3ad4992f | 3004 | split_all_insns_noflow (void) |
161b1647 | 3005 | { |
3006 | rtx next, insn; | |
3007 | ||
3008 | for (insn = get_insns (); insn; insn = next) | |
3009 | { | |
3010 | next = NEXT_INSN (insn); | |
8c92d36e | 3011 | if (INSN_P (insn)) |
3012 | { | |
3013 | /* Don't split no-op move insns. These should silently | |
3014 | disappear later in final. Splitting such insns would | |
e29831db | 3015 | break the code that handles LIBCALL blocks. */ |
8c92d36e | 3016 | rtx set = single_set (insn); |
3017 | if (set && set_noop_p (set)) | |
3018 | { | |
3019 | /* Nops get in the way while scheduling, so delete them | |
3020 | now if register allocation has already been done. It | |
3021 | is too risky to try to do this before register | |
3022 | allocation, and there are unlikely to be very many | |
3023 | nops then anyways. | |
3024 | ||
3025 | ??? Should we use delete_insn when the CFG isn't valid? */ | |
3026 | if (reload_completed) | |
3027 | delete_insn_and_edges (insn); | |
3028 | } | |
3029 | else | |
3030 | split_insn (insn); | |
3031 | } | |
161b1647 | 3032 | } |
2a1990e9 | 3033 | return 0; |
161b1647 | 3034 | } |
82575fa7 | 3035 | \f |
#ifdef HAVE_peephole2
/* One slot of the peephole2 lookahead buffer: an insn together with
   the set of registers live immediately before it.  */
struct peep2_insn_data
{
  rtx insn;
  regset live_before;
};

/* Circular lookahead buffer: MAX_INSNS_PER_PEEP2 candidate slots plus
   one extra slot for the end-of-block marker.  */
static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
/* Index of the current insn within peep2_insn_data.  */
static int peep2_current;

/* Deferred-work flags for the pass driver — presumably set when a
   replacement requires rebuilding jump labels / cleaning up the CFG;
   the code using them is outside this view.  */
static bool peep2_do_rebuild_jump_labels;
static bool peep2_do_cleanup_cfg;

/* The number of instructions available to match a peep2.  */
int peep2_current_count;

/* A non-insn marker indicating the last insn of the block.
   The live_before regset for this element is correct, indicating
   DF_LIVE_OUT for the block.  */
#define PEEP2_EOB	pc_rtx
3056 | ||
991512bc | 3057 | /* Wrap N to fit into the peep2_insn_data buffer. */ |
3058 | ||
3059 | static int | |
3060 | peep2_buf_position (int n) | |
3061 | { | |
3062 | if (n >= MAX_INSNS_PER_PEEP2 + 1) | |
3063 | n -= MAX_INSNS_PER_PEEP2 + 1; | |
3064 | return n; | |
3065 | } | |
3066 | ||
3e9f1237 | 3067 | /* Return the Nth non-note insn after `current', or return NULL_RTX if it |
3068 | does not exist. Used by the recognizer to find the next insn to match | |
3069 | in a multi-insn pattern. */ | |
2d9b9dfe | 3070 | |
82575fa7 | 3071 | rtx |
3ad4992f | 3072 | peep2_next_insn (int n) |
82575fa7 | 3073 | { |
393d701f | 3074 | gcc_assert (n <= peep2_current_count); |
3e9f1237 | 3075 | |
991512bc | 3076 | n = peep2_buf_position (peep2_current + n); |
3e9f1237 | 3077 | |
3e9f1237 | 3078 | return peep2_insn_data[n].insn; |
3079 | } | |
3080 | ||
3081 | /* Return true if REGNO is dead before the Nth non-note insn | |
3082 | after `current'. */ | |
3083 | ||
3084 | int | |
3ad4992f | 3085 | peep2_regno_dead_p (int ofs, int regno) |
3e9f1237 | 3086 | { |
04e579b6 | 3087 | gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1); |
3e9f1237 | 3088 | |
991512bc | 3089 | ofs = peep2_buf_position (peep2_current + ofs); |
3e9f1237 | 3090 | |
04e579b6 | 3091 | gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX); |
3e9f1237 | 3092 | |
3093 | return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno); | |
3094 | } | |
3095 | ||
3096 | /* Similarly for a REG. */ | |
3097 | ||
3098 | int | |
3ad4992f | 3099 | peep2_reg_dead_p (int ofs, rtx reg) |
3e9f1237 | 3100 | { |
3101 | int regno, n; | |
3102 | ||
04e579b6 | 3103 | gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1); |
3e9f1237 | 3104 | |
991512bc | 3105 | ofs = peep2_buf_position (peep2_current + ofs); |
3e9f1237 | 3106 | |
04e579b6 | 3107 | gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX); |
3e9f1237 | 3108 | |
3109 | regno = REGNO (reg); | |
67d6c12b | 3110 | n = hard_regno_nregs[regno][GET_MODE (reg)]; |
3e9f1237 | 3111 | while (--n >= 0) |
3112 | if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n)) | |
3113 | return 0; | |
3114 | return 1; | |
3115 | } | |
3116 | ||
/* Regno offset to be used in the register search.  Persists across
   calls so that successive searches start at different registers and
   the free registers are distributed.  */
static int search_ofs;

/* Try to find a hard register of mode MODE, matching the register class in
   CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
   remains available until the end of LAST_INSN.  LAST_INSN may be NULL_RTX,
   in which case the only condition is that the register must be available
   before CURRENT_INSN.
   Registers that already have bits set in REG_SET will not be considered.

   If an appropriate register is available, it will be returned and the
   corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
   returned.  */

rtx
peep2_find_free_register (int from, int to, const char *class_str,
			  enum machine_mode mode, HARD_REG_SET *reg_set)
{
  enum reg_class cl;
  HARD_REG_SET live;
  df_ref *def_rec;
  int i;

  gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
  gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);

  /* Map the lookahead offsets into buffer slots.  */
  from = peep2_buf_position (peep2_current + from);
  to = peep2_buf_position (peep2_current + to);

  gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
  /* Start from the registers live before FROM ...  */
  REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);

  /* ... and accumulate everything written in [FROM, TO).  */
  while (from != to)
    {
      gcc_assert (peep2_insn_data[from].insn != NULL_RTX);

      /* Don't use registers set or clobbered by the insn.  */
      for (def_rec = DF_INSN_DEFS (peep2_insn_data[from].insn);
	   *def_rec; def_rec++)
	SET_HARD_REG_BIT (live, DF_REF_REGNO (*def_rec));

      from = peep2_buf_position (from + 1);
    }

  /* 'r' is not handled by REG_CLASS_FROM_CONSTRAINT, so special-case it.  */
  cl = (class_str[0] == 'r' ? GENERAL_REGS
	: REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int raw_regno, regno, success, j;

      /* Distribute the free registers as much as possible.  */
      raw_regno = search_ofs + i;
      if (raw_regno >= FIRST_PSEUDO_REGISTER)
	raw_regno -= FIRST_PSEUDO_REGISTER;
#ifdef REG_ALLOC_ORDER
      regno = reg_alloc_order[raw_regno];
#else
      regno = raw_regno;
#endif

      /* Can it support the mode we need?  */
      if (! HARD_REGNO_MODE_OK (regno, mode))
	continue;

      /* Every hard reg the candidate occupies in MODE must pass all
	 the checks below.  */
      success = 1;
      for (j = 0; success && j < hard_regno_nregs[regno][mode]; j++)
	{
	  /* Don't allocate fixed registers.  */
	  if (fixed_regs[regno + j])
	    {
	      success = 0;
	      break;
	    }
	  /* Don't allocate global registers.  */
	  if (global_regs[regno + j])
	    {
	      success = 0;
	      break;
	    }
	  /* Make sure the register is of the right class.  */
	  if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
	    {
	      success = 0;
	      break;
	    }
	  /* And that we don't create an extra save/restore.  */
	  if (! call_used_regs[regno + j] && ! df_regs_ever_live_p (regno + j))
	    {
	      success = 0;
	      break;
	    }

	  /* Let the target veto the candidate.  */
	  if (! targetm.hard_regno_scratch_ok (regno + j))
	    {
	      success = 0;
	      break;
	    }

	  /* And we don't clobber traceback for noreturn functions.  */
	  if ((regno + j == FRAME_POINTER_REGNUM
	       || regno + j == HARD_FRAME_POINTER_REGNUM)
	      && (! reload_completed || frame_pointer_needed))
	    {
	      success = 0;
	      break;
	    }

	  /* Finally, it must be neither already reserved nor live.  */
	  if (TEST_HARD_REG_BIT (*reg_set, regno + j)
	      || TEST_HARD_REG_BIT (live, regno + j))
	    {
	      success = 0;
	      break;
	    }
	}

      if (success)
	{
	  /* Reserve the register for the caller.  */
	  add_to_hard_reg_set (reg_set, mode, regno);

	  /* Start the next search with the next register.  */
	  if (++raw_regno >= FIRST_PSEUDO_REGISTER)
	    raw_regno = 0;
	  search_ofs = raw_regno;

	  return gen_rtx_REG (mode, regno);
	}
    }

  search_ofs = 0;
  return NULL_RTX;
}
3249 | ||
7737aadd | 3250 | /* Forget all currently tracked instructions, only remember current |
3251 | LIVE regset. */ | |
3252 | ||
3253 | static void | |
3254 | peep2_reinit_state (regset live) | |
3255 | { | |
3256 | int i; | |
3257 | ||
3258 | /* Indicate that all slots except the last holds invalid data. */ | |
3259 | for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i) | |
3260 | peep2_insn_data[i].insn = NULL_RTX; | |
3261 | peep2_current_count = 0; | |
3262 | ||
3263 | /* Indicate that the last slot contains live_after data. */ | |
3264 | peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB; | |
3265 | peep2_current = MAX_INSNS_PER_PEEP2; | |
3266 | ||
3267 | COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live); | |
3268 | } | |
3269 | ||
991512bc | 3270 | /* While scanning basic block BB, we found a match of length MATCH_LEN, |
3271 | starting at INSN. Perform the replacement, removing the old insns and | |
0c0e4952 | 3272 | replacing them with ATTEMPT. Returns the last insn emitted, or NULL |
3273 | if the replacement is rejected. */ | |
991512bc | 3274 | |
3275 | static rtx | |
3276 | peep2_attempt (basic_block bb, rtx insn, int match_len, rtx attempt) | |
3277 | { | |
3278 | int i; | |
dfe00a8f | 3279 | rtx last, eh_note, as_note, before_try, x; |
0c0e4952 | 3280 | rtx old_insn, new_insn; |
991512bc | 3281 | bool was_call = false; |
3282 | ||
dfe00a8f | 3283 | /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to |
0c0e4952 | 3284 | match more than one insn, or to be split into more than one insn. */ |
3285 | old_insn = peep2_insn_data[peep2_current].insn; | |
3286 | if (RTX_FRAME_RELATED_P (old_insn)) | |
3287 | { | |
3288 | bool any_note = false; | |
dfe00a8f | 3289 | rtx note; |
0c0e4952 | 3290 | |
3291 | if (match_len != 0) | |
3292 | return NULL; | |
3293 | ||
3294 | /* Look for one "active" insn. I.e. ignore any "clobber" insns that | |
3295 | may be in the stream for the purpose of register allocation. */ | |
3296 | if (active_insn_p (attempt)) | |
3297 | new_insn = attempt; | |
3298 | else | |
3299 | new_insn = next_active_insn (attempt); | |
3300 | if (next_active_insn (new_insn)) | |
3301 | return NULL; | |
3302 | ||
3303 | /* We have a 1-1 replacement. Copy over any frame-related info. */ | |
3304 | RTX_FRAME_RELATED_P (new_insn) = 1; | |
3305 | ||
3306 | /* Allow the backend to fill in a note during the split. */ | |
3307 | for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1)) | |
3308 | switch (REG_NOTE_KIND (note)) | |
3309 | { | |
3310 | case REG_FRAME_RELATED_EXPR: | |
3311 | case REG_CFA_DEF_CFA: | |
3312 | case REG_CFA_ADJUST_CFA: | |
3313 | case REG_CFA_OFFSET: | |
3314 | case REG_CFA_REGISTER: | |
3315 | case REG_CFA_EXPRESSION: | |
3316 | case REG_CFA_RESTORE: | |
3317 | case REG_CFA_SET_VDRAP: | |
3318 | any_note = true; | |
3319 | break; | |
3320 | default: | |
3321 | break; | |
3322 | } | |
3323 | ||
3324 | /* If the backend didn't supply a note, copy one over. */ | |
3325 | if (!any_note) | |
3326 | for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1)) | |
3327 | switch (REG_NOTE_KIND (note)) | |
3328 | { | |
3329 | case REG_FRAME_RELATED_EXPR: | |
3330 | case REG_CFA_DEF_CFA: | |
3331 | case REG_CFA_ADJUST_CFA: | |
3332 | case REG_CFA_OFFSET: | |
3333 | case REG_CFA_REGISTER: | |
3334 | case REG_CFA_EXPRESSION: | |
3335 | case REG_CFA_RESTORE: | |
3336 | case REG_CFA_SET_VDRAP: | |
3337 | add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0)); | |
3338 | any_note = true; | |
3339 | break; | |
3340 | default: | |
3341 | break; | |
3342 | } | |
3343 | ||
3344 | /* If there still isn't a note, make sure the unwind info sees the | |
3345 | same expression as before the split. */ | |
3346 | if (!any_note) | |
3347 | { | |
3348 | rtx old_set, new_set; | |
3349 | ||
3350 | /* The old insn had better have been simple, or annotated. */ | |
3351 | old_set = single_set (old_insn); | |
3352 | gcc_assert (old_set != NULL); | |
3353 | ||
3354 | new_set = single_set (new_insn); | |
3355 | if (!new_set || !rtx_equal_p (new_set, old_set)) | |
3356 | add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set); | |
3357 | } | |
1eefcaee | 3358 | |
3359 | /* Copy prologue/epilogue status. This is required in order to keep | |
3360 | proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */ | |
3361 | maybe_copy_prologue_epilogue_insn (old_insn, new_insn); | |
0c0e4952 | 3362 | } |
3363 | ||
991512bc | 3364 | /* If we are splitting a CALL_INSN, look for the CALL_INSN |
3365 | in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other | |
3366 | cfg-related call notes. */ | |
3367 | for (i = 0; i <= match_len; ++i) | |
3368 | { | |
3369 | int j; | |
dfe00a8f | 3370 | rtx note; |
991512bc | 3371 | |
3372 | j = peep2_buf_position (peep2_current + i); | |
3373 | old_insn = peep2_insn_data[j].insn; | |
3374 | if (!CALL_P (old_insn)) | |
3375 | continue; | |
3376 | was_call = true; | |
3377 | ||
3378 | new_insn = attempt; | |
3379 | while (new_insn != NULL_RTX) | |
3380 | { | |
3381 | if (CALL_P (new_insn)) | |
3382 | break; | |
3383 | new_insn = NEXT_INSN (new_insn); | |
3384 | } | |
3385 | ||
3386 | gcc_assert (new_insn != NULL_RTX); | |
3387 | ||
3388 | CALL_INSN_FUNCTION_USAGE (new_insn) | |
3389 | = CALL_INSN_FUNCTION_USAGE (old_insn); | |
3390 | ||
3391 | for (note = REG_NOTES (old_insn); | |
3392 | note; | |
3393 | note = XEXP (note, 1)) | |
3394 | switch (REG_NOTE_KIND (note)) | |
3395 | { | |
3396 | case REG_NORETURN: | |
3397 | case REG_SETJMP: | |
4c0315d0 | 3398 | case REG_TM: |
991512bc | 3399 | add_reg_note (new_insn, REG_NOTE_KIND (note), |
3400 | XEXP (note, 0)); | |
3401 | break; | |
3402 | default: | |
3403 | /* Discard all other reg notes. */ | |
3404 | break; | |
3405 | } | |
3406 | ||
3407 | /* Croak if there is another call in the sequence. */ | |
3408 | while (++i <= match_len) | |
3409 | { | |
3410 | j = peep2_buf_position (peep2_current + i); | |
3411 | old_insn = peep2_insn_data[j].insn; | |
3412 | gcc_assert (!CALL_P (old_insn)); | |
3413 | } | |
3414 | break; | |
3415 | } | |
3416 | ||
dfe00a8f | 3417 | /* If we matched any instruction that had a REG_ARGS_SIZE, then |
3418 | move those notes over to the new sequence. */ | |
3419 | as_note = NULL; | |
3420 | for (i = match_len; i >= 0; --i) | |
3421 | { | |
3422 | int j = peep2_buf_position (peep2_current + i); | |
3423 | old_insn = peep2_insn_data[j].insn; | |
3424 | ||
3425 | as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL); | |
3426 | if (as_note) | |
3427 | break; | |
3428 | } | |
991512bc | 3429 | |
dfe00a8f | 3430 | i = peep2_buf_position (peep2_current + match_len); |
3431 | eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX); | |
991512bc | 3432 | |
3433 | /* Replace the old sequence with the new. */ | |
3434 | last = emit_insn_after_setloc (attempt, | |
3435 | peep2_insn_data[i].insn, | |
5169661d | 3436 | INSN_LOCATION (peep2_insn_data[i].insn)); |
991512bc | 3437 | before_try = PREV_INSN (insn); |
3438 | delete_insn_chain (insn, peep2_insn_data[i].insn, false); | |
3439 | ||
3440 | /* Re-insert the EH_REGION notes. */ | |
dfe00a8f | 3441 | if (eh_note || (was_call && nonlocal_goto_handler_labels)) |
991512bc | 3442 | { |
3443 | edge eh_edge; | |
3444 | edge_iterator ei; | |
3445 | ||
3446 | FOR_EACH_EDGE (eh_edge, ei, bb->succs) | |
3447 | if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL)) | |
3448 | break; | |
3449 | ||
dfe00a8f | 3450 | if (eh_note) |
3451 | copy_reg_eh_region_note_backward (eh_note, last, before_try); | |
991512bc | 3452 | |
3453 | if (eh_edge) | |
3454 | for (x = last; x != before_try; x = PREV_INSN (x)) | |
3455 | if (x != BB_END (bb) | |
3456 | && (can_throw_internal (x) | |
3457 | || can_nonlocal_goto (x))) | |
3458 | { | |
3459 | edge nfte, nehe; | |
3460 | int flags; | |
3461 | ||
3462 | nfte = split_block (bb, x); | |
3463 | flags = (eh_edge->flags | |
3464 | & (EDGE_EH | EDGE_ABNORMAL)); | |
3465 | if (CALL_P (x)) | |
3466 | flags |= EDGE_ABNORMAL_CALL; | |
3467 | nehe = make_edge (nfte->src, eh_edge->dest, | |
3468 | flags); | |
3469 | ||
3470 | nehe->probability = eh_edge->probability; | |
3471 | nfte->probability | |
3472 | = REG_BR_PROB_BASE - nehe->probability; | |
3473 | ||
3474 | peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest); | |
3475 | bb = nfte->src; | |
3476 | eh_edge = nehe; | |
3477 | } | |
3478 | ||
3479 | /* Converting possibly trapping insn to non-trapping is | |
3480 | possible. Zap dummy outgoing edges. */ | |
3481 | peep2_do_cleanup_cfg |= purge_dead_edges (bb); | |
3482 | } | |
3483 | ||
dfe00a8f | 3484 | /* Re-insert the ARGS_SIZE notes. */ |
3485 | if (as_note) | |
3486 | fixup_args_size_notes (before_try, last, INTVAL (XEXP (as_note, 0))); | |
3487 | ||
991512bc | 3488 | /* If we generated a jump instruction, it won't have |
3489 | JUMP_LABEL set. Recompute after we're done. */ | |
3490 | for (x = last; x != before_try; x = PREV_INSN (x)) | |
3491 | if (JUMP_P (x)) | |
3492 | { | |
3493 | peep2_do_rebuild_jump_labels = true; | |
3494 | break; | |
3495 | } | |
3496 | ||
3497 | return last; | |
3498 | } | |
3499 | ||
/* After performing a replacement in basic block BB, fix up the life
   information in our buffer.  LAST is the last of the insns that we
   emitted as a replacement.  PREV is the insn before the start of
   the replacement.  MATCH_LEN is the number of instructions that were
   matched, and which now need to be replaced in the buffer.  */

static void
peep2_update_life (basic_block bb, int match_len, rtx last, rtx prev)
{
  /* Start from the buffer slot just past the matched window; its
     live_before set is the liveness at the end of the replacement.  */
  int i = peep2_buf_position (peep2_current + match_len + 1);
  rtx x;
  regset_head live;

  INIT_REG_SET (&live);
  COPY_REG_SET (&live, peep2_insn_data[i].live_before);

  /* The matched insns have been deleted; drop them from the count.  */
  gcc_assert (peep2_current_count >= match_len + 1);
  peep2_current_count -= match_len + 1;

  /* Walk the replacement insns backwards from LAST down to (but not
     including) PREV, re-inserting them into the circular buffer as far
     as it has room and recomputing each live-before set by backwards
     simulation.  */
  x = last;
  do
    {
      if (INSN_P (x))
	{
	  df_insn_rescan (x);
	  if (peep2_current_count < MAX_INSNS_PER_PEEP2)
	    {
	      peep2_current_count++;
	      /* Step backwards around the circular buffer.  */
	      if (--i < 0)
		i = MAX_INSNS_PER_PEEP2;
	      peep2_insn_data[i].insn = x;
	      df_simulate_one_insn_backwards (bb, x, &live);
	      COPY_REG_SET (peep2_insn_data[i].live_before, &live);
	    }
	}
      x = PREV_INSN (x);
    }
  while (x != prev);
  CLEAR_REG_SET (&live);

  peep2_current = i;
}
3542 | ||
3543 | /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible. | |
3544 | Return true if we added it, false otherwise. The caller will try to match | |
3545 | peepholes against the buffer if we return false; otherwise it will try to | |
3546 | add more instructions to the buffer. */ | |
3547 | ||
3548 | static bool | |
3549 | peep2_fill_buffer (basic_block bb, rtx insn, regset live) | |
3550 | { | |
3551 | int pos; | |
3552 | ||
3553 | /* Once we have filled the maximum number of insns the buffer can hold, | |
3554 | allow the caller to match the insns against peepholes. We wait until | |
3555 | the buffer is full in case the target has similar peepholes of different | |
3556 | length; we always want to match the longest if possible. */ | |
3557 | if (peep2_current_count == MAX_INSNS_PER_PEEP2) | |
3558 | return false; | |
3559 | ||
0c0e4952 | 3560 | /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with |
3561 | any other pattern, lest it change the semantics of the frame info. */ | |
991512bc | 3562 | if (RTX_FRAME_RELATED_P (insn)) |
3563 | { | |
3564 | /* Let the buffer drain first. */ | |
3565 | if (peep2_current_count > 0) | |
3566 | return false; | |
0c0e4952 | 3567 | /* Now the insn will be the only thing in the buffer. */ |
991512bc | 3568 | } |
3569 | ||
3570 | pos = peep2_buf_position (peep2_current + peep2_current_count); | |
3571 | peep2_insn_data[pos].insn = insn; | |
3572 | COPY_REG_SET (peep2_insn_data[pos].live_before, live); | |
3573 | peep2_current_count++; | |
3574 | ||
3575 | df_simulate_one_insn_forwards (bb, insn, live); | |
3576 | return true; | |
3577 | } | |
3578 | ||
/* Perform the peephole2 optimization pass.  Scans each basic block in
   reverse order, keeping a sliding window of up to MAX_INSNS_PER_PEEP2
   insns in the peep2 buffer and matching the target's define_peephole2
   patterns against it.  */

static void
peephole2_optimize (void)
{
  rtx insn;
  bitmap live;
  int i;
  basic_block bb;

  peep2_do_cleanup_cfg = false;
  peep2_do_rebuild_jump_labels = false;

  /* We need up-to-date liveness (with dead code removed) to simulate
     register life across the buffered insns.  */
  df_set_flags (DF_LR_RUN_DCE);
  df_note_add_problem ();
  df_analyze ();

  /* Initialize the regsets we're going to use.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
  search_ofs = 0;
  live = BITMAP_ALLOC (&reg_obstack);

  FOR_EACH_BB_REVERSE (bb)
    {
      bool past_end = false;
      int pos;

      rtl_profile_for_bb (bb);

      /* Start up propagation.  */
      bitmap_copy (live, DF_LR_IN (bb));
      df_simulate_initialize_forwards (bb, live);
      peep2_reinit_state (live);

      insn = BB_HEAD (bb);
      for (;;)
	{
	  rtx attempt, head;
	  int match_len;

	  /* Skip notes/debug insns; once we step past BB_END, stop
	     advancing and only drain the buffer below.  */
	  if (!past_end && !NONDEBUG_INSN_P (insn))
	    {
	    next_insn:
	      insn = NEXT_INSN (insn);
	      if (insn == NEXT_INSN (BB_END (bb)))
		past_end = true;
	      continue;
	    }
	  if (!past_end && peep2_fill_buffer (bb, insn, live))
	    goto next_insn;

	  /* If we did not fill an empty buffer, it signals the end of the
	     block.  */
	  if (peep2_current_count == 0)
	    break;

	  /* The buffer filled to the current maximum, so try to match.  */

	  /* Mark the end of the buffer with the sentinel insn.  */
	  pos = peep2_buf_position (peep2_current + peep2_current_count);
	  peep2_insn_data[pos].insn = PEEP2_EOB;
	  COPY_REG_SET (peep2_insn_data[pos].live_before, live);

	  /* Match the peephole.  */
	  head = peep2_insn_data[peep2_current].insn;
	  attempt = peephole2_insns (PATTERN (head), head, &match_len);
	  if (attempt != NULL)
	    {
	      rtx last = peep2_attempt (bb, head, match_len, attempt);
	      if (last)
		{
		  peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
		  continue;
		}
	    }

	  /* No match: advance the buffer by one insn.  */
	  peep2_current = peep2_buf_position (peep2_current + 1);
	  peep2_current_count--;
	}
    }

  default_rtl_profile ();
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    BITMAP_FREE (peep2_insn_data[i].live_before);
  BITMAP_FREE (live);
  if (peep2_do_rebuild_jump_labels)
    rebuild_jump_labels (get_insns ());
}
3668 | #endif /* HAVE_peephole2 */ | |
5459086b | 3669 | |
3670 | /* Common predicates for use with define_bypass. */ | |
3671 | ||
3672 | /* True if the dependency between OUT_INSN and IN_INSN is on the store | |
bdfa3a48 | 3673 | data not the address operand(s) of the store. IN_INSN and OUT_INSN |
3674 | must be either a single_set or a PARALLEL with SETs inside. */ | |
5459086b | 3675 | |
3676 | int | |
3ad4992f | 3677 | store_data_bypass_p (rtx out_insn, rtx in_insn) |
5459086b | 3678 | { |
3679 | rtx out_set, in_set; | |
bdfa3a48 | 3680 | rtx out_pat, in_pat; |
3681 | rtx out_exp, in_exp; | |
3682 | int i, j; | |
5459086b | 3683 | |
5459086b | 3684 | in_set = single_set (in_insn); |
bdfa3a48 | 3685 | if (in_set) |
c69803e9 | 3686 | { |
bdfa3a48 | 3687 | if (!MEM_P (SET_DEST (in_set))) |
c69803e9 | 3688 | return false; |
bdfa3a48 | 3689 | |
3690 | out_set = single_set (out_insn); | |
3691 | if (out_set) | |
3692 | { | |
3693 | if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set))) | |
3694 | return false; | |
3695 | } | |
3696 | else | |
3697 | { | |
3698 | out_pat = PATTERN (out_insn); | |
3699 | ||
3700 | if (GET_CODE (out_pat) != PARALLEL) | |
3701 | return false; | |
3702 | ||
3703 | for (i = 0; i < XVECLEN (out_pat, 0); i++) | |
3704 | { | |
3705 | out_exp = XVECEXP (out_pat, 0, i); | |
3706 | ||
3707 | if (GET_CODE (out_exp) == CLOBBER) | |
3708 | continue; | |
3709 | ||
3710 | gcc_assert (GET_CODE (out_exp) == SET); | |
3711 | ||
3712 | if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set))) | |
3713 | return false; | |
3714 | } | |
3715 | } | |
c69803e9 | 3716 | } |
3717 | else | |
3718 | { | |
bdfa3a48 | 3719 | in_pat = PATTERN (in_insn); |
3720 | gcc_assert (GET_CODE (in_pat) == PARALLEL); | |
c69803e9 | 3721 | |
bdfa3a48 | 3722 | for (i = 0; i < XVECLEN (in_pat, 0); i++) |
c69803e9 | 3723 | { |
bdfa3a48 | 3724 | in_exp = XVECEXP (in_pat, 0, i); |
c69803e9 | 3725 | |
bdfa3a48 | 3726 | if (GET_CODE (in_exp) == CLOBBER) |
bab5a775 | 3727 | continue; |
3728 | ||
bdfa3a48 | 3729 | gcc_assert (GET_CODE (in_exp) == SET); |
c69803e9 | 3730 | |
bdfa3a48 | 3731 | if (!MEM_P (SET_DEST (in_exp))) |
c69803e9 | 3732 | return false; |
bdfa3a48 | 3733 | |
3734 | out_set = single_set (out_insn); | |
3735 | if (out_set) | |
3736 | { | |
3737 | if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp))) | |
3738 | return false; | |
3739 | } | |
3740 | else | |
3741 | { | |
3742 | out_pat = PATTERN (out_insn); | |
3743 | gcc_assert (GET_CODE (out_pat) == PARALLEL); | |
3744 | ||
3745 | for (j = 0; j < XVECLEN (out_pat, 0); j++) | |
3746 | { | |
3747 | out_exp = XVECEXP (out_pat, 0, j); | |
3748 | ||
3749 | if (GET_CODE (out_exp) == CLOBBER) | |
3750 | continue; | |
3751 | ||
3752 | gcc_assert (GET_CODE (out_exp) == SET); | |
3753 | ||
3754 | if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp))) | |
3755 | return false; | |
3756 | } | |
3757 | } | |
3758 | } | |
c69803e9 | 3759 | } |
5459086b | 3760 | |
3761 | return true; | |
3762 | } | |
3763 | ||
9ed76315 | 3764 | /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE |
3765 | condition, and not the THEN or ELSE branch. OUT_INSN may be either a single | |
3766 | or multiple set; IN_INSN should be single_set for truth, but for convenience | |
3767 | of insn categorization may be any JUMP or CALL insn. */ | |
5459086b | 3768 | |
3769 | int | |
3ad4992f | 3770 | if_test_bypass_p (rtx out_insn, rtx in_insn) |
5459086b | 3771 | { |
3772 | rtx out_set, in_set; | |
3773 | ||
5459086b | 3774 | in_set = single_set (in_insn); |
3775 | if (! in_set) | |
9ed76315 | 3776 | { |
04e579b6 | 3777 | gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn)); |
3778 | return false; | |
9ed76315 | 3779 | } |
5459086b | 3780 | |
3781 | if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE) | |
3782 | return false; | |
9ed76315 | 3783 | in_set = SET_SRC (in_set); |
5459086b | 3784 | |
9ed76315 | 3785 | out_set = single_set (out_insn); |
3786 | if (out_set) | |
3787 | { | |
3788 | if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1)) | |
3789 | || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2))) | |
2617fe26 | 3790 | return false; |
9ed76315 | 3791 | } |
3792 | else | |
3793 | { | |
3794 | rtx out_pat; | |
3795 | int i; | |
3796 | ||
3797 | out_pat = PATTERN (out_insn); | |
04e579b6 | 3798 | gcc_assert (GET_CODE (out_pat) == PARALLEL); |
9ed76315 | 3799 | |
3800 | for (i = 0; i < XVECLEN (out_pat, 0); i++) | |
3801 | { | |
3802 | rtx exp = XVECEXP (out_pat, 0, i); | |
3803 | ||
3804 | if (GET_CODE (exp) == CLOBBER) | |
3805 | continue; | |
3806 | ||
04e579b6 | 3807 | gcc_assert (GET_CODE (exp) == SET); |
9ed76315 | 3808 | |
3809 | if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1)) | |
3810 | || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2))) | |
3811 | return false; | |
3812 | } | |
3813 | } | |
5459086b | 3814 | |
3815 | return true; | |
3816 | } | |
77fce4cd | 3817 | \f |
3818 | static bool | |
3819 | gate_handle_peephole2 (void) | |
3820 | { | |
3821 | return (optimize > 0 && flag_peephole2); | |
3822 | } | |
3823 | ||
/* Entry point of the peephole2 pass.  Does nothing on targets without
   define_peephole2 patterns (no HAVE_peephole2).  */
static unsigned int
rest_of_handle_peephole2 (void)
{
#ifdef HAVE_peephole2
  peephole2_optimize ();
#endif
  return 0;
}
3832 | ||
namespace {

/* Pass-manager metadata for the peephole2 pass.  */
const pass_data pass_data_peephole2 =
{
  RTL_PASS, /* type */
  "peephole2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_PEEPHOLE2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_df_finish | TODO_verify_rtl_sharing | 0 ), /* todo_flags_finish */
};

class pass_peephole2 : public rtl_opt_pass
{
public:
  pass_peephole2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_peephole2, ctxt)
  {}

  /* opt_pass methods: */
  /* The epiphany backend creates a second instance of this pass, so we need
     a clone method.  */
  opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
  bool gate () { return gate_handle_peephole2 (); }
  unsigned int execute () { return rest_of_handle_peephole2 (); }

}; // class pass_peephole2

} // anon namespace
3867 | ||
/* Factory called by the pass manager to create the peephole2 pass.  */
rtl_opt_pass *
make_pass_peephole2 (gcc::context *ctxt)
{
  return new pass_peephole2 (ctxt);
}
3873 | ||
/* Entry point of the split1 pass: split all insns unconditionally.  */
static unsigned int
rest_of_handle_split_all_insns (void)
{
  split_all_insns ();
  return 0;
}
3880 | ||
namespace {

/* Pass-manager metadata for the first insn-splitting pass (split1).  */
const pass_data pass_data_split_all_insns =
{
  RTL_PASS, /* type */
  "split1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_all_insns : public rtl_opt_pass
{
public:
  pass_split_all_insns (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_all_insns, ctxt)
  {}

  /* opt_pass methods: */
  /* The epiphany backend creates a second instance of this pass, so
     we need a clone method.  */
  opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
  unsigned int execute () { return rest_of_handle_split_all_insns (); }

}; // class pass_split_all_insns

} // anon namespace
3914 | ||
/* Factory called by the pass manager to create the split1 pass.  */
rtl_opt_pass *
make_pass_split_all_insns (gcc::context *ctxt)
{
  return new pass_split_all_insns (ctxt);
}
3920 | ||
/* Entry point of the split2 pass, run after reload.  */
static unsigned int
rest_of_handle_split_after_reload (void)
{
  /* If optimizing, then go ahead and split insns now.  On STACK_REGS
     targets (x87) splitting here is mandatory even at -O0, so the
     condition is compiled away and split_all_insns runs always.  */
#ifndef STACK_REGS
  if (optimize > 0)
#endif
    split_all_insns ();
  return 0;
}
3931 | ||
namespace {

/* Pass-manager metadata for the post-reload splitting pass (split2).  */
const pass_data pass_data_split_after_reload =
{
  RTL_PASS, /* type */
  "split2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_after_reload : public rtl_opt_pass
{
public:
  pass_split_after_reload (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_after_reload, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return rest_of_handle_split_after_reload (); }

}; // class pass_split_after_reload

} // anon namespace
3962 | ||
/* Factory called by the pass manager to create the split2 pass.  */
rtl_opt_pass *
make_pass_split_after_reload (gcc::context *ctxt)
{
  return new pass_split_after_reload (ctxt);
}
3968 | ||
/* Gate for the split3 pass: splitting before reg-stack conversion is
   needed only on STACK_REGS targets with insn-length attributes.  */
static bool
gate_handle_split_before_regstack (void)
{
#if HAVE_ATTR_length && defined (STACK_REGS)
  /* If flow2 creates new instructions which need splitting
     and scheduling after reload is not done, they might not be
     split until final which doesn't allow splitting
     if HAVE_ATTR_length.  */
# ifdef INSN_SCHEDULING
  return (optimize && !flag_schedule_insns_after_reload);
# else
  return (optimize);
# endif
#else
  return 0;
#endif
}
3986 | ||
/* Entry point of the split3 pass: split before reg-stack conversion.  */
static unsigned int
rest_of_handle_split_before_regstack (void)
{
  split_all_insns ();
  return 0;
}
3993 | ||
namespace {

/* Pass-manager metadata for the pre-regstack splitting pass (split3).  */
const pass_data pass_data_split_before_regstack =
{
  RTL_PASS, /* type */
  "split3", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_before_regstack : public rtl_opt_pass
{
public:
  pass_split_before_regstack (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_handle_split_before_regstack (); }
  unsigned int execute () {
    return rest_of_handle_split_before_regstack ();
  }

}; // class pass_split_before_regstack

} // anon namespace
4027 | ||
/* Factory called by the pass manager to create the split3 pass.  */
rtl_opt_pass *
make_pass_split_before_regstack (gcc::context *ctxt)
{
  return new pass_split_before_regstack (ctxt);
}
4033 | ||
/* Gate for the split4 pass: split before sched2 only when the second
   scheduling pass will actually run.  */
static bool
gate_handle_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  return optimize > 0 && flag_schedule_insns_after_reload;
#else
  return 0;
#endif
}
4043 | ||
/* Entry point of the split4 pass: split all insns ahead of sched2.  */
static unsigned int
rest_of_handle_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  split_all_insns ();
#endif
  return 0;
}
4052 | ||
namespace {

/* Pass-manager metadata for the pre-sched2 splitting pass (split4).  */
const pass_data pass_data_split_before_sched2 =
{
  RTL_PASS, /* type */
  "split4", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_flow, /* todo_flags_finish */
};

class pass_split_before_sched2 : public rtl_opt_pass
{
public:
  pass_split_before_sched2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_handle_split_before_sched2 (); }
  unsigned int execute () { return rest_of_handle_split_before_sched2 (); }

}; // class pass_split_before_sched2

} // anon namespace
4084 | ||
/* Factory called by the pass manager to create the split4 pass.  */
rtl_opt_pass *
make_pass_split_before_sched2 (gcc::context *ctxt)
{
  return new pass_split_before_sched2 (ctxt);
}
4090 | ||
/* The placement of the splitting that we do for shorten_branches
   depends on whether regstack is used by the target or not.  On
   STACK_REGS targets the split happens earlier (split3), so this
   final split (split5) is only enabled otherwise.  */
static bool
gate_do_final_split (void)
{
#if HAVE_ATTR_length && !defined (STACK_REGS)
  return 1;
#else
  return 0;
#endif
}
4102 | ||
namespace {

/* Pass-manager metadata for the final splitting pass (split5), run to
   provide accurate insn lengths for shorten_branches.  */
const pass_data pass_data_split_for_shorten_branches =
{
  RTL_PASS, /* type */
  "split5", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_rtl_sharing, /* todo_flags_finish */
};

class pass_split_for_shorten_branches : public rtl_opt_pass
{
public:
  pass_split_for_shorten_branches (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_do_final_split (); }
  unsigned int execute () { return split_all_insns_noflow (); }

}; // class pass_split_for_shorten_branches

} // anon namespace
4134 | ||
/* Factory called by the pass manager to create the split5 pass.  */
rtl_opt_pass *
make_pass_split_for_shorten_branches (gcc::context *ctxt)
{
  return new pass_split_for_shorten_branches (ctxt);
}