1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "cfghooks.h"
29 #include "df.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "insn-config.h"
33 #include "regs.h"
34 #include "emit-rtl.h"
35 #include "recog.h"
36 #include "insn-attr.h"
37 #include "addresses.h"
38 #include "cfgrtl.h"
39 #include "cfgbuild.h"
40 #include "cfgcleanup.h"
41 #include "reload.h"
42 #include "tree-pass.h"
43
44 #ifndef STACK_POP_CODE
45 #if STACK_GROWS_DOWNWARD
46 #define STACK_POP_CODE POST_INC
47 #else
48 #define STACK_POP_CODE POST_DEC
49 #endif
50 #endif
51
52 static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx_insn *, bool);
53 static void validate_replace_src_1 (rtx *, void *);
54 static rtx_insn *split_insn (rtx_insn *);
55
56 struct target_recog default_target_recog;
57 #if SWITCHABLE_TARGET
58 struct target_recog *this_target_recog = &default_target_recog;
59 #endif
60
61 /* Nonzero means allow operands to be volatile.
62 This should be 0 if you are generating rtl, such as if you are calling
63 the functions in optabs.c and expmed.c (most of the time).
64 This should be 1 if all valid insns need to be recognized,
65 such as in reginfo.c and final.c and reload.c.
66
67 init_recog and init_recog_no_volatile are responsible for setting this. */
68
69 int volatile_ok;
70
71 struct recog_data_d recog_data;
72
73 /* Contains a vector of operand_alternative structures, such that
74 operand OP of alternative A is at index A * n_operands + OP.
75 Set up by preprocess_constraints. */
76 const operand_alternative *recog_op_alt;
77
78 /* Used to provide recog_op_alt for asms. */
79 static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
80 * MAX_RECOG_ALTERNATIVES];
81
82 /* On return from `constrain_operands', indicate which alternative
83 was satisfied. */
84
85 int which_alternative;
86
87 /* Nonzero after end of reload pass.
88 Set to 1 or 0 by toplev.c.
89 Controls the significance of (SUBREG (MEM)). */
90
91 int reload_completed;
92
93 /* Nonzero after thread_prologue_and_epilogue_insns has run. */
94 int epilogue_completed;
95
96 /* Initialize data used by the function `recog'.
97 This must be called once in the compilation of a function
98 before any insn recognition may be done in the function. */
99
100 void
101 init_recog_no_volatile (void)
102 {
103 volatile_ok = 0;
104 }
105
106 void
107 init_recog (void)
108 {
109 volatile_ok = 1;
110 }
111
112 \f
113 /* Return true if labels in asm operands BODY are LABEL_REFs. */
114
115 static bool
116 asm_labels_ok (rtx body)
117 {
118 rtx asmop;
119 int i;
120
121 asmop = extract_asm_operands (body);
122 if (asmop == NULL_RTX)
123 return true;
124
125 for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
126 if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
127 return false;
128
129 return true;
130 }
131
132 /* Check that X is an insn-body for an `asm' with operands
133 and that the operands mentioned in it are legitimate. */
134
135 int
136 check_asm_operands (rtx x)
137 {
138 int noperands;
139 rtx *operands;
140 const char **constraints;
141 int i;
142
143 if (!asm_labels_ok (x))
144 return 0;
145
146 /* Post-reload, be more strict with things. */
147 if (reload_completed)
148 {
149 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
150 rtx_insn *insn = make_insn_raw (x);
151 extract_insn (insn);
152 constrain_operands (1, get_enabled_alternatives (insn));
153 return which_alternative >= 0;
154 }
155
156 noperands = asm_noperands (x);
157 if (noperands < 0)
158 return 0;
159 if (noperands == 0)
160 return 1;
161
162 operands = XALLOCAVEC (rtx, noperands);
163 constraints = XALLOCAVEC (const char *, noperands);
164
165 decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);
166
167 for (i = 0; i < noperands; i++)
168 {
169 const char *c = constraints[i];
170 if (c[0] == '%')
171 c++;
172 if (! asm_operand_ok (operands[i], c, constraints))
173 return 0;
174 }
175
176 return 1;
177 }
178 \f
179 /* Static data for the next two routines. */
180
181 struct change_t
182 {
183 rtx object;
184 int old_code;
185 bool unshare;
186 rtx *loc;
187 rtx old;
188 };
189
190 static change_t *changes;
191 static int changes_allocated;
192
193 static int num_changes = 0;
194
195 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
196 at which NEW_RTX will be placed. If OBJECT is zero, no validation is done,
197 the change is simply made.
198
199 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
200 will be called with the address and mode as parameters. If OBJECT is
201 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
202 the change in place.
203
204 IN_GROUP is nonzero if this is part of a group of changes that must be
205 performed as a group. In that case, the changes will be stored. The
206 function `apply_change_group' will validate and apply the changes.
207
208 If IN_GROUP is zero, this is a single change. Try to recognize the insn
209 or validate the memory reference with the change applied. If the result
210 is not valid for the machine, suppress the change and return zero.
211 Otherwise, perform the change and return 1. */
212
213 static bool
214 validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
215 {
216 rtx old = *loc;
217
218 if (old == new_rtx || rtx_equal_p (old, new_rtx))
219 return 1;
220
221 gcc_assert (in_group != 0 || num_changes == 0);
222
223 *loc = new_rtx;
224
225 /* Save the information describing this change. */
226 if (num_changes >= changes_allocated)
227 {
228 if (changes_allocated == 0)
229 /* This value allows for repeated substitutions inside complex
230 indexed addresses, or changes in up to 5 insns. */
231 changes_allocated = MAX_RECOG_OPERANDS * 5;
232 else
233 changes_allocated *= 2;
234
235 changes = XRESIZEVEC (change_t, changes, changes_allocated);
236 }
237
238 changes[num_changes].object = object;
239 changes[num_changes].loc = loc;
240 changes[num_changes].old = old;
241 changes[num_changes].unshare = unshare;
242
243 if (object && !MEM_P (object))
244 {
245 /* Set INSN_CODE to force rerecognition of insn. Save old code in
246 case invalid. */
247 changes[num_changes].old_code = INSN_CODE (object);
248 INSN_CODE (object) = -1;
249 }
250
251 num_changes++;
252
253 /* If we are making a group of changes, return 1. Otherwise, validate the
254 change group we made. */
255
256 if (in_group)
257 return 1;
258 else
259 return apply_change_group ();
260 }
261
262 /* Wrapper for validate_change_1 without the UNSHARE argument, defaulting
263 UNSHARE to false. */
264
265 bool
266 validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
267 {
268 return validate_change_1 (object, loc, new_rtx, in_group, false);
269 }
270
271 /* Wrapper for validate_change_1 without the UNSHARE argument, defaulting
272 UNSHARE to true. */
273
274 bool
275 validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
276 {
277 return validate_change_1 (object, loc, new_rtx, in_group, true);
278 }
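
/* A minimal usage sketch (example_replace_src and NEW_SRC are hypothetical,
   not part of recog.c): make a single in-place change and let
   validate_change re-recognize the insn, undoing the change automatically
   if the result is not valid.  */

static bool
example_replace_src (rtx_insn *insn, rtx new_src)
{
  rtx set = single_set (insn);
  if (!set)
    return false;

  /* IN_GROUP == 0: the change is validated (and cancelled on failure)
     immediately via apply_change_group.  */
  return validate_change (insn, &SET_SRC (set), new_src, 0);
}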
279
280
281 /* Keep X canonicalized if some changes have made it non-canonical; only
282 modifies the operands of X, not (for example) its code. Simplifications
283 are not the job of this routine.
284
285 Return true if anything was changed. */
286 bool
287 canonicalize_change_group (rtx_insn *insn, rtx x)
288 {
289 if (COMMUTATIVE_P (x)
290 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
291 {
292 /* Oops, the caller has made X no longer canonical.
293 Let's redo the changes in the correct order. */
294 rtx tem = XEXP (x, 0);
295 validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
296 validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
297 return true;
298 }
299 else
300 return false;
301 }
302
303
304 /* This subroutine of apply_change_group verifies whether the changes to INSN
305 were valid; i.e. whether INSN can still be recognized.
306
307 If IN_GROUP is true, any clobbers that have to be added in order to
308 match the instruction will be added to the current change group.
309 Otherwise the changes will take effect immediately. */
310
311 int
312 insn_invalid_p (rtx_insn *insn, bool in_group)
313 {
314 rtx pat = PATTERN (insn);
315 int num_clobbers = 0;
316 /* If we are before reload and the pattern is a SET, see if we can add
317 clobbers. */
318 int icode = recog (pat, insn,
319 (GET_CODE (pat) == SET
320 && ! reload_completed
321 && ! reload_in_progress)
322 ? &num_clobbers : 0);
323 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
324
325
326 /* If this is an asm and the operands aren't legal, then fail. Likewise if
327 this is not an asm and the insn wasn't recognized. */
328 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
329 || (!is_asm && icode < 0))
330 return 1;
331
332 /* If we have to add CLOBBERs, fail if we have to add ones that reference
333 hard registers since our callers can't know if they are live or not.
334 Otherwise, add them. */
335 if (num_clobbers > 0)
336 {
337 rtx newpat;
338
339 if (added_clobbers_hard_reg_p (icode))
340 return 1;
341
342 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
343 XVECEXP (newpat, 0, 0) = pat;
344 add_clobbers (newpat, icode);
345 if (in_group)
346 validate_change (insn, &PATTERN (insn), newpat, 1);
347 else
348 PATTERN (insn) = pat = newpat;
349 }
350
351 /* After reload, verify that all constraints are satisfied. */
352 if (reload_completed)
353 {
354 extract_insn (insn);
355
356 if (! constrain_operands (1, get_preferred_alternatives (insn)))
357 return 1;
358 }
359
360 INSN_CODE (insn) = icode;
361 return 0;
362 }
363
364 /* Return number of changes made and not validated yet. */
365 int
366 num_changes_pending (void)
367 {
368 return num_changes;
369 }
370
371 /* Tentatively apply the changes numbered NUM and up.
372 Return 1 if all changes are valid, zero otherwise. */
373
374 int
375 verify_changes (int num)
376 {
377 int i;
378 rtx last_validated = NULL_RTX;
379
380 /* The changes have been applied and all INSN_CODEs have been reset to force
381 rerecognition.
382
383 The changes are valid if we aren't given an object, or if we are
384 given a MEM and it still is a valid address, or if this is an insn
385 and it is recognized. In the latter case, if reload has completed,
386 we also require that the operands meet the constraints for
387 the insn. */
388
389 for (i = num; i < num_changes; i++)
390 {
391 rtx object = changes[i].object;
392
393 /* If there is no object to test or if it is the same as the one we
394 already tested, ignore it. */
395 if (object == 0 || object == last_validated)
396 continue;
397
398 if (MEM_P (object))
399 {
400 if (! memory_address_addr_space_p (GET_MODE (object),
401 XEXP (object, 0),
402 MEM_ADDR_SPACE (object)))
403 break;
404 }
405 else if (/* changes[i].old might be zero, e.g. when putting a
406 REG_FRAME_RELATED_EXPR into a previously empty list. */
407 changes[i].old
408 && REG_P (changes[i].old)
409 && asm_noperands (PATTERN (object)) > 0
410 && REG_EXPR (changes[i].old) != NULL_TREE
411 && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
412 && DECL_REGISTER (REG_EXPR (changes[i].old)))
413 {
414 /* Don't allow changes of hard register operands to inline
415 assemblies if they have been defined as register asm ("x"). */
416 break;
417 }
418 else if (DEBUG_INSN_P (object))
419 continue;
420 else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
421 {
422 rtx pat = PATTERN (object);
423
424 /* Perhaps we couldn't recognize the insn because there were
425 extra CLOBBERs at the end. If so, try to re-recognize
426 without the last CLOBBER (later iterations will cause each of
427 them to be eliminated, in turn). But don't do this if we
428 have an ASM_OPERAND. */
429 if (GET_CODE (pat) == PARALLEL
430 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
431 && asm_noperands (PATTERN (object)) < 0)
432 {
433 rtx newpat;
434
435 if (XVECLEN (pat, 0) == 2)
436 newpat = XVECEXP (pat, 0, 0);
437 else
438 {
439 int j;
440
441 newpat
442 = gen_rtx_PARALLEL (VOIDmode,
443 rtvec_alloc (XVECLEN (pat, 0) - 1));
444 for (j = 0; j < XVECLEN (newpat, 0); j++)
445 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
446 }
447
448 /* Add a new change to this group to replace the pattern
449 with this new pattern. Then consider this change
450 as having succeeded. The change we added will
451 cause the entire call to fail if things remain invalid.
452
453 Note that this can lose if a later change than the one
454 we are processing specified &XVECEXP (PATTERN (object), 0, X)
455 but this shouldn't occur. */
456
457 validate_change (object, &PATTERN (object), newpat, 1);
458 continue;
459 }
460 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
461 || GET_CODE (pat) == VAR_LOCATION)
462 /* If this insn is a CLOBBER or USE, it is always valid, but is
463 never recognized. */
464 continue;
465 else
466 break;
467 }
468 last_validated = object;
469 }
470
471 return (i == num_changes);
472 }
473
474 /* A group of changes has previously been issued with validate_change
475 and verified with verify_changes. Call df_insn_rescan for each of
476 the insns changed and clear num_changes. */
477
478 void
479 confirm_change_group (void)
480 {
481 int i;
482 rtx last_object = NULL;
483
484 for (i = 0; i < num_changes; i++)
485 {
486 rtx object = changes[i].object;
487
488 if (changes[i].unshare)
489 *changes[i].loc = copy_rtx (*changes[i].loc);
490
491 /* Avoid unnecessary rescanning when multiple changes to the same instruction
492 are made. */
493 if (object)
494 {
495 if (object != last_object && last_object && INSN_P (last_object))
496 df_insn_rescan (as_a <rtx_insn *> (last_object));
497 last_object = object;
498 }
499 }
500
501 if (last_object && INSN_P (last_object))
502 df_insn_rescan (as_a <rtx_insn *> (last_object));
503 num_changes = 0;
504 }
505
506 /* Apply a group of changes previously issued with `validate_change'.
507 If all changes are valid, call confirm_change_group and return 1,
508 otherwise, call cancel_changes and return 0. */
509
510 int
511 apply_change_group (void)
512 {
513 if (verify_changes (0))
514 {
515 confirm_change_group ();
516 return 1;
517 }
518 else
519 {
520 cancel_changes (0);
521 return 0;
522 }
523 }
524
525
526 /* Return the number of changes so far in the current group. */
527
528 int
529 num_validated_changes (void)
530 {
531 return num_changes;
532 }
533
534 /* Retract the changes numbered NUM and up. */
535
536 void
537 cancel_changes (int num)
538 {
539 int i;
540
541 /* Back out all the changes. Do this in the opposite order in which
542 they were made. */
543 for (i = num_changes - 1; i >= num; i--)
544 {
545 *changes[i].loc = changes[i].old;
546 if (changes[i].object && !MEM_P (changes[i].object))
547 INSN_CODE (changes[i].object) = changes[i].old_code;
548 }
549 num_changes = num;
550 }
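
/* A minimal sketch of driving the routines above by hand (all names
   hypothetical): remember a checkpoint with num_validated_changes, queue
   further changes, and retract only the new ones if they do not verify,
   leaving any earlier pending changes untouched.  */

static bool
example_try_extra_change (rtx_insn *insn, rtx *loc, rtx new_rtx)
{
  int checkpoint = num_validated_changes ();

  validate_change (insn, loc, new_rtx, 1);
  if (verify_changes (checkpoint))
    /* Leave the group pending; the caller decides when to call
       confirm_change_group or apply_change_group.  */
    return true;

  cancel_changes (checkpoint);
  return false;
}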
551
552 /* Reduce conditional compilation elsewhere. */
553 /* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
554 rtx. */
555
556 static void
557 simplify_while_replacing (rtx *loc, rtx to, rtx_insn *object,
558 machine_mode op0_mode)
559 {
560 rtx x = *loc;
561 enum rtx_code code = GET_CODE (x);
562 rtx new_rtx = NULL_RTX;
563 scalar_int_mode is_mode;
564
565 if (SWAPPABLE_OPERANDS_P (x)
566 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
567 {
568 validate_unshare_change (object, loc,
569 gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
570 : swap_condition (code),
571 GET_MODE (x), XEXP (x, 1),
572 XEXP (x, 0)), 1);
573 x = *loc;
574 code = GET_CODE (x);
575 }
576
577 /* Canonicalize arithmetics with all constant operands. */
578 switch (GET_RTX_CLASS (code))
579 {
580 case RTX_UNARY:
581 if (CONSTANT_P (XEXP (x, 0)))
582 new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
583 op0_mode);
584 break;
585 case RTX_COMM_ARITH:
586 case RTX_BIN_ARITH:
587 if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
588 new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
589 XEXP (x, 1));
590 break;
591 case RTX_COMPARE:
592 case RTX_COMM_COMPARE:
593 if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
594 new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
595 XEXP (x, 0), XEXP (x, 1));
596 break;
597 default:
598 break;
599 }
600 if (new_rtx)
601 {
602 validate_change (object, loc, new_rtx, 1);
603 return;
604 }
605
606 switch (code)
607 {
608 case PLUS:
609 /* If we have a PLUS whose second operand is now a CONST_INT, use
610 simplify_gen_binary to try to simplify it.
611 ??? We may want later to remove this, once simplification is
612 separated from this function. */
613 if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
614 validate_change (object, loc,
615 simplify_gen_binary
616 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
617 break;
618 case MINUS:
619 if (CONST_SCALAR_INT_P (XEXP (x, 1)))
620 validate_change (object, loc,
621 simplify_gen_binary
622 (PLUS, GET_MODE (x), XEXP (x, 0),
623 simplify_gen_unary (NEG,
624 GET_MODE (x), XEXP (x, 1),
625 GET_MODE (x))), 1);
626 break;
627 case ZERO_EXTEND:
628 case SIGN_EXTEND:
629 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
630 {
631 new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
632 op0_mode);
633 /* If any of the above failed, substitute in something that
634 we know won't be recognized. */
635 if (!new_rtx)
636 new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
637 validate_change (object, loc, new_rtx, 1);
638 }
639 break;
640 case SUBREG:
641 /* All subregs possible to simplify should be simplified. */
642 new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
643 SUBREG_BYTE (x));
644
645 /* Subregs of VOIDmode operands are incorrect. */
646 if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
647 new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
648 if (new_rtx)
649 validate_change (object, loc, new_rtx, 1);
650 break;
651 case ZERO_EXTRACT:
652 case SIGN_EXTRACT:
653 /* If we are replacing a register with memory, try to change the memory
654 to be the mode required for memory in extract operations (this isn't
655 likely to be an insertion operation; if it was, nothing bad will
656 happen, we might just fail in some cases). */
657
658 if (MEM_P (XEXP (x, 0))
659 && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &is_mode)
660 && CONST_INT_P (XEXP (x, 1))
661 && CONST_INT_P (XEXP (x, 2))
662 && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
663 MEM_ADDR_SPACE (XEXP (x, 0)))
664 && !MEM_VOLATILE_P (XEXP (x, 0)))
665 {
666 machine_mode wanted_mode = VOIDmode;
667 int pos = INTVAL (XEXP (x, 2));
668
669 if (GET_CODE (x) == ZERO_EXTRACT && targetm.have_extzv ())
670 {
671 wanted_mode = insn_data[targetm.code_for_extzv].operand[1].mode;
672 if (wanted_mode == VOIDmode)
673 wanted_mode = word_mode;
674 }
675 else if (GET_CODE (x) == SIGN_EXTRACT && targetm.have_extv ())
676 {
677 wanted_mode = insn_data[targetm.code_for_extv].operand[1].mode;
678 if (wanted_mode == VOIDmode)
679 wanted_mode = word_mode;
680 }
681
682 /* If we have a narrower mode, we can do something. */
683 if (wanted_mode != VOIDmode
684 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
685 {
686 int offset = pos / BITS_PER_UNIT;
687 rtx newmem;
688
689 /* If the bytes and bits are counted differently, we
690 must adjust the offset. */
691 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
692 offset =
693 (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
694 offset);
695
696 gcc_assert (GET_MODE_PRECISION (wanted_mode)
697 == GET_MODE_BITSIZE (wanted_mode));
698 pos %= GET_MODE_BITSIZE (wanted_mode);
699
700 newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);
701
702 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
703 validate_change (object, &XEXP (x, 0), newmem, 1);
704 }
705 }
706
707 break;
708
709 default:
710 break;
711 }
712 }
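
/* A worked example of the ZERO_EXTRACT case above, assuming a target whose
   extzv pattern wants QImode memory and on which
   BYTES_BIG_ENDIAN == BITS_BIG_ENDIAN: replacing a register with (mem:SI A)
   in

       (zero_extract:SI (mem:SI A) (const_int 8) (const_int 24))

   queues changes that narrow the reference to

       (zero_extract:SI (mem:QI (plus A (const_int 3)))
			(const_int 8) (const_int 0))

   i.e. OFFSET = 24 / BITS_PER_UNIT = 3 and POS becomes 24 % 8 = 0.  */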
713
714 /* Replace every occurrence of FROM in X with TO. Mark each change with
715 validate_change passing OBJECT. */
716
717 static void
718 validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx_insn *object,
719 bool simplify)
720 {
721 int i, j;
722 const char *fmt;
723 rtx x = *loc;
724 enum rtx_code code;
725 machine_mode op0_mode = VOIDmode;
726 int prev_changes = num_changes;
727
728 if (!x)
729 return;
730
731 code = GET_CODE (x);
732 fmt = GET_RTX_FORMAT (code);
733 if (fmt[0] == 'e')
734 op0_mode = GET_MODE (XEXP (x, 0));
735
736 /* X matches FROM if it is the same rtx or they are both referring to the
737 same register in the same mode. Avoid calling rtx_equal_p unless the
738 operands look similar. */
739
740 if (x == from
741 || (REG_P (x) && REG_P (from)
742 && GET_MODE (x) == GET_MODE (from)
743 && REGNO (x) == REGNO (from))
744 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
745 && rtx_equal_p (x, from)))
746 {
747 validate_unshare_change (object, loc, to, 1);
748 return;
749 }
750
751 /* Call ourselves recursively to perform the replacements.
752 We must not replace inside an already replaced expression, otherwise we
753 get infinite recursion for replacements like (reg X)->(subreg (reg X)),
754 so we must special-case shared ASM_OPERANDS. */
755
756 if (GET_CODE (x) == PARALLEL)
757 {
758 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
759 {
760 if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
761 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
762 {
763 /* Verify that operands are really shared. */
764 gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
765 == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
766 (x, 0, j))));
767 validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
768 from, to, object, simplify);
769 }
770 else
771 validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
772 simplify);
773 }
774 }
775 else
776 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
777 {
778 if (fmt[i] == 'e')
779 validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
780 else if (fmt[i] == 'E')
781 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
782 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
783 simplify);
784 }
785
786 /* If we didn't substitute, there is nothing more to do. */
787 if (num_changes == prev_changes)
788 return;
789
790 /* ??? The regmove is no more, so is this aberration still necessary? */
791 /* Allow substituted expression to have different mode. This is used by
792 regmove to change mode of pseudo register. */
793 if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
794 op0_mode = GET_MODE (XEXP (x, 0));
795
796 /* Do changes needed to keep rtx consistent. Don't do any other
797 simplifications, as it is not our job. */
798 if (simplify)
799 simplify_while_replacing (loc, to, object, op0_mode);
800 }
801
802 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
803 with TO. After all changes have been made, validate by seeing
804 if INSN is still valid. */
805
806 int
807 validate_replace_rtx_subexp (rtx from, rtx to, rtx_insn *insn, rtx *loc)
808 {
809 validate_replace_rtx_1 (loc, from, to, insn, true);
810 return apply_change_group ();
811 }
812
813 /* Try replacing every occurrence of FROM in INSN with TO. After all
814 changes have been made, validate by seeing if INSN is still valid. */
815
816 int
817 validate_replace_rtx (rtx from, rtx to, rtx_insn *insn)
818 {
819 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
820 return apply_change_group ();
821 }
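
/* A small illustrative wrapper (example_propagate_const is hypothetical):
   try to substitute a known constant value CST for register REG throughout
   INSN, keeping the result only if INSN is still recognizable.  */

static bool
example_propagate_const (rtx reg, rtx cst, rtx_insn *insn)
{
  /* Replaces every occurrence, simplifies where possible, and applies the
     whole group of changes atomically.  */
  return validate_replace_rtx (reg, cst, insn);
}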
822
823 /* Try replacing every occurrence of FROM in WHERE with TO. Assume that WHERE
824 is a part of INSN. After all changes have been made, validate by seeing if
825 INSN is still valid.
826 validate_replace_rtx (from, to, insn) is equivalent to
827 validate_replace_rtx_part (from, to, &PATTERN (insn), insn). */
828
829 int
830 validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx_insn *insn)
831 {
832 validate_replace_rtx_1 (where, from, to, insn, true);
833 return apply_change_group ();
834 }
835
836 /* Same as above, but do not simplify rtx afterwards. */
837 int
838 validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
839 rtx_insn *insn)
840 {
841 validate_replace_rtx_1 (where, from, to, insn, false);
842 return apply_change_group ();
843
844 }
845
846 /* Try replacing every occurrence of FROM in INSN with TO. This will also
847 replace occurrences in REG_EQUAL and REG_EQUIV notes. */
848
849 void
850 validate_replace_rtx_group (rtx from, rtx to, rtx_insn *insn)
851 {
852 rtx note;
853 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
854 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
855 if (REG_NOTE_KIND (note) == REG_EQUAL
856 || REG_NOTE_KIND (note) == REG_EQUIV)
857 validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
858 }
859
860 /* Function called by note_uses to replace used subexpressions. */
861 struct validate_replace_src_data
862 {
863 rtx from; /* Old RTX */
864 rtx to; /* New RTX */
865 rtx_insn *insn; /* Insn in which substitution is occurring. */
866 };
867
868 static void
869 validate_replace_src_1 (rtx *x, void *data)
870 {
871 struct validate_replace_src_data *d
872 = (struct validate_replace_src_data *) data;
873
874 validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
875 }
876
877 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
878 SET_DESTs. */
879
880 void
881 validate_replace_src_group (rtx from, rtx to, rtx_insn *insn)
882 {
883 struct validate_replace_src_data d;
884
885 d.from = from;
886 d.to = to;
887 d.insn = insn;
888 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
889 }
890
891 /* Try to simplify INSN.
892 Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
893 pattern and return true if something was simplified. */
894
895 bool
896 validate_simplify_insn (rtx_insn *insn)
897 {
898 int i;
899 rtx pat = NULL;
900 rtx newpat = NULL;
901
902 pat = PATTERN (insn);
903
904 if (GET_CODE (pat) == SET)
905 {
906 newpat = simplify_rtx (SET_SRC (pat));
907 if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
908 validate_change (insn, &SET_SRC (pat), newpat, 1);
909 newpat = simplify_rtx (SET_DEST (pat));
910 if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
911 validate_change (insn, &SET_DEST (pat), newpat, 1);
912 }
913 else if (GET_CODE (pat) == PARALLEL)
914 for (i = 0; i < XVECLEN (pat, 0); i++)
915 {
916 rtx s = XVECEXP (pat, 0, i);
917
918 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
919 {
920 newpat = simplify_rtx (SET_SRC (s));
921 if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
922 validate_change (insn, &SET_SRC (s), newpat, 1);
923 newpat = simplify_rtx (SET_DEST (s));
924 if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
925 validate_change (insn, &SET_DEST (s), newpat, 1);
926 }
927 }
928 return ((num_changes_pending () > 0) && (apply_change_group () > 0));
929 }
930 \f
931 /* Return 1 if the insn using CC0 set by INSN does not contain
932 any ordered tests applied to the condition codes.
933 EQ and NE tests do not count. */
934
935 int
936 next_insn_tests_no_inequality (rtx_insn *insn)
937 {
938 rtx_insn *next = next_cc0_user (insn);
939
940 /* If there is no next insn, we have to take the conservative choice. */
941 if (next == 0)
942 return 0;
943
944 return (INSN_P (next)
945 && ! inequality_comparisons_p (PATTERN (next)));
946 }
947 \f
948 /* Return 1 if OP is a valid general operand for machine mode MODE.
949 This is either a register reference, a memory reference,
950 or a constant. In the case of a memory reference, the address
951 is checked for general validity for the target machine.
952
953 Register and memory references must have mode MODE in order to be valid,
954 but some constants have no machine mode and are valid for any mode.
955
956 If MODE is VOIDmode, OP is checked for validity for whatever mode
957 it has.
958
959 The main use of this function is as a predicate in match_operand
960 expressions in the machine description. */
961
962 int
963 general_operand (rtx op, machine_mode mode)
964 {
965 enum rtx_code code = GET_CODE (op);
966
967 if (mode == VOIDmode)
968 mode = GET_MODE (op);
969
970 /* Don't accept CONST_INT or anything similar
971 if the caller wants something floating. */
972 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
973 && GET_MODE_CLASS (mode) != MODE_INT
974 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
975 return 0;
976
977 if (CONST_INT_P (op)
978 && mode != VOIDmode
979 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
980 return 0;
981
982 if (CONSTANT_P (op))
983 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
984 || mode == VOIDmode)
985 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
986 && targetm.legitimate_constant_p (mode == VOIDmode
987 ? GET_MODE (op)
988 : mode, op));
989
990 /* Except for certain constants with VOIDmode, already checked for,
991 OP's mode must match MODE if MODE specifies a mode. */
992
993 if (GET_MODE (op) != mode)
994 return 0;
995
996 if (code == SUBREG)
997 {
998 rtx sub = SUBREG_REG (op);
999
1000 #ifdef INSN_SCHEDULING
1001 /* On machines that have insn scheduling, we want all memory
1002 references to be explicit, so outlaw paradoxical SUBREGs.
1003 However, we must allow them after reload so that they can
1004 get cleaned up by cleanup_subreg_operands. */
1005 if (!reload_completed && MEM_P (sub)
1006 && paradoxical_subreg_p (op))
1007 return 0;
1008 #endif
1009 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
1010 may result in an incorrect reference. We should simplify all valid
1011 subregs of MEM anyway. But allow this after reload because we
1012 might be called from cleanup_subreg_operands.
1013
1014 ??? This is a kludge. */
1015 if (!reload_completed && SUBREG_BYTE (op) != 0
1016 && MEM_P (sub))
1017 return 0;
1018
1019 #ifdef CANNOT_CHANGE_MODE_CLASS
1020 if (REG_P (sub)
1021 && REGNO (sub) < FIRST_PSEUDO_REGISTER
1022 && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
1023 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
1024 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
1025 /* LRA can generate some invalid SUBREGS just for matched
1026 operand reload presentation. LRA needs to treat them as
1027 valid. */
1028 && ! LRA_SUBREG_P (op))
1029 return 0;
1030 #endif
1031
1032 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
1033 create such rtl, and we must reject it. */
1034 if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
1035 /* LRA can use subreg to store a floating point value in an
1036 integer mode. Although the floating point and the
1037 integer modes need the same number of hard registers, the
1038 size of floating point mode can be less than the integer
1039 mode. */
1040 && ! lra_in_progress
1041 && paradoxical_subreg_p (op))
1042 return 0;
1043
1044 op = sub;
1045 code = GET_CODE (op);
1046 }
1047
1048 if (code == REG)
1049 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
1050 || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));
1051
1052 if (code == MEM)
1053 {
1054 rtx y = XEXP (op, 0);
1055
1056 if (! volatile_ok && MEM_VOLATILE_P (op))
1057 return 0;
1058
1059 /* Use the mem's mode, since it will be reloaded thus. LRA can
1060 generate move insns with invalid addresses, which are made valid
1061 and efficiently computed by LRA through further numerous
1062 transformations. */
1063 if (lra_in_progress
1064 || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
1065 return 1;
1066 }
1067
1068 return 0;
1069 }
1070 \f
1071 /* Return 1 if OP is a valid memory address for a memory reference
1072 of mode MODE.
1073
1074 The main use of this function is as a predicate in match_operand
1075 expressions in the machine description. */
1076
1077 int
1078 address_operand (rtx op, machine_mode mode)
1079 {
1080 return memory_address_p (mode, op);
1081 }
1082
1083 /* Return 1 if OP is a register reference of mode MODE.
1084 If MODE is VOIDmode, accept a register in any mode.
1085
1086 The main use of this function is as a predicate in match_operand
1087 expressions in the machine description. */
1088
1089 int
1090 register_operand (rtx op, machine_mode mode)
1091 {
1092 if (GET_CODE (op) == SUBREG)
1093 {
1094 rtx sub = SUBREG_REG (op);
1095
1096 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1097 because it is guaranteed to be reloaded into one.
1098 Just make sure the MEM is valid in itself.
1099 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1100 but currently it does result from (SUBREG (REG)...) where the
1101 reg went on the stack.) */
1102 if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
1103 return 0;
1104 }
1105 else if (!REG_P (op))
1106 return 0;
1107 return general_operand (op, mode);
1108 }
1109
1110 /* Return 1 for a register in Pmode; ignore the tested mode. */
1111
1112 int
1113 pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
1114 {
1115 return register_operand (op, Pmode);
1116 }
1117
1118 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1119 or a hard register. */
1120
1121 int
1122 scratch_operand (rtx op, machine_mode mode)
1123 {
1124 if (GET_MODE (op) != mode && mode != VOIDmode)
1125 return 0;
1126
1127 return (GET_CODE (op) == SCRATCH
1128 || (REG_P (op)
1129 && (lra_in_progress
1130 || (REGNO (op) < FIRST_PSEUDO_REGISTER
1131 && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
1132 }
1133
1134 /* Return 1 if OP is a valid immediate operand for mode MODE.
1135
1136 The main use of this function is as a predicate in match_operand
1137 expressions in the machine description. */
1138
1139 int
1140 immediate_operand (rtx op, machine_mode mode)
1141 {
1142 /* Don't accept CONST_INT or anything similar
1143 if the caller wants something floating. */
1144 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1145 && GET_MODE_CLASS (mode) != MODE_INT
1146 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1147 return 0;
1148
1149 if (CONST_INT_P (op)
1150 && mode != VOIDmode
1151 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1152 return 0;
1153
1154 return (CONSTANT_P (op)
1155 && (GET_MODE (op) == mode || mode == VOIDmode
1156 || GET_MODE (op) == VOIDmode)
1157 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1158 && targetm.legitimate_constant_p (mode == VOIDmode
1159 ? GET_MODE (op)
1160 : mode, op));
1161 }
1162
1163 /* Returns 1 if OP is an operand that is a CONST_INT of mode MODE. */
1164
1165 int
1166 const_int_operand (rtx op, machine_mode mode)
1167 {
1168 if (!CONST_INT_P (op))
1169 return 0;
1170
1171 if (mode != VOIDmode
1172 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1173 return 0;
1174
1175 return 1;
1176 }
1177
1178 #if TARGET_SUPPORTS_WIDE_INT
1179 /* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
1180 of mode MODE. */
1181 int
1182 const_scalar_int_operand (rtx op, machine_mode mode)
1183 {
1184 if (!CONST_SCALAR_INT_P (op))
1185 return 0;
1186
1187 if (CONST_INT_P (op))
1188 return const_int_operand (op, mode);
1189
1190 if (mode != VOIDmode)
1191 {
1192 int prec = GET_MODE_PRECISION (mode);
1193 int bitsize = GET_MODE_BITSIZE (mode);
1194
1195 if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
1196 return 0;
1197
1198 if (prec == bitsize)
1199 return 1;
1200 else
1201 {
1202 /* Multiword partial int. */
1203 HOST_WIDE_INT x
1204 = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
1205 return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
1206 }
1207 }
1208 return 1;
1209 }
1210
1211 /* Returns 1 if OP is an operand that is a constant integer or constant
1212 floating-point number of MODE. */
1213
1214 int
1215 const_double_operand (rtx op, machine_mode mode)
1216 {
1217 return (GET_CODE (op) == CONST_DOUBLE)
1218 && (GET_MODE (op) == mode || mode == VOIDmode);
1219 }
1220 #else
1221 /* Returns 1 if OP is an operand that is a constant integer or constant
1222 floating-point number of MODE. */
1223
1224 int
1225 const_double_operand (rtx op, machine_mode mode)
1226 {
1227 /* Don't accept CONST_INT or anything similar
1228 if the caller wants something floating. */
1229 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1230 && GET_MODE_CLASS (mode) != MODE_INT
1231 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1232 return 0;
1233
1234 return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
1235 && (mode == VOIDmode || GET_MODE (op) == mode
1236 || GET_MODE (op) == VOIDmode));
1237 }
1238 #endif
1239 /* Return 1 if OP is a general operand that is not an immediate
1240 operand of mode MODE. */
1241
1242 int
1243 nonimmediate_operand (rtx op, machine_mode mode)
1244 {
1245 return (general_operand (op, mode) && ! CONSTANT_P (op));
1246 }
1247
1248 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1249
1250 int
1251 nonmemory_operand (rtx op, machine_mode mode)
1252 {
1253 if (CONSTANT_P (op))
1254 return immediate_operand (op, mode);
1255 return register_operand (op, mode);
1256 }
1257
1258 /* Return 1 if OP is a valid operand that stands for pushing a
1259 value of mode MODE onto the stack.
1260
1261 The main use of this function is as a predicate in match_operand
1262 expressions in the machine description. */
1263
1264 int
1265 push_operand (rtx op, machine_mode mode)
1266 {
1267 unsigned int rounded_size = GET_MODE_SIZE (mode);
1268
1269 #ifdef PUSH_ROUNDING
1270 rounded_size = PUSH_ROUNDING (rounded_size);
1271 #endif
1272
1273 if (!MEM_P (op))
1274 return 0;
1275
1276 if (mode != VOIDmode && GET_MODE (op) != mode)
1277 return 0;
1278
1279 op = XEXP (op, 0);
1280
1281 if (rounded_size == GET_MODE_SIZE (mode))
1282 {
1283 if (GET_CODE (op) != STACK_PUSH_CODE)
1284 return 0;
1285 }
1286 else
1287 {
1288 if (GET_CODE (op) != PRE_MODIFY
1289 || GET_CODE (XEXP (op, 1)) != PLUS
1290 || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1291 || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
1292 || INTVAL (XEXP (XEXP (op, 1), 1))
1293 != ((STACK_GROWS_DOWNWARD ? -1 : 1) * (int) rounded_size))
1294 return 0;
1295 }
1296
1297 return XEXP (op, 0) == stack_pointer_rtx;
1298 }
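
/* For example, on a typical target where the stack grows downward,
   STACK_PUSH_CODE is PRE_DEC and PUSH_ROUNDING does not pad SImode, a valid
   SImode push operand has the shape

       (mem:SI (pre_dec:P (reg:P sp)))

   where SP is the stack pointer; a padded push must instead use the
   PRE_MODIFY form checked above.  */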
1299
1300 /* Return 1 if OP is a valid operand that stands for popping a
1301 value of mode MODE off the stack.
1302
1303 The main use of this function is as a predicate in match_operand
1304 expressions in the machine description. */
1305
1306 int
1307 pop_operand (rtx op, machine_mode mode)
1308 {
1309 if (!MEM_P (op))
1310 return 0;
1311
1312 if (mode != VOIDmode && GET_MODE (op) != mode)
1313 return 0;
1314
1315 op = XEXP (op, 0);
1316
1317 if (GET_CODE (op) != STACK_POP_CODE)
1318 return 0;
1319
1320 return XEXP (op, 0) == stack_pointer_rtx;
1321 }
1322
1323 /* Return 1 if ADDR is a valid memory address
1324 for mode MODE in address space AS. */
1325
1326 int
1327 memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
1328 rtx addr, addr_space_t as)
1329 {
1330 #ifdef GO_IF_LEGITIMATE_ADDRESS
1331 gcc_assert (ADDR_SPACE_GENERIC_P (as));
1332 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1333 return 0;
1334
1335 win:
1336 return 1;
1337 #else
1338 return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
1339 #endif
1340 }
1341
1342 /* Return 1 if OP is a valid memory reference with mode MODE,
1343 including a valid address.
1344
1345 The main use of this function is as a predicate in match_operand
1346 expressions in the machine description. */
1347
1348 int
1349 memory_operand (rtx op, machine_mode mode)
1350 {
1351 rtx inner;
1352
1353 if (! reload_completed)
1354 /* Note that no SUBREG is a memory operand before end of reload pass,
1355 because (SUBREG (MEM...)) forces reloading into a register. */
1356 return MEM_P (op) && general_operand (op, mode);
1357
1358 if (mode != VOIDmode && GET_MODE (op) != mode)
1359 return 0;
1360
1361 inner = op;
1362 if (GET_CODE (inner) == SUBREG)
1363 inner = SUBREG_REG (inner);
1364
1365 return (MEM_P (inner) && general_operand (op, mode));
1366 }
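
/* A minimal sketch of how a target predicate is usually composed from the
   routines above (example_reg_or_mem_operand is hypothetical, not a real
   GCC predicate).  */

static int
example_reg_or_mem_operand (rtx op, machine_mode mode)
{
  /* Accept anything register_operand accepts, plus any memory reference of
     the requested mode with a valid address.  */
  return register_operand (op, mode) || memory_operand (op, mode);
}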
1367
1368 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1369 that is, a memory reference whose address is a general_operand. */
1370
1371 int
1372 indirect_operand (rtx op, machine_mode mode)
1373 {
1374 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1375 if (! reload_completed
1376 && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
1377 {
1378 int offset = SUBREG_BYTE (op);
1379 rtx inner = SUBREG_REG (op);
1380
1381 if (mode != VOIDmode && GET_MODE (op) != mode)
1382 return 0;
1383
1384 /* The only way that we can have a general_operand as the resulting
1385 address is if OFFSET is zero and the address already is an operand
1386 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1387 operand. */
1388
1389 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1390 || (GET_CODE (XEXP (inner, 0)) == PLUS
1391 && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
1392 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1393 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1394 }
1395
1396 return (MEM_P (op)
1397 && memory_operand (op, mode)
1398 && general_operand (XEXP (op, 0), Pmode));
1399 }
1400
1401 /* Return 1 if this is an ordered comparison operator (not including
1402 ORDERED and UNORDERED). */
1403
1404 int
1405 ordered_comparison_operator (rtx op, machine_mode mode)
1406 {
1407 if (mode != VOIDmode && GET_MODE (op) != mode)
1408 return false;
1409 switch (GET_CODE (op))
1410 {
1411 case EQ:
1412 case NE:
1413 case LT:
1414 case LTU:
1415 case LE:
1416 case LEU:
1417 case GT:
1418 case GTU:
1419 case GE:
1420 case GEU:
1421 return true;
1422 default:
1423 return false;
1424 }
1425 }
1426
1427 /* Return 1 if this is a comparison operator. This allows the use of
1428 MATCH_OPERATOR to recognize all the branch insns. */
1429
1430 int
1431 comparison_operator (rtx op, machine_mode mode)
1432 {
1433 return ((mode == VOIDmode || GET_MODE (op) == mode)
1434 && COMPARISON_P (op));
1435 }
1436 \f
1437 /* If BODY is an insn body that uses ASM_OPERANDS, return it. */
1438
1439 rtx
1440 extract_asm_operands (rtx body)
1441 {
1442 rtx tmp;
1443 switch (GET_CODE (body))
1444 {
1445 case ASM_OPERANDS:
1446 return body;
1447
1448 case SET:
1449 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1450 tmp = SET_SRC (body);
1451 if (GET_CODE (tmp) == ASM_OPERANDS)
1452 return tmp;
1453 break;
1454
1455 case PARALLEL:
1456 tmp = XVECEXP (body, 0, 0);
1457 if (GET_CODE (tmp) == ASM_OPERANDS)
1458 return tmp;
1459 if (GET_CODE (tmp) == SET)
1460 {
1461 tmp = SET_SRC (tmp);
1462 if (GET_CODE (tmp) == ASM_OPERANDS)
1463 return tmp;
1464 }
1465 break;
1466
1467 default:
1468 break;
1469 }
1470 return NULL;
1471 }
1472
1473 /* If BODY is an insn body that uses ASM_OPERANDS,
1474 return the number of operands (both input and output) in the insn.
1475 If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
1476 return 0.
1477 Otherwise return -1. */
1478
1479 int
1480 asm_noperands (const_rtx body)
1481 {
1482 rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
1483 int i, n_sets = 0;
1484
1485 if (asm_op == NULL)
1486 {
1487 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) >= 2
1488 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
1489 {
1490 /* body is [(asm_input ...) (clobber (reg ...))...]. */
1491 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1492 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1493 return -1;
1494 return 0;
1495 }
1496 return -1;
1497 }
1498
1499 if (GET_CODE (body) == SET)
1500 n_sets = 1;
1501 else if (GET_CODE (body) == PARALLEL)
1502 {
1503 if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
1504 {
1505 /* Multiple output operands, or 1 output plus some clobbers:
1506 body is
1507 [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1508 /* Count backwards through CLOBBERs to determine number of SETs. */
1509 for (i = XVECLEN (body, 0); i > 0; i--)
1510 {
1511 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1512 break;
1513 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1514 return -1;
1515 }
1516
1517 /* N_SETS is now the number of output operands. */
1518 n_sets = i;
1519
1520 /* Verify that all the SETs we have
1521 came from a single original asm_operands insn
1522 (so that invalid combinations are blocked). */
1523 for (i = 0; i < n_sets; i++)
1524 {
1525 rtx elt = XVECEXP (body, 0, i);
1526 if (GET_CODE (elt) != SET)
1527 return -1;
1528 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1529 return -1;
1530 /* If these ASM_OPERANDS rtx's came from different original insns
1531 then they aren't allowed together. */
1532 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1533 != ASM_OPERANDS_INPUT_VEC (asm_op))
1534 return -1;
1535 }
1536 }
1537 else
1538 {
1539 /* 0 outputs, but some clobbers:
1540 body is [(asm_operands ...) (clobber (reg ...))...]. */
1541 /* Make sure all the other parallel things really are clobbers. */
1542 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1543 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1544 return -1;
1545 }
1546 }
1547
1548 return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
1549 + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
1550 }
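
/* A worked example (hypothetical asm statement): for

       asm ("foo %0,%1,%2" : "=r" (a), "=r" (b) : "r" (c));

   the body is a PARALLEL of two SETs (plus any clobbers), so N_SETS is 2,
   there is one input and no labels, and asm_noperands returns 3.  */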
1551
1552 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1553 copy its operands (both input and output) into the vector OPERANDS,
1554 the locations of the operands within the insn into the vector OPERAND_LOCS,
1555 and the constraints for the operands into CONSTRAINTS.
1556 Write the modes of the operands into MODES.
1557 Write the location info into LOC.
1558 Return the assembler-template.
1559 If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
1560 return the basic assembly string.
1561
1562 If LOC, MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1563 we don't store that info. */
1564
1565 const char *
1566 decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
1567 const char **constraints, machine_mode *modes,
1568 location_t *loc)
1569 {
1570 int nbase = 0, n, i;
1571 rtx asmop;
1572
1573 switch (GET_CODE (body))
1574 {
1575 case ASM_OPERANDS:
1576 /* Zero output asm: BODY is (asm_operands ...). */
1577 asmop = body;
1578 break;
1579
1580 case SET:
1581 /* Single output asm: BODY is (set OUTPUT (asm_operands ...)). */
1582 asmop = SET_SRC (body);
1583
1584 /* The output is in the SET.
1585 Its constraint is in the ASM_OPERANDS itself. */
1586 if (operands)
1587 operands[0] = SET_DEST (body);
1588 if (operand_locs)
1589 operand_locs[0] = &SET_DEST (body);
1590 if (constraints)
1591 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1592 if (modes)
1593 modes[0] = GET_MODE (SET_DEST (body));
1594 nbase = 1;
1595 break;
1596
1597 case PARALLEL:
1598 {
1599 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1600
1601 asmop = XVECEXP (body, 0, 0);
1602 if (GET_CODE (asmop) == SET)
1603 {
1604 asmop = SET_SRC (asmop);
1605
1606 /* At least one output, plus some CLOBBERs. The outputs are in
1607 the SETs. Their constraints are in the ASM_OPERANDS itself. */
1608 for (i = 0; i < nparallel; i++)
1609 {
1610 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1611 break; /* Past last SET */
1612 if (operands)
1613 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1614 if (operand_locs)
1615 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1616 if (constraints)
1617 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1618 if (modes)
1619 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1620 }
1621 nbase = i;
1622 }
1623 else if (GET_CODE (asmop) == ASM_INPUT)
1624 {
1625 if (loc)
1626 *loc = ASM_INPUT_SOURCE_LOCATION (asmop);
1627 return XSTR (asmop, 0);
1628 }
1629 break;
1630 }
1631
1632 default:
1633 gcc_unreachable ();
1634 }
1635
1636 n = ASM_OPERANDS_INPUT_LENGTH (asmop);
1637 for (i = 0; i < n; i++)
1638 {
1639 if (operand_locs)
1640 operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
1641 if (operands)
1642 operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
1643 if (constraints)
1644 constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1645 if (modes)
1646 modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1647 }
1648 nbase += n;
1649
1650 n = ASM_OPERANDS_LABEL_LENGTH (asmop);
1651 for (i = 0; i < n; i++)
1652 {
1653 if (operand_locs)
1654 operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
1655 if (operands)
1656 operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
1657 if (constraints)
1658 constraints[nbase + i] = "";
1659 if (modes)
1660 modes[nbase + i] = Pmode;
1661 }
1662
1663 if (loc)
1664 *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);
1665
1666 return ASM_OPERANDS_TEMPLATE (asmop);
1667 }
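
/* A minimal usage sketch (name hypothetical): fetch just the template
   string and the operand modes of a recognized asm body.  MODES must have
   room for asm_noperands (body) entries; the remaining output arrays are
   not needed here, so NULL is passed for them.  */

static const char *
example_asm_template_and_modes (rtx body, machine_mode *modes,
				location_t *loc)
{
  return decode_asm_operands (body, NULL, NULL, NULL, modes, loc);
}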
1668
1669 /* Parse inline assembly string STRING and determine which operands are
1670 referenced by % markers. For the first NOPERANDS operands, set USED[I]
1671 to true if operand I is referenced.
1672
1673 This is intended to distinguish barrier-like asms such as:
1674
1675 asm ("" : "=m" (...));
1676
1677 from real references such as:
1678
1679 asm ("sw\t$0, %0" : "=m" (...)); */
1680
1681 void
1682 get_referenced_operands (const char *string, bool *used,
1683 unsigned int noperands)
1684 {
1685 memset (used, 0, sizeof (bool) * noperands);
1686 const char *p = string;
1687 while (*p)
1688 switch (*p)
1689 {
1690 case '%':
1691 p += 1;
1692 /* A letter followed by a digit indicates an operand number. */
1693 if (ISALPHA (p[0]) && ISDIGIT (p[1]))
1694 p += 1;
1695 if (ISDIGIT (*p))
1696 {
1697 char *endptr;
1698 unsigned long opnum = strtoul (p, &endptr, 10);
1699 if (endptr != p && opnum < noperands)
1700 used[opnum] = true;
1701 p = endptr;
1702 }
1703 else
1704 p += 1;
1705 break;
1706
1707 default:
1708 p++;
1709 break;
1710 }
1711 }
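
/* For example, given the template "sw\t$0, %0" with NOPERANDS == 2, only
   used[0] is set to true; an empty template such as "" leaves every entry
   false, which is what identifies a barrier-like asm.  */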
1712
1713 /* Check if an asm_operand matches its constraints.
1714 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1715
1716 int
1717 asm_operand_ok (rtx op, const char *constraint, const char **constraints)
1718 {
1719 int result = 0;
1720 bool incdec_ok = false;
1721
1722 /* Use constrain_operands after reload. */
1723 gcc_assert (!reload_completed);
1724
1725 /* Empty constraint string is the same as "X,...,X", i.e. X for as
1726 many alternatives as required to match the other operands. */
1727 if (*constraint == '\0')
1728 result = 1;
1729
1730 while (*constraint)
1731 {
1732 enum constraint_num cn;
1733 char c = *constraint;
1734 int len;
1735 switch (c)
1736 {
1737 case ',':
1738 constraint++;
1739 continue;
1740
1741 case '0': case '1': case '2': case '3': case '4':
1742 case '5': case '6': case '7': case '8': case '9':
1743 /* If caller provided constraints pointer, look up
1744 the matching constraint. Otherwise, our caller should have
1745 given us the proper matching constraint, but we can't
1746 actually fail the check if they didn't. Indicate that
1747 results are inconclusive. */
1748 if (constraints)
1749 {
1750 char *end;
1751 unsigned long match;
1752
1753 match = strtoul (constraint, &end, 10);
1754 if (!result)
1755 result = asm_operand_ok (op, constraints[match], NULL);
1756 constraint = (const char *) end;
1757 }
1758 else
1759 {
1760 do
1761 constraint++;
1762 while (ISDIGIT (*constraint));
1763 if (! result)
1764 result = -1;
1765 }
1766 continue;
1767
1768 /* The rest of the compiler assumes that reloading the address
1769 of a MEM into a register will make it fit an 'o' constraint.
1770 That is, if it sees a MEM operand for an 'o' constraint,
1771 it assumes that (mem (base-reg)) will fit.
1772
1773 That assumption fails on targets that don't have offsettable
1774 addresses at all. We therefore need to treat 'o' asm
1775 constraints as a special case and only accept operands that
1776 are already offsettable, thus proving that at least one
1777 offsettable address exists. */
1778 case 'o': /* offsettable */
1779 if (offsettable_nonstrict_memref_p (op))
1780 result = 1;
1781 break;
1782
1783 case 'g':
1784 if (general_operand (op, VOIDmode))
1785 result = 1;
1786 break;
1787
1788 case '<':
1789 case '>':
1790 /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
1791 to exist, excepting those that expand_call created. Further,
1792 on some machines which do not have generalized auto inc/dec,
1793 an inc/dec is not a memory_operand.
1794
1795 Match any memory and hope things are resolved after reload. */
1796 incdec_ok = true;
1797 /* FALLTHRU */
1798 default:
1799 cn = lookup_constraint (constraint);
1800 switch (get_constraint_type (cn))
1801 {
1802 case CT_REGISTER:
1803 if (!result
1804 && reg_class_for_constraint (cn) != NO_REGS
1805 && GET_MODE (op) != BLKmode
1806 && register_operand (op, VOIDmode))
1807 result = 1;
1808 break;
1809
1810 case CT_CONST_INT:
1811 if (!result
1812 && CONST_INT_P (op)
1813 && insn_const_int_ok_for_constraint (INTVAL (op), cn))
1814 result = 1;
1815 break;
1816
1817 case CT_MEMORY:
1818 case CT_SPECIAL_MEMORY:
1819 /* Every memory operand can be reloaded to fit. */
1820 result = result || memory_operand (op, VOIDmode);
1821 break;
1822
1823 case CT_ADDRESS:
1824 /* Every address operand can be reloaded to fit. */
1825 result = result || address_operand (op, VOIDmode);
1826 break;
1827
1828 case CT_FIXED_FORM:
1829 result = result || constraint_satisfied_p (op, cn);
1830 break;
1831 }
1832 break;
1833 }
1834 len = CONSTRAINT_LEN (c, constraint);
1835 do
1836 constraint++;
1837 while (--len && *constraint);
1838 if (len)
1839 return 0;
1840 }
1841
1842 /* For operands without < or > constraints, reject side-effects. */
1843 if (AUTO_INC_DEC && !incdec_ok && result && MEM_P (op))
1844 switch (GET_CODE (XEXP (op, 0)))
1845 {
1846 case PRE_INC:
1847 case POST_INC:
1848 case PRE_DEC:
1849 case POST_DEC:
1850 case PRE_MODIFY:
1851 case POST_MODIFY:
1852 return 0;
1853 default:
1854 break;
1855 }
1856
1857 return result;
1858 }
1859 \f
1860 /* Given an rtx *P, if it is a sum containing an integer constant term,
1861 return the location (type rtx *) of the pointer to that constant term.
1862 Otherwise, return a null pointer. */
1863
1864 rtx *
1865 find_constant_term_loc (rtx *p)
1866 {
1867 rtx *tem;
1868 enum rtx_code code = GET_CODE (*p);
1869
1870 /* If *P IS such a constant term, P is its location. */
1871
1872 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1873 || code == CONST)
1874 return p;
1875
1876 /* Otherwise, if not a sum, it has no constant term. */
1877
1878 if (GET_CODE (*p) != PLUS)
1879 return 0;
1880
1881 /* If one of the summands is constant, return its location. */
1882
1883 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1884 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1885 return p;
1886
1887 /* Otherwise, check each summand for containing a constant term. */
1888
1889 if (XEXP (*p, 0) != 0)
1890 {
1891 tem = find_constant_term_loc (&XEXP (*p, 0));
1892 if (tem != 0)
1893 return tem;
1894 }
1895
1896 if (XEXP (*p, 1) != 0)
1897 {
1898 tem = find_constant_term_loc (&XEXP (*p, 1));
1899 if (tem != 0)
1900 return tem;
1901 }
1902
1903 return 0;
1904 }
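
/* For example, given *P == (plus:SI (reg:SI R) (const_int 4)) this returns
   &XEXP (*P, 1), the location of the const_int; for an expression with no
   constant term, such as a bare (reg:SI R), it returns a null pointer.  */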
1905 \f
1906 /* Return 1 if OP is a memory reference
1907 whose address contains no side effects
1908 and remains valid after the addition
1909 of a positive integer less than the
1910 size of the object being referenced.
1911
1912 We assume that the original address is valid and do not check it.
1913
1914 This uses strict_memory_address_p as a subroutine, so
1915 don't use it before reload. */
1916
1917 int
1918 offsettable_memref_p (rtx op)
1919 {
1920 return ((MEM_P (op))
1921 && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
1922 MEM_ADDR_SPACE (op)));
1923 }
1924
1925 /* Similar, but don't require a strictly valid mem ref:
1926 consider pseudo-regs valid as index or base regs. */
1927
1928 int
1929 offsettable_nonstrict_memref_p (rtx op)
1930 {
1931 return ((MEM_P (op))
1932 && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
1933 MEM_ADDR_SPACE (op)));
1934 }
1935
1936 /* Return 1 if Y is a memory address which contains no side effects
1937 and would remain valid for address space AS after the addition of
1938 a positive integer less than the size of that mode.
1939
1940 We assume that the original address is valid and do not check it.
1941 We do check that it is valid for narrower modes.
1942
1943 If STRICTP is nonzero, we require a strictly valid address,
1944 for the sake of use in reload.c. */
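/* For example (illustrative): for an SImode reference (4 bytes on typical
targets) whose address is (plus (reg) (const_int 12)), we test whether
(plus (reg) (const_int 15)) is still a valid QImode address. */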
1945
1946 int
1947 offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
1948 addr_space_t as)
1949 {
1950 enum rtx_code ycode = GET_CODE (y);
1951 rtx z;
1952 rtx y1 = y;
1953 rtx *y2;
1954 int (*addressp) (machine_mode, rtx, addr_space_t) =
1955 (strictp ? strict_memory_address_addr_space_p
1956 : memory_address_addr_space_p);
1957 unsigned int mode_sz = GET_MODE_SIZE (mode);
1958
1959 if (CONSTANT_ADDRESS_P (y))
1960 return 1;
1961
1962 /* Adjusting an offsettable address involves changing to a narrower mode.
1963 Make sure that's OK. */
1964
1965 if (mode_dependent_address_p (y, as))
1966 return 0;
1967
1968 machine_mode address_mode = GET_MODE (y);
1969 if (address_mode == VOIDmode)
1970 address_mode = targetm.addr_space.address_mode (as);
1971 #ifdef POINTERS_EXTEND_UNSIGNED
1972 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
1973 #endif
1974
1975 /* ??? How much offset does an offsettable BLKmode reference need?
1976 Clearly that depends on the situation in which it's being used.
1977 However, the current situation in which we test 0xffffffff is
1978 less than ideal. Caveat user. */
1979 if (mode_sz == 0)
1980 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1981
1982 /* If the expression contains a constant term,
1983 see if it remains valid when max possible offset is added. */
1984
1985 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1986 {
1987 int good;
1988
1989 y1 = *y2;
1990 *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
1991 /* Use QImode because an odd displacement may be automatically invalid
1992 for any wider mode. But it should be valid for a single byte. */
1993 good = (*addressp) (QImode, y, as);
1994
1995 /* In any case, restore old contents of memory. */
1996 *y2 = y1;
1997 return good;
1998 }
1999
2000 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
2001 return 0;
2002
2003 /* The offset added here is chosen as the maximum offset that
2004 any instruction could need to add when operating on something
2005 of the specified mode. We assume that if Y and Y+c are
2006 valid addresses then so is Y+d for all 0<d<c. adjust_address will
2007 go inside a LO_SUM here, so we do so as well. */
2008 if (GET_CODE (y) == LO_SUM
2009 && mode != BLKmode
2010 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
2011 z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
2012 plus_constant (address_mode, XEXP (y, 1),
2013 mode_sz - 1));
2014 #ifdef POINTERS_EXTEND_UNSIGNED
2015 /* Likewise for a ZERO_EXTEND from pointer_mode. */
2016 else if (POINTERS_EXTEND_UNSIGNED > 0
2017 && GET_CODE (y) == ZERO_EXTEND
2018 && GET_MODE (XEXP (y, 0)) == pointer_mode)
2019 z = gen_rtx_ZERO_EXTEND (address_mode,
2020 plus_constant (pointer_mode, XEXP (y, 0),
2021 mode_sz - 1));
2022 #endif
2023 else
2024 z = plus_constant (address_mode, y, mode_sz - 1);
2025
2026 /* Use QImode because an odd displacement may be automatically invalid
2027 for any wider mode. But it should be valid for a single byte. */
2028 return (*addressp) (QImode, z, as);
2029 }
2030
2031 /* Return 1 if ADDR is an address-expression whose effect depends
2032 on the mode of the memory reference it is used in.
2033
2034 ADDRSPACE is the address space associated with the address.
2035
2036 Autoincrement addressing is a typical example of mode-dependence
2037 because the amount of the increment depends on the mode. */
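/* For example (illustrative): a (post_inc (reg)) address advances the
register by the size of the access, so the same address rtx behaves
differently for QImode and SImode references; such codes are caught by the
explicit check below. */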
2038
2039 bool
2040 mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2041 {
2042 /* Auto-increment addressing with anything other than post_modify
2043 or pre_modify always introduces a mode dependency. Catch such
2044 cases now instead of deferring to the target. */
2045 if (GET_CODE (addr) == PRE_INC
2046 || GET_CODE (addr) == POST_INC
2047 || GET_CODE (addr) == PRE_DEC
2048 || GET_CODE (addr) == POST_DEC)
2049 return true;
2050
2051 return targetm.mode_dependent_address_p (addr, addrspace);
2052 }
2053 \f
2054 /* Return true if boolean attribute ATTR is supported. */
2055
2056 static bool
2057 have_bool_attr (bool_attr attr)
2058 {
2059 switch (attr)
2060 {
2061 case BA_ENABLED:
2062 return HAVE_ATTR_enabled;
2063 case BA_PREFERRED_FOR_SIZE:
2064 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size;
2065 case BA_PREFERRED_FOR_SPEED:
2066 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed;
2067 }
2068 gcc_unreachable ();
2069 }
2070
2071 /* Return the value of ATTR for instruction INSN. */
2072
2073 static bool
2074 get_bool_attr (rtx_insn *insn, bool_attr attr)
2075 {
2076 switch (attr)
2077 {
2078 case BA_ENABLED:
2079 return get_attr_enabled (insn);
2080 case BA_PREFERRED_FOR_SIZE:
2081 return get_attr_enabled (insn) && get_attr_preferred_for_size (insn);
2082 case BA_PREFERRED_FOR_SPEED:
2083 return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn);
2084 }
2085 gcc_unreachable ();
2086 }
2087
2088 /* Like get_bool_attr_mask, but don't use the cache. */
2089
2090 static alternative_mask
2091 get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr)
2092 {
2093 /* Temporarily install enough information for get_attr_<foo> to assume
2094 that the insn operands are already cached. As above, the attribute
2095 mustn't depend on the values of operands, so we don't provide their
2096 real values here. */
2097 rtx_insn *old_insn = recog_data.insn;
2098 int old_alternative = which_alternative;
2099
2100 recog_data.insn = insn;
2101 alternative_mask mask = ALL_ALTERNATIVES;
2102 int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives;
2103 for (int i = 0; i < n_alternatives; i++)
2104 {
2105 which_alternative = i;
2106 if (!get_bool_attr (insn, attr))
2107 mask &= ~ALTERNATIVE_BIT (i);
2108 }
2109
2110 recog_data.insn = old_insn;
2111 which_alternative = old_alternative;
2112 return mask;
2113 }
2114
2115 /* Return the mask of operand alternatives that are allowed for INSN
2116 by boolean attribute ATTR. This mask depends only on INSN and on
2117 the current target; it does not depend on things like the values of
2118 operands. */
2119
2120 static alternative_mask
2121 get_bool_attr_mask (rtx_insn *insn, bool_attr attr)
2122 {
2123 /* Quick exit for asms and for targets that don't use these attributes. */
2124 int code = INSN_CODE (insn);
2125 if (code < 0 || !have_bool_attr (attr))
2126 return ALL_ALTERNATIVES;
2127
2128 /* Calling get_attr_<foo> can be expensive, so cache the mask
2129 for speed. */
2130 if (!this_target_recog->x_bool_attr_masks[code][attr])
2131 this_target_recog->x_bool_attr_masks[code][attr]
2132 = get_bool_attr_mask_uncached (insn, attr);
2133 return this_target_recog->x_bool_attr_masks[code][attr];
2134 }
2135
2136 /* Return the set of alternatives of INSN that are allowed by the current
2137 target. */
2138
2139 alternative_mask
2140 get_enabled_alternatives (rtx_insn *insn)
2141 {
2142 return get_bool_attr_mask (insn, BA_ENABLED);
2143 }
2144
2145 /* Return the set of alternatives of INSN that are allowed by the current
2146 target and are preferred for the current size/speed optimization
2147 choice. */
2148
2149 alternative_mask
2150 get_preferred_alternatives (rtx_insn *insn)
2151 {
2152 if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
2153 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2154 else
2155 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2156 }
2157
2158 /* Return the set of alternatives of INSN that are allowed by the current
2159 target and are preferred for the size/speed optimization choice
2160 associated with BB. Passing a separate BB is useful if INSN has not
2161 been emitted yet or if we are considering moving it to a different
2162 block. */
2163
2164 alternative_mask
2165 get_preferred_alternatives (rtx_insn *insn, basic_block bb)
2166 {
2167 if (optimize_bb_for_speed_p (bb))
2168 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2169 else
2170 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2171 }
2172
2173 /* Assert that the cached boolean attributes for INSN are still accurate.
2174 The backend is required to define these attributes in a way that only
2175 depends on the current target (rather than operands, compiler phase,
2176 etc.). */
2177
2178 bool
2179 check_bool_attrs (rtx_insn *insn)
2180 {
2181 int code = INSN_CODE (insn);
2182 if (code >= 0)
2183 for (int i = 0; i <= BA_LAST; ++i)
2184 {
2185 enum bool_attr attr = (enum bool_attr) i;
2186 if (this_target_recog->x_bool_attr_masks[code][attr])
2187 gcc_assert (this_target_recog->x_bool_attr_masks[code][attr]
2188 == get_bool_attr_mask_uncached (insn, attr));
2189 }
2190 return true;
2191 }
2192
2193 /* Like extract_insn, but save the extracted insn and don't extract again
2194 when called again for the same insn, expecting that recog_data still
2195 contains valid information. This is used primarily by the gen_attr
2196 infrastructure, which often extracts the same insn again and again. */
2197 void
2198 extract_insn_cached (rtx_insn *insn)
2199 {
2200 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2201 return;
2202 extract_insn (insn);
2203 recog_data.insn = insn;
2204 }
2205
2206 /* Do uncached extract_insn, constrain_operands and complain about failures.
2207 This should be used when extracting a pre-existing constrained instruction
2208 if the caller wants to know which alternative was chosen. */
2209 void
2210 extract_constrain_insn (rtx_insn *insn)
2211 {
2212 extract_insn (insn);
2213 if (!constrain_operands (reload_completed, get_enabled_alternatives (insn)))
2214 fatal_insn_not_found (insn);
2215 }
2216
2217 /* Do cached extract_insn, constrain_operands and complain about failures.
2218 Used by insn_attrtab. */
2219 void
2220 extract_constrain_insn_cached (rtx_insn *insn)
2221 {
2222 extract_insn_cached (insn);
2223 if (which_alternative == -1
2224 && !constrain_operands (reload_completed,
2225 get_enabled_alternatives (insn)))
2226 fatal_insn_not_found (insn);
2227 }
2228
2229 /* Do cached constrain_operands on INSN and complain about failures. */
2230 int
2231 constrain_operands_cached (rtx_insn *insn, int strict)
2232 {
2233 if (which_alternative == -1)
2234 return constrain_operands (strict, get_enabled_alternatives (insn));
2235 else
2236 return 1;
2237 }
2238 \f
2239 /* Analyze INSN and fill in recog_data. */
2240
2241 void
2242 extract_insn (rtx_insn *insn)
2243 {
2244 int i;
2245 int icode;
2246 int noperands;
2247 rtx body = PATTERN (insn);
2248
2249 recog_data.n_operands = 0;
2250 recog_data.n_alternatives = 0;
2251 recog_data.n_dups = 0;
2252 recog_data.is_asm = false;
2253
2254 switch (GET_CODE (body))
2255 {
2256 case USE:
2257 case CLOBBER:
2258 case ASM_INPUT:
2259 case ADDR_VEC:
2260 case ADDR_DIFF_VEC:
2261 case VAR_LOCATION:
2262 return;
2263
2264 case SET:
2265 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2266 goto asm_insn;
2267 else
2268 goto normal_insn;
2269 case PARALLEL:
2270 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2271 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2272 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS
2273 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
2274 goto asm_insn;
2275 else
2276 goto normal_insn;
2277 case ASM_OPERANDS:
2278 asm_insn:
2279 recog_data.n_operands = noperands = asm_noperands (body);
2280 if (noperands >= 0)
2281 {
2282 /* This insn is an `asm' with operands. */
2283
2284 /* expand_asm_operands makes sure there aren't too many operands. */
2285 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2286
2287 /* Now get the operand values and constraints out of the insn. */
2288 decode_asm_operands (body, recog_data.operand,
2289 recog_data.operand_loc,
2290 recog_data.constraints,
2291 recog_data.operand_mode, NULL);
2292 memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2293 if (noperands > 0)
2294 {
2295 const char *p = recog_data.constraints[0];
2296 recog_data.n_alternatives = 1;
2297 while (*p)
2298 recog_data.n_alternatives += (*p++ == ',');
2299 }
2300 recog_data.is_asm = true;
2301 break;
2302 }
2303 fatal_insn_not_found (insn);
2304
2305 default:
2306 normal_insn:
2307 /* Ordinary insn: recognize it, get the operands via insn_extract
2308 and get the constraints. */
2309
2310 icode = recog_memoized (insn);
2311 if (icode < 0)
2312 fatal_insn_not_found (insn);
2313
2314 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2315 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2316 recog_data.n_dups = insn_data[icode].n_dups;
2317
2318 insn_extract (insn);
2319
2320 for (i = 0; i < noperands; i++)
2321 {
2322 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2323 recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2324 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2325 /* VOIDmode match_operands get their mode from the real operand. */
2326 if (recog_data.operand_mode[i] == VOIDmode)
2327 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2328 }
2329 }
2330 for (i = 0; i < noperands; i++)
2331 recog_data.operand_type[i]
2332 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2333 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2334 : OP_IN);
2335
2336 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2337
2338 recog_data.insn = NULL;
2339 which_alternative = -1;
2340 }
2341
2342 /* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS operands,
2343 N_ALTERNATIVES alternatives and constraint strings CONSTRAINTS.
2344 OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries and CONSTRAINTS
2345 has N_OPERANDS entries. */
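/* For example (illustrative): an operand whose constraint string is "r,m"
gets two operand_alternative entries: in alternative 0 its class comes from
the 'r' register constraint, and in alternative 1 memory_ok is set. */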
2346
2347 void
2348 preprocess_constraints (int n_operands, int n_alternatives,
2349 const char **constraints,
2350 operand_alternative *op_alt_base)
2351 {
2352 for (int i = 0; i < n_operands; i++)
2353 {
2354 int j;
2355 struct operand_alternative *op_alt;
2356 const char *p = constraints[i];
2357
2358 op_alt = op_alt_base;
2359
2360 for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
2361 {
2362 op_alt[i].cl = NO_REGS;
2363 op_alt[i].constraint = p;
2364 op_alt[i].matches = -1;
2365 op_alt[i].matched = -1;
2366
2367 if (*p == '\0' || *p == ',')
2368 {
2369 op_alt[i].anything_ok = 1;
2370 continue;
2371 }
2372
2373 for (;;)
2374 {
2375 char c = *p;
2376 if (c == '#')
2377 do
2378 c = *++p;
2379 while (c != ',' && c != '\0');
2380 if (c == ',' || c == '\0')
2381 {
2382 p++;
2383 break;
2384 }
2385
2386 switch (c)
2387 {
2388 case '?':
2389 op_alt[i].reject += 6;
2390 break;
2391 case '!':
2392 op_alt[i].reject += 600;
2393 break;
2394 case '&':
2395 op_alt[i].earlyclobber = 1;
2396 break;
2397
2398 case '0': case '1': case '2': case '3': case '4':
2399 case '5': case '6': case '7': case '8': case '9':
2400 {
2401 char *end;
2402 op_alt[i].matches = strtoul (p, &end, 10);
2403 op_alt[op_alt[i].matches].matched = i;
2404 p = end;
2405 }
2406 continue;
2407
2408 case 'X':
2409 op_alt[i].anything_ok = 1;
2410 break;
2411
2412 case 'g':
2413 op_alt[i].cl =
2414 reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
2415 break;
2416
2417 default:
2418 enum constraint_num cn = lookup_constraint (p);
2419 enum reg_class cl;
2420 switch (get_constraint_type (cn))
2421 {
2422 case CT_REGISTER:
2423 cl = reg_class_for_constraint (cn);
2424 if (cl != NO_REGS)
2425 op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl];
2426 break;
2427
2428 case CT_CONST_INT:
2429 break;
2430
2431 case CT_MEMORY:
2432 case CT_SPECIAL_MEMORY:
2433 op_alt[i].memory_ok = 1;
2434 break;
2435
2436 case CT_ADDRESS:
2437 op_alt[i].is_address = 1;
2438 op_alt[i].cl
2439 = (reg_class_subunion
2440 [(int) op_alt[i].cl]
2441 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2442 ADDRESS, SCRATCH)]);
2443 break;
2444
2445 case CT_FIXED_FORM:
2446 break;
2447 }
2448 break;
2449 }
2450 p += CONSTRAINT_LEN (c, p);
2451 }
2452 }
2453 }
2454 }
2455
2456 /* Return an array of operand_alternative structures for
2457 instruction ICODE. */
2458
2459 const operand_alternative *
2460 preprocess_insn_constraints (unsigned int icode)
2461 {
2462 gcc_checking_assert (IN_RANGE (icode, 0, NUM_INSN_CODES - 1));
2463 if (this_target_recog->x_op_alt[icode])
2464 return this_target_recog->x_op_alt[icode];
2465
2466 int n_operands = insn_data[icode].n_operands;
2467 if (n_operands == 0)
2468 return 0;
2469 /* Always provide at least one alternative so that which_op_alt ()
2470 works correctly. If the instruction has 0 alternatives (i.e. all
2471 constraint strings are empty) then each operand in this alternative
2472 will have anything_ok set. */
2473 int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
2474 int n_entries = n_operands * n_alternatives;
2475
2476 operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
2477 const char **constraints = XALLOCAVEC (const char *, n_operands);
2478
2479 for (int i = 0; i < n_operands; ++i)
2480 constraints[i] = insn_data[icode].operand[i].constraint;
2481 preprocess_constraints (n_operands, n_alternatives, constraints, op_alt);
2482
2483 this_target_recog->x_op_alt[icode] = op_alt;
2484 return op_alt;
2485 }
2486
2487 /* After calling extract_insn, you can use this function to extract some
2488 information from the constraint strings into a more usable form.
2489 The collected data is stored in recog_op_alt. */
2490
2491 void
2492 preprocess_constraints (rtx_insn *insn)
2493 {
2494 int icode = INSN_CODE (insn);
2495 if (icode >= 0)
2496 recog_op_alt = preprocess_insn_constraints (icode);
2497 else
2498 {
2499 int n_operands = recog_data.n_operands;
2500 int n_alternatives = recog_data.n_alternatives;
2501 int n_entries = n_operands * n_alternatives;
2502 memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
2503 preprocess_constraints (n_operands, n_alternatives,
2504 recog_data.constraints, asm_op_alt);
2505 recog_op_alt = asm_op_alt;
2506 }
2507 }
2508
2509 /* Check the operands of an insn against the insn's operand constraints
2510 and return 1 if they match any of the alternatives in ALTERNATIVES.
2511
2512 The information about the insn's operands, constraints, operand modes
2513 etc. is obtained from the global variables set up by extract_insn.
2514
2515 WHICH_ALTERNATIVE is set to a number which indicates which
2516 alternative of constraints was matched: 0 for the first alternative,
2517 1 for the next, etc.
2518
2519 In addition, when two operands are required to match
2520 and it happens that the output operand is (reg) while the
2521 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2522 make the output operand look like the input.
2523 This is because the output operand is the one the template will print.
2524
2525 This is used in final, just before printing the assembler code and by
2526 the routines that determine an insn's attribute.
2527
2528 If STRICT is a positive nonzero value, it means that we have been
2529 called after reload has been completed. In that case, we must
2530 do all checks strictly. If it is zero, it means that we have been called
2531 before reload has completed. In that case, we first try to see if we can
2532 find an alternative that matches strictly. If not, we try again, this
2533 time assuming that reload will fix up the insn. This provides a "best
2534 guess" for the alternative and is used to compute attributes of insns prior
2535 to reload. A negative value of STRICT is used for this internal call. */
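/* Typical use (illustrative sketch), mirroring extract_constrain_insn above:

     extract_insn (insn);
     if (constrain_operands (reload_completed,
                             get_enabled_alternatives (insn)))
       ... which_alternative now records the alternative that matched ...  */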
2536
2537 struct funny_match
2538 {
2539 int this_op, other;
2540 };
2541
2542 int
2543 constrain_operands (int strict, alternative_mask alternatives)
2544 {
2545 const char *constraints[MAX_RECOG_OPERANDS];
2546 int matching_operands[MAX_RECOG_OPERANDS];
2547 int earlyclobber[MAX_RECOG_OPERANDS];
2548 int c;
2549
2550 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2551 int funny_match_index;
2552
2553 which_alternative = 0;
2554 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2555 return 1;
2556
2557 for (c = 0; c < recog_data.n_operands; c++)
2558 {
2559 constraints[c] = recog_data.constraints[c];
2560 matching_operands[c] = -1;
2561 }
2562
2563 do
2564 {
2565 int seen_earlyclobber_at = -1;
2566 int opno;
2567 int lose = 0;
2568 funny_match_index = 0;
2569
2570 if (!TEST_BIT (alternatives, which_alternative))
2571 {
2572 int i;
2573
2574 for (i = 0; i < recog_data.n_operands; i++)
2575 constraints[i] = skip_alternative (constraints[i]);
2576
2577 which_alternative++;
2578 continue;
2579 }
2580
2581 for (opno = 0; opno < recog_data.n_operands; opno++)
2582 {
2583 rtx op = recog_data.operand[opno];
2584 machine_mode mode = GET_MODE (op);
2585 const char *p = constraints[opno];
2586 int offset = 0;
2587 int win = 0;
2588 int val;
2589 int len;
2590
2591 earlyclobber[opno] = 0;
2592
2593 /* A unary operator may be accepted by the predicate, but it
2594 is irrelevant for matching constraints. */
2595 if (UNARY_P (op))
2596 op = XEXP (op, 0);
2597
2598 if (GET_CODE (op) == SUBREG)
2599 {
2600 if (REG_P (SUBREG_REG (op))
2601 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2602 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2603 GET_MODE (SUBREG_REG (op)),
2604 SUBREG_BYTE (op),
2605 GET_MODE (op));
2606 op = SUBREG_REG (op);
2607 }
2608
2609 /* An empty constraint or empty alternative
2610 allows anything which matched the pattern. */
2611 if (*p == 0 || *p == ',')
2612 win = 1;
2613
2614 do
2615 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2616 {
2617 case '\0':
2618 len = 0;
2619 break;
2620 case ',':
2621 c = '\0';
2622 break;
2623
2624 case '#':
2625 /* Ignore rest of this alternative as far as
2626 constraint checking is concerned. */
2627 do
2628 p++;
2629 while (*p && *p != ',');
2630 len = 0;
2631 break;
2632
2633 case '&':
2634 earlyclobber[opno] = 1;
2635 if (seen_earlyclobber_at < 0)
2636 seen_earlyclobber_at = opno;
2637 break;
2638
2639 case '0': case '1': case '2': case '3': case '4':
2640 case '5': case '6': case '7': case '8': case '9':
2641 {
2642 /* This operand must be the same as a previous one.
2643 This kind of constraint is used for instructions such
2644 as add when they take only two operands.
2645
2646 Note that the lower-numbered operand is passed first.
2647
2648 If we are not testing strictly, assume that this
2649 constraint will be satisfied. */
2650
2651 char *end;
2652 int match;
2653
2654 match = strtoul (p, &end, 10);
2655 p = end;
2656
2657 if (strict < 0)
2658 val = 1;
2659 else
2660 {
2661 rtx op1 = recog_data.operand[match];
2662 rtx op2 = recog_data.operand[opno];
2663
2664 /* A unary operator may be accepted by the predicate,
2665 but it is irrelevant for matching constraints. */
2666 if (UNARY_P (op1))
2667 op1 = XEXP (op1, 0);
2668 if (UNARY_P (op2))
2669 op2 = XEXP (op2, 0);
2670
2671 val = operands_match_p (op1, op2);
2672 }
2673
2674 matching_operands[opno] = match;
2675 matching_operands[match] = opno;
2676
2677 if (val != 0)
2678 win = 1;
2679
2680 /* If output is *x and input is *--x, arrange later
2681 to change the output to *--x as well, since the
2682 output op is the one that will be printed. */
2683 if (val == 2 && strict > 0)
2684 {
2685 funny_match[funny_match_index].this_op = opno;
2686 funny_match[funny_match_index++].other = match;
2687 }
2688 }
2689 len = 0;
2690 break;
2691
2692 case 'p':
2693 /* p is used for address_operands. When we are called by
2694 gen_reload, no one will have checked that the address is
2695 strictly valid, i.e., that all pseudos requiring hard regs
2696 have gotten them. */
2697 if (strict <= 0
2698 || (strict_memory_address_p (recog_data.operand_mode[opno],
2699 op)))
2700 win = 1;
2701 break;
2702
2703 /* No need to check general_operand again;
2704 it was done in insn-recog.c. Well, except that reload
2705 doesn't check the validity of its replacements, but
2706 that should only matter when there's a bug. */
2707 case 'g':
2708 /* Anything goes unless it is a REG and really has a hard reg
2709 but the hard reg is not in the class GENERAL_REGS. */
2710 if (REG_P (op))
2711 {
2712 if (strict < 0
2713 || GENERAL_REGS == ALL_REGS
2714 || (reload_in_progress
2715 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2716 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2717 win = 1;
2718 }
2719 else if (strict < 0 || general_operand (op, mode))
2720 win = 1;
2721 break;
2722
2723 default:
2724 {
2725 enum constraint_num cn = lookup_constraint (p);
2726 enum reg_class cl = reg_class_for_constraint (cn);
2727 if (cl != NO_REGS)
2728 {
2729 if (strict < 0
2730 || (strict == 0
2731 && REG_P (op)
2732 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2733 || (strict == 0 && GET_CODE (op) == SCRATCH)
2734 || (REG_P (op)
2735 && reg_fits_class_p (op, cl, offset, mode)))
2736 win = 1;
2737 }
2738
2739 else if (constraint_satisfied_p (op, cn))
2740 win = 1;
2741
2742 else if (insn_extra_memory_constraint (cn)
2743 /* Every memory operand can be reloaded to fit. */
2744 && ((strict < 0 && MEM_P (op))
2745 /* Before reload, accept what reload can turn
2746 into a mem. */
2747 || (strict < 0 && CONSTANT_P (op))
2748 /* Before reload, accept a pseudo,
2749 since LRA can turn it into a mem. */
2750 || (strict < 0 && targetm.lra_p () && REG_P (op)
2751 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2752 /* During reload, accept a pseudo. */
2753 || (reload_in_progress && REG_P (op)
2754 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2755 win = 1;
2756 else if (insn_extra_address_constraint (cn)
2757 /* Every address operand can be reloaded to fit. */
2758 && strict < 0)
2759 win = 1;
2760 /* Cater to architectures like IA-64 that define extra memory
2761 constraints without using define_memory_constraint. */
2762 else if (reload_in_progress
2763 && REG_P (op)
2764 && REGNO (op) >= FIRST_PSEUDO_REGISTER
2765 && reg_renumber[REGNO (op)] < 0
2766 && reg_equiv_mem (REGNO (op)) != 0
2767 && constraint_satisfied_p
2768 (reg_equiv_mem (REGNO (op)), cn))
2769 win = 1;
2770 break;
2771 }
2772 }
2773 while (p += len, c);
2774
2775 constraints[opno] = p;
2776 /* If this operand did not win somehow,
2777 this alternative loses. */
2778 if (! win)
2779 lose = 1;
2780 }
2781 /* This alternative won; the operands are ok.
2782 Change whichever operands this alternative says to change. */
2783 if (! lose)
2784 {
2785 int opno, eopno;
2786
2787 /* See if any earlyclobber operand conflicts with some other
2788 operand. */
2789
2790 if (strict > 0 && seen_earlyclobber_at >= 0)
2791 for (eopno = seen_earlyclobber_at;
2792 eopno < recog_data.n_operands;
2793 eopno++)
2794 /* Ignore earlyclobber operands now in memory,
2795 because we would often report failure when we have
2796 two memory operands, one of which was formerly a REG. */
2797 if (earlyclobber[eopno]
2798 && REG_P (recog_data.operand[eopno]))
2799 for (opno = 0; opno < recog_data.n_operands; opno++)
2800 if ((MEM_P (recog_data.operand[opno])
2801 || recog_data.operand_type[opno] != OP_OUT)
2802 && opno != eopno
2803 /* Ignore things like match_operator operands. */
2804 && *recog_data.constraints[opno] != 0
2805 && ! (matching_operands[opno] == eopno
2806 && operands_match_p (recog_data.operand[opno],
2807 recog_data.operand[eopno]))
2808 && ! safe_from_earlyclobber (recog_data.operand[opno],
2809 recog_data.operand[eopno]))
2810 lose = 1;
2811
2812 if (! lose)
2813 {
2814 while (--funny_match_index >= 0)
2815 {
2816 recog_data.operand[funny_match[funny_match_index].other]
2817 = recog_data.operand[funny_match[funny_match_index].this_op];
2818 }
2819
2820 /* For operands without < or > constraints reject side-effects. */
2821 if (AUTO_INC_DEC && recog_data.is_asm)
2822 {
2823 for (opno = 0; opno < recog_data.n_operands; opno++)
2824 if (MEM_P (recog_data.operand[opno]))
2825 switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2826 {
2827 case PRE_INC:
2828 case POST_INC:
2829 case PRE_DEC:
2830 case POST_DEC:
2831 case PRE_MODIFY:
2832 case POST_MODIFY:
2833 if (strchr (recog_data.constraints[opno], '<') == NULL
2834 && strchr (recog_data.constraints[opno], '>')
2835 == NULL)
2836 return 0;
2837 break;
2838 default:
2839 break;
2840 }
2841 }
2842
2843 return 1;
2844 }
2845 }
2846
2847 which_alternative++;
2848 }
2849 while (which_alternative < recog_data.n_alternatives);
2850
2851 which_alternative = -1;
2852 /* If we are about to reject this, but we are not to test strictly,
2853 try a very loose test. Only return failure if it fails also. */
2854 if (strict == 0)
2855 return constrain_operands (-1, alternatives);
2856 else
2857 return 0;
2858 }
2859
2860 /* Return true iff OPERAND (assumed to be a REG rtx)
2861 is a hard reg in class CLASS when its regno is offset by OFFSET
2862 and changed to mode MODE.
2863 If REG occupies multiple hard regs, all of them must be in CLASS. */
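/* For example (illustrative): on a 32-bit target a DImode OPERAND starting
in hard register N occupies registers N and N + 1, and both must be in CL
for this to return true. */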
2864
2865 bool
2866 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2867 machine_mode mode)
2868 {
2869 unsigned int regno = REGNO (operand);
2870
2871 if (cl == NO_REGS)
2872 return false;
2873
2874 /* Regno must not be a pseudo register. Offset may be negative. */
2875 return (HARD_REGISTER_NUM_P (regno)
2876 && HARD_REGISTER_NUM_P (regno + offset)
2877 && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
2878 regno + offset));
2879 }
2880 \f
2881 /* Split single instruction. Helper function for split_all_insns and
2882 split_all_insns_noflow. Return last insn in the sequence if successful,
2883 or NULL if unsuccessful. */
2884
2885 static rtx_insn *
2886 split_insn (rtx_insn *insn)
2887 {
2888 /* Split insns here to get max fine-grain parallelism. */
2889 rtx_insn *first = PREV_INSN (insn);
2890 rtx_insn *last = try_split (PATTERN (insn), insn, 1);
2891 rtx insn_set, last_set, note;
2892
2893 if (last == insn)
2894 return NULL;
2895
2896 /* If the original instruction was a single set that was known to be
2897 equivalent to a constant, see if we can say the same about the last
2898 instruction in the split sequence. The two instructions must set
2899 the same destination. */
2900 insn_set = single_set (insn);
2901 if (insn_set)
2902 {
2903 last_set = single_set (last);
2904 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2905 {
2906 note = find_reg_equal_equiv_note (insn);
2907 if (note && CONSTANT_P (XEXP (note, 0)))
2908 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2909 else if (CONSTANT_P (SET_SRC (insn_set)))
2910 set_unique_reg_note (last, REG_EQUAL,
2911 copy_rtx (SET_SRC (insn_set)));
2912 }
2913 }
2914
2915 /* try_split returns the NOTE that INSN became. */
2916 SET_INSN_DELETED (insn);
2917
2918 /* ??? Coddle to md files that generate subregs in post-reload
2919 splitters instead of computing the proper hard register. */
2920 if (reload_completed && first != last)
2921 {
2922 first = NEXT_INSN (first);
2923 for (;;)
2924 {
2925 if (INSN_P (first))
2926 cleanup_subreg_operands (first);
2927 if (first == last)
2928 break;
2929 first = NEXT_INSN (first);
2930 }
2931 }
2932
2933 return last;
2934 }
2935
2936 /* Split all insns in the function. */
2937
2938 void
2939 split_all_insns (void)
2940 {
2941 bool changed;
2942 basic_block bb;
2943
2944 auto_sbitmap blocks (last_basic_block_for_fn (cfun));
2945 bitmap_clear (blocks);
2946 changed = false;
2947
2948 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2949 {
2950 rtx_insn *insn, *next;
2951 bool finish = false;
2952
2953 rtl_profile_for_bb (bb);
2954 for (insn = BB_HEAD (bb); !finish ; insn = next)
2955 {
2956 /* Can't use `next_real_insn' because that might go across
2957 CODE_LABELS and short-out basic blocks. */
2958 next = NEXT_INSN (insn);
2959 finish = (insn == BB_END (bb));
2960 if (INSN_P (insn))
2961 {
2962 rtx set = single_set (insn);
2963
2964 /* Don't split no-op move insns. These should silently
2965 disappear later in final. Splitting such insns would
2966 break the code that handles LIBCALL blocks. */
2967 if (set && set_noop_p (set))
2968 {
2969 /* Nops get in the way while scheduling, so delete them
2970 now if register allocation has already been done. It
2971 is too risky to try to do this before register
2972 allocation, and there are unlikely to be very many
2973 nops then anyways. */
2974 if (reload_completed)
2975 delete_insn_and_edges (insn);
2976 }
2977 else
2978 {
2979 if (split_insn (insn))
2980 {
2981 bitmap_set_bit (blocks, bb->index);
2982 changed = true;
2983 }
2984 }
2985 }
2986 }
2987 }
2988
2989 default_rtl_profile ();
2990 if (changed)
2991 find_many_sub_basic_blocks (blocks);
2992
2993 checking_verify_flow_info ();
2994 }
2995
2996 /* Same as split_all_insns, but do not expect CFG to be available.
2997 Used by machine dependent reorg passes. */
2998
2999 unsigned int
3000 split_all_insns_noflow (void)
3001 {
3002 rtx_insn *next, *insn;
3003
3004 for (insn = get_insns (); insn; insn = next)
3005 {
3006 next = NEXT_INSN (insn);
3007 if (INSN_P (insn))
3008 {
3009 /* Don't split no-op move insns. These should silently
3010 disappear later in final. Splitting such insns would
3011 break the code that handles LIBCALL blocks. */
3012 rtx set = single_set (insn);
3013 if (set && set_noop_p (set))
3014 {
3015 /* Nops get in the way while scheduling, so delete them
3016 now if register allocation has already been done. It
3017 is too risky to try to do this before register
3018 allocation, and there are unlikely to be very many
3019 nops then anyways.
3020
3021 ??? Should we use delete_insn when the CFG isn't valid? */
3022 if (reload_completed)
3023 delete_insn_and_edges (insn);
3024 }
3025 else
3026 split_insn (insn);
3027 }
3028 }
3029 return 0;
3030 }
3031 \f
3032 struct peep2_insn_data
3033 {
3034 rtx_insn *insn;
3035 regset live_before;
3036 };
3037
3038 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
3039 static int peep2_current;
3040
3041 static bool peep2_do_rebuild_jump_labels;
3042 static bool peep2_do_cleanup_cfg;
3043
3044 /* The number of instructions available to match a peep2. */
3045 int peep2_current_count;
3046
3047 /* A marker indicating the last insn of the block. The live_before regset
3048 for this element is correct, indicating DF_LIVE_OUT for the block. */
3049 #define PEEP2_EOB invalid_insn_rtx
3050
3051 /* Wrap N to fit into the peep2_insn_data buffer. */
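/* For example (illustrative): if MAX_INSNS_PER_PEEP2 were 5, the buffer
would have 6 slots and peep2_buf_position (7) would wrap around to 1. */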
3052
3053 static int
3054 peep2_buf_position (int n)
3055 {
3056 if (n >= MAX_INSNS_PER_PEEP2 + 1)
3057 n -= MAX_INSNS_PER_PEEP2 + 1;
3058 return n;
3059 }
3060
3061 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3062 does not exist. Used by the recognizer to find the next insn to match
3063 in a multi-insn pattern. */
3064
3065 rtx_insn *
3066 peep2_next_insn (int n)
3067 {
3068 gcc_assert (n <= peep2_current_count);
3069
3070 n = peep2_buf_position (peep2_current + n);
3071
3072 return peep2_insn_data[n].insn;
3073 }
3074
3075 /* Return true if REGNO is dead before the Nth non-note insn
3076 after `current'. */
3077
3078 int
3079 peep2_regno_dead_p (int ofs, int regno)
3080 {
3081 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3082
3083 ofs = peep2_buf_position (peep2_current + ofs);
3084
3085 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3086
3087 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3088 }
3089
3090 /* Similarly for a REG. */
3091
3092 int
3093 peep2_reg_dead_p (int ofs, rtx reg)
3094 {
3095 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3096
3097 ofs = peep2_buf_position (peep2_current + ofs);
3098
3099 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3100
3101 unsigned int end_regno = END_REGNO (reg);
3102 for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
3103 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno))
3104 return 0;
3105 return 1;
3106 }
3107
3108 /* Regno offset to be used in the register search. */
3109 static int search_ofs;
3110
3111 /* Try to find a hard register of mode MODE, matching the register class in
3112 CLASS_STR, which is not live immediately before the buffered insn at
3113 offset FROM and is not set or clobbered by any buffered insn at offsets
3114 FROM up to (but not including) TO. FROM and TO are offsets into the
3115 peep2 insn buffer, counted from the current match position.
3116 Registers that already have bits set in REG_SET will not be considered.
3117
3118 If an appropriate register is available, it will be returned and the
3119 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3120 returned. */
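/* A define_peephole2 might (illustrative sketch) call

     peep2_find_free_register (0, 1, "r", SImode, &scratch_regs)

to ask for a general register that is not live at the start of the match
and is not set or clobbered by the first matched insn; SCRATCH_REGS is a
hypothetical HARD_REG_SET owned by the caller. */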
3121
3122 rtx
3123 peep2_find_free_register (int from, int to, const char *class_str,
3124 machine_mode mode, HARD_REG_SET *reg_set)
3125 {
3126 enum reg_class cl;
3127 HARD_REG_SET live;
3128 df_ref def;
3129 int i;
3130
3131 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3132 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3133
3134 from = peep2_buf_position (peep2_current + from);
3135 to = peep2_buf_position (peep2_current + to);
3136
3137 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3138 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3139
3140 while (from != to)
3141 {
3142 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3143
3144 /* Don't use registers set or clobbered by the insn. */
3145 FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
3146 SET_HARD_REG_BIT (live, DF_REF_REGNO (def));
3147
3148 from = peep2_buf_position (from + 1);
3149 }
3150
3151 cl = reg_class_for_constraint (lookup_constraint (class_str));
3152
3153 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3154 {
3155 int raw_regno, regno, success, j;
3156
3157 /* Distribute the free registers as much as possible. */
3158 raw_regno = search_ofs + i;
3159 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3160 raw_regno -= FIRST_PSEUDO_REGISTER;
3161 #ifdef REG_ALLOC_ORDER
3162 regno = reg_alloc_order[raw_regno];
3163 #else
3164 regno = raw_regno;
3165 #endif
3166
3167 /* Can it support the mode we need? */
3168 if (! HARD_REGNO_MODE_OK (regno, mode))
3169 continue;
3170
3171 success = 1;
3172 for (j = 0; success && j < hard_regno_nregs[regno][mode]; j++)
3173 {
3174 /* Don't allocate fixed registers. */
3175 if (fixed_regs[regno + j])
3176 {
3177 success = 0;
3178 break;
3179 }
3180 /* Don't allocate global registers. */
3181 if (global_regs[regno + j])
3182 {
3183 success = 0;
3184 break;
3185 }
3186 /* Make sure the register is of the right class. */
3187 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3188 {
3189 success = 0;
3190 break;
3191 }
3192 /* And that we don't create an extra save/restore. */
3193 if (! call_used_regs[regno + j] && ! df_regs_ever_live_p (regno + j))
3194 {
3195 success = 0;
3196 break;
3197 }
3198
3199 if (! targetm.hard_regno_scratch_ok (regno + j))
3200 {
3201 success = 0;
3202 break;
3203 }
3204
3205 /* And we don't clobber traceback for noreturn functions. */
3206 if ((regno + j == FRAME_POINTER_REGNUM
3207 || regno + j == HARD_FRAME_POINTER_REGNUM)
3208 && (! reload_completed || frame_pointer_needed))
3209 {
3210 success = 0;
3211 break;
3212 }
3213
3214 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3215 || TEST_HARD_REG_BIT (live, regno + j))
3216 {
3217 success = 0;
3218 break;
3219 }
3220 }
3221
3222 if (success)
3223 {
3224 add_to_hard_reg_set (reg_set, mode, regno);
3225
3226 /* Start the next search with the next register. */
3227 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3228 raw_regno = 0;
3229 search_ofs = raw_regno;
3230
3231 return gen_rtx_REG (mode, regno);
3232 }
3233 }
3234
3235 search_ofs = 0;
3236 return NULL_RTX;
3237 }
3238
3239 /* Forget all currently tracked instructions; remember only the current
3240 LIVE regset. */
3241
3242 static void
3243 peep2_reinit_state (regset live)
3244 {
3245 int i;
3246
3247 /* Indicate that all slots except the last hold invalid data. */
3248 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3249 peep2_insn_data[i].insn = NULL;
3250 peep2_current_count = 0;
3251
3252 /* Indicate that the last slot contains live_after data. */
3253 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3254 peep2_current = MAX_INSNS_PER_PEEP2;
3255
3256 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3257 }
3258
3259 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3260 starting at INSN. Perform the replacement, removing the old insns and
3261 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3262 if the replacement is rejected. */
3263
3264 static rtx_insn *
3265 peep2_attempt (basic_block bb, rtx_insn *insn, int match_len, rtx_insn *attempt)
3266 {
3267 int i;
3268 rtx_insn *last, *before_try, *x;
3269 rtx eh_note, as_note;
3270 rtx_insn *old_insn;
3271 rtx_insn *new_insn;
3272 bool was_call = false;
3273
3274 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3275 match more than one insn, or to be split into more than one insn. */
3276 old_insn = peep2_insn_data[peep2_current].insn;
3277 if (RTX_FRAME_RELATED_P (old_insn))
3278 {
3279 bool any_note = false;
3280 rtx note;
3281
3282 if (match_len != 0)
3283 return NULL;
3284
3285 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3286 may be in the stream for the purpose of register allocation. */
3287 if (active_insn_p (attempt))
3288 new_insn = attempt;
3289 else
3290 new_insn = next_active_insn (attempt);
3291 if (next_active_insn (new_insn))
3292 return NULL;
3293
3294 /* We have a 1-1 replacement. Copy over any frame-related info. */
3295 RTX_FRAME_RELATED_P (new_insn) = 1;
3296
3297 /* Allow the backend to fill in a note during the split. */
3298 for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3299 switch (REG_NOTE_KIND (note))
3300 {
3301 case REG_FRAME_RELATED_EXPR:
3302 case REG_CFA_DEF_CFA:
3303 case REG_CFA_ADJUST_CFA:
3304 case REG_CFA_OFFSET:
3305 case REG_CFA_REGISTER:
3306 case REG_CFA_EXPRESSION:
3307 case REG_CFA_RESTORE:
3308 case REG_CFA_SET_VDRAP:
3309 any_note = true;
3310 break;
3311 default:
3312 break;
3313 }
3314
3315 /* If the backend didn't supply a note, copy one over. */
3316 if (!any_note)
3317 for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3318 switch (REG_NOTE_KIND (note))
3319 {
3320 case REG_FRAME_RELATED_EXPR:
3321 case REG_CFA_DEF_CFA:
3322 case REG_CFA_ADJUST_CFA:
3323 case REG_CFA_OFFSET:
3324 case REG_CFA_REGISTER:
3325 case REG_CFA_EXPRESSION:
3326 case REG_CFA_RESTORE:
3327 case REG_CFA_SET_VDRAP:
3328 add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3329 any_note = true;
3330 break;
3331 default:
3332 break;
3333 }
3334
3335 /* If there still isn't a note, make sure the unwind info sees the
3336 same expression as before the split. */
3337 if (!any_note)
3338 {
3339 rtx old_set, new_set;
3340
3341 /* The old insn had better have been simple, or annotated. */
3342 old_set = single_set (old_insn);
3343 gcc_assert (old_set != NULL);
3344
3345 new_set = single_set (new_insn);
3346 if (!new_set || !rtx_equal_p (new_set, old_set))
3347 add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3348 }
3349
3350 /* Copy prologue/epilogue status. This is required in order to keep
3351 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3352 maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3353 }
3354
3355 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3356 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3357 cfg-related call notes. */
3358 for (i = 0; i <= match_len; ++i)
3359 {
3360 int j;
3361 rtx note;
3362
3363 j = peep2_buf_position (peep2_current + i);
3364 old_insn = peep2_insn_data[j].insn;
3365 if (!CALL_P (old_insn))
3366 continue;
3367 was_call = true;
3368
3369 new_insn = attempt;
3370 while (new_insn != NULL_RTX)
3371 {
3372 if (CALL_P (new_insn))
3373 break;
3374 new_insn = NEXT_INSN (new_insn);
3375 }
3376
3377 gcc_assert (new_insn != NULL_RTX);
3378
3379 CALL_INSN_FUNCTION_USAGE (new_insn)
3380 = CALL_INSN_FUNCTION_USAGE (old_insn);
3381 SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn);
3382
3383 for (note = REG_NOTES (old_insn);
3384 note;
3385 note = XEXP (note, 1))
3386 switch (REG_NOTE_KIND (note))
3387 {
3388 case REG_NORETURN:
3389 case REG_SETJMP:
3390 case REG_TM:
3391 add_reg_note (new_insn, REG_NOTE_KIND (note),
3392 XEXP (note, 0));
3393 break;
3394 default:
3395 /* Discard all other reg notes. */
3396 break;
3397 }
3398
3399 /* Croak if there is another call in the sequence. */
3400 while (++i <= match_len)
3401 {
3402 j = peep2_buf_position (peep2_current + i);
3403 old_insn = peep2_insn_data[j].insn;
3404 gcc_assert (!CALL_P (old_insn));
3405 }
3406 break;
3407 }
3408
3409 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3410 move those notes over to the new sequence. */
3411 as_note = NULL;
3412 for (i = match_len; i >= 0; --i)
3413 {
3414 int j = peep2_buf_position (peep2_current + i);
3415 old_insn = peep2_insn_data[j].insn;
3416
3417 as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3418 if (as_note)
3419 break;
3420 }
3421
3422 i = peep2_buf_position (peep2_current + match_len);
3423 eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3424
3425 /* Replace the old sequence with the new. */
3426 rtx_insn *peepinsn = peep2_insn_data[i].insn;
3427 last = emit_insn_after_setloc (attempt,
3428 peep2_insn_data[i].insn,
3429 INSN_LOCATION (peepinsn));
3430 before_try = PREV_INSN (insn);
3431 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3432
3433 /* Re-insert the EH_REGION notes. */
3434 if (eh_note || (was_call && nonlocal_goto_handler_labels))
3435 {
3436 edge eh_edge;
3437 edge_iterator ei;
3438
3439 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3440 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3441 break;
3442
3443 if (eh_note)
3444 copy_reg_eh_region_note_backward (eh_note, last, before_try);
3445
3446 if (eh_edge)
3447 for (x = last; x != before_try; x = PREV_INSN (x))
3448 if (x != BB_END (bb)
3449 && (can_throw_internal (x)
3450 || can_nonlocal_goto (x)))
3451 {
3452 edge nfte, nehe;
3453 int flags;
3454
3455 nfte = split_block (bb, x);
3456 flags = (eh_edge->flags
3457 & (EDGE_EH | EDGE_ABNORMAL));
3458 if (CALL_P (x))
3459 flags |= EDGE_ABNORMAL_CALL;
3460 nehe = make_edge (nfte->src, eh_edge->dest,
3461 flags);
3462
3463 nehe->probability = eh_edge->probability;
3464 nfte->probability = nehe->probability.invert ();
3465
3466 peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3467 bb = nfte->src;
3468 eh_edge = nehe;
3469 }
3470
3471 /* The replacement may have turned a possibly trapping insn into a
3472 non-trapping one. Zap any dummy outgoing edges. */
3473 peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3474 }
3475
3476 /* Re-insert the ARGS_SIZE notes. */
3477 if (as_note)
3478 fixup_args_size_notes (before_try, last, INTVAL (XEXP (as_note, 0)));
3479
3480 /* If we generated a jump instruction, it won't have
3481 JUMP_LABEL set. Recompute after we're done. */
3482 for (x = last; x != before_try; x = PREV_INSN (x))
3483 if (JUMP_P (x))
3484 {
3485 peep2_do_rebuild_jump_labels = true;
3486 break;
3487 }
3488
3489 return last;
3490 }
3491
3492 /* After performing a replacement in basic block BB, fix up the life
3493 information in our buffer. LAST is the last of the insns that we
3494 emitted as a replacement. PREV is the insn before the start of
3495 the replacement. MATCH_LEN is the number of instructions that were
3496 matched, and which now need to be replaced in the buffer. */
3497
3498 static void
3499 peep2_update_life (basic_block bb, int match_len, rtx_insn *last,
3500 rtx_insn *prev)
3501 {
3502 int i = peep2_buf_position (peep2_current + match_len + 1);
3503 rtx_insn *x;
3504 regset_head live;
3505
3506 INIT_REG_SET (&live);
3507 COPY_REG_SET (&live, peep2_insn_data[i].live_before);
3508
3509 gcc_assert (peep2_current_count >= match_len + 1);
3510 peep2_current_count -= match_len + 1;
3511
3512 x = last;
3513 do
3514 {
3515 if (INSN_P (x))
3516 {
3517 df_insn_rescan (x);
3518 if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3519 {
3520 peep2_current_count++;
3521 if (--i < 0)
3522 i = MAX_INSNS_PER_PEEP2;
3523 peep2_insn_data[i].insn = x;
3524 df_simulate_one_insn_backwards (bb, x, &live);
3525 COPY_REG_SET (peep2_insn_data[i].live_before, &live);
3526 }
3527 }
3528 x = PREV_INSN (x);
3529 }
3530 while (x != prev);
3531 CLEAR_REG_SET (&live);
3532
3533 peep2_current = i;
3534 }
3535
3536 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3537 Return true if we added it, false otherwise. The caller will try to match
3538 peepholes against the buffer if we return false; otherwise it will try to
3539 add more instructions to the buffer. */
3540
3541 static bool
3542 peep2_fill_buffer (basic_block bb, rtx_insn *insn, regset live)
3543 {
3544 int pos;
3545
3546 /* Once we have filled the maximum number of insns the buffer can hold,
3547 allow the caller to match the insns against peepholes. We wait until
3548 the buffer is full in case the target has similar peepholes of different
3549 length; we always want to match the longest if possible. */
3550 if (peep2_current_count == MAX_INSNS_PER_PEEP2)
3551 return false;
3552
3553 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3554 any other pattern, lest it change the semantics of the frame info. */
3555 if (RTX_FRAME_RELATED_P (insn))
3556 {
3557 /* Let the buffer drain first. */
3558 if (peep2_current_count > 0)
3559 return false;
3560 /* Now the insn will be the only thing in the buffer. */
3561 }
3562
3563 pos = peep2_buf_position (peep2_current + peep2_current_count);
3564 peep2_insn_data[pos].insn = insn;
3565 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3566 peep2_current_count++;
3567
3568 df_simulate_one_insn_forwards (bb, insn, live);
3569 return true;
3570 }
3571
3572 /* Perform the peephole2 optimization pass. */
3573
3574 static void
3575 peephole2_optimize (void)
3576 {
3577 rtx_insn *insn;
3578 bitmap live;
3579 int i;
3580 basic_block bb;
3581
3582 peep2_do_cleanup_cfg = false;
3583 peep2_do_rebuild_jump_labels = false;
3584
3585 df_set_flags (DF_LR_RUN_DCE);
3586 df_note_add_problem ();
3587 df_analyze ();
3588
3589 /* Initialize the regsets we're going to use. */
3590 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3591 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3592 search_ofs = 0;
3593 live = BITMAP_ALLOC (&reg_obstack);
3594
3595 FOR_EACH_BB_REVERSE_FN (bb, cfun)
3596 {
3597 bool past_end = false;
3598 int pos;
3599
3600 rtl_profile_for_bb (bb);
3601
3602 /* Start up propagation. */
3603 bitmap_copy (live, DF_LR_IN (bb));
3604 df_simulate_initialize_forwards (bb, live);
3605 peep2_reinit_state (live);
3606
3607 insn = BB_HEAD (bb);
3608 for (;;)
3609 {
3610 rtx_insn *attempt, *head;
3611 int match_len;
3612
3613 if (!past_end && !NONDEBUG_INSN_P (insn))
3614 {
3615 next_insn:
3616 insn = NEXT_INSN (insn);
3617 if (insn == NEXT_INSN (BB_END (bb)))
3618 past_end = true;
3619 continue;
3620 }
3621 if (!past_end && peep2_fill_buffer (bb, insn, live))
3622 goto next_insn;
3623
3624 /* If we did not fill an empty buffer, it signals the end of the
3625 block. */
3626 if (peep2_current_count == 0)
3627 break;
3628
3629 /* The buffer filled to the current maximum, so try to match. */
3630
3631 pos = peep2_buf_position (peep2_current + peep2_current_count);
3632 peep2_insn_data[pos].insn = PEEP2_EOB;
3633 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3634
3635 /* Match the peephole. */
3636 head = peep2_insn_data[peep2_current].insn;
3637 attempt = peephole2_insns (PATTERN (head), head, &match_len);
3638 if (attempt != NULL)
3639 {
3640 rtx_insn *last = peep2_attempt (bb, head, match_len, attempt);
3641 if (last)
3642 {
3643 peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
3644 continue;
3645 }
3646 }
3647
3648 /* No match: advance the buffer by one insn. */
3649 peep2_current = peep2_buf_position (peep2_current + 1);
3650 peep2_current_count--;
3651 }
3652 }
3653
3654 default_rtl_profile ();
3655 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3656 BITMAP_FREE (peep2_insn_data[i].live_before);
3657 BITMAP_FREE (live);
3658 if (peep2_do_rebuild_jump_labels)
3659 rebuild_jump_labels (get_insns ());
3660 if (peep2_do_cleanup_cfg)
3661 cleanup_cfg (CLEANUP_CFG_CHANGED);
3662 }
3663
3664 /* Common predicates for use with define_bypass. */
3665
3666 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3667 data not the address operand(s) of the store. IN_INSN and OUT_INSN
3668 must be either a single_set or a PARALLEL with SETs inside. */
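/* For example (illustrative): if OUT_INSN is (set (reg A) ...) and IN_INSN
is (set (mem (reg B)) (reg A)), the dependency is only through the stored
data, so this returns true; if IN_INSN were (set (mem (reg A)) (reg C)),
the dependency would be through the address and this returns false. */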
3669
3670 int
3671 store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3672 {
3673 rtx out_set, in_set;
3674 rtx out_pat, in_pat;
3675 rtx out_exp, in_exp;
3676 int i, j;
3677
3678 in_set = single_set (in_insn);
3679 if (in_set)
3680 {
3681 if (!MEM_P (SET_DEST (in_set)))
3682 return false;
3683
3684 out_set = single_set (out_insn);
3685 if (out_set)
3686 {
3687 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3688 return false;
3689 }
3690 else
3691 {
3692 out_pat = PATTERN (out_insn);
3693
3694 if (GET_CODE (out_pat) != PARALLEL)
3695 return false;
3696
3697 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3698 {
3699 out_exp = XVECEXP (out_pat, 0, i);
3700
3701 if (GET_CODE (out_exp) == CLOBBER)
3702 continue;
3703
3704 gcc_assert (GET_CODE (out_exp) == SET);
3705
3706 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3707 return false;
3708 }
3709 }
3710 }
3711 else
3712 {
3713 in_pat = PATTERN (in_insn);
3714 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3715
3716 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3717 {
3718 in_exp = XVECEXP (in_pat, 0, i);
3719
3720 if (GET_CODE (in_exp) == CLOBBER)
3721 continue;
3722
3723 gcc_assert (GET_CODE (in_exp) == SET);
3724
3725 if (!MEM_P (SET_DEST (in_exp)))
3726 return false;
3727
3728 out_set = single_set (out_insn);
3729 if (out_set)
3730 {
3731 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3732 return false;
3733 }
3734 else
3735 {
3736 out_pat = PATTERN (out_insn);
3737 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3738
3739 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3740 {
3741 out_exp = XVECEXP (out_pat, 0, j);
3742
3743 if (GET_CODE (out_exp) == CLOBBER)
3744 continue;
3745
3746 gcc_assert (GET_CODE (out_exp) == SET);
3747
3748 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3749 return false;
3750 }
3751 }
3752 }
3753 }
3754
3755 return true;
3756 }
3757
3758 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3759 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3760 or multiple set; IN_INSN should be single_set for truth, but for convenience
3761 of insn categorization may be any JUMP or CALL insn. */
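/* For example (illustrative): if OUT_INSN sets (reg A) and IN_INSN is
(set (pc) (if_then_else (ne (reg A) (const_int 0)) (label_ref L) (pc))),
(reg A) appears only in the condition and not in the THEN or ELSE arms,
so this returns true. */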
3762
3763 int
3764 if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3765 {
3766 rtx out_set, in_set;
3767
3768 in_set = single_set (in_insn);
3769 if (! in_set)
3770 {
3771 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3772 return false;
3773 }
3774
3775 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3776 return false;
3777 in_set = SET_SRC (in_set);
3778
3779 out_set = single_set (out_insn);
3780 if (out_set)
3781 {
3782 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3783 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3784 return false;
3785 }
3786 else
3787 {
3788 rtx out_pat;
3789 int i;
3790
3791 out_pat = PATTERN (out_insn);
3792 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3793
3794 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3795 {
3796 rtx exp = XVECEXP (out_pat, 0, i);
3797
3798 if (GET_CODE (exp) == CLOBBER)
3799 continue;
3800
3801 gcc_assert (GET_CODE (exp) == SET);
3802
3803 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3804 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3805 return false;
3806 }
3807 }
3808
3809 return true;
3810 }
3811 \f
3812 static unsigned int
3813 rest_of_handle_peephole2 (void)
3814 {
3815 if (HAVE_peephole2)
3816 peephole2_optimize ();
3817
3818 return 0;
3819 }
3820
3821 namespace {
3822
3823 const pass_data pass_data_peephole2 =
3824 {
3825 RTL_PASS, /* type */
3826 "peephole2", /* name */
3827 OPTGROUP_NONE, /* optinfo_flags */
3828 TV_PEEPHOLE2, /* tv_id */
3829 0, /* properties_required */
3830 0, /* properties_provided */
3831 0, /* properties_destroyed */
3832 0, /* todo_flags_start */
3833 TODO_df_finish, /* todo_flags_finish */
3834 };
3835
3836 class pass_peephole2 : public rtl_opt_pass
3837 {
3838 public:
3839 pass_peephole2 (gcc::context *ctxt)
3840 : rtl_opt_pass (pass_data_peephole2, ctxt)
3841 {}
3842
3843 /* opt_pass methods: */
3844 /* The epiphany backend creates a second instance of this pass, so we need
3845 a clone method. */
3846 opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
3847 virtual bool gate (function *) { return (optimize > 0 && flag_peephole2); }
3848 virtual unsigned int execute (function *)
3849 {
3850 return rest_of_handle_peephole2 ();
3851 }
3852
3853 }; // class pass_peephole2
3854
3855 } // anon namespace
3856
3857 rtl_opt_pass *
3858 make_pass_peephole2 (gcc::context *ctxt)
3859 {
3860 return new pass_peephole2 (ctxt);
3861 }
3862
3863 namespace {
3864
3865 const pass_data pass_data_split_all_insns =
3866 {
3867 RTL_PASS, /* type */
3868 "split1", /* name */
3869 OPTGROUP_NONE, /* optinfo_flags */
3870 TV_NONE, /* tv_id */
3871 0, /* properties_required */
3872 0, /* properties_provided */
3873 0, /* properties_destroyed */
3874 0, /* todo_flags_start */
3875 0, /* todo_flags_finish */
3876 };
3877
3878 class pass_split_all_insns : public rtl_opt_pass
3879 {
3880 public:
3881 pass_split_all_insns (gcc::context *ctxt)
3882 : rtl_opt_pass (pass_data_split_all_insns, ctxt)
3883 {}
3884
3885 /* opt_pass methods: */
3886 /* The epiphany backend creates a second instance of this pass, so
3887 we need a clone method. */
3888 opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
3889 virtual unsigned int execute (function *)
3890 {
3891 split_all_insns ();
3892 return 0;
3893 }
3894
3895 }; // class pass_split_all_insns
3896
3897 } // anon namespace
3898
3899 rtl_opt_pass *
3900 make_pass_split_all_insns (gcc::context *ctxt)
3901 {
3902 return new pass_split_all_insns (ctxt);
3903 }
3904
3905 namespace {
3906
3907 const pass_data pass_data_split_after_reload =
3908 {
3909 RTL_PASS, /* type */
3910 "split2", /* name */
3911 OPTGROUP_NONE, /* optinfo_flags */
3912 TV_NONE, /* tv_id */
3913 0, /* properties_required */
3914 0, /* properties_provided */
3915 0, /* properties_destroyed */
3916 0, /* todo_flags_start */
3917 0, /* todo_flags_finish */
3918 };
3919
3920 class pass_split_after_reload : public rtl_opt_pass
3921 {
3922 public:
3923 pass_split_after_reload (gcc::context *ctxt)
3924 : rtl_opt_pass (pass_data_split_after_reload, ctxt)
3925 {}
3926
3927 /* opt_pass methods: */
3928 virtual bool gate (function *)
3929 {
3930 /* If optimizing, then go ahead and split insns now. */
3931 if (optimize > 0)
3932 return true;
3933
3934 #ifdef STACK_REGS
3935 return true;
3936 #else
3937 return false;
3938 #endif
3939 }
3940
3941 virtual unsigned int execute (function *)
3942 {
3943 split_all_insns ();
3944 return 0;
3945 }
3946
3947 }; // class pass_split_after_reload
3948
3949 } // anon namespace
3950
3951 rtl_opt_pass *
3952 make_pass_split_after_reload (gcc::context *ctxt)
3953 {
3954 return new pass_split_after_reload (ctxt);
3955 }
3956
3957 namespace {
3958
3959 const pass_data pass_data_split_before_regstack =
3960 {
3961 RTL_PASS, /* type */
3962 "split3", /* name */
3963 OPTGROUP_NONE, /* optinfo_flags */
3964 TV_NONE, /* tv_id */
3965 0, /* properties_required */
3966 0, /* properties_provided */
3967 0, /* properties_destroyed */
3968 0, /* todo_flags_start */
3969 0, /* todo_flags_finish */
3970 };
3971
3972 class pass_split_before_regstack : public rtl_opt_pass
3973 {
3974 public:
3975 pass_split_before_regstack (gcc::context *ctxt)
3976 : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
3977 {}
3978
3979 /* opt_pass methods: */
3980 virtual bool gate (function *);
3981 virtual unsigned int execute (function *)
3982 {
3983 split_all_insns ();
3984 return 0;
3985 }
3986
3987 }; // class pass_split_before_regstack
3988
3989 bool
3990 pass_split_before_regstack::gate (function *)
3991 {
3992 #if HAVE_ATTR_length && defined (STACK_REGS)
3993 /* If flow2 creates new instructions which need splitting,
3994 and scheduling after reload is not done, they might not be
3995 split until final, which does not allow splitting
3996 when HAVE_ATTR_length is defined. */
3997 # ifdef INSN_SCHEDULING
3998 return (optimize && !flag_schedule_insns_after_reload);
3999 # else
4000 return (optimize);
4001 # endif
4002 #else
4003 return false;
4004 #endif
4005 }
4006
4007 } // anon namespace
4008
4009 rtl_opt_pass *
4010 make_pass_split_before_regstack (gcc::context *ctxt)
4011 {
4012 return new pass_split_before_regstack (ctxt);
4013 }
4014
4015 static unsigned int
4016 rest_of_handle_split_before_sched2 (void)
4017 {
4018 #ifdef INSN_SCHEDULING
4019 split_all_insns ();
4020 #endif
4021 return 0;
4022 }
4023
4024 namespace {
4025
4026 const pass_data pass_data_split_before_sched2 =
4027 {
4028 RTL_PASS, /* type */
4029 "split4", /* name */
4030 OPTGROUP_NONE, /* optinfo_flags */
4031 TV_NONE, /* tv_id */
4032 0, /* properties_required */
4033 0, /* properties_provided */
4034 0, /* properties_destroyed */
4035 0, /* todo_flags_start */
4036 0, /* todo_flags_finish */
4037 };
4038
4039 class pass_split_before_sched2 : public rtl_opt_pass
4040 {
4041 public:
4042 pass_split_before_sched2 (gcc::context *ctxt)
4043 : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
4044 {}
4045
4046 /* opt_pass methods: */
4047 virtual bool gate (function *)
4048 {
4049 #ifdef INSN_SCHEDULING
4050 return optimize > 0 && flag_schedule_insns_after_reload;
4051 #else
4052 return false;
4053 #endif
4054 }
4055
4056 virtual unsigned int execute (function *)
4057 {
4058 return rest_of_handle_split_before_sched2 ();
4059 }
4060
4061 }; // class pass_split_before_sched2
4062
4063 } // anon namespace
4064
4065 rtl_opt_pass *
4066 make_pass_split_before_sched2 (gcc::context *ctxt)
4067 {
4068 return new pass_split_before_sched2 (ctxt);
4069 }
4070
4071 namespace {
4072
4073 const pass_data pass_data_split_for_shorten_branches =
4074 {
4075 RTL_PASS, /* type */
4076 "split5", /* name */
4077 OPTGROUP_NONE, /* optinfo_flags */
4078 TV_NONE, /* tv_id */
4079 0, /* properties_required */
4080 0, /* properties_provided */
4081 0, /* properties_destroyed */
4082 0, /* todo_flags_start */
4083 0, /* todo_flags_finish */
4084 };
4085
4086 class pass_split_for_shorten_branches : public rtl_opt_pass
4087 {
4088 public:
4089 pass_split_for_shorten_branches (gcc::context *ctxt)
4090 : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
4091 {}
4092
4093 /* opt_pass methods: */
4094 virtual bool gate (function *)
4095 {
4096 /* The placement of the splitting that we do for shorten_branches
4097 depends on whether regstack is used by the target or not. */
4098 #if HAVE_ATTR_length && !defined (STACK_REGS)
4099 return true;
4100 #else
4101 return false;
4102 #endif
4103 }
4104
4105 virtual unsigned int execute (function *)
4106 {
4107 return split_all_insns_noflow ();
4108 }
4109
4110 }; // class pass_split_for_shorten_branches
4111
4112 } // anon namespace
4113
4114 rtl_opt_pass *
4115 make_pass_split_for_shorten_branches (gcc::context *ctxt)
4116 {
4117 return new pass_split_for_shorten_branches (ctxt);
4118 }
4119
4120 /* (Re)initialize the target information after a change in target. */
4121
4122 void
4123 recog_init ()
4124 {
4125 /* The information is zero-initialized, so we don't need to do anything
4126 first time round. */
4127 if (!this_target_recog->x_initialized)
4128 {
4129 this_target_recog->x_initialized = true;
4130 return;
4131 }
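/* Otherwise drop the cached per-insn information (the bool attribute masks
   and the preprocessed operand alternatives) so that it is recomputed for
   the new target.  */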
4132 memset (this_target_recog->x_bool_attr_masks, 0,
4133 sizeof (this_target_recog->x_bool_attr_masks));
4134 for (unsigned int i = 0; i < NUM_INSN_CODES; ++i)
4135 if (this_target_recog->x_op_alt[i])
4136 {
4137 free (this_target_recog->x_op_alt[i]);
4138 this_target_recog->x_op_alt[i] = 0;
4139 }
4140 }