/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23 #include "coretypes.h"
26 #include "rtl-error.h"
28 #include "insn-config.h"
29 #include "insn-attr.h"
30 #include "hard-reg-set.h"
33 #include "addresses.h"
37 #include "basic-block.h"
40 #include "tree-pass.h"
42 #include "insn-codes.h"
/* Default the push/pop address codes from the direction the stack grows.
   A push pre-decrements on a downward-growing stack, pre-increments
   otherwise.  */
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* A pop is the mirror image of the push above: post-increment when the
   stack grows downward, post-decrement otherwise.  */
#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
60 static void validate_replace_rtx_1 (rtx
*, rtx
, rtx
, rtx
, bool);
61 static void validate_replace_src_1 (rtx
*, void *);
62 static rtx
split_insn (rtx
);
64 /* Nonzero means allow operands to be volatile.
65 This should be 0 if you are generating rtl, such as if you are calling
66 the functions in optabs.c and expmed.c (most of the time).
67 This should be 1 if all valid insns need to be recognized,
68 such as in reginfo.c and final.c and reload.c.
70 init_recog and init_recog_no_volatile are responsible for setting this. */
74 struct recog_data_d recog_data
;
76 /* Contains a vector of operand_alternative structures for every operand.
77 Set up by preprocess_constraints. */
78 struct operand_alternative recog_op_alt
[MAX_RECOG_OPERANDS
][MAX_RECOG_ALTERNATIVES
];
/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;
/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;
94 /* Initialize data used by the function `recog'.
95 This must be called once in the compilation of a function
96 before any insn recognition may be done in the function. */
99 init_recog_no_volatile (void)
111 /* Return true if labels in asm operands BODY are LABEL_REFs. */
114 asm_labels_ok (rtx body
)
119 asmop
= extract_asm_operands (body
);
120 if (asmop
== NULL_RTX
)
123 for (i
= 0; i
< ASM_OPERANDS_LABEL_LENGTH (asmop
); i
++)
124 if (GET_CODE (ASM_OPERANDS_LABEL (asmop
, i
)) != LABEL_REF
)
130 /* Check that X is an insn-body for an `asm' with operands
131 and that the operands mentioned in it are legitimate. */
134 check_asm_operands (rtx x
)
138 const char **constraints
;
141 if (!asm_labels_ok (x
))
144 /* Post-reload, be more strict with things. */
145 if (reload_completed
)
147 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
148 extract_insn (make_insn_raw (x
));
149 constrain_operands (1);
150 return which_alternative
>= 0;
153 noperands
= asm_noperands (x
);
159 operands
= XALLOCAVEC (rtx
, noperands
);
160 constraints
= XALLOCAVEC (const char *, noperands
);
162 decode_asm_operands (x
, operands
, NULL
, constraints
, NULL
, NULL
);
164 for (i
= 0; i
< noperands
; i
++)
166 const char *c
= constraints
[i
];
169 if (! asm_operand_ok (operands
[i
], c
, constraints
))
176 /* Static data for the next two routines. */
178 typedef struct change_t
187 static change_t
*changes
;
188 static int changes_allocated
;
190 static int num_changes
= 0;
192 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
193 at which NEW_RTX will be placed. If OBJECT is zero, no validation is done,
194 the change is simply made.
196 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
197 will be called with the address and mode as parameters. If OBJECT is
198 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
201 IN_GROUP is nonzero if this is part of a group of changes that must be
202 performed as a group. In that case, the changes will be stored. The
203 function `apply_change_group' will validate and apply the changes.
205 If IN_GROUP is zero, this is a single change. Try to recognize the insn
206 or validate the memory reference with the change applied. If the result
207 is not valid for the machine, suppress the change and return zero.
208 Otherwise, perform the change and return 1. */
211 validate_change_1 (rtx object
, rtx
*loc
, rtx new_rtx
, bool in_group
, bool unshare
)
215 if (old
== new_rtx
|| rtx_equal_p (old
, new_rtx
))
218 gcc_assert (in_group
!= 0 || num_changes
== 0);
222 /* Save the information describing this change. */
223 if (num_changes
>= changes_allocated
)
225 if (changes_allocated
== 0)
226 /* This value allows for repeated substitutions inside complex
227 indexed addresses, or changes in up to 5 insns. */
228 changes_allocated
= MAX_RECOG_OPERANDS
* 5;
230 changes_allocated
*= 2;
232 changes
= XRESIZEVEC (change_t
, changes
, changes_allocated
);
235 changes
[num_changes
].object
= object
;
236 changes
[num_changes
].loc
= loc
;
237 changes
[num_changes
].old
= old
;
238 changes
[num_changes
].unshare
= unshare
;
240 if (object
&& !MEM_P (object
))
242 /* Set INSN_CODE to force rerecognition of insn. Save old code in
244 changes
[num_changes
].old_code
= INSN_CODE (object
);
245 INSN_CODE (object
) = -1;
250 /* If we are making a group of changes, return 1. Otherwise, validate the
251 change group we made. */
256 return apply_change_group ();
259 /* Wrapper for validate_change_1 without the UNSHARE argument defaulting
263 validate_change (rtx object
, rtx
*loc
, rtx new_rtx
, bool in_group
)
265 return validate_change_1 (object
, loc
, new_rtx
, in_group
, false);
268 /* Wrapper for validate_change_1 without the UNSHARE argument defaulting
272 validate_unshare_change (rtx object
, rtx
*loc
, rtx new_rtx
, bool in_group
)
274 return validate_change_1 (object
, loc
, new_rtx
, in_group
, true);
278 /* Keep X canonicalized if some changes have made it non-canonical; only
279 modifies the operands of X, not (for example) its code. Simplifications
280 are not the job of this routine.
282 Return true if anything was changed. */
284 canonicalize_change_group (rtx insn
, rtx x
)
286 if (COMMUTATIVE_P (x
)
287 && swap_commutative_operands_p (XEXP (x
, 0), XEXP (x
, 1)))
289 /* Oops, the caller has made X no longer canonical.
290 Let's redo the changes in the correct order. */
291 rtx tem
= XEXP (x
, 0);
292 validate_unshare_change (insn
, &XEXP (x
, 0), XEXP (x
, 1), 1);
293 validate_unshare_change (insn
, &XEXP (x
, 1), tem
, 1);
301 /* This subroutine of apply_change_group verifies whether the changes to INSN
302 were valid; i.e. whether INSN can still be recognized.
304 If IN_GROUP is true clobbers which have to be added in order to
305 match the instructions will be added to the current change group.
306 Otherwise the changes will take effect immediately. */
309 insn_invalid_p (rtx insn
, bool in_group
)
311 rtx pat
= PATTERN (insn
);
312 int num_clobbers
= 0;
313 /* If we are before reload and the pattern is a SET, see if we can add
315 int icode
= recog (pat
, insn
,
316 (GET_CODE (pat
) == SET
317 && ! reload_completed
318 && ! reload_in_progress
)
319 ? &num_clobbers
: 0);
320 int is_asm
= icode
< 0 && asm_noperands (PATTERN (insn
)) >= 0;
323 /* If this is an asm and the operand aren't legal, then fail. Likewise if
324 this is not an asm and the insn wasn't recognized. */
325 if ((is_asm
&& ! check_asm_operands (PATTERN (insn
)))
326 || (!is_asm
&& icode
< 0))
329 /* If we have to add CLOBBERs, fail if we have to add ones that reference
330 hard registers since our callers can't know if they are live or not.
331 Otherwise, add them. */
332 if (num_clobbers
> 0)
336 if (added_clobbers_hard_reg_p (icode
))
339 newpat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (num_clobbers
+ 1));
340 XVECEXP (newpat
, 0, 0) = pat
;
341 add_clobbers (newpat
, icode
);
343 validate_change (insn
, &PATTERN (insn
), newpat
, 1);
345 PATTERN (insn
) = pat
= newpat
;
348 /* After reload, verify that all constraints are satisfied. */
349 if (reload_completed
)
353 if (! constrain_operands (1))
357 INSN_CODE (insn
) = icode
;
361 /* Return number of changes made and not validated yet. */
363 num_changes_pending (void)
368 /* Tentatively apply the changes numbered NUM and up.
369 Return 1 if all changes are valid, zero otherwise. */
372 verify_changes (int num
)
375 rtx last_validated
= NULL_RTX
;
377 /* The changes have been applied and all INSN_CODEs have been reset to force
380 The changes are valid if we aren't given an object, or if we are
381 given a MEM and it still is a valid address, or if this is in insn
382 and it is recognized. In the latter case, if reload has completed,
383 we also require that the operands meet the constraints for
386 for (i
= num
; i
< num_changes
; i
++)
388 rtx object
= changes
[i
].object
;
390 /* If there is no object to test or if it is the same as the one we
391 already tested, ignore it. */
392 if (object
== 0 || object
== last_validated
)
397 if (! memory_address_addr_space_p (GET_MODE (object
),
399 MEM_ADDR_SPACE (object
)))
402 else if (/* changes[i].old might be zero, e.g. when putting a
403 REG_FRAME_RELATED_EXPR into a previously empty list. */
405 && REG_P (changes
[i
].old
)
406 && asm_noperands (PATTERN (object
)) > 0
407 && REG_EXPR (changes
[i
].old
) != NULL_TREE
408 && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes
[i
].old
))
409 && DECL_REGISTER (REG_EXPR (changes
[i
].old
)))
411 /* Don't allow changes of hard register operands to inline
412 assemblies if they have been defined as register asm ("x"). */
415 else if (DEBUG_INSN_P (object
))
417 else if (insn_invalid_p (object
, true))
419 rtx pat
= PATTERN (object
);
421 /* Perhaps we couldn't recognize the insn because there were
422 extra CLOBBERs at the end. If so, try to re-recognize
423 without the last CLOBBER (later iterations will cause each of
424 them to be eliminated, in turn). But don't do this if we
425 have an ASM_OPERAND. */
426 if (GET_CODE (pat
) == PARALLEL
427 && GET_CODE (XVECEXP (pat
, 0, XVECLEN (pat
, 0) - 1)) == CLOBBER
428 && asm_noperands (PATTERN (object
)) < 0)
432 if (XVECLEN (pat
, 0) == 2)
433 newpat
= XVECEXP (pat
, 0, 0);
439 = gen_rtx_PARALLEL (VOIDmode
,
440 rtvec_alloc (XVECLEN (pat
, 0) - 1));
441 for (j
= 0; j
< XVECLEN (newpat
, 0); j
++)
442 XVECEXP (newpat
, 0, j
) = XVECEXP (pat
, 0, j
);
445 /* Add a new change to this group to replace the pattern
446 with this new pattern. Then consider this change
447 as having succeeded. The change we added will
448 cause the entire call to fail if things remain invalid.
450 Note that this can lose if a later change than the one
451 we are processing specified &XVECEXP (PATTERN (object), 0, X)
452 but this shouldn't occur. */
454 validate_change (object
, &PATTERN (object
), newpat
, 1);
457 else if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
458 || GET_CODE (pat
) == VAR_LOCATION
)
459 /* If this insn is a CLOBBER or USE, it is always valid, but is
465 last_validated
= object
;
468 return (i
== num_changes
);
471 /* A group of changes has previously been issued with validate_change
472 and verified with verify_changes. Call df_insn_rescan for each of
473 the insn changed and clear num_changes. */
476 confirm_change_group (void)
479 rtx last_object
= NULL
;
481 for (i
= 0; i
< num_changes
; i
++)
483 rtx object
= changes
[i
].object
;
485 if (changes
[i
].unshare
)
486 *changes
[i
].loc
= copy_rtx (*changes
[i
].loc
);
488 /* Avoid unnecessary rescanning when multiple changes to same instruction
492 if (object
!= last_object
&& last_object
&& INSN_P (last_object
))
493 df_insn_rescan (last_object
);
494 last_object
= object
;
498 if (last_object
&& INSN_P (last_object
))
499 df_insn_rescan (last_object
);
/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
523 /* Return the number of changes so far in the current group. */
526 num_validated_changes (void)
531 /* Retract the changes numbered NUM and up. */
534 cancel_changes (int num
)
538 /* Back out all the changes. Do this in the opposite order in which
540 for (i
= num_changes
- 1; i
>= num
; i
--)
542 *changes
[i
].loc
= changes
[i
].old
;
543 if (changes
[i
].object
&& !MEM_P (changes
[i
].object
))
544 INSN_CODE (changes
[i
].object
) = changes
[i
].old_code
;
/* Reduce conditional compilation elsewhere.  */
#ifndef HAVE_extv
#define HAVE_extv	0
#define CODE_FOR_extv	CODE_FOR_nothing
#endif
#ifndef HAVE_extzv
#define HAVE_extzv	0
#define CODE_FOR_extzv	CODE_FOR_nothing
#endif
559 /* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
563 simplify_while_replacing (rtx
*loc
, rtx to
, rtx object
,
564 enum machine_mode op0_mode
)
567 enum rtx_code code
= GET_CODE (x
);
570 if (SWAPPABLE_OPERANDS_P (x
)
571 && swap_commutative_operands_p (XEXP (x
, 0), XEXP (x
, 1)))
573 validate_unshare_change (object
, loc
,
574 gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x
) ? code
575 : swap_condition (code
),
576 GET_MODE (x
), XEXP (x
, 1),
585 /* If we have a PLUS whose second operand is now a CONST_INT, use
586 simplify_gen_binary to try to simplify it.
587 ??? We may want later to remove this, once simplification is
588 separated from this function. */
589 if (CONST_INT_P (XEXP (x
, 1)) && XEXP (x
, 1) == to
)
590 validate_change (object
, loc
,
592 (PLUS
, GET_MODE (x
), XEXP (x
, 0), XEXP (x
, 1)), 1);
595 if (CONST_SCALAR_INT_P (XEXP (x
, 1)))
596 validate_change (object
, loc
,
598 (PLUS
, GET_MODE (x
), XEXP (x
, 0),
599 simplify_gen_unary (NEG
,
600 GET_MODE (x
), XEXP (x
, 1),
605 if (GET_MODE (XEXP (x
, 0)) == VOIDmode
)
607 new_rtx
= simplify_gen_unary (code
, GET_MODE (x
), XEXP (x
, 0),
609 /* If any of the above failed, substitute in something that
610 we know won't be recognized. */
612 new_rtx
= gen_rtx_CLOBBER (GET_MODE (x
), const0_rtx
);
613 validate_change (object
, loc
, new_rtx
, 1);
617 /* All subregs possible to simplify should be simplified. */
618 new_rtx
= simplify_subreg (GET_MODE (x
), SUBREG_REG (x
), op0_mode
,
621 /* Subregs of VOIDmode operands are incorrect. */
622 if (!new_rtx
&& GET_MODE (SUBREG_REG (x
)) == VOIDmode
)
623 new_rtx
= gen_rtx_CLOBBER (GET_MODE (x
), const0_rtx
);
625 validate_change (object
, loc
, new_rtx
, 1);
629 /* If we are replacing a register with memory, try to change the memory
630 to be the mode required for memory in extract operations (this isn't
631 likely to be an insertion operation; if it was, nothing bad will
632 happen, we might just fail in some cases). */
634 if (MEM_P (XEXP (x
, 0))
635 && CONST_INT_P (XEXP (x
, 1))
636 && CONST_INT_P (XEXP (x
, 2))
637 && !mode_dependent_address_p (XEXP (XEXP (x
, 0), 0),
638 MEM_ADDR_SPACE (XEXP (x
, 0)))
639 && !MEM_VOLATILE_P (XEXP (x
, 0)))
641 enum machine_mode wanted_mode
= VOIDmode
;
642 enum machine_mode is_mode
= GET_MODE (XEXP (x
, 0));
643 int pos
= INTVAL (XEXP (x
, 2));
645 if (GET_CODE (x
) == ZERO_EXTRACT
&& HAVE_extzv
)
647 wanted_mode
= insn_data
[CODE_FOR_extzv
].operand
[1].mode
;
648 if (wanted_mode
== VOIDmode
)
649 wanted_mode
= word_mode
;
651 else if (GET_CODE (x
) == SIGN_EXTRACT
&& HAVE_extv
)
653 wanted_mode
= insn_data
[CODE_FOR_extv
].operand
[1].mode
;
654 if (wanted_mode
== VOIDmode
)
655 wanted_mode
= word_mode
;
658 /* If we have a narrower mode, we can do something. */
659 if (wanted_mode
!= VOIDmode
660 && GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
662 int offset
= pos
/ BITS_PER_UNIT
;
665 /* If the bytes and bits are counted differently, we
666 must adjust the offset. */
667 if (BYTES_BIG_ENDIAN
!= BITS_BIG_ENDIAN
)
669 (GET_MODE_SIZE (is_mode
) - GET_MODE_SIZE (wanted_mode
) -
672 gcc_assert (GET_MODE_PRECISION (wanted_mode
)
673 == GET_MODE_BITSIZE (wanted_mode
));
674 pos
%= GET_MODE_BITSIZE (wanted_mode
);
676 newmem
= adjust_address_nv (XEXP (x
, 0), wanted_mode
, offset
);
678 validate_change (object
, &XEXP (x
, 2), GEN_INT (pos
), 1);
679 validate_change (object
, &XEXP (x
, 0), newmem
, 1);
690 /* Replace every occurrence of FROM in X with TO. Mark each change with
691 validate_change passing OBJECT. */
694 validate_replace_rtx_1 (rtx
*loc
, rtx from
, rtx to
, rtx object
,
701 enum machine_mode op0_mode
= VOIDmode
;
702 int prev_changes
= num_changes
;
708 fmt
= GET_RTX_FORMAT (code
);
710 op0_mode
= GET_MODE (XEXP (x
, 0));
712 /* X matches FROM if it is the same rtx or they are both referring to the
713 same register in the same mode. Avoid calling rtx_equal_p unless the
714 operands look similar. */
717 || (REG_P (x
) && REG_P (from
)
718 && GET_MODE (x
) == GET_MODE (from
)
719 && REGNO (x
) == REGNO (from
))
720 || (GET_CODE (x
) == GET_CODE (from
) && GET_MODE (x
) == GET_MODE (from
)
721 && rtx_equal_p (x
, from
)))
723 validate_unshare_change (object
, loc
, to
, 1);
727 /* Call ourself recursively to perform the replacements.
728 We must not replace inside already replaced expression, otherwise we
729 get infinite recursion for replacements like (reg X)->(subreg (reg X))
730 so we must special case shared ASM_OPERANDS. */
732 if (GET_CODE (x
) == PARALLEL
)
734 for (j
= XVECLEN (x
, 0) - 1; j
>= 0; j
--)
736 if (j
&& GET_CODE (XVECEXP (x
, 0, j
)) == SET
737 && GET_CODE (SET_SRC (XVECEXP (x
, 0, j
))) == ASM_OPERANDS
)
739 /* Verify that operands are really shared. */
740 gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x
, 0, 0)))
741 == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
743 validate_replace_rtx_1 (&SET_DEST (XVECEXP (x
, 0, j
)),
744 from
, to
, object
, simplify
);
747 validate_replace_rtx_1 (&XVECEXP (x
, 0, j
), from
, to
, object
,
752 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
755 validate_replace_rtx_1 (&XEXP (x
, i
), from
, to
, object
, simplify
);
756 else if (fmt
[i
] == 'E')
757 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
758 validate_replace_rtx_1 (&XVECEXP (x
, i
, j
), from
, to
, object
,
762 /* If we didn't substitute, there is nothing more to do. */
763 if (num_changes
== prev_changes
)
766 /* ??? The regmove is no more, so is this aberration still necessary? */
767 /* Allow substituted expression to have different mode. This is used by
768 regmove to change mode of pseudo register. */
769 if (fmt
[0] == 'e' && GET_MODE (XEXP (x
, 0)) != VOIDmode
)
770 op0_mode
= GET_MODE (XEXP (x
, 0));
772 /* Do changes needed to keep rtx consistent. Don't do any other
773 simplifications, as it is not our job. */
775 simplify_while_replacing (loc
, to
, object
, op0_mode
);
778 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
779 with TO. After all changes have been made, validate by seeing
780 if INSN is still valid. */
783 validate_replace_rtx_subexp (rtx from
, rtx to
, rtx insn
, rtx
*loc
)
785 validate_replace_rtx_1 (loc
, from
, to
, insn
, true);
786 return apply_change_group ();
789 /* Try replacing every occurrence of FROM in INSN with TO. After all
790 changes have been made, validate by seeing if INSN is still valid. */
793 validate_replace_rtx (rtx from
, rtx to
, rtx insn
)
795 validate_replace_rtx_1 (&PATTERN (insn
), from
, to
, insn
, true);
796 return apply_change_group ();
799 /* Try replacing every occurrence of FROM in WHERE with TO. Assume that WHERE
800 is a part of INSN. After all changes have been made, validate by seeing if
802 validate_replace_rtx (from, to, insn) is equivalent to
803 validate_replace_rtx_part (from, to, &PATTERN (insn), insn). */
806 validate_replace_rtx_part (rtx from
, rtx to
, rtx
*where
, rtx insn
)
808 validate_replace_rtx_1 (where
, from
, to
, insn
, true);
809 return apply_change_group ();
812 /* Same as above, but do not simplify rtx afterwards. */
814 validate_replace_rtx_part_nosimplify (rtx from
, rtx to
, rtx
*where
,
817 validate_replace_rtx_1 (where
, from
, to
, insn
, false);
818 return apply_change_group ();
822 /* Try replacing every occurrence of FROM in INSN with TO. This also
823 will replace in REG_EQUAL and REG_EQUIV notes. */
826 validate_replace_rtx_group (rtx from
, rtx to
, rtx insn
)
829 validate_replace_rtx_1 (&PATTERN (insn
), from
, to
, insn
, true);
830 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
831 if (REG_NOTE_KIND (note
) == REG_EQUAL
832 || REG_NOTE_KIND (note
) == REG_EQUIV
)
833 validate_replace_rtx_1 (&XEXP (note
, 0), from
, to
, insn
, true);
836 /* Function called by note_uses to replace used subexpressions. */
837 struct validate_replace_src_data
839 rtx from
; /* Old RTX */
840 rtx to
; /* New RTX */
841 rtx insn
; /* Insn in which substitution is occurring. */
845 validate_replace_src_1 (rtx
*x
, void *data
)
847 struct validate_replace_src_data
*d
848 = (struct validate_replace_src_data
*) data
;
850 validate_replace_rtx_1 (x
, d
->from
, d
->to
, d
->insn
, true);
853 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
857 validate_replace_src_group (rtx from
, rtx to
, rtx insn
)
859 struct validate_replace_src_data d
;
864 note_uses (&PATTERN (insn
), validate_replace_src_1
, &d
);
867 /* Try simplify INSN.
868 Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
869 pattern and return true if something was simplified. */
872 validate_simplify_insn (rtx insn
)
878 pat
= PATTERN (insn
);
880 if (GET_CODE (pat
) == SET
)
882 newpat
= simplify_rtx (SET_SRC (pat
));
883 if (newpat
&& !rtx_equal_p (SET_SRC (pat
), newpat
))
884 validate_change (insn
, &SET_SRC (pat
), newpat
, 1);
885 newpat
= simplify_rtx (SET_DEST (pat
));
886 if (newpat
&& !rtx_equal_p (SET_DEST (pat
), newpat
))
887 validate_change (insn
, &SET_DEST (pat
), newpat
, 1);
889 else if (GET_CODE (pat
) == PARALLEL
)
890 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
892 rtx s
= XVECEXP (pat
, 0, i
);
894 if (GET_CODE (XVECEXP (pat
, 0, i
)) == SET
)
896 newpat
= simplify_rtx (SET_SRC (s
));
897 if (newpat
&& !rtx_equal_p (SET_SRC (s
), newpat
))
898 validate_change (insn
, &SET_SRC (s
), newpat
, 1);
899 newpat
= simplify_rtx (SET_DEST (s
));
900 if (newpat
&& !rtx_equal_p (SET_DEST (s
), newpat
))
901 validate_change (insn
, &SET_DEST (s
), newpat
, 1);
904 return ((num_changes_pending () > 0) && (apply_change_group () > 0));
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
	  && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
926 /* Return 1 if OP is a valid general operand for machine mode MODE.
927 This is either a register reference, a memory reference,
928 or a constant. In the case of a memory reference, the address
929 is checked for general validity for the target machine.
931 Register and memory references must have mode MODE in order to be valid,
932 but some constants have no machine mode and are valid for any mode.
934 If MODE is VOIDmode, OP is checked for validity for whatever mode
937 The main use of this function is as a predicate in match_operand
938 expressions in the machine description. */
941 general_operand (rtx op
, enum machine_mode mode
)
943 enum rtx_code code
= GET_CODE (op
);
945 if (mode
== VOIDmode
)
946 mode
= GET_MODE (op
);
948 /* Don't accept CONST_INT or anything similar
949 if the caller wants something floating. */
950 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
951 && GET_MODE_CLASS (mode
) != MODE_INT
952 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
957 && trunc_int_for_mode (INTVAL (op
), mode
) != INTVAL (op
))
961 return ((GET_MODE (op
) == VOIDmode
|| GET_MODE (op
) == mode
963 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
964 && targetm
.legitimate_constant_p (mode
== VOIDmode
968 /* Except for certain constants with VOIDmode, already checked for,
969 OP's mode must match MODE if MODE specifies a mode. */
971 if (GET_MODE (op
) != mode
)
976 rtx sub
= SUBREG_REG (op
);
978 #ifdef INSN_SCHEDULING
979 /* On machines that have insn scheduling, we want all memory
980 reference to be explicit, so outlaw paradoxical SUBREGs.
981 However, we must allow them after reload so that they can
982 get cleaned up by cleanup_subreg_operands. */
983 if (!reload_completed
&& MEM_P (sub
)
984 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (GET_MODE (sub
)))
987 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
988 may result in incorrect reference. We should simplify all valid
989 subregs of MEM anyway. But allow this after reload because we
990 might be called from cleanup_subreg_operands.
992 ??? This is a kludge. */
993 if (!reload_completed
&& SUBREG_BYTE (op
) != 0
997 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
998 create such rtl, and we must reject it. */
999 if (SCALAR_FLOAT_MODE_P (GET_MODE (op
))
1000 /* LRA can use subreg to store a floating point value in an
1001 integer mode. Although the floating point and the
1002 integer modes need the same number of hard registers, the
1003 size of floating point mode can be less than the integer
1005 && ! lra_in_progress
1006 && GET_MODE_SIZE (GET_MODE (op
)) > GET_MODE_SIZE (GET_MODE (sub
)))
1010 code
= GET_CODE (op
);
1014 return (REGNO (op
) >= FIRST_PSEUDO_REGISTER
1015 || in_hard_reg_set_p (operand_reg_set
, GET_MODE (op
), REGNO (op
)));
1019 rtx y
= XEXP (op
, 0);
1021 if (! volatile_ok
&& MEM_VOLATILE_P (op
))
1024 /* Use the mem's mode, since it will be reloaded thus. */
1025 if (memory_address_addr_space_p (GET_MODE (op
), y
, MEM_ADDR_SPACE (op
)))
1032 /* Return 1 if OP is a valid memory address for a memory reference
1035 The main use of this function is as a predicate in match_operand
1036 expressions in the machine description. */
1039 address_operand (rtx op
, enum machine_mode mode
)
1041 return memory_address_p (mode
, op
);
1044 /* Return 1 if OP is a register reference of mode MODE.
1045 If MODE is VOIDmode, accept a register in any mode.
1047 The main use of this function is as a predicate in match_operand
1048 expressions in the machine description. */
1051 register_operand (rtx op
, enum machine_mode mode
)
1053 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
1056 if (GET_CODE (op
) == SUBREG
)
1058 rtx sub
= SUBREG_REG (op
);
1060 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1061 because it is guaranteed to be reloaded into one.
1062 Just make sure the MEM is valid in itself.
1063 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1064 but currently it does result from (SUBREG (REG)...) where the
1065 reg went on the stack.) */
1066 if (! reload_completed
&& MEM_P (sub
))
1067 return general_operand (op
, mode
);
1069 #ifdef CANNOT_CHANGE_MODE_CLASS
1071 && REGNO (sub
) < FIRST_PSEUDO_REGISTER
1072 && REG_CANNOT_CHANGE_MODE_P (REGNO (sub
), GET_MODE (sub
), mode
)
1073 && GET_MODE_CLASS (GET_MODE (sub
)) != MODE_COMPLEX_INT
1074 && GET_MODE_CLASS (GET_MODE (sub
)) != MODE_COMPLEX_FLOAT
1075 /* LRA can generate some invalid SUBREGS just for matched
1076 operand reload presentation. LRA needs to treat them as
1078 && ! LRA_SUBREG_P (op
))
1082 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
1083 create such rtl, and we must reject it. */
1084 if (SCALAR_FLOAT_MODE_P (GET_MODE (op
))
1085 /* LRA can use subreg to store a floating point value in an
1086 integer mode. Although the floating point and the
1087 integer modes need the same number of hard registers, the
1088 size of floating point mode can be less than the integer
1090 && ! lra_in_progress
1091 && GET_MODE_SIZE (GET_MODE (op
)) > GET_MODE_SIZE (GET_MODE (sub
)))
1098 && (REGNO (op
) >= FIRST_PSEUDO_REGISTER
1099 || in_hard_reg_set_p (operand_reg_set
,
1100 GET_MODE (op
), REGNO (op
))));
1103 /* Return 1 for a register in Pmode; ignore the tested mode. */
1106 pmode_register_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1108 return register_operand (op
, Pmode
);
1111 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1112 or a hard register. */
1115 scratch_operand (rtx op
, enum machine_mode mode
)
1117 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
1120 return (GET_CODE (op
) == SCRATCH
1122 && (lra_in_progress
|| REGNO (op
) < FIRST_PSEUDO_REGISTER
)));
1125 /* Return 1 if OP is a valid immediate operand for mode MODE.
1127 The main use of this function is as a predicate in match_operand
1128 expressions in the machine description. */
1131 immediate_operand (rtx op
, enum machine_mode mode
)
1133 /* Don't accept CONST_INT or anything similar
1134 if the caller wants something floating. */
1135 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1136 && GET_MODE_CLASS (mode
) != MODE_INT
1137 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1140 if (CONST_INT_P (op
)
1142 && trunc_int_for_mode (INTVAL (op
), mode
) != INTVAL (op
))
1145 return (CONSTANT_P (op
)
1146 && (GET_MODE (op
) == mode
|| mode
== VOIDmode
1147 || GET_MODE (op
) == VOIDmode
)
1148 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
1149 && targetm
.legitimate_constant_p (mode
== VOIDmode
1154 /* Returns 1 if OP is an operand that is a CONST_INT. */
1157 const_int_operand (rtx op
, enum machine_mode mode
)
1159 if (!CONST_INT_P (op
))
1162 if (mode
!= VOIDmode
1163 && trunc_int_for_mode (INTVAL (op
), mode
) != INTVAL (op
))
1169 /* Returns 1 if OP is an operand that is a constant integer or constant
1170 floating-point number. */
1173 const_double_operand (rtx op
, enum machine_mode mode
)
1175 /* Don't accept CONST_INT or anything similar
1176 if the caller wants something floating. */
1177 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1178 && GET_MODE_CLASS (mode
) != MODE_INT
1179 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1182 return ((CONST_DOUBLE_P (op
) || CONST_INT_P (op
))
1183 && (mode
== VOIDmode
|| GET_MODE (op
) == mode
1184 || GET_MODE (op
) == VOIDmode
));
1187 /* Return 1 if OP is a general operand that is not an immediate operand. */
1190 nonimmediate_operand (rtx op
, enum machine_mode mode
)
1192 return (general_operand (op
, mode
) && ! CONSTANT_P (op
));
1195 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1198 nonmemory_operand (rtx op
, enum machine_mode mode
)
1200 if (CONSTANT_P (op
))
1201 return immediate_operand (op
, mode
);
1203 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
1206 if (GET_CODE (op
) == SUBREG
)
1208 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1209 because it is guaranteed to be reloaded into one.
1210 Just make sure the MEM is valid in itself.
1211 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1212 but currently it does result from (SUBREG (REG)...) where the
1213 reg went on the stack.) */
1214 if (! reload_completed
&& MEM_P (SUBREG_REG (op
)))
1215 return general_operand (op
, mode
);
1216 op
= SUBREG_REG (op
);
1220 && (REGNO (op
) >= FIRST_PSEUDO_REGISTER
1221 || in_hard_reg_set_p (operand_reg_set
,
1222 GET_MODE (op
), REGNO (op
))));
1225 /* Return 1 if OP is a valid operand that stands for pushing a
1226 value of mode MODE onto the stack.
1228 The main use of this function is as a predicate in match_operand
1229 expressions in the machine description. */
1232 push_operand (rtx op
, enum machine_mode mode
)
1234 unsigned int rounded_size
= GET_MODE_SIZE (mode
);
1236 #ifdef PUSH_ROUNDING
1237 rounded_size
= PUSH_ROUNDING (rounded_size
);
1243 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1248 if (rounded_size
== GET_MODE_SIZE (mode
))
1250 if (GET_CODE (op
) != STACK_PUSH_CODE
)
1255 if (GET_CODE (op
) != PRE_MODIFY
1256 || GET_CODE (XEXP (op
, 1)) != PLUS
1257 || XEXP (XEXP (op
, 1), 0) != XEXP (op
, 0)
1258 || !CONST_INT_P (XEXP (XEXP (op
, 1), 1))
1259 #ifdef STACK_GROWS_DOWNWARD
1260 || INTVAL (XEXP (XEXP (op
, 1), 1)) != - (int) rounded_size
1262 || INTVAL (XEXP (XEXP (op
, 1), 1)) != (int) rounded_size
1268 return XEXP (op
, 0) == stack_pointer_rtx
;
1271 /* Return 1 if OP is a valid operand that stands for popping a
1272 value of mode MODE off the stack.
1274 The main use of this function is as a predicate in match_operand
1275 expressions in the machine description. */
1278 pop_operand (rtx op
, enum machine_mode mode
)
1283 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1288 if (GET_CODE (op
) != STACK_POP_CODE
)
1291 return XEXP (op
, 0) == stack_pointer_rtx
;
1294 /* Return 1 if ADDR is a valid memory address
1295 for mode MODE in address space AS. */
1298 memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED
,
1299 rtx addr
, addr_space_t as
)
1301 #ifdef GO_IF_LEGITIMATE_ADDRESS
1302 gcc_assert (ADDR_SPACE_GENERIC_P (as
));
1303 GO_IF_LEGITIMATE_ADDRESS (mode
, addr
, win
);
1309 return targetm
.addr_space
.legitimate_address_p (mode
, addr
, 0, as
);
1313 /* Return 1 if OP is a valid memory reference with mode MODE,
1314 including a valid address.
1316 The main use of this function is as a predicate in match_operand
1317 expressions in the machine description. */
1320 memory_operand (rtx op
, enum machine_mode mode
)
1324 if (! reload_completed
)
1325 /* Note that no SUBREG is a memory operand before end of reload pass,
1326 because (SUBREG (MEM...)) forces reloading into a register. */
1327 return MEM_P (op
) && general_operand (op
, mode
);
1329 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1333 if (GET_CODE (inner
) == SUBREG
)
1334 inner
= SUBREG_REG (inner
);
1336 return (MEM_P (inner
) && general_operand (op
, mode
));
1339 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1340 that is, a memory reference whose address is a general_operand. */
1343 indirect_operand (rtx op
, enum machine_mode mode
)
1345 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1346 if (! reload_completed
1347 && GET_CODE (op
) == SUBREG
&& MEM_P (SUBREG_REG (op
)))
1349 int offset
= SUBREG_BYTE (op
);
1350 rtx inner
= SUBREG_REG (op
);
1352 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1355 /* The only way that we can have a general_operand as the resulting
1356 address is if OFFSET is zero and the address already is an operand
1357 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1360 return ((offset
== 0 && general_operand (XEXP (inner
, 0), Pmode
))
1361 || (GET_CODE (XEXP (inner
, 0)) == PLUS
1362 && CONST_INT_P (XEXP (XEXP (inner
, 0), 1))
1363 && INTVAL (XEXP (XEXP (inner
, 0), 1)) == -offset
1364 && general_operand (XEXP (XEXP (inner
, 0), 0), Pmode
)));
1368 && memory_operand (op
, mode
)
1369 && general_operand (XEXP (op
, 0), Pmode
));
1372 /* Return 1 if this is an ordered comparison operator (not including
1373 ORDERED and UNORDERED). */
1376 ordered_comparison_operator (rtx op
, enum machine_mode mode
)
1378 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1380 switch (GET_CODE (op
))
1398 /* Return 1 if this is a comparison operator. This allows the use of
1399 MATCH_OPERATOR to recognize all the branch insns. */
1402 comparison_operator (rtx op
, enum machine_mode mode
)
1404 return ((mode
== VOIDmode
|| GET_MODE (op
) == mode
)
1405 && COMPARISON_P (op
));
1408 /* If BODY is an insn body that uses ASM_OPERANDS, return it. */
1411 extract_asm_operands (rtx body
)
1414 switch (GET_CODE (body
))
1420 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1421 tmp
= SET_SRC (body
);
1422 if (GET_CODE (tmp
) == ASM_OPERANDS
)
1427 tmp
= XVECEXP (body
, 0, 0);
1428 if (GET_CODE (tmp
) == ASM_OPERANDS
)
1430 if (GET_CODE (tmp
) == SET
)
1432 tmp
= SET_SRC (tmp
);
1433 if (GET_CODE (tmp
) == ASM_OPERANDS
)
1444 /* If BODY is an insn body that uses ASM_OPERANDS,
1445 return the number of operands (both input and output) in the insn.
1446 Otherwise return -1. */
1449 asm_noperands (const_rtx body
)
1451 rtx asm_op
= extract_asm_operands (CONST_CAST_RTX (body
));
1457 if (GET_CODE (body
) == SET
)
1459 else if (GET_CODE (body
) == PARALLEL
)
1462 if (GET_CODE (XVECEXP (body
, 0, 0)) == SET
)
1464 /* Multiple output operands, or 1 output plus some clobbers:
1466 [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1467 /* Count backwards through CLOBBERs to determine number of SETs. */
1468 for (i
= XVECLEN (body
, 0); i
> 0; i
--)
1470 if (GET_CODE (XVECEXP (body
, 0, i
- 1)) == SET
)
1472 if (GET_CODE (XVECEXP (body
, 0, i
- 1)) != CLOBBER
)
1476 /* N_SETS is now number of output operands. */
1479 /* Verify that all the SETs we have
1480 came from a single original asm_operands insn
1481 (so that invalid combinations are blocked). */
1482 for (i
= 0; i
< n_sets
; i
++)
1484 rtx elt
= XVECEXP (body
, 0, i
);
1485 if (GET_CODE (elt
) != SET
)
1487 if (GET_CODE (SET_SRC (elt
)) != ASM_OPERANDS
)
1489 /* If these ASM_OPERANDS rtx's came from different original insns
1490 then they aren't allowed together. */
1491 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt
))
1492 != ASM_OPERANDS_INPUT_VEC (asm_op
))
1498 /* 0 outputs, but some clobbers:
1499 body is [(asm_operands ...) (clobber (reg ...))...]. */
1500 /* Make sure all the other parallel things really are clobbers. */
1501 for (i
= XVECLEN (body
, 0) - 1; i
> 0; i
--)
1502 if (GET_CODE (XVECEXP (body
, 0, i
)) != CLOBBER
)
1507 return (ASM_OPERANDS_INPUT_LENGTH (asm_op
)
1508 + ASM_OPERANDS_LABEL_LENGTH (asm_op
) + n_sets
);
1511 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1512 copy its operands (both input and output) into the vector OPERANDS,
1513 the locations of the operands within the insn into the vector OPERAND_LOCS,
1514 and the constraints for the operands into CONSTRAINTS.
1515 Write the modes of the operands into MODES.
1516 Return the assembler-template.
1518 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1519 we don't store that info. */
1522 decode_asm_operands (rtx body
, rtx
*operands
, rtx
**operand_locs
,
1523 const char **constraints
, enum machine_mode
*modes
,
1526 int nbase
= 0, n
, i
;
1529 switch (GET_CODE (body
))
1532 /* Zero output asm: BODY is (asm_operands ...). */
1537 /* Single output asm: BODY is (set OUTPUT (asm_operands ...)). */
1538 asmop
= SET_SRC (body
);
1540 /* The output is in the SET.
1541 Its constraint is in the ASM_OPERANDS itself. */
1543 operands
[0] = SET_DEST (body
);
1545 operand_locs
[0] = &SET_DEST (body
);
1547 constraints
[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop
);
1549 modes
[0] = GET_MODE (SET_DEST (body
));
1555 int nparallel
= XVECLEN (body
, 0); /* Includes CLOBBERs. */
1557 asmop
= XVECEXP (body
, 0, 0);
1558 if (GET_CODE (asmop
) == SET
)
1560 asmop
= SET_SRC (asmop
);
1562 /* At least one output, plus some CLOBBERs. The outputs are in
1563 the SETs. Their constraints are in the ASM_OPERANDS itself. */
1564 for (i
= 0; i
< nparallel
; i
++)
1566 if (GET_CODE (XVECEXP (body
, 0, i
)) == CLOBBER
)
1567 break; /* Past last SET */
1569 operands
[i
] = SET_DEST (XVECEXP (body
, 0, i
));
1571 operand_locs
[i
] = &SET_DEST (XVECEXP (body
, 0, i
));
1573 constraints
[i
] = XSTR (SET_SRC (XVECEXP (body
, 0, i
)), 1);
1575 modes
[i
] = GET_MODE (SET_DEST (XVECEXP (body
, 0, i
)));
1586 n
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1587 for (i
= 0; i
< n
; i
++)
1590 operand_locs
[nbase
+ i
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1592 operands
[nbase
+ i
] = ASM_OPERANDS_INPUT (asmop
, i
);
1594 constraints
[nbase
+ i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1596 modes
[nbase
+ i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1600 n
= ASM_OPERANDS_LABEL_LENGTH (asmop
);
1601 for (i
= 0; i
< n
; i
++)
1604 operand_locs
[nbase
+ i
] = &ASM_OPERANDS_LABEL (asmop
, i
);
1606 operands
[nbase
+ i
] = ASM_OPERANDS_LABEL (asmop
, i
);
1608 constraints
[nbase
+ i
] = "";
1610 modes
[nbase
+ i
] = Pmode
;
1614 *loc
= ASM_OPERANDS_SOURCE_LOCATION (asmop
);
1616 return ASM_OPERANDS_TEMPLATE (asmop
);
1619 /* Check if an asm_operand matches its constraints.
1620 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1623 asm_operand_ok (rtx op
, const char *constraint
, const char **constraints
)
1627 bool incdec_ok
= false;
1630 /* Use constrain_operands after reload. */
1631 gcc_assert (!reload_completed
);
1633 /* Empty constraint string is the same as "X,...,X", i.e. X for as
1634 many alternatives as required to match the other operands. */
1635 if (*constraint
== '\0')
1640 char c
= *constraint
;
1657 case '0': case '1': case '2': case '3': case '4':
1658 case '5': case '6': case '7': case '8': case '9':
1659 /* If caller provided constraints pointer, look up
1660 the matching constraint. Otherwise, our caller should have
1661 given us the proper matching constraint, but we can't
1662 actually fail the check if they didn't. Indicate that
1663 results are inconclusive. */
1667 unsigned long match
;
1669 match
= strtoul (constraint
, &end
, 10);
1671 result
= asm_operand_ok (op
, constraints
[match
], NULL
);
1672 constraint
= (const char *) end
;
1678 while (ISDIGIT (*constraint
));
1685 if (address_operand (op
, VOIDmode
))
1689 case TARGET_MEM_CONSTRAINT
:
1690 case 'V': /* non-offsettable */
1691 if (memory_operand (op
, VOIDmode
))
1695 case 'o': /* offsettable */
1696 if (offsettable_nonstrict_memref_p (op
))
1701 /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed to exist,
1702 excepting those that expand_call created. Further, on some
1703 machines which do not have generalized auto inc/dec, an inc/dec
1704 is not a memory_operand.
1706 Match any memory and hope things are resolved after reload. */
1710 || GET_CODE (XEXP (op
, 0)) == PRE_DEC
1711 || GET_CODE (XEXP (op
, 0)) == POST_DEC
))
1721 || GET_CODE (XEXP (op
, 0)) == PRE_INC
1722 || GET_CODE (XEXP (op
, 0)) == POST_INC
))
1731 if (CONST_DOUBLE_AS_FLOAT_P (op
)
1732 || (GET_CODE (op
) == CONST_VECTOR
1733 && GET_MODE_CLASS (GET_MODE (op
)) == MODE_VECTOR_FLOAT
))
1738 if (CONST_DOUBLE_AS_FLOAT_P (op
)
1739 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op
, 'G', constraint
))
1743 if (CONST_DOUBLE_AS_FLOAT_P (op
)
1744 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op
, 'H', constraint
))
1749 if (CONST_SCALAR_INT_P (op
))
1754 if (CONSTANT_P (op
) && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
)))
1759 if (CONST_SCALAR_INT_P (op
))
1764 if (CONST_INT_P (op
)
1765 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'I', constraint
))
1769 if (CONST_INT_P (op
)
1770 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'J', constraint
))
1774 if (CONST_INT_P (op
)
1775 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'K', constraint
))
1779 if (CONST_INT_P (op
)
1780 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'L', constraint
))
1784 if (CONST_INT_P (op
)
1785 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'M', constraint
))
1789 if (CONST_INT_P (op
)
1790 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'N', constraint
))
1794 if (CONST_INT_P (op
)
1795 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'O', constraint
))
1799 if (CONST_INT_P (op
)
1800 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'P', constraint
))
1809 if (general_operand (op
, VOIDmode
))
1814 /* For all other letters, we first check for a register class,
1815 otherwise it is an EXTRA_CONSTRAINT. */
1816 if (REG_CLASS_FROM_CONSTRAINT (c
, constraint
) != NO_REGS
)
1819 if (GET_MODE (op
) == BLKmode
)
1821 if (register_operand (op
, VOIDmode
))
1824 #ifdef EXTRA_CONSTRAINT_STR
1825 else if (EXTRA_MEMORY_CONSTRAINT (c
, constraint
))
1826 /* Every memory operand can be reloaded to fit. */
1827 result
= result
|| memory_operand (op
, VOIDmode
);
1828 else if (EXTRA_ADDRESS_CONSTRAINT (c
, constraint
))
1829 /* Every address operand can be reloaded to fit. */
1830 result
= result
|| address_operand (op
, VOIDmode
);
1831 else if (EXTRA_CONSTRAINT_STR (op
, c
, constraint
))
1836 len
= CONSTRAINT_LEN (c
, constraint
);
1839 while (--len
&& *constraint
);
1845 /* For operands without < or > constraints reject side-effects. */
1846 if (!incdec_ok
&& result
&& MEM_P (op
))
1847 switch (GET_CODE (XEXP (op
, 0)))
1864 /* Given an rtx *P, if it is a sum containing an integer constant term,
1865 return the location (type rtx *) of the pointer to that constant term.
1866 Otherwise, return a null pointer. */
1869 find_constant_term_loc (rtx
*p
)
1872 enum rtx_code code
= GET_CODE (*p
);
1874 /* If *P IS such a constant term, P is its location. */
1876 if (code
== CONST_INT
|| code
== SYMBOL_REF
|| code
== LABEL_REF
1880 /* Otherwise, if not a sum, it has no constant term. */
1882 if (GET_CODE (*p
) != PLUS
)
1885 /* If one of the summands is constant, return its location. */
1887 if (XEXP (*p
, 0) && CONSTANT_P (XEXP (*p
, 0))
1888 && XEXP (*p
, 1) && CONSTANT_P (XEXP (*p
, 1)))
1891 /* Otherwise, check each summand for containing a constant term. */
1893 if (XEXP (*p
, 0) != 0)
1895 tem
= find_constant_term_loc (&XEXP (*p
, 0));
1900 if (XEXP (*p
, 1) != 0)
1902 tem
= find_constant_term_loc (&XEXP (*p
, 1));
1910 /* Return 1 if OP is a memory reference
1911 whose address contains no side effects
1912 and remains valid after the addition
1913 of a positive integer less than the
1914 size of the object being referenced.
1916 We assume that the original address is valid and do not check it.
1918 This uses strict_memory_address_p as a subroutine, so
1919 don't use it before reload. */
1922 offsettable_memref_p (rtx op
)
1924 return ((MEM_P (op
))
1925 && offsettable_address_addr_space_p (1, GET_MODE (op
), XEXP (op
, 0),
1926 MEM_ADDR_SPACE (op
)));
1929 /* Similar, but don't require a strictly valid mem ref:
1930 consider pseudo-regs valid as index or base regs. */
1933 offsettable_nonstrict_memref_p (rtx op
)
1935 return ((MEM_P (op
))
1936 && offsettable_address_addr_space_p (0, GET_MODE (op
), XEXP (op
, 0),
1937 MEM_ADDR_SPACE (op
)));
1940 /* Return 1 if Y is a memory address which contains no side effects
1941 and would remain valid for address space AS after the addition of
1942 a positive integer less than the size of that mode.
1944 We assume that the original address is valid and do not check it.
1945 We do check that it is valid for narrower modes.
1947 If STRICTP is nonzero, we require a strictly valid address,
1948 for the sake of use in reload.c. */
1951 offsettable_address_addr_space_p (int strictp
, enum machine_mode mode
, rtx y
,
1954 enum rtx_code ycode
= GET_CODE (y
);
1958 int (*addressp
) (enum machine_mode
, rtx
, addr_space_t
) =
1959 (strictp
? strict_memory_address_addr_space_p
1960 : memory_address_addr_space_p
);
1961 unsigned int mode_sz
= GET_MODE_SIZE (mode
);
1963 if (CONSTANT_ADDRESS_P (y
))
1966 /* Adjusting an offsettable address involves changing to a narrower mode.
1967 Make sure that's OK. */
1969 if (mode_dependent_address_p (y
, as
))
1972 enum machine_mode address_mode
= GET_MODE (y
);
1973 if (address_mode
== VOIDmode
)
1974 address_mode
= targetm
.addr_space
.address_mode (as
);
1975 #ifdef POINTERS_EXTEND_UNSIGNED
1976 enum machine_mode pointer_mode
= targetm
.addr_space
.pointer_mode (as
);
1979 /* ??? How much offset does an offsettable BLKmode reference need?
1980 Clearly that depends on the situation in which it's being used.
1981 However, the current situation in which we test 0xffffffff is
1982 less than ideal. Caveat user. */
1984 mode_sz
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
1986 /* If the expression contains a constant term,
1987 see if it remains valid when max possible offset is added. */
1989 if ((ycode
== PLUS
) && (y2
= find_constant_term_loc (&y1
)))
1994 *y2
= plus_constant (address_mode
, *y2
, mode_sz
- 1);
1995 /* Use QImode because an odd displacement may be automatically invalid
1996 for any wider mode. But it should be valid for a single byte. */
1997 good
= (*addressp
) (QImode
, y
, as
);
1999 /* In any case, restore old contents of memory. */
2004 if (GET_RTX_CLASS (ycode
) == RTX_AUTOINC
)
2007 /* The offset added here is chosen as the maximum offset that
2008 any instruction could need to add when operating on something
2009 of the specified mode. We assume that if Y and Y+c are
2010 valid addresses then so is Y+d for all 0<d<c. adjust_address will
2011 go inside a LO_SUM here, so we do so as well. */
2012 if (GET_CODE (y
) == LO_SUM
2014 && mode_sz
<= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
)
2015 z
= gen_rtx_LO_SUM (address_mode
, XEXP (y
, 0),
2016 plus_constant (address_mode
, XEXP (y
, 1),
2018 #ifdef POINTERS_EXTEND_UNSIGNED
2019 /* Likewise for a ZERO_EXTEND from pointer_mode. */
2020 else if (POINTERS_EXTEND_UNSIGNED
> 0
2021 && GET_CODE (y
) == ZERO_EXTEND
2022 && GET_MODE (XEXP (y
, 0)) == pointer_mode
)
2023 z
= gen_rtx_ZERO_EXTEND (address_mode
,
2024 plus_constant (pointer_mode
, XEXP (y
, 0),
2028 z
= plus_constant (address_mode
, y
, mode_sz
- 1);
2030 /* Use QImode because an odd displacement may be automatically invalid
2031 for any wider mode. But it should be valid for a single byte. */
2032 return (*addressp
) (QImode
, z
, as
);
2035 /* Return 1 if ADDR is an address-expression whose effect depends
2036 on the mode of the memory reference it is used in.
2038 ADDRSPACE is the address space associated with the address.
2040 Autoincrement addressing is a typical example of mode-dependence
2041 because the amount of the increment depends on the mode. */
2044 mode_dependent_address_p (rtx addr
, addr_space_t addrspace
)
2046 /* Auto-increment addressing with anything other than post_modify
2047 or pre_modify always introduces a mode dependency. Catch such
2048 cases now instead of deferring to the target. */
2049 if (GET_CODE (addr
) == PRE_INC
2050 || GET_CODE (addr
) == POST_INC
2051 || GET_CODE (addr
) == PRE_DEC
2052 || GET_CODE (addr
) == POST_DEC
)
2055 return targetm
.mode_dependent_address_p (addr
, addrspace
);
2058 /* Like extract_insn, but save insn extracted and don't extract again, when
2059 called again for the same insn expecting that recog_data still contain the
2060 valid information. This is used primary by gen_attr infrastructure that
2061 often does extract insn again and again. */
2063 extract_insn_cached (rtx insn
)
2065 if (recog_data
.insn
== insn
&& INSN_CODE (insn
) >= 0)
2067 extract_insn (insn
);
2068 recog_data
.insn
= insn
;
2071 /* Do cached extract_insn, constrain_operands and complain about failures.
2072 Used by insn_attrtab. */
2074 extract_constrain_insn_cached (rtx insn
)
2076 extract_insn_cached (insn
);
2077 if (which_alternative
== -1
2078 && !constrain_operands (reload_completed
))
2079 fatal_insn_not_found (insn
);
2082 /* Do cached constrain_operands and complain about failures. */
2084 constrain_operands_cached (int strict
)
2086 if (which_alternative
== -1)
2087 return constrain_operands (strict
);
2092 /* Analyze INSN and fill in recog_data. */
2095 extract_insn (rtx insn
)
2100 rtx body
= PATTERN (insn
);
2102 recog_data
.n_operands
= 0;
2103 recog_data
.n_alternatives
= 0;
2104 recog_data
.n_dups
= 0;
2105 recog_data
.is_asm
= false;
2107 switch (GET_CODE (body
))
2118 if (GET_CODE (SET_SRC (body
)) == ASM_OPERANDS
)
2123 if ((GET_CODE (XVECEXP (body
, 0, 0)) == SET
2124 && GET_CODE (SET_SRC (XVECEXP (body
, 0, 0))) == ASM_OPERANDS
)
2125 || GET_CODE (XVECEXP (body
, 0, 0)) == ASM_OPERANDS
)
2131 recog_data
.n_operands
= noperands
= asm_noperands (body
);
2134 /* This insn is an `asm' with operands. */
2136 /* expand_asm_operands makes sure there aren't too many operands. */
2137 gcc_assert (noperands
<= MAX_RECOG_OPERANDS
);
2139 /* Now get the operand values and constraints out of the insn. */
2140 decode_asm_operands (body
, recog_data
.operand
,
2141 recog_data
.operand_loc
,
2142 recog_data
.constraints
,
2143 recog_data
.operand_mode
, NULL
);
2144 memset (recog_data
.is_operator
, 0, sizeof recog_data
.is_operator
);
2147 const char *p
= recog_data
.constraints
[0];
2148 recog_data
.n_alternatives
= 1;
2150 recog_data
.n_alternatives
+= (*p
++ == ',');
2152 recog_data
.is_asm
= true;
2155 fatal_insn_not_found (insn
);
2159 /* Ordinary insn: recognize it, get the operands via insn_extract
2160 and get the constraints. */
2162 icode
= recog_memoized (insn
);
2164 fatal_insn_not_found (insn
);
2166 recog_data
.n_operands
= noperands
= insn_data
[icode
].n_operands
;
2167 recog_data
.n_alternatives
= insn_data
[icode
].n_alternatives
;
2168 recog_data
.n_dups
= insn_data
[icode
].n_dups
;
2170 insn_extract (insn
);
2172 for (i
= 0; i
< noperands
; i
++)
2174 recog_data
.constraints
[i
] = insn_data
[icode
].operand
[i
].constraint
;
2175 recog_data
.is_operator
[i
] = insn_data
[icode
].operand
[i
].is_operator
;
2176 recog_data
.operand_mode
[i
] = insn_data
[icode
].operand
[i
].mode
;
2177 /* VOIDmode match_operands gets mode from their real operand. */
2178 if (recog_data
.operand_mode
[i
] == VOIDmode
)
2179 recog_data
.operand_mode
[i
] = GET_MODE (recog_data
.operand
[i
]);
2182 for (i
= 0; i
< noperands
; i
++)
2183 recog_data
.operand_type
[i
]
2184 = (recog_data
.constraints
[i
][0] == '=' ? OP_OUT
2185 : recog_data
.constraints
[i
][0] == '+' ? OP_INOUT
2188 gcc_assert (recog_data
.n_alternatives
<= MAX_RECOG_ALTERNATIVES
);
2190 if (INSN_CODE (insn
) < 0)
2191 for (i
= 0; i
< recog_data
.n_alternatives
; i
++)
2192 recog_data
.alternative_enabled_p
[i
] = true;
2195 recog_data
.insn
= insn
;
2196 for (i
= 0; i
< recog_data
.n_alternatives
; i
++)
2198 which_alternative
= i
;
2199 recog_data
.alternative_enabled_p
[i
]
2200 = HAVE_ATTR_enabled
? get_attr_enabled (insn
) : 1;
2204 recog_data
.insn
= NULL
;
2205 which_alternative
= -1;
2208 /* After calling extract_insn, you can use this function to extract some
2209 information from the constraint strings into a more usable form.
2210 The collected data is stored in recog_op_alt. */
2212 preprocess_constraints (void)
2216 for (i
= 0; i
< recog_data
.n_operands
; i
++)
2217 memset (recog_op_alt
[i
], 0, (recog_data
.n_alternatives
2218 * sizeof (struct operand_alternative
)));
2220 for (i
= 0; i
< recog_data
.n_operands
; i
++)
2223 struct operand_alternative
*op_alt
;
2224 const char *p
= recog_data
.constraints
[i
];
2226 op_alt
= recog_op_alt
[i
];
2228 for (j
= 0; j
< recog_data
.n_alternatives
; j
++)
2230 op_alt
[j
].cl
= NO_REGS
;
2231 op_alt
[j
].constraint
= p
;
2232 op_alt
[j
].matches
= -1;
2233 op_alt
[j
].matched
= -1;
2235 if (!recog_data
.alternative_enabled_p
[j
])
2237 p
= skip_alternative (p
);
2241 if (*p
== '\0' || *p
== ',')
2243 op_alt
[j
].anything_ok
= 1;
2253 while (c
!= ',' && c
!= '\0');
2254 if (c
== ',' || c
== '\0')
2262 case '=': case '+': case '*': case '%':
2263 case 'E': case 'F': case 'G': case 'H':
2264 case 's': case 'i': case 'n':
2265 case 'I': case 'J': case 'K': case 'L':
2266 case 'M': case 'N': case 'O': case 'P':
2267 /* These don't say anything we care about. */
2271 op_alt
[j
].reject
+= 6;
2274 op_alt
[j
].reject
+= 600;
2277 op_alt
[j
].earlyclobber
= 1;
2280 case '0': case '1': case '2': case '3': case '4':
2281 case '5': case '6': case '7': case '8': case '9':
2284 op_alt
[j
].matches
= strtoul (p
, &end
, 10);
2285 recog_op_alt
[op_alt
[j
].matches
][j
].matched
= i
;
2290 case TARGET_MEM_CONSTRAINT
:
2291 op_alt
[j
].memory_ok
= 1;
2294 op_alt
[j
].decmem_ok
= 1;
2297 op_alt
[j
].incmem_ok
= 1;
2300 op_alt
[j
].nonoffmem_ok
= 1;
2303 op_alt
[j
].offmem_ok
= 1;
2306 op_alt
[j
].anything_ok
= 1;
2310 op_alt
[j
].is_address
= 1;
2311 op_alt
[j
].cl
= reg_class_subunion
[(int) op_alt
[j
].cl
]
2312 [(int) base_reg_class (VOIDmode
, ADDR_SPACE_GENERIC
,
2319 reg_class_subunion
[(int) op_alt
[j
].cl
][(int) GENERAL_REGS
];
2323 if (EXTRA_MEMORY_CONSTRAINT (c
, p
))
2325 op_alt
[j
].memory_ok
= 1;
2328 if (EXTRA_ADDRESS_CONSTRAINT (c
, p
))
2330 op_alt
[j
].is_address
= 1;
2332 = (reg_class_subunion
2333 [(int) op_alt
[j
].cl
]
2334 [(int) base_reg_class (VOIDmode
, ADDR_SPACE_GENERIC
,
2335 ADDRESS
, SCRATCH
)]);
2340 = (reg_class_subunion
2341 [(int) op_alt
[j
].cl
]
2342 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c
, p
)]);
2345 p
+= CONSTRAINT_LEN (c
, p
);
2351 /* Check the operands of an insn against the insn's operand constraints
2352 and return 1 if they are valid.
2353 The information about the insn's operands, constraints, operand modes
2354 etc. is obtained from the global variables set up by extract_insn.
2356 WHICH_ALTERNATIVE is set to a number which indicates which
2357 alternative of constraints was matched: 0 for the first alternative,
2358 1 for the next, etc.
2360 In addition, when two operands are required to match
2361 and it happens that the output operand is (reg) while the
2362 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2363 make the output operand look like the input.
2364 This is because the output operand is the one the template will print.
2366 This is used in final, just before printing the assembler code and by
2367 the routines that determine an insn's attribute.
2369 If STRICT is a positive nonzero value, it means that we have been
2370 called after reload has been completed. In that case, we must
2371 do all checks strictly. If it is zero, it means that we have been called
2372 before reload has completed. In that case, we first try to see if we can
2373 find an alternative that matches strictly. If not, we try again, this
2374 time assuming that reload will fix up the insn. This provides a "best
2375 guess" for the alternative and is used to compute attributes of insns prior
2376 to reload. A negative value of STRICT is used for this internal call. */
2384 constrain_operands (int strict
)
2386 const char *constraints
[MAX_RECOG_OPERANDS
];
2387 int matching_operands
[MAX_RECOG_OPERANDS
];
2388 int earlyclobber
[MAX_RECOG_OPERANDS
];
2391 struct funny_match funny_match
[MAX_RECOG_OPERANDS
];
2392 int funny_match_index
;
2394 which_alternative
= 0;
2395 if (recog_data
.n_operands
== 0 || recog_data
.n_alternatives
== 0)
2398 for (c
= 0; c
< recog_data
.n_operands
; c
++)
2400 constraints
[c
] = recog_data
.constraints
[c
];
2401 matching_operands
[c
] = -1;
2406 int seen_earlyclobber_at
= -1;
2409 funny_match_index
= 0;
2411 if (!recog_data
.alternative_enabled_p
[which_alternative
])
2415 for (i
= 0; i
< recog_data
.n_operands
; i
++)
2416 constraints
[i
] = skip_alternative (constraints
[i
]);
2418 which_alternative
++;
2422 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2424 rtx op
= recog_data
.operand
[opno
];
2425 enum machine_mode mode
= GET_MODE (op
);
2426 const char *p
= constraints
[opno
];
2432 earlyclobber
[opno
] = 0;
2434 /* A unary operator may be accepted by the predicate, but it
2435 is irrelevant for matching constraints. */
2439 if (GET_CODE (op
) == SUBREG
)
2441 if (REG_P (SUBREG_REG (op
))
2442 && REGNO (SUBREG_REG (op
)) < FIRST_PSEUDO_REGISTER
)
2443 offset
= subreg_regno_offset (REGNO (SUBREG_REG (op
)),
2444 GET_MODE (SUBREG_REG (op
)),
2447 op
= SUBREG_REG (op
);
2450 /* An empty constraint or empty alternative
2451 allows anything which matched the pattern. */
2452 if (*p
== 0 || *p
== ',')
2456 switch (c
= *p
, len
= CONSTRAINT_LEN (c
, p
), c
)
2465 case '?': case '!': case '*': case '%':
2470 /* Ignore rest of this alternative as far as
2471 constraint checking is concerned. */
2474 while (*p
&& *p
!= ',');
2479 earlyclobber
[opno
] = 1;
2480 if (seen_earlyclobber_at
< 0)
2481 seen_earlyclobber_at
= opno
;
2484 case '0': case '1': case '2': case '3': case '4':
2485 case '5': case '6': case '7': case '8': case '9':
2487 /* This operand must be the same as a previous one.
2488 This kind of constraint is used for instructions such
2489 as add when they take only two operands.
2491 Note that the lower-numbered operand is passed first.
2493 If we are not testing strictly, assume that this
2494 constraint will be satisfied. */
2499 match
= strtoul (p
, &end
, 10);
2506 rtx op1
= recog_data
.operand
[match
];
2507 rtx op2
= recog_data
.operand
[opno
];
2509 /* A unary operator may be accepted by the predicate,
2510 but it is irrelevant for matching constraints. */
2512 op1
= XEXP (op1
, 0);
2514 op2
= XEXP (op2
, 0);
2516 val
= operands_match_p (op1
, op2
);
2519 matching_operands
[opno
] = match
;
2520 matching_operands
[match
] = opno
;
2525 /* If output is *x and input is *--x, arrange later
2526 to change the output to *--x as well, since the
2527 output op is the one that will be printed. */
2528 if (val
== 2 && strict
> 0)
2530 funny_match
[funny_match_index
].this_op
= opno
;
2531 funny_match
[funny_match_index
++].other
= match
;
2538 /* p is used for address_operands. When we are called by
2539 gen_reload, no one will have checked that the address is
2540 strictly valid, i.e., that all pseudos requiring hard regs
2541 have gotten them. */
2543 || (strict_memory_address_p (recog_data
.operand_mode
[opno
],
2548 /* No need to check general_operand again;
2549 it was done in insn-recog.c. Well, except that reload
2550 doesn't check the validity of its replacements, but
2551 that should only matter when there's a bug. */
2553 /* Anything goes unless it is a REG and really has a hard reg
2554 but the hard reg is not in the class GENERAL_REGS. */
2558 || GENERAL_REGS
== ALL_REGS
2559 || (reload_in_progress
2560 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2561 || reg_fits_class_p (op
, GENERAL_REGS
, offset
, mode
))
2564 else if (strict
< 0 || general_operand (op
, mode
))
2569 /* This is used for a MATCH_SCRATCH in the cases when
2570 we don't actually need anything. So anything goes
2575 case TARGET_MEM_CONSTRAINT
:
2576 /* Memory operands must be valid, to the extent
2577 required by STRICT. */
2581 && !strict_memory_address_addr_space_p
2582 (GET_MODE (op
), XEXP (op
, 0),
2583 MEM_ADDR_SPACE (op
)))
2586 && !memory_address_addr_space_p
2587 (GET_MODE (op
), XEXP (op
, 0),
2588 MEM_ADDR_SPACE (op
)))
2592 /* Before reload, accept what reload can turn into mem. */
2593 else if (strict
< 0 && CONSTANT_P (op
))
2595 /* During reload, accept a pseudo */
2596 else if (reload_in_progress
&& REG_P (op
)
2597 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2603 && (GET_CODE (XEXP (op
, 0)) == PRE_DEC
2604 || GET_CODE (XEXP (op
, 0)) == POST_DEC
))
2610 && (GET_CODE (XEXP (op
, 0)) == PRE_INC
2611 || GET_CODE (XEXP (op
, 0)) == POST_INC
))
2617 if (CONST_DOUBLE_AS_FLOAT_P (op
)
2618 || (GET_CODE (op
) == CONST_VECTOR
2619 && GET_MODE_CLASS (GET_MODE (op
)) == MODE_VECTOR_FLOAT
))
2625 if (CONST_DOUBLE_AS_FLOAT_P (op
)
2626 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op
, c
, p
))
2631 if (CONST_SCALAR_INT_P (op
))
2634 if (CONSTANT_P (op
))
2639 if (CONST_SCALAR_INT_P (op
))
2651 if (CONST_INT_P (op
)
2652 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), c
, p
))
2658 && ((strict
> 0 && ! offsettable_memref_p (op
))
2660 && !(CONSTANT_P (op
) || MEM_P (op
)))
2661 || (reload_in_progress
2663 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
))))
2668 if ((strict
> 0 && offsettable_memref_p (op
))
2669 || (strict
== 0 && offsettable_nonstrict_memref_p (op
))
2670 /* Before reload, accept what reload can handle. */
2672 && (CONSTANT_P (op
) || MEM_P (op
)))
2673 /* During reload, accept a pseudo */
2674 || (reload_in_progress
&& REG_P (op
)
2675 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
))
2684 ? GENERAL_REGS
: REG_CLASS_FROM_CONSTRAINT (c
, p
));
2690 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2691 || (strict
== 0 && GET_CODE (op
) == SCRATCH
)
2693 && reg_fits_class_p (op
, cl
, offset
, mode
)))
2696 #ifdef EXTRA_CONSTRAINT_STR
2697 else if (EXTRA_CONSTRAINT_STR (op
, c
, p
))
2700 else if (EXTRA_MEMORY_CONSTRAINT (c
, p
)
2701 /* Every memory operand can be reloaded to fit. */
2702 && ((strict
< 0 && MEM_P (op
))
2703 /* Before reload, accept what reload can turn
2705 || (strict
< 0 && CONSTANT_P (op
))
2706 /* During reload, accept a pseudo */
2707 || (reload_in_progress
&& REG_P (op
)
2708 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)))
2710 else if (EXTRA_ADDRESS_CONSTRAINT (c
, p
)
2711 /* Every address operand can be reloaded to fit. */
2714 /* Cater to architectures like IA-64 that define extra memory
2715 constraints without using define_memory_constraint. */
2716 else if (reload_in_progress
2718 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
2719 && reg_renumber
[REGNO (op
)] < 0
2720 && reg_equiv_mem (REGNO (op
)) != 0
2721 && EXTRA_CONSTRAINT_STR
2722 (reg_equiv_mem (REGNO (op
)), c
, p
))
2728 while (p
+= len
, c
);
2730 constraints
[opno
] = p
;
2731 /* If this operand did not win somehow,
2732 this alternative loses. */
2736 /* This alternative won; the operands are ok.
2737 Change whichever operands this alternative says to change. */
2742 /* See if any earlyclobber operand conflicts with some other
2745 if (strict
> 0 && seen_earlyclobber_at
>= 0)
2746 for (eopno
= seen_earlyclobber_at
;
2747 eopno
< recog_data
.n_operands
;
2749 /* Ignore earlyclobber operands now in memory,
2750 because we would often report failure when we have
2751 two memory operands, one of which was formerly a REG. */
2752 if (earlyclobber
[eopno
]
2753 && REG_P (recog_data
.operand
[eopno
]))
2754 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2755 if ((MEM_P (recog_data
.operand
[opno
])
2756 || recog_data
.operand_type
[opno
] != OP_OUT
)
2758 /* Ignore things like match_operator operands. */
2759 && *recog_data
.constraints
[opno
] != 0
2760 && ! (matching_operands
[opno
] == eopno
2761 && operands_match_p (recog_data
.operand
[opno
],
2762 recog_data
.operand
[eopno
]))
2763 && ! safe_from_earlyclobber (recog_data
.operand
[opno
],
2764 recog_data
.operand
[eopno
]))
2769 while (--funny_match_index
>= 0)
2771 recog_data
.operand
[funny_match
[funny_match_index
].other
]
2772 = recog_data
.operand
[funny_match
[funny_match_index
].this_op
];
2776 /* For operands without < or > constraints reject side-effects. */
2777 if (recog_data
.is_asm
)
2779 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2780 if (MEM_P (recog_data
.operand
[opno
]))
2781 switch (GET_CODE (XEXP (recog_data
.operand
[opno
], 0)))
2789 if (strchr (recog_data
.constraints
[opno
], '<') == NULL
2790 && strchr (recog_data
.constraints
[opno
], '>')
2803 which_alternative
++;
2805 while (which_alternative
< recog_data
.n_alternatives
);
2807 which_alternative
= -1;
2808 /* If we are about to reject this, but we are not to test strictly,
2809 try a very loose test. Only return failure if it fails also. */
2811 return constrain_operands (-1);
2816 /* Return true iff OPERAND (assumed to be a REG rtx)
2817 is a hard reg in class CLASS when its regno is offset by OFFSET
2818 and changed to mode MODE.
2819 If REG occupies multiple hard regs, all of them must be in CLASS. */
2822 reg_fits_class_p (const_rtx operand
, reg_class_t cl
, int offset
,
2823 enum machine_mode mode
)
2825 unsigned int regno
= REGNO (operand
);
2830 /* Regno must not be a pseudo register. Offset may be negative. */
2831 return (HARD_REGISTER_NUM_P (regno
)
2832 && HARD_REGISTER_NUM_P (regno
+ offset
)
2833 && in_hard_reg_set_p (reg_class_contents
[(int) cl
], mode
,
2837 /* Split single instruction. Helper function for split_all_insns and
2838 split_all_insns_noflow. Return last insn in the sequence if successful,
2839 or NULL if unsuccessful. */
2842 split_insn (rtx insn
)
2844 /* Split insns here to get max fine-grain parallelism. */
2845 rtx first
= PREV_INSN (insn
);
2846 rtx last
= try_split (PATTERN (insn
), insn
, 1);
2847 rtx insn_set
, last_set
, note
;
2852 /* If the original instruction was a single set that was known to be
2853 equivalent to a constant, see if we can say the same about the last
2854 instruction in the split sequence. The two instructions must set
2855 the same destination. */
2856 insn_set
= single_set (insn
);
2859 last_set
= single_set (last
);
2860 if (last_set
&& rtx_equal_p (SET_DEST (last_set
), SET_DEST (insn_set
)))
2862 note
= find_reg_equal_equiv_note (insn
);
2863 if (note
&& CONSTANT_P (XEXP (note
, 0)))
2864 set_unique_reg_note (last
, REG_EQUAL
, XEXP (note
, 0));
2865 else if (CONSTANT_P (SET_SRC (insn_set
)))
2866 set_unique_reg_note (last
, REG_EQUAL
,
2867 copy_rtx (SET_SRC (insn_set
)));
2871 /* try_split returns the NOTE that INSN became. */
2872 SET_INSN_DELETED (insn
);
2874 /* ??? Coddle to md files that generate subregs in post-reload
2875 splitters instead of computing the proper hard register. */
2876 if (reload_completed
&& first
!= last
)
2878 first
= NEXT_INSN (first
);
2882 cleanup_subreg_operands (first
);
2885 first
= NEXT_INSN (first
);
2892 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2895 split_all_insns (void)
2901 blocks
= sbitmap_alloc (last_basic_block_for_fn (cfun
));
2902 bitmap_clear (blocks
);
2905 FOR_EACH_BB_REVERSE (bb
)
2908 bool finish
= false;
2910 rtl_profile_for_bb (bb
);
2911 for (insn
= BB_HEAD (bb
); !finish
; insn
= next
)
2913 /* Can't use `next_real_insn' because that might go across
2914 CODE_LABELS and short-out basic blocks. */
2915 next
= NEXT_INSN (insn
);
2916 finish
= (insn
== BB_END (bb
));
2919 rtx set
= single_set (insn
);
2921 /* Don't split no-op move insns. These should silently
2922 disappear later in final. Splitting such insns would
2923 break the code that handles LIBCALL blocks. */
2924 if (set
&& set_noop_p (set
))
2926 /* Nops get in the way while scheduling, so delete them
2927 now if register allocation has already been done. It
2928 is too risky to try to do this before register
2929 allocation, and there are unlikely to be very many
2930 nops then anyways. */
2931 if (reload_completed
)
2932 delete_insn_and_edges (insn
);
2936 if (split_insn (insn
))
2938 bitmap_set_bit (blocks
, bb
->index
);
2946 default_rtl_profile ();
2948 find_many_sub_basic_blocks (blocks
);
2950 #ifdef ENABLE_CHECKING
2951 verify_flow_info ();
2954 sbitmap_free (blocks
);
2957 /* Same as split_all_insns, but do not expect CFG to be available.
2958 Used by machine dependent reorg passes. */
2961 split_all_insns_noflow (void)
2965 for (insn
= get_insns (); insn
; insn
= next
)
2967 next
= NEXT_INSN (insn
);
2970 /* Don't split no-op move insns. These should silently
2971 disappear later in final. Splitting such insns would
2972 break the code that handles LIBCALL blocks. */
2973 rtx set
= single_set (insn
);
2974 if (set
&& set_noop_p (set
))
2976 /* Nops get in the way while scheduling, so delete them
2977 now if register allocation has already been done. It
2978 is too risky to try to do this before register
2979 allocation, and there are unlikely to be very many
2982 ??? Should we use delete_insn when the CFG isn't valid? */
2983 if (reload_completed
)
2984 delete_insn_and_edges (insn
);
2993 #ifdef HAVE_peephole2
2994 struct peep2_insn_data
3000 static struct peep2_insn_data peep2_insn_data
[MAX_INSNS_PER_PEEP2
+ 1];
3001 static int peep2_current
;
3003 static bool peep2_do_rebuild_jump_labels
;
3004 static bool peep2_do_cleanup_cfg
;
3006 /* The number of instructions available to match a peep2. */
3007 int peep2_current_count
;
3009 /* A non-insn marker indicating the last insn of the block.
3010 The live_before regset for this element is correct, indicating
3011 DF_LIVE_OUT for the block. */
3012 #define PEEP2_EOB pc_rtx
3014 /* Wrap N to fit into the peep2_insn_data buffer. */
3017 peep2_buf_position (int n
)
3019 if (n
>= MAX_INSNS_PER_PEEP2
+ 1)
3020 n
-= MAX_INSNS_PER_PEEP2
+ 1;
3024 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3025 does not exist. Used by the recognizer to find the next insn to match
3026 in a multi-insn pattern. */
3029 peep2_next_insn (int n
)
3031 gcc_assert (n
<= peep2_current_count
);
3033 n
= peep2_buf_position (peep2_current
+ n
);
3035 return peep2_insn_data
[n
].insn
;
3038 /* Return true if REGNO is dead before the Nth non-note insn
3042 peep2_regno_dead_p (int ofs
, int regno
)
3044 gcc_assert (ofs
< MAX_INSNS_PER_PEEP2
+ 1);
3046 ofs
= peep2_buf_position (peep2_current
+ ofs
);
3048 gcc_assert (peep2_insn_data
[ofs
].insn
!= NULL_RTX
);
3050 return ! REGNO_REG_SET_P (peep2_insn_data
[ofs
].live_before
, regno
);
3053 /* Similarly for a REG. */
3056 peep2_reg_dead_p (int ofs
, rtx reg
)
3060 gcc_assert (ofs
< MAX_INSNS_PER_PEEP2
+ 1);
3062 ofs
= peep2_buf_position (peep2_current
+ ofs
);
3064 gcc_assert (peep2_insn_data
[ofs
].insn
!= NULL_RTX
);
3066 regno
= REGNO (reg
);
3067 n
= hard_regno_nregs
[regno
][GET_MODE (reg
)];
3069 if (REGNO_REG_SET_P (peep2_insn_data
[ofs
].live_before
, regno
+ n
))
3074 /* Regno offset to be used in the register search. */
3075 static int search_ofs
;
3077 /* Try to find a hard register of mode MODE, matching the register class in
3078 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
3079 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
3080 in which case the only condition is that the register must be available
3081 before CURRENT_INSN.
3082 Registers that already have bits set in REG_SET will not be considered.
3084 If an appropriate register is available, it will be returned and the
3085 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3089 peep2_find_free_register (int from
, int to
, const char *class_str
,
3090 enum machine_mode mode
, HARD_REG_SET
*reg_set
)
3097 gcc_assert (from
< MAX_INSNS_PER_PEEP2
+ 1);
3098 gcc_assert (to
< MAX_INSNS_PER_PEEP2
+ 1);
3100 from
= peep2_buf_position (peep2_current
+ from
);
3101 to
= peep2_buf_position (peep2_current
+ to
);
3103 gcc_assert (peep2_insn_data
[from
].insn
!= NULL_RTX
);
3104 REG_SET_TO_HARD_REG_SET (live
, peep2_insn_data
[from
].live_before
);
3108 gcc_assert (peep2_insn_data
[from
].insn
!= NULL_RTX
);
3110 /* Don't use registers set or clobbered by the insn. */
3111 for (def_rec
= DF_INSN_DEFS (peep2_insn_data
[from
].insn
);
3112 *def_rec
; def_rec
++)
3113 SET_HARD_REG_BIT (live
, DF_REF_REGNO (*def_rec
));
3115 from
= peep2_buf_position (from
+ 1);
3118 cl
= (class_str
[0] == 'r' ? GENERAL_REGS
3119 : REG_CLASS_FROM_CONSTRAINT (class_str
[0], class_str
));
3121 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3123 int raw_regno
, regno
, success
, j
;
3125 /* Distribute the free registers as much as possible. */
3126 raw_regno
= search_ofs
+ i
;
3127 if (raw_regno
>= FIRST_PSEUDO_REGISTER
)
3128 raw_regno
-= FIRST_PSEUDO_REGISTER
;
3129 #ifdef REG_ALLOC_ORDER
3130 regno
= reg_alloc_order
[raw_regno
];
3135 /* Can it support the mode we need? */
3136 if (! HARD_REGNO_MODE_OK (regno
, mode
))
3140 for (j
= 0; success
&& j
< hard_regno_nregs
[regno
][mode
]; j
++)
3142 /* Don't allocate fixed registers. */
3143 if (fixed_regs
[regno
+ j
])
3148 /* Don't allocate global registers. */
3149 if (global_regs
[regno
+ j
])
3154 /* Make sure the register is of the right class. */
3155 if (! TEST_HARD_REG_BIT (reg_class_contents
[cl
], regno
+ j
))
3160 /* And that we don't create an extra save/restore. */
3161 if (! call_used_regs
[regno
+ j
] && ! df_regs_ever_live_p (regno
+ j
))
3167 if (! targetm
.hard_regno_scratch_ok (regno
+ j
))
3173 /* And we don't clobber traceback for noreturn functions. */
3174 if ((regno
+ j
== FRAME_POINTER_REGNUM
3175 || regno
+ j
== HARD_FRAME_POINTER_REGNUM
)
3176 && (! reload_completed
|| frame_pointer_needed
))
3182 if (TEST_HARD_REG_BIT (*reg_set
, regno
+ j
)
3183 || TEST_HARD_REG_BIT (live
, regno
+ j
))
3192 add_to_hard_reg_set (reg_set
, mode
, regno
);
3194 /* Start the next search with the next register. */
3195 if (++raw_regno
>= FIRST_PSEUDO_REGISTER
)
3197 search_ofs
= raw_regno
;
3199 return gen_rtx_REG (mode
, regno
);
3207 /* Forget all currently tracked instructions, only remember current
3211 peep2_reinit_state (regset live
)
3215 /* Indicate that all slots except the last holds invalid data. */
3216 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
; ++i
)
3217 peep2_insn_data
[i
].insn
= NULL_RTX
;
3218 peep2_current_count
= 0;
3220 /* Indicate that the last slot contains live_after data. */
3221 peep2_insn_data
[MAX_INSNS_PER_PEEP2
].insn
= PEEP2_EOB
;
3222 peep2_current
= MAX_INSNS_PER_PEEP2
;
3224 COPY_REG_SET (peep2_insn_data
[MAX_INSNS_PER_PEEP2
].live_before
, live
);
3227 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3228 starting at INSN. Perform the replacement, removing the old insns and
3229 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3230 if the replacement is rejected. */
3233 peep2_attempt (basic_block bb
, rtx insn
, int match_len
, rtx attempt
)
3236 rtx last
, eh_note
, as_note
, before_try
, x
;
3237 rtx old_insn
, new_insn
;
3238 bool was_call
= false;
3240 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3241 match more than one insn, or to be split into more than one insn. */
3242 old_insn
= peep2_insn_data
[peep2_current
].insn
;
3243 if (RTX_FRAME_RELATED_P (old_insn
))
3245 bool any_note
= false;
3251 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3252 may be in the stream for the purpose of register allocation. */
3253 if (active_insn_p (attempt
))
3256 new_insn
= next_active_insn (attempt
);
3257 if (next_active_insn (new_insn
))
3260 /* We have a 1-1 replacement. Copy over any frame-related info. */
3261 RTX_FRAME_RELATED_P (new_insn
) = 1;
3263 /* Allow the backend to fill in a note during the split. */
3264 for (note
= REG_NOTES (new_insn
); note
; note
= XEXP (note
, 1))
3265 switch (REG_NOTE_KIND (note
))
3267 case REG_FRAME_RELATED_EXPR
:
3268 case REG_CFA_DEF_CFA
:
3269 case REG_CFA_ADJUST_CFA
:
3270 case REG_CFA_OFFSET
:
3271 case REG_CFA_REGISTER
:
3272 case REG_CFA_EXPRESSION
:
3273 case REG_CFA_RESTORE
:
3274 case REG_CFA_SET_VDRAP
:
3281 /* If the backend didn't supply a note, copy one over. */
3283 for (note
= REG_NOTES (old_insn
); note
; note
= XEXP (note
, 1))
3284 switch (REG_NOTE_KIND (note
))
3286 case REG_FRAME_RELATED_EXPR
:
3287 case REG_CFA_DEF_CFA
:
3288 case REG_CFA_ADJUST_CFA
:
3289 case REG_CFA_OFFSET
:
3290 case REG_CFA_REGISTER
:
3291 case REG_CFA_EXPRESSION
:
3292 case REG_CFA_RESTORE
:
3293 case REG_CFA_SET_VDRAP
:
3294 add_reg_note (new_insn
, REG_NOTE_KIND (note
), XEXP (note
, 0));
3301 /* If there still isn't a note, make sure the unwind info sees the
3302 same expression as before the split. */
3305 rtx old_set
, new_set
;
3307 /* The old insn had better have been simple, or annotated. */
3308 old_set
= single_set (old_insn
);
3309 gcc_assert (old_set
!= NULL
);
3311 new_set
= single_set (new_insn
);
3312 if (!new_set
|| !rtx_equal_p (new_set
, old_set
))
3313 add_reg_note (new_insn
, REG_FRAME_RELATED_EXPR
, old_set
);
3316 /* Copy prologue/epilogue status. This is required in order to keep
3317 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3318 maybe_copy_prologue_epilogue_insn (old_insn
, new_insn
);
3321 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3322 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3323 cfg-related call notes. */
3324 for (i
= 0; i
<= match_len
; ++i
)
3329 j
= peep2_buf_position (peep2_current
+ i
);
3330 old_insn
= peep2_insn_data
[j
].insn
;
3331 if (!CALL_P (old_insn
))
3336 while (new_insn
!= NULL_RTX
)
3338 if (CALL_P (new_insn
))
3340 new_insn
= NEXT_INSN (new_insn
);
3343 gcc_assert (new_insn
!= NULL_RTX
);
3345 CALL_INSN_FUNCTION_USAGE (new_insn
)
3346 = CALL_INSN_FUNCTION_USAGE (old_insn
);
3348 for (note
= REG_NOTES (old_insn
);
3350 note
= XEXP (note
, 1))
3351 switch (REG_NOTE_KIND (note
))
3356 add_reg_note (new_insn
, REG_NOTE_KIND (note
),
3360 /* Discard all other reg notes. */
3364 /* Croak if there is another call in the sequence. */
3365 while (++i
<= match_len
)
3367 j
= peep2_buf_position (peep2_current
+ i
);
3368 old_insn
= peep2_insn_data
[j
].insn
;
3369 gcc_assert (!CALL_P (old_insn
));
3374 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3375 move those notes over to the new sequence. */
3377 for (i
= match_len
; i
>= 0; --i
)
3379 int j
= peep2_buf_position (peep2_current
+ i
);
3380 old_insn
= peep2_insn_data
[j
].insn
;
3382 as_note
= find_reg_note (old_insn
, REG_ARGS_SIZE
, NULL
);
3387 i
= peep2_buf_position (peep2_current
+ match_len
);
3388 eh_note
= find_reg_note (peep2_insn_data
[i
].insn
, REG_EH_REGION
, NULL_RTX
);
3390 /* Replace the old sequence with the new. */
3391 last
= emit_insn_after_setloc (attempt
,
3392 peep2_insn_data
[i
].insn
,
3393 INSN_LOCATION (peep2_insn_data
[i
].insn
));
3394 before_try
= PREV_INSN (insn
);
3395 delete_insn_chain (insn
, peep2_insn_data
[i
].insn
, false);
3397 /* Re-insert the EH_REGION notes. */
3398 if (eh_note
|| (was_call
&& nonlocal_goto_handler_labels
))
3403 FOR_EACH_EDGE (eh_edge
, ei
, bb
->succs
)
3404 if (eh_edge
->flags
& (EDGE_EH
| EDGE_ABNORMAL_CALL
))
3408 copy_reg_eh_region_note_backward (eh_note
, last
, before_try
);
3411 for (x
= last
; x
!= before_try
; x
= PREV_INSN (x
))
3412 if (x
!= BB_END (bb
)
3413 && (can_throw_internal (x
)
3414 || can_nonlocal_goto (x
)))
3419 nfte
= split_block (bb
, x
);
3420 flags
= (eh_edge
->flags
3421 & (EDGE_EH
| EDGE_ABNORMAL
));
3423 flags
|= EDGE_ABNORMAL_CALL
;
3424 nehe
= make_edge (nfte
->src
, eh_edge
->dest
,
3427 nehe
->probability
= eh_edge
->probability
;
3429 = REG_BR_PROB_BASE
- nehe
->probability
;
3431 peep2_do_cleanup_cfg
|= purge_dead_edges (nfte
->dest
);
3436 /* Converting possibly trapping insn to non-trapping is
3437 possible. Zap dummy outgoing edges. */
3438 peep2_do_cleanup_cfg
|= purge_dead_edges (bb
);
3441 /* Re-insert the ARGS_SIZE notes. */
3443 fixup_args_size_notes (before_try
, last
, INTVAL (XEXP (as_note
, 0)));
3445 /* If we generated a jump instruction, it won't have
3446 JUMP_LABEL set. Recompute after we're done. */
3447 for (x
= last
; x
!= before_try
; x
= PREV_INSN (x
))
3450 peep2_do_rebuild_jump_labels
= true;
3457 /* After performing a replacement in basic block BB, fix up the life
3458 information in our buffer. LAST is the last of the insns that we
3459 emitted as a replacement. PREV is the insn before the start of
3460 the replacement. MATCH_LEN is the number of instructions that were
3461 matched, and which now need to be replaced in the buffer. */
3464 peep2_update_life (basic_block bb
, int match_len
, rtx last
, rtx prev
)
3466 int i
= peep2_buf_position (peep2_current
+ match_len
+ 1);
3470 INIT_REG_SET (&live
);
3471 COPY_REG_SET (&live
, peep2_insn_data
[i
].live_before
);
3473 gcc_assert (peep2_current_count
>= match_len
+ 1);
3474 peep2_current_count
-= match_len
+ 1;
3482 if (peep2_current_count
< MAX_INSNS_PER_PEEP2
)
3484 peep2_current_count
++;
3486 i
= MAX_INSNS_PER_PEEP2
;
3487 peep2_insn_data
[i
].insn
= x
;
3488 df_simulate_one_insn_backwards (bb
, x
, &live
);
3489 COPY_REG_SET (peep2_insn_data
[i
].live_before
, &live
);
3495 CLEAR_REG_SET (&live
);
3500 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3501 Return true if we added it, false otherwise. The caller will try to match
3502 peepholes against the buffer if we return false; otherwise it will try to
3503 add more instructions to the buffer. */
3506 peep2_fill_buffer (basic_block bb
, rtx insn
, regset live
)
3510 /* Once we have filled the maximum number of insns the buffer can hold,
3511 allow the caller to match the insns against peepholes. We wait until
3512 the buffer is full in case the target has similar peepholes of different
3513 length; we always want to match the longest if possible. */
3514 if (peep2_current_count
== MAX_INSNS_PER_PEEP2
)
3517 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3518 any other pattern, lest it change the semantics of the frame info. */
3519 if (RTX_FRAME_RELATED_P (insn
))
3521 /* Let the buffer drain first. */
3522 if (peep2_current_count
> 0)
3524 /* Now the insn will be the only thing in the buffer. */
3527 pos
= peep2_buf_position (peep2_current
+ peep2_current_count
);
3528 peep2_insn_data
[pos
].insn
= insn
;
3529 COPY_REG_SET (peep2_insn_data
[pos
].live_before
, live
);
3530 peep2_current_count
++;
3532 df_simulate_one_insn_forwards (bb
, insn
, live
);
3536 /* Perform the peephole2 optimization pass. */
3539 peephole2_optimize (void)
3546 peep2_do_cleanup_cfg
= false;
3547 peep2_do_rebuild_jump_labels
= false;
3549 df_set_flags (DF_LR_RUN_DCE
);
3550 df_note_add_problem ();
3553 /* Initialize the regsets we're going to use. */
3554 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
3555 peep2_insn_data
[i
].live_before
= BITMAP_ALLOC (®_obstack
);
3557 live
= BITMAP_ALLOC (®_obstack
);
3559 FOR_EACH_BB_REVERSE (bb
)
3561 bool past_end
= false;
3564 rtl_profile_for_bb (bb
);
3566 /* Start up propagation. */
3567 bitmap_copy (live
, DF_LR_IN (bb
));
3568 df_simulate_initialize_forwards (bb
, live
);
3569 peep2_reinit_state (live
);
3571 insn
= BB_HEAD (bb
);
3577 if (!past_end
&& !NONDEBUG_INSN_P (insn
))
3580 insn
= NEXT_INSN (insn
);
3581 if (insn
== NEXT_INSN (BB_END (bb
)))
3585 if (!past_end
&& peep2_fill_buffer (bb
, insn
, live
))
3588 /* If we did not fill an empty buffer, it signals the end of the
3590 if (peep2_current_count
== 0)
3593 /* The buffer filled to the current maximum, so try to match. */
3595 pos
= peep2_buf_position (peep2_current
+ peep2_current_count
);
3596 peep2_insn_data
[pos
].insn
= PEEP2_EOB
;
3597 COPY_REG_SET (peep2_insn_data
[pos
].live_before
, live
);
3599 /* Match the peephole. */
3600 head
= peep2_insn_data
[peep2_current
].insn
;
3601 attempt
= peephole2_insns (PATTERN (head
), head
, &match_len
);
3602 if (attempt
!= NULL
)
3604 rtx last
= peep2_attempt (bb
, head
, match_len
, attempt
);
3607 peep2_update_life (bb
, match_len
, last
, PREV_INSN (attempt
));
3612 /* No match: advance the buffer by one insn. */
3613 peep2_current
= peep2_buf_position (peep2_current
+ 1);
3614 peep2_current_count
--;
3618 default_rtl_profile ();
3619 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
3620 BITMAP_FREE (peep2_insn_data
[i
].live_before
);
3622 if (peep2_do_rebuild_jump_labels
)
3623 rebuild_jump_labels (get_insns ());
3625 #endif /* HAVE_peephole2 */
3627 /* Common predicates for use with define_bypass. */
3629 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3630 data not the address operand(s) of the store. IN_INSN and OUT_INSN
3631 must be either a single_set or a PARALLEL with SETs inside. */
3634 store_data_bypass_p (rtx out_insn
, rtx in_insn
)
3636 rtx out_set
, in_set
;
3637 rtx out_pat
, in_pat
;
3638 rtx out_exp
, in_exp
;
3641 in_set
= single_set (in_insn
);
3644 if (!MEM_P (SET_DEST (in_set
)))
3647 out_set
= single_set (out_insn
);
3650 if (reg_mentioned_p (SET_DEST (out_set
), SET_DEST (in_set
)))
3655 out_pat
= PATTERN (out_insn
);
3657 if (GET_CODE (out_pat
) != PARALLEL
)
3660 for (i
= 0; i
< XVECLEN (out_pat
, 0); i
++)
3662 out_exp
= XVECEXP (out_pat
, 0, i
);
3664 if (GET_CODE (out_exp
) == CLOBBER
)
3667 gcc_assert (GET_CODE (out_exp
) == SET
);
3669 if (reg_mentioned_p (SET_DEST (out_exp
), SET_DEST (in_set
)))
3676 in_pat
= PATTERN (in_insn
);
3677 gcc_assert (GET_CODE (in_pat
) == PARALLEL
);
3679 for (i
= 0; i
< XVECLEN (in_pat
, 0); i
++)
3681 in_exp
= XVECEXP (in_pat
, 0, i
);
3683 if (GET_CODE (in_exp
) == CLOBBER
)
3686 gcc_assert (GET_CODE (in_exp
) == SET
);
3688 if (!MEM_P (SET_DEST (in_exp
)))
3691 out_set
= single_set (out_insn
);
3694 if (reg_mentioned_p (SET_DEST (out_set
), SET_DEST (in_exp
)))
3699 out_pat
= PATTERN (out_insn
);
3700 gcc_assert (GET_CODE (out_pat
) == PARALLEL
);
3702 for (j
= 0; j
< XVECLEN (out_pat
, 0); j
++)
3704 out_exp
= XVECEXP (out_pat
, 0, j
);
3706 if (GET_CODE (out_exp
) == CLOBBER
)
3709 gcc_assert (GET_CODE (out_exp
) == SET
);
3711 if (reg_mentioned_p (SET_DEST (out_exp
), SET_DEST (in_exp
)))
3721 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3722 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3723 or multiple set; IN_INSN should be single_set for truth, but for convenience
3724 of insn categorization may be any JUMP or CALL insn. */
3727 if_test_bypass_p (rtx out_insn
, rtx in_insn
)
3729 rtx out_set
, in_set
;
3731 in_set
= single_set (in_insn
);
3734 gcc_assert (JUMP_P (in_insn
) || CALL_P (in_insn
));
3738 if (GET_CODE (SET_SRC (in_set
)) != IF_THEN_ELSE
)
3740 in_set
= SET_SRC (in_set
);
3742 out_set
= single_set (out_insn
);
3745 if (reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 1))
3746 || reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 2)))
3754 out_pat
= PATTERN (out_insn
);
3755 gcc_assert (GET_CODE (out_pat
) == PARALLEL
);
3757 for (i
= 0; i
< XVECLEN (out_pat
, 0); i
++)
3759 rtx exp
= XVECEXP (out_pat
, 0, i
);
3761 if (GET_CODE (exp
) == CLOBBER
)
3764 gcc_assert (GET_CODE (exp
) == SET
);
3766 if (reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 1))
3767 || reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 2)))
3776 gate_handle_peephole2 (void)
3778 return (optimize
> 0 && flag_peephole2
);
/* Execute callback: run the peephole2 pass if the target defines it.  */
static unsigned int
rest_of_handle_peephole2 (void)
{
#ifdef HAVE_peephole2
  peephole2_optimize ();
#endif
  return 0;
}
3792 const pass_data pass_data_peephole2
=
3794 RTL_PASS
, /* type */
3795 "peephole2", /* name */
3796 OPTGROUP_NONE
, /* optinfo_flags */
3797 true, /* has_gate */
3798 true, /* has_execute */
3799 TV_PEEPHOLE2
, /* tv_id */
3800 0, /* properties_required */
3801 0, /* properties_provided */
3802 0, /* properties_destroyed */
3803 0, /* todo_flags_start */
3804 ( TODO_df_finish
| TODO_verify_rtl_sharing
| 0 ), /* todo_flags_finish */
3807 class pass_peephole2
: public rtl_opt_pass
3810 pass_peephole2 (gcc::context
*ctxt
)
3811 : rtl_opt_pass (pass_data_peephole2
, ctxt
)
3814 /* opt_pass methods: */
3815 /* The epiphany backend creates a second instance of this pass, so we need
3817 opt_pass
* clone () { return new pass_peephole2 (m_ctxt
); }
3818 bool gate () { return gate_handle_peephole2 (); }
3819 unsigned int execute () { return rest_of_handle_peephole2 (); }
3821 }; // class pass_peephole2
3826 make_pass_peephole2 (gcc::context
*ctxt
)
3828 return new pass_peephole2 (ctxt
);
3832 rest_of_handle_split_all_insns (void)
3840 const pass_data pass_data_split_all_insns
=
3842 RTL_PASS
, /* type */
3843 "split1", /* name */
3844 OPTGROUP_NONE
, /* optinfo_flags */
3845 false, /* has_gate */
3846 true, /* has_execute */
3847 TV_NONE
, /* tv_id */
3848 0, /* properties_required */
3849 0, /* properties_provided */
3850 0, /* properties_destroyed */
3851 0, /* todo_flags_start */
3852 0, /* todo_flags_finish */
3855 class pass_split_all_insns
: public rtl_opt_pass
3858 pass_split_all_insns (gcc::context
*ctxt
)
3859 : rtl_opt_pass (pass_data_split_all_insns
, ctxt
)
3862 /* opt_pass methods: */
3863 /* The epiphany backend creates a second instance of this pass, so
3864 we need a clone method. */
3865 opt_pass
* clone () { return new pass_split_all_insns (m_ctxt
); }
3866 unsigned int execute () { return rest_of_handle_split_all_insns (); }
3868 }; // class pass_split_all_insns
3873 make_pass_split_all_insns (gcc::context
*ctxt
)
3875 return new pass_split_all_insns (ctxt
);
3879 rest_of_handle_split_after_reload (void)
3881 /* If optimizing, then go ahead and split insns now. */
3891 const pass_data pass_data_split_after_reload
=
3893 RTL_PASS
, /* type */
3894 "split2", /* name */
3895 OPTGROUP_NONE
, /* optinfo_flags */
3896 false, /* has_gate */
3897 true, /* has_execute */
3898 TV_NONE
, /* tv_id */
3899 0, /* properties_required */
3900 0, /* properties_provided */
3901 0, /* properties_destroyed */
3902 0, /* todo_flags_start */
3903 0, /* todo_flags_finish */
3906 class pass_split_after_reload
: public rtl_opt_pass
3909 pass_split_after_reload (gcc::context
*ctxt
)
3910 : rtl_opt_pass (pass_data_split_after_reload
, ctxt
)
3913 /* opt_pass methods: */
3914 unsigned int execute () { return rest_of_handle_split_after_reload (); }
3916 }; // class pass_split_after_reload
3921 make_pass_split_after_reload (gcc::context
*ctxt
)
3923 return new pass_split_after_reload (ctxt
);
/* Gate for the split3 pass (before reg-stack conversion).  */
static bool
gate_handle_split_before_regstack (void)
{
#if HAVE_ATTR_length && defined (STACK_REGS)
  /* If flow2 creates new instructions which need splitting
     and scheduling after reload is not done, they might not be
     split until final which doesn't allow splitting
     if HAVE_ATTR_length.  */
# ifdef INSN_SCHEDULING
  return (optimize && !flag_schedule_insns_after_reload);
# else
  return (optimize);
# endif
#else
  return 0;
#endif
}
3945 rest_of_handle_split_before_regstack (void)
3953 const pass_data pass_data_split_before_regstack
=
3955 RTL_PASS
, /* type */
3956 "split3", /* name */
3957 OPTGROUP_NONE
, /* optinfo_flags */
3958 true, /* has_gate */
3959 true, /* has_execute */
3960 TV_NONE
, /* tv_id */
3961 0, /* properties_required */
3962 0, /* properties_provided */
3963 0, /* properties_destroyed */
3964 0, /* todo_flags_start */
3965 0, /* todo_flags_finish */
3968 class pass_split_before_regstack
: public rtl_opt_pass
3971 pass_split_before_regstack (gcc::context
*ctxt
)
3972 : rtl_opt_pass (pass_data_split_before_regstack
, ctxt
)
3975 /* opt_pass methods: */
3976 bool gate () { return gate_handle_split_before_regstack (); }
3977 unsigned int execute () {
3978 return rest_of_handle_split_before_regstack ();
3981 }; // class pass_split_before_regstack
3986 make_pass_split_before_regstack (gcc::context
*ctxt
)
3988 return new pass_split_before_regstack (ctxt
);
/* Gate for the split4 pass (before the second scheduling pass).  */
static bool
gate_handle_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  return optimize > 0 && flag_schedule_insns_after_reload;
#else
  return 0;
#endif
}
/* Execute callback for the split4 pass.  */
static unsigned int
rest_of_handle_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  split_all_insns ();
#endif
  return 0;
}
4012 const pass_data pass_data_split_before_sched2
=
4014 RTL_PASS
, /* type */
4015 "split4", /* name */
4016 OPTGROUP_NONE
, /* optinfo_flags */
4017 true, /* has_gate */
4018 true, /* has_execute */
4019 TV_NONE
, /* tv_id */
4020 0, /* properties_required */
4021 0, /* properties_provided */
4022 0, /* properties_destroyed */
4023 0, /* todo_flags_start */
4024 TODO_verify_flow
, /* todo_flags_finish */
4027 class pass_split_before_sched2
: public rtl_opt_pass
4030 pass_split_before_sched2 (gcc::context
*ctxt
)
4031 : rtl_opt_pass (pass_data_split_before_sched2
, ctxt
)
4034 /* opt_pass methods: */
4035 bool gate () { return gate_handle_split_before_sched2 (); }
4036 unsigned int execute () { return rest_of_handle_split_before_sched2 (); }
4038 }; // class pass_split_before_sched2
4043 make_pass_split_before_sched2 (gcc::context
*ctxt
)
4045 return new pass_split_before_sched2 (ctxt
);
/* The placement of the splitting that we do for shorten_branches
   depends on whether regstack is used by the target or not.  */
static bool
gate_do_final_split (void)
{
#if HAVE_ATTR_length && !defined (STACK_REGS)
  return 1;
#else
  return 0;
#endif
}
4062 const pass_data pass_data_split_for_shorten_branches
=
4064 RTL_PASS
, /* type */
4065 "split5", /* name */
4066 OPTGROUP_NONE
, /* optinfo_flags */
4067 true, /* has_gate */
4068 true, /* has_execute */
4069 TV_NONE
, /* tv_id */
4070 0, /* properties_required */
4071 0, /* properties_provided */
4072 0, /* properties_destroyed */
4073 0, /* todo_flags_start */
4074 TODO_verify_rtl_sharing
, /* todo_flags_finish */
4077 class pass_split_for_shorten_branches
: public rtl_opt_pass
4080 pass_split_for_shorten_branches (gcc::context
*ctxt
)
4081 : rtl_opt_pass (pass_data_split_for_shorten_branches
, ctxt
)
4084 /* opt_pass methods: */
4085 bool gate () { return gate_do_final_split (); }
4086 unsigned int execute () { return split_all_insns_noflow (); }
4088 }; // class pass_split_for_shorten_branches
4093 make_pass_split_for_shorten_branches (gcc::context
*ctxt
)
4095 return new pass_split_for_shorten_branches (ctxt
);