1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.  */
26 #include "coretypes.h"
30 #include "insn-config.h"
31 #include "insn-attr.h"
32 #include "hard-reg-set.h"
35 #include "addresses.h"
41 #include "basic-block.h"
45 #include "tree-pass.h"
/* Default the stack-push addressing code from the stack growth
   direction unless the target already defined it.  */
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Likewise for the stack-pop addressing code: popping moves the
   stack pointer opposite to pushing.  */
#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
64 static void validate_replace_rtx_1 (rtx
*, rtx
, rtx
, rtx
);
65 static void validate_replace_src_1 (rtx
*, void *);
66 static rtx
split_insn (rtx
);
68 /* Nonzero means allow operands to be volatile.
69 This should be 0 if you are generating rtl, such as if you are calling
70 the functions in optabs.c and expmed.c (most of the time).
71 This should be 1 if all valid insns need to be recognized,
72 such as in regclass.c and final.c and reload.c.
74 init_recog and init_recog_no_volatile are responsible for setting this. */
78 struct recog_data recog_data
;
80 /* Contains a vector of operand_alternative structures for every operand.
81 Set up by preprocess_constraints. */
82 struct operand_alternative recog_op_alt
[MAX_RECOG_OPERANDS
][MAX_RECOG_ALTERNATIVES
];
84 /* On return from `constrain_operands', indicate which alternative
87 int which_alternative
;
89 /* Nonzero after end of reload pass.
90 Set to 1 or 0 by toplev.c.
91 Controls the significance of (SUBREG (MEM)). */
95 /* Nonzero after thread_prologue_and_epilogue_insns has run. */
96 int epilogue_completed
;
98 /* Initialize data used by the function `recog'.
99 This must be called once in the compilation of a function
100 before any insn recognition may be done in the function. */
103 init_recog_no_volatile (void)
115 /* Check that X is an insn-body for an `asm' with operands
116 and that the operands mentioned in it are legitimate. */
119 check_asm_operands (rtx x
)
123 const char **constraints
;
126 /* Post-reload, be more strict with things. */
127 if (reload_completed
)
129 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
130 extract_insn (make_insn_raw (x
));
131 constrain_operands (1);
132 return which_alternative
>= 0;
135 noperands
= asm_noperands (x
);
141 operands
= alloca (noperands
* sizeof (rtx
));
142 constraints
= alloca (noperands
* sizeof (char *));
144 decode_asm_operands (x
, operands
, NULL
, constraints
, NULL
, NULL
);
146 for (i
= 0; i
< noperands
; i
++)
148 const char *c
= constraints
[i
];
151 if (ISDIGIT ((unsigned char) c
[0]) && c
[1] == '\0')
152 c
= constraints
[c
[0] - '0'];
154 if (! asm_operand_ok (operands
[i
], c
))
161 /* Static data for the next two routines. */
163 typedef struct change_t
171 static change_t
*changes
;
172 static int changes_allocated
;
174 static int num_changes
= 0;
176 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
177 at which NEW will be placed. If OBJECT is zero, no validation is done,
178 the change is simply made.
180 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
181 will be called with the address and mode as parameters. If OBJECT is
182 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
185 IN_GROUP is nonzero if this is part of a group of changes that must be
186 performed as a group. In that case, the changes will be stored. The
187 function `apply_change_group' will validate and apply the changes.
189 If IN_GROUP is zero, this is a single change. Try to recognize the insn
190 or validate the memory reference with the change applied. If the result
191 is not valid for the machine, suppress the change and return zero.
192 Otherwise, perform the change and return 1. */
195 validate_change (rtx object
, rtx
*loc
, rtx
new, int in_group
)
199 if (old
== new || rtx_equal_p (old
, new))
202 gcc_assert (in_group
!= 0 || num_changes
== 0);
206 /* Save the information describing this change. */
207 if (num_changes
>= changes_allocated
)
209 if (changes_allocated
== 0)
210 /* This value allows for repeated substitutions inside complex
211 indexed addresses, or changes in up to 5 insns. */
212 changes_allocated
= MAX_RECOG_OPERANDS
* 5;
214 changes_allocated
*= 2;
216 changes
= xrealloc (changes
, sizeof (change_t
) * changes_allocated
);
219 changes
[num_changes
].object
= object
;
220 changes
[num_changes
].loc
= loc
;
221 changes
[num_changes
].old
= old
;
223 if (object
&& !MEM_P (object
))
225 /* Set INSN_CODE to force rerecognition of insn. Save old code in
227 changes
[num_changes
].old_code
= INSN_CODE (object
);
228 INSN_CODE (object
) = -1;
233 /* If we are making a group of changes, return 1. Otherwise, validate the
234 change group we made. */
239 return apply_change_group ();
242 /* Keep X canonicalized if some changes have made it non-canonical; only
243 modifies the operands of X, not (for example) its code. Simplifications
244 are not the job of this routine.
246 Return true if anything was changed. */
248 canonicalize_change_group (rtx insn
, rtx x
)
250 if (COMMUTATIVE_P (x
)
251 && swap_commutative_operands_p (XEXP (x
, 0), XEXP (x
, 1)))
253 /* Oops, the caller has made X no longer canonical.
254 Let's redo the changes in the correct order. */
255 rtx tem
= XEXP (x
, 0);
256 validate_change (insn
, &XEXP (x
, 0), XEXP (x
, 1), 1);
257 validate_change (insn
, &XEXP (x
, 1), tem
, 1);
265 /* This subroutine of apply_change_group verifies whether the changes to INSN
266 were valid; i.e. whether INSN can still be recognized. */
269 insn_invalid_p (rtx insn
)
271 rtx pat
= PATTERN (insn
);
272 int num_clobbers
= 0;
273 /* If we are before reload and the pattern is a SET, see if we can add
275 int icode
= recog (pat
, insn
,
276 (GET_CODE (pat
) == SET
277 && ! reload_completed
&& ! reload_in_progress
)
278 ? &num_clobbers
: 0);
279 int is_asm
= icode
< 0 && asm_noperands (PATTERN (insn
)) >= 0;
282 /* If this is an asm and the operand aren't legal, then fail. Likewise if
283 this is not an asm and the insn wasn't recognized. */
284 if ((is_asm
&& ! check_asm_operands (PATTERN (insn
)))
285 || (!is_asm
&& icode
< 0))
288 /* If we have to add CLOBBERs, fail if we have to add ones that reference
289 hard registers since our callers can't know if they are live or not.
290 Otherwise, add them. */
291 if (num_clobbers
> 0)
295 if (added_clobbers_hard_reg_p (icode
))
298 newpat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (num_clobbers
+ 1));
299 XVECEXP (newpat
, 0, 0) = pat
;
300 add_clobbers (newpat
, icode
);
301 PATTERN (insn
) = pat
= newpat
;
304 /* After reload, verify that all constraints are satisfied. */
305 if (reload_completed
)
309 if (! constrain_operands (1))
313 INSN_CODE (insn
) = icode
;
317 /* Return number of changes made and not validated yet. */
319 num_changes_pending (void)
324 /* Tentatively apply the changes numbered NUM and up.
325 Return 1 if all changes are valid, zero otherwise. */
328 verify_changes (int num
)
331 rtx last_validated
= NULL_RTX
;
333 /* The changes have been applied and all INSN_CODEs have been reset to force
336 The changes are valid if we aren't given an object, or if we are
337 given a MEM and it still is a valid address, or if this is in insn
338 and it is recognized. In the latter case, if reload has completed,
339 we also require that the operands meet the constraints for
342 for (i
= num
; i
< num_changes
; i
++)
344 rtx object
= changes
[i
].object
;
346 /* If there is no object to test or if it is the same as the one we
347 already tested, ignore it. */
348 if (object
== 0 || object
== last_validated
)
353 if (! memory_address_p (GET_MODE (object
), XEXP (object
, 0)))
356 else if (insn_invalid_p (object
))
358 rtx pat
= PATTERN (object
);
360 /* Perhaps we couldn't recognize the insn because there were
361 extra CLOBBERs at the end. If so, try to re-recognize
362 without the last CLOBBER (later iterations will cause each of
363 them to be eliminated, in turn). But don't do this if we
364 have an ASM_OPERAND. */
365 if (GET_CODE (pat
) == PARALLEL
366 && GET_CODE (XVECEXP (pat
, 0, XVECLEN (pat
, 0) - 1)) == CLOBBER
367 && asm_noperands (PATTERN (object
)) < 0)
371 if (XVECLEN (pat
, 0) == 2)
372 newpat
= XVECEXP (pat
, 0, 0);
378 = gen_rtx_PARALLEL (VOIDmode
,
379 rtvec_alloc (XVECLEN (pat
, 0) - 1));
380 for (j
= 0; j
< XVECLEN (newpat
, 0); j
++)
381 XVECEXP (newpat
, 0, j
) = XVECEXP (pat
, 0, j
);
384 /* Add a new change to this group to replace the pattern
385 with this new pattern. Then consider this change
386 as having succeeded. The change we added will
387 cause the entire call to fail if things remain invalid.
389 Note that this can lose if a later change than the one
390 we are processing specified &XVECEXP (PATTERN (object), 0, X)
391 but this shouldn't occur. */
393 validate_change (object
, &PATTERN (object
), newpat
, 1);
396 else if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
)
397 /* If this insn is a CLOBBER or USE, it is always valid, but is
403 last_validated
= object
;
406 return (i
== num_changes
);
409 /* A group of changes has previously been issued with validate_change
410 and verified with verify_changes. Call df_insn_rescan for each of
411 the insn changed and clear num_changes. */
414 confirm_change_group (void)
418 for (i
= 0; i
< num_changes
; i
++)
420 rtx object
= changes
[i
].object
;
421 if (object
&& INSN_P (object
))
422 df_insn_rescan (object
);
/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */
int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
448 /* Return the number of changes so far in the current group. */
451 num_validated_changes (void)
456 /* Retract the changes numbered NUM and up. */
459 cancel_changes (int num
)
463 /* Back out all the changes. Do this in the opposite order in which
465 for (i
= num_changes
- 1; i
>= num
; i
--)
467 *changes
[i
].loc
= changes
[i
].old
;
468 if (changes
[i
].object
&& !MEM_P (changes
[i
].object
))
469 INSN_CODE (changes
[i
].object
) = changes
[i
].old_code
;
474 /* Replace every occurrence of FROM in X with TO. Mark each change with
475 validate_change passing OBJECT. */
478 validate_replace_rtx_1 (rtx
*loc
, rtx from
, rtx to
, rtx object
)
484 enum machine_mode op0_mode
= VOIDmode
;
485 int prev_changes
= num_changes
;
492 fmt
= GET_RTX_FORMAT (code
);
494 op0_mode
= GET_MODE (XEXP (x
, 0));
496 /* X matches FROM if it is the same rtx or they are both referring to the
497 same register in the same mode. Avoid calling rtx_equal_p unless the
498 operands look similar. */
501 || (REG_P (x
) && REG_P (from
)
502 && GET_MODE (x
) == GET_MODE (from
)
503 && REGNO (x
) == REGNO (from
))
504 || (GET_CODE (x
) == GET_CODE (from
) && GET_MODE (x
) == GET_MODE (from
)
505 && rtx_equal_p (x
, from
)))
507 validate_change (object
, loc
, to
, 1);
511 /* Call ourself recursively to perform the replacements.
512 We must not replace inside already replaced expression, otherwise we
513 get infinite recursion for replacements like (reg X)->(subreg (reg X))
514 done by regmove, so we must special case shared ASM_OPERANDS. */
516 if (GET_CODE (x
) == PARALLEL
)
518 for (j
= XVECLEN (x
, 0) - 1; j
>= 0; j
--)
520 if (j
&& GET_CODE (XVECEXP (x
, 0, j
)) == SET
521 && GET_CODE (SET_SRC (XVECEXP (x
, 0, j
))) == ASM_OPERANDS
)
523 /* Verify that operands are really shared. */
524 gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x
, 0, 0)))
525 == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
527 validate_replace_rtx_1 (&SET_DEST (XVECEXP (x
, 0, j
)),
531 validate_replace_rtx_1 (&XVECEXP (x
, 0, j
), from
, to
, object
);
535 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
538 validate_replace_rtx_1 (&XEXP (x
, i
), from
, to
, object
);
539 else if (fmt
[i
] == 'E')
540 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
541 validate_replace_rtx_1 (&XVECEXP (x
, i
, j
), from
, to
, object
);
544 /* If we didn't substitute, there is nothing more to do. */
545 if (num_changes
== prev_changes
)
548 /* Allow substituted expression to have different mode. This is used by
549 regmove to change mode of pseudo register. */
550 if (fmt
[0] == 'e' && GET_MODE (XEXP (x
, 0)) != VOIDmode
)
551 op0_mode
= GET_MODE (XEXP (x
, 0));
553 /* Do changes needed to keep rtx consistent. Don't do any other
554 simplifications, as it is not our job. */
556 if (SWAPPABLE_OPERANDS_P (x
)
557 && swap_commutative_operands_p (XEXP (x
, 0), XEXP (x
, 1)))
559 validate_change (object
, loc
,
560 gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x
) ? code
561 : swap_condition (code
),
562 GET_MODE (x
), XEXP (x
, 1),
571 /* If we have a PLUS whose second operand is now a CONST_INT, use
572 simplify_gen_binary to try to simplify it.
573 ??? We may want later to remove this, once simplification is
574 separated from this function. */
575 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
&& XEXP (x
, 1) == to
)
576 validate_change (object
, loc
,
578 (PLUS
, GET_MODE (x
), XEXP (x
, 0), XEXP (x
, 1)), 1);
581 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
582 || GET_CODE (XEXP (x
, 1)) == CONST_DOUBLE
)
583 validate_change (object
, loc
,
585 (PLUS
, GET_MODE (x
), XEXP (x
, 0),
586 simplify_gen_unary (NEG
,
587 GET_MODE (x
), XEXP (x
, 1),
592 if (GET_MODE (XEXP (x
, 0)) == VOIDmode
)
594 new = simplify_gen_unary (code
, GET_MODE (x
), XEXP (x
, 0),
596 /* If any of the above failed, substitute in something that
597 we know won't be recognized. */
599 new = gen_rtx_CLOBBER (GET_MODE (x
), const0_rtx
);
600 validate_change (object
, loc
, new, 1);
604 /* All subregs possible to simplify should be simplified. */
605 new = simplify_subreg (GET_MODE (x
), SUBREG_REG (x
), op0_mode
,
608 /* Subregs of VOIDmode operands are incorrect. */
609 if (!new && GET_MODE (SUBREG_REG (x
)) == VOIDmode
)
610 new = gen_rtx_CLOBBER (GET_MODE (x
), const0_rtx
);
612 validate_change (object
, loc
, new, 1);
616 /* If we are replacing a register with memory, try to change the memory
617 to be the mode required for memory in extract operations (this isn't
618 likely to be an insertion operation; if it was, nothing bad will
619 happen, we might just fail in some cases). */
621 if (MEM_P (XEXP (x
, 0))
622 && GET_CODE (XEXP (x
, 1)) == CONST_INT
623 && GET_CODE (XEXP (x
, 2)) == CONST_INT
624 && !mode_dependent_address_p (XEXP (XEXP (x
, 0), 0))
625 && !MEM_VOLATILE_P (XEXP (x
, 0)))
627 enum machine_mode wanted_mode
= VOIDmode
;
628 enum machine_mode is_mode
= GET_MODE (XEXP (x
, 0));
629 int pos
= INTVAL (XEXP (x
, 2));
631 if (GET_CODE (x
) == ZERO_EXTRACT
)
633 enum machine_mode new_mode
634 = mode_for_extraction (EP_extzv
, 1);
635 if (new_mode
!= MAX_MACHINE_MODE
)
636 wanted_mode
= new_mode
;
638 else if (GET_CODE (x
) == SIGN_EXTRACT
)
640 enum machine_mode new_mode
641 = mode_for_extraction (EP_extv
, 1);
642 if (new_mode
!= MAX_MACHINE_MODE
)
643 wanted_mode
= new_mode
;
646 /* If we have a narrower mode, we can do something. */
647 if (wanted_mode
!= VOIDmode
648 && GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
650 int offset
= pos
/ BITS_PER_UNIT
;
653 /* If the bytes and bits are counted differently, we
654 must adjust the offset. */
655 if (BYTES_BIG_ENDIAN
!= BITS_BIG_ENDIAN
)
657 (GET_MODE_SIZE (is_mode
) - GET_MODE_SIZE (wanted_mode
) -
660 pos
%= GET_MODE_BITSIZE (wanted_mode
);
662 newmem
= adjust_address_nv (XEXP (x
, 0), wanted_mode
, offset
);
664 validate_change (object
, &XEXP (x
, 2), GEN_INT (pos
), 1);
665 validate_change (object
, &XEXP (x
, 0), newmem
, 1);
676 /* Try replacing every occurrence of FROM in INSN with TO. After all
677 changes have been made, validate by seeing if INSN is still valid. */
680 validate_replace_rtx (rtx from
, rtx to
, rtx insn
)
682 validate_replace_rtx_1 (&PATTERN (insn
), from
, to
, insn
);
683 return apply_change_group ();
686 /* Try replacing every occurrence of FROM in INSN with TO. */
689 validate_replace_rtx_group (rtx from
, rtx to
, rtx insn
)
691 validate_replace_rtx_1 (&PATTERN (insn
), from
, to
, insn
);
694 /* Function called by note_uses to replace used subexpressions. */
695 struct validate_replace_src_data
697 rtx from
; /* Old RTX */
698 rtx to
; /* New RTX */
699 rtx insn
; /* Insn in which substitution is occurring. */
703 validate_replace_src_1 (rtx
*x
, void *data
)
705 struct validate_replace_src_data
*d
706 = (struct validate_replace_src_data
*) data
;
708 validate_replace_rtx_1 (x
, d
->from
, d
->to
, d
->insn
);
711 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
715 validate_replace_src_group (rtx from
, rtx to
, rtx insn
)
717 struct validate_replace_src_data d
;
722 note_uses (&PATTERN (insn
), validate_replace_src_1
, &d
);
725 /* Try simplify INSN.
726 Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
727 pattern and return true if something was simplified. */
730 validate_simplify_insn (rtx insn
)
736 pat
= PATTERN (insn
);
738 if (GET_CODE (pat
) == SET
)
740 newpat
= simplify_rtx (SET_SRC (pat
));
741 if (newpat
&& !rtx_equal_p (SET_SRC (pat
), newpat
))
742 validate_change (insn
, &SET_SRC (pat
), newpat
, 1);
743 newpat
= simplify_rtx (SET_DEST (pat
));
744 if (newpat
&& !rtx_equal_p (SET_DEST (pat
), newpat
))
745 validate_change (insn
, &SET_DEST (pat
), newpat
, 1);
747 else if (GET_CODE (pat
) == PARALLEL
)
748 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
750 rtx s
= XVECEXP (pat
, 0, i
);
752 if (GET_CODE (XVECEXP (pat
, 0, i
)) == SET
)
754 newpat
= simplify_rtx (SET_SRC (s
));
755 if (newpat
&& !rtx_equal_p (SET_SRC (s
), newpat
))
756 validate_change (insn
, &SET_SRC (s
), newpat
, 1);
757 newpat
= simplify_rtx (SET_DEST (s
));
758 if (newpat
&& !rtx_equal_p (SET_DEST (s
), newpat
))
759 validate_change (insn
, &SET_DEST (s
), newpat
, 1);
762 return ((num_changes_pending () > 0) && (apply_change_group () > 0));
766 /* Return 1 if the insn using CC0 set by INSN does not contain
767 any ordered tests applied to the condition codes.
768 EQ and NE tests do not count. */
771 next_insn_tests_no_inequality (rtx insn
)
773 rtx next
= next_cc0_user (insn
);
775 /* If there is no next insn, we have to take the conservative choice. */
779 return (INSN_P (next
)
780 && ! inequality_comparisons_p (PATTERN (next
)));
784 /* Return 1 if OP is a valid general operand for machine mode MODE.
785 This is either a register reference, a memory reference,
786 or a constant. In the case of a memory reference, the address
787 is checked for general validity for the target machine.
789 Register and memory references must have mode MODE in order to be valid,
790 but some constants have no machine mode and are valid for any mode.
792 If MODE is VOIDmode, OP is checked for validity for whatever mode
795 The main use of this function is as a predicate in match_operand
796 expressions in the machine description.
798 For an explanation of this function's behavior for registers of
799 class NO_REGS, see the comment for `register_operand'. */
802 general_operand (rtx op
, enum machine_mode mode
)
804 enum rtx_code code
= GET_CODE (op
);
806 if (mode
== VOIDmode
)
807 mode
= GET_MODE (op
);
809 /* Don't accept CONST_INT or anything similar
810 if the caller wants something floating. */
811 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
812 && GET_MODE_CLASS (mode
) != MODE_INT
813 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
816 if (GET_CODE (op
) == CONST_INT
818 && trunc_int_for_mode (INTVAL (op
), mode
) != INTVAL (op
))
822 return ((GET_MODE (op
) == VOIDmode
|| GET_MODE (op
) == mode
824 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
825 && LEGITIMATE_CONSTANT_P (op
));
827 /* Except for certain constants with VOIDmode, already checked for,
828 OP's mode must match MODE if MODE specifies a mode. */
830 if (GET_MODE (op
) != mode
)
835 rtx sub
= SUBREG_REG (op
);
837 #ifdef INSN_SCHEDULING
838 /* On machines that have insn scheduling, we want all memory
839 reference to be explicit, so outlaw paradoxical SUBREGs.
840 However, we must allow them after reload so that they can
841 get cleaned up by cleanup_subreg_operands. */
842 if (!reload_completed
&& MEM_P (sub
)
843 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (GET_MODE (sub
)))
846 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
847 may result in incorrect reference. We should simplify all valid
848 subregs of MEM anyway. But allow this after reload because we
849 might be called from cleanup_subreg_operands.
851 ??? This is a kludge. */
852 if (!reload_completed
&& SUBREG_BYTE (op
) != 0
856 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
857 create such rtl, and we must reject it. */
858 if (SCALAR_FLOAT_MODE_P (GET_MODE (op
))
859 && GET_MODE_SIZE (GET_MODE (op
)) > GET_MODE_SIZE (GET_MODE (sub
)))
863 code
= GET_CODE (op
);
867 /* A register whose class is NO_REGS is not a general operand. */
868 return (REGNO (op
) >= FIRST_PSEUDO_REGISTER
869 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
);
873 rtx y
= XEXP (op
, 0);
875 if (! volatile_ok
&& MEM_VOLATILE_P (op
))
878 /* Use the mem's mode, since it will be reloaded thus. */
879 if (memory_address_p (GET_MODE (op
), y
))
886 /* Return 1 if OP is a valid memory address for a memory reference
889 The main use of this function is as a predicate in match_operand
890 expressions in the machine description. */
893 address_operand (rtx op
, enum machine_mode mode
)
895 return memory_address_p (mode
, op
);
898 /* Return 1 if OP is a register reference of mode MODE.
899 If MODE is VOIDmode, accept a register in any mode.
901 The main use of this function is as a predicate in match_operand
902 expressions in the machine description.
904 As a special exception, registers whose class is NO_REGS are
905 not accepted by `register_operand'. The reason for this change
906 is to allow the representation of special architecture artifacts
907 (such as a condition code register) without extending the rtl
908 definitions. Since registers of class NO_REGS cannot be used
909 as registers in any case where register classes are examined,
910 it is most consistent to keep this function from accepting them. */
913 register_operand (rtx op
, enum machine_mode mode
)
915 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
918 if (GET_CODE (op
) == SUBREG
)
920 rtx sub
= SUBREG_REG (op
);
922 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
923 because it is guaranteed to be reloaded into one.
924 Just make sure the MEM is valid in itself.
925 (Ideally, (SUBREG (MEM)...) should not exist after reload,
926 but currently it does result from (SUBREG (REG)...) where the
927 reg went on the stack.) */
928 if (! reload_completed
&& MEM_P (sub
))
929 return general_operand (op
, mode
);
931 #ifdef CANNOT_CHANGE_MODE_CLASS
933 && REGNO (sub
) < FIRST_PSEUDO_REGISTER
934 && REG_CANNOT_CHANGE_MODE_P (REGNO (sub
), GET_MODE (sub
), mode
)
935 && GET_MODE_CLASS (GET_MODE (sub
)) != MODE_COMPLEX_INT
936 && GET_MODE_CLASS (GET_MODE (sub
)) != MODE_COMPLEX_FLOAT
)
940 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
941 create such rtl, and we must reject it. */
942 if (SCALAR_FLOAT_MODE_P (GET_MODE (op
))
943 && GET_MODE_SIZE (GET_MODE (op
)) > GET_MODE_SIZE (GET_MODE (sub
)))
949 /* We don't consider registers whose class is NO_REGS
950 to be a register operand. */
952 && (REGNO (op
) >= FIRST_PSEUDO_REGISTER
953 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
));
956 /* Return 1 for a register in Pmode; ignore the tested mode. */
959 pmode_register_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
961 return register_operand (op
, Pmode
);
964 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
965 or a hard register. */
968 scratch_operand (rtx op
, enum machine_mode mode
)
970 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
973 return (GET_CODE (op
) == SCRATCH
975 && REGNO (op
) < FIRST_PSEUDO_REGISTER
));
978 /* Return 1 if OP is a valid immediate operand for mode MODE.
980 The main use of this function is as a predicate in match_operand
981 expressions in the machine description. */
984 immediate_operand (rtx op
, enum machine_mode mode
)
986 /* Don't accept CONST_INT or anything similar
987 if the caller wants something floating. */
988 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
989 && GET_MODE_CLASS (mode
) != MODE_INT
990 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
993 if (GET_CODE (op
) == CONST_INT
995 && trunc_int_for_mode (INTVAL (op
), mode
) != INTVAL (op
))
998 return (CONSTANT_P (op
)
999 && (GET_MODE (op
) == mode
|| mode
== VOIDmode
1000 || GET_MODE (op
) == VOIDmode
)
1001 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
1002 && LEGITIMATE_CONSTANT_P (op
));
1005 /* Returns 1 if OP is an operand that is a CONST_INT. */
1008 const_int_operand (rtx op
, enum machine_mode mode
)
1010 if (GET_CODE (op
) != CONST_INT
)
1013 if (mode
!= VOIDmode
1014 && trunc_int_for_mode (INTVAL (op
), mode
) != INTVAL (op
))
1020 /* Returns 1 if OP is an operand that is a constant integer or constant
1021 floating-point number. */
1024 const_double_operand (rtx op
, enum machine_mode mode
)
1026 /* Don't accept CONST_INT or anything similar
1027 if the caller wants something floating. */
1028 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1029 && GET_MODE_CLASS (mode
) != MODE_INT
1030 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1033 return ((GET_CODE (op
) == CONST_DOUBLE
|| GET_CODE (op
) == CONST_INT
)
1034 && (mode
== VOIDmode
|| GET_MODE (op
) == mode
1035 || GET_MODE (op
) == VOIDmode
));
1038 /* Return 1 if OP is a general operand that is not an immediate operand. */
1041 nonimmediate_operand (rtx op
, enum machine_mode mode
)
1043 return (general_operand (op
, mode
) && ! CONSTANT_P (op
));
1046 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1049 nonmemory_operand (rtx op
, enum machine_mode mode
)
1051 if (CONSTANT_P (op
))
1053 /* Don't accept CONST_INT or anything similar
1054 if the caller wants something floating. */
1055 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1056 && GET_MODE_CLASS (mode
) != MODE_INT
1057 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1060 if (GET_CODE (op
) == CONST_INT
1062 && trunc_int_for_mode (INTVAL (op
), mode
) != INTVAL (op
))
1065 return ((GET_MODE (op
) == VOIDmode
|| GET_MODE (op
) == mode
1066 || mode
== VOIDmode
)
1067 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
1068 && LEGITIMATE_CONSTANT_P (op
));
1071 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
1074 if (GET_CODE (op
) == SUBREG
)
1076 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1077 because it is guaranteed to be reloaded into one.
1078 Just make sure the MEM is valid in itself.
1079 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1080 but currently it does result from (SUBREG (REG)...) where the
1081 reg went on the stack.) */
1082 if (! reload_completed
&& MEM_P (SUBREG_REG (op
)))
1083 return general_operand (op
, mode
);
1084 op
= SUBREG_REG (op
);
1087 /* We don't consider registers whose class is NO_REGS
1088 to be a register operand. */
1090 && (REGNO (op
) >= FIRST_PSEUDO_REGISTER
1091 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
));
1094 /* Return 1 if OP is a valid operand that stands for pushing a
1095 value of mode MODE onto the stack.
1097 The main use of this function is as a predicate in match_operand
1098 expressions in the machine description. */
1101 push_operand (rtx op
, enum machine_mode mode
)
1103 unsigned int rounded_size
= GET_MODE_SIZE (mode
);
1105 #ifdef PUSH_ROUNDING
1106 rounded_size
= PUSH_ROUNDING (rounded_size
);
1112 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1117 if (rounded_size
== GET_MODE_SIZE (mode
))
1119 if (GET_CODE (op
) != STACK_PUSH_CODE
)
1124 if (GET_CODE (op
) != PRE_MODIFY
1125 || GET_CODE (XEXP (op
, 1)) != PLUS
1126 || XEXP (XEXP (op
, 1), 0) != XEXP (op
, 0)
1127 || GET_CODE (XEXP (XEXP (op
, 1), 1)) != CONST_INT
1128 #ifdef STACK_GROWS_DOWNWARD
1129 || INTVAL (XEXP (XEXP (op
, 1), 1)) != - (int) rounded_size
1131 || INTVAL (XEXP (XEXP (op
, 1), 1)) != (int) rounded_size
1137 return XEXP (op
, 0) == stack_pointer_rtx
;
1140 /* Return 1 if OP is a valid operand that stands for popping a
1141 value of mode MODE off the stack.
1143 The main use of this function is as a predicate in match_operand
1144 expressions in the machine description. */
1147 pop_operand (rtx op
, enum machine_mode mode
)
1152 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1157 if (GET_CODE (op
) != STACK_POP_CODE
)
1160 return XEXP (op
, 0) == stack_pointer_rtx
;
1163 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1166 memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED
, rtx addr
)
1168 GO_IF_LEGITIMATE_ADDRESS (mode
, addr
, win
);
1175 /* Return 1 if OP is a valid memory reference with mode MODE,
1176 including a valid address.
1178 The main use of this function is as a predicate in match_operand
1179 expressions in the machine description. */
1182 memory_operand (rtx op
, enum machine_mode mode
)
1186 if (! reload_completed
)
1187 /* Note that no SUBREG is a memory operand before end of reload pass,
1188 because (SUBREG (MEM...)) forces reloading into a register. */
1189 return MEM_P (op
) && general_operand (op
, mode
);
1191 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1195 if (GET_CODE (inner
) == SUBREG
)
1196 inner
= SUBREG_REG (inner
);
1198 return (MEM_P (inner
) && general_operand (op
, mode
));
1201 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1202 that is, a memory reference whose address is a general_operand. */
1205 indirect_operand (rtx op
, enum machine_mode mode
)
1207 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1208 if (! reload_completed
1209 && GET_CODE (op
) == SUBREG
&& MEM_P (SUBREG_REG (op
)))
1211 int offset
= SUBREG_BYTE (op
);
1212 rtx inner
= SUBREG_REG (op
);
1214 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1217 /* The only way that we can have a general_operand as the resulting
1218 address is if OFFSET is zero and the address already is an operand
1219 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1222 return ((offset
== 0 && general_operand (XEXP (inner
, 0), Pmode
))
1223 || (GET_CODE (XEXP (inner
, 0)) == PLUS
1224 && GET_CODE (XEXP (XEXP (inner
, 0), 1)) == CONST_INT
1225 && INTVAL (XEXP (XEXP (inner
, 0), 1)) == -offset
1226 && general_operand (XEXP (XEXP (inner
, 0), 0), Pmode
)));
1230 && memory_operand (op
, mode
)
1231 && general_operand (XEXP (op
, 0), Pmode
));
1234 /* Return 1 if this is a comparison operator. This allows the use of
1235 MATCH_OPERATOR to recognize all the branch insns. */
1238 comparison_operator (rtx op
, enum machine_mode mode
)
1240 return ((mode
== VOIDmode
|| GET_MODE (op
) == mode
)
1241 && COMPARISON_P (op
));
1244 /* If BODY is an insn body that uses ASM_OPERANDS,
1245 return the number of operands (both input and output) in the insn.
1246 Otherwise return -1. */
1249 asm_noperands (rtx body
)
1251 switch (GET_CODE (body
))
1254 /* No output operands: return number of input operands. */
1255 return ASM_OPERANDS_INPUT_LENGTH (body
);
1257 if (GET_CODE (SET_SRC (body
)) == ASM_OPERANDS
)
1258 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1259 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body
)) + 1;
1263 if (GET_CODE (XVECEXP (body
, 0, 0)) == SET
1264 && GET_CODE (SET_SRC (XVECEXP (body
, 0, 0))) == ASM_OPERANDS
)
1266 /* Multiple output operands, or 1 output plus some clobbers:
1267 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1271 /* Count backwards through CLOBBERs to determine number of SETs. */
1272 for (i
= XVECLEN (body
, 0); i
> 0; i
--)
1274 if (GET_CODE (XVECEXP (body
, 0, i
- 1)) == SET
)
1276 if (GET_CODE (XVECEXP (body
, 0, i
- 1)) != CLOBBER
)
1280 /* N_SETS is now number of output operands. */
1283 /* Verify that all the SETs we have
1284 came from a single original asm_operands insn
1285 (so that invalid combinations are blocked). */
1286 for (i
= 0; i
< n_sets
; i
++)
1288 rtx elt
= XVECEXP (body
, 0, i
);
1289 if (GET_CODE (elt
) != SET
)
1291 if (GET_CODE (SET_SRC (elt
)) != ASM_OPERANDS
)
1293 /* If these ASM_OPERANDS rtx's came from different original insns
1294 then they aren't allowed together. */
1295 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt
))
1296 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body
, 0, 0))))
1299 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body
, 0, 0)))
1302 else if (GET_CODE (XVECEXP (body
, 0, 0)) == ASM_OPERANDS
)
1304 /* 0 outputs, but some clobbers:
1305 body is [(asm_operands ...) (clobber (reg ...))...]. */
1308 /* Make sure all the other parallel things really are clobbers. */
1309 for (i
= XVECLEN (body
, 0) - 1; i
> 0; i
--)
1310 if (GET_CODE (XVECEXP (body
, 0, i
)) != CLOBBER
)
1313 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body
, 0, 0));
1322 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1323 copy its operands (both input and output) into the vector OPERANDS,
1324 the locations of the operands within the insn into the vector OPERAND_LOCS,
1325 and the constraints for the operands into CONSTRAINTS.
1326 Write the modes of the operands into MODES.
1327 Return the assembler-template.
1329 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1330 we don't store that info. */
1333 decode_asm_operands (rtx body
, rtx
*operands
, rtx
**operand_locs
,
1334 const char **constraints
, enum machine_mode
*modes
,
1341 if (GET_CODE (body
) == SET
&& GET_CODE (SET_SRC (body
)) == ASM_OPERANDS
)
1343 asmop
= SET_SRC (body
);
1344 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1346 noperands
= ASM_OPERANDS_INPUT_LENGTH (asmop
) + 1;
1348 for (i
= 1; i
< noperands
; i
++)
1351 operand_locs
[i
] = &ASM_OPERANDS_INPUT (asmop
, i
- 1);
1353 operands
[i
] = ASM_OPERANDS_INPUT (asmop
, i
- 1);
1355 constraints
[i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
- 1);
1357 modes
[i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
- 1);
1360 /* The output is in the SET.
1361 Its constraint is in the ASM_OPERANDS itself. */
1363 operands
[0] = SET_DEST (body
);
1365 operand_locs
[0] = &SET_DEST (body
);
1367 constraints
[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop
);
1369 modes
[0] = GET_MODE (SET_DEST (body
));
1371 else if (GET_CODE (body
) == ASM_OPERANDS
)
1374 /* No output operands: BODY is (asm_operands ....). */
1376 noperands
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1378 /* The input operands are found in the 1st element vector. */
1379 /* Constraints for inputs are in the 2nd element vector. */
1380 for (i
= 0; i
< noperands
; i
++)
1383 operand_locs
[i
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1385 operands
[i
] = ASM_OPERANDS_INPUT (asmop
, i
);
1387 constraints
[i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1389 modes
[i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1392 else if (GET_CODE (body
) == PARALLEL
1393 && GET_CODE (XVECEXP (body
, 0, 0)) == SET
1394 && GET_CODE (SET_SRC (XVECEXP (body
, 0, 0))) == ASM_OPERANDS
)
1396 int nparallel
= XVECLEN (body
, 0); /* Includes CLOBBERs. */
1398 int nout
= 0; /* Does not include CLOBBERs. */
1400 asmop
= SET_SRC (XVECEXP (body
, 0, 0));
1401 nin
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1403 /* At least one output, plus some CLOBBERs. */
1405 /* The outputs are in the SETs.
1406 Their constraints are in the ASM_OPERANDS itself. */
1407 for (i
= 0; i
< nparallel
; i
++)
1409 if (GET_CODE (XVECEXP (body
, 0, i
)) == CLOBBER
)
1410 break; /* Past last SET */
1413 operands
[i
] = SET_DEST (XVECEXP (body
, 0, i
));
1415 operand_locs
[i
] = &SET_DEST (XVECEXP (body
, 0, i
));
1417 constraints
[i
] = XSTR (SET_SRC (XVECEXP (body
, 0, i
)), 1);
1419 modes
[i
] = GET_MODE (SET_DEST (XVECEXP (body
, 0, i
)));
1423 for (i
= 0; i
< nin
; i
++)
1426 operand_locs
[i
+ nout
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1428 operands
[i
+ nout
] = ASM_OPERANDS_INPUT (asmop
, i
);
1430 constraints
[i
+ nout
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1432 modes
[i
+ nout
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1435 else if (GET_CODE (body
) == PARALLEL
1436 && GET_CODE (XVECEXP (body
, 0, 0)) == ASM_OPERANDS
)
1438 /* No outputs, but some CLOBBERs. */
1442 asmop
= XVECEXP (body
, 0, 0);
1443 nin
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1445 for (i
= 0; i
< nin
; i
++)
1448 operand_locs
[i
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1450 operands
[i
] = ASM_OPERANDS_INPUT (asmop
, i
);
1452 constraints
[i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1454 modes
[i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1461 #ifdef USE_MAPPED_LOCATION
1462 *loc
= ASM_OPERANDS_SOURCE_LOCATION (asmop
);
1464 loc
->file
= ASM_OPERANDS_SOURCE_FILE (asmop
);
1465 loc
->line
= ASM_OPERANDS_SOURCE_LINE (asmop
);
1469 return ASM_OPERANDS_TEMPLATE (asmop
);
1472 /* Check if an asm_operand matches its constraints.
1473 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1476 asm_operand_ok (rtx op
, const char *constraint
)
1480 /* Use constrain_operands after reload. */
1481 gcc_assert (!reload_completed
);
1485 char c
= *constraint
;
1502 case '0': case '1': case '2': case '3': case '4':
1503 case '5': case '6': case '7': case '8': case '9':
1504 /* For best results, our caller should have given us the
1505 proper matching constraint, but we can't actually fail
1506 the check if they didn't. Indicate that results are
1510 while (ISDIGIT (*constraint
));
1516 if (address_operand (op
, VOIDmode
))
1521 case 'V': /* non-offsettable */
1522 if (memory_operand (op
, VOIDmode
))
1526 case 'o': /* offsettable */
1527 if (offsettable_nonstrict_memref_p (op
))
1532 /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed to exist,
1533 excepting those that expand_call created. Further, on some
1534 machines which do not have generalized auto inc/dec, an inc/dec
1535 is not a memory_operand.
1537 Match any memory and hope things are resolved after reload. */
1541 || GET_CODE (XEXP (op
, 0)) == PRE_DEC
1542 || GET_CODE (XEXP (op
, 0)) == POST_DEC
))
1549 || GET_CODE (XEXP (op
, 0)) == PRE_INC
1550 || GET_CODE (XEXP (op
, 0)) == POST_INC
))
1556 if (GET_CODE (op
) == CONST_DOUBLE
1557 || (GET_CODE (op
) == CONST_VECTOR
1558 && GET_MODE_CLASS (GET_MODE (op
)) == MODE_VECTOR_FLOAT
))
1563 if (GET_CODE (op
) == CONST_DOUBLE
1564 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op
, 'G', constraint
))
1568 if (GET_CODE (op
) == CONST_DOUBLE
1569 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op
, 'H', constraint
))
1574 if (GET_CODE (op
) == CONST_INT
1575 || (GET_CODE (op
) == CONST_DOUBLE
1576 && GET_MODE (op
) == VOIDmode
))
1581 if (CONSTANT_P (op
) && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
)))
1586 if (GET_CODE (op
) == CONST_INT
1587 || (GET_CODE (op
) == CONST_DOUBLE
1588 && GET_MODE (op
) == VOIDmode
))
1593 if (GET_CODE (op
) == CONST_INT
1594 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'I', constraint
))
1598 if (GET_CODE (op
) == CONST_INT
1599 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'J', constraint
))
1603 if (GET_CODE (op
) == CONST_INT
1604 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'K', constraint
))
1608 if (GET_CODE (op
) == CONST_INT
1609 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'L', constraint
))
1613 if (GET_CODE (op
) == CONST_INT
1614 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'M', constraint
))
1618 if (GET_CODE (op
) == CONST_INT
1619 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'N', constraint
))
1623 if (GET_CODE (op
) == CONST_INT
1624 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'O', constraint
))
1628 if (GET_CODE (op
) == CONST_INT
1629 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'P', constraint
))
1638 if (general_operand (op
, VOIDmode
))
1643 /* For all other letters, we first check for a register class,
1644 otherwise it is an EXTRA_CONSTRAINT. */
1645 if (REG_CLASS_FROM_CONSTRAINT (c
, constraint
) != NO_REGS
)
1648 if (GET_MODE (op
) == BLKmode
)
1650 if (register_operand (op
, VOIDmode
))
1653 #ifdef EXTRA_CONSTRAINT_STR
1654 else if (EXTRA_CONSTRAINT_STR (op
, c
, constraint
))
1656 else if (EXTRA_MEMORY_CONSTRAINT (c
, constraint
)
1657 /* Every memory operand can be reloaded to fit. */
1658 && memory_operand (op
, VOIDmode
))
1660 else if (EXTRA_ADDRESS_CONSTRAINT (c
, constraint
)
1661 /* Every address operand can be reloaded to fit. */
1662 && address_operand (op
, VOIDmode
))
1667 len
= CONSTRAINT_LEN (c
, constraint
);
1670 while (--len
&& *constraint
);
1678 /* Given an rtx *P, if it is a sum containing an integer constant term,
1679 return the location (type rtx *) of the pointer to that constant term.
1680 Otherwise, return a null pointer. */
1683 find_constant_term_loc (rtx
*p
)
1686 enum rtx_code code
= GET_CODE (*p
);
1688 /* If *P IS such a constant term, P is its location. */
1690 if (code
== CONST_INT
|| code
== SYMBOL_REF
|| code
== LABEL_REF
1694 /* Otherwise, if not a sum, it has no constant term. */
1696 if (GET_CODE (*p
) != PLUS
)
1699 /* If one of the summands is constant, return its location. */
1701 if (XEXP (*p
, 0) && CONSTANT_P (XEXP (*p
, 0))
1702 && XEXP (*p
, 1) && CONSTANT_P (XEXP (*p
, 1)))
1705 /* Otherwise, check each summand for containing a constant term. */
1707 if (XEXP (*p
, 0) != 0)
1709 tem
= find_constant_term_loc (&XEXP (*p
, 0));
1714 if (XEXP (*p
, 1) != 0)
1716 tem
= find_constant_term_loc (&XEXP (*p
, 1));
1724 /* Return 1 if OP is a memory reference
1725 whose address contains no side effects
1726 and remains valid after the addition
1727 of a positive integer less than the
1728 size of the object being referenced.
1730 We assume that the original address is valid and do not check it.
1732 This uses strict_memory_address_p as a subroutine, so
1733 don't use it before reload. */
1736 offsettable_memref_p (rtx op
)
1738 return ((MEM_P (op
))
1739 && offsettable_address_p (1, GET_MODE (op
), XEXP (op
, 0)));
1742 /* Similar, but don't require a strictly valid mem ref:
1743 consider pseudo-regs valid as index or base regs. */
1746 offsettable_nonstrict_memref_p (rtx op
)
1748 return ((MEM_P (op
))
1749 && offsettable_address_p (0, GET_MODE (op
), XEXP (op
, 0)));
1752 /* Return 1 if Y is a memory address which contains no side effects
1753 and would remain valid after the addition of a positive integer
1754 less than the size of that mode.
1756 We assume that the original address is valid and do not check it.
1757 We do check that it is valid for narrower modes.
1759 If STRICTP is nonzero, we require a strictly valid address,
1760 for the sake of use in reload.c. */
1763 offsettable_address_p (int strictp
, enum machine_mode mode
, rtx y
)
1765 enum rtx_code ycode
= GET_CODE (y
);
1769 int (*addressp
) (enum machine_mode
, rtx
) =
1770 (strictp
? strict_memory_address_p
: memory_address_p
);
1771 unsigned int mode_sz
= GET_MODE_SIZE (mode
);
1773 if (CONSTANT_ADDRESS_P (y
))
1776 /* Adjusting an offsettable address involves changing to a narrower mode.
1777 Make sure that's OK. */
1779 if (mode_dependent_address_p (y
))
1782 /* ??? How much offset does an offsettable BLKmode reference need?
1783 Clearly that depends on the situation in which it's being used.
1784 However, the current situation in which we test 0xffffffff is
1785 less than ideal. Caveat user. */
1787 mode_sz
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
1789 /* If the expression contains a constant term,
1790 see if it remains valid when max possible offset is added. */
1792 if ((ycode
== PLUS
) && (y2
= find_constant_term_loc (&y1
)))
1797 *y2
= plus_constant (*y2
, mode_sz
- 1);
1798 /* Use QImode because an odd displacement may be automatically invalid
1799 for any wider mode. But it should be valid for a single byte. */
1800 good
= (*addressp
) (QImode
, y
);
1802 /* In any case, restore old contents of memory. */
1807 if (GET_RTX_CLASS (ycode
) == RTX_AUTOINC
)
1810 /* The offset added here is chosen as the maximum offset that
1811 any instruction could need to add when operating on something
1812 of the specified mode. We assume that if Y and Y+c are
1813 valid addresses then so is Y+d for all 0<d<c. adjust_address will
1814 go inside a LO_SUM here, so we do so as well. */
1815 if (GET_CODE (y
) == LO_SUM
1817 && mode_sz
<= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
)
1818 z
= gen_rtx_LO_SUM (GET_MODE (y
), XEXP (y
, 0),
1819 plus_constant (XEXP (y
, 1), mode_sz
- 1));
1821 z
= plus_constant (y
, mode_sz
- 1);
1823 /* Use QImode because an odd displacement may be automatically invalid
1824 for any wider mode. But it should be valid for a single byte. */
1825 return (*addressp
) (QImode
, z
);
1828 /* Return 1 if ADDR is an address-expression whose effect depends
1829 on the mode of the memory reference it is used in.
1831 Autoincrement addressing is a typical example of mode-dependence
1832 because the amount of the increment depends on the mode. */
1835 mode_dependent_address_p (rtx addr
)
1837 /* Auto-increment addressing with anything other than post_modify
1838 or pre_modify always introduces a mode dependency. Catch such
1839 cases now instead of deferring to the target. */
1840 if (GET_CODE (addr
) == PRE_INC
1841 || GET_CODE (addr
) == POST_INC
1842 || GET_CODE (addr
) == PRE_DEC
1843 || GET_CODE (addr
) == POST_DEC
)
1846 GO_IF_MODE_DEPENDENT_ADDRESS (addr
, win
);
1848 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1849 win
: ATTRIBUTE_UNUSED_LABEL
1853 /* Like extract_insn, but save insn extracted and don't extract again, when
1854 called again for the same insn expecting that recog_data still contain the
1855 valid information. This is used primary by gen_attr infrastructure that
1856 often does extract insn again and again. */
1858 extract_insn_cached (rtx insn
)
1860 if (recog_data
.insn
== insn
&& INSN_CODE (insn
) >= 0)
1862 extract_insn (insn
);
1863 recog_data
.insn
= insn
;
1866 /* Do cached extract_insn, constrain_operands and complain about failures.
1867 Used by insn_attrtab. */
1869 extract_constrain_insn_cached (rtx insn
)
1871 extract_insn_cached (insn
);
1872 if (which_alternative
== -1
1873 && !constrain_operands (reload_completed
))
1874 fatal_insn_not_found (insn
);
1877 /* Do cached constrain_operands and complain about failures. */
1879 constrain_operands_cached (int strict
)
1881 if (which_alternative
== -1)
1882 return constrain_operands (strict
);
1887 /* Analyze INSN and fill in recog_data. */
1890 extract_insn (rtx insn
)
1895 rtx body
= PATTERN (insn
);
1897 recog_data
.insn
= NULL
;
1898 recog_data
.n_operands
= 0;
1899 recog_data
.n_alternatives
= 0;
1900 recog_data
.n_dups
= 0;
1901 which_alternative
= -1;
1903 switch (GET_CODE (body
))
1913 if (GET_CODE (SET_SRC (body
)) == ASM_OPERANDS
)
1918 if ((GET_CODE (XVECEXP (body
, 0, 0)) == SET
1919 && GET_CODE (SET_SRC (XVECEXP (body
, 0, 0))) == ASM_OPERANDS
)
1920 || GET_CODE (XVECEXP (body
, 0, 0)) == ASM_OPERANDS
)
1926 recog_data
.n_operands
= noperands
= asm_noperands (body
);
1929 /* This insn is an `asm' with operands. */
1931 /* expand_asm_operands makes sure there aren't too many operands. */
1932 gcc_assert (noperands
<= MAX_RECOG_OPERANDS
);
1934 /* Now get the operand values and constraints out of the insn. */
1935 decode_asm_operands (body
, recog_data
.operand
,
1936 recog_data
.operand_loc
,
1937 recog_data
.constraints
,
1938 recog_data
.operand_mode
, NULL
);
1941 const char *p
= recog_data
.constraints
[0];
1942 recog_data
.n_alternatives
= 1;
1944 recog_data
.n_alternatives
+= (*p
++ == ',');
1948 fatal_insn_not_found (insn
);
1952 /* Ordinary insn: recognize it, get the operands via insn_extract
1953 and get the constraints. */
1955 icode
= recog_memoized (insn
);
1957 fatal_insn_not_found (insn
);
1959 recog_data
.n_operands
= noperands
= insn_data
[icode
].n_operands
;
1960 recog_data
.n_alternatives
= insn_data
[icode
].n_alternatives
;
1961 recog_data
.n_dups
= insn_data
[icode
].n_dups
;
1963 insn_extract (insn
);
1965 for (i
= 0; i
< noperands
; i
++)
1967 recog_data
.constraints
[i
] = insn_data
[icode
].operand
[i
].constraint
;
1968 recog_data
.operand_mode
[i
] = insn_data
[icode
].operand
[i
].mode
;
1969 /* VOIDmode match_operands gets mode from their real operand. */
1970 if (recog_data
.operand_mode
[i
] == VOIDmode
)
1971 recog_data
.operand_mode
[i
] = GET_MODE (recog_data
.operand
[i
]);
1974 for (i
= 0; i
< noperands
; i
++)
1975 recog_data
.operand_type
[i
]
1976 = (recog_data
.constraints
[i
][0] == '=' ? OP_OUT
1977 : recog_data
.constraints
[i
][0] == '+' ? OP_INOUT
1980 gcc_assert (recog_data
.n_alternatives
<= MAX_RECOG_ALTERNATIVES
);
1983 /* After calling extract_insn, you can use this function to extract some
1984 information from the constraint strings into a more usable form.
1985 The collected data is stored in recog_op_alt. */
1987 preprocess_constraints (void)
1991 for (i
= 0; i
< recog_data
.n_operands
; i
++)
1992 memset (recog_op_alt
[i
], 0, (recog_data
.n_alternatives
1993 * sizeof (struct operand_alternative
)));
1995 for (i
= 0; i
< recog_data
.n_operands
; i
++)
1998 struct operand_alternative
*op_alt
;
1999 const char *p
= recog_data
.constraints
[i
];
2001 op_alt
= recog_op_alt
[i
];
2003 for (j
= 0; j
< recog_data
.n_alternatives
; j
++)
2005 op_alt
[j
].cl
= NO_REGS
;
2006 op_alt
[j
].constraint
= p
;
2007 op_alt
[j
].matches
= -1;
2008 op_alt
[j
].matched
= -1;
2010 if (*p
== '\0' || *p
== ',')
2012 op_alt
[j
].anything_ok
= 1;
2022 while (c
!= ',' && c
!= '\0');
2023 if (c
== ',' || c
== '\0')
2031 case '=': case '+': case '*': case '%':
2032 case 'E': case 'F': case 'G': case 'H':
2033 case 's': case 'i': case 'n':
2034 case 'I': case 'J': case 'K': case 'L':
2035 case 'M': case 'N': case 'O': case 'P':
2036 /* These don't say anything we care about. */
2040 op_alt
[j
].reject
+= 6;
2043 op_alt
[j
].reject
+= 600;
2046 op_alt
[j
].earlyclobber
= 1;
2049 case '0': case '1': case '2': case '3': case '4':
2050 case '5': case '6': case '7': case '8': case '9':
2053 op_alt
[j
].matches
= strtoul (p
, &end
, 10);
2054 recog_op_alt
[op_alt
[j
].matches
][j
].matched
= i
;
2060 op_alt
[j
].memory_ok
= 1;
2063 op_alt
[j
].decmem_ok
= 1;
2066 op_alt
[j
].incmem_ok
= 1;
2069 op_alt
[j
].nonoffmem_ok
= 1;
2072 op_alt
[j
].offmem_ok
= 1;
2075 op_alt
[j
].anything_ok
= 1;
2079 op_alt
[j
].is_address
= 1;
2080 op_alt
[j
].cl
= reg_class_subunion
[(int) op_alt
[j
].cl
]
2081 [(int) base_reg_class (VOIDmode
, ADDRESS
, SCRATCH
)];
2087 reg_class_subunion
[(int) op_alt
[j
].cl
][(int) GENERAL_REGS
];
2091 if (EXTRA_MEMORY_CONSTRAINT (c
, p
))
2093 op_alt
[j
].memory_ok
= 1;
2096 if (EXTRA_ADDRESS_CONSTRAINT (c
, p
))
2098 op_alt
[j
].is_address
= 1;
2100 = (reg_class_subunion
2101 [(int) op_alt
[j
].cl
]
2102 [(int) base_reg_class (VOIDmode
, ADDRESS
,
2108 = (reg_class_subunion
2109 [(int) op_alt
[j
].cl
]
2110 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c
, p
)]);
2113 p
+= CONSTRAINT_LEN (c
, p
);
2119 /* Check the operands of an insn against the insn's operand constraints
2120 and return 1 if they are valid.
2121 The information about the insn's operands, constraints, operand modes
2122 etc. is obtained from the global variables set up by extract_insn.
2124 WHICH_ALTERNATIVE is set to a number which indicates which
2125 alternative of constraints was matched: 0 for the first alternative,
2126 1 for the next, etc.
2128 In addition, when two operands are required to match
2129 and it happens that the output operand is (reg) while the
2130 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2131 make the output operand look like the input.
2132 This is because the output operand is the one the template will print.
2134 This is used in final, just before printing the assembler code and by
2135 the routines that determine an insn's attribute.
2137 If STRICT is a positive nonzero value, it means that we have been
2138 called after reload has been completed. In that case, we must
2139 do all checks strictly. If it is zero, it means that we have been called
2140 before reload has completed. In that case, we first try to see if we can
2141 find an alternative that matches strictly. If not, we try again, this
2142 time assuming that reload will fix up the insn. This provides a "best
2143 guess" for the alternative and is used to compute attributes of insns prior
2144 to reload. A negative value of STRICT is used for this internal call. */
2152 constrain_operands (int strict
)
2154 const char *constraints
[MAX_RECOG_OPERANDS
];
2155 int matching_operands
[MAX_RECOG_OPERANDS
];
2156 int earlyclobber
[MAX_RECOG_OPERANDS
];
2159 struct funny_match funny_match
[MAX_RECOG_OPERANDS
];
2160 int funny_match_index
;
2162 which_alternative
= 0;
2163 if (recog_data
.n_operands
== 0 || recog_data
.n_alternatives
== 0)
2166 for (c
= 0; c
< recog_data
.n_operands
; c
++)
2168 constraints
[c
] = recog_data
.constraints
[c
];
2169 matching_operands
[c
] = -1;
2174 int seen_earlyclobber_at
= -1;
2177 funny_match_index
= 0;
2179 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2181 rtx op
= recog_data
.operand
[opno
];
2182 enum machine_mode mode
= GET_MODE (op
);
2183 const char *p
= constraints
[opno
];
2189 earlyclobber
[opno
] = 0;
2191 /* A unary operator may be accepted by the predicate, but it
2192 is irrelevant for matching constraints. */
2196 if (GET_CODE (op
) == SUBREG
)
2198 if (REG_P (SUBREG_REG (op
))
2199 && REGNO (SUBREG_REG (op
)) < FIRST_PSEUDO_REGISTER
)
2200 offset
= subreg_regno_offset (REGNO (SUBREG_REG (op
)),
2201 GET_MODE (SUBREG_REG (op
)),
2204 op
= SUBREG_REG (op
);
2207 /* An empty constraint or empty alternative
2208 allows anything which matched the pattern. */
2209 if (*p
== 0 || *p
== ',')
2213 switch (c
= *p
, len
= CONSTRAINT_LEN (c
, p
), c
)
2222 case '?': case '!': case '*': case '%':
2227 /* Ignore rest of this alternative as far as
2228 constraint checking is concerned. */
2231 while (*p
&& *p
!= ',');
2236 earlyclobber
[opno
] = 1;
2237 if (seen_earlyclobber_at
< 0)
2238 seen_earlyclobber_at
= opno
;
2241 case '0': case '1': case '2': case '3': case '4':
2242 case '5': case '6': case '7': case '8': case '9':
2244 /* This operand must be the same as a previous one.
2245 This kind of constraint is used for instructions such
2246 as add when they take only two operands.
2248 Note that the lower-numbered operand is passed first.
2250 If we are not testing strictly, assume that this
2251 constraint will be satisfied. */
2256 match
= strtoul (p
, &end
, 10);
2263 rtx op1
= recog_data
.operand
[match
];
2264 rtx op2
= recog_data
.operand
[opno
];
2266 /* A unary operator may be accepted by the predicate,
2267 but it is irrelevant for matching constraints. */
2269 op1
= XEXP (op1
, 0);
2271 op2
= XEXP (op2
, 0);
2273 val
= operands_match_p (op1
, op2
);
2276 matching_operands
[opno
] = match
;
2277 matching_operands
[match
] = opno
;
2282 /* If output is *x and input is *--x, arrange later
2283 to change the output to *--x as well, since the
2284 output op is the one that will be printed. */
2285 if (val
== 2 && strict
> 0)
2287 funny_match
[funny_match_index
].this = opno
;
2288 funny_match
[funny_match_index
++].other
= match
;
2295 /* p is used for address_operands. When we are called by
2296 gen_reload, no one will have checked that the address is
2297 strictly valid, i.e., that all pseudos requiring hard regs
2298 have gotten them. */
2300 || (strict_memory_address_p (recog_data
.operand_mode
[opno
],
2305 /* No need to check general_operand again;
2306 it was done in insn-recog.c. Well, except that reload
2307 doesn't check the validity of its replacements, but
2308 that should only matter when there's a bug. */
2310 /* Anything goes unless it is a REG and really has a hard reg
2311 but the hard reg is not in the class GENERAL_REGS. */
2315 || GENERAL_REGS
== ALL_REGS
2316 || (reload_in_progress
2317 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2318 || reg_fits_class_p (op
, GENERAL_REGS
, offset
, mode
))
2321 else if (strict
< 0 || general_operand (op
, mode
))
2326 /* This is used for a MATCH_SCRATCH in the cases when
2327 we don't actually need anything. So anything goes
2333 /* Memory operands must be valid, to the extent
2334 required by STRICT. */
2338 && !strict_memory_address_p (GET_MODE (op
),
2342 && !memory_address_p (GET_MODE (op
), XEXP (op
, 0)))
2346 /* Before reload, accept what reload can turn into mem. */
2347 else if (strict
< 0 && CONSTANT_P (op
))
2349 /* During reload, accept a pseudo */
2350 else if (reload_in_progress
&& REG_P (op
)
2351 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2357 && (GET_CODE (XEXP (op
, 0)) == PRE_DEC
2358 || GET_CODE (XEXP (op
, 0)) == POST_DEC
))
2364 && (GET_CODE (XEXP (op
, 0)) == PRE_INC
2365 || GET_CODE (XEXP (op
, 0)) == POST_INC
))
2371 if (GET_CODE (op
) == CONST_DOUBLE
2372 || (GET_CODE (op
) == CONST_VECTOR
2373 && GET_MODE_CLASS (GET_MODE (op
)) == MODE_VECTOR_FLOAT
))
2379 if (GET_CODE (op
) == CONST_DOUBLE
2380 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op
, c
, p
))
2385 if (GET_CODE (op
) == CONST_INT
2386 || (GET_CODE (op
) == CONST_DOUBLE
2387 && GET_MODE (op
) == VOIDmode
))
2390 if (CONSTANT_P (op
))
2395 if (GET_CODE (op
) == CONST_INT
2396 || (GET_CODE (op
) == CONST_DOUBLE
2397 && GET_MODE (op
) == VOIDmode
))
2409 if (GET_CODE (op
) == CONST_INT
2410 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), c
, p
))
2416 && ((strict
> 0 && ! offsettable_memref_p (op
))
2418 && !(CONSTANT_P (op
) || MEM_P (op
)))
2419 || (reload_in_progress
2421 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
))))
2426 if ((strict
> 0 && offsettable_memref_p (op
))
2427 || (strict
== 0 && offsettable_nonstrict_memref_p (op
))
2428 /* Before reload, accept what reload can handle. */
2430 && (CONSTANT_P (op
) || MEM_P (op
)))
2431 /* During reload, accept a pseudo */
2432 || (reload_in_progress
&& REG_P (op
)
2433 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
))
2442 ? GENERAL_REGS
: REG_CLASS_FROM_CONSTRAINT (c
, p
));
2448 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2449 || (strict
== 0 && GET_CODE (op
) == SCRATCH
)
2451 && reg_fits_class_p (op
, cl
, offset
, mode
)))
2454 #ifdef EXTRA_CONSTRAINT_STR
2455 else if (EXTRA_CONSTRAINT_STR (op
, c
, p
))
2458 else if (EXTRA_MEMORY_CONSTRAINT (c
, p
)
2459 /* Every memory operand can be reloaded to fit. */
2460 && ((strict
< 0 && MEM_P (op
))
2461 /* Before reload, accept what reload can turn
2463 || (strict
< 0 && CONSTANT_P (op
))
2464 /* During reload, accept a pseudo */
2465 || (reload_in_progress
&& REG_P (op
)
2466 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)))
2468 else if (EXTRA_ADDRESS_CONSTRAINT (c
, p
)
2469 /* Every address operand can be reloaded to fit. */
2476 while (p
+= len
, c
);
2478 constraints
[opno
] = p
;
2479 /* If this operand did not win somehow,
2480 this alternative loses. */
2484 /* This alternative won; the operands are ok.
2485 Change whichever operands this alternative says to change. */
2490 /* See if any earlyclobber operand conflicts with some other
2493 if (strict
> 0 && seen_earlyclobber_at
>= 0)
2494 for (eopno
= seen_earlyclobber_at
;
2495 eopno
< recog_data
.n_operands
;
2497 /* Ignore earlyclobber operands now in memory,
2498 because we would often report failure when we have
2499 two memory operands, one of which was formerly a REG. */
2500 if (earlyclobber
[eopno
]
2501 && REG_P (recog_data
.operand
[eopno
]))
2502 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2503 if ((MEM_P (recog_data
.operand
[opno
])
2504 || recog_data
.operand_type
[opno
] != OP_OUT
)
2506 /* Ignore things like match_operator operands. */
2507 && *recog_data
.constraints
[opno
] != 0
2508 && ! (matching_operands
[opno
] == eopno
2509 && operands_match_p (recog_data
.operand
[opno
],
2510 recog_data
.operand
[eopno
]))
2511 && ! safe_from_earlyclobber (recog_data
.operand
[opno
],
2512 recog_data
.operand
[eopno
]))
2517 while (--funny_match_index
>= 0)
2519 recog_data
.operand
[funny_match
[funny_match_index
].other
]
2520 = recog_data
.operand
[funny_match
[funny_match_index
].this];
2527 which_alternative
++;
2529 while (which_alternative
< recog_data
.n_alternatives
);
2531 which_alternative
= -1;
2532 /* If we are about to reject this, but we are not to test strictly,
2533 try a very loose test. Only return failure if it fails also. */
2535 return constrain_operands (-1);
2540 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2541 is a hard reg in class CLASS when its regno is offset by OFFSET
2542 and changed to mode MODE.
2543 If REG occupies multiple hard regs, all of them must be in CLASS. */
2546 reg_fits_class_p (rtx operand
, enum reg_class cl
, int offset
,
2547 enum machine_mode mode
)
2549 int regno
= REGNO (operand
);
2554 return (regno
< FIRST_PSEUDO_REGISTER
2555 && in_hard_reg_set_p (reg_class_contents
[(int) cl
],
2556 mode
, regno
+ offset
));
2559 /* Split single instruction. Helper function for split_all_insns and
2560 split_all_insns_noflow. Return last insn in the sequence if successful,
2561 or NULL if unsuccessful. */
2564 split_insn (rtx insn
)
2566 /* Split insns here to get max fine-grain parallelism. */
2567 rtx first
= PREV_INSN (insn
);
2568 rtx last
= try_split (PATTERN (insn
), insn
, 1);
2573 /* try_split returns the NOTE that INSN became. */
2574 SET_INSN_DELETED (insn
);
2576 /* ??? Coddle to md files that generate subregs in post-reload
2577 splitters instead of computing the proper hard register. */
2578 if (reload_completed
&& first
!= last
)
2580 first
= NEXT_INSN (first
);
2584 cleanup_subreg_operands (first
);
2587 first
= NEXT_INSN (first
);
2593 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2596 split_all_insns (void)
2602 blocks
= sbitmap_alloc (last_basic_block
);
2603 sbitmap_zero (blocks
);
2606 FOR_EACH_BB_REVERSE (bb
)
2609 bool finish
= false;
2611 for (insn
= BB_HEAD (bb
); !finish
; insn
= next
)
2613 /* Can't use `next_real_insn' because that might go across
2614 CODE_LABELS and short-out basic blocks. */
2615 next
= NEXT_INSN (insn
);
2616 finish
= (insn
== BB_END (bb
));
2619 rtx set
= single_set (insn
);
2621 /* Don't split no-op move insns. These should silently
2622 disappear later in final. Splitting such insns would
2623 break the code that handles REG_NO_CONFLICT blocks. */
2624 if (set
&& set_noop_p (set
))
2626 /* Nops get in the way while scheduling, so delete them
2627 now if register allocation has already been done. It
2628 is too risky to try to do this before register
2629 allocation, and there are unlikely to be very many
2630 nops then anyways. */
2631 if (reload_completed
)
2632 delete_insn_and_edges (insn
);
2636 rtx last
= split_insn (insn
);
2639 /* The split sequence may include barrier, but the
2640 BB boundary we are interested in will be set to
2643 while (BARRIER_P (last
))
2644 last
= PREV_INSN (last
);
2645 SET_BIT (blocks
, bb
->index
);
2654 find_many_sub_basic_blocks (blocks
);
2656 #ifdef ENABLE_CHECKING
2657 verify_flow_info ();
2660 sbitmap_free (blocks
);
2663 /* Same as split_all_insns, but do not expect CFG to be available.
2664 Used by machine dependent reorg passes. */
2667 split_all_insns_noflow (void)
2671 for (insn
= get_insns (); insn
; insn
= next
)
2673 next
= NEXT_INSN (insn
);
2676 /* Don't split no-op move insns. These should silently
2677 disappear later in final. Splitting such insns would
2678 break the code that handles REG_NO_CONFLICT blocks. */
2679 rtx set
= single_set (insn
);
2680 if (set
&& set_noop_p (set
))
2682 /* Nops get in the way while scheduling, so delete them
2683 now if register allocation has already been done. It
2684 is too risky to try to do this before register
2685 allocation, and there are unlikely to be very many
2688 ??? Should we use delete_insn when the CFG isn't valid? */
2689 if (reload_completed
)
2690 delete_insn_and_edges (insn
);
2699 #ifdef HAVE_peephole2
2700 struct peep2_insn_data
2706 static struct peep2_insn_data peep2_insn_data
[MAX_INSNS_PER_PEEP2
+ 1];
2707 static int peep2_current
;
2708 /* The number of instructions available to match a peep2. */
2709 int peep2_current_count
;
2711 /* A non-insn marker indicating the last insn of the block.
2712 The live_before regset for this element is correct, indicating
2713 DF_LIVE_OUT for the block. */
2714 #define PEEP2_EOB pc_rtx
2716 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2717 does not exist. Used by the recognizer to find the next insn to match
2718 in a multi-insn pattern. */
2721 peep2_next_insn (int n
)
2723 gcc_assert (n
<= peep2_current_count
);
2726 if (n
>= MAX_INSNS_PER_PEEP2
+ 1)
2727 n
-= MAX_INSNS_PER_PEEP2
+ 1;
2729 return peep2_insn_data
[n
].insn
;
2732 /* Return true if REGNO is dead before the Nth non-note insn
2736 peep2_regno_dead_p (int ofs
, int regno
)
2738 gcc_assert (ofs
< MAX_INSNS_PER_PEEP2
+ 1);
2740 ofs
+= peep2_current
;
2741 if (ofs
>= MAX_INSNS_PER_PEEP2
+ 1)
2742 ofs
-= MAX_INSNS_PER_PEEP2
+ 1;
2744 gcc_assert (peep2_insn_data
[ofs
].insn
!= NULL_RTX
);
2746 return ! REGNO_REG_SET_P (peep2_insn_data
[ofs
].live_before
, regno
);
2749 /* Similarly for a REG. */
2752 peep2_reg_dead_p (int ofs
, rtx reg
)
2756 gcc_assert (ofs
< MAX_INSNS_PER_PEEP2
+ 1);
2758 ofs
+= peep2_current
;
2759 if (ofs
>= MAX_INSNS_PER_PEEP2
+ 1)
2760 ofs
-= MAX_INSNS_PER_PEEP2
+ 1;
2762 gcc_assert (peep2_insn_data
[ofs
].insn
!= NULL_RTX
);
2764 regno
= REGNO (reg
);
2765 n
= hard_regno_nregs
[regno
][GET_MODE (reg
)];
2767 if (REGNO_REG_SET_P (peep2_insn_data
[ofs
].live_before
, regno
+ n
))
2772 /* Try to find a hard register of mode MODE, matching the register class in
2773 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
2774 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
2775 in which case the only condition is that the register must be available
2776 before CURRENT_INSN.
2777 Registers that already have bits set in REG_SET will not be considered.
2779 If an appropriate register is available, it will be returned and the
2780 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2784 peep2_find_free_register (int from
, int to
, const char *class_str
,
2785 enum machine_mode mode
, HARD_REG_SET
*reg_set
)
2787 static int search_ofs
;
2792 gcc_assert (from
< MAX_INSNS_PER_PEEP2
+ 1);
2793 gcc_assert (to
< MAX_INSNS_PER_PEEP2
+ 1);
2795 from
+= peep2_current
;
2796 if (from
>= MAX_INSNS_PER_PEEP2
+ 1)
2797 from
-= MAX_INSNS_PER_PEEP2
+ 1;
2798 to
+= peep2_current
;
2799 if (to
>= MAX_INSNS_PER_PEEP2
+ 1)
2800 to
-= MAX_INSNS_PER_PEEP2
+ 1;
2802 gcc_assert (peep2_insn_data
[from
].insn
!= NULL_RTX
);
2803 REG_SET_TO_HARD_REG_SET (live
, peep2_insn_data
[from
].live_before
);
2807 HARD_REG_SET this_live
;
2809 if (++from
>= MAX_INSNS_PER_PEEP2
+ 1)
2811 gcc_assert (peep2_insn_data
[from
].insn
!= NULL_RTX
);
2812 REG_SET_TO_HARD_REG_SET (this_live
, peep2_insn_data
[from
].live_before
);
2813 IOR_HARD_REG_SET (live
, this_live
);
2816 cl
= (class_str
[0] == 'r' ? GENERAL_REGS
2817 : REG_CLASS_FROM_CONSTRAINT (class_str
[0], class_str
));
2819 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2821 int raw_regno
, regno
, success
, j
;
2823 /* Distribute the free registers as much as possible. */
2824 raw_regno
= search_ofs
+ i
;
2825 if (raw_regno
>= FIRST_PSEUDO_REGISTER
)
2826 raw_regno
-= FIRST_PSEUDO_REGISTER
;
2827 #ifdef REG_ALLOC_ORDER
2828 regno
= reg_alloc_order
[raw_regno
];
2833 /* Don't allocate fixed registers. */
2834 if (fixed_regs
[regno
])
2836 /* Make sure the register is of the right class. */
2837 if (! TEST_HARD_REG_BIT (reg_class_contents
[cl
], regno
))
2839 /* And can support the mode we need. */
2840 if (! HARD_REGNO_MODE_OK (regno
, mode
))
2842 /* And that we don't create an extra save/restore. */
2843 if (! call_used_regs
[regno
] && ! df_regs_ever_live_p (regno
))
2845 /* And we don't clobber traceback for noreturn functions. */
2846 if ((regno
== FRAME_POINTER_REGNUM
|| regno
== HARD_FRAME_POINTER_REGNUM
)
2847 && (! reload_completed
|| frame_pointer_needed
))
2851 for (j
= hard_regno_nregs
[regno
][mode
] - 1; j
>= 0; j
--)
2853 if (TEST_HARD_REG_BIT (*reg_set
, regno
+ j
)
2854 || TEST_HARD_REG_BIT (live
, regno
+ j
))
2862 add_to_hard_reg_set (reg_set
, mode
, regno
);
2864 /* Start the next search with the next register. */
2865 if (++raw_regno
>= FIRST_PSEUDO_REGISTER
)
2867 search_ofs
= raw_regno
;
2869 return gen_rtx_REG (mode
, regno
);
2877 /* Perform the peephole2 optimization pass. */
2880 peephole2_optimize (void)
2886 bool do_cleanup_cfg
= false;
2887 bool do_rebuild_jump_labels
= false;
2889 df_set_flags (DF_LR_RUN_DCE
);
2892 /* Initialize the regsets we're going to use. */
2893 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
2894 peep2_insn_data
[i
].live_before
= BITMAP_ALLOC (®_obstack
);
2895 live
= BITMAP_ALLOC (®_obstack
);
2897 FOR_EACH_BB_REVERSE (bb
)
2899 /* Indicate that all slots except the last holds invalid data. */
2900 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
; ++i
)
2901 peep2_insn_data
[i
].insn
= NULL_RTX
;
2902 peep2_current_count
= 0;
2904 /* Indicate that the last slot contains live_after data. */
2905 peep2_insn_data
[MAX_INSNS_PER_PEEP2
].insn
= PEEP2_EOB
;
2906 peep2_current
= MAX_INSNS_PER_PEEP2
;
2908 /* Start up propagation. */
2909 bitmap_copy (live
, DF_LR_OUT (bb
));
2910 df_simulate_artificial_refs_at_end (bb
, live
);
2911 bitmap_copy (peep2_insn_data
[MAX_INSNS_PER_PEEP2
].live_before
, live
);
2913 for (insn
= BB_END (bb
); ; insn
= prev
)
2915 prev
= PREV_INSN (insn
);
2918 rtx
try, before_try
, x
;
2921 bool was_call
= false;
2923 /* Record this insn. */
2924 if (--peep2_current
< 0)
2925 peep2_current
= MAX_INSNS_PER_PEEP2
;
2926 if (peep2_current_count
< MAX_INSNS_PER_PEEP2
2927 && peep2_insn_data
[peep2_current
].insn
== NULL_RTX
)
2928 peep2_current_count
++;
2929 peep2_insn_data
[peep2_current
].insn
= insn
;
2930 df_simulate_one_insn_backwards (bb
, insn
, live
);
2931 COPY_REG_SET (peep2_insn_data
[peep2_current
].live_before
, live
);
2933 if (RTX_FRAME_RELATED_P (insn
))
2935 /* If an insn has RTX_FRAME_RELATED_P set, peephole
2936 substitution would lose the
2937 REG_FRAME_RELATED_EXPR that is attached. */
2938 peep2_current_count
= 0;
2942 /* Match the peephole. */
2943 try = peephole2_insns (PATTERN (insn
), insn
, &match_len
);
2947 /* If we are splitting a CALL_INSN, look for the CALL_INSN
2948 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
2949 cfg-related call notes. */
2950 for (i
= 0; i
<= match_len
; ++i
)
2953 rtx old_insn
, new_insn
, note
;
2955 j
= i
+ peep2_current
;
2956 if (j
>= MAX_INSNS_PER_PEEP2
+ 1)
2957 j
-= MAX_INSNS_PER_PEEP2
+ 1;
2958 old_insn
= peep2_insn_data
[j
].insn
;
2959 if (!CALL_P (old_insn
))
2964 while (new_insn
!= NULL_RTX
)
2966 if (CALL_P (new_insn
))
2968 new_insn
= NEXT_INSN (new_insn
);
2971 gcc_assert (new_insn
!= NULL_RTX
);
2973 CALL_INSN_FUNCTION_USAGE (new_insn
)
2974 = CALL_INSN_FUNCTION_USAGE (old_insn
);
2976 for (note
= REG_NOTES (old_insn
);
2978 note
= XEXP (note
, 1))
2979 switch (REG_NOTE_KIND (note
))
2983 REG_NOTES (new_insn
)
2984 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note
),
2986 REG_NOTES (new_insn
));
2988 /* Discard all other reg notes. */
2992 /* Croak if there is another call in the sequence. */
2993 while (++i
<= match_len
)
2995 j
= i
+ peep2_current
;
2996 if (j
>= MAX_INSNS_PER_PEEP2
+ 1)
2997 j
-= MAX_INSNS_PER_PEEP2
+ 1;
2998 old_insn
= peep2_insn_data
[j
].insn
;
2999 gcc_assert (!CALL_P (old_insn
));
3004 i
= match_len
+ peep2_current
;
3005 if (i
>= MAX_INSNS_PER_PEEP2
+ 1)
3006 i
-= MAX_INSNS_PER_PEEP2
+ 1;
3008 note
= find_reg_note (peep2_insn_data
[i
].insn
,
3009 REG_EH_REGION
, NULL_RTX
);
3011 /* Replace the old sequence with the new. */
3012 try = emit_insn_after_setloc (try, peep2_insn_data
[i
].insn
,
3013 INSN_LOCATOR (peep2_insn_data
[i
].insn
));
3014 before_try
= PREV_INSN (insn
);
3015 delete_insn_chain (insn
, peep2_insn_data
[i
].insn
, false);
3017 /* Re-insert the EH_REGION notes. */
3018 if (note
|| (was_call
&& nonlocal_goto_handler_labels
))
3023 FOR_EACH_EDGE (eh_edge
, ei
, bb
->succs
)
3024 if (eh_edge
->flags
& (EDGE_EH
| EDGE_ABNORMAL_CALL
))
3027 for (x
= try ; x
!= before_try
; x
= PREV_INSN (x
))
3029 || (flag_non_call_exceptions
3030 && may_trap_p (PATTERN (x
))
3031 && !find_reg_note (x
, REG_EH_REGION
, NULL
)))
3035 = gen_rtx_EXPR_LIST (REG_EH_REGION
,
3039 if (x
!= BB_END (bb
) && eh_edge
)
3044 nfte
= split_block (bb
, x
);
3045 flags
= (eh_edge
->flags
3046 & (EDGE_EH
| EDGE_ABNORMAL
));
3048 flags
|= EDGE_ABNORMAL_CALL
;
3049 nehe
= make_edge (nfte
->src
, eh_edge
->dest
,
3052 nehe
->probability
= eh_edge
->probability
;
3054 = REG_BR_PROB_BASE
- nehe
->probability
;
3056 do_cleanup_cfg
|= purge_dead_edges (nfte
->dest
);
3062 /* Converting possibly trapping insn to non-trapping is
3063 possible. Zap dummy outgoing edges. */
3064 do_cleanup_cfg
|= purge_dead_edges (bb
);
3067 #ifdef HAVE_conditional_execution
3068 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
3069 peep2_insn_data
[i
].insn
= NULL_RTX
;
3070 peep2_insn_data
[peep2_current
].insn
= PEEP2_EOB
;
3071 peep2_current_count
= 0;
3073 /* Back up lifetime information past the end of the
3074 newly created sequence. */
3075 if (++i
>= MAX_INSNS_PER_PEEP2
+ 1)
3077 bitmap_copy (live
, peep2_insn_data
[i
].live_before
);
3079 /* Update life information for the new sequence. */
3086 i
= MAX_INSNS_PER_PEEP2
;
3087 if (peep2_current_count
< MAX_INSNS_PER_PEEP2
3088 && peep2_insn_data
[i
].insn
== NULL_RTX
)
3089 peep2_current_count
++;
3090 peep2_insn_data
[i
].insn
= x
;
3092 df_simulate_one_insn_backwards (bb
, x
, live
);
3093 bitmap_copy (peep2_insn_data
[i
].live_before
, live
);
3102 /* If we generated a jump instruction, it won't have
3103 JUMP_LABEL set. Recompute after we're done. */
3104 for (x
= try; x
!= before_try
; x
= PREV_INSN (x
))
3107 do_rebuild_jump_labels
= true;
3113 if (insn
== BB_HEAD (bb
))
3118 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
3119 BITMAP_FREE (peep2_insn_data
[i
].live_before
);
3121 if (do_rebuild_jump_labels
)
3122 rebuild_jump_labels (get_insns ());
3124 #endif /* HAVE_peephole2 */
3126 /* Common predicates for use with define_bypass. */
3128 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3129 data not the address operand(s) of the store. IN_INSN and OUT_INSN
3130 must be either a single_set or a PARALLEL with SETs inside. */
3133 store_data_bypass_p (rtx out_insn
, rtx in_insn
)
3135 rtx out_set
, in_set
;
3136 rtx out_pat
, in_pat
;
3137 rtx out_exp
, in_exp
;
3140 in_set
= single_set (in_insn
);
3143 if (!MEM_P (SET_DEST (in_set
)))
3146 out_set
= single_set (out_insn
);
3149 if (reg_mentioned_p (SET_DEST (out_set
), SET_DEST (in_set
)))
3154 out_pat
= PATTERN (out_insn
);
3156 if (GET_CODE (out_pat
) != PARALLEL
)
3159 for (i
= 0; i
< XVECLEN (out_pat
, 0); i
++)
3161 out_exp
= XVECEXP (out_pat
, 0, i
);
3163 if (GET_CODE (out_exp
) == CLOBBER
)
3166 gcc_assert (GET_CODE (out_exp
) == SET
);
3168 if (reg_mentioned_p (SET_DEST (out_exp
), SET_DEST (in_set
)))
3175 in_pat
= PATTERN (in_insn
);
3176 gcc_assert (GET_CODE (in_pat
) == PARALLEL
);
3178 for (i
= 0; i
< XVECLEN (in_pat
, 0); i
++)
3180 in_exp
= XVECEXP (in_pat
, 0, i
);
3182 if (GET_CODE (in_exp
) == CLOBBER
)
3185 gcc_assert (GET_CODE (in_exp
) == SET
);
3187 if (!MEM_P (SET_DEST (in_exp
)))
3190 out_set
= single_set (out_insn
);
3193 if (reg_mentioned_p (SET_DEST (out_set
), SET_DEST (in_exp
)))
3198 out_pat
= PATTERN (out_insn
);
3199 gcc_assert (GET_CODE (out_pat
) == PARALLEL
);
3201 for (j
= 0; j
< XVECLEN (out_pat
, 0); j
++)
3203 out_exp
= XVECEXP (out_pat
, 0, j
);
3205 if (GET_CODE (out_exp
) == CLOBBER
)
3208 gcc_assert (GET_CODE (out_exp
) == SET
);
3210 if (reg_mentioned_p (SET_DEST (out_exp
), SET_DEST (in_exp
)))
3220 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3221 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3222 or multiple set; IN_INSN should be single_set for truth, but for convenience
3223 of insn categorization may be any JUMP or CALL insn. */
3226 if_test_bypass_p (rtx out_insn
, rtx in_insn
)
3228 rtx out_set
, in_set
;
3230 in_set
= single_set (in_insn
);
3233 gcc_assert (JUMP_P (in_insn
) || CALL_P (in_insn
));
3237 if (GET_CODE (SET_SRC (in_set
)) != IF_THEN_ELSE
)
3239 in_set
= SET_SRC (in_set
);
3241 out_set
= single_set (out_insn
);
3244 if (reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 1))
3245 || reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 2)))
3253 out_pat
= PATTERN (out_insn
);
3254 gcc_assert (GET_CODE (out_pat
) == PARALLEL
);
3256 for (i
= 0; i
< XVECLEN (out_pat
, 0); i
++)
3258 rtx exp
= XVECEXP (out_pat
, 0, i
);
3260 if (GET_CODE (exp
) == CLOBBER
)
3263 gcc_assert (GET_CODE (exp
) == SET
);
3265 if (reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 1))
3266 || reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 2)))
3275 gate_handle_peephole2 (void)
3277 return (optimize
> 0 && flag_peephole2
);
/* Pass entry point: run the peephole2 optimizer when the target has
   define_peephole2 patterns.  */
static unsigned int
rest_of_handle_peephole2 (void)
{
#ifdef HAVE_peephole2
  peephole2_optimize ();
#endif
  return 0;
}
3289 struct tree_opt_pass pass_peephole2
=
3291 "peephole2", /* name */
3292 gate_handle_peephole2
, /* gate */
3293 rest_of_handle_peephole2
, /* execute */
3296 0, /* static_pass_number */
3297 TV_PEEPHOLE2
, /* tv_id */
3298 0, /* properties_required */
3299 0, /* properties_provided */
3300 0, /* properties_destroyed */
3301 0, /* todo_flags_start */
3303 TODO_dump_func
, /* todo_flags_finish */
/* Pass entry point for the first insn-splitting pass.  */
static unsigned int
rest_of_handle_split_all_insns (void)
{
  split_all_insns ();
  return 0;
}
3314 struct tree_opt_pass pass_split_all_insns
=
3316 "split1", /* name */
3318 rest_of_handle_split_all_insns
, /* execute */
3321 0, /* static_pass_number */
3323 0, /* properties_required */
3324 0, /* properties_provided */
3325 0, /* properties_destroyed */
3326 0, /* todo_flags_start */
3327 TODO_dump_func
, /* todo_flags_finish */
3332 rest_of_handle_split_after_reload (void)
3334 /* If optimizing, then go ahead and split insns now. */
3342 struct tree_opt_pass pass_split_after_reload
=
3344 "split2", /* name */
3346 rest_of_handle_split_after_reload
, /* execute */
3349 0, /* static_pass_number */
3351 0, /* properties_required */
3352 0, /* properties_provided */
3353 0, /* properties_destroyed */
3354 0, /* todo_flags_start */
3355 TODO_dump_func
, /* todo_flags_finish */
3360 gate_handle_split_before_regstack (void)
3362 #if defined (HAVE_ATTR_length) && defined (STACK_REGS)
3363 /* If flow2 creates new instructions which need splitting
3364 and scheduling after reload is not done, they might not be
3365 split until final which doesn't allow splitting
3366 if HAVE_ATTR_length. */
3367 # ifdef INSN_SCHEDULING
3368 return (optimize
&& !flag_schedule_insns_after_reload
);
/* Pass entry point for splitting before the regstack pass.  */
static unsigned int
rest_of_handle_split_before_regstack (void)
{
  split_all_insns ();
  return 0;
}
3384 struct tree_opt_pass pass_split_before_regstack
=
3386 "split3", /* name */
3387 gate_handle_split_before_regstack
, /* gate */
3388 rest_of_handle_split_before_regstack
, /* execute */
3391 0, /* static_pass_number */
3393 0, /* properties_required */
3394 0, /* properties_provided */
3395 0, /* properties_destroyed */
3396 0, /* todo_flags_start */
3397 TODO_dump_func
, /* todo_flags_finish */
3402 gate_handle_split_before_sched2 (void)
3404 #ifdef INSN_SCHEDULING
3405 return optimize
> 0 && flag_schedule_insns_after_reload
;
/* Pass entry point for splitting before the second scheduling pass.  */
static unsigned int
rest_of_handle_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  split_all_insns ();
#endif
  return 0;
}
3420 struct tree_opt_pass pass_split_before_sched2
=
3422 "split4", /* name */
3423 gate_handle_split_before_sched2
, /* gate */
3424 rest_of_handle_split_before_sched2
, /* execute */
3427 0, /* static_pass_number */
3429 0, /* properties_required */
3430 0, /* properties_provided */
3431 0, /* properties_destroyed */
3432 0, /* todo_flags_start */
3434 TODO_dump_func
, /* todo_flags_finish */
3438 /* The placement of the splitting that we do for shorten_branches
3439 depends on whether regstack is used by the target or not. */
3441 gate_do_final_split (void)
3443 #if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
3450 struct tree_opt_pass pass_split_for_shorten_branches
=
3452 "split5", /* name */
3453 gate_do_final_split
, /* gate */
3454 split_all_insns_noflow
, /* execute */
3457 0, /* static_pass_number */
3459 0, /* properties_required */
3460 0, /* properties_provided */
3461 0, /* properties_destroyed */
3462 0, /* todo_flags_start */
3463 TODO_dump_func
, /* todo_flags_finish */