/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "coretypes.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "addresses.h"
#include "cfgcleanup.h"
#include "tree-pass.h"
#include "function-abi.h"

#ifndef STACK_POP_CODE
#if STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx_insn *, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx_insn *split_insn (rtx_insn *);

struct target_recog default_target_recog;

struct target_recog *this_target_recog = &default_target_recog;

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data_d recog_data;

/* Contains a vector of operand_alternative structures, such that
   operand OP of alternative A is at index A * n_operands + OP.
   Set up by preprocess_constraints.  */
const operand_alternative *recog_op_alt;

/* Used to provide recog_op_alt for asms.  */
static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
				      * MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}

/* Return true if labels in asm operands BODY are LABEL_REFs.  */

static bool
asm_labels_ok (rtx body)
{
  rtx asmop;
  int i;

  asmop = extract_asm_operands (body);
  if (asmop == NULL_RTX)
    return true;

  for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
    if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
      return false;

  return true;
}

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return 0;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      rtx_insn *insn = make_insn_raw (x);
      extract_insn (insn);
      constrain_operands (1, get_enabled_alternatives (insn));
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
	c++;
      if (! asm_operand_ok (operands[i], c, constraints))
	return 0;
    }

  return 1;
}

/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  bool unshare;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */
static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group,
		   bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
	/* This value allows for repeated substitutions inside complex
	   indexed addresses, or changes in up to 5 insns.  */
	changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
	changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   UNSHARE to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   UNSHARE to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}

/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */

bool
canonicalize_change_group (rtx_insn *insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
	 Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}

/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   If IN_GROUP is true clobbers which have to be added in order to
   match the instructions will be added to the current change group.
   Otherwise the changes will take effect immediately.  */

int
insn_invalid_p (rtx_insn *insn, bool in_group)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
		     (GET_CODE (pat) == SET
		      && ! reload_completed
		      && ! reload_in_progress)
		     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
	return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      if (in_group)
	validate_change (insn, &PATTERN (insn), newpat, 1);
      else
	PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1, get_preferred_alternatives (insn)))
	return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
	 already tested, ignore it.  */
      if (object == 0 || object == last_validated)
	continue;

      if (MEM_P (object))
	{
	  if (! memory_address_addr_space_p (GET_MODE (object),
					     XEXP (object, 0),
					     MEM_ADDR_SPACE (object)))
	    break;
	}
      else if (/* changes[i].old might be zero, e.g. when putting a
		  REG_FRAME_RELATED_EXPR into a previously empty list.  */
	       changes[i].old
	       && REG_P (changes[i].old)
	       && asm_noperands (PATTERN (object)) > 0
	       && register_asm_p (changes[i].old))
	{
	  /* Don't allow changes of hard register operands to inline
	     assemblies if they have been defined as register asm ("x").  */
	  break;
	}
      else if (DEBUG_INSN_P (object))
	continue;
      else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  newpat
		    = gen_rtx_PARALLEL (VOIDmode,
					rtvec_alloc (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	      continue;
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
		   || GET_CODE (pat) == VAR_LOCATION)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
      last_validated = object;
    }

  return (i == num_changes);
}

/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
	*changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to same instruction
	 are made.  */
      if (object)
	{
	  if (object != last_object && last_object && INSN_P (last_object))
	    df_insn_rescan (as_a <rtx_insn *> (last_object));
	  last_object = object;
	}
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (as_a <rtx_insn *> (last_object));
  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
	INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
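
/* A minimal usage sketch of the change-group machinery above.  This is
   illustrative only; INSN, LOC1/LOC2 and NEW_X1/NEW_X2 stand for
   hypothetical values a pass might have:

       validate_change (insn, loc1, new_x1, 1);
       validate_change (insn, loc2, new_x2, 1);
       if (!apply_change_group ())
	 ;  (both changes were rolled back; INSN is unmodified)

   apply_change_group either confirms every queued change (rescanning the
   affected insns for dataflow) or undoes all of them via cancel_changes,
   so callers never observe a half-applied group.  */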
/* Reduce conditional compilation elsewhere.  */
/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtl.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx_insn *object,
			  machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx = NULL_RTX;
  scalar_int_mode is_mode;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
			       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
					       : swap_condition (code),
					       GET_MODE (x), XEXP (x, 1),
					       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  /* Canonicalize arithmetics with all constant operands.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONSTANT_P (XEXP (x, 0)))
	new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
					    op0_mode);
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
	new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
					     XEXP (x, 1));
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
	new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
						 XEXP (x, 0), XEXP (x, 1));
      break;
    default:
      break;
    }
  if (new_rtx)
    {
      validate_change (object, loc, new_rtx, 1);
      return;
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
	 simplify_gen_binary to try to simplify it.
	 ??? We may want later to remove this, once simplification is
	 separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0),
			  simplify_gen_unary (NEG,
					      GET_MODE (x), XEXP (x, 1),
					      GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
	{
	  new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
					op0_mode);
	  /* If any of the above failed, substitute in something that
	     we know won't be recognized.  */
	  if (!new_rtx)
	    new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	  validate_change (object, loc, new_rtx, 1);
	}
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
				 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
	new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
	validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
	 to be the mode required for memory in extract operations (this isn't
	 likely to be an insertion operation; if it was, nothing bad will
	 happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
	  && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &is_mode)
	  && CONST_INT_P (XEXP (x, 1))
	  && CONST_INT_P (XEXP (x, 2))
	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
					MEM_ADDR_SPACE (XEXP (x, 0)))
	  && !MEM_VOLATILE_P (XEXP (x, 0)))
	{
	  int pos = INTVAL (XEXP (x, 2));
	  machine_mode new_mode = is_mode;
	  if (GET_CODE (x) == ZERO_EXTRACT && targetm.have_extzv ())
	    new_mode = insn_data[targetm.code_for_extzv].operand[1].mode;
	  else if (GET_CODE (x) == SIGN_EXTRACT && targetm.have_extv ())
	    new_mode = insn_data[targetm.code_for_extv].operand[1].mode;
	  scalar_int_mode wanted_mode = (new_mode == VOIDmode
					 ? word_mode
					 : as_a <scalar_int_mode> (new_mode));

	  /* If we have a narrower mode, we can do something.  */
	  if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
	    {
	      int offset = pos / BITS_PER_UNIT;
	      rtx newmem;

	      /* If the bytes and bits are counted differently, we
		 must adjust the offset.  */
	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		offset =
		  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
		   offset);

	      gcc_assert (GET_MODE_PRECISION (wanted_mode)
			  == GET_MODE_BITSIZE (wanted_mode));
	      pos %= GET_MODE_BITSIZE (wanted_mode);

	      newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
	      validate_change (object, &XEXP (x, 0), newmem, 1);
	    }
	}

      break;

    default:
      break;
    }
}

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx_insn *object,
			bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
	  && GET_MODE (x) == GET_MODE (from)
	  && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
	  && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
	{
	  if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
	      && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
	    {
	      /* Verify that operands are really shared.  */
	      gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
			  == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
							      (x, 0, j))));
	      validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
				      from, to, object, simplify);
	    }
	  else
	    validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
				    simplify);
	}
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
	else if (fmt[i] == 'E')
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
				    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* ??? The regmove is no more, so is this aberration still necessary?  */
  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx_insn *insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx_insn *insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}
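
/* Usage sketch for the validate_replace_* entry points (illustrative
   only; FROM_REG and TO_REG are hypothetical registers a pass might
   want to exchange):

       if (validate_replace_rtx (from_reg, to_reg, insn))
	 ;  (INSN was rewritten and still matches some pattern)
       else
	 ;  (every substitution was undone)

   The substitution and the re-recognition form a single transaction
   built on the change-group machinery above.  */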
/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx_insn *insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */
int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
				      rtx_insn *insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx_insn *insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
	|| REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx_insn *insn;		/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx_insn *insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}

/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx_insn *insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
	validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
	validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
	rtx s = XVECEXP (pat, 0, i);

	if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
	  {
	    newpat = simplify_rtx (SET_SRC (s));
	    if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
	      validate_change (insn, &SET_SRC (s), newpat, 1);
	    newpat = simplify_rtx (SET_DEST (s));
	    if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
	      validate_change (insn, &SET_DEST (s), newpat, 1);
	  }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}

/* Check whether INSN matches a specific alternative of an .md pattern.  */

bool
valid_insn_p (rtx_insn *insn)
{
  recog_memoized (insn);
  if (INSN_CODE (insn) < 0)
    return false;
  extract_insn (insn);
  /* We don't know whether the insn will be in code that is optimized
     for size or speed, so consider all enabled alternatives.  */
  if (!constrain_operands (1, get_enabled_alternatives (insn)))
    return false;
  return true;
}
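
/* Usage sketch (illustrative): a pass that synthesizes a candidate
   pattern PAT can test it before emitting anything:

       rtx_insn *candidate = make_insn_raw (pat);
       if (valid_insn_p (candidate))
	 ;  (some enabled alternative accepts the operands)  */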
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it might have.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
general_operand (rtx op, machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	     || mode == VOIDmode)
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	    && targetm.legitimate_constant_p (mode == VOIDmode
					      ? GET_MODE (op)
					      : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 reference to be explicit, so outlaw paradoxical SUBREGs.
	 However, we must allow them after reload so that they can
	 get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
	  && paradoxical_subreg_p (op))
	return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
	 may result in incorrect reference.  We should simplify all valid
	 subregs of MEM anyway.  But allow this after reload because we
	 might be called from cleanup_subreg_operands.

	 ??? This is a kludge.  */
      if (!reload_completed
	  && maybe_ne (SUBREG_BYTE (op), 0)
	  && MEM_P (sub))
	return 0;

      if (REG_P (sub)
	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
	  && !REG_CAN_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
	  /* LRA can generate some invalid SUBREGS just for matched
	     operand reload presentation.  LRA needs to treat them as
	     valid.  */
	  && ! LRA_SUBREG_P (op))
	return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
	  /* LRA can use subreg to store a floating point value in an
	     integer mode.  Although the floating point and the
	     integer modes need the same number of hard registers, the
	     size of floating point mode can be less than the integer
	     mode.  */
	  && ! lra_in_progress
	  && paradoxical_subreg_p (op))
	return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;

      /* Use the mem's mode, since it will be reloaded thus.  LRA can
	 generate move insn with invalid addresses which is made valid
	 and efficiently calculated by LRA through further numerous
	 transformations.  */
      if (lra_in_progress
	  || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
	return 1;
    }

  return 0;
}

/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, machine_mode mode)
{
  /* Wrong mode for an address expr.  */
  if (GET_MODE (op) != VOIDmode
      && ! SCALAR_INT_MODE_P (GET_MODE (op)))
    return false;

  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
register_operand (rtx op, machine_mode mode)
{
  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
	return 0;
    }
  else if (!REG_P (op))
    return 0;
  return general_operand (op, mode);
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
	  || (REG_P (op)
	      && (lra_in_progress
		  || (REGNO (op) < FIRST_PSEUDO_REGISTER
		      && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	  && targetm.legitimate_constant_p (mode == VOIDmode
					    ? GET_MODE (op)
					    : mode, op));
}

/* Returns 1 if OP is an operand that is a CONST_INT of mode MODE.  */

int
const_int_operand (rtx op, machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
   of mode MODE.  */

int
const_scalar_int_operand (rtx op, machine_mode mode)
{
  if (!CONST_SCALAR_INT_P (op))
    return 0;

  if (CONST_INT_P (op))
    return const_int_operand (op, mode);

  if (mode != VOIDmode)
    {
      scalar_int_mode int_mode = as_a <scalar_int_mode> (mode);
      int prec = GET_MODE_PRECISION (int_mode);
      int bitsize = GET_MODE_BITSIZE (int_mode);

      if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
	return 0;

      if (prec == bitsize)
	return 1;
      else
	{
	  /* Multiword partial int.  */
	  HOST_WIDE_INT x
	    = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
	  return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
	}
    }
  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

int
const_double_operand (rtx op, machine_mode mode)
{
  return (GET_CODE (op) == CONST_DOUBLE)
	  && (GET_MODE (op) == mode || mode == VOIDmode);
}
#else
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

int
const_double_operand (rtx op, machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
	  && (mode == VOIDmode || GET_MODE (op) == mode
	      || GET_MODE (op) == VOIDmode));
}
#endif

/* Return 1 if OP is a general operand that is not an immediate
   operand of mode MODE.  */

int
nonimmediate_operand (rtx op, machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);
  return register_operand (op, mode);
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  poly_int64 rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (MACRO_INT (rounded_size));
#endif

  op = XEXP (op, 0);

  if (known_eq (rounded_size, GET_MODE_SIZE (mode)))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
	return 0;
    }
  else
    {
      poly_int64 offset;
      if (GET_CODE (op) != PRE_MODIFY
	  || GET_CODE (XEXP (op, 1)) != PLUS
	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
	  || !poly_int_rtx_p (XEXP (XEXP (op, 1), 1), &offset)
	  || (STACK_GROWS_DOWNWARD
	      ? maybe_ne (offset, -rounded_size)
	      : maybe_ne (offset, rounded_size)))
	return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
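
/* For example, on a target where STACK_GROWS_DOWNWARD and
   STACK_PUSH_CODE is PRE_DEC, an unrounded SImode push is simply

       (mem:SI (pre_dec (reg sp)))

   while a push whose mode size rounds up to 8 bytes must take the form

       (mem:SI (pre_modify (reg sp) (plus (reg sp) (const_int -8))))

   (Illustrative RTL; the exact codes depend on the target's
   STACK_PUSH_CODE and PUSH_ROUNDING definitions.)  */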
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
			     rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */
      poly_int64 offset;
      rtx addr = strip_offset (XEXP (SUBREG_REG (op), 0), &offset);
      return (known_eq (offset + SUBREG_BYTE (op), 0)
	      && general_operand (addr, Pmode));
    }

  return (MEM_P (op)
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
	  && COMPARISON_P (op));
}

/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
	return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
	return tmp;
      if (GET_CODE (tmp) == SET)
	{
	  tmp = SET_SRC (tmp);
	  if (GET_CODE (tmp) == ASM_OPERANDS)
	    return tmp;
	}
      break;

    default:
      break;
    }
  return NULL;
}

/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
   return 0.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int i, n_sets = 0;

  if (asm_op == NULL)
    {
      if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) >= 2
	  && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
	{
	  /* body is [(asm_input ...) (clobber (reg ...))...].  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;
	  return 0;
	}
      return -1;
    }

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is
	     [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
	  /* Count backwards through CLOBBERs to determine number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
		 then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (asm_op))
		return -1;
	    }
	}
      else
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...) (clobber (reg ...))...].  */
	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;
	}
    }

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
	  + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Write the location info into LOC.
   Return the assembler-template.
   If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
   return the basic assembly string.

   If LOC, MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
		     const char **constraints, machine_mode *modes,
		     location_t *loc)
{
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
	int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

	asmop = XVECEXP (body, 0, 0);
	if (GET_CODE (asmop) == SET)
	  {
	    asmop = SET_SRC (asmop);

	    /* At least one output, plus some CLOBBERs.  The outputs are in
	       the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
	    for (i = 0; i < nparallel; i++)
	      {
		if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
		  break;		/* Past last SET */
		gcc_assert (GET_CODE (XVECEXP (body, 0, i)) == SET);
		if (operands)
		  operands[i] = SET_DEST (XVECEXP (body, 0, i));
		if (operand_locs)
		  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
		if (constraints)
		  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
		if (modes)
		  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	      }
	    nbase = i;
	  }
	else if (GET_CODE (asmop) == ASM_INPUT)
	  {
	    if (loc)
	      *loc = ASM_INPUT_SOURCE_LOCATION (asmop);
	    return XSTR (asmop, 0);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
	operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
	operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
	constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
	modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
	operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
	operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
	constraints[nbase + i] = "";
      if (modes)
	modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
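
/* Usage sketch (illustrative; NOPERANDS would come from asm_noperands):

       rtx *operands = XALLOCAVEC (rtx, noperands);
       const char **constraints = XALLOCAVEC (const char *, noperands);
       const char *templ = decode_asm_operands (body, operands, NULL,
						constraints, NULL, NULL);

   check_asm_operands above uses exactly this pattern, passing NULL for
   every vector it does not need.  */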
/* Parse inline assembly string STRING and determine which operands are
   referenced by % markers.  For the first NOPERANDS operands, set USED[I]
   to true if operand I is referenced.

   This is intended to distinguish barrier-like asms such as:

      asm ("" : "=m" (...));

   from real references such as:

      asm ("sw\t$0, %0" : "=m" (...));  */

void
get_referenced_operands (const char *string, bool *used,
			 unsigned int noperands)
{
  memset (used, 0, sizeof (bool) * noperands);
  const char *p = string;
  while (*p)
    switch (*p)
      {
      case '%':
	p += 1;
	/* A letter followed by a digit indicates an operand number.  */
	if (ISALPHA (p[0]) && ISDIGIT (p[1]))
	  p += 1;
	else if (ISDIGIT (p[0]))
	  {
	    char *endptr;
	    unsigned long opnum = strtoul (p, &endptr, 10);
	    if (endptr != p && opnum < noperands)
	      used[opnum] = true;
	    p = endptr;
	  }
	else
	  p += 1;
	break;

      default:
	p++;
	break;
      }
}

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;
  bool incdec_ok = false;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')
    result = 1;

  while (*constraint)
    {
      enum constraint_num cn;
      char c = *constraint;
      int len;
      switch (c)
	{
	case ',':
	  constraint++;
	  continue;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* If caller provided constraints pointer, look up
	     the matching constraint.  Otherwise, our caller should have
	     given us the proper matching constraint, but we can't
	     actually fail the check if they didn't.  Indicate that
	     results are inconclusive.  */
	  if (constraints)
	    {
	      char *end;
	      unsigned long match;

	      match = strtoul (constraint, &end, 10);
	      if (!result)
		result = asm_operand_ok (op, constraints[match], NULL);
	      constraint = (const char *) end;
	    }
	  else
	    {
	      do
		constraint++;
	      while (ISDIGIT (*constraint));
	      if (! result)
		result = -1;
	    }
	  continue;

	  /* The rest of the compiler assumes that reloading the address
	     of a MEM into a register will make it fit an 'o' constraint.
	     That is, if it sees a MEM operand for an 'o' constraint,
	     it assumes that (mem (base-reg)) will fit.

	     That assumption fails on targets that don't have offsettable
	     addresses at all.  We therefore need to treat 'o' asm
	     constraints as a special case and only accept operands that
	     are already offsettable, thus proving that at least one
	     offsettable address exists.  */
	case 'o': /* offsettable */
	  if (offsettable_nonstrict_memref_p (op))
	    result = 1;
	  break;

	case 'g':
	  if (general_operand (op, VOIDmode))
	    result = 1;
	  break;

	case '<':
	case '>':
	  /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
	     to exist, excepting those that expand_call created.  Further,
	     on some machines which do not have generalized auto inc/dec,
	     an inc/dec is not a memory_operand.

	     Match any memory and hope things are resolved after reload.  */
	  incdec_ok = true;
	  /* FALLTHRU */
	default:
	  cn = lookup_constraint (constraint);
	  rtx mem = NULL;
	  switch (get_constraint_type (cn))
	    {
	    case CT_REGISTER:
	      if (!result
		  && reg_class_for_constraint (cn) != NO_REGS
		  && GET_MODE (op) != BLKmode
		  && register_operand (op, VOIDmode))
		result = 1;
	      break;

	    case CT_CONST_INT:
	      if (!result
		  && CONST_INT_P (op)
		  && insn_const_int_ok_for_constraint (INTVAL (op), cn))
		result = 1;
	      break;

	    case CT_MEMORY:
	    case CT_SPECIAL_MEMORY:
	      /* Every memory operand can be reloaded to fit.  */
	      mem = extract_mem_from_operand (op);
	      result = result || memory_operand (mem, VOIDmode);
	      break;

	    case CT_ADDRESS:
	      /* Every address operand can be reloaded to fit.  */
	      result = result || address_operand (op, VOIDmode);
	      break;

	    case CT_FIXED_FORM:
	      result = result || constraint_satisfied_p (op, cn);
	      break;
	    }
	  break;
	}
      len = CONSTRAINT_LEN (c, constraint);
      do
	constraint++;
      while (--len && *constraint && *constraint != ',');
      if (len)
	return 0;
    }

  /* For operands without < or > constraints reject side-effects.  */
  if (AUTO_INC_DEC && !incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
      {
      case PRE_INC:
      case POST_INC:
      case PRE_DEC:
      case POST_DEC:
      case PRE_MODIFY:
      case POST_MODIFY:
	return 0;
      default:
	break;
      }

  return result;
}

/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

static rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
	return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
	return tem;
    }

  return 0;
}

/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
	  && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
					       MEM_ADDR_SPACE (op)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
	  && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
					       MEM_ADDR_SPACE (op)));
}

/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid for address space AS after the addition of
   a positive integer less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
				  addr_space_t as)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (machine_mode, rtx, addr_space_t) =
    (strictp ? strict_memory_address_addr_space_p
     : memory_address_addr_space_p);
  poly_int64 mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y, as))
    return 0;

  machine_mode address_mode = GET_MODE (y);
  if (address_mode == VOIDmode)
    address_mode = targetm.addr_space.address_mode (as);
#ifdef POINTERS_EXTEND_UNSIGNED
  machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
#endif

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (known_eq (mode_sz, 0))
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y, as);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && known_le (mode_sz, GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT))
    z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
			plus_constant (address_mode, XEXP (y, 1),
				       mode_sz - 1));
#ifdef POINTERS_EXTEND_UNSIGNED
  /* Likewise for a ZERO_EXTEND from pointer_mode.  */
  else if (POINTERS_EXTEND_UNSIGNED > 0
	   && GET_CODE (y) == ZERO_EXTEND
	   && GET_MODE (XEXP (y, 0)) == pointer_mode)
    z = gen_rtx_ZERO_EXTEND (address_mode,
			     plus_constant (pointer_mode, XEXP (y, 0),
					    mode_sz - 1));
#endif
  else
    z = plus_constant (address_mode, y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z, as);
}
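
/* Worked example (illustrative): for a 4-byte SImode reference to
   (plus (reg R) (const_int 20)), the constant term is temporarily
   rewritten to 20 + (4 - 1) = 23 and the address (plus (reg R)
   (const_int 23)) is tested with QImode.  If that address is still
   legitimate, every byte of the SImode access is reachable, so the
   original address is offsettable.  */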
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   ADDRSPACE is the address space associated with the address.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

bool
mode_dependent_address_p (rtx addr, addr_space_t addrspace)
{
  /* Auto-increment addressing with anything other than post_modify
     or pre_modify always introduces a mode dependency.  Catch such
     cases now instead of deferring to the target.  */
  if (GET_CODE (addr) == PRE_INC
      || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_DEC
      || GET_CODE (addr) == POST_DEC)
    return true;

  return targetm.mode_dependent_address_p (addr, addrspace);
}

/* Return true if boolean attribute ATTR is supported.  */

static bool
have_bool_attr (bool_attr attr)
{
  switch (attr)
    {
    case BA_ENABLED:
      return HAVE_ATTR_enabled;
    case BA_PREFERRED_FOR_SIZE:
      return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size;
    case BA_PREFERRED_FOR_SPEED:
      return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed;
    }
  gcc_unreachable ();
}

/* Return the value of ATTR for instruction INSN.  */

static bool
get_bool_attr (rtx_insn *insn, bool_attr attr)
{
  switch (attr)
    {
    case BA_ENABLED:
      return get_attr_enabled (insn);
    case BA_PREFERRED_FOR_SIZE:
      return get_attr_enabled (insn) && get_attr_preferred_for_size (insn);
    case BA_PREFERRED_FOR_SPEED:
      return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn);
    }
  gcc_unreachable ();
}

/* Like get_bool_attr_mask, but don't use the cache.  */

static alternative_mask
get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr)
{
  /* Temporarily install enough information for get_attr_<foo> to assume
     that the insn operands are already cached.  As above, the attribute
     mustn't depend on the values of operands, so we don't provide their
     real values here.  */
  rtx_insn *old_insn = recog_data.insn;
  int old_alternative = which_alternative;

  recog_data.insn = insn;
  alternative_mask mask = ALL_ALTERNATIVES;
  int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives;
  for (int i = 0; i < n_alternatives; i++)
    {
      which_alternative = i;
      if (!get_bool_attr (insn, attr))
	mask &= ~ALTERNATIVE_BIT (i);
    }

  recog_data.insn = old_insn;
  which_alternative = old_alternative;
  return mask;
}

/* Return the mask of operand alternatives that are allowed for INSN
   by boolean attribute ATTR.  This mask depends only on INSN and on
   the current target; it does not depend on things like the values of
   operands.  */

static alternative_mask
get_bool_attr_mask (rtx_insn *insn, bool_attr attr)
{
  /* Quick exit for asms and for targets that don't use these attributes.  */
  int code = INSN_CODE (insn);
  if (code < 0 || !have_bool_attr (attr))
    return ALL_ALTERNATIVES;

  /* Calling get_attr_<foo> can be expensive, so cache the mask
     for speed.  */
  if (!this_target_recog->x_bool_attr_masks[code][attr])
    this_target_recog->x_bool_attr_masks[code][attr]
      = get_bool_attr_mask_uncached (insn, attr);
  return this_target_recog->x_bool_attr_masks[code][attr];
}

/* Return the set of alternatives of INSN that are allowed by the current
   target.  */

alternative_mask
get_enabled_alternatives (rtx_insn *insn)
{
  return get_bool_attr_mask (insn, BA_ENABLED);
}

/* Return the set of alternatives of INSN that are allowed by the current
   target and are preferred for the current size/speed optimization
   choice.  */

alternative_mask
get_preferred_alternatives (rtx_insn *insn)
{
  if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
  else
    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
}

/* Return the set of alternatives of INSN that are allowed by the current
   target and are preferred for the size/speed optimization choice
   associated with BB.  Passing a separate BB is useful if INSN has not
   been emitted yet or if we are considering moving it to a different
   block.  */

alternative_mask
get_preferred_alternatives (rtx_insn *insn, basic_block bb)
{
  if (optimize_bb_for_speed_p (bb))
    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
  else
    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
}
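
/* Usage sketch (illustrative): passes normally feed these masks straight
   into constrain_operands, as extract_constrain_insn below does:

       extract_insn (insn);
       if (!constrain_operands (reload_completed,
				get_preferred_alternatives (insn)))
	 ;  (no allowed alternative matched)

   so that disabled or dispreferred alternatives are never selected.  */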
/* Assert that the cached boolean attributes for INSN are still accurate.
   The backend is required to define these attributes in a way that only
   depends on the current target (rather than operands, compiler phase,
   etc.).  */

bool
check_bool_attrs (rtx_insn *insn)
{
  int code = INSN_CODE (insn);
  if (code >= 0)
    for (int i = 0; i <= BA_LAST; ++i)
      {
	enum bool_attr attr = (enum bool_attr) i;
	if (this_target_recog->x_bool_attr_masks[code][attr])
	  gcc_assert (this_target_recog->x_bool_attr_masks[code][attr]
		      == get_bool_attr_mask_uncached (insn, attr));
      }
  return true;
}

/* Like extract_insn, but save the insn extracted and don't extract it again
   when called again for the same insn, expecting that recog_data still
   contains the valid information.  This is used primarily by the gen_attr
   infrastructure, which often extracts the insn again and again.  */
void
extract_insn_cached (rtx_insn *insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}

/* Do uncached extract_insn, constrain_operands and complain about failures.
   This should be used when extracting a pre-existing constrained instruction
   if the caller wants to know which alternative was chosen.  */
void
extract_constrain_insn (rtx_insn *insn)
{
  extract_insn (insn);
  if (!constrain_operands (reload_completed, get_enabled_alternatives (insn)))
    fatal_insn_not_found (insn);
}

/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx_insn *insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed,
			      get_enabled_alternatives (insn)))
    fatal_insn_not_found (insn);
}

/* Do cached constrain_operands on INSN and complain about failures.  */
int
constrain_operands_cached (rtx_insn *insn, int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict, get_enabled_alternatives (insn));
  else
    return 1;
}


/* Analyze INSN and fill in recog_data.  */

void
extract_insn (rtx_insn *insn)
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;
  recog_data.is_asm = false;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case VAR_LOCATION:
    case DEBUG_MARKER:
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        goto asm_insn;
      else
        goto normal_insn;
    case PARALLEL:
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
          || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS
          || GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
        goto asm_insn;
      else
        goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
        {
          /* This insn is an `asm' with operands.  */

          /* expand_asm_operands makes sure there aren't too many operands.  */
          gcc_assert (noperands <= MAX_RECOG_OPERANDS);

          /* Now get the operand values and constraints out of the insn.  */
          decode_asm_operands (body, recog_data.operand,
                               recog_data.operand_loc,
                               recog_data.constraints,
                               recog_data.operand_mode, NULL);
          memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
          if (noperands > 0)
            {
              const char *p = recog_data.constraints[0];
              recog_data.n_alternatives = 1;
              while (*p)
                recog_data.n_alternatives += (*p++ == ',');
            }
          recog_data.is_asm = true;
          break;
        }
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
         and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
        fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
        {
          recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
          recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
          recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
          /* VOIDmode match_operands gets mode from their real operand.  */
          if (recog_data.operand_mode[i] == VOIDmode)
            recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
        }
    }
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
         : recog_data.constraints[i][0] == '+' ? OP_INOUT
         : OP_IN);

  gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);

  recog_data.insn = NULL;
  which_alternative = -1;
}
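
/* Usage sketch (illustrative, not part of the original source): a typical
   consumer extracts the insn and then walks the operand array:

     extract_insn (insn);
     for (int i = 0; i < recog_data.n_operands; i++)
       note_operand (recog_data.operand[i], recog_data.operand_mode[i]);

   where note_operand stands in for whatever per-operand processing the
   caller performs.  */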

/* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS
   operands, N_ALTERNATIVES alternatives and constraint strings
   CONSTRAINTS.  OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries
   and CONSTRAINTS has N_OPERANDS entries.  OPLOC should be passed in
   if the insn is an asm statement and preprocessing should take the
   asm operands into account, e.g. to determine whether they could be
   addresses in constraints that require addresses; it should then
   point to an array of pointers to each operand.  */

void
preprocess_constraints (int n_operands, int n_alternatives,
                        const char **constraints,
                        operand_alternative *op_alt_base,
                        rtx **oploc)
{
  for (int i = 0; i < n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      const char *p = constraints[i];

      op_alt = op_alt_base;

      for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
        {
          op_alt[i].cl = NO_REGS;
          op_alt[i].constraint = p;
          op_alt[i].matches = -1;
          op_alt[i].matched = -1;

          if (*p == '\0' || *p == ',')
            {
              op_alt[i].anything_ok = 1;
              continue;
            }

          for (;;)
            {
              char c = *p;
              if (c == '#')
                do
                  c = *++p;
                while (c != ',' && c != '\0');
              if (c == ',' || c == '\0')
                {
                  p++;
                  break;
                }

              switch (c)
                {
                case '?':
                  op_alt[i].reject += 6;
                  break;
                case '!':
                  op_alt[i].reject += 600;
                  break;
                case '&':
                  op_alt[i].earlyclobber = 1;
                  break;

                case '0': case '1': case '2': case '3': case '4':
                case '5': case '6': case '7': case '8': case '9':
                  {
                    char *end;
                    op_alt[i].matches = strtoul (p, &end, 10);
                    op_alt[op_alt[i].matches].matched = i;
                    p = end;
                  }
                  continue;

                case 'X':
                  op_alt[i].anything_ok = 1;
                  break;

                case 'g':
                  op_alt[i].cl =
                   reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
                  break;

                default:
                  enum constraint_num cn = lookup_constraint (p);
                  enum reg_class cl;
                  switch (get_constraint_type (cn))
                    {
                    case CT_REGISTER:
                      cl = reg_class_for_constraint (cn);
                      if (cl != NO_REGS)
                        op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl];
                      break;

                    case CT_CONST_INT:
                      break;

                    case CT_MEMORY:
                    case CT_SPECIAL_MEMORY:
                      op_alt[i].memory_ok = 1;
                      break;

                    case CT_ADDRESS:
                      if (oploc && !address_operand (*oploc[i], VOIDmode))
                        break;

                      op_alt[i].is_address = 1;
                      op_alt[i].cl
                        = (reg_class_subunion
                           [(int) op_alt[i].cl]
                           [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
                                                  ADDRESS, SCRATCH)]);
                      break;

                    case CT_FIXED_FORM:
                      break;
                    }
                  break;
                }
              p += CONSTRAINT_LEN (c, p);
            }
        }
    }
}

/* Return an array of operand_alternative structures for
   instruction ICODE.  */

const operand_alternative *
preprocess_insn_constraints (unsigned int icode)
{
  gcc_checking_assert (IN_RANGE (icode, 0, NUM_INSN_CODES - 1));
  if (this_target_recog->x_op_alt[icode])
    return this_target_recog->x_op_alt[icode];

  int n_operands = insn_data[icode].n_operands;
  if (n_operands == 0)
    return 0;
  /* Always provide at least one alternative so that which_op_alt ()
     works correctly.  If the instruction has 0 alternatives (i.e. all
     constraint strings are empty) then each operand in this alternative
     will have anything_ok set.  */
  int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
  int n_entries = n_operands * n_alternatives;

  operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
  const char **constraints = XALLOCAVEC (const char *, n_operands);

  for (int i = 0; i < n_operands; ++i)
    constraints[i] = insn_data[icode].operand[i].constraint;
  preprocess_constraints (n_operands, n_alternatives, constraints, op_alt,
                          NULL);

  this_target_recog->x_op_alt[icode] = op_alt;
  return op_alt;
}
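
/* Usage sketch (illustrative, not part of the original source): the entry
   for operand OP of alternative A lives at index A * n_operands + OP, so

     const operand_alternative *op_alt = preprocess_insn_constraints (icode);
     const operand_alternative *alt1_op0
       = &op_alt[1 * insn_data[icode].n_operands + 0];

   gives the preprocessed data for operand 0 in alternative 1.  */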

/* After calling extract_insn, you can use this function to extract some
   information from the constraint strings into a more usable form.
   The collected data is stored in recog_op_alt.  */

void
preprocess_constraints (rtx_insn *insn)
{
  int icode = INSN_CODE (insn);
  if (icode >= 0)
    recog_op_alt = preprocess_insn_constraints (icode);
  else
    {
      int n_operands = recog_data.n_operands;
      int n_alternatives = recog_data.n_alternatives;
      int n_entries = n_operands * n_alternatives;
      memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
      preprocess_constraints (n_operands, n_alternatives,
                              recog_data.constraints, asm_op_alt,
                              NULL);
      recog_op_alt = asm_op_alt;
    }
}

/* Check the operands of an insn against the insn's operand constraints
   and return 1 if they match any of the alternatives in ALTERNATIVES.

   The information about the insn's operands, constraints, operand modes
   etc. is obtained from the global variables set up by extract_insn.

   WHICH_ALTERNATIVE is set to a number which indicates which
   alternative of constraints was matched: 0 for the first alternative,
   1 for the next, etc.

   In addition, when two operands are required to match
   and it happens that the output operand is (reg) while the
   input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
   make the output operand look like the input.
   This is because the output operand is the one the template will print.

   This is used in final, just before printing the assembler code and by
   the routines that determine an insn's attribute.

   If STRICT is a positive nonzero value, it means that we have been
   called after reload has been completed.  In that case, we must
   do all checks strictly.  If it is zero, it means that we have been called
   before reload has completed.  In that case, we first try to see if we can
   find an alternative that matches strictly.  If not, we try again, this
   time assuming that reload will fix up the insn.  This provides a "best
   guess" for the alternative and is used to compute attributes of insns prior
   to reload.  A negative value of STRICT is used for this internal call.  */

struct funny_match
{
  int this_op, other;
};

int
constrain_operands (int strict, alternative_mask alternatives)
{
  const char *constraints[MAX_RECOG_OPERANDS];
  int matching_operands[MAX_RECOG_OPERANDS];
  int earlyclobber[MAX_RECOG_OPERANDS];
  int c;

  struct funny_match funny_match[MAX_RECOG_OPERANDS];
  int funny_match_index;

  which_alternative = 0;
  if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
    return 1;

  for (c = 0; c < recog_data.n_operands; c++)
    {
      constraints[c] = recog_data.constraints[c];
      matching_operands[c] = -1;
    }

  do
    {
      int seen_earlyclobber_at = -1;
      int opno;
      int lose = 0;
      funny_match_index = 0;

      if (!TEST_BIT (alternatives, which_alternative))
        {
          int i;

          for (i = 0; i < recog_data.n_operands; i++)
            constraints[i] = skip_alternative (constraints[i]);

          which_alternative++;
          continue;
        }

      for (opno = 0; opno < recog_data.n_operands; opno++)
        {
          rtx op = recog_data.operand[opno];
          machine_mode mode = GET_MODE (op);
          const char *p = constraints[opno];
          int offset = 0;
          int win = 0;
          int val;
          int len;

          earlyclobber[opno] = 0;

          /* A unary operator may be accepted by the predicate, but it
             is irrelevant for matching constraints.  */
          /* For special_memory_operand, there could be a memory operand inside,
             and it would cause a mismatch for constraint_satisfied_p.  */
          if (UNARY_P (op) && op == extract_mem_from_operand (op))
            op = XEXP (op, 0);

          if (GET_CODE (op) == SUBREG)
            {
              if (REG_P (SUBREG_REG (op))
                  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
                offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
                                              GET_MODE (SUBREG_REG (op)),
                                              SUBREG_BYTE (op),
                                              GET_MODE (op));
              op = SUBREG_REG (op);
            }

          /* An empty constraint or empty alternative
             allows anything which matched the pattern.  */
          if (*p == 0 || *p == ',')
            win = 1;

          do
            switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
              {
              case '\0':
                len = 0;
                break;
              case ',':
                c = '\0';
                break;

              case '#':
                /* Ignore rest of this alternative as far as
                   constraint checking is concerned.  */
                do
                  p++;
                while (*p && *p != ',');
                len = 0;
                break;

              case '&':
                earlyclobber[opno] = 1;
                if (seen_earlyclobber_at < 0)
                  seen_earlyclobber_at = opno;
                break;

              case '0': case '1': case '2': case '3': case '4':
              case '5': case '6': case '7': case '8': case '9':
                {
                  /* This operand must be the same as a previous one.
                     This kind of constraint is used for instructions such
                     as add when they take only two operands.

                     Note that the lower-numbered operand is passed first.

                     If we are not testing strictly, assume that this
                     constraint will be satisfied.  */

                  char *end;
                  int match;

                  match = strtoul (p, &end, 10);
                  p = end;

                  if (strict < 0)
                    val = 1;
                  else
                    {
                      rtx op1 = recog_data.operand[match];
                      rtx op2 = recog_data.operand[opno];

                      /* A unary operator may be accepted by the predicate,
                         but it is irrelevant for matching constraints.  */
                      if (UNARY_P (op1))
                        op1 = XEXP (op1, 0);
                      if (UNARY_P (op2))
                        op2 = XEXP (op2, 0);

                      val = operands_match_p (op1, op2);
                    }

                  matching_operands[opno] = match;
                  matching_operands[match] = opno;

                  if (val != 0)
                    win = 1;

                  /* If output is *x and input is *--x, arrange later
                     to change the output to *--x as well, since the
                     output op is the one that will be printed.  */
                  if (val == 2 && strict > 0)
                    {
                      funny_match[funny_match_index].this_op = opno;
                      funny_match[funny_match_index++].other = match;
                    }
                }
                len = 0;
                break;

              case 'p':
                /* p is used for address_operands.  When we are called by
                   gen_reload, no one will have checked that the address is
                   strictly valid, i.e., that all pseudos requiring hard regs
                   have gotten them.  We also want to make sure we have a
                   valid mode.  */
                if ((GET_MODE (op) == VOIDmode
                     || SCALAR_INT_MODE_P (GET_MODE (op)))
                    && (strict <= 0
                        || (strict_memory_address_p
                             (recog_data.operand_mode[opno], op))))
                  win = 1;
                break;

                /* No need to check general_operand again;
                   it was done in insn-recog.c.  Well, except that reload
                   doesn't check the validity of its replacements, but
                   that should only matter when there's a bug.  */
              case 'g':
                /* Anything goes unless it is a REG and really has a hard reg
                   but the hard reg is not in the class GENERAL_REGS.  */
                if (strict > 0)
                  {
                    if (!REG_P (op)
                        || GENERAL_REGS == ALL_REGS
                        || (reload_in_progress
                            && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                        || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
                      win = 1;
                  }
                else if (strict < 0 || general_operand (op, mode))
                  win = 1;
                break;

              default:
                {
                  enum constraint_num cn = lookup_constraint (p);
                  enum reg_class cl = reg_class_for_constraint (cn);
                  if (cl != NO_REGS)
                    {
                      if (strict < 0
                          || (strict == 0
                              && REG_P (op)
                              && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                          || (strict == 0 && GET_CODE (op) == SCRATCH)
                          || (REG_P (op)
                              && reg_fits_class_p (op, cl, offset, mode)))
                        win = 1;
                    }

                  else if (constraint_satisfied_p (op, cn))
                    win = 1;

                  else if (insn_extra_memory_constraint (cn)
                           /* Every memory operand can be reloaded to fit.  */
                           && ((strict < 0 && MEM_P (op))
                               /* Before reload, accept what reload can turn
                                  into a mem.  */
                               || (strict < 0 && CONSTANT_P (op))
                               /* Before reload, accept a pseudo or hard register,
                                  since LRA can turn it into a mem.  */
                               || (strict < 0 && targetm.lra_p () && REG_P (op))
                               /* During reload, accept a pseudo.  */
                               || (reload_in_progress && REG_P (op)
                                   && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
                    win = 1;
                  else if (insn_extra_address_constraint (cn)
                           /* Every address operand can be reloaded to fit.  */
                           && strict < 0)
                    win = 1;
                  /* Cater to architectures like IA-64 that define extra memory
                     constraints without using define_memory_constraint.  */
                  else if (reload_in_progress
                           && REG_P (op)
                           && REGNO (op) >= FIRST_PSEUDO_REGISTER
                           && reg_renumber[REGNO (op)] < 0
                           && reg_equiv_mem (REGNO (op)) != 0
                           && constraint_satisfied_p
                              (reg_equiv_mem (REGNO (op)), cn))
                    win = 1;
                  break;
                }
              }
          while (p += len, c);

          constraints[opno] = p;
          /* If this operand did not win somehow,
             this alternative loses.  */
          if (! win)
            lose = 1;
        }
      /* This alternative won; the operands are ok.
         Change whichever operands this alternative says to change.  */
      if (! lose)
        {
          int opno, eopno;

          /* See if any earlyclobber operand conflicts with some other
             operand.  */

          if (strict > 0 && seen_earlyclobber_at >= 0)
            for (eopno = seen_earlyclobber_at;
                 eopno < recog_data.n_operands;
                 eopno++)
              /* Ignore earlyclobber operands now in memory,
                 because we would often report failure when we have
                 two memory operands, one of which was formerly a REG.  */
              if (earlyclobber[eopno]
                  && REG_P (recog_data.operand[eopno]))
                for (opno = 0; opno < recog_data.n_operands; opno++)
                  if ((MEM_P (recog_data.operand[opno])
                       || recog_data.operand_type[opno] != OP_OUT)
                      && opno != eopno
                      /* Ignore things like match_operator operands.  */
                      && *recog_data.constraints[opno] != 0
                      && ! (matching_operands[opno] == eopno
                            && operands_match_p (recog_data.operand[opno],
                                                 recog_data.operand[eopno]))
                      && ! safe_from_earlyclobber (recog_data.operand[opno],
                                                   recog_data.operand[eopno]))
                    lose = 1;

          if (! lose)
            {
              while (--funny_match_index >= 0)
                {
                  recog_data.operand[funny_match[funny_match_index].other]
                    = recog_data.operand[funny_match[funny_match_index].this_op];
                }

              /* For operands without < or > constraints reject side-effects.  */
              if (AUTO_INC_DEC && recog_data.is_asm)
                {
                  for (opno = 0; opno < recog_data.n_operands; opno++)
                    if (MEM_P (recog_data.operand[opno]))
                      switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
                        {
                        case PRE_INC:
                        case POST_INC:
                        case PRE_DEC:
                        case POST_DEC:
                        case PRE_MODIFY:
                        case POST_MODIFY:
                          if (strchr (recog_data.constraints[opno], '<') == NULL
                              && strchr (recog_data.constraints[opno], '>')
                                 == NULL)
                            return 0;
                          break;

                        default:
                          break;
                        }
                }

              return 1;
            }
        }

      which_alternative++;
    }
  while (which_alternative < recog_data.n_alternatives);

  which_alternative = -1;
  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
    return constrain_operands (-1, alternatives);
  else
    return 0;
}
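
/* Usage sketch (illustrative, not part of the original source): STRICT is
   normally derived from reload_completed, as the callers above do:

     extract_insn (insn);
     if (!constrain_operands (reload_completed,
                              get_enabled_alternatives (insn)))
       fatal_insn_not_found (insn);

   After a successful call, which_alternative identifies the matching
   alternative.  */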

/* Return true iff OPERAND (assumed to be a REG rtx)
   is a hard reg in class CL when its regno is offset by OFFSET
   and changed to mode MODE.
   If OPERAND occupies multiple hard regs, all of them must be in CL.  */

bool
reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
                  machine_mode mode)
{
  unsigned int regno = REGNO (operand);

  if (cl == NO_REGS)
    return false;

  /* Regno must not be a pseudo register.  Offset may be negative.  */
  return (HARD_REGISTER_NUM_P (regno)
          && HARD_REGISTER_NUM_P (regno + offset)
          && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
                                regno + offset));
}
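
/* Illustrative example (not part of the original source): on a target
   where DImode needs two consecutive 32-bit registers,

     reg_fits_class_p (operand, GENERAL_REGS, 0, DImode)

   for a REG occupying hard registers R and R+1 returns true only when
   both R and R+1 are members of GENERAL_REGS.  */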

/* Split single instruction.  Helper function for split_all_insns and
   split_all_insns_noflow.  Return last insn in the sequence if successful,
   or NULL if unsuccessful.  */

static rtx_insn *
split_insn (rtx_insn *insn)
{
  /* Split insns here to get max fine-grain parallelism.  */
  rtx_insn *first = PREV_INSN (insn);
  rtx_insn *last = try_split (PATTERN (insn), insn, 1);
  rtx insn_set, last_set, note;

  if (last == insn)
    return NULL;

  /* If the original instruction was a single set that was known to be
     equivalent to a constant, see if we can say the same about the last
     instruction in the split sequence.  The two instructions must set
     the same destination.  */
  insn_set = single_set (insn);
  if (insn_set)
    {
      last_set = single_set (last);
      if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
        {
          note = find_reg_equal_equiv_note (insn);
          if (note && CONSTANT_P (XEXP (note, 0)))
            set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
          else if (CONSTANT_P (SET_SRC (insn_set)))
            set_unique_reg_note (last, REG_EQUAL,
                                 copy_rtx (SET_SRC (insn_set)));
        }
    }

  /* try_split returns the NOTE that INSN became.  */
  SET_INSN_DELETED (insn);

  /* ??? Coddle to md files that generate subregs in post-reload
     splitters instead of computing the proper hard register.  */
  if (reload_completed && first != last)
    {
      first = NEXT_INSN (first);
      for (;;)
        {
          if (INSN_P (first))
            cleanup_subreg_operands (first);
          if (first == last)
            break;
          first = NEXT_INSN (first);
        }
    }

  return last;
}

/* Split all insns in the function.  */

void
split_all_insns (void)
{
  bool changed;
  bool need_cfg_cleanup = false;
  basic_block bb;

  auto_sbitmap blocks (last_basic_block_for_fn (cfun));
  bitmap_clear (blocks);
  changed = false;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      rtx_insn *insn, *next;
      bool finish = false;

      rtl_profile_for_bb (bb);
      for (insn = BB_HEAD (bb); !finish ; insn = next)
        {
          /* Can't use `next_real_insn' because that might go across
             CODE_LABELS and short-out basic blocks.  */
          next = NEXT_INSN (insn);
          finish = (insn == BB_END (bb));

          /* If INSN has a REG_EH_REGION note and we split INSN, the
             resulting split may not have/need REG_EH_REGION notes.

             If that happens and INSN was the last reference to the
             given EH region, then the EH region will become unreachable.
             We cannot leave the unreachable blocks in the CFG as that
             will trigger a checking failure.

             So track if INSN has a REG_EH_REGION note.  If so and we
             split INSN, then trigger a CFG cleanup.  */
          rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
          if (INSN_P (insn))
            {
              rtx set = single_set (insn);

              /* Don't split no-op move insns.  These should silently
                 disappear later in final.  Splitting such insns would
                 break the code that handles LIBCALL blocks.  */
              if (set && set_noop_p (set))
                {
                  /* Nops get in the way while scheduling, so delete them
                     now if register allocation has already been done.  It
                     is too risky to try to do this before register
                     allocation, and there are unlikely to be very many
                     nops then anyways.  */
                  if (reload_completed)
                    delete_insn_and_edges (insn);
                  if (note)
                    need_cfg_cleanup = true;
                }
              else
                {
                  if (split_insn (insn))
                    {
                      bitmap_set_bit (blocks, bb->index);
                      changed = true;
                      if (note)
                        need_cfg_cleanup = true;
                    }
                }
            }
        }
    }

  default_rtl_profile ();
  if (changed)
    {
      find_many_sub_basic_blocks (blocks);

      /* Splitting could drop a REG_EH_REGION if it potentially
         trapped in its original form, but does not in its split
         form.  Consider a FLOAT_TRUNCATE which splits into a memory
         store/load pair and -fnon-call-exceptions.  */
      if (need_cfg_cleanup)
        cleanup_cfg (0);
    }

  checking_verify_flow_info ();
}

/* Same as split_all_insns, but do not expect CFG to be available.
   Used by machine dependent reorg passes.  */

unsigned int
split_all_insns_noflow (void)
{
  rtx_insn *next, *insn;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (INSN_P (insn))
        {
          /* Don't split no-op move insns.  These should silently
             disappear later in final.  Splitting such insns would
             break the code that handles LIBCALL blocks.  */
          rtx set = single_set (insn);
          if (set && set_noop_p (set))
            {
              /* Nops get in the way while scheduling, so delete them
                 now if register allocation has already been done.  It
                 is too risky to try to do this before register
                 allocation, and there are unlikely to be very many
                 nops then anyways.

                 ??? Should we use delete_insn when the CFG isn't valid?  */
              if (reload_completed)
                delete_insn_and_edges (insn);
            }
          else
            split_insn (insn);
        }
    }
  return 0;
}

struct peep2_insn_data
{
  rtx_insn *insn;
  regset live_before;
};

static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
static int peep2_current;

static bool peep2_do_rebuild_jump_labels;
static bool peep2_do_cleanup_cfg;

/* The number of instructions available to match a peep2.  */
int peep2_current_count;

/* A marker indicating the last insn of the block.  The live_before regset
   for this element is correct, indicating DF_LIVE_OUT for the block.  */
#define PEEP2_EOB invalid_insn_rtx

/* Wrap N to fit into the peep2_insn_data buffer.  */

static int
peep2_buf_position (int n)
{
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    n -= MAX_INSNS_PER_PEEP2 + 1;
  return n;
}

/* Return the Nth non-note insn after `current', or return NULL_RTX if it
   does not exist.  Used by the recognizer to find the next insn to match
   in a multi-insn pattern.  */

rtx_insn *
peep2_next_insn (int n)
{
  gcc_assert (n <= peep2_current_count);

  n = peep2_buf_position (peep2_current + n);

  return peep2_insn_data[n].insn;
}

/* Return true if REGNO is dead before the Nth non-note insn
   after `current'.  */

int
peep2_regno_dead_p (int ofs, int regno)
{
  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs = peep2_buf_position (peep2_current + ofs);

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
}

/* Similarly for a REG.  */

int
peep2_reg_dead_p (int ofs, rtx reg)
{
  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs = peep2_buf_position (peep2_current + ofs);

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  unsigned int end_regno = END_REGNO (reg);
  for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
    if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno))
      return 0;
  return 1;
}
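
/* Usage sketch (illustrative, not part of the original source): these
   predicates are typically called from define_peephole2 conditions in a
   machine description, e.g.

     (define_peephole2
       [(set (match_operand:SI 0 "register_operand" "")
             (match_operand:SI 1 "register_operand" ""))]
       "peep2_reg_dead_p (1, operands[1])"
       [...])

   where offset 1 denotes the buffer position just after the first
   matched insn.  The pattern itself is a made-up example.  */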

/* Regno offset to be used in the register search.  */
static int search_ofs;

/* Try to find a hard register of mode MODE, matching the register class in
   CLASS_STR, which is available from the start of the insn at buffer
   position FROM and remains available until the end of the insn at buffer
   position TO; if FROM == TO, the only condition is that the register must
   be available before that insn.

   Registers that already have bits set in REG_SET will not be considered.

   If an appropriate register is available, it will be returned and the
   corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
   returned.  */

rtx
peep2_find_free_register (int from, int to, const char *class_str,
                          machine_mode mode, HARD_REG_SET *reg_set)
{
  enum reg_class cl;
  HARD_REG_SET live;
  df_ref def;
  int i;

  gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
  gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);

  from = peep2_buf_position (peep2_current + from);
  to = peep2_buf_position (peep2_current + to);

  gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
  REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);

  while (from != to)
    {
      gcc_assert (peep2_insn_data[from].insn != NULL_RTX);

      /* Don't use registers set or clobbered by the insn.  */
      FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
        SET_HARD_REG_BIT (live, DF_REF_REGNO (def));

      from = peep2_buf_position (from + 1);
    }

  cl = reg_class_for_constraint (lookup_constraint (class_str));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int raw_regno, regno, success, j;

      /* Distribute the free registers as much as possible.  */
      raw_regno = search_ofs + i;
      if (raw_regno >= FIRST_PSEUDO_REGISTER)
        raw_regno -= FIRST_PSEUDO_REGISTER;
#ifdef REG_ALLOC_ORDER
      regno = reg_alloc_order[raw_regno];
#else
      regno = raw_regno;
#endif

      /* Can it support the mode we need?  */
      if (!targetm.hard_regno_mode_ok (regno, mode))
        continue;

      success = 1;
      for (j = 0; success && j < hard_regno_nregs (regno, mode); j++)
        {
          /* Don't allocate fixed registers.  */
          if (fixed_regs[regno + j])
            {
              success = 0;
              break;
            }
          /* Don't allocate global registers.  */
          if (global_regs[regno + j])
            {
              success = 0;
              break;
            }
          /* Make sure the register is of the right class.  */
          if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
            {
              success = 0;
              break;
            }
          /* And that we don't create an extra save/restore.  */
          if (! crtl->abi->clobbers_full_reg_p (regno + j)
              && ! df_regs_ever_live_p (regno + j))
            {
              success = 0;
              break;
            }

          if (! targetm.hard_regno_scratch_ok (regno + j))
            {
              success = 0;
              break;
            }

          /* And we don't clobber traceback for noreturn functions.  */
          if ((regno + j == FRAME_POINTER_REGNUM
               || regno + j == HARD_FRAME_POINTER_REGNUM)
              && (! reload_completed || frame_pointer_needed))
            {
              success = 0;
              break;
            }

          if (TEST_HARD_REG_BIT (*reg_set, regno + j)
              || TEST_HARD_REG_BIT (live, regno + j))
            {
              success = 0;
              break;
            }
        }

      if (success)
        {
          add_to_hard_reg_set (reg_set, mode, regno);

          /* Start the next search with the next register.  */
          if (++raw_regno >= FIRST_PSEUDO_REGISTER)
            raw_regno = 0;
          search_ofs = raw_regno;

          return gen_rtx_REG (mode, regno);
        }
    }

  search_ofs = 0;
  return NULL_RTX;
}
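
/* Usage sketch (illustrative, not part of the original source): target
   code preparing a peephole replacement might write

     rtx scratch = peep2_find_free_register (0, 1, "r", SImode, &live_regs);

   where LIVE_REGS is a hypothetical HARD_REG_SET cleared beforehand; a
   NULL_RTX result means no suitable register was free and the peephole
   should be abandoned.  */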

/* Forget all currently tracked instructions, only remember the current
   LIVE regset.  */

static void
peep2_reinit_state (regset live)
{
  int i;

  /* Indicate that all slots except the last hold invalid data.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
    peep2_insn_data[i].insn = NULL;
  peep2_current_count = 0;

  /* Indicate that the last slot contains live_after data.  */
  peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
  peep2_current = MAX_INSNS_PER_PEEP2;

  COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
}

/* Copies frame related info of an insn (OLD_INSN) to the single
   insn (NEW_INSN) that was obtained by splitting OLD_INSN.  */

static void
copy_frame_info_to_split_insn (rtx_insn *old_insn, rtx_insn *new_insn)
{
  bool any_note = false;
  rtx note;

  if (!RTX_FRAME_RELATED_P (old_insn))
    return;

  RTX_FRAME_RELATED_P (new_insn) = 1;

  /* Allow the backend to fill in a note during the split.  */
  for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
    switch (REG_NOTE_KIND (note))
      {
      case REG_FRAME_RELATED_EXPR:
      case REG_CFA_DEF_CFA:
      case REG_CFA_ADJUST_CFA:
      case REG_CFA_OFFSET:
      case REG_CFA_REGISTER:
      case REG_CFA_EXPRESSION:
      case REG_CFA_RESTORE:
      case REG_CFA_SET_VDRAP:
        any_note = true;
        break;
      default:
        break;
      }

  /* If the backend didn't supply a note, copy one over.  */
  if (!any_note)
    for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
      switch (REG_NOTE_KIND (note))
        {
        case REG_FRAME_RELATED_EXPR:
        case REG_CFA_DEF_CFA:
        case REG_CFA_ADJUST_CFA:
        case REG_CFA_OFFSET:
        case REG_CFA_REGISTER:
        case REG_CFA_EXPRESSION:
        case REG_CFA_RESTORE:
        case REG_CFA_SET_VDRAP:
          add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
          any_note = true;
          break;
        default:
          break;
        }

  /* If there still isn't a note, make sure the unwind info sees the
     same expression as before the split.  */
  if (!any_note)
    {
      rtx old_set, new_set;

      /* The old insn had better have been simple, or annotated.  */
      old_set = single_set (old_insn);
      gcc_assert (old_set != NULL);

      new_set = single_set (new_insn);
      if (!new_set || !rtx_equal_p (new_set, old_set))
        add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
    }

  /* Copy prologue/epilogue status.  This is required in order to keep
     proper placement of EPILOGUE_BEG and the DW_CFA_remember_state.  */
  maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
}

/* While scanning basic block BB, we found a match of length MATCH_LEN,
   starting at INSN.  Perform the replacement, removing the old insns and
   replacing them with ATTEMPT.  Returns the last insn emitted, or NULL
   if the replacement is rejected.  */

static rtx_insn *
peep2_attempt (basic_block bb, rtx_insn *insn, int match_len, rtx_insn *attempt)
{
  int i;
  rtx_insn *last, *before_try, *x;
  rtx eh_note, as_note;
  rtx_insn *old_insn;
  rtx_insn *new_insn;
  bool was_call = false;

  /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
     match more than one insn, or to be split into more than one insn.  */
  old_insn = peep2_insn_data[peep2_current].insn;
  if (RTX_FRAME_RELATED_P (old_insn))
    {
      if (match_len != 0)
        return NULL;

      /* Look for one "active" insn.  I.e. ignore any "clobber" insns that
         may be in the stream for the purpose of register allocation.  */
      if (active_insn_p (attempt))
        new_insn = attempt;
      else
        new_insn = next_active_insn (attempt);
      if (next_active_insn (new_insn))
        return NULL;

      /* We have a 1-1 replacement.  Copy over any frame-related info.  */
      copy_frame_info_to_split_insn (old_insn, new_insn);
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
     cfg-related call notes.  */
  for (i = 0; i <= match_len; ++i)
    {
      int j;
      rtx note;

      j = peep2_buf_position (peep2_current + i);
      old_insn = peep2_insn_data[j].insn;
      if (!CALL_P (old_insn))
        continue;
      was_call = true;

      new_insn = attempt;
      while (new_insn != NULL_RTX)
        {
          if (CALL_P (new_insn))
            break;
          new_insn = NEXT_INSN (new_insn);
        }

      gcc_assert (new_insn != NULL_RTX);

      CALL_INSN_FUNCTION_USAGE (new_insn)
        = CALL_INSN_FUNCTION_USAGE (old_insn);
      SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn);

      for (note = REG_NOTES (old_insn);
           note;
           note = XEXP (note, 1))
        switch (REG_NOTE_KIND (note))
          {
          case REG_NORETURN:
          case REG_SETJMP:
          case REG_TM:
          case REG_CALL_NOCF_CHECK:
            add_reg_note (new_insn, REG_NOTE_KIND (note),
                          XEXP (note, 0));
            break;
          default:
            /* Discard all other reg notes.  */
            break;
          }

      /* Croak if there is another call in the sequence.  */
      while (++i <= match_len)
        {
          j = peep2_buf_position (peep2_current + i);
          old_insn = peep2_insn_data[j].insn;
          gcc_assert (!CALL_P (old_insn));
        }
      break;
    }

  /* If we matched any instruction that had a REG_ARGS_SIZE, then
     move those notes over to the new sequence.  */
  as_note = NULL;
  for (i = match_len; i >= 0; --i)
    {
      int j = peep2_buf_position (peep2_current + i);
      old_insn = peep2_insn_data[j].insn;

      as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
      if (as_note)
        break;
    }

  i = peep2_buf_position (peep2_current + match_len);
  eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);

  /* Replace the old sequence with the new.  */
  rtx_insn *peepinsn = peep2_insn_data[i].insn;
  last = emit_insn_after_setloc (attempt,
                                 peep2_insn_data[i].insn,
                                 INSN_LOCATION (peepinsn));
  if (JUMP_P (peepinsn) && JUMP_P (last))
    CROSSING_JUMP_P (last) = CROSSING_JUMP_P (peepinsn);
  before_try = PREV_INSN (insn);
  delete_insn_chain (insn, peep2_insn_data[i].insn, false);

  /* Re-insert the EH_REGION notes.  */
  if (eh_note || (was_call && nonlocal_goto_handler_labels))
    {
      edge eh_edge;
      edge_iterator ei;

      FOR_EACH_EDGE (eh_edge, ei, bb->succs)
        if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
          break;

      if (eh_note)
        copy_reg_eh_region_note_backward (eh_note, last, before_try);

      if (eh_edge)
        for (x = last; x != before_try; x = PREV_INSN (x))
          if (x != BB_END (bb)
              && (can_throw_internal (x)
                  || can_nonlocal_goto (x)))
            {
              edge nfte, nehe;
              int flags;

              nfte = split_block (bb, x);
              flags = (eh_edge->flags
                       & (EDGE_EH | EDGE_ABNORMAL));
              if (CALL_P (x))
                flags |= EDGE_ABNORMAL_CALL;
              nehe = make_edge (nfte->src, eh_edge->dest,
                                flags);

              nehe->probability = eh_edge->probability;
              nfte->probability = nehe->probability.invert ();

              peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
              bb = nfte->src;
              eh_edge = nehe;
            }

      /* Converting possibly trapping insn to non-trapping is
         possible.  Zap dummy outgoing edges.  */
      peep2_do_cleanup_cfg |= purge_dead_edges (bb);
    }

  /* Re-insert the ARGS_SIZE notes.  */
  if (as_note)
    fixup_args_size_notes (before_try, last, get_args_size (as_note));

  /* Scan the new insns for embedded side effects and add appropriate
     REG_INC notes.  */
  if (AUTO_INC_DEC)
    for (x = last; x != before_try; x = PREV_INSN (x))
      if (NONDEBUG_INSN_P (x))
        add_auto_inc_notes (x, PATTERN (x));

  /* If we generated a jump instruction, it won't have
     JUMP_LABEL set.  Recompute after we're done.  */
  for (x = last; x != before_try; x = PREV_INSN (x))
    if (JUMP_P (x))
      {
        peep2_do_rebuild_jump_labels = true;
        break;
      }

  return last;
}

/* After performing a replacement in basic block BB, fix up the life
   information in our buffer.  LAST is the last of the insns that we
   emitted as a replacement.  PREV is the insn before the start of
   the replacement.  MATCH_LEN is the number of instructions that were
   matched, and which now need to be replaced in the buffer.  */

static void
peep2_update_life (basic_block bb, int match_len, rtx_insn *last,
                   rtx_insn *prev)
{
  int i = peep2_buf_position (peep2_current + match_len + 1);
  rtx_insn *x;
  regset_head live;

  INIT_REG_SET (&live);
  COPY_REG_SET (&live, peep2_insn_data[i].live_before);

  gcc_assert (peep2_current_count >= match_len + 1);
  peep2_current_count -= match_len + 1;

  x = last;
  do
    {
      if (INSN_P (x))
        {
          df_insn_rescan (x);
          if (peep2_current_count < MAX_INSNS_PER_PEEP2)
            {
              peep2_current_count++;
              if (--i < 0)
                i = MAX_INSNS_PER_PEEP2;
              peep2_insn_data[i].insn = x;
              df_simulate_one_insn_backwards (bb, x, &live);
              COPY_REG_SET (peep2_insn_data[i].live_before, &live);
            }
        }
      x = PREV_INSN (x);
    }
  while (x != prev);
  CLEAR_REG_SET (&live);

  peep2_current = i;
}

/* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
   Return true if we added it, false otherwise.  The caller will try to match
   peepholes against the buffer if we return false; otherwise it will try to
   add more instructions to the buffer.  */

static bool
peep2_fill_buffer (basic_block bb, rtx_insn *insn, regset live)
{
  int pos;

  /* Once we have filled the maximum number of insns the buffer can hold,
     allow the caller to match the insns against peepholes.  We wait until
     the buffer is full in case the target has similar peepholes of different
     length; we always want to match the longest if possible.  */
  if (peep2_current_count == MAX_INSNS_PER_PEEP2)
    return false;

  /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
     any other pattern, lest it change the semantics of the frame info.  */
  if (RTX_FRAME_RELATED_P (insn))
    {
      /* Let the buffer drain first.  */
      if (peep2_current_count > 0)
        return false;
      /* Now the insn will be the only thing in the buffer.  */
    }

  pos = peep2_buf_position (peep2_current + peep2_current_count);
  peep2_insn_data[pos].insn = insn;
  COPY_REG_SET (peep2_insn_data[pos].live_before, live);
  peep2_current_count++;

  df_simulate_one_insn_forwards (bb, insn, live);
  return true;
}

/* Perform the peephole2 optimization pass.  */

static void
peephole2_optimize (void)
{
  rtx_insn *insn;
  bitmap live;
  int i;
  basic_block bb;

  peep2_do_cleanup_cfg = false;
  peep2_do_rebuild_jump_labels = false;

  df_set_flags (DF_LR_RUN_DCE);
  df_note_add_problem ();
  df_analyze ();

  /* Initialize the regsets we're going to use.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
  search_ofs = 0;
  live = BITMAP_ALLOC (&reg_obstack);

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      bool past_end = false;
      int pos;

      rtl_profile_for_bb (bb);

      /* Start up propagation.  */
      bitmap_copy (live, DF_LR_IN (bb));
      df_simulate_initialize_forwards (bb, live);
      peep2_reinit_state (live);

      insn = BB_HEAD (bb);
      for (;;)
        {
          rtx_insn *attempt, *head;
          int match_len;

          if (!past_end && !NONDEBUG_INSN_P (insn))
            {
            next_insn:
              insn = NEXT_INSN (insn);
              if (insn == NEXT_INSN (BB_END (bb)))
                past_end = true;
              continue;
            }
          if (!past_end && peep2_fill_buffer (bb, insn, live))
            goto next_insn;

          /* If we did not fill an empty buffer, it signals the end of the
             block.  */
          if (peep2_current_count == 0)
            break;

          /* The buffer filled to the current maximum, so try to match.  */

          pos = peep2_buf_position (peep2_current + peep2_current_count);
          peep2_insn_data[pos].insn = PEEP2_EOB;
          COPY_REG_SET (peep2_insn_data[pos].live_before, live);

          /* Match the peephole.  */
          head = peep2_insn_data[peep2_current].insn;
          attempt = peephole2_insns (PATTERN (head), head, &match_len);
          if (attempt != NULL)
            {
              rtx_insn *last = peep2_attempt (bb, head, match_len, attempt);
              if (last)
                {
                  peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
                  continue;
                }
            }

          /* No match: advance the buffer by one insn.  */
          peep2_current = peep2_buf_position (peep2_current + 1);
          peep2_current_count--;
        }
    }

  default_rtl_profile ();
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    BITMAP_FREE (peep2_insn_data[i].live_before);
  BITMAP_FREE (live);
  if (peep2_do_rebuild_jump_labels)
    rebuild_jump_labels (get_insns ());
  if (peep2_do_cleanup_cfg)
    cleanup_cfg (CLEANUP_CFG_CHANGED);
}

/* Common predicates for use with define_bypass.  */

/* Helper function for store_data_bypass_p, handle just a single SET
   IN_SET.  */

static bool
store_data_bypass_p_1 (rtx_insn *out_insn, rtx in_set)
{
  if (!MEM_P (SET_DEST (in_set)))
    return false;

  rtx out_set = single_set (out_insn);
  if (out_set)
    return !reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set));

  rtx out_pat = PATTERN (out_insn);
  if (GET_CODE (out_pat) != PARALLEL)
    return false;

  for (int i = 0; i < XVECLEN (out_pat, 0); i++)
    {
      rtx out_exp = XVECEXP (out_pat, 0, i);

      if (GET_CODE (out_exp) == CLOBBER || GET_CODE (out_exp) == USE)
        continue;

      gcc_assert (GET_CODE (out_exp) == SET);

      if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
        return false;
    }

  return true;
}

/* True if the dependency between OUT_INSN and IN_INSN is on the store
   data not the address operand(s) of the store.  IN_INSN and OUT_INSN
   must be either a single_set or a PARALLEL with SETs inside.  */

bool
store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
{
  rtx in_set = single_set (in_insn);
  if (in_set)
    return store_data_bypass_p_1 (out_insn, in_set);

  rtx in_pat = PATTERN (in_insn);
  if (GET_CODE (in_pat) != PARALLEL)
    return false;

  for (int i = 0; i < XVECLEN (in_pat, 0); i++)
    {
      rtx in_exp = XVECEXP (in_pat, 0, i);

      if (GET_CODE (in_exp) == CLOBBER || GET_CODE (in_exp) == USE)
        continue;

      gcc_assert (GET_CODE (in_exp) == SET);

      if (!store_data_bypass_p_1 (out_insn, in_exp))
        return false;
    }

  return true;
}
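
/* Usage sketch (illustrative, not part of the original source): a machine
   description can name this predicate as the guard of a define_bypass to
   shorten the latency between an ALU result and a dependent store whose
   *data* (rather than address) is being forwarded:

     (define_bypass 1 "alu_insn" "store_insn" "store_data_bypass_p")

   "alu_insn" and "store_insn" are hypothetical reservation names.  */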

/* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
   condition, and not the THEN or ELSE branch.  OUT_INSN may be either a single
   or multiple set; IN_INSN should be single_set for truth, but for convenience
   of insn categorization may be any JUMP or CALL insn.  */

bool
if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
{
  rtx out_set, in_set;

  in_set = single_set (in_insn);
  if (! in_set)
    {
      gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
      return false;
    }

  if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
    return false;
  in_set = SET_SRC (in_set);

  out_set = single_set (out_insn);
  if (out_set)
    {
      if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
          || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
        return false;
    }
  else
    {
      rtx out_pat;
      int i;

      out_pat = PATTERN (out_insn);
      gcc_assert (GET_CODE (out_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (out_pat, 0); i++)
        {
          rtx exp = XVECEXP (out_pat, 0, i);

          if (GET_CODE (exp) == CLOBBER)
            continue;

          gcc_assert (GET_CODE (exp) == SET);

          if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
              || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
            return false;
        }
    }

  return true;
}
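
/* Usage sketch (illustrative, not part of the original source):

     (define_bypass 1 "cmp_insn" "cmov_insn" "if_test_bypass_p")

   grants the shorter latency only when the conditional-move consumer
   depends on the producer through the IF_THEN_ELSE condition; the
   reservation names are made up.  */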

static unsigned int
rest_of_handle_peephole2 (void)
{
  if (HAVE_peephole2)
    peephole2_optimize ();

  return 0;
}

namespace {

const pass_data pass_data_peephole2 =
{
  RTL_PASS, /* type */
  "peephole2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_PEEPHOLE2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_peephole2 : public rtl_opt_pass
{
public:
  pass_peephole2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_peephole2, ctxt)
  {}

  /* opt_pass methods: */
  /* The epiphany backend creates a second instance of this pass, so we need
     a clone method.  */
  opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
  virtual bool gate (function *) { return (optimize > 0 && flag_peephole2); }
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_peephole2 ();
    }

}; // class pass_peephole2

} // anon namespace

rtl_opt_pass *
make_pass_peephole2 (gcc::context *ctxt)
{
  return new pass_peephole2 (ctxt);
}

namespace {

const pass_data pass_data_split_all_insns =
{
  RTL_PASS, /* type */
  "split1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  PROP_rtl_split_insns, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_all_insns : public rtl_opt_pass
{
public:
  pass_split_all_insns (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_all_insns, ctxt)
  {}

  /* opt_pass methods: */
  /* The epiphany backend creates a second instance of this pass, so
     we need a clone method.  */
  opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
  virtual unsigned int execute (function *)
    {
      split_all_insns ();
      return 0;
    }

}; // class pass_split_all_insns

} // anon namespace

rtl_opt_pass *
make_pass_split_all_insns (gcc::context *ctxt)
{
  return new pass_split_all_insns (ctxt);
}

namespace {

const pass_data pass_data_split_after_reload =
{
  RTL_PASS, /* type */
  "split2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_after_reload : public rtl_opt_pass
{
public:
  pass_split_after_reload (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_after_reload, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* If optimizing, then go ahead and split insns now.  */
      return optimize > 0;
    }

  virtual unsigned int execute (function *)
    {
      split_all_insns ();
      return 0;
    }

}; // class pass_split_after_reload

} // anon namespace

rtl_opt_pass *
make_pass_split_after_reload (gcc::context *ctxt)
{
  return new pass_split_after_reload (ctxt);
}

static bool
enable_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  return optimize > 0 && flag_schedule_insns_after_reload;
#else
  return false;
#endif
}

namespace {

const pass_data pass_data_split_before_sched2 =
{
  RTL_PASS, /* type */
  "split3", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_before_sched2 : public rtl_opt_pass
{
public:
  pass_split_before_sched2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return enable_split_before_sched2 ();
    }

  virtual unsigned int execute (function *)
    {
      split_all_insns ();
      return 0;
    }

}; // class pass_split_before_sched2

} // anon namespace

rtl_opt_pass *
make_pass_split_before_sched2 (gcc::context *ctxt)
{
  return new pass_split_before_sched2 (ctxt);
}

namespace {

const pass_data pass_data_split_before_regstack =
{
  RTL_PASS, /* type */
  "split4", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_before_regstack : public rtl_opt_pass
{
public:
  pass_split_before_regstack (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      split_all_insns ();
      return 0;
    }

}; // class pass_split_before_regstack

bool
pass_split_before_regstack::gate (function *)
{
#if HAVE_ATTR_length && defined (STACK_REGS)
  /* If flow2 creates new instructions which need splitting
     and scheduling after reload is not done, they might not be
     split until final which doesn't allow splitting
     if HAVE_ATTR_length.  */
# ifdef INSN_SCHEDULING
  return !enable_split_before_sched2 ();
# else
  return true;
# endif
#else
  return false;
#endif
}

} // anon namespace

rtl_opt_pass *
make_pass_split_before_regstack (gcc::context *ctxt)
{
  return new pass_split_before_regstack (ctxt);
}

namespace {

const pass_data pass_data_split_for_shorten_branches =
{
  RTL_PASS, /* type */
  "split5", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_for_shorten_branches : public rtl_opt_pass
{
public:
  pass_split_for_shorten_branches (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* The placement of the splitting that we do for shorten_branches
         depends on whether regstack is used by the target or not.  */
#if HAVE_ATTR_length && !defined (STACK_REGS)
      return true;
#else
      return false;
#endif
    }

  virtual unsigned int execute (function *)
    {
      return split_all_insns_noflow ();
    }

}; // class pass_split_for_shorten_branches

} // anon namespace

rtl_opt_pass *
make_pass_split_for_shorten_branches (gcc::context *ctxt)
{
  return new pass_split_for_shorten_branches (ctxt);
}

/* (Re)initialize the target information after a change in target.  */

void
recog_init ()
{
  /* The information is zero-initialized, so we don't need to do anything
     first time round.  */
  if (!this_target_recog->x_initialized)
    {
      this_target_recog->x_initialized = true;
      return;
    }
  memset (this_target_recog->x_bool_attr_masks, 0,
          sizeof (this_target_recog->x_bool_attr_masks));
  for (unsigned int i = 0; i < NUM_INSN_CODES; ++i)
    if (this_target_recog->x_op_alt[i])
      {
        free (this_target_recog->x_op_alt[i]);
        this_target_recog->x_op_alt[i] = 0;
      }
}