#endif
}
\f
-/* Follow any unconditional jump at LABEL;
+/* Follow any unconditional jump at LABEL, for the purpose of redirecting JUMP;
return the ultimate label reached by any such chain of jumps.
Return a suitable return rtx if the chain ultimately leads to a
return instruction.
If LABEL is not followed by a jump, return LABEL.
If the chain loops or we can't find end, return LABEL,
- since that tells caller to avoid changing the insn. */
+ since that tells caller to avoid changing the insn.
+ If the returned label is obtained by following a REG_CROSSING_JUMP
+ jump, set *CROSSING to true, otherwise set it to false. */
static rtx
-follow_jumps (rtx label)
+follow_jumps (rtx label, rtx jump, bool *crossing)
{
rtx insn;
rtx next;
rtx value = label;
int depth;
+ *crossing = false;
if (ANY_RETURN_P (label))
return label;
for (depth = 0;
|| GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
break;
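+      /* Give the target a chance to veto following this jump, for
+	 example when the branch being redirected cannot be made to
+	 reach across a hot/cold partition.  */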
+ if (!targetm.can_follow_jump (jump, insn))
+ break;
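+      /* Record whether any jump in the chain crosses between the hot
+	 and cold sections, so that the caller can put a REG_CROSSING_JUMP
+	 note on the jump it redirects.  */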
+ if (!*crossing)
+ *crossing
+ = find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX) != NULL_RTX;
value = this_label;
}
if (depth == 10)
if (new_thread != thread)
{
rtx label;
+ bool crossing = false;
gcc_assert (thread_if_true);
&& redirect_with_delay_list_safe_p (insn,
JUMP_LABEL (new_thread),
delay_list))
- new_thread = follow_jumps (JUMP_LABEL (new_thread));
+ new_thread = follow_jumps (JUMP_LABEL (new_thread), insn, &crossing);
if (ANY_RETURN_P (new_thread))
label = find_end_label (new_thread);
label = get_label_before (new_thread);
if (label)
- reorg_redirect_jump (insn, label);
+ {
+ reorg_redirect_jump (insn, label);
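+	      /* If the new target was reached through a section-crossing
+		 jump, the redirected jump now crosses sections too; mark
+		 it with a REG_CROSSING_JUMP note.  */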
+ if (crossing)
+ set_unique_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX);
+ }
}
return delay_list;
for (insn = first; insn; insn = next)
{
rtx other;
+ bool crossing;
next = next_active_insn (insn);
&& (condjump_p (insn) || condjump_in_parallel_p (insn))
&& !ANY_RETURN_P (target_label = JUMP_LABEL (insn)))
{
- target_label = skip_consecutive_labels (follow_jumps (target_label));
+ target_label
+ = skip_consecutive_labels (follow_jumps (target_label, insn,
+ &crossing));
if (ANY_RETURN_P (target_label))
target_label = find_end_label (target_label);
}
if (target_label && target_label != JUMP_LABEL (insn))
- reorg_redirect_jump (insn, target_label);
+ {
+ reorg_redirect_jump (insn, target_label);
+ if (crossing)
+ set_unique_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX);
+ }
/* See if this jump conditionally branches around an unconditional
jump. If so, invert this jump and point it to the target of the
/* If this jump goes to another unconditional jump, thread it, but
don't convert a jump into a RETURN here. */
- trial = skip_consecutive_labels (follow_jumps (target_label));
+ trial = skip_consecutive_labels (follow_jumps (target_label, delay_insn,
+ &crossing));
if (ANY_RETURN_P (trial))
trial = find_end_label (trial);
{
reorg_redirect_jump (delay_insn, trial);
target_label = trial;
+ if (crossing)
+ set_unique_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX);
}
/* If the first insn at TARGET_LABEL is redundant with a previous
bool, (void),
hook_bool_void_false)
+/* True if FOLLOWER may be modified to follow FOLLOWEE. */
+DEFHOOK
+(can_follow_jump,
+ "FOLLOWER and FOLLOWEE are JUMP_INSN instructions;\
+ return true if FOLLOWER may be modified to follow FOLLOWEE;\
+ false if it can't.\
+ For example, on some targets, certain kinds of branches can't be made to\
+ follow through a hot/cold partitioning.",
+ bool, (const_rtx follower, const_rtx followee),
+ hook_bool_const_rtx_const_rtx_true)
+
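As an illustration of how a back end might use the new hook (not part of this
patch; the function name and the partition check here are assumptions), a
target whose branches cannot be redirected across a hot/cold section boundary
could define something along these lines:

static bool
example_can_follow_jump (const_rtx follower ATTRIBUTE_UNUSED,
			 const_rtx followee)
{
  /* Refuse to follow any jump that itself crosses between the hot and
     cold sections; assume branches on this imaginary target cannot
     span that distance.  */
  if (find_reg_note (followee, REG_CROSSING_JUMP, NULL_RTX) != NULL_RTX)
    return false;
  return true;
}

#undef TARGET_CAN_FOLLOW_JUMP
#define TARGET_CAN_FOLLOW_JUMP example_can_follow_jump

With such a definition, follow_jumps stops extending the chain as soon as the
hook rejects a candidate, leaving the jump pointing at the label it had
already reached.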
/* Return a register class for which branch target register
optimizations should be applied. */
DEFHOOK