+2000-05-25 Jan Hubicka <jh@suse.cz>
+
+ * combine.c (try_combine): Use any_condjump_p, any_uncondjump_p
+ and pc_set at the place of simplejump_p and condjump_p.
+ * cse.c (record_jump_equiv): Likewise.
+ * emit-rtl.c (emit): Likewise.
+ * explow.c (find_next_ref): Likewise.
+ * flow.c (tidy_fallthru_edge): Likewise.
+ (init_propagate_block_info): Likewise.
+ * gcse.c (delete_null_pointer_checks): Likewise.
+ * ifcvt.c (cond_exec_get_condition, noce_get_condition,
+ dead_or_predicable): Likewise.
+ * integrate.c (copy_insn_list): Likewise.
+ * loop.c (scan_loop, verify_dominator, find_and_verify_loops,
+ for_each_insn_in_loop, check_dbra_loop, get_condition,
+ insert_bct, load_mems): Likewise.
+ * resource.c (find_dead_or_set_registers): Likewise.
+ * sibcall.c (simplejump_p): Likewise.
+ * unroll.c (copy_loop_body, reg_dead_after_loop): Likewise.
+
2000-05-25 David Edelsohn <edelsohn@gnu.org>
* rs6000.c (expand_block_move): Add 64-bit PowerPC doubleword move
BARRIER following it since it may have initially been a
conditional jump. It may also be the last nonnote insn. */
- if (GET_CODE (newpat) == RETURN || simplejump_p (i3))
+ if (GET_CODE (newpat) == RETURN || any_uncondjump_p (i3))
{
*new_direct_jump_p = 1;
{
int cond_known_true;
rtx op0, op1;
+ rtx set;
enum machine_mode mode, mode0, mode1;
int reversed_nonequality = 0;
enum rtx_code code;
/* Ensure this is the right kind of insn. */
- if (! condjump_p (insn) || simplejump_p (insn))
+ if (! any_condjump_p (insn))
return;
+ set = pc_set (insn);
/* See if this jump condition is known true or false. */
if (taken)
- cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
+ cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
else
- cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
+ cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
/* Get the type of comparison being done and the operands being compared.
If we had to reverse a non-equality condition, record that fact so we
know that it isn't valid for floating-point. */
- code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
- op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
- op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
+ code = GET_CODE (XEXP (SET_SRC (set), 0));
+ op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
+ op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
if (! cond_known_true)
basic block. If we are jumping to the end of our block, show
that we can have one usage of TO. */
- if (simplejump_p (insn))
+ if (any_uncondjump_p (insn))
{
if (to == 0)
{
else if (code == JUMP_INSN)
{
register rtx insn = emit_jump_insn (x);
- if (simplejump_p (insn) || GET_CODE (x) == RETURN)
+ if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
return emit_barrier ();
return insn;
}
return insn;
if (GET_CODE (insn) == JUMP_INSN)
{
- if (simplejump_p (insn))
+ if (any_uncondjump_p (insn))
next = JUMP_LABEL (insn);
else
return 0;
note. */
q = b->end;
if (GET_CODE (q) == JUMP_INSN
- && (simplejump_p (q)
+ && onlyjump_p (q)
+ && (any_uncondjump_p (q)
|| (b->succ == e && e->succ_next == NULL)))
{
#ifdef HAVE_cc0
/* If this was a conditional jump, we need to also delete
the insn that set cc0. */
- if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
+ if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
q = PREV_INSN (q);
#endif
from one side of the branch and not the other, record the register
as conditionally dead. */
if (GET_CODE (bb->end) == JUMP_INSN
- && condjump_p (bb->end)
- && ! simplejump_p (bb->end))
+ && any_condjump_p (bb->end))
{
regset_head diff_head;
regset diff = INITIALIZE_REG_SET (diff_head);
/* We only want conditional branches. */
if (GET_CODE (last_insn) != JUMP_INSN
- || !condjump_p (last_insn)
- || simplejump_p (last_insn))
+ || !any_condjump_p (last_insn)
+ || !onlyjump_p (last_insn))
continue;
/* LAST_INSN is a conditional jump. Get its condition. */
{
rtx test_if, cond;
- if (condjump_p (jump))
- test_if = SET_SRC (PATTERN (jump));
- else if (condjump_in_parallel_p (jump))
- test_if = SET_SRC (XVECEXP (PATTERN (jump), 0, 0));
+ if (any_condjump_p (jump))
+ test_if = pc_set (jump);
else
return NULL_RTX;
cond = XEXP (test_if, 0);
rtx *earliest;
{
rtx cond;
+ rtx set;
/* If the condition variable is a register and is MODE_INT, accept it.
Otherwise, fall back on get_condition. */
- if (! condjump_p (jump))
+ if (! any_condjump_p (jump))
return NULL_RTX;
- cond = XEXP (SET_SRC (PATTERN (jump)), 0);
+ set = pc_set (jump);
+
+ cond = XEXP (SET_SRC (set), 0);
if (GET_CODE (XEXP (cond, 0)) == REG
&& GET_MODE_CLASS (GET_MODE (XEXP (cond, 0))) == MODE_INT)
{
/* If this branches to JUMP_LABEL when the condition is false,
reverse the condition. */
- if (GET_CODE (XEXP (SET_SRC (PATTERN (jump)), 2)) == LABEL_REF
- && XEXP (XEXP (SET_SRC (PATTERN (jump)), 2), 0) == JUMP_LABEL (jump))
+ if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
+ && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump))
cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
GET_MODE (cond), XEXP (cond, 0),
XEXP (cond, 1));
break;
}
- if (! condjump_p (jump))
+ if (! any_condjump_p (jump))
return FALSE;
/* Find the extent of the conditional. */
/* If this used to be a conditional jump insn but whose branch
direction is now know, we must do something special. */
- if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
+ if (any_condjump_p (insn) && onlyjump_p (insn) && map->last_pc_value)
{
#ifdef HAVE_cc0
/* If the previous insn set cc0 for us, delete it. */
&& GET_CODE (PATTERN (temp)) == TRAP_IF
&& (this_is_any_uncondjump
|| (this_is_any_condjump
- && temp2 = get_condition (insn, &temp4))))
+ && (temp2 = get_condition (insn, &temp4)))))
{
rtx tc = TRAP_CONDITION (PATTERN (temp));
if (tc == const_true_rtx
- || (! this_is_uncondjump && rtx_equal_p (temp2, tc)))
+ || (! this_is_any_uncondjump && rtx_equal_p (temp2, tc)))
{
rtx new;
/* Replace an unconditional jump to a trap with a trap. */
- if (this_is_uncondjump)
+ if (this_is_any_uncondjump)
{
emit_barrier_after (emit_insn_before (gen_trap (), insn));
delete_jump (insn);
loop_entry_jump = p;
/* Loop entry must be unconditional jump (and not a RETURN) */
- if (simplejump_p (p)
+ if (any_uncondjump_p (p)
&& JUMP_LABEL (p) != 0
/* Check to see whether the jump actually
jumps out of the loop (meaning it's no loop).
followed a by barrier then loop end. */
&& ! (GET_CODE (p) == JUMP_INSN && JUMP_LABEL (p) == loop->top
&& NEXT_INSN (NEXT_INSN (p)) == loop_end
- && simplejump_p (p)))
+ && any_uncondjump_p (p)))
maybe_never = 1;
else if (GET_CODE (p) == NOTE)
{
which we do not have jump target information in the JUMP_LABEL
field (consider ADDR_VEC and ADDR_DIFF_VEC insns), then clear
LOOP->CONT_DOMINATOR. */
- if ((! condjump_p (insn)
- && ! condjump_in_parallel_p (insn))
+ if (! any_condjump_p (insn)
|| label == NULL_RTX)
{
loop->cont_dominator = NULL_RTX;
{
rtx label = JUMP_LABEL (insn);
- if (! condjump_p (insn) && ! condjump_in_parallel_p (insn))
+ if (! any_condjump_p (insn))
label = NULL_RTX;
loop = current_loop;
/* See if this is an unconditional branch outside the loop. */
if (this_loop
&& (GET_CODE (PATTERN (insn)) == RETURN
- || (simplejump_p (insn)
+ || (any_uncondjump_p (insn)
+ && onlyjump_p (insn)
&& (uid_loop[INSN_UID (JUMP_LABEL (insn))]
!= this_loop)))
&& get_max_uid () < max_uid_for_loop)
/* Just ignore jumps to labels that were never emitted.
These always indicate compilation errors. */
&& INSN_UID (JUMP_LABEL (p)) != 0
- && condjump_p (p)
- && ! simplejump_p (p)
+ && any_condjump_p (p) && onlyjump_p (p)
&& next_real_insn (JUMP_LABEL (p)) == our_next
/* If it's not safe to move the sequence, then we
mustn't try. */
if (GET_CODE (insn) == JUMP_INSN
&& GET_CODE (PATTERN (insn)) != RETURN
- && (!condjump_p (insn)
+ && (!any_condjump_p (insn)
|| (JUMP_LABEL (insn) != 0
&& JUMP_LABEL (insn) != loop->scan_start
&& !loop_insn_first_p (p, JUMP_LABEL (insn)))))
This can be any kind of jump, since we want to know if insns
will be executed if the loop is executed. */
&& !(JUMP_LABEL (p) == loop->top
- && ((NEXT_INSN (NEXT_INSN (p)) == loop->end && simplejump_p (p))
- || (NEXT_INSN (p) == loop->end && condjump_p (p)))))
+ && ((NEXT_INSN (NEXT_INSN (p)) == loop->end
+ && any_uncondjump_p (p))
+ || (NEXT_INSN (p) == loop->end && any_condjump_p (p)))))
{
rtx label = 0;
comparison = get_condition_for_loop (loop, jump);
if (comparison == 0)
return 0;
+ if (!onlyjump_p (jump))
+ return 0;
/* Try to compute whether the compare/branch at the loop end is one or
two instructions. */
{
rtx cond;
int reverse;
+ rtx set;
/* If this is not a standard conditional jump, we can't parse it. */
if (GET_CODE (jump) != JUMP_INSN
- || ! condjump_p (jump) || simplejump_p (jump))
+ || ! any_condjump_p (jump))
return 0;
+ set = pc_set (jump);
- cond = XEXP (SET_SRC (PATTERN (jump)), 0);
+ cond = XEXP (SET_SRC (set), 0);
/* If this branches to JUMP_LABEL when the condition is false, reverse
the condition. */
reverse
- = GET_CODE (XEXP (SET_SRC (PATTERN (jump)), 2)) == LABEL_REF
- && XEXP (XEXP (SET_SRC (PATTERN (jump)), 2), 0) == JUMP_LABEL (jump);
+ = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
+ && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);
return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX);
}
/* Make sure that the last loop insn is a conditional jump. */
if (GET_CODE (PREV_INSN (loop_end)) != JUMP_INSN
- || ! condjump_p (PREV_INSN (loop_end))
- || simplejump_p (PREV_INSN (loop_end)))
+ || ! onlyjump_p (PREV_INSN (loop_end))
+ || ! any_condjump_p (PREV_INSN (loop_end)))
{
if (loop_dump_stream)
fprintf (loop_dump_stream,
&& ! (GET_CODE (p) == JUMP_INSN
&& JUMP_LABEL (p) == loop->top
&& NEXT_INSN (NEXT_INSN (p)) == loop->end
- && simplejump_p (p)))
+ && any_uncondjump_p (p)))
{
- if (!condjump_p (p))
+ if (!any_condjump_p (p))
/* Something complicated. */
maybe_never = 1;
else
{
if (jump_count++ < 10)
{
- if (simplejump_p (this_jump_insn)
+ if (any_uncondjump_p (this_jump_insn)
|| GET_CODE (PATTERN (this_jump_insn)) == RETURN)
{
next = JUMP_LABEL (this_jump_insn);
*jump_target = JUMP_LABEL (this_jump_insn);
}
}
- else if (condjump_p (this_jump_insn)
- || condjump_in_parallel_p (this_jump_insn))
+ else if (any_condjump_p (this_jump_insn))
{
struct resources target_set, target_res;
struct resources fallthrough_res;
if (insn
&& GET_CODE (insn) == JUMP_INSN
- && simplejump_p (insn))
+ && any_uncondjump_p (insn))
return insn;
return orig_insn;
/* If this used to be a conditional jump insn but whose branch
direction is now known, we must do something special. */
- if (condjump_p (insn) && !simplejump_p (insn) && map->last_pc_value)
+ if (any_condjump_p (insn) && onlyjump_p (insn) && map->last_pc_value)
{
#ifdef HAVE_cc0
/* If the previous insn set cc0 for us, delete it. */
{
if (GET_CODE (PATTERN (insn)) == RETURN)
break;
- else if (! simplejump_p (insn)
+ else if (!any_uncondjump_p (insn)
/* Prevent infinite loop following infinite loops. */
|| jump_count++ > 20)
return 0;