+2013-03-23 Steven Bosscher <steven@gcc.gnu.org>
+
+ * config/avr/avr.c, config/bfin/bfin.c, config/c6x/c6x.c,
+ config/epiphany/epiphany.c, config/frv/frv.c, config/ia64/ia64.c,
+ config/iq2000/iq2000.c, config/mcore/mcore.c, config/mep/mep.c,
+ config/mmix/mmix.c, config/pa/pa.c, config/rs6000/rs6000.c,
+ config/s390/s390.c, config/sparc/sparc.c, config/spu/spu.c,
+ config/stormy16/stormy16.c, config/v850/v850.c, config/xtensa/xtensa.c,
+ dwarf2out.c, hw-doloop.c, resource.c, rtl.h: Where applicable, use
+ the predicates NOTE_P, NONJUMP_INSN_P, JUMP_P, CALL_P, LABEL_P, and
+ BARRIER_P instead of GET_CODE.
+
2013-03-23 Eric Botcazou <ebotcazou@adacore.com>
* config/sparc/sparc.c (sparc_emit_probe_stack_range): Fix small
rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
rtx set = single_set (this_insn);
- if (GET_CODE (this_insn) == CALL_INSN)
+ if (CALL_P (this_insn))
code = CALL_INSN;
- else if (GET_CODE (this_insn) == JUMP_INSN)
+ else if (JUMP_P (this_insn))
{
if (INSN_ANNULLED_BRANCH_P (this_insn))
return 0;
rtx t = NEXT_INSN (slot[0]);
while (t != slot[1])
{
- if (GET_CODE (t) != NOTE
- || NOTE_KIND (t) != NOTE_INSN_DELETED)
+ if (! NOTE_P (t) || NOTE_KIND (t) != NOTE_INSN_DELETED)
return false;
t = NEXT_INSN (t);
}
rtx t = NEXT_INSN (slot[1]);
while (t != slot[2])
{
- if (GET_CODE (t) != NOTE
- || NOTE_KIND (t) != NOTE_INSN_DELETED)
+ if (! NOTE_P (t) || NOTE_KIND (t) != NOTE_INSN_DELETED)
return false;
t = NEXT_INSN (t);
}
{
unsigned int reservation_mask = 0;
rtx insn = get_insns ();
- gcc_assert (GET_CODE (insn) == NOTE);
+ gcc_assert (NOTE_P (insn));
insn = next_real_insn (insn);
while (insn)
{
calls. */
if (entity == EPIPHANY_MSW_ENTITY_AND || entity == EPIPHANY_MSW_ENTITY_OR)
{
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
return 0;
return last_mode;
}
{
rtx insn = get_insns ();
while (insn != NULL
- && !(GET_CODE (insn) == JUMP_INSN
+ && !(JUMP_P (insn)
/* Ignore tablejump patterns. */
&& GET_CODE (PATTERN (insn)) != ADDR_VEC
&& GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
simply emit a different assembly directive because bralr and jmpl
execute in different units. */
for (insn = get_insns(); insn != NULL; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
{
rtx pattern = PATTERN (insn);
if (GET_CODE (pattern) == PARALLEL
HOST_WIDE_INT prob = -1;
enum { UNKNOWN, BACKWARD, FORWARD } jump_type = UNKNOWN;
- gcc_assert (GET_CODE (insn) == JUMP_INSN);
+ gcc_assert (JUMP_P (insn));
/* Assume any non-conditional jump is likely. */
if (! any_condjump_p (insn))
- There's no point putting a call in its own packet unless
we have to. */
if (frv_packet.num_insns > 0
- && GET_CODE (insn) == INSN
+ && NONJUMP_INSN_P (insn)
&& GET_MODE (insn) == TImode
&& GET_CODE (PATTERN (insn)) != COND_EXEC)
return false;
packet_group = &frv_packet.groups[frv_unit_groups[frv_insn_unit (insn)]];
last = frv_packet.insns[frv_packet.num_insns - 1];
- if (GET_CODE (last) != INSN)
+ if (! NONJUMP_INSN_P (last))
{
insn = emit_insn_before (PATTERN (insn), last);
frv_packet.insns[frv_packet.num_insns - 1] = insn;
default:
/* Calls mustn't be packed on a TOMCAT. */
- if (GET_CODE (insn) == CALL_INSN && frv_cpu_type == FRV_CPU_TOMCAT)
+ if (CALL_P (insn) && frv_cpu_type == FRV_CPU_TOMCAT)
frv_finish_packet (handle_packet);
/* Since the last instruction in a packet determines the EH
CLEAR_HARD_REG_SET (used_regs);
for (insn = BB_END (bb); insn != BB_HEAD (bb); insn = PREV_INSN (insn))
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
/* We can't predict what a call will do to volatile memory. */
memset (next_io, 0, sizeof (struct frv_io));
else
which = ".sptk";
}
- else if (GET_CODE (current_output_insn) == CALL_INSN)
+ else if (CALL_P (current_output_insn))
which = ".sptk";
else
which = ".dptk";
memset (rws_insn, 0, sizeof (rws_insn));
/* Don't bundle a call following another call. */
- if ((pat = prev_active_insn (insn))
- && GET_CODE (pat) == CALL_INSN)
+ if ((pat = prev_active_insn (insn)) && CALL_P (pat))
{
need_barrier = 1;
break;
flags.is_branch = 1;
/* Don't bundle a jump following a call. */
- if ((pat = prev_active_insn (insn))
- && GET_CODE (pat) == CALL_INSN)
+ if ((pat = prev_active_insn (insn)) && CALL_P (pat))
{
need_barrier = 1;
break;
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
{
if (insns_since_last_label)
last_label = insn;
insns_since_last_label = 0;
}
- else if (GET_CODE (insn) == NOTE
+ else if (NOTE_P (insn)
&& NOTE_KIND (insn) == NOTE_INSN_BASIC_BLOCK)
{
if (insns_since_last_label)
last_label = insn;
insns_since_last_label = 0;
}
- else if (GET_CODE (insn) == INSN
+ else if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == UNSPEC_VOLATILE
&& XINT (PATTERN (insn), 1) == UNSPECV_INSN_GROUP_BARRIER)
{
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == BARRIER)
+ if (BARRIER_P (insn))
{
rtx last = prev_active_insn (insn);
if (! last)
continue;
- if (GET_CODE (last) == JUMP_INSN
+ if (JUMP_P (last)
&& GET_CODE (PATTERN (last)) == ADDR_DIFF_VEC)
last = prev_active_insn (last);
if (recog_memoized (last) != CODE_FOR_insn_group_barrier)
int needed = group_barrier_needed (insn);
gcc_assert (!needed);
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
init_insn_group_barriers ();
stops_p [INSN_UID (insn)] = stop_before_p;
stop_before_p = 0;
&& last_scheduled_insn
&& scheduled_good_insn (last_scheduled_insn))))
|| (last_scheduled_insn
- && (GET_CODE (last_scheduled_insn) == CALL_INSN
+ && (CALL_P (last_scheduled_insn)
|| unknown_for_bundling_p (last_scheduled_insn))))
{
init_insn_group_barriers ();
state_transition (curr_state, dfa_stop_insn);
if (TARGET_EARLY_STOP_BITS)
*sort_p = (last_scheduled_insn == NULL_RTX
- || GET_CODE (last_scheduled_insn) != CALL_INSN);
+ || ! CALL_P (last_scheduled_insn));
else
*sort_p = 0;
return 1;
{
do
insn = next_active_insn (insn);
- while (GET_CODE (insn) == INSN
+ while (NONJUMP_INSN_P (insn)
&& get_attr_empty (insn) == EMPTY_YES);
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
else if (note)
{
insn != current_sched_info->next_tail;
insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == BARRIER)
+ if (BARRIER_P (insn))
{
rtx last = prev_active_insn (insn);
if (! last)
continue;
- if (GET_CODE (last) == JUMP_INSN
+ if (JUMP_P (last)
&& GET_CODE (PATTERN (last)) == ADDR_DIFF_VEC)
last = prev_active_insn (last);
if (recog_memoized (last) != CODE_FOR_insn_group_barrier)
else if (recog_memoized (insn) >= 0
&& important_for_bundling_p (insn))
seen_good_insn = 1;
- need_barrier_p = (GET_CODE (insn) == CALL_INSN
- || unknown_for_bundling_p (insn));
+ need_barrier_p = (CALL_P (insn) || unknown_for_bundling_p (insn));
}
}
}
rtx head = BB_HEAD (bb);
/* We only need such notes at code labels. */
- if (GET_CODE (head) != CODE_LABEL)
+ if (! LABEL_P (head))
continue;
if (NOTE_INSN_BASIC_BLOCK_P (NEXT_INSN (head)))
head = NEXT_INSN (head);
while (1)
{
- if (GET_CODE (insn) == CALL_INSN
+ if (CALL_P (insn)
&& GET_CODE (PATTERN (insn)) == COND_EXEC
&& find_reg_note (insn, REG_NORETURN, NULL_RTX))
{
if (insn)
{
/* Skip over insns that expand to nothing. */
- while (GET_CODE (insn) == INSN
+ while (NONJUMP_INSN_P (insn)
&& get_attr_empty (insn) == EMPTY_YES)
{
if (GET_CODE (PATTERN (insn)) == UNSPEC_VOLATILE
saw_stop = 1;
insn = prev_active_insn (insn);
}
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
if (! saw_stop)
emit_insn (gen_insn_group_barrier (GEN_INT (3)));
}
}
- if (GET_CODE (insn) == NOTE || ! RTX_FRAME_RELATED_P (insn))
+ if (NOTE_P (insn) || ! RTX_FRAME_RELATED_P (insn))
return;
/* Look for the ALLOC insn. */
/* Make sure that we don't put nop's after labels. */
next_insn = NEXT_INSN (cur_insn);
while (next_insn != 0
- && (GET_CODE (next_insn) == NOTE
- || GET_CODE (next_insn) == CODE_LABEL))
+ && (NOTE_P (next_insn) || LABEL_P (next_insn)))
next_insn = NEXT_INSN (next_insn);
dslots_load_total += num_nops;
|| operands == 0
|| cur_insn == 0
|| next_insn == 0
- || GET_CODE (next_insn) == CODE_LABEL
+ || LABEL_P (next_insn)
|| (set_reg = operands[0]) == 0)
{
dslots_number_nops = 0;
iq2000_load_reg4 = 0;
}
- if ( (GET_CODE (insn) == JUMP_INSN
- || GET_CODE (insn) == CALL_INSN
+ if ( (JUMP_P (insn)
+ || CALL_P (insn)
|| (GET_CODE (PATTERN (insn)) == RETURN))
&& NEXT_INSN (PREV_INSN (insn)) == insn)
{
}
if (TARGET_STATS
- && (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CALL_INSN))
+ && (JUMP_P (insn) || CALL_P (insn)))
dslots_jump_total ++;
}
\f
/* A unconditional jump has an unfilled delay slot if it is not part
of a sequence. A conditional jump normally has a delay slot. */
if (simplejump_p (insn)
- || ( (GET_CODE (insn) == JUMP_INSN
- || GET_CODE (insn) == CALL_INSN)))
+ || ( (JUMP_P (insn)
+ || CALL_P (insn))))
length += 4;
return length;
to assume that it is live. */
for (insn = NEXT_INSN (first); insn; insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
return 0; /* We lose track, assume it is alive. */
- else if (GET_CODE(insn) == CALL_INSN)
+ else if (CALL_P (insn))
{
/* Call's might use it for target or register parms. */
if (reg_referenced_p (reg, PATTERN (insn))
else if (dead_or_set_p (insn, reg))
return 1;
}
- else if (GET_CODE (insn) == INSN)
+ else if (NONJUMP_INSN_P (insn))
{
if (reg_referenced_p (reg, PATTERN (insn)))
return 0;
changed into a conditional. Only bother with SImode items. If
we wanted to be a little more aggressive, we could also do other
modes such as DImode with reg-reg move or load 0. */
- if (GET_CODE (insn) == INSN)
+ if (NONJUMP_INSN_P (insn))
{
rtx pat = PATTERN (insn);
rtx src, dst;
*/
}
- else if (GET_CODE (insn) == JUMP_INSN &&
- GET_CODE (PATTERN (insn)) == SET &&
- GET_CODE (XEXP (PATTERN (insn), 1)) == LABEL_REF)
+ else if (JUMP_P (insn)
+ && GET_CODE (PATTERN (insn)) == SET
+ && GET_CODE (XEXP (PATTERN (insn), 1)) == LABEL_REF)
return COND_BRANCH_INSN;
return COND_NO;
pat = PATTERN (insn);
- if (GET_CODE (insn) == INSN)
+ if (NONJUMP_INSN_P (insn))
{
dst = SET_DEST (pat);
src = SET_SRC (pat);
/* Check that the first insn is a candidate conditional jump. This is
the one that we'll eliminate. If not, advance to the next insn to
try. */
- if (GET_CODE (first) != JUMP_INSN ||
- GET_CODE (PATTERN (first)) != SET ||
- GET_CODE (XEXP (PATTERN (first), 1)) != IF_THEN_ELSE)
+ if (! JUMP_P (first)
+ || GET_CODE (PATTERN (first)) != SET
+ || GET_CODE (XEXP (PATTERN (first), 1)) != IF_THEN_ELSE)
return NEXT_INSN (first);
/* Extract some information we need. */
if (dump_file)
for (insn = insns; insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == INSN)
+ if (NONJUMP_INSN_P (insn))
before++;
/* We're looking for (set r2 r1) moves where r1 dies, followed by a
for (insn = insns; insn; insn = next)
{
next = next_nonnote_nondebug_insn (insn);
- if (GET_CODE (insn) != INSN)
+ if (! NONJUMP_INSN_P (insn))
continue;
pat = PATTERN (insn);
if (dump_file)
fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
- while (follow && GET_CODE (follow) == INSN
+ while (follow && NONJUMP_INSN_P (follow)
&& GET_CODE (PATTERN (follow)) == SET
&& !dead_or_set_p (follow, SET_SRC (pat))
&& !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
if (dump_file)
fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
- if (follow && GET_CODE (follow) == INSN
+ if (follow && NONJUMP_INSN_P (follow)
&& GET_CODE (PATTERN (follow)) == SET
&& find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
{
count = simplejump_p (insn) ? 0 : 1;
for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
{
- if (GET_CODE (prev) == CALL_INSN
- || BARRIER_P (prev))
+ if (CALL_P (prev) || BARRIER_P (prev))
break;
if (prev == JUMP_LABEL (insn))
*after* the label. */
rtx barrier;
for (barrier = PREV_INSN (prev);
- barrier && GET_CODE (barrier) == NOTE;
+ barrier && NOTE_P (barrier);
barrier = PREV_INSN (barrier))
;
- if (barrier && GET_CODE (barrier) != BARRIER)
+ if (barrier && ! BARRIER_P (barrier))
break;
}
else
if (LABEL_NUSES (prev) == 1)
{
for (user = PREV_INSN (prev);
- user && (INSN_P (user) || GET_CODE (user) == NOTE);
+ user && (INSN_P (user) || NOTE_P (user));
user = PREV_INSN (user))
- if (GET_CODE (user) == JUMP_INSN
- && JUMP_LABEL (user) == prev)
+ if (JUMP_P (user) && JUMP_LABEL (user) == prev)
{
safe = INSN_UID (user);
break;
/* Find the fist real insn the jump jumps to. */
label = ret = JUMP_LABEL (insn);
while (ret
- && (GET_CODE (ret) == NOTE
- || GET_CODE (ret) == CODE_LABEL
+ && (NOTE_P (ret)
+ || LABEL_P (ret)
|| GET_CODE (PATTERN (ret)) == USE))
ret = NEXT_INSN (ret);
if (recog_memoized (insn) >= 0
&& get_attr_slot (insn) == SLOT_COP)
{
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
|| ! last
|| recog_memoized (last) < 0
|| get_attr_slot (last) != SLOT_CORE
if (CONSTANT_P (modified_x)
/* Strangely enough, this is not included in CONSTANT_P.
FIXME: Ask/check about sanity here. */
- || GET_CODE (modified_x) == CODE_LABEL)
+ || LABEL_P (modified_x))
{
output_addr_const (stream, modified_x);
return;
rtx tmp;
/* Ignore anything that isn't an INSN or a JUMP_INSN. */
- if (GET_CODE (insn) != INSN && GET_CODE (insn) != JUMP_INSN)
+ if (! NONJUMP_INSN_P (insn) && ! JUMP_P (insn))
continue;
tmp = PATTERN (insn);
rtx tmp, next;
/* Ignore anything that isn't an INSN. */
- if (GET_CODE (insn) != INSN)
+ if (! NONJUMP_INSN_P (insn))
continue;
tmp = PATTERN (insn);
while (next)
{
/* Jumps, calls and labels stop our search. */
- if (GET_CODE (next) == JUMP_INSN
- || GET_CODE (next) == CALL_INSN
- || GET_CODE (next) == CODE_LABEL)
+ if (JUMP_P (next) || CALL_P (next) || LABEL_P (next))
break;
/* As does another fcmp insn. */
- if (GET_CODE (next) == INSN
+ if (NONJUMP_INSN_P (next)
&& GET_CODE (PATTERN (next)) == SET
&& GET_CODE (SET_DEST (PATTERN (next))) == REG
&& REGNO (SET_DEST (PATTERN (next))) == 0)
}
/* Is NEXT_INSN a branch? */
- if (next
- && GET_CODE (next) == JUMP_INSN)
+ if (next && JUMP_P (next))
{
rtx pattern = PATTERN (next);
always point to a valid instruction in the current function. */
/* Get the last real insn. */
- if (GET_CODE (insn) == NOTE)
+ if (NOTE_P (insn))
insn = prev_real_insn (insn);
/* If it is a sequence, then look inside. */
- if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
+ if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
/* If insn is a CALL_INSN, then it must be a call to a volatile
function (otherwise there would be epilogue insns). */
- if (insn && GET_CODE (insn) == CALL_INSN)
+ if (insn && CALL_P (insn))
{
fputs ("\tnop\n", file);
last_address += 4;
/* Jumps inside switch tables which have unfilled delay slots need
adjustment. */
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
&& GET_CODE (pat) == PARALLEL
&& get_attr_type (insn) == TYPE_BTABLE_BRANCH)
length += 4;
/* Block move pattern. */
- else if (GET_CODE (insn) == INSN
+ else if (NONJUMP_INSN_P (insn)
&& GET_CODE (pat) == PARALLEL
&& GET_CODE (XVECEXP (pat, 0, 0)) == SET
&& GET_CODE (XEXP (XVECEXP (pat, 0, 0), 0)) == MEM
&& GET_MODE (XEXP (XVECEXP (pat, 0, 0), 1)) == BLKmode)
length += compute_movmem_length (insn) - 4;
/* Block clear pattern. */
- else if (GET_CODE (insn) == INSN
+ else if (NONJUMP_INSN_P (insn)
&& GET_CODE (pat) == PARALLEL
&& GET_CODE (XVECEXP (pat, 0, 0)) == SET
&& GET_CODE (XEXP (XVECEXP (pat, 0, 0), 0)) == MEM
&& GET_MODE (XEXP (XVECEXP (pat, 0, 0), 0)) == BLKmode)
length += compute_clrmem_length (insn) - 4;
/* Conditional branch with an unfilled delay slot. */
- else if (GET_CODE (insn) == JUMP_INSN && ! simplejump_p (insn))
+ else if (JUMP_P (insn) && ! simplejump_p (insn))
{
/* Adjust a short backwards conditional with an unfilled delay slot. */
if (GET_CODE (pat) == SET
return;
}
- gcc_assert (GET_CODE (call_insn) == CALL_INSN);
+ gcc_assert (CALL_P (call_insn));
for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
link; link = XEXP (link, 1))
{
if (xdelay && dbr_sequence_length () != 0)
{
/* We can't handle a jump in the delay slot. */
- gcc_assert (GET_CODE (NEXT_INSN (insn)) != JUMP_INSN);
+ gcc_assert (! JUMP_P (NEXT_INSN (insn)));
final_scan_insn (NEXT_INSN (insn), asm_out_file,
optimize, 0, NULL);
output_asm_insn ("nop", xoperands);
/* We are done if there isn't a jump in the delay slot. */
- if (seq_length == 0 || GET_CODE (NEXT_INSN (insn)) != JUMP_INSN)
+ if (seq_length == 0 || ! JUMP_P (NEXT_INSN (insn)))
return "";
/* This call has an unconditional jump in its delay slot. */
rtx pat = PATTERN (insn);
unsigned long distance = -1;
- gcc_assert (GET_CODE (insn) == CALL_INSN);
+ gcc_assert (CALL_P (insn));
if (INSN_ADDRESSES_SET_P ())
{
delay slot. We can't do this in a sibcall as we don't
have a second call-clobbered scratch register available. */
if (seq_length != 0
- && GET_CODE (NEXT_INSN (insn)) != JUMP_INSN
+ && ! JUMP_P (NEXT_INSN (insn))
&& !sibcall)
{
final_scan_insn (NEXT_INSN (insn), asm_out_file,
indirect_call = 1;
if (seq_length != 0
- && GET_CODE (NEXT_INSN (insn)) != JUMP_INSN
+ && ! JUMP_P (NEXT_INSN (insn))
&& !sibcall
&& (!TARGET_PA_20
|| indirect_call
/* We are done if there isn't a jump in the delay slot. */
if (seq_length == 0
|| delay_insn_deleted
- || GET_CODE (NEXT_INSN (insn)) != JUMP_INSN)
+ || ! JUMP_P (NEXT_INSN (insn)))
return "";
/* A sibcall should never have a branch in the delay slot. */
pa_jump_in_call_delay (rtx insn)
{
- if (GET_CODE (insn) != JUMP_INSN)
+ if (! JUMP_P (insn))
return 0;
if (PREV_INSN (insn)
&& PREV_INSN (PREV_INSN (insn))
- && GET_CODE (next_real_insn (PREV_INSN (PREV_INSN (insn)))) == INSN)
+ && NONJUMP_INSN_P (next_real_insn (PREV_INSN (PREV_INSN (insn)))))
{
rtx test_insn = next_real_insn (PREV_INSN (PREV_INSN (insn)));
/* Find the previous real insn, skipping NOTEs. */
insn = PREV_INSN (insn);
- while (insn && GET_CODE (insn) == NOTE)
+ while (insn && NOTE_P (insn))
insn = PREV_INSN (insn);
/* Check for CALL_INSNs and millicode calls. */
if (insn
- && ((GET_CODE (insn) == CALL_INSN
+ && ((CALL_P (insn)
&& get_attr_type (insn) != TYPE_DYNCALL)
- || (GET_CODE (insn) == INSN
+ || (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) != SEQUENCE
&& GET_CODE (PATTERN (insn)) != USE
&& GET_CODE (PATTERN (insn)) != CLOBBER
unsigned int length, i;
/* Find an ADDR_VEC or ADDR_DIFF_VEC insn to explode. */
- if (GET_CODE (insn) != JUMP_INSN
+ if (! JUMP_P (insn)
|| (GET_CODE (PATTERN (insn)) != ADDR_VEC
&& GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC))
continue;
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
{
/* Find an ADDR_VEC insn. */
- if (GET_CODE (insn) != JUMP_INSN
+ if (! JUMP_P (insn)
|| (GET_CODE (PATTERN (insn)) != ADDR_VEC
&& GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC))
continue;
/* We only care about INSNs, JUMP_INSNs, and CALL_INSNs.
Also ignore any special USE insns. */
- if ((GET_CODE (anchor) != INSN
- && GET_CODE (anchor) != JUMP_INSN
- && GET_CODE (anchor) != CALL_INSN)
+ if ((! NONJUMP_INSN_P (anchor) && ! JUMP_P (anchor) && ! CALL_P (anchor))
|| GET_CODE (PATTERN (anchor)) == USE
|| GET_CODE (PATTERN (anchor)) == CLOBBER
|| GET_CODE (PATTERN (anchor)) == ADDR_VEC
floater;
floater = PREV_INSN (floater))
{
- if (GET_CODE (floater) == NOTE
- || (GET_CODE (floater) == INSN
+ if (NOTE_P (floater)
+ || (NONJUMP_INSN_P (floater)
&& (GET_CODE (PATTERN (floater)) == USE
|| GET_CODE (PATTERN (floater)) == CLOBBER)))
continue;
/* Anything except a regular INSN will stop our search. */
- if (GET_CODE (floater) != INSN
+ if (! NONJUMP_INSN_P (floater)
|| GET_CODE (PATTERN (floater)) == ADDR_VEC
|| GET_CODE (PATTERN (floater)) == ADDR_DIFF_VEC)
{
{
for (floater = anchor; floater; floater = NEXT_INSN (floater))
{
- if (GET_CODE (floater) == NOTE
- || (GET_CODE (floater) == INSN
+ if (NOTE_P (floater)
+ || (NONJUMP_INSN_P (floater)
&& (GET_CODE (PATTERN (floater)) == USE
|| GET_CODE (PATTERN (floater)) == CLOBBER)))
continue;
/* Anything except a regular INSN will stop our search. */
- if (GET_CODE (floater) != INSN
+ if (! NONJUMP_INSN_P (floater)
|| GET_CODE (PATTERN (floater)) == ADDR_VEC
|| GET_CODE (PATTERN (floater)) == ADDR_DIFF_VEC)
{
int
pa_insn_refs_are_delayed (rtx insn)
{
- return ((GET_CODE (insn) == INSN
+ return ((NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) != SEQUENCE
&& GET_CODE (PATTERN (insn)) != USE
&& GET_CODE (PATTERN (insn)) != CLOBBER
if (WORLD_SAVE_P (info_ptr))
{
rtx insn;
- for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
- if ( GET_CODE (insn) == CALL_INSN
- && SIBLING_CALL_P (insn))
+ for (insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
+ if (CALL_P (insn) && SIBLING_CALL_P (insn))
{
info_ptr->world_save_p = 0;
break;
if (!insn || !INSN_P (insn))
return false;
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
return false;
return is_load_insn1 (PATTERN (insn), load_mem);
enum attr_type type;
if (!insn
- || GET_CODE (insn) == NOTE
+ || NOTE_P (insn)
|| DEBUG_INSN_P (insn)
|| GET_CODE (PATTERN (insn)) == USE
|| GET_CODE (PATTERN (insn)) == CLOBBER)
enum attr_type type;
if (!insn
- || GET_CODE (insn) == NOTE
+ || NOTE_P (insn)
|| DEBUG_INSN_P (insn)
|| GET_CODE (PATTERN (insn)) == USE
|| GET_CODE (PATTERN (insn)) == CLOBBER)
{
rtx target, pat;
- if (GET_CODE (dep_rtx) == INSN)
+ if (NONJUMP_INSN_P (dep_rtx))
dep_rtx = PATTERN (dep_rtx);
if (GET_CODE (dep_rtx) == SET)
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) != JUMP_INSN)
+ if (! JUMP_P (insn))
continue;
pat = PATTERN (insn);
static rtx
s390_execute_label (rtx insn)
{
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == PARALLEL
&& GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == UNSPEC
&& XINT (XVECEXP (PATTERN (insn), 0, 0), 1) == UNSPEC_EXECUTE)
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SET
&& GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC_VOLATILE
&& XINT (SET_SRC (PATTERN (insn)), 1) == UNSPECV_MAIN_POOL)
{
s390_add_execute (pool, insn);
}
- else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
+ else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
{
rtx pool_ref = NULL_RTX;
find_constant_pool_ref (PATTERN (insn), &pool_ref);
if (INSN_P (insn))
replace_ltrel_base (&PATTERN (insn));
- if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
+ if (NONJUMP_INSN_P (insn) || CALL_P (insn))
{
rtx addr, pool_ref = NULL_RTX;
find_constant_pool_ref (PATTERN (insn), &pool_ref);
s390_add_execute (curr_pool, insn);
s390_add_pool_insn (curr_pool, insn);
}
- else if (GET_CODE (insn) == INSN || CALL_P (insn))
+ else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
{
rtx pool_ref = NULL_RTX;
find_constant_pool_ref (PATTERN (insn), &pool_ref);
}
}
- if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
+ if (JUMP_P (insn) || LABEL_P (insn))
{
if (curr_pool)
s390_add_pool_insn (curr_pool, insn);
Those will have an effect on code size, which we need to
consider here. This calculation makes rather pessimistic
worst-case assumptions. */
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
extra_size += 6;
if (chunk_size < S390_POOL_CHUNK_MIN
continue;
/* Pool chunks can only be inserted after BARRIERs ... */
- if (GET_CODE (insn) == BARRIER)
+ if (BARRIER_P (insn))
{
s390_end_pool (curr_pool, insn);
curr_pool = NULL;
if (!section_switch_p)
{
/* We can insert the barrier only after a 'real' insn. */
- if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
+ if (! NONJUMP_INSN_P (insn) && ! CALL_P (insn))
continue;
if (get_attr_length (insn) == 0)
continue;
Don't do that, however, if it is the label before
a jump table. */
- if (GET_CODE (insn) == CODE_LABEL
+ if (LABEL_P (insn)
&& (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
{
rtx vec_insn = next_real_insn (insn);
- rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
+ rtx vec_pat = vec_insn && JUMP_P (vec_insn) ?
PATTERN (vec_insn) : NULL_RTX;
if (!vec_pat
|| !(GET_CODE (vec_pat) == ADDR_VEC
/* If we have a direct jump (conditional or unconditional)
or a casesi jump, check all potential targets. */
- else if (GET_CODE (insn) == JUMP_INSN)
+ else if (JUMP_P (insn))
{
rtx pat = PATTERN (insn);
if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
/* Find the jump table used by this casesi jump. */
rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
rtx vec_insn = next_real_insn (vec_label);
- rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
+ rtx vec_pat = vec_insn && JUMP_P (vec_insn) ?
PATTERN (vec_insn) : NULL_RTX;
if (vec_pat
&& (GET_CODE (vec_pat) == ADDR_VEC
/* Insert base register reload insns at every far label. */
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == CODE_LABEL
+ if (LABEL_P (insn)
&& bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
{
struct constant_pool *pool = s390_find_pool (pool_list, insn);
if (!curr_pool)
continue;
- if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
+ if (NONJUMP_INSN_P (insn) || CALL_P (insn))
{
rtx addr, pool_ref = NULL_RTX;
find_constant_pool_ref (PATTERN (insn), &pool_ref);
rtx jump = barrier? PREV_INSN (barrier) : NULL_RTX;
rtx label = NEXT_INSN (curr_pool->pool_insn);
- if (jump && GET_CODE (jump) == JUMP_INSN
- && barrier && GET_CODE (barrier) == BARRIER
- && label && GET_CODE (label) == CODE_LABEL
+ if (jump && JUMP_P (jump)
+ && barrier && BARRIER_P (barrier)
+ && label && LABEL_P (label)
&& GET_CODE (PATTERN (jump)) == SET
&& SET_DEST (PATTERN (jump)) == pc_rtx
&& GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
{
rtx next_insn = NEXT_INSN (insn);
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SET
&& GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
&& XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
next_insn = NEXT_INSN (insn);
- if (GET_CODE (insn) != INSN)
+ if (! NONJUMP_INSN_P (insn))
continue;
if (GET_CODE (PATTERN (insn)) == PARALLEL
if (!next)
return 1;
- if (GET_CODE (next) == INSN
+ if (NONJUMP_INSN_P (next)
&& GET_CODE (PATTERN (next)) == SEQUENCE)
next = XVECEXP (PATTERN (next), 0, 0);
- else if (GET_CODE (next) == CALL_INSN
+ else if (CALL_P (next)
&& GET_CODE (PATTERN (next)) == PARALLEL)
{
rtx delay = XVECEXP (PATTERN (next), 0, 1);
int regno;
rtx pat;
- if (GET_CODE (trial) != INSN)
+ if (! NONJUMP_INSN_P (trial))
return 0;
if (get_attr_length (trial) != 1)
{
rtx pat;
- if (GET_CODE (trial) != INSN || GET_CODE (PATTERN (trial)) != SET)
+ if (! NONJUMP_INSN_P (trial) || GET_CODE (PATTERN (trial)) != SET)
return 0;
if (get_attr_length (trial) != 1)
last_real_insn = prev_real_insn (insn);
if (last_real_insn
- && GET_CODE (last_real_insn) == INSN
+ && NONJUMP_INSN_P (last_real_insn)
&& GET_CODE (PATTERN (last_real_insn)) == SEQUENCE)
last_real_insn = XVECEXP (PATTERN (last_real_insn), 0, 0);
static struct spu_bb_info *spu_bb_info;
#define STOP_HINT_P(INSN) \
- (GET_CODE(INSN) == CALL_INSN \
+ (CALL_P(INSN) \
|| INSN_CODE(INSN) == CODE_FOR_divmodsi4 \
|| INSN_CODE(INSN) == CODE_FOR_udivmodsi4)
static rtx
get_branch_target (rtx branch)
{
- if (GET_CODE (branch) == JUMP_INSN)
+ if (JUMP_P (branch))
{
rtx set, src;
return src;
}
- else if (GET_CODE (branch) == CALL_INSN)
+ else if (CALL_P (branch))
{
rtx call;
/* All of our call patterns are in a PARALLEL and the CALL is
if (reg_mentioned_p (reg, and_insn))
return;
- if (GET_CODE (and_insn) != NOTE
- && GET_CODE (and_insn) != INSN)
+ if (! NOTE_P (and_insn) && ! NONJUMP_INSN_P (and_insn))
return;
}
}
if (reg_mentioned_p (reg, and_insn))
return;
- if (GET_CODE (and_insn) != NOTE
- && GET_CODE (and_insn) != INSN)
+ if (! NOTE_P (and_insn) && ! NONJUMP_INSN_P (and_insn))
return;
}
break;
if (reg_mentioned_p (reg, shift)
- || (GET_CODE (shift) != NOTE
- && GET_CODE (shift) != INSN))
+ || (! NOTE_P (shift) && ! NONJUMP_INSN_P (shift)))
{
shift = NULL_RTX;
break;
if (reg_mentioned_p (reg, load))
return;
- if (GET_CODE (load) != NOTE
- && GET_CODE (load) != INSN)
+ if (! NOTE_P (load) && ! NONJUMP_INSN_P (load))
return;
}
if (!load)
IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
INSN_UID (first_insn), INSN_UID (last_insn));
- if (GET_CODE (first_insn) == NOTE)
+ if (NOTE_P (first_insn))
first_insn = next_nonnote_insn (first_insn);
last_insn = next_nonnote_insn (last_insn);
for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == INSN)
+ if (NONJUMP_INSN_P (insn))
{
rtx pattern = single_set (insn);
/* Optimize back to back cases of ep <- r1 & r1 <- ep. */
insn = prev_nonnote_insn (first_insn);
- if (insn && GET_CODE (insn) == INSN
+ if (insn && NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SET
&& SET_DEST (PATTERN (insn)) == *p_ep
&& SET_SRC (PATTERN (insn)) == *p_r1)
{
rtx body = PATTERN (insn);
- if (GET_CODE (body) == JUMP_INSN)
+ if (JUMP_P (body))
{
output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
done = 1;
if (temp->last
&& temp->first == temp->last
&& TREE_CODE (decl) == PARM_DECL
- && GET_CODE (temp->first->loc) == NOTE
+ && NOTE_P (temp->first->loc)
&& NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
&& DECL_INCOMING_RTL (decl)
&& NOTE_VAR_LOCATION_LOC (temp->first->loc)
*listp = new_loc_list (descr, node->label, endname, secname);
if (TREE_CODE (decl) == PARM_DECL
&& node == loc_list->first
- && GET_CODE (node->loc) == NOTE
+ && NOTE_P (node->loc)
&& strcmp (node->label, endname) == 0)
(*listp)->force = true;
listp = &(*listp)->dw_loc_next;
next_note = NEXT_INSN (loc_note);
if (! next_note
|| INSN_DELETED_P (next_note)
- || GET_CODE (next_note) != NOTE
+ || ! NOTE_P (next_note)
|| (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
&& NOTE_KIND (next_note) != NOTE_INSN_CALL_ARG_LOCATION))
next_note = NULL_RTX;
rtx tail = BB_END (bb);
rtx insn, reg;
- while (tail && GET_CODE (tail) == NOTE && tail != BB_HEAD (bb))
+ while (tail && NOTE_P (tail) && tail != BB_HEAD (bb))
tail = PREV_INSN (tail);
if (tail == NULL_RTX)
&& NEXT_INSN (PREV_INSN (insn)) != insn)
{
rtx next = NEXT_INSN (insn);
- enum rtx_code code = GET_CODE (next);
- while ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
+ while ((NONJUMP_INSN_P (next) || JUMP_P (next) || CALL_P (next))
&& INSN_FROM_TARGET_P (next))
{
insn = next;
next = NEXT_INSN (insn);
- code = GET_CODE (next);
}
}
#define NOTE_KIND(INSN) XCINT (INSN, 5, NOTE)
/* Nonzero if INSN is a note marking the beginning of a basic block. */
-#define NOTE_INSN_BASIC_BLOCK_P(INSN) \
- (GET_CODE (INSN) == NOTE \
- && NOTE_KIND (INSN) == NOTE_INSN_BASIC_BLOCK)
+#define NOTE_INSN_BASIC_BLOCK_P(INSN) \
+ (NOTE_P (INSN) && NOTE_KIND (INSN) == NOTE_INSN_BASIC_BLOCK)
/* Variable declaration and the location of a variable. */
#define PAT_VAR_LOCATION_DECL(PAT) (XCTREE ((PAT), 0, VAR_LOCATION))
/* Retrieve the kind of LABEL. */
#define LABEL_KIND(LABEL) __extension__ \
({ __typeof (LABEL) const _label = (LABEL); \
- if (GET_CODE (_label) != CODE_LABEL) \
+ if (! LABEL_P (_label)) \
rtl_check_failed_flag ("LABEL_KIND", _label, __FILE__, __LINE__, \
__FUNCTION__); \
(enum label_kind) ((_label->jump << 1) | _label->call); })
#define SET_LABEL_KIND(LABEL, KIND) do { \
__typeof (LABEL) const _label = (LABEL); \
const unsigned int _kind = (KIND); \
- if (GET_CODE (_label) != CODE_LABEL) \
+ if (! LABEL_P (_label)) \
rtl_check_failed_flag ("SET_LABEL_KIND", _label, __FILE__, __LINE__, \
__FUNCTION__); \
_label->jump = ((_kind >> 1) & 1); \