/* Emit RTL for the GCC expander.
- Copyright (C) 1987-2018 Free Software Foundation, Inc.
+ Copyright (C) 1987-2021 Free Software Foundation, Inc.
This file is part of GCC.
#include "tree-eh.h"
#include "explow.h"
#include "expr.h"
-#include "params.h"
#include "builtins.h"
#include "rtl-iter.h"
#include "stor-layout.h"
#include "opts.h"
#include "predict.h"
#include "rtx-vector-builder.h"
+#include "gimple.h"
+#include "gimple-ssa.h"
+#include "gimplify.h"
struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
-rtx cc0_rtx;
/* Marker used for denoting an INSN, which should never be accessed (i.e.,
this pointer should normally never be dereferenced), but is required to be
/* Return true if the given memory attributes are equal. */
bool
-mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
+mem_attrs_eq_p (const class mem_attrs *p, const class mem_attrs *q)
{
if (p == q)
return true;
set_regno_raw (x, regno, nregs);
}
+/* Initialize a fresh REG rtx with mode MODE and register REGNO. */
+
+rtx
+init_raw_REG (rtx x, machine_mode mode, unsigned int regno)
+{
+ set_mode_and_regno (x, mode, regno);
+ REG_ATTRS (x) = NULL;
+ ORIGINAL_REGNO (x) = regno;
+ return x;
+}
+
/* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
don't attempt to share with the various global pieces of rtl (such as
frame_pointer_rtx). */
gen_raw_REG (machine_mode mode, unsigned int regno)
{
rtx x = rtx_alloc (REG MEM_STAT_INFO);
- set_mode_and_regno (x, mode, regno);
- REG_ATTRS (x) = NULL;
- ORIGINAL_REGNO (x) = regno;
+ init_raw_REG (x, mode, regno);
return x;
}
&& GET_MODE_INNER (imode) == omode)
;
/* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
- i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
- represent this. It's questionable if this ought to be represented at
- all -- why can't this all be hidden in post-reload splitters that make
- arbitrarily mode changes to the registers themselves. */
- else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
+ i.e. (subreg:V4SF (reg:SF) 0) or (subreg:V4SF (reg:V2SF) 0). This
+ surely isn't the cleanest way to represent this. It's questionable
+ if this ought to be represented at all -- why can't this all be hidden
+ in post-reload splitters that make arbitrary mode changes to the
+ registers themselves. */
+ else if (VECTOR_MODE_P (omode)
+ && GET_MODE_INNER (omode) == GET_MODE_INNER (imode))
;
/* Subregs involving floating point modes are not allowed to
change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
gcc_assert (known_le (msize, (unsigned int) UNITS_PER_WORD)
|| known_eq (msize, GET_MODE_UNIT_SIZE (GET_MODE (x))));
- result = simplify_gen_subreg (mode, x, GET_MODE (x),
- subreg_highpart_offset (mode, GET_MODE (x)));
- gcc_assert (result);
-
- /* simplify_gen_subreg is not guaranteed to return a valid operand for
- the target if we have a MEM. gen_highpart must return a valid operand,
- emitting code if necessary to do so. */
- if (MEM_P (result))
+ /* gen_lowpart_common handles a lot of special cases due to needing to handle
+ paradoxical subregs; it only calls simplify_gen_subreg when certain that
+ it will produce something meaningful. The only case we need to handle
+ specially here is MEM. */
+ if (MEM_P (x))
{
- result = validize_mem (result);
- gcc_assert (result);
+ poly_int64 offset = subreg_highpart_offset (mode, GET_MODE (x));
+ return adjust_address (x, mode, offset);
}
+ result = simplify_gen_subreg (mode, x, GET_MODE (x),
+ subreg_highpart_offset (mode, GET_MODE (x)));
+ /* Since we handle MEM directly above, we should never get a MEM back
+ from simplify_gen_subreg. */
+ gcc_assert (result && !MEM_P (result));
+
return result;
}
if (mode != BLKmode && mode != VOIDmode)
{
- /* If this is a register which can not be accessed by words, copy it
+ /* If this is a register which cannot be accessed by words, copy it
to a pseudo register. */
if (REG_P (op))
op = copy_to_reg (op);
{
poly_int64 apply_bitpos = 0;
tree type;
- struct mem_attrs attrs, *defattrs, *refattrs;
+ class mem_attrs attrs, *defattrs, *refattrs;
addr_space_t as;
/* It can happen that type_for_mode was given a mode for which there
new_size = DECL_SIZE_UNIT (t);
}
- /* ??? If we end up with a constant here do record a MEM_EXPR. */
- else if (CONSTANT_CLASS_P (t))
+ /* ??? If we end up with a constant or a descriptor do not
+ record a MEM_EXPR. */
+ else if (CONSTANT_CLASS_P (t)
+ || TREE_CODE (t) == CONSTRUCTOR)
;
/* If this is a field reference, record it. */
new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
}
- /* If this is an array reference, look for an outer field reference. */
- else if (TREE_CODE (t) == ARRAY_REF)
- {
- tree off_tree = size_zero_node;
- /* We can't modify t, because we use it at the end of the
- function. */
- tree t2 = t;
-
- do
- {
- tree index = TREE_OPERAND (t2, 1);
- tree low_bound = array_ref_low_bound (t2);
- tree unit_size = array_ref_element_size (t2);
-
- /* We assume all arrays have sizes that are a multiple of a byte.
- First subtract the lower bound, if any, in the type of the
- index, then convert to sizetype and multiply by the size of
- the array element. */
- if (! integer_zerop (low_bound))
- index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
- index, low_bound);
-
- off_tree = size_binop (PLUS_EXPR,
- size_binop (MULT_EXPR,
- fold_convert (sizetype,
- index),
- unit_size),
- off_tree);
- t2 = TREE_OPERAND (t2, 0);
- }
- while (TREE_CODE (t2) == ARRAY_REF);
-
- if (DECL_P (t2)
- || (TREE_CODE (t2) == COMPONENT_REF
- /* For trailing arrays t2 doesn't have a size that
- covers all valid accesses. */
- && ! array_at_struct_end_p (t)))
- {
- attrs.expr = t2;
- attrs.offset_known_p = false;
- if (poly_int_tree_p (off_tree, &attrs.offset))
- {
- attrs.offset_known_p = true;
- apply_bitpos = bitpos;
- }
- }
- /* Else do not record a MEM_EXPR. */
- }
-
- /* If this is an indirect reference, record it. */
- else if (TREE_CODE (t) == MEM_REF
- || TREE_CODE (t) == TARGET_MEM_REF)
+ /* Else record it. */
+ else
{
+ gcc_assert (handled_component_p (t)
+ || TREE_CODE (t) == MEM_REF
+ || TREE_CODE (t) == TARGET_MEM_REF);
attrs.expr = t;
attrs.offset_known_p = true;
attrs.offset = 0;
apply_bitpos = bitpos;
}
+ /* If this is a reference based on a partitioned decl replace the
+ base with a MEM_REF of the pointer representative we created
+ during stack slot partitioning. */
+ if (attrs.expr
+ && VAR_P (base)
+ && ! is_global_var (base)
+ && cfun->gimple_df->decls_to_pointers != NULL)
+ {
+ tree *namep = cfun->gimple_df->decls_to_pointers->get (base);
+ if (namep)
+ {
+ attrs.expr = unshare_expr (attrs.expr);
+ tree *orig_base = &attrs.expr;
+ while (handled_component_p (*orig_base))
+ orig_base = &TREE_OPERAND (*orig_base, 0);
+ tree aptrt = reference_alias_ptr_type (*orig_base);
+ *orig_base = build2 (MEM_REF, TREE_TYPE (*orig_base), *namep,
+ build_int_cst (aptrt, 0));
+ }
+ }
+
/* Compute the alignment. */
unsigned int obj_align;
unsigned HOST_WIDE_INT obj_bitpos;
{
rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
machine_mode mmode = GET_MODE (new_rtx);
- struct mem_attrs *defattrs;
+ class mem_attrs *defattrs;
mem_attrs attrs (*get_mem_attrs (memref));
defattrs = mode_mem_attrs[(int) mmode];
rtx addr = XEXP (memref, 0);
rtx new_rtx;
scalar_int_mode address_mode;
- struct mem_attrs attrs (*get_mem_attrs (memref)), *defattrs;
+ class mem_attrs attrs (*get_mem_attrs (memref)), *defattrs;
unsigned HOST_WIDE_INT max_align;
#ifdef POINTERS_EXTEND_UNSIGNED
scalar_int_mode pointer_mode
{
rtx new_rtx, addr = XEXP (memref, 0);
machine_mode address_mode;
- struct mem_attrs *defattrs;
+ class mem_attrs *defattrs;
mem_attrs attrs (*get_mem_attrs (memref));
address_mode = get_address_mode (memref);
set_last_insn (last);
cur_insn_uid = 0;
- if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
+ if (param_min_nondebug_insn_uid || MAY_HAVE_DEBUG_INSNS)
{
int debug_count = 0;
- cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
+ cur_insn_uid = param_min_nondebug_insn_uid - 1;
cur_debug_insn_uid = 0;
for (insn = first; insn; insn = NEXT_INSN (insn))
- if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
+ if (INSN_UID (insn) < param_min_nondebug_insn_uid)
cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
else
{
}
if (debug_count)
- cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
+ cur_debug_insn_uid = param_min_nondebug_insn_uid + debug_count;
else
cur_debug_insn_uid++;
}
case LABEL_REF:
case CODE_LABEL:
case PC:
- case CC0:
case RETURN:
case SIMPLE_RETURN:
case SCRATCH:
/* SCRATCH must be shared because they represent distinct values. */
return;
case CLOBBER:
- /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
+ /* Share clobbers of hard registers, but do not share pseudo reg
clobbers or clobbers of hard registers that originated as pseudos.
This is needed to allow safe register renaming. */
if (REG_P (XEXP (x, 0))
case LABEL_REF:
case CODE_LABEL:
case PC:
- case CC0:
case RETURN:
case SIMPLE_RETURN:
case SCRATCH:
/* SCRATCH must be shared because they represent distinct values. */
return;
case CLOBBER:
- /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
+ /* Share clobbers of hard registers, but do not share pseudo reg
clobbers or clobbers of hard registers that originated as pseudos.
This is needed to allow safe register renaming. */
if (REG_P (XEXP (x, 0))
case SYMBOL_REF:
case CODE_LABEL:
case PC:
- case CC0:
case RETURN:
case SIMPLE_RETURN:
return;
differences due to debug insns, and not be affected by
-fmin-insn-uid, to avoid excessive table size and to simplify
debugging of -fcompare-debug failures. */
- if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
+ if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
n -= cur_debug_insn_uid;
else
- n -= MIN_NONDEBUG_INSN_UID;
+ n -= param_min_nondebug_insn_uid;
return n;
}
SEQUENCEs. */
rtx_insn *
-next_real_insn (rtx uncast_insn)
+next_real_insn (rtx_insn *insn)
{
- rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
-
while (insn)
{
insn = NEXT_INSN (insn);
return insn;
}
\f
-/* Return the next insn that uses CC0 after INSN, which is assumed to
- set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
- applied to the result of this function should yield INSN).
-
- Normally, this is simply the next insn. However, if a REG_CC_USER note
- is present, it contains the insn that uses CC0.
-
- Return 0 if we can't find the insn. */
-
-rtx_insn *
-next_cc0_user (rtx_insn *insn)
-{
- rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
-
- if (note)
- return safe_as_a <rtx_insn *> (XEXP (note, 0));
-
- insn = next_nonnote_insn (insn);
- if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
- insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
-
- if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
- return insn;
-
- return 0;
-}
-
-/* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
- note, it is the previous insn. */
-
-rtx_insn *
-prev_cc0_setter (rtx_insn *insn)
-{
- rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
-
- if (note)
- return safe_as_a <rtx_insn *> (XEXP (note, 0));
-
- insn = prev_nonnote_insn (insn);
- gcc_assert (sets_cc0_p (PATTERN (insn)));
-
- return insn;
-}
-
/* Find a RTX_AUTOINC class rtx which matches DATA. */
static int
int njumps = 0;
rtx_insn *call_insn = NULL;
- /* We're not good at redistributing frame information. */
- if (RTX_FRAME_RELATED_P (trial))
- return trial;
-
if (any_condjump_p (trial)
&& (note = find_reg_note (trial, REG_BR_PROB, 0)))
split_branch_probability
if (!seq)
return trial;
+ int split_insn_count = 0;
/* Avoid infinite loop if any insn of the result matches
the original pattern. */
insn_last = seq;
if (INSN_P (insn_last)
&& rtx_equal_p (PATTERN (insn_last), pat))
return trial;
+ split_insn_count++;
if (!NEXT_INSN (insn_last))
break;
insn_last = NEXT_INSN (insn_last);
}
+ /* We're not good at redistributing frame information if
+ the split occurs before reload or if it results in more
+ than one insn. */
+ if (RTX_FRAME_RELATED_P (trial))
+ {
+ if (!reload_completed || split_insn_count != 1)
+ return trial;
+
+ rtx_insn *new_insn = seq;
+ rtx_insn *old_insn = trial;
+ copy_frame_info_to_split_insn (old_insn, new_insn);
+ }
+
/* We will be adding the new sequence to the function. The splitters
may have introduced invalid RTL sharing, so unshare the sequence now. */
unshare_all_rtl_in_chain (seq);
break;
case REG_NON_LOCAL_GOTO:
+ case REG_LABEL_TARGET:
for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
{
if (JUMP_P (insn))
break;
case REG_CALL_DECL:
+ case REG_UNTYPED_CALL:
gcc_assert (call_insn != NULL_RTX);
add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
break;
before = PREV_INSN (trial);
after = NEXT_INSN (trial);
- tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
+ emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
delete_insn (trial);
insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
INSN_UID (insn) = cur_debug_insn_uid++;
- if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
+ if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
INSN_UID (insn) = cur_insn_uid++;
PATTERN (insn) = pattern;
they know how to update a SEQUENCE. */
void
-add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
+add_insn_after (rtx_insn *insn, rtx_insn *after, basic_block bb)
{
- rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
- rtx_insn *after = as_a <rtx_insn *> (uncast_after);
add_insn_after_nobb (insn, after);
if (!BARRIER_P (after)
&& !BARRIER_P (insn)
they know how to update a SEQUENCE. */
void
-add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
+add_insn_before (rtx_insn *insn, rtx_insn *before, basic_block bb)
{
- rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
- rtx_insn *before = as_a <rtx_insn *> (uncast_before);
add_insn_before_nobb (insn, before);
if (!bb
/* Replace insn with an deleted instruction note. */
void
-set_insn_deleted (rtx insn)
+set_insn_deleted (rtx_insn *insn)
{
if (INSN_P (insn))
- df_insn_delete (as_a <rtx_insn *> (insn));
+ df_insn_delete (insn);
PUT_CODE (insn, NOTE);
NOTE_KIND (insn) = NOTE_INSN_DELETED;
}
To really delete an insn and related DF information, use delete_insn. */
void
-remove_insn (rtx uncast_insn)
+remove_insn (rtx_insn *insn)
{
- rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
rtx_insn *next = NEXT_INSN (insn);
rtx_insn *prev = PREV_INSN (insn);
basic_block bb;
generated would almost certainly die right after it was created. */
static rtx_insn *
-emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
+emit_pattern_before_noloc (rtx x, rtx_insn *before, rtx_insn *last,
+ basic_block bb,
rtx_insn *(*make_raw) (rtx))
{
rtx_insn *insn;
gcc_assert (before);
if (x == NULL_RTX)
- return safe_as_a <rtx_insn *> (last);
+ return last;
switch (GET_CODE (x))
{
break;
}
- return safe_as_a <rtx_insn *> (last);
+ return last;
}
/* Make X be output before the instruction BEFORE. */
emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
{
return as_a <rtx_jump_insn *> (
- emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
+ emit_pattern_before_noloc (x, before, NULL, NULL,
make_jump_insn_raw));
}
rtx_insn *
emit_call_insn_before_noloc (rtx x, rtx_insn *before)
{
- return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
+ return emit_pattern_before_noloc (x, before, NULL, NULL,
make_call_insn_raw);
}
and output it before the instruction BEFORE. */
rtx_insn *
-emit_debug_insn_before_noloc (rtx x, rtx before)
+emit_debug_insn_before_noloc (rtx x, rtx_insn *before)
{
- return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
+ return emit_pattern_before_noloc (x, before, NULL, NULL,
make_debug_insn_raw);
}
and output it before the insn BEFORE. */
rtx_barrier *
-emit_barrier_before (rtx before)
+emit_barrier_before (rtx_insn *before)
{
rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
/* Emit the label LABEL before the insn BEFORE. */
rtx_code_label *
-emit_label_before (rtx label, rtx_insn *before)
+emit_label_before (rtx_code_label *label, rtx_insn *before)
{
gcc_checking_assert (INSN_UID (label) == 0);
INSN_UID (label) = cur_insn_uid++;
add_insn_before (label, before, NULL);
- return as_a <rtx_code_label *> (label);
+ return label;
}
\f
/* Helper for emit_insn_after, handles lists of instructions
efficiently. */
static rtx_insn *
-emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
+emit_insn_after_1 (rtx_insn *first, rtx_insn *after, basic_block bb)
{
- rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
rtx_insn *last;
rtx_insn *after_after;
if (!bb && !BARRIER_P (after))
}
static rtx_insn *
-emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
+emit_pattern_after_noloc (rtx x, rtx_insn *after, basic_block bb,
rtx_insn *(*make_raw)(rtx))
{
- rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
rtx_insn *last = after;
gcc_assert (after);
BB is NULL, an attempt is made to infer the BB from AFTER. */
rtx_insn *
-emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
+emit_insn_after_noloc (rtx x, rtx_insn *after, basic_block bb)
{
return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
}
and output it after the insn AFTER. */
rtx_jump_insn *
-emit_jump_insn_after_noloc (rtx x, rtx after)
+emit_jump_insn_after_noloc (rtx x, rtx_insn *after)
{
return as_a <rtx_jump_insn *> (
emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
and output it after the instruction AFTER. */
rtx_insn *
-emit_call_insn_after_noloc (rtx x, rtx after)
+emit_call_insn_after_noloc (rtx x, rtx_insn *after)
{
return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
}
and output it after the instruction AFTER. */
rtx_insn *
-emit_debug_insn_after_noloc (rtx x, rtx after)
+emit_debug_insn_after_noloc (rtx x, rtx_insn *after)
{
return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
}
and output it after the insn AFTER. */
rtx_barrier *
-emit_barrier_after (rtx after)
+emit_barrier_after (rtx_insn *after)
{
rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
/* Emit the label LABEL after the insn AFTER. */
rtx_insn *
-emit_label_after (rtx label, rtx_insn *after)
+emit_label_after (rtx_insn *label, rtx_insn *after)
{
gcc_checking_assert (INSN_UID (label) == 0);
INSN_UID (label) = cur_insn_uid++;
add_insn_after (label, after, NULL);
- return as_a <rtx_insn *> (label);
+ return label;
}
\f
/* Notes require a bit of special handling: Some notes need to have their
MAKE_RAW indicates how to turn PATTERN into a real insn. */
static rtx_insn *
-emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
+emit_pattern_after_setloc (rtx pattern, rtx_insn *after, location_t loc,
rtx_insn *(*make_raw) (rtx))
{
- rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
if (pattern == NULL_RTX || !loc)
any DEBUG_INSNs. */
static rtx_insn *
-emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
+emit_pattern_after (rtx pattern, rtx_insn *after, bool skip_debug_insns,
rtx_insn *(*make_raw) (rtx))
{
- rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
rtx_insn *prev = after;
if (skip_debug_insns)
/* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
rtx_insn *
-emit_insn_after_setloc (rtx pattern, rtx after, int loc)
+emit_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
{
return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
}
/* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
rtx_insn *
-emit_insn_after (rtx pattern, rtx after)
+emit_insn_after (rtx pattern, rtx_insn *after)
{
return emit_pattern_after (pattern, after, true, make_insn_raw);
}
/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
rtx_jump_insn *
-emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
+emit_jump_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
{
return as_a <rtx_jump_insn *> (
emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
rtx_jump_insn *
-emit_jump_insn_after (rtx pattern, rtx after)
+emit_jump_insn_after (rtx pattern, rtx_insn *after)
{
return as_a <rtx_jump_insn *> (
emit_pattern_after (pattern, after, true, make_jump_insn_raw));
/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
rtx_insn *
-emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
+emit_call_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
{
return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
}
/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
rtx_insn *
-emit_call_insn_after (rtx pattern, rtx after)
+emit_call_insn_after (rtx pattern, rtx_insn *after)
{
return emit_pattern_after (pattern, after, true, make_call_insn_raw);
}
/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
rtx_insn *
-emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
+emit_debug_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
{
return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
}
/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
rtx_insn *
-emit_debug_insn_after (rtx pattern, rtx after)
+emit_debug_insn_after (rtx pattern, rtx_insn *after)
{
return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
}
CALL_INSN, etc. */
static rtx_insn *
-emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
- rtx_insn *(*make_raw) (rtx))
+emit_pattern_before_setloc (rtx pattern, rtx_insn *before, location_t loc,
+ bool insnp, rtx_insn *(*make_raw) (rtx))
{
- rtx_insn *before = as_a <rtx_insn *> (uncast_before);
rtx_insn *first = PREV_INSN (before);
rtx_insn *last = emit_pattern_before_noloc (pattern, before,
- insnp ? before : NULL_RTX,
+ insnp ? before : NULL,
NULL, make_raw);
if (pattern == NULL_RTX || !loc)
INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
static rtx_insn *
-emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
+emit_pattern_before (rtx pattern, rtx_insn *before, bool skip_debug_insns,
bool insnp, rtx_insn *(*make_raw) (rtx))
{
- rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
rtx_insn *next = before;
if (skip_debug_insns)
insnp, make_raw);
else
return emit_pattern_before_noloc (pattern, before,
- insnp ? before : NULL_RTX,
+ insnp ? before : NULL,
NULL, make_raw);
}
/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
rtx_insn *
-emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
+emit_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
{
return emit_pattern_before_setloc (pattern, before, loc, true,
make_insn_raw);
/* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
rtx_insn *
-emit_insn_before (rtx pattern, rtx before)
+emit_insn_before (rtx pattern, rtx_insn *before)
{
return emit_pattern_before (pattern, before, true, true, make_insn_raw);
}
/* like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
rtx_jump_insn *
-emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
+emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
{
return as_a <rtx_jump_insn *> (
emit_pattern_before_setloc (pattern, before, loc, false,
/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
rtx_jump_insn *
-emit_jump_insn_before (rtx pattern, rtx before)
+emit_jump_insn_before (rtx pattern, rtx_insn *before)
{
return as_a <rtx_jump_insn *> (
emit_pattern_before (pattern, before, true, false,
/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
rtx_insn *
-emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
+emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
{
return emit_pattern_before_setloc (pattern, before, loc, false,
make_call_insn_raw);
/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
rtx_insn *
-emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
+emit_debug_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
{
return emit_pattern_before_setloc (pattern, before, loc, false,
make_debug_insn_raw);
case SYMBOL_REF:
case CODE_LABEL:
case PC:
- case CC0:
case RETURN:
case SIMPLE_RETURN:
return orig;
case CLOBBER:
- /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
+ /* Share clobbers of hard registers, but do not share pseudo reg
clobbers or clobbers of hard registers that originated as pseudos.
This is needed to allow safe register renaming. */
if (REG_P (XEXP (orig, 0))
{
set_first_insn (NULL);
set_last_insn (NULL);
- if (MIN_NONDEBUG_INSN_UID)
- cur_insn_uid = MIN_NONDEBUG_INSN_UID;
+ if (param_min_nondebug_insn_uid)
+ cur_insn_uid = param_min_nondebug_insn_uid;
else
cur_insn_uid = 1;
cur_debug_insn_uid = 1;
valid_for_const_vector_p (machine_mode, rtx x)
{
return (CONST_SCALAR_INT_P (x)
+ || CONST_POLY_INT_P (x)
|| CONST_DOUBLE_AS_FLOAT_P (x)
|| CONST_FIXED_P (x));
}
attrs = ggc_cleared_alloc<mem_attrs> ();
attrs->align = BITS_PER_UNIT;
attrs->addrspace = ADDR_SPACE_GENERIC;
- if (mode != BLKmode)
+ if (mode != BLKmode && mode != VOIDmode)
{
attrs->size_known_p = true;
attrs->size = GET_MODE_SIZE (mode);
if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
const_tiny_rtx[0][i] = const0_rtx;
- FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_POINTER_BOUNDS)
- {
- scalar_mode smode = smode_iter.require ();
- wide_int wi_zero = wi::zero (GET_MODE_PRECISION (smode));
- const_tiny_rtx[0][smode] = immed_wide_int_const (wi_zero, smode);
- }
-
pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
- cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
/*prev_insn=*/NULL,
/*next_insn=*/NULL,
return curr_location;
}
+/* Set the location of the insn chain starting at INSN to LOC. */
+void
+set_insn_locations (rtx_insn *insn, location_t loc)
+{
+ while (insn)
+ {
+ if (INSN_P (insn))
+ INSN_LOCATION (insn) = loc;
+ insn = NEXT_INSN (insn);
+ }
+}
+
/* Return lexical scope block insn belongs to. */
tree
insn_scope (const rtx_insn *insn)