/* RTL utility routines.
- Copyright (C) 1987, 1988, 1991, 1994, 1997, 1998, 1999, 2000, 2001, 2002,
- 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
+ Copyright (C) 1987-2020 Free Software Foundation, Inc.
This file is part of GCC.
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
-#include "real.h"
-#include "ggc.h"
#ifdef GENERATOR_FILE
# include "errors.h"
#else
-# include "toplev.h"
+# include "rtlhash.h"
+# include "diagnostic-core.h"
#endif
\f
prints the uid of the insn.
"b" is a pointer to a bitmap header.
"B" is a basic block pointer.
- "t" is a tree pointer. */
+ "t" is a tree pointer.
+ "r" a register.
+ "p" is a poly_uint16 offset. */
#define DEF_RTL_EXPR(ENUM, NAME, FORMAT, CLASS) FORMAT ,
#include "rtl.def" /* rtl expressions are defined here */
#undef DEF_RTL_EXPR
};
+/* Whether rtxs with the given code code store data in the hwint field. */
+
+#define RTX_CODE_HWINT_P_1(ENUM) \
+ ((ENUM) == CONST_INT || (ENUM) == CONST_DOUBLE \
+ || (ENUM) == CONST_FIXED || (ENUM) == CONST_WIDE_INT)
+#ifdef GENERATOR_FILE
+#define RTX_CODE_HWINT_P(ENUM) \
+ (RTX_CODE_HWINT_P_1 (ENUM) || (ENUM) == EQ_ATTR_ALT)
+#else
+#define RTX_CODE_HWINT_P RTX_CODE_HWINT_P_1
+#endif
+
/* Indexed by rtx code, gives the size of the rtx in bytes. */
const unsigned char rtx_code_size[NUM_RTX_CODE] = {
#define DEF_RTL_EXPR(ENUM, NAME, FORMAT, CLASS) \
- ((ENUM) == CONST_INT || (ENUM) == CONST_DOUBLE || (ENUM) == CONST_FIXED\
+ (RTX_CODE_HWINT_P (ENUM) \
? RTX_HDR_SIZE + (sizeof FORMAT - 1) * sizeof (HOST_WIDE_INT) \
+ : (ENUM) == REG \
+ ? RTX_HDR_SIZE + sizeof (reg_info) \
: RTX_HDR_SIZE + (sizeof FORMAT - 1) * sizeof (rtunion)),
#include "rtl.def"
#undef DEF_REG_NOTE
};
-#ifdef GATHER_STATISTICS
-static int rtx_alloc_counts[(int) LAST_AND_UNUSED_RTX_CODE];
-static int rtx_alloc_sizes[(int) LAST_AND_UNUSED_RTX_CODE];
-static int rtvec_alloc_counts;
-static int rtvec_alloc_sizes;
-#endif
+static size_t rtx_alloc_counts[(int) LAST_AND_UNUSED_RTX_CODE];
+static size_t rtx_alloc_sizes[(int) LAST_AND_UNUSED_RTX_CODE];
+static size_t rtvec_alloc_counts;
+static size_t rtvec_alloc_sizes;
\f
/* Allocate an rtx vector of N elements.
{
rtvec rt;
- rt = ggc_alloc_rtvec (n);
+ rt = ggc_alloc_rtvec_sized (n);
/* Clear out the vector. */
memset (&rt->elem[0], 0, n * sizeof (rtx));
PUT_NUM_ELEM (rt, n);
-#ifdef GATHER_STATISTICS
- rtvec_alloc_counts++;
- rtvec_alloc_sizes += n * sizeof (rtx);
-#endif
+ if (GATHER_STATISTICS)
+ {
+ rtvec_alloc_counts++;
+ rtvec_alloc_sizes += n * sizeof (rtx);
+ }
return rt;
}
+/* Create a bitwise copy of VEC. */
+
+rtvec
+shallow_copy_rtvec (rtvec vec)
+{
+ rtvec newvec;
+ int n;
+
+ n = GET_NUM_ELEM (vec);
+ newvec = rtvec_alloc (n);
+ memcpy (&newvec->elem[0], &vec->elem[0], sizeof (rtx) * n);
+ return newvec;
+}
+
/* Return the number of bytes occupied by rtx value X. */
unsigned int
rtx_size (const_rtx x)
{
+ if (CONST_WIDE_INT_P (x))
+ return (RTX_HDR_SIZE
+ + sizeof (struct hwivec_def)
+ + ((CONST_WIDE_INT_NUNITS (x) - 1)
+ * sizeof (HOST_WIDE_INT)));
+ if (CONST_POLY_INT_P (x))
+ return (RTX_HDR_SIZE
+ + sizeof (struct const_poly_int_def)
+ + CONST_POLY_INT_COEFFS (x).extra_size ());
if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_HAS_BLOCK_INFO_P (x))
return RTX_HDR_SIZE + sizeof (struct block_symbol);
return RTX_CODE_SIZE (GET_CODE (x));
}
-/* Allocate an rtx of code CODE. The CODE is stored in the rtx;
- all the rest is initialized to zero. */
+/* Allocate an rtx of code CODE with EXTRA bytes in it. The CODE is
+ stored in the rtx; all the rest is initialized to zero. */
rtx
-rtx_alloc_stat (RTX_CODE code MEM_STAT_DECL)
+rtx_alloc_stat_v (RTX_CODE code MEM_STAT_DECL, int extra)
{
- rtx rt;
+ rtx rt = ggc_alloc_rtx_def_stat (RTX_CODE_SIZE (code) + extra
+ PASS_MEM_STAT);
- rt = (rtx) ggc_alloc_zone_pass_stat (RTX_CODE_SIZE (code), &rtl_zone);
+ rtx_init (rt, code);
- /* We want to clear everything up to the FLD array. Normally, this
- is one int, but we don't want to assume that and it isn't very
- portable anyway; this is. */
+ if (GATHER_STATISTICS)
+ {
+ rtx_alloc_counts[code]++;
+ rtx_alloc_sizes[code] += RTX_CODE_SIZE (code);
+ }
- memset (rt, 0, RTX_HDR_SIZE);
- PUT_CODE (rt, code);
+ return rt;
+}
-#ifdef GATHER_STATISTICS
- rtx_alloc_counts[code]++;
- rtx_alloc_sizes[code] += RTX_CODE_SIZE (code);
-#endif
+/* Allocate an rtx of code CODE. The CODE is stored in the rtx;
+ all the rest is initialized to zero. */
- return rt;
+rtx
+rtx_alloc (RTX_CODE code MEM_STAT_DECL)
+{
+ return rtx_alloc_stat_v (code PASS_MEM_STAT, 0);
+}
+
+/* Write the wide constant X to OUTFILE. */
+
+void
+cwi_output_hex (FILE *outfile, const_rtx x)
+{
+ int i = CWI_GET_NUM_ELEM (x);
+ gcc_assert (i > 0);
+ if (CWI_ELT (x, i - 1) == 0)
+ /* The HOST_WIDE_INT_PRINT_HEX prepends a 0x only if the val is
+ non zero. We want all numbers to have a 0x prefix. */
+ fprintf (outfile, "0x");
+ fprintf (outfile, HOST_WIDE_INT_PRINT_HEX, CWI_ELT (x, --i));
+ while (--i >= 0)
+ fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX, CWI_ELT (x, i));
}
\f
shared_const_p (const_rtx orig)
{
gcc_assert (GET_CODE (orig) == CONST);
-
+
/* CONST can be shared if it contains a SYMBOL_REF. If it contains
a LABEL_REF, it isn't sharable. */
+ poly_int64 offset;
return (GET_CODE (XEXP (orig, 0)) == PLUS
&& GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
- && CONST_INT_P(XEXP (XEXP (orig, 0), 1)));
+ && poly_int_rtx_p (XEXP (XEXP (orig, 0), 1), &offset));
}
switch (code)
{
case REG:
- case CONST_INT:
- case CONST_DOUBLE:
- case CONST_FIXED:
- case CONST_VECTOR:
+ case DEBUG_EXPR:
+ case VALUE:
+ CASE_CONST_ANY:
case SYMBOL_REF:
case CODE_LABEL:
case PC:
case CC0:
+ case RETURN:
+ case SIMPLE_RETURN:
case SCRATCH:
/* SCRATCH must be shared because they represent distinct values. */
return orig;
case CLOBBER:
- if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
+ /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
+ clobbers or clobbers of hard registers that originated as pseudos.
+ This is needed to allow safe register renaming. */
+ if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
+ && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
return orig;
break;
us to explicitly document why we are *not* copying a flag. */
copy = shallow_copy_rtx (orig);
- /* We do not copy the USED flag, which is used as a mark bit during
- walks over the RTL. */
- RTX_FLAG (copy, used) = 0;
-
- /* We do not copy FRAME_RELATED for INSNs. */
- if (INSN_P (orig))
- RTX_FLAG (copy, frame_related) = 0;
- RTX_FLAG (copy, jump) = RTX_FLAG (orig, jump);
- RTX_FLAG (copy, call) = RTX_FLAG (orig, call);
-
format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
case 't':
case 'w':
case 'i':
+ case 'p':
case 's':
case 'S':
case 'T':
/* Create a new copy of an rtx. Only copy just one level. */
rtx
-shallow_copy_rtx_stat (const_rtx orig MEM_STAT_DECL)
+shallow_copy_rtx (const_rtx orig MEM_STAT_DECL)
{
const unsigned int size = rtx_size (orig);
- rtx const copy = (rtx) ggc_alloc_zone_pass_stat (size, &rtl_zone);
- return (rtx) memcpy (copy, orig, size);
+ rtx const copy = ggc_alloc_rtx_def_stat (size PASS_MEM_STAT);
+ memcpy (copy, orig, size);
+ switch (GET_CODE (orig))
+ {
+ /* RTX codes copy_rtx_if_shared_1 considers are shareable,
+ the used flag is often used for other purposes. */
+ case REG:
+ case DEBUG_EXPR:
+ case VALUE:
+ CASE_CONST_ANY:
+ case SYMBOL_REF:
+ case CODE_LABEL:
+ case PC:
+ case CC0:
+ case RETURN:
+ case SIMPLE_RETURN:
+ case SCRATCH:
+ break;
+ default:
+ /* For all other RTXes clear the used flag on the copy. */
+ RTX_FLAG (copy, used) = 0;
+ break;
+ }
+ return copy;
}
\f
/* Nonzero when we are generating CONCATs. */
\f
-/* Same as rtx_equal_p, but call CB on each pair of rtx if CB is not NULL.
- When the callback returns true, we continue with the new pair. */
+/* Same as rtx_equal_p, but call CB on each pair of rtx if CB is not NULL.
+ When the callback returns true, we continue with the new pair.
+ Whenever changing this function check if rtx_equal_p below doesn't need
+ changing as well. */
int
rtx_equal_p_cb (const_rtx x, const_rtx y, rtx_equal_p_callback_function cb)
if (GET_MODE (x) != GET_MODE (y))
return 0;
+ /* MEMs referring to different address space are not equivalent. */
+ if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
+ return 0;
+
/* Some RTL can be compared nonrecursively. */
switch (code)
{
return (REGNO (x) == REGNO (y));
case LABEL_REF:
- return XEXP (x, 0) == XEXP (y, 0);
+ return label_ref_label (x) == label_ref_label (y);
case SYMBOL_REF:
return XSTR (x, 0) == XSTR (y, 0);
+ case DEBUG_EXPR:
+ case VALUE:
case SCRATCH:
- case CONST_DOUBLE:
- case CONST_INT:
- case CONST_FIXED:
+ CASE_CONST_UNIQUE:
return 0;
+ case DEBUG_IMPLICIT_PTR:
+ return DEBUG_IMPLICIT_PTR_DECL (x)
+ == DEBUG_IMPLICIT_PTR_DECL (y);
+
+ case DEBUG_PARAMETER_REF:
+ return DEBUG_PARAMETER_REF_DECL (x)
+ == DEBUG_PARAMETER_REF_DECL (y);
+
+ case ENTRY_VALUE:
+ return rtx_equal_p_cb (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y), cb);
+
default:
break;
}
case 'n':
case 'i':
if (XINT (x, i) != XINT (y, i))
+ {
+#ifndef GENERATOR_FILE
+ if (((code == ASM_OPERANDS && i == 6)
+ || (code == ASM_INPUT && i == 1))
+ && XINT (x, i) == XINT (y, i))
+ break;
+#endif
+ return 0;
+ }
+ break;
+
+ case 'p':
+ if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
return 0;
break;
/* And the corresponding elements must match. */
for (j = 0; j < XVECLEN (x, i); j++)
- if (rtx_equal_p_cb (XVECEXP (x, i, j),
+ if (rtx_equal_p_cb (XVECEXP (x, i, j),
XVECEXP (y, i, j), cb) == 0)
return 0;
break;
}
/* Return 1 if X and Y are identical-looking rtx's.
- This is the Lisp function EQUAL for rtx arguments. */
+ This is the Lisp function EQUAL for rtx arguments.
+ Whenever changing this function check if rtx_equal_p_cb above doesn't need
+ changing as well. */
int
rtx_equal_p (const_rtx x, const_rtx y)
{
- return rtx_equal_p_cb (x, y, NULL);
+ int i;
+ int j;
+ enum rtx_code code;
+ const char *fmt;
+
+ if (x == y)
+ return 1;
+ if (x == 0 || y == 0)
+ return 0;
+
+ code = GET_CODE (x);
+ /* Rtx's of different codes cannot be equal. */
+ if (code != GET_CODE (y))
+ return 0;
+
+ /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
+ (REG:SI x) and (REG:HI x) are NOT equivalent. */
+
+ if (GET_MODE (x) != GET_MODE (y))
+ return 0;
+
+ /* MEMs referring to different address space are not equivalent. */
+ if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
+ return 0;
+
+ /* Some RTL can be compared nonrecursively. */
+ switch (code)
+ {
+ case REG:
+ return (REGNO (x) == REGNO (y));
+
+ case LABEL_REF:
+ return label_ref_label (x) == label_ref_label (y);
+
+ case SYMBOL_REF:
+ return XSTR (x, 0) == XSTR (y, 0);
+
+ case DEBUG_EXPR:
+ case VALUE:
+ case SCRATCH:
+ CASE_CONST_UNIQUE:
+ return 0;
+
+ case DEBUG_IMPLICIT_PTR:
+ return DEBUG_IMPLICIT_PTR_DECL (x)
+ == DEBUG_IMPLICIT_PTR_DECL (y);
+
+ case DEBUG_PARAMETER_REF:
+ return DEBUG_PARAMETER_REF_DECL (x)
+ == DEBUG_PARAMETER_REF_DECL (y);
+
+ case ENTRY_VALUE:
+ return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
+
+ default:
+ break;
+ }
+
+ /* Compare the elements. If any pair of corresponding elements
+ fail to match, return 0 for the whole thing. */
+
+ fmt = GET_RTX_FORMAT (code);
+ for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
+ {
+ switch (fmt[i])
+ {
+ case 'w':
+ if (XWINT (x, i) != XWINT (y, i))
+ return 0;
+ break;
+
+ case 'n':
+ case 'i':
+ if (XINT (x, i) != XINT (y, i))
+ {
+#ifndef GENERATOR_FILE
+ if (((code == ASM_OPERANDS && i == 6)
+ || (code == ASM_INPUT && i == 1))
+ && XINT (x, i) == XINT (y, i))
+ break;
+#endif
+ return 0;
+ }
+ break;
+
+ case 'p':
+ if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
+ return 0;
+ break;
+
+ case 'V':
+ case 'E':
+ /* Two vectors must have the same length. */
+ if (XVECLEN (x, i) != XVECLEN (y, i))
+ return 0;
+
+ /* And the corresponding elements must match. */
+ for (j = 0; j < XVECLEN (x, i); j++)
+ if (rtx_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)) == 0)
+ return 0;
+ break;
+
+ case 'e':
+ if (rtx_equal_p (XEXP (x, i), XEXP (y, i)) == 0)
+ return 0;
+ break;
+
+ case 'S':
+ case 's':
+ if ((XSTR (x, i) || XSTR (y, i))
+ && (! XSTR (x, i) || ! XSTR (y, i)
+ || strcmp (XSTR (x, i), XSTR (y, i))))
+ return 0;
+ break;
+
+ case 'u':
+ /* These are just backpointers, so they don't matter. */
+ break;
+
+ case '0':
+ case 't':
+ break;
+
+ /* It is believed that rtx's at this level will never
+ contain anything but integers and other rtx's,
+ except for within LABEL_REFs and SYMBOL_REFs. */
+ default:
+ gcc_unreachable ();
+ }
+ }
+ return 1;
+}
+
+/* Return true if all elements of VEC are equal. */
+
+bool
+rtvec_all_equal_p (const_rtvec vec)
+{
+ const_rtx first = RTVEC_ELT (vec, 0);
+ /* Optimize the important special case of a vector of constants.
+ The main use of this function is to detect whether every element
+ of CONST_VECTOR is the same. */
+ switch (GET_CODE (first))
+ {
+ CASE_CONST_UNIQUE:
+ for (int i = 1, n = GET_NUM_ELEM (vec); i < n; ++i)
+ if (first != RTVEC_ELT (vec, i))
+ return false;
+ return true;
+
+ default:
+ for (int i = 1, n = GET_NUM_ELEM (vec); i < n; ++i)
+ if (!rtx_equal_p (first, RTVEC_ELT (vec, i)))
+ return false;
+ return true;
+ }
+}
+
+/* Return an indication of which type of insn should have X as a body.
+ In generator files, this can be UNKNOWN if the answer is only known
+ at (GCC) runtime. Otherwise the value is CODE_LABEL, INSN, CALL_INSN
+ or JUMP_INSN. */
+
+enum rtx_code
+classify_insn (rtx x)
+{
+ if (LABEL_P (x))
+ return CODE_LABEL;
+ if (GET_CODE (x) == CALL)
+ return CALL_INSN;
+ if (ANY_RETURN_P (x))
+ return JUMP_INSN;
+ if (GET_CODE (x) == ASM_OPERANDS && ASM_OPERANDS_LABEL_VEC (x))
+ return JUMP_INSN;
+ if (GET_CODE (x) == SET)
+ {
+ if (GET_CODE (SET_DEST (x)) == PC)
+ return JUMP_INSN;
+ else if (GET_CODE (SET_SRC (x)) == CALL)
+ return CALL_INSN;
+ else
+ return INSN;
+ }
+ if (GET_CODE (x) == PARALLEL)
+ {
+ int j;
+ bool has_return_p = false;
+ for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
+ if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
+ return CALL_INSN;
+ else if (ANY_RETURN_P (XVECEXP (x, 0, j)))
+ has_return_p = true;
+ else if (GET_CODE (XVECEXP (x, 0, j)) == SET
+ && GET_CODE (SET_DEST (XVECEXP (x, 0, j))) == PC)
+ return JUMP_INSN;
+ else if (GET_CODE (XVECEXP (x, 0, j)) == SET
+ && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
+ return CALL_INSN;
+ if (has_return_p)
+ return JUMP_INSN;
+ if (GET_CODE (XVECEXP (x, 0, 0)) == ASM_OPERANDS
+ && ASM_OPERANDS_LABEL_VEC (XVECEXP (x, 0, 0)))
+ return JUMP_INSN;
+ }
+#ifdef GENERATOR_FILE
+ if (GET_CODE (x) == MATCH_OPERAND
+ || GET_CODE (x) == MATCH_OPERATOR
+ || GET_CODE (x) == MATCH_PARALLEL
+ || GET_CODE (x) == MATCH_OP_DUP
+ || GET_CODE (x) == MATCH_DUP
+ || GET_CODE (x) == PARALLEL)
+ return UNKNOWN;
+#endif
+ return INSN;
+}
+
+/* Comparator of indices based on rtx_alloc_counts. */
+
+static int
+rtx_count_cmp (const void *p1, const void *p2)
+{
+ const unsigned *n1 = (const unsigned *)p1;
+ const unsigned *n2 = (const unsigned *)p2;
+
+ return rtx_alloc_counts[*n1] - rtx_alloc_counts[*n2];
}
void
dump_rtx_statistics (void)
{
-#ifdef GATHER_STATISTICS
- int i;
int total_counts = 0;
int total_sizes = 0;
- fprintf (stderr, "\nRTX Kind Count Bytes\n");
- fprintf (stderr, "---------------------------------------\n");
- for (i = 0; i < LAST_AND_UNUSED_RTX_CODE; i++)
- if (rtx_alloc_counts[i])
- {
- fprintf (stderr, "%-20s %7d %10d\n", GET_RTX_NAME (i),
- rtx_alloc_counts[i], rtx_alloc_sizes[i]);
- total_counts += rtx_alloc_counts[i];
- total_sizes += rtx_alloc_sizes[i];
- }
+
+ if (! GATHER_STATISTICS)
+ {
+ fprintf (stderr, "No RTX statistics\n");
+ return;
+ }
+
+ fprintf (stderr, "\nRTX Kind Count Bytes\n");
+ fprintf (stderr, "-------------------------------------------\n");
+
+ auto_vec<unsigned> indices (LAST_AND_UNUSED_RTX_CODE);
+ for (unsigned i = 0; i < LAST_AND_UNUSED_RTX_CODE; i++)
+ indices.quick_push (i);
+ indices.qsort (rtx_count_cmp);
+
+ for (unsigned i = 0; i < LAST_AND_UNUSED_RTX_CODE; i++)
+ {
+ unsigned j = indices[i];
+ if (rtx_alloc_counts[j])
+ {
+ fprintf (stderr, "%-24s " PRsa (6) " " PRsa (9) "\n",
+ GET_RTX_NAME (j),
+ SIZE_AMOUNT (rtx_alloc_counts[j]),
+ SIZE_AMOUNT (rtx_alloc_sizes[j]));
+ total_counts += rtx_alloc_counts[j];
+ total_sizes += rtx_alloc_sizes[j];
+ }
+ }
+
if (rtvec_alloc_counts)
{
- fprintf (stderr, "%-20s %7d %10d\n", "rtvec",
- rtvec_alloc_counts, rtvec_alloc_sizes);
+ fprintf (stderr, "%-24s " PRsa (6) " " PRsa (9) "\n", "rtvec",
+ SIZE_AMOUNT (rtvec_alloc_counts),
+ SIZE_AMOUNT (rtvec_alloc_sizes));
total_counts += rtvec_alloc_counts;
total_sizes += rtvec_alloc_sizes;
}
- fprintf (stderr, "---------------------------------------\n");
- fprintf (stderr, "%-20s %7d %10d\n",
- "Total", total_counts, total_sizes);
- fprintf (stderr, "---------------------------------------\n");
-#endif
+ fprintf (stderr, "-----------------------------------------------\n");
+ fprintf (stderr, "%-24s " PRsa (6) " " PRsa (9) "\n",
+ "Total", SIZE_AMOUNT (total_counts),
+ SIZE_AMOUNT (total_sizes));
+ fprintf (stderr, "-----------------------------------------------\n");
}
\f
#if defined ENABLE_RTL_CHECKING && (GCC_VERSION >= 2007)
}
void
-rtl_check_failed_code_mode (const_rtx r, enum rtx_code code, enum machine_mode mode,
+rtl_check_failed_code3 (const_rtx r, enum rtx_code code1, enum rtx_code code2,
+ enum rtx_code code3, const char *file, int line,
+ const char *func)
+{
+ internal_error
+ ("RTL check: expected code '%s', '%s' or '%s', have '%s' in %s, at %s:%d",
+ GET_RTX_NAME (code1), GET_RTX_NAME (code2), GET_RTX_NAME (code3),
+ GET_RTX_NAME (GET_CODE (r)), func, trim_filename (file), line);
+}
+
+void
+rtl_check_failed_code_mode (const_rtx r, enum rtx_code code, machine_mode mode,
bool not_mode, const char *file, int line,
const char *func)
{
"in %s, at %s:%d", func, trim_filename (file), line);
}
+/* XXX Maybe print the vector? */
+void
+cwi_check_failed_bounds (const_rtx x, int n, const char *file, int line,
+ const char *func)
+{
+ internal_error
+ ("RTL check: access of hwi elt %d of vector with last elt %d in %s, at %s:%d",
+ n, CWI_GET_NUM_ELEM (x) - 1, func, trim_filename (file), line);
+}
+
/* XXX Maybe print the vector? */
void
rtvec_check_failed_bounds (const_rtvec r, int n, const char *file, int line,