element has been set to zero, which implies the cselib_val will be
removed. */
-int
+bool
references_value_p (const_rtx x, int only_useless)
{
const enum rtx_code code = GET_CODE (x);
if (GET_CODE (x) == VALUE
&& (! only_useless
|| (CSELIB_VAL_PTR (x)->locs == 0 && !PRESERVED_VALUE_P (x))))
- return 1;
+ return true;
for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
{
if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
- return 1;
+ return true;
else if (fmt[i] == 'E')
for (j = 0; j < XVECLEN (x, i); j++)
if (references_value_p (XVECEXP (x, i, j), only_useless))
- return 1;
+ return true;
}
- return 0;
+ return false;
}
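
The shape above — early `return true` on a hit, `return false` only after the full walk over the 'e'/'E' operands — is the pattern every predicate in this patch converts to. A minimal standalone sketch of the same recursive search, using a hypothetical node type rather than GCC's rtx:

#include <vector>

/* Hypothetical stand-in for an rtx: either a VALUE or a node with
   sub-expressions (the 'e'/'E' operands walked above).  */
struct node
{
  enum kind { VALUE, OTHER } k;
  std::vector<node *> ops;
};

/* Same early-return shape as references_value_p, as a bool.  */
static bool
references_value (const node *x)
{
  if (x->k == node::VALUE)
    return true;                    /* direct hit */
  for (const node *op : x->ops)
    if (op && references_value (op))
      return true;                  /* hit somewhere below */
  return false;                     /* exhaustive walk found nothing */
}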
/* Return true if V is a useless VALUE and can be discarded as such. */
return x;
}
-/* Return nonzero if we can prove that X and Y contain the same value,
+/* Return true if we can prove that X and Y contain the same value,
taking our gathered information into account. MEMMODE holds the
mode of the enclosing MEM, if any, as required to deal with autoinc
addressing modes. If X and Y are not (known to be) part of
addresses, MEMMODE should be VOIDmode. */
-int
+bool
rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode, int depth)
{
enum rtx_code code;
}
if (x == y)
- return 1;
+ return true;
if (GET_CODE (x) == VALUE)
{
rtx yoff = NULL;
rtx yr = autoinc_split (y, &yoff, memmode);
if ((yr == x || yr == e->val_rtx) && yoff == NULL_RTX)
- return 1;
+ return true;
}
if (depth == 128)
- return 0;
+ return false;
for (l = e->locs; l; l = l->next)
{
if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
continue;
else if (rtx_equal_for_cselib_1 (t, y, memmode, depth + 1))
- return 1;
+ return true;
}
- return 0;
+ return false;
}
else if (GET_CODE (y) == VALUE)
{
rtx xoff = NULL;
rtx xr = autoinc_split (x, &xoff, memmode);
if ((xr == y || xr == e->val_rtx) && xoff == NULL_RTX)
- return 1;
+ return true;
}
if (depth == 128)
- return 0;
+ return false;
for (l = e->locs; l; l = l->next)
{
if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
continue;
else if (rtx_equal_for_cselib_1 (x, t, memmode, depth + 1))
- return 1;
+ return true;
}
- return 0;
+ return false;
}
if (GET_MODE (x) != GET_MODE (y))
- return 0;
+ return false;
if (GET_CODE (x) != GET_CODE (y)
|| (GET_CODE (x) == PLUS
if (x != xorig || y != yorig)
{
if (!xoff != !yoff)
- return 0;
+ return false;
if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode, depth))
- return 0;
+ return false;
return rtx_equal_for_cselib_1 (x, y, memmode, depth);
}
if (GET_CODE (xorig) != GET_CODE (yorig))
- return 0;
+ return false;
}
/* These won't be handled correctly by the code below. */
{
CASE_CONST_UNIQUE:
case DEBUG_EXPR:
- return 0;
+ return false;
case CONST_VECTOR:
if (!same_vector_encodings_p (x, y))
{
case 'w':
if (XWINT (x, i) != XWINT (y, i))
- return 0;
+ return false;
break;
case 'n':
case 'i':
if (XINT (x, i) != XINT (y, i))
- return 0;
+ return false;
break;
case 'p':
if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
- return 0;
+ return false;
break;
case 'V':
case 'E':
/* Two vectors must have the same length. */
if (XVECLEN (x, i) != XVECLEN (y, i))
- return 0;
+ return false;
/* And the corresponding elements must match. */
for (j = 0; j < XVECLEN (x, i); j++)
if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j),
XVECEXP (y, i, j), memmode, depth))
- return 0;
+ return false;
break;
case 'e':
depth)
&& rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 1), memmode,
depth))
- return 1;
+ return true;
if (! rtx_equal_for_cselib_1 (XEXP (x, i), XEXP (y, i), memmode,
depth))
- return 0;
+ return false;
break;
case 'S':
case 's':
if (strcmp (XSTR (x, i), XSTR (y, i)))
- return 0;
+ return false;
break;
case 'u':
gcc_unreachable ();
}
}
- return 1;
+ return true;
}
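
Both VALUE branches above cut the recursion off at depth == 128 before chasing location lists, since VALUEs can reference one another cyclically and a conservative false is always acceptable here. A standalone sketch of that depth-capped comparison, with a hypothetical two-operand expression type standing in for rtx:

/* Hypothetical expression node, standing in for rtx.  */
struct expr
{
  int code;
  expr *op0, *op1;
};

/* Depth-capped structural equality: mirrors the
   "if (depth == 128) return false" guard above, preferring a
   conservative false to unbounded recursion.  */
static bool
expr_equal (const expr *x, const expr *y, int depth)
{
  if (x == y)
    return true;
  if (depth == 128)
    return false;                   /* give up conservatively */
  if (!x || !y || x->code != y->code)
    return false;
  return expr_equal (x->op0, y->op0, depth + 1)
	 && expr_equal (x->op1, y->op1, depth + 1);
}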
/* Wrapper for rtx_equal_for_cselib_p to determine whether a SET is
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
-static int is_aligning_offset (const_tree, const_tree);
+static bool is_aligning_offset (const_tree, const_tree);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, machine_mode);
#ifdef PUSH_ROUNDING
a pointer which will be passed as argument in every CONSTFUN call.
ALIGN is maximum alignment we can assume. MEMSETP is true if this is
a memset operation and false if it's a copy of a constant string.
- Return nonzero if a call to store_by_pieces should succeed. */
+ Return true if a call to store_by_pieces should succeed. */
-int
+bool
can_store_by_pieces (unsigned HOST_WIDE_INT len,
by_pieces_constfn constfun,
void *constfundata, unsigned int align, bool memsetp)
rtx cst ATTRIBUTE_UNUSED;
if (len == 0)
- return 1;
+ return true;
if (!targetm.use_by_pieces_infrastructure_p (len, align,
memsetp
? SET_BY_PIECES
: STORE_BY_PIECES,
optimize_insn_for_speed_p ()))
- return 0;
+ return false;
align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
vector mode for the memset expander. */
if (!((memsetp && VECTOR_MODE_P (mode))
|| targetm.legitimate_constant_p (mode, cst)))
- return 0;
+ return false;
if (!reverse)
offset += size;
gcc_assert (!l);
}
- return 1;
+ return true;
}
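
The usual call pattern gates the piecewise path on this predicate and falls back otherwise. A standalone sketch under assumed names — can_emit_piecewise, emit_piecewise, and emit_libcall are hypothetical stand-ins for can_store_by_pieces, store_by_pieces, and a library-call fallback, with a toy cost model in place of the target hook:

#include <cstddef>

/* Toy predicate: piecewise stores pay off only for small, aligned
   blocks.  A zero-length store trivially "succeeds", matching the
   len == 0 early return above.  */
static bool
can_emit_piecewise (size_t len, unsigned align)
{
  if (len == 0)
    return true;
  return align >= 4 && len <= 64;
}

static void emit_piecewise (size_t, unsigned) { /* several moves */ }
static void emit_libcall (size_t, unsigned) { /* e.g. memset call */ }

static void
store_block (size_t len, unsigned align)
{
  if (can_emit_piecewise (len, align))
    emit_piecewise (len, align);    /* predicate said yes: inline moves */
  else
    emit_libcall (len, align);      /* conservative fallback */
}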
/* Generate several move instructions to store LEN bytes generated by
return count_type_elements (type, true) == num_elts;
}
-/* Return 1 if EXP contains mostly (3/4) zeros. */
+/* Return true if EXP contains mostly (3/4) zeros. */
-static int
+static bool
mostly_zeros_p (const_tree exp)
{
if (TREE_CODE (exp) == CONSTRUCTOR)
return initializer_zerop (exp);
}
-/* Return 1 if EXP contains all zeros. */
+/* Return true if EXP contains all zeros. */
-static int
+static bool
all_zeros_p (const_tree exp)
{
if (TREE_CODE (exp) == CONSTRUCTOR)
{
tree value, index;
unsigned HOST_WIDE_INT i;
- int need_to_clear;
+ bool need_to_clear;
tree domain;
tree elttype = TREE_TYPE (type);
- int const_bounds_p;
+ bool const_bounds_p;
HOST_WIDE_INT minelt = 0;
HOST_WIDE_INT maxelt = 0;
the whole array first. Similarly if this is static
constructor of a non-BLKmode object. */
if (cleared)
- need_to_clear = 0;
+ need_to_clear = false;
else if (REG_P (target) && TREE_STATIC (exp))
- need_to_clear = 1;
+ need_to_clear = true;
else
{
unsigned HOST_WIDE_INT idx;
if (! tree_fits_uhwi_p (lo_index)
|| ! tree_fits_uhwi_p (hi_index))
{
- need_to_clear = 1;
+ need_to_clear = true;
break;
}
if (! need_to_clear
&& (count < maxelt - minelt + 1
|| 4 * zero_count >= 3 * count))
- need_to_clear = 1;
+ need_to_clear = true;
}
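
The need_to_clear computation above implements a simple cost heuristic: if the initializer leaves elements unspecified, or at least 3/4 of the elements seen are zero, clear the whole object once and then store only the nonzero parts. As a standalone sketch of the test from the loop above:

#include <cstddef>

/* Clearing heuristic: COUNT initializer elements were seen,
   ZERO_COUNT of them zero, out of TOTAL_ELTS slots in the object.  */
static bool
should_clear_first (size_t count, size_t zero_count, size_t total_elts)
{
  if (count < total_elts)
    return true;                         /* gaps must read as zero */
  return 4 * zero_count >= 3 * count;    /* mostly (3/4) zeros */
}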
if (need_to_clear && maybe_gt (size, 0))
unsigned HOST_WIDE_INT idx;
constructor_elt *ce;
int i;
- int need_to_clear;
+ bool need_to_clear;
insn_code icode = CODE_FOR_nothing;
tree elt;
tree elttype = TREE_TYPE (type);
clear the whole array first. Similarly if this is static
constructor of a non-BLKmode object. */
if (cleared)
- need_to_clear = 0;
+ need_to_clear = false;
else if (REG_P (target) && TREE_STATIC (exp))
- need_to_clear = 1;
+ need_to_clear = true;
else
{
unsigned HOST_WIDE_INT count = 0, zero_count = 0;
return value;
}
\f
-/* Subroutine of expand_expr: return nonzero iff there is no way that
+/* Subroutine of expand_expr: return true iff there is no way that
EXP can reference X, which is being modified. TOP_P is nonzero if this
call is going to be used to determine whether we need a temporary
for EXP, as opposed to a recursive call to this function.
- It is always safe for this routine to return zero since it merely
+ It is always safe for this routine to return false since it merely
searches for optimization opportunities. */
-int
+bool
safe_from_p (const_rtx x, tree exp, int top_p)
{
rtx exp_rtl = 0;
&& (XEXP (x, 0) == virtual_outgoing_args_rtx
|| (GET_CODE (XEXP (x, 0)) == PLUS
&& XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
- return 1;
+ return true;
/* If this is a subreg of a hard register, declare it unsafe, otherwise,
find the underlying pseudo. */
{
x = SUBREG_REG (x);
if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
- return 0;
+ return false;
}
/* Now look at our tree code and possibly recurse. */
break;
case tcc_constant:
- return 1;
+ return true;
case tcc_exceptional:
if (TREE_CODE (exp) == TREE_LIST)
while (1)
{
if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
- return 0;
+ return false;
exp = TREE_CHAIN (exp);
if (!exp)
- return 1;
+ return true;
if (TREE_CODE (exp) != TREE_LIST)
return safe_from_p (x, exp, 0);
}
FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
|| !safe_from_p (x, ce->value, 0))
- return 0;
- return 1;
+ return false;
+ return true;
}
else if (TREE_CODE (exp) == ERROR_MARK)
- return 1; /* An already-visited SAVE_EXPR? */
+ return true; /* An already-visited SAVE_EXPR? */
else
- return 0;
+ return false;
case tcc_statement:
/* The only case we look at here is the DECL_INITIAL inside a
case tcc_binary:
case tcc_comparison:
if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
- return 0;
+ return false;
/* Fall through. */
case tcc_unary:
if (staticp (TREE_OPERAND (exp, 0))
|| TREE_STATIC (exp)
|| safe_from_p (x, TREE_OPERAND (exp, 0), 0))
- return 1;
+ return true;
/* Otherwise, the only way this can conflict is if we are taking
the address of a DECL and that address is part of X, which is
{
if (!DECL_RTL_SET_P (exp)
|| !MEM_P (DECL_RTL (exp)))
- return 0;
+ return false;
else
exp_rtl = XEXP (DECL_RTL (exp), 0);
}
if (MEM_P (x)
&& alias_sets_conflict_p (MEM_ALIAS_SET (x),
get_alias_set (exp)))
- return 0;
+ return false;
break;
case CALL_EXPR:
all of memory. */
if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
|| MEM_P (x))
- return 0;
+ return false;
break;
case WITH_CLEANUP_EXPR:
for (i = 0; i < nops; i++)
if (TREE_OPERAND (exp, i) != 0
&& ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
- return 0;
+ return false;
break;
exp_rtl = SUBREG_REG (exp_rtl);
if (REG_P (exp_rtl)
&& REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
- return 0;
+ return false;
}
/* If the rtl is X, then it is not safe. Otherwise, it is unless both
}
/* If we reach here, it is safe. */
- return 1;
+ return true;
}
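
As the comment above says, a false answer from this routine is always legal: it never miscompiles, it only forces the caller to spend a temporary. A standalone sketch of that caller contract, with hypothetical stub types (rtx_stub, tree_stub) rather than GCC's rtx and tree:

struct rtx_stub;    /* hypothetical stand-ins, not GCC's types */
struct tree_stub;

static bool safe_from (const rtx_stub *, const tree_stub *)
{
  return false;                       /* always-legal conservative answer */
}

static rtx_stub *make_temp () { return nullptr; }
static void evaluate_into (const tree_stub *, rtx_stub *) {}

/* A false answer never miscompiles; it only costs a temporary.  */
static rtx_stub *
expand_store (rtx_stub *target, const tree_stub *rhs)
{
  if (!safe_from (target, rhs))
    {
      rtx_stub *tmp = make_temp ();   /* evaluate RHS elsewhere first */
      evaluate_into (rhs, tmp);
      return tmp;
    }
  evaluate_into (rhs, target);        /* proven safe: store directly */
  return target;
}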
\f
}
}
\f
-/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
+/* Subroutine of above: returns true if OFFSET corresponds to an offset that
when applied to the address of EXP produces an address known to be
aligned more than BIGGEST_ALIGNMENT. */
-static int
+static bool
is_aligning_offset (const_tree offset, const_tree exp)
{
/* Strip off any conversions. */
|| compare_tree_int (TREE_OPERAND (offset, 1),
BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
|| !pow2p_hwi (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1))
- return 0;
+ return false;
/* Look at the first operand of BIT_AND_EXPR and strip any conversion.
It must be NEGATE_EXPR. Then strip any more conversions. */
offset = TREE_OPERAND (offset, 0);
if (TREE_CODE (offset) != NEGATE_EXPR)
- return 0;
+ return false;
offset = TREE_OPERAND (offset, 0);
while (CONVERT_EXPR_P (offset))
&& !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
}
\f
-/* Attempt to generate a casesi instruction. Returns 1 if successful,
- 0 otherwise (i.e. if there is no casesi instruction).
+/* Attempt to generate a casesi instruction. Returns true if successful,
+ false otherwise (i.e. if there is no casesi instruction).
DEFAULT_PROBABILITY is the probability of jumping to the default
label. */
-int
+bool
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
rtx table_label, rtx default_label, rtx fallback_label,
profile_probability default_probability)
rtx op1, op2, index;
if (! targetm.have_casesi ())
- return 0;
+ return false;
/* The index must be some form of integer. Convert it to SImode. */
scalar_int_mode omode = SCALAR_INT_TYPE_MODE (index_type);
? default_label
: fallback_label));
expand_jump_insn (targetm.code_for_casesi, 5, ops);
- return 1;
+ return true;
}
/* Attempt to generate a tablejump instruction; same concept. */
emit_barrier ();
}
-int
+bool
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
rtx table_label, rtx default_label,
profile_probability default_probability)
rtx index;
if (! targetm.have_tablejump ())
- return 0;
+ return false;
index_expr = fold_build2 (MINUS_EXPR, index_type,
fold_convert (index_type, index_expr),
expand_normal (range),
TYPE_UNSIGNED (TREE_TYPE (range))),
table_label, default_label, default_probability);
- return 1;
+ return true;
}
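
try_casesi and try_tablejump are alternative strategies tried in order by the switch expander, and with bool returns the dispatch chain reads naturally. A sketch of that ordering; the final compare-and-branch fallback is an assumption about the caller, not shown in these hunks:

/* Hypothetical stubs for the two strategies above.  */
static bool try_casesi_stub () { return false; }    /* no casesi insn */
static bool try_tablejump_stub () { return true; }

static void
expand_switch_stub ()
{
  if (try_casesi_stub ())
    return;                       /* target-specific casesi pattern */
  if (try_tablejump_stub ())
    return;                       /* generic jump table */
  /* Neither applied; the caller would fall back to a branch tree
     (assumed behavior, outside this hunk).  */
}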
/* Return a CONST_VECTOR rtx representing vector mask for
unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT);
-/* Return nonzero if it is desirable to store LEN bytes generated by
+/* Return true if it is desirable to store LEN bytes generated by
CONSTFUN with several move instructions by store_by_pieces
function. CONSTFUNDATA is a pointer which will be passed as argument
in every CONSTFUN call.
ALIGN is maximum alignment we can assume.
MEMSETP is true if this is a real memset/bzero, not a copy
of a const string. */
-extern int can_store_by_pieces (unsigned HOST_WIDE_INT,
- by_pieces_constfn,
- void *, unsigned int, bool);
+extern bool can_store_by_pieces (unsigned HOST_WIDE_INT,
+ by_pieces_constfn,
+ void *, unsigned int, bool);
/* Generate several move instructions to store LEN bytes generated by
CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
extern void maybe_optimize_sub_cmp_0 (enum tree_code, tree *, tree *);
/* Two different ways of generating switch statements. */
-extern int try_casesi (tree, tree, tree, tree, rtx, rtx, rtx, profile_probability);
-extern int try_tablejump (tree, tree, tree, tree, rtx, rtx, profile_probability);
+extern bool try_casesi (tree, tree, tree, tree, rtx, rtx, rtx,
+ profile_probability);
+extern bool try_tablejump (tree, tree, tree, tree, rtx, rtx,
+ profile_probability);
-extern int safe_from_p (const_rtx, tree, int);
+extern bool safe_from_p (const_rtx, tree, int);
/* Get the personality libfunc for a function decl. */
rtx get_personality_function (tree);