+2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>
+
+ * tree.h (tree_to_uhwi): Return an unsigned HOST_WIDE_INT.
+ * tree.c (tree_to_uhwi): Likewise.
+ (tree_ctz): Remove cast to unsigned type.
+ * builtins.c (fold_builtin_memory_op): Likewise.
+ * dwarf2out.c (descr_info_loc): Likewise.
+ * godump.c (go_output_typedef): Likewise.
+ * omp-low.c (expand_omp_simd): Likewise.
+ * stor-layout.c (excess_unit_span): Likewise.
+ * tree-object-size.c (addr_object_size): Likewise.
+ * tree-sra.c (analyze_all_variable_accesses): Likewise.
+ * tree-ssa-forwprop.c (simplify_builtin_call): Likewise.
+ (simplify_rotate): Likewise.
+ * tree-ssa-strlen.c (adjust_last_stmt, handle_builtin_memcpy)
+ (handle_pointer_plus): Likewise.
+ * tree-switch-conversion.c (check_range): Likewise.
+ * tree-vect-patterns.c (vect_recog_rotate_pattern): Likewise.
+ * tsan.c (instrument_builtin_call): Likewise.
+ * cfgexpand.c (defer_stack_allocation): Add cast to HOST_WIDE_INT.
+ * trans-mem.c (tm_log_add): Likewise.
+ * config/aarch64/aarch64.c (aapcs_vfp_sub_candidate): Likewise.
+ * config/arm/arm.c (aapcs_vfp_sub_candidate): Likewise.
+ * config/rs6000/rs6000.c (rs6000_aggregate_candidate): Likewise.
+ * config/mips/mips.c (r10k_safe_mem_expr_p): Make offset unsigned.
+
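For readers following the mechanical changes below: the whole patch hinges on tree_to_uhwi now returning the value in its natural type. Here is a minimal standalone sketch (not GCC code; HOST_WIDE_INT is modelled as long long and the accessors are made up) of why the old signed return type forced casts at call sites:

/* sketch1.c -- compile with: gcc sketch1.c && ./a.out  */
#include <stdio.h>

typedef long long hwi;            /* stand-in for HOST_WIDE_INT */
typedef unsigned long long uhwi;  /* stand-in for unsigned HOST_WIDE_INT */

/* Old-style accessor: the stored value is unsigned, but the return
   type is signed, so values with the MSB set come back negative
   (implementation-defined in ISO C; modular wrap-around on GCC).  */
static hwi get_signed (uhwi stored) { return (hwi) stored; }

/* New-style accessor: the natural type for the value.  */
static uhwi get_unsigned (uhwi stored) { return stored; }

int main (void)
{
  uhwi big = 0x8000000000000000ULL;
  uhwi limit = 100;

  /* With the signed return type, callers needed a cast back to
     unsigned to get the intended comparison; omitting it silently
     flips the result.  */
  printf ("%d\n", get_signed (big) > (hwi) limit);   /* 0: wrong          */
  printf ("%d\n", (uhwi) get_signed (big) > limit);  /* 1: cast fixes it  */

  /* With the unsigned return type the comparison is right by default,
     which is why the hunks below simply delete the casts.  */
  printf ("%d\n", get_unsigned (big) > limit);       /* 1                 */
  return 0;
}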
2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>

* tree.h (host_integerp, tree_low_cst): Delete.
if (readonly_data_expr (src)
|| (tree_fits_uhwi_p (len)
&& (MIN (src_align, dest_align) / BITS_PER_UNIT
- >= (unsigned HOST_WIDE_INT) tree_to_uhwi (len))))
+ >= tree_to_uhwi (len))))
{
tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
if (!fn)
+2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>
+
+ * c-common.c (convert_vector_to_pointer_for_subscript): Remove
+ cast to unsigned type.
+
2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>

* c-common.c (fold_offsetof_1): Use tree_to_uhwi rather than
if (TREE_CODE (index) == INTEGER_CST)
if (!tree_fits_uhwi_p (index)
- || ((unsigned HOST_WIDE_INT) tree_to_uhwi (index)
- >= TYPE_VECTOR_SUBPARTS (type)))
+ || tree_to_uhwi (index) >= TYPE_VECTOR_SUBPARTS (type))
warning_at (loc, OPT_Warray_bounds, "index value is out of bound");
c_common_mark_addressable_vec (*vecp);
/* Whether the variable is small enough for immediate allocation not to be
a problem with regard to the frame size. */
bool smallish
- = (tree_to_uhwi (DECL_SIZE_UNIT (var))
+ = ((HOST_WIDE_INT) tree_to_uhwi (DECL_SIZE_UNIT (var))
< PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));
/* If stack protection is enabled, *all* stack variables must be deferred,
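The cfgexpand.c hunk above is one of the few places that gains a cast instead of losing one: PARAM_VALUE yields a signed quantity, and with tree_to_uhwi now unsigned, the usual arithmetic conversions would otherwise make the whole comparison unsigned. A minimal sketch (not GCC code; the negative parameter value is hypothetical) of the pitfall the new (HOST_WIDE_INT) cast avoids:

/* sketch2.c -- compile with: gcc sketch2.c && ./a.out  */
#include <stdio.h>

typedef long long hwi;
typedef unsigned long long uhwi;

int main (void)
{
  uhwi size = 5;   /* e.g. a small DECL_SIZE_UNIT value  */
  int param = -1;  /* a hypothetical negative parameter  */

  /* Mixed signed/unsigned comparison: PARAM converts to uhwi and
     becomes huge, so the "is it small enough?" test succeeds for
     every size.  */
  printf ("%d\n", size < (uhwi) param);       /* 1: always true  */

  /* Casting the unsigned value to hwi keeps the comparison signed,
     matching the pre-patch behaviour of the code.  */
  printf ("%d\n", (hwi) size < (hwi) param);  /* 0               */
  return 0;
}

The same reasoning applies to the (HOST_WIDE_INT) casts added in the trans-mem.c hunk and in the aapcs_vfp_sub_candidate / rs6000_aggregate_candidate hunks below, where the other operand of the comparison is a signed parameter or a signed product.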
/* There must be no padding. */
if (!tree_fits_uhwi_p (TYPE_SIZE (type))
- || (tree_to_uhwi (TYPE_SIZE (type))
+ || ((HOST_WIDE_INT) tree_to_uhwi (TYPE_SIZE (type))
!= count * GET_MODE_BITSIZE (*modep)))
return -1;
/* There must be no padding. */
if (!tree_fits_uhwi_p (TYPE_SIZE (type))
- || (tree_to_uhwi (TYPE_SIZE (type))
+ || ((HOST_WIDE_INT) tree_to_uhwi (TYPE_SIZE (type))
!= count * GET_MODE_BITSIZE (*modep)))
return -1;
/* There must be no padding. */
if (!tree_fits_uhwi_p (TYPE_SIZE (type))
- || (tree_to_uhwi (TYPE_SIZE (type))
+ || ((HOST_WIDE_INT) tree_to_uhwi (TYPE_SIZE (type))
!= count * GET_MODE_BITSIZE (*modep)))
return -1;
/* There must be no padding. */
if (!tree_fits_uhwi_p (TYPE_SIZE (type))
- || (tree_to_uhwi (TYPE_SIZE (type))
+ || ((HOST_WIDE_INT) tree_to_uhwi (TYPE_SIZE (type))
!= count * GET_MODE_BITSIZE (*modep)))
return -1;
/* There must be no padding. */
if (!tree_fits_uhwi_p (TYPE_SIZE (type))
- || (tree_to_uhwi (TYPE_SIZE (type))
+ || ((HOST_WIDE_INT) tree_to_uhwi (TYPE_SIZE (type))
!= count * GET_MODE_BITSIZE (*modep)))
return -1;
/* There must be no padding. */
if (!tree_fits_uhwi_p (TYPE_SIZE (type))
- || (tree_to_uhwi (TYPE_SIZE (type))
+ || ((HOST_WIDE_INT) tree_to_uhwi (TYPE_SIZE (type))
!= count * GET_MODE_BITSIZE (*modep)))
return -1;
a link-time-constant address. */
static bool
-r10k_safe_mem_expr_p (tree expr, HOST_WIDE_INT offset)
+r10k_safe_mem_expr_p (tree expr, unsigned HOST_WIDE_INT offset)
{
HOST_WIDE_INT bitoffset, bitsize;
tree inner, var_offset;
return false;
offset += bitoffset / BITS_PER_UNIT;
- return offset >= 0 && offset < tree_to_uhwi (DECL_SIZE_UNIT (inner));
+ return offset < tree_to_uhwi (DECL_SIZE_UNIT (inner));
}
/* A for_each_rtx callback for which DATA points to the instruction
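The mips.c change above is the one place where the signedness change simplifies the logic rather than just the spelling: once OFFSET is unsigned, a would-be negative offset wraps around to a huge value, so the single test offset < size subsumes the old offset >= 0 && offset < size pair. A standalone sketch of the idiom (not GCC code):

/* sketch3.c -- compile with: gcc sketch3.c && ./a.out  */
#include <stdio.h>

typedef unsigned long long uhwi;

/* One unsigned comparison replaces the signed two-sided range check.  */
static int in_range (uhwi offset, uhwi size) { return offset < size; }

int main (void)
{
  uhwi size = 16;
  printf ("%d\n", in_range (4, size));          /* 1: inside the object     */
  printf ("%d\n", in_range (16, size));         /* 0: one past the end      */
  printf ("%d\n", in_range ((uhwi) -3, size));  /* 0: "negative" wraps huge */
  return 0;
}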
/* There must be no padding. */
if (!tree_fits_uhwi_p (TYPE_SIZE (type))
- || (tree_to_uhwi (TYPE_SIZE (type))
+ || ((HOST_WIDE_INT) tree_to_uhwi (TYPE_SIZE (type))
!= count * GET_MODE_BITSIZE (*modep)))
return -1;
/* There must be no padding. */
if (!tree_fits_uhwi_p (TYPE_SIZE (type))
- || (tree_to_uhwi (TYPE_SIZE (type))
+ || ((HOST_WIDE_INT) tree_to_uhwi (TYPE_SIZE (type))
!= count * GET_MODE_BITSIZE (*modep)))
return -1;
/* There must be no padding. */
if (!tree_fits_uhwi_p (TYPE_SIZE (type))
- || (tree_to_uhwi (TYPE_SIZE (type))
+ || ((HOST_WIDE_INT) tree_to_uhwi (TYPE_SIZE (type))
!= count * GET_MODE_BITSIZE (*modep)))
return -1;
case POINTER_PLUS_EXPR:
case PLUS_EXPR:
if (tree_fits_uhwi_p (TREE_OPERAND (val, 1))
- && (unsigned HOST_WIDE_INT) tree_to_uhwi (TREE_OPERAND (val, 1))
- < 16384)
+ && tree_to_uhwi (TREE_OPERAND (val, 1)) < 16384)
{
loc = descr_info_loc (TREE_OPERAND (val, 0), base_decl);
if (!loc)
tree_to_shwi (TREE_VALUE (element)));
else if (tree_fits_uhwi_p (TREE_VALUE (element)))
snprintf (buf, sizeof buf, HOST_WIDE_INT_PRINT_UNSIGNED,
- ((unsigned HOST_WIDE_INT)
- tree_to_uhwi (TREE_VALUE (element))));
+ tree_to_uhwi (TREE_VALUE (element)));
else
snprintf (buf, sizeof buf, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
((unsigned HOST_WIDE_INT)
{
safelen = OMP_CLAUSE_SAFELEN_EXPR (safelen);
if (!tree_fits_uhwi_p (safelen)
- || (unsigned HOST_WIDE_INT) tree_to_uhwi (safelen)
- > INT_MAX)
+ || tree_to_uhwi (safelen) > INT_MAX)
loop->safelen = INT_MAX;
else
loop->safelen = tree_to_uhwi (safelen);
offset = offset % align;
return ((offset + size + align - 1) / align
- > ((unsigned HOST_WIDE_INT) tree_to_uhwi (TYPE_SIZE (type))
- / align));
+ > tree_to_uhwi (TYPE_SIZE (type)) / align);
}
#endif
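For the stor-layout.c hunk above: the expression uses the usual (x + align - 1) / align ceiling-division idiom to count how many ALIGN-bit units the field touches, and compares that with how many units the field's type occupies on its own. A standalone sketch with concrete numbers (not GCC code; all quantities are in bits, as in excess_unit_span):

/* sketch4.c -- compile with: gcc sketch4.c && ./a.out  */
#include <stdio.h>

typedef unsigned long long uhwi;

static int excess_span (uhwi offset, uhwi size, uhwi align, uhwi type_size)
{
  offset = offset % align;  /* only the position within a unit matters  */
  /* Ceiling division: units touched by [offset, offset + size).  */
  return (offset + size + align - 1) / align > type_size / align;
}

int main (void)
{
  /* A 32-bit field of a 32-bit type in 32-bit units: at bit offset 0
     it fits in one unit; shifted to bit offset 8 it straddles two.  */
  printf ("%d\n", excess_span (0, 32, 32, 32));  /* 0  */
  printf ("%d\n", excess_span (8, 32, 32, 32));  /* 1  */
  return 0;
}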
&& transaction_invariant_address_p (lp->addr, entry_block)
&& TYPE_SIZE_UNIT (type) != NULL
&& tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
- && (tree_to_uhwi (TYPE_SIZE_UNIT (type))
+ && ((HOST_WIDE_INT) tree_to_uhwi (TYPE_SIZE_UNIT (type))
< PARAM_VALUE (PARAM_TM_MAX_AGGREGATE_SIZE))
/* We must be able to copy this type normally. I.e., no
special constructors and the like. */
else if (pt_var
&& DECL_P (pt_var)
&& tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
- && (unsigned HOST_WIDE_INT)
- tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
+ && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
pt_var_size = DECL_SIZE_UNIT (pt_var);
else if (pt_var
&& TREE_CODE (pt_var) == STRING_CST
&& TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
&& tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
- && (unsigned HOST_WIDE_INT)
- tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
+ && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
< offset_limit)
pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
else
if (TREE_CODE (var) == VAR_DECL
&& type_consists_of_records_p (TREE_TYPE (var)))
{
- if ((unsigned) tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
+ if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
<= max_total_scalarization_size)
{
completely_scalarize_var (var);
as the new memcpy length, if it is too big, bail out. */
src_len = tree_to_uhwi (diff);
src_len += tree_to_uhwi (len2);
- if (src_len < (unsigned HOST_WIDE_INT) tree_to_uhwi (len1))
+ if (src_len < tree_to_uhwi (len1))
src_len = tree_to_uhwi (len1);
if (src_len > 1024)
break;
/* CNT1 + CNT2 == B case above. */
if (tree_fits_uhwi_p (def_arg2[0])
&& tree_fits_uhwi_p (def_arg2[1])
- && (unsigned HOST_WIDE_INT) tree_to_uhwi (def_arg2[0])
+ && tree_to_uhwi (def_arg2[0])
+ tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
rotcnt = def_arg2[0];
else if (TREE_CODE (def_arg2[0]) != SSA_NAME
{
if (!tree_fits_uhwi_p (last.len)
|| integer_zerop (len)
- || (unsigned HOST_WIDE_INT) tree_to_uhwi (len)
- != (unsigned HOST_WIDE_INT) tree_to_uhwi (last.len) + 1)
+ || tree_to_uhwi (len) != tree_to_uhwi (last.len) + 1)
return;
/* Don't adjust the length if it is divisible by 4, it is more efficient
to store the extra '\0' in that case. */
- if ((((unsigned HOST_WIDE_INT) tree_to_uhwi (len)) & 3) == 0)
+ if ((tree_to_uhwi (len) & 3) == 0)
return;
}
else if (TREE_CODE (len) == SSA_NAME)
/* Handle memcpy (x, "abcd", 5) or
memcpy (x, "abc\0uvw", 7). */
if (!tree_fits_uhwi_p (len)
- || (unsigned HOST_WIDE_INT) tree_to_uhwi (len)
- <= (unsigned HOST_WIDE_INT) ~idx)
+ || tree_to_uhwi (len) <= (unsigned HOST_WIDE_INT) ~idx)
return;
}
{
tree off = gimple_assign_rhs2 (stmt);
if (tree_fits_uhwi_p (off)
- && (unsigned HOST_WIDE_INT) tree_to_uhwi (off)
- <= (unsigned HOST_WIDE_INT) ~idx)
+ && tree_to_uhwi (off) <= (unsigned HOST_WIDE_INT) ~idx)
ssa_ver_to_stridx[SSA_NAME_VERSION (lhs)]
= ~(~idx - (int) tree_to_uhwi (off));
return;
return false;
}
- if ((unsigned HOST_WIDE_INT) tree_to_uhwi (info->range_size)
+ if (tree_to_uhwi (info->range_size)
> ((unsigned) info->count * SWITCH_CONVERSION_BRANCH_RATIO))
{
info->reason = "the maximum range-branch ratio exceeded";
if (TREE_CODE (def) == INTEGER_CST)
{
if (!tree_fits_uhwi_p (def)
- || (unsigned HOST_WIDE_INT) tree_to_uhwi (def)
- >= GET_MODE_PRECISION (TYPE_MODE (type))
+ || tree_to_uhwi (def) >= GET_MODE_PRECISION (TYPE_MODE (type))
|| integer_zerop (def))
return NULL;
def2 = build_int_cst (stype,
case LSHIFT_EXPR:
ret1 = tree_ctz (TREE_OPERAND (expr, 0));
if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
- && ((unsigned HOST_WIDE_INT) tree_to_uhwi (TREE_OPERAND (expr, 1))
- < (unsigned HOST_WIDE_INT) prec))
+ && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
{
ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
return MIN (ret1 + ret2, prec);
return ret1;
case RSHIFT_EXPR:
if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
- && ((unsigned HOST_WIDE_INT) tree_to_uhwi (TREE_OPERAND (expr, 1))
- < (unsigned HOST_WIDE_INT) prec))
+ && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
{
ret1 = tree_ctz (TREE_OPERAND (expr, 0));
ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
HOST_WIDE_INT. */
-HOST_WIDE_INT
+unsigned HOST_WIDE_INT
tree_to_uhwi (const_tree t)
{
gcc_assert (tree_fits_uhwi_p (t));
#endif
;
extern HOST_WIDE_INT tree_to_shwi (const_tree);
-extern HOST_WIDE_INT tree_to_uhwi (const_tree);
+extern unsigned HOST_WIDE_INT tree_to_uhwi (const_tree);
#if !defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 4003)
extern inline __attribute__ ((__gnu_inline__)) HOST_WIDE_INT
tree_to_shwi (const_tree t)
return TREE_INT_CST_LOW (t);
}
-extern inline __attribute__ ((__gnu_inline__)) HOST_WIDE_INT
+extern inline __attribute__ ((__gnu_inline__)) unsigned HOST_WIDE_INT
tree_to_uhwi (const_tree t)
{
gcc_assert (tree_fits_uhwi_p (t));
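The tree.h hunks above update both halves of GCC's fast-accessor pattern: a plain extern declaration that is always visible, plus a gnu_inline copy of the body that is used only for inlining and never emits a symbol of its own, with tree.c providing the one real definition. A compressed single-file sketch of the pattern (not GCC code; identifiers are made up, and the gnu_inline attribute requires GCC or a compatible compiler):

/* sketch5.c -- compile with: gcc sketch5.c && ./a.out  */
#include <stdio.h>

/* "Header" half: prototype plus an inlining-only body.  */
extern unsigned long long my_to_uhwi (const long long *t);
extern inline __attribute__ ((__gnu_inline__)) unsigned long long
my_to_uhwi (const long long *t)
{
  return (unsigned long long) *t;
}

/* "tree.c" half: the one out-of-line definition the linker sees.
   Redefining an extern gnu_inline function this way is the intended
   usage; tree.c does the same after including tree.h.  */
unsigned long long
my_to_uhwi (const long long *t)
{
  return (unsigned long long) *t;
}

int main (void)
{
  long long v = -1;
  printf ("%llu\n", my_to_uhwi (&v));  /* 18446744073709551615  */
  return 0;
}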
case fetch_op:
last_arg = gimple_call_arg (stmt, num - 1);
if (!tree_fits_uhwi_p (last_arg)
- || (unsigned HOST_WIDE_INT) tree_to_uhwi (last_arg)
- > MEMMODEL_SEQ_CST)
+ || tree_to_uhwi (last_arg) > MEMMODEL_SEQ_CST)
return;
gimple_call_set_fndecl (stmt, decl);
update_stmt (stmt);
for (j = 0; j < 6; j++)
args[j] = gimple_call_arg (stmt, j);
if (!tree_fits_uhwi_p (args[4])
- || (unsigned HOST_WIDE_INT) tree_to_uhwi (args[4])
- > MEMMODEL_SEQ_CST)
+ || tree_to_uhwi (args[4]) > MEMMODEL_SEQ_CST)
return;
if (!tree_fits_uhwi_p (args[5])
- || (unsigned HOST_WIDE_INT) tree_to_uhwi (args[5])
- > MEMMODEL_SEQ_CST)
+ || tree_to_uhwi (args[5]) > MEMMODEL_SEQ_CST)
return;
update_gimple_call (gsi, decl, 5, args[0], args[1], args[2],
args[4], args[5]);