+2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>
+
+ * alias.c, asan.c, builtins.c, cfgexpand.c, cgraph.c,
+ config/aarch64/aarch64.c, config/alpha/predicates.md,
+ config/arm/arm.c, config/darwin.c, config/epiphany/epiphany.c,
+ config/i386/i386.c, config/iq2000/iq2000.c, config/m32c/m32c-pragma.c,
+ config/mep/mep-pragma.c, config/mips/mips.c,
+ config/picochip/picochip.c, config/rs6000/rs6000.c, cppbuiltin.c,
+ dbxout.c, dwarf2out.c, emit-rtl.c, except.c, expr.c, fold-const.c,
+ function.c, gimple-fold.c, godump.c, ipa-cp.c, ipa-prop.c, omp-low.c,
+ predict.c, sdbout.c, stor-layout.c, trans-mem.c, tree-object-size.c,
+ tree-sra.c, tree-ssa-ccp.c, tree-ssa-forwprop.c,
+ tree-ssa-loop-ivcanon.c, tree-ssa-loop-ivopts.c, tree-ssa-loop-niter.c,
+ tree-ssa-loop-prefetch.c, tree-ssa-strlen.c, tree-stdarg.c,
+ tree-switch-conversion.c, tree-vect-generic.c, tree-vect-loop.c,
+ tree-vect-patterns.c, tree-vrp.c, tree.c, tsan.c, ubsan.c, varasm.c:
+ Replace tree_low_cst (..., 1) with tree_to_uhwi throughout.
+
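[For reviewers unfamiliar with the new interface: tree_low_cst (t, 1)
returned the low HOST_WIDE_INT of an integer constant after asserting that
the value fits an unsigned HOST_WIDE_INT; tree_to_uhwi performs the same
access with the unsignedness encoded in the name and tree_fits_uhwi_p as
the explicit precondition.  A rough sketch of the new accessor,
paraphrased rather than the verbatim tree.c definition:

    unsigned HOST_WIDE_INT
    tree_to_uhwi (const_tree t)
    {
      gcc_assert (tree_fits_uhwi_p (t));
      return TREE_INT_CST_LOW (t);
    }

Callers keep the guard-then-convert shape already used with the old
interface: test tree_fits_uhwi_p (x) first, then call tree_to_uhwi (x),
as the hunks below do.]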
2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>

* builtins.c, cilk-common.c, config/aarch64/aarch64.c,
+2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>
+
+ * gcc-interface/decl.c, gcc-interface/utils.c, gcc-interface/utils2.c:
+ Replace tree_low_cst (..., 1) with tree_to_uhwi throughout.
+
2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>

* gcc-interface/cuintp.c: Update comments to refer to
else if (compare_tree_int (TYPE_SIZE (gnu_type), align_cap) > 0)
align = align_cap;
else
- align = ceil_pow2 (tree_low_cst (TYPE_SIZE (gnu_type), 1));
+ align = ceil_pow2 (tree_to_uhwi (TYPE_SIZE (gnu_type)));
/* But make sure not to under-align the object. */
if (align <= TYPE_ALIGN (gnu_type))
&& tree_fits_uhwi_p (TYPE_SIZE (gnu_type))
&& integer_pow2p (TYPE_SIZE (gnu_type)))
align = MIN (BIGGEST_ALIGNMENT,
- tree_low_cst (TYPE_SIZE (gnu_type), 1));
+ tree_to_uhwi (TYPE_SIZE (gnu_type)));
else if (Is_Atomic (gnat_entity) && gnu_size
&& tree_fits_uhwi_p (gnu_size)
&& integer_pow2p (gnu_size))
- align = MIN (BIGGEST_ALIGNMENT, tree_low_cst (gnu_size, 1));
+ align = MIN (BIGGEST_ALIGNMENT, tree_to_uhwi (gnu_size));
/* See if we need to pad the type. If we did, and made a record,
the name of the new type may be changed. So get it back for
{
tree t = TREE_VALUE (purpose_member (old_field, pos_list));
tree pos = TREE_VEC_ELT (t, 0), bitpos = TREE_VEC_ELT (t, 2);
- unsigned int offset_align = tree_low_cst (TREE_VEC_ELT (t, 1), 1);
+ unsigned int offset_align = tree_to_uhwi (TREE_VEC_ELT (t, 1));
tree new_pos, new_field;
unsigned int i;
subst_pair *s;
tree
make_packable_type (tree type, bool in_record)
{
- unsigned HOST_WIDE_INT size = tree_low_cst (TYPE_SIZE (type), 1);
+ unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
unsigned HOST_WIDE_INT new_size;
tree new_type, old_field, field_list = NULL_TREE;
unsigned int align;
if (!size_tree || !tree_fits_uhwi_p (size_tree))
return type;
- size = tree_low_cst (size_tree, 1);
+ size = tree_to_uhwi (size_tree);
switch (TREE_CODE (type))
{
&& tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
{
tree offset = TREE_OPERAND (curpos, 0);
- align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
+ align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
align = scale_by_factor_of (offset, align);
last_pos = round_up (last_pos, align);
pos = compute_related_constant (curpos, last_pos);
{
tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
unsigned HOST_WIDE_INT addend
- = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
+ = tree_to_uhwi (TREE_OPERAND (curpos, 1));
align
- = tree_low_cst (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
+ = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
align = scale_by_factor_of (offset, align);
align = MIN (align, addend & -addend);
last_pos = round_up (last_pos, align);
unsigned int known_align;
if (tree_fits_uhwi_p (pos))
- known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
+ known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
else
known_align = BITS_PER_UNIT;
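[Note: tree_to_uhwi (pos) & - tree_to_uhwi (pos) is the usual
two's-complement trick for isolating the least significant set bit of a
value, i.e. the largest power of two dividing it.  A worked example with a
hypothetical position:

    unsigned int pos_bits = 24;                 /* binary 11000 */
    unsigned int known = pos_bits & -pos_bits;  /* == 8, the low set bit */

so a field at bit position 24 is known to be aligned to 8 bits.]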
value_factor_p (tree value, HOST_WIDE_INT factor)
{
if (tree_fits_uhwi_p (value))
- return tree_low_cst (value, 1) % factor == 0;
+ return tree_to_uhwi (value) % factor == 0;
if (TREE_CODE (value) == MULT_EXPR)
return (value_factor_p (TREE_OPERAND (value, 0), factor)
iff it is not a multiple of the current field alignment. */
if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
&& tree_fits_uhwi_p (bit_position (prev_field)))
- return ((tree_low_cst (bit_position (prev_field), 1)
- + tree_low_cst (DECL_SIZE (prev_field), 1))
+ return ((tree_to_uhwi (bit_position (prev_field))
+ + tree_to_uhwi (DECL_SIZE (prev_field)))
% DECL_ALIGN (curr_field) != 0);
/* If both the position and size of the previous field are multiples
case ENUMERAL_TYPE:
case BOOLEAN_TYPE:
if (TYPE_VAX_FLOATING_POINT_P (type))
- switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
+ switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type)))
{
case 6:
dtype = 10;
case COMPLEX_TYPE:
if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
&& TYPE_VAX_FLOATING_POINT_P (type))
- switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
+ switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type)))
{
case 6:
dtype = 12;
case ENUMERAL_TYPE:
case BOOLEAN_TYPE:
if (TYPE_VAX_FLOATING_POINT_P (type))
- switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
+ switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type)))
{
case 6:
dtype = 10;
case COMPLEX_TYPE:
if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
&& TYPE_VAX_FLOATING_POINT_P (type))
- switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
+ switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type)))
{
case 6:
dtype = 12;
}
/* Get the vector size (in bytes). */
- vecsize = tree_low_cst (size, 1);
+ vecsize = tree_to_uhwi (size);
/* We need to provide for vector pointers, vector arrays, and
functions returning vectors. For example:
return NULL_TREE;
}
- if (vecsize % tree_low_cst (TYPE_SIZE_UNIT (type), 1))
+ if (vecsize % tree_to_uhwi (TYPE_SIZE_UNIT (type)))
{
error ("vector size not an integral multiple of component size");
return NULL;
}
/* Calculate how many units fit in the vector. */
- nunits = vecsize / tree_low_cst (TYPE_SIZE_UNIT (type), 1);
+ nunits = vecsize / tree_to_uhwi (TYPE_SIZE_UNIT (type));
if (nunits & (nunits - 1))
{
error ("number of components of the vector not a power of two");
/* Sanity check the vector size and element type consistency. */
- vec_bytes = tree_low_cst (rep_size, 1);
+ vec_bytes = tree_to_uhwi (rep_size);
- if (vec_bytes % tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1))
+ if (vec_bytes % tree_to_uhwi (TYPE_SIZE_UNIT (elem_type)))
{
error ("vector size not an integral multiple of component size");
return NULL;
return NULL;
}
- vec_units = vec_bytes / tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1);
+ vec_units = vec_bytes / tree_to_uhwi (TYPE_SIZE_UNIT (elem_type));
if (vec_units & (vec_units - 1))
{
error ("number of components of the vector not a power of two");
static unsigned int
resolve_atomic_size (tree type)
{
- unsigned HOST_WIDE_INT size = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
+ unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
return size;
*known_p = false;
return;
}
- *offset += (tree_low_cst (xoffset, 1)
- + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
+ *offset += (tree_to_uhwi (xoffset)
+ + (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
/ BITS_PER_UNIT));
x = TREE_OPERAND (x, 0);
CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
fold_convert (const_ptr_type_node,
build_fold_addr_expr (refdecl)));
- size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
+ size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
size += asan_red_zone_size (size);
CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
|| compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
return 0;
- return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
+ return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
else
{
if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
- *min_size = tree_low_cst (TYPE_MIN_VALUE (TREE_TYPE (len)), 1);
+ *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
else
*min_size = 0;
if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
- *max_size = tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (len)), 1);
+ *max_size = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
else
*max_size = GET_MODE_MASK (GET_MODE (len_rtx));
}
rtx dest_mem;
if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
- || !can_store_by_pieces (tree_low_cst (len, 1),
+ || !can_store_by_pieces (tree_to_uhwi (len),
builtin_strncpy_read_str,
CONST_CAST (char *, p),
dest_align, false))
return NULL_RTX;
dest_mem = get_memory_rtx (dest, len);
- store_by_pieces (dest_mem, tree_low_cst (len, 1),
+ store_by_pieces (dest_mem, tree_to_uhwi (len),
builtin_strncpy_read_str,
CONST_CAST (char *, p), dest_align, false, 0);
dest_mem = force_operand (XEXP (dest_mem, 0), target);
* We can't pass builtin_memset_gen_str as that emits RTL. */
c = 1;
if (tree_fits_uhwi_p (len)
- && can_store_by_pieces (tree_low_cst (len, 1),
+ && can_store_by_pieces (tree_to_uhwi (len),
builtin_memset_read_str, &c, dest_align,
true))
{
val_rtx = force_reg (val_mode, val_rtx);
- store_by_pieces (dest_mem, tree_low_cst (len, 1),
+ store_by_pieces (dest_mem, tree_to_uhwi (len),
builtin_memset_gen_str, val_rtx, dest_align,
true, 0);
}
if (c)
{
if (tree_fits_uhwi_p (len)
- && can_store_by_pieces (tree_low_cst (len, 1),
+ && can_store_by_pieces (tree_to_uhwi (len),
builtin_memset_read_str, &c, dest_align,
true))
- store_by_pieces (dest_mem, tree_low_cst (len, 1),
+ store_by_pieces (dest_mem, tree_to_uhwi (len),
builtin_memset_read_str, &c, dest_align, true, 0);
else if (!set_storage_via_setmem (dest_mem, len_rtx,
gen_int_mode (c, val_mode),
{
rtx tem
= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
- tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
+ tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
/* Some ports cannot access arbitrary stack frames. */
if (tem == NULL)
if (! var_decl_component_p (var))
return NULL_TREE;
- length = tree_low_cst (len, 1);
+ length = tree_to_uhwi (len);
if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
|| get_pointer_alignment (dest) / BITS_PER_UNIT < length)
return NULL_TREE;
if (readonly_data_expr (src)
|| (tree_fits_uhwi_p (len)
&& (MIN (src_align, dest_align) / BITS_PER_UNIT
- >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
+ >= (unsigned HOST_WIDE_INT) tree_to_uhwi (len))))
{
tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
if (!fn)
dest_base = get_ref_base_and_extent (destvar, &dest_offset,
&size, &maxsize);
if (tree_fits_uhwi_p (len))
- maxsize = tree_low_cst (len, 1);
+ maxsize = tree_to_uhwi (len);
else
maxsize = -1;
src_offset /= BITS_PER_UNIT;
if (target_char_cast (arg2, &c))
return NULL_TREE;
- r = (const char *) memchr (p1, c, tree_low_cst (len, 1));
+ r = (const char *) memchr (p1, c, tree_to_uhwi (len));
if (r == NULL)
return build_int_cst (TREE_TYPE (arg1), 0);
&& compare_tree_int (len, strlen (p1) + 1) <= 0
&& compare_tree_int (len, strlen (p2) + 1) <= 0)
{
- const int r = memcmp (p1, p2, tree_low_cst (len, 1));
+ const int r = memcmp (p1, p2, tree_to_uhwi (len));
if (r > 0)
return integer_one_node;
/* If len parameter is one, return an expression corresponding to
(*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
- if (tree_fits_uhwi_p (len) && tree_low_cst (len, 1) == 1)
+ if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
{
tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
tree cst_uchar_ptr_node
if (tree_fits_uhwi_p (len) && p1 && p2)
{
- const int i = strncmp (p1, p2, tree_low_cst (len, 1));
+ const int i = strncmp (p1, p2, tree_to_uhwi (len));
if (i > 0)
return integer_one_node;
else if (i < 0)
/* If len parameter is one, return an expression corresponding to
(*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
- if (tree_fits_uhwi_p (len) && tree_low_cst (len, 1) == 1)
+ if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
{
tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
tree cst_uchar_ptr_node
if (!init_target_chars ())
return NULL_TREE;
- destlen = tree_low_cst (destsize, 1);
+ destlen = tree_to_uhwi (destsize);
/* If the format doesn't contain % args or %%, use strcpy. */
if (strchr (fmt_str, target_percent) == NULL)
if (!retval || !tree_fits_uhwi_p (retval))
return NULL_TREE;
- origlen = tree_low_cst (retval, 1);
+ origlen = tree_to_uhwi (retval);
/* We could expand this as
memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
or to
+2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>
+
+ * c-common.c, c-cppbuiltin.c: Replace tree_low_cst (..., 1) with
+ tree_to_uhwi throughout.
+
2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>

* c-common.c, c-format.c, c-omp.c, c-pretty-print.c: Replace
}
/* Get the vector size (in bytes). */
- vecsize = tree_low_cst (size, 1);
+ vecsize = tree_to_uhwi (size);
/* We need to provide for vector pointers, vector arrays, and
functions returning vectors. For example:
return NULL_TREE;
}
- if (vecsize % tree_low_cst (TYPE_SIZE_UNIT (type), 1))
+ if (vecsize % tree_to_uhwi (TYPE_SIZE_UNIT (type)))
{
error ("vector size not an integral multiple of component size");
return NULL;
}
/* Calculate how many units fit in the vector. */
- nunits = vecsize / tree_low_cst (TYPE_SIZE_UNIT (type), 1);
+ nunits = vecsize / tree_to_uhwi (TYPE_SIZE_UNIT (type));
if (nunits & (nunits - 1))
{
error ("number of components of the vector not a power of two");
if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
goto incompatible;
- size = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
+ size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
return size;
return 0;
}
- size_0 = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (type_0)), 1);
+ size_0 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (type_0)));
/* Zero size objects are not allowed. */
if (size_0 == 0)
function);
return 0;
}
- size = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (type)), 1);
+ size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (type)));
if (size != size_0)
{
error_at (loc, "size mismatch in argument %d of %qE", x + 1,
tree p = (*params)[x];
if (TREE_CODE (p) == INTEGER_CST)
{
- int i = tree_low_cst (p, 1);
+ int i = tree_to_uhwi (p);
if (i < 0 || (i & MEMMODEL_MASK) >= MEMMODEL_LAST)
{
warning_at (loc, OPT_Winvalid_memory_model,
if (TREE_CODE (index) == INTEGER_CST)
if (!tree_fits_uhwi_p (index)
- || ((unsigned HOST_WIDE_INT) tree_low_cst (index, 1)
+ || ((unsigned HOST_WIDE_INT) tree_to_uhwi (index)
>= TYPE_VECTOR_SUBPARTS (type)))
warning_at (loc, OPT_Warray_bounds, "index value is out of bound");
builtin_define_type_sizeof (const char *name, tree type)
{
builtin_define_with_int_value (name,
- tree_low_cst (TYPE_SIZE_UNIT (type), 1));
+ tree_to_uhwi (TYPE_SIZE_UNIT (type)));
}
/* Define the float.h constants for TYPE using NAME_PREFIX, FP_SUFFIX,
/* Tell the source code about various types. These map to the C++11 and C11
macros where 2 indicates lock-free always, and 1 indicates sometimes
lock free. */
-#define SIZEOF_NODE(T) (tree_low_cst (TYPE_SIZE_UNIT (T), 1))
+#define SIZEOF_NODE(T) (tree_to_uhwi (TYPE_SIZE_UNIT (T)))
#define SWAP_INDEX(T) ((SIZEOF_NODE (T) < SWAP_LIMIT) ? SIZEOF_NODE (T) : 0)
builtin_define_with_int_value ("__GCC_ATOMIC_BOOL_LOCK_FREE",
(have_swap[SWAP_INDEX (boolean_type_node)]? 2 : 1));
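[Note: on a target where 1-, 2-, 4- and 8-byte compare-and-swap is
available, e.g. typical x86_64, the code above ends up predefining macros
along these lines; the values are target-dependent and the lines below are
illustrative rather than captured from a real build:

    #define __GCC_ATOMIC_BOOL_LOCK_FREE 2   /* 2: always lock-free */
    #define __GCC_ATOMIC_INT_LOCK_FREE 2    /* 1 would mean sometimes */
]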
+2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>
+
+ * c-decl.c, c-typeck.c: Replace tree_low_cst (..., 1) with
+ tree_to_uhwi throughout.
+
2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>

* c-parser.c: Replace tree_low_cst (..., 0) with tree_to_shwi
*width = build_int_cst (integer_type_node, w);
}
else
- w = tree_low_cst (*width, 1);
+ w = tree_to_uhwi (*width);
if (TREE_CODE (*type) == ENUMERAL_TYPE)
{
if (DECL_INITIAL (x))
{
- unsigned HOST_WIDE_INT width = tree_low_cst (DECL_INITIAL (x), 1);
+ unsigned HOST_WIDE_INT width = tree_to_uhwi (DECL_INITIAL (x));
DECL_SIZE (x) = bitsize_int (width);
DECL_BIT_FIELD (x) = 1;
SET_DECL_C_BIT_FIELD (x);
&& TREE_TYPE (*fieldlistp) != error_mark_node)
{
unsigned HOST_WIDE_INT width
- = tree_low_cst (DECL_INITIAL (*fieldlistp), 1);
+ = tree_to_uhwi (DECL_INITIAL (*fieldlistp));
tree type = TREE_TYPE (*fieldlistp);
if (width != TYPE_PRECISION (type))
{
else if (TREE_CODE (constructor_type) == ARRAY_TYPE)
{
constructor_type = TREE_TYPE (constructor_type);
- push_array_bounds (tree_low_cst (constructor_index, 1));
+ push_array_bounds (tree_to_uhwi (constructor_index));
constructor_depth++;
}
/* Now output the actual element. */
if (value.value)
{
- push_array_bounds (tree_low_cst (constructor_index, 1));
+ push_array_bounds (tree_to_uhwi (constructor_index));
output_init_element (value.value, value.original_type,
strict_string, elttype,
constructor_index, 1, implicit,
* (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;
v->decl = decl;
- v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
+ v->size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (decl)));
/* Ensure that all variables have size, so that &a != &b for any two
variables that are simultaneously live. */
if (v->size == 0)
HOST_WIDE_INT size, offset;
unsigned byte_align;
- size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
+ size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (var)));
byte_align = align_local_variable (SSAVAR (var));
/* We handle highly aligned variables in expand_stack_vars. */
/* Whether the variable is small enough for immediate allocation not to be
a problem with regard to the frame size. */
bool smallish
- = (tree_low_cst (DECL_SIZE_UNIT (var), 1)
+ = (tree_to_uhwi (DECL_SIZE_UNIT (var))
< PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));
/* If stack protection is enabled, *all* stack variables must be deferred,
{
if (really_expand)
expand_one_stack_var (origvar);
- return tree_low_cst (DECL_SIZE_UNIT (var), 1);
+ return tree_to_uhwi (DECL_SIZE_UNIT (var));
}
return 0;
}
|| !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
len = max;
else
- len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
+ len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
if (len < max)
ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
gcc_assert (TREE_CODE (type) == RECORD_TYPE);
edge->indirect_info->param_index = -1;
edge->indirect_info->otr_token
- = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
+ = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
edge->indirect_info->otr_type = type;
edge->indirect_info->polymorphic = 1;
}
|| count < 0)
return -1;
- count *= (1 + tree_low_cst (TYPE_MAX_VALUE (index), 1)
- - tree_low_cst (TYPE_MIN_VALUE (index), 1));
+ count *= (1 + tree_to_uhwi (TYPE_MAX_VALUE (index))
+ - tree_to_uhwi (TYPE_MIN_VALUE (index)));
/* There must be no padding. */
if (!tree_fits_uhwi_p (TYPE_SIZE (type))
- || (tree_low_cst (TYPE_SIZE (type), 1)
+ || (tree_to_uhwi (TYPE_SIZE (type))
!= count * GET_MODE_BITSIZE (*modep)))
return -1;
/* There must be no padding. */
if (!tree_fits_uhwi_p (TYPE_SIZE (type))
- || (tree_low_cst (TYPE_SIZE (type), 1)
+ || (tree_to_uhwi (TYPE_SIZE (type))
!= count * GET_MODE_BITSIZE (*modep)))
return -1;
/* There must be no padding. */
if (!tree_fits_uhwi_p (TYPE_SIZE (type))
- || (tree_low_cst (TYPE_SIZE (type), 1)
+ || (tree_to_uhwi (TYPE_SIZE (type))
!= count * GET_MODE_BITSIZE (*modep)))
return -1;
&& !SYMBOL_REF_TLS_MODEL (op))
{
if (SYMBOL_REF_DECL (op))
- max_ofs = tree_low_cst (DECL_SIZE_UNIT (SYMBOL_REF_DECL (op)), 1);
+ max_ofs = tree_to_uhwi (DECL_SIZE_UNIT (SYMBOL_REF_DECL (op)));
}
else
return false;
|| count < 0)
return -1;
- count *= (1 + tree_low_cst (TYPE_MAX_VALUE (index), 1)
- - tree_low_cst (TYPE_MIN_VALUE (index), 1));
+ count *= (1 + tree_to_uhwi (TYPE_MAX_VALUE (index))
+ - tree_to_uhwi (TYPE_MIN_VALUE (index)));
/* There must be no padding. */
if (!tree_fits_uhwi_p (TYPE_SIZE (type))
- || (tree_low_cst (TYPE_SIZE (type), 1)
+ || (tree_to_uhwi (TYPE_SIZE (type))
!= count * GET_MODE_BITSIZE (*modep)))
return -1;
/* There must be no padding. */
if (!tree_fits_uhwi_p (TYPE_SIZE (type))
- || (tree_low_cst (TYPE_SIZE (type), 1)
+ || (tree_to_uhwi (TYPE_SIZE (type))
!= count * GET_MODE_BITSIZE (*modep)))
return -1;
/* There must be no padding. */
if (!tree_fits_uhwi_p (TYPE_SIZE (type))
- || (tree_low_cst (TYPE_SIZE (type), 1)
+ || (tree_to_uhwi (TYPE_SIZE (type))
!= count * GET_MODE_BITSIZE (*modep)))
return -1;
zsize = (DECL_P (decl)
&& (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == CONST_DECL)
- && tree_low_cst (DECL_SIZE_UNIT (decl), 1) == 0);
+ && tree_to_uhwi (DECL_SIZE_UNIT (decl)) == 0);
one = DECL_P (decl)
&& TREE_CODE (decl) == VAR_DECL
static bool warned_objc_46 = false;
/* We shall assert that zero-sized objects are an error in ObjC
meta-data. */
- gcc_assert (tree_low_cst (DECL_SIZE_UNIT (decl), 1) != 0);
+ gcc_assert (tree_to_uhwi (DECL_SIZE_UNIT (decl)) != 0);
/* ??? This mechanism for determining the metadata section is
broken when LTO is in use, since the frontend that generated
machopic_define_symbol (DECL_RTL (decl));
}
- size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
+ size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
#ifdef DEBUG_DARWIN_MEM_ALLOCATORS
fprintf (file, "# dadon: %s %s (%llu, %u) local %d weak %d"
{
tree elmsz = TYPE_SIZE (TREE_TYPE (TREE_TYPE (field)));
- if (!tree_fits_uhwi_p (elmsz) || tree_low_cst (elmsz, 1) >= 32)
+ if (!tree_fits_uhwi_p (elmsz) || tree_to_uhwi (elmsz) >= 32)
return 64;
}
return computed;
{
if (TREE_CODE (type) == VECTOR_TYPE)
{
- switch (tree_low_cst (TYPE_SIZE (type), 1))
+ switch (tree_to_uhwi (TYPE_SIZE (type)))
{
case 64:
return builtin_decl_explicit (BUILT_IN_TM_LOAD_M64);
{
if (TREE_CODE (type) == VECTOR_TYPE)
{
- switch (tree_low_cst (TYPE_SIZE (type), 1))
+ switch (tree_to_uhwi (TYPE_SIZE (type)))
{
case 64:
return builtin_decl_explicit (BUILT_IN_TM_STORE_M64);
unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
if (!tree_fits_uhwi_p (arg)
- || (elt = tree_low_cst (arg, 1), elt > max))
+ || (elt = tree_to_uhwi (arg), elt > max))
{
error ("selector must be an integer constant in the range 0..%wi", max);
return 0;
/* ??? If this is a packed structure, then the last hunk won't
be 64 bits. */
chunks
- = tree_low_cst (TYPE_SIZE_UNIT (type), 1) / UNITS_PER_WORD;
+ = tree_to_uhwi (TYPE_SIZE_UNIT (type)) / UNITS_PER_WORD;
if (chunks + *arg_words + bias > (unsigned) MAX_ARGS_IN_REGISTERS)
chunks = MAX_ARGS_IN_REGISTERS - *arg_words - bias;
{
if (tree_fits_uhwi_p (val))
{
- i = tree_low_cst (val, 1);
+ i = tree_to_uhwi (val);
type = pragma_lex (&val);
if (type != CPP_EOF)
{
if (var != error_mark_node)
{
- unsigned uaddr = tree_low_cst (addr, 1);
+ unsigned uaddr = tree_to_uhwi (addr);
m32c_note_pragma_address (IDENTIFIER_POINTER (var), uaddr);
}
case CPP_NUMBER:
if (! tree_fits_uhwi_p (val))
break;
- i = tree_low_cst (val, 1);
+ i = tree_to_uhwi (val);
/* This pragma no longer has any effect. */
#if 0
if (i == 32)
type = mep_pragma_lex (&val);
if (type != CPP_CHAR)
goto syntax_error;
- class_letter = tree_low_cst (val, 1);
+ class_letter = tree_to_uhwi (val);
if (class_letter >= 'A' && class_letter <= 'D')
switch (class_letter)
{
return false;
offset += bitoffset / BITS_PER_UNIT;
- return offset >= 0 && offset < tree_low_cst (DECL_SIZE_UNIT (inner), 1);
+ return offset >= 0 && offset < tree_to_uhwi (DECL_SIZE_UNIT (inner));
}
/* A for_each_rtx callback for which DATA points to the instruction
int type_size_in_units = 0;
if (type)
- type_size_in_units = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
+ type_size_in_units = tree_to_uhwi (TYPE_SIZE_UNIT (type));
else
type_size_in_units = GET_MODE_SIZE (mode);
if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (decl)))
return false;
- dsize = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
+ dsize = tree_to_uhwi (DECL_SIZE_UNIT (decl));
if (dsize > 32768)
return false;
dsize = TREE_STRING_LENGTH (decl);
else if (TYPE_SIZE_UNIT (type)
&& tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
- dsize = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
+ dsize = tree_to_uhwi (TYPE_SIZE_UNIT (type));
else
return false;
if (dsize > 32768)
|| count < 0)
return -1;
- count *= (1 + tree_low_cst (TYPE_MAX_VALUE (index), 1)
- - tree_low_cst (TYPE_MIN_VALUE (index), 1));
+ count *= (1 + tree_to_uhwi (TYPE_MAX_VALUE (index))
+ - tree_to_uhwi (TYPE_MIN_VALUE (index)));
/* There must be no padding. */
if (!tree_fits_uhwi_p (TYPE_SIZE (type))
- || (tree_low_cst (TYPE_SIZE (type), 1)
+ || (tree_to_uhwi (TYPE_SIZE (type))
!= count * GET_MODE_BITSIZE (*modep)))
return -1;
/* There must be no padding. */
if (!tree_fits_uhwi_p (TYPE_SIZE (type))
- || (tree_low_cst (TYPE_SIZE (type), 1)
+ || (tree_to_uhwi (TYPE_SIZE (type))
!= count * GET_MODE_BITSIZE (*modep)))
return -1;
/* There must be no padding. */
if (!tree_fits_uhwi_p (TYPE_SIZE (type))
- || (tree_low_cst (TYPE_SIZE (type), 1)
+ || (tree_to_uhwi (TYPE_SIZE (type))
!= count * GET_MODE_BITSIZE (*modep)))
return -1;
unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
if (!tree_fits_uhwi_p (arg)
- || (elt = tree_low_cst (arg, 1), elt > max))
+ || (elt = tree_to_uhwi (arg), elt > max))
{
error ("selector must be an integer constant in the range 0..%wi", max);
return 0;
+2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>
+
+ * call.c, class.c, decl.c, error.c: Replace tree_low_cst (..., 1) with
+ tree_to_uhwi throughout.
+
2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>

* class.c, dump.c, error.c, init.c, method.c, parser.c, semantics.c:
if (TYPE_DOMAIN (type))
{
- unsigned HOST_WIDE_INT alen = tree_low_cst (array_type_nelts_top (type), 1);
+ unsigned HOST_WIDE_INT alen = tree_to_uhwi (array_type_nelts_top (type));
if (alen < len)
return NULL;
}
{
unsigned HOST_WIDE_INT width;
tree ftype = TREE_TYPE (field);
- width = tree_low_cst (DECL_SIZE (field), /*unsignedp=*/1);
+ width = tree_to_uhwi (DECL_SIZE (field));
if (width != TYPE_PRECISION (ftype))
{
TREE_TYPE (field)
return new_init;
if (tree_fits_uhwi_p (max_index))
- max_index_cst = tree_low_cst (max_index, 1);
+ max_index_cst = tree_to_uhwi (max_index);
/* sizetype is sign extended, not zero extended. */
else
- max_index_cst = tree_low_cst (fold_convert (size_type_node, max_index),
- 1);
+ max_index_cst = tree_to_uhwi (fold_convert (size_type_node, max_index));
resolve_virtual_fun_from_obj_type_ref (tree ref)
{
tree obj_type = TREE_TYPE (OBJ_TYPE_REF_OBJECT (ref));
- HOST_WIDE_INT index = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
+ HOST_WIDE_INT index = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));
tree fun = BINFO_VIRTUALS (TYPE_BINFO (TREE_TYPE (obj_type)));
while (index)
{
{
#define define_type_sizeof(NAME, TYPE) \
cpp_define_formatted (pfile, NAME"="HOST_WIDE_INT_PRINT_DEC, \
- tree_low_cst (TYPE_SIZE_UNIT (TYPE), 1))
+ tree_to_uhwi (TYPE_SIZE_UNIT (TYPE)))
define_type_sizeof ("__SIZEOF_INT__", integer_type_node);
define_type_sizeof ("__SIZEOF_LONG__", long_integer_type_node);
stabstr_C (',');
stabstr_D (int_bit_position (tem));
stabstr_C (',');
- stabstr_D (tree_low_cst (DECL_SIZE (tem), 1));
+ stabstr_D (tree_to_uhwi (DECL_SIZE (tem)));
stabstr_C (';');
}
}
else if (TYPE_SIZE (type) == NULL_TREE)
return 0;
else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
- return tree_low_cst (TYPE_SIZE (type), 1);
+ return tree_to_uhwi (TYPE_SIZE (type));
else
return TYPE_ALIGN (type);
}
|| !tree_fits_uhwi_p (DECL_SIZE (decl)))
return NULL;
- decl_size = tree_low_cst (DECL_SIZE (decl), 1);
+ decl_size = tree_to_uhwi (DECL_SIZE (decl));
descr = NULL;
descr_tail = &descr;
&& DECL_BIT_FIELD_TYPE (decl));
if (tree_fits_uhwi_p (DECL_SIZE (decl)))
- add_AT_unsigned (die, DW_AT_bit_size, tree_low_cst (DECL_SIZE (decl), 1));
+ add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
}
/* If the compiled language is ANSI C, then add a 'prototyped'
case POINTER_PLUS_EXPR:
case PLUS_EXPR:
if (tree_fits_uhwi_p (TREE_OPERAND (val, 1))
- && (unsigned HOST_WIDE_INT) tree_low_cst (TREE_OPERAND (val, 1), 1)
+ && (unsigned HOST_WIDE_INT) tree_to_uhwi (TREE_OPERAND (val, 1))
< 16384)
{
loc = descr_info_loc (TREE_OPERAND (val, 0), base_decl);
|| !tree_fits_uhwi_p (bit_offset))
return -1;
- offset += tree_low_cst (byte_offset, 1);
- offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;
+ offset += tree_to_uhwi (byte_offset);
+ offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
if (inner == NULL_TREE)
{
if (tree_fits_uhwi_p (off_tree))
{
attrs.offset_known_p = true;
- attrs.offset = tree_low_cst (off_tree, 1);
+ attrs.offset = tree_to_uhwi (off_tree);
apply_bitpos = bitpos;
}
}
if (tree_fits_uhwi_p (new_size))
{
attrs.size_known_p = true;
- attrs.size = tree_low_cst (new_size, 1);
+ attrs.size = tree_to_uhwi (new_size);
}
/* If we modified OFFSET based on T, then subtract the outstanding
}
attrs.expr = TREE_OPERAND (attrs.expr, 0);
- attrs.offset += tree_low_cst (offset, 1);
- attrs.offset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
+ attrs.offset += tree_to_uhwi (offset);
+ attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
/ BITS_PER_UNIT);
}
/* Similarly for the decl. */
/* Cache the interesting field offsets so that we have
easy access from rtl. */
sjlj_fc_call_site_ofs
- = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
- + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
+ = (tree_to_uhwi (DECL_FIELD_OFFSET (f_cs))
+ + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_cs)) / BITS_PER_UNIT);
sjlj_fc_data_ofs
- = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
- + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
+ = (tree_to_uhwi (DECL_FIELD_OFFSET (f_data))
+ + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_data)) / BITS_PER_UNIT);
sjlj_fc_personality_ofs
- = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
- + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
+ = (tree_to_uhwi (DECL_FIELD_OFFSET (f_per))
+ + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_per)) / BITS_PER_UNIT);
sjlj_fc_lsda_ofs
- = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
- + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
+ = (tree_to_uhwi (DECL_FIELD_OFFSET (f_lsda))
+ + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_lsda)) / BITS_PER_UNIT);
sjlj_fc_jbuf_ofs
- = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
- + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
+ = (tree_to_uhwi (DECL_FIELD_OFFSET (f_jbuf))
+ + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_jbuf)) / BITS_PER_UNIT);
}
}
return constm1_rtx;
}
- iwhich = tree_low_cst (which, 1);
+ iwhich = tree_to_uhwi (which);
iwhich = EH_RETURN_DATA_REGNO (iwhich);
if (iwhich == INVALID_REGNUM)
return constm1_rtx;
see finish_bitfield_layout. */
if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
&& tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
- bitoffset = (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
- - tree_low_cst (DECL_FIELD_OFFSET (repr), 1)) * BITS_PER_UNIT;
+ bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
+ - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
else
bitoffset = 0;
- bitoffset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
- - tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1));
+ bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
+ - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
/* If the adjustment is larger than bitpos, we would have a negative bit
position for the lower bound and this may wreak havoc later. Adjust
else
*bitstart = *bitpos - bitoffset;
- *bitend = *bitstart + tree_low_cst (DECL_SIZE (repr), 1) - 1;
+ *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
}
/* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
{
unsigned HOST_WIDE_INT n;
- n = tree_low_cst (nelts, 1) + 1;
+ n = tree_to_uhwi (nelts) + 1;
if (n == 0 || for_ctor_p)
return n;
else
tree hi_index = TREE_OPERAND (purpose, 1);
if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
- mult = (tree_low_cst (hi_index, 1)
- - tree_low_cst (lo_index, 1) + 1);
+ mult = (tree_to_uhwi (hi_index)
+ - tree_to_uhwi (lo_index) + 1);
}
num_fields += mult;
elt_type = TREE_TYPE (value);
continue;
if (tree_fits_uhwi_p (DECL_SIZE (field)))
- bitsize = tree_low_cst (DECL_SIZE (field), 1);
+ bitsize = tree_to_uhwi (DECL_SIZE (field));
else
bitsize = -1;
break;
}
- this_node_count = (tree_low_cst (hi_index, 1)
- - tree_low_cst (lo_index, 1) + 1);
+ this_node_count = (tree_to_uhwi (hi_index)
+ - tree_to_uhwi (lo_index) + 1);
}
else
this_node_count = 1;
mode = TYPE_MODE (elttype);
if (mode == BLKmode)
bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
- ? tree_low_cst (TYPE_SIZE (elttype), 1)
+ ? tree_to_uhwi (TYPE_SIZE (elttype))
: -1);
else
bitsize = GET_MODE_BITSIZE (mode);
(!MEM_P (target)
|| count <= 2
|| (tree_fits_uhwi_p (TYPE_SIZE (elttype))
- && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
+ && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
<= 40 * 8)))))
{
lo -= minelt; hi -= minelt;
{
if (index != 0)
bitpos = ((tree_to_shwi (index) - minelt)
- * tree_low_cst (TYPE_SIZE (elttype), 1));
+ * tree_to_uhwi (TYPE_SIZE (elttype)));
else
- bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
+ bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
&& TREE_CODE (type) == ARRAY_TYPE
int need_to_clear;
int icode = CODE_FOR_nothing;
tree elttype = TREE_TYPE (type);
- int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
+ int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
enum machine_mode eltmode = TYPE_MODE (elttype);
HOST_WIDE_INT bitsize;
HOST_WIDE_INT bitpos;
HOST_WIDE_INT eltpos;
tree value = ce->value;
- bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
+ bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
if (cleared && initializer_zerop (value))
continue;
if (ce->index)
- eltpos = tree_low_cst (ce->index, 1);
+ eltpos = tree_to_uhwi (ce->index);
else
eltpos = i;
if (! tree_fits_uhwi_p (size_tree))
mode = BLKmode, *pbitsize = -1;
else
- *pbitsize = tree_low_cst (size_tree, 1);
+ *pbitsize = tree_to_uhwi (size_tree);
}
/* Compute cumulative bit-offset for nested component-refs and array-refs,
|| TREE_ADDRESSABLE (exp)
|| (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
&& (! MOVE_BY_PIECES_P
- (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
+ (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
TYPE_ALIGN (type)))
&& ! mostly_zeros_p (exp))))
|| ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
|| !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
|| compare_tree_int (TREE_OPERAND (offset, 1),
BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
- || !exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
+ || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
return 0;
/* Look at the first operand of BIT_AND_EXPR and strip any conversion.
if (!tree_fits_uhwi_p (arg2))
return NULL_TREE;
- unsigned HOST_WIDE_INT shiftc = tree_low_cst (arg2, 1);
- unsigned HOST_WIDE_INT outerc = tree_low_cst (TYPE_SIZE (type), 1);
+ unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
+ unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
unsigned HOST_WIDE_INT innerc
- = tree_low_cst (TYPE_SIZE (TREE_TYPE (type)), 1);
+ = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
if (shiftc >= outerc || (shiftc % innerc) != 0)
return NULL_TREE;
int offset = shiftc / innerc;
{
unsigned HOST_WIDE_INT cst;
- cst = tree_low_cst (and1, 1);
+ cst = tree_to_uhwi (and1);
cst &= HOST_WIDE_INT_M1U
<< (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
change = (cst == 0);
If B is constant and (B & M) == 0, fold into A & M. */
if (tree_fits_uhwi_p (arg1))
{
- unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
+ unsigned HOST_WIDE_INT cst1 = tree_to_uhwi (arg1);
if (~cst1 && (cst1 & (cst1 + 1)) == 0
&& INTEGRAL_TYPE_P (TREE_TYPE (arg0))
&& (TREE_CODE (arg0) == PLUS_EXPR
}
if (!tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (arg0)))
- || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
+ || (tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (arg0)))
& cst1) != cst1)
which = -1;
&& TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
&& TREE_CODE (arg1) == INTEGER_CST
&& tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
- && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0
- && (tree_low_cst (TREE_OPERAND (arg0, 1), 1)
+ && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
+ && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
< TYPE_PRECISION (TREE_TYPE (arg0))))
{
- unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
+ unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
unsigned HOST_WIDE_INT newmask, zerobits = 0;
tree shift_type = TREE_TYPE (arg0);
&& TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
{
tree eltype = TREE_TYPE (TREE_TYPE (arg0));
- unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
- unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
- unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
+ unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
+ unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
+ unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
if (n != 0
&& (idx % width) == 0
/* A bit-field-ref that referenced the full argument can be stripped. */
if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
- && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
+ && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
&& integer_zerop (op2))
return fold_convert_loc (loc, type, arg0);
&& tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
/* This limitation should not be necessary, we just need to
round this up to mode size. */
- && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
+ && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
/* Need bit-shifting of the buffer to relax the following. */
- && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
+ && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
{
- unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
- unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
+ unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
+ unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
unsigned HOST_WIDE_INT clen;
- clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
+ clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
/* ??? We cannot tell native_encode_expr to start at
some random byte only. So limit us to a reasonable amount
of work. */
tree s2 = sizetree;
if (where_pad != none
&& (!tree_fits_uhwi_p (sizetree)
- || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary))
+ || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
SUB_PARM_SIZE (locate->slot_offset, s2);
}
if (where_pad != none
&& (!tree_fits_uhwi_p (sizetree)
- || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary))
+ || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
ADD_PARM_SIZE (locate->size, sizetree);
continue;
pos = int_bit_position (fld);
- size = tree_low_cst (DECL_SIZE (fld), 1);
+ size = tree_to_uhwi (DECL_SIZE (fld));
if (pos <= offset && (pos + size) > offset)
break;
}
if (TREE_CODE (v) == POINTER_PLUS_EXPR)
{
- offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
+ offset = tree_to_uhwi (TREE_OPERAND (v, 1)) * BITS_PER_UNIT;
v = TREE_OPERAND (v, 0);
}
else
return NULL_TREE;
}
gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
- size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
+ size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
offset += token * size;
fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
offset, size, v);
&& useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
&& tree_fits_uhwi_p (off))
{
- unsigned HOST_WIDE_INT offset = tree_low_cst (off, 1);
+ unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
tree part_width = TYPE_SIZE (type);
unsigned HOST_WIDE_INT part_widthi
= tree_to_shwi (part_width) / BITS_PER_UNIT;
else if (tree_fits_uhwi_p (TREE_VALUE (element)))
snprintf (buf, sizeof buf, HOST_WIDE_INT_PRINT_UNSIGNED,
((unsigned HOST_WIDE_INT)
- tree_low_cst (TREE_VALUE (element), 1)));
+ tree_to_uhwi (TREE_VALUE (element))));
else
snprintf (buf, sizeof buf, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
((unsigned HOST_WIDE_INT)
if (item->offset < 0)
continue;
gcc_checking_assert (is_gimple_ip_invariant (item->value));
- val_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (item->value)), 1);
+ val_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (item->value)));
if (merge_agg_lats_step (dest_plats, item->offset, val_size,
&aglat, pre_existing, &ret))
item->offset);
if (TYPE_P (item->value))
fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
- tree_low_cst (TYPE_SIZE (item->value), 1));
+ tree_to_uhwi (TYPE_SIZE (item->value)));
else
{
fprintf (f, "cst: ");
arg_base = arg;
arg_offset = 0;
type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
- arg_size = tree_low_cst (type_size, 1);
+ arg_size = tree_to_uhwi (type_size);
ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
}
else if (TREE_CODE (arg) == ADDR_EXPR)
cs = ipa_note_param_call (node, index, call);
ii = cs->indirect_info;
ii->offset = anc_offset;
- ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
+ ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
ii->otr_type = obj_type_ref_class (target);
ii->polymorphic = 1;
}
if (!binfo)
return NULL_TREE;
token = OBJ_TYPE_REF_TOKEN (otr);
- fndecl = gimple_get_virt_method_for_binfo (tree_low_cst (token, 1),
+ fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
binfo);
#ifdef ENABLE_CHECKING
if (fndecl)
+2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>
+
+ * objc-encoding.c: Replace tree_low_cst (..., 1) with tree_to_uhwi
+ throughout.
+
2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>

* objc-next-runtime-abi-02.c: Replace tree_low_cst (..., 0) with
between GNU and NeXT runtimes. */
if (DECL_BIT_FIELD_TYPE (field_decl))
{
- int size = tree_low_cst (DECL_SIZE (field_decl), 1);
+ int size = tree_to_uhwi (DECL_SIZE (field_decl));
if (flag_next_runtime)
encode_next_bitfield (size);
{
safelen = OMP_CLAUSE_SAFELEN_EXPR (safelen);
if (!tree_fits_uhwi_p (safelen)
- || (unsigned HOST_WIDE_INT) tree_low_cst (safelen, 1)
+ || (unsigned HOST_WIDE_INT) tree_to_uhwi (safelen)
> INT_MAX)
loop->safelen = INT_MAX;
else
- loop->safelen = tree_low_cst (safelen, 1);
+ loop->safelen = tree_to_uhwi (safelen);
if (loop->safelen == 1)
loop->safelen = 0;
}
HOST_WIDE_INT index;
/* Make sure the type is one of the supported sizes. */
- index = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
+ index = tree_to_uhwi (TYPE_SIZE_UNIT (type));
index = exact_log2 (index);
if (index >= 0 && index <= 4)
{
if (tree_fits_uhwi_p (niter)
&& max
&& compare_tree_int (niter, max - 1) == -1)
- nitercst = tree_low_cst (niter, 1) + 1;
+ nitercst = tree_to_uhwi (niter) + 1;
else
nitercst = max;
predictor = PRED_LOOP_ITERATIONS;
PUT_SDB_INT_VAL (int_bit_position (tem));
PUT_SDB_SCL (C_FIELD);
sdbout_type (DECL_BIT_FIELD_TYPE (tem));
- PUT_SDB_SIZE (tree_low_cst (DECL_SIZE (tem), 1));
+ PUT_SDB_SIZE (tree_to_uhwi (DECL_SIZE (tem)));
}
else
{
if (!tree_fits_uhwi_p (size))
return BLKmode;
- uhwi = tree_low_cst (size, 1);
+ uhwi = tree_to_uhwi (size);
ui = uhwi;
if (uhwi != ui)
return BLKmode;
limit_p = true;
if (tree_fits_uhwi_p (size) && tree_fits_uhwi_p (elem_size))
{
- int_size = tree_low_cst (size, 1);
- int_elem_size = tree_low_cst (elem_size, 1);
+ int_size = tree_to_uhwi (size);
+ int_elem_size = tree_to_uhwi (elem_size);
if (int_elem_size > 0
&& int_size % int_elem_size == 0
&& targetm.array_mode_supported_p (TYPE_MODE (elem_type),
offset = offset % align;
return ((offset + size + align - 1) / align
- > ((unsigned HOST_WIDE_INT) tree_low_cst (TYPE_SIZE (type), 1)
+ > ((unsigned HOST_WIDE_INT) tree_to_uhwi (TYPE_SIZE (type))
/ align));
}
#endif
/* Work out the known alignment so far. Note that A & (-A) is the
value of the least-significant bit in A that is one. */
if (! integer_zerop (rli->bitpos))
- known_align = (tree_low_cst (rli->bitpos, 1)
- & - tree_low_cst (rli->bitpos, 1));
+ known_align = (tree_to_uhwi (rli->bitpos)
+ & - tree_to_uhwi (rli->bitpos));
else if (integer_zerop (rli->offset))
known_align = 0;
else if (tree_fits_uhwi_p (rli->offset))
known_align = (BITS_PER_UNIT
- * (tree_low_cst (rli->offset, 1)
- & - tree_low_cst (rli->offset, 1)));
+ * (tree_to_uhwi (rli->offset)
+ & - tree_to_uhwi (rli->offset)));
else
known_align = rli->offset_align;
{
unsigned int type_align = TYPE_ALIGN (type);
tree dsize = DECL_SIZE (field);
- HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
+ HOST_WIDE_INT field_size = tree_to_uhwi (dsize);
HOST_WIDE_INT offset = tree_to_shwi (rli->offset);
HOST_WIDE_INT bit_offset = tree_to_shwi (rli->bitpos);
{
unsigned int type_align = TYPE_ALIGN (type);
tree dsize = DECL_SIZE (field);
- HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
+ HOST_WIDE_INT field_size = tree_to_uhwi (dsize);
HOST_WIDE_INT offset = tree_to_shwi (rli->offset);
HOST_WIDE_INT bit_offset = tree_to_shwi (rli->bitpos);
/* We're in the middle of a run of equal type size fields; make
sure we realign if we run out of bits. (Not decl size,
type size!) */
- HOST_WIDE_INT bitsize = tree_low_cst (DECL_SIZE (field), 1);
+ HOST_WIDE_INT bitsize = tree_to_uhwi (DECL_SIZE (field));
if (rli->remaining_in_alignment < bitsize)
{
- HOST_WIDE_INT typesize = tree_low_cst (TYPE_SIZE (type), 1);
+ HOST_WIDE_INT typesize = tree_to_uhwi (TYPE_SIZE (type));
/* out of bits; bump up to next 'word'. */
rli->bitpos
&& tree_fits_uhwi_p (DECL_SIZE (field)))
{
unsigned HOST_WIDE_INT bitsize
- = tree_low_cst (DECL_SIZE (field), 1);
+ = tree_to_uhwi (DECL_SIZE (field));
unsigned HOST_WIDE_INT typesize
- = tree_low_cst (TYPE_SIZE (TREE_TYPE (field)), 1);
+ = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (field)));
if (typesize < bitsize)
rli->remaining_in_alignment = 0;
approximate this by seeing if its position changed), lay out the field
again; perhaps we can use an integral mode for it now. */
if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field)))
- actual_align = (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
- & - tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1));
+ actual_align = (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
+ & - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field)));
else if (integer_zerop (DECL_FIELD_OFFSET (field)))
actual_align = MAX (BIGGEST_ALIGNMENT, rli->record_align);
else if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
actual_align = (BITS_PER_UNIT
- * (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
- & - tree_low_cst (DECL_FIELD_OFFSET (field), 1)));
+ * (tree_to_uhwi (DECL_FIELD_OFFSET (field))
+ & - tree_to_uhwi (DECL_FIELD_OFFSET (field))));
else
actual_align = DECL_OFFSET_ALIGN (field);
/* ACTUAL_ALIGN is still the actual alignment *within the record* .
size = size_diffop (DECL_FIELD_OFFSET (field),
DECL_FIELD_OFFSET (repr));
gcc_assert (tree_fits_uhwi_p (size));
- bitsize = (tree_low_cst (size, 1) * BITS_PER_UNIT
- + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
- - tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1)
- + tree_low_cst (DECL_SIZE (field), 1));
+ bitsize = (tree_to_uhwi (size) * BITS_PER_UNIT
+ + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
+ - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr))
+ + tree_to_uhwi (DECL_SIZE (field)));
/* Round up bitsize to multiples of BITS_PER_UNIT. */
bitsize = (bitsize + BITS_PER_UNIT - 1) & ~(BITS_PER_UNIT - 1);
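[Note: the round-up is the usual add-then-mask idiom for rounding to a
power-of-two multiple.  With BITS_PER_UNIT == 8, a worked example:

    bitsize = 13;
    bitsize = (bitsize + BITS_PER_UNIT - 1) & ~(BITS_PER_UNIT - 1);
    /* (13 + 7) & ~7 == 20 & ~7 == 16: 13 bits round up to 2 bytes.  */
]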
DECL_FIELD_OFFSET (repr));
if (tree_fits_uhwi_p (maxsize))
{
- maxbitsize = (tree_low_cst (maxsize, 1) * BITS_PER_UNIT
- + tree_low_cst (DECL_FIELD_BIT_OFFSET (nextf), 1)
- - tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1));
+ maxbitsize = (tree_to_uhwi (maxsize) * BITS_PER_UNIT
+ + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (nextf))
+ - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
/* If the group ends within a bitfield nextf does not need to be
aligned to BITS_PER_UNIT. Thus round up. */
maxbitsize = (maxbitsize + BITS_PER_UNIT - 1) & ~(BITS_PER_UNIT - 1);
tree maxsize = size_diffop (TYPE_SIZE_UNIT (DECL_CONTEXT (field)),
DECL_FIELD_OFFSET (repr));
if (tree_fits_uhwi_p (maxsize))
- maxbitsize = (tree_low_cst (maxsize, 1) * BITS_PER_UNIT
- - tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1));
+ maxbitsize = (tree_to_uhwi (maxsize) * BITS_PER_UNIT
+ - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
else
maxbitsize = bitsize;
}
&& transaction_invariant_address_p (lp->addr, entry_block)
&& TYPE_SIZE_UNIT (type) != NULL
&& tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
- && (tree_low_cst (TYPE_SIZE_UNIT (type), 1)
+ && (tree_to_uhwi (TYPE_SIZE_UNIT (type))
< PARAM_VALUE (PARAM_TM_MAX_AGGREGATE_SIZE))
/* We must be able to copy this type normally. I.e., no
special constructors and the like. */
code = BUILT_IN_TM_LOG_LDOUBLE;
else if (tree_fits_uhwi_p (size))
{
- unsigned int n = tree_low_cst (size, 1);
+ unsigned int n = tree_to_uhwi (size);
switch (n)
{
case 1:
else if (TYPE_SIZE_UNIT (type) != NULL
&& tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
{
- switch (tree_low_cst (TYPE_SIZE_UNIT (type), 1))
+ switch (tree_to_uhwi (TYPE_SIZE_UNIT (type)))
{
case 1:
code = BUILT_IN_TM_LOAD_1;
else if (TYPE_SIZE_UNIT (type) != NULL
&& tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
{
- switch (tree_low_cst (TYPE_SIZE_UNIT (type), 1))
+ switch (tree_to_uhwi (TYPE_SIZE_UNIT (type)))
{
case 1:
code = BUILT_IN_TM_STORE_1;
init_offset_limit (void)
{
if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
- offset_limit = tree_low_cst (TYPE_MAX_VALUE (sizetype), 1);
+ offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
else
offset_limit = -1;
offset_limit /= 2;
t = TREE_OPERAND (expr, 1);
off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
- size_int (tree_low_cst (DECL_FIELD_BIT_OFFSET (t), 1)
+ size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
/ BITS_PER_UNIT));
break;
&& DECL_P (pt_var)
&& tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
&& (unsigned HOST_WIDE_INT)
- tree_low_cst (DECL_SIZE_UNIT (pt_var), 1) < offset_limit)
+ tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
pt_var_size = DECL_SIZE_UNIT (pt_var);
else if (pt_var
&& TREE_CODE (pt_var) == STRING_CST
&& TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
&& tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
&& (unsigned HOST_WIDE_INT)
- tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
+ tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
< offset_limit)
pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
else
bytes = pt_var_size;
if (tree_fits_uhwi_p (bytes))
- return tree_low_cst (bytes, 1);
+ return tree_to_uhwi (bytes);
return unknown[object_size_type];
}
bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));
if (bytes && tree_fits_uhwi_p (bytes))
- return tree_low_cst (bytes, 1);
+ return tree_to_uhwi (bytes);
return unknown[object_size_type];
}
if (! tree_fits_uhwi_p (op1))
bytes = unknown[object_size_type];
else if (TREE_CODE (op0) == SSA_NAME)
- return merge_object_sizes (osi, var, op0, tree_low_cst (op1, 1));
+ return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
else
{
- unsigned HOST_WIDE_INT off = tree_low_cst (op1, 1);
+ unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);
/* op0 will be ADDR_EXPR here. */
bytes = addr_object_size (osi, op0, object_size_type);
if (tree_fits_uhwi_p (ost))
{
unsigned HOST_WIDE_INT object_size_type
- = tree_low_cst (ost, 1);
+ = tree_to_uhwi (ost);
if (object_size_type < 2)
result = fold_convert (size_type_node,
struct access *access;
HOST_WIDE_INT size;
- size = tree_low_cst (DECL_SIZE (fld), 1);
+ size = tree_to_uhwi (DECL_SIZE (fld));
access = create_access_1 (base, pos, size);
access->expr = nref;
access->type = ft;
static void
completely_scalarize_var (tree var)
{
- HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (var), 1);
+ HOST_WIDE_INT size = tree_to_uhwi (DECL_SIZE (var));
struct access *access;
access = create_access_1 (var, 0, size);
tr_size = TYPE_SIZE (TREE_TYPE (type));
if (!tr_size || !tree_fits_uhwi_p (tr_size))
return false;
- el_size = tree_low_cst (tr_size, 1);
+ el_size = tree_to_uhwi (tr_size);
minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
reject (var, "type size not fixed");
return false;
}
- if (tree_low_cst (TYPE_SIZE (type), 1) == 0)
+ if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
{
reject (var, "type size is zero");
return false;
if (TREE_CODE (var) == VAR_DECL
&& type_consists_of_records_p (TREE_TYPE (var)))
{
- if ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
+ if ((unsigned) tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
<= max_total_scalarization_size)
{
completely_scalarize_var (var);
&& tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
&& tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
{
- chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
+ chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
start_offset = access->offset
- + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
+ + tree_to_uhwi (TREE_OPERAND (bfr, 2));
}
else
start_offset = chunk_size = 0;
if (!COMPLETE_TYPE_P (type)
|| !tree_fits_uhwi_p (TYPE_SIZE (type))
- || tree_low_cst (TYPE_SIZE (type), 1) == 0
+ || tree_to_uhwi (TYPE_SIZE (type)) == 0
|| (AGGREGATE_TYPE_P (type)
&& type_internals_preclude_sra_p (type, &msg)))
continue;
}
if (POINTER_TYPE_P (TREE_TYPE (parm)))
- agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
+ agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
else
- agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
+ agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
if (total_size >= agg_size)
return NULL;
tree parm;
parm = repr->base;
- cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
+ cur_parm_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
gcc_assert (cur_parm_size > 0);
if (POINTER_TYPE_P (TREE_TYPE (parm)))
{
by_ref = true;
- agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
+ agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
}
else
{
align = gimple_call_arg (stmt, 1);
if (!tree_fits_uhwi_p (align))
return ptrval;
- aligni = tree_low_cst (align, 1);
+ aligni = tree_to_uhwi (align);
if (aligni <= 1
|| (aligni & (aligni - 1)) != 0)
return ptrval;
misalign = gimple_call_arg (stmt, 2);
if (!tree_fits_uhwi_p (misalign))
return ptrval;
- misaligni = tree_low_cst (misalign, 1);
+ misaligni = tree_to_uhwi (misalign);
if (misaligni >= aligni)
return ptrval;
}
if (!tree_fits_uhwi_p (off1)
|| compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
|| compare_tree_int (len1, TREE_STRING_LENGTH (str1)
- - tree_low_cst (off1, 1)) > 0
+ - tree_to_uhwi (off1)) > 0
|| TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
|| TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
!= TYPE_MODE (char_type_node))
/* Use maximum of difference plus memset length and memcpy length
as the new memcpy length; if it is too big, bail out. */
- src_len = tree_low_cst (diff, 1);
- src_len += tree_low_cst (len2, 1);
- if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
- src_len = tree_low_cst (len1, 1);
+ src_len = tree_to_uhwi (diff);
+ src_len += tree_to_uhwi (len2);
+ if (src_len < (unsigned HOST_WIDE_INT) tree_to_uhwi (len1))
+ src_len = tree_to_uhwi (len1);
if (src_len > 1024)
break;
src_buf = XALLOCAVEC (char, src_len + 1);
if (callee1)
memcpy (src_buf,
- TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
- tree_low_cst (len1, 1));
+ TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
+ tree_to_uhwi (len1));
else
src_buf[0] = tree_to_shwi (src1);
- memset (src_buf + tree_low_cst (diff, 1),
- tree_to_shwi (val2), tree_low_cst (len2, 1));
+ memset (src_buf + tree_to_uhwi (diff),
+ tree_to_shwi (val2), tree_to_uhwi (len2));
src_buf[src_len] = '\0';
/* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
handle embedded '\0's. */
/* CNT1 + CNT2 == B case above. */
if (tree_fits_uhwi_p (def_arg2[0])
&& tree_fits_uhwi_p (def_arg2[1])
- && (unsigned HOST_WIDE_INT) tree_low_cst (def_arg2[0], 1)
- + tree_low_cst (def_arg2[1], 1) == TYPE_PRECISION (rtype))
+ && (unsigned HOST_WIDE_INT) tree_to_uhwi (def_arg2[0])
+ + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
rotcnt = def_arg2[0];
else if (TREE_CODE (def_arg2[0]) != SSA_NAME
|| TREE_CODE (def_arg2[1]) != SSA_NAME)
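[Note: the constant case handled above corresponds to the classic
open-coded rotate, where the two shift counts are constants that sum to
the type precision.  A hypothetical example of user code that matches:

    unsigned int
    rotl3 (unsigned int x)
    {
      /* 3 + 29 == 32 == TYPE_PRECISION (unsigned int), so the whole
         expression can be folded into a single rotate.  */
      return (x << 3) | (x >> 29);
    }
]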
from the iv test. */
if (tree_fits_uhwi_p (niter))
{
- n_unroll = tree_low_cst (niter, 1);
+ n_unroll = tree_to_uhwi (niter);
n_unroll_found = true;
edge_to_cancel = EDGE_SUCC (exit->src, 0);
if (edge_to_cancel == exit)
period = build_low_bits_mask (type,
(TYPE_PRECISION (type)
- - tree_low_cst (pow2div, 1)));
+ - tree_to_uhwi (pow2div)));
return period;
}
if (!no_overflow)
{
max = double_int::mask (TYPE_PRECISION (type)
- - tree_low_cst (num_ending_zeros (s), 1));
+ - tree_to_uhwi (num_ending_zeros (s)));
mpz_set_double_int (bnd, max, true);
return;
}
bits = num_ending_zeros (s);
bound = build_low_bits_mask (niter_type,
(TYPE_PRECISION (niter_type)
- - tree_low_cst (bits, 1)));
+ - tree_to_uhwi (bits)));
d = fold_binary_to_constant (LSHIFT_EXPR, niter_type,
build_int_cst (niter_type, 1), bits);
{
stride = TYPE_SIZE_UNIT (TREE_TYPE (ref));
if (tree_fits_uhwi_p (stride))
- astride = tree_low_cst (stride, 1);
+ astride = tree_to_uhwi (stride);
else
astride = L1_CACHE_LINE_SIZE;
{
if (!tree_fits_uhwi_p (last.len)
|| integer_zerop (len)
- || (unsigned HOST_WIDE_INT) tree_low_cst (len, 1)
- != (unsigned HOST_WIDE_INT) tree_low_cst (last.len, 1) + 1)
+ || (unsigned HOST_WIDE_INT) tree_to_uhwi (len)
+ != (unsigned HOST_WIDE_INT) tree_to_uhwi (last.len) + 1)
return;
/* Don't adjust the length if it is divisible by 4; it is more efficient
   to store the extra '\0' in that case. */
- if ((((unsigned HOST_WIDE_INT) tree_low_cst (len, 1)) & 3) == 0)
+ if ((((unsigned HOST_WIDE_INT) tree_to_uhwi (len)) & 3) == 0)
return;
}
else if (TREE_CODE (len) == SSA_NAME)
/* Handle memcpy (x, "abcd", 5) or
memcpy (x, "abc\0uvw", 7). */
if (!tree_fits_uhwi_p (len)
- || (unsigned HOST_WIDE_INT) tree_low_cst (len, 1)
+ || (unsigned HOST_WIDE_INT) tree_to_uhwi (len)
<= (unsigned HOST_WIDE_INT) ~idx)
return;
}
{
tree off = gimple_assign_rhs2 (stmt);
if (tree_fits_uhwi_p (off)
- && (unsigned HOST_WIDE_INT) tree_low_cst (off, 1)
+ && (unsigned HOST_WIDE_INT) tree_to_uhwi (off)
<= (unsigned HOST_WIDE_INT) ~idx)
ssa_ver_to_stridx[SSA_NAME_VERSION (lhs)]
- = ~(~idx - (int) tree_low_cst (off, 1));
+ = ~(~idx - (int) tree_to_uhwi (off));
return;
}
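
As I read the surrounding strlen pass, a negative entry in ssa_ver_to_stridx
encodes a known string length: the index ~LEN stands for a pointer whose
strlen is LEN, so the arithmetic above rebases that encoding when a constant
offset is added to the pointer. A standalone model of the encoding:

  #include <assert.h>
  #include <string.h>

  int
  main (void)
  {
    const char *s = "hello";
    int idx = ~(int) strlen (s);      /* encode strlen 5 as ~5 == -6 */
    unsigned off = 2;                 /* pointer advances by 2 bytes */
    assert (off <= (unsigned) ~idx);  /* cannot step past the NUL */
    int new_idx = ~(~idx - (int) off);
    assert (~new_idx == (int) strlen (s + off));  /* strlen ("llo") == 3 */
    return 0;
  }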
&& TREE_CODE (rhs1) == SSA_NAME
&& tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
{
- ret += tree_low_cst (gimple_assign_rhs2 (stmt), 1);
+ ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
lhs = rhs1;
continue;
}
&& TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
&& tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
{
- ret += tree_low_cst (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1);
+ ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
continue;
}
&& TREE_CODE (rhs1) == SSA_NAME
&& tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
{
- val -= tree_low_cst (gimple_assign_rhs2 (stmt), 1);
+ val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
lhs = rhs1;
continue;
}
&& TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
&& tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
{
- val -= tree_low_cst (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1);
+ val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
continue;
}
gpr_size = si->offsets[SSA_NAME_VERSION (use)]
+ tree_to_shwi (TREE_OPERAND (rhs, 1))
- + tree_low_cst (access_size, 1);
+ + tree_to_uhwi (access_size);
if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
else if (gpr_size > cfun->va_list_gpr_size)
return false;
}
- if ((unsigned HOST_WIDE_INT) tree_low_cst (info->range_size, 1)
+ if ((unsigned HOST_WIDE_INT) tree_to_uhwi (info->range_size)
> ((unsigned) info->count * SWITCH_CONVERSION_BRANCH_RATIO))
{
info->reason = "the maximum range-branch ratio exceeded";
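
The rejected case is a sparseness test: switch-to-array conversion only pays
off when the value range is at most SWITCH_CONVERSION_BRANCH_RATIO times the
number of branches (a --param whose default is 8, if I read the parameter
definitions right). A standalone model of the test:

  #include <assert.h>

  static int
  dense_enough (unsigned long long range_size, unsigned count,
                unsigned ratio)
  {
    return range_size <= (unsigned long long) count * ratio;
  }

  int
  main (void)
  {
    assert (dense_enough (20, 4, 8));    /* 4 cases spanning 20 values: ok */
    assert (!dense_enough (100, 4, 8));  /* 100 > 4 * 8: too sparse */
    return 0;
  }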
info->target_inbound_names = info->default_values + info->phi_count;
info->target_outbound_names = info->target_inbound_names + info->phi_count;
for (i = 0; i < info->phi_count; i++)
- vec_alloc (info->constructors[i], tree_low_cst (info->range_size, 1) + 1);
+ vec_alloc (info->constructors[i], tree_to_uhwi (info->range_size) + 1);
}
/* Free the arrays created by create_temp_arrays(). The vectors that are
static tree
build_replicated_const (tree type, tree inner_type, HOST_WIDE_INT value)
{
- int width = tree_low_cst (TYPE_SIZE (inner_type), 1);
+ int width = tree_to_uhwi (TYPE_SIZE (inner_type));
int n = HOST_BITS_PER_WIDE_INT / width;
unsigned HOST_WIDE_INT low, high, mask;
tree ret;
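
build_replicated_const tiles the low WIDTH bits of VALUE across a host wide
integer, N = HOST_BITS_PER_WIDE_INT / WIDTH times. A standalone model
assuming a 64-bit host word (the helper name replicate is mine):

  #include <assert.h>
  #include <stdint.h>

  static uint64_t
  replicate (uint64_t value, int width)
  {
    int n = 64 / width;                /* HOST_BITS_PER_WIDE_INT / width */
    uint64_t mask = (width == 64) ? ~0ull : (1ull << width) - 1;
    uint64_t low = 0;
    for (int i = 0; i < n; i++)
      low |= (value & mask) << (i * width);
    return low;
  }

  int
  main (void)
  {
    assert (replicate (0x5A, 8) == 0x5A5A5A5A5A5A5A5Aull);
    assert (replicate (0x1, 16) == 0x0001000100010001ull);
    return 0;
  }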
tree part_width = TYPE_SIZE (inner_type);
tree index = bitsize_int (0);
int nunits = TYPE_VECTOR_SUBPARTS (type);
- int delta = tree_low_cst (part_width, 1)
- / tree_low_cst (TYPE_SIZE (TREE_TYPE (type)), 1);
+ int delta = tree_to_uhwi (part_width)
+ / tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
int i;
location_t loc = gimple_location (gsi_stmt (*gsi));
{
tree result, compute_type;
enum machine_mode mode;
- int n_words = tree_low_cst (TYPE_SIZE_UNIT (type), 1) / UNITS_PER_WORD;
+ int n_words = tree_to_uhwi (TYPE_SIZE_UNIT (type)) / UNITS_PER_WORD;
location_t loc = gimple_location (gsi_stmt (*gsi));
/* We have three strategies. If the type is already correct, just do
else
{
/* Use a single scalar operation with a mode no wider than word_mode. */
- mode = mode_for_size (tree_low_cst (TYPE_SIZE (type), 1), MODE_INT, 0);
+ mode = mode_for_size (tree_to_uhwi (TYPE_SIZE (type)), MODE_INT, 0);
compute_type = lang_hooks.types.type_for_mode (mode, 1);
result = f (gsi, compute_type, a, b, NULL_TREE, NULL_TREE, code);
warning_at (loc, OPT_Wvector_operation_performance,
tree type, tree a, tree b, enum tree_code code)
{
int parts_per_word = UNITS_PER_WORD
- / tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (type)), 1);
+ / tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (type)));
if (INTEGRAL_TYPE_P (TREE_TYPE (type))
&& parts_per_word >= 4
if (!tree_fits_uhwi_p (cst2))
return NULL_TREE;
- d2 = tree_low_cst (cst2, 1) & mask;
+ d2 = tree_to_uhwi (cst2) & mask;
if (d2 == 0)
return NULL_TREE;
this_pre_shift = floor_log2 (d2 & -d2);
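
The last line works because D & -D isolates the lowest set bit of D, so its
log2 is the number of trailing zeros, i.e. the pre-shift that makes the
divisor odd. A standalone check (floor_log2_ull is my stand-in for GCC's
floor_log2):

  #include <assert.h>

  static int
  floor_log2_ull (unsigned long long x)
  {
    int l = -1;
    while (x)
      {
        x >>= 1;
        l++;
      }
    return l;
  }

  int
  main (void)
  {
    unsigned long long d2 = 24;                 /* 0b11000 */
    int pre_shift = floor_log2_ull (d2 & -d2);  /* lowest bit 8 -> 3 */
    assert (pre_shift == 3);
    assert ((d2 >> pre_shift) % 2 == 1);        /* 24 >> 3 == 3, now odd */
    return 0;
  }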
}
else
{
- int vec_size_in_bits = tree_low_cst (TYPE_SIZE (vectype), 1);
+ int vec_size_in_bits = tree_to_uhwi (TYPE_SIZE (vectype));
tree bitsize =
TYPE_SIZE (TREE_TYPE (gimple_assign_lhs (orig_stmt)));
- int element_bitsize = tree_low_cst (bitsize, 1);
+ int element_bitsize = tree_to_uhwi (bitsize);
int nelements = vec_size_in_bits / element_bitsize;
optab = optab_for_tree_code (code, vectype, optab_default);
enum tree_code shift_code = ERROR_MARK;
bool have_whole_vector_shift = true;
int bit_offset;
- int element_bitsize = tree_low_cst (bitsize, 1);
- int vec_size_in_bits = tree_low_cst (TYPE_SIZE (vectype), 1);
+ int element_bitsize = tree_to_uhwi (bitsize);
+ int vec_size_in_bits = tree_to_uhwi (TYPE_SIZE (vectype));
tree vec_temp;
if (optab_handler (vec_shr_optab, mode) != CODE_FOR_nothing)
dump_printf_loc (MSG_NOTE, vect_location,
"Reduce using scalar code.\n");
- vec_size_in_bits = tree_low_cst (TYPE_SIZE (vectype), 1);
+ vec_size_in_bits = tree_to_uhwi (TYPE_SIZE (vectype));
FOR_EACH_VEC_ELT (new_phis, i, new_phi)
{
if (gimple_code (new_phi) == GIMPLE_PHI)
if (TREE_CODE (def) == INTEGER_CST)
{
if (!tree_fits_uhwi_p (def)
- || (unsigned HOST_WIDE_INT) tree_low_cst (def, 1)
+ || (unsigned HOST_WIDE_INT) tree_to_uhwi (def)
>= GET_MODE_PRECISION (TYPE_MODE (type))
|| integer_zerop (def))
return NULL;
def2 = build_int_cst (stype,
GET_MODE_PRECISION (TYPE_MODE (type))
- - tree_low_cst (def, 1));
+ - tree_to_uhwi (def));
}
else
{
if (TREE_CODE (name2) == SSA_NAME
&& tree_fits_uhwi_p (cst2)
&& INTEGRAL_TYPE_P (TREE_TYPE (name2))
- && IN_RANGE (tree_low_cst (cst2, 1), 1, prec - 1)
+ && IN_RANGE (tree_to_uhwi (cst2), 1, prec - 1)
&& prec <= HOST_BITS_PER_DOUBLE_INT
&& prec == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (val)))
&& live_on_edge (e, name2)
&& !has_single_use (name2))
{
- mask = double_int::mask (tree_low_cst (cst2, 1));
+ mask = double_int::mask (tree_to_uhwi (cst2));
val2 = fold_binary (LSHIFT_EXPR, TREE_TYPE (val), val, cst2);
}
}
case LSHIFT_EXPR:
ret1 = tree_ctz (TREE_OPERAND (expr, 0));
if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
- && ((unsigned HOST_WIDE_INT) tree_low_cst (TREE_OPERAND (expr, 1), 1)
+ && ((unsigned HOST_WIDE_INT) tree_to_uhwi (TREE_OPERAND (expr, 1))
< (unsigned HOST_WIDE_INT) prec))
{
- ret2 = tree_low_cst (TREE_OPERAND (expr, 1), 1);
+ ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
return MIN (ret1 + ret2, prec);
}
return ret1;
case RSHIFT_EXPR:
if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
- && ((unsigned HOST_WIDE_INT) tree_low_cst (TREE_OPERAND (expr, 1), 1)
+ && ((unsigned HOST_WIDE_INT) tree_to_uhwi (TREE_OPERAND (expr, 1))
< (unsigned HOST_WIDE_INT) prec))
{
ret1 = tree_ctz (TREE_OPERAND (expr, 0));
- ret2 = tree_low_cst (TREE_OPERAND (expr, 1), 1);
+ ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
if (ret1 > ret2)
return ret1 - ret2;
}
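
The two shift cases of tree_ctz follow ordinary bit arithmetic: a left shift
by N adds N known trailing zeros (capped at the precision), while a right
shift by N can only be credited with the difference. A standalone check with
32-bit values (ctz32 is my helper):

  #include <assert.h>
  #include <stdint.h>

  static int
  ctz32 (uint32_t x)
  {
    int n = 0;
    if (x == 0)
      return 32;
    while (!(x & 1))
      {
        x >>= 1;
        n++;
      }
    return n;
  }

  int
  main (void)
  {
    uint32_t x = 0xf0;                         /* ctz == 4 */
    assert (ctz32 (x << 3) == ctz32 (x) + 3);  /* LSHIFT_EXPR case */
    assert (ctz32 (x >> 2) >= ctz32 (x) - 2);  /* RSHIFT_EXPR only
                                                  guarantees the difference */
    return 0;
  }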
size_tree = TYPE_ARRAY_MAX_SIZE (type);
if (size_tree && tree_fits_uhwi_p (size_tree))
- size = tree_low_cst (size_tree, 1);
+ size = tree_to_uhwi (size_tree);
}
/* If we still haven't been able to get a size, see if the language
size_tree = lang_hooks.types.max_size (type);
if (size_tree && tree_fits_uhwi_p (size_tree))
- size = tree_low_cst (size_tree, 1);
+ size = tree_to_uhwi (size_tree);
}
return size;
if (TYPE_SIZE (type) == NULL_TREE)
return NULL_TREE;
- HOST_WIDE_INT type_size = tree_low_cst (TYPE_SIZE (type), 1);
+ HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
switch (type_size)
{
case 8:
ret = itype;
if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
- ret = type_hash_canon (tree_low_cst (TYPE_MAX_VALUE (itype), 1), itype);
+ ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
if (precision <= MAX_INT_CACHED_PREC)
nonstandard_integer_type_cache[precision + unsignedp] = ret;
&& tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
{
unsigned HOST_WIDE_INT innerprec
- = tree_low_cst (DECL_SIZE (TREE_OPERAND (op, 1)), 1);
+ = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
|| TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
continue;
pos = int_bit_position (fld);
- size = tree_low_cst (DECL_SIZE (fld), 1);
+ size = tree_to_uhwi (DECL_SIZE (fld));
if (pos <= offset && (pos + size) > offset)
break;
}
case fetch_op:
last_arg = gimple_call_arg (stmt, num - 1);
if (!tree_fits_uhwi_p (last_arg)
- || (unsigned HOST_WIDE_INT) tree_low_cst (last_arg, 1)
+ || (unsigned HOST_WIDE_INT) tree_to_uhwi (last_arg)
> MEMMODEL_SEQ_CST)
return;
gimple_call_set_fndecl (stmt, decl);
for (j = 0; j < 6; j++)
args[j] = gimple_call_arg (stmt, j);
if (!tree_fits_uhwi_p (args[4])
- || (unsigned HOST_WIDE_INT) tree_low_cst (args[4], 1)
+ || (unsigned HOST_WIDE_INT) tree_to_uhwi (args[4])
> MEMMODEL_SEQ_CST)
return;
if (!tree_fits_uhwi_p (args[5])
- || (unsigned HOST_WIDE_INT) tree_low_cst (args[5], 1)
+ || (unsigned HOST_WIDE_INT) tree_to_uhwi (args[5])
> MEMMODEL_SEQ_CST)
return;
update_gimple_call (gsi, decl, 5, args[0], args[1], args[2],
get_ubsan_type_info_for_type (tree type)
{
gcc_assert (TYPE_SIZE (type) && tree_fits_uhwi_p (TYPE_SIZE (type)));
- int prec = exact_log2 (tree_low_cst (TYPE_SIZE (type), 1));
+ int prec = exact_log2 (tree_to_uhwi (TYPE_SIZE (type)));
gcc_assert (prec != -1);
return (prec << 1) | !TYPE_UNSIGNED (type);
}
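
The encoding above packs log2 of the type's bit size into the high bits and
signedness into bit 0, so a signed 32-bit type yields (5 << 1) | 1 == 11.
A standalone model (type_info is my helper; only power-of-two sizes occur,
as the prec != -1 assert enforces):

  #include <assert.h>

  static int
  type_info (int bitsize, int is_signed)
  {
    int prec = 0;
    while ((1 << prec) < bitsize)
      prec++;                      /* exact_log2 for power-of-two sizes */
    return (prec << 1) | is_signed;
  }

  int
  main (void)
  {
    assert (type_info (32, 1) == 11);  /* signed 32-bit */
    assert (type_info (32, 0) == 10);  /* unsigned 32-bit */
    assert (type_info (64, 1) == 13);  /* signed 64-bit */
    return 0;
  }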
{
unsigned HOST_WIDE_INT size, rounded;
- size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
+ size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
rounded = size;
if ((flag_sanitize & SANITIZE_ADDRESS) && asan_protect_global (decl))
&& !initializer_zerop (DECL_INITIAL (decl)))
/* Output the actual data. */
output_constant (DECL_INITIAL (decl),
- tree_low_cst (DECL_SIZE_UNIT (decl), 1),
+ tree_to_uhwi (DECL_SIZE_UNIT (decl)),
get_variable_align (decl));
else
/* Leave space for it. */
- assemble_zeros (tree_low_cst (DECL_SIZE_UNIT (decl), 1));
+ assemble_zeros (tree_to_uhwi (DECL_SIZE_UNIT (decl)));
}
}
if (asan_protected)
{
unsigned HOST_WIDE_INT size
- = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
+ = tree_to_uhwi (DECL_SIZE_UNIT (decl));
assemble_zeros (asan_red_zone_size (size));
}
}
else if (TREE_CODE (target) == ARRAY_REF
|| TREE_CODE (target) == ARRAY_RANGE_REF)
{
- offset += (tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (target)), 1)
+ offset += (tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (target)))
* tree_to_shwi (TREE_OPERAND (target, 1)));
target = TREE_OPERAND (target, 0);
}
double_int idx = tree_to_double_int (local->index)
- tree_to_double_int (local->min_index);
idx = idx.sext (prec);
- fieldpos = (tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (local->val)), 1)
+ fieldpos = (tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (local->val)))
* idx.low);
}
else if (local->field != NULL_TREE)
gcc_assert (!fieldsize || !DECL_CHAIN (local->field));
}
else
- fieldsize = tree_low_cst (DECL_SIZE_UNIT (local->field), 1);
+ fieldsize = tree_to_uhwi (DECL_SIZE_UNIT (local->field));
}
else
fieldsize = int_size_in_bytes (TREE_TYPE (local->type));
/* Bit size of this element. */
HOST_WIDE_INT ebitsize
= (local->field
- ? tree_low_cst (DECL_SIZE (local->field), 1)
- : tree_low_cst (TYPE_SIZE (TREE_TYPE (local->type)), 1));
+ ? tree_to_uhwi (DECL_SIZE (local->field))
+ : tree_to_uhwi (TYPE_SIZE (TREE_TYPE (local->type))));
/* Relative index of this element if this is an array component. */
HOST_WIDE_INT relative_index
{
decl = SYMBOL_REF_DECL (symbol);
alignment = get_variable_align (decl);
- size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
+ size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
if ((flag_sanitize & SANITIZE_ADDRESS)
&& asan_protect_global (decl))
{
HOST_WIDE_INT size;
decl = SYMBOL_REF_DECL (symbol);
assemble_variable_contents (decl, XSTR (symbol, 0), false);
- size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
+ size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
offset += size;
if ((flag_sanitize & SANITIZE_ADDRESS)
&& asan_protect_global (decl))