#include "tm.h"
#include "rtl.h"
#include "tree.h"
+#include "stor-layout.h"
+#include "stringpool.h"
+#include "gcc-symtab.h"
+#include "varasm.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "target.h"
#include "common/common-target.h"
#include "targhooks.h"
-#include "tree-mudflap.h"
#include "cgraph.h"
#include "pointer-set.h"
#include "asan.h"
In particular, a.out format supports a maximum alignment of 4. */
if (align > MAX_OFILE_ALIGNMENT)
{
- warning (0, "alignment of %q+D is greater than maximum object "
- "file alignment. Using %d", decl,
- MAX_OFILE_ALIGNMENT/BITS_PER_UNIT);
+ error ("alignment of %q+D is greater than maximum object "
+ "file alignment %d", decl,
+ MAX_OFILE_ALIGNMENT/BITS_PER_UNIT);
align = MAX_OFILE_ALIGNMENT;
}
constant size. */
if (DECL_SIZE_UNIT (decl) == NULL)
return NULL;
- if (!host_integerp (DECL_SIZE_UNIT (decl), 1))
+ if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (decl)))
return NULL;
/* Find out which section should contain DECL. We cannot put it into
&& SYMBOL_REF_HAS_BLOCK_INFO_P (XEXP (x, 0)))
change_symbol_block (XEXP (x, 0), get_block_for_decl (decl));
- /* Make this function static known to the mudflap runtime. */
- if (flag_mudflap && TREE_CODE (decl) == VAR_DECL)
- mudflap_enqueue_decl (decl);
-
return;
}
If the name is changed, the macro ASM_OUTPUT_LABELREF
will have to know how to strip this information. */
targetm.encode_section_info (decl, DECL_RTL (decl), true);
-
- /* Make this function static known to the mudflap runtime. */
- if (flag_mudflap && TREE_CODE (decl) == VAR_DECL)
- mudflap_enqueue_decl (decl);
}
/* Like make_decl_rtl, but inhibit creation of new alias sets when
rtx
make_decl_rtl_for_debug (tree decl)
{
- unsigned int save_aliasing_flag, save_mudflap_flag;
+ unsigned int save_aliasing_flag;
rtx rtl;
if (DECL_RTL_SET_P (decl))
we do not want to create alias sets that will throw the alias
numbers off in the comparison dumps. So... clearing
flag_strict_aliasing will keep new_alias_set() from creating a
- new set. It is undesirable to register decl with mudflap
- in this case as well. */
+ new set. */
save_aliasing_flag = flag_strict_aliasing;
flag_strict_aliasing = 0;
- save_mudflap_flag = flag_mudflap;
- flag_mudflap = 0;
rtl = DECL_RTL (decl);
/* Reset DECL_RTL back, as various parts of the compiler expects
SET_DECL_RTL (decl, NULL);
flag_strict_aliasing = save_aliasing_flag;
- flag_mudflap = save_mudflap_flag;
-
return rtl;
}
\f
align the hot section and write out the hot section label.
But if the current function is a thunk, we do not have a CFG. */
if (!cfun->is_thunk
- && BB_PARTITION (ENTRY_BLOCK_PTR->next_bb) == BB_COLD_PARTITION)
+ && BB_PARTITION (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) == BB_COLD_PARTITION)
{
switch_to_section (text_section);
assemble_align (DECL_ALIGN (decl));
{
unsigned HOST_WIDE_INT size, rounded;
- size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
+ size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
rounded = size;
if ((flag_sanitize & SANITIZE_ADDRESS) && asan_protect_global (decl))
if (!sect->noswitch.callback (decl, name, size, rounded)
&& (unsigned HOST_WIDE_INT) (align / BITS_PER_UNIT) > rounded)
- warning (0, "requested alignment for %q+D is greater than "
- "implemented alignment of %wu", decl, rounded);
+ error ("requested alignment for %q+D is greater than "
+ "implemented alignment of %wu", decl, rounded);
}
/* A subroutine of assemble_variable. Output the label and contents of
&& !initializer_zerop (DECL_INITIAL (decl)))
/* Output the actual data. */
output_constant (DECL_INITIAL (decl),
- tree_low_cst (DECL_SIZE_UNIT (decl), 1),
+ tree_to_uhwi (DECL_SIZE_UNIT (decl)),
get_variable_align (decl));
else
/* Leave space for it. */
- assemble_zeros (tree_low_cst (DECL_SIZE_UNIT (decl), 1));
+ assemble_zeros (tree_to_uhwi (DECL_SIZE_UNIT (decl)));
}
}
if (asan_protected)
{
unsigned HOST_WIDE_INT int size
- = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
+ = tree_to_uhwi (DECL_SIZE_UNIT (decl));
assemble_zeros (asan_red_zone_size (size));
}
}
definition. */
struct cgraph_node *node = cgraph_get_create_node (decl);
if (!DECL_EXTERNAL (decl)
- && !node->symbol.definition)
+ && !node->definition)
cgraph_mark_force_output_node (node);
}
else if (TREE_CODE (decl) == VAR_DECL)
struct varpool_node *node = varpool_node_for_decl (decl);
/* C++ frontend use mark_decl_references to force COMDAT variables
to be output that might appear dead otherwise. */
- node->symbol.force_output = true;
+ node->force_output = true;
}
/* else do nothing - we can get various sorts of CST nodes here,
which do not need to be marked. */
while (1)
{
if (TREE_CODE (target) == COMPONENT_REF
- && host_integerp (byte_position (TREE_OPERAND (target, 1)), 0))
+ && tree_fits_shwi_p (byte_position (TREE_OPERAND (target, 1))))
{
offset += int_byte_position (TREE_OPERAND (target, 1));
target = TREE_OPERAND (target, 0);
else if (TREE_CODE (target) == ARRAY_REF
|| TREE_CODE (target) == ARRAY_RANGE_REF)
{
- offset += (tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (target)), 1)
- * tree_low_cst (TREE_OPERAND (target, 1), 0));
+ offset += (tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (target)))
+ * tree_to_shwi (TREE_OPERAND (target, 1)));
target = TREE_OPERAND (target, 0);
}
else if (TREE_CODE (target) == MEM_REF
&& TREE_CODE (TREE_OPERAND (target, 0)) == ADDR_EXPR)
{
- offset += mem_ref_offset (target).low;
+ offset += mem_ref_offset (target).to_short_addr ();
target = TREE_OPERAND (TREE_OPERAND (target, 0), 0);
}
else if (TREE_CODE (target) == INDIRECT_REF
switch (code)
{
case INTEGER_CST:
- p = (char *) &TREE_INT_CST (exp);
- len = sizeof TREE_INT_CST (exp);
+ p = (char *) &TREE_INT_CST_ELT (exp, 0);
+ len = TREE_INT_CST_NUNITS (exp) * sizeof (HOST_WIDE_INT);
break;
case REAL_CST:
desc = ggc_alloc_constant_descriptor_tree ();
desc->value = copy_constant (exp);
- /* Propagate marked-ness to copied constant. */
- if (flag_mudflap && mf_marked_p (exp))
- mf_mark (desc->value);
-
/* Create a string containing the label name, in LABEL. */
labelno = const_labelno++;
ASM_GENERATE_INTERNAL_LABEL (label, "LC", labelno);
assemble_zeros (asan_red_zone_size (size));
}
}
- if (flag_mudflap)
- mudflap_enqueue_constant (exp);
}
/* Look up EXP in the table of constant descriptors. Return the rtl
enum rtx_code code;
hashval_t h, *hp;
rtx x;
+ int i;
x = *xp;
code = GET_CODE (x);
{
case CONST_INT:
hwi = INTVAL (x);
+
fold_hwi:
{
int shift = sizeof (hashval_t) * CHAR_BIT;
const int n = sizeof (HOST_WIDE_INT) / sizeof (hashval_t);
- int i;
-
+
h ^= (hashval_t) hwi;
for (i = 1; i < n; ++i)
{
}
break;
+ case CONST_WIDE_INT:
+ hwi = GET_MODE_PRECISION (mode);
+ {
+ for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
+ hwi ^= CONST_WIDE_INT_ELT (x, i);
+ goto fold_hwi;
+ }
+
case CONST_DOUBLE:
- if (mode == VOIDmode)
+ if (TARGET_SUPPORTS_WIDE_INT == 0 && mode == VOIDmode)
{
hwi = CONST_DOUBLE_LOW (x) ^ CONST_DOUBLE_HIGH (x);
goto fold_hwi;
exp = build1 (ADDR_EXPR, saved_type, TREE_OPERAND (exp, 0));
/* Likewise for constant ints. */
else if (TREE_CODE (exp) == INTEGER_CST)
- exp = build_int_cst_wide (saved_type, TREE_INT_CST_LOW (exp),
- TREE_INT_CST_HIGH (exp));
+ exp = wide_int_to_tree (saved_type, exp);
}
if (TREE_CODE (exp) == FDESC_EXPR)
{
#ifdef ASM_OUTPUT_FDESC
- HOST_WIDE_INT part = tree_low_cst (TREE_OPERAND (exp, 1), 0);
+ HOST_WIDE_INT part = tree_to_shwi (TREE_OPERAND (exp, 1));
tree decl = TREE_OPERAND (exp, 0);
ASM_OUTPUT_FDESC (asm_out_file, decl, part);
#else
case REFERENCE_TYPE:
case OFFSET_TYPE:
case FIXED_POINT_TYPE:
+ case POINTER_BOUNDS_TYPE:
+ case NULLPTR_TYPE:
if (! assemble_integer (expand_expr (exp, NULL_RTX, VOIDmode,
EXPAND_INITIALIZER),
MIN (size, thissize), align, 0))
tree max_index;
unsigned HOST_WIDE_INT cnt;
tree index, value, tmp;
- double_int i;
+ offset_int i;
/* This code used to attempt to handle string constants that are not
arrays of single-bytes, but nothing else does, so there's no point in
/* Compute the total number of array elements. */
tmp = TYPE_MIN_VALUE (TYPE_DOMAIN (TREE_TYPE (val)));
- i = tree_to_double_int (max_index) - tree_to_double_int (tmp);
- i += double_int_one;
+ i = wi::to_offset (max_index) - wi::to_offset (tmp) + 1;
/* Multiply by the array element unit size to find number of bytes. */
- i *= tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (val))));
+ i *= wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (val))));
- gcc_assert (i.fits_uhwi ());
- return i.low;
+ gcc_assert (wi::fits_uhwi_p (i));
+ return i.to_uhwi ();
}
/* Other datastructures + helpers for output_constructor. */
= int_size_in_bytes (TREE_TYPE (local->type));
HOST_WIDE_INT lo_index
- = tree_low_cst (TREE_OPERAND (local->index, 0), 0);
+ = tree_to_shwi (TREE_OPERAND (local->index, 0));
HOST_WIDE_INT hi_index
- = tree_low_cst (TREE_OPERAND (local->index, 1), 0);
+ = tree_to_shwi (TREE_OPERAND (local->index, 1));
HOST_WIDE_INT index;
unsigned int align2
sign-extend the result because Ada has negative DECL_FIELD_OFFSETs
but we are using an unsigned sizetype. */
unsigned prec = TYPE_PRECISION (sizetype);
- double_int idx = tree_to_double_int (local->index)
- - tree_to_double_int (local->min_index);
- idx = idx.sext (prec);
- fieldpos = (tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (local->val)), 1)
- * idx.low);
+ offset_int idx = wi::sext (wi::to_offset (local->index)
+ - wi::to_offset (local->min_index), prec);
+ fieldpos = (idx * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (local->val))))
+ .to_shwi ();
}
else if (local->field != NULL_TREE)
fieldpos = int_byte_position (local->field);
gcc_assert (!fieldsize || !DECL_CHAIN (local->field));
}
else
- fieldsize = tree_low_cst (DECL_SIZE_UNIT (local->field), 1);
+ fieldsize = tree_to_uhwi (DECL_SIZE_UNIT (local->field));
}
else
fieldsize = int_size_in_bytes (TREE_TYPE (local->type));
/* Bit size of this element. */
HOST_WIDE_INT ebitsize
= (local->field
- ? tree_low_cst (DECL_SIZE (local->field), 1)
- : tree_low_cst (TYPE_SIZE (TREE_TYPE (local->type)), 1));
+ ? tree_to_uhwi (DECL_SIZE (local->field))
+ : tree_to_uhwi (TYPE_SIZE (TREE_TYPE (local->type))));
/* Relative index of this element if this is an array component. */
HOST_WIDE_INT relative_index
= (!local->field
? (local->index
- ? (tree_low_cst (local->index, 0)
- - tree_low_cst (local->min_index, 0))
+ ? (tree_to_shwi (local->index)
+ - tree_to_shwi (local->min_index))
: local->last_relative_index + 1)
: 0);
the word boundary in the INTEGER_CST. We can
- only select bits from the LOW or HIGH part
- not from both. */
+ only select bits from one element, not from
+ multiple elements at once. */
- if (shift < HOST_BITS_PER_WIDE_INT
- && shift + this_time > HOST_BITS_PER_WIDE_INT)
- {
- this_time = shift + this_time - HOST_BITS_PER_WIDE_INT;
- shift = HOST_BITS_PER_WIDE_INT;
- }
+ if ((shift / HOST_BITS_PER_WIDE_INT)
+ != ((shift + this_time) / HOST_BITS_PER_WIDE_INT))
+ this_time = (shift + this_time) & (HOST_BITS_PER_WIDE_INT - 1);
/* Now get the bits from the appropriate constant word. */
- if (shift < HOST_BITS_PER_WIDE_INT)
- value = TREE_INT_CST_LOW (local->val);
- else
- {
- gcc_assert (shift < HOST_BITS_PER_DOUBLE_INT);
- value = TREE_INT_CST_HIGH (local->val);
- shift -= HOST_BITS_PER_WIDE_INT;
- }
+ value = TREE_INT_CST_ELT (local->val, shift / HOST_BITS_PER_WIDE_INT);
+ shift = shift & (HOST_BITS_PER_WIDE_INT - 1);
/* Get the result. This works only when:
1 <= this_time <= HOST_BITS_PER_WIDE_INT. */
the word boundary in the INTEGER_CST. We can
- only select bits from the LOW or HIGH part
- not from both. */
+ only select bits from one element, not from
+ multiple elements at once. */
- if (shift < HOST_BITS_PER_WIDE_INT
- && shift + this_time > HOST_BITS_PER_WIDE_INT)
+ if ((shift / HOST_BITS_PER_WIDE_INT)
+ != ((shift + this_time) / HOST_BITS_PER_WIDE_INT))
this_time = (HOST_BITS_PER_WIDE_INT - shift);
/* Now get the bits from the appropriate constant word. */
- if (shift < HOST_BITS_PER_WIDE_INT)
- value = TREE_INT_CST_LOW (local->val);
- else
- {
- gcc_assert (shift < HOST_BITS_PER_DOUBLE_INT);
- value = TREE_INT_CST_HIGH (local->val);
- shift -= HOST_BITS_PER_WIDE_INT;
- }
+ value = TREE_INT_CST_ELT (local->val, shift / HOST_BITS_PER_WIDE_INT);
+ shift = shift & (HOST_BITS_PER_WIDE_INT - 1);
/* Get the result. This works only when:
1 <= this_time <= HOST_BITS_PER_WIDE_INT. */
static tree
find_decl (tree target)
{
- symtab_node node = symtab_node_for_asm (target);
+ symtab_node *node = symtab_node_for_asm (target);
if (node)
- return node->symbol.decl;
+ return node->decl;
return NULL_TREE;
}
/* Allow aliases to aliases. */
if (TREE_CODE (decl) == FUNCTION_DECL)
- cgraph_get_create_node (decl)->symbol.alias = true;
+ cgraph_get_create_node (decl)->alias = true;
else
- varpool_node_for_decl (decl)->symbol.alias = true;
+ varpool_node_for_decl (decl)->alias = true;
/* If the target has already been emitted, we don't have to queue the
alias. This saves a tad of memory. */
TM_GETTMCLONE. If neither of these are true, we didn't generate
a clone, and we didn't call it indirectly... no sense keeping it
in the clone table. */
- if (!dst_n || !dst_n->symbol.definition)
+ if (!dst_n || !dst_n->definition)
continue;
/* This covers the case where we have optimized the original
function away, and only access the transactional clone. */
- if (!src_n || !src_n->symbol.definition)
+ if (!src_n || !src_n->definition)
continue;
if (!switched)
return SECCAT_TEXT;
else if (TREE_CODE (decl) == STRING_CST)
{
- if (flag_mudflap
- || ((flag_sanitize & SANITIZE_ADDRESS)
- && asan_protect_global (CONST_CAST_TREE (decl))))
+ if ((flag_sanitize & SANITIZE_ADDRESS)
+ && asan_protect_global (CONST_CAST_TREE (decl)))
/* or !flag_merge_constants */
return SECCAT_RODATA;
else
}
else if (reloc & targetm.asm_out.reloc_rw_mask ())
ret = reloc == 1 ? SECCAT_DATA_REL_RO_LOCAL : SECCAT_DATA_REL_RO;
- else if (reloc || flag_merge_constants < 2 || flag_mudflap
+ else if (reloc || flag_merge_constants < 2
|| ((flag_sanitize & SANITIZE_ADDRESS)
&& asan_protect_global (CONST_CAST_TREE (decl))))
/* C and C++ don't allow different variables to share the same
&& (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
{
struct varpool_node *vnode = varpool_get_node (exp);
- if (vnode && resolution_local_p (vnode->symbol.resolution))
+ if (vnode && resolution_local_p (vnode->resolution))
resolved_locally = true;
if (vnode
- && resolution_to_local_definition_p (vnode->symbol.resolution))
+ && resolution_to_local_definition_p (vnode->resolution))
resolved_to_local_def = true;
}
else if (TREE_CODE (exp) == FUNCTION_DECL && TREE_PUBLIC (exp))
{
struct cgraph_node *node = cgraph_get_node (exp);
if (node
- && resolution_local_p (node->symbol.resolution))
+ && resolution_local_p (node->resolution))
resolved_locally = true;
if (node
- && resolution_to_local_definition_p (node->symbol.resolution))
+ && resolution_to_local_definition_p (node->resolution))
resolved_to_local_def = true;
}
{
struct varpool_node *vnode = varpool_get_node (decl);
if (vnode
- && vnode->symbol.resolution != LDPR_UNKNOWN)
- return resolution_to_local_definition_p (vnode->symbol.resolution);
+ && vnode->resolution != LDPR_UNKNOWN)
+ return resolution_to_local_definition_p (vnode->resolution);
}
else if (TREE_CODE (decl) == FUNCTION_DECL)
{
struct cgraph_node *node = cgraph_get_node (decl);
if (node
- && node->symbol.resolution != LDPR_UNKNOWN)
- return resolution_to_local_definition_p (node->symbol.resolution);
+ && node->resolution != LDPR_UNKNOWN)
+ return resolution_to_local_definition_p (node->resolution);
}
/* Otherwise we have to assume the worst for DECL_WEAK (hidden weaks
binds locally but still can be overwritten), DECL_COMMON (can be merged
{
decl = SYMBOL_REF_DECL (symbol);
alignment = get_variable_align (decl);
- size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
+ size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
if ((flag_sanitize & SANITIZE_ADDRESS)
&& asan_protect_global (decl))
{
HOST_WIDE_INT size;
decl = SYMBOL_REF_DECL (symbol);
assemble_variable_contents (decl, XSTR (symbol, 0), false);
- size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
+ size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
offset += size;
if ((flag_sanitize & SANITIZE_ADDRESS)
&& asan_protect_global (decl))