/* Output variables, constants and external declarations, for GNU compiler.
- Copyright (C) 1987-2016 Free Software Foundation, Inc.
+ Copyright (C) 1987-2019 Free Software Foundation, Inc.
This file is part of GCC.
#include "langhooks.h"
#include "debug.h"
#include "common/common-target.h"
+#include "stringpool.h"
+#include "attribs.h"
#include "asan.h"
#include "rtl-iter.h"
+#include "file-prefix-map.h" /* remap_debug_filename() */
#ifdef XCOFF_DEBUGGING_INFO
#include "xcoffout.h" /* Needed for external data declarations. */
static void output_constant_def_contents (rtx);
static void output_addressed_constants (tree);
static unsigned HOST_WIDE_INT output_constant (tree, unsigned HOST_WIDE_INT,
- unsigned int, bool);
+ unsigned int, bool, bool);
static void globalize_decl (tree);
static bool decl_readonly_section_1 (enum section_category);
#ifdef BSS_SECTION_ASM_OP
{
if (sect->common.flags & SECTION_NAMED)
return htab_hash_string (sect->named.name);
- return sect->common.flags;
+ return sect->common.flags & ~SECTION_DECLARED;
}
/* Helper routines for maintaining object_block_htab. */
else
{
sect = *slot;
+ /* It is fine if one of the sections has SECTION_NOTYPE as long as
+ the other has none of the contrary flags (see the logic at the end
+ of default_section_type_flags, below). */
+ if (((sect->common.flags ^ flags) & SECTION_NOTYPE)
+ && !((sect->common.flags | flags)
+ & (SECTION_CODE | SECTION_BSS | SECTION_TLS | SECTION_ENTSIZE
+ | (HAVE_COMDAT_GROUP ? SECTION_LINKONCE : 0))))
+ {
+ sect->common.flags |= SECTION_NOTYPE;
+ flags |= SECTION_NOTYPE;
+ }
if ((sect->common.flags & ~SECTION_DECLARED) != flags
&& ((sect->common.flags | flags) & SECTION_OVERRIDE) == 0)
{
&& decl != sect->named.decl)
{
if (decl != NULL && DECL_P (decl))
- error ("%+D causes a section type conflict with %D",
+ error ("%+qD causes a section type conflict with %qD",
decl, sect->named.decl);
else
- error ("section type conflict with %D", sect->named.decl);
+ error ("section type conflict with %qD", sect->named.decl);
inform (DECL_SOURCE_LOCATION (sect->named.decl),
"%qD was declared here", sect->named.decl);
}
else if (decl != NULL && DECL_P (decl))
- error ("%+D causes a section type conflict", decl);
+ error ("%+qD causes a section type conflict", decl);
else
error ("section type conflict");
/* Make sure we don't error about one section multiple times. */
/* Return the object_block structure for section SECT. Create a new
structure if we haven't created one already. Return null if SECT
- itself is null. */
+ itself is null. Also return null for mergeable sections since
+ section anchors can't be used in mergeable sections anyway,
+ because the linker might move objects around, and using the
+ object blocks infrastructure in that case is both a waste and a
+ maintenance burden. */
static struct object_block *
get_block_for_section (section *sect)
if (sect == NULL)
return NULL;
+ if (sect->common.flags & SECTION_MERGE)
+ return NULL;
+
object_block **slot
= object_block_htab->find_slot_with_hash (sect, hash_section (sect),
INSERT);
return sect == function_section_1 (current_function_decl, true);
}
+/* Switch to the other function partition (if inside of hot section
+   into cold section, otherwise into the hot section). */
+
+void
+switch_to_other_text_partition (void)
+{
+  /* Flip the flag that tracks which partition we are in, then emit the
+     section directive for whatever current_function_section () now
+     resolves to.  */
+  in_cold_section_p = !in_cold_section_p;
+  switch_to_section (current_function_section ());
+}
+
/* Return the read-only data section associated with function DECL. */
section *
&& TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
&& align <= 256
&& (len = int_size_in_bytes (TREE_TYPE (decl))) > 0
- && TREE_STRING_LENGTH (decl) >= len)
+ && TREE_STRING_LENGTH (decl) == len)
{
- machine_mode mode;
+ scalar_int_mode mode;
unsigned int modesize;
const char *str;
HOST_WIDE_INT i;
const char *prefix = function_mergeable_rodata_prefix ();
char *name = (char *) alloca (strlen (prefix) + 30);
- mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (decl)));
+ mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (TREE_TYPE (decl)));
modesize = GET_MODE_BITSIZE (mode);
if (modesize >= 8 && modesize <= 256
&& (modesize & (modesize - 1)) == 0)
if (align < modesize)
align = modesize;
+ if (!HAVE_LD_ALIGNED_SHF_MERGE && align > 8)
+ return readonly_data_section;
+
str = TREE_STRING_POINTER (decl);
unit = GET_MODE_SIZE (mode);
if (j == unit)
break;
}
- if (i == len - unit)
+ if (i == len - unit || (unit == 1 && i == len))
{
sprintf (name, "%s.str%d.%d", prefix,
modesize / 8, (int) (align / 8));
unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED,
unsigned int flags ATTRIBUTE_UNUSED)
{
- unsigned int modesize = GET_MODE_BITSIZE (mode);
-
if (HAVE_GAS_SHF_MERGE && flag_merge_constants
&& mode != VOIDmode
&& mode != BLKmode
- && modesize <= align
+ && known_le (GET_MODE_BITSIZE (mode), align)
&& align >= 8
&& align <= 256
- && (align & (align - 1)) == 0)
+ && (align & (align - 1)) == 0
+ && (HAVE_LD_ALIGNED_SHF_MERGE ? 1 : align == 8))
{
const char *prefix = function_mergeable_rodata_prefix ();
char *name = (char *) alloca (strlen (prefix) + 30);
/* Return true if DECL's initializer is suitable for a BSS section. */
bool
-bss_initializer_p (const_tree decl)
+bss_initializer_p (const_tree decl, bool named)
{
+  /* NAMED is true when DECL is destined for a user-named section; in
+     that case a readonly zero initializer is still allowed here (the
+     caller diagnoses non-zero initializers in BSS-style named sections
+     separately).  */
- return (DECL_INITIAL (decl) == NULL
- /* In LTO we have no errors in program; error_mark_node is used
- to mark offlined constructors. */
- || (DECL_INITIAL (decl) == error_mark_node
- && !in_lto_p)
- || (flag_zero_initialized_in_bss
- /* Leave constant zeroes in .rodata so they
- can be shared. */
- && !TREE_READONLY (decl)
- && initializer_zerop (DECL_INITIAL (decl))));
+ /* Do not put non-common constants into the .bss section, they belong in
+ a readonly section, except when NAMED is true. */
+ return ((!TREE_READONLY (decl) || DECL_COMMON (decl) || named)
+ && (DECL_INITIAL (decl) == NULL
+ /* In LTO we have no errors in program; error_mark_node is used
+ to mark offlined constructors. */
+ || (DECL_INITIAL (decl) == error_mark_node
+ && !in_lto_p)
+ || (flag_zero_initialized_in_bss
+ && initializer_zerop (DECL_INITIAL (decl)))));
}
/* Compute the alignment of variable specified by DECL.
&& (in_lto_p || DECL_INITIAL (decl) != error_mark_node))
{
unsigned int const_align
- = CONSTANT_ALIGNMENT (DECL_INITIAL (decl), align);
+ = targetm.constant_alignment (DECL_INITIAL (decl), align);
/* Don't increase alignment too much for TLS variables - TLS
space is too precious. */
if (! DECL_THREAD_LOCAL_P (decl) || const_align <= BITS_PER_WORD)
to mark offlined constructors. */
&& (in_lto_p || DECL_INITIAL (decl) != error_mark_node))
{
- unsigned int const_align = CONSTANT_ALIGNMENT (DECL_INITIAL (decl),
- align);
+ unsigned int const_align
+ = targetm.constant_alignment (DECL_INITIAL (decl), align);
/* Don't increase alignment too much for TLS variables - TLS space
is too precious. */
if (! DECL_THREAD_LOCAL_P (decl) || const_align <= BITS_PER_WORD)
{
section *sect = get_named_section (decl, NULL, reloc);
- if ((sect->common.flags & SECTION_BSS) && !bss_initializer_p (decl))
+ if ((sect->common.flags & SECTION_BSS)
+ && !bss_initializer_p (decl, true))
{
error_at (DECL_SOURCE_LOCATION (decl),
"only zero initializers are allowed in section %qs",
if (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
return false;
- /* Detect decls created by dw2_force_const_mem. Such decls are
- special because DECL_INITIAL doesn't specify the decl's true value.
- dw2_output_indirect_constants will instead call assemble_variable
- with dont_output_data set to 1 and then print the contents itself. */
+ /* DECL_INITIAL (decl) set to decl is a hack used for some decls that
+ are never used from code directly and we never want object block handling
+ for those. */
if (DECL_INITIAL (decl) == decl)
return false;
}
id = DECL_ASSEMBLER_NAME (decl);
- if (TREE_CODE (decl) == FUNCTION_DECL
- && cgraph_node::get (decl)
- && cgraph_node::get (decl)->instrumentation_clone)
- ultimate_transparent_alias_target (&id);
name = IDENTIFIER_POINTER (id);
if (name[0] != '*' && TREE_CODE (decl) != FUNCTION_DECL
else if (!in_hard_reg_set_p (operand_reg_set, mode, reg_number))
error ("the register specified for %q+D is not general enough"
" to be used as a register variable", decl);
- else if (!HARD_REGNO_MODE_OK (reg_number, mode))
+ else if (!targetm.hard_regno_mode_ok (reg_number, mode))
error ("register specified for %q+D isn%'t suitable for data type",
decl);
/* Now handle properly declared static register variables. */
name = IDENTIFIER_POINTER (DECL_NAME (decl));
ASM_DECLARE_REGISTER_GLOBAL (asm_out_file, decl, reg_number, name);
#endif
- nregs = hard_regno_nregs[reg_number][mode];
+ nregs = hard_regno_nregs (reg_number, mode);
while (nregs > 0)
globalize_reg (decl, reg_number + --nregs);
}
{
first_function_block_is_cold = false;
- if (flag_reorder_blocks_and_partition)
- /* We will decide in assemble_start_function. */
- return;
-
if (DECL_SECTION_NAME (decl))
{
struct cgraph_node *node = cgraph_node::get (current_function_decl);
char tmp_label[100];
bool hot_label_written = false;
- if (flag_reorder_blocks_and_partition)
+ if (crtl->has_bb_partition)
{
ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LHOTB", const_labelno);
crtl->subsections.hot_section_label = ggc_strdup (tmp_label);
has both hot and cold sections, because we don't want to re-set
the alignment when the section switch happens mid-function. */
- if (flag_reorder_blocks_and_partition)
+ if (crtl->has_bb_partition)
{
first_function_block_is_cold = false;
/* Switch to the correct text section for the start of the function. */
switch_to_section (function_section (decl));
- if (flag_reorder_blocks_and_partition
- && !hot_label_written)
+ if (crtl->has_bb_partition && !hot_label_written)
ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_label);
/* Tell assembler to move to target machine's alignment for functions. */
Note that we still need to align to DECL_ALIGN, as above,
because ASM_OUTPUT_MAX_SKIP_ALIGN might not do any alignment at all. */
if (! DECL_USER_ALIGN (decl)
- && align_functions_log > align
+ && align_functions.levels[0].log > align
&& optimize_function_for_speed_p (cfun))
{
#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
- int align_log = align_functions_log;
+ int align_log = align_functions.levels[0].log;
#endif
- int max_skip = align_functions - 1;
+ int max_skip = align_functions.levels[0].maxskip;
if (flag_limit_function_alignment && crtl->max_insn_address > 0
&& max_skip >= crtl->max_insn_address)
max_skip = crtl->max_insn_address - 1;
#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
ASM_OUTPUT_MAX_SKIP_ALIGN (asm_out_file, align_log, max_skip);
+ if (max_skip == align_functions.levels[0].maxskip)
+ ASM_OUTPUT_MAX_SKIP_ALIGN (asm_out_file,
+ align_functions.levels[1].log,
+ align_functions.levels[1].maxskip);
#else
- ASM_OUTPUT_ALIGN (asm_out_file, align_functions_log);
+ ASM_OUTPUT_ALIGN (asm_out_file, align_functions.levels[0].log);
#endif
}
/* Make function name accessible from other files, if appropriate. */
- if (TREE_PUBLIC (decl)
- || (cgraph_node::get (decl)->instrumentation_clone
- && cgraph_node::get (decl)->instrumented_version
- && TREE_PUBLIC (cgraph_node::get (decl)->instrumented_version->decl)))
+ if (TREE_PUBLIC (decl))
{
notice_global_symbol (decl);
if (DECL_PRESERVE_P (decl))
targetm.asm_out.mark_decl_preserved (fnname);
+ unsigned HOST_WIDE_INT patch_area_size = function_entry_patch_area_size;
+ unsigned HOST_WIDE_INT patch_area_entry = function_entry_patch_area_start;
+
+ tree patchable_function_entry_attr
+ = lookup_attribute ("patchable_function_entry", DECL_ATTRIBUTES (decl));
+ if (patchable_function_entry_attr)
+ {
+ tree pp_val = TREE_VALUE (patchable_function_entry_attr);
+ tree patchable_function_entry_value1 = TREE_VALUE (pp_val);
+
+ patch_area_size = tree_to_uhwi (patchable_function_entry_value1);
+ patch_area_entry = 0;
+ if (TREE_CHAIN (pp_val) != NULL_TREE)
+ {
+ tree patchable_function_entry_value2
+ = TREE_VALUE (TREE_CHAIN (pp_val));
+ patch_area_entry = tree_to_uhwi (patchable_function_entry_value2);
+ }
+ }
+
+ if (patch_area_entry > patch_area_size)
+ {
+ if (patch_area_size > 0)
+ warning (OPT_Wattributes, "patchable function entry > size");
+ patch_area_entry = 0;
+ }
+
+ /* Emit the patching area before the entry label, if any. */
+ if (patch_area_entry > 0)
+ targetm.asm_out.print_patchable_function_entry (asm_out_file,
+ patch_area_entry, true);
+
/* Do any machine/system dependent processing of the function name. */
#ifdef ASM_DECLARE_FUNCTION_NAME
ASM_DECLARE_FUNCTION_NAME (asm_out_file, fnname, current_function_decl);
ASM_OUTPUT_FUNCTION_LABEL (asm_out_file, fnname, current_function_decl);
#endif /* ASM_DECLARE_FUNCTION_NAME */
+ /* And the area after the label. Record it if we haven't done so yet. */
+ if (patch_area_size > patch_area_entry)
+ targetm.asm_out.print_patchable_function_entry (asm_out_file,
+ patch_area_size
+ - patch_area_entry,
+ patch_area_entry == 0);
+
if (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (decl)))
saw_no_split_stack = true;
}
{
#ifdef ASM_DECLARE_FUNCTION_SIZE
/* We could have switched section in the middle of the function. */
- if (flag_reorder_blocks_and_partition)
+ if (crtl->has_bb_partition)
switch_to_section (function_section (decl));
ASM_DECLARE_FUNCTION_SIZE (asm_out_file, fnname, decl);
#endif
}
/* Output labels for end of hot/cold text sections (to be used by
debug info.) */
- if (flag_reorder_blocks_and_partition)
+ if (crtl->has_bb_partition)
{
section *save_text_section;
/* Assemble an alignment pseudo op for an ALIGN-bit boundary. */
void
-assemble_align (int align)
+assemble_align (unsigned int align)
{
if (align > BITS_PER_UNIT)
{
static void
assemble_variable_contents (tree decl, const char *name,
- bool dont_output_data)
+ bool dont_output_data, bool merge_strings)
{
/* Do any machine/system dependent processing of the object. */
#ifdef ASM_DECLARE_OBJECT_NAME
output_constant (DECL_INITIAL (decl),
tree_to_uhwi (DECL_SIZE_UNIT (decl)),
get_variable_align (decl),
- false);
+ false, merge_strings);
else
/* Leave space for it. */
assemble_zeros (tree_to_uhwi (DECL_SIZE_UNIT (decl)));
switch_to_section (sect);
if (align > BITS_PER_UNIT)
ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
- assemble_variable_contents (decl, name, dont_output_data);
+ assemble_variable_contents (decl, name, dont_output_data,
+ (sect->common.flags & SECTION_MERGE)
+ && (sect->common.flags & SECTION_STRINGS));
if (asan_protected)
{
unsigned HOST_WIDE_INT int size
static bool
incorporeal_function_p (tree decl)
{
- if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
+ if (TREE_CODE (decl) == FUNCTION_DECL && fndecl_built_in_p (decl))
{
const char *name;
if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
- && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
- || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
+ && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
return true;
name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
gcc_assert (asm_out_file);
/* In a perfect world, the following condition would be true.
- Sadly, the Java and Go front ends emit assembly *from the front end*,
+ Sadly, the Go front end emits assembly *from the front end*,
bypassing the call graph. See PR52739. Fix before GCC 4.8. */
#if 0
/* This function should only be called if we are expanding, or have
return targetm.asm_out.byte_op;
case 2:
return ops->hi;
+ case 3:
+ return ops->psi;
case 4:
return ops->si;
+ case 5:
+ case 6:
+ case 7:
+ return ops->pdi;
case 8:
return ops->di;
+ case 9:
+ case 10:
+ case 11:
+ case 12:
+ case 13:
+ case 14:
+ case 15:
+ return ops->pti;
case 16:
return ops->ti;
default:
else
mclass = MODE_INT;
- omode = mode_for_size (subsize * BITS_PER_UNIT, mclass, 0);
- imode = mode_for_size (size * BITS_PER_UNIT, mclass, 0);
+ omode = mode_for_size (subsize * BITS_PER_UNIT, mclass, 0).require ();
+ imode = mode_for_size (size * BITS_PER_UNIT, mclass, 0).require ();
for (i = 0; i < size; i += subsize)
{
in reverse storage order. */
void
-assemble_real (REAL_VALUE_TYPE d, machine_mode mode, unsigned int align,
+assemble_real (REAL_VALUE_TYPE d, scalar_float_mode mode, unsigned int align,
bool reverse)
{
long data[4] = {0, 0, 0, 0};
struct addr_const {
rtx base;
- HOST_WIDE_INT offset;
+ poly_int64 offset;
};
static void
decode_addr_const (tree exp, struct addr_const *value)
{
tree target = TREE_OPERAND (exp, 0);
- int offset = 0;
+ poly_int64 offset = 0;
rtx x;
while (1)
{
+ poly_int64 bytepos;
if (TREE_CODE (target) == COMPONENT_REF
- && tree_fits_shwi_p (byte_position (TREE_OPERAND (target, 1))))
+ && poly_int_tree_p (byte_position (TREE_OPERAND (target, 1)),
+ &bytepos))
{
- offset += int_byte_position (TREE_OPERAND (target, 1));
+ offset += bytepos;
target = TREE_OPERAND (target, 0);
}
else if (TREE_CODE (target) == ARRAY_REF
|| TREE_CODE (target) == ARRAY_RANGE_REF)
{
- offset += (tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (target)))
- * tree_to_shwi (TREE_OPERAND (target, 1)));
+ /* Truncate big offset. */
+ offset
+ += (TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (target)))
+ * wi::to_poly_widest (TREE_OPERAND (target, 1)).force_shwi ());
target = TREE_OPERAND (target, 0);
}
else if (TREE_CODE (target) == MEM_REF
&& TREE_CODE (TREE_OPERAND (target, 0)) == ADDR_EXPR)
{
- offset += mem_ref_offset (target).to_short_addr ();
+ offset += mem_ref_offset (target).force_shwi ();
target = TREE_OPERAND (TREE_OPERAND (target, 0), 0);
}
else if (TREE_CODE (target) == INDIRECT_REF
case COMPLEX_CST:
case CONSTRUCTOR:
case INTEGER_CST:
- x = output_constant_def (target, 1);
+ x = lookup_constant_def (target);
+ /* Should have been added by output_addressed_constants. */
+ gcc_assert (x);
+ break;
+
+ case INDIRECT_REF:
+ /* This deals with absolute addresses. */
+ offset += tree_to_shwi (TREE_OPERAND (target, 0));
+ x = gen_rtx_MEM (QImode,
+ gen_rtx_SYMBOL_REF (Pmode, "origin of addresses"));
+ break;
+
+ case COMPOUND_LITERAL_EXPR:
+ gcc_assert (COMPOUND_LITERAL_EXPR_DECL (target));
+ x = DECL_RTL (COMPOUND_LITERAL_EXPR_DECL (target));
break;
default:
case VECTOR_CST:
{
- unsigned i;
-
- hi = 7 + VECTOR_CST_NELTS (exp);
-
- for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
- hi = hi * 563 + const_hash_1 (VECTOR_CST_ELT (exp, i));
-
+ hi = 7 + VECTOR_CST_NPATTERNS (exp);
+ hi = hi * 563 + VECTOR_CST_NELTS_PER_PATTERN (exp);
+ unsigned int count = vector_cst_encoded_nelts (exp);
+ for (unsigned int i = 0; i < count; ++i)
+ hi = hi * 563 + const_hash_1 (VECTOR_CST_ENCODED_ELT (exp, i));
return hi;
}
}
case ADDR_EXPR:
+ if (CONSTANT_CLASS_P (TREE_OPERAND (exp, 0)))
+ return const_hash_1 (TREE_OPERAND (exp, 0));
+
+ /* Fallthru. */
case FDESC_EXPR:
{
struct addr_const value;
case SYMBOL_REF:
/* Don't hash the address of the SYMBOL_REF;
only use the offset and the symbol name. */
- hi = value.offset;
+ hi = value.offset.coeffs[0];
p = XSTR (value.base, 0);
for (i = 0; p[i] != 0; i++)
hi = ((hi * 613) + (unsigned) (p[i]));
break;
case LABEL_REF:
- hi = (value.offset
+ hi = (value.offset.coeffs[0]
+ CODE_LABEL_NUMBER (label_ref_label (value.base)) * 13);
break;
return tree_int_cst_equal (t1, t2);
case REAL_CST:
- /* Real constants are the same only if the same width of type. */
+ /* Real constants are the same only if the same width of type. In
+ addition to the same width, we need to check whether the modes are the
+ same. There might be two floating point modes that are the same size
+ but have different representations, such as the PowerPC that has 2
+ different 128-bit floating point types (IBM extended double and IEEE
+ 128-bit floating point). */
if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
return 0;
-
+ if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2)))
+ return 0;
return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
case FIXED_CST:
return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
case STRING_CST:
- if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2)))
+ if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2))
+ || int_size_in_bytes (TREE_TYPE (t1))
+ != int_size_in_bytes (TREE_TYPE (t2)))
return 0;
return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
case VECTOR_CST:
{
- unsigned i;
+ if (VECTOR_CST_NPATTERNS (t1)
+ != VECTOR_CST_NPATTERNS (t2))
+ return 0;
- if (VECTOR_CST_NELTS (t1) != VECTOR_CST_NELTS (t2))
+ if (VECTOR_CST_NELTS_PER_PATTERN (t1)
+ != VECTOR_CST_NELTS_PER_PATTERN (t2))
return 0;
- for (i = 0; i < VECTOR_CST_NELTS (t1); ++i)
- if (!compare_constant (VECTOR_CST_ELT (t1, i),
- VECTOR_CST_ELT (t2, i)))
+ unsigned int count = vector_cst_encoded_nelts (t1);
+ for (unsigned int i = 0; i < count; ++i)
+ if (!compare_constant (VECTOR_CST_ENCODED_ELT (t1, i),
+ VECTOR_CST_ENCODED_ELT (t2, i)))
return 0;
return 1;
decode_addr_const (t1, &value1);
decode_addr_const (t2, &value2);
- if (value1.offset != value2.offset)
+ if (maybe_ne (value1.offset, value2.offset))
return 0;
code = GET_CODE (value1.base);
HOST_WIDE_INT size;
size = int_size_in_bytes (TREE_TYPE (exp));
- if (TREE_CODE (exp) == STRING_CST)
- size = MAX (TREE_STRING_LENGTH (exp), size);
+ gcc_checking_assert (size >= 0);
+ gcc_checking_assert (TREE_CODE (exp) != STRING_CST
+ || size >= TREE_STRING_LENGTH (exp));
return size;
}
Instead we set the flag that will be recognized in make_decl_rtl. */
DECL_IN_CONSTANT_POOL (decl) = 1;
DECL_INITIAL (decl) = desc->value;
- /* ??? CONSTANT_ALIGNMENT hasn't been updated for vector types on most
- architectures so use DATA_ALIGNMENT as well, except for strings. */
+ /* ??? targetm.constant_alignment hasn't been updated for vector types on
+ most architectures so use DATA_ALIGNMENT as well, except for strings. */
if (TREE_CODE (exp) == STRING_CST)
- {
- SET_DECL_ALIGN (decl, CONSTANT_ALIGNMENT (exp, DECL_ALIGN (decl)));
- }
+ SET_DECL_ALIGN (decl, targetm.constant_alignment (exp, DECL_ALIGN (decl)));
else
align_variable (decl, 0);
return desc;
}
+/* Subroutine of output_constant_def and tree_output_constant_def:
+   Add a constant to the hash table that tracks which constants
+   already have labels.  EXP is the constant tree; return its
+   descriptor (with hash and RTL label), creating one if EXP has
+   not been seen before.  */
+
+static constant_descriptor_tree *
+add_constant_to_table (tree exp)
+{
+ /* The hash table methods may call output_constant_def for addressed
+ constants, so handle them first. */
+ output_addressed_constants (exp);
+
+ /* Sanity check to catch recursive insertion.  The guard is static
+    because this table is global; re-entering here would corrupt the
+    in-progress hash-table insertion.  */
+ static bool inserting;
+ gcc_assert (!inserting);
+ inserting = true;
+
+ /* Look up EXP in the table of constant descriptors. If we didn't
+ find it, create a new one. */
+ struct constant_descriptor_tree key;
+ key.value = exp;
+ key.hash = const_hash_1 (exp);
+ constant_descriptor_tree **loc
+ = const_desc_htab->find_slot_with_hash (&key, key.hash, INSERT);
+
+ inserting = false;
+
+ struct constant_descriptor_tree *desc = *loc;
+ if (!desc)
+ {
+ desc = build_constant_desc (exp);
+ desc->hash = key.hash;
+ *loc = desc;
+ }
+
+ return desc;
+}
+
/* Return an rtx representing a reference to constant data in memory
for the constant expression EXP.
rtx
output_constant_def (tree exp, int defer)
{
- struct constant_descriptor_tree *desc;
- struct constant_descriptor_tree key;
-
- /* Look up EXP in the table of constant descriptors. If we didn't find
- it, create a new one. */
- key.value = exp;
- key.hash = const_hash_1 (exp);
- constant_descriptor_tree **loc
- = const_desc_htab->find_slot_with_hash (&key, key.hash, INSERT);
-
- desc = *loc;
- if (desc == 0)
- {
- desc = build_constant_desc (exp);
- desc->hash = key.hash;
- *loc = desc;
- }
-
+ /* Find or create the descriptor for EXP, then emit its contents now
+    or record it for later output (DEFER nonzero allows deferral —
+    see maybe_output_constant_def_contents).  */
+ struct constant_descriptor_tree *desc = add_constant_to_table (exp);
 maybe_output_constant_def_contents (desc, defer);
 return desc->rtl;
}
constant's alignment in bits. */
static void
-assemble_constant_contents (tree exp, const char *label, unsigned int align)
+assemble_constant_contents (tree exp, const char *label, unsigned int align,
+ bool merge_strings)
{
HOST_WIDE_INT size;
targetm.asm_out.declare_constant_name (asm_out_file, label, exp, size);
/* Output the value of EXP. */
- output_constant (exp, size, align, false);
+ output_constant (exp, size, align, false, merge_strings);
targetm.asm_out.decl_end ();
}
|| (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
? DECL_ALIGN (decl)
: symtab_node::get (decl)->definition_alignment ());
- switch_to_section (get_constant_section (exp, align));
+ section *sect = get_constant_section (exp, align);
+ switch_to_section (sect);
if (align > BITS_PER_UNIT)
ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
- assemble_constant_contents (exp, XSTR (symbol, 0), align);
+ assemble_constant_contents (exp, XSTR (symbol, 0), align,
+ (sect->common.flags & SECTION_MERGE)
+ && (sect->common.flags & SECTION_STRINGS));
if (asan_protected)
{
HOST_WIDE_INT size = get_constant_size (exp);
tree
tree_output_constant_def (tree exp)
{
- struct constant_descriptor_tree *desc, key;
- tree decl;
-
- /* Look up EXP in the table of constant descriptors. If we didn't find
- it, create a new one. */
- key.value = exp;
- key.hash = const_hash_1 (exp);
- constant_descriptor_tree **loc
- = const_desc_htab->find_slot_with_hash (&key, key.hash, INSERT);
-
- desc = *loc;
- if (desc == 0)
- {
- desc = build_constant_desc (exp);
- desc->hash = key.hash;
- *loc = desc;
- }
-
- decl = SYMBOL_REF_DECL (XEXP (desc->rtl, 0));
+ /* Find or create the descriptor for EXP and pull out the VAR_DECL
+    backing its SYMBOL_REF, then hand that decl to the symbol table
+    so it is actually emitted.  */
+ struct constant_descriptor_tree *desc = add_constant_to_table (exp);
+ tree decl = SYMBOL_REF_DECL (XEXP (desc->rtl, 0));
 varpool_node::finalize_decl (decl);
 return decl;
}
rtx constant;
HOST_WIDE_INT offset;
hashval_t hash;
- machine_mode mode;
+ fixed_size_mode mode;
unsigned int align;
int labelno;
int mark;
break;
case CONST_WIDE_INT:
- hwi = GET_MODE_PRECISION (mode);
+ hwi = 0;
{
for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
hwi ^= CONST_WIDE_INT_ELT (x, i);
}
\f
/* Given a constant rtx X, make (or find) a memory constant for its value
- and return a MEM rtx to refer to it in memory. */
+ and return a MEM rtx to refer to it in memory. IN_MODE is the mode
+ of X. */
rtx
-force_const_mem (machine_mode mode, rtx x)
+force_const_mem (machine_mode in_mode, rtx x)
{
struct constant_descriptor_rtx *desc, tmp;
struct rtx_constant_pool *pool;
hashval_t hash;
unsigned int align;
constant_descriptor_rtx **slot;
+ fixed_size_mode mode;
+
+ /* We can't force variable-sized objects to memory. */
+ if (!is_a <fixed_size_mode> (in_mode, &mode))
+ return NULL_RTX;
/* If we're not allowed to drop X into the constant pool, don't. */
if (targetm.cannot_force_const_mem (mode, x))
*slot = desc;
/* Align the location counter as required by EXP's data type. */
- align = GET_MODE_ALIGNMENT (mode == VOIDmode ? word_mode : mode);
-
- tree type = lang_hooks.types.type_for_mode (mode, 0);
- if (type != NULL_TREE)
- align = CONSTANT_ALIGNMENT (make_tree (type, x), align);
+ machine_mode align_mode = (mode == VOIDmode ? word_mode : mode);
+ align = targetm.static_rtx_alignment (align_mode);
pool->offset += (align / BITS_PER_UNIT) - 1;
pool->offset &= ~ ((align / BITS_PER_UNIT) - 1);
/* Construct the MEM. */
desc->mem = def = gen_const_mem (mode, symbol);
- set_mem_attributes (def, lang_hooks.types.type_for_mode (mode, 0), 1);
set_mem_align (def, align);
/* If we're dropping a label to the constant pool, make sure we
/* Similar, return the mode. */
-machine_mode
+fixed_size_mode
get_pool_mode (const_rtx addr)
{
return SYMBOL_REF_CONSTANT (addr)->mode;
in MODE with known alignment ALIGN. */
static void
-output_constant_pool_2 (machine_mode mode, rtx x, unsigned int align)
+output_constant_pool_2 (fixed_size_mode mode, rtx x, unsigned int align)
{
switch (GET_MODE_CLASS (mode))
{
case MODE_DECIMAL_FLOAT:
{
gcc_assert (CONST_DOUBLE_AS_FLOAT_P (x));
- assemble_real (*CONST_DOUBLE_REAL_VALUE (x), mode, align, false);
+ assemble_real (*CONST_DOUBLE_REAL_VALUE (x),
+ as_a <scalar_float_mode> (mode), align, false);
break;
}
case MODE_UFRACT:
case MODE_ACCUM:
case MODE_UACCUM:
- case MODE_POINTER_BOUNDS:
assemble_integer (x, GET_MODE_SIZE (mode), align, 1);
break;
+ case MODE_VECTOR_BOOL:
+ {
+ gcc_assert (GET_CODE (x) == CONST_VECTOR);
+
+ /* Pick the smallest integer mode that contains at least one
+ whole element. Often this is byte_mode and contains more
+ than one element. */
+ unsigned int nelts = GET_MODE_NUNITS (mode);
+ unsigned int elt_bits = GET_MODE_BITSIZE (mode) / nelts;
+ unsigned int int_bits = MAX (elt_bits, BITS_PER_UNIT);
+ scalar_int_mode int_mode = int_mode_for_size (int_bits, 0).require ();
+
+ /* Build the constant up one integer at a time. */
+ unsigned int elts_per_int = int_bits / elt_bits;
+ for (unsigned int i = 0; i < nelts; i += elts_per_int)
+ {
+ unsigned HOST_WIDE_INT value = 0;
+ unsigned int limit = MIN (nelts - i, elts_per_int);
+ for (unsigned int j = 0; j < limit; ++j)
+ if (INTVAL (CONST_VECTOR_ELT (x, i + j)) != 0)
+ value |= 1 << (j * elt_bits);
+ output_constant_pool_2 (int_mode, gen_int_mode (value, int_mode),
+ i != 0 ? MIN (align, int_bits) : align);
+ }
+ break;
+ }
case MODE_VECTOR_FLOAT:
case MODE_VECTOR_INT:
case MODE_VECTOR_FRACT:
case MODE_VECTOR_UACCUM:
{
int i, units;
- machine_mode submode = GET_MODE_INNER (mode);
+ scalar_mode submode = GET_MODE_INNER (mode);
unsigned int subalign = MIN (align, GET_MODE_BITSIZE (submode));
gcc_assert (GET_CODE (x) == CONST_VECTOR);
- units = CONST_VECTOR_NUNITS (x);
+ units = GET_MODE_NUNITS (mode);
for (i = 0; i < units; i++)
{
tree inner = TREE_OPERAND (op0, 0);
if (inner == error_mark_node
|| ! INTEGRAL_MODE_P (TYPE_MODE (TREE_TYPE (inner)))
- || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))
- > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (inner)))))
+ || (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (op0)))
+ > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (inner)))))
break;
op0 = inner;
}
tree inner = TREE_OPERAND (op1, 0);
if (inner == error_mark_node
|| ! INTEGRAL_MODE_P (TYPE_MODE (TREE_TYPE (inner)))
- || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op1)))
- > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (inner)))))
+ || (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (op1)))
+ > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (inner)))))
break;
op1 = inner;
}
return initializer_constant_valid_p_1 (src, endtype, cache);
/* Allow conversions between other integer types only if
- explicit value. */
- if (INTEGRAL_TYPE_P (dest_type) && INTEGRAL_TYPE_P (src_type))
+ explicit value. Don't allow sign-extension to a type larger
+ than word and pointer, there aren't relocations that would
+ allow to sign extend it to a wider type. */
+ if (INTEGRAL_TYPE_P (dest_type)
+ && INTEGRAL_TYPE_P (src_type)
+ && (TYPE_UNSIGNED (src_type)
+ || TYPE_PRECISION (dest_type) <= TYPE_PRECISION (src_type)
+ || TYPE_PRECISION (dest_type) <= BITS_PER_WORD
+ || TYPE_PRECISION (dest_type) <= POINTER_SIZE))
{
tree inner = initializer_constant_valid_p_1 (src, endtype, cache);
if (inner == null_pointer_node)
}
return ret;
+ case POINTER_DIFF_EXPR:
case MINUS_EXPR:
if (TREE_CODE (endtype) == REAL_TYPE)
return NULL_TREE;
return false;
}
+/* Check if a STRING_CST fits into the field.
+   Tolerate only the case when the NUL termination
+   does not fit into the field.
+   STRING is the STRING_CST; SIZE is the field size in bytes.
+   Return true if the literal is acceptable for the field.  */
+
+static bool
+check_string_literal (tree string, unsigned HOST_WIDE_INT size)
+{
+ tree type = TREE_TYPE (string);
+ tree eltype = TREE_TYPE (type);
+ unsigned HOST_WIDE_INT elts = tree_to_uhwi (TYPE_SIZE_UNIT (eltype));
+ unsigned HOST_WIDE_INT mem_size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
+ int len = TREE_STRING_LENGTH (string);
+
+ /* Element size must be 1, 2 or 4 bytes, and the string length a whole
+    number of elements.  */
+ if (elts != 1 && elts != 2 && elts != 4)
+ return false;
+ if (len < 0 || len % elts != 0)
+ return false;
+ /* The string (possibly minus its NUL) must fit in the field...  */
+ if (size < (unsigned)len)
+ return false;
+ /* ...and the field size must match the string type's memory size.  */
+ if (mem_size != size)
+ return false;
+ return true;
+}
+
/* output_constructor outer state of relevance in recursive calls, typically
for nested aggregate bitfields. */
static unsigned HOST_WIDE_INT
output_constant (tree exp, unsigned HOST_WIDE_INT size, unsigned int align,
- bool reverse)
+ bool reverse, bool merge_strings)
{
enum tree_code code;
unsigned HOST_WIDE_INT thissize;
if (TREE_CODE (exp) == NOP_EXPR
&& POINTER_TYPE_P (TREE_TYPE (exp))
&& targetm.addr_space.valid_pointer_mode
- (TYPE_MODE (TREE_TYPE (exp)),
+ (SCALAR_INT_TYPE_MODE (TREE_TYPE (exp)),
TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)))))
{
tree saved_type = TREE_TYPE (exp);
while (TREE_CODE (exp) == NOP_EXPR
&& POINTER_TYPE_P (TREE_TYPE (exp))
&& targetm.addr_space.valid_pointer_mode
- (TYPE_MODE (TREE_TYPE (exp)),
+ (SCALAR_INT_TYPE_MODE (TREE_TYPE (exp)),
TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)))))
exp = TREE_OPERAND (exp, 0);
case REFERENCE_TYPE:
case OFFSET_TYPE:
case FIXED_POINT_TYPE:
- case POINTER_BOUNDS_TYPE:
case NULLPTR_TYPE:
cst = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
if (reverse)
if (TREE_CODE (exp) != REAL_CST)
error ("initializer for floating value is not a floating constant");
else
- assemble_real (TREE_REAL_CST (exp), TYPE_MODE (TREE_TYPE (exp)),
+ assemble_real (TREE_REAL_CST (exp),
+ SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (exp)),
align, reverse);
break;
case COMPLEX_TYPE:
- output_constant (TREE_REALPART (exp), thissize / 2, align, reverse);
+ output_constant (TREE_REALPART (exp), thissize / 2, align,
+ reverse, false);
output_constant (TREE_IMAGPART (exp), thissize / 2,
min_align (align, BITS_PER_UNIT * (thissize / 2)),
- reverse);
+ reverse, false);
break;
case ARRAY_TYPE:
case CONSTRUCTOR:
return output_constructor (exp, size, align, reverse, NULL);
case STRING_CST:
- thissize
- = MIN ((unsigned HOST_WIDE_INT)TREE_STRING_LENGTH (exp), size);
+ thissize = (unsigned HOST_WIDE_INT)TREE_STRING_LENGTH (exp);
+ if (merge_strings
+ && (thissize == 0
+ || TREE_STRING_POINTER (exp) [thissize - 1] != '\0'))
+ thissize++;
+ gcc_checking_assert (check_string_literal (exp, size));
assemble_string (TREE_STRING_POINTER (exp), thissize);
break;
case VECTOR_CST:
{
- machine_mode inner = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
+ scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
unsigned int nalign = MIN (align, GET_MODE_ALIGNMENT (inner));
int elt_size = GET_MODE_SIZE (inner);
output_constant (VECTOR_CST_ELT (exp, 0), elt_size, align,
- reverse);
+ reverse, false);
thissize = elt_size;
- for (unsigned int i = 1; i < VECTOR_CST_NELTS (exp); i++)
+ /* Static constants must have a fixed size. */
+ unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant ();
+ for (unsigned int i = 1; i < nunits; i++)
{
output_constant (VECTOR_CST_ELT (exp, i), elt_size, nalign,
- reverse);
+ reverse, false);
thissize += elt_size;
}
break;
if (local->val == NULL_TREE)
assemble_zeros (fieldsize);
else
- fieldsize
- = output_constant (local->val, fieldsize, align2, local->reverse);
+ fieldsize = output_constant (local->val, fieldsize, align2,
+ local->reverse, false);
/* Count its size. */
local->total_bytes += fieldsize;
on the chain is a TYPE_DECL of the enclosing struct. */
const_tree next = DECL_CHAIN (local->field);
gcc_assert (!fieldsize || !next || TREE_CODE (next) != FIELD_DECL);
+ tree size = TYPE_SIZE_UNIT (TREE_TYPE (local->val));
+ gcc_checking_assert (compare_tree_int (size, fieldsize) == 0);
}
else
fieldsize = tree_to_uhwi (DECL_SIZE_UNIT (local->field));
if (local->val == NULL_TREE)
assemble_zeros (fieldsize);
else
- fieldsize
- = output_constant (local->val, fieldsize, align2, local->reverse);
+ fieldsize = output_constant (local->val, fieldsize, align2,
+ local->reverse, false);
/* Count its size. */
local->total_bytes += fieldsize;
{
int this_time;
int shift;
- HOST_WIDE_INT value;
+ unsigned HOST_WIDE_INT value;
HOST_WIDE_INT next_byte = next_offset / BITS_PER_UNIT;
HOST_WIDE_INT next_bit = next_offset % BITS_PER_UNIT;
this_time = end - shift + 1;
}
- /* Now get the bits from the appropriate constant word. */
- value = TREE_INT_CST_ELT (local->val, shift / HOST_BITS_PER_WIDE_INT);
- shift = shift & (HOST_BITS_PER_WIDE_INT - 1);
+ /* Now get the bits we want to insert. */
+ value = wi::extract_uhwi (wi::to_widest (local->val),
+ shift, this_time);
/* Get the result. This works only when:
1 <= this_time <= HOST_BITS_PER_WIDE_INT. */
- local->byte |= (((value >> shift)
- & (((HOST_WIDE_INT) 2 << (this_time - 1)) - 1))
- << (BITS_PER_UNIT - this_time - next_bit));
+ local->byte |= value << (BITS_PER_UNIT - this_time - next_bit);
}
else
{
this_time
= HOST_BITS_PER_WIDE_INT - (shift & (HOST_BITS_PER_WIDE_INT - 1));
- /* Now get the bits from the appropriate constant word. */
- value = TREE_INT_CST_ELT (local->val, shift / HOST_BITS_PER_WIDE_INT);
- shift = shift & (HOST_BITS_PER_WIDE_INT - 1);
+ /* Now get the bits we want to insert. */
+ value = wi::extract_uhwi (wi::to_widest (local->val),
+ shift, this_time);
/* Get the result. This works only when:
1 <= this_time <= HOST_BITS_PER_WIDE_INT. */
- local->byte |= (((value >> shift)
- & (((HOST_WIDE_INT) 2 << (this_time - 1)) - 1))
- << next_bit);
+ local->byte |= value << next_bit;
}
next_offset += this_time;
struct symtab_node *n = symtab_node::get (decl);
if (n && n->refuse_visibility_changes)
- error ("%+D declared weak after being used", decl);
+ error ("%+qD declared weak after being used", decl);
DECL_WEAK (decl) = 1;
if (DECL_RTL_SET_P (decl)
tree alias_decl = TREE_PURPOSE (t);
tree target = ultimate_transparent_alias_target (&TREE_VALUE (t));
- if (! TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (alias_decl)))
+ if (! TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (alias_decl))
+ || TREE_SYMBOL_REFERENCED (target))
/* Remove alias_decl from the weak list, but leave entries for
the target alone. */
target = NULL_TREE;
#ifdef ASM_OUTPUT_DEF
tree orig_decl = decl;
- if (TREE_CODE (decl) == FUNCTION_DECL
- && cgraph_node::get (decl)->instrumentation_clone
- && cgraph_node::get (decl)->instrumented_version)
- orig_decl = cgraph_node::get (decl)->instrumented_version->decl;
-
/* Make name accessible from other files, if appropriate. */
if (TREE_PUBLIC (decl) || TREE_PUBLIC (orig_decl))
globalize_decl (decl);
maybe_assemble_visibility (decl);
}
- if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (decl)))
+ if (TREE_CODE (decl) == FUNCTION_DECL
+ && cgraph_node::get (decl)->ifunc_resolver)
{
#if defined (ASM_OUTPUT_TYPE_DIRECTIVE)
if (targetm.has_ifunc_p ())
# else
if (!DECL_WEAK (decl))
{
- if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (decl)))
+ /* NB: ifunc_resolver isn't set when an error is detected. */
+ if (TREE_CODE (decl) == FUNCTION_DECL
+ && lookup_attribute ("ifunc", DECL_ATTRIBUTES (decl)))
error_at (DECL_SOURCE_LOCATION (decl),
"ifunc is not supported in this configuration");
else
maybe_assemble_visibility (tree decl)
{
enum symbol_visibility vis = DECL_VISIBILITY (decl);
-
- if (TREE_CODE (decl) == FUNCTION_DECL
- && cgraph_node::get (decl)
- && cgraph_node::get (decl)->instrumentation_clone
- && cgraph_node::get (decl)->instrumented_version)
- vis = DECL_VISIBILITY (cgraph_node::get (decl)->instrumented_version->decl);
-
if (vis != VISIBILITY_DEFAULT)
{
targetm.asm_out.assemble_visibility (decl, vis);
|| strncmp (name, ".gnu.linkonce.tb.", 17) == 0)
flags |= SECTION_TLS | SECTION_BSS;
- /* These three sections have special ELF types. They are neither
- SHT_PROGBITS nor SHT_NOBITS, so when changing sections we don't
- want to print a section type (@progbits or @nobits). If someone
- is silly enough to emit code or TLS variables to one of these
- sections, then don't handle them specially. */
- if (!(flags & (SECTION_CODE | SECTION_BSS | SECTION_TLS))
- && (strcmp (name, ".init_array") == 0
- || strcmp (name, ".fini_array") == 0
- || strcmp (name, ".preinit_array") == 0))
+ /* Various sections have special ELF types that the assembler will
+ assign by default based on the name. They are neither SHT_PROGBITS
+ nor SHT_NOBITS, so when changing sections we don't want to print a
+ section type (@progbits or @nobits). Rather than duplicating the
+ assembler's knowledge of what those special name patterns are, just
+ let the assembler choose the type if we don't know a specific
+ reason to set it to something other than the default. SHT_PROGBITS
+ is the default for sections whose name is not specially known to
+ the assembler, so it does no harm to leave the choice to the
+ assembler when @progbits is the best thing we know to use. If
+ someone is silly enough to emit code or TLS variables to one of
+ these sections, then don't handle them specially.
+
+ default_elf_asm_named_section (below) handles the BSS, TLS, ENTSIZE, and
+ LINKONCE cases when NOTYPE is not set, so leave those to its logic. */
+ if (!(flags & (SECTION_CODE | SECTION_BSS | SECTION_TLS | SECTION_ENTSIZE))
+ && !(HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE)))
flags |= SECTION_NOTYPE;
return flags;
{
if (!(flags & SECTION_DEBUG))
*f++ = 'a';
-#if defined (HAVE_GAS_SECTION_EXCLUDE) && HAVE_GAS_SECTION_EXCLUDE == 1
+#if HAVE_GAS_SECTION_EXCLUDE
if (flags & SECTION_EXCLUDE)
*f++ = 'e';
#endif
fprintf (asm_out_file, "\t.section\t%s,\"%s\"", name, flagchars);
+ /* default_section_type_flags (above) knows which flags need special
+ handling here, and sets NOTYPE when none of these apply so that the
+ assembler's logic for default types can apply to user-chosen
+ section names. */
if (!(flags & SECTION_NOTYPE))
{
const char *type;
}
else if (VAR_P (decl))
{
+ tree d = CONST_CAST_TREE (decl);
if (bss_initializer_p (decl))
ret = SECCAT_BSS;
else if (! TREE_READONLY (decl)
|| TREE_SIDE_EFFECTS (decl)
- || ! TREE_CONSTANT (DECL_INITIAL (decl)))
+ || (DECL_INITIAL (decl)
+ && ! TREE_CONSTANT (DECL_INITIAL (decl))))
{
/* Here the reloc_rw_mask is not testing whether the section should
be read-only or not, but whether the dynamic link will have to
ret = reloc == 1 ? SECCAT_DATA_REL_RO_LOCAL : SECCAT_DATA_REL_RO;
else if (reloc || flag_merge_constants < 2
|| ((flag_sanitize & SANITIZE_ADDRESS)
- && asan_protect_global (CONST_CAST_TREE (decl))))
+ /* PR 81697: for architectures that use section anchors we
+ need to ignore DECL_RTL_SET_P (decl) for string constants
+ inside this asan_protect_global call because otherwise
+ we'll wrongly put them into SECCAT_RODATA_MERGE_CONST
+ section, set DECL_RTL (decl) later on and add DECL to
+ protected globals via successive asan_protect_global
+ calls. In this scenario we'll end up with wrong
+ alignment of these strings at runtime and possible ASan
+ false positives. */
+ && asan_protect_global (d, use_object_blocks_p ()
+ && use_blocks_for_decl_p (d))))
/* C and C++ don't allow different variables to share the same
location. -fmerge-all-constants allows even that (at the
expense of not conforming). */
ret = SECCAT_RODATA;
- else if (TREE_CODE (DECL_INITIAL (decl)) == STRING_CST)
+ else if (DECL_INITIAL (decl)
+ && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST)
ret = SECCAT_RODATA_MERGE_STR_INIT;
else
ret = SECCAT_RODATA_MERGE_CONST;
/* Note that this would be *just* SECCAT_BSS, except that there's
no concept of a read-only thread-local-data section. */
if (ret == SECCAT_BSS
- || (flag_zero_initialized_in_bss
- && initializer_zerop (DECL_INITIAL (decl))))
+ || DECL_INITIAL (decl) == NULL
+ || (flag_zero_initialized_in_bss
+ && initializer_zerop (DECL_INITIAL (decl))))
ret = SECCAT_TBSS;
else
ret = SECCAT_TDATA;
bool
default_use_anchors_for_symbol_p (const_rtx symbol)
{
- section *sect;
tree decl;
+ section *sect = SYMBOL_REF_BLOCK (symbol)->sect;
- /* Don't use anchors for mergeable sections. The linker might move
- the objects around. */
- sect = SYMBOL_REF_BLOCK (symbol)->sect;
- if (sect->common.flags & SECTION_MERGE)
- return false;
+ /* This function should only be called with non-zero SYMBOL_REF_BLOCK,
+ furthermore get_block_for_section should not create object blocks
+ for mergeable sections. */
+ gcc_checking_assert (sect && !(sect->common.flags & SECTION_MERGE));
/* Don't use anchors for small data sections. The small data register
acts as an anchor for such sections. */
weakref alias. */
if (lookup_attribute ("weakref", DECL_ATTRIBUTES (exp))
|| (TREE_CODE (exp) == FUNCTION_DECL
- && lookup_attribute ("ifunc", DECL_ATTRIBUTES (exp))))
+ && cgraph_node::get (exp)
+ && cgraph_node::get (exp)->ifunc_resolver))
return false;
/* Static variables are always local. */
else
switch_to_section (block->sect);
+ gcc_checking_assert (!(block->sect->common.flags & SECTION_MERGE));
assemble_align (block->alignment);
/* Define the values of all anchors relative to the current section
{
HOST_WIDE_INT size;
decl = SYMBOL_REF_DECL (symbol);
- assemble_constant_contents
- (DECL_INITIAL (decl), XSTR (symbol, 0), DECL_ALIGN (decl));
+ assemble_constant_contents (DECL_INITIAL (decl), XSTR (symbol, 0),
+ DECL_ALIGN (decl), false);
size = get_constant_size (DECL_INITIAL (decl));
offset += size;
{
HOST_WIDE_INT size;
decl = SYMBOL_REF_DECL (symbol);
- assemble_variable_contents (decl, XSTR (symbol, 0), false);
+ assemble_variable_contents (decl, XSTR (symbol, 0), false, false);
size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
offset += size;
if ((flag_sanitize & SANITIZE_ADDRESS)