+2012-04-14 Jan Hubicka <jh@suse.cz>
+
+ * cgraph.h: Update copyrights;
+ (symtab_node): Turn to union typedef.
+ (symtab_node_base): New structure.
+ (symtab_type): Add SYMTAB_SYMBOL tag.
+ (cgraph_node): Annotate some pointers with nested_ptr.
+ (varpool_node): Likewise.
+ (cgraph_local_info): Remove lto_file_data
+ and externally_visible.
+ (cgraph_node): Remove decl; same_comdat_group list;
+ aux; ref_list; order; address_taken; reachable_from_other_partition,
+ in_other_partition; resolution.
+ (varpool_node): Remove decl; same_comdat_group;
+ ref_list; lto_file_data; aux; order; resolution; externally_visible;
+ used_from_other_partition; in_other_partition.
+ (symtab_node_def): New union.
+ (cgraph, varpool): Update.
+ (varpool_first_static_initializer, varpool_next_static_initializer,
+ cgraph_only_called_directly_or_aliased_p,
+ varpool_can_remove_if_no_refs,
+ varpool_all_refs_explicit_p, cgraph_alias_aliased_node,
+ varpool_alias_aliased_node, cgraph_edge_recursive_p): Update
+ field references.
+ * cgraph.c: Likewise.
+ (cgraph_hash, assembler_name_hash): Turn into symtab_node.
+ * cgraphbuild.c: Likewise.
+ * lto-symtab.c: Likewise.
+ * c-gimplify.c: Likewise.
+ * value-prof.c: Likewise.
+ * tree.c: Likewise.
+ * ipa-cp.c: Likewise.
+ * tree-emutls.c: Likewise.
+ * ipa-inline-transform.c: Likewise.
+ * ipa-reference.c: Likewise.
+ * cgraphunit.c: Likewise.
+ * ipa-ref.c: Likewise.
+ * lto-cgraph.c: Likewise.
+ * ipa-ref-inline.h: Likewise.
+ * ipa-pure-const.c: Likewise.
+ * lto-streamer-out.c: Likewise.
+ * ipa-utils.c: Likewise.
+ * ipa-inline.c: Likewise.
+ * matrix-reorg.c: Likewise.
+ * tree-eh.c: Likewise.
+ * tree-vectorizer.c: Likewise.
+ * ipa-split.c: Likewise.
+ * ipa.c: Likewise.
+ * trans-mem.c: Likewise.
+ * ipa-inline-analysis.c: Likewise.
+ * gimplify.c: Likewise.
+ * cfgexpand.c: Likewise.
+ * tree-sra.c: Likewise.
+ * ipa-prop.c: Likewise.
+ * varasm.c: Likewise.
+ * tree-nested.c: Likewise.
+ * tree-inline.c: Likewise.
+ * tree-profile.c: Likewise.
+ * tree-ssa-structalias.c: Likewise.
+ * passes.c: Likewise.
+ * varpool.c: Likewise.
+
2012-04-14 Tom de Vries <tom@codesourcery.com>
* tree-ssa-tail-merge.c (stmt_local_def): New function, factored out of
+2012-04-14 Jan Hubicka <jh@suse.cz>
+
+ * gcc-interface/trans.c (finalize_nrv): Update field references for new
+ cgraph/varpool layout.
+
2012-04-09 Mike Stump <mikestump@comcast.net>
* a-assert.ads: Remove execute permission.
/* Prune also the candidates that are referenced by nested functions. */
node = cgraph_get_create_node (fndecl);
for (node = node->nested; node; node = node->next_nested)
- walk_tree_without_duplicates (&DECL_SAVED_TREE (node->decl), prune_nrv_r,
+ walk_tree_without_duplicates (&DECL_SAVED_TREE (node->symbol.decl), prune_nrv_r,
&data);
if (bitmap_empty_p (nrv))
return;
/* Dump all nested functions now. */
cgn = cgraph_get_create_node (fndecl);
for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
- c_genericize (cgn->decl);
+ c_genericize (cgn->symbol.decl);
}
static void
tree var;
tree old_cur_fun_decl = current_function_decl;
referenced_var_iterator rvi;
- struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
+ struct function *fn = DECL_STRUCT_FUNCTION (node->symbol.decl);
- current_function_decl = node->decl;
+ current_function_decl = node->symbol.decl;
push_cfun (fn);
gcc_checking_assert (gimple_referenced_vars (fn));
static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);
/* Hash table used to convert declarations into nodes. */
-static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash;
+static GTY((param_is (union symtab_node_def))) htab_t cgraph_hash;
/* Hash table used to convert assembler names into nodes. */
-static GTY((param_is (struct cgraph_node))) htab_t assembler_name_hash;
+static GTY((param_is (union symtab_node_def))) htab_t assembler_name_hash;
/* The linked list of cgraph nodes. */
-struct cgraph_node *cgraph_nodes;
+symtab_node x_cgraph_nodes;
/* Queue of cgraph nodes scheduled to be lowered. */
-struct cgraph_node *cgraph_nodes_queue;
+symtab_node x_cgraph_nodes_queue;
+#define cgraph_nodes_queue ((struct cgraph_node *)x_cgraph_nodes_queue)
/* Queue of cgraph nodes scheduled to be added into cgraph. This is a
secondary queue used during optimization to accommodate passes that
hash_node (const void *p)
{
const struct cgraph_node *n = (const struct cgraph_node *) p;
- return (hashval_t) DECL_UID (n->decl);
+ return (hashval_t) DECL_UID (n->symbol.decl);
}
{
const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
const struct cgraph_node *n2 = (const struct cgraph_node *) p2;
- return DECL_UID (n1->decl) == DECL_UID (n2->decl);
+ return DECL_UID (n1->symbol.decl) == DECL_UID (n2->symbol.decl);
}
/* Allocate new callgraph node. */
node->symbol.type = SYMTAB_FUNCTION;
node->next = cgraph_nodes;
- node->order = cgraph_order++;
+ node->symbol.order = cgraph_order++;
if (cgraph_nodes)
cgraph_nodes->previous = node;
node->previous = NULL;
node->frequency = NODE_FREQUENCY_NORMAL;
node->count_materialization_scale = REG_BR_PROB_BASE;
- ipa_empty_ref_list (&node->ref_list);
- cgraph_nodes = node;
+ ipa_empty_ref_list (&node->symbol.ref_list);
+ x_cgraph_nodes = (symtab_node)node;
cgraph_n_nodes++;
return node;
}
if (!cgraph_hash)
cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);
- key.decl = decl;
+ key.symbol.decl = decl;
slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
gcc_assert (!*slot);
node = cgraph_create_node_1 ();
- node->decl = decl;
+ node->symbol.decl = decl;
*slot = node;
if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
{
if (!cgraph_hash)
return NULL;
- key.decl = CONST_CAST2 (tree, const_tree, decl);
+ key.symbol.decl = CONST_CAST2 (tree, const_tree, decl);
slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key,
NO_INSERT);
hash_node_by_assembler_name (const void *p)
{
const struct cgraph_node *n = (const struct cgraph_node *) p;
- return (hashval_t) decl_assembler_name_hash (DECL_ASSEMBLER_NAME (n->decl));
+ return (hashval_t) decl_assembler_name_hash (DECL_ASSEMBLER_NAME (n->symbol.decl));
}
/* Returns nonzero if P1 and P2 are equal. */
{
const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
const_tree name = (const_tree)p2;
- return (decl_assembler_name_equal (n1->decl, name));
+ return (decl_assembler_name_equal (n1->symbol.decl, name));
}
/* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
for (node = cgraph_nodes; node; node = node->next)
if (!node->global.inlined_to)
{
- tree name = DECL_ASSEMBLER_NAME (node->decl);
+ tree name = DECL_ASSEMBLER_NAME (node->symbol.decl);
slot = htab_find_slot_with_hash (assembler_name_hash, name,
decl_assembler_name_hash (name),
INSERT);
cgraph_make_edge_direct (e, new_callee);
}
- push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
+ push_cfun (DECL_STRUCT_FUNCTION (e->caller->symbol.decl));
e->can_throw_external = stmt_can_throw_external (new_stmt);
pop_cfun ();
if (e->caller->call_site_hash)
gcc_assert (freq <= CGRAPH_FREQ_MAX);
edge->call_stmt = call_stmt;
- push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
+ push_cfun (DECL_STRUCT_FUNCTION (caller->symbol.decl));
edge->can_throw_external
= call_stmt ? stmt_can_throw_external (call_stmt) : false;
pop_cfun ();
if (call_stmt
- && callee && callee->decl
- && !gimple_check_call_matching_types (call_stmt, callee->decl))
+ && callee && callee->symbol.decl
+ && !gimple_check_call_matching_types (call_stmt, callee->symbol.decl))
edge->call_stmt_cannot_inline_p = true;
else
edge->call_stmt_cannot_inline_p = false;
if (edge->call_stmt)
edge->call_stmt_cannot_inline_p
- = !gimple_check_call_matching_types (edge->call_stmt, callee->decl);
+ = !gimple_check_call_matching_types (edge->call_stmt, callee->symbol.decl);
/* We need to re-determine the inlining status of the edge. */
initialize_inline_failed (edge);
struct cgraph_node *callee = e->callee;
while (callee)
{
- if (callee->decl == new_call
+ if (callee->symbol.decl == new_call
|| callee->former_clone_of == new_call)
return;
callee = callee->clone_of;
void
cgraph_release_function_body (struct cgraph_node *node)
{
- if (DECL_STRUCT_FUNCTION (node->decl))
+ if (DECL_STRUCT_FUNCTION (node->symbol.decl))
{
tree old_decl = current_function_decl;
- push_cfun (DECL_STRUCT_FUNCTION (node->decl));
+ push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
if (cfun->cfg
&& current_loops)
{
}
if (cfun->gimple_df)
{
- current_function_decl = node->decl;
+ current_function_decl = node->symbol.decl;
delete_tree_ssa ();
delete_tree_cfg_annotations ();
cfun->eh = NULL;
if (cfun->value_histograms)
free_histograms ();
pop_cfun();
- gimple_set_body (node->decl, NULL);
+ gimple_set_body (node->symbol.decl, NULL);
VEC_free (ipa_opt_pass, heap,
node->ipa_transforms_to_apply);
/* Struct function hangs a lot of data that would leak if we didn't
removed all pointers to it. */
- ggc_free (DECL_STRUCT_FUNCTION (node->decl));
- DECL_STRUCT_FUNCTION (node->decl) = NULL;
+ ggc_free (DECL_STRUCT_FUNCTION (node->symbol.decl));
+ DECL_STRUCT_FUNCTION (node->symbol.decl) = NULL;
}
- DECL_SAVED_TREE (node->decl) = NULL;
+ DECL_SAVED_TREE (node->symbol.decl) = NULL;
/* If the node is abstract and needed, then do not clear DECL_INITIAL
of its associated function function declaration because it's
needed to emit debug info later. */
if (!node->abstract_and_needed)
- DECL_INITIAL (node->decl) = error_mark_node;
+ DECL_INITIAL (node->symbol.decl) = error_mark_node;
}
/* Remove the node from cgraph. */
cgraph_call_node_removal_hooks (node);
cgraph_node_remove_callers (node);
cgraph_node_remove_callees (node);
- ipa_remove_all_references (&node->ref_list);
- ipa_remove_all_refering (&node->ref_list);
+ ipa_remove_all_references (&node->symbol.ref_list);
+ ipa_remove_all_refering (&node->symbol.ref_list);
VEC_free (ipa_opt_pass, heap,
node->ipa_transforms_to_apply);
if (node->previous)
node->previous->next = node->next;
else
- cgraph_nodes = node->next;
+ x_cgraph_nodes = (symtab_node)node->next;
if (node->next)
node->next->previous = node->previous;
node->next = NULL;
struct cgraph_node *next_inline_clone;
for (next_inline_clone = node->clones;
- next_inline_clone && next_inline_clone->decl != node->decl;
+ next_inline_clone
+ && next_inline_clone->symbol.decl != node->symbol.decl;
next_inline_clone = next_inline_clone->next_sibling_clone)
;
}
}
- if (node->same_comdat_group)
+ if (node->symbol.same_comdat_group)
{
- struct cgraph_node *prev;
- for (prev = node->same_comdat_group;
- prev->same_comdat_group != node;
- prev = prev->same_comdat_group)
+ symtab_node prev;
+ for (prev = node->symbol.same_comdat_group;
+ prev->symbol.same_comdat_group != (symtab_node)node;
+ prev = prev->symbol.same_comdat_group)
;
- if (node->same_comdat_group == prev)
- prev->same_comdat_group = NULL;
+ if (node->symbol.same_comdat_group == prev)
+ prev->symbol.same_comdat_group = NULL;
else
- prev->same_comdat_group = node->same_comdat_group;
- node->same_comdat_group = NULL;
+ prev->symbol.same_comdat_group = node->symbol.same_comdat_group;
+ node->symbol.same_comdat_group = NULL;
}
/* While all the clones are removed after being proceeded, the function
struct cgraph_node *n = (struct cgraph_node *) *slot;
if (!n->clones && !n->clone_of && !n->global.inlined_to
&& (cgraph_global_info_ready
- && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl)
- || n->in_other_partition)))
+ && (TREE_ASM_WRITTEN (n->symbol.decl)
+ || DECL_EXTERNAL (n->symbol.decl)
+ || n->symbol.in_other_partition)))
kill_body = true;
}
if (assembler_name_hash)
{
- tree name = DECL_ASSEMBLER_NAME (node->decl);
+ tree name = DECL_ASSEMBLER_NAME (node->symbol.decl);
slot = htab_find_slot_with_hash (assembler_name_hash, name,
decl_assembler_name_hash (name),
NO_INSERT);
if (kill_body)
cgraph_release_function_body (node);
- node->decl = NULL;
+ node->symbol.decl = NULL;
if (node->call_site_hash)
{
htab_delete (node->call_site_hash);
/* Clear out the node to NULL all pointers and add the node to the free
list. */
memset (node, 0, sizeof(*node));
+ node->symbol.type = SYMTAB_FUNCTION;
node->uid = uid;
NEXT_FREE_NODE (node) = free_nodes;
free_nodes = node;
/* Add NEW_ to the same comdat group that OLD is in. */
void
-cgraph_add_to_same_comdat_group (struct cgraph_node *new_,
- struct cgraph_node *old)
+cgraph_add_to_same_comdat_group (struct cgraph_node *new_node,
+ struct cgraph_node *old_node)
{
- gcc_assert (DECL_ONE_ONLY (old->decl));
- gcc_assert (!new_->same_comdat_group);
- gcc_assert (new_ != old);
+ gcc_assert (DECL_ONE_ONLY (old_node->symbol.decl));
+ gcc_assert (!new_node->symbol.same_comdat_group);
+ gcc_assert (new_node != old_node);
- DECL_COMDAT_GROUP (new_->decl) = DECL_COMDAT_GROUP (old->decl);
- new_->same_comdat_group = old;
- if (!old->same_comdat_group)
- old->same_comdat_group = new_;
+ DECL_COMDAT_GROUP (new_node->symbol.decl) = DECL_COMDAT_GROUP (old_node->symbol.decl);
+ new_node->symbol.same_comdat_group = (symtab_node)old_node;
+ if (!old_node->symbol.same_comdat_group)
+ old_node->symbol.same_comdat_group = (symtab_node)new_node;
else
{
- struct cgraph_node *n;
- for (n = old->same_comdat_group;
- n->same_comdat_group != old;
- n = n->same_comdat_group)
+ symtab_node n;
+ for (n = old_node->symbol.same_comdat_group;
+ n->symbol.same_comdat_group != (symtab_node)old_node;
+ n = n->symbol.same_comdat_group)
;
- n->same_comdat_group = new_;
+ n->symbol.same_comdat_group = (symtab_node)new_node;
}
}
/* Verify that function does not appear to be needed out of blue
during the optimization process. This can happen for extern
inlines when bodies was removed after inlining. */
- gcc_assert ((node->analyzed || node->in_other_partition
- || DECL_EXTERNAL (node->decl)));
+ gcc_assert ((node->analyzed || node->symbol.in_other_partition
+ || DECL_EXTERNAL (node->symbol.decl)));
}
else
- notice_global_symbol (node->decl);
+ notice_global_symbol (node->symbol.decl);
node->reachable = 1;
node->next_needed = cgraph_nodes_queue;
- cgraph_nodes_queue = node;
+ x_cgraph_nodes_queue = (symtab_node)node;
}
}
of the object was taken (and thus it should be set on node alias is
referring to). We should remove the first use and the remove the
following set. */
- node->address_taken = 1;
+ node->symbol.address_taken = 1;
node = cgraph_function_or_thunk_node (node, NULL);
- node->address_taken = 1;
+ node->symbol.address_taken = 1;
}
/* Return local info for the compiled function. */
node = cgraph_get_node (decl);
if (!node
|| (decl != current_function_decl
- && !TREE_ASM_WRITTEN (node->decl)))
+ && !TREE_ASM_WRITTEN (node->symbol.decl)))
return NULL;
return &node->rtl;
}
const char *
cgraph_node_name (struct cgraph_node *node)
{
- return lang_hooks.decl_printable_name (node->decl, 2);
+ return lang_hooks.decl_printable_name (node->symbol.decl, 2);
}
/* Names used to print out the availability enum. */
fprintf (f, "%s/%i", cgraph_node_name (node), node->uid);
dump_addr (f, " @", (void *)node);
- if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
- fprintf (f, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
+ if (DECL_ASSEMBLER_NAME_SET_P (node->symbol.decl))
+ fprintf (f, " (asm: %s)",
+ IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->symbol.decl)));
if (node->global.inlined_to)
fprintf (f, " (inline copy in %s/%i)",
cgraph_node_name (node->global.inlined_to),
node->global.inlined_to->uid);
- if (node->same_comdat_group)
+ if (node->symbol.same_comdat_group)
fprintf (f, " (same comdat group as %s/%i)",
- cgraph_node_name (node->same_comdat_group),
- node->same_comdat_group->uid);
+ cgraph_node_name (cgraph (node->symbol.same_comdat_group)),
+ cgraph (node->symbol.same_comdat_group)->uid);
if (node->clone_of)
fprintf (f, " (clone of %s/%i)",
cgraph_node_name (node->clone_of),
cgraph_availability_names [cgraph_function_body_availability (node)]);
if (node->analyzed)
fprintf (f, " analyzed");
- if (node->in_other_partition)
+ if (node->symbol.in_other_partition)
fprintf (f, " in_other_partition");
if (node->count)
fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
if (node->needed)
fprintf (f, " needed");
- if (node->address_taken)
+ if (node->symbol.address_taken)
fprintf (f, " address_taken");
else if (node->reachable)
fprintf (f, " reachable");
- else if (node->reachable_from_other_partition)
- fprintf (f, " reachable_from_other_partition");
- if (gimple_has_body_p (node->decl))
+ else if (node->symbol.used_from_other_partition)
+ fprintf (f, " used_from_other_partition");
+ if (gimple_has_body_p (node->symbol.decl))
fprintf (f, " body");
if (node->process)
fprintf (f, " process");
if (node->local.local)
fprintf (f, " local");
- if (node->local.externally_visible)
+ if (node->symbol.externally_visible)
fprintf (f, " externally_visible");
- if (node->resolution != LDPR_UNKNOWN)
+ if (node->symbol.resolution != LDPR_UNKNOWN)
fprintf (f, " %s",
- ld_plugin_symbol_resolution_names[(int)node->resolution]);
+ ld_plugin_symbol_resolution_names[(int)node->symbol.resolution]);
if (node->local.finalized)
fprintf (f, " finalized");
if (node->local.redefined_extern_inline)
fprintf (f, " redefined_extern_inline");
- if (TREE_ASM_WRITTEN (node->decl))
+ if (TREE_ASM_WRITTEN (node->symbol.decl))
fprintf (f, " asm_written");
if (node->only_called_at_startup)
fprintf (f, " only_called_at_startup");
}
fprintf (f, "\n");
fprintf (f, " References: ");
- ipa_dump_references (f, &node->ref_list);
+ ipa_dump_references (f, &node->symbol.ref_list);
fprintf (f, " Refering this function: ");
- ipa_dump_refering (f, &node->ref_list);
+ ipa_dump_refering (f, &node->symbol.ref_list);
for (edge = node->indirect_calls; edge; edge = edge->next_callee)
indirect_calls_count++;
gcov_type count_scale;
unsigned i;
- new_node->decl = decl;
+ new_node->symbol.decl = decl;
new_node->origin = n->origin;
if (new_node->origin)
{
}
new_node->analyzed = n->analyzed;
new_node->local = n->local;
- new_node->local.externally_visible = false;
+ new_node->symbol.externally_visible = false;
new_node->local.local = true;
new_node->global = n->global;
new_node->rtl = n->rtl;
for (e = n->indirect_calls; e; e = e->next_callee)
cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
count_scale, freq, update_original);
- ipa_clone_references (new_node, NULL, &n->ref_list);
+ ipa_clone_references (new_node, NULL, &n->symbol.ref_list);
new_node->next_sibling_clone = n->clones;
if (n->clones)
n->clones = new_node;
new_node->clone_of = n;
- if (n->decl != decl)
+ if (n->symbol.decl != decl)
{
struct cgraph_node **slot;
slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, new_node, INSERT);
bitmap args_to_skip,
const char * suffix)
{
- tree old_decl = old_node->decl;
+ tree old_decl = old_node->symbol.decl;
struct cgraph_node *new_node = NULL;
tree new_decl;
size_t i;
that is not weak also.
??? We cannot use COMDAT linkage because there is no
ABI support for this. */
- DECL_EXTERNAL (new_node->decl) = 0;
+ DECL_EXTERNAL (new_node->symbol.decl) = 0;
if (DECL_ONE_ONLY (old_decl))
- DECL_SECTION_NAME (new_node->decl) = NULL;
- DECL_COMDAT_GROUP (new_node->decl) = 0;
- TREE_PUBLIC (new_node->decl) = 0;
- DECL_COMDAT (new_node->decl) = 0;
- DECL_WEAK (new_node->decl) = 0;
- DECL_STATIC_CONSTRUCTOR (new_node->decl) = 0;
- DECL_STATIC_DESTRUCTOR (new_node->decl) = 0;
+ DECL_SECTION_NAME (new_node->symbol.decl) = NULL;
+ DECL_COMDAT_GROUP (new_node->symbol.decl) = 0;
+ TREE_PUBLIC (new_node->symbol.decl) = 0;
+ DECL_COMDAT (new_node->symbol.decl) = 0;
+ DECL_WEAK (new_node->symbol.decl) = 0;
+ DECL_STATIC_CONSTRUCTOR (new_node->symbol.decl) = 0;
+ DECL_STATIC_DESTRUCTOR (new_node->symbol.decl) = 0;
new_node->clone.tree_map = tree_map;
new_node->clone.args_to_skip = args_to_skip;
FOR_EACH_VEC_ELT (ipa_replace_map_p, tree_map, i, map)
struct cgraph_node *orig_node;
for (orig_node = old_node; orig_node->clone_of; orig_node = orig_node->clone_of)
;
- for (arg = DECL_ARGUMENTS (orig_node->decl); arg; arg = DECL_CHAIN (arg), oldi++)
+ for (arg = DECL_ARGUMENTS (orig_node->symbol.decl);
+ arg; arg = DECL_CHAIN (arg), oldi++)
{
if (bitmap_bit_p (old_node->clone.combined_args_to_skip, oldi))
{
}
else
new_node->clone.combined_args_to_skip = args_to_skip;
- new_node->local.externally_visible = 0;
+ new_node->symbol.externally_visible = 0;
new_node->local.local = 1;
new_node->lowered = true;
new_node->reachable = true;
avail = AVAIL_NOT_AVAILABLE;
else if (node->local.local)
avail = AVAIL_LOCAL;
- else if (!node->local.externally_visible)
+ else if (!node->symbol.externally_visible)
avail = AVAIL_AVAILABLE;
/* Inline functions are safe to be analyzed even if their symbol can
be overwritten at runtime. It is not meaningful to enforce any sane
behaviour on replacing inline function by different body. */
- else if (DECL_DECLARED_INLINE_P (node->decl))
+ else if (DECL_DECLARED_INLINE_P (node->symbol.decl))
avail = AVAIL_AVAILABLE;
/* If the function can be overwritten, return OVERWRITABLE. Take
AVAIL_AVAILABLE here? That would be good reason to preserve this
bit. */
- else if (decl_replaceable_p (node->decl) && !DECL_EXTERNAL (node->decl))
+ else if (decl_replaceable_p (node->symbol.decl)
+ && !DECL_EXTERNAL (node->symbol.decl))
avail = AVAIL_OVERWRITABLE;
else avail = AVAIL_AVAILABLE;
void *data ATTRIBUTE_UNUSED)
{
return !(!node->needed
- && ((DECL_COMDAT (node->decl) && !node->same_comdat_group)
- || !node->local.externally_visible));
+ && ((DECL_COMDAT (node->symbol.decl)
+ && !node->symbol.same_comdat_group)
+ || !node->symbol.externally_visible));
}
/* Return true if NODE can be made local for API change.
bool
cgraph_node_can_be_local_p (struct cgraph_node *node)
{
- return (!node->address_taken
+ return (!node->symbol.address_taken
&& !cgraph_for_node_and_aliases (node,
cgraph_node_cannot_be_local_p_1,
NULL, true));
struct cgraph_node *node = cgraph_get_node (decl);
change_decl_assembler_name (decl,
clone_function_name (decl, "local"));
- if (node->local.lto_file_data)
- lto_record_renamed_decl (node->local.lto_file_data,
+ if (node->symbol.lto_file_data)
+ lto_record_renamed_decl (node->symbol.lto_file_data,
old_name,
IDENTIFIER_POINTER
(DECL_ASSEMBLER_NAME (decl)));
C++ frontend still sets TREE_SYMBOL_REFERENCED on them. */
SET_DECL_ASSEMBLER_NAME (decl,
clone_function_name (decl, "local"));
- if (vnode->lto_file_data)
- lto_record_renamed_decl (vnode->lto_file_data,
+ if (vnode->symbol.lto_file_data)
+ lto_record_renamed_decl (vnode->symbol.lto_file_data,
old_name,
IDENTIFIER_POINTER
(DECL_ASSEMBLER_NAME (decl)));
if (cgraph_for_node_thunks_and_aliases (e->caller, callback, data,
include_overwritable))
return true;
- for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
+ for (i = 0; ipa_ref_list_refering_iterate (&node->symbol.ref_list, i, ref); i++)
if (ref->use == IPA_REF_ALIAS)
{
struct cgraph_node *alias = ipa_ref_refering_node (ref);
if (callback (node, data))
return true;
- for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
+ for (i = 0; ipa_ref_list_refering_iterate (&node->symbol.ref_list, i, ref); i++)
if (ref->use == IPA_REF_ALIAS)
{
struct cgraph_node *alias = ipa_ref_refering_node (ref);
cgraph_make_node_local_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
gcc_checking_assert (cgraph_node_can_be_local_p (node));
- if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
+ if (DECL_COMDAT (node->symbol.decl) || DECL_EXTERNAL (node->symbol.decl))
{
- cgraph_make_decl_local (node->decl);
+ cgraph_make_decl_local (node->symbol.decl);
- node->local.externally_visible = false;
+ node->symbol.externally_visible = false;
node->local.local = true;
- node->resolution = LDPR_PREVAILING_DEF_IRONLY;
+ node->symbol.resolution = LDPR_PREVAILING_DEF_IRONLY;
gcc_assert (cgraph_function_body_availability (node) == AVAIL_LOCAL);
}
return false;
{
struct cgraph_edge *e;
- TREE_NOTHROW (node->decl) = data != NULL;
+ TREE_NOTHROW (node->symbol.decl) = data != NULL;
if (data != NULL)
for (e = node->callers; e; e = e->next_caller)
optimized out. */
if (data && !((size_t)data & 2))
{
- if (DECL_STATIC_CONSTRUCTOR (node->decl))
- DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
- if (DECL_STATIC_DESTRUCTOR (node->decl))
- DECL_STATIC_DESTRUCTOR (node->decl) = 0;
+ if (DECL_STATIC_CONSTRUCTOR (node->symbol.decl))
+ DECL_STATIC_CONSTRUCTOR (node->symbol.decl) = 0;
+ if (DECL_STATIC_DESTRUCTOR (node->symbol.decl))
+ DECL_STATIC_DESTRUCTOR (node->symbol.decl) = 0;
}
- TREE_READONLY (node->decl) = data != NULL;
- DECL_LOOPING_CONST_OR_PURE_P (node->decl) = ((size_t)data & 2) != 0;
+ TREE_READONLY (node->symbol.decl) = data != NULL;
+ DECL_LOOPING_CONST_OR_PURE_P (node->symbol.decl) = ((size_t)data & 2) != 0;
return false;
}
optimized out. */
if (data && !((size_t)data & 2))
{
- if (DECL_STATIC_CONSTRUCTOR (node->decl))
- DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
- if (DECL_STATIC_DESTRUCTOR (node->decl))
- DECL_STATIC_DESTRUCTOR (node->decl) = 0;
+ if (DECL_STATIC_CONSTRUCTOR (node->symbol.decl))
+ DECL_STATIC_CONSTRUCTOR (node->symbol.decl) = 0;
+ if (DECL_STATIC_DESTRUCTOR (node->symbol.decl))
+ DECL_STATIC_DESTRUCTOR (node->symbol.decl) = 0;
}
- DECL_PURE_P (node->decl) = data != NULL;
- DECL_LOOPING_CONST_OR_PURE_P (node->decl) = ((size_t)data & 2) != 0;
+ DECL_PURE_P (node->symbol.decl) = data != NULL;
+ DECL_LOOPING_CONST_OR_PURE_P (node->symbol.decl) = ((size_t)data & 2) != 0;
return false;
}
/* It makes sense to put main() together with the static constructors.
It will be executed for sure, but rest of functions called from
main are definitely not at startup only. */
- if (MAIN_NAME_P (DECL_NAME (edge->caller->decl)))
+ if (MAIN_NAME_P (DECL_NAME (edge->caller->symbol.decl)))
d->only_called_at_startup = 0;
d->only_called_at_exit &= edge->caller->only_called_at_exit;
}
bool
cgraph_node_cannot_return (struct cgraph_node *node)
{
- int flags = flags_from_decl_or_type (node->decl);
+ int flags = flags_from_decl_or_type (node->symbol.decl);
if (!flag_exceptions)
return (flags & ECF_NORETURN) != 0;
else
{
gcc_assert (!node->global.inlined_to);
/* Extern inlines can always go, we will use the external definition. */
- if (DECL_EXTERNAL (node->decl))
+ if (DECL_EXTERNAL (node->symbol.decl))
return true;
/* When function is needed, we can not remove it. */
- if (node->needed || node->reachable_from_other_partition)
+ if (node->needed || node->symbol.used_from_other_partition)
return false;
- if (DECL_STATIC_CONSTRUCTOR (node->decl)
- || DECL_STATIC_DESTRUCTOR (node->decl))
+ if (DECL_STATIC_CONSTRUCTOR (node->symbol.decl)
+ || DECL_STATIC_DESTRUCTOR (node->symbol.decl))
return false;
/* Only COMDAT functions can be removed if externally visible. */
- if (node->local.externally_visible
- && (!DECL_COMDAT (node->decl)
+ if (node->symbol.externally_visible
+ && (!DECL_COMDAT (node->symbol.decl)
|| cgraph_used_from_object_file_p (node)))
return false;
return true;
cgraph_can_remove_if_no_direct_calls_p (struct cgraph_node *node)
{
/* Extern inlines can always go, we will use the external definition. */
- if (DECL_EXTERNAL (node->decl))
+ if (DECL_EXTERNAL (node->symbol.decl))
return true;
- if (node->address_taken)
+ if (node->symbol.address_taken)
return false;
return !cgraph_for_node_and_aliases (node, nonremovable_p, NULL, true);
}
return cgraph_only_called_directly_p (node);
else
{
- if (DECL_EXTERNAL (node->decl))
+ if (DECL_EXTERNAL (node->symbol.decl))
return true;
return cgraph_can_remove_if_no_direct_calls_p (node);
}
cgraph_used_from_object_file_p (struct cgraph_node *node)
{
gcc_assert (!node->global.inlined_to);
- if (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl))
+ if (!TREE_PUBLIC (node->symbol.decl) || DECL_EXTERNAL (node->symbol.decl))
return false;
- if (resolution_used_from_other_file_p (node->resolution))
+ if (resolution_used_from_other_file_p (node->symbol.resolution))
return true;
return false;
}
/* Callgraph handling code.
- Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
- Free Software Foundation, Inc.
+ Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
+ 2012 Free Software Foundation, Inc.
Contributed by Jan Hubicka
This file is part of GCC.
TODO: add labels, constant pool and aliases. */
enum symtab_type
{
+ SYMTAB_SYMBOL,
SYMTAB_FUNCTION,
SYMTAB_VARIABLE
};
+union symtab_node_def;
+typedef union symtab_node_def *symtab_node;
+
/* Base of all entries in the symbol table.
The symtab_node is inherited by cgraph and varpol nodes. */
-struct GTY(()) symtab_node
+struct GTY(()) symtab_node_base
{
/* Type of the symbol. */
enum symtab_type type;
+ tree decl;
+ struct ipa_ref_list ref_list;
+ /* Circular list of nodes in the same comdat group if non-NULL. */
+ symtab_node same_comdat_group;
+ /* Ordering of all symtab entries. */
+ int order;
+ enum ld_plugin_symbol_resolution resolution;
+ /* File stream where this node is being written to. */
+ struct lto_file_decl_data * lto_file_data;
+
+ PTR GTY ((skip)) aux;
+
+ /* Set when function has address taken.
+ In current implementation it imply needed flag. */
+ unsigned address_taken : 1;
+ /* Set when variable is used from other LTRANS partition. */
+ unsigned used_from_other_partition : 1;
+ /* Set when function is available in the other LTRANS partition.
+ During WPA output it is used to mark nodes that are present in
+ multiple partitions. */
+ unsigned in_other_partition : 1;
+ /* Set when function is visible by other units. */
+ unsigned externally_visible : 1;
};
enum availability
Available after function is analyzed. */
struct GTY(()) cgraph_local_info {
- /* File stream where this node is being written to. */
- struct lto_file_decl_data * lto_file_data;
-
/* Set when function function is visible in current compilation unit only
and its address is never taken. */
unsigned local : 1;
- /* Set when function is visible by other units. */
- unsigned externally_visible : 1;
-
/* Set once it has been finalized so we consider it to be output. */
unsigned finalized : 1;
/* The cgraph data structure.
Each function decl has assigned cgraph_node listing callees and callers. */
-struct GTY((chain_next ("%h.next"), chain_prev ("%h.previous"))) cgraph_node {
- struct symtab_node symbol;
- tree decl;
+struct GTY(()) cgraph_node {
+ struct symtab_node_base symbol;
struct cgraph_edge *callees;
struct cgraph_edge *callers;
- struct cgraph_node *next;
- struct cgraph_node *previous;
+ struct cgraph_node *
+ GTY ((nested_ptr (union symtab_node_def, "(struct cgraph_node *)(%h)", "(symtab_node)%h")))
+ next;
+ struct cgraph_node *
+ GTY ((nested_ptr (union symtab_node_def, "(struct cgraph_node *)(%h)", "(symtab_node)%h")))
+ previous;
/* List of edges representing indirect calls with a yet undetermined
callee. */
struct cgraph_edge *indirect_calls;
/* For nested functions points to function the node is nested in. */
- struct cgraph_node *origin;
+ struct cgraph_node *
+ GTY ((nested_ptr (union symtab_node_def, "(struct cgraph_node *)(%h)", "(symtab_node)%h")))
+ origin;
/* Points to first nested function, if any. */
- struct cgraph_node *nested;
+ struct cgraph_node *
+ GTY ((nested_ptr (union symtab_node_def, "(struct cgraph_node *)(%h)", "(symtab_node)%h")))
+ nested;
/* Pointer to the next function with same origin, if any. */
- struct cgraph_node *next_nested;
+ struct cgraph_node *
+ GTY ((nested_ptr (union symtab_node_def, "(struct cgraph_node *)(%h)", "(symtab_node)%h")))
+ next_nested;
/* Pointer to the next function in cgraph_nodes_queue. */
- struct cgraph_node *next_needed;
+ struct cgraph_node *
+ GTY ((nested_ptr (union symtab_node_def, "(struct cgraph_node *)(%h)", "(symtab_node)%h")))
+ next_needed;
/* Pointer to the next clone. */
struct cgraph_node *next_sibling_clone;
struct cgraph_node *prev_sibling_clone;
struct cgraph_node *clones;
struct cgraph_node *clone_of;
- /* Circular list of nodes in the same comdat group if non-NULL. */
- struct cgraph_node *same_comdat_group;
/* For functions with many calls sites it holds map from call expression
to the edge to speed up cgraph_edge function. */
htab_t GTY((param_is (struct cgraph_edge))) call_site_hash;
/* Declaration node used to be clone of. */
tree former_clone_of;
- PTR GTY ((skip)) aux;
-
/* Interprocedural passes scheduled to have their transform functions
applied next time we execute local pass on them. We maintain it
per-function in order to allow IPA passes to introduce new functions. */
VEC(ipa_opt_pass,heap) * GTY((skip)) ipa_transforms_to_apply;
- struct ipa_ref_list ref_list;
struct cgraph_local_info local;
struct cgraph_global_info global;
struct cgraph_rtl_info rtl;
int count_materialization_scale;
/* Unique id of the node. */
int uid;
- /* Ordering of all cgraph nodes. */
- int order;
-
- enum ld_plugin_symbol_resolution resolution;
/* Set when function must be output for some reason. The primary
use of this flag is to mark functions needed to be output for
or reachable from functions needed to be output are marked
by specialized flags. */
unsigned needed : 1;
- /* Set when function has address taken.
- In current implementation it imply needed flag. */
- unsigned address_taken : 1;
/* Set when decl is an abstract function pointed to by the
ABSTRACT_DECL_ORIGIN of a reachable function. */
unsigned abstract_and_needed : 1;
cgraph_remove_unreachable_nodes cgraph still can contain unreachable
nodes when they are needed for virtual clone instantiation. */
unsigned reachable : 1;
- /* Set when function is reachable by call from other LTRANS partition. */
- unsigned reachable_from_other_partition : 1;
/* Set once the function is lowered (i.e. its CFG is built). */
unsigned lowered : 1;
/* Set once the function has been instantiated and its callee
lists created. */
unsigned analyzed : 1;
- /* Set when function is available in the other LTRANS partition.
- During WPA output it is used to mark nodes that are present in
- multiple partitions. */
- unsigned in_other_partition : 1;
/* Set when function is scheduled to be processed by local passes. */
unsigned process : 1;
/* Set for aliases once they got through assemble_alias. */
Each static variable decl has assigned varpool_node. */
struct GTY((chain_next ("%h.next"), chain_prev ("%h.prev"))) varpool_node {
- struct symtab_node symbol;
- tree decl;
+ struct symtab_node_base symbol;
/* For aliases points to declaration DECL is alias of. */
tree alias_of;
/* Pointer to the next function in varpool_nodes. */
- struct varpool_node *next, *prev;
+ struct varpool_node *
+ GTY ((nested_ptr (union symtab_node_def, "(struct varpool_node *)(%h)", "(symtab_node)%h")))
+ next;
+ struct varpool_node *
+ GTY ((nested_ptr (union symtab_node_def, "(struct varpool_node *)(%h)", "(symtab_node)%h")))
+ prev;
/* Pointer to the next function in varpool_nodes_queue. */
- struct varpool_node *next_needed, *prev_needed;
- /* Circular list of nodes in the same comdat group if non-NULL. */
- struct varpool_node *same_comdat_group;
- struct ipa_ref_list ref_list;
- /* File stream where this node is being written to. */
- struct lto_file_decl_data * lto_file_data;
- PTR GTY ((skip)) aux;
- /* Ordering of all cgraph nodes. */
- int order;
- enum ld_plugin_symbol_resolution resolution;
+ struct varpool_node *
+ GTY ((nested_ptr (union symtab_node_def, "(struct varpool_node *)(%h)", "(symtab_node)%h")))
+ next_needed;
+ struct varpool_node *
+ GTY ((nested_ptr (union symtab_node_def, "(struct varpool_node *)(%h)", "(symtab_node)%h")))
+ prev_needed;
/* Set when function must be output - it is externally visible
or its address is taken. */
unsigned finalized : 1;
/* Set when variable is scheduled to be assembled. */
unsigned output : 1;
- /* Set when function is visible by other units. */
- unsigned externally_visible : 1;
/* Set for aliases once they got through assemble_alias. Also set for
extra name aliases in varpool_extra_name_alias. */
unsigned alias : 1;
unsigned extra_name_alias : 1;
- /* Set when variable is used from other LTRANS partition. */
- unsigned used_from_other_partition : 1;
- /* Set when variable is available in the other LTRANS partition.
- During WPA output it is used to mark nodes that are present in
- multiple partitions. */
- unsigned in_other_partition : 1;
};
/* Every top level asm statement is put into a cgraph_asm_node. */
int order;
};
-extern GTY(()) struct cgraph_node *cgraph_nodes;
+/* Symbol table entry. */
+union GTY((desc ("%h.symbol.type"))) symtab_node_def {
+ struct symtab_node_base GTY ((tag ("SYMTAB_SYMBOL"))) symbol;
+ /* Use cgraph (symbol) accessor to get cgraph_node. */
+ struct cgraph_node GTY ((tag ("SYMTAB_FUNCTION"))) x_function;
+ /* Use varpool (symbol) accessor to get varpool_node. */
+ struct varpool_node GTY ((tag ("SYMTAB_VARIABLE"))) x_variable;
+};
+
+extern GTY(()) symtab_node x_cgraph_nodes;
+#define cgraph_nodes ((struct cgraph_node *)x_cgraph_nodes)
extern GTY(()) int cgraph_n_nodes;
extern GTY(()) int cgraph_max_uid;
extern GTY(()) int cgraph_edge_max_uid;
};
extern enum cgraph_state cgraph_state;
extern bool cgraph_function_flags_ready;
-extern GTY(()) struct cgraph_node *cgraph_nodes_queue;
+extern GTY(()) symtab_node x_cgraph_nodes_queue;
+#define cgraph_nodes_queue ((struct cgraph_node *)x_cgraph_nodes_queue)
extern GTY(()) struct cgraph_node *cgraph_new_nodes;
extern GTY(()) struct cgraph_asm_node *cgraph_asm_nodes;
bool cgraph_optimize_for_size_p (struct cgraph_node *);
/* In varpool.c */
-extern GTY(()) struct varpool_node *varpool_nodes_queue;
-extern GTY(()) struct varpool_node *varpool_nodes;
+extern GTY(()) symtab_node x_varpool_nodes_queue;
+extern GTY(()) symtab_node x_varpool_nodes;
+#define varpool_nodes_queue ((struct varpool_node *)x_varpool_nodes_queue)
+#define varpool_nodes ((struct varpool_node *)x_varpool_nodes)
struct varpool_node *varpool_node (tree);
struct varpool_node *varpool_node_for_asm (tree asmname);
/* Return callgraph node for given symbol and check it is a function. */
static inline struct cgraph_node *
-cgraph (struct symtab_node *node)
+cgraph (symtab_node node)
{
- gcc_checking_assert (node->type == SYMTAB_FUNCTION);
- return (struct cgraph_node *)node;
+ gcc_checking_assert (!node || node->symbol.type == SYMTAB_FUNCTION);
+ return &node->x_function;
}
/* Return varpool node for given symbol and check it is a variable. */
static inline struct varpool_node *
-varpool (struct symtab_node *node)
+varpool (symtab_node node)
{
- gcc_checking_assert (node->type == SYMTAB_FUNCTION);
- return (struct varpool_node *)node;
+ gcc_checking_assert (!node || node->symbol.type == SYMTAB_VARIABLE);
+ return &node->x_variable;
}
struct varpool_node *node;
for (node = varpool_nodes_queue; node; node = node->next_needed)
{
- gcc_checking_assert (TREE_CODE (node->decl) == VAR_DECL);
- if (DECL_INITIAL (node->decl))
+ gcc_checking_assert (TREE_CODE (node->symbol.decl) == VAR_DECL);
+ if (DECL_INITIAL (node->symbol.decl))
return node;
}
return NULL;
{
for (node = node->next_needed; node; node = node->next_needed)
{
- gcc_checking_assert (TREE_CODE (node->decl) == VAR_DECL);
- if (DECL_INITIAL (node->decl))
+ gcc_checking_assert (TREE_CODE (node->symbol.decl) == VAR_DECL);
+ if (DECL_INITIAL (node->symbol.decl))
return node;
}
return NULL;
cgraph_only_called_directly_or_aliased_p (struct cgraph_node *node)
{
gcc_assert (!node->global.inlined_to);
- return (!node->needed && !node->address_taken
- && !node->reachable_from_other_partition
- && !DECL_STATIC_CONSTRUCTOR (node->decl)
- && !DECL_STATIC_DESTRUCTOR (node->decl)
- && !node->local.externally_visible);
+ return (!node->needed && !node->symbol.address_taken
+ && !node->symbol.used_from_other_partition
+ && !DECL_STATIC_CONSTRUCTOR (node->symbol.decl)
+ && !DECL_STATIC_DESTRUCTOR (node->symbol.decl)
+ && !node->symbol.externally_visible);
}
/* Return true when function NODE can be removed from callgraph
static inline bool
varpool_can_remove_if_no_refs (struct varpool_node *node)
{
- return (!node->force_output && !node->used_from_other_partition
- && (DECL_COMDAT (node->decl) || !node->externally_visible));
+ return (!node->force_output && !node->symbol.used_from_other_partition
+ && (DECL_COMDAT (node->symbol.decl)
+ || !node->symbol.externally_visible));
}
/* Return true when all references to VNODE must be visible in ipa_ref_list.
varpool_all_refs_explicit_p (struct varpool_node *vnode)
{
return (vnode->analyzed
- && !vnode->externally_visible
- && !vnode->used_from_other_partition
+ && !vnode->symbol.externally_visible
+ && !vnode->symbol.used_from_other_partition
&& !vnode->force_output);
}
{
struct ipa_ref *ref;
- ipa_ref_list_reference_iterate (&n->ref_list, 0, ref);
+ ipa_ref_list_reference_iterate (&n->symbol.ref_list, 0, ref);
gcc_checking_assert (ref->use == IPA_REF_ALIAS);
if (ref->refered_type == IPA_REF_CGRAPH)
return ipa_ref_node (ref);
{
struct ipa_ref *ref;
- ipa_ref_list_reference_iterate (&n->ref_list, 0, ref);
+ ipa_ref_list_reference_iterate (&n->symbol.ref_list, 0, ref);
gcc_checking_assert (ref->use == IPA_REF_ALIAS);
if (ref->refered_type == IPA_REF_VARPOOL)
return ipa_ref_varpool_node (ref);
{
struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
if (e->caller->global.inlined_to)
- return e->caller->global.inlined_to->decl == callee->decl;
+ return e->caller->global.inlined_to->symbol.decl == callee->symbol.decl;
else
- return e->caller->decl == callee->decl;
+ return e->caller->symbol.decl == callee->symbol.decl;
}
/* Return true if the TM_CLONE bit is set for a given FNDECL. */
{
eh_region i;
- if (DECL_FUNCTION_PERSONALITY (node->decl))
+ if (DECL_FUNCTION_PERSONALITY (node->symbol.decl))
{
struct cgraph_node *per_node;
- per_node = cgraph_get_create_node (DECL_FUNCTION_PERSONALITY (node->decl));
+ per_node = cgraph_get_create_node (DECL_FUNCTION_PERSONALITY (node->symbol.decl));
ipa_record_reference (node, NULL, per_node, NULL, IPA_REF_ADDR, NULL);
cgraph_mark_address_taken_node (per_node);
}
gimple_stmt_iterator gsi;
cgraph_node_remove_callees (node);
- ipa_remove_all_references (&node->ref_list);
+ ipa_remove_all_references (&node->symbol.ref_list);
node->count = ENTRY_BLOCK_PTR->count;
struct cgraph_node *node = cgraph_get_node (current_function_decl);
gimple_stmt_iterator gsi;
- ipa_remove_all_references (&node->ref_list);
+ ipa_remove_all_references (&node->symbol.ref_list);
node->count = ENTRY_BLOCK_PTR->count;
cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
{
/* If the user told us it is used, then it must be so. */
- if (node->local.externally_visible)
+ if (node->symbol.externally_visible)
return true;
/* ??? If the assembler name is set by hand, it is possible to assemble
while (cgraph_new_nodes)
{
node = cgraph_new_nodes;
- fndecl = node->decl;
+ fndecl = node->symbol.decl;
cgraph_new_nodes = cgraph_new_nodes->next_needed;
switch (cgraph_state)
{
error ("caller edge frequency is too large");
error_found = true;
}
- if (gimple_has_body_p (e->caller->decl)
+ if (gimple_has_body_p (e->caller->symbol.decl)
&& !e->caller->global.inlined_to
/* FIXME: Inline-analysis sets frequency to 0 when edge is optimized out.
Remove this once edges are actualy removed from the function at that time. */
<= (unsigned) e->uid)
|| !inline_edge_summary (e)->predicate)))
&& (e->frequency
- != compute_call_stmt_bb_frequency (e->caller->decl,
+ != compute_call_stmt_bb_frequency (e->caller->symbol.decl,
gimple_bb (e->call_stmt))))
{
error ("caller edge frequency %i does not match BB frequency %i",
e->frequency,
- compute_call_stmt_bb_frequency (e->caller->decl,
+ compute_call_stmt_bb_frequency (e->caller->symbol.decl,
gimple_bb (e->call_stmt)));
error_found = true;
}
/* We do not know if a node from a different partition is an alias or what it
aliases and therefore cannot do the former_clone_of check reliably. */
- if (!node || node->in_other_partition)
+ if (!node || node->symbol.in_other_partition)
return false;
node = cgraph_function_or_thunk_node (node, NULL);
- if ((e->callee->former_clone_of != node->decl
+ if ((e->callee->former_clone_of != node->symbol.decl
&& (!node->same_body_alias
|| e->callee->former_clone_of != node->thunk.alias))
/* IPA-CP sometimes redirect edge to clone and then back to the former
verify_cgraph_node (struct cgraph_node *node)
{
struct cgraph_edge *e;
- struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
+ struct function *this_cfun = DECL_STRUCT_FUNCTION (node->symbol.decl);
basic_block this_block;
gimple_stmt_iterator gsi;
bool error_found = false;
error ("execution count is negative");
error_found = true;
}
- if (node->global.inlined_to && node->local.externally_visible)
+ if (node->global.inlined_to && node->symbol.externally_visible)
{
error ("externally visible inline clone");
error_found = true;
}
- if (node->global.inlined_to && node->address_taken)
+ if (node->global.inlined_to && node->symbol.address_taken)
{
error ("inline clone with address taken");
error_found = true;
error_found = true;
}
- if (!cgraph_get_node (node->decl))
+ if (!cgraph_get_node (node->symbol.decl))
{
error ("node not found in cgraph_hash");
error_found = true;
error ("double linked list of clones corrupted");
error_found = true;
}
- if (node->same_comdat_group)
+ if (node->symbol.same_comdat_group)
{
- struct cgraph_node *n = node->same_comdat_group;
+ symtab_node n = node->symbol.same_comdat_group;
- if (!DECL_ONE_ONLY (node->decl))
+ if (!DECL_ONE_ONLY (n->symbol.decl))
{
error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
error_found = true;
}
- if (n == node)
+ if (n == (symtab_node)node)
{
error ("node is alone in a comdat group");
error_found = true;
}
do
{
- if (!n->same_comdat_group)
+ if (!n->symbol.same_comdat_group)
{
error ("same_comdat_group is not a circular list");
error_found = true;
break;
}
- n = n->same_comdat_group;
+ n = n->symbol.same_comdat_group;
}
- while (n != node);
+ while (n != (symtab_node)node);
}
if (node->analyzed && node->alias)
error ("Alias has call edges");
error_found = true;
}
- for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
+ for (i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list,
+ i, ref); i++)
if (ref->use != IPA_REF_ALIAS)
{
error ("Alias has non-alias reference");
error ("More than one edge out of thunk node");
error_found = true;
}
- if (gimple_has_body_p (node->decl))
+ if (gimple_has_body_p (node->symbol.decl))
{
error ("Thunk is not supposed to have body");
error_found = true;
}
}
- else if (node->analyzed && gimple_has_body_p (node->decl)
- && !TREE_ASM_WRITTEN (node->decl)
- && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
+ else if (node->analyzed && gimple_has_body_p (node->symbol.decl)
+ && !TREE_ASM_WRITTEN (node->symbol.decl)
+ && (!DECL_EXTERNAL (node->symbol.decl) || node->global.inlined_to)
&& !flag_wpa)
{
if (this_cfun->cfg)
if (verify_edge_corresponds_to_fndecl (e, decl))
{
error ("edge points to wrong declaration:");
- debug_tree (e->callee->decl);
+ debug_tree (e->callee->symbol.decl);
fprintf (stderr," Instead of:");
debug_tree (decl);
error_found = true;
cgraph_analyze_function (struct cgraph_node *node)
{
tree save = current_function_decl;
- tree decl = node->decl;
+ tree decl = node->symbol.decl;
if (node->alias && node->thunk.alias)
{
n = n->analyzed ? cgraph_alias_aliased_node (n) : NULL)
if (n == node)
{
- error ("function %q+D part of alias cycle", node->decl);
+ error ("function %q+D part of alias cycle", node->symbol.decl);
node->alias = false;
return;
}
- if (!VEC_length (ipa_ref_t, node->ref_list.references))
+ if (!VEC_length (ipa_ref_t, node->symbol.ref_list.references))
ipa_record_reference (node, NULL, tgt, NULL, IPA_REF_ALIAS, NULL);
if (node->same_body_alias)
{
- DECL_VIRTUAL_P (node->decl) = DECL_VIRTUAL_P (node->thunk.alias);
- DECL_DECLARED_INLINE_P (node->decl)
+ DECL_VIRTUAL_P (node->symbol.decl) = DECL_VIRTUAL_P (node->thunk.alias);
+ DECL_DECLARED_INLINE_P (node->symbol.decl)
= DECL_DECLARED_INLINE_P (node->thunk.alias);
- DECL_DISREGARD_INLINE_LIMITS (node->decl)
+ DECL_DISREGARD_INLINE_LIMITS (node->symbol.decl)
= DECL_DISREGARD_INLINE_LIMITS (node->thunk.alias);
}
/* Fixup visibility nonsences C++ frontend produce on same body aliases. */
- if (TREE_PUBLIC (node->decl) && node->same_body_alias)
+ if (TREE_PUBLIC (node->symbol.decl) && node->same_body_alias)
{
- DECL_EXTERNAL (node->decl) = DECL_EXTERNAL (node->thunk.alias);
+ DECL_EXTERNAL (node->symbol.decl) = DECL_EXTERNAL (node->thunk.alias);
if (DECL_ONE_ONLY (node->thunk.alias))
{
- DECL_COMDAT (node->decl) = DECL_COMDAT (node->thunk.alias);
- DECL_COMDAT_GROUP (node->decl) = DECL_COMDAT_GROUP (node->thunk.alias);
- if (DECL_ONE_ONLY (node->thunk.alias) && !node->same_comdat_group)
+ DECL_COMDAT (node->symbol.decl) = DECL_COMDAT (node->thunk.alias);
+ DECL_COMDAT_GROUP (node->symbol.decl) = DECL_COMDAT_GROUP (node->thunk.alias);
+ if (DECL_ONE_ONLY (node->thunk.alias) && !node->symbol.same_comdat_group)
{
struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
- node->same_comdat_group = tgt;
- if (!tgt->same_comdat_group)
- tgt->same_comdat_group = node;
+ node->symbol.same_comdat_group = (symtab_node)tgt;
+ if (!tgt->symbol.same_comdat_group)
+ tgt->symbol.same_comdat_group = (symtab_node)node;
else
{
- struct cgraph_node *n;
- for (n = tgt->same_comdat_group;
- n->same_comdat_group != tgt;
- n = n->same_comdat_group)
+ symtab_node n;
+ for (n = tgt->symbol.same_comdat_group;
+ n->symbol.same_comdat_group != (symtab_node)tgt;
+ n = n->symbol.same_comdat_group)
;
- n->same_comdat_group = node;
+ n->symbol.same_comdat_group = (symtab_node)node;
}
}
}
}
cgraph_mark_reachable_node (cgraph_alias_aliased_node (node));
- if (node->address_taken)
+ if (node->symbol.address_taken)
cgraph_mark_address_taken_node (cgraph_alias_aliased_node (node));
- if (cgraph_decide_is_function_needed (node, node->decl))
+ if (cgraph_decide_is_function_needed (node, node->symbol.decl))
cgraph_mark_needed_node (node);
}
else if (node->thunk.thunk_p)
current_function_decl = decl;
push_cfun (DECL_STRUCT_FUNCTION (decl));
- assign_assembler_name_if_neeeded (node->decl);
+ assign_assembler_name_if_neeeded (node->symbol.decl);
/* Make sure to gimplify bodies only once. During analyzing a
function we lower it, which will require gimplified nested
if (!node->lowered)
{
if (node->nested)
- lower_nested_functions (node->decl);
+ lower_nested_functions (node->symbol.decl);
gcc_assert (!node->nested);
gimple_register_cfg_hooks ();
struct cgraph_node *node;
for (node = cgraph_nodes; node; node = node->next)
if (node->same_body_alias
- && !VEC_length (ipa_ref_t, node->ref_list.references))
+ && !VEC_length (ipa_ref_t, node->symbol.ref_list.references))
{
struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
ipa_record_reference (node, NULL, tgt, NULL, IPA_REF_ALIAS, NULL);
for (node = cgraph_nodes; node != first; node = node->next)
{
- tree decl = node->decl;
+ tree decl = node->symbol.decl;
if (DECL_PRESERVE_P (decl))
cgraph_mark_needed_node (node);
if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
&& lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
- && TREE_PUBLIC (node->decl))
+ && TREE_PUBLIC (node->symbol.decl))
{
if (node->local.finalized)
cgraph_mark_needed_node (node);
}
else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
{
- if (! TREE_PUBLIC (node->decl))
- warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
+ if (! TREE_PUBLIC (node->symbol.decl))
+ warning_at (DECL_SOURCE_LOCATION (node->symbol.decl), OPT_Wattributes,
"%<externally_visible%>"
" attribute have effect only on public objects");
else if (node->local.finalized)
if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
&& (node->local.finalized && !node->alias))
{
- warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
+ warning_at (DECL_SOURCE_LOCATION (node->symbol.decl), OPT_Wattributes,
"%<weakref%> attribute ignored"
" because function is defined");
DECL_WEAK (decl) = 0;
}
for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
{
- tree decl = vnode->decl;
+ tree decl = vnode->symbol.decl;
if (DECL_PRESERVE_P (decl))
{
vnode->force_output = true;
}
if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
&& lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
- && TREE_PUBLIC (vnode->decl))
+ && TREE_PUBLIC (vnode->symbol.decl))
{
if (vnode->finalized)
varpool_mark_needed_node (vnode);
}
else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
{
- if (! TREE_PUBLIC (vnode->decl))
- warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
+ if (! TREE_PUBLIC (vnode->symbol.decl))
+ warning_at (DECL_SOURCE_LOCATION (vnode->symbol.decl), OPT_Wattributes,
"%<externally_visible%>"
" attribute have effect only on public objects");
else if (vnode->finalized)
&& vnode->finalized
&& DECL_INITIAL (decl))
{
- warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
+ warning_at (DECL_SOURCE_LOCATION (vnode->symbol.decl), OPT_Wattributes,
"%<weakref%> attribute ignored"
" because variable is initialized");
DECL_WEAK (decl) = 0;
while (cgraph_nodes_queue)
{
struct cgraph_edge *edge;
- tree decl = cgraph_nodes_queue->decl;
+ tree decl = cgraph_nodes_queue->symbol.decl;
node = cgraph_nodes_queue;
- cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
+ x_cgraph_nodes_queue = (symtab_node)cgraph_nodes_queue->next_needed;
node->next_needed = NULL;
/* ??? It is possible to create extern inline function and later using
if (!edge->caller->reachable && edge->caller->thunk.thunk_p)
cgraph_mark_reachable_node (edge->caller);
- if (node->same_comdat_group)
+ if (node->symbol.same_comdat_group)
{
- for (next = node->same_comdat_group;
+ for (next = cgraph (node->symbol.same_comdat_group);
next != node;
- next = next->same_comdat_group)
+ next = cgraph (next->symbol.same_comdat_group))
cgraph_mark_reachable_node (next);
}
for (node = cgraph_nodes; node != first_analyzed; node = next)
{
- tree decl = node->decl;
+ tree decl = node->symbol.decl;
next = node->next;
if (node->local.finalized && !gimple_has_body_p (decl)
However for weakref we insist on EXTERNAL flag being set.
See gcc.dg/attr-alias-5.c */
if (DECL_EXTERNAL (p->decl))
- DECL_EXTERNAL (p->decl) = lookup_attribute ("weakref",
- DECL_ATTRIBUTES (p->decl)) != NULL;
- cgraph_create_function_alias (p->decl, target_node->decl);
+ DECL_EXTERNAL (p->decl)
+ = lookup_attribute ("weakref",
+ DECL_ATTRIBUTES (p->decl)) != NULL;
+ cgraph_create_function_alias (p->decl, target_node->symbol.decl);
VEC_unordered_remove (alias_pair, alias_pairs, i);
}
else if (TREE_CODE (p->decl) == VAR_DECL
However for weakref we insist on EXTERNAL flag being set.
See gcc.dg/attr-alias-5.c */
if (DECL_EXTERNAL (p->decl))
- DECL_EXTERNAL (p->decl) = lookup_attribute ("weakref",
- DECL_ATTRIBUTES (p->decl)) != NULL;
- varpool_create_variable_alias (p->decl, target_vnode->decl);
+ DECL_EXTERNAL (p->decl)
+ = lookup_attribute ("weakref",
+ DECL_ATTRIBUTES (p->decl)) != NULL;
+ varpool_create_variable_alias (p->decl, target_vnode->symbol.decl);
VEC_unordered_remove (alias_pair, alias_pairs, i);
}
/* Weakrefs with target not defined in current unit are easy to handle; they
for (node = cgraph_nodes; node; node = node->next)
{
- tree decl = node->decl;
+ tree decl = node->symbol.decl;
struct cgraph_edge *e;
- gcc_assert (!node->process || node->same_comdat_group);
+ gcc_assert (!node->process || node->symbol.same_comdat_group);
if (node->process)
continue;
&& !node->alias
&& !node->global.inlined_to
&& (!cgraph_only_called_directly_p (node)
- || ((e || ipa_ref_has_aliases_p (&node->ref_list))
+ || ((e || ipa_ref_has_aliases_p (&node->symbol.ref_list))
&& node->reachable))
&& !TREE_ASM_WRITTEN (decl)
&& !DECL_EXTERNAL (decl))
{
node->process = 1;
- if (node->same_comdat_group)
+ if (node->symbol.same_comdat_group)
{
struct cgraph_node *next;
- for (next = node->same_comdat_group;
+ for (next = cgraph (node->symbol.same_comdat_group);
next != node;
- next = next->same_comdat_group)
+ next = cgraph (next->symbol.same_comdat_group))
if (!next->thunk.thunk_p && !next->alias)
next->process = 1;
}
}
- else if (node->same_comdat_group)
+ else if (node->symbol.same_comdat_group)
{
#ifdef ENABLE_CHECKING
check_same_comdat_groups = true;
/* FIXME: in ltrans unit when offline copy is outside partition but inline copies
are inside partition, we can end up not removing the body since we no longer
have analyzed node pointing to it. */
- && !node->in_other_partition
+ && !node->symbol.in_other_partition
&& !node->alias
&& !DECL_EXTERNAL (decl))
{
#endif
gcc_assert (node->global.inlined_to
|| !gimple_has_body_p (decl)
- || node->in_other_partition
+ || node->symbol.in_other_partition
|| DECL_EXTERNAL (decl));
}
#ifdef ENABLE_CHECKING
if (check_same_comdat_groups)
for (node = cgraph_nodes; node; node = node->next)
- if (node->same_comdat_group && !node->process)
+ if (node->symbol.same_comdat_group && !node->process)
{
- tree decl = node->decl;
+ tree decl = node->symbol.decl;
if (!node->global.inlined_to
&& gimple_has_body_p (decl)
/* FIXME: in an ltrans unit when the offline copy is outside a
partition but inline copies are inside a partition, we can
end up not removing the body since we no longer have an
analyzed node pointing to it. */
- && !node->in_other_partition
+ && !node->symbol.in_other_partition
&& !DECL_EXTERNAL (decl))
{
dump_cgraph_node (stderr, node);
HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
tree virtual_offset = NULL;
tree alias = node->thunk.alias;
- tree thunk_fndecl = node->decl;
+ tree thunk_fndecl = node->symbol.decl;
tree a = DECL_ARGUMENTS (thunk_fndecl);
current_function_decl = thunk_fndecl;
}
else
e = e->next_caller;
- for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
+ for (i = 0; ipa_ref_list_refering_iterate (&node->symbol.ref_list,
+ i, ref); i++)
if (ref->use == IPA_REF_ALIAS)
{
struct cgraph_node *alias = ipa_ref_refering_node (ref);
/* Force assemble_alias to really output the alias this time instead
of buffering it in same alias pairs. */
TREE_ASM_WRITTEN (alias->thunk.alias) = 1;
- assemble_alias (alias->decl,
+ assemble_alias (alias->symbol.decl,
DECL_ASSEMBLER_NAME (alias->thunk.alias));
assemble_thunks_and_aliases (alias);
TREE_ASM_WRITTEN (alias->thunk.alias) = saved_written;
static void
tree_rest_of_compilation (struct cgraph_node *node)
{
- tree fndecl = node->decl;
+ tree fndecl = node->symbol.decl;
location_t saved_loc;
timevar_push (TV_REST_OF_COMPILATION);
static void
cgraph_expand_function (struct cgraph_node *node)
{
- tree decl = node->decl;
+ tree decl = node->symbol.decl;
/* We ought to not compile any inline clones. */
gcc_assert (!node->global.inlined_to);
{
if (pf->process && !pf->thunk.thunk_p && !pf->alias)
{
- i = pf->order;
+ i = pf->symbol.order;
gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
nodes[i].kind = ORDER_FUNCTION;
nodes[i].u.f = pf;
for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
{
- i = pv->order;
+ i = pv->symbol.order;
gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
nodes[i].kind = ORDER_VAR;
nodes[i].u.v = pv;
struct cgraph_node *node;
struct varpool_node *vnode;
for (node = cgraph_nodes; node; node = node->next)
- if (node->alias && DECL_EXTERNAL (node->decl)
- && !TREE_ASM_WRITTEN (node->decl)
- && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
- assemble_alias (node->decl,
+ if (node->alias && DECL_EXTERNAL (node->symbol.decl)
+ && !TREE_ASM_WRITTEN (node->symbol.decl)
+ && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->symbol.decl)))
+ assemble_alias (node->symbol.decl,
node->thunk.alias ? DECL_ASSEMBLER_NAME (node->thunk.alias)
- : get_alias_symbol (node->decl));
+ : get_alias_symbol (node->symbol.decl));
for (vnode = varpool_nodes; vnode; vnode = vnode->next)
- if (vnode->alias && DECL_EXTERNAL (vnode->decl)
- && !TREE_ASM_WRITTEN (vnode->decl)
- && lookup_attribute ("weakref", DECL_ATTRIBUTES (vnode->decl)))
- assemble_alias (vnode->decl,
+ if (vnode->alias && DECL_EXTERNAL (vnode->symbol.decl)
+ && !TREE_ASM_WRITTEN (vnode->symbol.decl)
+ && lookup_attribute ("weakref", DECL_ATTRIBUTES (vnode->symbol.decl)))
+ assemble_alias (vnode->symbol.decl,
vnode->alias_of ? DECL_ASSEMBLER_NAME (vnode->alias_of)
- : get_alias_symbol (vnode->decl));
+ : get_alias_symbol (vnode->symbol.decl));
}
/* Update the call expr on the edges to call the new version. */
for (e = new_version->callers; e; e = e->next_caller)
{
- struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
- gimple_call_set_fndecl (e->call_stmt, new_version->decl);
+ struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->symbol.decl);
+ gimple_call_set_fndecl (e->call_stmt, new_version->symbol.decl);
maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
}
}
new_version->analyzed = old_version->analyzed;
new_version->local = old_version->local;
- new_version->local.externally_visible = false;
+ new_version->symbol.externally_visible = false;
new_version->local.local = true;
new_version->global = old_version->global;
new_version->rtl = old_version->rtl;
basic_block new_entry_block,
const char *clone_name)
{
- tree old_decl = old_version_node->decl;
+ tree old_decl = old_version_node->symbol.decl;
struct cgraph_node *new_version_node = NULL;
tree new_decl;
that is not weak also.
??? We cannot use COMDAT linkage because there is no
ABI support for this. */
- cgraph_make_decl_local (new_version_node->decl);
- DECL_VIRTUAL_P (new_version_node->decl) = 0;
- new_version_node->local.externally_visible = 0;
+ cgraph_make_decl_local (new_version_node->symbol.decl);
+ DECL_VIRTUAL_P (new_version_node->symbol.decl) = 0;
+ new_version_node->symbol.externally_visible = 0;
new_version_node->local.local = 1;
new_version_node->lowered = true;
cgraph_materialize_clone (struct cgraph_node *node)
{
bitmap_obstack_initialize (NULL);
- node->former_clone_of = node->clone_of->decl;
+ node->former_clone_of = node->clone_of->symbol.decl;
if (node->clone_of->former_clone_of)
node->former_clone_of = node->clone_of->former_clone_of;
/* Copy the OLD_VERSION_NODE function tree to the new version. */
- tree_function_versioning (node->clone_of->decl, node->decl,
+ tree_function_versioning (node->clone_of->symbol.decl, node->symbol.decl,
node->clone.tree_map, true,
node->clone.args_to_skip, false,
NULL, NULL);
if (cgraph_dump_file)
{
- dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
- dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
+ dump_function_to_file (node->clone_of->symbol.decl, cgraph_dump_file, dump_flags);
+ dump_function_to_file (node->symbol.decl, cgraph_dump_file, dump_flags);
}
/* Function is no longer clone. */
{
cgraph_release_function_body (node->clone_of);
cgraph_node_remove_callees (node->clone_of);
- ipa_remove_all_references (&node->clone_of->ref_list);
+ ipa_remove_all_references (&node->clone_of->symbol.ref_list);
}
node->clone_of = NULL;
bitmap_obstack_release (NULL);
#endif
if (e->indirect_unknown_callee
- || decl == e->callee->decl)
+ || decl == e->callee->symbol.decl)
return e->call_stmt;
#ifdef ENABLE_CHECKING
new_stmt
= gimple_call_copy_skip_args (e->call_stmt,
e->callee->clone.combined_args_to_skip);
- gimple_call_set_fndecl (new_stmt, e->callee->decl);
+ gimple_call_set_fndecl (new_stmt, e->callee->symbol.decl);
if (gimple_vdef (new_stmt)
&& TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
else
{
new_stmt = e->call_stmt;
- gimple_call_set_fndecl (new_stmt, e->callee->decl);
+ gimple_call_set_fndecl (new_stmt, e->callee->symbol.decl);
update_stmt (new_stmt);
}
stabilized = true;
for (node = cgraph_nodes; node; node = node->next)
{
- if (node->clone_of && node->decl != node->clone_of->decl
- && !gimple_has_body_p (node->decl))
+ if (node->clone_of && node->symbol.decl != node->clone_of->symbol.decl
+ && !gimple_has_body_p (node->symbol.decl))
{
- if (gimple_has_body_p (node->clone_of->decl))
+ if (gimple_has_body_p (node->clone_of->symbol.decl))
{
if (cgraph_dump_file)
{
for (node = cgraph_nodes; node; node = node->next)
if (node->analyzed
&& (node->global.inlined_to
- || gimple_has_body_p (node->decl)))
+ || gimple_has_body_p (node->symbol.decl)))
{
error_found = true;
dump_cgraph_node (stderr, node);
+2012-04-14 Jan Hubicka <jh@suse.cz>
+
+ * tree.c: Update fields referenced for new cgraph/varpool layout.
+ * decl2.c: Likewise.
+
2012-04-13 Jason Merrill <jason@redhat.com>
PR c++/52824
{
current = varpool_node (vtbl);
if (last)
- last->same_comdat_group = current;
+ last->symbol.same_comdat_group = (symtab_node) current;
last = current;
if (!first)
first = current;
}
if (first != last)
- last->same_comdat_group = first;
+ last->symbol.same_comdat_group = (symtab_node)first;
/* Since we're writing out the vtable here, also write the debug
info. */
for (node = cgraph_nodes; node ; node = node->next)
{
- tree fndecl = node->decl;
+ tree fndecl = node->symbol.decl;
if (DECL_CONTEXT (fndecl)
&& TYPE_P (DECL_CONTEXT (fndecl))
for (node = cgraph_nodes; node ; node = node->next)
{
- tree fndecl = node->decl;
+ tree fndecl = node->symbol.decl;
if (TREE_ASM_WRITTEN (fndecl)
&& pointer_set_contains (candidates, fndecl))
static bool
clear_decl_external (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
- DECL_EXTERNAL (node->decl) = 0;
+ DECL_EXTERNAL (node->symbol.decl) = 0;
return false;
}
/* If we mark !DECL_EXTERNAL one of the symbols in some comdat
group, we need to mark all symbols in the same comdat group
that way. */
- if (node->same_comdat_group)
- for (next = node->same_comdat_group;
+ if (node->symbol.same_comdat_group)
+ for (next = cgraph (node->symbol.same_comdat_group);
next != node;
- next = next->same_comdat_group)
+ next = cgraph (next->symbol.same_comdat_group))
cgraph_for_node_and_aliases (next, clear_decl_external,
NULL, true);
}
/* Don't fix same_body aliases. Although they don't have their own
CFG, they share it with what they alias to. */
if (!node || !node->alias
- || !VEC_length (ipa_ref_t, node->ref_list.references))
+ || !VEC_length (ipa_ref_t, node->symbol.ref_list.references))
return true;
}
if (cgn)
for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
- unshare_body (cgn->decl);
+ unshare_body (cgn->symbol.decl);
}
/* Callback for walk_tree to unmark the visited trees rooted at *TP.
if (cgn)
for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
- unvisit_body (cgn->decl);
+ unvisit_body (cgn->symbol.decl);
}
/* Unconditionally make an unshared copy of EXPR. This is used when using
static inline bool
edge_within_scc (struct cgraph_edge *cs)
{
- struct ipa_dfs_info *caller_dfs = (struct ipa_dfs_info *) cs->caller->aux;
+ struct ipa_dfs_info *caller_dfs = (struct ipa_dfs_info *) cs->caller->symbol.aux;
struct ipa_dfs_info *callee_dfs;
struct cgraph_node *callee = cgraph_function_node (cs->callee, NULL);
- callee_dfs = (struct ipa_dfs_info *) callee->aux;
+ callee_dfs = (struct ipa_dfs_info *) callee->symbol.aux;
return (caller_dfs
&& callee_dfs
&& caller_dfs->scc_no == callee_dfs->scc_no);
return false;
}
- if (!optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->decl)))
+ if (!optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->symbol.decl)))
{
if (dump_file)
fprintf (dump_file, "Not considering %s for cloning; "
else if (isummary->size <= MAX_INLINE_INSNS_AUTO / 2)
res += 15;
else if (isummary->size <= MAX_INLINE_INSNS_AUTO
- || DECL_DECLARED_INLINE_P (callee->decl))
+ || DECL_DECLARED_INLINE_P (callee->symbol.decl))
res += 7;
}
{
if (time_benefit == 0
|| !flag_ipa_cp_clone
- || !optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->decl)))
+ || !optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->symbol.decl)))
return false;
gcc_assert (size_cost > 0);
if (!cgraph_function_with_gimple_body_p (node))
continue;
- node_dfs_info = (struct ipa_dfs_info *) node->aux;
+ node_dfs_info = (struct ipa_dfs_info *) node->symbol.aux;
/* First, iteratively propagate within the strongly connected component
until all lattices stabilize. */
v = node_dfs_info->next_cycle;
while (v)
{
push_node_to_stack (topo, v);
- v = ((struct ipa_dfs_info *) v->aux)->next_cycle;
+ v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle;
}
v = node;
if (!edge_within_scc (cs))
propagate_constants_accross_call (cs);
- v = ((struct ipa_dfs_info *) v->aux)->next_cycle;
+ v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle;
}
}
}
identify_dead_nodes (struct cgraph_node *node)
{
struct cgraph_node *v;
- for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
+ for (v = node; v ; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
if (cgraph_will_be_removed_from_program_if_no_direct_calls (v)
&& !cgraph_for_node_and_aliases (v,
has_undead_caller_from_outside_scc_p,
NULL, true))
IPA_NODE_REF (v)->node_dead = 1;
- for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
+ for (v = node; v ; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
if (!IPA_NODE_REF (v)->node_dead)
spread_undeadness (v);
if (dump_file && (dump_flags & TDF_DETAILS))
{
- for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
+ for (v = node; v ; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
if (IPA_NODE_REF (v)->node_dead)
fprintf (dump_file, " Marking node as dead: %s/%i.\n",
cgraph_node_name (v), v->uid);
{
struct cgraph_node *v;
iterate = false;
- for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
+ for (v = node; v ; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
if (cgraph_function_with_gimple_body_p (v)
&& ipcp_versionable_function_p (v))
iterate |= decide_whether_version_node (v);
{
/* Unreachable nodes should have been eliminated before ipcp. */
gcc_assert (node->needed || node->reachable);
- node->local.versionable = tree_versionable_function_p (node->decl);
+ node->local.versionable
+ = tree_versionable_function_p (node->symbol.decl);
ipa_analyze_node (node);
}
}
int i;
fprintf (f, "Inline summary for %s/%i", cgraph_node_name (node),
node->uid);
- if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
+ if (DECL_DISREGARD_INLINE_LIMITS (node->symbol.decl))
fprintf (f, " always_inline");
if (s->inlinable)
fprintf (f, " inlinable");
struct ipa_node_params *parms_info,
struct inline_summary *summary)
{
- struct function *my_function = DECL_STRUCT_FUNCTION (node->decl);
+ struct function *my_function = DECL_STRUCT_FUNCTION (node->symbol.decl);
bool done = false;
basic_block bb;
<0,2>. */
basic_block bb;
gimple_stmt_iterator bsi;
- struct function *my_function = DECL_STRUCT_FUNCTION (node->decl);
+ struct function *my_function = DECL_STRUCT_FUNCTION (node->symbol.decl);
int freq;
struct inline_summary *info = inline_summary (node);
struct predicate bb_predicate;
compute_bb_predicates (node, parms_info, info);
FOR_EACH_BB_FN (bb, my_function)
{
- freq = compute_call_stmt_bb_frequency (node->decl, bb);
+ freq = compute_call_stmt_bb_frequency (node->symbol.decl, bb);
/* TODO: Obviously predicates can be propagated down across CFG. */
if (parms_info)
}
/* Even is_gimple_min_invariant rely on current_function_decl. */
- current_function_decl = node->decl;
- push_cfun (DECL_STRUCT_FUNCTION (node->decl));
+ current_function_decl = node->symbol.decl;
+ push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
/* Estimate the stack size for the function if we're optimizing. */
self_stack_size = optimize ? estimated_stack_frame_size (node) : 0;
info->stack_frame_offset = 0;
/* Can this function be inlined at all? */
- info->inlinable = tree_inlinable_function_p (node->decl);
+ info->inlinable = tree_inlinable_function_p (node->symbol.decl);
/* Type attributes can use parameter indices to describe them. */
- if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
+ if (TYPE_ATTRIBUTES (TREE_TYPE (node->symbol.decl)))
node->local.can_change_signature = false;
else
{
/* Functions calling builtin_apply can not change signature. */
for (e = node->callees; e; e = e->next_callee)
{
- tree cdecl = e->callee->decl;
+ tree cdecl = e->callee->symbol.decl;
if (DECL_BUILT_IN (cdecl)
&& DECL_BUILT_IN_CLASS (cdecl) == BUILT_IN_NORMAL
&& (DECL_FUNCTION_CODE (cdecl) == BUILT_IN_APPLY_ARGS
d.growth = d.growth < info->size ? info->size : d.growth;
else
{
- if (!DECL_EXTERNAL (node->decl)
+ if (!DECL_EXTERNAL (node->symbol.decl)
&& cgraph_will_be_removed_from_program_if_no_direct_calls (node))
d.growth -= info->size;
/* COMDAT functions are very often not shared across multiple units
since they come from various template instantiations.
Take this into account. */
- else if (DECL_COMDAT (node->decl)
+ else if (DECL_COMDAT (node->symbol.decl)
&& cgraph_can_remove_if_no_direct_calls_p (node))
d.growth -= (info->size
* (100 - PARAM_VALUE (PARAM_COMDAT_SHARING_PROBABILITY))
static void
inline_analyze_function (struct cgraph_node *node)
{
- push_cfun (DECL_STRUCT_FUNCTION (node->decl));
- current_function_decl = node->decl;
+ push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
+ current_function_decl = node->symbol.decl;
if (dump_file)
fprintf (dump_file, "\nAnalyzing function: %s/%u\n",
/* FIXME: When address is taken of DECL_EXTERNAL function we still
can remove its offline copy, but we would need to keep unanalyzed node in
the callgraph so references can point to it. */
- return (!node->address_taken
- && !ipa_ref_has_aliases_p (&node->ref_list)
+ return (!node->symbol.address_taken
+ && !ipa_ref_has_aliases_p (&node->symbol.ref_list)
&& cgraph_can_remove_if_no_direct_calls_p (node)
/* Inlining might enable more devirtualizing, so we want to remove
those only after all devirtualizable virtual calls are processed.
Lacking may edges in callgraph we just preserve them post
inlining. */
- && (!DECL_VIRTUAL_P (node->decl)
- || (!DECL_COMDAT (node->decl)
- && !DECL_EXTERNAL (node->decl)))
+ && (!DECL_VIRTUAL_P (node->symbol.decl)
+ || (!DECL_COMDAT (node->symbol.decl)
+ && !DECL_EXTERNAL (node->symbol.decl)))
/* During early inlining some unanalyzed cgraph nodes might be in the
callgraph and they might reffer the function in question. */
&& !cgraph_new_nodes);
/* When we see same comdat group, we need to be sure that all
items can be removed. */
- if (!node->same_comdat_group)
+ if (!node->symbol.same_comdat_group)
return true;
- for (next = node->same_comdat_group;
- next != node; next = next->same_comdat_group)
+ for (next = cgraph (node->symbol.same_comdat_group);
+ next != node; next = cgraph (next->symbol.same_comdat_group))
if ((next->callers && next->callers != e)
|| !can_remove_node_now_p_1 (next))
return false;
For now we keep the ohter functions in the group in program until
cgraph_remove_unreachable_functions gets rid of them. */
gcc_assert (!e->callee->global.inlined_to);
- if (e->callee->analyzed && !DECL_EXTERNAL (e->callee->decl))
+ if (e->callee->analyzed && !DECL_EXTERNAL (e->callee->symbol.decl))
{
if (overall_size)
*overall_size -= inline_summary (e->callee)->size;
nfunctions_inlined++;
}
duplicate = false;
- e->callee->local.externally_visible = false;
+ e->callee->symbol.externally_visible = false;
update_noncloned_frequencies (e->callee, e->frequency);
}
else
{
struct cgraph_node *n;
- n = cgraph_clone_node (e->callee, e->callee->decl,
+ n = cgraph_clone_node (e->callee, e->callee->symbol.decl,
e->count, e->frequency,
update_original, NULL, true);
cgraph_redirect_edge_callee (e, n);
gcc_assert (!callee->global.inlined_to);
e->inline_failed = CIF_OK;
- DECL_POSSIBLY_INLINED (callee->decl) = true;
+ DECL_POSSIBLY_INLINED (callee->symbol.decl) = true;
to = e->caller;
if (to->global.inlined_to)
fprintf (dump_file, "\nSaving body of %s for later reuse\n",
cgraph_node_name (node));
- gcc_assert (node == cgraph_get_node (node->decl));
+ gcc_assert (node == cgraph_get_node (node->symbol.decl));
/* first_clone will be turned into real function. */
first_clone = node->clones;
- first_clone->decl = copy_node (node->decl);
+ first_clone->symbol.decl = copy_node (node->symbol.decl);
cgraph_insert_node_to_hashtable (first_clone);
- gcc_assert (first_clone == cgraph_get_node (first_clone->decl));
+ gcc_assert (first_clone == cgraph_get_node (first_clone->symbol.decl));
/* Now reshape the clone tree, so all other clones descends from
first_clone. */
if (first_clone->clones)
for (n = first_clone->clones; n != first_clone;)
{
- gcc_assert (n->decl == node->decl);
- n->decl = first_clone->decl;
+ gcc_assert (n->symbol.decl == node->symbol.decl);
+ n->symbol.decl = first_clone->symbol.decl;
if (n->clones)
n = n->clones;
else if (n->next_sibling_clone)
}
/* Copy the OLD_VERSION_NODE function tree to the new version. */
- tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL,
- false, NULL, NULL);
+ tree_function_versioning (node->symbol.decl, first_clone->symbol.decl,
+ NULL, true, NULL, false, NULL, NULL);
/* The function will be short lived and removed after we inline all the clones,
but make it internal so we won't confuse ourself. */
- DECL_EXTERNAL (first_clone->decl) = 0;
- DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
- TREE_PUBLIC (first_clone->decl) = 0;
- DECL_COMDAT (first_clone->decl) = 0;
+ DECL_EXTERNAL (first_clone->symbol.decl) = 0;
+ DECL_COMDAT_GROUP (first_clone->symbol.decl) = NULL_TREE;
+ TREE_PUBLIC (first_clone->symbol.decl) = 0;
+ DECL_COMDAT (first_clone->symbol.decl) = 0;
VEC_free (ipa_opt_pass, heap,
first_clone->ipa_transforms_to_apply);
first_clone->ipa_transforms_to_apply = NULL;
enum availability avail;
struct cgraph_node *callee
= cgraph_function_or_thunk_node (e->callee, &avail);
- tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (e->caller->decl);
+ tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (e->caller->symbol.decl);
tree callee_tree
- = callee ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee->decl) : NULL;
- struct function *caller_cfun = DECL_STRUCT_FUNCTION (e->caller->decl);
+ = callee ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee->symbol.decl) : NULL;
+ struct function *caller_cfun = DECL_STRUCT_FUNCTION (e->caller->symbol.decl);
struct function *callee_cfun
- = callee ? DECL_STRUCT_FUNCTION (callee->decl) : NULL;
+ = callee ? DECL_STRUCT_FUNCTION (callee->symbol.decl) : NULL;
if (!caller_cfun && e->caller->clone_of)
- caller_cfun = DECL_STRUCT_FUNCTION (e->caller->clone_of->decl);
+ caller_cfun = DECL_STRUCT_FUNCTION (e->caller->clone_of->symbol.decl);
if (!callee_cfun && callee && callee->clone_of)
- callee_cfun = DECL_STRUCT_FUNCTION (callee->clone_of->decl);
+ callee_cfun = DECL_STRUCT_FUNCTION (callee->clone_of->symbol.decl);
gcc_assert (e->inline_failed);
inlinable = false;
}
/* Don't inline if the functions have different EH personalities. */
- else if (DECL_FUNCTION_PERSONALITY (e->caller->decl)
- && DECL_FUNCTION_PERSONALITY (callee->decl)
- && (DECL_FUNCTION_PERSONALITY (e->caller->decl)
- != DECL_FUNCTION_PERSONALITY (callee->decl)))
+ else if (DECL_FUNCTION_PERSONALITY (e->caller->symbol.decl)
+ && DECL_FUNCTION_PERSONALITY (callee->symbol.decl)
+ && (DECL_FUNCTION_PERSONALITY (e->caller->symbol.decl)
+ != DECL_FUNCTION_PERSONALITY (callee->symbol.decl)))
{
e->inline_failed = CIF_EH_PERSONALITY;
inlinable = false;
}
/* TM pure functions should not be inlined into non-TM_pure
functions. */
- else if (is_tm_pure (callee->decl)
- && !is_tm_pure (e->caller->decl))
+ else if (is_tm_pure (callee->symbol.decl)
+ && !is_tm_pure (e->caller->symbol.decl))
{
e->inline_failed = CIF_UNSPECIFIED;
inlinable = false;
inlinable = false;
}
/* Check compatibility of target optimization options. */
- else if (!targetm.target_option.can_inline_p (e->caller->decl,
- callee->decl))
+ else if (!targetm.target_option.can_inline_p (e->caller->symbol.decl,
+ callee->symbol.decl))
{
e->inline_failed = CIF_TARGET_OPTION_MISMATCH;
inlinable = false;
}
/* Check if caller growth allows the inlining. */
- else if (!DECL_DISREGARD_INLINE_LIMITS (callee->decl)
+ else if (!DECL_DISREGARD_INLINE_LIMITS (callee->symbol.decl)
&& !lookup_attribute ("flatten",
DECL_ATTRIBUTES
(e->caller->global.inlined_to
- ? e->caller->global.inlined_to->decl
- : e->caller->decl))
+ ? e->caller->global.inlined_to->symbol.decl
+ : e->caller->symbol.decl))
&& !caller_growth_limits (e))
inlinable = false;
/* Don't inline a function with a higher optimization level than the
if (((caller_opt->x_optimize > callee_opt->x_optimize)
|| (caller_opt->x_optimize_size != callee_opt->x_optimize_size))
/* gcc.dg/pr43564.c. Look at forced inline even in -O0. */
- && !DECL_DISREGARD_INLINE_LIMITS (e->callee->decl))
+ && !DECL_DISREGARD_INLINE_LIMITS (e->callee->symbol.decl))
{
e->inline_failed = CIF_OPTIMIZATION_MISMATCH;
inlinable = false;
/* Early inliner might get called at WPA stage when IPA pass adds new
function. In this case we can not really do any of early inlining
because function bodies are missing. */
- if (!gimple_has_body_p (callee->decl))
+ if (!gimple_has_body_p (callee->symbol.decl))
{
e->inline_failed = CIF_BODY_NOT_AVAILABLE;
return false;
(i.e. the callgraph is cyclic and we did not process
the callee by early inliner, yet). We don't have CIF code for this
case; later we will re-do the decision in the real inliner. */
- if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (e->caller->decl))
- || !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (callee->decl)))
+ if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (e->caller->symbol.decl))
+ || !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (callee->symbol.decl)))
{
if (dump_file)
fprintf (dump_file, " edge not inlinable: not in SSA form\n");
{
struct cgraph_edge *e;
for (e = n->callees; e; e = e->next_callee)
- if (!is_inexpensive_builtin (e->callee->decl))
+ if (!is_inexpensive_builtin (e->callee->symbol.decl))
return false;
return true;
}
bool want_inline = true;
struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
- if (DECL_DISREGARD_INLINE_LIMITS (callee->decl))
+ if (DECL_DISREGARD_INLINE_LIMITS (callee->symbol.decl))
;
- else if (!DECL_DECLARED_INLINE_P (callee->decl)
+ else if (!DECL_DECLARED_INLINE_P (callee->symbol.decl)
&& !flag_inline_small_functions)
{
e->inline_failed = CIF_FUNCTION_NOT_INLINE_CANDIDATE;
bool want_inline = true;
struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
- if (DECL_DISREGARD_INLINE_LIMITS (callee->decl))
+ if (DECL_DISREGARD_INLINE_LIMITS (callee->symbol.decl))
;
- else if (!DECL_DECLARED_INLINE_P (callee->decl)
+ else if (!DECL_DECLARED_INLINE_P (callee->symbol.decl)
&& !flag_inline_small_functions)
{
e->inline_failed = CIF_FUNCTION_NOT_INLINE_CANDIDATE;
if (growth <= 0)
;
- else if (DECL_DECLARED_INLINE_P (callee->decl)
+ else if (DECL_DECLARED_INLINE_P (callee->symbol.decl)
&& growth >= MAX_INLINE_INSNS_SINGLE)
{
e->inline_failed = CIF_MAX_INLINE_INSNS_SINGLE_LIMIT;
Consequently we ask cgraph_can_remove_if_no_direct_calls_p
instead of
cgraph_will_be_removed_from_program_if_no_direct_calls */
- && !DECL_EXTERNAL (callee->decl)
+ && !DECL_EXTERNAL (callee->symbol.decl)
&& cgraph_can_remove_if_no_direct_calls_p (callee)
&& estimate_growth (callee) <= 0)
;
- else if (!DECL_DECLARED_INLINE_P (callee->decl)
+ else if (!DECL_DECLARED_INLINE_P (callee->symbol.decl)
&& !flag_inline_functions)
{
e->inline_failed = CIF_NOT_DECLARED_INLINED;
want_inline = false;
}
- else if (!DECL_DECLARED_INLINE_P (callee->decl)
+ else if (!DECL_DECLARED_INLINE_P (callee->symbol.decl)
&& growth >= MAX_INLINE_INSNS_AUTO)
{
e->inline_failed = CIF_MAX_INLINE_INSNS_AUTO_LIMIT;
int caller_freq = CGRAPH_FREQ_BASE;
int max_depth = PARAM_VALUE (PARAM_MAX_INLINE_RECURSIVE_DEPTH_AUTO);
- if (DECL_DECLARED_INLINE_P (edge->caller->decl))
+ if (DECL_DECLARED_INLINE_P (edge->caller->symbol.decl))
max_depth = PARAM_VALUE (PARAM_MAX_INLINE_RECURSIVE_DEPTH);
if (!cgraph_maybe_hot_edge_p (edge))
return false;
/* External functions are not really in the unit, so inlining
them when called once would just increase the program size. */
- if (DECL_EXTERNAL (function->decl))
+ if (DECL_EXTERNAL (function->symbol.decl))
return false;
/* Offline body must be optimized out. */
if (!cgraph_will_be_removed_from_program_if_no_direct_calls (function))
NULL);
struct inline_summary *callee_info = inline_summary (callee);
- if (DECL_DISREGARD_INLINE_LIMITS (callee->decl))
+ if (DECL_DISREGARD_INLINE_LIMITS (callee->symbol.decl))
return INT_MIN;
growth = estimate_edge_growth (edge);
for (edge = where->callers; edge; edge = edge->next_caller)
if (edge->inline_failed)
reset_edge_growth_cache (edge);
- for (i = 0; ipa_ref_list_refering_iterate (&where->ref_list, i, ref); i++)
+ for (i = 0; ipa_ref_list_refering_iterate (&where->symbol.ref_list,
+ i, ref); i++)
if (ref->use == IPA_REF_ALIAS)
reset_edge_caches (ipa_ref_refering_node (ref));
if (!bitmap_set_bit (updated_nodes, node->uid))
return;
- for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
+ for (i = 0; ipa_ref_list_refering_iterate (&node->symbol.ref_list,
+ i, ref); i++)
if (ref->use == IPA_REF_ALIAS)
{
struct cgraph_node *alias = ipa_ref_refering_node (ref);
if (node->global.inlined_to)
node = node->global.inlined_to;
- if (DECL_DECLARED_INLINE_P (node->decl))
+ if (DECL_DECLARED_INLINE_P (node->symbol.decl))
limit = PARAM_VALUE (PARAM_MAX_INLINE_INSNS_RECURSIVE);
/* Make sure that function is small enough to be considered for inlining. */
depth = 1;
for (cnode = curr->caller;
cnode->global.inlined_to; cnode = cnode->callers->caller)
- if (node->decl
- == cgraph_function_or_thunk_node (curr->callee, NULL)->decl)
+ if (node->symbol.decl
+ == cgraph_function_or_thunk_node (curr->callee, NULL)->symbol.decl)
depth++;
if (!want_inline_self_recursive_call_p (curr, node, false, depth))
if (!master_clone)
{
/* We need original clone to copy around. */
- master_clone = cgraph_clone_node (node, node->decl,
+ master_clone = cgraph_clone_node (node, node->symbol.decl,
node->count, CGRAPH_FREQ_BASE,
false, NULL, true);
for (e = master_clone->callees; e; e = e->next_callee)
{
struct inline_summary *info = inline_summary (node);
- if (!DECL_EXTERNAL (node->decl))
+ if (!DECL_EXTERNAL (node->symbol.decl))
initial_size += info->size;
}
}
if (overall_size + growth > max_size
- && !DECL_DISREGARD_INLINE_LIMITS (callee->decl))
+ && !DECL_DISREGARD_INLINE_LIMITS (callee->symbol.decl))
{
edge->inline_failed = CIF_INLINE_UNIT_GROWTH_LIMIT;
report_inline_failed_reason (edge);
where = edge->caller;
while (where->global.inlined_to)
{
- if (where->decl == callee->decl)
+ if (where->symbol.decl == callee->symbol.decl)
outer_node = where, depth++;
where = where->callers->caller;
}
true, depth))
{
edge->inline_failed
- = (DECL_DISREGARD_INLINE_LIMITS (edge->callee->decl)
+ = (DECL_DISREGARD_INLINE_LIMITS (edge->callee->symbol.decl)
? CIF_RECURSIVE_INLINING : CIF_UNSPECIFIED);
continue;
}
struct cgraph_edge *e;
/* We shouldn't be called recursively when we are being processed. */
- gcc_assert (node->aux == NULL);
+ gcc_assert (node->symbol.aux == NULL);
- node->aux = (void *) node;
+ node->symbol.aux = (void *) node;
for (e = node->callees; e; e = e->next_callee)
{
struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
/* We've hit cycle? It is time to give up. */
- if (callee->aux)
+ if (callee->symbol.aux)
{
if (dump_file)
fprintf (dump_file,
continue;
}
- if (gimple_in_ssa_p (DECL_STRUCT_FUNCTION (node->decl))
- != gimple_in_ssa_p (DECL_STRUCT_FUNCTION (callee->decl)))
+ if (gimple_in_ssa_p (DECL_STRUCT_FUNCTION (node->symbol.decl))
+ != gimple_in_ssa_p (DECL_STRUCT_FUNCTION (callee->symbol.decl)))
{
if (dump_file)
fprintf (dump_file, "Not inlining: SSA form does not match.\n");
orig_callee = callee;
inline_call (e, true, NULL, NULL);
if (e->callee != orig_callee)
- orig_callee->aux = (void *) node;
+ orig_callee->symbol.aux = (void *) node;
flatten_function (e->callee, early);
if (e->callee != orig_callee)
- orig_callee->aux = NULL;
+ orig_callee->symbol.aux = NULL;
}
- node->aux = NULL;
+ node->symbol.aux = NULL;
}
/* Decide on the inlining. We do so in the topological order to avoid
nnodes = ipa_reverse_postorder (order);
for (node = cgraph_nodes; node; node = node->next)
- node->aux = 0;
+ node->symbol.aux = 0;
if (dump_file)
fprintf (dump_file, "\nFlattening functions:\n");
try to flatten itself turning it into a self-recursive
function. */
if (lookup_attribute ("flatten",
- DECL_ATTRIBUTES (node->decl)) != NULL)
+ DECL_ATTRIBUTES (node->symbol.decl)) != NULL)
{
if (dump_file)
fprintf (dump_file,
for (e = node->callees; e; e = e->next_callee)
{
struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
- if (!DECL_DISREGARD_INLINE_LIMITS (callee->decl))
+ if (!DECL_DISREGARD_INLINE_LIMITS (callee->symbol.decl))
continue;
if (cgraph_edge_recursive_p (e))
continue;
/* Do not consider functions not declared inline. */
- if (!DECL_DECLARED_INLINE_P (callee->decl)
+ if (!DECL_DECLARED_INLINE_P (callee->symbol.decl)
&& !flag_inline_small_functions
&& !flag_inline_functions)
continue;
cycles of edges to be always inlined in the callgraph.
We might want to be smarter and just avoid this type of inlining. */
- || DECL_DISREGARD_INLINE_LIMITS (node->decl))
+ || DECL_DISREGARD_INLINE_LIMITS (node->symbol.decl))
;
else if (lookup_attribute ("flatten",
- DECL_ATTRIBUTES (node->decl)) != NULL)
+ DECL_ATTRIBUTES (node->symbol.decl)) != NULL)
{
/* When the function is marked to be flattened, recursively inline
all calls in it. */
= estimate_num_insns (edge->call_stmt, &eni_size_weights);
es->call_stmt_time
= estimate_num_insns (edge->call_stmt, &eni_time_weights);
- if (edge->callee->decl
+ if (edge->callee->symbol.decl
&& !gimple_check_call_matching_types (edge->call_stmt,
- edge->callee->decl))
+ edge->callee->symbol.decl))
edge->call_stmt_cannot_inline_p = true;
}
timevar_pop (TV_INTEGRATION);
tree parm;
int param_num;
- fndecl = node->decl;
+ fndecl = node->symbol.decl;
fnargs = DECL_ARGUMENTS (fndecl);
param_num = 0;
for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
{
int param_count;
- param_count = count_formal_params (node->decl);
+ param_count = count_formal_params (node->symbol.decl);
if (param_count)
{
VEC_safe_grow_cleared (ipa_param_descriptor_t, heap,
ipa_analyze_params_uses (struct cgraph_node *node,
struct param_analysis_info *parms_ainfo)
{
- tree decl = node->decl;
+ tree decl = node->symbol.decl;
basic_block bb;
struct function *func;
gimple_stmt_iterator gsi;
/* For SSA regs see if parameter is used. For non-SSA we compute
the flag during modification analysis. */
if (is_gimple_reg (parm)
- && gimple_default_def (DECL_STRUCT_FUNCTION (node->decl), parm))
+ && gimple_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl), parm))
ipa_set_param_used (info, i, true);
}
ipa_check_create_node_params ();
ipa_check_create_edge_args ();
info = IPA_NODE_REF (node);
- push_cfun (DECL_STRUCT_FUNCTION (node->decl));
- current_function_decl = node->decl;
+ push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
+ current_function_decl = node->symbol.decl;
ipa_initialize_node_params (node);
param_count = ipa_get_param_count (info);
if (new_direct_edge->call_stmt)
new_direct_edge->call_stmt_cannot_inline_p
= !gimple_check_call_matching_types (new_direct_edge->call_stmt,
- new_direct_edge->callee->decl);
+ new_direct_edge->callee->symbol.decl);
if (new_edges)
{
VEC_safe_push (cgraph_edge_p, heap, *new_edges,
len = VEC_length (ipa_parm_adjustment_t, adjustments);
vargs = VEC_alloc (tree, heap, len);
- callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
+ callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;
gsi = gsi_for_stmt (stmt);
for (i = 0; i < len; i++)
static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
- tree decl = fn->decl;
+ tree decl = fn->symbol.decl;
tree old_decl = current_function_decl;
funct_state l;
basic_block this_block;
l->looping = false;
l->can_throw = false;
state_from_flags (&l->state_previously_known, &l->looping_previously_known,
- flags_from_decl_or_type (fn->decl),
+ flags_from_decl_or_type (fn->symbol.decl),
cgraph_node_cannot_return (fn));
if (fn->thunk.thunk_p || fn->alias)
fs->can_throw = bp_unpack_value (&bp, 1);
if (dump_file)
{
- int flags = flags_from_decl_or_type (node->decl);
+ int flags = flags_from_decl_or_type (node->symbol.decl);
fprintf (dump_file, "Read info for %s/%i ",
cgraph_node_name (node),
node->uid);
}
}
else if (special_builtin_state (&edge_state, &edge_looping,
- y->decl))
+ y->symbol.decl))
;
else
state_from_flags (&edge_state, &edge_looping,
- flags_from_decl_or_type (y->decl),
+ flags_from_decl_or_type (y->symbol.decl),
cgraph_edge_cannot_lead_to_return (e));
/* Merge the results with what we already know. */
break;
/* And finally all loads and stores. */
- for (i = 0; ipa_ref_list_reference_iterate (&w->ref_list, i, ref); i++)
+ for (i = 0; ipa_ref_list_reference_iterate (&w->symbol.ref_list, i, ref); i++)
{
enum pure_const_state_e ref_state = IPA_CONST;
bool ref_looping = false;
{
case IPA_REF_LOAD:
/* readonly reads are safe. */
- if (TREE_READONLY (ipa_ref_varpool_node (ref)->decl))
+ if (TREE_READONLY (ipa_ref_varpool_node (ref)->symbol.decl))
break;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " nonreadonly global var read\n");
if (pure_const_state == IPA_NEITHER)
break;
}
- w_info = (struct ipa_dfs_info *) w->aux;
+ w_info = (struct ipa_dfs_info *) w->symbol.aux;
w = w_info->next_cycle;
}
if (dump_file && (dump_flags & TDF_DETAILS))
switch (this_state)
{
case IPA_CONST:
- if (!TREE_READONLY (w->decl))
+ if (!TREE_READONLY (w->symbol.decl))
{
- warn_function_const (w->decl, !this_looping);
+ warn_function_const (w->symbol.decl, !this_looping);
if (dump_file)
fprintf (dump_file, "Function found to be %sconst: %s\n",
this_looping ? "looping " : "",
break;
case IPA_PURE:
- if (!DECL_PURE_P (w->decl))
+ if (!DECL_PURE_P (w->symbol.decl))
{
- warn_function_pure (w->decl, !this_looping);
+ warn_function_pure (w->symbol.decl, !this_looping);
if (dump_file)
fprintf (dump_file, "Function found to be %spure: %s\n",
this_looping ? "looping " : "",
default:
break;
}
- w_info = (struct ipa_dfs_info *) w->aux;
+ w_info = (struct ipa_dfs_info *) w->symbol.aux;
w = w_info->next_cycle;
}
}
if (can_throw)
break;
- if (y_l->can_throw && !TREE_NOTHROW (w->decl)
+ if (y_l->can_throw && !TREE_NOTHROW (w->symbol.decl)
&& e->can_throw_external)
can_throw = true;
}
- else if (e->can_throw_external && !TREE_NOTHROW (y->decl))
+ else if (e->can_throw_external && !TREE_NOTHROW (y->symbol.decl))
can_throw = true;
}
for (ie = node->indirect_calls; ie; ie = ie->next_callee)
if (ie->can_throw_external)
can_throw = true;
- w_info = (struct ipa_dfs_info *) w->aux;
+ w_info = (struct ipa_dfs_info *) w->symbol.aux;
w = w_info->next_cycle;
}
while (w)
{
funct_state w_l = get_function_state (w);
- if (!can_throw && !TREE_NOTHROW (w->decl))
+ if (!can_throw && !TREE_NOTHROW (w->symbol.decl))
{
cgraph_set_nothrow_flag (w, true);
if (dump_file)
fprintf (dump_file, "Function found to be nothrow: %s\n",
cgraph_node_name (w));
}
- else if (can_throw && !TREE_NOTHROW (w->decl))
+ else if (can_throw && !TREE_NOTHROW (w->symbol.decl))
w_l->can_throw = true;
- w_info = (struct ipa_dfs_info *) w->aux;
+ w_info = (struct ipa_dfs_info *) w->symbol.aux;
w = w_info->next_cycle;
}
}
ipa_ref_refering_ref_list (struct ipa_ref *ref)
{
if (ref->refering_type == IPA_REF_CGRAPH)
- return &ipa_ref_refering_node (ref)->ref_list;
+ return &ipa_ref_refering_node (ref)->symbol.ref_list;
else
- return &ipa_ref_refering_varpool_node (ref)->ref_list;
+ return &ipa_ref_refering_varpool_node (ref)->symbol.ref_list;
}
/* Return reference list REF is in. */
ipa_ref_refered_ref_list (struct ipa_ref *ref)
{
if (ref->refered_type == IPA_REF_CGRAPH)
- return &ipa_ref_node (ref)->ref_list;
+ return &ipa_ref_node (ref)->symbol.ref_list;
else
- return &ipa_ref_varpool_node (ref)->ref_list;
+ return &ipa_ref_varpool_node (ref)->symbol.ref_list;
}
/* Return first reference in LIST or NULL if empty. */
gcc_assert (!stmt || refering_node);
gcc_assert (use_type != IPA_REF_ALIAS || !stmt);
- list = (refering_node ? &refering_node->ref_list
- : &refering_varpool_node->ref_list);
+ list = (refering_node ? &refering_node->symbol.ref_list
+ : &refering_varpool_node->symbol.ref_list);
old_references = list->references;
VEC_safe_grow (ipa_ref_t, gc, list->references,
VEC_length (ipa_ref_t, list->references) + 1);
ref = VEC_last (ipa_ref_t, list->references);
- list2 = (refered_node ? &refered_node->ref_list
- : &refered_varpool_node->ref_list);
+ list2 = (refered_node ? &refered_node->symbol.ref_list
+ : &refered_varpool_node->symbol.ref_list);
VEC_safe_push (ipa_ref_ptr, heap, list2->refering, ref);
ref->refered_index = VEC_length (ipa_ref_ptr, list2->refering) - 1;
if (refering_node)
info = get_reference_optimization_summary (cgraph_function_node (fn, NULL));
if (info)
return info->statics_not_read;
- else if (flags_from_decl_or_type (fn->decl) & ECF_LEAF)
+ else if (flags_from_decl_or_type (fn->symbol.decl) & ECF_LEAF)
return all_module_statics;
else
return NULL;
info = get_reference_optimization_summary (fn);
if (info)
return info->statics_not_written;
- else if (flags_from_decl_or_type (fn->decl) & ECF_LEAF)
+ else if (flags_from_decl_or_type (fn->symbol.decl) & ECF_LEAF)
return all_module_statics;
else
return NULL;
/* Only look into nodes we can propagate something. */
if (avail > AVAIL_OVERWRITABLE
|| (avail == AVAIL_OVERWRITABLE
- && (flags_from_decl_or_type (y->decl) & ECF_LEAF)))
+ && (flags_from_decl_or_type (y->symbol.decl) & ECF_LEAF)))
{
- int flags = flags_from_decl_or_type (y->decl);
+ int flags = flags_from_decl_or_type (y->symbol.decl);
if (get_reference_vars_info (y))
{
ipa_reference_vars_info_t y_info
tree var;
local = init_function_info (fn);
- for (i = 0; ipa_ref_list_reference_iterate (&fn->ref_list, i, ref); i++)
+ for (i = 0; ipa_ref_list_reference_iterate (&fn->symbol.ref_list, i, ref); i++)
{
if (ref->refered_type != IPA_REF_VARPOOL)
continue;
- var = ipa_ref_varpool_node (ref)->decl;
- if (ipa_ref_varpool_node (ref)->externally_visible
+ var = ipa_ref_varpool_node (ref)->symbol.decl;
+ if (ipa_ref_varpool_node (ref)->symbol.externally_visible
|| !ipa_ref_varpool_node (ref)->analyzed
|| !is_proper_for_analysis (var))
continue;
read_write_all_from_decl (struct cgraph_node *node, bool * read_all,
bool * write_all)
{
- tree decl = node->decl;
+ tree decl = node->symbol.decl;
int flags = flags_from_decl_or_type (decl);
if ((flags & ECF_LEAF)
&& cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
/* If any node in a cycle is read_all or write_all
they all are. */
- w_info = (struct ipa_dfs_info *) node->aux;
+ w_info = (struct ipa_dfs_info *) node->symbol.aux;
w = w_info->next_cycle;
while (w && (!read_all || !write_all))
{
}
}
- w_info = (struct ipa_dfs_info *) w->aux;
+ w_info = (struct ipa_dfs_info *) w->symbol.aux;
w = w_info->next_cycle;
}
}
propagate_bits (node_g, node);
- w_info = (struct ipa_dfs_info *) node->aux;
+ w_info = (struct ipa_dfs_info *) node->symbol.aux;
w = w_info->next_cycle;
while (w && (!read_all || !write_all))
{
ipa_reference_vars_info_t w_ri =
get_reference_vars_info (w);
ipa_reference_local_vars_info_t w_l = &w_ri->local;
- int flags = flags_from_decl_or_type (w->decl);
+ int flags = flags_from_decl_or_type (w->symbol.decl);
/* These global bitmaps are initialized from the local info
of all of the nodes in the region. However there is no
bitmap_ior_into (node_g->statics_written,
w_l->statics_written);
propagate_bits (node_g, w);
- w_info = (struct ipa_dfs_info *) w->aux;
+ w_info = (struct ipa_dfs_info *) w->symbol.aux;
w = w_info->next_cycle;
}
/* All nodes within a cycle have the same global info bitmaps. */
node_info->global = *node_g;
- w_info = (struct ipa_dfs_info *) node->aux;
+ w_info = (struct ipa_dfs_info *) node->symbol.aux;
w = w_info->next_cycle;
while (w)
{
w_ri->global = *node_g;
- w_info = (struct ipa_dfs_info *) w->aux;
+ w_info = (struct ipa_dfs_info *) w->symbol.aux;
w = w_info->next_cycle;
}
}
get_static_name (index));
}
- w_info = (struct ipa_dfs_info *) node->aux;
+ w_info = (struct ipa_dfs_info *) node->symbol.aux;
w = w_info->next_cycle;
while (w)
{
get_static_name (index));
}
- w_info = (struct ipa_dfs_info *) w->aux;
+ w_info = (struct ipa_dfs_info *) w->symbol.aux;
w = w_info->next_cycle;
}
fprintf (dump_file, "\n globals read: ");
node_info = get_reference_vars_info (node);
if (cgraph_function_body_availability (node) > AVAIL_OVERWRITABLE
- || (flags_from_decl_or_type (node->decl) & ECF_LEAF))
+ || (flags_from_decl_or_type (node->symbol.decl) & ECF_LEAF))
{
node_g = &node_info->global;
In future we might also want to include summaries of functions references
by initializers of constant variables references in current unit. */
if (!reachable_from_this_partition_p (node, set)
- && !referenced_from_this_partition_p (&node->ref_list, set, vset))
+ && !referenced_from_this_partition_p (&node->symbol.ref_list, set, vset))
return false;
/* See if the info has non-empty intersections with vars we want to encode. */
for (i = 0; i < lto_varpool_encoder_size (varpool_encoder); i++)
{
struct varpool_node *vnode = lto_varpool_encoder_deref (varpool_encoder, i);
- if (!vnode->externally_visible
+ if (!vnode->symbol.externally_visible
&& vnode->analyzed
- && bitmap_bit_p (all_module_statics, DECL_UID (vnode->decl))
- && referenced_from_this_partition_p (&vnode->ref_list, set, vset))
+ && bitmap_bit_p (all_module_statics, DECL_UID (vnode->symbol.decl))
+ && referenced_from_this_partition_p (&vnode->symbol.ref_list, set, vset))
{
- tree decl = vnode->decl;
+ tree decl = vnode->symbol.decl;
bitmap_set_bit (ltrans_statics, DECL_UID (decl));
splay_tree_insert (reference_vars_to_consider,
DECL_UID (decl), (splay_tree_value)decl);
/* For usual cloning it is enough to clear builtin only when signature
changes. For partial inlining we however can not expect the part
of builtin implementation to have same semantic as the whole. */
- if (DECL_BUILT_IN (node->decl))
+ if (DECL_BUILT_IN (node->symbol.decl))
{
- DECL_BUILT_IN_CLASS (node->decl) = NOT_BUILT_IN;
- DECL_FUNCTION_CODE (node->decl) = (enum built_in_function) 0;
+ DECL_BUILT_IN_CLASS (node->symbol.decl) = NOT_BUILT_IN;
+ DECL_FUNCTION_CODE (node->symbol.decl) = (enum built_in_function) 0;
}
cgraph_node_remove_callees (cur_node);
if (!split_part_return_p)
- TREE_THIS_VOLATILE (node->decl) = 1;
+ TREE_THIS_VOLATILE (node->symbol.decl) = 1;
if (dump_file)
- dump_function_to_file (node->decl, dump_file, dump_flags);
+ dump_function_to_file (node->symbol.decl, dump_file, dump_flags);
/* Create the basic block we place call into. It is the entry basic block
split after last label. */
false, GSI_CONTINUE_LINKING);
VEC_replace (tree, args_to_pass, i, arg);
}
- call = gimple_build_call_vec (node->decl, args_to_pass);
+ call = gimple_build_call_vec (node->symbol.decl, args_to_pass);
gimple_set_block (call, DECL_INITIAL (current_function_decl));
/* We avoid address being taken on any variable used by split part,
fprintf (dump_file, "Not splitting: not inlinable.\n");
return 0;
}
- if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
+ if (DECL_DISREGARD_INLINE_LIMITS (node->symbol.decl))
{
if (dump_file)
fprintf (dump_file, "Not splitting: disregarding inline limits.\n");
called once. It is possible that the caller is called more then once and
then inlining would still benefit. */
if ((!node->callers || !node->callers->next_caller)
- && !node->address_taken
- && (!flag_lto || !node->local.externally_visible))
+ && !node->symbol.address_taken
+ && (!flag_lto || !node->symbol.externally_visible))
{
if (dump_file)
fprintf (dump_file, "Not splitting: not called directly "
bool (*ignore_edge) (struct cgraph_edge *))
{
struct cgraph_edge *edge;
- struct ipa_dfs_info *v_info = (struct ipa_dfs_info *) v->aux;
+ struct ipa_dfs_info *v_info = (struct ipa_dfs_info *) v->symbol.aux;
/* mark node as old */
v_info->new_node = false;
if (!w || (ignore_edge && ignore_edge (edge)))
continue;
- if (w->aux
+ if (w->symbol.aux
&& (avail > AVAIL_OVERWRITABLE
|| (env->allow_overwritable && avail == AVAIL_OVERWRITABLE)))
{
- w_info = (struct ipa_dfs_info *) w->aux;
+ w_info = (struct ipa_dfs_info *) w->symbol.aux;
if (w_info->new_node)
{
searchc (env, w, ignore_edge);
struct ipa_dfs_info *x_info;
do {
x = env->stack[--(env->stack_size)];
- x_info = (struct ipa_dfs_info *) x->aux;
+ x_info = (struct ipa_dfs_info *) x->symbol.aux;
x_info->on_stack = false;
x_info->scc_no = v_info->dfn_number;
&& (avail == AVAIL_OVERWRITABLE)))
{
/* Reuse the info if it is already there. */
- struct ipa_dfs_info *info = (struct ipa_dfs_info *) node->aux;
+ struct ipa_dfs_info *info = (struct ipa_dfs_info *) node->symbol.aux;
if (!info)
info = XCNEW (struct ipa_dfs_info);
info->new_node = true;
info->on_stack = false;
info->next_cycle = NULL;
- node->aux = info;
+ node->symbol.aux = info;
splay_tree_insert (env.nodes_marked_new,
(splay_tree_key)node->uid,
(splay_tree_value)node);
}
else
- node->aux = NULL;
+ node->symbol.aux = NULL;
}
result = splay_tree_min (env.nodes_marked_new);
while (result)
for (node = cgraph_nodes; node; node = node->next)
{
/* Get rid of the aux information. */
- if (node->aux)
+ if (node->symbol.aux)
{
- free (node->aux);
- node->aux = NULL;
+ free (node->symbol.aux);
+ node->symbol.aux = NULL;
}
}
}
to be output and put them into order as well, so we get dependencies
right through inline functions. */
for (node = cgraph_nodes; node; node = node->next)
- node->aux = NULL;
+ node->symbol.aux = NULL;
for (pass = 0; pass < 2; pass++)
for (node = cgraph_nodes; node; node = node->next)
- if (!node->aux
+ if (!node->symbol.aux
&& (pass
- || (!node->address_taken
+ || (!node->symbol.address_taken
&& !node->global.inlined_to
&& !node->alias && !node->thunk.thunk_p
&& !cgraph_only_called_directly_p (node))))
stack[stack_size].node = node;
stack[stack_size].edge = node->callers;
stack[stack_size].ref = 0;
- node->aux = (void *)(size_t)1;
+ node->symbol.aux = (void *)(size_t)1;
while (stack_size >= 0)
{
while (true)
/* Break possible cycles involving always-inline
functions by ignoring edges from always-inline
functions to non-always-inline functions. */
- if (DECL_DISREGARD_INLINE_LIMITS (edge->caller->decl)
+ if (DECL_DISREGARD_INLINE_LIMITS (edge->caller->symbol.decl)
&& !DECL_DISREGARD_INLINE_LIMITS
- (cgraph_function_node (edge->callee, NULL)->decl))
+ (cgraph_function_node (edge->callee, NULL)->symbol.decl))
node2 = NULL;
}
- for (;ipa_ref_list_refering_iterate (&stack[stack_size].node->ref_list,
+ for (;ipa_ref_list_refering_iterate (&stack[stack_size].node->symbol.ref_list,
stack[stack_size].ref,
ref) && !node2;
stack[stack_size].ref++)
}
if (!node2)
break;
- if (!node2->aux)
+ if (!node2->symbol.aux)
{
stack[++stack_size].node = node2;
stack[stack_size].edge = node2->callers;
stack[stack_size].ref = 0;
- node2->aux = (void *)(size_t)1;
+ node2->symbol.aux = (void *)(size_t)1;
}
}
order[order_pos++] = stack[stack_size--].node;
}
free (stack);
for (node = cgraph_nodes; node; node = node->next)
- node->aux = NULL;
+ node->symbol.aux = NULL;
return order_pos;
}
enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first)
{
/* Node is still in queue; do nothing. */
- if (node->aux && node->aux != (void *) 2)
+ if (node->symbol.aux && node->symbol.aux != (void *) 2)
return;
/* Node was already processed as unreachable, re-enqueue
only if it became reachable now. */
- if (node->aux == (void *)2 && !node->reachable)
+ if (node->symbol.aux == (void *)2 && !node->reachable)
return;
- node->aux = *first;
+ node->symbol.aux = *first;
*first = node;
}
static void
enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first)
{
- node->aux = *first;
+ node->symbol.aux = *first;
*first = node;
}
struct cgraph_node *node = ipa_ref_node (ref);
if (!node->reachable
&& node->analyzed
- && (!DECL_EXTERNAL (node->decl)
+ && (!DECL_EXTERNAL (node->symbol.decl)
|| before_inlining_p))
node->reachable = true;
enqueue_cgraph_node (node, first);
{
/* FIXME: Aliases can be local, but i386 gets thunks wrong then. */
return !(cgraph_only_called_directly_or_aliased_p (node)
- && !ipa_ref_has_aliases_p (&node->ref_list)
+ && !ipa_ref_has_aliases_p (&node->symbol.ref_list)
&& node->analyzed
- && !DECL_EXTERNAL (node->decl)
- && !node->local.externally_visible
- && !node->reachable_from_other_partition
- && !node->in_other_partition);
+ && !DECL_EXTERNAL (node->symbol.decl)
+ && !node->symbol.externally_visible
+ && !node->symbol.used_from_other_partition
+ && !node->symbol.in_other_partition);
}
/* Return true when function can be marked local. */
int i;
struct ipa_ref *ref;
- for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
+ for (i = 0; ipa_ref_list_refering_iterate (&node->symbol.ref_list,
+ i, ref); i++)
if (ref->use == IPA_REF_ADDR)
return true;
return false;
fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
for (node = cgraph_nodes; node; node = node->next)
- gcc_assert (!node->aux);
+ gcc_assert (!node->symbol.aux);
for (vnode = varpool_nodes; vnode; vnode = vnode->next)
- gcc_assert (!vnode->aux);
+ gcc_assert (!vnode->symbol.aux);
#endif
varpool_reset_queue ();
/* Mark functions whose bodies are obviously needed.
&& (!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
/* Keep around virtual functions for possible devirtualization. */
|| (before_inlining_p
- && DECL_VIRTUAL_P (node->decl)
- && (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl)))))
+ && DECL_VIRTUAL_P (node->symbol.decl)
+ && (DECL_COMDAT (node->symbol.decl) || DECL_EXTERNAL (node->symbol.decl)))))
{
gcc_assert (!node->global.inlined_to);
enqueue_cgraph_node (node, &first);
}
else
{
- gcc_assert (!node->aux);
+ gcc_assert (!node->symbol.aux);
node->reachable = false;
}
{
struct cgraph_edge *e;
node = first;
- first = (struct cgraph_node *) first->aux;
+ first = (struct cgraph_node *) first->symbol.aux;
if (!node->reachable)
- node->aux = (void *)2;
+ node->symbol.aux = (void *)2;
/* If we found this node reachable, first mark on the callees
reachable too, unless they are direct calls to extern inline functions
if (!e->callee->reachable
&& node->analyzed
&& (!e->inline_failed
- || !DECL_EXTERNAL (e->callee->decl)
+ || !DECL_EXTERNAL (e->callee->symbol.decl)
|| before_inlining_p))
e->callee->reachable = true;
enqueue_cgraph_node (e->callee, &first);
}
- process_references (&node->ref_list, &first, &first_varpool, before_inlining_p);
+ process_references (&node->symbol.ref_list, &first,
+ &first_varpool, before_inlining_p);
}
/* If any function in a comdat group is reachable, force
all other functions in the same comdat group to be
also reachable. */
- if (node->same_comdat_group
+ if (node->symbol.same_comdat_group
&& node->reachable
&& !node->global.inlined_to)
{
- for (next = node->same_comdat_group;
+ for (next = cgraph (node->symbol.same_comdat_group);
next != node;
- next = next->same_comdat_group)
+ next = cgraph (next->symbol.same_comdat_group))
if (!next->reachable)
{
next->reachable = true;
function is clone of real clone, we must keep it around in order to
make materialize_clones produce function body with the changes
applied. */
- while (node->clone_of && !node->clone_of->aux
- && !gimple_has_body_p (node->decl))
+ while (node->clone_of && !node->clone_of->symbol.aux
+ && !gimple_has_body_p (node->symbol.decl))
{
- bool noninline = node->clone_of->decl != node->decl;
+ bool noninline = node->clone_of->symbol.decl != node->symbol.decl;
node = node->clone_of;
- if (noninline && !node->reachable && !node->aux)
+ if (noninline && !node->reachable && !node->symbol.aux)
{
enqueue_cgraph_node (node, &first);
break;
if (first_varpool != (struct varpool_node *) (void *) 1)
{
vnode = first_varpool;
- first_varpool = (struct varpool_node *)first_varpool->aux;
- vnode->aux = NULL;
- process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p);
+ first_varpool = (struct varpool_node *)first_varpool->symbol.aux;
+ vnode->symbol.aux = NULL;
+ process_references (&vnode->symbol.ref_list, &first,
+ &first_varpool, before_inlining_p);
/* If any function in a comdat group is reachable, force
all other functions in the same comdat group to be
also reachable. */
- if (vnode->same_comdat_group)
+ if (vnode->symbol.same_comdat_group)
{
struct varpool_node *next;
- for (next = vnode->same_comdat_group;
+ for (next = varpool (vnode->symbol.same_comdat_group);
next != vnode;
- next = next->same_comdat_group)
+ next = varpool (next->symbol.same_comdat_group))
if (!next->needed)
{
varpool_mark_needed_node (next);
for (node = cgraph_nodes; node; node = next)
{
next = node->next;
- if (node->aux && !node->reachable)
+ if (node->symbol.aux && !node->reachable)
{
cgraph_node_remove_callees (node);
- ipa_remove_all_references (&node->ref_list);
+ ipa_remove_all_references (&node->symbol.ref_list);
node->analyzed = false;
}
- if (!node->aux)
+ if (!node->symbol.aux)
{
struct cgraph_edge *e;
bool found = false;
for (e = node->callers; e && !found; e = e->next_caller)
if (e->caller->reachable)
found = true;
- for (i = 0; (ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
+ for (i = 0; (ipa_ref_list_refering_iterate (&node->symbol.ref_list,
+ i, ref)
&& !found); i++)
if (ref->refering_type == IPA_REF_CGRAPH
&& ipa_ref_refering_node (ref)->reachable)
Otherwise we can just remove the body but keep the clone. */
for (clone = node->clones; clone;
clone = clone->next_sibling_clone)
- if (clone->aux)
+ if (clone->symbol.aux)
break;
if (!clone)
{
if (node->next_sibling_clone)
node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
if (node->clone_of)
- node->former_clone_of = node->clone_of->decl;
+ node->former_clone_of = node->clone_of->symbol.decl;
node->clone_of = NULL;
node->next_sibling_clone = NULL;
node->prev_sibling_clone = NULL;
}
else
- gcc_assert (!clone->in_other_partition);
+ gcc_assert (!clone->symbol.in_other_partition);
node->analyzed = false;
changed = true;
cgraph_node_remove_callees (node);
- ipa_remove_all_references (&node->ref_list);
+ ipa_remove_all_references (&node->symbol.ref_list);
}
}
else
node->global.inlined_to = NULL;
update_inlined_to_pointer (node, node);
}
- node->aux = NULL;
+ node->symbol.aux = NULL;
}
if (file)
if (file)
fprintf (file, "\nClearing address taken flags:");
for (node = cgraph_nodes; node; node = node->next)
- if (node->address_taken
- && !node->reachable_from_other_partition)
+ if (node->symbol.address_taken
+ && !node->symbol.used_from_other_partition)
{
if (!cgraph_for_node_and_aliases (node, has_addr_references_p, NULL, true))
{
if (file)
fprintf (file, " %s", cgraph_node_name (node));
- node->address_taken = false;
+ node->symbol.address_taken = false;
changed = true;
if (cgraph_local_node_p (node))
{
fprintf (dump_file, "Clearing variable flags:");
for (vnode = varpool_nodes; vnode; vnode = vnode->next)
if (vnode->finalized && varpool_all_refs_explicit_p (vnode)
- && (TREE_ADDRESSABLE (vnode->decl) || !TREE_READONLY (vnode->decl)))
+ && (TREE_ADDRESSABLE (vnode->symbol.decl)
+ || !TREE_READONLY (vnode->symbol.decl)))
{
bool written = false;
bool address_taken = false;
int i;
struct ipa_ref *ref;
- for (i = 0; ipa_ref_list_refering_iterate (&vnode->ref_list, i, ref)
+ for (i = 0; ipa_ref_list_refering_iterate (&vnode->symbol.ref_list,
+ i, ref)
&& (!written || !address_taken); i++)
switch (ref->use)
{
written = true;
break;
}
- if (TREE_ADDRESSABLE (vnode->decl) && !address_taken)
+ if (TREE_ADDRESSABLE (vnode->symbol.decl) && !address_taken)
{
if (dump_file)
fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode));
- TREE_ADDRESSABLE (vnode->decl) = 0;
+ TREE_ADDRESSABLE (vnode->symbol.decl) = 0;
}
- if (!TREE_READONLY (vnode->decl) && !address_taken && !written
+ if (!TREE_READONLY (vnode->symbol.decl) && !address_taken && !written
/* Making variable in explicit section readonly can cause section
type conflict.
See e.g. gcc.c-torture/compile/pr23237.c */
- && DECL_SECTION_NAME (vnode->decl) == NULL)
+ && DECL_SECTION_NAME (vnode->symbol.decl) == NULL)
{
if (dump_file)
fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode));
- TREE_READONLY (vnode->decl) = 1;
+ TREE_READONLY (vnode->symbol.decl) = 1;
}
}
if (dump_file)
{
int i;
struct ipa_ref *ref;
- for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
+ for (i = 0; ipa_ref_list_refering_iterate (&node->symbol.ref_list,
+ i, ref); i++)
if (ref->use == IPA_REF_ADDR)
{
struct varpool_node *node;
if (ref->refering_type == IPA_REF_CGRAPH)
return true;
node = ipa_ref_refering_varpool_node (ref);
- if (!DECL_VIRTUAL_P (node->decl))
+ if (!DECL_VIRTUAL_P (node->symbol.decl))
return true;
}
return false;
cgraph_comdat_can_be_unshared_p (struct cgraph_node *node)
{
if ((cgraph_address_taken_from_non_vtable_p (node)
- && !DECL_VIRTUAL_P (node->decl))
+ && !DECL_VIRTUAL_P (node->symbol.decl))
|| !node->analyzed)
return false;
- if (node->same_comdat_group)
+ if (node->symbol.same_comdat_group)
{
struct cgraph_node *next;
/* If more than one function is in the same COMDAT group, it must
be shared even if just one function in the comdat group has
address taken. */
- for (next = node->same_comdat_group;
- next != node; next = next->same_comdat_group)
+ for (next = cgraph (node->symbol.same_comdat_group);
+ next != node; next = cgraph (next->symbol.same_comdat_group))
if (cgraph_address_taken_from_non_vtable_p (next)
- && !DECL_VIRTUAL_P (next->decl))
+ && !DECL_VIRTUAL_P (next->symbol.decl))
return false;
}
return true;
{
if (!node->local.finalized)
return false;
- if (!DECL_COMDAT (node->decl)
- && (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl)))
+ if (!DECL_COMDAT (node->symbol.decl)
+ && (!TREE_PUBLIC (node->symbol.decl)
+ || DECL_EXTERNAL (node->symbol.decl)))
return false;
/* Do not even try to be smart about aliased nodes. Until we properly
using the implicit built-in declarations anymore. Similarly this enables
us to remove them as unreachable before actual calls may appear during
expansion or folding. */
- if (DECL_BUILT_IN (node->decl))
+ if (DECL_BUILT_IN (node->symbol.decl))
return true;
/* If linker counts on us, we must preserve the function. */
if (cgraph_used_from_object_file_p (node))
return true;
- if (DECL_PRESERVE_P (node->decl))
+ if (DECL_PRESERVE_P (node->symbol.decl))
return true;
- if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node->decl)))
+ if (lookup_attribute ("externally_visible",
+ DECL_ATTRIBUTES (node->symbol.decl)))
return true;
if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
- && lookup_attribute ("dllexport", DECL_ATTRIBUTES (node->decl)))
+ && lookup_attribute ("dllexport",
+ DECL_ATTRIBUTES (node->symbol.decl)))
return true;
- if (node->resolution == LDPR_PREVAILING_DEF_IRONLY)
+ if (node->symbol.resolution == LDPR_PREVAILING_DEF_IRONLY)
return false;
/* When doing LTO or whole program, we can bring COMDAT functoins static.
This improves code quality and we know we will duplicate them at most twice
(in the case that we are not using plugin and link with object file
implementing same COMDAT) */
if ((in_lto_p || whole_program)
- && DECL_COMDAT (node->decl)
+ && DECL_COMDAT (node->symbol.decl)
&& cgraph_comdat_can_be_unshared_p (node))
return false;
/* When doing link time optimizations, hidden symbols become local. */
if (in_lto_p
- && (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
- || DECL_VISIBILITY (node->decl) == VISIBILITY_INTERNAL)
+ && (DECL_VISIBILITY (node->symbol.decl) == VISIBILITY_HIDDEN
+ || DECL_VISIBILITY (node->symbol.decl) == VISIBILITY_INTERNAL)
/* Be sure that node is defined in IR file, not in other object
file. In that case we don't set used_from_other_object_file. */
&& node->analyzed)
else if (!whole_program)
return true;
- if (MAIN_NAME_P (DECL_NAME (node->decl)))
+ if (MAIN_NAME_P (DECL_NAME (node->symbol.decl)))
return true;
return false;
bool
varpool_externally_visible_p (struct varpool_node *vnode, bool aliased)
{
- if (!DECL_COMDAT (vnode->decl) && !TREE_PUBLIC (vnode->decl))
+ if (!DECL_COMDAT (vnode->symbol.decl) && !TREE_PUBLIC (vnode->symbol.decl))
return false;
/* Do not even try to be smart about aliased nodes. Until we properly
if (varpool_used_from_object_file_p (vnode))
return true;
- if (DECL_HARD_REGISTER (vnode->decl))
+ if (DECL_HARD_REGISTER (vnode->symbol.decl))
return true;
- if (DECL_PRESERVE_P (vnode->decl))
+ if (DECL_PRESERVE_P (vnode->symbol.decl))
return true;
if (lookup_attribute ("externally_visible",
- DECL_ATTRIBUTES (vnode->decl)))
+ DECL_ATTRIBUTES (vnode->symbol.decl)))
return true;
if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
&& lookup_attribute ("dllexport",
- DECL_ATTRIBUTES (vnode->decl)))
+ DECL_ATTRIBUTES (vnode->symbol.decl)))
return true;
/* See if we have linker information about symbol not being used or
This is needed for i.e. references from asm statements. */
if (varpool_used_from_object_file_p (vnode))
return true;
- if (vnode->resolution == LDPR_PREVAILING_DEF_IRONLY)
+ if (vnode->symbol.resolution == LDPR_PREVAILING_DEF_IRONLY)
return false;
/* As a special case, the COMDAT virutal tables can be unshared.
from LTO symbol tables. */
if ((in_lto_p || flag_whole_program)
&& !vnode->force_output
- && DECL_COMDAT (vnode->decl) && DECL_VIRTUAL_P (vnode->decl))
+ && DECL_COMDAT (vnode->symbol.decl) && DECL_VIRTUAL_P (vnode->symbol.decl))
return false;
/* When doing link time optimizations, hidden symbols become local. */
if (in_lto_p
- && (DECL_VISIBILITY (vnode->decl) == VISIBILITY_HIDDEN
- || DECL_VISIBILITY (vnode->decl) == VISIBILITY_INTERNAL)
+ && (DECL_VISIBILITY (vnode->symbol.decl) == VISIBILITY_HIDDEN
+ || DECL_VISIBILITY (vnode->symbol.decl) == VISIBILITY_INTERNAL)
/* Be sure that node is defined in IR file, not in other object
file. In that case we don't set used_from_other_object_file. */
&& vnode->finalized)
FIXME: We can do so for readonly vars with no address taken and
possibly also for vtables since no direct pointer comparsion is done.
It might be interesting to do so to reduce linking overhead. */
- if (DECL_COMDAT (vnode->decl) || DECL_WEAK (vnode->decl))
+ if (DECL_COMDAT (vnode->symbol.decl) || DECL_WEAK (vnode->symbol.decl))
return true;
return false;
}
struct cgraph_node *n = node, *next;
do
{
- next = n->same_comdat_group;
- n->same_comdat_group = NULL;
+ next = cgraph (n->symbol.same_comdat_group);
+ n->symbol.same_comdat_group = NULL;
n = next;
}
while (n != node);
IDENTIFIER_POINTER (p->target));
if ((node = cgraph_node_for_asm (p->target)) != NULL
- && !DECL_EXTERNAL (node->decl))
+ && !DECL_EXTERNAL (node->symbol.decl))
{
if (!node->analyzed)
continue;
cgraph_node_name (node), node->uid);
}
else if ((vnode = varpool_node_for_asm (p->target)) != NULL
- && !DECL_EXTERNAL (vnode->decl))
+ && !DECL_EXTERNAL (vnode->symbol.decl))
{
varpool_mark_needed_node (vnode);
gcc_assert (vnode->needed);
for (node = cgraph_nodes; node; node = node->next)
{
- int flags = flags_from_decl_or_type (node->decl);
+ int flags = flags_from_decl_or_type (node->symbol.decl);
/* Optimize away PURE and CONST constructors and destructors. */
if (optimize
&& (flags & (ECF_CONST | ECF_PURE))
&& !(flags & ECF_LOOPING_CONST_OR_PURE))
{
- DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
- DECL_STATIC_DESTRUCTOR (node->decl) = 0;
+ DECL_STATIC_CONSTRUCTOR (node->symbol.decl) = 0;
+ DECL_STATIC_DESTRUCTOR (node->symbol.decl) = 0;
}
/* Frontends and alias code marks nodes as needed before parsing is finished.
We may end up marking as node external nodes where this flag is meaningless
strip it. */
if (node->needed
- && (DECL_EXTERNAL (node->decl) || !node->analyzed))
+ && (DECL_EXTERNAL (node->symbol.decl) || !node->analyzed))
node->needed = 0;
/* C++ FE on lack of COMDAT support create local COMDAT functions
(that ought to be shared but can not due to object format
limitations). It is neccesary to keep the flag to make rest of C++ FE
happy. Clear the flag here to avoid confusion in middle-end. */
- if (DECL_COMDAT (node->decl) && !TREE_PUBLIC (node->decl))
- DECL_COMDAT (node->decl) = 0;
+ if (DECL_COMDAT (node->symbol.decl) && !TREE_PUBLIC (node->symbol.decl))
+ DECL_COMDAT (node->symbol.decl) = 0;
/* For external decls stop tracking same_comdat_group, it doesn't matter
what comdat group they are in when they won't be emitted in this TU,
and simplifies later passes. */
- if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
+ if (node->symbol.same_comdat_group && DECL_EXTERNAL (node->symbol.decl))
{
#ifdef ENABLE_CHECKING
- struct cgraph_node *n;
+ symtab_node n;
- for (n = node->same_comdat_group;
- n != node;
- n = n->same_comdat_group)
+ for (n = node->symbol.same_comdat_group;
+ n != (symtab_node)node;
+ n = n->symbol.same_comdat_group)
/* If at least one of same comdat group functions is external,
all of them have to be, otherwise it is a front-end bug. */
- gcc_assert (DECL_EXTERNAL (n->decl));
+ gcc_assert (DECL_EXTERNAL (n->symbol.decl));
#endif
dissolve_same_comdat_group_list (node);
}
- gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl))
- || TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl));
+ gcc_assert ((!DECL_WEAK (node->symbol.decl)
+ && !DECL_COMDAT (node->symbol.decl))
+ || TREE_PUBLIC (node->symbol.decl)
+ || DECL_EXTERNAL (node->symbol.decl));
if (cgraph_externally_visible_p (node, whole_program,
pointer_set_contains (aliased_nodes,
node)))
{
gcc_assert (!node->global.inlined_to);
- node->local.externally_visible = true;
+ node->symbol.externally_visible = true;
}
else
- node->local.externally_visible = false;
- if (!node->local.externally_visible && node->analyzed
- && !DECL_EXTERNAL (node->decl))
+ node->symbol.externally_visible = false;
+ if (!node->symbol.externally_visible && node->analyzed
+ && !DECL_EXTERNAL (node->symbol.decl))
{
- gcc_assert (whole_program || in_lto_p || !TREE_PUBLIC (node->decl));
- cgraph_make_decl_local (node->decl);
- node->resolution = LDPR_PREVAILING_DEF_IRONLY;
- if (node->same_comdat_group)
+ gcc_assert (whole_program || in_lto_p
+ || !TREE_PUBLIC (node->symbol.decl));
+ cgraph_make_decl_local (node->symbol.decl);
+ node->symbol.resolution = LDPR_PREVAILING_DEF_IRONLY;
+ if (node->symbol.same_comdat_group)
/* cgraph_externally_visible_p has already checked all other nodes
in the group and they will all be made local. We need to
dissolve the group at once so that the predicate does not
}
if (node->thunk.thunk_p
- && TREE_PUBLIC (node->decl))
+ && TREE_PUBLIC (node->symbol.decl))
{
struct cgraph_node *decl_node = node;
/* Thunks have the same visibility as function they are attached to.
Make sure the C++ front end set this up properly. */
- if (DECL_ONE_ONLY (decl_node->decl))
+ if (DECL_ONE_ONLY (decl_node->symbol.decl))
{
- gcc_checking_assert (DECL_COMDAT (node->decl)
- == DECL_COMDAT (decl_node->decl));
- gcc_checking_assert (DECL_COMDAT_GROUP (node->decl)
- == DECL_COMDAT_GROUP (decl_node->decl));
- gcc_checking_assert (node->same_comdat_group);
+ gcc_checking_assert (DECL_COMDAT (node->symbol.decl)
+ == DECL_COMDAT (decl_node->symbol.decl));
+ gcc_checking_assert (DECL_COMDAT_GROUP (node->symbol.decl)
+ == DECL_COMDAT_GROUP (decl_node->symbol.decl));
+ gcc_checking_assert (node->symbol.same_comdat_group);
}
- if (DECL_EXTERNAL (decl_node->decl))
- DECL_EXTERNAL (node->decl) = 1;
+ if (DECL_EXTERNAL (decl_node->symbol.decl))
+ DECL_EXTERNAL (node->symbol.decl) = 1;
}
}
for (node = cgraph_nodes; node; node = node->next)
for (vnode = varpool_nodes; vnode; vnode = vnode->next)
{
/* weak flag makes no sense on local variables. */
- gcc_assert (!DECL_WEAK (vnode->decl)
- || TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl));
+ gcc_assert (!DECL_WEAK (vnode->symbol.decl)
+ || TREE_PUBLIC (vnode->symbol.decl)
+ || DECL_EXTERNAL (vnode->symbol.decl));
/* In several cases declarations can not be common:
- when declaration has initializer
static int a __attribute__ ((common))
Canonicalize things here and clear the redundant flag. */
- if (DECL_COMMON (vnode->decl)
- && (!(TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
- || (DECL_INITIAL (vnode->decl)
- && DECL_INITIAL (vnode->decl) != error_mark_node)
- || DECL_WEAK (vnode->decl)
- || DECL_SECTION_NAME (vnode->decl) != NULL
+ if (DECL_COMMON (vnode->symbol.decl)
+ && (!(TREE_PUBLIC (vnode->symbol.decl)
+ || DECL_EXTERNAL (vnode->symbol.decl))
+ || (DECL_INITIAL (vnode->symbol.decl)
+ && DECL_INITIAL (vnode->symbol.decl) != error_mark_node)
+ || DECL_WEAK (vnode->symbol.decl)
+ || DECL_SECTION_NAME (vnode->symbol.decl) != NULL
|| ! (ADDR_SPACE_GENERIC_P
- (TYPE_ADDR_SPACE (TREE_TYPE (vnode->decl))))))
- DECL_COMMON (vnode->decl) = 0;
+ (TYPE_ADDR_SPACE (TREE_TYPE (vnode->symbol.decl))))))
+ DECL_COMMON (vnode->symbol.decl) = 0;
}
for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
{
&& varpool_externally_visible_p
(vnode,
pointer_set_contains (aliased_vnodes, vnode)))
- vnode->externally_visible = true;
+ vnode->symbol.externally_visible = true;
else
- vnode->externally_visible = false;
- if (!vnode->externally_visible)
+ vnode->symbol.externally_visible = false;
+ if (!vnode->symbol.externally_visible)
{
- gcc_assert (in_lto_p || whole_program || !TREE_PUBLIC (vnode->decl));
- cgraph_make_decl_local (vnode->decl);
- vnode->resolution = LDPR_PREVAILING_DEF_IRONLY;
+ gcc_assert (in_lto_p || whole_program || !TREE_PUBLIC (vnode->symbol.decl));
+ cgraph_make_decl_local (vnode->symbol.decl);
+ vnode->symbol.resolution = LDPR_PREVAILING_DEF_IRONLY;
}
- gcc_assert (TREE_STATIC (vnode->decl));
+ gcc_assert (TREE_STATIC (vnode->symbol.decl));
}
pointer_set_destroy (aliased_nodes);
pointer_set_destroy (aliased_vnodes);
fprintf (dump_file, "\n\n");
fprintf (dump_file, "\nMarking externally visible functions:");
for (node = cgraph_nodes; node; node = node->next)
- if (node->local.externally_visible)
+ if (node->symbol.externally_visible)
fprintf (dump_file, " %s", cgraph_node_name (node));
fprintf (dump_file, "\n\n");
fprintf (dump_file, "\nMarking externally visible variables:");
for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
- if (vnode->externally_visible)
+ if (vnode->symbol.externally_visible)
fprintf (dump_file, " %s", varpool_node_name (vnode));
fprintf (dump_file, "\n\n");
}
function_and_variable_visibility (flag_whole_program);
for (node = cgraph_nodes; node; node = node->next)
- if ((node->local.externally_visible && !DECL_COMDAT (node->decl))
+ if ((node->symbol.externally_visible && !DECL_COMDAT (node->symbol.decl))
&& node->local.finalized)
cgraph_mark_needed_node (node);
for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
- if (vnode->externally_visible && !DECL_COMDAT (vnode->decl))
+ if (vnode->symbol.externally_visible && !DECL_COMDAT (vnode->symbol.decl))
varpool_mark_needed_node (vnode);
if (dump_file)
{
if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
{
for (e = order[i]->callees; e; e = e->next_callee)
- if (e->callee->local.local && !e->callee->aux)
+ if (e->callee->local.local && !e->callee->symbol.aux)
{
something_changed = true;
- e->callee->aux = (void *)1;
+ e->callee->symbol.aux = (void *)1;
}
}
- order[i]->aux = NULL;
+ order[i]->symbol.aux = NULL;
}
while (something_changed)
something_changed = false;
for (i = order_pos - 1; i >= 0; i--)
{
- if (order[i]->aux && cgraph_propagate_frequency (order[i]))
+ if (order[i]->symbol.aux && cgraph_propagate_frequency (order[i]))
{
for (e = order[i]->callees; e; e = e->next_callee)
- if (e->callee->local.local && !e->callee->aux)
+ if (e->callee->local.local && !e->callee->symbol.aux)
{
something_changed = true;
- e->callee->aux = (void *)1;
+ e->callee->symbol.aux = (void *)1;
}
}
- order[i]->aux = NULL;
+ order[i]->symbol.aux = NULL;
}
}
free (order);
static void
record_cdtor_fn (struct cgraph_node *node)
{
- if (DECL_STATIC_CONSTRUCTOR (node->decl))
- VEC_safe_push (tree, heap, static_ctors, node->decl);
- if (DECL_STATIC_DESTRUCTOR (node->decl))
- VEC_safe_push (tree, heap, static_dtors, node->decl);
- node = cgraph_get_node (node->decl);
- DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
+ if (DECL_STATIC_CONSTRUCTOR (node->symbol.decl))
+ VEC_safe_push (tree, heap, static_ctors, node->symbol.decl);
+ if (DECL_STATIC_DESTRUCTOR (node->symbol.decl))
+ VEC_safe_push (tree, heap, static_dtors, node->symbol.decl);
+ node = cgraph_get_node (node->symbol.decl);
+ DECL_DISREGARD_INLINE_LIMITS (node->symbol.decl) = 1;
}
/* Define global constructors/destructor functions for the CDTORS, of
struct cgraph_node *node;
for (node = cgraph_nodes; node; node = node->next)
if (node->analyzed
- && (DECL_STATIC_CONSTRUCTOR (node->decl)
- || DECL_STATIC_DESTRUCTOR (node->decl)))
+ && (DECL_STATIC_CONSTRUCTOR (node->symbol.decl)
+ || DECL_STATIC_DESTRUCTOR (node->symbol.decl)))
record_cdtor_fn (node);
build_cdtor_fns ();
VEC_free (tree, heap, static_ctors);
streamer_write_hwi_stream (ob->main_stream, edge->count);
bp = bitpack_create (ob->main_stream);
- uid = (!gimple_has_body_p (edge->caller->decl)
+ uid = (!gimple_has_body_p (edge->caller->symbol.decl)
? edge->lto_stmt_uid : gimple_uid (edge->call_stmt));
bp_pack_enum (&bp, cgraph_inline_failed_enum,
CIF_N_REASONS, edge->inline_failed);
{
if (ref->refering_type == IPA_REF_CGRAPH)
{
- if (ipa_ref_refering_node (ref)->in_other_partition
+ if (ipa_ref_refering_node (ref)->symbol.in_other_partition
|| !cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
return true;
}
else
{
- if (ipa_ref_refering_varpool_node (ref)->in_other_partition
+ if (ipa_ref_refering_varpool_node (ref)->symbol.in_other_partition
|| !varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
vset))
return true;
if (node->global.inlined_to)
return false;
for (e = node->callers; e; e = e->next_caller)
- if (e->caller->in_other_partition
+ if (e->caller->symbol.in_other_partition
|| !cgraph_node_in_set_p (e->caller, set))
return true;
return false;
streamer_write_enum (ob->main_stream, LTO_cgraph_tags, LTO_cgraph_last_tag,
tag);
- streamer_write_hwi_stream (ob->main_stream, node->order);
+ streamer_write_hwi_stream (ob->main_stream, node->symbol.order);
/* In WPA mode, we only output part of the call-graph. Also, we
fake cgraph node attributes. There are two cases that we care.
streamer_write_hwi_stream (ob->main_stream, ref);
- lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
+ lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->symbol.decl);
streamer_write_hwi_stream (ob->main_stream, node->count);
streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
streamer_write_hwi_stream (ob->main_stream, ref);
}
- if (node->same_comdat_group && !boundary_p)
+ if (node->symbol.same_comdat_group && !boundary_p)
{
- ref = lto_cgraph_encoder_lookup (encoder, node->same_comdat_group);
+ ref = lto_cgraph_encoder_lookup (encoder,
+ cgraph (node->symbol.same_comdat_group));
gcc_assert (ref != LCC_NOT_FOUND);
}
else
bp = bitpack_create (ob->main_stream);
bp_pack_value (&bp, node->local.local, 1);
- bp_pack_value (&bp, node->local.externally_visible, 1);
+ bp_pack_value (&bp, node->symbol.externally_visible, 1);
bp_pack_value (&bp, node->local.finalized, 1);
bp_pack_value (&bp, node->local.versionable, 1);
bp_pack_value (&bp, node->local.can_change_signature, 1);
bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
bp_pack_value (&bp, node->needed, 1);
- bp_pack_value (&bp, node->address_taken, 1);
+ bp_pack_value (&bp, node->symbol.address_taken, 1);
bp_pack_value (&bp, node->abstract_and_needed, 1);
bp_pack_value (&bp, tag == LTO_cgraph_analyzed_node
- && !DECL_EXTERNAL (node->decl)
- && !DECL_COMDAT (node->decl)
+ && !DECL_EXTERNAL (node->symbol.decl)
+ && !DECL_COMDAT (node->symbol.decl)
&& (reachable_from_other_partition_p (node, set)
- || referenced_from_other_partition_p (&node->ref_list, set, vset)), 1);
+ || referenced_from_other_partition_p (&node->symbol.ref_list,
+ set, vset)), 1);
bp_pack_value (&bp, node->lowered, 1);
bp_pack_value (&bp, in_other_partition, 1);
/* Real aliases in a boundary become non-aliases. However we still stream
defined in other unit, we may use the info on aliases to resolve
symbol1 != symbol2 type tests that we can do only for locally defined objects
otherwise. */
- bp_pack_value (&bp, node->alias && (!boundary_p || DECL_EXTERNAL (node->decl)), 1);
+ bp_pack_value (&bp, node->alias && (!boundary_p || DECL_EXTERNAL (node->symbol.decl)), 1);
bp_pack_value (&bp, node->frequency, 2);
bp_pack_value (&bp, node->only_called_at_startup, 1);
bp_pack_value (&bp, node->only_called_at_exit, 1);
bp_pack_value (&bp, node->tm_clone, 1);
bp_pack_value (&bp, node->thunk.thunk_p && !boundary_p, 1);
bp_pack_enum (&bp, ld_plugin_symbol_resolution,
- LDPR_NUM_KNOWN, node->resolution);
+ LDPR_NUM_KNOWN, node->symbol.resolution);
streamer_write_bitpack (&bp);
if (node->thunk.thunk_p && !boundary_p)
streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
}
if ((node->alias || node->thunk.thunk_p)
- && (!boundary_p || (node->alias && DECL_EXTERNAL (node->decl))))
+ && (!boundary_p || (node->alias && DECL_EXTERNAL (node->symbol.decl))))
{
streamer_write_hwi_in_range (ob->main_stream, 0, 1,
node->thunk.alias != NULL);
struct bitpack_d bp;
int ref;
- streamer_write_hwi_stream (ob->main_stream, node->order);
- lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
+ streamer_write_hwi_stream (ob->main_stream, node->symbol.order);
+ lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->symbol.decl);
bp = bitpack_create (ob->main_stream);
- bp_pack_value (&bp, node->externally_visible, 1);
+ bp_pack_value (&bp, node->symbol.externally_visible, 1);
bp_pack_value (&bp, node->force_output, 1);
bp_pack_value (&bp, node->finalized, 1);
bp_pack_value (&bp, node->alias, 1);
/* Constant pool initializers can be de-unified into individual ltrans units.
FIXME: Alternatively at -Os we may want to avoid generating for them the local
labels and share them across LTRANS partitions. */
- if (DECL_IN_CONSTANT_POOL (node->decl)
- && !DECL_COMDAT (node->decl))
+ if (DECL_IN_CONSTANT_POOL (node->symbol.decl)
+ && !DECL_COMDAT (node->symbol.decl))
{
bp_pack_value (&bp, 0, 1); /* used_from_other_parition. */
bp_pack_value (&bp, 0, 1); /* in_other_partition. */
else
{
bp_pack_value (&bp, node->analyzed
- && referenced_from_other_partition_p (&node->ref_list,
+ && referenced_from_other_partition_p (&node->symbol.ref_list,
set, vset), 1);
bp_pack_value (&bp, boundary_p, 1); /* in_other_partition. */
}
streamer_write_bitpack (&bp);
if (node->alias_of)
lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->alias_of);
- if (node->same_comdat_group && !boundary_p)
+ if (node->symbol.same_comdat_group && !boundary_p)
{
- ref = lto_varpool_encoder_lookup (varpool_encoder, node->same_comdat_group);
+ ref = lto_varpool_encoder_lookup (varpool_encoder,
+ varpool (node->symbol.same_comdat_group));
gcc_assert (ref != LCC_NOT_FOUND);
}
else
ref = LCC_NOT_FOUND;
streamer_write_hwi_stream (ob->main_stream, ref);
streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
- LDPR_NUM_KNOWN, node->resolution);
+ LDPR_NUM_KNOWN, node->symbol.resolution);
}
/* Output the varpool NODE to OB.
{
struct cgraph_node *node = csi_node (csi);
- count = ipa_ref_list_nreferences (&node->ref_list);
+ count = ipa_ref_list_nreferences (&node->symbol.ref_list);
if (count)
{
streamer_write_uhwi_stream (ob->main_stream, count);
streamer_write_uhwi_stream (ob->main_stream,
lto_cgraph_encoder_lookup (encoder, node));
- for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
+ for (i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list,
+ i, ref); i++)
lto_output_ref (ob, ref, encoder, varpool_encoder);
}
}
{
struct varpool_node *node = vsi_node (vsi);
- count = ipa_ref_list_nreferences (&node->ref_list);
+ count = ipa_ref_list_nreferences (&node->symbol.ref_list);
if (count)
{
streamer_write_uhwi_stream (ob->main_stream, count);
streamer_write_uhwi_stream (ob->main_stream,
lto_varpool_encoder_lookup (varpool_encoder,
node));
- for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
+ for (i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list,
+ i, ref); i++)
lto_output_ref (ob, ref, encoder, varpool_encoder);
}
}
{
node = csi_node (csi);
add_node_to (encoder, node, true);
- add_references (encoder, varpool_encoder, &node->ref_list);
+ add_references (encoder, varpool_encoder, &node->symbol.ref_list);
}
for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
{
gcc_assert (!vnode->alias || vnode->alias_of);
lto_varpool_encoder_encode (varpool_encoder, vnode);
lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
- add_references (encoder, varpool_encoder, &vnode->ref_list);
+ add_references (encoder, varpool_encoder, &vnode->symbol.ref_list);
}
/* Pickle in also the initializer of all referenced readonly variables
to help folding. Constant pool variables are not shared, so we must
for (i = 0; i < lto_varpool_encoder_size (varpool_encoder); i++)
{
struct varpool_node *vnode = lto_varpool_encoder_deref (varpool_encoder, i);
- if (DECL_INITIAL (vnode->decl)
+ if (DECL_INITIAL (vnode->symbol.decl)
&& !lto_varpool_encoder_encode_initializer_p (varpool_encoder,
vnode)
- && const_value_known_p (vnode->decl))
+ && const_value_known_p (vnode->symbol.decl))
{
lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
- add_references (encoder, varpool_encoder, &vnode->ref_list);
+ add_references (encoder, varpool_encoder, &vnode->symbol.ref_list);
}
else if (vnode->alias || vnode->alias_of)
- add_references (encoder, varpool_encoder, &vnode->ref_list);
+ add_references (encoder, varpool_encoder, &vnode->symbol.ref_list);
}
/* Go over all the nodes again to include callees that are not in
enum LTO_cgraph_tags tag,
struct bitpack_d *bp)
{
- node->aux = (void *) tag;
- node->local.lto_file_data = file_data;
+ node->symbol.aux = (void *) tag;
+ node->symbol.lto_file_data = file_data;
node->local.local = bp_unpack_value (bp, 1);
- node->local.externally_visible = bp_unpack_value (bp, 1);
+ node->symbol.externally_visible = bp_unpack_value (bp, 1);
node->local.finalized = bp_unpack_value (bp, 1);
node->local.versionable = bp_unpack_value (bp, 1);
node->local.can_change_signature = bp_unpack_value (bp, 1);
node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
node->needed = bp_unpack_value (bp, 1);
- node->address_taken = bp_unpack_value (bp, 1);
+ node->symbol.address_taken = bp_unpack_value (bp, 1);
node->abstract_and_needed = bp_unpack_value (bp, 1);
- node->reachable_from_other_partition = bp_unpack_value (bp, 1);
+ node->symbol.used_from_other_partition = bp_unpack_value (bp, 1);
node->lowered = bp_unpack_value (bp, 1);
node->analyzed = tag == LTO_cgraph_analyzed_node;
- node->in_other_partition = bp_unpack_value (bp, 1);
- if (node->in_other_partition
+ node->symbol.in_other_partition = bp_unpack_value (bp, 1);
+ if (node->symbol.in_other_partition
/* Avoid updating decl when we are seeing just inline clone.
When inlining function that has functions already inlined into it,
we produce clones of inline clones.
we might end up streaming inline clone from other partition
to support clone we are interested in. */
&& (!node->clone_of
- || node->clone_of->decl != node->decl))
+ || node->clone_of->symbol.decl != node->symbol.decl))
{
- DECL_EXTERNAL (node->decl) = 1;
- TREE_STATIC (node->decl) = 0;
+ DECL_EXTERNAL (node->symbol.decl) = 1;
+ TREE_STATIC (node->symbol.decl) = 0;
}
node->alias = bp_unpack_value (bp, 1);
node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
node->only_called_at_exit = bp_unpack_value (bp, 1);
node->tm_clone = bp_unpack_value (bp, 1);
node->thunk.thunk_p = bp_unpack_value (bp, 1);
- node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
+ node->symbol.resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
LDPR_NUM_KNOWN);
}
else
node = cgraph_get_create_node (fn_decl);
- node->order = order;
+ node->symbol.order = order;
if (order >= cgraph_order)
cgraph_order = order + 1;
have already been read will have their tag stored in the 'aux'
field. Since built-in functions can be referenced in multiple
functions, they are expected to be read more than once. */
- if (node->aux && !DECL_BUILT_IN (node->decl))
+ if (node->symbol.aux && !DECL_BUILT_IN (node->symbol.decl))
internal_error ("bytecode stream: found multiple instances of cgraph "
"node %d", node->uid);
node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;
/* Store a reference for now, and fix up later to be a pointer. */
- node->same_comdat_group = (cgraph_node_ptr) (intptr_t) ref2;
+ node->symbol.same_comdat_group = (symtab_node) (intptr_t) ref2;
if (node->thunk.thunk_p)
{
decl_index = streamer_read_uhwi (ib);
var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
node = varpool_node (var_decl);
- node->order = order;
+ node->symbol.order = order;
if (order >= cgraph_order)
cgraph_order = order + 1;
- node->lto_file_data = file_data;
+ node->symbol.lto_file_data = file_data;
bp = streamer_read_bitpack (ib);
- node->externally_visible = bp_unpack_value (&bp, 1);
+ node->symbol.externally_visible = bp_unpack_value (&bp, 1);
node->force_output = bp_unpack_value (&bp, 1);
node->finalized = bp_unpack_value (&bp, 1);
node->alias = bp_unpack_value (&bp, 1);
non_null_aliasof = bp_unpack_value (&bp, 1);
node->analyzed = node->finalized;
- node->used_from_other_partition = bp_unpack_value (&bp, 1);
- node->in_other_partition = bp_unpack_value (&bp, 1);
- if (node->in_other_partition)
+ node->symbol.used_from_other_partition = bp_unpack_value (&bp, 1);
+ node->symbol.in_other_partition = bp_unpack_value (&bp, 1);
+ if (node->symbol.in_other_partition)
{
- DECL_EXTERNAL (node->decl) = 1;
- TREE_STATIC (node->decl) = 0;
+ DECL_EXTERNAL (node->symbol.decl) = 1;
+ TREE_STATIC (node->symbol.decl) = 0;
}
if (node->finalized)
varpool_mark_needed_node (node);
}
ref = streamer_read_hwi (ib);
/* Store a reference for now, and fix up later to be a pointer. */
- node->same_comdat_group = (struct varpool_node *) (intptr_t) ref;
- node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
- LDPR_NUM_KNOWN);
+ node->symbol.same_comdat_group = (symtab_node) (intptr_t) ref;
+ node->symbol.resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
+ LDPR_NUM_KNOWN);
return node;
}
struct cgraph_node *refering_node,
struct varpool_node *refering_varpool_node,
VEC(cgraph_node_ptr, heap) *nodes,
- VEC(varpool_node_ptr, heap) *varpool_nodes)
+ VEC(varpool_node_ptr, heap) *varpool_nodes_vec)
{
struct cgraph_node *node = NULL;
struct varpool_node *varpool_node = NULL;
if (type == IPA_REF_CGRAPH)
node = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
else
- varpool_node = VEC_index (varpool_node_ptr, varpool_nodes,
+ varpool_node = VEC_index (varpool_node_ptr, varpool_nodes_vec,
streamer_read_hwi (ib));
ipa_record_reference (refering_node, refering_varpool_node,
node, varpool_node, use, NULL);
int ecf_flags = 0;
caller = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
- if (caller == NULL || caller->decl == NULL_TREE)
+ if (caller == NULL || caller->symbol.decl == NULL_TREE)
internal_error ("bytecode stream: no caller found while reading edge");
if (!indirect)
{
callee = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
- if (callee == NULL || callee->decl == NULL_TREE)
+ if (callee == NULL || callee->symbol.decl == NULL_TREE)
internal_error ("bytecode stream: no callee found while reading edge");
}
else
else
{
node = input_node (file_data, ib, tag,nodes);
- if (node == NULL || node->decl == NULL_TREE)
+ if (node == NULL || node->symbol.decl == NULL_TREE)
internal_error ("bytecode stream: found empty cgraph node");
VEC_safe_push (cgraph_node_ptr, heap, nodes, node);
lto_cgraph_encoder_encode (file_data->cgraph_node_encoder, node);
/* AUX pointers should be all non-zero for nodes read from the stream. */
#ifdef ENABLE_CHECKING
FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
- gcc_assert (node->aux);
+ gcc_assert (node->symbol.aux);
#endif
FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
{
int ref = (int) (intptr_t) node->global.inlined_to;
/* We share declaration of builtins, so we may read same node twice. */
- if (!node->aux)
+ if (!node->symbol.aux)
continue;
- node->aux = NULL;
+ node->symbol.aux = NULL;
/* Fixup inlined_to from reference to pointer. */
if (ref != LCC_NOT_FOUND)
else
node->global.inlined_to = NULL;
- ref = (int) (intptr_t) node->same_comdat_group;
+ ref = (int) (intptr_t) node->symbol.same_comdat_group;
/* Fixup same_comdat_group from reference to pointer. */
if (ref != LCC_NOT_FOUND)
- node->same_comdat_group = VEC_index (cgraph_node_ptr, nodes, ref);
+ node->symbol.same_comdat_group = (symtab_node)VEC_index (cgraph_node_ptr, nodes, ref);
else
- node->same_comdat_group = NULL;
+ node->symbol.same_comdat_group = NULL;
}
FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
- node->aux = (void *)1;
+ node->symbol.aux = (void *)1;
return nodes;
}
}
#ifdef ENABLE_CHECKING
FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
- gcc_assert (!node->aux);
+ gcc_assert (!node->symbol.aux);
#endif
FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
{
- int ref = (int) (intptr_t) node->same_comdat_group;
+ int ref = (int) (intptr_t) node->symbol.same_comdat_group;
/* We share declaration of builtins, so we may read same node twice. */
- if (node->aux)
+ if (node->symbol.aux)
continue;
- node->aux = (void *)1;
+ node->symbol.aux = (void *)1;
/* Fixup same_comdat_group from reference to pointer. */
if (ref != LCC_NOT_FOUND)
- node->same_comdat_group = VEC_index (varpool_node_ptr, varpool, ref);
+ node->symbol.same_comdat_group = (symtab_node)VEC_index (varpool_node_ptr, varpool, ref);
else
- node->same_comdat_group = NULL;
+ node->symbol.same_comdat_group = NULL;
}
FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
- node->aux = NULL;
+ node->symbol.aux = NULL;
return varpool;
}
During LTRANS we already have values of count_materialization_scale
computed, so just update them. */
for (node = cgraph_nodes; node; node = node->next)
- if (node->local.lto_file_data
- && node->local.lto_file_data->profile_info.runs)
+ if (node->symbol.lto_file_data
+ && node->symbol.lto_file_data->profile_info.runs)
{
int scale;
scale =
((node->count_materialization_scale * max_runs
- + node->local.lto_file_data->profile_info.runs / 2)
- / node->local.lto_file_data->profile_info.runs);
+ + node->symbol.lto_file_data->profile_info.runs / 2)
+ / node->symbol.lto_file_data->profile_info.runs);
node->count_materialization_scale = scale;
if (scale < 0)
fatal_error ("Profile information in %s corrupted",
node for the parent function was never emitted to the gimple
file, cgraph_node will create a node for it when setting the
context of the nested function. */
- if (node->local.lto_file_data)
- node->aux = NULL;
+ if (node->symbol.lto_file_data)
+ node->symbol.aux = NULL;
}
}
int parm_num;
tree parm;
- for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm;
+ for (parm_num = 0, parm = DECL_ARGUMENTS (node->symbol.decl); parm;
parm = DECL_CHAIN (parm), parm_num++)
if (map->old_tree == parm)
break;
struct ipa_replace_map *map = ggc_alloc_ipa_replace_map ();
VEC_safe_push (ipa_replace_map_p, gc, node->clone.tree_map, map);
- for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm_num;
+ for (parm_num = 0, parm = DECL_ARGUMENTS (node->symbol.decl); parm_num;
parm = DECL_CHAIN (parm))
parm_num --;
map->parm_num = streamer_read_uhwi (ib_main);
basic_block bb;
struct output_block *ob;
- function = node->decl;
+ function = node->symbol.decl;
fn = DECL_STRUCT_FUNCTION (function);
ob = create_output_block (LTO_section_function_body);
{
vnode = varpool_get_node (p->decl);
return (vnode
- && referenced_from_this_partition_p (&vnode->ref_list, set, vset));
+ && referenced_from_this_partition_p (&vnode->symbol.ref_list,
+ set, vset));
}
node = cgraph_get_node (p->decl);
return (node
- && (referenced_from_this_partition_p (&node->ref_list, set, vset)
+ && (referenced_from_this_partition_p (&node->symbol.ref_list,
+ set, vset)
|| reachable_from_this_partition_p (node, set)));
}
else
static void
copy_function (struct cgraph_node *node)
{
- tree function = node->decl;
- struct lto_file_decl_data *file_data = node->local.lto_file_data;
+ tree function = node->symbol.decl;
+ struct lto_file_decl_data *file_data = node->symbol.lto_file_data;
struct lto_output_stream *output_stream = XCNEW (struct lto_output_stream);
const char *data;
size_t len;
/* Copy decls. */
in_state =
- lto_get_function_in_decl_state (node->local.lto_file_data, function);
+ lto_get_function_in_decl_state (node->symbol.lto_file_data, function);
gcc_assert (in_state);
for (i = 0; i < LTO_N_DECL_STREAMS; i++)
&& !node->thunk.thunk_p)
{
#ifdef ENABLE_CHECKING
- gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
- bitmap_set_bit (output, DECL_UID (node->decl));
+ gcc_assert (!bitmap_bit_p (output, DECL_UID (node->symbol.decl)));
+ bitmap_set_bit (output, DECL_UID (node->symbol.decl));
#endif
decl_state = lto_new_out_decl_state ();
lto_push_out_decl_state (decl_state);
- if (gimple_has_body_p (node->decl))
+ if (gimple_has_body_p (node->symbol.decl))
output_function (node);
else
copy_function (node);
gcc_assert (lto_get_out_decl_state () == decl_state);
lto_pop_out_decl_state ();
- lto_record_function_out_decl_state (node->decl, decl_state);
+ lto_record_function_out_decl_state (node->symbol.decl, decl_state);
}
}
for (i = 0; i < lto_cgraph_encoder_size (encoder); i++)
{
node = lto_cgraph_encoder_deref (encoder, i);
- if (DECL_EXTERNAL (node->decl))
+ if (DECL_EXTERNAL (node->symbol.decl))
continue;
- if (DECL_COMDAT (node->decl)
+ if (DECL_COMDAT (node->symbol.decl)
&& cgraph_comdat_can_be_unshared_p (node))
continue;
if ((node->alias && !node->thunk.alias) || node->global.inlined_to)
continue;
- write_symbol (cache, &stream, node->decl, seen, false);
+ write_symbol (cache, &stream, node->symbol.decl, seen, false);
}
for (i = 0; i < lto_cgraph_encoder_size (encoder); i++)
{
node = lto_cgraph_encoder_deref (encoder, i);
- if (!DECL_EXTERNAL (node->decl))
+ if (!DECL_EXTERNAL (node->symbol.decl))
continue;
/* We keep around unused extern inlines in order to be able to inline
them indirectly or via vtables. Do not output them to symbol
table: they end up being undefined and just consume space. */
- if (!node->address_taken && !node->callers)
+ if (!node->symbol.address_taken && !node->callers)
continue;
- if (DECL_COMDAT (node->decl)
+ if (DECL_COMDAT (node->symbol.decl)
&& cgraph_comdat_can_be_unshared_p (node))
continue;
if ((node->alias && !node->thunk.alias) || node->global.inlined_to)
continue;
- write_symbol (cache, &stream, node->decl, seen, false);
+ write_symbol (cache, &stream, node->symbol.decl, seen, false);
}
/* Write all variables. */
for (i = 0; i < lto_varpool_encoder_size (varpool_encoder); i++)
{
vnode = lto_varpool_encoder_deref (varpool_encoder, i);
- if (DECL_EXTERNAL (vnode->decl))
+ if (DECL_EXTERNAL (vnode->symbol.decl))
continue;
/* COMDAT virtual tables can be unshared. Do not declare them
in the LTO symbol table to prevent linker from forcing them
into the output. */
- if (DECL_COMDAT (vnode->decl)
+ if (DECL_COMDAT (vnode->symbol.decl)
&& !vnode->force_output
&& vnode->finalized
- && DECL_VIRTUAL_P (vnode->decl))
+ && DECL_VIRTUAL_P (vnode->symbol.decl))
continue;
if (vnode->alias && !vnode->alias_of)
continue;
- write_symbol (cache, &stream, vnode->decl, seen, false);
+ write_symbol (cache, &stream, vnode->symbol.decl, seen, false);
}
for (i = 0; i < lto_varpool_encoder_size (varpool_encoder); i++)
{
vnode = lto_varpool_encoder_deref (varpool_encoder, i);
- if (!DECL_EXTERNAL (vnode->decl))
+ if (!DECL_EXTERNAL (vnode->symbol.decl))
continue;
- if (DECL_COMDAT (vnode->decl)
+ if (DECL_COMDAT (vnode->symbol.decl)
&& !vnode->force_output
&& vnode->finalized
- && DECL_VIRTUAL_P (vnode->decl))
+ && DECL_VIRTUAL_P (vnode->symbol.decl))
continue;
if (vnode->alias && !vnode->alias_of)
continue;
- write_symbol (cache, &stream, vnode->decl, seen, false);
+ write_symbol (cache, &stream, vnode->symbol.decl, seen, false);
}
/* Write all aliases. */
cgraph_node_name (prevailing_node),
prevailing_node->uid,
IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name)
- (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)))));
+ (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->symbol.decl)))));
}
/* Merge node flags. */
cgraph_mark_needed_node (prevailing_node);
if (node->reachable)
cgraph_mark_reachable_node (prevailing_node);
- if (node->address_taken)
+ if (node->symbol.address_taken)
{
gcc_assert (!prevailing_node->global.inlined_to);
cgraph_mark_address_taken_node (prevailing_node);
/* Redirect all incoming edges. */
compatible_p
- = types_compatible_p (TREE_TYPE (TREE_TYPE (prevailing_node->decl)),
- TREE_TYPE (TREE_TYPE (node->decl)));
+ = types_compatible_p (TREE_TYPE (TREE_TYPE (prevailing_node->symbol.decl)),
+ TREE_TYPE (TREE_TYPE (node->symbol.decl)));
for (e = node->callers; e; e = next)
{
next = e->next_caller;
e->call_stmt_cannot_inline_p = 1;
}
/* Redirect incomming references. */
- ipa_clone_refering (prevailing_node, NULL, &node->ref_list);
+ ipa_clone_refering (prevailing_node, NULL, &node->symbol.ref_list);
/* Finally remove the replaced node. */
cgraph_remove_node (node);
gcc_assert (!vnode->finalized || prevailing_node->finalized);
gcc_assert (!vnode->analyzed || prevailing_node->analyzed);
- ipa_clone_refering (NULL, prevailing_node, &vnode->ref_list);
+ ipa_clone_refering (NULL, prevailing_node, &vnode->symbol.ref_list);
/* Be sure we can garbage collect the initializer. */
- if (DECL_INITIAL (vnode->decl))
- DECL_INITIAL (vnode->decl) = error_mark_node;
+ if (DECL_INITIAL (vnode->symbol.decl))
+ DECL_INITIAL (vnode->symbol.decl) = error_mark_node;
/* Finally remove the replaced node. */
varpool_remove_node (vnode);
}
First one would disable some whole program optimizations, while
ther second would imply to many whole program assumptions. */
if (prevailing->node && !flag_ltrans && !prevailing->guessed)
- prevailing->node->resolution = prevailing->resolution;
+ prevailing->node->symbol.resolution = prevailing->resolution;
else if (prevailing->vnode && !flag_ltrans && !prevailing->guessed)
- prevailing->vnode->resolution = prevailing->resolution;
+ prevailing->vnode->symbol.resolution = prevailing->resolution;
return 1;
}
+2012-04-14 Jan Hubicka <jh@suse.cz>
+
+ * lto.c: Update field references for new cgraph/varpool layout.
+ * lto-partition.c: Likewise.
+
2012-04-11 Jan Hubicka <jh@suse.cz>
* lto.c: Update copyright; remove params.h, ipa-inline.h
{
if (ref->refered_type == IPA_REF_CGRAPH
&& (DECL_COMDAT (cgraph_function_node (ipa_ref_node (ref),
- NULL)->decl)
+ NULL)->symbol.decl)
|| (ref->use == IPA_REF_ALIAS
&& lookup_attribute
- ("weakref", DECL_ATTRIBUTES (ipa_ref_node (ref)->decl))))
+ ("weakref", DECL_ATTRIBUTES (ipa_ref_node (ref)->symbol.decl))))
&& !cgraph_node_in_set_p (ipa_ref_node (ref), part->cgraph_set))
add_cgraph_node_to_partition (part, ipa_ref_node (ref));
else
if (ref->refered_type == IPA_REF_VARPOOL
- && (DECL_COMDAT (ipa_ref_varpool_node (ref)->decl)
+ && (DECL_COMDAT (ipa_ref_varpool_node (ref)->symbol.decl)
|| (ref->use == IPA_REF_ALIAS
&& lookup_attribute
("weakref",
- DECL_ATTRIBUTES (ipa_ref_varpool_node (ref)->decl))))
+ DECL_ATTRIBUTES (ipa_ref_varpool_node (ref)->symbol.decl))))
&& !varpool_node_in_set_p (ipa_ref_varpool_node (ref),
part->varpool_set))
add_varpool_node_to_partition (part, ipa_ref_varpool_node (ref));
part->cgraph_set)
&& !lookup_attribute ("weakref",
DECL_ATTRIBUTES
- (ipa_ref_refering_node (ref)->decl)))
+ (ipa_ref_refering_node (ref)->symbol.decl)))
add_cgraph_node_to_partition (part, ipa_ref_refering_node (ref));
else
if (ref->refering_type == IPA_REF_VARPOOL
part->varpool_set)
&& !lookup_attribute ("weakref",
DECL_ATTRIBUTES
- (ipa_ref_refering_varpool_node (ref)->decl)))
+ (ipa_ref_refering_varpool_node (ref)->symbol.decl)))
add_varpool_node_to_partition (part,
ipa_ref_refering_varpool_node (ref));
}
ltrans_partition part = (ltrans_partition) data;
/* non-COMDAT aliases of COMDAT functions needs to be output just once. */
- if (!DECL_COMDAT (node->decl)
+ if (!DECL_COMDAT (node->symbol.decl)
&& !node->global.inlined_to
- && node->aux)
+ && node->symbol.aux)
{
gcc_assert (node->thunk.thunk_p || node->alias);
return false;
}
- if (node->aux)
+ if (node->symbol.aux)
{
- node->in_other_partition = 1;
+ node->symbol.in_other_partition = 1;
if (cgraph_dump_file)
fprintf (cgraph_dump_file, "Node %s/%i now used in multiple partitions\n",
cgraph_node_name (node), node->uid);
}
- node->aux = (void *)((size_t)node->aux + 1);
+ node->symbol.aux = (void *)((size_t)node->symbol.aux + 1);
cgraph_node_set_add (part->cgraph_set, node);
return false;
}
for (e = node->callees; e; e = e->next_callee)
if ((!e->inline_failed
- || DECL_COMDAT (cgraph_function_node (e->callee, NULL)->decl))
+ || DECL_COMDAT (cgraph_function_node (e->callee, NULL)->symbol.decl))
&& !cgraph_node_in_set_p (e->callee, part->cgraph_set))
add_cgraph_node_to_partition (part, e->callee);
/* The only way to assemble non-weakref alias is to add the aliased object into
the unit. */
- add_references_to_partition (part, &node->ref_list);
+ add_references_to_partition (part, &node->symbol.ref_list);
n = cgraph_function_node (node, NULL);
if (n != node
&& !lookup_attribute ("weakref",
- DECL_ATTRIBUTES (node->decl)))
+ DECL_ATTRIBUTES (node->symbol.decl)))
add_cgraph_node_to_partition (part, n);
- if (node->same_comdat_group)
- for (n = node->same_comdat_group; n != node; n = n->same_comdat_group)
+ if (node->symbol.same_comdat_group)
+ for (n = cgraph (node->symbol.same_comdat_group);
+ n != node; n = cgraph (n->symbol.same_comdat_group))
add_cgraph_node_to_partition (part, n);
}
varpool_node_set_add (part->varpool_set, vnode);
- if (vnode->aux)
+ if (vnode->symbol.aux)
{
- vnode->in_other_partition = 1;
+ vnode->symbol.in_other_partition = 1;
if (cgraph_dump_file)
fprintf (cgraph_dump_file, "Varpool node %s now used in multiple partitions\n",
varpool_node_name (vnode));
}
- vnode->aux = (void *)((size_t)vnode->aux + 1);
+ vnode->symbol.aux = (void *)((size_t)vnode->symbol.aux + 1);
/* The only way to assemble non-weakref alias is to add the aliased object into
the unit. */
v = varpool_variable_node (vnode, NULL);
if (v != vnode
&& !lookup_attribute ("weakref",
- DECL_ATTRIBUTES (vnode->decl)))
+ DECL_ATTRIBUTES (vnode->symbol.decl)))
add_varpool_node_to_partition (part, v);
- add_references_to_partition (part, &vnode->ref_list);
+ add_references_to_partition (part, &vnode->symbol.ref_list);
- if (vnode->same_comdat_group
- && !varpool_node_in_set_p (vnode->same_comdat_group, part->varpool_set))
- add_varpool_node_to_partition (part, vnode->same_comdat_group);
+ if (vnode->symbol.same_comdat_group
+ && !varpool_node_in_set_p (varpool (vnode->symbol.same_comdat_group),
+ part->varpool_set))
+ add_varpool_node_to_partition (part, varpool (vnode->symbol.same_comdat_group));
}
/* Undo all additions until number of cgraph nodes in PARITION is N_CGRAPH_NODES
n_cgraph_nodes);
partition->insns -= inline_summary (node)->self_size;
cgraph_node_set_remove (partition->cgraph_set, node);
- node->aux = (void *)((size_t)node->aux - 1);
+ node->symbol.aux = (void *)((size_t)node->symbol.aux - 1);
}
while (VEC_length (varpool_node_ptr, partition->varpool_set->nodes) >
n_varpool_nodes)
partition->varpool_set->nodes,
n_varpool_nodes);
varpool_node_set_remove (partition->varpool_set, node);
- node->aux = (void *)((size_t)node->aux - 1);
+ node->symbol.aux = (void *)((size_t)node->symbol.aux - 1);
}
}
if (!node->analyzed)
return false;
/* Extern inlines and comdat are always only in partitions they are needed. */
- if (DECL_EXTERNAL (node->decl)
- || (DECL_COMDAT (node->decl)
+ if (DECL_EXTERNAL (node->symbol.decl)
+ || (DECL_COMDAT (node->symbol.decl)
&& !cgraph_used_from_object_file_p (node)))
return false;
- if (lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
+ if (lookup_attribute ("weakref", DECL_ATTRIBUTES (node->symbol.decl)))
return false;
return true;
}
if (vnode->alias || !vnode->needed)
return false;
/* Constant pool and comdat are always only in partitions they are needed. */
- if (DECL_IN_CONSTANT_POOL (vnode->decl)
- || (DECL_COMDAT (vnode->decl)
+ if (DECL_IN_CONSTANT_POOL (vnode->symbol.decl)
+ || (DECL_COMDAT (vnode->symbol.decl)
&& !vnode->force_output
&& !varpool_used_from_object_file_p (vnode)))
return false;
- if (lookup_attribute ("weakref", DECL_ATTRIBUTES (vnode->decl)))
+ if (lookup_attribute ("weakref", DECL_ATTRIBUTES (vnode->symbol.decl)))
return false;
return true;
}
for (node = cgraph_nodes; node; node = node->next)
{
if (!partition_cgraph_node_p (node)
- || node->aux)
+ || node->symbol.aux)
continue;
- file_data = node->local.lto_file_data;
+ file_data = node->symbol.lto_file_data;
if (file_data)
{
for (vnode = varpool_nodes; vnode; vnode = vnode->next)
{
if (!partition_varpool_node_p (vnode)
- || vnode->aux)
+ || vnode->symbol.aux)
continue;
- file_data = vnode->lto_file_data;
+ file_data = vnode->symbol.lto_file_data;
slot = pointer_map_contains (pmap, file_data);
if (slot)
partition = (ltrans_partition) *slot;
add_varpool_node_to_partition (partition, vnode);
}
for (node = cgraph_nodes; node; node = node->next)
- node->aux = NULL;
+ node->symbol.aux = NULL;
for (vnode = varpool_nodes; vnode; vnode = vnode->next)
- vnode->aux = NULL;
+ vnode->symbol.aux = NULL;
/* If the cgraph is empty, create one cgraph node set so that there is still
an output file for any variables that need to be exported in a DSO. */
{
const struct cgraph_node *a = *(const struct cgraph_node * const *) pa;
const struct cgraph_node *b = *(const struct cgraph_node * const *) pb;
- return b->order - a->order;
+ return b->symbol.order - a->symbol.order;
}
/* Helper function for qsort; sort nodes by order. */
{
const struct varpool_node *a = *(const struct varpool_node * const *) pa;
const struct varpool_node *b = *(const struct varpool_node * const *) pb;
- return b->order - a->order;
+ return b->symbol.order - a->symbol.order;
}
/* Group cgraph nodes into equally-sized partitions.
int current_order = -1;
for (vnode = varpool_nodes; vnode; vnode = vnode->next)
- gcc_assert (!vnode->aux);
+ gcc_assert (!vnode->symbol.aux);
/* Until we have better ordering facility, use toplogical order.
Include only nodes we will partition and compute estimate of program
size. Note that since nodes that are not partitioned might be put into
for (i = 0; i < n_nodes; i++)
{
- if (order[i]->aux)
+ if (order[i]->symbol.aux)
continue;
- current_order = order[i]->order;
+ current_order = order[i]->symbol.order;
if (!flag_toplevel_reorder)
- while (varpool_pos < n_varpool_nodes && varpool_order[varpool_pos]->order < current_order)
+ while (varpool_pos < n_varpool_nodes
+ && varpool_order[varpool_pos]->symbol.order < current_order)
{
- if (!varpool_order[varpool_pos]->aux)
+ if (!varpool_order[varpool_pos]->symbol.aux)
add_varpool_node_to_partition (partition, varpool_order[varpool_pos]);
varpool_pos++;
}
cgraph_p = true;
node = VEC_index (cgraph_node_ptr, partition->cgraph_set->nodes,
last_visited_cgraph_node);
- refs = &node->ref_list;
+ refs = &node->symbol.ref_list;
last_visited_cgraph_node++;
{
refs =
&VEC_index (varpool_node_ptr, partition->varpool_set->nodes,
- last_visited_varpool_node)->ref_list;
+ last_visited_varpool_node)->symbol.ref_list;
last_visited_varpool_node++;
}
vnode = ipa_ref_varpool_node (ref);
if (!vnode->finalized)
continue;
- if (!vnode->aux && flag_toplevel_reorder
+ if (!vnode->symbol.aux && flag_toplevel_reorder
&& partition_varpool_node_p (vnode))
add_varpool_node_to_partition (partition, vnode);
vsi = varpool_node_set_find (partition->varpool_set, vnode);
vnode = ipa_ref_refering_varpool_node (ref);
gcc_assert (vnode->finalized);
- if (!vnode->aux && flag_toplevel_reorder
+ if (!vnode->symbol.aux && flag_toplevel_reorder
&& partition_varpool_node_p (vnode))
add_varpool_node_to_partition (partition, vnode);
vsi = varpool_node_set_find (partition->varpool_set, vnode);
}
i = best_i;
/* When we are finished, avoid creating empty partition. */
- while (i < n_nodes - 1 && order[i + 1]->aux)
+ while (i < n_nodes - 1 && order[i + 1]->symbol.aux)
i++;
if (i == n_nodes - 1)
break;
if (flag_toplevel_reorder)
{
for (vnode = varpool_nodes; vnode; vnode = vnode->next)
- if (partition_varpool_node_p (vnode) && !vnode->aux)
+ if (partition_varpool_node_p (vnode) && !vnode->symbol.aux)
add_varpool_node_to_partition (partition, vnode);
}
else
{
while (varpool_pos < n_varpool_nodes)
{
- if (!varpool_order[varpool_pos]->aux)
+ if (!varpool_order[varpool_pos]->symbol.aux)
add_varpool_node_to_partition (partition, varpool_order[varpool_pos]);
varpool_pos++;
}
static bool
promote_var (struct varpool_node *vnode)
{
- if (TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
+ if (TREE_PUBLIC (vnode->symbol.decl) || DECL_EXTERNAL (vnode->symbol.decl))
return false;
gcc_assert (flag_wpa);
- TREE_PUBLIC (vnode->decl) = 1;
- DECL_VISIBILITY (vnode->decl) = VISIBILITY_HIDDEN;
- DECL_VISIBILITY_SPECIFIED (vnode->decl) = true;
+ TREE_PUBLIC (vnode->symbol.decl) = 1;
+ DECL_VISIBILITY (vnode->symbol.decl) = VISIBILITY_HIDDEN;
+ DECL_VISIBILITY_SPECIFIED (vnode->symbol.decl) = true;
if (cgraph_dump_file)
fprintf (cgraph_dump_file,
"Promoting var as hidden: %s\n", varpool_node_name (vnode));
promote_fn (struct cgraph_node *node)
{
gcc_assert (flag_wpa);
- if (TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl))
+ if (TREE_PUBLIC (node->symbol.decl) || DECL_EXTERNAL (node->symbol.decl))
return false;
- TREE_PUBLIC (node->decl) = 1;
- DECL_VISIBILITY (node->decl) = VISIBILITY_HIDDEN;
- DECL_VISIBILITY_SPECIFIED (node->decl) = true;
+ TREE_PUBLIC (node->symbol.decl) = 1;
+ DECL_VISIBILITY (node->symbol.decl) = VISIBILITY_HIDDEN;
+ DECL_VISIBILITY_SPECIFIED (node->symbol.decl) = true;
if (cgraph_dump_file)
fprintf (cgraph_dump_file,
"Promoting function as hidden: %s/%i\n",
for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
{
struct cgraph_node *node = csi_node (csi);
- if (node->local.externally_visible)
+ if (node->symbol.externally_visible)
continue;
if (node->global.inlined_to)
continue;
- if ((!DECL_EXTERNAL (node->decl) && !DECL_COMDAT (node->decl))
- && (referenced_from_other_partition_p (&node->ref_list, set, vset)
+ if ((!DECL_EXTERNAL (node->symbol.decl)
+ && !DECL_COMDAT (node->symbol.decl))
+ && (referenced_from_other_partition_p (&node->symbol.ref_list, set, vset)
|| reachable_from_other_partition_p (node, set)))
promote_fn (node);
}
/* Constant pool references use internal labels and thus can not
be made global. It is sensible to keep those ltrans local to
allow better optimization. */
- if (!DECL_IN_CONSTANT_POOL (vnode->decl) && !DECL_COMDAT (vnode->decl)
- && !vnode->externally_visible && vnode->analyzed
- && referenced_from_other_partition_p (&vnode->ref_list,
+ if (!DECL_IN_CONSTANT_POOL (vnode->symbol.decl)
+ && !DECL_COMDAT (vnode->symbol.decl)
+ && !vnode->symbol.externally_visible && vnode->analyzed
+ && referenced_from_other_partition_p (&vnode->symbol.ref_list,
set, vset))
promote_var (vnode);
}
from this partition that are not in this partition. This needs
to be done recursively. */
for (vnode = varpool_nodes; vnode; vnode = vnode->next)
- if (const_value_known_p (vnode->decl)
- && DECL_INITIAL (vnode->decl)
+ if (const_value_known_p (vnode->symbol.decl)
+ && DECL_INITIAL (vnode->symbol.decl)
&& !varpool_node_in_set_p (vnode, vset)
- && referenced_from_this_partition_p (&vnode->ref_list, set, vset)
+ && referenced_from_this_partition_p (&vnode->symbol.ref_list, set, vset)
&& !pointer_set_insert (inserted, vnode))
VEC_safe_push (varpool_node_ptr, heap, promoted_initializers, vnode);
vnode = VEC_pop (varpool_node_ptr, promoted_initializers);
for (i = 0;
- ipa_ref_list_reference_iterate (&vnode->ref_list, i, ref);
+ ipa_ref_list_reference_iterate (&vnode->symbol.ref_list, i, ref);
i++)
{
if (ref->refered_type == IPA_REF_CGRAPH)
{
struct cgraph_node *n = ipa_ref_node (ref);
gcc_assert (!n->global.inlined_to);
- if (!n->local.externally_visible
+ if (!n->symbol.externally_visible
&& !cgraph_node_in_set_p (n, set))
promote_fn (n);
}
/* Constant pool references use internal labels and thus
cannot be made global. It is sensible to keep those
ltrans local to allow better optimization. */
- if (DECL_IN_CONSTANT_POOL (v->decl))
+ if (DECL_IN_CONSTANT_POOL (v->symbol.decl))
{
if (!pointer_set_insert (inserted, vnode))
VEC_safe_push (varpool_node_ptr, heap,
promoted_initializers, v);
}
- else if (!v->externally_visible && v->analyzed)
+ else if (!v->symbol.externally_visible && v->analyzed)
{
if (promote_var (v)
- && DECL_INITIAL (v->decl)
- && const_value_known_p (v->decl)
+ && DECL_INITIAL (v->symbol.decl)
+ && const_value_known_p (v->symbol.decl)
&& !pointer_set_insert (inserted, vnode))
VEC_safe_push (varpool_node_ptr, heap,
promoted_initializers, v);
const char *data, *name;
size_t len;
- decl = node->decl;
+ decl = node->symbol.decl;
/* Read in functions with body (analyzed nodes)
and also functions that are needed to produce virtual clones. */
if (cgraph_function_with_gimple_body_p (node) || has_analyzed_clone_p (node))
WPA mode, the body of the function is not needed. */
if (!flag_wpa)
{
- file_data = node->local.lto_file_data;
+ file_data = node->symbol.lto_file_data;
name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
/* We may have renamed the declaration, e.g., a static function. */
int ordera = -1, orderb = -1;
if (VEC_length (cgraph_node_ptr, pa->cgraph_set->nodes))
- ordera = VEC_index (cgraph_node_ptr, pa->cgraph_set->nodes, 0)->order;
+ ordera = VEC_index (cgraph_node_ptr, pa->cgraph_set->nodes, 0)->symbol.order;
else if (VEC_length (varpool_node_ptr, pa->varpool_set->nodes))
- ordera = VEC_index (varpool_node_ptr, pa->varpool_set->nodes, 0)->order;
+ ordera = VEC_index (varpool_node_ptr, pa->varpool_set->nodes, 0)->symbol.order;
if (VEC_length (cgraph_node_ptr, pb->cgraph_set->nodes))
- orderb = VEC_index (cgraph_node_ptr, pb->cgraph_set->nodes, 0)->order;
+ orderb = VEC_index (cgraph_node_ptr, pb->cgraph_set->nodes, 0)->symbol.order;
else if (VEC_length (varpool_node_ptr, pb->varpool_set->nodes))
- orderb = VEC_index (varpool_node_ptr, pb->varpool_set->nodes, 0)->order;
+ orderb = VEC_index (varpool_node_ptr, pb->varpool_set->nodes, 0)->symbol.order;
return orderb - ordera;
}
for (node = cgraph_nodes; node; node = node->next)
{
- if (node->local.lto_file_data)
+ if (node->symbol.lto_file_data)
{
lto_materialize_function (node);
lto_stats.num_input_cgraph_nodes++;
basic_block bb;
gimple_stmt_iterator gsi;
- decl = node->decl;
+ decl = node->symbol.decl;
if (node->analyzed)
{
func = DECL_STRUCT_FUNCTION (decl);
Check to see if it's of a candidate type and record it. */
for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
{
- tree var_decl = vnode->decl;
+ tree var_decl = vnode->symbol.decl;
if (!var_decl || TREE_CODE (var_decl) != VAR_DECL)
continue;
tree temp_fn;
temp_fn = current_function_decl;
- current_function_decl = node->decl;
- push_cfun (DECL_STRUCT_FUNCTION (node->decl));
+ current_function_decl = node->symbol.decl;
+ push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
bitmap_obstack_initialize (NULL);
gimple_register_cfg_hooks ();
tree temp_fn;
temp_fn = current_function_decl;
- current_function_decl = node->decl;
- push_cfun (DECL_STRUCT_FUNCTION (node->decl));
+ current_function_decl = node->symbol.decl;
+ push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
bitmap_obstack_initialize (NULL);
gimple_register_cfg_hooks ();
record_all_accesses_in_func ();
n = cgraph_nodes;
while (n)
{
- if (DECL_STRUCT_FUNCTION (n->decl))
+ if (DECL_STRUCT_FUNCTION (n->symbol.decl))
{
node = n;
break;
if (!node)
return;
- push_cfun (DECL_STRUCT_FUNCTION (node->decl));
- current_function_decl = node->decl;
+ push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
+ current_function_decl = node->symbol.decl;
dump_pass_list (all_lowering_passes, 1);
dump_pass_list (all_small_ipa_passes, 1);
{
struct cgraph_node *node;
for (node = cgraph_nodes; node; node = node->next)
- if (node->analyzed && gimple_has_body_p (node->decl)
- && (!node->clone_of || node->decl != node->clone_of->decl))
+ if (node->analyzed && gimple_has_body_p (node->symbol.decl)
+ && (!node->clone_of || node->symbol.decl != node->clone_of->symbol.decl))
{
- push_cfun (DECL_STRUCT_FUNCTION (node->decl));
- current_function_decl = node->decl;
+ push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
+ current_function_decl = node->symbol.decl;
callback (data);
if (!flag_wpa)
{
node->process = 0;
if (cgraph_function_with_gimple_body_p (node))
{
- push_cfun (DECL_STRUCT_FUNCTION (node->decl));
- current_function_decl = node->decl;
+ push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
+ current_function_decl = node->symbol.decl;
callback (data);
free_dominance_info (CDI_DOMINATORS);
free_dominance_info (CDI_POST_DOMINATORS);
ordering then matches the one IPA-passes get in their stmt_fixup
hooks. */
- push_cfun (DECL_STRUCT_FUNCTION (node->decl));
+ push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
renumber_gimple_stmt_uids ();
pop_cfun ();
}
For functions newly born at WPA stage we need to initialize
the uids here. */
if (node->analyzed
- && gimple_has_body_p (node->decl))
+ && gimple_has_body_p (node->symbol.decl))
{
- push_cfun (DECL_STRUCT_FUNCTION (node->decl));
+ push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
renumber_gimple_stmt_uids ();
pop_cfun ();
}
e;
e = e->next_caller)
{
- if (e->caller->decl == current_function_decl)
+ if (e->caller->symbol.decl == current_function_decl)
continue;
if (!cgraph_function_with_gimple_body_p (e->caller))
continue;
- if (TREE_ASM_WRITTEN (e->caller->decl))
+ if (TREE_ASM_WRITTEN (e->caller->symbol.decl))
continue;
if (!e->caller->process && !e->caller->global.inlined_to)
break;
if (traverse_aliases && (*node)->alias)
*node = cgraph_get_node ((*node)->thunk.alias);
- d = (struct tm_ipa_cg_data *) (*node)->aux;
+ d = (struct tm_ipa_cg_data *) (*node)->symbol.aux;
if (d == NULL)
{
d = (struct tm_ipa_cg_data *)
obstack_alloc (&tm_obstack.obstack, sizeof (*d));
- (*node)->aux = (void *) d;
+ (*node)->symbol.aux = (void *) d;
memset (d, 0, sizeof (*d));
}
ipa_tm_scan_calls_clone (struct cgraph_node *node,
cgraph_node_queue *callees_p)
{
- struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
+ struct function *fn = DECL_STRUCT_FUNCTION (node->symbol.decl);
basic_block bb;
FOR_EACH_BB_FN (bb, fn)
continue;
/* Even if we think we can go irrevocable, believe the user
above all. */
- if (is_tm_safe_or_pure (e->caller->decl))
+ if (is_tm_safe_or_pure (e->caller->symbol.decl))
continue;
caller = e->caller;
bool ret = false;
/* Builtin operators (operator new, and such). */
- if (DECL_STRUCT_FUNCTION (node->decl) == NULL
- || DECL_STRUCT_FUNCTION (node->decl)->cfg == NULL)
+ if (DECL_STRUCT_FUNCTION (node->symbol.decl) == NULL
+ || DECL_STRUCT_FUNCTION (node->symbol.decl)->cfg == NULL)
return false;
- current_function_decl = node->decl;
- push_cfun (DECL_STRUCT_FUNCTION (node->decl));
+ current_function_decl = node->symbol.decl;
+ push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
calculate_dominance_info (CDI_DOMINATORS);
d = get_cg_data (&node, true);
unsigned flags;
d = get_cg_data (&node, true);
- decl = node->decl;
+ decl = node->symbol.decl;
flags = flags_from_decl_or_type (decl);
/* Handle some TM builtins. Ordinarily these aren't actually generated
struct cgraph_edge *e;
for (e = node->callees; e ; e = e->next_callee)
- if (!is_tm_callable (e->callee->decl)
+ if (!is_tm_callable (e->callee->symbol.decl)
&& e->callee->local.tm_may_enter_irr)
error_at (gimple_location (e->call_stmt),
"unsafe function call %qD within "
- "%<transaction_safe%> function", e->callee->decl);
+ "%<transaction_safe%> function", e->callee->symbol.decl);
}
/* Diagnose call from atomic transactions to unmarked functions
if (!node->same_body_alias)
return false;
- old_decl = node->decl;
+ old_decl = node->symbol.decl;
tm_name = tm_mangle (DECL_ASSEMBLER_NAME (old_decl));
new_decl = build_decl (DECL_SOURCE_LOCATION (old_decl),
TREE_CODE (old_decl), tm_name,
new_node = cgraph_same_body_alias (NULL, new_decl, info->new_decl);
new_node->tm_clone = true;
- new_node->local.externally_visible = info->old_node->local.externally_visible;
+ new_node->symbol.externally_visible = info->old_node->symbol.externally_visible;
/* ?? Do not traverse aliases here. */
get_cg_data (&node, false)->clone = new_node;
tree new_decl, old_decl, tm_name;
struct cgraph_node *new_node;
- old_decl = old_node->decl;
+ old_decl = old_node->symbol.decl;
new_decl = copy_node (old_decl);
/* DECL_ASSEMBLER_NAME needs to be set before we call
DECL_COMDAT_GROUP (new_decl) = tm_mangle (DECL_COMDAT_GROUP (old_decl));
new_node = cgraph_copy_node_for_versioning (old_node, new_decl, NULL, NULL);
- new_node->local.externally_visible = old_node->local.externally_visible;
+ new_node->symbol.externally_visible = old_node->symbol.externally_visible;
new_node->lowered = true;
new_node->tm_clone = 1;
get_cg_data (&old_node, true)->clone = new_node;
cgraph_get_create_node
(builtin_decl_explicit (BUILT_IN_TM_IRREVOCABLE)),
g, 0,
- compute_call_stmt_bb_frequency (node->decl,
+ compute_call_stmt_bb_frequency (node->symbol.decl,
gimple_bb (g)));
}
gsi_insert_before (gsi, g, GSI_SAME_STMT);
cgraph_create_edge (node, cgraph_get_create_node (gettm_fn), g, 0,
- compute_call_stmt_bb_frequency (node->decl,
+ compute_call_stmt_bb_frequency (node->symbol.decl,
gimple_bb(g)));
/* Cast return value from tm_gettmclone* into appropriate function
return;
}
- fndecl = new_node->decl;
+ fndecl = new_node->symbol.decl;
}
cgraph_redirect_edge_callee (e, new_node);
d = get_cg_data (&node, true);
- current_function_decl = node->decl;
- push_cfun (DECL_STRUCT_FUNCTION (node->decl));
+ current_function_decl = node->symbol.decl;
+ push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
calculate_dominance_info (CDI_DOMINATORS);
for (region = d->all_tm_regions; region; region = region->next)
if (!node->callees && !d->irrevocable_blocks_clone)
return;
- current_function_decl = d->clone->decl;
+ current_function_decl = d->clone->symbol.decl;
push_cfun (DECL_STRUCT_FUNCTION (current_function_decl));
calculate_dominance_info (CDI_DOMINATORS);
/* For all local functions marked tm_callable, queue them. */
for (node = cgraph_nodes; node; node = node->next)
- if (is_tm_callable (node->decl)
+ if (is_tm_callable (node->symbol.decl)
&& cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
{
d = get_cg_data (&node, true);
/* ... marked tm_pure, record that fact for the runtime by
indicating that the pure function is its own tm_callable.
No need to do this if the function's address can't be taken. */
- if (is_tm_pure (node->decl))
+ if (is_tm_pure (node->symbol.decl))
{
if (!node->local.local)
- record_tm_clone_pair (node->decl, node->decl);
+ record_tm_clone_pair (node->symbol.decl, node->symbol.decl);
continue;
}
- current_function_decl = node->decl;
- push_cfun (DECL_STRUCT_FUNCTION (node->decl));
+ current_function_decl = node->symbol.decl;
+ push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
calculate_dominance_info (CDI_DOMINATORS);
tm_region_init (NULL);
/* Some callees cannot be arbitrarily cloned. These will always be
irrevocable. Mark these now, so that we need not scan them. */
- if (is_tm_irrevocable (node->decl))
+ if (is_tm_irrevocable (node->symbol.decl))
ipa_tm_note_irrevocable (node, &irr_worklist);
else if (a <= AVAIL_NOT_AVAILABLE
- && !is_tm_safe_or_pure (node->decl))
+ && !is_tm_safe_or_pure (node->symbol.decl))
ipa_tm_note_irrevocable (node, &irr_worklist);
else if (a >= AVAIL_OVERWRITABLE)
{
- if (!tree_versionable_function_p (node->decl))
+ if (!tree_versionable_function_p (node->symbol.decl))
ipa_tm_note_irrevocable (node, &irr_worklist);
else if (!d->is_irrevocable)
{
for (e = node->callers; e ; e = e->next_caller)
{
caller = e->caller;
- if (!is_tm_safe_or_pure (caller->decl)
+ if (!is_tm_safe_or_pure (caller->symbol.decl)
&& !caller->local.tm_may_enter_irr)
{
d = get_cg_data (&caller, true);
}
/* Propagate back to referring aliases as well. */
- for (j = 0; ipa_ref_list_refering_iterate (&node->ref_list, j, ref); j++)
+ for (j = 0; ipa_ref_list_refering_iterate (&node->symbol.ref_list, j, ref); j++)
{
caller = ref->refering.cgraph_node;
if (ref->use == IPA_REF_ALIAS
&& cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
{
d = get_cg_data (&node, true);
- if (is_tm_safe (node->decl))
+ if (is_tm_safe (node->symbol.decl))
ipa_tm_diagnose_tm_safe (node);
else if (d->all_tm_regions)
ipa_tm_diagnose_transaction (node, d->all_tm_regions);
d = get_cg_data (&node, true);
if (a <= AVAIL_NOT_AVAILABLE)
- doit = is_tm_callable (node->decl);
- else if (a <= AVAIL_AVAILABLE && is_tm_callable (node->decl))
+ doit = is_tm_callable (node->symbol.decl);
+ else if (a <= AVAIL_AVAILABLE && is_tm_callable (node->symbol.decl))
doit = true;
else if (!d->is_irrevocable
&& d->tm_callers_normal + d->tm_callers_clone > 0)
bitmap_obstack_release (&tm_obstack);
for (node = cgraph_nodes; node; node = node->next)
- node->aux = NULL;
+ node->symbol.aux = NULL;
#ifdef ENABLE_CHECKING
verify_cgraph ();
if (!DECL_EXTERNAL (expr))
return false;
node = cgraph_function_node (cgraph_get_node (expr), NULL);
- if (node && node->in_other_partition)
+ if (node && node->symbol.in_other_partition)
return false;
return true;
}
if (!DECL_EXTERNAL (expr))
return false;
node = varpool_variable_node (varpool_get_node (expr), NULL);
- if (node && node->in_other_partition)
+ if (node && node->symbol.in_other_partition)
return false;
return true;
}
else
varpool_create_variable_alias (to,
varpool_node_for_asm
- (DECL_ASSEMBLER_NAME (alias_of))->decl);
+ (DECL_ASSEMBLER_NAME (alias_of))->symbol.decl);
return to;
}
i = emutls_index (decl);
var = VEC_index (varpool_node_ptr, control_vars, i);
- return var->decl;
+ return var->symbol.decl;
}
/* Generate a call statement to initialize CONTROL_DECL for TLS_DECL.
gimple x;
cvar = VEC_index (varpool_node_ptr, control_vars, index);
- cdecl = cvar->decl;
+ cdecl = cvar->symbol.decl;
TREE_ADDRESSABLE (cdecl) = 1;
addr = create_tmp_var (build_pointer_type (TREE_TYPE (decl)), NULL);
struct lower_emutls_data d;
bool any_edge_inserts = false;
- current_function_decl = node->decl;
- push_cfun (DECL_STRUCT_FUNCTION (node->decl));
+ current_function_decl = node->symbol.decl;
+ push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
d.cfun_node = node;
d.builtin_decl = builtin_decl_explicit (BUILT_IN_EMUTLS_GET_ADDRESS);
tree cdecl;
struct varpool_node *cvar;
- cdecl = new_emutls_decl (var->decl, var->alias_of);
+ cdecl = new_emutls_decl (var->symbol.decl, var->alias_of);
cvar = varpool_get_node (cdecl);
VEC_quick_push (varpool_node_ptr, control_vars, cvar);
/* Make sure the COMMON block control variable gets initialized.
Note that there's no point in doing this for aliases; we only
need to do this once for the main variable. */
- emutls_common_1 (var->decl, cdecl, (tree *)data);
+ emutls_common_1 (var->symbol.decl, cdecl, (tree *)data);
}
if (var->alias && !var->alias_of)
cvar->alias = true;
preventing the variable from re-appearing in the GIMPLE. We cheat
and use the control variable here (rather than a full call_expr),
which is special-cased inside the DWARF2 output routines. */
- SET_DECL_VALUE_EXPR (var->decl, cdecl);
- DECL_HAS_VALUE_EXPR_P (var->decl) = 1;
+ SET_DECL_VALUE_EXPR (var->symbol.decl, cdecl);
+ DECL_HAS_VALUE_EXPR_P (var->symbol.decl) = 1;
return false;
}
/* Examine all global variables for TLS variables. */
for (var = varpool_nodes; var ; var = var->next)
- if (DECL_THREAD_LOCAL_P (var->decl))
+ if (DECL_THREAD_LOCAL_P (var->symbol.decl))
{
- gcc_checking_assert (TREE_STATIC (var->decl)
- || DECL_EXTERNAL (var->decl));
+ gcc_checking_assert (TREE_STATIC (var->symbol.decl)
+ || DECL_EXTERNAL (var->symbol.decl));
varpool_node_set_add (tls_vars, var);
if (var->alias && var->analyzed)
varpool_node_set_add (tls_vars, varpool_variable_node (var, NULL));
doing so would introduce roundoff errors and make
verifier unhappy. */
edge->frequency
- = compute_call_stmt_bb_frequency (id->dst_node->decl,
+ = compute_call_stmt_bb_frequency (id->dst_node->symbol.decl,
copy_basic_block);
if (dump_file
&& profile_status_for_function (cfun) != PROFILE_ABSENT
other cases we hit a bug (incorrect node sharing is the
most common reason for missing edges). */
gcc_assert (dest->needed || !dest->analyzed
- || dest->address_taken
+ || dest->symbol.address_taken
|| !id->src_node->analyzed
|| !id->dst_node->analyzed);
if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
cgraph_create_edge_including_clones
(id->dst_node, dest, orig_stmt, stmt, bb->count,
- compute_call_stmt_bb_frequency (id->dst_node->decl,
+ compute_call_stmt_bb_frequency (id->dst_node->symbol.decl,
copy_basic_block),
CIF_ORIGINALLY_INDIRECT_CALL);
else
cgraph_create_edge (id->dst_node, dest, stmt,
bb->count,
compute_call_stmt_bb_frequency
- (id->dst_node->decl, copy_basic_block))->inline_failed
+ (id->dst_node->symbol.decl,
+ copy_basic_block))->inline_failed
= CIF_ORIGINALLY_INDIRECT_CALL;
if (dump_file)
{
If we cannot, then there is no hope of inlining the function. */
if (cg_edge->indirect_unknown_callee)
goto egress;
- fn = cg_edge->callee->decl;
+ fn = cg_edge->callee->symbol.decl;
gcc_checking_assert (fn);
/* If FN is a declaration of a function in a nested scope that was
}
goto egress;
}
- fn = cg_edge->callee->decl;
+ fn = cg_edge->callee->symbol.decl;
#ifdef ENABLE_CHECKING
- if (cg_edge->callee->decl != id->dst_node->decl)
+ if (cg_edge->callee->symbol.decl != id->dst_node->symbol.decl)
verify_cgraph_node (cg_edge->callee);
#endif
id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
/* Update the callers EH personality. */
- if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
- DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
- = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
+ if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->symbol.decl))
+ DECL_FUNCTION_PERSONALITY (cg_edge->caller->symbol.decl)
+ = DECL_FUNCTION_PERSONALITY (cg_edge->callee->symbol.decl);
/* Split the block holding the GIMPLE_CALL. */
e = split_block (bb, stmt);
inlined. If we don't do this now, we can lose the information about the
variables in the function when the blocks get blown away as soon as we
remove the cgraph node. */
- (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
+ (*debug_hooks->outlining_inline_function) (cg_edge->callee->symbol.decl);
/* Update callgraph if needed. */
cgraph_remove_node (cg_edge->callee);
for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
{
- for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
+ for (arg = DECL_ARGUMENTS (cgn->symbol.decl); arg; arg = DECL_CHAIN (arg))
if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
return true;
- if (check_for_nested_with_variably_modified (cgn->decl, orig_fndecl))
+ if (check_for_nested_with_variably_modified (cgn->symbol.decl,
+ orig_fndecl))
return true;
}
info->var_map = pointer_map_create ();
info->mem_refs = pointer_set_create ();
info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
- info->context = cgn->decl;
+ info->context = cgn->symbol.decl;
for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
{
gimplify_all_functions (struct cgraph_node *root)
{
struct cgraph_node *iter;
- if (!gimple_body (root->decl))
- gimplify_function_tree (root->decl);
+ if (!gimple_body (root->symbol.decl))
+ gimplify_function_tree (root->symbol.decl);
for (iter = root->nested; iter; iter = iter->next_nested)
gimplify_all_functions (iter);
}
for (node = cgraph_nodes; node; node = node->next)
{
if (!node->analyzed
- || !gimple_has_body_p (node->decl))
+ || !gimple_has_body_p (node->symbol.decl))
continue;
/* Don't profile functions produced for builtin stuff. */
- if (DECL_SOURCE_LOCATION (node->decl) == BUILTINS_LOCATION
- || DECL_STRUCT_FUNCTION (node->decl)->after_tree_profile)
+ if (DECL_SOURCE_LOCATION (node->symbol.decl) == BUILTINS_LOCATION
+ || DECL_STRUCT_FUNCTION (node->symbol.decl)->after_tree_profile)
continue;
- push_cfun (DECL_STRUCT_FUNCTION (node->decl));
- current_function_decl = node->decl;
+ push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
+ current_function_decl = node->symbol.decl;
/* Re-set global shared temporary variable for edge-counters. */
gcov_type_tmp_var = NULL_TREE;
for (node = cgraph_nodes; node; node = node->next)
{
if (!node->analyzed
- || !gimple_has_body_p (node->decl)
- || !(!node->clone_of || node->decl != node->clone_of->decl))
+ || !gimple_has_body_p (node->symbol.decl)
+ || !(!node->clone_of
+ || node->symbol.decl != node->clone_of->symbol.decl))
continue;
/* Don't profile functions produced for builtin stuff. */
- if (DECL_SOURCE_LOCATION (node->decl) == BUILTINS_LOCATION
- || DECL_STRUCT_FUNCTION (node->decl)->after_tree_profile)
+ if (DECL_SOURCE_LOCATION (node->symbol.decl) == BUILTINS_LOCATION
+ || DECL_STRUCT_FUNCTION (node->symbol.decl)->after_tree_profile)
continue;
cgraph_set_const_flag (node, false, false);
basic_block bb;
if (!node->analyzed
- || !gimple_has_body_p (node->decl)
- || !(!node->clone_of || node->decl != node->clone_of->decl))
+ || !gimple_has_body_p (node->symbol.decl)
+ || !(!node->clone_of
+ || node->symbol.decl != node->clone_of->symbol.decl))
continue;
/* Don't profile functions produced for builtin stuff. */
- if (DECL_SOURCE_LOCATION (node->decl) == BUILTINS_LOCATION
- || DECL_STRUCT_FUNCTION (node->decl)->after_tree_profile)
+ if (DECL_SOURCE_LOCATION (node->symbol.decl) == BUILTINS_LOCATION
+ || DECL_STRUCT_FUNCTION (node->symbol.decl)->after_tree_profile)
continue;
- push_cfun (DECL_STRUCT_FUNCTION (node->decl));
- current_function_decl = node->decl;
+ push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
+ current_function_decl = node->symbol.decl;
FOR_EACH_BB (bb)
{
for (cs = node->callers; cs; cs = cs->next_caller)
{
- current_function_decl = cs->caller->decl;
- push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
+ current_function_decl = cs->caller->symbol.decl;
+ push_cfun (DECL_STRUCT_FUNCTION (cs->caller->symbol.decl));
if (dump_file)
fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
for (cs = node->callers; cs; cs = cs->next_caller)
if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
- && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
+ && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->symbol.decl)))
compute_inline_parameters (cs->caller, true);
BITMAP_FREE (recomputed_callers);
{
if (dump_file)
fprintf (dump_file, "Adjusting recursive call");
- gimple_call_set_fndecl (stmt, node->decl);
+ gimple_call_set_fndecl (stmt, node->symbol.decl);
ipa_modify_call_arguments (NULL, stmt, adjustments);
}
}
new_node = cgraph_function_versioning (node, redirect_callers, NULL, NULL,
false, NULL, NULL, "isra");
- current_function_decl = new_node->decl;
- push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
+ current_function_decl = new_node->symbol.decl;
+ push_cfun (DECL_STRUCT_FUNCTION (new_node->symbol.decl));
ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
cfg_changed = ipa_sra_modify_function_body (adjustments);
sra_ipa_reset_debug_stmts (adjustments);
- convert_callers (new_node, node->decl, adjustments);
+ convert_callers (new_node, node->symbol.decl, adjustments);
cgraph_make_node_local (new_node);
return cfg_changed;
}
return false;
}
- if (!tree_versionable_function_p (node->decl))
+ if (!tree_versionable_function_p (node->symbol.decl))
{
if (dump_file)
fprintf (dump_file, "Function is not versionable.\n");
return false;
}
- if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
+ if ((DECL_COMDAT (node->symbol.decl) || DECL_EXTERNAL (node->symbol.decl))
&& inline_summary(node)->size >= MAX_INLINE_INSNS_AUTO)
{
if (dump_file)
return false;
}
- if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
+ if (TYPE_ATTRIBUTES (TREE_TYPE (node->symbol.decl)))
return false;
return true;
if (node && node->alias)
{
node = varpool_variable_node (node, NULL);
- t = node->decl;
+ t = node->symbol.decl;
}
}
associate_varinfo_to_alias (struct cgraph_node *node, void *data)
{
if (node->alias || node->thunk.thunk_p)
- insert_vi_for_tree (node->decl, (varinfo_t)data);
+ insert_vi_for_tree (node->symbol.decl, (varinfo_t)data);
return false;
}
gcc_assert (!node->clone_of);
- vi = create_function_info_for (node->decl,
- alias_get_name (node->decl));
+ vi = create_function_info_for (node->symbol.decl,
+ alias_get_name (node->symbol.decl));
cgraph_for_node_and_aliases (node, associate_varinfo_to_alias, vi, true);
}
if (var->alias)
continue;
- get_vi_for_tree (var->decl);
+ get_vi_for_tree (var->symbol.decl);
}
if (dump_file)
{
fprintf (dump_file,
"Generating constraints for %s", cgraph_node_name (node));
- if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
+ if (DECL_ASSEMBLER_NAME_SET_P (node->symbol.decl))
fprintf (dump_file, " (%s)",
- IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
+ IDENTIFIER_POINTER
+ (DECL_ASSEMBLER_NAME (node->symbol.decl)));
fprintf (dump_file, "\n");
}
- func = DECL_STRUCT_FUNCTION (node->decl);
+ func = DECL_STRUCT_FUNCTION (node->symbol.decl);
old_func_decl = current_function_decl;
push_cfun (func);
- current_function_decl = node->decl;
+ current_function_decl = node->symbol.decl;
/* For externally visible or attribute used annotated functions use
local constraints for their arguments.
For local functions we see all callers and thus do not need initial
constraints for parameters. */
- if (node->reachable_from_other_partition
- || node->local.externally_visible
+ if (node->symbol.used_from_other_partition
+ || node->symbol.externally_visible
|| node->needed)
{
intra_create_variable_infos ();
/* We also need to make function return values escape. Nothing
escapes by returning from main though. */
- if (!MAIN_NAME_P (DECL_NAME (node->decl)))
+ if (!MAIN_NAME_P (DECL_NAME (node->symbol.decl)))
{
varinfo_t fi, rvi;
- fi = lookup_vi_for_tree (node->decl);
+ fi = lookup_vi_for_tree (node->symbol.decl);
rvi = first_vi_for_offset (fi, fi_result);
if (rvi && rvi->offset == fi_result)
{
if (!cgraph_function_with_gimple_body_p (node))
continue;
- fn = DECL_STRUCT_FUNCTION (node->decl);
+ fn = DECL_STRUCT_FUNCTION (node->symbol.decl);
/* Compute the points-to sets for pointer SSA_NAMEs. */
FOR_EACH_VEC_ELT (tree, fn->gimple_df->ssa_names, i, ptr)
}
/* Compute the call-use and call-clobber sets for all direct calls. */
- fi = lookup_vi_for_tree (node->decl);
+ fi = lookup_vi_for_tree (node->symbol.decl);
gcc_assert (fi->is_fn_info);
find_what_var_points_to (first_vi_for_offset (fi, fi_clobbers),
&clobbers);
vnode;
vnode = vnode->next_needed)
{
- tree vectype, decl = vnode->decl;
+ tree vectype, decl = vnode->symbol.decl;
tree t;
unsigned int alignment;
unsigned ix;
tree t;
- find_decls_types (n->decl, fld);
+ find_decls_types (n->symbol.decl, fld);
- if (!gimple_has_body_p (n->decl))
+ if (!gimple_has_body_p (n->symbol.decl))
return;
gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
- fn = DECL_STRUCT_FUNCTION (n->decl);
+ fn = DECL_STRUCT_FUNCTION (n->symbol.decl);
/* Traverse locals. */
FOR_EACH_LOCAL_DECL (fn, ix, t)
static void
find_decls_types_in_var (struct varpool_node *v, struct free_lang_data_d *fld)
{
- find_decls_types (v->decl, fld);
+ find_decls_types (v->symbol.decl, fld);
}
/* If T needs an assembler name, have one created for it. */
for (n = cgraph_nodes; n; n = n->next)
{
- if (DECL_STRUCT_FUNCTION (n->decl))
+ if (DECL_STRUCT_FUNCTION (n->symbol.decl))
VEC_replace (cgraph_node_ptr, cgraph_node_map,
- DECL_STRUCT_FUNCTION (n->decl)->funcdef_no, n);
+ DECL_STRUCT_FUNCTION (n->symbol.decl)->funcdef_no, n);
}
}
check_ic_target (gimple call_stmt, struct cgraph_node *target)
{
location_t locus;
- if (gimple_check_call_matching_types (call_stmt, target->decl))
+ if (gimple_check_call_matching_types (call_stmt, target->symbol.decl))
return true;
locus = gimple_location (call_stmt);
SSA_NAME_DEF_STMT (tmp0) = load_stmt;
gsi_insert_before (&gsi, load_stmt, GSI_SAME_STMT);
- tmp = fold_convert (optype, build_addr (direct_call->decl,
+ tmp = fold_convert (optype, build_addr (direct_call->symbol.decl,
current_function_decl));
load_stmt = gimple_build_assign (tmp1, tmp);
SSA_NAME_DEF_STMT (tmp1) = load_stmt;
gimple_set_vuse (icall_stmt, NULL_TREE);
update_stmt (icall_stmt);
dcall_stmt = gimple_copy (icall_stmt);
- gimple_call_set_fndecl (dcall_stmt, direct_call->decl);
- dflags = flags_from_decl_or_type (direct_call->decl);
+ gimple_call_set_fndecl (dcall_stmt, direct_call->symbol.decl);
+ dflags = flags_from_decl_or_type (direct_call->symbol.decl);
if ((dflags & ECF_NORETURN) != 0)
gimple_call_set_lhs (dcall_stmt, NULL_TREE);
gsi_insert_before (&gsi, dcall_stmt, GSI_SAME_STMT);
fprintf (dump_file, "Indirect call -> direct call ");
print_generic_expr (dump_file, gimple_call_fn (stmt), TDF_SLIM);
fprintf (dump_file, "=> ");
- print_generic_expr (dump_file, direct_call->decl, TDF_SLIM);
+ print_generic_expr (dump_file, direct_call->symbol.decl, TDF_SLIM);
fprintf (dump_file, " transformation on insn ");
print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
fprintf (dump_file, " to ");
if (fnode)
{
cgraph_mark_needed_node (fnode);
- return fnode->decl;
+ return fnode->symbol.decl;
}
else if (vnode)
{
varpool_mark_needed_node (vnode);
vnode->force_output = 1;
- return vnode->decl;
+ return vnode->symbol.decl;
}
else
return NULL_TREE;
&& (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
{
struct varpool_node *vnode = varpool_get_node (exp);
- if (vnode && resolution_local_p (vnode->resolution))
+ if (vnode && resolution_local_p (vnode->symbol.resolution))
resolved_locally = true;
if (vnode
- && resolution_to_local_definition_p (vnode->resolution))
+ && resolution_to_local_definition_p (vnode->symbol.resolution))
resolved_to_local_def = true;
}
else if (TREE_CODE (exp) == FUNCTION_DECL && TREE_PUBLIC (exp))
{
struct cgraph_node *node = cgraph_get_node (exp);
if (node
- && resolution_local_p (node->resolution))
+ && resolution_local_p (node->symbol.resolution))
resolved_locally = true;
if (node
- && resolution_to_local_definition_p (node->resolution))
+ && resolution_to_local_definition_p (node->symbol.resolution))
resolved_to_local_def = true;
}
{
struct varpool_node *vnode = varpool_get_node (decl);
if (vnode
- && vnode->resolution != LDPR_UNKNOWN)
- return resolution_to_local_definition_p (vnode->resolution);
+ && vnode->symbol.resolution != LDPR_UNKNOWN)
+ return resolution_to_local_definition_p (vnode->symbol.resolution);
}
else if (TREE_CODE (decl) == FUNCTION_DECL)
{
struct cgraph_node *node = cgraph_get_node (decl);
if (node
- && node->resolution != LDPR_UNKNOWN)
- return resolution_to_local_definition_p (node->resolution);
+ && node->symbol.resolution != LDPR_UNKNOWN)
+ return resolution_to_local_definition_p (node->symbol.resolution);
}
/* Otherwise we have to assume the worst for DECL_WEAK (hidden weaks
binds locally but still can be overwritten).
explicitly marked by frontend via VARPOOL_FINALIZE_DECL function. */
/* Hash table used to convert declarations into nodes. */
-static GTY((param_is (struct varpool_node))) htab_t varpool_hash;
+static GTY((param_is (union symtab_node_def))) htab_t varpool_hash;
/* The linked list of cgraph varpool nodes.
Linked via node->next pointer. */
-struct varpool_node *varpool_nodes;
+symtab_node x_varpool_nodes;
/* Queue of cgraph nodes scheduled to be lowered and output.
The queue is maintained via mark_needed_node, linked via node->next_needed
FIRST_UNANALYZED_NODE points to first node in queue that was not analyzed
yet and is moved via VARPOOL_ANALYZE_PENDING_DECLS. */
-struct varpool_node *varpool_nodes_queue;
-static GTY(()) struct varpool_node *varpool_last_needed_node;
-static GTY(()) struct varpool_node *varpool_first_unanalyzed_node;
+symtab_node x_varpool_nodes_queue;
+static GTY(()) symtab_node x_varpool_last_needed_node;
+#define varpool_last_needed_node ((struct varpool_node *)x_varpool_last_needed_node)
+static GTY(()) symtab_node x_varpool_first_unanalyzed_node;
+#define varpool_first_unanalyzed_node ((struct varpool_node *)x_varpool_first_unanalyzed_node)
/* Lists all assembled variables to be sent to debugger output later on. */
static GTY(()) struct varpool_node *varpool_assembled_nodes_queue;
const char *
varpool_node_name (struct varpool_node *node)
{
- return lang_hooks.decl_printable_name (node->decl, 2);
+ return lang_hooks.decl_printable_name (node->symbol.decl, 2);
}
/* Returns a hash code for P. */
hash_varpool_node (const void *p)
{
const struct varpool_node *n = (const struct varpool_node *) p;
- return (hashval_t) DECL_UID (n->decl);
+ return (hashval_t) DECL_UID (n->symbol.decl);
}
/* Returns nonzero if P1 and P2 are equal. */
(const struct varpool_node *) p1;
const struct varpool_node *n2 =
(const struct varpool_node *) p2;
- return DECL_UID (n1->decl) == DECL_UID (n2->decl);
+ return DECL_UID (n1->symbol.decl) == DECL_UID (n2->symbol.decl);
}
/* Return varpool node assigned to DECL without creating new one. */
if (!varpool_hash)
return NULL;
- key.decl = CONST_CAST2 (tree, const_tree, decl);
+ key.symbol.decl = CONST_CAST2 (tree, const_tree, decl);
slot = (struct varpool_node **)
htab_find_slot (varpool_hash, &key, NO_INSERT);
if (!slot)
if (!varpool_hash)
varpool_hash = htab_create_ggc (10, hash_varpool_node,
eq_varpool_node, NULL);
- key.decl = decl;
+ key.symbol.decl = decl;
slot = (struct varpool_node **)
htab_find_slot (varpool_hash, &key, INSERT);
if (*slot)
return *slot;
node = ggc_alloc_cleared_varpool_node ();
node->symbol.type = SYMTAB_VARIABLE;
- node->decl = decl;
- node->order = cgraph_order++;
+ node->symbol.decl = decl;
+ node->symbol.order = cgraph_order++;
node->next = varpool_nodes;
- ipa_empty_ref_list (&node->ref_list);
+ ipa_empty_ref_list (&node->symbol.ref_list);
if (varpool_nodes)
- varpool_nodes->prev = node;
- varpool_nodes = node;
+ varpool (x_varpool_nodes)->prev = node;
+ x_varpool_nodes = (symtab_node)node;
*slot = node;
return node;
}
else
{
gcc_assert (varpool_nodes == node);
- varpool_nodes = node->next;
+ x_varpool_nodes = (symtab_node)node->next;
}
if (varpool_first_unanalyzed_node == node)
- varpool_first_unanalyzed_node = node->next_needed;
+ x_varpool_first_unanalyzed_node = (symtab_node)node->next_needed;
if (node->next_needed)
node->next_needed->prev_needed = node->prev_needed;
else if (node->prev_needed)
{
gcc_assert (varpool_last_needed_node);
- varpool_last_needed_node = node->prev_needed;
+ x_varpool_last_needed_node = (symtab_node)node->prev_needed;
}
if (node->prev_needed)
node->prev_needed->next_needed = node->next_needed;
else if (node->next_needed)
{
gcc_assert (varpool_nodes_queue == node);
- varpool_nodes_queue = node->next_needed;
+ x_varpool_nodes_queue = (symtab_node)node->next_needed;
}
- if (node->same_comdat_group)
+ if (node->symbol.same_comdat_group)
{
- struct varpool_node *prev;
- for (prev = node->same_comdat_group;
- prev->same_comdat_group != node;
- prev = prev->same_comdat_group)
+ symtab_node prev;
+ for (prev = node->symbol.same_comdat_group;
+ prev->symbol.same_comdat_group != (symtab_node)node;
+ prev = prev->symbol.same_comdat_group)
;
- if (node->same_comdat_group == prev)
- prev->same_comdat_group = NULL;
+ if (node->symbol.same_comdat_group == prev)
+ prev->symbol.same_comdat_group = NULL;
else
- prev->same_comdat_group = node->same_comdat_group;
- node->same_comdat_group = NULL;
+ prev->symbol.same_comdat_group = (symtab_node)node->symbol.same_comdat_group;
+ node->symbol.same_comdat_group = NULL;
}
- ipa_remove_all_references (&node->ref_list);
- ipa_remove_all_refering (&node->ref_list);
+ ipa_remove_all_references (&node->symbol.ref_list);
+ ipa_remove_all_refering (&node->symbol.ref_list);
ggc_free (node);
}
cgraph_function_flags_ready
? cgraph_availability_names[cgraph_variable_initializer_availability (node)]
: "not-ready");
- if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
- fprintf (f, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
- if (DECL_INITIAL (node->decl))
+ if (DECL_ASSEMBLER_NAME_SET_P (node->symbol.decl))
+ fprintf (f, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->symbol.decl)));
+ if (DECL_INITIAL (node->symbol.decl))
fprintf (f, " initialized");
- if (TREE_ASM_WRITTEN (node->decl))
+ if (TREE_ASM_WRITTEN (node->symbol.decl))
fprintf (f, " (asm written)");
if (node->needed)
fprintf (f, " needed");
fprintf (f, " finalized");
if (node->output)
fprintf (f, " output");
- if (node->externally_visible)
+ if (node->symbol.externally_visible)
fprintf (f, " externally_visible");
- if (node->resolution != LDPR_UNKNOWN)
+ if (node->symbol.resolution != LDPR_UNKNOWN)
fprintf (f, " %s",
- ld_plugin_symbol_resolution_names[(int)node->resolution]);
- if (node->in_other_partition)
+ ld_plugin_symbol_resolution_names[(int)node->symbol.resolution]);
+ if (node->symbol.in_other_partition)
fprintf (f, " in_other_partition");
- else if (node->used_from_other_partition)
+ else if (node->symbol.used_from_other_partition)
fprintf (f, " used_from_other_partition");
fprintf (f, "\n");
fprintf (f, " References: ");
- ipa_dump_references (f, &node->ref_list);
+ ipa_dump_references (f, &node->symbol.ref_list);
fprintf (f, " Refering this var: ");
- ipa_dump_refering (f, &node->ref_list);
+ ipa_dump_refering (f, &node->symbol.ref_list);
}
/* Dump the variable pool. */
struct varpool_node *node;
for (node = varpool_nodes; node ; node = node->next)
- if (decl_assembler_name_equal (node->decl, asmname))
+ if (decl_assembler_name_equal (node->symbol.decl, asmname))
return node;
return NULL;
varpool_last_needed_node->next_needed = node;
node->prev_needed = varpool_last_needed_node;
}
- varpool_last_needed_node = node;
+ x_varpool_last_needed_node = (symtab_node)node;
node->next_needed = NULL;
if (!varpool_nodes_queue)
- varpool_nodes_queue = node;
+ x_varpool_nodes_queue = (symtab_node)node;
if (!varpool_first_unanalyzed_node)
- varpool_first_unanalyzed_node = node;
- notice_global_symbol (node->decl);
+ x_varpool_first_unanalyzed_node = (symtab_node)node;
+ notice_global_symbol (node->symbol.decl);
}
/* Notify finalize_compilation_unit that given node is reachable
varpool_mark_needed_node (struct varpool_node *node)
{
if (!node->needed && node->finalized
- && !TREE_ASM_WRITTEN (node->decl))
+ && !TREE_ASM_WRITTEN (node->symbol.decl))
varpool_enqueue_needed_node (node);
node->needed = 1;
}
void
varpool_reset_queue (void)
{
- varpool_last_needed_node = NULL;
- varpool_nodes_queue = NULL;
- varpool_first_unanalyzed_node = NULL;
+ x_varpool_last_needed_node = NULL;
+ x_varpool_nodes_queue = NULL;
+ x_varpool_first_unanalyzed_node = NULL;
}
/* Determine if variable DECL is needed. That is, visible to something
if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
/* Traditionally we do not eliminate static variables when not
optimizing and when not doing toplevel reoder. */
- || (!flag_toplevel_reorder && !DECL_COMDAT (node->decl)
- && !DECL_ARTIFICIAL (node->decl)))
+ || (!flag_toplevel_reorder && !DECL_COMDAT (node->symbol.decl)
+ && !DECL_ARTIFICIAL (node->symbol.decl)))
node->force_output = true;
if (decide_is_variable_needed (node, decl))
varpool_finalize_decl (decl);
node = varpool_node (decl);
if (varpool_externally_visible_p (node, false))
- node->externally_visible = true;
+ node->symbol.externally_visible = true;
}
/* Return variable availability. See cgraph.h for description of individual
gcc_assert (cgraph_function_flags_ready);
if (!node->finalized)
return AVAIL_NOT_AVAILABLE;
- if (!TREE_PUBLIC (node->decl))
+ if (!TREE_PUBLIC (node->symbol.decl))
return AVAIL_AVAILABLE;
/* If the variable can be overwritten, return OVERWRITABLE. Takes
care of at least two notable extensions - the COMDAT variables
used to share template instantiations in C++. */
- if (!decl_replaceable_p (node->decl))
+ if (!decl_replaceable_p (node->symbol.decl))
return AVAIL_OVERWRITABLE;
return AVAIL_AVAILABLE;
}
while (varpool_first_unanalyzed_node)
{
struct varpool_node *node = varpool_first_unanalyzed_node, *next;
- tree decl = node->decl;
+ tree decl = node->symbol.decl;
bool analyzed = node->analyzed;
varpool_first_unanalyzed_node->analyzed = true;
- varpool_first_unanalyzed_node = varpool_first_unanalyzed_node->next_needed;
+ x_varpool_first_unanalyzed_node = (symtab_node)varpool_first_unanalyzed_node->next_needed;
/* When reading back varpool at LTO time, we re-construct the queue in order
to have "needed" list right by inserting all needed nodes into varpool.
n = n->analyzed ? varpool_alias_aliased_node (n) : NULL)
if (n == node)
{
- error ("variable %q+D part of alias cycle", node->decl);
+ error ("variable %q+D part of alias cycle", node->symbol.decl);
node->alias = false;
continue;
}
- if (!VEC_length (ipa_ref_t, node->ref_list.references))
+ if (!VEC_length (ipa_ref_t, node->symbol.ref_list.references))
ipa_record_reference (NULL, node, NULL, tgt, IPA_REF_ALIAS, NULL);
/* C++ FE sometimes change linkage flags after producing same body aliases. */
if (node->extra_name_alias)
{
- DECL_WEAK (node->decl) = DECL_WEAK (node->alias_of);
- TREE_PUBLIC (node->decl) = TREE_PUBLIC (node->alias_of);
- DECL_EXTERNAL (node->decl) = DECL_EXTERNAL (node->alias_of);
- DECL_VISIBILITY (node->decl) = DECL_VISIBILITY (node->alias_of);
- if (TREE_PUBLIC (node->decl))
+ DECL_WEAK (node->symbol.decl) = DECL_WEAK (node->alias_of);
+ TREE_PUBLIC (node->symbol.decl) = TREE_PUBLIC (node->alias_of);
+ DECL_EXTERNAL (node->symbol.decl) = DECL_EXTERNAL (node->alias_of);
+ DECL_VISIBILITY (node->symbol.decl) = DECL_VISIBILITY (node->alias_of);
+ if (TREE_PUBLIC (node->symbol.decl))
{
- DECL_COMDAT (node->decl) = DECL_COMDAT (node->alias_of);
- DECL_COMDAT_GROUP (node->decl) = DECL_COMDAT_GROUP (node->alias_of);
- if (DECL_ONE_ONLY (node->alias_of) && !node->same_comdat_group)
+ DECL_COMDAT (node->symbol.decl) = DECL_COMDAT (node->alias_of);
+ DECL_COMDAT_GROUP (node->symbol.decl) = DECL_COMDAT_GROUP (node->alias_of);
+ if (DECL_ONE_ONLY (node->alias_of)
+ && !node->symbol.same_comdat_group)
{
- node->same_comdat_group = tgt;
- if (!tgt->same_comdat_group)
- tgt->same_comdat_group = node;
+ node->symbol.same_comdat_group = (symtab_node)tgt;
+ if (!tgt->symbol.same_comdat_group)
+ tgt->symbol.same_comdat_group = (symtab_node)node;
else
{
- struct varpool_node *n;
- for (n = tgt->same_comdat_group;
- n->same_comdat_group != tgt;
- n = n->same_comdat_group)
+ symtab_node n;
+ for (n = tgt->symbol.same_comdat_group;
+ n->symbol.same_comdat_group != (symtab_node)tgt;
+ n = n->symbol.same_comdat_group)
;
- n->same_comdat_group = node;
+ n->symbol.same_comdat_group = (symtab_node)node;
}
}
}
}
else if (DECL_INITIAL (decl))
record_references_in_initializer (decl, analyzed);
- if (node->same_comdat_group)
+ if (node->symbol.same_comdat_group)
{
- for (next = node->same_comdat_group;
+ for (next = varpool (node->symbol.same_comdat_group);
next != node;
- next = next->same_comdat_group)
+ next = varpool (next->symbol.same_comdat_group))
varpool_mark_needed_node (next);
}
changed = true;
{
int i;
struct ipa_ref *ref;
- for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
+ for (i = 0; ipa_ref_list_refering_iterate (&node->symbol.ref_list, i, ref); i++)
if (ref->use == IPA_REF_ALIAS)
{
struct varpool_node *alias = ipa_ref_refering_varpool_node (ref);
- assemble_alias (alias->decl,
+ assemble_alias (alias->symbol.decl,
DECL_ASSEMBLER_NAME (alias->alias_of));
assemble_aliases (alias);
}
bool
varpool_assemble_decl (struct varpool_node *node)
{
- tree decl = node->decl;
+ tree decl = node->symbol.decl;
if (!TREE_ASM_WRITTEN (decl)
&& !node->alias
- && !node->in_other_partition
+ && !node->symbol.in_other_partition
&& !DECL_EXTERNAL (decl)
&& (TREE_CODE (decl) != VAR_DECL || !DECL_HAS_VALUE_EXPR_P (decl)))
{
&& (!varpool_can_remove_if_no_refs (node)
/* We just expanded all function bodies. See if any of
them needed the variable. */
- || DECL_RTL_SET_P (node->decl)))
+ || DECL_RTL_SET_P (node->symbol.decl)))
varpool_mark_needed_node (node);
node = next;
void
varpool_finalize_named_section_flags (struct varpool_node *node)
{
- if (!TREE_ASM_WRITTEN (node->decl)
+ if (!TREE_ASM_WRITTEN (node->symbol.decl)
&& !node->alias
- && !node->in_other_partition
- && !DECL_EXTERNAL (node->decl)
- && TREE_CODE (node->decl) == VAR_DECL
- && !DECL_HAS_VALUE_EXPR_P (node->decl)
- && DECL_SECTION_NAME (node->decl))
- get_variable_section (node->decl, false);
+ && !node->symbol.in_other_partition
+ && !DECL_EXTERNAL (node->symbol.decl)
+ && TREE_CODE (node->symbol.decl) == VAR_DECL
+ && !DECL_HAS_VALUE_EXPR_P (node->symbol.decl)
+ && DECL_SECTION_NAME (node->symbol.decl))
+ get_variable_section (node->symbol.decl, false);
}
/* Output all variables enqueued to be assembled. */
{
struct varpool_node *node = varpool_nodes_queue;
- varpool_nodes_queue = varpool_nodes_queue->next_needed;
+ x_varpool_nodes_queue = (symtab_node)(varpool_nodes_queue->next_needed);
if (varpool_assemble_decl (node))
changed = true;
else
}
/* varpool_nodes_queue is now empty, clear the pointer to the last element
in the queue. */
- varpool_last_needed_node = NULL;
+ x_varpool_last_needed_node = NULL;
timevar_pop (TV_VAROUT);
return changed;
}
while (varpool_nodes_queue)
{
struct varpool_node *node = varpool_nodes_queue;
- varpool_nodes_queue = varpool_nodes_queue->next_needed;
+ x_varpool_nodes_queue = (symtab_node)varpool_nodes_queue->next_needed;
node->next_needed = NULL;
node->prev_needed = NULL;
}
/* varpool_nodes_queue is now empty, clear the pointer to the last element
in the queue. */
- varpool_last_needed_node = NULL;
+ x_varpool_last_needed_node = NULL;
}
/* Create a new global variable of type TYPE. */
add_referenced_var (new_decl);
varpool_finalize_decl (new_decl);
- return new_node->decl;
+ return new_node->symbol.decl;
}
/* Attempt to mark ALIAS as an alias to DECL. Return TRUE if successful.
bool
varpool_used_from_object_file_p (struct varpool_node *node)
{
- if (!TREE_PUBLIC (node->decl))
+ if (!TREE_PUBLIC (node->symbol.decl))
return false;
- if (resolution_used_from_other_file_p (node->resolution))
+ if (resolution_used_from_other_file_p (node->symbol.resolution))
return true;
return false;
}
if (callback (node, data))
return true;
- for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
+ for (i = 0; ipa_ref_list_refering_iterate (&node->symbol.ref_list, i, ref); i++)
if (ref->use == IPA_REF_ALIAS)
{
struct varpool_node *alias = ipa_ref_refering_varpool_node (ref);